├── .coverage
├── .coveragerc
├── .env
│   ├── .dev-sample
│   └── .prod-sample
├── README.md
├── alembic.ini
├── alembic
│   ├── README
│   ├── env.py
│   ├── script.py.mako
│   └── versions
│       ├── 399011ac3c75_.py
│       ├── 6c61e0875de3_.py
│       └── b48ac95a682c_.py
├── celery.log
├── celerybeat-schedule
├── compose
│   ├── local
│   │   └── fastapi
│   │       ├── Dockerfile
│   │       ├── celery
│   │       │   ├── beat
│   │       │   │   └── start
│   │       │   ├── flower
│   │       │   │   └── start
│   │       │   └── worker
│   │       │       └── start
│   │       ├── entrypoint
│   │       └── start
│   └── production
│       ├── fastapi
│       │   ├── Dockerfile
│       │   ├── celery
│       │   │   ├── beat
│       │   │   │   └── start
│       │   │   ├── flower
│       │   │   │   └── start
│       │   │   └── worker
│       │   │       └── start
│       │   ├── entrypoint
│       │   └── start
│       └── nginx
│           ├── Dockerfile
│           └── nginx.conf
├── docker-compose.prod.yml
├── docker-compose.yml
├── main.py
├── project
│   ├── __init__.py
│   ├── asgi.py
│   ├── celery_utils.py
│   ├── config.py
│   ├── database.py
│   ├── logging.py
│   ├── tdd
│   │   ├── __init__.py
│   │   ├── factories.py
│   │   ├── models.py
│   │   ├── tasks.py
│   │   └── views.py
│   ├── users
│   │   ├── __init__.py
│   │   ├── factories.py
│   │   ├── models.py
│   │   ├── schemas.py
│   │   ├── tasks.py
│   │   ├── templates
│   │   │   ├── form.html
│   │   │   ├── form_socketio.html
│   │   │   └── form_ws.html
│   │   └── views.py
│   └── ws
│       ├── __init__.py
│       └── views.py
├── prometheus.yml
├── requirements.txt
├── tests
│   ├── __init__.py
│   ├── conftest.py
│   ├── tdd
│   │   ├── __init__.py
│   │   ├── test_models.py
│   │   ├── test_tasks.py
│   │   └── test_views.py
│   ├── test_celery_utils.py
│   └── users
│       ├── __init__.py
│       ├── test_tasks.py
│       └── test_views.py
└── upload
    └── .gitignore

/.coverage:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/testdrivenio/fastapi-celery-project/21a4810498d2615ee30ff79988bf91b0aaee3abd/.coverage
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
[run]
omit =
    tests/*
--------------------------------------------------------------------------------
/.env/.dev-sample:
--------------------------------------------------------------------------------
FASTAPI_CONFIG=development
DATABASE_URL=postgresql://fastapi_celery:fastapi_celery@db/fastapi_celery
CELERY_BROKER_URL=redis://redis:6379/0
CELERY_RESULT_BACKEND=redis://redis:6379/0
WS_MESSAGE_QUEUE=redis://redis:6379/0
--------------------------------------------------------------------------------
/.env/.prod-sample:
--------------------------------------------------------------------------------
FASTAPI_CONFIG=production
DATABASE_URL=postgresql://fastapi_celery:fastapi_celery@db/fastapi_celery
CELERY_BROKER_URL=amqp://admin:admin@rabbitmq:5672/
CELERY_RESULT_BACKEND=redis://redis:6379/0
WS_MESSAGE_QUEUE=redis://redis:6379/0

RABBITMQ_DEFAULT_USER=admin
RABBITMQ_DEFAULT_PASS=admin

CELERY_FLOWER_USER=admin
CELERY_FLOWER_PASSWORD=admin
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
Source code for [The Definitive Guide to Celery and FastAPI](https://testdriven.io/courses/fastapi-celery/), a course by [Michael Yin](https://github.com/michael-yin/).

If you are on Apple Silicon and run into unexpected errors, you might need to run `export DOCKER_DEFAULT_PLATFORM=linux/amd64` before running the Docker Compose commands. For more details, please check out this [GitHub issue](https://github.com/testdrivenio/fastapi-celery-project/issues/7#issuecomment-1416787688).
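
A typical local workflow, assuming the default `docker-compose.yml` in this repo (the host ports below come from that file):

```sh
# build the images and start the local stack (web, db, redis, worker, beat, flower)
docker compose up -d --build

# the FastAPI app is mapped to host port 8010, Flower to 5557
curl http://localhost:8010/

# follow the Celery worker logs
docker compose logs -f celery_worker
```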
--------------------------------------------------------------------------------
/alembic.ini:
--------------------------------------------------------------------------------
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = alembic

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires python>=3.9 or the backports.zoneinfo library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = driver://user:pass@localhost/dbname


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts.  See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
--------------------------------------------------------------------------------
/alembic/README:
--------------------------------------------------------------------------------
Generic single-database configuration.
--------------------------------------------------------------------------------
/alembic/env.py:
--------------------------------------------------------------------------------
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

from project import create_app
from project.config import settings
from project.database import Base

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
config.set_main_option("sqlalchemy.url", str(settings.DATABASE_URL))

# creating the app imports the routers, which import the models, so every
# table is registered on Base.metadata before autogenerate runs
fastapi_app = create_app()

target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
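
# Typical Alembic commands for this setup (an illustrative sketch, run inside
# the web container; `alembic upgrade head` is also run by the /start scripts):
#
#   alembic revision --autogenerate    # diff Base.metadata against the database
#   alembic upgrade head               # apply all pending migrations
#   alembic downgrade -1               # roll back the most recent revision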
--------------------------------------------------------------------------------
/alembic/script.py.mako:
--------------------------------------------------------------------------------
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
--------------------------------------------------------------------------------
/alembic/versions/399011ac3c75_.py:
--------------------------------------------------------------------------------
"""empty message

Revision ID: 399011ac3c75
Revises:
Create Date: 2024-01-04 15:58:51.354365

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '399011ac3c75'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
--------------------------------------------------------------------------------
/alembic/versions/6c61e0875de3_.py:
--------------------------------------------------------------------------------
"""empty message

Revision ID: 6c61e0875de3
Revises: b48ac95a682c
Create Date: 2024-01-27 08:34:18.170471

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '6c61e0875de3'
down_revision: Union[str, None] = 'b48ac95a682c'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('members',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('username', sa.String(length=128), nullable=False),
        sa.Column('email', sa.String(length=128), nullable=False),
        sa.Column('avatar', sa.String(length=256), nullable=False),
        sa.Column('avatar_thumbnail', sa.String(length=256), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'),
        sa.UniqueConstraint('username')
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('members')
    # ### end Alembic commands ###
--------------------------------------------------------------------------------
/alembic/versions/b48ac95a682c_.py:
--------------------------------------------------------------------------------
"""empty message

Revision ID: b48ac95a682c
Revises: 399011ac3c75
Create Date: 2024-01-04 16:01:47.893843

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = 'b48ac95a682c'
down_revision: Union[str, None] = '399011ac3c75'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('users',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('username', sa.String(length=128), nullable=False),
        sa.Column('email', sa.String(length=128), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'),
        sa.UniqueConstraint('username')
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('users')
    # ### end Alembic commands ###
--------------------------------------------------------------------------------
/celery.log:
--------------------------------------------------------------------------------
[2024-01-28 04:04:44,682: INFO/MainProcess] beat: Starting...
[2024-01-28 04:04:44,923: WARNING/MainProcess] /usr/local/lib/python3.11/site-packages/celery/app/utils.py:203: CDeprecationWarning:
    The 'CELERY_RESULT_BACKEND' setting is deprecated and scheduled for removal in
    version 6.0.0. Use the result_backend instead

  deprecated.warn(description=f'The {setting!r} setting',

[2024-01-28 04:04:44,924: WARNING/MainProcess] Please run `celery upgrade settings path/to/settings.py` to avoid these warnings and to allow a smoother upgrade to Celery 6.0.
[2024-01-28 04:04:45,998: WARNING/MainProcess] /usr/local/lib/python3.11/site-packages/celery/worker/consumer/consumer.py:507: CPendingDeprecationWarning: The broker_connection_retry configuration setting will no longer determine
whether broker connection retries are made during startup in Celery 6.0 and above.
If you wish to retain the existing behavior for retrying connections on startup,
you should set broker_connection_retry_on_startup to True.
  warnings.warn(

[2024-01-28 04:04:46,008: INFO/MainProcess] Connected to redis://redis:6379/0
[2024-01-28 04:04:46,009: WARNING/MainProcess] /usr/local/lib/python3.11/site-packages/celery/worker/consumer/consumer.py:507: CPendingDeprecationWarning: The broker_connection_retry configuration setting will no longer determine
whether broker connection retries are made during startup in Celery 6.0 and above.
If you wish to retain the existing behavior for retrying connections on startup,
you should set broker_connection_retry_on_startup to True.
  warnings.warn(

[2024-01-28 04:04:46,013: INFO/MainProcess] mingle: searching for neighbors
[2024-01-28 04:04:47,027: INFO/MainProcess] mingle: all alone
[2024-01-28 04:04:47,045: INFO/MainProcess] celery@1796c238ae8d ready.
[2024-01-28 04:04:55,323: INFO/MainProcess] Events of group {task} enabled by remote.
[2024-01-28 04:05:19,474: INFO/MainProcess] Task project.users.tasks.sample_task[72aedca8-a3d0-4eef-baa3-e5799c5d37dd] received
[2024-01-28 04:05:25,754: INFO/ForkPoolWorker-15] Task project.users.tasks.sample_task[72aedca8-a3d0-4eef-baa3-e5799c5d37dd] succeeded in 6.2770764349843375s: None
[2024-01-28 04:05:25,757: INFO/ForkPoolWorker-15] Connecting to redis
[2024-01-28 04:05:25,759: INFO/ForkPoolWorker-15] Redis connection made
[2024-01-28 04:05:25,760: INFO/ForkPoolWorker-15] Connecting to redis
[2024-01-28 04:05:25,761: INFO/ForkPoolWorker-15] Redis connection made
[2024-01-28 04:05:25,768: INFO/ForkPoolWorker-15] Redis connection lost
[2024-01-28 04:05:25,769: INFO/ForkPoolWorker-15] Redis connection lost
--------------------------------------------------------------------------------
/celerybeat-schedule:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/testdrivenio/fastapi-celery-project/21a4810498d2615ee30ff79988bf91b0aaee3abd/celerybeat-schedule
--------------------------------------------------------------------------------
/compose/local/fastapi/Dockerfile:
--------------------------------------------------------------------------------
FROM python:3.11-slim-buster

ENV PYTHONUNBUFFERED 1
ENV PYTHONDONTWRITEBYTECODE 1

RUN apt-get update \
  # dependencies for building Python packages
  && apt-get install -y build-essential \
  # psycopg2 dependencies
  && apt-get install -y libpq-dev \
  # Additional dependencies
  && apt-get install -y telnet netcat \
  # cleaning up unused files
  && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
  && rm -rf /var/lib/apt/lists/*

# Requirements are installed here to ensure they will be cached.
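# (Copying requirements.txt by itself, before the project code, keeps this layer
# cacheable: source changes will not force `pip install` to run again.)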
COPY ./requirements.txt /requirements.txt
RUN pip install -r /requirements.txt

COPY ./compose/local/fastapi/entrypoint /entrypoint
RUN sed -i 's/\r$//g' /entrypoint
RUN chmod +x /entrypoint

COPY ./compose/local/fastapi/start /start
RUN sed -i 's/\r$//g' /start
RUN chmod +x /start

COPY ./compose/local/fastapi/celery/worker/start /start-celeryworker
RUN sed -i 's/\r$//g' /start-celeryworker
RUN chmod +x /start-celeryworker

COPY ./compose/local/fastapi/celery/beat/start /start-celerybeat
RUN sed -i 's/\r$//g' /start-celerybeat
RUN chmod +x /start-celerybeat

COPY ./compose/local/fastapi/celery/flower/start /start-flower
RUN sed -i 's/\r$//g' /start-flower
RUN chmod +x /start-flower

WORKDIR /app

ENTRYPOINT ["/entrypoint"]
--------------------------------------------------------------------------------
/compose/local/fastapi/celery/beat/start:
--------------------------------------------------------------------------------
#!/bin/bash

set -o errexit
set -o nounset

rm -f './celerybeat.pid'
celery -A main.celery beat -l info
--------------------------------------------------------------------------------
/compose/local/fastapi/celery/flower/start:
--------------------------------------------------------------------------------
#!/bin/bash

set -o errexit
set -o nounset

worker_ready() {
    celery -A main.celery inspect ping
}

until worker_ready; do
  >&2 echo 'Celery workers not available'
  sleep 1
done
>&2 echo 'Celery workers are available'

celery flower \
    --app=main.celery \
    --broker="${CELERY_BROKER_URL}"
--------------------------------------------------------------------------------
/compose/local/fastapi/celery/worker/start:
--------------------------------------------------------------------------------
#!/bin/bash

set -o errexit
set -o nounset

watchfiles \
  --filter python \
  'celery -A main.celery worker --loglevel=info -Q high_priority,default'
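
# `watchfiles` runs the quoted command and restarts it whenever a matching
# (Python) file changes, giving the local worker an auto-reload loop similar
# to uvicorn's --reload.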
--------------------------------------------------------------------------------
/compose/local/fastapi/entrypoint:
--------------------------------------------------------------------------------
#!/bin/bash

# if any command fails for any reason, the entire script fails
set -o errexit
# fail if any command in a pipeline fails
set -o pipefail
# exit if any referenced variable is not set
set -o nounset

postgres_ready() {
python << END
import sys

import psycopg2
import urllib.parse as urlparse
import os

url = urlparse.urlparse(os.environ['DATABASE_URL'])
dbname = url.path[1:]
user = url.username
password = url.password
host = url.hostname
port = url.port

try:
    psycopg2.connect(
        dbname=dbname,
        user=user,
        password=password,
        host=host,
        port=port
    )
except psycopg2.OperationalError:
    sys.exit(-1)
sys.exit(0)

END
}
until postgres_ready; do
  >&2 echo 'Waiting for PostgreSQL to become available...'
  sleep 1
done
>&2 echo 'PostgreSQL is available'

exec "$@"
--------------------------------------------------------------------------------
/compose/local/fastapi/start:
--------------------------------------------------------------------------------
#!/bin/bash

set -o errexit
set -o pipefail
set -o nounset

alembic upgrade head
uvicorn main:app --reload --reload-dir project --host 0.0.0.0
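
# `alembic upgrade head` runs on every container start, so pending migrations
# are applied before uvicorn begins serving requests.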
--------------------------------------------------------------------------------
/compose/production/fastapi/Dockerfile:
--------------------------------------------------------------------------------
FROM python:3.11-slim-buster

ENV PYTHONUNBUFFERED 1
ENV PYTHONDONTWRITEBYTECODE 1

RUN apt-get update \
  # dependencies for building Python packages
  && apt-get install -y build-essential \
  # psycopg2 dependencies
  && apt-get install -y libpq-dev \
  # Additional dependencies
  && apt-get install -y telnet netcat \
  # cleaning up unused files
  && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
  && rm -rf /var/lib/apt/lists/*

RUN addgroup --system fastapi \
    && adduser --system --ingroup fastapi fastapi

# Requirements are installed here to ensure they will be cached.
COPY ./requirements.txt /requirements.txt
RUN pip install -r /requirements.txt

COPY ./compose/production/fastapi/entrypoint /entrypoint
RUN sed -i 's/\r$//g' /entrypoint
RUN chmod +x /entrypoint
RUN chown fastapi /entrypoint

COPY ./compose/production/fastapi/start /start
RUN sed -i 's/\r$//g' /start
RUN chmod +x /start
RUN chown fastapi /start

COPY ./compose/production/fastapi/celery/worker/start /start-celeryworker
RUN sed -i 's/\r$//g' /start-celeryworker
RUN chmod +x /start-celeryworker
RUN chown fastapi /start-celeryworker

COPY ./compose/production/fastapi/celery/beat/start /start-celerybeat
RUN sed -i 's/\r$//g' /start-celerybeat
RUN chmod +x /start-celerybeat
RUN chown fastapi /start-celerybeat

COPY ./compose/production/fastapi/celery/flower/start /start-flower
RUN sed -i 's/\r$//g' /start-flower
RUN chmod +x /start-flower

RUN mkdir /app
RUN mkdir /app/upload
RUN mkdir /app/flower_db
WORKDIR /app

# copy project code
COPY . .

RUN chown -R fastapi:fastapi /app

USER fastapi

ENTRYPOINT ["/entrypoint"]
--------------------------------------------------------------------------------
/compose/production/fastapi/celery/beat/start:
--------------------------------------------------------------------------------
#!/bin/bash

set -o errexit
set -o nounset

exec celery -A project.asgi.celery beat -l info
--------------------------------------------------------------------------------
/compose/production/fastapi/celery/flower/start:
--------------------------------------------------------------------------------
#!/bin/bash

set -o errexit
set -o nounset

worker_ready() {
    celery -A project.asgi.celery inspect ping
}

until worker_ready; do
  >&2 echo 'Celery workers not available'
  sleep 1
done
>&2 echo 'Celery workers are available'

exec celery flower --db=/app/flower_db/flower.db --state_save_interval=5000 \
    --app=project.asgi.celery \
    --broker="${CELERY_BROKER_URL}" \
    --basic_auth="${CELERY_FLOWER_USER}:${CELERY_FLOWER_PASSWORD}"
--------------------------------------------------------------------------------
/compose/production/fastapi/celery/worker/start:
--------------------------------------------------------------------------------
#!/bin/bash

set -o errexit
set -o nounset

exec celery -A project.asgi.celery worker --loglevel=info
--------------------------------------------------------------------------------
/compose/production/fastapi/entrypoint:
--------------------------------------------------------------------------------
#!/bin/bash

# if any command fails for any reason, the entire script fails
set -o errexit
# fail if any command in a pipeline fails
set -o pipefail
# exit if any referenced variable is not set
set -o nounset

postgres_ready() {
python << END
import sys

import psycopg2
import urllib.parse as urlparse
import os

url = urlparse.urlparse(os.environ['DATABASE_URL'])
dbname = url.path[1:]
user = url.username
password = url.password
host = url.hostname
port = url.port

try:
    psycopg2.connect(
        dbname=dbname,
        user=user,
        password=password,
        host=host,
        port=port
    )
except psycopg2.OperationalError:
    sys.exit(-1)
sys.exit(0)

END
}
until postgres_ready; do
  >&2 echo 'Waiting for PostgreSQL to become available...'
  sleep 1
done
>&2 echo 'PostgreSQL is available'

rabbitmq_ready() {
    echo "Waiting for rabbitmq..."

    while ! nc -z rabbitmq 5672; do
        sleep 1
    done

    echo "rabbitmq started"
}

rabbitmq_ready

exec "$@"
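
# `nc -z rabbitmq 5672` only checks that the broker's TCP port is accepting
# connections. `exec "$@"` then replaces this shell with the container command
# (/start, /start-celeryworker, etc.), so the service receives signals directly.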
--------------------------------------------------------------------------------
/compose/production/fastapi/start:
--------------------------------------------------------------------------------
#!/bin/bash

set -o errexit
set -o pipefail
set -o nounset

alembic upgrade head

# Because python-socketio is in use, we run a single worker process; without it,
# the number of worker processes could be increased
# https://python-socketio.readthedocs.io/en/latest/server.html#eventlet-with-gunicorn
gunicorn project.asgi:app -w 1 -k uvicorn.workers.UvicornWorker --bind 0.0.0.0:8000 --chdir=/app
--------------------------------------------------------------------------------
/compose/production/nginx/Dockerfile:
--------------------------------------------------------------------------------
FROM nginx:1.25.3-alpine

RUN rm /etc/nginx/conf.d/default.conf
COPY nginx.conf /etc/nginx/conf.d
--------------------------------------------------------------------------------
/compose/production/nginx/nginx.conf:
--------------------------------------------------------------------------------
upstream hello_web {
    server web:8000;
}

upstream celery_flower {
    server flower:5555;
}

upstream rabbitmq {
    server rabbitmq:15672;
}

server {
    listen 80;
    location / {
        proxy_pass http://hello_web;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header Host $host;
        proxy_redirect off;
    }
    location /upload/ {
        alias /app/upload/;
    }

    location /ws {
        proxy_pass http://hello_web;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_set_header Host $http_host;
    }
}

server {
    listen 5555;
    location / {
        proxy_pass http://celery_flower;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header Host $host;
        proxy_redirect off;
    }
}


server {
    listen 15672;
    location / {
        proxy_pass http://rabbitmq;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header Host $host;
        proxy_redirect off;
    }
}
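
# The Upgrade/Connection headers in the /ws block let nginx proxy the HTTP/1.1
# upgrade handshake, which is what allows WebSocket (and Socket.IO) traffic to
# reach the app; without them those connections would fail at the proxy.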
--------------------------------------------------------------------------------
/docker-compose.prod.yml:
--------------------------------------------------------------------------------
version: '3.8'

services:

  nginx:
    build: ./compose/production/nginx
    volumes:
      - uploadfiles:/app/upload
    ports:
      - 80:80
      - 5559:5555
      - 15672:15672
    depends_on:
      - web
      - flower

  web:
    build:
      context: .
      dockerfile: ./compose/production/fastapi/Dockerfile
    command: /start
    volumes:
      - uploadfiles:/app/upload
    env_file:
      - ./.env/.prod-sample
    depends_on:
      - redis
      - db
      - rabbitmq

  db:
    image: postgres:16-alpine
    volumes:
      - postgres_data:/var/lib/postgresql/data/
    environment:
      - POSTGRES_DB=fastapi_celery
      - POSTGRES_USER=fastapi_celery
      - POSTGRES_PASSWORD=fastapi_celery

  redis:
    image: redis:7-alpine

  rabbitmq:
    image: rabbitmq:3-management
    env_file:
      - ./.env/.prod-sample

  celery_worker:
    build:
      context: .
      dockerfile: ./compose/production/fastapi/Dockerfile
    image: fastapi_celery_example_celery_worker
    command: /start-celeryworker
    volumes:
      - uploadfiles:/app/upload
    env_file:
      - ./.env/.prod-sample
    depends_on:
      - redis
      - db
      - rabbitmq

  celery_beat:
    build:
      context: .
      dockerfile: ./compose/production/fastapi/Dockerfile
    image: fastapi_celery_example_celery_beat
    command: /start-celerybeat
    volumes:
      - uploadfiles:/app/upload
    env_file:
      - ./.env/.prod-sample
    depends_on:
      - redis
      - db
      - rabbitmq

  flower:
    build:
      context: .
      dockerfile: ./compose/production/fastapi/Dockerfile
    image: fastapi_celery_example_celery_flower
    command: /start-flower
    volumes:
      - uploadfiles:/app/upload
      - flower_db:/app/flower_db
    env_file:
      - ./.env/.prod-sample
    depends_on:
      - redis
      - db
      - rabbitmq

#  prometheus:
#    image: prom/prometheus
#    ports:
#      - 9090:9090
#    command:
#      - --config.file=/etc/prometheus/prometheus.yml
#    volumes:
#      - ./prometheus.yml:/etc/prometheus/prometheus.yml:ro
#    depends_on:
#      - cadvisor
#
#  cadvisor:
#    image: gcr.io/cadvisor/cadvisor
#    container_name: cadvisor
#    volumes:
#      - /:/rootfs:ro
#      - /var/run:/var/run:rw
#      - /sys:/sys:ro
#      - /var/lib/docker/:/var/lib/docker:ro
#      - /var/run/docker.sock:/var/run/docker.sock:ro


volumes:
  postgres_data:
  uploadfiles:
  flower_db:
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
version: '3.8'

services:
  web:
    build:
      context: .
      dockerfile: ./compose/local/fastapi/Dockerfile
    image: fastapi_celery_example_web
    # '/start' is the shell script used to run the service
    command: /start
    # this volume maps the files and folders on the host into the container,
    # so code changed on the host is picked up inside the container as well
    volumes:
      - .:/app
    ports:
      - 8010:8000
    env_file:
      - .env/.dev-sample
    depends_on:
      - redis
      - db

  db:
    image: postgres:16-alpine
    volumes:
      - postgres_data:/var/lib/postgresql/data/
    environment:
      - POSTGRES_DB=fastapi_celery
      - POSTGRES_USER=fastapi_celery
      - POSTGRES_PASSWORD=fastapi_celery

  redis:
    image: redis:7-alpine

  celery_worker:
    build:
      context: .
      dockerfile: ./compose/local/fastapi/Dockerfile
    image: fastapi_celery_example_celery_worker
    command: /start-celeryworker
    volumes:
      - .:/app
    env_file:
      - .env/.dev-sample
    depends_on:
      - redis
      - db

  celery_beat:
    build:
      context: .
      dockerfile: ./compose/local/fastapi/Dockerfile
    image: fastapi_celery_example_celery_beat
    command: /start-celerybeat
    volumes:
      - .:/app
    env_file:
      - .env/.dev-sample
    depends_on:
      - redis
      - db

  flower:
    build:
      context: .
      dockerfile: ./compose/local/fastapi/Dockerfile
    image: fastapi_celery_example_celery_flower
    command: /start-flower
    volumes:
      - .:/app
    env_file:
      - .env/.dev-sample
    ports:
      - 5557:5555
    depends_on:
      - redis
      - db

volumes:
  postgres_data:
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
from project import create_app

app = create_app()
celery = app.celery_app
--------------------------------------------------------------------------------
/project/__init__.py:
--------------------------------------------------------------------------------
from contextlib import asynccontextmanager

from broadcaster import Broadcast
from fastapi import FastAPI

from project.config import settings

broadcast = Broadcast(settings.WS_MESSAGE_QUEUE)


@asynccontextmanager
async def lifespan(app: FastAPI):
    await broadcast.connect()
    yield
    await broadcast.disconnect()


def create_app() -> FastAPI:
    app = FastAPI(lifespan=lifespan)

    from project.logging import configure_logging
    configure_logging()

    # do this before loading routes
    from project.celery_utils import create_celery
    app.celery_app = create_celery()

    from project.users import users_router
    app.include_router(users_router)

    from project.tdd import tdd_router
    app.include_router(tdd_router)

    from project.ws import ws_router
    app.include_router(ws_router)

    from project.ws.views import register_socketio_app
    register_socketio_app(app)

    @app.get("/")
    async def root():
        return {"message": "Hello World"}

    return app
--------------------------------------------------------------------------------
/project/asgi.py:
--------------------------------------------------------------------------------
from project import create_app

app = create_app()
celery = app.celery_app
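
# Note: main.py builds the same app for local development (`uvicorn main:app`),
# while the production scripts point at this module instead
# (`gunicorn project.asgi:app`, `celery -A project.asgi.celery ...`).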
--------------------------------------------------------------------------------
/project/celery_utils.py:
--------------------------------------------------------------------------------
import functools

from celery import current_app as current_celery_app, shared_task
from celery.result import AsyncResult
from celery.utils.time import get_exponential_backoff_interval

from project.config import settings


def create_celery():
    celery_app = current_celery_app
    celery_app.config_from_object(settings, namespace="CELERY")

    return celery_app


def get_task_info(task_id):
    """
    Return task info for the given task_id.
    """
    task = AsyncResult(task_id)
    state = task.state

    if state == "FAILURE":
        error = str(task.result)
        response = {
            "state": task.state,
            "error": error,
        }
    else:
        response = {
            "state": task.state,
        }
    return response


class custom_celery_task:

    EXCEPTION_BLOCK_LIST = (
        IndexError,
        KeyError,
        TypeError,
        UnicodeDecodeError,
        ValueError,
    )

    def __init__(self, *args, **kwargs):
        self.task_args = args
        self.task_kwargs = kwargs

    def __call__(self, func):
        @functools.wraps(func)
        def wrapper_func(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except self.EXCEPTION_BLOCK_LIST:
                # do not retry for those exceptions
                raise
            except Exception as e:
                # here we add Exponential Backoff just like Celery
                # (task_func is assigned below before any task can run,
                # so the closure resolves it correctly at call time)
                countdown = self._get_retry_countdown(task_func)
                raise task_func.retry(exc=e, countdown=countdown)

        task_func = shared_task(*self.task_args, **self.task_kwargs)(wrapper_func)
        return task_func

    def _get_retry_countdown(self, task_func):
        retry_backoff = int(
            max(1.0, float(self.task_kwargs.get('retry_backoff', True)))
        )
        retry_backoff_max = int(
            self.task_kwargs.get('retry_backoff_max', 600)
        )
        retry_jitter = self.task_kwargs.get(
            'retry_jitter', True
        )

        countdown = get_exponential_backoff_interval(
            factor=retry_backoff,
            retries=task_func.request.retries,
            maximum=retry_backoff_max,
            full_jitter=retry_jitter
        )

        return countdown
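
# Usage sketch (task_process_notification in project/users/tasks.py follows
# this exact pattern):
#
#   @custom_celery_task(max_retries=3)
#   def flaky_task():
#       ...
#
# Any exception outside EXCEPTION_BLOCK_LIST triggers a retry, with the delay
# computed by _get_retry_countdown (exponential backoff, capped, with jitter).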
--------------------------------------------------------------------------------
/project/config.py:
--------------------------------------------------------------------------------
import os
import pathlib
from functools import lru_cache

from kombu import Queue


def route_task(name, args, kwargs, options, task=None, **kw):
    if ":" in name:
        queue, _ = name.split(":")
        return {"queue": queue}
    return {"queue": "default"}


class BaseConfig:
    BASE_DIR: pathlib.Path = pathlib.Path(__file__).parent.parent

    DATABASE_URL: str = os.environ.get("DATABASE_URL", f"sqlite:///{BASE_DIR}/db.sqlite3")
    DATABASE_CONNECT_DICT: dict = {}

    CELERY_BROKER_URL: str = os.environ.get("CELERY_BROKER_URL", "redis://127.0.0.1:6379/0")
    CELERY_RESULT_BACKEND: str = os.environ.get("CELERY_RESULT_BACKEND", "redis://127.0.0.1:6379/0")

    WS_MESSAGE_QUEUE: str = os.environ.get("WS_MESSAGE_QUEUE", "redis://127.0.0.1:6379/0")

    CELERY_BEAT_SCHEDULE: dict = {
        # "task-schedule-work": {
        #     "task": "task_schedule_work",
        #     "schedule": 5.0,  # five seconds
        # },
    }

    CELERY_TASK_DEFAULT_QUEUE: str = "default"

    # Force all queues to be explicitly listed in `CELERY_TASK_QUEUES` to help prevent typos
    CELERY_TASK_CREATE_MISSING_QUEUES: bool = False

    CELERY_TASK_QUEUES: tuple = (
        # the default queue must be defined here, or an exception would be raised
        Queue("default"),

        Queue("high_priority"),
        Queue("low_priority"),
    )

    CELERY_TASK_ROUTES = (route_task,)

    UPLOADS_DEFAULT_DEST: str = str(BASE_DIR / "upload")


class DevelopmentConfig(BaseConfig):
    pass


class ProductionConfig(BaseConfig):
    pass


class TestingConfig(BaseConfig):
    # https://fastapi.tiangolo.com/advanced/testing-database/
    DATABASE_URL: str = "sqlite:///./test.db"
    DATABASE_CONNECT_DICT: dict = {"check_same_thread": False}


@lru_cache()
def get_settings():
    config_cls_dict = {
        "development": DevelopmentConfig,
        "production": ProductionConfig,
        "testing": TestingConfig,
    }

    config_name = os.environ.get("FASTAPI_CONFIG", "development")
    config_cls = config_cls_dict[config_name]
    return config_cls()


settings = get_settings()
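
# How route_task maps the task names used in project/users/tasks.py to queues:
#
#   "high_priority:dynamic_example_three" -> {"queue": "high_priority"}
#   "low_priority:dynamic_example_two"    -> {"queue": "low_priority"}
#   "task_schedule_work" (no colon)       -> {"queue": "default"}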
--------------------------------------------------------------------------------
/project/database.py:
--------------------------------------------------------------------------------
from contextlib import contextmanager

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, declarative_base

from project.config import settings

# https://fastapi.tiangolo.com/tutorial/sql-databases/#create-the-sqlalchemy-engine
engine = create_engine(
    settings.DATABASE_URL, connect_args=settings.DATABASE_CONNECT_DICT
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

Base = declarative_base()


def get_db_session():
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()


db_context = contextmanager(get_db_session)
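
# The same generator is consumed two ways in this project:
#
#   - FastAPI views:  session: Session = Depends(get_db_session)
#   - Celery tasks:   with db_context() as session: ...
#
# Either way, the finally block guarantees session.close() runs.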
--------------------------------------------------------------------------------
/project/logging.py:
--------------------------------------------------------------------------------
import logging
import logging.config


def configure_logging():
    logging_dict = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "verbose": {
                "format": "[%(asctime)s: %(levelname)s] [%(pathname)s:%(lineno)d] %(message)s",
            },
        },
        "handlers": {
            "console": {
                "class": "logging.StreamHandler",
                "formatter": "verbose",
            },
        },
        "root": {
            "handlers": ["console"],
            "level": "INFO",
        },
        "loggers": {
            "project": {
                "handlers": ["console"],
                "propagate": False,
            },
            "uvicorn.access": {
                "propagate": True,
            },
        },
    }

    logging.config.dictConfig(logging_dict)
--------------------------------------------------------------------------------
/project/tdd/__init__.py:
--------------------------------------------------------------------------------
from fastapi import APIRouter

tdd_router = APIRouter(
    prefix="/tdd",
)

from . import views, models  # noqa
--------------------------------------------------------------------------------
/project/tdd/factories.py:
--------------------------------------------------------------------------------
import os

import factory
from factory import LazyAttribute, Faker
from PIL import Image

from project.config import settings
from project.database import SessionLocal
from project.tdd.models import Member


class MemberFactory(factory.alchemy.SQLAlchemyModelFactory):
    class Meta:
        model = Member
        sqlalchemy_session = SessionLocal()
        sqlalchemy_get_or_create = ("username",)
        sqlalchemy_session_persistence = "commit"

    username = Faker("user_name")
    email = LazyAttribute(lambda o: "%s@example.com" % o.username)

    @factory.lazy_attribute
    def avatar(self):
        width = 300
        height = 300
        color = "blue"
        image_format = "JPEG"
        image_palette = "RGB"

        with Image.new(image_palette, (width, height), color) as thumb:
            filename = f"{self.username}.jpg"
            full_path = os.path.join(
                settings.UPLOADS_DEFAULT_DEST,
                filename
            )
            thumb.save(full_path, format=image_format)

        return filename
--------------------------------------------------------------------------------
/project/tdd/models.py:
--------------------------------------------------------------------------------
from sqlalchemy import Column, Integer, String

from project.database import Base


class Member(Base):

    __tablename__ = "members"

    id = Column(Integer, primary_key=True, autoincrement=True)
    username = Column(String(128), unique=True, nullable=False)
    email = Column(String(128), unique=True, nullable=False)

    avatar = Column(String(256), nullable=False)
    avatar_thumbnail = Column(String(256), nullable=True)
--------------------------------------------------------------------------------
/project/tdd/tasks.py:
--------------------------------------------------------------------------------
import os

from celery import shared_task
from PIL import Image

from project.config import settings
from project.database import db_context
from project.tdd.models import Member


@shared_task(name="generate_avatar_thumbnail")
def generate_avatar_thumbnail(member_pk):
    with db_context() as session:
        member = session.get(Member, member_pk)

        full_path = os.path.join(
            settings.UPLOADS_DEFAULT_DEST,
            member.avatar
        )

        thumbnail_path = f"{member.id}-avatar-thumbnail.jpg"
        thumbnail_full_path = os.path.join(
            settings.UPLOADS_DEFAULT_DEST,
            thumbnail_path
        )

        im = Image.open(full_path)
        size = (100, 100)
        im.thumbnail(size)
        im.save(thumbnail_full_path, "JPEG")

        member.avatar_thumbnail = thumbnail_path
        session.add(member)
        session.commit()
--------------------------------------------------------------------------------
/project/tdd/views.py:
--------------------------------------------------------------------------------
import os

from fastapi import File, UploadFile, Depends, Form
from sqlalchemy.orm import Session

from . import tdd_router
from project.database import get_db_session
from project.config import settings
from project.tdd.models import Member
from project.tdd.tasks import generate_avatar_thumbnail


@tdd_router.post("/member_signup/")
def member_signup(
    username: str = Form(...),
    email: str = Form(...),
    upload_file: UploadFile = File(...),
    session: Session = Depends(get_db_session)
):
    """
    https://stackoverflow.com/questions/63580229/how-to-save-uploadfile-in-fastapi
    https://github.com/encode/starlette/issues/446
    """
    file_location = os.path.join(
        settings.UPLOADS_DEFAULT_DEST,
        upload_file.filename,
    )
    with open(file_location, "wb") as file_object:
        file_object.write(upload_file.file.read())

    try:
        member = Member(
            username=username,
            email=email,
            avatar=upload_file.filename,
        )
        session.add(member)
        session.commit()
        member_id = member.id
    except Exception:
        session.rollback()
        raise

    generate_avatar_thumbnail.delay(member_id)
    return {"message": "Sign up successful"}
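
# Note that the primary key is read back after commit() and passed to .delay()
# rather than the Member instance itself: task arguments are serialized, so
# sending the plain integer avoids shipping a session-bound ORM object.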
--------------------------------------------------------------------------------
/project/users/__init__.py:
--------------------------------------------------------------------------------
from fastapi import APIRouter

users_router = APIRouter(
    prefix="/users",
)

from . import views, models, tasks  # noqa
--------------------------------------------------------------------------------
/project/users/factories.py:
--------------------------------------------------------------------------------
import factory
from factory import LazyAttribute, Faker

from project.database import SessionLocal
from project.users.models import User


class UserFactory(factory.alchemy.SQLAlchemyModelFactory):
    class Meta:
        model = User
        sqlalchemy_session = SessionLocal()
        sqlalchemy_get_or_create = ("username",)
        sqlalchemy_session_persistence = "commit"

    username = Faker("user_name")
    email = LazyAttribute(lambda o: "%s@example.com" % o.username)
--------------------------------------------------------------------------------
/project/users/models.py:
--------------------------------------------------------------------------------
from sqlalchemy import Column, Integer, String

from project.database import Base


class User(Base):
    __tablename__ = "users"

    id = Column(Integer, primary_key=True, autoincrement=True)
    username = Column(String(128), unique=True, nullable=False)
    email = Column(String(128), unique=True, nullable=False)

    def __init__(self, username, email, *args, **kwargs):
        self.username = username
        self.email = email
--------------------------------------------------------------------------------
/project/users/schemas.py:
--------------------------------------------------------------------------------
from pydantic import BaseModel


class UserBody(BaseModel):

    username: str
    email: str
--------------------------------------------------------------------------------
/project/users/tasks.py:
--------------------------------------------------------------------------------
import random
import logging

import requests
from asgiref.sync import async_to_sync
from celery import shared_task
from celery.signals import task_postrun
from celery.utils.log import get_task_logger
from celery.signals import after_setup_logger

from project.database import db_context
from project.celery_utils import custom_celery_task


logger = get_task_logger(__name__)


@shared_task
def divide(x, y):
    # from celery.contrib import rdb
    # rdb.set_trace()

    import time
    time.sleep(5)
    return x / y


@shared_task()
def sample_task(email):
    from project.users.views import api_call

    api_call(email)


@custom_celery_task(max_retries=3)
def task_process_notification():
    if not random.choice([0, 1]):
        # mimic random error
        raise Exception()

    requests.post("https://httpbin.org/delay/5")


@task_postrun.connect
def task_postrun_handler(task_id, **kwargs):
    from project.ws.views import update_celery_task_status
    async_to_sync(update_celery_task_status)(task_id)

    from project.ws.views import update_celery_task_status_socketio
    update_celery_task_status_socketio(task_id)


@shared_task(name="task_schedule_work")
def task_schedule_work():
    logger.info("task_schedule_work run")


@shared_task(name="default:dynamic_example_one")
def dynamic_example_one():
    logger.info("Example One")


@shared_task(name="low_priority:dynamic_example_two")
def dynamic_example_two():
    logger.info("Example Two")


@shared_task(name="high_priority:dynamic_example_three")
def dynamic_example_three():
    logger.info("Example Three")


@shared_task()
def task_send_welcome_email(user_pk):
    from project.users.models import User

    with db_context() as session:
        user = session.get(User, user_pk)
        logger.info(f'send email to {user.email} {user.id}')


@shared_task()
def task_test_logger():
    logger.info("test")


@after_setup_logger.connect()
def on_after_setup_logger(logger, **kwargs):
    formatter = logger.handlers[0].formatter
    file_handler = logging.FileHandler('celery.log')
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)


@shared_task(bind=True)
def task_add_subscribe(self, user_pk):
    with db_context() as session:
        try:
            from project.users.models import User

            user = session.get(User, user_pk)
            requests.post(
                "https://httpbin.org/delay/5",
                data={"email": user.email},
            )
        except Exception as exc:
            raise self.retry(exc=exc)
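
# The on_after_setup_logger handler above mirrors worker output to a
# 'celery.log' file using the console handler's formatter; the celery.log at
# the repo root appears to be an artifact of this handler.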
--------------------------------------------------------------------------------
/project/users/templates/form.html:
--------------------------------------------------------------------------------
