├── api ├── .dockerignore ├── requirements.txt ├── worker.py ├── Dockerfile.dev ├── Dockerfile └── app.py ├── celery-queue ├── .dockerignore ├── requirements.txt ├── Dockerfile ├── Dockerfile.dev └── tasks.py ├── .github └── workflows │ └── build.yml ├── docker-compose.yml ├── docker-compose.development.yml ├── README.md └── .gitignore /api/.dockerignore: -------------------------------------------------------------------------------- 1 | Dockerfile 2 | .dockerignore 3 | -------------------------------------------------------------------------------- /celery-queue/.dockerignore: -------------------------------------------------------------------------------- 1 | Dockerfile 2 | .dockerignore 3 | -------------------------------------------------------------------------------- /api/requirements.txt: -------------------------------------------------------------------------------- 1 | Flask==2.0.1 2 | celery==5.1.2 3 | redis==3.5.3 -------------------------------------------------------------------------------- /celery-queue/requirements.txt: -------------------------------------------------------------------------------- 1 | celery==5.1.2 2 | flower==1.0.0 3 | redis==3.5.3 4 | watchdog==2.1.5 5 | 6 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: Docker Flask Celery Redis 2 | 3 | on: [push] 4 | 5 | jobs: 6 | build: 7 | 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/checkout@master 11 | - name: Build 12 | run: docker-compose up -d 13 | -------------------------------------------------------------------------------- /celery-queue/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.9-alpine 2 | 3 | ENV CELERY_BROKER_URL redis://redis:6379/0 4 | ENV CELERY_RESULT_BACKEND redis://redis:6379/0 5 | ENV C_FORCE_ROOT true 6 | 7 | COPY . 
import os
from celery import Celery


# Broker / result-backend URLs come from the environment when running under
# docker-compose (redis://redis:6379/0); the localhost fallback supports
# running the API directly on the host.
# BUGFIX: the original assignment ended with a trailing comma, which turned
# CELERY_BROKER_URL into a one-element tuple instead of a URL string and
# broke Celery's broker configuration.
CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'redis://localhost:6379')
CELERY_RESULT_BACKEND = os.environ.get('CELERY_RESULT_BACKEND', 'redis://localhost:6379')


# Celery client used by the Flask API to enqueue tasks; the worker process
# (celery-queue/tasks.py) registers the task implementations under the same
# 'tasks' app name.
celery = Celery('tasks', broker=CELERY_BROKER_URL, backend=CELERY_RESULT_BACKEND)
import os
import time
from celery import Celery


# BUGFIX: the original assignment ended with a trailing comma, which turned
# CELERY_BROKER_URL into a one-element tuple instead of a URL string and
# broke Celery's broker configuration.
CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'redis://localhost:6379')
CELERY_RESULT_BACKEND = os.environ.get('CELERY_RESULT_BACKEND', 'redis://localhost:6379')

# Worker-side Celery app; must use the same 'tasks' app name and broker as
# the API's client (api/worker.py) so send_task('tasks.add', ...) resolves.
celery = Celery('tasks', broker=CELERY_BROKER_URL, backend=CELERY_RESULT_BACKEND)


@celery.task(name='tasks.add')
def add(x: int, y: int) -> int:
    """Return x + y after a 5-second sleep that simulates slow work."""
    time.sleep(5)
    return x + y
import celery.states as states
from flask import Flask, Response
from flask import url_for, jsonify
from worker import celery

dev_mode = True
app = Flask(__name__)


# BUGFIX: the route rules had lost their URL converters ('/add//' and
# '/check/'), so Flask could never bind param1/param2/task_id and every
# request 404'd or crashed. The converters are restored below.
@app.route('/add/<int:param1>/<int:param2>')
def add(param1: int, param2: int) -> str:
    """Enqueue tasks.add(param1, param2) on the worker and return the task id."""
    task = celery.send_task('tasks.add', args=[param1, param2], kwargs={})
    response = f"check status of {task.id} "
    return response


@app.route('/check/<string:task_id>')
def check_task(task_id: str) -> str:
    """Report a task's state, or its result once it has left PENDING."""
    res = celery.AsyncResult(task_id)
    if res.state == states.PENDING:
        return res.state
    else:
        return str(res.result)


@app.route('/health_check')
def health_check() -> Response:
    """Liveness probe endpoint; always returns JSON "OK"."""
    return jsonify("OK")


if __name__ == '__main__':
    # Dev entry point only; production runs under gunicorn (see Dockerfile).
    app.run(host='0.0.0.0', port=5001)
dockerfile: Dockerfile 28 | ports: 29 | - "5555:5555" 30 | command: ['celery', 'flower', '-A', 'tasks'] 31 | environment: 32 | CELERY_BROKER_URL: redis://redis:6379/0 33 | CELERY_RESULT_BACKEND: redis://redis:6379/0 34 | depends_on: 35 | - redis 36 | - worker 37 | volumes: ['./celery-queue:/queue'] 38 | redis: 39 | image: redis:alpine 40 | ports: 41 | - "6379:6379" 42 | -------------------------------------------------------------------------------- /docker-compose.development.yml: -------------------------------------------------------------------------------- 1 | version: "3.7" 2 | services: 3 | web: 4 | build: 5 | context: ./api 6 | dockerfile: Dockerfile.dev 7 | environment: 8 | FLASK_DEBUG: "on" 9 | FLASK_APP: ./app.py 10 | restart: always 11 | ports: 12 | - "5001:5001" 13 | depends_on: 14 | - redis 15 | volumes: ['./api:/api'] 16 | worker: 17 | build: 18 | context: ./celery-queue 19 | dockerfile: Dockerfile.dev 20 | command: celery -A tasks worker -l info -E 21 | environment: 22 | CELERY_BROKER_URL: redis://redis 23 | CELERY_RESULT_BACKEND: redis://redis 24 | depends_on: 25 | - redis 26 | volumes: ['./celery-queue:/queue'] 27 | monitor: 28 | build: 29 | context: ./celery-queue 30 | dockerfile: Dockerfile.dev 31 | ports: 32 | - "5555:5555" 33 | command: ['celery', 'flower', '-A', 'tasks'] 34 | environment: 35 | CELERY_BROKER_URL: redis://redis:6379/0 36 | CELERY_RESULT_BACKEND: redis://redis:6379/0 37 | depends_on: 38 | - redis 39 | - worker 40 | volumes: ['./celery-queue:/queue'] 41 | redis: 42 | image: redis:alpine 43 | ports: 44 | - "6379:6379" 45 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Docker Flask Celery Redis 2 | 3 | A basic [Docker Compose](https://docs.docker.com/compose/) template for orchestrating a [Flask](http://flask.pocoo.org/) application & a [Celery](http://www.celeryproject.org/) queue with 
[Redis](https://redis.io/) 4 | 5 | ### Installation 6 | 7 | ```bash 8 | git clone https://github.com/mattkohl/docker-flask-celery-redis 9 | ``` 10 | 11 | ### Build & Launch 12 | 13 | ```bash 14 | docker-compose up -d --build 15 | ``` 16 | 17 | ### Enable hot code reload 18 | 19 | ``` 20 | docker-compose -f docker-compose.yml -f docker-compose.development.yml up --build 21 | ``` 22 | 23 | This will expose the Flask application's endpoints on port `5001` as well as a [Flower](https://github.com/mher/flower) server for monitoring workers on port `5555` 24 | 25 | To add more workers: 26 | ```bash 27 | docker-compose up -d --scale worker=5 --no-recreate 28 | ``` 29 | 30 | To shut down: 31 | 32 | ```bash 33 | docker-compose down 34 | ``` 35 | 36 | 37 | To change the endpoints, update the code in [api/app.py](api/app.py) 38 | 39 | Task changes should happen in [celery-queue/tasks.py](celery-queue/tasks.py) 40 | 41 | --- 42 | 43 | adapted from [https://github.com/itsrifat/flask-celery-docker-scale](https://github.com/itsrifat/flask-celery-docker-scale) 44 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by .ignore support plugin (hsz.mobi) 2 | ### Example user template template 3 | ### Example user template 4 | 5 | # IntelliJ project files 6 | .idea 7 | *.iml 8 | out 9 | gen### Python template 10 | # Byte-compiled / optimized / DLL files 11 | __pycache__/ 12 | *.py[cod] 13 | *$py.class 14 | 15 | # C extensions 16 | *.so 17 | 18 | # Distribution / packaging 19 | .Python 20 | env/ 21 | build/ 22 | develop-eggs/ 23 | dist/ 24 | downloads/ 25 | eggs/ 26 | .eggs/ 27 | lib/ 28 | lib64/ 29 | parts/ 30 | sdist/ 31 | var/ 32 | wheels/ 33 | *.egg-info/ 34 | .installed.cfg 35 | *.egg 36 | 37 | # PyInstaller 38 | # Usually these files are written by a python script from a template 39 | # before PyInstaller builds the exe, so as to inject 
date/other infos into it. 40 | *.manifest 41 | *.spec 42 | 43 | # Installer logs 44 | pip-log.txt 45 | pip-delete-this-directory.txt 46 | 47 | # Unit test / coverage reports 48 | htmlcov/ 49 | .tox/ 50 | .coverage 51 | .coverage.* 52 | .cache 53 | nosetests.xml 54 | coverage.xml 55 | *,cover 56 | .hypothesis/ 57 | 58 | # Translations 59 | *.mo 60 | *.pot 61 | 62 | # Django stuff: 63 | *.log 64 | local_settings.py 65 | 66 | # Flask stuff: 67 | instance/ 68 | .webassets-cache 69 | 70 | # Scrapy stuff: 71 | .scrapy 72 | 73 | # Sphinx documentation 74 | docs/_build/ 75 | 76 | # PyBuilder 77 | target/ 78 | 79 | # Jupyter Notebook 80 | .ipynb_checkpoints 81 | 82 | # pyenv 83 | .python-version 84 | 85 | # celery beat schedule file 86 | celerybeat-schedule 87 | 88 | # SageMath parsed files 89 | *.sage.py 90 | 91 | # dotenv 92 | 93 | # virtualenv 94 | .venv 95 | venv/ 96 | ENV/ 97 | 98 | # Spyder project settings 99 | .spyderproject 100 | 101 | # Rope project settings 102 | .ropeproject 103 | ### Example user template template 104 | ### Example user template 105 | 106 | # IntelliJ project files 107 | .idea 108 | *.iml 109 | out 110 | gen 111 | tmp 112 | coverage-reports 113 | coverage* 114 | .DS_Store 115 | --------------------------------------------------------------------------------