├── .gitignore ├── myproject ├── myproject │ ├── __init__.py │ ├── serializers.py │ ├── celeryconf.py │ ├── wsgi.py │ ├── urls.py │ ├── views.py │ ├── models.py │ ├── tasks.py │ └── settings.py └── manage.py ├── requirements.txt ├── run_celery.sh ├── README.md ├── Dockerfile ├── run_web.sh ├── licence └── docker-compose.yml /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | .idea 3 | -------------------------------------------------------------------------------- /myproject/myproject/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | Django==1.9.8 2 | celery==3.1.20 3 | djangorestframework==3.3.1 4 | psycopg2==2.5.3 5 | redis==2.10.5 6 | -------------------------------------------------------------------------------- /myproject/myproject/serializers.py: -------------------------------------------------------------------------------- 1 | from rest_framework import serializers 2 | 3 | from .models import Job 4 | 5 | 6 | class JobSerializer(serializers.HyperlinkedModelSerializer): 7 | class Meta: 8 | model = Job -------------------------------------------------------------------------------- /run_celery.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # wait for RabbitMQ server to start 4 | sleep 10 5 | 6 | cd myproject 7 | # run Celery worker for our project myproject with Celery configuration stored in Celeryconf 8 | su -m myuser -c "celery worker -A myproject.celeryconf -Q default -n default@%h" 9 | -------------------------------------------------------------------------------- /myproject/manage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | 
import sys 4 | 5 | if __name__ == "__main__": 6 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "myproject.settings") 7 | 8 | from django.core.management import execute_from_command_line 9 | 10 | execute_from_command_line(sys.argv) 11 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## Configure django with celery and run everything in containers 2 | 3 | This project shows an example of configuring Django with Celery. 4 | 5 | Everything runs inside Docker containers. 6 | 7 | More details about this configuration can be found [here](http://www.syncano.com/configuring-running-django-celery-docker-containers-pt-1/). 8 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # use base python image with python 2.7 2 | FROM python:2.7 3 | 4 | # add requirements.txt to the image 5 | ADD requirements.txt /app/requirements.txt 6 | 7 | # set working directory to /app/ 8 | WORKDIR /app/ 9 | 10 | # install python dependencies 11 | RUN pip install -r requirements.txt 12 | 13 | # create unprivileged user 14 | RUN adduser --disabled-password --gecos '' myuser 15 | -------------------------------------------------------------------------------- /run_web.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # wait for PSQL server to start 4 | sleep 10 5 | 6 | cd myproject 7 | # prepare init migration 8 | su -m myuser -c "python manage.py makemigrations myproject" 9 | # migrate db, so we have the latest db schema 10 | su -m myuser -c "python manage.py migrate" 11 | # start development server on public ip interface, on port 8000 12 | su -m myuser -c "python manage.py runserver 0.0.0.0:8000" 13 | --------------------------------------------------------------------------------
/myproject/myproject/celeryconf.py: -------------------------------------------------------------------------------- 1 | # coding=UTF8 2 | from __future__ import absolute_import 3 | 4 | import os 5 | 6 | from celery import Celery 7 | from django.conf import settings 8 | 9 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "myproject.settings") 10 | 11 | app = Celery('myproject') 12 | 13 | CELERY_TIMEZONE = 'UTC' 14 | 15 | app.config_from_object('django.conf:settings') 16 | app.autodiscover_tasks(lambda: settings.INSTALLED_APPS) -------------------------------------------------------------------------------- /myproject/myproject/wsgi.py: -------------------------------------------------------------------------------- 1 | """ 2 | WSGI config for myproject project. 3 | 4 | It exposes the WSGI callable as a module-level variable named ``application``. 5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/ 8 | """ 9 | 10 | import os 11 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "myproject.settings") 12 | 13 | from django.core.wsgi import get_wsgi_application 14 | application = get_wsgi_application() 15 | -------------------------------------------------------------------------------- /myproject/myproject/urls.py: -------------------------------------------------------------------------------- 1 | from django.conf.urls import url, include 2 | from rest_framework import routers 3 | 4 | from myproject import views 5 | 6 | 7 | router = routers.DefaultRouter() 8 | router.register(r'jobs', views.JobViewSet) 9 | 10 | # Wire up our API using automatic URL routing. 11 | # Additionally, we include login URLs for the browsable API. 
12 | urlpatterns = [ 13 | url(r'^', include(router.urls)), 14 | url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')) 15 | ] -------------------------------------------------------------------------------- /myproject/myproject/views.py: -------------------------------------------------------------------------------- 1 | from rest_framework import mixins, viewsets 2 | 3 | from .models import Job 4 | from .serializers import JobSerializer 5 | 6 | 7 | class JobViewSet(mixins.CreateModelMixin, 8 | mixins.ListModelMixin, 9 | mixins.RetrieveModelMixin, 10 | viewsets.GenericViewSet): 11 | """ 12 | API endpoint that allows jobs to be viewed or created. 13 | """ 14 | queryset = Job.objects.all() 15 | serializer_class = JobSerializer 16 | -------------------------------------------------------------------------------- /myproject/myproject/models.py: -------------------------------------------------------------------------------- 1 | from django.db import models 2 | 3 | 4 | class Job(models.Model): 5 | TYPES = ( 6 | ('fibonacci', 'fibonacci'), 7 | ('power', 'power'), 8 | ) 9 | 10 | STATUSES = ( 11 | ('pending', 'pending'), 12 | ('started', 'started'), 13 | ('finished', 'finished'), 14 | ('failed', 'failed'), 15 | ) 16 | 17 | type = models.CharField(choices=TYPES, max_length=20) 18 | status = models.CharField(choices=STATUSES, max_length=20) 19 | 20 | created_at = models.DateTimeField(auto_now_add=True) 21 | updated_at = models.DateTimeField(auto_now=True) 22 | argument = models.PositiveIntegerField() 23 | result = models.IntegerField(null=True) 24 | 25 | def save(self, *args, **kwargs): 26 | super(Job, self).save(*args, **kwargs) 27 | if self.status == 'pending': 28 | from .tasks import TASK_MAPPING 29 | task = TASK_MAPPING[self.type] 30 | task.delay(job_id=self.id, n=self.argument) -------------------------------------------------------------------------------- /licence: -------------------------------------------------------------------------------- 
1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015 Justyna Edyta Ilczuk 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /myproject/myproject/tasks.py: -------------------------------------------------------------------------------- 1 | from functools import wraps 2 | 3 | from myproject.celeryconf import app 4 | from .models import Job 5 | 6 | 7 | def update_job(fn): 8 | @wraps(fn) 9 | def wrapper(job_id, *args, **kwargs): 10 | job = Job.objects.get(id=job_id) 11 | job.status = 'started' 12 | job.save() 13 | try: 14 | result = fn(*args, **kwargs) 15 | job.result = result 16 | job.status = 'finished' 17 | job.save() 18 | except: 19 | job.result = None 20 | job.status = 'failed' 21 | job.save() 22 | return wrapper 23 | 24 | 25 | @app.task 26 | @update_job 27 | def power(n): 28 | """Return 2 to the n'th power""" 29 | return 2 ** n 30 | 31 | 32 | @app.task 33 | @update_job 34 | def fib(n): 35 | """Return the n'th Fibonacci number. 36 | """ 37 | if n < 0: 38 | raise ValueError("Fibonacci numbers are only defined for n >= 0.") 39 | return _fib(n) 40 | 41 | 42 | def _fib(n): 43 | if n == 0 or n == 1: 44 | return n 45 | else: 46 | return _fib(n - 1) + _fib(n - 2) 47 | 48 | TASK_MAPPING = { 49 | 'power': power, 50 | 'fibonacci': fib 51 | } -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | 3 | services: 4 | # PostgreSQL database 5 | db: 6 | image: postgres:9.4 7 | hostname: db 8 | environment: 9 | - POSTGRES_USER=postgres 10 | - POSTGRES_PASSWORD=postgres 11 | - POSTGRES_DB=postgres 12 | ports: 13 | - "5432:5432" 14 | 15 | # Redis 16 | redis: 17 | image: redis:2.8.19 18 | hostname: redis 19 | 20 | # RabbitMQ 21 | rabbit: 22 | hostname: rabbit 23 | image: rabbitmq:3.6.0 24 | environment: 25 | - RABBITMQ_DEFAULT_USER=admin 26 | - RABBITMQ_DEFAULT_PASS=mypass 27 | ports: 28 | - "5672:5672" # we forward this port because it's useful for debugging 29 | - 
"15672:15672" # here, we can access rabbitmq management plugin 30 | 31 | # Django web server 32 | web: 33 | build: 34 | context: . 35 | dockerfile: Dockerfile 36 | hostname: web 37 | command: ./run_web.sh 38 | volumes: 39 | - .:/app # mount current directory inside container 40 | ports: 41 | - "8000:8000" 42 | # set up links so that web knows about db, rabbit and redis 43 | links: 44 | - db 45 | - rabbit 46 | - redis 47 | depends_on: 48 | - db 49 | 50 | # Celery worker 51 | worker: 52 | build: 53 | context: . 54 | dockerfile: Dockerfile 55 | command: ./run_celery.sh 56 | volumes: 57 | - .:/app 58 | links: 59 | - db 60 | - rabbit 61 | - redis 62 | depends_on: 63 | - rabbit 64 | -------------------------------------------------------------------------------- /myproject/myproject/settings.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from kombu import Exchange, Queue 4 | 5 | 6 | BASE_DIR = os.path.dirname(os.path.dirname(__file__)) 7 | 8 | # SECURITY WARNING: keep the secret key used in production secret! 9 | SECRET_KEY = 'megg_yej86ln@xao^+)it4e&ueu#!4tl9p1h%2sjr7ey0)m25f' 10 | 11 | # SECURITY WARNING: don't run with debug turned on in production! 
12 | DEBUG = True 13 | TEMPLATE_DEBUG = True 14 | ALLOWED_HOSTS = [] 15 | 16 | # Application definition 17 | 18 | INSTALLED_APPS = ( 19 | 'rest_framework', 20 | 'myproject', 21 | 'django.contrib.sites', 22 | 'django.contrib.staticfiles', 23 | 24 | # required by Django 1.9 25 | 'django.contrib.auth', 26 | 'django.contrib.contenttypes', 27 | 28 | ) 29 | 30 | MIDDLEWARE_CLASSES = ( 31 | ) 32 | 33 | REST_FRAMEWORK = { 34 | 'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.AllowAny',), 35 | 'PAGINATE_BY': 10 36 | } 37 | 38 | ROOT_URLCONF = 'myproject.urls' 39 | 40 | WSGI_APPLICATION = 'myproject.wsgi.application' 41 | 42 | # Localization and timezone settings 43 | 44 | TIME_ZONE = 'UTC' 45 | USE_TZ = True 46 | 47 | CELERY_ENABLE_UTC = True 48 | CELERY_TIMEZONE = "UTC" 49 | 50 | LANGUAGE_CODE = 'en-us' 51 | USE_I18N = True 52 | USE_L10N = True 53 | 54 | # Static files (CSS, JavaScript, Images) 55 | # https://docs.djangoproject.com/en/1.7/howto/static-files/ 56 | STATIC_URL = '/static/' 57 | 58 | # Database Configuration 59 | DATABASES = { 60 | 'default': { 61 | 'ENGINE': 'django.db.backends.postgresql_psycopg2', 62 | 'NAME': os.environ.get('DB_ENV_DB', 'postgres'), 63 | 'USER': os.environ.get('DB_ENV_POSTGRES_USER', 'postgres'), 64 | 'PASSWORD': os.environ.get('DB_ENV_POSTGRES_PASSWORD', 'postgres'), 65 | 'HOST': os.environ.get('DB_PORT_5432_TCP_ADDR', 'db'), 66 | 'PORT': os.environ.get('DB_PORT_5432_TCP_PORT', ''), 67 | }, 68 | } 69 | 70 | # Redis 71 | 72 | REDIS_PORT = 6379 73 | REDIS_DB = 0 74 | REDIS_HOST = os.environ.get('REDIS_PORT_6379_TCP_ADDR', 'redis') 75 | 76 | RABBIT_HOSTNAME = os.environ.get('RABBIT_PORT_5672_TCP', 'rabbit') 77 | 78 | if RABBIT_HOSTNAME.startswith('tcp://'): 79 | RABBIT_HOSTNAME = RABBIT_HOSTNAME.split('//')[1] 80 | 81 | BROKER_URL = os.environ.get('BROKER_URL', 82 | '') 83 | if not BROKER_URL: 84 | BROKER_URL = 'amqp://{user}:{password}@{hostname}/{vhost}/'.format( 85 | user=os.environ.get('RABBIT_ENV_USER', 'admin'), 86 
| password=os.environ.get('RABBIT_ENV_RABBITMQ_PASS', 'mypass'), 87 | hostname=RABBIT_HOSTNAME, 88 | vhost=os.environ.get('RABBIT_ENV_VHOST', '')) 89 | 90 | # We don't want to have dead connections stored on rabbitmq, so we have to negotiate using heartbeats 91 | BROKER_HEARTBEAT = '?heartbeat=30' 92 | if not BROKER_URL.endswith(BROKER_HEARTBEAT): 93 | BROKER_URL += BROKER_HEARTBEAT 94 | 95 | BROKER_POOL_LIMIT = 1 96 | BROKER_CONNECTION_TIMEOUT = 10 97 | 98 | # Celery configuration 99 | 100 | # configure queues, currently we have only one 101 | CELERY_DEFAULT_QUEUE = 'default' 102 | CELERY_QUEUES = ( 103 | Queue('default', Exchange('default'), routing_key='default'), 104 | ) 105 | 106 | # Sensible settings for celery 107 | CELERY_ALWAYS_EAGER = False 108 | CELERY_ACKS_LATE = True 109 | CELERY_TASK_PUBLISH_RETRY = True 110 | CELERY_DISABLE_RATE_LIMITS = False 111 | 112 | # By default we will ignore result 113 | # If you want to see results and try out tasks interactively, change it to False 114 | # Or change this setting on tasks level 115 | CELERY_IGNORE_RESULT = True 116 | CELERY_SEND_TASK_ERROR_EMAILS = False 117 | CELERY_TASK_RESULT_EXPIRES = 600 118 | 119 | # Set redis as celery result backend 120 | CELERY_RESULT_BACKEND = 'redis://%s:%d/%d' % (REDIS_HOST, REDIS_PORT, REDIS_DB) 121 | CELERY_REDIS_MAX_CONNECTIONS = 1 122 | 123 | # Don't use pickle as serializer, json is much safer 124 | CELERY_TASK_SERIALIZER = "json" 125 | CELERY_ACCEPT_CONTENT = ['application/json'] 126 | 127 | CELERYD_HIJACK_ROOT_LOGGER = False 128 | CELERYD_PREFETCH_MULTIPLIER = 1 129 | CELERYD_MAX_TASKS_PER_CHILD = 1000 130 | --------------------------------------------------------------------------------