├── .bumpversion.cfg ├── .coveragerc ├── .gitignore ├── .travis.yml ├── AUTHORS.txt ├── CONTRIBUTING.md ├── LICENSE ├── MANIFEST.in ├── README.md ├── background_task ├── __init__.py ├── admin.py ├── apps.py ├── exceptions.py ├── management │ ├── __init__.py │ └── commands │ │ ├── __init__.py │ │ └── process_tasks.py ├── migrations │ ├── 0001_initial.py │ ├── 0002_auto_20170927_1109.py │ └── __init__.py ├── models.py ├── settings.py ├── signals.py ├── tasks.py ├── tests │ ├── __init__.py │ ├── test_settings.py │ ├── test_settings_async.py │ └── test_tasks.py └── utils.py ├── classifiers ├── docs └── index.rst ├── manage.py ├── requirements-test.txt ├── requirements.txt ├── runtests.py ├── setup.py └── tox.ini /.bumpversion.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | commit = True 3 | tag = True 4 | current_version = 1.2.8.post1 5 | parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-(?P[a-z]+))? 6 | serialize = 7 | {major}.{minor}.{patch}-{release} 8 | {major}.{minor}.{patch} 9 | 10 | [bumpversion:file:background_task/__init__.py] 11 | 12 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = background_task 3 | omit = 4 | background_task/tests/* 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .coverage 2 | *.pyc 3 | *.swp 4 | .DS_Store 5 | /django_background_task.egg-info 6 | /html_coverage 7 | /bin 8 | /lib 9 | /.Python 10 | /src 11 | /include 12 | /dist 13 | # Byte-compiled / optimized / DLL files 14 | __pycache__/ 15 | *.py[cod] 16 | *$py.class 17 | 18 | # C extensions 19 | *.so 20 | 21 | # Distribution / packaging 22 | .Python 23 | env*/ 24 | build/ 25 | develop-eggs/ 26 | dist/ 27 | downloads/ 28 | eggs/ 29 | .eggs/ 30 | lib/ 31 | lib64/ 
32 | parts/ 33 | sdist/ 34 | var/ 35 | *.egg-info/ 36 | .installed.cfg 37 | *.egg 38 | 39 | # PyInstaller 40 | # Usually these files are written by a python script from a template 41 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 42 | *.manifest 43 | *.spec 44 | 45 | # Installer logs 46 | pip-log.txt 47 | pip-delete-this-directory.txt 48 | 49 | # Unit test / coverage reports 50 | htmlcov/ 51 | .tox/ 52 | .coverage 53 | .coverage.* 54 | .cache 55 | nosetests.xml 56 | coverage.xml 57 | *,cover 58 | 59 | # Translations 60 | *.mo 61 | *.pot 62 | 63 | # Django stuff: 64 | *.log 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | test_db 73 | .idea 74 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: false 2 | language: python 3 | 4 | # this list can be easily checked by running `tox -l` 5 | matrix: 6 | include: 7 | 8 | - { python: "2.7", env: TOX_ENV=py27-django18-async } 9 | - { python: "2.7", env: TOX_ENV=py27-django18-sync } 10 | - { python: "2.7", env: TOX_ENV=py27-django111-async } 11 | - { python: "2.7", env: TOX_ENV=py27-django111-sync } 12 | 13 | - { python: "3.4", env: TOX_ENV=py34-django18-async } 14 | - { python: "3.4", env: TOX_ENV=py34-django18-sync } 15 | - { python: "3.4", env: TOX_ENV=py34-django111-async } 16 | - { python: "3.4", env: TOX_ENV=py34-django111-sync } 17 | - { python: "3.4", env: TOX_ENV=py34-django20-sync } 18 | - { python: "3.4", env: TOX_ENV=py34-django20-async } 19 | 20 | - { python: "3.5", env: TOX_ENV=py35-django18-async } 21 | - { python: "3.5", env: TOX_ENV=py35-django18-sync } 22 | - { python: "3.5", env: TOX_ENV=py35-django111-async } 23 | - { python: "3.5", env: TOX_ENV=py35-django111-sync } 24 | - { python: "3.5", env: TOX_ENV=py35-django20-async } 25 | - { python: "3.5", env: TOX_ENV=py35-django20-sync } 26 | - { 
python: "3.5", env: TOX_ENV=py35-django21-async } 27 | - { python: "3.5", env: TOX_ENV=py35-django21-sync } 28 | 29 | - { python: "3.6", env: TOX_ENV=py36-django111-async } 30 | - { python: "3.6", env: TOX_ENV=py36-django111-sync } 31 | - { python: "3.6", env: TOX_ENV=py36-django20-async } 32 | - { python: "3.6", env: TOX_ENV=py36-django20-sync } 33 | - { python: "3.6", env: TOX_ENV=py36-django21-async } 34 | - { python: "3.6", env: TOX_ENV=py36-django21-sync } 35 | - { python: "3.6", env: TOX_ENV=py36-django30-async } 36 | - { python: "3.6", env: TOX_ENV=py36-django30-sync } 37 | 38 | # the dist: xenial, sudo: true settings are currently needed to test with python 3.7 39 | - { python: "3.7", env: TOX_ENV=py37-django20-async, dist: xenial, sudo: true } 40 | - { python: "3.7", env: TOX_ENV=py37-django20-sync, dist: xenial, sudo: true } 41 | - { python: "3.7", env: TOX_ENV=py37-django21-async, dist: xenial, sudo: true } 42 | - { python: "3.7", env: TOX_ENV=py37-django21-sync, dist: xenial, sudo: true } 43 | - { python: "3.7", env: TOX_ENV=py37-django30-async, dist: xenial, sudo: true } 44 | - { python: "3.7", env: TOX_ENV=py37-django30-sync, dist: xenial, sudo: true } 45 | 46 | # the dist: xenial, sudo: true settings are currently needed to test with python 3.8 47 | - { python: "3.8", env: TOX_ENV=py38-django30-async, dist: xenial, sudo: true } 48 | - { python: "3.8", env: TOX_ENV=py38-django30-sync, dist: xenial, sudo: true } 49 | 50 | install: pip install tox-travis coveralls 51 | 52 | script: tox -e $TOX_ENV 53 | 54 | after_success: coveralls 55 | -------------------------------------------------------------------------------- /AUTHORS.txt: -------------------------------------------------------------------------------- 1 | Contributors 2 | 3 | * John Montgomery (lilspikey & johnsensible, initiator) 4 | * Yannik Ammann (yannik-ammann) 5 | * Luthaf (luthaf) 6 | * Philippe O. Wagner (philippeowagner) 7 | * weijia (weijia) 8 | * tdruez (tdruez) 9 | * Chad G. 
Hansen (chadgh) 10 | * Grant McConnaughey (grantmcconnaughey) 11 | * James Mason (bear454) 12 | * Pavel Zagrebelin (Zagrebelin) 13 | * Stephen Brown (december1981) 14 | * Adam Johnson (adamchainz) 15 | * (kherrett) 16 | * Johannes Dillmann (kleingeist) 17 | * TheLovinator1 18 | * AMRivkin 19 | * jtimmons 20 | * jedie 21 | * lorddaedra 22 | * dbreen 23 | * Osman A. Osman (oaosman84) 24 | * David Vogt (winged) 25 | * Aleksandr Levchuk (alevchuk) 26 | * Kai-Lun Huang (kaibaooo) 27 | * cedricfarinazzo 28 | 29 | Your name could stand here :) 30 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to this project 2 | 3 | Please take a moment to review this document in order to make the contribution 4 | process easy and effective for everyone involved. 5 | 6 | Following these guidelines helps to communicate that you respect the time of 7 | the developers managing and developing this open source project. In return, 8 | they should reciprocate that respect in addressing your issue or assessing 9 | patches and features. 10 | 11 | 12 | ## Using the issue tracker 13 | 14 | The issue tracker is the preferred channel for [bug reports](#bugs), 15 | [features requests](#features) and [submitting pull 16 | requests](#pull-requests), but please respect the following restrictions: 17 | 18 | * Please **do not** use the issue tracker for personal support requests (use 19 | [Stack Overflow](http://stackoverflow.com) or IRC). 20 | 21 | * Please **do not** derail or troll issues. Keep the discussion on topic and 22 | respect the opinions of others. 23 | 24 | 25 | 26 | ## Bug reports 27 | 28 | A bug is a _demonstrable problem_ that is caused by the code in the repository. 29 | Good bug reports are extremely helpful - thank you! 30 | 31 | Guidelines for bug reports: 32 | 33 | 1. 
**Use the GitHub issue search** — check if the issue has already been 34 | reported. 35 | 36 | 2. **Check if the issue has been fixed** — try to reproduce it using the 37 | latest `master` or development branch in the repository. 38 | 39 | 3. **Isolate the problem** — create a [reduced test 40 | case](http://css-tricks.com/reduced-test-cases/) and a live example. 41 | 42 | A good bug report shouldn't leave others needing to chase you up for more 43 | information. Please try to be as detailed as possible in your report. What is 44 | your environment? What steps will reproduce the issue? What browser(s) and OS 45 | experience the problem? What would you expect to be the outcome? All these 46 | details will help people to fix any potential bugs. 47 | 48 | Example: 49 | 50 | > Short and descriptive example bug report title 51 | > 52 | > A summary of the issue and the browser/OS environment in which it occurs. If 53 | > suitable, include the steps required to reproduce the bug. 54 | > 55 | > 1. This is the first step 56 | > 2. This is the second step 57 | > 3. Further steps, etc. 58 | > 59 | > `` - a link to the reduced test case 60 | > 61 | > Any other information you want to share that is relevant to the issue being 62 | > reported. This might include the lines of code that you have identified as 63 | > causing the bug, and potential solutions (and your opinions on their 64 | > merits). 65 | 66 | 67 | 68 | ## Feature requests 69 | 70 | Feature requests are welcome. But take a moment to find out whether your idea 71 | fits with the scope and aims of the project. It's up to *you* to make a strong 72 | case to convince the project's developers of the merits of this feature. Please 73 | provide as much detail and context as possible. 74 | 75 | 76 | 77 | ## Pull requests 78 | 79 | Good pull requests - patches, improvements, new features - are a fantastic 80 | help. They should remain focused in scope and avoid containing unrelated 81 | commits. 
82 | 83 | **Please ask first** before embarking on any significant pull request (e.g. 84 | implementing features, refactoring code, porting to a different language), 85 | otherwise you risk spending a lot of time working on something that the 86 | project's developers might not want to merge into the project. 87 | 88 | Please adhere to the coding conventions used throughout a project (indentation, 89 | accurate comments, etc.) and any other requirements (such as test coverage). 90 | 91 | Follow this process if you'd like your work considered for inclusion in the 92 | project: 93 | 94 | 1. [Fork](http://help.github.com/fork-a-repo/) the project, clone your fork, 95 | and configure the remotes: 96 | 97 | ```bash 98 | # Clone your fork of the repo into the current directory 99 | git clone https://github.com// 100 | # Navigate to the newly cloned directory 101 | cd 102 | # Assign the original repo to a remote called "upstream" 103 | git remote add upstream https://github.com// 104 | ``` 105 | 106 | 2. If you cloned a while ago, get the latest changes from upstream: 107 | 108 | ```bash 109 | git checkout 110 | git pull upstream 111 | ``` 112 | 113 | 3. Create a new topic branch (off the main project development branch) to 114 | contain your feature, change, or fix: 115 | 116 | ```bash 117 | git checkout -b 118 | ``` 119 | 120 | 4. Commit your changes in logical chunks. Please adhere to these [git commit 121 | message guidelines](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html) 122 | or your code is unlikely be merged into the main project. Use Git's 123 | [interactive rebase](https://help.github.com/articles/interactive-rebase) 124 | feature to tidy up your commits before making them public. 125 | 126 | 5. Locally merge (or rebase) the upstream development branch into your topic branch: 127 | 128 | ```bash 129 | git pull [--rebase] upstream 130 | ``` 131 | 132 | 6. 
Push your topic branch up to your fork: 133 | 134 | ```bash 135 | git push origin 136 | ``` 137 | 138 | 7. [Open a Pull Request](https://help.github.com/articles/using-pull-requests/) 139 | with a clear title and description. 140 | 141 | **IMPORTANT**: By submitting a patch, you agree to allow the project owner to 142 | license your work under the same license as that used by the project. 143 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2015, arteria GmbH. 2 | Copyright (c) 2010, John Montgomery. 3 | All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without modification, 6 | are permitted provided that the following conditions are met: 7 | 8 | 1. Redistributions of source code must retain the above copyright notice, 9 | this list of conditions and the following disclaimer. 10 | 11 | 2. Redistributions in binary form must reproduce the above copyright 12 | notice, this list of conditions and the following disclaimer in the 13 | documentation and/or other materials provided with the distribution. 14 | 15 | 3. Neither the name of Django Bakground Task nor the names of its 16 | contributors may be used to endorse or promote products derived from 17 | this software without specific prior written permission. 18 | 19 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 20 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 21 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR 23 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 24 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 25 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 26 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 27 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 28 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.md 2 | include LICENSE 3 | include AUTHORS.txt 4 | include requirements.txt 5 | include classifiers 6 | recursive-include tests * 7 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # This code has been merged into the original repository and the original package is now updated. 2 | 3 | Please use the original repository 4 | at [django-background-tasks](https://github.com/django-background-tasks/django-background-tasks) and the original 5 | package at [django-background-tasks](https://pypi.org/project/django-background-tasks/) going forward. 6 | -------------------------------------------------------------------------------- /background_task/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | __version__ = '1.2.8.post1' 3 | 4 | import warnings 5 | 6 | warnings.warn( 7 | "This package is deprecated. 
All code has been merged into the original package django-background-tasks.", 8 | DeprecationWarning, 9 | stacklevel=2 10 | ) 11 | 12 | default_app_config = 'background_task.apps.BackgroundTasksAppConfig' 13 | 14 | 15 | def background(*arg, **kw): 16 | from background_task.tasks import tasks 17 | return tasks.background(*arg, **kw) 18 | -------------------------------------------------------------------------------- /background_task/admin.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from django.contrib import admin 3 | from background_task.models import Task 4 | from background_task.models import CompletedTask 5 | 6 | 7 | def inc_priority(modeladmin, request, queryset): 8 | for obj in queryset: 9 | obj.priority += 1 10 | obj.save() 11 | inc_priority.short_description = "priority += 1" 12 | 13 | def dec_priority(modeladmin, request, queryset): 14 | for obj in queryset: 15 | obj.priority -= 1 16 | obj.save() 17 | dec_priority.short_description = "priority -= 1" 18 | 19 | class TaskAdmin(admin.ModelAdmin): 20 | display_filter = ['task_name'] 21 | search_fields = ['task_name', 'task_params', ] 22 | list_display = ['task_name', 'task_params', 'run_at', 'priority', 'attempts', 'has_error', 'locked_by', 'locked_by_pid_running', ] 23 | actions = [inc_priority, dec_priority] 24 | 25 | class CompletedTaskAdmin(admin.ModelAdmin): 26 | display_filter = ['task_name'] 27 | search_fields = ['task_name', 'task_params', ] 28 | list_display = ['task_name', 'task_params', 'run_at', 'priority', 'attempts', 'has_error', 'locked_by', 'locked_by_pid_running', ] 29 | 30 | 31 | admin.site.register(Task, TaskAdmin) 32 | admin.site.register(CompletedTask, CompletedTaskAdmin) 33 | -------------------------------------------------------------------------------- /background_task/apps.py: -------------------------------------------------------------------------------- 1 | from django.apps import AppConfig 2 | 3 | 4 | class 
BackgroundTasksAppConfig(AppConfig): 5 | name = 'background_task' 6 | from background_task import __version__ as version_info 7 | verbose_name = 'Background Tasks ({})'.format(version_info) 8 | 9 | def ready(self): 10 | import background_task.signals # noqa 11 | -------------------------------------------------------------------------------- /background_task/exceptions.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | 4 | class BackgroundTaskError(Exception): 5 | 6 | def __init__(self, message, errors=None): 7 | super(BackgroundTaskError, self).__init__(message) 8 | self.errors = errors 9 | 10 | 11 | class InvalidTaskError(BackgroundTaskError): 12 | """ 13 | The task will not be rescheduled if it fails with this error 14 | """ 15 | pass 16 | -------------------------------------------------------------------------------- /background_task/management/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iamjonmiller/django-background-tasks/e8967f7e6bbd5dc04625ac3a0641911e4e751fa6/background_task/management/__init__.py -------------------------------------------------------------------------------- /background_task/management/commands/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iamjonmiller/django-background-tasks/e8967f7e6bbd5dc04625ac3a0641911e4e751fa6/background_task/management/commands/__init__.py -------------------------------------------------------------------------------- /background_task/management/commands/process_tasks.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import logging 3 | import random 4 | import sys 5 | import time 6 | 7 | from django import VERSION 8 | from django.core.management.base import BaseCommand 9 | from django.utils import autoreload 10 | 
11 | from background_task.tasks import tasks, autodiscover 12 | from background_task.utils import SignalManager 13 | from django.db import close_old_connections as close_connection 14 | 15 | 16 | logger = logging.getLogger(__name__) 17 | 18 | 19 | def _configure_log_std(): 20 | class StdOutWrapper(object): 21 | def write(self, s): 22 | logger.info(s) 23 | 24 | class StdErrWrapper(object): 25 | def write(self, s): 26 | logger.error(s) 27 | sys.stdout = StdOutWrapper() 28 | sys.stderr = StdErrWrapper() 29 | 30 | 31 | class Command(BaseCommand): 32 | help = 'Run tasks that are scheduled to run on the queue' 33 | 34 | # Command options are specified in an abstract way to enable Django < 1.8 compatibility 35 | OPTIONS = ( 36 | (('--duration', ), { 37 | 'action': 'store', 38 | 'dest': 'duration', 39 | 'type': int, 40 | 'default': 0, 41 | 'help': 'Run task for this many seconds (0 or less to run forever) - default is 0', 42 | }), 43 | (('--sleep', ), { 44 | 'action': 'store', 45 | 'dest': 'sleep', 46 | 'type': float, 47 | 'default': 5.0, 48 | 'help': 'Sleep for this many seconds before checking for new tasks (if none were found) - default is 5', 49 | }), 50 | (('--queue', ), { 51 | 'action': 'store', 52 | 'dest': 'queue', 53 | 'help': 'Only process tasks on this named queue', 54 | }), 55 | (('--log-std', ), { 56 | 'action': 'store_true', 57 | 'dest': 'log_std', 58 | 'help': 'Redirect stdout and stderr to the logging system', 59 | }), 60 | (('--dev', ), { 61 | 'action': 'store_true', 62 | 'dest': 'dev', 63 | 'help': 'Auto-reload your code on changes. 
Use this only for development', 64 | }), 65 | ) 66 | 67 | if VERSION < (1, 8): 68 | from optparse import make_option 69 | option_list = BaseCommand.option_list + tuple([make_option(*args, **kwargs) for args, kwargs in OPTIONS]) 70 | 71 | # Used in Django >= 1.8 72 | def add_arguments(self, parser): 73 | for (args, kwargs) in self.OPTIONS: 74 | parser.add_argument(*args, **kwargs) 75 | 76 | def __init__(self, *args, **kwargs): 77 | super(Command, self).__init__(*args, **kwargs) 78 | self.sig_manager = None 79 | self._tasks = tasks 80 | 81 | def run(self, *args, **options): 82 | duration = options.get('duration', 0) 83 | sleep = options.get('sleep', 5.0) 84 | queue = options.get('queue', None) 85 | log_std = options.get('log_std', False) 86 | is_dev = options.get('dev', False) 87 | sig_manager = self.sig_manager 88 | 89 | if is_dev: 90 | # raise last Exception is exist 91 | autoreload.raise_last_exception() 92 | 93 | if log_std: 94 | _configure_log_std() 95 | 96 | autodiscover() 97 | 98 | start_time = time.time() 99 | 100 | while (duration <= 0) or (time.time() - start_time) <= duration: 101 | if sig_manager.kill_now: 102 | # shutting down gracefully 103 | break 104 | 105 | if not self._tasks.run_next_task(queue): 106 | # there were no tasks in the queue, let's recover. 107 | close_connection() 108 | logger.debug('waiting for tasks') 109 | time.sleep(sleep) 110 | else: 111 | # there were some tasks to process, let's check if there is more work to do after a little break. 
112 | time.sleep(random.uniform(sig_manager.time_to_wait[0], sig_manager.time_to_wait[1])) 113 | 114 | def handle(self, *args, **options): 115 | is_dev = options.get('dev', False) 116 | self.sig_manager = SignalManager() 117 | if is_dev: 118 | reload_func = autoreload.run_with_reloader 119 | if VERSION < (2, 2): 120 | reload_func = autoreload.main 121 | reload_func(self.run, *args, **options) 122 | else: 123 | self.run(*args, **options) 124 | -------------------------------------------------------------------------------- /background_task/migrations/0001_initial.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by Django 1.10.6 on 2017-04-03 21:42 3 | from __future__ import unicode_literals 4 | 5 | from django.db import migrations, models 6 | import django.db.models.deletion 7 | 8 | 9 | class Migration(migrations.Migration): 10 | 11 | initial = True 12 | 13 | dependencies = [ 14 | ('contenttypes', '0002_remove_content_type_name'), 15 | ] 16 | 17 | operations = [ 18 | migrations.CreateModel( 19 | name='CompletedTask', 20 | fields=[ 21 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), 22 | ('task_name', models.CharField(db_index=True, max_length=190)), 23 | ('task_params', models.TextField()), 24 | ('task_hash', models.CharField(db_index=True, max_length=40)), 25 | ('verbose_name', models.CharField(blank=True, max_length=255, null=True)), 26 | ('priority', models.IntegerField(db_index=True, default=0)), 27 | ('run_at', models.DateTimeField(db_index=True)), 28 | ('repeat', models.BigIntegerField(choices=[(3600, 'hourly'), (86400, 'daily'), (604800, 'weekly'), (1209600, 'every 2 weeks'), (2419200, 'every 4 weeks'), (0, 'never')], default=0)), 29 | ('repeat_until', models.DateTimeField(blank=True, null=True)), 30 | ('queue', models.CharField(blank=True, db_index=True, max_length=190, null=True)), 31 | ('attempts', 
models.IntegerField(db_index=True, default=0)), 32 | ('failed_at', models.DateTimeField(blank=True, db_index=True, null=True)), 33 | ('last_error', models.TextField(blank=True)), 34 | ('locked_by', models.CharField(blank=True, db_index=True, max_length=64, null=True)), 35 | ('locked_at', models.DateTimeField(blank=True, db_index=True, null=True)), 36 | ('creator_object_id', models.PositiveIntegerField(blank=True, null=True)), 37 | ('creator_content_type', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='completed_background_task', to='contenttypes.ContentType')), 38 | ], 39 | ), 40 | migrations.CreateModel( 41 | name='Task', 42 | fields=[ 43 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), 44 | ('task_name', models.CharField(db_index=True, max_length=190)), 45 | ('task_params', models.TextField()), 46 | ('task_hash', models.CharField(db_index=True, max_length=40)), 47 | ('verbose_name', models.CharField(blank=True, max_length=255, null=True)), 48 | ('priority', models.IntegerField(db_index=True, default=0)), 49 | ('run_at', models.DateTimeField(db_index=True)), 50 | ('repeat', models.BigIntegerField(choices=[(3600, 'hourly'), (86400, 'daily'), (604800, 'weekly'), (1209600, 'every 2 weeks'), (2419200, 'every 4 weeks'), (0, 'never')], default=0)), 51 | ('repeat_until', models.DateTimeField(blank=True, null=True)), 52 | ('queue', models.CharField(blank=True, db_index=True, max_length=190, null=True)), 53 | ('attempts', models.IntegerField(db_index=True, default=0)), 54 | ('failed_at', models.DateTimeField(blank=True, db_index=True, null=True)), 55 | ('last_error', models.TextField(blank=True)), 56 | ('locked_by', models.CharField(blank=True, db_index=True, max_length=64, null=True)), 57 | ('locked_at', models.DateTimeField(blank=True, db_index=True, null=True)), 58 | ('creator_object_id', models.PositiveIntegerField(blank=True, null=True)), 59 | 
('creator_content_type', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='background_task', to='contenttypes.ContentType')), 60 | ], 61 | options={ 62 | 'db_table': 'background_task', 63 | }, 64 | ), 65 | ] 66 | -------------------------------------------------------------------------------- /background_task/migrations/0002_auto_20170927_1109.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by Django 1.11.5 on 2017-09-27 16:09 3 | from __future__ import unicode_literals 4 | 5 | from django.db import migrations, models 6 | 7 | 8 | class Migration(migrations.Migration): 9 | 10 | dependencies = [ 11 | ('background_task', '0001_initial'), 12 | ] 13 | 14 | operations = [ 15 | migrations.AlterField( 16 | model_name='completedtask', 17 | name='queue', 18 | field=models.CharField(blank=True, db_index=True, max_length=190, null=True), 19 | ), 20 | migrations.AlterField( 21 | model_name='completedtask', 22 | name='task_name', 23 | field=models.CharField(db_index=True, max_length=190), 24 | ), 25 | migrations.AlterField( 26 | model_name='task', 27 | name='queue', 28 | field=models.CharField(blank=True, db_index=True, max_length=190, null=True), 29 | ), 30 | migrations.AlterField( 31 | model_name='task', 32 | name='task_name', 33 | field=models.CharField(db_index=True, max_length=190), 34 | ), 35 | ] 36 | -------------------------------------------------------------------------------- /background_task/migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iamjonmiller/django-background-tasks/e8967f7e6bbd5dc04625ac3a0641911e4e751fa6/background_task/migrations/__init__.py -------------------------------------------------------------------------------- /background_task/models.py: -------------------------------------------------------------------------------- 1 | # -*- 
coding: utf-8 -*- 2 | from datetime import timedelta 3 | from hashlib import sha1 4 | import json 5 | import logging 6 | import os 7 | import traceback 8 | 9 | from io import StringIO 10 | from django.contrib.contenttypes.fields import GenericForeignKey 11 | from django.contrib.contenttypes.models import ContentType 12 | from django.db import models 13 | from django.db.models import Q 14 | from django.utils import timezone 15 | from six import python_2_unicode_compatible 16 | 17 | from background_task.exceptions import InvalidTaskError 18 | from background_task.settings import app_settings 19 | from background_task.signals import task_failed 20 | from background_task.signals import task_rescheduled 21 | 22 | 23 | logger = logging.getLogger(__name__) 24 | 25 | 26 | class TaskQuerySet(models.QuerySet): 27 | 28 | def created_by(self, creator): 29 | """ 30 | :return: A Task queryset filtered by creator 31 | """ 32 | content_type = ContentType.objects.get_for_model(creator) 33 | return self.filter( 34 | creator_content_type=content_type, 35 | creator_object_id=creator.id, 36 | ) 37 | 38 | 39 | class TaskManager(models.Manager): 40 | 41 | def get_queryset(self): 42 | return TaskQuerySet(self.model, using=self._db) 43 | 44 | def created_by(self, creator): 45 | return self.get_queryset().created_by(creator) 46 | 47 | def find_available(self, queue=None): 48 | now = timezone.now() 49 | qs = self.unlocked(now) 50 | if queue: 51 | qs = qs.filter(queue=queue) 52 | ready = qs.filter(run_at__lte=now, failed_at=None) 53 | _priority_ordering = '{}priority'.format( 54 | app_settings.BACKGROUND_TASK_PRIORITY_ORDERING) 55 | ready = ready.order_by(_priority_ordering, 'run_at') 56 | 57 | if app_settings.BACKGROUND_TASK_RUN_ASYNC: 58 | currently_failed = self.failed().count() 59 | currently_locked = self.locked(now).count() 60 | count = app_settings.BACKGROUND_TASK_ASYNC_THREADS - \ 61 | (currently_locked - currently_failed) 62 | if count > 0: 63 | ready = ready[:count] 64 | else: 65 | 
ready = self.none() 66 | return ready 67 | 68 | def unlocked(self, now): 69 | max_run_time = app_settings.BACKGROUND_TASK_MAX_RUN_TIME 70 | qs = self.get_queryset() 71 | expires_at = now - timedelta(seconds=max_run_time) 72 | unlocked = Q(locked_by=None) | Q(locked_at__lt=expires_at) 73 | return qs.filter(unlocked) 74 | 75 | def locked(self, now): 76 | max_run_time = app_settings.BACKGROUND_TASK_MAX_RUN_TIME 77 | qs = self.get_queryset() 78 | expires_at = now - timedelta(seconds=max_run_time) 79 | locked = Q(locked_by__isnull=False) | Q(locked_at__gt=expires_at) 80 | return qs.filter(locked) 81 | 82 | def failed(self): 83 | """ 84 | `currently_locked - currently_failed` in `find_available` assues that 85 | tasks marked as failed are also in processing by the running PID. 86 | """ 87 | qs = self.get_queryset() 88 | return qs.filter(failed_at__isnull=False) 89 | 90 | def new_task(self, task_name, args=None, kwargs=None, 91 | run_at=None, priority=0, queue=None, verbose_name=None, 92 | creator=None, repeat=None, repeat_until=None, 93 | remove_existing_tasks=False): 94 | """ 95 | If `remove_existing_tasks` is True, all unlocked tasks with the identical task hash will be removed. 96 | The attributes `repeat` and `repeat_until` are not supported at the moment. 
97 | """ 98 | args = args or () 99 | kwargs = kwargs or {} 100 | if run_at is None: 101 | run_at = timezone.now() 102 | task_params = json.dumps((args, kwargs), sort_keys=True) 103 | s = "%s%s" % (task_name, task_params) 104 | task_hash = sha1(s.encode('utf-8')).hexdigest() 105 | if remove_existing_tasks: 106 | Task.objects.filter(task_hash=task_hash, 107 | locked_at__isnull=True).delete() 108 | return Task(task_name=task_name, 109 | task_params=task_params, 110 | task_hash=task_hash, 111 | priority=priority, 112 | run_at=run_at, 113 | queue=queue, 114 | verbose_name=verbose_name, 115 | creator=creator, 116 | repeat=repeat or Task.NEVER, 117 | repeat_until=repeat_until, 118 | ) 119 | 120 | def get_task(self, task_name, args=None, kwargs=None): 121 | args = args or () 122 | kwargs = kwargs or {} 123 | task_params = json.dumps((args, kwargs), sort_keys=True) 124 | s = "%s%s" % (task_name, task_params) 125 | task_hash = sha1(s.encode('utf-8')).hexdigest() 126 | qs = self.get_queryset() 127 | return qs.filter(task_hash=task_hash) 128 | 129 | def drop_task(self, task_name, args=None, kwargs=None): 130 | return self.get_task(task_name, args, kwargs).delete() 131 | 132 | 133 | @python_2_unicode_compatible 134 | class Task(models.Model): 135 | # the "name" of the task/function to be run 136 | task_name = models.CharField(max_length=190, db_index=True) 137 | # the json encoded parameters to pass to the task 138 | task_params = models.TextField() 139 | # a sha1 hash of the name and params, to lookup already scheduled tasks 140 | task_hash = models.CharField(max_length=40, db_index=True) 141 | 142 | verbose_name = models.CharField(max_length=255, null=True, blank=True) 143 | 144 | # what priority the task has 145 | priority = models.IntegerField(default=0, db_index=True) 146 | # when the task should be run 147 | run_at = models.DateTimeField(db_index=True) 148 | 149 | # Repeat choices are encoded as number of seconds 150 | # The repeat implementation is based on this encoding 
151 | HOURLY = 3600 152 | DAILY = 24 * HOURLY 153 | WEEKLY = 7 * DAILY 154 | EVERY_2_WEEKS = 2 * WEEKLY 155 | EVERY_4_WEEKS = 4 * WEEKLY 156 | NEVER = 0 157 | REPEAT_CHOICES = ( 158 | (HOURLY, 'hourly'), 159 | (DAILY, 'daily'), 160 | (WEEKLY, 'weekly'), 161 | (EVERY_2_WEEKS, 'every 2 weeks'), 162 | (EVERY_4_WEEKS, 'every 4 weeks'), 163 | (NEVER, 'never'), 164 | ) 165 | repeat = models.BigIntegerField(choices=REPEAT_CHOICES, default=NEVER) 166 | repeat_until = models.DateTimeField(null=True, blank=True) 167 | 168 | # the "name" of the queue this is to be run on 169 | queue = models.CharField(max_length=190, db_index=True, 170 | null=True, blank=True) 171 | 172 | # how many times the task has been tried 173 | attempts = models.IntegerField(default=0, db_index=True) 174 | # when the task last failed 175 | failed_at = models.DateTimeField(db_index=True, null=True, blank=True) 176 | # details of the error that occurred 177 | last_error = models.TextField(blank=True) 178 | 179 | # details of who's trying to run the task at the moment 180 | locked_by = models.CharField(max_length=64, db_index=True, 181 | null=True, blank=True) 182 | locked_at = models.DateTimeField(db_index=True, null=True, blank=True) 183 | 184 | creator_content_type = models.ForeignKey( 185 | ContentType, null=True, blank=True, 186 | related_name='background_task', on_delete=models.CASCADE 187 | ) 188 | creator_object_id = models.PositiveIntegerField(null=True, blank=True) 189 | creator = GenericForeignKey('creator_content_type', 'creator_object_id') 190 | 191 | objects = TaskManager() 192 | 193 | def locked_by_pid_running(self): 194 | """ 195 | Check if the locked_by process is still running. 196 | """ 197 | if self.locked_by: 198 | try: 199 | # won't kill the process. 
kill is a bad named system call 200 | os.kill(int(self.locked_by), 0) 201 | return True 202 | except: 203 | return False 204 | else: 205 | return None 206 | locked_by_pid_running.boolean = True 207 | 208 | def has_error(self): 209 | """ 210 | Check if the last_error field is empty. 211 | """ 212 | return bool(self.last_error) 213 | has_error.boolean = True 214 | 215 | def params(self): 216 | args, kwargs = json.loads(self.task_params) 217 | # need to coerce kwargs keys to str 218 | kwargs = dict((str(k), v) for k, v in kwargs.items()) 219 | return args, kwargs 220 | 221 | def lock(self, locked_by): 222 | now = timezone.now() 223 | unlocked = Task.objects.unlocked(now).filter(pk=self.pk) 224 | updated = unlocked.update(locked_by=locked_by, locked_at=now) 225 | if updated: 226 | return Task.objects.get(pk=self.pk) 227 | return None 228 | 229 | def _extract_error(self, type, err, tb): 230 | file = StringIO() 231 | traceback.print_exception(type, err, tb, None, file) 232 | return file.getvalue() 233 | 234 | def increment_attempts(self): 235 | self.attempts += 1 236 | self.save() 237 | 238 | def has_reached_max_attempts(self): 239 | max_attempts = app_settings.BACKGROUND_TASK_MAX_ATTEMPTS 240 | return self.attempts >= max_attempts 241 | 242 | def is_repeating_task(self): 243 | return self.repeat > self.NEVER 244 | 245 | def reschedule(self, type, err, traceback): 246 | ''' 247 | Set a new time to run the task in future, or create a CompletedTask and delete the Task 248 | if it has reached the maximum of allowed attempts 249 | ''' 250 | self.last_error = self._extract_error(type, err, traceback) 251 | self.increment_attempts() 252 | if self.has_reached_max_attempts() or isinstance(err, InvalidTaskError): 253 | self.failed_at = timezone.now() 254 | logger.warning('Marking task %s as failed', self) 255 | completed = self.create_completed_task() 256 | task_failed.send(sender=self.__class__, 257 | task_id=self.id, completed_task=completed) 258 | self.delete() 259 | else: 260 
| backoff = timedelta(seconds=(self.attempts ** 4) + 5) 261 | self.run_at = timezone.now() + backoff 262 | logger.warning('Rescheduling task %s for %s later at %s', self, 263 | backoff, self.run_at) 264 | task_rescheduled.send(sender=self.__class__, task=self) 265 | self.locked_by = None 266 | self.locked_at = None 267 | self.save() 268 | 269 | def create_completed_task(self): 270 | ''' 271 | Returns a new CompletedTask instance with the same values 272 | ''' 273 | completed_task = CompletedTask( 274 | task_name=self.task_name, 275 | task_params=self.task_params, 276 | task_hash=self.task_hash, 277 | priority=self.priority, 278 | run_at=timezone.now(), 279 | queue=self.queue, 280 | attempts=self.attempts, 281 | failed_at=self.failed_at, 282 | last_error=self.last_error, 283 | locked_by=self.locked_by, 284 | locked_at=self.locked_at, 285 | verbose_name=self.verbose_name, 286 | creator=self.creator, 287 | repeat=self.repeat, 288 | repeat_until=self.repeat_until, 289 | ) 290 | completed_task.save() 291 | return completed_task 292 | 293 | def create_repetition(self): 294 | """ 295 | :return: A new Task with an offset of self.repeat, or None if the self.repeat_until is reached 296 | """ 297 | if not self.is_repeating_task(): 298 | return None 299 | 300 | if self.repeat_until and self.repeat_until <= timezone.now(): 301 | # Repeat chain completed 302 | return None 303 | 304 | args, kwargs = self.params() 305 | new_run_at = self.run_at + timedelta(seconds=self.repeat) 306 | while new_run_at < timezone.now(): 307 | new_run_at += timedelta(seconds=self.repeat) 308 | 309 | new_task = TaskManager().new_task( 310 | task_name=self.task_name, 311 | args=args, 312 | kwargs=kwargs, 313 | run_at=new_run_at, 314 | priority=self.priority, 315 | queue=self.queue, 316 | verbose_name=self.verbose_name, 317 | creator=self.creator, 318 | repeat=self.repeat, 319 | repeat_until=self.repeat_until, 320 | ) 321 | new_task.save() 322 | return new_task 323 | 324 | def save(self, *arg, **kw): 325 
| # force NULL rather than empty string 326 | self.locked_by = self.locked_by or None 327 | return super(Task, self).save(*arg, **kw) 328 | 329 | def __str__(self): 330 | return u'{}'.format(self.verbose_name or self.task_name) 331 | 332 | class Meta: 333 | db_table = 'background_task' 334 | 335 | 336 | class CompletedTaskQuerySet(models.QuerySet): 337 | 338 | def created_by(self, creator): 339 | """ 340 | :return: A CompletedTask queryset filtered by creator 341 | """ 342 | content_type = ContentType.objects.get_for_model(creator) 343 | return self.filter( 344 | creator_content_type=content_type, 345 | creator_object_id=creator.id, 346 | ) 347 | 348 | def failed(self, within=None): 349 | """ 350 | :param within: A timedelta object 351 | :return: A queryset of CompletedTasks that failed within the given timeframe (e.g. less than 1h ago) 352 | """ 353 | qs = self.filter( 354 | failed_at__isnull=False, 355 | ) 356 | if within: 357 | time_limit = timezone.now() - within 358 | qs = qs.filter(failed_at__gt=time_limit) 359 | return qs 360 | 361 | def succeeded(self, within=None): 362 | """ 363 | :param within: A timedelta object 364 | :return: A queryset of CompletedTasks that completed successfully within the given timeframe 365 | (e.g. 
less than 1h ago) 366 | """ 367 | qs = self.filter( 368 | failed_at__isnull=True, 369 | ) 370 | if within: 371 | time_limit = timezone.now() - within 372 | qs = qs.filter(run_at__gt=time_limit) 373 | return qs 374 | 375 | 376 | @python_2_unicode_compatible 377 | class CompletedTask(models.Model): 378 | # the "name" of the task/function to be run 379 | task_name = models.CharField(max_length=190, db_index=True) 380 | # the json encoded parameters to pass to the task 381 | task_params = models.TextField() 382 | # a sha1 hash of the name and params, to lookup already scheduled tasks 383 | task_hash = models.CharField(max_length=40, db_index=True) 384 | 385 | verbose_name = models.CharField(max_length=255, null=True, blank=True) 386 | 387 | # what priority the task has 388 | priority = models.IntegerField(default=0, db_index=True) 389 | # when the task should be run 390 | run_at = models.DateTimeField(db_index=True) 391 | 392 | repeat = models.BigIntegerField( 393 | choices=Task.REPEAT_CHOICES, default=Task.NEVER) 394 | repeat_until = models.DateTimeField(null=True, blank=True) 395 | 396 | # the "name" of the queue this is to be run on 397 | queue = models.CharField(max_length=190, db_index=True, 398 | null=True, blank=True) 399 | 400 | # how many times the task has been tried 401 | attempts = models.IntegerField(default=0, db_index=True) 402 | # when the task last failed 403 | failed_at = models.DateTimeField(db_index=True, null=True, blank=True) 404 | # details of the error that occurred 405 | last_error = models.TextField(blank=True) 406 | 407 | # details of who's trying to run the task at the moment 408 | locked_by = models.CharField(max_length=64, db_index=True, 409 | null=True, blank=True) 410 | locked_at = models.DateTimeField(db_index=True, null=True, blank=True) 411 | 412 | creator_content_type = models.ForeignKey( 413 | ContentType, null=True, blank=True, 414 | related_name='completed_background_task', on_delete=models.CASCADE 415 | ) 416 | creator_object_id 
= models.PositiveIntegerField(null=True, blank=True) 417 | creator = GenericForeignKey('creator_content_type', 'creator_object_id') 418 | 419 | objects = CompletedTaskQuerySet.as_manager() 420 | 421 | def locked_by_pid_running(self): 422 | """ 423 | Check if the locked_by process is still running. 424 | """ 425 | if self.locked_by: 426 | try: 427 | # won't kill the process. kill is a bad named system call 428 | os.kill(int(self.locked_by), 0) 429 | return True 430 | except: 431 | return False 432 | else: 433 | return None 434 | locked_by_pid_running.boolean = True 435 | 436 | def has_error(self): 437 | """ 438 | Check if the last_error field is empty. 439 | """ 440 | return bool(self.last_error) 441 | has_error.boolean = True 442 | 443 | def __str__(self): 444 | return u'{} - {}'.format( 445 | self.verbose_name or self.task_name, 446 | self.run_at, 447 | ) 448 | -------------------------------------------------------------------------------- /background_task/settings.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import multiprocessing 3 | 4 | from django.conf import settings 5 | 6 | try: 7 | cpu_count = multiprocessing.cpu_count() 8 | except Exception: 9 | cpu_count = 1 10 | 11 | 12 | class AppSettings(object): 13 | """ 14 | """ 15 | @property 16 | def MAX_ATTEMPTS(self): 17 | """Control how many times a task will be attempted.""" 18 | return getattr(settings, 'MAX_ATTEMPTS', 25) 19 | 20 | @property 21 | def BACKGROUND_TASK_MAX_ATTEMPTS(self): 22 | """Control how many times a task will be attempted.""" 23 | return self.MAX_ATTEMPTS 24 | 25 | @property 26 | def MAX_RUN_TIME(self): 27 | """Maximum possible task run time, after which tasks will be unlocked and tried again.""" 28 | return getattr(settings, 'MAX_RUN_TIME', 3600) 29 | 30 | @property 31 | def BACKGROUND_TASK_MAX_RUN_TIME(self): 32 | """Maximum possible task run time, after which tasks will be unlocked and tried again.""" 33 | return 
self.MAX_RUN_TIME 34 | 35 | @property 36 | def BACKGROUND_TASK_RUN_ASYNC(self): 37 | """Control if tasks will run asynchronous in a ThreadPool.""" 38 | return getattr(settings, 'BACKGROUND_TASK_RUN_ASYNC', False) 39 | 40 | @property 41 | def BACKGROUND_TASK_ASYNC_THREADS(self): 42 | """Specify number of concurrent threads.""" 43 | return getattr(settings, 'BACKGROUND_TASK_ASYNC_THREADS', cpu_count) 44 | 45 | @property 46 | def BACKGROUND_TASK_PRIORITY_ORDERING(self): 47 | """ 48 | Control the ordering of tasks in the queue. 49 | Choose either `DESC` or `ASC`. 50 | 51 | https://en.m.wikipedia.org/wiki/Nice_(Unix) 52 | A niceness of −20 is the highest priority and 19 is the lowest priority. The default niceness for processes is inherited from its parent process and is usually 0. 53 | """ 54 | order = getattr(settings, 'BACKGROUND_TASK_PRIORITY_ORDERING', 'DESC') 55 | if order == 'ASC': 56 | prefix = '' 57 | else: 58 | prefix = '-' 59 | return prefix 60 | 61 | app_settings = AppSettings() 62 | -------------------------------------------------------------------------------- /background_task/signals.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import django.dispatch 3 | from django.db import connections 4 | from background_task.settings import app_settings 5 | 6 | task_created = django.dispatch.Signal(['task']) 7 | task_error = django.dispatch.Signal(['task']) 8 | task_rescheduled = django.dispatch.Signal(['task']) 9 | task_failed = django.dispatch.Signal(['task_id', 'completed_task']) 10 | task_successful = django.dispatch.Signal(['task_id', 'completed_task']) 11 | task_started = django.dispatch.Signal() 12 | task_finished = django.dispatch.Signal() 13 | 14 | 15 | # Register an event to reset saved queries when a Task is started. 
def reset_queries(**kwargs):
    """Clear each DB connection's query log when a task starts.

    Only relevant in async mode, where long-lived worker threads would
    otherwise accumulate entries in the (DEBUG-mode) query log.
    """
    if app_settings.BACKGROUND_TASK_RUN_ASYNC:
        for conn in connections.all():
            conn.queries_log.clear()


task_started.connect(reset_queries)


# Register an event to reset transaction state and close connections past
# their lifetime.
def close_old_connections(**kwargs):
    """Close DB connections that are unusable or past CONN_MAX_AGE.

    Connected to both task_started and task_finished so async worker
    threads never reuse a stale connection.
    """
    if app_settings.BACKGROUND_TASK_RUN_ASYNC:
        for conn in connections.all():
            conn.close_if_unusable_or_obsolete()


task_started.connect(close_old_connections)
task_finished.connect(close_old_connections)
def bg_runner(proxy_task, task=None, *args, **kwargs):
    """
    Executes the function attached to task. Used to enable threads.
    If a Task instance is provided, args and kwargs are ignored and retrieved
    from the Task itself.
    """
    signals.task_started.send(Task)
    try:
        func = getattr(proxy_task, 'task_function', None)
        if isinstance(task, Task):
            args, kwargs = task.params()
        else:
            # No Task instance supplied: look one up by name/params so it can
            # be marked complete (and repeated) after a successful run.
            task_name = getattr(proxy_task, 'name', None)
            task_queue = getattr(proxy_task, 'queue', None)
            task_qs = Task.objects.get_task(task_name=task_name, args=args, kwargs=kwargs)
            if task_queue:
                task_qs = task_qs.filter(queue=task_queue)
            if task_qs:
                task = task_qs[0]
        if func is None:
            raise BackgroundTaskError("Function is None, can't execute!")
        func(*args, **kwargs)

        if task:
            # task done, so can delete it
            task.increment_attempts()
            completed = task.create_completed_task()
            signals.task_successful.send(sender=task.__class__, task_id=task.id, completed_task=completed)
            task.create_repetition()
            task.delete()
            logger.info('Ran task and deleting %s', task)

    except Exception as ex:
        t, e, traceback = sys.exc_info()
        if task:
            logger.error('Rescheduling %s', task, exc_info=(t, e, traceback))
            signals.task_error.send(sender=ex.__class__, task=task)
            task.reschedule(t, e, traceback)
        else:
            # Previously this branch swallowed the exception without a trace;
            # at least log it so failures of untracked runs are visible.
            logger.error('Task failed and no Task record found to reschedule',
                         exc_info=(t, e, traceback))
        # Break the traceback reference cycle so frames are freed promptly.
        del traceback
    signals.task_finished.send(Task)


class PoolRunner:
    """Runs bg_runner on a lazily-created ThreadPool for async execution."""

    def __init__(self, bg_runner, num_processes):
        self._bg_runner = bg_runner
        self._num_processes = num_processes

    _pool_instance = None

    @property
    def _pool(self):
        # Created on first use so importing the module does not spawn threads.
        if not self._pool_instance:
            self._pool_instance = ThreadPool(processes=self._num_processes)
        return self._pool_instance

    def run(self, proxy_task, task=None, *args, **kwargs):
        """Submit one task execution to the pool (fire-and-forget)."""
        self._pool.apply_async(func=self._bg_runner, args=(proxy_task, task) + tuple(args), kwds=kwargs)

    __call__ = run
self._pool_runner = PoolRunner(bg_runner, app_settings.BACKGROUND_TASK_ASYNC_THREADS) 90 | 91 | def background(self, name=None, schedule=None, queue=None, 92 | remove_existing_tasks=False): 93 | ''' 94 | decorator to turn a regular function into 95 | something that gets run asynchronously in 96 | the background, at a later time 97 | ''' 98 | 99 | # see if used as simple decorator 100 | # where first arg is the function to be decorated 101 | fn = None 102 | if name and callable(name): 103 | fn = name 104 | name = None 105 | 106 | def _decorator(fn): 107 | _name = name 108 | if not _name: 109 | _name = '%s.%s' % (fn.__module__, fn.__name__) 110 | proxy = self._task_proxy_class(_name, fn, schedule, queue, 111 | remove_existing_tasks, self._runner) 112 | self._tasks[_name] = proxy 113 | return proxy 114 | 115 | if fn: 116 | return _decorator(fn) 117 | 118 | return _decorator 119 | 120 | def run_task(self, task_name, args=None, kwargs=None): 121 | # task_name can be either the name of a task or a Task instance. 
class TaskSchedule(object):
    """Value object describing when, with what priority, and how to schedule."""

    SCHEDULE = 0
    RESCHEDULE_EXISTING = 1
    CHECK_EXISTING = 2

    def __init__(self, run_at=None, priority=None, action=None):
        self._run_at = run_at
        self._priority = priority
        self._action = action

    @classmethod
    def create(cls, schedule):
        """Coerce an int/timedelta/datetime/dict/TaskSchedule into a TaskSchedule."""
        if isinstance(schedule, TaskSchedule):
            return schedule
        run_at = priority = action = None
        if schedule:
            if isinstance(schedule, (int, timedelta, datetime)):
                run_at = schedule
            else:
                run_at = schedule.get('run_at', None)
                priority = schedule.get('priority', None)
                action = schedule.get('action', None)
        return TaskSchedule(run_at=run_at, priority=priority, action=action)

    def merge(self, schedule):
        """Return a new schedule, filling unset fields from *schedule*."""
        merged = {}
        for field in ['run_at', 'priority', 'action']:
            attr = '_%s' % field
            mine = getattr(self, attr, None)
            merged[field] = getattr(schedule, attr, None) if mine is None else mine
        return TaskSchedule(**merged)

    @property
    def run_at(self):
        """Resolve the stored value (int seconds / timedelta / datetime) to a datetime."""
        when = self._run_at or timezone.now()
        if isinstance(when, int):
            when = timezone.now() + timedelta(seconds=when)
        if isinstance(when, timedelta):
            when = timezone.now() + when
        return when

    @property
    def priority(self):
        return self._priority or 0

    @property
    def action(self):
        return self._action or TaskSchedule.SCHEDULE

    def __repr__(self):
        return 'TaskSchedule(run_at=%s, priority=%s)' % (self._run_at,
                                                         self._priority)

    def __eq__(self, other):
        return (self._run_at == other._run_at and
                self._priority == other._priority and
                self._action == other._action)
@python_2_unicode_compatible
class TaskProxy(object):
    """Callable wrapper returned by @background; calling it schedules a Task."""

    def __init__(self, name, task_function, schedule, queue, remove_existing_tasks, runner):
        self.name = name
        # `.now` is an alias allowing the original function to be invoked
        # synchronously (proxy.now(...)).
        self.now = self.task_function = task_function
        self.runner = runner
        self.schedule = TaskSchedule.create(schedule)
        self.queue = queue
        self.remove_existing_tasks = remove_existing_tasks

    def __call__(self, *args, **kwargs):
        # Per-call scheduling options are popped from kwargs; whatever
        # remains is forwarded to the task function itself.
        call_schedule = TaskSchedule.create(kwargs.pop('schedule', None)).merge(self.schedule)
        priority = kwargs.pop('priority', call_schedule.priority)
        queue = kwargs.pop('queue', self.queue)
        verbose_name = kwargs.pop('verbose_name', None)
        creator = kwargs.pop('creator', None)
        repeat = kwargs.pop('repeat', None)
        repeat_until = kwargs.pop('repeat_until', None)
        remove_existing = kwargs.pop('remove_existing_tasks', self.remove_existing_tasks)

        return self.runner.schedule(self.name, args, kwargs,
                                    call_schedule.run_at, priority,
                                    call_schedule.action, queue, verbose_name,
                                    creator, repeat, repeat_until,
                                    remove_existing)

    def __str__(self):
        return 'TaskProxy(%s)' % self.name
def autodiscover():
    """
    Autodiscover tasks.py files in much the same way as admin app
    """
    from django.conf import settings

    for app in settings.INSTALLED_APPS:
        try:
            import_module("%s.tasks" % app)
        except ImportError:
            # App has no tasks module - skip it.
            # NOTE(review): this also hides ImportErrors raised *inside* an
            # app's tasks module, which can mask real bugs - TODO confirm
            # whether that is intentional before tightening.
            continue
12 | { 13 | 'NAME': 'test_db', 14 | }, 15 | 'USER': '', 16 | 'PASSWORD': '', 17 | 'HOST': '', 18 | 'PORT': '', 19 | } 20 | } 21 | 22 | INSTALLED_APPS = [ 23 | 'django.contrib.contenttypes', 24 | 'django.contrib.auth', 25 | 'background_task', 26 | ] 27 | 28 | SECRET_KEY = 'foo' 29 | 30 | USE_TZ = True 31 | BACKGROUND_TASK_RUN_ASYNC = False 32 | 33 | LOGGING = { 34 | 'version': 1, 35 | 'disable_existing_loggers': False, 36 | 'handlers': { 37 | 'console': { 38 | 'class': 'logging.StreamHandler', 39 | }, 40 | }, 41 | 'loggers': { 42 | 'background_task': { 43 | 'handlers': ['console'], 44 | 'level': 'INFO', 45 | }, 46 | }, 47 | } 48 | -------------------------------------------------------------------------------- /background_task/tests/test_settings_async.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from .test_settings import * 3 | 4 | BACKGROUND_TASK_RUN_ASYNC = True 5 | -------------------------------------------------------------------------------- /background_task/tests/test_tasks.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import time 3 | from datetime import timedelta, datetime 4 | from mock import patch, Mock 5 | 6 | from django.db.utils import OperationalError 7 | from django.contrib.auth.models import User 8 | from django.test import override_settings 9 | from django.test.testcases import TransactionTestCase 10 | from django.conf import settings 11 | from django.utils import timezone 12 | 13 | from background_task.exceptions import InvalidTaskError 14 | from background_task.tasks import tasks, TaskSchedule, TaskProxy 15 | from background_task.models import Task 16 | from background_task.models import CompletedTask 17 | from background_task import background 18 | from background_task.settings import app_settings 19 | 20 | _recorded = [] 21 | 22 | 23 | def mocked_run_task(name, args=None, kwargs=None): 24 | """ 25 | We 
def mocked_run_task(name, args=None, kwargs=None):
    """
    We mock tasks.run_task to give other threads some time to update the database.

    Otherwise we run into a locked database.
    """
    result = tasks.run_task(name, args, kwargs)
    if app_settings.BACKGROUND_TASK_RUN_ASYNC:
        time.sleep(1)
    return result


def mocked_run_next_task(queue=None):
    """
    We mock tasks.mocked_run_next_task to give other threads some time to update the database.

    Otherwise we run into a locked database.
    """
    result = tasks.run_next_task(queue)
    if app_settings.BACKGROUND_TASK_RUN_ASYNC:
        time.sleep(1)
    return result


run_task = mocked_run_task
run_next_task = mocked_run_next_task


def empty_task():
    """A task that does nothing; used to exercise scheduling machinery."""
    pass


def record_task(*arg, **kw):
    """A task that records its call arguments for later assertions."""
    _recorded.append((arg, kw))
class TestTaskProxy(TransactionTestCase):
    """Running a proxied task passes through args/kwargs unchanged."""

    def setUp(self):
        super(TestTaskProxy, self).setUp()
        self.proxy = tasks.background()(record_task)

    def test_run_task(self):
        for call_args, call_kwargs in [([], {}),
                                       (['hi'], {}),
                                       ([], {'kw': 1})]:
            run_task(self.proxy.name, call_args, call_kwargs)
            self.assertEqual((tuple(call_args), call_kwargs), _recorded.pop())
150 | self.assertEqual(1, TaskSchedule(priority=1).priority) 151 | self.assertEqual(2, TaskSchedule(priority=2).priority) 152 | 153 | def _within_one_second(self, d1, d2): 154 | self.assertTrue(isinstance(d1, datetime)) 155 | self.assertTrue(isinstance(d2, datetime)) 156 | self.assertTrue(abs(d1 - d2) <= timedelta(seconds=1)) 157 | 158 | def test_run_at(self): 159 | for schedule in [None, 0, timedelta(seconds=0)]: 160 | now = timezone.now() 161 | run_at = TaskSchedule(run_at=schedule).run_at 162 | self._within_one_second(run_at, now) 163 | 164 | now = timezone.now() 165 | run_at = TaskSchedule(run_at=now).run_at 166 | self._within_one_second(run_at, now) 167 | 168 | fixed_dt = timezone.now() + timedelta(seconds=60) 169 | run_at = TaskSchedule(run_at=fixed_dt).run_at 170 | self._within_one_second(run_at, fixed_dt) 171 | 172 | run_at = TaskSchedule(run_at=90).run_at 173 | self._within_one_second(run_at, timezone.now() + timedelta(seconds=90)) 174 | 175 | run_at = TaskSchedule(run_at=timedelta(seconds=35)).run_at 176 | self._within_one_second(run_at, timezone.now() + timedelta(seconds=35)) 177 | 178 | def test_create(self): 179 | fixed_dt = timezone.now() + timedelta(seconds=10) 180 | schedule = TaskSchedule.create({'run_at': fixed_dt}) 181 | self.assertEqual(schedule.run_at, fixed_dt) 182 | self.assertEqual(0, schedule.priority) 183 | self.assertEqual(TaskSchedule.SCHEDULE, schedule.action) 184 | 185 | schedule = {'run_at': fixed_dt, 'priority': 2, 186 | 'action': TaskSchedule.RESCHEDULE_EXISTING} 187 | schedule = TaskSchedule.create(schedule) 188 | self.assertEqual(schedule.run_at, fixed_dt) 189 | self.assertEqual(2, schedule.priority) 190 | self.assertEqual(TaskSchedule.RESCHEDULE_EXISTING, schedule.action) 191 | 192 | schedule = TaskSchedule.create(0) 193 | self._within_one_second(schedule.run_at, timezone.now()) 194 | 195 | schedule = TaskSchedule.create(10) 196 | self._within_one_second(schedule.run_at, 197 | timezone.now() + timedelta(seconds=10)) 198 | 199 | 
schedule = TaskSchedule.create(TaskSchedule(run_at=fixed_dt)) 200 | self.assertEqual(schedule.run_at, fixed_dt) 201 | self.assertEqual(0, schedule.priority) 202 | self.assertEqual(TaskSchedule.SCHEDULE, schedule.action) 203 | 204 | def test_merge(self): 205 | default = TaskSchedule(run_at=10, priority=2, 206 | action=TaskSchedule.RESCHEDULE_EXISTING) 207 | schedule = TaskSchedule.create(20).merge(default) 208 | 209 | self._within_one_second(timezone.now() + timedelta(seconds=20), 210 | schedule.run_at) 211 | self.assertEqual(2, schedule.priority) 212 | self.assertEqual(TaskSchedule.RESCHEDULE_EXISTING, schedule.action) 213 | 214 | schedule = TaskSchedule.create({'priority': 0}).merge(default) 215 | self._within_one_second(timezone.now() + timedelta(seconds=10), 216 | schedule.run_at) 217 | self.assertEqual(0, schedule.priority) 218 | self.assertEqual(TaskSchedule.RESCHEDULE_EXISTING, schedule.action) 219 | 220 | action = TaskSchedule.CHECK_EXISTING 221 | schedule = TaskSchedule.create({'action': action}).merge(default) 222 | self._within_one_second(timezone.now() + timedelta(seconds=10), 223 | schedule.run_at) 224 | self.assertEqual(2, schedule.priority) 225 | self.assertEqual(action, schedule.action) 226 | 227 | def test_repr(self): 228 | self.assertEqual('TaskSchedule(run_at=10, priority=0)', 229 | repr(TaskSchedule(run_at=10, priority=0))) 230 | 231 | 232 | class TestSchedulingTasks(TransactionTestCase): 233 | 234 | def test_background_gets_scheduled(self): 235 | self.result = None 236 | 237 | @tasks.background(name='test_background_gets_scheduled') 238 | def set_result(result): 239 | self.result = result 240 | 241 | # calling set_result should now actually create a record in the db 242 | set_result(1) 243 | 244 | all_tasks = Task.objects.all() 245 | self.assertEqual(1, all_tasks.count()) 246 | task = all_tasks[0] 247 | self.assertEqual('test_background_gets_scheduled', task.task_name) 248 | self.assertEqual('[[1], {}]', task.task_params) 249 | 250 | def 
class TestTaskRunner(TransactionTestCase):
    """Tests for the task runner's locking behaviour."""

    def setUp(self):
        super(TestTaskRunner, self).setUp()
        self.runner = tasks._runner

    def test_get_task_to_run_no_tasks(self):
        # No tasks in the database -> nothing to run.
        self.assertFalse(self.runner.get_task_to_run(tasks))

    def test_get_task_to_run(self):
        # NOTE: args must be a tuple -- the original passed ``(1)``,
        # which is just the int 1, not a one-element tuple.
        task = Task.objects.new_task('mytask', (1,), {})
        task.save()
        # Freshly saved tasks carry no lock.
        self.assertTrue(task.locked_by is None)
        self.assertTrue(task.locked_at is None)

        # Fetching a task to run must lock it for this worker.
        locked_task = self.runner.get_task_to_run(tasks)
        self.assertFalse(locked_task is None)
        self.assertFalse(locked_task.locked_by is None)
        self.assertEqual(self.runner.worker_name, locked_task.locked_by)
        self.assertFalse(locked_task.locked_at is None)
        self.assertEqual('mytask', locked_task.task_name)
Task.objects.new_task('mytask', creator=user) 366 | task.save() 367 | self.assertEqual(task.creator, user) 368 | 369 | def test_repeat(self): 370 | repeat_until = timezone.now() + timedelta(days=1) 371 | task = Task.objects.new_task('mytask', repeat=Task.HOURLY, repeat_until=repeat_until) 372 | task.save() 373 | self.assertEqual(task.repeat, Task.HOURLY) 374 | self.assertEqual(task.repeat_until, repeat_until) 375 | 376 | def test_create_completed_task(self): 377 | task = Task.objects.new_task( 378 | task_name='mytask', 379 | args=[1], 380 | kwargs={'q': 's'}, 381 | priority=1, 382 | queue='myqueue', 383 | verbose_name='My Task', 384 | creator=User.objects.create_user(username='bob', email='bob@example.com', password='12345'), 385 | ) 386 | task.save() 387 | completed_task = task.create_completed_task() 388 | self.assertEqual(completed_task.task_name, task.task_name) 389 | self.assertEqual(completed_task.task_params, task.task_params) 390 | self.assertEqual(completed_task.priority, task.priority) 391 | self.assertEqual(completed_task.queue, task.queue) 392 | self.assertEqual(completed_task.verbose_name, task.verbose_name) 393 | self.assertEqual(completed_task.creator, task.creator) 394 | self.assertEqual(completed_task.repeat, task.repeat) 395 | self.assertEqual(completed_task.repeat_until, task.repeat_until) 396 | 397 | 398 | class TestTasks(TransactionTestCase): 399 | 400 | def setUp(self): 401 | super(TestTasks, self).setUp() 402 | 403 | @tasks.background(name='set_fields') 404 | def set_fields(**fields): 405 | for key, value in fields.items(): 406 | setattr(self, key, value) 407 | 408 | @tasks.background(name='throws_error') 409 | def throws_error(): 410 | raise RuntimeError("an error") 411 | 412 | self.set_fields = set_fields 413 | self.throws_error = throws_error 414 | 415 | def test_run_next_task_nothing_scheduled(self): 416 | self.assertFalse(run_next_task()) 417 | 418 | def test_run_next_task_one_task_scheduled(self): 419 | self.set_fields(worked=True) 420 
| self.assertFalse(hasattr(self, 'worked')) 421 | 422 | self.assertTrue(run_next_task()) 423 | 424 | self.assertTrue(hasattr(self, 'worked')) 425 | self.assertTrue(self.worked) 426 | 427 | def test_run_next_task_several_tasks_scheduled(self): 428 | self.set_fields(one='1') 429 | self.set_fields(two='2') 430 | self.set_fields(three='3') 431 | 432 | for i in range(3): 433 | self.assertTrue(run_next_task()) 434 | 435 | self.assertFalse(run_next_task()) # everything should have been run 436 | 437 | for field, value in [('one', '1'), ('two', '2'), ('three', '3')]: 438 | self.assertTrue(hasattr(self, field)) 439 | self.assertEqual(value, getattr(self, field)) 440 | 441 | def test_run_next_task_error_handling(self): 442 | self.throws_error() 443 | 444 | all_tasks = Task.objects.all() 445 | self.assertEqual(1, all_tasks.count()) 446 | original_task = all_tasks[0] 447 | 448 | # should run, but trigger error 449 | self.assertTrue(run_next_task()) 450 | 451 | all_tasks = Task.objects.all() 452 | self.assertEqual(1, all_tasks.count()) 453 | 454 | failed_task = all_tasks[0] 455 | # should have an error recorded 456 | self.assertNotEqual('', failed_task.last_error) 457 | self.assertTrue(failed_task.failed_at is None) 458 | self.assertEqual(1, failed_task.attempts) 459 | 460 | # should have been rescheduled for the future 461 | # and no longer locked 462 | self.assertTrue(failed_task.run_at > original_task.run_at) 463 | self.assertTrue(failed_task.locked_by is None) 464 | self.assertTrue(failed_task.locked_at is None) 465 | 466 | def test_run_next_task_does_not_run_locked(self): 467 | self.set_fields(locked=True) 468 | self.assertFalse(hasattr(self, 'locked')) 469 | 470 | all_tasks = Task.objects.all() 471 | self.assertEqual(1, all_tasks.count()) 472 | original_task = all_tasks[0] 473 | original_task.lock('lockname') 474 | 475 | self.assertFalse(run_next_task()) 476 | 477 | self.assertFalse(hasattr(self, 'locked')) 478 | all_tasks = Task.objects.all() 479 | self.assertEqual(1, 
all_tasks.count()) 480 | 481 | def test_run_next_task_unlocks_after_MAX_RUN_TIME(self): 482 | self.set_fields(lock_overridden=True) 483 | 484 | all_tasks = Task.objects.all() 485 | self.assertEqual(1, all_tasks.count()) 486 | original_task = all_tasks[0] 487 | locked_task = original_task.lock('lockname') 488 | 489 | self.assertFalse(run_next_task()) 490 | 491 | self.assertFalse(hasattr(self, 'lock_overridden')) 492 | 493 | # put lot time into past 494 | expire_by = timedelta(seconds=(app_settings.BACKGROUND_TASK_MAX_RUN_TIME + 2)) 495 | locked_task.locked_at = locked_task.locked_at - expire_by 496 | locked_task.save() 497 | 498 | # so now we should be able to override the lock 499 | # and run the task 500 | self.assertTrue(run_next_task()) 501 | self.assertEqual(0, Task.objects.count()) 502 | 503 | self.assertTrue(hasattr(self, 'lock_overridden')) 504 | self.assertTrue(self.lock_overridden) 505 | 506 | def test_default_schedule_used_for_run_at(self): 507 | 508 | @tasks.background(name='default_schedule_used_for_run_at', schedule=60) 509 | def default_schedule_used_for_time(): 510 | pass 511 | 512 | now = timezone.now() 513 | default_schedule_used_for_time() 514 | 515 | all_tasks = Task.objects.all() 516 | self.assertEqual(1, all_tasks.count()) 517 | task = all_tasks[0] 518 | 519 | self.assertTrue(now < task.run_at) 520 | self.assertTrue((task.run_at - now) <= timedelta(seconds=61)) 521 | self.assertTrue((task.run_at - now) >= timedelta(seconds=59)) 522 | 523 | def test_default_schedule_used_for_priority(self): 524 | 525 | @tasks.background(name='default_schedule_used_for_priority', 526 | schedule={'priority': 2}) 527 | def default_schedule_used_for_priority(): 528 | pass 529 | 530 | default_schedule_used_for_priority() 531 | 532 | all_tasks = Task.objects.all() 533 | self.assertEqual(1, all_tasks.count()) 534 | task = all_tasks[0] 535 | self.assertEqual(2, task.priority) 536 | 537 | def test_non_default_schedule_used(self): 538 | default_run_at = timezone.now() + 
timedelta(seconds=90) 539 | 540 | @tasks.background(name='non_default_schedule_used', 541 | schedule={'run_at': default_run_at, 'priority': 2}) 542 | def default_schedule_used_for_priority(): 543 | pass 544 | 545 | run_at = timezone.now().replace(microsecond=0) + timedelta(seconds=60) 546 | default_schedule_used_for_priority(schedule=run_at) 547 | 548 | all_tasks = Task.objects.all() 549 | self.assertEqual(1, all_tasks.count()) 550 | task = all_tasks[0] 551 | self.assertEqual(run_at, task.run_at) 552 | 553 | def test_failed_at_set_after_MAX_ATTEMPTS(self): 554 | @tasks.background(name='test_failed_at_set_after_MAX_ATTEMPTS') 555 | def failed_at_set_after_MAX_ATTEMPTS(): 556 | raise RuntimeError('failed') 557 | 558 | failed_at_set_after_MAX_ATTEMPTS() 559 | 560 | available = Task.objects.find_available() 561 | self.assertEqual(1, available.count()) 562 | task = available[0] 563 | 564 | self.assertTrue(task.failed_at is None) 565 | 566 | task.attempts = app_settings.BACKGROUND_TASK_MAX_ATTEMPTS 567 | task.save() 568 | 569 | # task should be scheduled to run now 570 | # but will be marked as failed straight away 571 | self.assertTrue(run_next_task()) 572 | 573 | available = Task.objects.find_available() 574 | self.assertEqual(0, available.count()) 575 | 576 | all_tasks = Task.objects.all() 577 | self.assertEqual(0, all_tasks.count()) 578 | self.assertEqual(1, CompletedTask.objects.count()) 579 | completed_task = CompletedTask.objects.all()[0] 580 | self.assertFalse(completed_task.failed_at is None) 581 | 582 | def test_run_task_return_value(self): 583 | return_value = self.set_fields(test='test') 584 | self.assertEqual(Task.objects.count(), 1) 585 | task = Task.objects.first() 586 | self.assertEqual(return_value, task) 587 | self.assertEqual(return_value.pk, task.pk) 588 | 589 | def test_verbose_name_param(self): 590 | verbose_name = 'My Task' 591 | task = self.set_fields(test='test1', verbose_name=verbose_name) 592 | self.assertEqual(task.verbose_name, verbose_name) 
593 | 594 | def test_creator_param(self): 595 | user = User.objects.create_user(username='bob', email='bob@example.com', password='12345') 596 | task = self.set_fields(test='test2', creator=user) 597 | self.assertEqual(task.creator, user) 598 | 599 | 600 | class MaxAttemptsTestCase(TransactionTestCase): 601 | 602 | def setUp(self): 603 | @tasks.background(name='failing task') 604 | def failing_task(): 605 | raise Exception("error") 606 | # return 0 / 0 607 | self.failing_task = failing_task 608 | self.task1 = self.failing_task() 609 | self.task2 = self.failing_task() 610 | self.task1_id = self.task1.id 611 | self.task2_id = self.task2.id 612 | 613 | @override_settings(MAX_ATTEMPTS=1) 614 | def test_max_attempts_one(self): 615 | self.assertEqual(settings.MAX_ATTEMPTS, 1) 616 | self.assertEqual(Task.objects.count(), 2) 617 | 618 | run_next_task() 619 | self.assertEqual(Task.objects.count(), 1) 620 | self.assertEqual(Task.objects.all()[0].id, self.task2_id) 621 | self.assertEqual(CompletedTask.objects.count(), 1) 622 | completed_task = CompletedTask.objects.all()[0] 623 | self.assertEqual(completed_task.attempts, 1) 624 | self.assertEqual(completed_task.task_name, self.task1.task_name) 625 | self.assertEqual(completed_task.task_params, self.task1.task_params) 626 | self.assertIsNotNone(completed_task.last_error) 627 | self.assertIsNotNone(completed_task.failed_at) 628 | 629 | run_next_task() 630 | self.assertEqual(Task.objects.count(), 0) 631 | self.assertEqual(CompletedTask.objects.count(), 2) 632 | 633 | @override_settings(MAX_ATTEMPTS=2) 634 | def test_max_attempts_two(self): 635 | self.assertEqual(settings.MAX_ATTEMPTS, 2) 636 | run_next_task() 637 | self.assertEqual(Task.objects.count(), 2) 638 | self.assertEqual(CompletedTask.objects.count(), 0) 639 | 640 | 641 | class InvalidTaskTestCase(TransactionTestCase): 642 | 643 | class SomeInvalidTaskError(InvalidTaskError): 644 | pass 645 | 646 | def setUp(self): 647 | @tasks.background(name='failing task') 648 | def 
class ArgumentsWithDictTestCase(TransactionTestCase):
    """A dict positional argument must serialize and run without error."""

    def setUp(self):
        @tasks.background(name='failing task')
        def task(d):
            pass

        self.task = task

    def test_task_with_dictionary_in_args(self):
        self.assertEqual(Task.objects.count(), 0)
        payload = {22222: 2, 11111: 1}
        self.task(payload)
        self.assertEqual(Task.objects.count(), 1)
        run_next_task()
        # The task ran and was removed from the queue.
        self.assertEqual(Task.objects.count(), 0)


# Messages appended here prove the named-queue task actually executed.
completed_named_queue_tasks = []


@background(queue='named_queue')
def named_queue_task(message):
    completed_named_queue_tasks.append(message)


class NamedQueueTestCase(TransactionTestCase):
    """Tasks bound to a queue run for that queue (or no queue filter) only."""

    def test_process_queue(self):
        named_queue_task('test1')
        run_next_task(queue='named_queue')
        self.assertIn('test1', completed_named_queue_tasks,
                      msg='Task should be processed')

    def test_process_all_tasks(self):
        # Without a queue filter every queue's tasks are eligible.
        named_queue_task('test2')
        run_next_task()
        self.assertIn('test2', completed_named_queue_tasks,
                      msg='Task should be processed')

    def test_process_other_queue(self):
        # A different queue filter must leave the task untouched.
        named_queue_task('test3')
        run_next_task(queue='other_named_queue')
        self.assertNotIn('test3', completed_named_queue_tasks,
                         msg='Task should be ignored')
        run_next_task()
714 | def test_repeat(self): 715 | repeat_until = timezone.now() + timedelta(weeks=1) 716 | old_task = self.my_task( 717 | 'test-repeat', 718 | foo='bar', 719 | repeat=Task.HOURLY, 720 | repeat_until=repeat_until, 721 | verbose_name="Test repeat", 722 | ) 723 | self.assertEqual(old_task.repeat, Task.HOURLY) 724 | self.assertEqual(old_task.repeat_until, repeat_until) 725 | tasks.run_next_task() 726 | time.sleep(0.5) 727 | 728 | self.assertEqual(Task.objects.filter(repeat=Task.HOURLY).count(), 1) 729 | new_task = Task.objects.get(repeat=Task.HOURLY) 730 | self.assertNotEqual(new_task.id, old_task.id) 731 | self.assertEqual(new_task.task_name, old_task.task_name) 732 | self.assertEqual(new_task.params(), old_task.params()) 733 | self.assertEqual(new_task.task_hash, old_task.task_hash) 734 | self.assertEqual(new_task.verbose_name, old_task.verbose_name) 735 | self.assertEqual((new_task.run_at - old_task.run_at), timedelta(hours=1)) 736 | self.assertEqual(new_task.repeat_until, old_task.repeat_until) 737 | 738 | def test_repetition_in_future(self): 739 | repeat_until = timezone.now() + timedelta(weeks=1) 740 | old_task = self.my_task( 741 | 'test-repetition', 742 | repeat=Task.HOURLY, 743 | repeat_until=repeat_until, 744 | verbose_name="Test repetition in future", 745 | ) 746 | old_task.run_at = timezone.now() - timedelta(weeks=1) # task is one week old 747 | old_task.save() 748 | tasks.run_next_task() 749 | time.sleep(0.5) 750 | 751 | self.assertEqual(Task.objects.filter(repeat=Task.HOURLY).count(), 1) 752 | new_task = Task.objects.get(repeat=Task.HOURLY) 753 | self.assertNotEqual(new_task.id, old_task.id) 754 | # new task skipped exactly one week of downtime in the past, keeps period 755 | self.assertEqual((new_task.run_at - old_task.run_at), timedelta(weeks=1, hours=1)) 756 | # new task will be executed in the future 757 | self.assertTrue(new_task.run_at > timezone.now()) 758 | # new task will be executed in less than one hour 759 | self.assertTrue((new_task.run_at - 
timezone.now()) <= timedelta(hours=1)) 760 | 761 | 762 | class QuerySetManagerTestCase(TransactionTestCase): 763 | 764 | def setUp(self): 765 | @tasks.background() 766 | def succeeding_task(): 767 | return 0/1 768 | 769 | @tasks.background() 770 | def failing_task(): 771 | return 0/0 772 | 773 | self.user1 = User.objects.create_user(username='bob', email='bob@example.com', password='12345') 774 | self.user2 = User.objects.create_user(username='bob2', email='bob@example.com', password='12345') 775 | self.task_all = succeeding_task() 776 | self.task_user = succeeding_task(creator=self.user1) 777 | self.failing_task_all = failing_task() 778 | self.failing_task_user = failing_task(creator=self.user1) 779 | 780 | @override_settings(MAX_ATTEMPTS=1) 781 | def test_task_manager(self): 782 | self.assertEqual(len(Task.objects.all()), 4) 783 | self.assertEqual(len(Task.objects.created_by(self.user1)), 2) 784 | self.assertEqual(len(Task.objects.created_by(self.user2)), 0) 785 | for i in range(4): 786 | run_next_task() 787 | self.assertEqual(len(Task.objects.all()), 0) 788 | self.assertEqual(len(Task.objects.created_by(self.user1)), 0) 789 | self.assertEqual(len(Task.objects.created_by(self.user2)), 0) 790 | 791 | @override_settings(MAX_ATTEMPTS=1) 792 | def test_completed_task_manager(self): 793 | self.assertEqual(len(CompletedTask.objects.created_by(self.user1)), 0) 794 | self.assertEqual(len(CompletedTask.objects.created_by(self.user2)), 0) 795 | self.assertEqual(len(CompletedTask.objects.failed()), 0) 796 | self.assertEqual(len(CompletedTask.objects.created_by(self.user1).failed()), 0) 797 | self.assertEqual(len(CompletedTask.objects.failed(within=timedelta(hours=1))), 0) 798 | self.assertEqual(len(CompletedTask.objects.succeeded()), 0) 799 | self.assertEqual(len(CompletedTask.objects.created_by(self.user1).succeeded()), 0) 800 | self.assertEqual(len(CompletedTask.objects.succeeded(within=timedelta(hours=1))), 0) 801 | for i in range(4): 802 | run_next_task() 803 | 
self.assertEqual(len(CompletedTask.objects.created_by(self.user1)), 2) 804 | self.assertEqual(len(CompletedTask.objects.created_by(self.user2)), 0) 805 | self.assertEqual(len(CompletedTask.objects.failed()), 2) 806 | self.assertEqual(len(CompletedTask.objects.created_by(self.user1).failed()), 1) 807 | self.assertEqual(len(CompletedTask.objects.failed(within=timedelta(hours=1))), 2) 808 | self.assertEqual(len(CompletedTask.objects.succeeded()), 2) 809 | self.assertEqual(len(CompletedTask.objects.created_by(self.user1).succeeded()), 1) 810 | self.assertEqual(len(CompletedTask.objects.succeeded(within=timedelta(hours=1))), 2) 811 | 812 | 813 | class PriorityTestCase(TransactionTestCase): 814 | 815 | def setUp(self): 816 | @tasks.background() 817 | def mytask(): 818 | pass 819 | 820 | run_at = timezone.now() - timedelta(minutes=1) 821 | 822 | self.high_priority_task = mytask(priority=99, schedule=run_at) 823 | self.low_priority_task = mytask(priority=-1, schedule=run_at) 824 | 825 | def test_priority(self): 826 | self.assertEqual(self.high_priority_task.priority, 99) 827 | self.assertEqual(self.low_priority_task.priority, -1) 828 | 829 | available = Task.objects.find_available() 830 | self.assertEqual(available.count(), 2) 831 | self.assertEqual(available.first(), self.high_priority_task) 832 | # Using a list here. 
class LoggingTestCase(TransactionTestCase):
    """Which logger levels fire for succeeding vs failing tasks."""

    def setUp(self):
        @tasks.background()
        def succeeding_task():
            return 0/1

        @tasks.background()
        def failing_task():
            return 0/0

        self.succeeding_task = succeeding_task
        self.failing_task = failing_task

    @patch('background_task.tasks.logger')
    def test_success_logging(self, mock_logger):
        # A clean run must not log at warning level or above.
        self.succeeding_task()
        run_next_task()
        self.assertFalse(mock_logger.warning.called)
        self.assertFalse(mock_logger.error.called)
        self.assertFalse(mock_logger.critical.called)

    @patch('background_task.tasks.logger')
    def test_error_logging(self, mock_logger):
        # A task exception is reported via logger.error -- nothing else.
        self.failing_task()
        run_next_task()
        self.assertFalse(mock_logger.warning.called)
        self.assertTrue(mock_logger.error.called)
        self.assertFalse(mock_logger.critical.called)
# Polling-interval bounds: [min, max] seconds between queue polls.
TTW_SLOW = [0.5, 1.5]
TTW_FAST = [0.0, 0.1]


class SignalManager(object):
    """Manages POSIX signals.

    Class-level defaults; handlers rebind these on the instance:
      * kill_now     -- becomes True once a stop signal is received.
      * time_to_wait -- current polling-interval bounds, toggled by
        SIGUSR1 (fast) / SIGUSR2 (slow) on POSIX platforms.
    """

    kill_now = False
    time_to_wait = TTW_FAST

    def __init__(self):
        # Temporary workaround: the POSIX job-control/user signals do not
        # exist on Windows, so only a SIGTERM handler is installed there.
        if platform.system() == 'Windows':
            handlers = {signal.SIGTERM: self.exit_gracefully}
        else:
            # NOTE(review): graceful exit is wired to SIGTSTP (terminal
            # stop), not SIGTERM -- confirm this is intentional.
            handlers = {
                signal.SIGTSTP: self.exit_gracefully,
                signal.SIGUSR1: self.speed_up,
                signal.SIGUSR2: self.slow_down,
            }
        for signum, handler in handlers.items():
            signal.signal(signum, handler)

    def exit_gracefully(self, signum, frame):
        # Ask the polling loop to stop at its next opportunity.
        self.kill_now = True

    def speed_up(self, signum, frame):
        self.time_to_wait = TTW_FAST

    def slow_down(self, signum, frame):
        self.time_to_wait = TTW_SLOW
Software Development :: Libraries :: Python Modules 7 | Framework :: Django 8 | Framework :: Django :: 3.0 9 | Framework :: Django :: 4.0 10 | Framework :: Django :: 5.0 11 | Programming Language :: Python :: 3.4 12 | Programming Language :: Python :: 3.5 13 | Programming Language :: Python :: 3.6 14 | Programming Language :: Python :: 3.7 15 | Programming Language :: Python :: 3.8 16 | Programming Language :: Python :: 3.9 17 | Programming Language :: Python :: 3.10 18 | Programming Language :: Python :: 3.11 19 | Programming Language :: Python :: 3.12 -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | ======================= 2 | Django Background Tasks 3 | ======================= 4 | 5 | 6 | Django Background Task is a databased-backed work queue for Django_, loosely based around Ruby's DelayedJob_ library. This project was adopted and adapted from this_ repo. 7 | 8 | To avoid conflicts on PyPI we renamed it to django-background-tasks (plural). For an easy upgrade from django-background-task to django-background-tasks, the internal module structure were left untouched. 9 | 10 | In Django Background Task, all tasks are implemented as functions (or any other callable). 11 | 12 | There are two parts to using background tasks: 13 | 14 | * creating the task functions and registering them with the scheduler 15 | * setup a cron task (or long running process) to execute the tasks 16 | 17 | 18 | Installation 19 | ============ 20 | 21 | Install from PyPI: 22 | 23 | .. code-block:: sh 24 | 25 | pip install django-background-tasks 26 | 27 | Add to INSTALLED_APPS: 28 | 29 | .. code-block:: python 30 | 31 | INSTALLED_APPS = ( 32 | # ... 33 | 'background_task', 34 | # ... 35 | ) 36 | 37 | Migrate your database: 38 | 39 | .. 
code-block:: sh 40 | 41 | python manage.py migrate 42 | 43 | Supported versions and compatibility 44 | ==================================== 45 | * Python: 3.8-3.12 46 | * Django: 4.0+ 47 | 48 | 49 | Creating and registering tasks 50 | ============================== 51 | 52 | To register a task use the background decorator: 53 | 54 | .. code-block:: python 55 | 56 | from background_task import background 57 | from django.contrib.auth.models import User 58 | 59 | @background(schedule=60) 60 | def notify_user(user_id): 61 | # lookup user by id and send them a message 62 | user = User.objects.get(pk=user_id) 63 | user.email_user('Here is a notification', 'You have been notified') 64 | 65 | This will convert the notify_user into a background task function. When you call it from regular code it will actually create a Task object and stores it in the database. The database then contains serialised information about which function actually needs running later on. This does place limits on the parameters that can be passed when calling the function - they must all be serializable as JSON. Hence why in the example above a user_id is passed rather than a User object. 66 | 67 | Calling notify_user as normal will schedule the original function to be run 60 seconds from now: 68 | 69 | .. code-block:: python 70 | 71 | notify_user(user.id) 72 | 73 | This is the default schedule time (as set in the decorator), but it can be overridden: 74 | 75 | .. code-block:: python 76 | 77 | notify_user(user.id, schedule=90) # 90 seconds from now 78 | notify_user(user.id, schedule=timedelta(minutes=20)) # 20 minutes from now 79 | notify_user(user.id, schedule=timezone.now()) # at a specific time 80 | 81 | 82 | Also you can run original function right now in synchronous mode: 83 | 84 | .. code-block:: python 85 | 86 | notify_user.now(user.id) # launch a notify_user function and wait for it 87 | notify_user = notify_user.now # revert task function back to normal function. Useful for testing. 
88 | 89 | You can specify a verbose name and a creator when scheduling a task: 90 | 91 | .. code-block:: python 92 | 93 | notify_user(user.id, verbose_name="Notify user", creator=user) 94 | 95 | The creator is stored as a ``GenericForeignKey``, so any model may be used. 96 | 97 | To get the functions decorated by ``background`` picked up by the auto discovery mechanism, they must be placed in a file named ``tasks.py`` in your module, eg. ``myapp/tasks.py``. 98 | 99 | Repeating Tasks 100 | =============== 101 | 102 | Repeating tasks can be initialized like this: 103 | 104 | .. code-block:: python 105 | 106 | notify_user(user.id, repeat=, repeat_until=) 107 | 108 | When a repeating task completes successfully, a new Task with an offset of ``repeat`` is scheduled. On the other hand, if a repeating task fails and is not restarted, the repetition chain is stopped. 109 | 110 | ``repeat`` is given in seconds. The following constants are provided: ``Task.NEVER`` (default), ``Task.HOURLY``, ``Task.DAILY``, ``Task.WEEKLY``, ``Task.EVERY_2_WEEKS``, ``Task.EVERY_4_WEEKS``. 111 | 112 | If you want to use the repeat constants, you need to import them from this module. 113 | 114 | .. code-block:: python 115 | 116 | from background_task.models import Task 117 | 118 | The time offset is computed from the initially scheduled time of the original task, not the time the task was actually executed. If the process command is interrupted, the interval between the original task and its repetition may be shorter than ``repeat``. 119 | 120 | Multiple Queues 121 | =============== 122 | You can pass a queue name to the ``background`` decorator: 123 | 124 | .. code-block:: python 125 | 126 | @background(queue='my-queue') 127 | def notify_user(user_id): 128 | ... 129 | 130 | If you run the command ``process_tasks`` with the option ``--queue `` you can restrict the tasks processed to the given queue. 
131 | 132 | Scheduling the same task twice 133 | ============================== 134 | 135 | Normally, when you schedule the exact same task twice, it will also be executed twice. If you want to remove existing tasks with the same parameters, you can set the parameter `remove_existing_tasks` to `True`. Only tasks that are pending (not in execution) will be removed. 136 | 137 | .. code-block:: python 138 | 139 | @background(remove_existing_tasks=True) 140 | def recalculate_data(): 141 | ... 142 | 143 | Running tasks 144 | ============= 145 | 146 | There is a management command to run tasks that have been scheduled: 147 | 148 | .. code-block:: sh 149 | 150 | python manage.py process_tasks 151 | 152 | This will simply poll the database queue every few seconds to see if there is a new task to run. 153 | 154 | The ``process_tasks`` management command has the following options: 155 | 156 | * ``duration`` - Run task for this many seconds (0 or less to run forever) - default is 0 157 | * ``sleep`` - Sleep for this many seconds before checking for new tasks (if none were found) - default is 5 158 | * ``log-std`` - Redirect stdout and stderr to the logging system 159 | * ``dev`` - Auto-reload your code on changes. Use this only for development 160 | 161 | You can use the ``duration`` option for simple process control, by running the management command via a cron job and setting the duration to the time till cron calls the command again. This way if the command fails it will get restarted by the cron job later anyway. It also avoids having to worry about resource/memory leaks too much. The alternative is to use a grown-up program like supervisord_ to handle this for you. 162 | 163 | Settings 164 | ======== 165 | 166 | There are a few settings options that can be set in your ``settings.py`` file. 
167 | 168 | * ``MAX_ATTEMPTS`` - controls how many times a task will be attempted (default 25) 169 | * ``MAX_RUN_TIME`` - maximum possible task run time, after which tasks will be unlocked and tried again (default 3600 seconds) 170 | * ``BACKGROUND_TASK_RUN_ASYNC`` - If ``True``, will run the tasks asynchronous. This means the tasks will be processed in parallel (at the same time) instead of processing one by one (one after the other). 171 | * ``BACKGROUND_TASK_ASYNC_THREADS`` - Specifies number of concurrent threads. Default is ``multiprocessing.cpu_count()``. 172 | * ``BACKGROUND_TASK_PRIORITY_ORDERING`` - Control the ordering of tasks in the queue. Default is ``"DESC"`` (tasks with a higher number are processed first). Choose ``"ASC"`` to switch to the "niceness_" ordering. A niceness of −20 is the highest priority and 19 is the lowest priority. 173 | 174 | Task errors 175 | =========== 176 | 177 | Tasks are retried if they fail and the error recorded in last_error (and logged). A task is retried as it may be a temporary issue, such as a transient network problem. However each time a task is retried it is retried later and later, using an exponential back off, based on the number of attempts: 178 | 179 | .. code-block:: python 180 | 181 | (attempts ** 4) + 5 182 | 183 | This means that initially the task will be tried again a few seconds later. After four attempts the task is tried again 261 seconds later (about four minutes). At twenty five attempts the task will not be tried again for nearly four days! It is not unheard of for a transient error to last a long time and this behavior is intended to stop tasks that are triggering errors constantly (i.e. due to a coding error) form dominating task processing. You should probably monitor the task queue to check for tasks that have errors. After ``MAX_ATTEMPTS`` the task will be marked as failed and will not be rescheduled again. 
184 | 185 | Known issues 186 | ============ 187 | 188 | * ``django.db.utils.OperationalError: database is locked`` when using SQLite. This is a SQLite specific error, see https://docs.djangoproject.com/en/dev/ref/databases/#database-is-locked-errors for more details. 189 | 190 | 191 | 192 | Example project 193 | =============== 194 | 195 | Hiroaki Nakamura has written an example project demonstrating how django-background-tasks works. You find it here_. 196 | 197 | 198 | Tests 199 | ===== 200 | 201 | You can run the test suite on all supported versions of Django and Python: 202 | 203 | .. code-block:: bash 204 | 205 | $ tox 206 | 207 | 208 | Contributing 209 | ============ 210 | 211 | Anyone and everyone is welcome to contribute. Please take a moment to review the `guidelines for contributing 212 | `_. 213 | 214 | 215 | .. _Django: http://www.djangoproject.com/ 216 | .. _DelayedJob: http://github.com/tobi/delayed_job 217 | .. _supervisord: http://supervisord.org/ 218 | .. _this: https://github.com/lilspikey/django-background-task 219 | .. _compat: https://github.com/arteria/django-compat 220 | .. _django-compat: https://github.com/arteria/django-compat 221 | .. _25: https://github.com/arteria/django-background-tasks/issues/25 222 | .. _here: https://github.com/hnakamur/django-background-tasks-example/ 223 | .. 
_niceness: https://en.wikipedia.org/wiki/Nice_(Unix) 224 | -------------------------------------------------------------------------------- /manage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import sys 4 | 5 | if __name__ == "__main__": 6 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", 7 | "background_task.tests.test_settings") 8 | 9 | from django.core.management import execute_from_command_line 10 | 11 | execute_from_command_line(sys.argv) 12 | -------------------------------------------------------------------------------- /requirements-test.txt: -------------------------------------------------------------------------------- 1 | mock 2 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | six 2 | Django>=4.0 3 | -------------------------------------------------------------------------------- /runtests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | import os 4 | import sys 5 | 6 | import django 7 | from django.conf import settings 8 | from django.test.utils import get_runner 9 | from argparse import ArgumentParser 10 | 11 | 12 | def main(argv): 13 | parser = ArgumentParser() 14 | parser.add_argument( 15 | "--async", "-a", action="store_true", default=False, dest="run_async", 16 | help="process background tasks in multiple threads") 17 | args = parser.parse_args(argv) 18 | 19 | os.environ['DJANGO_SETTINGS_MODULE'] = 'background_task.tests.test_settings' 20 | 21 | if args.run_async: 22 | os.environ['DJANGO_SETTINGS_MODULE'] = 'background_task.tests.test_settings_async' 23 | 24 | django.setup() 25 | TestRunner = get_runner(settings) 26 | test_runner = TestRunner() 27 | failures = test_runner.run_tests(["background_task.tests"]) 28 | 
sys.exit(bool(failures)) 29 | 30 | 31 | if __name__ == "__main__": 32 | main(argv=sys.argv[1:]) 33 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | import codecs 3 | 4 | version = __import__('background_task').__version__ 5 | 6 | classifiers = [c for c in open('classifiers').read().splitlines() if '#' not in c] 7 | 8 | with open("README.md", "r") as fh: 9 | long_description = fh.read() 10 | 11 | setup( 12 | name='django-background-tasks-updated', 13 | version=version, 14 | description='Database backed asynchronous task queue', 15 | long_description=long_description, 16 | long_description_content_type='text/markdown', 17 | author='arteria GmbH, John Montgomery, Alberto Petrucci, Jon Miller', 18 | author_email='iamjonamiller@gmail.com', 19 | url='http://github.com/iamjonmiller/django-background-tasks', 20 | license='BSD', 21 | packages=find_packages(exclude=['ez_setup']), 22 | include_package_data=True, 23 | install_requires=open('requirements.txt').read().splitlines(), 24 | zip_safe=True, 25 | classifiers=classifiers, 26 | ) 27 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = 3 | py{38,39}-django{40, 41, 42}-{sync,async} 4 | py{310}-django{40, 41, 42, 50}-{sync,async} 5 | py{311}-django{41, 42, 50}-{sync,async} 6 | py{312}-django{42, 50}-{sync,async} 7 | 8 | [testenv] 9 | deps = 10 | coverage 11 | django-coverage 12 | django40: Django>=4.0,<4.1 13 | django41: Django>=4.1,<4.2 14 | django42: Django>=4.2,<4.3 15 | django50: Django>=5.0,<5.1 16 | -r{toxinidir}/requirements-test.txt 17 | -r{toxinidir}/requirements.txt 18 | 19 | commands = 20 | coverage erase 21 | sync: coverage run -a ./runtests.py 22 | async: coverage run -a ./runtests.py 
{posargs:--async} 23 | --------------------------------------------------------------------------------