├── .github └── workflows │ └── ci.yml ├── .gitignore ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── django_tasks ├── __init__.py ├── apps.py ├── backends │ ├── __init__.py │ ├── base.py │ ├── database │ │ ├── __init__.py │ │ ├── admin.py │ │ ├── apps.py │ │ ├── backend.py │ │ ├── management │ │ │ └── commands │ │ │ │ ├── db_worker.py │ │ │ │ └── prune_db_task_results.py │ │ ├── migrations │ │ │ ├── 0001_initial.py │ │ │ ├── 0002_alter_dbtaskresult_options.py │ │ │ ├── 0003_dbtaskresult_enqueued_at_dbtaskresult_finished_at.py │ │ │ ├── 0004_dbtaskresult_started_at.py │ │ │ ├── 0005_alter_dbtaskresult_priority_and_more.py │ │ │ ├── 0006_alter_dbtaskresult_args_kwargs_and_more.py │ │ │ ├── 0007_add_separate_results_fields.py │ │ │ ├── 0008_separate_results_field.py │ │ │ ├── 0009_remove_results_field.py │ │ │ ├── 0010_alter_dbtaskresult_status.py │ │ │ ├── 0011_rename_complete_status.py │ │ │ ├── 0012_add_separate_exception_fields.py │ │ │ ├── 0013_separate_exception_fields.py │ │ │ ├── 0014_remove_dbtaskresult_exception_data.py │ │ │ ├── 0015_correctly_order_run_after.py │ │ │ ├── 0016_alter_dbtaskresult_options_and_more.py │ │ │ └── __init__.py │ │ ├── models.py │ │ ├── signal_handlers.py │ │ └── utils.py │ ├── dummy.py │ ├── immediate.py │ └── rq.py ├── checks.py ├── exceptions.py ├── py.typed ├── signal_handlers.py ├── signals.py ├── task.py └── utils.py ├── docker-compose.yml ├── justfile ├── manage.py ├── pyproject.toml └── tests ├── __init__.py ├── db_worker_test_settings.py ├── settings.py ├── settings_fast.py ├── tasks.py ├── tests ├── __init__.py ├── is_module_level_function_fixture.py ├── test_custom_backend.py ├── test_database_backend.py ├── test_dummy_backend.py ├── test_immediate_backend.py ├── test_rq_backend.py ├── test_tasks.py └── test_utils.py ├── urls.py └── views.py /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | pull_request: 5 | push: 6 | branches: 7 | 
- master 8 | tags: 9 | - "*" 10 | 11 | jobs: 12 | test: 13 | runs-on: ${{ matrix.os }} 14 | strategy: 15 | fail-fast: false 16 | matrix: 17 | os: [windows-latest, macos-latest, ubuntu-latest] 18 | python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] 19 | django-version: ["4.2", "5.0", "5.1", "5.2"] 20 | exclude: 21 | - django-version: "5.0" 22 | python-version: "3.9" 23 | - django-version: "5.1" 24 | python-version: "3.9" 25 | - django-version: "5.2" 26 | python-version: "3.9" 27 | 28 | steps: 29 | - uses: actions/checkout@v4 30 | - name: Set up Python ${{ matrix.python-version }} 31 | uses: actions/setup-python@v5 32 | with: 33 | python-version: ${{ matrix.python-version }} 34 | - uses: actions/cache@v4 35 | with: 36 | path: ~/.cache/pip 37 | key: ${{ runner.os }}-${{ matrix.python-version }}-pip-${{ hashFiles('pyproject.toml') }} 38 | - uses: taiki-e/install-action@just 39 | - name: Install dependencies 40 | run: | 41 | pip install --upgrade pip 42 | pip install -e '.[dev]' 43 | pip install Django~=${{ matrix.django-version }} 44 | - name: Lint 45 | run: just lint 46 | - name: Run fast tests 47 | if: ${{ !cancelled() }} 48 | run: just test-fast 49 | - name: Run tests 50 | if: ${{ !cancelled() }} 51 | run: just test 52 | 53 | test-postgres: 54 | runs-on: ubuntu-latest 55 | services: 56 | postgres: 57 | image: postgres:16-alpine 58 | env: 59 | POSTGRES_USER: postgres 60 | POSTGRES_PASSWORD: postgres 61 | ports: 62 | - 5432:5432 63 | options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 64 | strategy: 65 | fail-fast: false 66 | matrix: 67 | django-version: ["4.2", "5.0", "5.1", "5.2"] 68 | steps: 69 | - uses: actions/checkout@v4 70 | - name: Set up Python 3.13 71 | uses: actions/setup-python@v5 72 | with: 73 | python-version: "3.13" 74 | - uses: actions/cache@v4 75 | with: 76 | path: ~/.cache/pip 77 | key: ${{ runner.os }}-pip-${{ hashFiles('pyproject.toml') }} 78 | - uses: taiki-e/install-action@just 79 | - name: Install 
dependencies 80 | run: | 81 | pip install --upgrade pip 82 | pip install -e '.[dev,postgres]' 83 | pip install Django~=${{ matrix.django-version }} 84 | - name: Run tests 85 | run: just test 86 | env: 87 | DATABASE_URL: postgres://postgres:postgres@localhost/postgres 88 | 89 | test-mysql: 90 | runs-on: ubuntu-latest 91 | services: 92 | mysql: 93 | image: mysql:8.4 94 | env: 95 | MYSQL_ROOT_PASSWORD: django 96 | MYSQL_DATABASE: django 97 | ports: 98 | - 3306:3306 99 | strategy: 100 | fail-fast: false 101 | matrix: 102 | django-version: ["4.2", "5.0", "5.1", "5.2"] 103 | steps: 104 | - uses: actions/checkout@v4 105 | - name: Set up Python 3.13 106 | uses: actions/setup-python@v5 107 | with: 108 | python-version: "3.13" 109 | - uses: actions/cache@v4 110 | with: 111 | path: ~/.cache/pip 112 | key: ${{ runner.os }}-pip-${{ hashFiles('pyproject.toml') }} 113 | - uses: taiki-e/install-action@just 114 | - name: Install dependencies 115 | run: | 116 | pip install --upgrade pip 117 | pip install -e '.[dev,mysql]' 118 | pip install Django~=${{ matrix.django-version }} 119 | - name: Run tests 120 | run: just test 121 | env: 122 | DATABASE_URL: mysql://root:django@127.0.0.1/django 123 | 124 | build: 125 | permissions: 126 | id-token: write # IMPORTANT: this permission is mandatory for trusted publishing 127 | runs-on: ubuntu-latest 128 | needs: 129 | - test 130 | - test-postgres 131 | - test-mysql 132 | steps: 133 | - uses: actions/checkout@v4 134 | with: 135 | fetch-depth: 0 136 | - name: Set up Python 137 | uses: actions/setup-python@v5 138 | with: 139 | python-version: "3.13" 140 | - name: Install dependencies 141 | run: | 142 | python -m pip install --upgrade pip build 143 | - name: Build package 144 | run: python -m build 145 | - name: Save built package 146 | uses: actions/upload-artifact@v4 147 | with: 148 | name: package 149 | path: dist 150 | - name: Publish to PyPi 151 | if: ${{ github.ref_type == 'tag' }} 152 | uses: pypa/gh-action-pypi-publish@release/v1 153 | 
with: 154 | print-hash: true 155 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by https://www.toptal.com/developers/gitignore/api/python 2 | # Edit at https://www.toptal.com/developers/gitignore?templates=python 3 | 4 | ### Python ### 5 | # Byte-compiled / optimized / DLL files 6 | __pycache__/ 7 | *.py[cod] 8 | *$py.class 9 | 10 | # C extensions 11 | *.so 12 | 13 | # Distribution / packaging 14 | .Python 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | wheels/ 27 | share/python-wheels/ 28 | *.egg-info/ 29 | .installed.cfg 30 | *.egg 31 | MANIFEST 32 | 33 | # PyInstaller 34 | # Usually these files are written by a python script from a template 35 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 36 | *.manifest 37 | *.spec 38 | 39 | # Installer logs 40 | pip-log.txt 41 | pip-delete-this-directory.txt 42 | 43 | # Unit test / coverage reports 44 | htmlcov/ 45 | .tox/ 46 | .nox/ 47 | .coverage 48 | .coverage.* 49 | .cache 50 | nosetests.xml 51 | coverage.xml 52 | *.cover 53 | *.py,cover 54 | .hypothesis/ 55 | .pytest_cache/ 56 | cover/ 57 | 58 | # Translations 59 | *.mo 60 | *.pot 61 | 62 | # Django stuff: 63 | *.log 64 | local_settings.py 65 | *.sqlite3 66 | *.sqlite3-journal 67 | 68 | # Flask stuff: 69 | instance/ 70 | .webassets-cache 71 | 72 | # Scrapy stuff: 73 | .scrapy 74 | 75 | # Sphinx documentation 76 | docs/_build/ 77 | 78 | # PyBuilder 79 | .pybuilder/ 80 | target/ 81 | 82 | # Jupyter Notebook 83 | .ipynb_checkpoints 84 | 85 | # IPython 86 | profile_default/ 87 | ipython_config.py 88 | 89 | # pyenv 90 | # For a library or package, you might want to ignore these files since the code is 91 | # intended to run in multiple environments; otherwise, check them in: 92 | # .python-version 93 | 94 | # 
pipenv 95 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 96 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 97 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 98 | # install all needed dependencies. 99 | #Pipfile.lock 100 | 101 | # poetry 102 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 103 | # This is especially recommended for binary packages to ensure reproducibility, and is more 104 | # commonly ignored for libraries. 105 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 106 | #poetry.lock 107 | 108 | # pdm 109 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 110 | #pdm.lock 111 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 112 | # in version control. 113 | # https://pdm.fming.dev/#use-with-ide 114 | .pdm.toml 115 | 116 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 117 | __pypackages__/ 118 | 119 | # Celery stuff 120 | celerybeat-schedule 121 | celerybeat.pid 122 | 123 | # SageMath parsed files 124 | *.sage.py 125 | 126 | # Environments 127 | .env 128 | .venv 129 | env/ 130 | venv/ 131 | ENV/ 132 | env.bak/ 133 | venv.bak/ 134 | 135 | # Spyder project settings 136 | .spyderproject 137 | .spyproject 138 | 139 | # Rope project settings 140 | .ropeproject 141 | 142 | # mkdocs documentation 143 | /site 144 | 145 | # mypy 146 | .mypy_cache/ 147 | .dmypy.json 148 | dmypy.json 149 | 150 | # Pyre type checker 151 | .pyre/ 152 | 153 | # pytype static type analyzer 154 | .pytype/ 155 | 156 | # Cython debug symbols 157 | cython_debug/ 158 | 159 | # PyCharm 160 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 161 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 162 | # and can be added to the global gitignore or merged into this file. For a more nuclear 163 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 164 | #.idea/ 165 | 166 | ### Python Patch ### 167 | # Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration 168 | poetry.toml 169 | 170 | # ruff 171 | .ruff_cache/ 172 | 173 | # LSP config files 174 | pyrightconfig.json 175 | 176 | # End of https://www.toptal.com/developers/gitignore/api/python 177 | 178 | # Editor config files 179 | .vscode 180 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Found a bug? Want to fix an open issue? Got an idea for an improvement? Please contribute! 4 | 5 | **All** contributions are welcome, from absolutely anyone. Just open a PR, Issue or Discussion (as relevant) - no need to ask beforehand. 
If you're going to work on an issue, it's a good idea to say so on the issue, to make sure work isn't duplicated. 6 | 7 | ## Development set up 8 | 9 | Fork, then clone the repo: 10 | 11 | ```sh 12 | git clone git@github.com:your-username/django-tasks.git 13 | ``` 14 | 15 | Set up a venv: 16 | 17 | ```sh 18 | python -m venv .venv 19 | source .venv/bin/activate 20 | python -m pip install -e '.[dev]' 21 | ``` 22 | 23 | > [!TIP] 24 | > Add an extra name for each database you want to develop with (e.g. `[dev,mysql]`, `[dev,postgres]` or `[dev,mysql,postgres]`). This is optional. 25 | 26 | Then you can run the tests with the [just](https://just.systems/man/en/) command runner: 27 | 28 | ```sh 29 | just test 30 | ``` 31 | 32 | If you don't have `just` installed, you can look in the `justfile` for the commands that are run. 33 | 34 | To help with testing on different databases, there's a `docker-compose.yml` file to run PostgreSQL and MySQL in Docker, as well as some additional `just` commands for testing: 35 | 36 | ```sh 37 | just start-dbs 38 | just test-postgres 39 | just test-mysql 40 | just test-sqlite 41 | 42 | # To run all of the above: 43 | just test-dbs 44 | ``` 45 | 46 | Due to database worker process' tests, tests cannot run using an in-memory database, which means tests run quite slow locally. If you're not modifying the worker, and want your tests to run quicker, run: 47 | 48 | ```sh 49 | just test-fast 50 | ``` 51 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) Jake Howard and individual contributors. 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without modification, 5 | are permitted provided that the following conditions are met: 6 | 7 | 1. Redistributions of source code must retain the above copyright notice, 8 | this list of conditions and the following disclaimer. 
9 | 10 | 2. Redistributions in binary form must reproduce the above copyright 11 | notice, this list of conditions and the following disclaimer in the 12 | documentation and/or other materials provided with the distribution. 13 | 14 | 3. Neither the name of the copyright holder nor the names of its 15 | contributors may be used to endorse or promote products derived from 16 | this software without specific prior written permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 19 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 20 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 21 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR 22 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 23 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 24 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 25 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 26 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 27 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
28 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Django Tasks 2 | 3 | [![CI](https://github.com/RealOrangeOne/django-tasks/actions/workflows/ci.yml/badge.svg)](https://github.com/RealOrangeOne/django-tasks/actions/workflows/ci.yml) 4 | ![PyPI](https://img.shields.io/pypi/v/django-tasks.svg) 5 | ![PyPI - Python Version](https://img.shields.io/pypi/pyversions/django-tasks.svg) 6 | ![PyPI - Status](https://img.shields.io/pypi/status/django-tasks.svg) 7 | ![PyPI - License](https://img.shields.io/pypi/l/django-tasks.svg) 8 | 9 | An implementation and backport of background workers and tasks in Django, as defined in [DEP 0014](https://github.com/django/deps/blob/main/accepted/0014-background-workers.rst). 10 | 11 | **Warning**: This package is under active development, and breaking changes may be released at any time. Be sure to pin to specific versions if you're using this package in a production environment. 12 | 13 | ## Installation 14 | 15 | ``` 16 | python -m pip install django-tasks 17 | ``` 18 | 19 | The first step is to add `django_tasks` to your `INSTALLED_APPS`. 20 | 21 | ```python 22 | INSTALLED_APPS = [ 23 | # ... 24 | "django_tasks", 25 | ] 26 | ``` 27 | 28 | Secondly, you'll need to configure a backend. This connects the tasks to whatever is going to execute them. 29 | 30 | If omitted, the following configuration is used: 31 | 32 | ```python 33 | TASKS = { 34 | "default": { 35 | "BACKEND": "django_tasks.backends.immediate.ImmediateBackend" 36 | } 37 | } 38 | ``` 39 | 40 | A few backends are included by default: 41 | 42 | - `django_tasks.backends.dummy.DummyBackend`: Don't execute the tasks, just store them. This is especially useful for testing. 
43 | - `django_tasks.backends.immediate.ImmediateBackend`: Execute the task immediately in the current thread 44 | - `django_tasks.backends.database.DatabaseBackend`: Store tasks in the database (via Django's ORM), and retrieve and execute them using the `db_worker` management command 45 | - `django_tasks.backends.rq.RQBackend`: A backend which enqueues tasks using [RQ](https://python-rq.org/) via [`django-rq`](https://github.com/rq/django-rq) (requires installing `django-tasks[rq]`). 46 | 47 | Note: `DatabaseBackend` additionally requires `django_tasks.backends.database` adding to `INSTALLED_APPS`. 48 | 49 | ## Usage 50 | 51 | **Note**: This documentation is still work-in-progress. Further details can also be found on the [DEP](https://github.com/django/deps/blob/main/accepted/0014-background-workers.rst). [The tests](./tests/tests/) are also a good exhaustive reference. 52 | 53 | ### Defining tasks 54 | 55 | A task is created with the `task` decorator. 56 | 57 | ```python 58 | from django_tasks import task 59 | 60 | 61 | @task() 62 | def calculate_meaning_of_life() -> int: 63 | return 42 64 | ``` 65 | 66 | The task decorator accepts a few arguments to customize the task: 67 | 68 | - `priority`: The priority of the task (between -100 and 100. Larger numbers are higher priority. 0 by default) 69 | - `queue_name`: Whether to run the task on a specific queue 70 | - `backend`: Name of the backend for this task to use (as defined in `TASKS`) 71 | - `enqueue_on_commit`: Whether the task is enqueued when the current transaction commits successfully, or enqueued immediately. By default, this is handled by the backend (see below). `enqueue_on_commit` may not be modified with `.using`. 
72 | 73 | These attributes (besides `enqueue_on_commit`) can also be modified at run-time with `.using`: 74 | 75 | ```python 76 | modified_task = calculate_meaning_of_life.using(priority=10) 77 | ``` 78 | 79 | In addition to the above attributes, `run_after` can be passed to specify a specific time the task should run. 80 | 81 | ### Enqueueing tasks 82 | 83 | To execute a task, call the `enqueue` method on it: 84 | 85 | ```python 86 | result = calculate_meaning_of_life.enqueue() 87 | ``` 88 | 89 | The returned `TaskResult` can be interrogated to query the current state of the running task, as well as its return value. 90 | 91 | If the task takes arguments, these can be passed as-is to `enqueue`. 92 | 93 | #### Transactions 94 | 95 | By default, tasks are enqueued after the current transaction (if there is one) commits successfully (using Django's `transaction.on_commit` method), rather than enqueueing immediately. 96 | 97 | This can be configured using the `ENQUEUE_ON_COMMIT` setting. `True` and `False` force the behaviour. 98 | 99 | ```python 100 | TASKS = { 101 | "default": { 102 | "BACKEND": "django_tasks.backends.immediate.ImmediateBackend", 103 | "ENQUEUE_ON_COMMIT": False 104 | } 105 | } 106 | ``` 107 | 108 | This can also be configured per-task by passing `enqueue_on_commit` to the `task` decorator. 109 | 110 | ### Queue names 111 | 112 | By default, tasks are enqueued onto the "default" queue. When using multiple queues, it can be useful to constrain the allowed names, so tasks aren't missed. 113 | 114 | ```python 115 | TASKS = { 116 | "default": { 117 | "BACKEND": "django_tasks.backends.immediate.ImmediateBackend", 118 | "QUEUES": ["default", "special"] 119 | } 120 | } 121 | ``` 122 | 123 | Enqueueing tasks to an unknown queue name raises `InvalidTaskError`. 124 | 125 | To disable queue name validation, set `QUEUES` to `[]`. 
126 | 127 | ### The database backend worker 128 | 129 | First, you'll need to add `django_tasks.backends.database` to `INSTALLED_APPS`: 130 | 131 | ```python 132 | INSTALLED_APPS = [ 133 | # ... 134 | "django_tasks", 135 | "django_tasks.backends.database", 136 | ] 137 | ``` 138 | 139 | Then, run migrations: 140 | 141 | ```shell 142 | ./manage.py migrate 143 | ``` 144 | 145 | Next, configure the database backend: 146 | 147 | ```python 148 | TASKS = { 149 | "default": { 150 | "BACKEND": "django_tasks.backends.database.DatabaseBackend" 151 | } 152 | } 153 | ``` 154 | 155 | Finally, you can run the `db_worker` command to run tasks as they're created. Check the `--help` for more options. 156 | 157 | ```shell 158 | ./manage.py db_worker 159 | ``` 160 | 161 | In `DEBUG`, the worker will automatically reload when code is changed (or by using `--reload`). This is not recommended in production environments as tasks may not be stopped cleanly. 162 | 163 | ### Pruning old tasks 164 | 165 | After a while, tasks may start to build up in your database. This can be managed using the `prune_db_task_results` management command, which deletes completed tasks according to the given retention policy. Check the `--help` for the available options. 166 | 167 | ### Retrieving task result 168 | 169 | When enqueueing a task, you get a `TaskResult`, however it may be useful to retrieve said result from somewhere else (another request, another task etc). This can be done with `get_result` (or `aget_result`): 170 | 171 | ```python 172 | result_id = result.id 173 | 174 | # Later, somewhere else... 175 | calculate_meaning_of_life.get_result(result_id) 176 | ``` 177 | 178 | A result `id` should be considered an opaque string, whose length could be up to 64 characters. ID generation is backend-specific. 179 | 180 | Only tasks of the same type can be retrieved this way. 
To retrieve the result of any task, you can call `get_result` on the backend: 181 | 182 | ```python 183 | from django_tasks import default_task_backend 184 | 185 | default_task_backend.get_result(result_id) 186 | ``` 187 | 188 | ### Return values 189 | 190 | If your task returns something, it can be retrieved from the `.return_value` attribute on a `TaskResult`. Accessing this property on an unsuccessful task (ie not `SUCCEEDED`) will raise a `ValueError`. 191 | 192 | ```python 193 | assert result.status == ResultStatus.SUCCEEDED 194 | assert result.return_value == 42 195 | ``` 196 | 197 | If a result has been updated in the background, you can call `refresh` on it to update its values. Results obtained using `get_result` will always be up-to-date. 198 | 199 | ```python 200 | assert result.status == ResultStatus.NEW 201 | result.refresh() 202 | assert result.status == ResultStatus.SUCCEEDED 203 | ``` 204 | 205 | #### Exceptions 206 | 207 | If a task raised an exception, its `.exception_class` will be the exception class raised: 208 | 209 | ```python 210 | assert result.exception_class == ValueError 211 | ``` 212 | 213 | Note that this is just the type of exception, and contains no other values. The traceback information is reduced to a string that you can print to help debugging: 214 | 215 | ```python 216 | assert isinstance(result.traceback, str) 217 | ``` 218 | 219 | ### Backend introspecting 220 | 221 | Because `django-tasks` enables support for multiple different backends, those backends may not support all features, and it can be useful to determine this at runtime to ensure the chosen task queue meets the requirements, or to gracefully degrade functionality if it doesn't. 222 | 223 | - `supports_defer`: Can tasks be enqueued with the `run_after` attribute? 224 | - `supports_async_task`: Can coroutines be enqueued? 225 | - `supports_get_result`: Can results be retrieved after the fact (from **any** thread / process)? 
226 | 227 | ```python 228 | from django_tasks import default_task_backend 229 | 230 | assert default_task_backend.supports_get_result 231 | ``` 232 | 233 | This is particularly useful in combination with Django's [system check framework](https://docs.djangoproject.com/en/stable/topics/checks/). 234 | 235 | ### Signals 236 | 237 | A few [Signals](https://docs.djangoproject.com/en/stable/topics/signals/) are provided to more easily respond to certain task events. 238 | 239 | Whilst signals are available, they may not be the most maintainable approach. 240 | 241 | - `django_tasks.signals.task_enqueued`: Called when a task is enqueued. The sender is the backend class. Also called with the enqueued `task_result`. 242 | - `django_tasks.signals.task_finished`: Called when a task finishes (`SUCCEEDED` or `FAILED`). The sender is the backend class. Also called with the finished `task_result`. 243 | - `django_tasks.signals.task_started`: Called immediately before a task starts executing. The sender is the backend class. Also called with the started `task_result`. 244 | 245 | ## RQ 246 | 247 | The RQ-based backend acts as an interface between `django_tasks` and `RQ`, allowing tasks to be defined and enqueued using `django_tasks`, but stored in Redis and executed using RQ's workers. 248 | 249 | Once RQ is configured as necessary, the relevant `django_tasks` configuration can be added: 250 | 251 | ```python 252 | TASKS = { 253 | "default": { 254 | "BACKEND": "django_tasks.backends.rq.RQBackend", 255 | "QUEUES": ["default"] 256 | } 257 | } 258 | ``` 259 | 260 | Any queues defined in `QUEUES` must also be defined in `django-rq`'s `RQ_QUEUES` setting. 261 | 262 | ### Job class 263 | 264 | To use `rq` with `django-tasks`, a custom `Job` class must be used. 
# ruff: noqa: E402
"""
Public entry point for django-tasks.

Exposes the ``tasks`` connection handler (one backend instance per alias in the
``TASKS`` setting) and the ``default_task_backend`` proxy, alongside the core
task primitives re-exported from ``django_tasks.task``.
"""
import django_stubs_ext

# Make runtime subscripting of Django generics (e.g. BaseConnectionHandler[...])
# work on Django versions where they are typing-only.
django_stubs_ext.monkeypatch()

import importlib.metadata
from typing import Optional

from django.utils.connection import BaseConnectionHandler, ConnectionProxy
from django.utils.module_loading import import_string

from .backends.base import BaseTaskBackend
from .exceptions import InvalidTaskBackendError
from .task import (
    DEFAULT_QUEUE_NAME,
    DEFAULT_TASK_BACKEND_ALIAS,
    ResultStatus,
    Task,
    TaskResult,
    task,
)

# Single source of truth for the version: read from package metadata.
__version__ = importlib.metadata.version(__name__)

__all__ = [
    "tasks",
    "default_task_backend",
    "DEFAULT_TASK_BACKEND_ALIAS",
    "DEFAULT_QUEUE_NAME",
    "task",
    "ResultStatus",
    "Task",
    "TaskResult",
]


class TasksHandler(BaseConnectionHandler[BaseTaskBackend]):
    """
    Connection handler mapping each alias in the ``TASKS`` setting to a lazily
    constructed task backend instance.
    """

    # Name of the Django setting holding the backend configuration dict.
    settings_name = "TASKS"
    # Exception raised by BaseConnectionHandler for unknown/invalid aliases.
    exception_class = InvalidTaskBackendError

    def configure_settings(self, settings: Optional[dict]) -> dict:
        """
        Return the ``TASKS`` setting, falling back to an immediate-execution
        default backend when the setting is absent.
        """
        try:
            return super().configure_settings(settings)
        except AttributeError:
            # HACK: Force a default task backend.
            # Can be replaced with `django.conf.global_settings` once vendored.
            return {
                DEFAULT_TASK_BACKEND_ALIAS: {
                    "BACKEND": "django_tasks.backends.immediate.ImmediateBackend"
                }
            }

    def create_connection(self, alias: str) -> BaseTaskBackend:
        """
        Instantiate the backend configured under ``alias``.

        Raises:
            InvalidTaskBackendError: if the configured ``BACKEND`` dotted path
                cannot be imported.
        """
        params = self.settings[alias]

        backend = params["BACKEND"]

        try:
            backend_cls = import_string(backend)
        except ImportError as e:
            raise InvalidTaskBackendError(
                f"Could not find backend '{backend}': {e}"
            ) from e

        return backend_cls(alias=alias, params=params)  # type:ignore[no-any-return]


# Module-level handler instance; backends are created on first access per alias.
tasks = TasksHandler()

# Lazy proxy to the backend registered under the default alias.
default_task_backend: BaseTaskBackend = ConnectionProxy(  # type:ignore[assignment]
    tasks, DEFAULT_TASK_BACKEND_ALIAS
)
import signal_handlers # noqa 12 | 13 | checks.register(check_tasks) 14 | -------------------------------------------------------------------------------- /django_tasks/backends/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RealOrangeOne/django-tasks/80c3414ba074834ab857253618e2838e09d357ec/django_tasks/backends/__init__.py -------------------------------------------------------------------------------- /django_tasks/backends/base.py: -------------------------------------------------------------------------------- 1 | from abc import ABCMeta, abstractmethod 2 | from collections.abc import Iterable 3 | from inspect import iscoroutinefunction 4 | from typing import Any, TypeVar 5 | 6 | from asgiref.sync import sync_to_async 7 | from django.core.checks import messages 8 | from django.db import connections 9 | from django.utils import timezone 10 | from typing_extensions import ParamSpec 11 | 12 | from django_tasks.exceptions import InvalidTaskError 13 | from django_tasks.task import MAX_PRIORITY, MIN_PRIORITY, Task, TaskResult 14 | from django_tasks.utils import is_module_level_function 15 | 16 | T = TypeVar("T") 17 | P = ParamSpec("P") 18 | 19 | 20 | class BaseTaskBackend(metaclass=ABCMeta): 21 | alias: str 22 | enqueue_on_commit: bool 23 | 24 | task_class = Task 25 | 26 | supports_defer = False 27 | """Can tasks be enqueued with the run_after attribute""" 28 | 29 | supports_async_task = False 30 | """Can coroutines be enqueued""" 31 | 32 | supports_get_result = False 33 | """Can results be retrieved after the fact (from **any** thread / process)""" 34 | 35 | def __init__(self, alias: str, params: dict) -> None: 36 | from django_tasks import DEFAULT_QUEUE_NAME 37 | 38 | self.alias = alias 39 | self.queues = set(params.get("QUEUES", [DEFAULT_QUEUE_NAME])) 40 | self.enqueue_on_commit = bool(params.get("ENQUEUE_ON_COMMIT", True)) 41 | 42 | def _get_enqueue_on_commit_for_task(self, task: 
Task) -> bool: 43 | """ 44 | Determine the correct `enqueue_on_commit` setting to use for a given task. 45 | """ 46 | 47 | # If the task defines it, use that, otherwise, fall back to the backend. 48 | return ( 49 | task.enqueue_on_commit 50 | if task.enqueue_on_commit is not None 51 | else self.enqueue_on_commit 52 | ) 53 | 54 | def validate_task(self, task: Task) -> None: 55 | """ 56 | Determine whether the provided task is one which can be executed by the backend. 57 | """ 58 | if not is_module_level_function(task.func): 59 | raise InvalidTaskError("Task function must be defined at a module level") 60 | 61 | if not self.supports_async_task and iscoroutinefunction(task.func): 62 | raise InvalidTaskError("Backend does not support async tasks") 63 | 64 | if ( 65 | task.priority < MIN_PRIORITY 66 | or task.priority > MAX_PRIORITY 67 | or int(task.priority) != task.priority 68 | ): 69 | raise InvalidTaskError( 70 | f"priority must be a whole number between {MIN_PRIORITY} and {MAX_PRIORITY}" 71 | ) 72 | 73 | if not self.supports_defer and task.run_after is not None: 74 | raise InvalidTaskError("Backend does not support run_after") 75 | 76 | if task.run_after is not None and not timezone.is_aware(task.run_after): 77 | raise InvalidTaskError("run_after must be an aware datetime") 78 | 79 | if self.queues and task.queue_name not in self.queues: 80 | raise InvalidTaskError( 81 | f"Queue '{task.queue_name}' is not valid for backend" 82 | ) 83 | 84 | @abstractmethod 85 | def enqueue( 86 | self, 87 | task: Task[P, T], 88 | args: P.args, # type:ignore[valid-type] 89 | kwargs: P.kwargs, # type:ignore[valid-type] 90 | ) -> TaskResult[T]: 91 | """ 92 | Queue up a task to be executed 93 | """ 94 | ... 
95 | 96 | async def aenqueue( 97 | self, 98 | task: Task[P, T], 99 | args: P.args, # type:ignore[valid-type] 100 | kwargs: P.kwargs, # type:ignore[valid-type] 101 | ) -> TaskResult[T]: 102 | """ 103 | Queue up a task function (or coroutine) to be executed 104 | """ 105 | return await sync_to_async(self.enqueue, thread_sensitive=True)( 106 | task=task, args=args, kwargs=kwargs 107 | ) 108 | 109 | def get_result(self, result_id: str) -> TaskResult: 110 | """ 111 | Retrieve a result by its id (if one exists). 112 | If one doesn't, raises ResultDoesNotExist. 113 | """ 114 | raise NotImplementedError( 115 | "This backend does not support retrieving or refreshing results." 116 | ) 117 | 118 | async def aget_result(self, result_id: str) -> TaskResult: 119 | """ 120 | Queue up a task function (or coroutine) to be executed 121 | """ 122 | return await sync_to_async(self.get_result, thread_sensitive=True)( 123 | result_id=result_id 124 | ) 125 | 126 | def check(self, **kwargs: Any) -> Iterable[messages.CheckMessage]: 127 | if self.enqueue_on_commit and not connections._settings: # type: ignore[attr-defined] 128 | yield messages.Error( 129 | "`ENQUEUE_ON_COMMIT` cannot be used when no databases are configured", 130 | hint="Set `ENQUEUE_ON_COMMIT` to False", 131 | ) 132 | -------------------------------------------------------------------------------- /django_tasks/backends/database/__init__.py: -------------------------------------------------------------------------------- 1 | from .backend import DatabaseBackend 2 | 3 | __all__ = ["DatabaseBackend"] 4 | -------------------------------------------------------------------------------- /django_tasks/backends/database/admin.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from django.contrib import admin 4 | from django.http import HttpRequest 5 | 6 | from .models import DBTaskResult 7 | 8 | 9 | @admin.register(DBTaskResult) 10 | class 
DBTaskResultAdmin(admin.ModelAdmin): 11 | list_display = ( 12 | "id", 13 | "task_name", 14 | "status", 15 | "enqueued_at", 16 | "started_at", 17 | "finished_at", 18 | "priority", 19 | "queue_name", 20 | ) 21 | list_filter = ("status", "priority", "queue_name") 22 | ordering = ["-enqueued_at"] 23 | 24 | def has_add_permission( 25 | self, request: HttpRequest, obj: Optional[DBTaskResult] = None 26 | ) -> bool: 27 | return False 28 | 29 | def has_delete_permission( 30 | self, request: HttpRequest, obj: Optional[DBTaskResult] = None 31 | ) -> bool: 32 | return False 33 | 34 | def has_change_permission( 35 | self, request: HttpRequest, obj: Optional[DBTaskResult] = None 36 | ) -> bool: 37 | return False 38 | 39 | def get_readonly_fields( 40 | self, request: HttpRequest, obj: Optional[DBTaskResult] = None 41 | ) -> list[str]: 42 | return [f.name for f in self.model._meta.fields] 43 | -------------------------------------------------------------------------------- /django_tasks/backends/database/apps.py: -------------------------------------------------------------------------------- 1 | from django.apps import AppConfig 2 | 3 | 4 | class TasksAppConfig(AppConfig): 5 | name = "django_tasks.backends.database" 6 | label = "django_tasks_database" 7 | verbose_name = "Tasks Database Backend" 8 | 9 | def ready(self) -> None: 10 | from . 
import signal_handlers # noqa 11 | -------------------------------------------------------------------------------- /django_tasks/backends/database/backend.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Iterable 2 | from dataclasses import dataclass 3 | from typing import TYPE_CHECKING, Any, TypeVar 4 | 5 | from django.apps import apps 6 | from django.core.checks import messages 7 | from django.core.exceptions import ValidationError 8 | from django.db import transaction 9 | from typing_extensions import ParamSpec 10 | 11 | from django_tasks.backends.base import BaseTaskBackend 12 | from django_tasks.exceptions import ResultDoesNotExist 13 | from django_tasks.signals import task_enqueued 14 | from django_tasks.task import Task 15 | from django_tasks.task import TaskResult as BaseTaskResult 16 | 17 | if TYPE_CHECKING: 18 | from .models import DBTaskResult 19 | 20 | T = TypeVar("T") 21 | P = ParamSpec("P") 22 | 23 | 24 | @dataclass(frozen=True) 25 | class TaskResult(BaseTaskResult[T]): 26 | db_result: "DBTaskResult" 27 | 28 | 29 | class DatabaseBackend(BaseTaskBackend): 30 | supports_async_task = True 31 | supports_get_result = True 32 | supports_defer = True 33 | 34 | def _task_to_db_task( 35 | self, 36 | task: Task[P, T], 37 | args: P.args, # type:ignore[valid-type] 38 | kwargs: P.kwargs, # type:ignore[valid-type] 39 | ) -> "DBTaskResult": 40 | from .models import DBTaskResult 41 | 42 | return DBTaskResult( 43 | args_kwargs={"args": args, "kwargs": kwargs}, 44 | priority=task.priority, 45 | task_path=task.module_path, 46 | queue_name=task.queue_name, 47 | run_after=task.run_after, # type: ignore[misc] 48 | backend_name=self.alias, 49 | ) 50 | 51 | def enqueue( 52 | self, 53 | task: Task[P, T], 54 | args: P.args, # type:ignore[valid-type] 55 | kwargs: P.kwargs, # type:ignore[valid-type] 56 | ) -> TaskResult[T]: 57 | self.validate_task(task) 58 | 59 | db_result = self._task_to_db_task(task, args, 
kwargs) 60 | 61 | def save_result() -> None: 62 | db_result.save() 63 | task_enqueued.send(type(self), task_result=db_result.task_result) 64 | 65 | if self._get_enqueue_on_commit_for_task(task): 66 | transaction.on_commit(save_result) 67 | else: 68 | save_result() 69 | 70 | return db_result.task_result 71 | 72 | def get_result(self, result_id: str) -> TaskResult: 73 | from .models import DBTaskResult 74 | 75 | try: 76 | return DBTaskResult.objects.get(id=result_id).task_result 77 | except (DBTaskResult.DoesNotExist, ValidationError) as e: 78 | raise ResultDoesNotExist(result_id) from e 79 | 80 | async def aget_result(self, result_id: str) -> TaskResult: 81 | from .models import DBTaskResult 82 | 83 | try: 84 | return (await DBTaskResult.objects.aget(id=result_id)).task_result 85 | except (DBTaskResult.DoesNotExist, ValidationError) as e: 86 | raise ResultDoesNotExist(result_id) from e 87 | 88 | def check(self, **kwargs: Any) -> Iterable[messages.CheckMessage]: 89 | yield from super().check(**kwargs) 90 | 91 | backend_name = self.__class__.__name__ 92 | 93 | if not apps.is_installed("django_tasks.backends.database"): 94 | yield messages.Error( 95 | f"{backend_name} configured as django_tasks backend, but database app not installed", 96 | "Insert 'django_tasks.backends.database' in INSTALLED_APPS", 97 | ) 98 | -------------------------------------------------------------------------------- /django_tasks/backends/database/management/commands/db_worker.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import math 3 | import os 4 | import random 5 | import signal 6 | import sys 7 | import time 8 | from argparse import ArgumentParser, ArgumentTypeError 9 | from types import FrameType 10 | from typing import Optional 11 | 12 | from django.conf import settings 13 | from django.core.exceptions import SuspiciousOperation 14 | from django.core.management.base import BaseCommand 15 | from django.db import 
close_old_connections 16 | from django.db.utils import OperationalError 17 | from django.utils.autoreload import DJANGO_AUTORELOAD_ENV, run_with_reloader 18 | 19 | from django_tasks import DEFAULT_TASK_BACKEND_ALIAS, tasks 20 | from django_tasks.backends.database.backend import DatabaseBackend 21 | from django_tasks.backends.database.models import DBTaskResult 22 | from django_tasks.backends.database.utils import exclusive_transaction 23 | from django_tasks.exceptions import InvalidTaskBackendError 24 | from django_tasks.signals import task_finished, task_started 25 | from django_tasks.task import DEFAULT_QUEUE_NAME 26 | 27 | package_logger = logging.getLogger("django_tasks") 28 | logger = logging.getLogger("django_tasks.backends.database.db_worker") 29 | 30 | 31 | class Worker: 32 | def __init__( 33 | self, 34 | *, 35 | queue_names: list[str], 36 | interval: float, 37 | batch: bool, 38 | backend_name: str, 39 | startup_delay: bool, 40 | max_tasks: Optional[int], 41 | ): 42 | self.queue_names = queue_names 43 | self.process_all_queues = "*" in queue_names 44 | self.interval = interval 45 | self.batch = batch 46 | self.backend_name = backend_name 47 | self.startup_delay = startup_delay 48 | self.max_tasks = max_tasks 49 | 50 | self.running = True 51 | self.running_task = False 52 | self._run_tasks = 0 53 | 54 | def shutdown(self, signum: int, frame: Optional[FrameType]) -> None: 55 | if not self.running: 56 | logger.warning( 57 | "Received %s - terminating current task.", signal.strsignal(signum) 58 | ) 59 | sys.exit(1) 60 | 61 | logger.warning( 62 | "Received %s - shutting down gracefully... (press Ctrl+C again to force)", 63 | signal.strsignal(signum), 64 | ) 65 | self.running = False 66 | 67 | if not self.running_task: 68 | # If we're not currently running a task, exit immediately. 69 | # This is useful if we're currently in a `sleep`. 
70 | sys.exit(0) 71 | 72 | def configure_signals(self) -> None: 73 | signal.signal(signal.SIGINT, self.shutdown) 74 | signal.signal(signal.SIGTERM, self.shutdown) 75 | if hasattr(signal, "SIGQUIT"): 76 | signal.signal(signal.SIGQUIT, self.shutdown) 77 | 78 | def start(self) -> None: 79 | logger.info("Starting worker for queues=%s", ",".join(self.queue_names)) 80 | 81 | if self.startup_delay and self.interval: 82 | # Add a random small delay before starting the loop to avoid a thundering herd 83 | time.sleep(random.random()) 84 | 85 | while self.running: 86 | tasks = DBTaskResult.objects.ready().filter(backend_name=self.backend_name) 87 | if not self.process_all_queues: 88 | tasks = tasks.filter(queue_name__in=self.queue_names) 89 | 90 | # During this transaction, all "ready" tasks are locked. Therefore, it's important 91 | # it be as efficient as possible. 92 | with exclusive_transaction(tasks.db): 93 | try: 94 | task_result = tasks.get_locked() 95 | except OperationalError as e: 96 | # Ignore locked databases and keep trying. 97 | # It should unlock eventually. 98 | if "is locked" in e.args[0]: 99 | task_result = None 100 | else: 101 | raise 102 | 103 | if task_result is not None: 104 | # "claim" the task, so it isn't run by another worker process 105 | task_result.claim() 106 | 107 | if task_result is not None: 108 | self.run_task(task_result) 109 | 110 | if self.batch and task_result is None: 111 | # If we're running in "batch" mode, terminate the loop (and thus the worker) 112 | logger.info("No more tasks to run - exiting gracefully.") 113 | return None 114 | 115 | if self.max_tasks is not None and self._run_tasks >= self.max_tasks: 116 | logger.info( 117 | "Run maximum tasks (%d) - exiting gracefully.", self._run_tasks 118 | ) 119 | return None 120 | 121 | # Emulate Django's request behaviour and check for expired 122 | # database connections periodically. 
123 | close_old_connections() 124 | 125 | # If ctrl-c has just interrupted a task, self.running was cleared, 126 | # and we should not sleep, but rather exit immediately. 127 | if self.running and not task_result: 128 | # Wait before checking for another task 129 | time.sleep(self.interval) 130 | 131 | def run_task(self, db_task_result: DBTaskResult) -> None: 132 | """ 133 | Run the given task, marking it as succeeded or failed. 134 | """ 135 | try: 136 | self.running_task = True 137 | task = db_task_result.task 138 | task_result = db_task_result.task_result 139 | 140 | backend_type = task.get_backend() 141 | 142 | task_started.send(sender=backend_type, task_result=task_result) 143 | 144 | return_value = task.call(*task_result.args, **task_result.kwargs) 145 | 146 | # Setting the return and success value inside the error handling, 147 | # So errors setting it (eg JSON encode) can still be recorded 148 | db_task_result.set_succeeded(return_value) 149 | task_finished.send( 150 | sender=backend_type, task_result=db_task_result.task_result 151 | ) 152 | except BaseException as e: 153 | db_task_result.set_failed(e) 154 | try: 155 | sender = type(db_task_result.task.get_backend()) 156 | task_result = db_task_result.task_result 157 | except (ModuleNotFoundError, SuspiciousOperation): 158 | logger.exception("Task id=%s failed unexpectedly", db_task_result.id) 159 | else: 160 | task_finished.send( 161 | sender=sender, 162 | task_result=task_result, 163 | ) 164 | finally: 165 | self.running_task = False 166 | self._run_tasks += 1 167 | 168 | 169 | def valid_backend_name(val: str) -> str: 170 | try: 171 | backend = tasks[val] 172 | except InvalidTaskBackendError as e: 173 | raise ArgumentTypeError(e.args[0]) from e 174 | if not isinstance(backend, DatabaseBackend): 175 | raise ArgumentTypeError(f"Backend '{val}' is not a database backend") 176 | return val 177 | 178 | 179 | def valid_interval(val: str) -> float: 180 | num = float(val) 181 | if not math.isfinite(num): 182 | 
raise ArgumentTypeError("Must be a finite floating point value") 183 | if num < 0: 184 | raise ArgumentTypeError("Must be greater than zero") 185 | return num 186 | 187 | 188 | def valid_max_tasks(val: str) -> int: 189 | num = int(val) 190 | if num < 0: 191 | raise ArgumentTypeError("Must be greater than zero") 192 | return num 193 | 194 | 195 | class Command(BaseCommand): 196 | help = "Run a database background worker" 197 | 198 | def add_arguments(self, parser: ArgumentParser) -> None: 199 | parser.add_argument( 200 | "--queue-name", 201 | nargs="?", 202 | default=DEFAULT_QUEUE_NAME, 203 | type=str, 204 | help="The queues to process. Separate multiple with a comma. To process all queues, use '*' (default: %(default)r)", 205 | ) 206 | parser.add_argument( 207 | "--interval", 208 | nargs="?", 209 | default=1, 210 | type=valid_interval, 211 | help="The interval (in seconds) to wait, when there are no tasks in the queue, before checking for tasks again (default: %(default)r)", 212 | ) 213 | parser.add_argument( 214 | "--batch", 215 | action="store_true", 216 | help="Process all outstanding tasks, then exit. Can be used in combination with --max-tasks.", 217 | ) 218 | parser.add_argument( 219 | "--reload", 220 | action="store_true", 221 | default=settings.DEBUG, 222 | help="Reload the worker on code changes. 
Not recommended for production as tasks may not be stopped cleanly (default: DEBUG)", 223 | ) 224 | parser.add_argument( 225 | "--backend", 226 | nargs="?", 227 | default=DEFAULT_TASK_BACKEND_ALIAS, 228 | type=valid_backend_name, 229 | dest="backend_name", 230 | help="The backend to operate on (default: %(default)r)", 231 | ) 232 | parser.add_argument( 233 | "--no-startup-delay", 234 | action="store_false", 235 | dest="startup_delay", 236 | help="Don't add a small delay at startup.", 237 | ) 238 | parser.add_argument( 239 | "--max-tasks", 240 | nargs="?", 241 | default=None, 242 | type=valid_max_tasks, 243 | help="If provided, the maximum number of tasks the worker will execute before exiting.", 244 | ) 245 | 246 | def configure_logging(self, verbosity: int) -> None: 247 | if verbosity == 0: 248 | package_logger.setLevel(logging.CRITICAL) 249 | elif verbosity == 1: 250 | package_logger.setLevel(logging.INFO) 251 | else: 252 | package_logger.setLevel(logging.DEBUG) 253 | 254 | # If no handler is configured, the logs won't show, 255 | # regardless of the set level. 256 | if not package_logger.hasHandlers(): 257 | package_logger.addHandler(logging.StreamHandler(self.stdout)) 258 | 259 | def handle( 260 | self, 261 | *, 262 | verbosity: int, 263 | queue_name: str, 264 | interval: float, 265 | batch: bool, 266 | backend_name: str, 267 | startup_delay: bool, 268 | reload: bool, 269 | max_tasks: Optional[int], 270 | **options: dict, 271 | ) -> None: 272 | self.configure_logging(verbosity) 273 | 274 | if reload and batch: 275 | logger.warning( 276 | "Warning: --reload and --batch cannot be specified together. Disabling autoreload." 
277 | ) 278 | reload = False 279 | 280 | worker = Worker( 281 | queue_names=queue_name.split(","), 282 | interval=interval, 283 | batch=batch, 284 | backend_name=backend_name, 285 | startup_delay=startup_delay, 286 | max_tasks=max_tasks, 287 | ) 288 | 289 | if reload: 290 | if os.environ.get(DJANGO_AUTORELOAD_ENV) == "true": 291 | # Only the child process should configure its signals 292 | worker.configure_signals() 293 | 294 | run_with_reloader(worker.start) 295 | else: 296 | worker.configure_signals() 297 | worker.start() 298 | -------------------------------------------------------------------------------- /django_tasks/backends/database/management/commands/prune_db_task_results.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from argparse import ArgumentParser, ArgumentTypeError 3 | from datetime import timedelta 4 | from typing import Optional 5 | 6 | from django.core.management.base import BaseCommand 7 | from django.db.models import Q 8 | from django.utils import timezone 9 | 10 | from django_tasks import DEFAULT_QUEUE_NAME, DEFAULT_TASK_BACKEND_ALIAS, tasks 11 | from django_tasks.backends.database.backend import DatabaseBackend 12 | from django_tasks.backends.database.models import DBTaskResult 13 | from django_tasks.exceptions import InvalidTaskBackendError 14 | from django_tasks.task import ResultStatus 15 | 16 | logger = logging.getLogger("django_tasks.backends.database.prune_db_task_results") 17 | 18 | 19 | def valid_backend_name(val: str) -> DatabaseBackend: 20 | try: 21 | backend = tasks[val] 22 | except InvalidTaskBackendError as e: 23 | raise ArgumentTypeError(e.args[0]) from e 24 | if not isinstance(backend, DatabaseBackend): 25 | raise ArgumentTypeError(f"Backend '{val}' is not a database backend") 26 | return backend 27 | 28 | 29 | def valid_positive_int(val: str) -> int: 30 | num = int(val) 31 | if num < 0: 32 | raise ArgumentTypeError("Must be greater than zero") 33 | return num 34 | 
35 | 36 | class Command(BaseCommand): 37 | help = "Prune finished database task results" 38 | 39 | def add_arguments(self, parser: ArgumentParser) -> None: 40 | parser.add_argument( 41 | "--backend", 42 | nargs="?", 43 | default=DEFAULT_TASK_BACKEND_ALIAS, 44 | type=valid_backend_name, 45 | dest="backend", 46 | help="The backend to operate on (default: %(default)r)", 47 | ) 48 | parser.add_argument( 49 | "--queue-name", 50 | nargs="?", 51 | default=DEFAULT_QUEUE_NAME, 52 | type=str, 53 | help="The queues to process. Separate multiple with a comma. To process all queues, use '*' (default: %(default)r)", 54 | ) 55 | parser.add_argument( 56 | "--min-age-days", 57 | nargs="?", 58 | default=14, 59 | type=valid_positive_int, 60 | help="The minimum age (in days) of a finished task result to be pruned (default: %(default)r)", 61 | ) 62 | parser.add_argument( 63 | "--failed-min-age-days", 64 | nargs="?", 65 | default=None, 66 | type=valid_positive_int, 67 | help="The minimum age (in days) of a failed task result to be pruned (default: min-age-days)", 68 | ) 69 | parser.add_argument( 70 | "--dry-run", 71 | action="store_true", 72 | help="Don't delete the task results, just show how many would be deleted", 73 | ) 74 | 75 | def configure_logging(self, verbosity: int) -> None: 76 | if verbosity == 0: 77 | logger.setLevel(logging.WARNING) 78 | elif verbosity == 1: 79 | logger.setLevel(logging.INFO) 80 | else: 81 | logger.setLevel(logging.DEBUG) 82 | 83 | # If no handler is configured, the logs won't show, 84 | # regardless of the set level. 
85 | if not logger.hasHandlers(): 86 | logger.addHandler(logging.StreamHandler(self.stdout)) 87 | 88 | def handle( 89 | self, 90 | *, 91 | verbosity: int, 92 | backend: DatabaseBackend, 93 | min_age_days: int, 94 | failed_min_age_days: Optional[int], 95 | queue_name: str, 96 | dry_run: bool, 97 | **options: dict, 98 | ) -> None: 99 | self.configure_logging(verbosity) 100 | 101 | min_age = timezone.now() - timedelta(days=min_age_days) 102 | failed_min_age = ( 103 | (timezone.now() - timedelta(days=failed_min_age_days)) 104 | if failed_min_age_days 105 | else None 106 | ) 107 | 108 | results = DBTaskResult.objects.finished().filter(backend_name=backend.alias) 109 | 110 | queue_names = queue_name.split(",") 111 | if "*" not in queue_names: 112 | results = results.filter(queue_name__in=queue_names) 113 | 114 | if failed_min_age is None: 115 | results = results.filter(finished_at__lte=min_age) 116 | else: 117 | results = results.filter( 118 | Q(status=ResultStatus.SUCCEEDED, finished_at__lte=min_age) 119 | | Q(status=ResultStatus.FAILED, finished_at__lte=failed_min_age) 120 | ) 121 | 122 | if dry_run: 123 | logger.info("Would delete %d task result(s)", results.count()) 124 | else: 125 | deleted, _ = results.delete() 126 | logger.info("Deleted %d task result(s)", deleted) 127 | -------------------------------------------------------------------------------- /django_tasks/backends/database/migrations/0001_initial.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2.13 on 2024-06-08 09:10 2 | 3 | import uuid 4 | 5 | from django.db import migrations, models 6 | 7 | import django_tasks.backends.database.models 8 | 9 | 10 | class Migration(migrations.Migration): 11 | initial = True 12 | 13 | dependencies = [] 14 | 15 | operations = [ 16 | migrations.CreateModel( 17 | name="DBTaskResult", 18 | fields=[ 19 | ( 20 | "id", 21 | models.UUIDField( 22 | default=uuid.uuid4, 23 | editable=False, 24 | primary_key=True, 25 | 
serialize=False, 26 | ), 27 | ), 28 | ( 29 | "status", 30 | models.CharField( 31 | choices=[ 32 | ("NEW", "New"), 33 | ("RUNNING", "Running"), 34 | ("FAILED", "Failed"), 35 | ("COMPLETE", "Complete"), 36 | ], 37 | default="NEW", 38 | max_length=8, 39 | ), 40 | ), 41 | ("args_kwargs", models.JSONField()), 42 | ("priority", models.PositiveSmallIntegerField(default=0)), 43 | ("task_path", models.TextField()), 44 | ("queue_name", models.TextField(default="default")), 45 | ("backend_name", models.TextField()), 46 | ("run_after", models.DateTimeField(null=True)), 47 | ("result", models.JSONField(default=None, null=True)), 48 | ], 49 | options={ 50 | "ordering": [ 51 | models.OrderBy(models.F("priority"), descending=True), 52 | models.OrderBy( 53 | models.F("run_after"), descending=True, nulls_last=True 54 | ), 55 | ], 56 | }, 57 | bases=(django_tasks.backends.database.models.GenericBase, models.Model), 58 | ), 59 | ] 60 | -------------------------------------------------------------------------------- /django_tasks/backends/database/migrations/0002_alter_dbtaskresult_options.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2.13 on 2024-06-08 15:24 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("django_tasks_database", "0001_initial"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterModelOptions( 13 | name="dbtaskresult", 14 | options={ 15 | "ordering": [ 16 | models.OrderBy(models.F("priority"), descending=True), 17 | models.OrderBy( 18 | models.F("run_after"), descending=True, nulls_last=True 19 | ), 20 | ], 21 | "verbose_name": "Task Result", 22 | "verbose_name_plural": "Task Results", 23 | }, 24 | ), 25 | ] 26 | -------------------------------------------------------------------------------- /django_tasks/backends/database/migrations/0003_dbtaskresult_enqueued_at_dbtaskresult_finished_at.py: 
-------------------------------------------------------------------------------- 1 | # Generated by Django 5.0.6 on 2024-06-13 10:24 2 | 3 | import django.utils.timezone 4 | from django.db import migrations, models 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("django_tasks_database", "0002_alter_dbtaskresult_options"), 10 | ] 11 | 12 | operations = [ 13 | migrations.AddField( 14 | model_name="dbtaskresult", 15 | name="enqueued_at", 16 | field=models.DateTimeField( 17 | auto_now_add=True, default=django.utils.timezone.now 18 | ), 19 | preserve_default=False, 20 | ), 21 | migrations.AddField( 22 | model_name="dbtaskresult", 23 | name="finished_at", 24 | field=models.DateTimeField(null=True), 25 | ), 26 | ] 27 | -------------------------------------------------------------------------------- /django_tasks/backends/database/migrations/0004_dbtaskresult_started_at.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 5.0.6 on 2024-07-04 09:31 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ( 9 | "django_tasks_database", 10 | "0003_dbtaskresult_enqueued_at_dbtaskresult_finished_at", 11 | ), 12 | ] 13 | 14 | operations = [ 15 | migrations.AddField( 16 | model_name="dbtaskresult", 17 | name="started_at", 18 | field=models.DateTimeField(null=True), 19 | ), 20 | ] 21 | -------------------------------------------------------------------------------- /django_tasks/backends/database/migrations/0005_alter_dbtaskresult_priority_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2.13 on 2024-07-10 15:48 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("django_tasks_database", "0004_dbtaskresult_started_at"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | 
model_name="dbtaskresult", 14 | name="priority", 15 | field=models.IntegerField(default=0), 16 | ), 17 | migrations.AddConstraint( 18 | model_name="dbtaskresult", 19 | constraint=models.CheckConstraint( 20 | check=models.Q(("priority__range", (-100, 100))), name="priority_range" 21 | ), 22 | ), 23 | ] 24 | -------------------------------------------------------------------------------- /django_tasks/backends/database/migrations/0006_alter_dbtaskresult_args_kwargs_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2.14 on 2024-08-02 13:17 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("django_tasks_database", "0005_alter_dbtaskresult_priority_and_more"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name="dbtaskresult", 14 | name="args_kwargs", 15 | field=models.JSONField(verbose_name="args kwargs"), 16 | ), 17 | migrations.AlterField( 18 | model_name="dbtaskresult", 19 | name="backend_name", 20 | field=models.TextField(verbose_name="backend name"), 21 | ), 22 | migrations.AlterField( 23 | model_name="dbtaskresult", 24 | name="enqueued_at", 25 | field=models.DateTimeField(auto_now_add=True, verbose_name="enqueued at"), 26 | ), 27 | migrations.AlterField( 28 | model_name="dbtaskresult", 29 | name="finished_at", 30 | field=models.DateTimeField(null=True, verbose_name="finished at"), 31 | ), 32 | migrations.AlterField( 33 | model_name="dbtaskresult", 34 | name="priority", 35 | field=models.IntegerField(default=0, verbose_name="priority"), 36 | ), 37 | migrations.AlterField( 38 | model_name="dbtaskresult", 39 | name="queue_name", 40 | field=models.TextField(default="default", verbose_name="queue name"), 41 | ), 42 | migrations.AlterField( 43 | model_name="dbtaskresult", 44 | name="result", 45 | field=models.JSONField(default=None, null=True, verbose_name="result"), 46 | ), 47 | 
migrations.AlterField( 48 | model_name="dbtaskresult", 49 | name="run_after", 50 | field=models.DateTimeField(null=True, verbose_name="run after"), 51 | ), 52 | migrations.AlterField( 53 | model_name="dbtaskresult", 54 | name="started_at", 55 | field=models.DateTimeField(null=True, verbose_name="started at"), 56 | ), 57 | migrations.AlterField( 58 | model_name="dbtaskresult", 59 | name="status", 60 | field=models.CharField( 61 | choices=[ 62 | ("NEW", "New"), 63 | ("RUNNING", "Running"), 64 | ("FAILED", "Failed"), 65 | ("COMPLETE", "Complete"), 66 | ], 67 | default="NEW", 68 | max_length=8, 69 | verbose_name="status", 70 | ), 71 | ), 72 | migrations.AlterField( 73 | model_name="dbtaskresult", 74 | name="task_path", 75 | field=models.TextField(verbose_name="task path"), 76 | ), 77 | ] 78 | -------------------------------------------------------------------------------- /django_tasks/backends/database/migrations/0007_add_separate_results_fields.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2.13 on 2024-08-23 14:38 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("django_tasks_database", "0006_alter_dbtaskresult_args_kwargs_and_more"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="dbtaskresult", 14 | name="exception_data", 15 | field=models.JSONField( 16 | default=None, null=True, verbose_name="exception data" 17 | ), 18 | ), 19 | migrations.AddField( 20 | model_name="dbtaskresult", 21 | name="return_value", 22 | field=models.JSONField( 23 | default=None, null=True, verbose_name="return value" 24 | ), 25 | ), 26 | ] 27 | -------------------------------------------------------------------------------- /django_tasks/backends/database/migrations/0008_separate_results_field.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2.13 on 2024-08-23 
# Generated by Django 4.2.13 on 2024-08-23 14:38

from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.migrations.state import StateApps

from django_tasks import ResultStatus


def separate_results_field(
    apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
) -> None:
    """Forwards: copy `result` into the status-appropriate dedicated column."""
    DBTaskResult = apps.get_model("django_tasks_database", "DBTaskResult")
    results = DBTaskResult.objects.using(schema_editor.connection.alias)

    # If a task succeeded, the result is its return value
    results.filter(status=ResultStatus.SUCCEEDED).update(
        return_value=models.F("result")
    )

    # If a task failed, the result is the exception data (or nothing)
    results.filter(status=ResultStatus.FAILED).update(
        exception_data=models.F("result")
    )


def merge_results_field(
    apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
) -> None:
    """Backwards: fold the dedicated columns back into `result`."""
    DBTaskResult = apps.get_model("django_tasks_database", "DBTaskResult")
    results = DBTaskResult.objects.using(schema_editor.connection.alias)

    # If a task succeeded, the result is its return value
    results.filter(status=ResultStatus.SUCCEEDED).update(
        result=models.F("return_value")
    )

    # If a task failed, the result is the exception data (or nothing)
    results.filter(status=ResultStatus.FAILED).update(
        result=models.F("exception_data")
    )


class Migration(migrations.Migration):
    dependencies = [
        ("django_tasks_database", "0007_add_separate_results_fields"),
    ]

    operations = [migrations.RunPython(separate_results_field, merge_results_field)]
# Generated by Django 4.2.13 on 2024-08-23 14:38

from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.migrations.state import StateApps

from django_tasks import ResultStatus


# NOTE: these functions were previously named `separate_results_field` /
# `merge_results_field` — names copy-pasted from migration 0008 which
# misdescribed what this migration does (a status rename). Renaming the
# module-local callables is safe: they are referenced only by the
# `RunPython` operation below.
def rename_complete_to_succeeded(
    apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
) -> None:
    """Forwards: rename the legacy "COMPLETE" status to "SUCCEEDED"."""
    DBTaskResult = apps.get_model("django_tasks_database", "DBTaskResult")

    DBTaskResult.objects.using(schema_editor.connection.alias).filter(
        status="COMPLETE"
    ).update(status=ResultStatus.SUCCEEDED)


def rename_succeeded_to_complete(
    apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
) -> None:
    """Backwards: restore the legacy "COMPLETE" status."""
    DBTaskResult = apps.get_model("django_tasks_database", "DBTaskResult")

    DBTaskResult.objects.using(schema_editor.connection.alias).filter(
        status=ResultStatus.SUCCEEDED
    ).update(status="COMPLETE")


class Migration(migrations.Migration):
    dependencies = [
        ("django_tasks_database", "0010_alter_dbtaskresult_status"),
    ]

    operations = [
        migrations.RunPython(
            rename_complete_to_succeeded, rename_succeeded_to_complete
        )
    ]
# Generated by Django 4.2.13 on 2024-08-23 14:38

from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.migrations.state import StateApps
from django.db.models.functions import Coalesce


def separate_exception_fields(
    apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
) -> None:
    """Copy keys out of `exception_data` JSON into the dedicated text columns."""
    DBTaskResult = apps.get_model("django_tasks_database", "DBTaskResult")

    # A key may be absent (SQL NULL), so coalesce to an empty string.
    empty = models.Value("", models.JSONField())

    DBTaskResult.objects.using(schema_editor.connection.alias).update(
        exception_class_path=Coalesce(
            models.F("exception_data__exc_type"), empty
        ),
        traceback=Coalesce(
            models.F("exception_data__exc_traceback"), empty
        ),
    )


class Migration(migrations.Migration):
    dependencies = [
        ("django_tasks_database", "0012_add_separate_exception_fields"),
    ]

    operations = [migrations.RunPython(separate_exception_fields)]
name="dbtaskresult", 14 | options={ 15 | "ordering": [ 16 | models.OrderBy(models.F("priority"), descending=True), 17 | models.OrderBy(models.F("run_after"), nulls_last=True), 18 | ], 19 | "verbose_name": "Task Result", 20 | "verbose_name_plural": "Task Results", 21 | }, 22 | ), 23 | ] 24 | -------------------------------------------------------------------------------- /django_tasks/backends/database/migrations/0016_alter_dbtaskresult_options_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 5.2 on 2025-05-02 13:48 2 | 3 | import datetime 4 | 5 | from django.db import migrations, models 6 | 7 | 8 | class Migration(migrations.Migration): 9 | dependencies = [ 10 | ("django_tasks_database", "0015_correctly_order_run_after"), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterModelOptions( 15 | name="dbtaskresult", 16 | options={ 17 | "ordering": [ 18 | models.OrderBy(models.F("priority"), descending=True), 19 | models.OrderBy(models.F("run_after")), 20 | ], 21 | "verbose_name": "Task Result", 22 | "verbose_name_plural": "Task Results", 23 | }, 24 | ), 25 | migrations.AlterField( 26 | model_name="dbtaskresult", 27 | name="backend_name", 28 | field=models.CharField(max_length=32, verbose_name="backend name"), 29 | ), 30 | migrations.AlterField( 31 | model_name="dbtaskresult", 32 | name="queue_name", 33 | field=models.CharField( 34 | default="default", max_length=32, verbose_name="queue name" 35 | ), 36 | ), 37 | migrations.AlterField( 38 | model_name="dbtaskresult", 39 | name="run_after", 40 | field=models.DateTimeField( 41 | default=datetime.datetime( 42 | 9999, 1, 1, 0, 0, tzinfo=datetime.timezone.utc 43 | ), 44 | verbose_name="run after", 45 | ), 46 | preserve_default=False, 47 | ), 48 | migrations.AddIndex( 49 | model_name="dbtaskresult", 50 | index=models.Index( 51 | models.F("status"), 52 | models.OrderBy(models.F("priority"), descending=True), 53 | models.OrderBy(models.F("run_after")), 54 | 
condition=models.Q(("status", "NEW")), 55 | name="django_task_new_ordering_idx", 56 | ), 57 | ), 58 | migrations.AddIndex( 59 | model_name="dbtaskresult", 60 | index=models.Index( 61 | fields=["queue_name"], name="django_task_queue_n_99c321_idx" 62 | ), 63 | ), 64 | migrations.AddIndex( 65 | model_name="dbtaskresult", 66 | index=models.Index( 67 | fields=["backend_name"], name="django_task_backend_071754_idx" 68 | ), 69 | ), 70 | ] 71 | -------------------------------------------------------------------------------- /django_tasks/backends/database/migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RealOrangeOne/django-tasks/80c3414ba074834ab857253618e2838e09d357ec/django_tasks/backends/database/migrations/__init__.py -------------------------------------------------------------------------------- /django_tasks/backends/database/models.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import logging 3 | import uuid 4 | from typing import TYPE_CHECKING, Any, Generic, Optional, TypeVar 5 | 6 | import django 7 | from django.core.exceptions import SuspiciousOperation 8 | from django.db import models 9 | from django.db.models import F, Q 10 | from django.db.models.constraints import CheckConstraint 11 | from django.utils import timezone 12 | from django.utils.module_loading import import_string 13 | from django.utils.translation import gettext_lazy as _ 14 | from typing_extensions import ParamSpec 15 | 16 | from django_tasks.task import ( 17 | DEFAULT_PRIORITY, 18 | DEFAULT_QUEUE_NAME, 19 | MAX_PRIORITY, 20 | MIN_PRIORITY, 21 | ResultStatus, 22 | Task, 23 | ) 24 | from django_tasks.utils import get_exception_traceback, get_module_path, retry 25 | 26 | from .utils import normalize_uuid 27 | 28 | logger = logging.getLogger("django_tasks.backends.database") 29 | 30 | T = TypeVar("T") 31 | P = ParamSpec("P") 32 | 33 | if 
TYPE_CHECKING: 34 | from .backend import TaskResult 35 | 36 | class GenericBase(Generic[P, T]): 37 | pass 38 | 39 | else: 40 | 41 | class GenericBase: 42 | """ 43 | https://code.djangoproject.com/ticket/33174 44 | """ 45 | 46 | def __class_getitem__(cls, _): 47 | return cls 48 | 49 | 50 | DATE_MAX = datetime.datetime(9999, 1, 1, tzinfo=datetime.timezone.utc) 51 | 52 | 53 | class DBTaskResultQuerySet(models.QuerySet): 54 | def ready(self) -> "DBTaskResultQuerySet": 55 | """ 56 | Return tasks which are ready to be processed. 57 | """ 58 | return self.filter( 59 | status=ResultStatus.NEW, 60 | ).filter(models.Q(run_after=DATE_MAX) | models.Q(run_after__lte=timezone.now())) 61 | 62 | def succeeded(self) -> "DBTaskResultQuerySet": 63 | return self.filter(status=ResultStatus.SUCCEEDED) 64 | 65 | def failed(self) -> "DBTaskResultQuerySet": 66 | return self.filter(status=ResultStatus.FAILED) 67 | 68 | def running(self) -> "DBTaskResultQuerySet": 69 | return self.filter(status=ResultStatus.RUNNING) 70 | 71 | def finished(self) -> "DBTaskResultQuerySet": 72 | return self.failed() | self.succeeded() 73 | 74 | @retry() 75 | def get_locked(self) -> Optional["DBTaskResult"]: 76 | """ 77 | Get a job, locking the row and accounting for deadlocks. 
78 | """ 79 | return self.select_for_update(skip_locked=True).first() 80 | 81 | 82 | class DBTaskResult(GenericBase[P, T], models.Model): 83 | id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) 84 | 85 | status = models.CharField( 86 | _("status"), 87 | choices=ResultStatus.choices, 88 | default=ResultStatus.NEW, 89 | max_length=max(len(value) for value in ResultStatus.values), 90 | ) 91 | 92 | enqueued_at = models.DateTimeField(_("enqueued at"), auto_now_add=True) 93 | started_at = models.DateTimeField(_("started at"), null=True) 94 | finished_at = models.DateTimeField(_("finished at"), null=True) 95 | 96 | args_kwargs = models.JSONField(_("args kwargs")) 97 | 98 | priority = models.IntegerField(_("priority"), default=DEFAULT_PRIORITY) 99 | 100 | task_path = models.TextField(_("task path")) 101 | 102 | queue_name = models.CharField( 103 | _("queue name"), default=DEFAULT_QUEUE_NAME, max_length=32 104 | ) 105 | backend_name = models.CharField(_("backend name"), max_length=32) 106 | 107 | run_after = models.DateTimeField(_("run after")) 108 | 109 | return_value = models.JSONField(_("return value"), default=None, null=True) 110 | 111 | exception_class_path = models.TextField(_("exception class path")) 112 | traceback = models.TextField(_("traceback")) 113 | 114 | objects = DBTaskResultQuerySet.as_manager() 115 | 116 | class Meta: 117 | ordering = [F("priority").desc(), F("run_after").asc()] 118 | verbose_name = _("Task Result") 119 | verbose_name_plural = _("Task Results") 120 | indexes = [ 121 | models.Index( 122 | "status", 123 | *ordering, 124 | name="django_task_new_ordering_idx", 125 | condition=Q(status=ResultStatus.NEW), 126 | ), 127 | models.Index(fields=["queue_name"]), 128 | models.Index(fields=["backend_name"]), 129 | ] 130 | 131 | if django.VERSION >= (5, 1): 132 | constraints = [ 133 | CheckConstraint( 134 | condition=Q(priority__range=(MIN_PRIORITY, MAX_PRIORITY)), 135 | name="priority_range", 136 | ) 137 | ] 138 | else: 139 | 
constraints = [ 140 | CheckConstraint( 141 | check=Q(priority__range=(MIN_PRIORITY, MAX_PRIORITY)), 142 | name="priority_range", 143 | ) 144 | ] 145 | 146 | @property 147 | def task(self) -> Task[P, T]: 148 | task = import_string(self.task_path) 149 | 150 | if not isinstance(task, Task): 151 | raise SuspiciousOperation( 152 | f"Task {self.id} does not point to a Task ({self.task_path})" 153 | ) 154 | 155 | return task.using( 156 | priority=self.priority, 157 | queue_name=self.queue_name, 158 | run_after=None if self.run_after == DATE_MAX else self.run_after, 159 | backend=self.backend_name, 160 | ) 161 | 162 | @property 163 | def task_result(self) -> "TaskResult[T]": 164 | from .backend import TaskResult 165 | 166 | try: 167 | exception_class = import_string(self.exception_class_path) 168 | except ImportError: 169 | exception_class = None 170 | 171 | task_result = TaskResult[T]( 172 | db_result=self, 173 | task=self.task, 174 | id=normalize_uuid(self.id), 175 | status=ResultStatus[self.status], 176 | enqueued_at=self.enqueued_at, 177 | started_at=self.started_at, 178 | finished_at=self.finished_at, 179 | args=self.args_kwargs["args"], 180 | kwargs=self.args_kwargs["kwargs"], 181 | backend=self.backend_name, 182 | ) 183 | 184 | object.__setattr__(task_result, "_exception_class", exception_class) 185 | object.__setattr__(task_result, "_traceback", self.traceback or None) 186 | object.__setattr__(task_result, "_return_value", self.return_value) 187 | 188 | return task_result 189 | 190 | @property 191 | def task_name(self) -> str: 192 | # If the function for an existing task is no longer available, it'll either raise an 193 | # ImportError or ModuleNotFoundError (a subclass of ImportError). 
194 | try: 195 | return self.task.name 196 | except ImportError: 197 | pass 198 | 199 | try: 200 | return self.task_path.rsplit(".", 1)[1] 201 | except IndexError: 202 | return self.task_path 203 | 204 | @retry(backoff_delay=0) 205 | def claim(self) -> None: 206 | """ 207 | Mark as job as being run 208 | """ 209 | self.status = ResultStatus.RUNNING 210 | self.started_at = timezone.now() 211 | self.save(update_fields=["status", "started_at"]) 212 | 213 | @retry() 214 | def set_succeeded(self, return_value: Any) -> None: 215 | self.status = ResultStatus.SUCCEEDED 216 | self.finished_at = timezone.now() 217 | self.return_value = return_value 218 | self.exception_class_path = "" 219 | self.traceback = "" 220 | self.save( 221 | update_fields=[ 222 | "status", 223 | "return_value", 224 | "finished_at", 225 | "exception_class_path", 226 | "traceback", 227 | ] 228 | ) 229 | 230 | @retry() 231 | def set_failed(self, exc: BaseException) -> None: 232 | self.status = ResultStatus.FAILED 233 | self.finished_at = timezone.now() 234 | self.exception_class_path = get_module_path(type(exc)) 235 | self.traceback = get_exception_traceback(exc) 236 | self.return_value = None 237 | self.save( 238 | update_fields=[ 239 | "status", 240 | "return_value", 241 | "finished_at", 242 | "exception_class_path", 243 | "traceback", 244 | ] 245 | ) 246 | -------------------------------------------------------------------------------- /django_tasks/backends/database/signal_handlers.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | from django.db.models.signals import pre_save 4 | from django.dispatch import receiver 5 | 6 | from .models import DATE_MAX, DBTaskResult 7 | 8 | 9 | @receiver(pre_save, sender=DBTaskResult) 10 | def set_run_after(sender: Any, instance: DBTaskResult, **kwargs: Any) -> None: 11 | if instance.run_after is None: 12 | instance.run_after = DATE_MAX 13 | 
def connection_requires_manual_exclusive_transaction(
    connection: BaseDatabaseWrapper,
) -> bool:
    """
    Determine whether the backend requires manual transaction handling.

    Extracted from `exclusive_transaction` for unit testing purposes.
    """
    # Only SQLite needs explicit `BEGIN EXCLUSIVE` handling.
    if connection.vendor != "sqlite":
        return False

    # Django < 5.1 has no `transaction_mode` support at all.
    if django.VERSION < (5, 1):
        return True

    if not hasattr(connection, "transaction_mode"):
        # Manually called to set `transaction_mode`
        connection.get_connection_params()

    return connection.transaction_mode != "EXCLUSIVE"  # type:ignore[attr-defined,no-any-return]


@contextmanager
def exclusive_transaction(using: Optional[str] = None) -> Generator[Any, Any, Any]:
    """
    Wrapper around `transaction.atomic` which ensures transactions on SQLite are exclusive.

    This functionality is built-in to Django 5.1+.
    """
    connection: BaseDatabaseWrapper = transaction.get_connection(using)

    if connection_requires_manual_exclusive_transaction(connection):
        with connection.cursor() as c:
            c.execute("BEGIN EXCLUSIVE")
            try:
                yield
            except BaseException:
                # Bug fix: the previous implementation ran COMMIT in a
                # `finally`, committing a partially-applied transaction when
                # the body raised. Mirror `transaction.atomic` semantics and
                # roll back on error instead.
                c.execute("ROLLBACK")
                raise
            else:
                c.execute("COMMIT")
    else:
        with transaction.atomic(using=using):
            yield
def normalize_uuid(val: Union[str, UUID]) -> str:
    """
    Normalize a UUID into its dashed representation.

    This works around engines like MySQL which don't store values in a uuid field,
    and thus drops the dashes.
    """
    # Round-trip through `UUID` so dash-less strings come back canonical.
    parsed = val if isinstance(val, UUID) else UUID(val)
    return str(parsed)
transaction.on_commit(partial(self._store_result, result)) 57 | else: 58 | self._store_result(result) 59 | 60 | # Copy the task to prevent mutation issues 61 | return deepcopy(result) 62 | 63 | # We don't set `supports_get_result` as the results are scoped to the current thread 64 | def get_result(self, result_id: str) -> TaskResult: 65 | try: 66 | return next(result for result in self.results if result.id == result_id) 67 | except StopIteration: 68 | raise ResultDoesNotExist(result_id) from None 69 | 70 | def clear(self) -> None: 71 | self.results.clear() 72 | -------------------------------------------------------------------------------- /django_tasks/backends/immediate.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from functools import partial 3 | from inspect import iscoroutinefunction 4 | from typing import TypeVar 5 | 6 | from asgiref.sync import async_to_sync 7 | from django.db import transaction 8 | from django.utils import timezone 9 | from typing_extensions import ParamSpec 10 | 11 | from django_tasks.signals import task_enqueued, task_finished, task_started 12 | from django_tasks.task import ResultStatus, Task, TaskResult 13 | from django_tasks.utils import get_exception_traceback, get_random_id, json_normalize 14 | 15 | from .base import BaseTaskBackend 16 | 17 | logger = logging.getLogger(__name__) 18 | 19 | 20 | T = TypeVar("T") 21 | P = ParamSpec("P") 22 | 23 | 24 | class ImmediateBackend(BaseTaskBackend): 25 | supports_async_task = True 26 | 27 | def _execute_task(self, task_result: TaskResult) -> None: 28 | """ 29 | Execute the task for the given `TaskResult`, mutating it with the outcome 30 | """ 31 | object.__setattr__(task_result, "enqueued_at", timezone.now()) 32 | task_enqueued.send(type(self), task_result=task_result) 33 | 34 | task = task_result.task 35 | 36 | calling_task_func = ( 37 | async_to_sync(task.func) if iscoroutinefunction(task.func) else task.func 38 | ) 39 | 40 | 
object.__setattr__(task_result, "status", ResultStatus.RUNNING) 41 | object.__setattr__(task_result, "started_at", timezone.now()) 42 | task_started.send(sender=type(self), task_result=task_result) 43 | 44 | try: 45 | object.__setattr__( 46 | task_result, 47 | "_return_value", 48 | json_normalize( 49 | calling_task_func(*task_result.args, **task_result.kwargs) 50 | ), 51 | ) 52 | except BaseException as e: 53 | # If the user tried to terminate, let them 54 | if isinstance(e, KeyboardInterrupt): 55 | raise 56 | 57 | object.__setattr__(task_result, "finished_at", timezone.now()) 58 | 59 | object.__setattr__(task_result, "_traceback", get_exception_traceback(e)) 60 | object.__setattr__(task_result, "_exception_class", type(e)) 61 | 62 | object.__setattr__(task_result, "status", ResultStatus.FAILED) 63 | 64 | task_finished.send(type(self), task_result=task_result) 65 | else: 66 | object.__setattr__(task_result, "finished_at", timezone.now()) 67 | object.__setattr__(task_result, "status", ResultStatus.SUCCEEDED) 68 | 69 | task_finished.send(type(self), task_result=task_result) 70 | 71 | def enqueue( 72 | self, 73 | task: Task[P, T], 74 | args: P.args, # type:ignore[valid-type] 75 | kwargs: P.kwargs, # type:ignore[valid-type] 76 | ) -> TaskResult[T]: 77 | self.validate_task(task) 78 | 79 | task_result = TaskResult[T]( 80 | task=task, 81 | id=get_random_id(), 82 | status=ResultStatus.NEW, 83 | enqueued_at=None, 84 | started_at=None, 85 | finished_at=None, 86 | args=args, 87 | kwargs=kwargs, 88 | backend=self.alias, 89 | ) 90 | 91 | if self._get_enqueue_on_commit_for_task(task) is not False: 92 | transaction.on_commit(partial(self._execute_task, task_result)) 93 | else: 94 | self._execute_task(task_result) 95 | 96 | return task_result 97 | -------------------------------------------------------------------------------- /django_tasks/backends/rq.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Iterable 2 | 
    def perform(self) -> Any:
        """
        Override of `rq.job.Job.perform` which emits the `task_started`
        signal before delegating to RQ's own execution logic.

        The signal's sender is the backend *class* the task is configured
        against, mirroring the other django-tasks backends.
        """
        task_result = self.into_task_result()

        task_started.send(type(task_result.task.get_backend()), task_result=task_result)

        return super().perform()
    def into_task_result(self) -> TaskResult:
        """
        Build a django-tasks `TaskResult` from this RQ job.

        The backend alias is recovered from `self.meta["backend_name"]`
        (stored at enqueue time), and the job's RQ status is mapped to a
        `ResultStatus` via `RQ_STATUS_TO_RESULT_STATUS`.
        """
        task: Task = self.func

        scheduled_job_registry = ScheduledJobRegistry(  # type: ignore[no-untyped-call]
            queue=django_rq.get_queue(self.origin)
        )

        # Only scheduled jobs have a meaningful "run after" time.
        if self.is_scheduled:
            run_after = scheduled_job_registry.get_scheduled_time(self)
        else:
            run_after = None

        task_result: TaskResult = TaskResult(
            task=task.using(
                priority=DEFAULT_PRIORITY,
                queue_name=self.origin,
                run_after=run_after,
                backend=self.meta["backend_name"],
            ),
            id=self.id,
            status=RQ_STATUS_TO_RESULT_STATUS[self.get_status()],
            enqueued_at=self.enqueued_at,
            started_at=self.started_at,
            finished_at=self.ended_at,
            args=list(self.args),
            kwargs=self.kwargs,
            backend=self.meta["backend_name"],
        )

        latest_result = self.latest_result()

        if latest_result is not None:
            # `TaskResult` is a frozen dataclass, so private fields are
            # populated with `object.__setattr__`.
            if "exception_class" in self.meta:
                # Smuggled through meta by `failed_callback`.
                object.__setattr__(
                    task_result,
                    "_exception_class",
                    import_string(self.meta["exception_class"]),
                )
            object.__setattr__(task_result, "_traceback", latest_result.exc_string)
            object.__setattr__(task_result, "_return_value", latest_result.return_value)

        return task_result
def success_callback(job: Job, connection: Optional[Redis], result: Any) -> None:
    """
    RQ success callback: emit `task_finished` with a SUCCEEDED result.

    `result` (the task's return value) is delivered by RQ but unused here;
    the value is read back from the job's latest result when needed.
    """
    task_result = job.into_task_result()

    # `TaskResult` is frozen, so bypass the dataclass machinery.
    object.__setattr__(task_result, "status", ResultStatus.SUCCEEDED)

    task_finished.send(type(task_result.task.get_backend()), task_result=task_result)
from collections.abc import Iterable, Sequence
from typing import Any, Optional

from django.apps.config import AppConfig
from django.core.checks.messages import CheckMessage

from django_tasks import tasks
def check_tasks(
    app_configs: Optional[Sequence[AppConfig]] = None, **kwargs: Any
) -> Iterable[CheckMessage]:
    """
    Checks all registered task backends.

    `app_configs` is accepted (and ignored) to satisfy Django's system
    check signature; the default is now an explicit `Optional` rather
    than the implicit-Optional form deprecated by PEP 484.
    """
    for backend in tasks.all():
        yield from backend.check()
@receiver(task_finished)
def log_task_finished(
    sender: type[BaseTaskBackend], task_result: TaskResult, **kwargs: dict
) -> None:
    """Log a finished task, escalating failures to `logger.exception`."""
    # `.exception` integrates with error monitoring tools (eg Sentry)
    log_method = (
        logger.exception
        if task_result.status == ResultStatus.FAILED
        else logger.info
    )

    log_method(
        "Task id=%s path=%s state=%s",
        task_result.id,
        task_result.task.module_path,
        task_result.status,
    )
class ResultStatus(TextChoices):
    """The lifecycle state of a task result."""

    # Created/enqueued, but not yet picked up for execution
    NEW = ("NEW", _("New"))
    # Currently being executed
    RUNNING = ("RUNNING", _("Running"))
    # Finished by raising an exception
    FAILED = ("FAILED", _("Failed"))
    # Finished by returning normally
    SUCCEEDED = ("SUCCEEDED", _("Succeeded"))
96 | run_after: Optional[datetime] = None, 97 | backend: Optional[str] = None, 98 | ) -> Self: 99 | """ 100 | Create a new task with modified defaults 101 | """ 102 | 103 | changes: dict[str, Any] = {} 104 | 105 | if priority is not None: 106 | changes["priority"] = priority 107 | if queue_name is not None: 108 | changes["queue_name"] = queue_name 109 | if run_after is not None: 110 | changes["run_after"] = run_after 111 | if backend is not None: 112 | changes["backend"] = backend 113 | 114 | return replace(self, **changes) 115 | 116 | def enqueue(self, *args: P.args, **kwargs: P.kwargs) -> "TaskResult[T]": 117 | """ 118 | Queue up the task to be executed 119 | """ 120 | return self.get_backend().enqueue( 121 | self, json_normalize(args), json_normalize(kwargs) 122 | ) 123 | 124 | async def aenqueue(self, *args: P.args, **kwargs: P.kwargs) -> "TaskResult[T]": 125 | """ 126 | Queue up a task function (or coroutine) to be executed 127 | """ 128 | return await self.get_backend().aenqueue( 129 | self, json_normalize(args), json_normalize(kwargs) 130 | ) 131 | 132 | def get_result(self, result_id: str) -> "TaskResult[T]": 133 | """ 134 | Retrieve the result for a task of this type by its id (if one exists). 135 | If one doesn't, or is the wrong type, raises ResultDoesNotExist. 136 | """ 137 | result = self.get_backend().get_result(result_id) 138 | 139 | if result.task.func != self.func: 140 | raise ResultDoesNotExist 141 | 142 | return result 143 | 144 | async def aget_result(self, result_id: str) -> "TaskResult[T]": 145 | """ 146 | Retrieve the result for a task of this type by its id (if one exists). 147 | If one doesn't, or is the wrong type, raises ResultDoesNotExist. 
148 | """ 149 | result = await self.get_backend().aget_result(result_id) 150 | 151 | if result.task.func != self.func: 152 | raise ResultDoesNotExist 153 | 154 | return result 155 | 156 | def call(self, *args: P.args, **kwargs: P.kwargs) -> T: 157 | if iscoroutinefunction(self.func): 158 | return async_to_sync(self.func)(*args, **kwargs) # type:ignore[no-any-return] 159 | return self.func(*args, **kwargs) 160 | 161 | async def acall(self, *args: P.args, **kwargs: P.kwargs) -> T: 162 | if iscoroutinefunction(self.func): 163 | return await self.func(*args, **kwargs) # type:ignore[no-any-return] 164 | return await sync_to_async(self.func)(*args, **kwargs) 165 | 166 | def get_backend(self) -> "BaseTaskBackend": 167 | from . import tasks 168 | 169 | return tasks[self.backend] 170 | 171 | @property 172 | def module_path(self) -> str: 173 | return get_module_path(self.func) 174 | 175 | 176 | # Bare decorator usage 177 | # e.g. @task 178 | @overload 179 | def task(function: Callable[P, T], /) -> Task[P, T]: ... 180 | 181 | 182 | # Decorator with arguments 183 | # e.g. @task() or @task(priority=1, ...) 184 | @overload 185 | def task( 186 | *, 187 | priority: int = DEFAULT_PRIORITY, 188 | queue_name: str = DEFAULT_QUEUE_NAME, 189 | backend: str = DEFAULT_TASK_BACKEND_ALIAS, 190 | enqueue_on_commit: Optional[bool] = None, 191 | ) -> Callable[[Callable[P, T]], Task[P, T]]: ... 192 | 193 | 194 | # Implementation 195 | def task( 196 | function: Optional[Callable[P, T]] = None, 197 | *, 198 | priority: int = DEFAULT_PRIORITY, 199 | queue_name: str = DEFAULT_QUEUE_NAME, 200 | backend: str = DEFAULT_TASK_BACKEND_ALIAS, 201 | enqueue_on_commit: Optional[bool] = None, 202 | ) -> Union[Task[P, T], Callable[[Callable[P, T]], Task[P, T]]]: 203 | """ 204 | A decorator used to create a task. 205 | """ 206 | from . 
import tasks 207 | 208 | def wrapper(f: Callable[P, T]) -> Task[P, T]: 209 | return tasks[backend].task_class( 210 | priority=priority, 211 | func=f, 212 | queue_name=queue_name, 213 | backend=backend, 214 | enqueue_on_commit=enqueue_on_commit, 215 | ) 216 | 217 | if function: 218 | return wrapper(function) 219 | 220 | return wrapper 221 | 222 | 223 | @dataclass(frozen=True) 224 | class TaskResult(Generic[T]): 225 | task: Task 226 | """The task for which this is a result""" 227 | 228 | id: str 229 | """A unique identifier for the task result""" 230 | 231 | status: ResultStatus 232 | """The status of the running task""" 233 | 234 | enqueued_at: Optional[datetime] 235 | """The time this task was enqueued""" 236 | 237 | started_at: Optional[datetime] 238 | """The time this task was started""" 239 | 240 | finished_at: Optional[datetime] 241 | """The time this task was finished""" 242 | 243 | args: list 244 | """The arguments to pass to the task function""" 245 | 246 | kwargs: dict[str, Any] 247 | """The keyword arguments to pass to the task function""" 248 | 249 | backend: str 250 | """The name of the backend the task will run on""" 251 | 252 | _exception_class: Optional[type[BaseException]] = field(init=False, default=None) 253 | _traceback: Optional[str] = field(init=False, default=None) 254 | 255 | _return_value: Optional[T] = field(init=False, default=None) 256 | 257 | @property 258 | def return_value(self) -> Optional[T]: 259 | """ 260 | Get the return value of the task. 261 | 262 | If the task didn't succeed, an exception is raised. 263 | This is to distinguish against the task returning None. 
    def refresh(self) -> None:
        """
        Reload the cached task data from the task store
        """
        refreshed_task = self.task.get_backend().get_result(self.id)

        # This dataclass is frozen, so copy the refreshed attributes in
        # with `object.__setattr__` rather than plain assignment.
        for attr in TASK_REFRESH_ATTRS:
            object.__setattr__(self, attr, getattr(refreshed_task, attr))
def is_module_level_function(func: Callable) -> bool:
    """
    Whether `func` is a plain function defined at module level.

    Functions defined inside another function, method or class body have
    "<locals>" in their qualified name (PEP 3155) and cannot be reliably
    located by import path, so they are rejected.
    """
    if not inspect.isfunction(func) or inspect.isbuiltin(func):
        return False

    # Bug fix: this previously tested `"" in func.__qualname__`, which is
    # true for every string, making the function reject *all* functions.
    # Nested functions have qualnames like "outer.<locals>.inner".
    if "<locals>" in func.__qualname__:
        return False

    return True
75 | """ 76 | return "".join(random.choices(RANDOM_STRING_CHARS, k=32)) 77 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | postgresql: 3 | image: postgres:16-alpine 4 | environment: 5 | POSTGRES_USER: postgres 6 | POSTGRES_PASSWORD: postgres 7 | ports: 8 | - 15432:5432 9 | 10 | mysql: 11 | image: mysql:8.4 12 | environment: 13 | MYSQL_ROOT_PASSWORD: django 14 | MYSQL_DATABASE: django 15 | ports: 16 | - 13306:3306 17 | -------------------------------------------------------------------------------- /justfile: -------------------------------------------------------------------------------- 1 | # Recipes 2 | @default: 3 | just --list 4 | 5 | test *ARGS: 6 | python -m manage check 7 | python -m manage makemigrations --dry-run --check --noinput 8 | python -m coverage run --source=django_tasks -m manage test --shuffle --noinput {{ ARGS }} 9 | python -m coverage report 10 | python -m coverage html 11 | 12 | test-fast *ARGS: 13 | python -m manage test --shuffle --noinput --settings tests.settings_fast {{ ARGS }} 14 | 15 | format: 16 | python -m ruff check django_tasks tests --fix 17 | python -m ruff format django_tasks tests 18 | 19 | lint: 20 | python -m ruff check django_tasks tests 21 | python -m ruff format django_tasks tests --check 22 | python -m mypy django_tasks tests 23 | 24 | start-dbs: 25 | docker-compose up -d 26 | 27 | test-sqlite *ARGS: 28 | python -m manage test --shuffle --noinput {{ ARGS }} 29 | 30 | test-postgres *ARGS: 31 | DATABASE_URL=postgres://postgres:postgres@localhost:15432/postgres python -m manage test --shuffle --noinput {{ ARGS }} 32 | 33 | test-mysql *ARGS: 34 | DATABASE_URL=mysql://root:django@127.0.0.1:13306/django python -m manage test --shuffle --noinput {{ ARGS }} 35 | 36 | test-dbs *ARGS: start-dbs test-postgres test-mysql test-sqlite 37 | 
    "Environment :: Web Environment",
    "Framework :: Django",
    "Framework :: Django :: 4.2",
"django-stubs-ext", 44 | ] 45 | 46 | [project.urls] 47 | Source = "https://github.com/RealOrangeOne/django-tasks" 48 | Issues = "https://github.com/RealOrangeOne/django-tasks/issues" 49 | Changelog = "https://github.com/RealOrangeOne/django-tasks/releases" 50 | 51 | [project.optional-dependencies] 52 | dev = [ 53 | "ruff", 54 | "coverage", 55 | "django-stubs[compatible-mypy]", 56 | "dj-database-url", 57 | "django-tasks[rq]", 58 | "fakeredis", 59 | ] 60 | mysql = [ 61 | "mysqlclient" 62 | ] 63 | postgres = [ 64 | "psycopg[binary]", 65 | ] 66 | rq = [ 67 | "django-rq", 68 | "rq_scheduler", 69 | ] 70 | 71 | [tool.ruff.lint] 72 | select = ["E", "F", "I", "W", "N", "B", "A", "C4", "T20", "DJ"] 73 | ignore = ["E501", "DJ008"] 74 | 75 | [tool.ruff.lint.per-file-ignores] 76 | "tests/db_worker_test_settings.py" = ["F403", "F405"] 77 | "tests/settings_fast.py" = ["F403", "F405"] 78 | 79 | [tool.mypy] 80 | plugins = ["mypy_django_plugin.main"] 81 | warn_unused_ignores = true 82 | warn_return_any = true 83 | show_error_codes = true 84 | strict_optional = true 85 | implicit_optional = true 86 | disallow_subclassing_any = true 87 | disallow_untyped_calls = true 88 | disallow_untyped_defs = true 89 | disallow_incomplete_defs = true 90 | disallow_untyped_decorators = true 91 | check_untyped_defs = true 92 | ignore_missing_imports = true 93 | 94 | [tool.django-stubs] 95 | django_settings_module = "tests.settings" 96 | 97 | [tool.coverage.run] 98 | branch = true 99 | 100 | [tool.coverage.report] 101 | show_missing = true 102 | ignore_errors = true 103 | exclude_also = [ 104 | # Don't complain if tests don't hit defensive assertion code: 105 | "raise AssertionError", 106 | "raise NotImplementedError", 107 | 108 | # Don't complain about abstract methods, they aren't run: 109 | "@(abc.)?abstractmethod", 110 | 111 | # Nor complain about type checking 112 | "if TYPE_CHECKING:", 113 | ] 114 | -------------------------------------------------------------------------------- 
/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RealOrangeOne/django-tasks/80c3414ba074834ab857253618e2838e09d357ec/tests/__init__.py -------------------------------------------------------------------------------- /tests/db_worker_test_settings.py: -------------------------------------------------------------------------------- 1 | from .settings import * 2 | 3 | TASKS = {"default": {"BACKEND": "django_tasks.backends.database.DatabaseBackend"}} 4 | 5 | # Force the test DB to be used 6 | if "sqlite" in DATABASES["default"]["ENGINE"]: 7 | DATABASES["default"]["NAME"] = DATABASES["default"]["TEST"]["NAME"] 8 | else: 9 | DATABASES["default"]["NAME"] = "test_" + DATABASES["default"]["NAME"] 10 | -------------------------------------------------------------------------------- /tests/settings.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | import dj_database_url 5 | 6 | BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 7 | 8 | IN_TEST = "IN_TEST" in os.environ or (len(sys.argv) > 1 and sys.argv[1] == "test") 9 | 10 | ALLOWED_HOSTS = ["*"] 11 | 12 | INSTALLED_APPS = [ 13 | "django.contrib.admin", 14 | "django.contrib.auth", 15 | "django.contrib.contenttypes", 16 | "django.contrib.messages", 17 | "django.contrib.sessions", 18 | "django.contrib.staticfiles", 19 | "django_tasks", 20 | "django_tasks.backends.database", 21 | "tests", 22 | ] 23 | 24 | TEMPLATES = [ 25 | { 26 | "BACKEND": "django.template.backends.django.DjangoTemplates", 27 | "DIRS": [], 28 | "APP_DIRS": True, 29 | "OPTIONS": { 30 | "context_processors": [ 31 | "django.template.context_processors.debug", 32 | "django.template.context_processors.request", 33 | "django.contrib.auth.context_processors.auth", 34 | "django.contrib.messages.context_processors.messages", 35 | "django.template.context_processors.static", 36 | ] 37 | }, 38 | 
}, 39 | ] 40 | 41 | MIDDLEWARE = [ 42 | "django.contrib.sessions.middleware.SessionMiddleware", 43 | "django.contrib.auth.middleware.AuthenticationMiddleware", 44 | "django.contrib.messages.middleware.MessageMiddleware", 45 | ] 46 | 47 | STATIC_URL = "/static/" 48 | 49 | SECRET_KEY = "abcde12345" 50 | 51 | ROOT_URLCONF = "tests.urls" 52 | 53 | DEFAULT_AUTO_FIELD = "django.db.models.AutoField" 54 | 55 | DATABASES = { 56 | "default": dj_database_url.config( 57 | default="sqlite:///" + os.path.join(BASE_DIR, "db.sqlite3") 58 | ) 59 | } 60 | 61 | if "sqlite" in DATABASES["default"]["ENGINE"]: 62 | DATABASES["default"]["TEST"] = {"NAME": os.path.join(BASE_DIR, "db-test.sqlite3")} 63 | 64 | 65 | USE_TZ = True 66 | 67 | if not IN_TEST: 68 | DEBUG = True 69 | TASKS = {"default": {"BACKEND": "django_tasks.backends.database.DatabaseBackend"}} 70 | -------------------------------------------------------------------------------- /tests/settings_fast.py: -------------------------------------------------------------------------------- 1 | from .settings import * 2 | 3 | # Unset custom test settings to use in-memory DB 4 | if "sqlite" in DATABASES["default"]["ENGINE"]: 5 | del DATABASES["default"]["TEST"] 6 | -------------------------------------------------------------------------------- /tests/tasks.py: -------------------------------------------------------------------------------- 1 | import time 2 | from typing import Any 3 | 4 | from django_tasks import task 5 | 6 | 7 | @task() 8 | def noop_task(*args: Any, **kwargs: Any) -> None: 9 | return None 10 | 11 | 12 | @task 13 | def noop_task_from_bare_decorator(*args: Any, **kwargs: Any) -> None: 14 | return None 15 | 16 | 17 | @task() 18 | async def noop_task_async(*args: Any, **kwargs: Any) -> None: 19 | return None 20 | 21 | 22 | @task() 23 | def calculate_meaning_of_life() -> int: 24 | return 42 25 | 26 | 27 | @task() 28 | def failing_task_value_error() -> None: 29 | raise ValueError("This task failed due to ValueError") 30 | 
31 | 32 | @task() 33 | def failing_task_system_exit() -> None: 34 | raise SystemExit("This task failed due to SystemExit") 35 | 36 | 37 | @task() 38 | def failing_task_keyboard_interrupt() -> None: 39 | raise KeyboardInterrupt("This task failed due to KeyboardInterrupt") 40 | 41 | 42 | @task() 43 | def complex_exception() -> None: 44 | raise ValueError(ValueError("This task failed")) 45 | 46 | 47 | @task() 48 | def exit_task() -> None: 49 | exit(1) 50 | 51 | 52 | @task(enqueue_on_commit=True) 53 | def enqueue_on_commit_task() -> None: 54 | pass 55 | 56 | 57 | @task(enqueue_on_commit=False) 58 | def never_enqueue_on_commit_task() -> None: 59 | pass 60 | 61 | 62 | @task() 63 | def hang() -> None: 64 | """ 65 | Do nothing for 5 minutes 66 | """ 67 | time.sleep(300) 68 | 69 | 70 | @task() 71 | def sleep_for(seconds: float) -> None: 72 | time.sleep(seconds) 73 | -------------------------------------------------------------------------------- /tests/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RealOrangeOne/django-tasks/80c3414ba074834ab857253618e2838e09d357ec/tests/tests/__init__.py -------------------------------------------------------------------------------- /tests/tests/is_module_level_function_fixture.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file is used to test function is considered module level even if it's not defined yet 3 | because it's covered by a decorator. 
4 | """ 5 | 6 | from django_tasks.utils import is_module_level_function 7 | 8 | 9 | @is_module_level_function 10 | def really_module_level_function() -> None: 11 | pass 12 | 13 | 14 | inner_func_is_module_level_function = None 15 | 16 | 17 | def main() -> None: 18 | global inner_func_is_module_level_function 19 | 20 | @is_module_level_function 21 | def inner_func() -> None: 22 | pass 23 | 24 | inner_func_is_module_level_function = inner_func 25 | 26 | 27 | main() 28 | -------------------------------------------------------------------------------- /tests/tests/test_custom_backend.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | from unittest import mock 3 | 4 | from django.test import SimpleTestCase, override_settings 5 | 6 | from django_tasks import default_task_backend, tasks 7 | from django_tasks.backends.base import BaseTaskBackend 8 | from django_tasks.exceptions import InvalidTaskError 9 | from django_tasks.utils import get_module_path 10 | from tests import tasks as test_tasks 11 | 12 | 13 | class CustomBackend(BaseTaskBackend): 14 | def enqueue(self, *args: Any, **kwargs: Any) -> Any: 15 | pass 16 | 17 | 18 | @override_settings( 19 | TASKS={ 20 | "default": { 21 | "BACKEND": get_module_path(CustomBackend), 22 | "ENQUEUE_ON_COMMIT": False, 23 | } 24 | } 25 | ) 26 | class CustomBackendTestCase(SimpleTestCase): 27 | def test_using_correct_backend(self) -> None: 28 | self.assertEqual(default_task_backend, tasks["default"]) 29 | self.assertIsInstance(tasks["default"], CustomBackend) 30 | 31 | @mock.patch.multiple(CustomBackend, supports_async_task=False) 32 | def test_enqueue_async_task_on_non_async_backend(self) -> None: 33 | with self.assertRaisesMessage( 34 | InvalidTaskError, "Backend does not support async tasks" 35 | ): 36 | default_task_backend.validate_task(test_tasks.noop_task_async) 37 | -------------------------------------------------------------------------------- 
/tests/tests/test_dummy_backend.py: -------------------------------------------------------------------------------- 1 | import json 2 | from typing import cast 3 | from unittest import mock 4 | 5 | from django.db import transaction 6 | from django.db.utils import ConnectionHandler 7 | from django.test import ( 8 | SimpleTestCase, 9 | TransactionTestCase, 10 | override_settings, 11 | ) 12 | from django.urls import reverse 13 | 14 | from django_tasks import ResultStatus, Task, default_task_backend, tasks 15 | from django_tasks.backends.dummy import DummyBackend 16 | from django_tasks.exceptions import InvalidTaskError, ResultDoesNotExist 17 | from tests import tasks as test_tasks 18 | 19 | 20 | @override_settings( 21 | TASKS={ 22 | "default": { 23 | "BACKEND": "django_tasks.backends.dummy.DummyBackend", 24 | "ENQUEUE_ON_COMMIT": False, 25 | } 26 | } 27 | ) 28 | class DummyBackendTestCase(SimpleTestCase): 29 | def setUp(self) -> None: 30 | default_task_backend.clear() # type:ignore[attr-defined] 31 | 32 | def test_using_correct_backend(self) -> None: 33 | self.assertEqual(default_task_backend, tasks["default"]) 34 | self.assertIsInstance(tasks["default"], DummyBackend) 35 | 36 | def test_enqueue_task(self) -> None: 37 | for task in [test_tasks.noop_task, test_tasks.noop_task_async]: 38 | with self.subTest(task): 39 | result = cast(Task, task).enqueue(1, two=3) 40 | 41 | self.assertEqual(result.status, ResultStatus.NEW) 42 | self.assertFalse(result.is_finished) 43 | self.assertIsNone(result.started_at) 44 | self.assertIsNone(result.finished_at) 45 | with self.assertRaisesMessage(ValueError, "Task has not finished yet"): 46 | result.return_value # noqa:B018 47 | self.assertEqual(result.task, task) 48 | self.assertEqual(result.args, [1]) 49 | self.assertEqual(result.kwargs, {"two": 3}) 50 | 51 | self.assertIn(result, default_task_backend.results) # type:ignore[attr-defined] 52 | 53 | async def test_enqueue_task_async(self) -> None: 54 | for task in 
[test_tasks.noop_task, test_tasks.noop_task_async]: 55 | with self.subTest(task): 56 | result = await cast(Task, task).aenqueue() 57 | 58 | self.assertEqual(result.status, ResultStatus.NEW) 59 | self.assertFalse(result.is_finished) 60 | self.assertIsNone(result.started_at) 61 | self.assertIsNone(result.finished_at) 62 | with self.assertRaisesMessage(ValueError, "Task has not finished yet"): 63 | result.return_value # noqa:B018 64 | self.assertEqual(result.task, task) 65 | self.assertEqual(result.args, []) 66 | self.assertEqual(result.kwargs, {}) 67 | 68 | self.assertIn(result, default_task_backend.results) # type:ignore[attr-defined] 69 | 70 | def test_get_result(self) -> None: 71 | result = default_task_backend.enqueue(test_tasks.noop_task, (), {}) 72 | 73 | new_result = default_task_backend.get_result(result.id) 74 | 75 | self.assertEqual(result, new_result) 76 | 77 | async def test_get_result_async(self) -> None: 78 | result = await default_task_backend.aenqueue(test_tasks.noop_task, (), {}) 79 | 80 | new_result = await default_task_backend.aget_result(result.id) 81 | 82 | self.assertEqual(result, new_result) 83 | 84 | def test_refresh_result(self) -> None: 85 | result = default_task_backend.enqueue( 86 | test_tasks.calculate_meaning_of_life, (), {} 87 | ) 88 | 89 | enqueued_result = default_task_backend.results[0] # type:ignore[attr-defined] 90 | object.__setattr__(enqueued_result, "status", ResultStatus.SUCCEEDED) 91 | 92 | self.assertEqual(result.status, ResultStatus.NEW) 93 | result.refresh() 94 | self.assertEqual(result.status, ResultStatus.SUCCEEDED) 95 | 96 | async def test_refresh_result_async(self) -> None: 97 | result = await default_task_backend.aenqueue( 98 | test_tasks.calculate_meaning_of_life, (), {} 99 | ) 100 | 101 | enqueued_result = default_task_backend.results[0] # type:ignore[attr-defined] 102 | object.__setattr__(enqueued_result, "status", ResultStatus.SUCCEEDED) 103 | 104 | self.assertEqual(result.status, ResultStatus.NEW) 105 | await 
result.arefresh() 106 | self.assertEqual(result.status, ResultStatus.SUCCEEDED) 107 | 108 | async def test_get_missing_result(self) -> None: 109 | with self.assertRaises(ResultDoesNotExist): 110 | default_task_backend.get_result("123") 111 | 112 | with self.assertRaises(ResultDoesNotExist): 113 | await default_task_backend.aget_result("123") 114 | 115 | def test_meaning_of_life_view(self) -> None: 116 | for url in [ 117 | reverse("meaning-of-life"), 118 | reverse("meaning-of-life-async"), 119 | ]: 120 | with self.subTest(url): 121 | response = self.client.get(url) 122 | self.assertEqual(response.status_code, 200) 123 | 124 | data = json.loads(response.content) 125 | 126 | self.assertEqual(data["result"], None) 127 | self.assertEqual(data["status"], ResultStatus.NEW) 128 | 129 | result = default_task_backend.get_result(data["result_id"]) 130 | self.assertEqual(result.status, ResultStatus.NEW) 131 | 132 | def test_get_result_from_different_request(self) -> None: 133 | response = self.client.get(reverse("meaning-of-life")) 134 | self.assertEqual(response.status_code, 200) 135 | 136 | data = json.loads(response.content) 137 | result_id = data["result_id"] 138 | 139 | response = self.client.get(reverse("result", args=[result_id])) 140 | self.assertEqual(response.status_code, 200) 141 | 142 | self.assertEqual( 143 | json.loads(response.content), 144 | {"result_id": result_id, "result": None, "status": ResultStatus.NEW}, 145 | ) 146 | 147 | def test_enqueue_on_commit(self) -> None: 148 | self.assertTrue( 149 | default_task_backend._get_enqueue_on_commit_for_task( 150 | test_tasks.enqueue_on_commit_task 151 | ) 152 | ) 153 | 154 | def test_enqueue_logs(self) -> None: 155 | with self.assertLogs("django_tasks", level="DEBUG") as captured_logs: 156 | result = test_tasks.noop_task.enqueue() 157 | 158 | self.assertEqual(len(captured_logs.output), 1) 159 | self.assertIn("enqueued", captured_logs.output[0]) 160 | self.assertIn(result.id, captured_logs.output[0]) 161 | 162 | def 
test_exceptions(self) -> None: 163 | result = test_tasks.noop_task.enqueue() 164 | 165 | with self.assertRaisesMessage(ValueError, "Task has not finished yet"): 166 | result.exception_class # noqa: B018 167 | 168 | with self.assertRaisesMessage(ValueError, "Task has not finished yet"): 169 | result.traceback # noqa: B018 170 | 171 | def test_validate_disallowed_async_task(self) -> None: 172 | with mock.patch.multiple(default_task_backend, supports_async_task=False): 173 | with self.assertRaisesMessage( 174 | InvalidTaskError, "Backend does not support async tasks" 175 | ): 176 | default_task_backend.validate_task(test_tasks.noop_task_async) 177 | 178 | def test_check(self) -> None: 179 | errors = list(default_task_backend.check()) 180 | 181 | self.assertEqual(len(errors), 0, errors) 182 | 183 | @override_settings( 184 | TASKS={ 185 | "default": { 186 | "BACKEND": "django_tasks.backends.dummy.DummyBackend", 187 | "ENQUEUE_ON_COMMIT": True, 188 | } 189 | } 190 | ) 191 | @mock.patch("django_tasks.backends.base.connections", ConnectionHandler({})) 192 | def test_enqueue_on_commit_with_no_databases(self) -> None: 193 | errors = list(default_task_backend.check()) 194 | 195 | self.assertEqual(len(errors), 1) 196 | self.assertIn("Set `ENQUEUE_ON_COMMIT` to False", errors[0].hint) # type:ignore[arg-type] 197 | 198 | 199 | class DummyBackendTransactionTestCase(TransactionTestCase): 200 | @override_settings( 201 | TASKS={ 202 | "default": { 203 | "BACKEND": "django_tasks.backends.dummy.DummyBackend", 204 | "ENQUEUE_ON_COMMIT": True, 205 | } 206 | } 207 | ) 208 | def test_wait_until_transaction_commit(self) -> None: 209 | self.assertTrue(default_task_backend.enqueue_on_commit) 210 | self.assertTrue( 211 | default_task_backend._get_enqueue_on_commit_for_task(test_tasks.noop_task) 212 | ) 213 | 214 | with transaction.atomic(): 215 | test_tasks.noop_task.enqueue() 216 | 217 | self.assertEqual(len(default_task_backend.results), 0) # type:ignore[attr-defined] 218 | 219 | 
self.assertEqual(len(default_task_backend.results), 1) # type:ignore[attr-defined] 220 | 221 | @override_settings( 222 | TASKS={ 223 | "default": { 224 | "BACKEND": "django_tasks.backends.dummy.DummyBackend", 225 | "ENQUEUE_ON_COMMIT": False, 226 | } 227 | } 228 | ) 229 | def test_doesnt_wait_until_transaction_commit(self) -> None: 230 | self.assertFalse(default_task_backend.enqueue_on_commit) 231 | self.assertFalse( 232 | default_task_backend._get_enqueue_on_commit_for_task(test_tasks.noop_task) 233 | ) 234 | 235 | with transaction.atomic(): 236 | result = test_tasks.noop_task.enqueue() 237 | 238 | self.assertIsNotNone(result.enqueued_at) 239 | 240 | self.assertEqual(len(default_task_backend.results), 1) # type:ignore[attr-defined] 241 | 242 | self.assertEqual(len(default_task_backend.results), 1) # type:ignore[attr-defined] 243 | 244 | @override_settings( 245 | TASKS={ 246 | "default": { 247 | "BACKEND": "django_tasks.backends.dummy.DummyBackend", 248 | } 249 | } 250 | ) 251 | def test_wait_until_transaction_by_default(self) -> None: 252 | self.assertTrue(default_task_backend.enqueue_on_commit) 253 | self.assertTrue( 254 | default_task_backend._get_enqueue_on_commit_for_task(test_tasks.noop_task) 255 | ) 256 | 257 | with transaction.atomic(): 258 | result = test_tasks.noop_task.enqueue() 259 | 260 | self.assertIsNone(result.enqueued_at) 261 | 262 | self.assertEqual(len(default_task_backend.results), 0) # type:ignore[attr-defined] 263 | 264 | self.assertEqual(len(default_task_backend.results), 1) # type:ignore[attr-defined] 265 | self.assertIsNone(result.enqueued_at) 266 | result.refresh() 267 | self.assertIsNotNone(result.enqueued_at) 268 | 269 | @override_settings( 270 | TASKS={ 271 | "default": { 272 | "BACKEND": "django_tasks.backends.dummy.DummyBackend", 273 | "ENQUEUE_ON_COMMIT": False, 274 | } 275 | } 276 | ) 277 | def test_task_specific_enqueue_on_commit(self) -> None: 278 | self.assertFalse(default_task_backend.enqueue_on_commit) 279 | 
self.assertTrue(test_tasks.enqueue_on_commit_task.enqueue_on_commit) 280 | self.assertTrue( 281 | default_task_backend._get_enqueue_on_commit_for_task( 282 | test_tasks.enqueue_on_commit_task 283 | ) 284 | ) 285 | 286 | with transaction.atomic(): 287 | result = test_tasks.enqueue_on_commit_task.enqueue() 288 | 289 | self.assertIsNone(result.enqueued_at) 290 | 291 | self.assertEqual(len(default_task_backend.results), 0) # type:ignore[attr-defined] 292 | 293 | self.assertEqual(len(default_task_backend.results), 1) # type:ignore[attr-defined] 294 | self.assertIsNone(result.enqueued_at) 295 | result.refresh() 296 | self.assertIsNotNone(result.enqueued_at) 297 | -------------------------------------------------------------------------------- /tests/tests/test_immediate_backend.py: -------------------------------------------------------------------------------- 1 | import json 2 | from typing import cast 3 | 4 | from django.db import transaction 5 | from django.test import SimpleTestCase, TransactionTestCase, override_settings 6 | from django.urls import reverse 7 | from django.utils import timezone 8 | 9 | from django_tasks import ResultStatus, Task, default_task_backend, tasks 10 | from django_tasks.backends.immediate import ImmediateBackend 11 | from django_tasks.exceptions import InvalidTaskError 12 | from tests import tasks as test_tasks 13 | 14 | 15 | @override_settings( 16 | TASKS={ 17 | "default": { 18 | "BACKEND": "django_tasks.backends.immediate.ImmediateBackend", 19 | "ENQUEUE_ON_COMMIT": False, 20 | } 21 | } 22 | ) 23 | class ImmediateBackendTestCase(SimpleTestCase): 24 | def test_using_correct_backend(self) -> None: 25 | self.assertEqual(default_task_backend, tasks["default"]) 26 | self.assertIsInstance(tasks["default"], ImmediateBackend) 27 | 28 | def test_enqueue_task(self) -> None: 29 | for task in [test_tasks.noop_task, test_tasks.noop_task_async]: 30 | with self.subTest(task): 31 | result = cast(Task, task).enqueue(1, two=3) 32 | 33 | 
self.assertEqual(result.status, ResultStatus.SUCCEEDED) 34 | self.assertTrue(result.is_finished) 35 | self.assertIsNotNone(result.started_at) 36 | self.assertIsNotNone(result.finished_at) 37 | self.assertGreaterEqual(result.started_at, result.enqueued_at) # type:ignore[arg-type, misc] 38 | self.assertGreaterEqual(result.finished_at, result.started_at) # type:ignore[arg-type, misc] 39 | self.assertIsNone(result.return_value) 40 | self.assertEqual(result.task, task) 41 | self.assertEqual(result.args, [1]) 42 | self.assertEqual(result.kwargs, {"two": 3}) 43 | 44 | async def test_enqueue_task_async(self) -> None: 45 | for task in [test_tasks.noop_task, test_tasks.noop_task_async]: 46 | with self.subTest(task): 47 | result = await cast(Task, task).aenqueue() 48 | 49 | self.assertEqual(result.status, ResultStatus.SUCCEEDED) 50 | self.assertTrue(result.is_finished) 51 | self.assertIsNotNone(result.started_at) 52 | self.assertIsNotNone(result.finished_at) 53 | self.assertGreaterEqual(result.started_at, result.enqueued_at) # type:ignore[arg-type, misc] 54 | self.assertGreaterEqual(result.finished_at, result.started_at) # type:ignore[arg-type, misc] 55 | self.assertIsNone(result.return_value) 56 | self.assertEqual(result.task, task) 57 | self.assertEqual(result.args, []) 58 | self.assertEqual(result.kwargs, {}) 59 | 60 | def test_catches_exception(self) -> None: 61 | test_data = [ 62 | ( 63 | test_tasks.failing_task_value_error, # task function 64 | ValueError, # expected exception 65 | "This task failed due to ValueError", # expected message 66 | ), 67 | ( 68 | test_tasks.failing_task_system_exit, 69 | SystemExit, 70 | "This task failed due to SystemExit", 71 | ), 72 | ] 73 | for task, exception, message in test_data: 74 | with ( 75 | self.subTest(task), 76 | self.assertLogs("django_tasks", level="ERROR") as captured_logs, 77 | ): 78 | result = task.enqueue() 79 | 80 | # assert logging 81 | self.assertEqual(len(captured_logs.output), 1) 82 | self.assertIn(message, 
captured_logs.output[0]) 83 | 84 | # assert result 85 | self.assertEqual(result.status, ResultStatus.FAILED) 86 | with self.assertRaisesMessage(ValueError, "Task failed"): 87 | result.return_value # noqa: B018 88 | self.assertTrue(result.is_finished) 89 | self.assertIsNotNone(result.started_at) 90 | self.assertIsNotNone(result.finished_at) 91 | self.assertGreaterEqual(result.started_at, result.enqueued_at) # type:ignore[arg-type, misc] 92 | self.assertGreaterEqual(result.finished_at, result.started_at) # type:ignore[arg-type, misc] 93 | self.assertEqual(result.exception_class, exception) 94 | self.assertTrue( 95 | result.traceback 96 | and result.traceback.endswith(f"{exception.__name__}: {message}\n") 97 | ) 98 | self.assertEqual(result.task, task) 99 | self.assertEqual(result.args, []) 100 | self.assertEqual(result.kwargs, {}) 101 | 102 | def test_throws_keyboard_interrupt(self) -> None: 103 | with self.assertRaises(KeyboardInterrupt): 104 | with self.assertLogs("django_tasks", level="ERROR") as captured_logs: 105 | default_task_backend.enqueue( 106 | test_tasks.failing_task_keyboard_interrupt, [], {} 107 | ) 108 | 109 | # assert logging 110 | self.assertEqual(len(captured_logs.output), 0) 111 | 112 | def test_complex_exception(self) -> None: 113 | with self.assertLogs("django_tasks", level="ERROR"): 114 | result = test_tasks.complex_exception.enqueue() 115 | 116 | self.assertEqual(result.status, ResultStatus.FAILED) 117 | self.assertIsNotNone(result.started_at) 118 | self.assertIsNotNone(result.finished_at) 119 | self.assertGreaterEqual(result.started_at, result.enqueued_at) # type:ignore[arg-type,misc] 120 | self.assertGreaterEqual(result.finished_at, result.started_at) # type:ignore[arg-type,misc] 121 | 122 | self.assertIsNone(result._return_value) 123 | self.assertEqual(result.exception_class, ValueError) 124 | self.assertIn('ValueError(ValueError("This task failed"))', result.traceback) # type: ignore[arg-type] 125 | 126 | self.assertEqual(result.task, 
test_tasks.complex_exception) 127 | self.assertEqual(result.args, []) 128 | self.assertEqual(result.kwargs, {}) 129 | 130 | def test_result(self) -> None: 131 | result = default_task_backend.enqueue( 132 | test_tasks.calculate_meaning_of_life, [], {} 133 | ) 134 | 135 | self.assertEqual(result.status, ResultStatus.SUCCEEDED) 136 | self.assertEqual(result.return_value, 42) 137 | 138 | async def test_result_async(self) -> None: 139 | result = await default_task_backend.aenqueue( 140 | test_tasks.calculate_meaning_of_life, [], {} 141 | ) 142 | 143 | self.assertEqual(result.status, ResultStatus.SUCCEEDED) 144 | self.assertEqual(result.return_value, 42) 145 | 146 | async def test_cannot_get_result(self) -> None: 147 | with self.assertRaisesMessage( 148 | NotImplementedError, 149 | "This backend does not support retrieving or refreshing results.", 150 | ): 151 | default_task_backend.get_result("123") 152 | 153 | with self.assertRaisesMessage( 154 | NotImplementedError, 155 | "This backend does not support retrieving or refreshing results.", 156 | ): 157 | await default_task_backend.aget_result(123) # type:ignore[arg-type] 158 | 159 | async def test_cannot_refresh_result(self) -> None: 160 | result = await default_task_backend.aenqueue( 161 | test_tasks.calculate_meaning_of_life, (), {} 162 | ) 163 | 164 | with self.assertRaisesMessage( 165 | NotImplementedError, 166 | "This backend does not support retrieving or refreshing results.", 167 | ): 168 | await result.arefresh() 169 | 170 | with self.assertRaisesMessage( 171 | NotImplementedError, 172 | "This backend does not support retrieving or refreshing results.", 173 | ): 174 | result.refresh() 175 | 176 | def test_cannot_pass_run_after(self) -> None: 177 | with self.assertRaisesMessage( 178 | InvalidTaskError, 179 | "Backend does not support run_after", 180 | ): 181 | default_task_backend.validate_task( 182 | test_tasks.failing_task_value_error.using(run_after=timezone.now()) 183 | ) 184 | 185 | def 
test_meaning_of_life_view(self) -> None: 186 | for url in [ 187 | reverse("meaning-of-life"), 188 | reverse("meaning-of-life-async"), 189 | ]: 190 | with self.subTest(url): 191 | response = self.client.get(url) 192 | self.assertEqual(response.status_code, 200) 193 | 194 | data = json.loads(response.content) 195 | 196 | self.assertEqual(data["result"], 42) 197 | self.assertEqual(data["status"], ResultStatus.SUCCEEDED) 198 | 199 | def test_get_result_from_different_request(self) -> None: 200 | response = self.client.get(reverse("meaning-of-life")) 201 | self.assertEqual(response.status_code, 200) 202 | 203 | data = json.loads(response.content) 204 | result_id = data["result_id"] 205 | 206 | with self.assertRaisesMessage( 207 | NotImplementedError, 208 | "This backend does not support retrieving or refreshing results.", 209 | ): 210 | response = self.client.get(reverse("result", args=[result_id])) 211 | 212 | def test_enqueue_on_commit(self) -> None: 213 | self.assertTrue( 214 | default_task_backend._get_enqueue_on_commit_for_task( 215 | test_tasks.enqueue_on_commit_task 216 | ) 217 | ) 218 | 219 | def test_enqueue_logs(self) -> None: 220 | with self.assertLogs("django_tasks", level="DEBUG") as captured_logs: 221 | result = test_tasks.noop_task.enqueue() 222 | 223 | self.assertEqual(len(captured_logs.output), 3) 224 | 225 | self.assertIn("enqueued", captured_logs.output[0]) 226 | self.assertIn(result.id, captured_logs.output[0]) 227 | 228 | self.assertIn("state=RUNNING", captured_logs.output[1]) 229 | self.assertIn(result.id, captured_logs.output[1]) 230 | 231 | self.assertIn("state=SUCCEEDED", captured_logs.output[2]) 232 | self.assertIn(result.id, captured_logs.output[2]) 233 | 234 | def test_failed_logs(self) -> None: 235 | with self.assertLogs("django_tasks", level="DEBUG") as captured_logs: 236 | result = test_tasks.failing_task_value_error.enqueue() 237 | 238 | self.assertEqual(len(captured_logs.output), 3) 239 | self.assertIn("state=RUNNING", 
captured_logs.output[1]) 240 | self.assertIn(result.id, captured_logs.output[1]) 241 | 242 | self.assertIn("state=FAILED", captured_logs.output[2]) 243 | self.assertIn(result.id, captured_logs.output[2]) 244 | 245 | def test_check(self) -> None: 246 | errors = list(default_task_backend.check()) 247 | 248 | self.assertEqual(len(errors), 0, errors) 249 | 250 | 251 | class ImmediateBackendTransactionTestCase(TransactionTestCase): 252 | @override_settings( 253 | TASKS={ 254 | "default": { 255 | "BACKEND": "django_tasks.backends.immediate.ImmediateBackend", 256 | "ENQUEUE_ON_COMMIT": True, 257 | } 258 | } 259 | ) 260 | def test_wait_until_transaction_commit(self) -> None: 261 | self.assertTrue(default_task_backend.enqueue_on_commit) 262 | self.assertTrue( 263 | default_task_backend._get_enqueue_on_commit_for_task(test_tasks.noop_task) 264 | ) 265 | 266 | with transaction.atomic(): 267 | result = test_tasks.noop_task.enqueue() 268 | 269 | self.assertIsNone(result.enqueued_at) 270 | self.assertEqual(result.status, ResultStatus.NEW) 271 | 272 | self.assertEqual(result.status, ResultStatus.SUCCEEDED) 273 | self.assertIsNotNone(result.enqueued_at) 274 | 275 | @override_settings( 276 | TASKS={ 277 | "default": { 278 | "BACKEND": "django_tasks.backends.immediate.ImmediateBackend", 279 | "ENQUEUE_ON_COMMIT": False, 280 | } 281 | } 282 | ) 283 | def test_doesnt_wait_until_transaction_commit(self) -> None: 284 | self.assertFalse(default_task_backend.enqueue_on_commit) 285 | self.assertFalse( 286 | default_task_backend._get_enqueue_on_commit_for_task(test_tasks.noop_task) 287 | ) 288 | 289 | with transaction.atomic(): 290 | result = test_tasks.noop_task.enqueue() 291 | 292 | self.assertIsNotNone(result.enqueued_at) 293 | 294 | self.assertEqual(result.status, ResultStatus.SUCCEEDED) 295 | 296 | self.assertEqual(result.status, ResultStatus.SUCCEEDED) 297 | 298 | @override_settings( 299 | TASKS={ 300 | "default": { 301 | "BACKEND": "django_tasks.backends.immediate.ImmediateBackend", 
302 | } 303 | } 304 | ) 305 | def test_wait_until_transaction_by_default(self) -> None: 306 | self.assertTrue(default_task_backend.enqueue_on_commit) 307 | self.assertTrue( 308 | default_task_backend._get_enqueue_on_commit_for_task(test_tasks.noop_task) 309 | ) 310 | 311 | with transaction.atomic(): 312 | result = test_tasks.noop_task.enqueue() 313 | 314 | self.assertIsNone(result.enqueued_at) 315 | self.assertEqual(result.status, ResultStatus.NEW) 316 | 317 | self.assertEqual(result.status, ResultStatus.SUCCEEDED) 318 | 319 | @override_settings( 320 | TASKS={ 321 | "default": { 322 | "BACKEND": "django_tasks.backends.immediate.ImmediateBackend", 323 | "ENQUEUE_ON_COMMIT": False, 324 | } 325 | } 326 | ) 327 | def test_task_specific_enqueue_on_commit(self) -> None: 328 | self.assertFalse(default_task_backend.enqueue_on_commit) 329 | self.assertTrue(test_tasks.enqueue_on_commit_task.enqueue_on_commit) 330 | self.assertTrue( 331 | default_task_backend._get_enqueue_on_commit_for_task( 332 | test_tasks.enqueue_on_commit_task 333 | ) 334 | ) 335 | 336 | with transaction.atomic(): 337 | result = test_tasks.enqueue_on_commit_task.enqueue() 338 | 339 | self.assertIsNone(result.enqueued_at) 340 | self.assertEqual(result.status, ResultStatus.NEW) 341 | 342 | self.assertEqual(result.status, ResultStatus.SUCCEEDED) 343 | -------------------------------------------------------------------------------- /tests/tests/test_rq_backend.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import uuid 4 | from typing import Union, cast 5 | from unittest.mock import patch 6 | 7 | import django_rq 8 | from asgiref.sync import async_to_sync 9 | from django.core.exceptions import SuspiciousOperation 10 | from django.db import transaction 11 | from django.test import TransactionTestCase, modify_settings, override_settings 12 | from django.urls import reverse 13 | from fakeredis import FakeRedis, FakeStrictRedis 14 | from 
rq.timeouts import TimerDeathPenalty 15 | 16 | from django_tasks import ResultStatus, Task, default_task_backend, tasks 17 | from django_tasks.backends.rq import Job, RQBackend 18 | from django_tasks.exceptions import ResultDoesNotExist 19 | from tests import tasks as test_tasks 20 | 21 | 22 | # RQ 23 | # Configuration to pretend there is a Redis service available. 24 | # Set up the connection before RQ Django reads the settings. 25 | # The connection must be the same because in fakeredis connections 26 | # do not share the state. Therefore, we define a singleton object to reuse it. 27 | def get_fake_connection( 28 | config: dict, strict: bool 29 | ) -> Union[FakeRedis, FakeStrictRedis]: 30 | redis_cls = FakeStrictRedis if strict else FakeRedis 31 | if "URL" in config: 32 | return redis_cls.from_url( 33 | config["URL"], 34 | db=config.get("DB"), 35 | ) 36 | return redis_cls( 37 | host=config["HOST"], 38 | port=config["PORT"], 39 | db=config.get("DB", 0), 40 | username=config.get("USERNAME", None), 41 | password=config.get("PASSWORD"), 42 | ) 43 | 44 | 45 | @override_settings( 46 | TASKS={ 47 | "default": { 48 | "BACKEND": "django_tasks.backends.rq.RQBackend", 49 | "QUEUES": ["default", "queue-1"], 50 | } 51 | }, 52 | RQ_QUEUES={ 53 | "default": { 54 | "HOST": "localhost", 55 | "PORT": 6379, 56 | }, 57 | "queue-1": { 58 | "HOST": "localhost", 59 | "PORT": 6379, 60 | }, 61 | }, 62 | ) 63 | @modify_settings(INSTALLED_APPS={"append": ["django_rq"]}) 64 | class DatabaseBackendTestCase(TransactionTestCase): 65 | def setUp(self) -> None: 66 | super().setUp() 67 | 68 | fake_connection_patcher = patch( 69 | "django_rq.queues.get_redis_connection", get_fake_connection 70 | ) 71 | fake_connection_patcher.start() 72 | self.addCleanup(fake_connection_patcher.stop) 73 | 74 | django_rq.get_connection().flushall() 75 | 76 | def run_worker(self) -> None: 77 | from rq import SimpleWorker 78 | 79 | for queue in default_task_backend._get_queues(): # type: ignore[attr-defined] 80 | 
worker = SimpleWorker([queue], prepare_for_work=False, job_class=Job) 81 | 82 | # Use timer death penalty to support Windows 83 | worker.death_penalty_class = TimerDeathPenalty # type: ignore[assignment] 84 | 85 | # HACK: Work around fakeredis not supporting `CLIENT LIST` 86 | worker.hostname = "example-hostname" 87 | worker.pid = os.getpid() 88 | 89 | with self.assertLogs("rq.worker"): 90 | worker.work(burst=True) 91 | 92 | def test_using_correct_backend(self) -> None: 93 | self.assertEqual(default_task_backend, tasks["default"]) 94 | self.assertIsInstance(tasks["default"], RQBackend) 95 | 96 | def test_enqueue_task(self) -> None: 97 | for task in [test_tasks.noop_task, test_tasks.noop_task_async]: 98 | with self.subTest(task): 99 | result = cast(Task, task).enqueue(1, two=3) 100 | 101 | self.assertEqual(result.status, ResultStatus.NEW) 102 | self.assertFalse(result.is_finished) 103 | self.assertIsNone(result.started_at) 104 | self.assertIsNone(result.finished_at) 105 | with self.assertRaisesMessage(ValueError, "Task has not finished yet"): 106 | result.return_value # noqa:B018 107 | self.assertEqual(result.task, task) 108 | self.assertEqual(result.args, [1]) 109 | self.assertEqual(result.kwargs, {"two": 3}) 110 | 111 | async def test_enqueue_task_async(self) -> None: 112 | for task in [test_tasks.noop_task, test_tasks.noop_task_async]: 113 | with self.subTest(task): 114 | result = await cast(Task, task).aenqueue() 115 | 116 | self.assertEqual(result.status, ResultStatus.NEW) 117 | self.assertFalse(result.is_finished) 118 | self.assertIsNone(result.started_at) 119 | self.assertIsNone(result.finished_at) 120 | with self.assertRaisesMessage(ValueError, "Task has not finished yet"): 121 | result.return_value # noqa:B018 122 | self.assertEqual(result.task, task) 123 | self.assertEqual(result.args, []) 124 | self.assertEqual(result.kwargs, {}) 125 | 126 | def test_catches_exception(self) -> None: 127 | test_data = [ 128 | ( 129 | test_tasks.failing_task_value_error, # 
task function 130 | ValueError, # expected exception 131 | "This task failed due to ValueError", # expected message 132 | ), 133 | ( 134 | test_tasks.failing_task_system_exit, 135 | SystemExit, 136 | "This task failed due to SystemExit", 137 | ), 138 | ] 139 | for task, exception, message in test_data: 140 | with ( 141 | self.subTest(task), 142 | ): 143 | result = task.enqueue() 144 | 145 | with self.assertLogs("django_tasks", "DEBUG"): 146 | self.run_worker() 147 | 148 | result.refresh() 149 | 150 | # assert result 151 | self.assertEqual(result.status, ResultStatus.FAILED) 152 | with self.assertRaisesMessage(ValueError, "Task failed"): 153 | result.return_value # noqa: B018 154 | self.assertTrue(result.is_finished) 155 | self.assertIsNotNone(result.started_at) 156 | self.assertIsNotNone(result.finished_at) 157 | self.assertGreaterEqual(result.started_at, result.enqueued_at) # type:ignore[arg-type, misc] 158 | self.assertGreaterEqual(result.finished_at, result.started_at) # type:ignore[arg-type, misc] 159 | self.assertEqual(result.exception_class, exception) 160 | self.assertTrue( 161 | result.traceback 162 | and result.traceback.endswith(f"{exception.__name__}: {message}\n") 163 | ) 164 | self.assertEqual(result.task, task) 165 | self.assertEqual(result.args, []) 166 | self.assertEqual(result.kwargs, {}) 167 | 168 | def test_complex_exception(self) -> None: 169 | result = test_tasks.complex_exception.enqueue() 170 | 171 | with self.assertLogs("django_tasks", "DEBUG"): 172 | self.run_worker() 173 | 174 | result.refresh() 175 | 176 | self.assertEqual(result.status, ResultStatus.FAILED) 177 | self.assertIsNotNone(result.started_at) 178 | self.assertIsNotNone(result.finished_at) 179 | self.assertGreaterEqual(result.started_at, result.enqueued_at) # type:ignore[arg-type,misc] 180 | self.assertGreaterEqual(result.finished_at, result.started_at) # type:ignore[arg-type,misc] 181 | 182 | self.assertIsNone(result._return_value) 183 | self.assertEqual(result.exception_class, 
ValueError) 184 | self.assertIn('ValueError(ValueError("This task failed"))', result.traceback) # type: ignore[arg-type] 185 | 186 | self.assertEqual(result.task, test_tasks.complex_exception) 187 | self.assertEqual(result.args, []) 188 | self.assertEqual(result.kwargs, {}) 189 | 190 | def test_get_result(self) -> None: 191 | result = default_task_backend.enqueue(test_tasks.noop_task, [], {}) 192 | 193 | new_result = default_task_backend.get_result(result.id) 194 | 195 | self.assertEqual(result, new_result) 196 | 197 | async def test_get_result_async(self) -> None: 198 | result = await default_task_backend.aenqueue(test_tasks.noop_task, [], {}) 199 | 200 | new_result = await default_task_backend.aget_result(result.id) 201 | 202 | self.assertEqual(result, new_result) 203 | 204 | def test_refresh_result(self) -> None: 205 | result = default_task_backend.enqueue( 206 | test_tasks.calculate_meaning_of_life, (), {} 207 | ) 208 | 209 | self.run_worker() 210 | 211 | self.assertEqual(result.status, ResultStatus.NEW) 212 | self.assertFalse(result.is_finished) 213 | self.assertIsNone(result.started_at) 214 | self.assertIsNone(result.finished_at) 215 | 216 | result.refresh() 217 | 218 | self.assertIsNotNone(result.started_at) 219 | self.assertIsNotNone(result.finished_at) 220 | self.assertEqual(result.status, ResultStatus.SUCCEEDED) 221 | self.assertTrue(result.is_finished) 222 | self.assertEqual(result.return_value, 42) 223 | 224 | def test_refresh_result_async(self) -> None: 225 | result = async_to_sync(default_task_backend.aenqueue)( 226 | test_tasks.calculate_meaning_of_life, (), {} 227 | ) 228 | 229 | self.run_worker() 230 | 231 | self.assertEqual(result.status, ResultStatus.NEW) 232 | self.assertFalse(result.is_finished) 233 | self.assertIsNone(result.started_at) 234 | self.assertIsNone(result.finished_at) 235 | 236 | async_to_sync(result.arefresh)() 237 | 238 | self.assertIsNotNone(result.started_at) 239 | self.assertIsNotNone(result.finished_at) 240 | 
self.assertEqual(result.status, ResultStatus.SUCCEEDED) 241 | self.assertTrue(result.is_finished) 242 | self.assertEqual(result.return_value, 42) 243 | 244 | def test_get_missing_result(self) -> None: 245 | with self.assertRaises(ResultDoesNotExist): 246 | default_task_backend.get_result(str(uuid.uuid4())) 247 | 248 | async def test_async_get_missing_result(self) -> None: 249 | with self.assertRaises(ResultDoesNotExist): 250 | await default_task_backend.aget_result(str(uuid.uuid4())) 251 | 252 | def test_invalid_uuid(self) -> None: 253 | with self.assertRaises(ResultDoesNotExist): 254 | default_task_backend.get_result("123") 255 | 256 | async def test_async_invalid_uuid(self) -> None: 257 | with self.assertRaises(ResultDoesNotExist): 258 | await default_task_backend.aget_result("123") 259 | 260 | def test_meaning_of_life_view(self) -> None: 261 | for url in [ 262 | reverse("meaning-of-life"), 263 | reverse("meaning-of-life-async"), 264 | ]: 265 | with self.subTest(url): 266 | response = self.client.get(url) 267 | self.assertEqual(response.status_code, 200) 268 | 269 | data = json.loads(response.content) 270 | 271 | self.assertEqual(data["result"], None) 272 | self.assertEqual(data["status"], ResultStatus.NEW) 273 | 274 | result = default_task_backend.get_result(data["result_id"]) 275 | self.assertEqual(result.status, ResultStatus.NEW) 276 | 277 | def test_get_result_from_different_request(self) -> None: 278 | response = self.client.get(reverse("meaning-of-life")) 279 | self.assertEqual(response.status_code, 200) 280 | 281 | data = json.loads(response.content) 282 | result_id = data["result_id"] 283 | 284 | response = self.client.get(reverse("result", args=[result_id])) 285 | self.assertEqual(response.status_code, 200) 286 | 287 | self.assertEqual( 288 | json.loads(response.content), 289 | {"result_id": result_id, "result": None, "status": ResultStatus.NEW}, 290 | ) 291 | 292 | def test_invalid_task_path(self) -> None: 293 | job = django_rq.get_queue("default", 
job_class=Job).enqueue_call( # type: ignore[no-untyped-call] 294 | "subprocess.check_output", args=["exit", "1"] 295 | ) 296 | 297 | with self.assertRaisesMessage( 298 | SuspiciousOperation, 299 | f"Task {job.id} does not point to a Task (subprocess.check_output)", 300 | ): 301 | default_task_backend.get_result(job.id) 302 | 303 | def test_check(self) -> None: 304 | errors = list(default_task_backend.check()) 305 | 306 | self.assertEqual(len(errors), 0, errors) 307 | 308 | @override_settings(INSTALLED_APPS=[]) 309 | def test_rq_app_missing(self) -> None: 310 | errors = list(default_task_backend.check()) 311 | 312 | self.assertEqual(len(errors), 1) 313 | self.assertIn("django_rq", errors[0].hint) # type:ignore[arg-type] 314 | 315 | @override_settings( 316 | TASKS={ 317 | "default": { 318 | "BACKEND": "django_tasks.backends.rq.RQBackend", 319 | "ENQUEUE_ON_COMMIT": True, 320 | } 321 | } 322 | ) 323 | def test_wait_until_transaction_commit(self) -> None: 324 | self.assertTrue(default_task_backend.enqueue_on_commit) 325 | self.assertTrue( 326 | default_task_backend._get_enqueue_on_commit_for_task(test_tasks.noop_task) 327 | ) 328 | 329 | queue = django_rq.get_queue("default", job_class=Job) 330 | 331 | with transaction.atomic(): 332 | result = test_tasks.noop_task.enqueue() 333 | 334 | self.assertIsNone(result.enqueued_at) 335 | 336 | self.assertEqual(queue.count, 0) 337 | self.assertEqual(queue.count, 1) 338 | 339 | result.refresh() 340 | self.assertIsNotNone(result.enqueued_at) 341 | 342 | @override_settings( 343 | TASKS={ 344 | "default": { 345 | "BACKEND": "django_tasks.backends.rq.RQBackend", 346 | "ENQUEUE_ON_COMMIT": False, 347 | } 348 | } 349 | ) 350 | def test_doesnt_wait_until_transaction_commit(self) -> None: 351 | self.assertFalse(default_task_backend.enqueue_on_commit) 352 | self.assertFalse( 353 | default_task_backend._get_enqueue_on_commit_for_task(test_tasks.noop_task) 354 | ) 355 | 356 | queue = django_rq.get_queue("default", job_class=Job) 357 | 358 | 
with transaction.atomic(): 359 | result = test_tasks.noop_task.enqueue() 360 | 361 | self.assertIsNotNone(result.enqueued_at) 362 | 363 | self.assertEqual(queue.count, 1) 364 | 365 | self.assertEqual(queue.count, 1) 366 | 367 | @override_settings( 368 | TASKS={ 369 | "default": { 370 | "BACKEND": "django_tasks.backends.rq.RQBackend", 371 | } 372 | } 373 | ) 374 | def test_wait_until_transaction_by_default(self) -> None: 375 | self.assertTrue(default_task_backend.enqueue_on_commit) 376 | self.assertTrue( 377 | default_task_backend._get_enqueue_on_commit_for_task(test_tasks.noop_task) 378 | ) 379 | 380 | @override_settings( 381 | TASKS={ 382 | "default": { 383 | "BACKEND": "django_tasks.backends.rq.RQBackend", 384 | "ENQUEUE_ON_COMMIT": False, 385 | } 386 | } 387 | ) 388 | def test_task_specific_enqueue_on_commit(self) -> None: 389 | self.assertFalse(default_task_backend.enqueue_on_commit) 390 | self.assertTrue(test_tasks.enqueue_on_commit_task.enqueue_on_commit) 391 | self.assertTrue( 392 | default_task_backend._get_enqueue_on_commit_for_task( 393 | test_tasks.enqueue_on_commit_task 394 | ) 395 | ) 396 | 397 | def test_enqueue_logs(self) -> None: 398 | with self.assertLogs("django_tasks", level="DEBUG") as captured_logs: 399 | result = test_tasks.noop_task.enqueue() 400 | 401 | self.assertEqual(len(captured_logs.output), 1) 402 | self.assertIn("enqueued", captured_logs.output[0]) 403 | self.assertIn(result.id, captured_logs.output[0]) 404 | 405 | def test_started_finished_logs(self) -> None: 406 | result = test_tasks.noop_task.enqueue() 407 | 408 | with self.assertLogs("django_tasks", level="DEBUG") as captured_logs: 409 | self.run_worker() 410 | 411 | self.assertEqual(len(captured_logs.output), 2) 412 | self.assertIn("state=RUNNING", captured_logs.output[0]) 413 | self.assertIn(result.id, captured_logs.output[0]) 414 | 415 | self.assertIn("state=SUCCEEDED", captured_logs.output[1]) 416 | self.assertIn(result.id, captured_logs.output[1]) 417 | 418 | def 
test_failed_logs(self) -> None: 419 | result = test_tasks.failing_task_value_error.enqueue() 420 | 421 | with self.assertLogs("django_tasks", level="DEBUG") as captured_logs: 422 | self.run_worker() 423 | 424 | self.assertEqual(len(captured_logs.output), 2) 425 | self.assertIn("state=RUNNING", captured_logs.output[0]) 426 | self.assertIn(result.id, captured_logs.output[0]) 427 | 428 | self.assertIn("state=FAILED", captured_logs.output[1]) 429 | self.assertIn(result.id, captured_logs.output[1]) 430 | 431 | def test_enqueue_priority(self) -> None: 432 | task_1 = test_tasks.noop_task.enqueue() 433 | task_2 = test_tasks.noop_task.using(priority=100).enqueue() 434 | 435 | queue = django_rq.get_queue("default") 436 | 437 | self.assertEqual(queue.job_ids, [task_2.id, task_1.id]) 438 | 439 | self.assertEqual(task_2.task.priority, 100) 440 | 441 | self.assertEqual(default_task_backend.get_result(task_2.id).task.priority, 0) 442 | 443 | def test_queue_isolation(self) -> None: 444 | default_task = test_tasks.noop_task.enqueue() 445 | other_task = test_tasks.noop_task.using(queue_name="queue-1").enqueue() 446 | 447 | default_task_backend.get_result(default_task.id) 448 | default_task_backend.get_result(other_task.id) 449 | 450 | self.assertEqual(django_rq.get_queue("default").job_ids, [default_task.id]) 451 | self.assertEqual(django_rq.get_queue("queue-1").job_ids, [other_task.id]) 452 | 453 | @override_settings( 454 | TASKS={ 455 | "default": {"BACKEND": "django_tasks.backends.rq.RQBackend", "QUEUES": []} 456 | } 457 | ) 458 | def test_uses_rq_queues_for_queue_names(self) -> None: 459 | self.assertEqual(default_task_backend.queues, {"default", "queue-1"}) 460 | 461 | @override_settings( 462 | TASKS={ 463 | "default": { 464 | "BACKEND": "django_tasks.backends.rq.RQBackend", 465 | "QUEUES": ["queue-2"], 466 | } 467 | } 468 | ) 469 | def test_unknown_queue_name(self) -> None: 470 | errors = list(default_task_backend.check()) 471 | 472 | self.assertEqual(len(errors), 1) 473 | 
@override_settings(
    TASKS={
        "default": {
            "BACKEND": "django_tasks.backends.dummy.DummyBackend",
            "QUEUES": ["default", "queue_1"],
            "ENQUEUE_ON_COMMIT": False,
        },
        "immediate": {"BACKEND": "django_tasks.backends.immediate.ImmediateBackend"},
        "missing": {"BACKEND": "does.not.exist"},
    }
)
class TaskTestCase(SimpleTestCase):
    """Behaviour of ``Task`` objects and the ``task`` decorator against the dummy backend."""

    def setUp(self) -> None:
        # The dummy backend accumulates results between tests; start clean.
        default_task_backend.clear()  # type:ignore[attr-defined]

    def test_using_correct_backend(self) -> None:
        self.assertEqual(default_task_backend, tasks["default"])
        self.assertIsInstance(tasks["default"], DummyBackend)

    def test_task_decorator(self) -> None:
        # Both decorator spellings (bare and called) must produce Task instances.
        self.assertIsInstance(test_tasks.noop_task, Task)
        self.assertIsInstance(test_tasks.noop_task_async, Task)
        self.assertIsInstance(test_tasks.noop_task_from_bare_decorator, Task)

    def test_enqueue_task(self) -> None:
        result = test_tasks.noop_task.enqueue()

        self.assertEqual(result.status, ResultStatus.NEW)
        self.assertEqual(result.task, test_tasks.noop_task)
        self.assertEqual(result.args, [])
        self.assertEqual(result.kwargs, {})

        self.assertEqual(default_task_backend.results, [result])  # type:ignore[attr-defined]

    async def test_enqueue_task_async(self) -> None:
        result = await test_tasks.noop_task.aenqueue()

        self.assertEqual(result.status, ResultStatus.NEW)
        self.assertEqual(result.task, test_tasks.noop_task)
        self.assertEqual(result.args, [])
        self.assertEqual(result.kwargs, {})

        self.assertEqual(default_task_backend.results, [result])  # type:ignore[attr-defined]

    def test_enqueue_with_invalid_argument(self) -> None:
        # Task arguments must survive JSON serialisation.
        with self.assertRaisesMessage(
            TypeError, "Object of type datetime is not JSON serializable"
        ):
            test_tasks.noop_task.enqueue(datetime.now())

    async def test_aenqueue_with_invalid_argument(self) -> None:
        with self.assertRaisesMessage(
            TypeError, "Object of type datetime is not JSON serializable"
        ):
            await test_tasks.noop_task.aenqueue(datetime.now())

    def test_using_priority(self) -> None:
        # .using() returns a modified copy; the original is untouched.
        self.assertEqual(test_tasks.noop_task.priority, 0)
        self.assertEqual(test_tasks.noop_task.using(priority=1).priority, 1)
        self.assertEqual(test_tasks.noop_task.priority, 0)

    def test_using_queue_name(self) -> None:
        self.assertEqual(test_tasks.noop_task.queue_name, DEFAULT_QUEUE_NAME)
        self.assertEqual(
            test_tasks.noop_task.using(queue_name="queue_1").queue_name, "queue_1"
        )
        self.assertEqual(test_tasks.noop_task.queue_name, DEFAULT_QUEUE_NAME)

    def test_using_run_after(self) -> None:
        when = timezone.now()

        self.assertIsNone(test_tasks.noop_task.run_after)
        self.assertEqual(test_tasks.noop_task.using(run_after=when).run_after, when)
        self.assertIsNone(test_tasks.noop_task.run_after)

    def test_using_unknown_backend(self) -> None:
        self.assertEqual(test_tasks.noop_task.backend, "default")

        with self.assertRaisesMessage(
            InvalidTaskBackendError, "The connection 'unknown' doesn't exist."
        ):
            test_tasks.noop_task.using(backend="unknown")

    def test_using_missing_backend(self) -> None:
        self.assertEqual(test_tasks.noop_task.backend, "default")

        with self.assertRaisesMessage(
            InvalidTaskBackendError,
            "Could not find backend 'does.not.exist': No module named 'does'",
        ):
            test_tasks.noop_task.using(backend="missing")

    def test_using_creates_new_instance(self) -> None:
        clone = test_tasks.noop_task.using()

        # Equal in value, distinct in identity.
        self.assertEqual(clone, test_tasks.noop_task)
        self.assertIsNot(clone, test_tasks.noop_task)

    def test_chained_using(self) -> None:
        """Chained .using() calls accumulate options without cross-mutation."""
        when = timezone.now()

        run_after_task = test_tasks.noop_task.using(run_after=when)
        self.assertEqual(run_after_task.run_after, when)

        priority_task = run_after_task.using(priority=10)
        self.assertEqual(priority_task.priority, 10)
        self.assertEqual(priority_task.run_after, when)

        self.assertEqual(run_after_task.priority, 0)

    async def test_refresh_result(self) -> None:
        # The dummy backend never progresses results, so refreshing is a no-op.
        result = await test_tasks.noop_task.aenqueue()

        snapshot = dataclasses.asdict(result)

        result.refresh()

        self.assertEqual(dataclasses.asdict(result), snapshot)

        await result.arefresh()

        self.assertEqual(dataclasses.asdict(result), snapshot)

    def test_naive_datetime(self) -> None:
        with self.assertRaisesMessage(
            InvalidTaskError, "run_after must be an aware datetime"
        ):
            test_tasks.noop_task.using(run_after=datetime.now())

    def test_invalid_priority(self) -> None:
        """Priorities outside [MIN_PRIORITY, MAX_PRIORITY] or non-integers are rejected."""
        for bad_priority in (-101, 101, 3.1):
            with self.assertRaisesMessage(
                InvalidTaskError,
                f"priority must be a whole number between {MIN_PRIORITY} and {MAX_PRIORITY}",
            ):
                test_tasks.noop_task.using(priority=bad_priority)  # type:ignore[arg-type]

        # Boundary and zero values are accepted.
        test_tasks.noop_task.using(priority=100)
        test_tasks.noop_task.using(priority=-100)
        test_tasks.noop_task.using(priority=0)

    def test_unknown_queue_name(self) -> None:
        with self.assertRaisesMessage(
            InvalidTaskError, "Queue 'queue-2' is not valid for backend"
        ):
            test_tasks.noop_task.using(queue_name="queue-2")

    def test_call_task(self) -> None:
        self.assertEqual(test_tasks.calculate_meaning_of_life.call(), 42)

    async def test_call_task_async(self) -> None:
        self.assertEqual(await test_tasks.calculate_meaning_of_life.acall(), 42)

    async def test_call_async_task(self) -> None:
        self.assertIsNone(await test_tasks.noop_task_async.acall())

    def test_call_async_task_sync(self) -> None:
        self.assertIsNone(test_tasks.noop_task_async.call())

    def test_get_result(self) -> None:
        result = default_task_backend.enqueue(test_tasks.noop_task, (), {})

        fetched = test_tasks.noop_task.get_result(result.id)

        self.assertEqual(result, fetched)

    async def test_get_result_async(self) -> None:
        result = await default_task_backend.aenqueue(test_tasks.noop_task, (), {})

        fetched = await test_tasks.noop_task.aget_result(result.id)

        self.assertEqual(result, fetched)

    async def test_get_missing_result(self) -> None:
        with self.assertRaises(ResultDoesNotExist):
            test_tasks.noop_task.get_result("123")

        with self.assertRaises(ResultDoesNotExist):
            await test_tasks.noop_task.aget_result("123")

    def test_get_incorrect_result(self) -> None:
        # Fetching another task's result id through this task must not succeed.
        result = default_task_backend.enqueue(test_tasks.noop_task_async, (), {})
        with self.assertRaises(ResultDoesNotExist):
            test_tasks.noop_task.get_result(result.id)

    async def test_get_incorrect_result_async(self) -> None:
        result = await default_task_backend.aenqueue(test_tasks.noop_task_async, (), {})
        with self.assertRaises(ResultDoesNotExist):
            await test_tasks.noop_task.aget_result(result.id)

    def test_invalid_function(self) -> None:
        # Builtins and bound methods are not module-level functions.
        for invalid_function in [any, self.test_invalid_function]:
            with self.subTest(invalid_function):
                with self.assertRaisesMessage(
                    InvalidTaskError,
                    "Task function must be defined at a module level",
                ):
                    task()(invalid_function)  # type:ignore[arg-type]

    def test_get_backend(self) -> None:
        self.assertEqual(test_tasks.noop_task.backend, "default")
        self.assertIsInstance(test_tasks.noop_task.get_backend(), DummyBackend)

        immediate_task = test_tasks.noop_task.using(backend="immediate")
        self.assertEqual(immediate_task.backend, "immediate")
        self.assertIsInstance(immediate_task.get_backend(), ImmediateBackend)

    def test_name(self) -> None:
        self.assertEqual(test_tasks.noop_task.name, "noop_task")
        self.assertEqual(test_tasks.noop_task_async.name, "noop_task_async")

    def test_module_path(self) -> None:
        """module_path round-trips through import_string back to the Task object."""
        self.assertEqual(test_tasks.noop_task.module_path, "tests.tasks.noop_task")
        self.assertEqual(
            test_tasks.noop_task_async.module_path, "tests.tasks.noop_task_async"
        )

        self.assertIs(
            import_string(test_tasks.noop_task.module_path), test_tasks.noop_task
        )
        self.assertIs(
            import_string(test_tasks.noop_task_async.module_path),
            test_tasks.noop_task_async,
        )

    @override_settings(TASKS={})
    def test_no_backends(self) -> None:
        with self.assertRaises(InvalidTaskBackendError):
            test_tasks.noop_task.enqueue()
class JSONNormalizeTestCase(SimpleTestCase):
    """json_normalize must round-trip values into JSON-native shapes."""

    def test_round_trip(self) -> None:
        # Tuples come back as lists, matching JSON's single sequence type.
        self.assertEqual(utils.json_normalize({}), {})
        self.assertEqual(utils.json_normalize([]), [])
        self.assertEqual(utils.json_normalize(()), [])
        self.assertEqual(utils.json_normalize({"foo": ()}), {"foo": []})

    def test_encode_error(self) -> None:
        # Objects with no JSON representation raise rather than being coerced.
        for example in [self, any, datetime.datetime.now()]:
            with self.subTest(example):
                self.assertRaises(TypeError, utils.json_normalize, example)


class RetryTestCase(SimpleTestCase):
    """Behaviour of the retry() decorator."""

    def test_retry(self) -> None:
        # A persistently failing callable is attempted 3 times before re-raising.
        failing_call = Mock(side_effect=ValueError(""))

        with self.assertRaises(ValueError):
            utils.retry()(failing_call)()

        self.assertEqual(failing_call.call_count, 3)

    def test_keeps_return_value(self) -> None:
        self.assertTrue(utils.retry()(lambda: True)())
        self.assertFalse(utils.retry()(lambda: False)())

    def test_skip_retry_on_keyboard_interrupt(self) -> None:
        # KeyboardInterrupt must propagate immediately, without retries.
        interrupted_call = Mock(side_effect=KeyboardInterrupt(""))

        with self.assertRaises(KeyboardInterrupt):
            utils.retry()(interrupted_call)()

        self.assertEqual(interrupted_call.call_count, 1)


class RandomIdTestCase(SimpleTestCase):
    """get_random_id produces fixed-length, collision-resistant ids."""

    def test_correct_length(self) -> None:
        self.assertEqual(len(utils.get_random_id()), 32)

    def test_random_ish(self) -> None:
        # 1000 draws with no duplicates is strong evidence of randomness.
        random_ids = [utils.get_random_id() for _ in range(1000)]

        self.assertEqual(len(random_ids), len(set(random_ids)))
from django.contrib import admin
from django.urls import path

from . import views

# URL routes used by the test project.
urlpatterns = [
    path("meaning-of-life/", views.calculate_meaning_of_life, name="meaning-of-life"),
    # The result view's signature is get_task_result(request, result_id) and the
    # tests call reverse("result", args=[result_id]); the route therefore needs a
    # path converter capturing the id (it was missing here).
    path("result/<str:result_id>/", views.get_task_result, name="result"),
    path(
        "meaning-of-life-async/",
        views.calculate_meaning_of_life_async,
        name="meaning-of-life-async",
    ),
    path("admin/", admin.site.urls),
]
import tasks 10 | 11 | 12 | def get_result_value(result: TaskResult) -> Any: 13 | if result.status == ResultStatus.SUCCEEDED: 14 | return result.return_value 15 | elif result.status == ResultStatus.FAILED: 16 | return result.traceback 17 | 18 | return None 19 | 20 | 21 | def calculate_meaning_of_life(request: HttpRequest) -> HttpResponse: 22 | result = tasks.calculate_meaning_of_life.enqueue() 23 | 24 | return JsonResponse( 25 | { 26 | "result_id": result.id, 27 | "result": get_result_value(result), 28 | "status": result.status, 29 | } 30 | ) 31 | 32 | 33 | async def calculate_meaning_of_life_async(request: HttpRequest) -> HttpResponse: 34 | result = await tasks.calculate_meaning_of_life.aenqueue() 35 | 36 | return JsonResponse( 37 | { 38 | "result_id": result.id, 39 | "result": get_result_value(result), 40 | "status": result.status, 41 | } 42 | ) 43 | 44 | 45 | async def get_task_result(request: HttpRequest, result_id: str) -> HttpResponse: 46 | try: 47 | result = await default_task_backend.aget_result(result_id) 48 | except ResultDoesNotExist: 49 | raise Http404 from None 50 | 51 | return JsonResponse( 52 | { 53 | "result_id": result.id, 54 | "result": get_result_value(result), 55 | "status": result.status, 56 | } 57 | ) 58 | --------------------------------------------------------------------------------