├── .github ├── CONTRIBUTING.md ├── FUNDING.yml ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md ├── actions │ └── test-coverage │ │ └── action.yml ├── dependabot.yml ├── release-drafter.yml ├── workflows │ ├── publish-documentation.yml │ ├── publish.yml │ └── test.yml └── zizmor.yml ├── .gitignore ├── .readthedocs.yaml ├── CODE_OF_CONDUCT.md ├── LICENSE ├── README.md ├── SECURITY.md ├── docs ├── changelog.md ├── commands.md ├── configuration.md ├── drt-model.md ├── index.md ├── installation.md ├── media │ ├── add-args.jpg │ ├── add-scheduled-task.jpg │ ├── admin-job-details.jpg │ ├── admin-queue-registry.jpg │ ├── admin-queues-list.jpg │ ├── admin-task-details.jpg │ ├── admin-tasks-list.jpg │ ├── admin-worker-details.jpg │ └── admin-workers-list.jpg ├── migrate_to_v3.md ├── requirements.txt └── usage.md ├── mkdocs.yml ├── pyproject.toml ├── scheduler ├── __init__.py ├── admin │ ├── __init__.py │ ├── ephemeral_models.py │ └── task_admin.py ├── apps.py ├── decorators.py ├── helpers │ ├── __init__.py │ ├── callback.py │ ├── queues │ │ ├── __init__.py │ │ ├── getters.py │ │ └── queue_logic.py │ └── utils.py ├── management │ ├── __init__.py │ └── commands │ │ ├── __init__.py │ │ ├── delete_failed_executions.py │ │ ├── export.py │ │ ├── import.py │ │ ├── run_job.py │ │ ├── scheduler_stats.py │ │ └── scheduler_worker.py ├── migrations │ ├── 0001_initial_squashed_0005_added_result_ttl.py │ ├── 0002_alter_cronjob_id_alter_repeatablejob_id_and_more.py │ ├── 0003_auto_20220329_2107.py │ ├── 0004_cronjob_at_front_repeatablejob_at_front_and_more.py │ ├── 0005_alter_cronjob_at_front_alter_repeatablejob_at_front_and_more.py │ ├── 0006_auto_20230118_1640.py │ ├── 0007_add_result_ttl.py │ ├── 0008_rename_str_val_jobarg_val_and_more.py │ ├── 0009_alter_jobarg_arg_type_alter_jobarg_val_and_more.py │ ├── 0010_queue.py │ ├── 0011_worker_alter_queue_options_alter_cronjob_at_front_and_more.py │ ├── 0012_alter_cronjob_name_alter_repeatablejob_name_and_more.py │ ├── 
0013_alter_cronjob_queue_alter_repeatablejob_queue_and_more.py │ ├── 0014_alter_cronjob_created_alter_cronjob_modified_and_more.py │ ├── 0015_rename_cronjob_crontask_and_more.py │ ├── 0016_rename_jobarg_taskarg_rename_jobkwarg_taskkwarg_and_more.py │ ├── 0017_remove_crontask_repeat_crontask_failed_runs_and_more.py │ ├── 0018_alter_crontask_queue_alter_repeatabletask_queue_and_more.py │ ├── 0019_task_crontask_new_task_id_repeatabletask_new_task_id_and_more.py │ ├── 0020_remove_repeatabletask_new_task_id_and_more.py │ ├── 0021_remove_task_job_id_task_job_name.py │ └── __init__.py ├── models │ ├── __init__.py │ ├── args.py │ ├── ephemeral_models.py │ └── task.py ├── py.typed ├── redis_models │ ├── __init__.py │ ├── base.py │ ├── job.py │ ├── lock.py │ ├── registry │ │ ├── __init__.py │ │ ├── base_registry.py │ │ └── queue_registries.py │ ├── result.py │ └── worker.py ├── settings.py ├── static │ └── admin │ │ └── js │ │ └── select-fields.js ├── templates │ └── admin │ │ └── scheduler │ │ ├── change_form.html │ │ ├── change_list.html │ │ ├── confirm_action.html │ │ ├── job_detail.html │ │ ├── jobs-list-with-tasks.partial.html │ │ ├── jobs-list.partial.html │ │ ├── jobs.html │ │ ├── queue_workers.html │ │ ├── scheduler_base.html │ │ ├── single_job_action.html │ │ ├── stats.html │ │ ├── worker_details.html │ │ ├── workers-list.partial.html │ │ └── workers_list.html ├── templatetags │ ├── __init__.py │ └── scheduler_tags.py ├── tests │ ├── __init__.py │ ├── conf.py │ ├── jobs.py │ ├── test_internals.py │ ├── test_job_arg_models.py │ ├── test_job_decorator.py │ ├── test_mgmt_commands │ │ ├── __init__.py │ │ ├── test_delete_failed_executions.py │ │ ├── test_export.py │ │ ├── test_import.py │ │ ├── test_run_job.py │ │ ├── test_scheduler_stats.py │ │ └── test_scheduler_worker.py │ ├── test_multiprocess │ │ ├── __init__.py │ │ └── test_integrity.py │ ├── test_redis_models.py │ ├── test_settings.py │ ├── test_task_types │ │ ├── __init__.py │ │ ├── test_cron_task.py │ │ ├── 
test_once_task.py │ │ ├── test_repeatable_task.py │ │ └── test_task_model.py │ ├── test_views │ │ ├── __init__.py │ │ ├── base.py │ │ ├── test_job_detail_action.py │ │ ├── test_job_details.py │ │ ├── test_queue_actions.py │ │ ├── test_queue_registry_jobs.py │ │ └── test_workers_view.py │ ├── test_worker │ │ ├── __init__.py │ │ ├── test_scheduler.py │ │ ├── test_worker_commands.py │ │ ├── test_worker_commands_multiprocess.py │ │ └── test_worker_creation.py │ └── testtools.py ├── timeouts.py ├── types │ ├── __init__.py │ ├── broker_types.py │ └── settings_types.py ├── urls.py ├── views │ ├── __init__.py │ ├── helpers.py │ ├── job_views.py │ ├── queue_job_actions.py │ ├── queue_registry_actions.py │ ├── queue_views.py │ └── worker_views.py └── worker │ ├── __init__.py │ ├── commands │ ├── __init__.py │ ├── kill_worker.py │ ├── shutdown.py │ ├── stop_job.py │ ├── suspend_worker.py │ └── worker_commands.py │ ├── scheduler.py │ └── worker.py ├── testproject ├── manage.py └── testproject │ ├── __init__.py │ ├── settings.py │ ├── urls.py │ ├── views.py │ └── wsgi.py └── uv.lock /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: cunla 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 
25 | 26 | **Desktop (please complete the following information):** 27 | - OS: [e.g. iOS] 28 | - python version 29 | - django version 30 | - requirements.txt? 31 | 32 | **Additional context** 33 | Add any other context about the problem here. 34 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 
21 | -------------------------------------------------------------------------------- /.github/actions/test-coverage/action.yml: -------------------------------------------------------------------------------- 1 | name: Run Tests with coverage 2 | description: 'Run tests with coverage and publish results to PR' 3 | inputs: 4 | pythonVer: 5 | description: 'python version' 6 | required: true 7 | djangoVer: 8 | description: 'django version' 9 | required: true 10 | repoToken: 11 | description: 'Token for PR comment' 12 | required: true 13 | outputs: 14 | coverage: 15 | description: "Coverage" 16 | value: ${{ steps.json-report.outputs.coverage }} 17 | runs: 18 | using: "composite" 19 | steps: 20 | - name: Run regular tests with coverage 21 | shell: bash 22 | run: | 23 | cd testproject 24 | uv run coverage run manage.py test --exclude-tag multiprocess scheduler 25 | - name: Coverage report 26 | id: coverage_report 27 | shell: bash 28 | run: | 29 | mv testproject/.coverage . 30 | echo 'REPORT<> $GITHUB_ENV 31 | uv run coverage report >> $GITHUB_ENV 32 | echo 'EOF' >> $GITHUB_ENV 33 | - name: json report 34 | id: json-report 35 | shell: bash 36 | run: | 37 | uv run coverage json 38 | echo "COVERAGE=$(jq '.totals.percent_covered_display|tonumber' coverage.json)" >> $GITHUB_ENV 39 | - uses: mshick/add-pr-comment@dd126dd8c253650d181ad9538d8b4fa218fc31e8 40 | if: ${{ github.event_name == 'pull_request' }} 41 | with: 42 | message: | 43 | Coverage report python v${{ inputs.pythonVer }} django v${{ inputs.djangoVer }} 44 | ``` 45 | ${{ env.REPORT }} 46 | ``` 47 | repo-token: ${{ inputs.repoToken }} 48 | allow-repeats: true 49 | update-only: true 50 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package 
manifests are located. 3 | # Please see the documentation for all configuration options: 4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "pip" 9 | directory: "/" 10 | schedule: 11 | interval: "daily" 12 | -------------------------------------------------------------------------------- /.github/release-drafter.yml: -------------------------------------------------------------------------------- 1 | name-template: 'v$RESOLVED_VERSION 🌈' 2 | tag-template: 'v$RESOLVED_VERSION' 3 | categories: 4 | - title: '🚀 Features' 5 | labels: 6 | - 'feature' 7 | - 'enhancement' 8 | - title: '🐛 Bug Fixes' 9 | labels: 10 | - 'fix' 11 | - 'bugfix' 12 | - 'bug' 13 | - title: '🧰 Maintenance' 14 | label: 'chore' 15 | change-template: '- $TITLE @$AUTHOR (#$NUMBER)' 16 | change-title-escapes: '\<*_&' # You can add # and @ to disable mentions, and add ` to disable code blocks. 17 | version-resolver: 18 | major: 19 | labels: 20 | - 'major' 21 | minor: 22 | labels: 23 | - 'minor' 24 | patch: 25 | labels: 26 | - 'patch' 27 | default: patch 28 | template: | 29 | ## Changes 30 | 31 | $CHANGES -------------------------------------------------------------------------------- /.github/workflows/publish-documentation.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | name: Generate and publish documentation 4 | 5 | on: 6 | release: 7 | types: [published] 8 | workflow_dispatch: 9 | 10 | jobs: 11 | publish_documentation: 12 | runs-on: ubuntu-latest 13 | permissions: 14 | contents: write 15 | environment: 16 | name: pypi 17 | url: https://pypi.org/p/fakeredis 18 | steps: 19 | - uses: actions/checkout@v4 20 | with: 21 | persist-credentials: false 22 | - name: Set up Python 23 | uses: actions/setup-python@v5 24 | with: 25 | python-version: "3.13" 26 | - name: Configure Git Credentials 27 | run: | 28 | git config user.name 
github-actions[bot] 29 | git config user.email 41898282+github-actions[bot]@users.noreply.github.com 30 | - name: Publish documentation 31 | env: 32 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 33 | GOOGLE_ANALYTICS_KEY: ${{ secrets.GOOGLE_ANALYTICS_KEY }} 34 | run: | 35 | pip install -r docs/requirements.txt 36 | mkdocs gh-deploy --force 37 | mkdocs --version 38 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | release: 5 | types: [published] 6 | 7 | env: 8 | # Change these for your project's URLs 9 | PYPI_URL: https://pypi.org/p/django-tasks-scheduler 10 | PYPI_TEST_URL: https://test.pypi.org/p/django-tasks-scheduler 11 | 12 | jobs: 13 | build: 14 | name: Build distribution 📦 15 | runs-on: ubuntu-latest 16 | permissions: 17 | id-token: write # IMPORTANT: this permission is mandatory for trusted publishing 18 | steps: 19 | - uses: actions/checkout@v4 20 | with: 21 | persist-credentials: false 22 | - name: Set up Python 23 | uses: actions/setup-python@v5 24 | with: 25 | python-version: "3.13" 26 | - name: Install pypa/build 27 | run: 28 | python3 -m pip install build --user 29 | - name: Build a binary wheel and a source tarball 30 | run: python3 -m build 31 | - name: Store the distribution packages 32 | uses: actions/upload-artifact@v4 33 | with: 34 | name: python-package-distributions 35 | path: dist/ 36 | 37 | publish-to-pypi: 38 | name: >- 39 | Publish Python 🐍 distribution 📦 to PyPI 40 | if: startsWith(github.ref, 'refs/tags/') # only publish to PyPI on tag pushes 41 | needs: 42 | - build 43 | runs-on: ubuntu-latest 44 | environment: 45 | name: pypi 46 | url: ${{ env.PYPI_URL }} 47 | permissions: 48 | id-token: write # IMPORTANT: mandatory for trusted publishing 49 | steps: 50 | - name: Download all the dists 51 | uses: actions/download-artifact@v4 52 | with: 53 | name: 
python-package-distributions 54 | path: dist/ 55 | - name: Publish distribution 📦 to PyPI 56 | uses: pypa/gh-action-pypi-publish@v1.12.4 57 | 58 | publish-to-testpypi: 59 | name: Publish Python 🐍 distribution 📦 to TestPyPI 60 | needs: 61 | - build 62 | runs-on: ubuntu-latest 63 | 64 | environment: 65 | name: testpypi 66 | url: ${{ env.PYPI_TEST_URL }} 67 | 68 | permissions: 69 | id-token: write # IMPORTANT: mandatory for trusted publishing 70 | 71 | steps: 72 | - name: Download all the dists 73 | uses: actions/download-artifact@v4 74 | with: 75 | name: python-package-distributions 76 | path: dist/ 77 | - name: Publish distribution 📦 to TestPyPI 78 | uses: pypa/gh-action-pypi-publish@v1.12.4 79 | with: 80 | repository-url: https://test.pypi.org/legacy/ 81 | skip-existing: true -------------------------------------------------------------------------------- /.github/zizmor.yml: -------------------------------------------------------------------------------- 1 | rules: 2 | unpinned-images: 3 | ignore: 4 | - 'test.yml' 5 | - 'test-dragonfly.yml' 6 | unpinned-uses: 7 | config: 8 | policies: 9 | actions/*: any 10 | astral-sh/*: any 11 | pypa/gh-action-pypi-publish: any 12 | github-env: 13 | ignore: 14 | - 'action.yml:36:7' 15 | - 'action.yml:28:7' -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__/ 2 | *.py[cod] 3 | *$py.class 4 | tags 5 | *.so 6 | 7 | .Python 8 | .venv/ 9 | docker-compose.yml 10 | env/ 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | *.egg-info/ 23 | .installed.cfg 24 | *.egg 25 | *.manifest 26 | *.spec 27 | 28 | pip-log.txt 29 | pip-delete-this-directory.txt 30 | htmlcov/ 31 | .tox/ 32 | .coverage 33 | .coverage.* 34 | .cache 35 | nosetests.xml 36 | coverage.xml 37 | *,cover 38 | .hypothesis/ 39 | *.mo 40 | *.pot 41 | *.log 
42 | docs/_build/ 43 | target/ 44 | 45 | .ipynb_checkpoints 46 | .idea 47 | *.sqlite3 48 | .DS_Store 49 | *.iml 50 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | build: 3 | os: "ubuntu-20.04" 4 | tools: 5 | python: "3.12" 6 | 7 | mkdocs: 8 | configuration: mkdocs.yml 9 | fail_on_warning: false 10 | 11 | python: 12 | install: 13 | - requirements: docs/requirements.txt 14 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022- Daniel Moran, (Before 2016 - iStrategyLabs, LLC) 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Django Tasks Scheduler 2 | =================== 3 | [![Django CI](https://github.com/django-commons/django-tasks-scheduler/actions/workflows/test.yml/badge.svg)](https://github.com/django-commons/django-tasks-scheduler/actions/workflows/test.yml) 4 | ![badge](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/cunla/b756396efb895f0e34558c980f1ca0c7/raw/django-tasks-scheduler-4.json) 5 | [![badge](https://img.shields.io/pypi/dm/django-tasks-scheduler)](https://pypi.org/project/django-tasks-scheduler/) 6 | 7 | Documentation can be found in https://django-tasks-scheduler.readthedocs.io/ 8 | 9 | # Usage 10 | 11 | 1. Update `settings.py` to include scheduler configuration: 12 | 13 | ```python 14 | import os 15 | from typing import Dict 16 | from scheduler.types import SchedulerConfiguration, Broker, QueueConfiguration 17 | 18 | INSTALLED_APPS = [ 19 | # ... 20 | 'scheduler', 21 | # ... 22 | ] 23 | SCHEDULER_CONFIG = SchedulerConfiguration( 24 | EXECUTIONS_IN_PAGE=20, 25 | SCHEDULER_INTERVAL=10, 26 | BROKER=Broker.REDIS, 27 | CALLBACK_TIMEOUT=60, # Callback timeout in seconds (success/failure/stopped) 28 | # Default values, can be overriden per task/job 29 | DEFAULT_SUCCESS_TTL=10 * 60, # Time To Live (TTL) in seconds to keep successful job results 30 | DEFAULT_FAILURE_TTL=365 * 24 * 60 * 60, # Time To Live (TTL) in seconds to keep job failure information 31 | DEFAULT_JOB_TTL=10 * 60, # Time To Live (TTL) in seconds to keep job information 32 | DEFAULT_JOB_TIMEOUT=5 * 60, # timeout (seconds) for a job 33 | # General configuration values 34 | DEFAULT_WORKER_TTL=10 * 60, # Time To Live (TTL) in seconds to keep worker information after last heartbeat 35 | DEFAULT_MAINTENANCE_TASK_INTERVAL=10 * 60, # The interval to run maintenance tasks in seconds. 10 minutes. 
36 | DEFAULT_JOB_MONITORING_INTERVAL=30, # The interval to monitor jobs in seconds. 37 | SCHEDULER_FALLBACK_PERIOD_SECS=120, # Period (secs) to wait before requiring to reacquire locks 38 | ) 39 | SCHEDULER_QUEUES: Dict[str, QueueConfiguration] = { 40 | 'default': QueueConfiguration(URL='redis://localhost:6379/0'), 41 | } 42 | ``` 43 | 44 | 2. Update `urls.py` to include scheduler urls: 45 | 46 | ```python 47 | from django.urls import path, include 48 | 49 | urlpatterns = [ 50 | # ... 51 | path('scheduler/', include('scheduler.urls')), 52 | ] 53 | ``` 54 | 55 | 3. Run migrations: 56 | 57 | ```bash 58 | python manage.py migrate 59 | ``` 60 | 61 | 4. Check out the admin views: 62 | ![](./docs/media/admin-tasks-list.jpg) 63 | 64 | # Sponsor 65 | 66 | django-tasks-scheduler is developed for free. 67 | 68 | You can support this project by becoming a sponsor using [this link](https://github.com/sponsors/cunla). 69 | 70 | # Contributing 71 | 72 | Interested in contributing, providing suggestions, or submitting bugs? See 73 | guidelines [at this link](.github/CONTRIBUTING.md). 74 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | | Version | Supported | 6 | |----------|--------------------| 7 | | 4.latest | :white_check_mark: | 8 | 9 | ## Reporting a Vulnerability 10 | 11 | To report a security vulnerability, please use the 12 | [Tidelift security contact](https://tidelift.com/security). 13 | Tidelift will coordinate the fix and disclosure. 
-------------------------------------------------------------------------------- /docs/configuration.md: -------------------------------------------------------------------------------- 1 | # Configure your django-tasks-scheduler 2 | 3 | ## settings.py 4 | 5 | All default settings for scheduler can be in one dictionary in `settings.py`: 6 | 7 | ```python 8 | import os 9 | from typing import Dict 10 | from scheduler.types import SchedulerConfiguration, Broker, QueueConfiguration 11 | 12 | SCHEDULER_CONFIG = SchedulerConfiguration( 13 | EXECUTIONS_IN_PAGE=20, 14 | SCHEDULER_INTERVAL=10, 15 | BROKER=Broker.REDIS, 16 | CALLBACK_TIMEOUT=60, # Callback timeout in seconds (success/failure/stopped) 17 | # Default values, can be overriden per task/job 18 | DEFAULT_SUCCESS_TTL=10 * 60, # Time To Live (TTL) in seconds to keep successful job results 19 | DEFAULT_FAILURE_TTL=365 * 24 * 60 * 60, # Time To Live (TTL) in seconds to keep job failure information 20 | DEFAULT_JOB_TTL=10 * 60, # Time To Live (TTL) in seconds to keep job information 21 | DEFAULT_JOB_TIMEOUT=5 * 60, # timeout (seconds) for a job 22 | # General configuration values 23 | DEFAULT_WORKER_TTL=10 * 60, # Time To Live (TTL) in seconds to keep worker information after last heartbeat 24 | DEFAULT_MAINTENANCE_TASK_INTERVAL=10 * 60, # The interval to run maintenance tasks in seconds. 10 minutes. 25 | DEFAULT_JOB_MONITORING_INTERVAL=30, # The interval to monitor jobs in seconds. 
26 | SCHEDULER_FALLBACK_PERIOD_SECS=120, # Period (secs) to wait before requiring to reacquire locks 27 | ) 28 | SCHEDULER_QUEUES: Dict[str, QueueConfiguration] = { 29 | 'default': QueueConfiguration( 30 | HOST='localhost', 31 | PORT=6379, 32 | USERNAME='some-user', 33 | PASSWORD='some-password', 34 | CONNECTION_KWARGS={ # Eventual additional Broker connection arguments 35 | 'ssl_cert_reqs': 'required', 36 | 'ssl': True, 37 | }, 38 | ), 39 | 'high': QueueConfiguration(URL=os.getenv('REDISTOGO_URL', 'redis://localhost:6379/0')), 40 | 'low': QueueConfiguration(HOST='localhost', PORT=6379, DB=0, ASYNC=False), 41 | } 42 | ``` 43 | 44 | ### SCHEDULER_CONFIG: `EXECUTIONS_IN_PAGE` 45 | 46 | Number of job executions to show in a page in a ScheduledJob admin view. 47 | 48 | Default: `20`. 49 | 50 | ### SCHEDULER_CONFIG: `SCHEDULER_INTERVAL` 51 | 52 | Default scheduler interval, a scheduler is a subprocess of a worker and 53 | will check which job executions are pending. 54 | 55 | Default: `10` (10 seconds). 56 | 57 | ### SCHEDULER_CONFIG: `BROKER` 58 | 59 | ### SCHEDULER_CONFIG: `CALLBACK_TIMEOUT` 60 | 61 | ### SCHEDULER_CONFIG: `DEFAULT_SUCCESS_TTL` 62 | 63 | Default time to live for job execution result when it is successful. 64 | 65 | Default: `600` (10 minutes). 66 | 67 | ### SCHEDULER_CONFIG: `DEFAULT_FAILURE_TTL` 68 | 69 | Default time to live for job execution result when it is failed. 70 | 71 | Default: `600` (10 minutes). 72 | 73 | ### SCHEDULER_CONFIG: `DEFAULT_JOB_TTL` 74 | 75 | Default timeout for job info. 76 | 77 | Default: `300` (5 minutes). 78 | 79 | ### SCHEDULER_CONFIG: `DEFAULT_JOB_TIMEOUT` 80 | 81 | timeout (seconds) for a job. 82 | 83 | Default: `300` (5 minutes). 84 | 85 | ### SCHEDULER_CONFIG: `DEFAULT_WORKER_TTL` 86 | 87 | Time To Live (TTL) in seconds to keep worker information after last heartbeat. 88 | Default: `600` (10 minutes). 
89 | 90 | ### SCHEDULER_CONFIG: `DEFAULT_MAINTENANCE_TASK_INTERVAL` 91 | 92 | The interval to run worker maintenance tasks in seconds. 93 | Default: `600` 10 minutes. 94 | 95 | ### SCHEDULER_CONFIG: `DEFAULT_JOB_MONITORING_INTERVAL` 96 | 97 | The interval to monitor jobs in seconds. 98 | 99 | ### SCHEDULER_CONFIG: `SCHEDULER_FALLBACK_PERIOD_SECS` 100 | 101 | Period (secs) to wait before requiring to reacquire locks. 102 | 103 | ### SCHEDULER_CONFIG: `TOKEN_VALIDATION_METHOD` 104 | 105 | Method to validate request `Authorization` header with. 106 | Enables checking stats using API token. 107 | 108 | Default: no tokens allowed. 109 | 110 | ### `SCHEDULER_QUEUES` 111 | 112 | You can configure the queues to work with. 113 | That way you can have different workers listening to different queues. 114 | 115 | Different queues can use different redis servers/connections. 116 | -------------------------------------------------------------------------------- /docs/drt-model.md: -------------------------------------------------------------------------------- 1 | # Worker related flows 2 | 3 | Running `python manage.py scheduler_worker --name 'X' --queues high default low` 4 | 5 | ## Register new worker for queues 6 | ```mermaid 7 | sequenceDiagram 8 | autonumber 9 | 10 | participant worker as WorkerProcess 11 | 12 | participant qlist as QueueHash
name -> key 13 | participant wlist as WorkerList 14 | participant wkey as WorkerKey 15 | participant queue as QueueKey 16 | participant job as JobHash 17 | 18 | 19 | note over worker,qlist: Checking sanity 20 | 21 | break when a queue-name in the args is not in queue-list 22 | worker ->>+ qlist: Query queue names 23 | qlist -->>- worker: All queue names 24 | worker ->> worker: check that queue names exists in the system 25 | end 26 | 27 | note over worker,wkey: register 28 | worker ->> wkey: Create workerKey with all info (new id, queues, status) 29 | worker ->> wlist: Add new worker to list, last heartbeat set to now() 30 | ``` 31 | 32 | ## Work (execute jobs on queues) 33 | 34 | ```mermaid 35 | sequenceDiagram 36 | autonumber 37 | 38 | participant worker as WorkerProcess 39 | 40 | participant qlist as QueueHash
name -> key 41 | participant wlist as WorkerList 42 | participant wkey as WorkerKey 43 | participant queue as QueueKey 44 | participant job as JobHash 45 | 46 | loop Until death 47 | worker ->> wlist: Update last heartbeat 48 | note over worker,job: Find next job 49 | 50 | loop over queueKeys until job to run is found or all queues are empty 51 | worker ->>+ queue: get next job name and remove it or None (zrange+zpop) 52 | queue -->>- worker: job name / nothing 53 | end 54 | 55 | note over worker,job: Execute job or sleep 56 | critical [job is found] 57 | worker ->> wkey: Update worker status to busy 58 | worker ->>+ job: query job data 59 | job -->>- worker: job data 60 | 61 | worker ->> job: update job status to running 62 | worker ->> worker: execute job 63 | worker ->> job: update job status to done/failed 64 | worker ->> wkey: Update worker status to idle 65 | option No job pending 66 | worker ->> worker: sleep 67 | end 68 | end 69 | ``` 70 | 71 | # Scheduler flows 72 | -------------------------------------------------------------------------------- /docs/installation.md: -------------------------------------------------------------------------------- 1 | # Installation 2 | 3 | 1. Use pip to install: 4 | ```shell 5 | pip install django-tasks-scheduler 6 | ``` 7 | 8 | 2. In `settings.py`, add `scheduler` to `INSTALLED_APPS`: 9 | ```python 10 | INSTALLED_APPS = [ 11 | # ... 12 | 'scheduler', 13 | # ... 14 | ] 15 | ``` 16 | 17 | 3. Configure your queues. 18 | Add at least one Redis Queue to your `settings.py`. 19 | Note that the usage of `QueueConfiguration` is optional, you can use a simple dictionary, but `QueueConfiguration` 20 | helps preventing configuration errors. 
21 | ```python 22 | import os 23 | from typing import Dict 24 | from scheduler.types import QueueConfiguration 25 | 26 | SCHEDULER_QUEUES: Dict[str, QueueConfiguration] = { 27 | 'default': QueueConfiguration( 28 | HOST='localhost', 29 | PORT=6379, 30 | USERNAME='some-user', 31 | PASSWORD='some-password', 32 | CONNECTION_KWARGS={ # Eventual additional Broker connection arguments 33 | 'ssl_cert_reqs': 'required', 34 | 'ssl': True, 35 | }, 36 | ), 37 | 'with-sentinel': QueueConfiguration( 38 | SENTINELS= [('localhost', 26736), ('localhost', 26737)], 39 | MASTER_NAME= 'redismaster', 40 | DB= 0, 41 | USERNAME= 'redis-user', 42 | PASSWORD= 'secret', 43 | CONNECTION_KWARGS= { 44 | 'ssl': True}, 45 | SENTINEL_KWARGS= { 46 | 'username': 'sentinel-user', 47 | 'password': 'secret', 48 | }), 49 | 'high': QueueConfiguration(URL=os.getenv('REDISTOGO_URL', 'redis://localhost:6379/0')), 50 | 'low': QueueConfiguration(HOST='localhost', PORT=6379, DB=0, ASYNC=False), 51 | } 52 | ``` 53 | 54 | 4. Optional: Configure default values for queuing jobs from code: 55 | ```python 56 | from scheduler.types import SchedulerConfiguration, Broker 57 | 58 | SCHEDULER_CONFIG = SchedulerConfiguration( 59 | EXECUTIONS_IN_PAGE=20, 60 | SCHEDULER_INTERVAL=10, 61 | BROKER=Broker.REDIS, 62 | CALLBACK_TIMEOUT=60, # Callback timeout in seconds (success/failure/stopped) 63 | # Default values, can be overriden per task/job 64 | DEFAULT_SUCCESS_TTL=10 * 60, # Time To Live (TTL) in seconds to keep successful job results 65 | DEFAULT_FAILURE_TTL=365 * 24 * 60 * 60, # Time To Live (TTL) in seconds to keep job failure information 66 | DEFAULT_JOB_TTL=10 * 60, # Time To Live (TTL) in seconds to keep job information 67 | DEFAULT_JOB_TIMEOUT=5 * 60, # timeout (seconds) for a job 68 | # General configuration values 69 | DEFAULT_WORKER_TTL=10 * 60, # Time To Live (TTL) in seconds to keep worker information after last heartbeat 70 | DEFAULT_MAINTENANCE_TASK_INTERVAL=10 * 60, # The interval to run maintenance tasks in 
seconds. 10 minutes. 71 | DEFAULT_JOB_MONITORING_INTERVAL=30, # The interval to monitor jobs in seconds. 72 | SCHEDULER_FALLBACK_PERIOD_SECS=120, # Period (secs) to wait before requiring to reacquire locks 73 | ) 74 | ``` 75 | 76 | 5. Add `scheduler.urls` to your django application `urls.py`: 77 | ```python 78 | from django.urls import path, include 79 | 80 | urlpatterns = [ 81 | # ... 82 | path('scheduler/', include('scheduler.urls')), 83 | ] 84 | ``` 85 | 86 | 6. Run migrations to generate django models 87 | ```shell 88 | python manage.py migrate 89 | ``` 90 | 91 | -------------------------------------------------------------------------------- /docs/media/add-args.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/6ae0b16b8b818786cc4058794571d5bb4cc48ab8/docs/media/add-args.jpg -------------------------------------------------------------------------------- /docs/media/add-scheduled-task.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/6ae0b16b8b818786cc4058794571d5bb4cc48ab8/docs/media/add-scheduled-task.jpg -------------------------------------------------------------------------------- /docs/media/admin-job-details.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/6ae0b16b8b818786cc4058794571d5bb4cc48ab8/docs/media/admin-job-details.jpg -------------------------------------------------------------------------------- /docs/media/admin-queue-registry.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/6ae0b16b8b818786cc4058794571d5bb4cc48ab8/docs/media/admin-queue-registry.jpg 
-------------------------------------------------------------------------------- /docs/media/admin-queues-list.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/6ae0b16b8b818786cc4058794571d5bb4cc48ab8/docs/media/admin-queues-list.jpg -------------------------------------------------------------------------------- /docs/media/admin-task-details.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/6ae0b16b8b818786cc4058794571d5bb4cc48ab8/docs/media/admin-task-details.jpg -------------------------------------------------------------------------------- /docs/media/admin-tasks-list.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/6ae0b16b8b818786cc4058794571d5bb4cc48ab8/docs/media/admin-tasks-list.jpg -------------------------------------------------------------------------------- /docs/media/admin-worker-details.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/6ae0b16b8b818786cc4058794571d5bb4cc48ab8/docs/media/admin-worker-details.jpg -------------------------------------------------------------------------------- /docs/media/admin-workers-list.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/6ae0b16b8b818786cc4058794571d5bb4cc48ab8/docs/media/admin-workers-list.jpg -------------------------------------------------------------------------------- /docs/migrate_to_v3.md: -------------------------------------------------------------------------------- 1 | Migration from v2 to v3 2 | ======================= 3 | 4 | Version 3.0.0 introduced a 
major design change. Instead of three separate models, there is one new `Task` model. The 5 | goal is to have one centralized admin view for all your scheduled tasks, regardless of the scheduling type. 6 | 7 | You need to migrate the scheduled tasks using the old models (`ScheduledTask`, `RepeatableTask`, `CronTask`) to the new 8 | model. It can be done using the export/import commands provided. 9 | 10 | After upgrading to django-tasks-scheduler v3.0.0, you will notice you are not able to create new scheduled tasks in the 11 | old models, that is intentional. In the next version of django-tasks-scheduler (v3.1), the old models will be deleted, 12 | so make sure you migrate your old models. 13 | 14 | !!! Note 15 | While we tested different scenarios heavily and left the code for old tasks, we could not account for all different 16 | use cases, therefore, please [open an issue][issues] if you encounter any. 17 | 18 | There are two ways to migrate your existing scheduled tasks: 19 | 20 | # Using the admin views of the old models 21 | 22 | If you go to the admin view of the old models, you will notice there is a new action in the actions drop down menu for 23 | migrating the selected tasks. Use it, and you will also have a link to the new task to compare the migration result. 24 | 25 | Note once you migrate using this method, the old task will be disabled automatically. 
26 | 27 | # Export/Import management commands 28 | 29 | Run in your project directory: 30 | 31 | ```shell 32 | python manage.py export > scheduled_tasks.json 33 | python manage.py import --filename scheduled_tasks.json 34 | ``` 35 | 36 | [issues]: https://github.com/django-commons/django-tasks-scheduler/issues -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | mkdocs==1.6.1 2 | mkdocs-material==9.6.14 3 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | --- 2 | site_name: django-tasks-scheduler 3 | site_author: Daniel Moran 4 | site_description: >- 5 | Documentation for django-tasks-scheduler django library 6 | # Repository 7 | repo_name: dsoftwareinc/django-tasks-scheduler 8 | repo_url: https://github.com/django-commons/django-tasks-scheduler 9 | 10 | # Copyright 11 | copyright: Copyright © 2022 - 2023 Daniel Moran 12 | 13 | extra: 14 | generator: false 15 | analytics: 16 | provider: google 17 | property: G-GJBJBKXT19 18 | 19 | markdown_extensions: 20 | - abbr 21 | - admonition 22 | - attr_list 23 | - def_list 24 | - footnotes 25 | - md_in_html 26 | - pymdownx.arithmatex: 27 | generic: true 28 | - pymdownx.betterem: 29 | smart_enable: all 30 | - pymdownx.caret 31 | - pymdownx.details 32 | - pymdownx.emoji: 33 | emoji_generator: !!python/name:material.extensions.emoji.to_svg 34 | emoji_index: !!python/name:material.extensions.emoji.twemoji 35 | - pymdownx.highlight: 36 | anchor_linenums: true 37 | - pymdownx.inlinehilite 38 | - pymdownx.keys 39 | - pymdownx.magiclink: 40 | repo_url_shorthand: true 41 | user: dsoftware-inc 42 | repo: django-tasks-scheduler 43 | - pymdownx.mark 44 | - pymdownx.smartsymbols 45 | - pymdownx.superfences: 46 | custom_fences: 47 | - name: mermaid 48 | class: mermaid 49 | 
format: !!python/name:pymdownx.superfences.fence_code_format 50 | - pymdownx.tabbed: 51 | alternate_style: true 52 | - pymdownx.tasklist: 53 | custom_checkbox: true 54 | - pymdownx.tilde 55 | - toc: 56 | permalink: true 57 | toc_depth: 3 58 | 59 | 60 | theme: 61 | name: material 62 | palette: 63 | - scheme: default 64 | primary: indigo 65 | accent: indigo 66 | toggle: 67 | icon: material/brightness-7 68 | name: Switch to dark mode 69 | - scheme: slate 70 | primary: indigo 71 | accent: indigo 72 | toggle: 73 | icon: material/brightness-4 74 | name: Switch to light mode 75 | features: 76 | # - announce.dismiss 77 | - content.action.edit 78 | - content.action.view 79 | - content.code.annotate 80 | - content.code.copy 81 | # - content.tabs.link 82 | - content.tooltips 83 | # - header.autohide 84 | # - navigation.expand 85 | - navigation.footer 86 | - navigation.indexes 87 | # - navigation.instant 88 | # - navigation.prune 89 | - navigation.sections 90 | # - navigation.tabs.sticky 91 | - navigation.tracking 92 | - search.highlight 93 | - search.share 94 | - search.suggest 95 | - toc.follow 96 | # - toc.integrate 97 | highlightjs: true 98 | hljs_languages: 99 | - yaml 100 | - django 101 | 102 | nav: 103 | - Home: index.md 104 | - Migrate v2 to v3: migrate_to_v3.md 105 | - Installation: installation.md 106 | - Configuration: configuration.md 107 | - Usage: usage.md 108 | - Management commands: commands.md 109 | - Change log: changelog.md 110 | 111 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | name = "django-tasks-scheduler" 7 | version = "4.0.4" 8 | description = "An async job scheduler for django using redis/valkey brokers" 9 | authors = [{ name = "Daniel Moran", email = "daniel@moransoftware.ca" }] 10 | requires-python = "~=3.10" 
11 | readme = "README.md" 12 | license = "MIT" 13 | maintainers = [{ name = "Daniel Moran", email = "daniel@moransoftware.ca" }] 14 | keywords = [ 15 | "redis", 16 | "valkey", 17 | "django", 18 | "background-jobs", 19 | "job-queue", 20 | "task-queue", 21 | "redis-queue", 22 | "scheduled-jobs", 23 | ] 24 | classifiers = [ 25 | "Development Status :: 5 - Production/Stable", 26 | "Environment :: Web Environment", 27 | "Intended Audience :: Developers", 28 | "License :: OSI Approved :: MIT License", 29 | "Operating System :: OS Independent", 30 | "Programming Language :: Python", 31 | "Programming Language :: Python :: 3.10", 32 | "Programming Language :: Python :: 3.11", 33 | "Programming Language :: Python :: 3.12", 34 | "Programming Language :: Python :: 3.13", 35 | "Framework :: Django", 36 | "Framework :: Django :: 5.0", 37 | "Framework :: Django :: 5.1", 38 | "Framework :: Django :: 5.2", 39 | ] 40 | dependencies = [ 41 | "django>=5", 42 | "croniter>=2.0", 43 | "click~=8.2", 44 | ] 45 | 46 | [project.optional-dependencies] 47 | yaml = ["pyyaml~=6.0"] 48 | valkey = ["valkey>=6.0.2,<7"] 49 | sentry = ["sentry-sdk~=2.19"] 50 | 51 | [project.urls] 52 | Homepage = "https://github.com/django-commons/django-tasks-scheduler" 53 | Documentation = "https://django-tasks-scheduler.readthedocs.io/" 54 | "Bug Tracker" = "https://github.com/django-commons/django-tasks-scheduler/issues" 55 | Funding = "https://github.com/sponsors/cunla" 56 | 57 | [dependency-groups] 58 | dev = [ 59 | "time-machine>=2.16.0,<3", 60 | "ruff>=0.11", 61 | "coverage~=7.6", 62 | "fakeredis~=2.28", 63 | "pyyaml>=6,<7", 64 | ] 65 | 66 | [tool.hatch.build.targets.sdist] 67 | include = ["scheduler"] 68 | 69 | [tool.hatch.build.targets.wheel] 70 | include = ["scheduler"] 71 | 72 | [tool.ruff] 73 | line-length = 120 74 | exclude = [ 75 | 'scheduler/migrations', 76 | 'testproject', 77 | '.venv', 78 | '.github', 79 | '__pycache__', 80 | ] 81 | 82 | [tool.ruff.format] 83 | quote-style = "double" 84 | 
indent-style = "space" 85 | skip-magic-trailing-comma = false 86 | line-ending = "auto" 87 | -------------------------------------------------------------------------------- /scheduler/__init__.py: -------------------------------------------------------------------------------- 1 | import importlib.metadata 2 | 3 | __version__ = importlib.metadata.version("django-tasks-scheduler") 4 | 5 | __all__ = [ 6 | "job", 7 | ] 8 | 9 | from scheduler.decorators import job 10 | -------------------------------------------------------------------------------- /scheduler/admin/__init__.py: -------------------------------------------------------------------------------- 1 | from .ephemeral_models import QueueAdmin, WorkerAdmin 2 | from .task_admin import TaskAdmin 3 | 4 | __all__ = [ 5 | "QueueAdmin", 6 | "WorkerAdmin", 7 | "TaskAdmin", 8 | ] 9 | -------------------------------------------------------------------------------- /scheduler/admin/ephemeral_models.py: -------------------------------------------------------------------------------- 1 | from django.contrib import admin 2 | 3 | from scheduler import views 4 | from scheduler.models.ephemeral_models import Queue, Worker 5 | 6 | 7 | class ImmutableAdmin(admin.ModelAdmin): 8 | def has_add_permission(self, request): 9 | return False # Hide the admin "+ Add" link for Queues 10 | 11 | def has_change_permission(self, request, obj=None): 12 | return True 13 | 14 | def has_module_permission(self, request): 15 | """Returns True if the given request has any permission in the given app label. 16 | 17 | Can be overridden by the user in subclasses. In such case, it should return True if the given request has 18 | permission to view the module on the admin index page and access the module's index page. Overriding it does 19 | not restrict access to the add, change or delete views. Use `ModelAdmin.has_(add|change|delete)_permission` for 20 | that. 
21 | """ 22 | return request.user.has_module_perms("django-tasks-scheduler") 23 | 24 | 25 | @admin.register(Queue) 26 | class QueueAdmin(ImmutableAdmin): 27 | """Admin View for queues""" 28 | 29 | def changelist_view(self, request, extra_context=None): 30 | """The 'change list' admin view for this model.""" 31 | return views.stats(request) 32 | 33 | 34 | @admin.register(Worker) 35 | class WorkerAdmin(ImmutableAdmin): 36 | """Admin View for workers""" 37 | 38 | def changelist_view(self, request, extra_context=None): 39 | """The 'change list' admin view for this model.""" 40 | return views.workers_list(request) 41 | -------------------------------------------------------------------------------- /scheduler/apps.py: -------------------------------------------------------------------------------- 1 | from django.apps import AppConfig 2 | from django.utils.translation import gettext_lazy as _ 3 | 4 | 5 | class SchedulerConfig(AppConfig): 6 | default_auto_field = "django.db.models.AutoField" 7 | name = "scheduler" 8 | verbose_name = _("Tasks Scheduler") 9 | 10 | def ready(self): 11 | pass 12 | -------------------------------------------------------------------------------- /scheduler/decorators.py: -------------------------------------------------------------------------------- 1 | from functools import wraps 2 | from typing import Any, Callable, Dict, Optional, Union, List 3 | 4 | from scheduler.helpers.callback import Callback 5 | from scheduler.types import ConnectionType 6 | 7 | JOB_METHODS_LIST: List[str] = list() 8 | 9 | 10 | class job: 11 | def __init__( 12 | self, 13 | queue: Union["Queue", str, None] = None, # noqa: F821 14 | connection: Optional[ConnectionType] = None, 15 | timeout: Optional[int] = None, 16 | result_ttl: Optional[int] = None, 17 | job_info_ttl: Optional[int] = None, 18 | at_front: bool = False, 19 | meta: Optional[Dict[Any, Any]] = None, 20 | description: Optional[str] = None, 21 | on_failure: Optional[Union["Callback", Callable[..., Any]]] = 
None, 22 | on_success: Optional[Union["Callback", Callable[..., Any]]] = None, 23 | on_stopped: Optional[Union["Callback", Callable[..., Any]]] = None, 24 | ): 25 | """A decorator that adds a ``delay`` method to the decorated function, which in turn creates a RQ job when 26 | called. Accepts a required ``queue`` argument that can be either a ``Queue`` instance or a string 27 | denoting the queue name. For example:: 28 | 29 | 30 | >>> @job(queue='default') 31 | >>> def simple_add(x, y): 32 | >>> return x + y 33 | >>> ... 34 | >>> # Puts `simple_add` function into queue 35 | >>> simple_add.delay(1, 2) 36 | 37 | :param queue: The queue to use, can be the Queue class itself, or the queue name (str) 38 | :type queue: Union['Queue', str] 39 | :param connection: Broker Connection 40 | :param timeout: Job timeout 41 | :param result_ttl: Result time to live 42 | :param job_info_ttl: Time to live for job info 43 | :param at_front: Whether to enqueue the job at front of the queue 44 | :param meta: Arbitraty metadata about the job 45 | :param description: Job description 46 | :param on_failure: Callable to run on failure 47 | :param on_success: Callable to run on success 48 | :param on_stopped: Callable to run when stopped 49 | """ 50 | from scheduler.helpers.queues import get_queue 51 | 52 | if queue is None: 53 | queue = "default" 54 | self.queue = get_queue(queue) if isinstance(queue, str) else queue 55 | self.connection = connection 56 | self.timeout = timeout 57 | self.result_ttl = result_ttl 58 | self.job_info_ttl = job_info_ttl 59 | self.meta = meta 60 | self.at_front = at_front 61 | self.description = description 62 | self.on_success = on_success 63 | self.on_failure = on_failure 64 | self.on_stopped = on_stopped 65 | 66 | def __call__(self, f): 67 | @wraps(f) 68 | def delay(*args, **kwargs): 69 | from scheduler.helpers.queues import get_queue 70 | 71 | queue = get_queue(self.queue) if isinstance(self.queue, str) else self.queue 72 | 73 | job_name = 
kwargs.pop("job_name", None) 74 | at_front = kwargs.pop("at_front", False) 75 | 76 | if not at_front: 77 | at_front = self.at_front 78 | 79 | return queue.create_and_enqueue_job( 80 | f, 81 | args=args, 82 | kwargs=kwargs, 83 | timeout=self.timeout, 84 | result_ttl=self.result_ttl, 85 | job_info_ttl=self.job_info_ttl, 86 | name=job_name, 87 | at_front=at_front, 88 | meta=self.meta, 89 | description=self.description, 90 | on_failure=self.on_failure, 91 | on_success=self.on_success, 92 | on_stopped=self.on_stopped, 93 | when=None, 94 | ) 95 | 96 | JOB_METHODS_LIST.append(f"{f.__module__}.{f.__name__}") 97 | f.delay = delay 98 | return f 99 | -------------------------------------------------------------------------------- /scheduler/helpers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/6ae0b16b8b818786cc4058794571d5bb4cc48ab8/scheduler/helpers/__init__.py -------------------------------------------------------------------------------- /scheduler/helpers/callback.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | from typing import Union, Callable, Any, Optional 3 | 4 | from scheduler.helpers.utils import callable_func 5 | from scheduler.timeouts import JobTimeoutException 6 | 7 | 8 | class CallbackSetupError(Exception): 9 | pass 10 | 11 | 12 | class Callback: 13 | def __init__(self, func: Union[str, Callable[..., Any]], timeout: Optional[int] = None): 14 | from scheduler.settings import SCHEDULER_CONFIG 15 | 16 | self.timeout = timeout or SCHEDULER_CONFIG.CALLBACK_TIMEOUT 17 | if not isinstance(self.timeout, int) or self.timeout < 0: 18 | raise CallbackSetupError(f"Callback `timeout` must be a positive int, but received {self.timeout}") 19 | if not isinstance(func, str) and not inspect.isfunction(func) and not inspect.isbuiltin(func): 20 | raise CallbackSetupError(f"Callback `func` must be a 
from typing import Set

from scheduler.redis_models.worker import WorkerModel
from scheduler.settings import SCHEDULER_CONFIG, get_queue_names, get_queue_configuration, QueueConfiguration, logger
from scheduler.types import ConnectionErrorTypes, BrokerMetaData, Broker
from .queue_logic import Queue


# Queue names whose broker could not be reached at least once; they are skipped
# on later get_all_workers() calls. NOTE(review): entries are never removed, so
# a transient broker outage hides a queue's workers until process restart —
# confirm this is intended.
_BAD_QUEUE_CONFIGURATION = set()


def _get_connection(config: QueueConfiguration, use_strict_broker=False):
    """Returns a Broker connection to use based on parameters in SCHEDULER_QUEUES"""
    if SCHEDULER_CONFIG.BROKER == Broker.FAKEREDIS:
        # In-memory test broker: imported lazily so fakeredis is only required
        # when this broker is actually configured.
        import fakeredis

        broker_cls = fakeredis.FakeRedis if not use_strict_broker else fakeredis.FakeStrictRedis
    else:
        broker_cls = BrokerMetaData[(SCHEDULER_CONFIG.BROKER, use_strict_broker)].connection_type
    # Connection settings are tried in priority order:
    # URL, then unix socket, then sentinel cluster, then plain host/port.
    if config.URL:
        return broker_cls.from_url(config.URL, db=config.DB, **(config.CONNECTION_KWARGS or {}))
    if config.UNIX_SOCKET_PATH:
        return broker_cls(unix_socket_path=config.UNIX_SOCKET_PATH, db=config.DB)

    if config.SENTINELS:
        connection_kwargs = {
            "db": config.DB,
            "password": config.PASSWORD,
            "username": config.USERNAME,
        }
        connection_kwargs.update(config.CONNECTION_KWARGS or {})
        sentinel_kwargs = config.SENTINEL_KWARGS or {}
        SentinelClass = BrokerMetaData[(SCHEDULER_CONFIG.BROKER, use_strict_broker)].sentinel_type
        sentinel = SentinelClass(config.SENTINELS, sentinel_kwargs=sentinel_kwargs, **connection_kwargs)
        # Ask the sentinel cluster for the current master of the configured service.
        return sentinel.master_for(
            service_name=config.MASTER_NAME,
            redis_class=broker_cls,
        )

    return broker_cls(
        host=config.HOST,
        port=config.PORT,
        db=config.DB,
        username=config.USERNAME,
        password=config.PASSWORD,
        **(config.CONNECTION_KWARGS or {}),
    )


def get_queue(name="default") -> Queue:
    """Returns an DjangoQueue using parameters defined in `SCHEDULER_QUEUES`"""
    queue_settings = get_queue_configuration(name)
    is_async = queue_settings.ASYNC
    connection = _get_connection(queue_settings)
    return Queue(name=name, connection=connection, is_async=is_async)


def get_all_workers() -> Set[WorkerModel]:
    """Collect WorkerModel entries from every configured queue's broker.

    Queues that raised a connection error are remembered in
    _BAD_QUEUE_CONFIGURATION and skipped on subsequent calls.
    """
    queue_names = get_queue_names()

    workers_set: Set[WorkerModel] = set()
    for queue_name in queue_names:
        if queue_name in _BAD_QUEUE_CONFIGURATION:
            continue
        connection = _get_connection(get_queue_configuration(queue_name))
        try:
            curr_workers: Set[WorkerModel] = set(WorkerModel.all(connection=connection))
            workers_set.update(curr_workers)
        except ConnectionErrorTypes as e:
            logger.error(f"Could not connect for queue {queue_name}: {e}")
            _BAD_QUEUE_CONFIGURATION.add(queue_name)
    return workers_set
logger.error(f"Could not connect for queue {queue_name}: {e}") 71 | _BAD_QUEUE_CONFIGURATION.add(queue_name) 72 | return workers_set 73 | -------------------------------------------------------------------------------- /scheduler/helpers/utils.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import importlib 3 | import time 4 | from typing import Callable 5 | 6 | 7 | def current_timestamp() -> int: 8 | """Returns current UTC timestamp in secs""" 9 | return int(time.time()) 10 | 11 | 12 | def utcnow() -> datetime.datetime: 13 | """Return now in UTC""" 14 | return datetime.datetime.now(datetime.timezone.utc) 15 | 16 | 17 | def callable_func(callable_str: str) -> Callable: 18 | path = callable_str.split(".") 19 | module = importlib.import_module(".".join(path[:-1])) 20 | func = getattr(module, path[-1]) 21 | if callable(func) is False: 22 | raise TypeError(f"'{callable_str}' is not callable") 23 | return func 24 | -------------------------------------------------------------------------------- /scheduler/management/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/6ae0b16b8b818786cc4058794571d5bb4cc48ab8/scheduler/management/__init__.py -------------------------------------------------------------------------------- /scheduler/management/commands/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/6ae0b16b8b818786cc4058794571d5bb4cc48ab8/scheduler/management/commands/__init__.py -------------------------------------------------------------------------------- /scheduler/management/commands/delete_failed_executions.py: -------------------------------------------------------------------------------- 1 | import click 2 | from django.core.management.base import BaseCommand 3 | 4 | from 
scheduler.helpers.queues import get_queue 5 | from scheduler.redis_models import JobModel 6 | 7 | 8 | class Command(BaseCommand): 9 | help = "Delete failed jobs from Django queue." 10 | 11 | def add_arguments(self, parser): 12 | parser.add_argument("--queue", "-q", dest="queue", default="default", help="Specify the queue [default]") 13 | parser.add_argument("-f", "--func", help='optional job function name, e.g. "app.tasks.func"') 14 | parser.add_argument("--dry-run", action="store_true", help="Do not actually delete failed jobs") 15 | 16 | def handle(self, *args, **options): 17 | queue = get_queue(options.get("queue", "default")) 18 | job_names = queue.failed_job_registry.all() 19 | jobs = JobModel.get_many(job_names, connection=queue.connection) 20 | func_name = options.get("func", None) 21 | if func_name is not None: 22 | jobs = [job for job in jobs if job.func_name == func_name] 23 | dry_run = options.get("dry_run", False) 24 | click.echo(f"Found {len(jobs)} failed jobs") 25 | for job in job_names: 26 | click.echo(f"Deleting {job}") 27 | if not dry_run: 28 | queue.delete_job(job) 29 | click.echo(f"Deleted {len(jobs)} failed jobs") 30 | -------------------------------------------------------------------------------- /scheduler/management/commands/export.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | import click 4 | from django.core.management.base import BaseCommand 5 | 6 | from scheduler.models import Task 7 | 8 | 9 | class Command(BaseCommand): 10 | """Export all scheduled jobs""" 11 | 12 | help = __doc__ 13 | 14 | def add_arguments(self, parser): 15 | parser.add_argument( 16 | "-o", 17 | "--output", 18 | action="store", 19 | choices=["json", "yaml"], 20 | default="json", 21 | dest="format", 22 | help="format of output", 23 | ) 24 | 25 | parser.add_argument( 26 | "-e", 27 | "--enabled", 28 | action="store_true", 29 | dest="enabled", 30 | help="Export only enabled jobs", 31 | ) 32 | 
import click
from django.core.management.base import BaseCommand

from scheduler.helpers.queues import get_queue


class Command(BaseCommand):
    """
    Queues the function given with the first argument with the
    parameters given with the rest of the argument list.
    """

    help = __doc__
    args = ""

    def add_arguments(self, parser):
        # Options controlling where and how the job is enqueued come first;
        # the callable and its positional args follow.
        parser.add_argument("--queue", "-q", dest="queue", default="default", help="Specify the queue [default]")
        parser.add_argument("--timeout", "-t", type=int, dest="timeout", help="A timeout in seconds")
        parser.add_argument(
            "--result-ttl", "-r", type=int, dest="result_ttl", help="Time to store job results in seconds"
        )
        parser.add_argument("callable", help="Method to call")
        parser.add_argument("args", nargs="*", help="Args for callable")

    def handle(self, **options):
        # Enqueue the requested callable immediately (when=None) on the chosen queue.
        target_queue = get_queue(options.get("queue"))
        job = target_queue.create_and_enqueue_job(
            options.get("callable"),
            args=options.get("args"),
            timeout=options.get("timeout"),
            result_ttl=options.get("result_ttl"),
            when=None,
        )
        if int(options.get("verbosity", 1)):
            click.echo(f"Job {job.name} created")
help="Output statistics as JSON", 32 | ) 33 | 34 | parser.add_argument( 35 | "-y", 36 | "--yaml", 37 | action="store_true", 38 | dest="yaml", 39 | help="Output statistics as YAML", 40 | ) 41 | 42 | parser.add_argument( 43 | "-i", 44 | "--interval", 45 | dest="interval", 46 | type=float, 47 | help="Poll statistics every N seconds", 48 | ) 49 | 50 | def _print_separator(self): 51 | click.echo("-" * self.table_width) 52 | 53 | def _print_stats_dashboard(self, statistics, prev_stats=None, with_color: bool = True): 54 | if self.interval: 55 | click.clear() 56 | click.echo() 57 | click.echo("Django-Scheduler CLI Dashboard") 58 | click.echo() 59 | self._print_separator() 60 | click.echo(f"| {'Name':<16} | Queued | Active | Finished | Canceled | Workers |") 61 | self._print_separator() 62 | for ind, queue in enumerate(statistics["queues"]): 63 | vals = list((queue[k] for k in KEYS)) 64 | # Deal with colors 65 | if not with_color: 66 | colors = ["" for _ in KEYS] 67 | if prev_stats and len(prev_stats["queues"]) > ind: 68 | prev = prev_stats["queues"][ind] 69 | prev_vals = tuple(prev[k] for k in KEYS) 70 | colors = [ 71 | ANSI_LIGHT_GREEN if vals[i] != prev_vals[i] else ANSI_LIGHT_WHITE for i in range(len(prev_vals)) 72 | ] 73 | else: 74 | colors = [ANSI_LIGHT_WHITE for _ in range(len(vals))] 75 | to_print = " | ".join([f"{colors[i]}{vals[i]:9}{ANSI_RESET}" for i in range(len(vals))]) 76 | click.echo(f"| {queue['name']:<16} | {to_print} |", color=with_color) 77 | 78 | self._print_separator() 79 | 80 | if self.interval: 81 | click.echo() 82 | click.echo("Press 'Ctrl+c' to quit") 83 | 84 | def handle(self, *args, **options): 85 | if options.get("json") and options.get("yaml"): 86 | click.secho("Aborting. 
Cannot output as both json and yaml", err=True, fg="red") 87 | exit(1) 88 | if options.get("json"): 89 | import json 90 | 91 | click.secho( 92 | json.dumps(get_statistics(), indent=2), 93 | ) 94 | return 95 | 96 | if options.get("yaml"): 97 | try: 98 | import yaml 99 | except ImportError: 100 | click.secho("Aborting. yaml not supported", err=True, fg="red") 101 | return 102 | 103 | click.secho(yaml.dump(get_statistics(), default_flow_style=False)) 104 | return 105 | 106 | self.interval = options.get("interval") 107 | 108 | if not self.interval or self.interval < 0: 109 | self._print_stats_dashboard(get_statistics(), with_color=not options.get("no_color")) 110 | return 111 | 112 | try: 113 | prev = None 114 | while True: 115 | statistics = get_statistics() 116 | self._print_stats_dashboard(statistics, prev, with_color=not options.get("no_color")) 117 | prev = statistics 118 | time.sleep(self.interval) 119 | except KeyboardInterrupt: 120 | pass 121 | -------------------------------------------------------------------------------- /scheduler/migrations/0002_alter_cronjob_id_alter_repeatablejob_id_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.0.1 on 2022-01-06 20:40 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ('scheduler', '0001_initial_squashed_0005_added_result_ttl'), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name='cronjob', 14 | name='id', 15 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), 16 | ), 17 | migrations.AlterField( 18 | model_name='repeatablejob', 19 | name='id', 20 | field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), 21 | ), 22 | migrations.AlterField( 23 | model_name='scheduledjob', 24 | name='id', 25 | field=models.BigAutoField(auto_created=True, primary_key=True, 
serialize=False, verbose_name='ID'), 26 | ), 27 | ] 28 | -------------------------------------------------------------------------------- /scheduler/migrations/0003_auto_20220329_2107.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 3.2.12 on 2022-03-29 21:07 2 | 3 | import django.db.models.deletion 4 | from django.db import migrations, models 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ('contenttypes', '0002_remove_content_type_name'), 10 | ('scheduler', '0002_alter_cronjob_id_alter_repeatablejob_id_and_more'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterField( 15 | model_name='cronjob', 16 | name='cron_string', 17 | field=models.CharField(help_text='Define the schedule in a crontab like syntax. Times are in UTC.', 18 | max_length=64, verbose_name='cron string'), 19 | ), 20 | migrations.AlterField( 21 | model_name='cronjob', 22 | name='id', 23 | field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), 24 | ), 25 | migrations.AlterField( 26 | model_name='repeatablejob', 27 | name='id', 28 | field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), 29 | ), 30 | migrations.AlterField( 31 | model_name='repeatablejob', 32 | name='interval_unit', 33 | field=models.CharField( 34 | choices=[('seconds', 'seconds'), ('minutes', 'minutes'), ('hours', 'hours'), ('days', 'days'), 35 | ('weeks', 'weeks')], default='hours', max_length=12, verbose_name='interval unit'), 36 | ), 37 | migrations.AlterField( 38 | model_name='scheduledjob', 39 | name='id', 40 | field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), 41 | ), 42 | migrations.CreateModel( 43 | name='JobKwarg', 44 | fields=[ 45 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), 46 | ('arg_type', models.CharField( 47 | choices=[('str_val', 'string'), 
('int_val', 'int'), ('bool_val', 'boolean'), 48 | ('datetime_val', 'Datetime')], default='str_val', max_length=12, 49 | verbose_name='Argument Type')), 50 | ('str_val', models.CharField(blank=True, max_length=255, verbose_name='String Value')), 51 | ('int_val', models.IntegerField(blank=True, null=True, verbose_name='Int Value')), 52 | ('bool_val', models.BooleanField(default=False, verbose_name='Boolean Value')), 53 | ('datetime_val', models.DateTimeField(blank=True, null=True, verbose_name='Datetime Value')), 54 | ('object_id', models.PositiveIntegerField()), 55 | ('key', models.CharField(max_length=255)), 56 | ('content_type', 57 | models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')), 58 | ], 59 | options={ 60 | 'ordering': ['id'], 61 | 'abstract': False, 62 | }, 63 | ), 64 | migrations.CreateModel( 65 | name='JobArg', 66 | fields=[ 67 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), 68 | ('arg_type', models.CharField( 69 | choices=[('str_val', 'string'), ('int_val', 'int'), ('bool_val', 'boolean'), 70 | ('datetime_val', 'Datetime')], default='str_val', max_length=12, 71 | verbose_name='Argument Type')), 72 | ('str_val', models.CharField(blank=True, max_length=255, verbose_name='String Value')), 73 | ('int_val', models.IntegerField(blank=True, null=True, verbose_name='Int Value')), 74 | ('bool_val', models.BooleanField(default=False, verbose_name='Boolean Value')), 75 | ('datetime_val', models.DateTimeField(blank=True, null=True, verbose_name='Datetime Value')), 76 | ('object_id', models.PositiveIntegerField()), 77 | ('content_type', 78 | models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')), 79 | ], 80 | options={ 81 | 'ordering': ['id'], 82 | 'abstract': False, 83 | }, 84 | ), 85 | ] 86 | -------------------------------------------------------------------------------- 
/scheduler/migrations/0004_cronjob_at_front_repeatablejob_at_front_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.4 on 2022-12-18 18:47 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ('scheduler', '0003_auto_20220329_2107'), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name='cronjob', 14 | name='at_front', 15 | field=models.BooleanField(default=False, verbose_name='At front'), 16 | ), 17 | migrations.AddField( 18 | model_name='repeatablejob', 19 | name='at_front', 20 | field=models.BooleanField(default=False, verbose_name='At front'), 21 | ), 22 | migrations.AddField( 23 | model_name='scheduledjob', 24 | name='at_front', 25 | field=models.BooleanField(default=False, verbose_name='At front'), 26 | ), 27 | ] 28 | -------------------------------------------------------------------------------- /scheduler/migrations/0005_alter_cronjob_at_front_alter_repeatablejob_at_front_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.5 on 2023-01-13 22:35 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ('scheduler', '0004_cronjob_at_front_repeatablejob_at_front_and_more'), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name='cronjob', 14 | name='at_front', 15 | field=models.BooleanField(blank=True, default=False, null=True, verbose_name='At front'), 16 | ), 17 | migrations.AlterField( 18 | model_name='repeatablejob', 19 | name='at_front', 20 | field=models.BooleanField(blank=True, default=False, null=True, verbose_name='At front'), 21 | ), 22 | migrations.AlterField( 23 | model_name='scheduledjob', 24 | name='at_front', 25 | field=models.BooleanField(blank=True, default=False, null=True, verbose_name='At front'), 26 | ), 27 | ] 28 | 
-------------------------------------------------------------------------------- /scheduler/migrations/0006_auto_20230118_1640.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.5 on 2023-01-18 16:40 2 | 3 | from django.db import migrations 4 | 5 | 6 | def forwards_func(apps, schema_editor): 7 | pass 8 | 9 | 10 | def reverse_func(apps, schema_editor): 11 | # forwards_func() creates two Country instances, 12 | # so reverse_func() should delete them. 13 | cronjob_model = apps.get_model(app_label='scheduler', model_name='CronJob') 14 | db_alias = schema_editor.connection.alias 15 | cronjob_model.objects.using(db_alias).filter(name='Job scheduling jobs').delete() 16 | 17 | 18 | class Migration(migrations.Migration): 19 | dependencies = [ 20 | ('scheduler', '0005_alter_cronjob_at_front_alter_repeatablejob_at_front_and_more'), 21 | ] 22 | 23 | operations = [ 24 | migrations.RunPython(forwards_func, reverse_func), 25 | ] 26 | -------------------------------------------------------------------------------- /scheduler/migrations/0007_add_result_ttl.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.5 on 2023-01-20 22:32 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ('scheduler', '0006_auto_20230118_1640'), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name='cronjob', 14 | name='result_ttl', 15 | field=models.IntegerField(blank=True, 16 | help_text='The TTL value (in seconds) of the job result. -1: Result never expires, you should delete jobs manually. 0: Result gets deleted immediately. 
>0: Result expires after n seconds.', 17 | null=True, verbose_name='result ttl'), 18 | ), 19 | migrations.AlterField( 20 | model_name='cronjob', 21 | name='queue', 22 | field=models.CharField(help_text='Queue name', max_length=16, verbose_name='queue'), 23 | ), 24 | migrations.AlterField( 25 | model_name='repeatablejob', 26 | name='queue', 27 | field=models.CharField(help_text='Queue name', max_length=16, verbose_name='queue'), 28 | ), 29 | migrations.AlterField( 30 | model_name='scheduledjob', 31 | name='queue', 32 | field=models.CharField(help_text='Queue name', max_length=16, verbose_name='queue'), 33 | ), 34 | ] 35 | -------------------------------------------------------------------------------- /scheduler/migrations/0008_rename_str_val_jobarg_val_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-02-19 22:12 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ('scheduler', '0007_add_result_ttl'), 9 | ] 10 | 11 | operations = [ 12 | migrations.RenameField( 13 | model_name='jobarg', 14 | old_name='str_val', 15 | new_name='val', 16 | ), 17 | migrations.RenameField( 18 | model_name='jobkwarg', 19 | old_name='str_val', 20 | new_name='val', 21 | ), 22 | migrations.RemoveField( 23 | model_name='jobarg', 24 | name='bool_val', 25 | ), 26 | migrations.RemoveField( 27 | model_name='jobarg', 28 | name='datetime_val', 29 | ), 30 | migrations.RemoveField( 31 | model_name='jobarg', 32 | name='int_val', 33 | ), 34 | migrations.RemoveField( 35 | model_name='jobkwarg', 36 | name='bool_val', 37 | ), 38 | migrations.RemoveField( 39 | model_name='jobkwarg', 40 | name='datetime_val', 41 | ), 42 | migrations.RemoveField( 43 | model_name='jobkwarg', 44 | name='int_val', 45 | ), 46 | migrations.AlterField( 47 | model_name='jobarg', 48 | name='arg_type', 49 | field=models.CharField(choices=[('str_val', 'string'), ('int_val', 
'int'), ('bool_val', 'boolean'), 50 | ('datetime_val', 'Datetime'), ('callable', 'Callable')], default='str_val', 51 | max_length=12, verbose_name='Argument Type'), 52 | ), 53 | migrations.AlterField( 54 | model_name='jobkwarg', 55 | name='arg_type', 56 | field=models.CharField(choices=[('str_val', 'string'), ('int_val', 'int'), ('bool_val', 'boolean'), 57 | ('datetime_val', 'Datetime'), ('callable', 'Callable')], default='str_val', 58 | max_length=12, verbose_name='Argument Type'), 59 | ), 60 | ] 61 | -------------------------------------------------------------------------------- /scheduler/migrations/0009_alter_jobarg_arg_type_alter_jobarg_val_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-03-12 19:53 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ('scheduler', '0008_rename_str_val_jobarg_val_and_more'), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name='jobarg', 14 | name='arg_type', 15 | field=models.CharField( 16 | choices=[('str', 'string'), ('int', 'int'), ('bool', 'boolean'), ('datetime', 'datetime'), 17 | ('callable', 'callable')], default='str', max_length=12, verbose_name='Argument Type'), 18 | ), 19 | migrations.AlterField( 20 | model_name='jobarg', 21 | name='val', 22 | field=models.CharField(blank=True, max_length=255, verbose_name='Argument Value'), 23 | ), 24 | migrations.AlterField( 25 | model_name='jobkwarg', 26 | name='arg_type', 27 | field=models.CharField( 28 | choices=[('str', 'string'), ('int', 'int'), ('bool', 'boolean'), ('datetime', 'datetime'), 29 | ('callable', 'callable')], default='str', max_length=12, verbose_name='Argument Type'), 30 | ), 31 | migrations.AlterField( 32 | model_name='jobkwarg', 33 | name='val', 34 | field=models.CharField(blank=True, max_length=255, verbose_name='Argument Value'), 35 | ), 36 | ] 37 | 
-------------------------------------------------------------------------------- /scheduler/migrations/0010_queue.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-03-16 20:59 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ('scheduler', '0009_alter_jobarg_arg_type_alter_jobarg_val_and_more'), 9 | ] 10 | 11 | operations = [ 12 | migrations.CreateModel( 13 | name='Queue', 14 | fields=[ 15 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), 16 | ], 17 | options={ 18 | 'permissions': [['view', 'Access admin page']], 19 | 'managed': False, 20 | 'default_permissions': (), 21 | }, 22 | ), 23 | ] 24 | -------------------------------------------------------------------------------- /scheduler/migrations/0012_alter_cronjob_name_alter_repeatablejob_name_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2023-04-18 19:08 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ('scheduler', '0011_worker_alter_queue_options_alter_cronjob_at_front_and_more'), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name='cronjob', 14 | name='name', 15 | field=models.CharField(help_text='Name of the job.', max_length=128, unique=True, verbose_name='name'), 16 | ), 17 | migrations.AlterField( 18 | model_name='repeatablejob', 19 | name='name', 20 | field=models.CharField(help_text='Name of the job.', max_length=128, unique=True, verbose_name='name'), 21 | ), 22 | migrations.AlterField( 23 | model_name='scheduledjob', 24 | name='name', 25 | field=models.CharField(help_text='Name of the job.', max_length=128, unique=True, verbose_name='name'), 26 | ), 27 | ] 28 | 
-------------------------------------------------------------------------------- /scheduler/migrations/0013_alter_cronjob_queue_alter_repeatablejob_queue_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2.1 on 2023-05-11 16:40 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | 8 | dependencies = [ 9 | ("scheduler", "0012_alter_cronjob_name_alter_repeatablejob_name_and_more") 10 | ] 11 | 12 | operations = [ 13 | migrations.AlterField( 14 | model_name="cronjob", 15 | name="queue", 16 | field=models.CharField( 17 | choices=[("default", "default"), ("low", "low"), ("high", "high")], 18 | help_text="Queue name", 19 | max_length=255, 20 | verbose_name="queue", 21 | ), 22 | ), 23 | migrations.AlterField( 24 | model_name="repeatablejob", 25 | name="queue", 26 | field=models.CharField( 27 | choices=[("default", "default"), ("low", "low"), ("high", "high")], 28 | help_text="Queue name", 29 | max_length=255, 30 | verbose_name="queue", 31 | ), 32 | ), 33 | migrations.AlterField( 34 | model_name="scheduledjob", 35 | name="queue", 36 | field=models.CharField( 37 | choices=[("default", "default"), ("low", "low"), ("high", "high")], 38 | help_text="Queue name", 39 | max_length=255, 40 | verbose_name="queue", 41 | ), 42 | ), 43 | ] 44 | -------------------------------------------------------------------------------- /scheduler/migrations/0014_alter_cronjob_created_alter_cronjob_modified_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2.5 on 2023-09-08 20:16 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("scheduler", "0013_alter_cronjob_queue_alter_repeatablejob_queue_and_more"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name="cronjob", 14 | name="created", 15 | 
field=models.DateTimeField(auto_now_add=True), 16 | ), 17 | migrations.AlterField( 18 | model_name="cronjob", 19 | name="modified", 20 | field=models.DateTimeField(auto_now=True), 21 | ), 22 | migrations.AlterField( 23 | model_name="repeatablejob", 24 | name="created", 25 | field=models.DateTimeField(auto_now_add=True), 26 | ), 27 | migrations.AlterField( 28 | model_name="repeatablejob", 29 | name="modified", 30 | field=models.DateTimeField(auto_now=True), 31 | ), 32 | migrations.AlterField( 33 | model_name="scheduledjob", 34 | name="created", 35 | field=models.DateTimeField(auto_now_add=True), 36 | ), 37 | migrations.AlterField( 38 | model_name="scheduledjob", 39 | name="modified", 40 | field=models.DateTimeField(auto_now=True), 41 | ), 42 | ] 43 | -------------------------------------------------------------------------------- /scheduler/migrations/0015_rename_cronjob_crontask_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2.5 on 2023-10-08 16:41 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | 8 | dependencies = [ 9 | ('scheduler', '0014_alter_cronjob_created_alter_cronjob_modified_and_more'), 10 | ] 11 | 12 | operations = [ 13 | migrations.RenameModel( 14 | old_name='CronJob', 15 | new_name='CronTask', 16 | ), 17 | migrations.RenameModel( 18 | old_name='RepeatableJob', 19 | new_name='RepeatableTask', 20 | ), 21 | migrations.RenameModel( 22 | old_name='ScheduledJob', 23 | new_name='ScheduledTask', 24 | ), 25 | ] 26 | -------------------------------------------------------------------------------- /scheduler/migrations/0016_rename_jobarg_taskarg_rename_jobkwarg_taskkwarg_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2.5 on 2023-10-08 16:48 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | 8 | dependencies = [ 9 | 
('contenttypes', '0002_remove_content_type_name'), 10 | ('scheduler', '0015_rename_cronjob_crontask_and_more'), 11 | ] 12 | 13 | operations = [ 14 | migrations.RenameModel( 15 | old_name='JobArg', 16 | new_name='TaskArg', 17 | ), 18 | migrations.RenameModel( 19 | old_name='JobKwarg', 20 | new_name='TaskKwarg', 21 | ), 22 | migrations.AlterModelOptions( 23 | name='crontask', 24 | options={'ordering': ('name',), 'verbose_name': 'Cron Task', 'verbose_name_plural': 'Cron Tasks'}, 25 | ), 26 | migrations.AlterModelOptions( 27 | name='repeatabletask', 28 | options={'ordering': ('name',), 'verbose_name': 'Repeatable Task', 'verbose_name_plural': 'Repeatable Tasks'}, 29 | ), 30 | migrations.AlterModelOptions( 31 | name='scheduledtask', 32 | options={'ordering': ('name',), 'verbose_name': 'Scheduled Task', 'verbose_name_plural': 'Scheduled Tasks'}, 33 | ), 34 | ] 35 | -------------------------------------------------------------------------------- /scheduler/migrations/0017_remove_crontask_repeat_crontask_failed_runs_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 5.0.1 on 2024-01-10 17:39 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | 8 | dependencies = [ 9 | ('scheduler', '0016_rename_jobarg_taskarg_rename_jobkwarg_taskkwarg_and_more'), 10 | ] 11 | 12 | operations = [ 13 | migrations.RemoveField( 14 | model_name='crontask', 15 | name='repeat', 16 | ), 17 | migrations.AddField( 18 | model_name='crontask', 19 | name='failed_runs', 20 | field=models.PositiveIntegerField(default=0, help_text='Number of times the task has failed', verbose_name='failed runs'), 21 | ), 22 | migrations.AddField( 23 | model_name='crontask', 24 | name='last_failed_run', 25 | field=models.DateTimeField(blank=True, help_text='Last time the task has failed', null=True, verbose_name='last failed run'), 26 | ), 27 | migrations.AddField( 28 | model_name='crontask', 29 | 
name='last_successful_run', 30 | field=models.DateTimeField(blank=True, help_text='Last time the task has succeeded', null=True, verbose_name='last successful run'), 31 | ), 32 | migrations.AddField( 33 | model_name='crontask', 34 | name='successful_runs', 35 | field=models.PositiveIntegerField(default=0, help_text='Number of times the task has succeeded', verbose_name='successful runs'), 36 | ), 37 | migrations.AddField( 38 | model_name='repeatabletask', 39 | name='failed_runs', 40 | field=models.PositiveIntegerField(default=0, help_text='Number of times the task has failed', verbose_name='failed runs'), 41 | ), 42 | migrations.AddField( 43 | model_name='repeatabletask', 44 | name='last_failed_run', 45 | field=models.DateTimeField(blank=True, help_text='Last time the task has failed', null=True, verbose_name='last failed run'), 46 | ), 47 | migrations.AddField( 48 | model_name='repeatabletask', 49 | name='last_successful_run', 50 | field=models.DateTimeField(blank=True, help_text='Last time the task has succeeded', null=True, verbose_name='last successful run'), 51 | ), 52 | migrations.AddField( 53 | model_name='repeatabletask', 54 | name='successful_runs', 55 | field=models.PositiveIntegerField(default=0, help_text='Number of times the task has succeeded', verbose_name='successful runs'), 56 | ), 57 | ] 58 | -------------------------------------------------------------------------------- /scheduler/migrations/0018_alter_crontask_queue_alter_repeatabletask_queue_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 5.1b1 on 2024-06-29 14:21 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | 8 | dependencies = [ 9 | ('scheduler', '0017_remove_crontask_repeat_crontask_failed_runs_and_more'), 10 | ] 11 | 12 | operations = [ 13 | # migrations.AlterField( 14 | # model_name='crontask', 15 | # name='queue', 16 | # 
field=models.CharField(choices=scheduler.models.old_scheduled_task.get_queue_choices, help_text='Queue name', max_length=255, verbose_name='queue'), 17 | # ), 18 | # migrations.AlterField( 19 | # model_name='repeatabletask', 20 | # name='queue', 21 | # field=models.CharField(choices=scheduler.models.old_scheduled_task.get_queue_choices, help_text='Queue name', max_length=255, verbose_name='queue'), 22 | # ), 23 | # migrations.AlterField( 24 | # model_name='scheduledtask', 25 | # name='queue', 26 | # field=models.CharField(choices=scheduler.models.old_scheduled_task.get_queue_choices, help_text='Queue name', max_length=255, verbose_name='queue'), 27 | # ), 28 | ] 29 | -------------------------------------------------------------------------------- /scheduler/migrations/0020_remove_repeatabletask_new_task_id_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 5.1.6 on 2025-02-05 15:40 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | 8 | dependencies = [ 9 | ("scheduler", "0019_task_crontask_new_task_id_repeatabletask_new_task_id_and_more"), 10 | ] 11 | 12 | operations = [ 13 | migrations.RemoveField( 14 | model_name="repeatabletask", 15 | name="new_task_id", 16 | ), 17 | migrations.RemoveField( 18 | model_name="scheduledtask", 19 | name="new_task_id", 20 | ), 21 | migrations.DeleteModel( 22 | name="CronTask", 23 | ), 24 | migrations.DeleteModel( 25 | name="RepeatableTask", 26 | ), 27 | migrations.DeleteModel( 28 | name="ScheduledTask", 29 | ), 30 | ] 31 | -------------------------------------------------------------------------------- /scheduler/migrations/0021_remove_task_job_id_task_job_name.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 5.1.7 on 2025-03-24 14:30 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | 8 | dependencies = [ 9 | 
('scheduler', '0020_remove_repeatabletask_new_task_id_and_more'), 10 | ] 11 | 12 | operations = [ 13 | migrations.RemoveField( 14 | model_name='task', 15 | name='job_id', 16 | ), 17 | migrations.AddField( 18 | model_name='task', 19 | name='job_name', 20 | field=models.CharField(blank=True, editable=False, help_text='Current job_name on queue', max_length=128, null=True, verbose_name='job name'), 21 | ), 22 | ] 23 | -------------------------------------------------------------------------------- /scheduler/migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/6ae0b16b8b818786cc4058794571d5bb4cc48ab8/scheduler/migrations/__init__.py -------------------------------------------------------------------------------- /scheduler/models/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = [ 2 | "Task", 3 | "TaskType", 4 | "TaskArg", 5 | "TaskKwarg", 6 | "get_scheduled_task", 7 | "run_task", 8 | "get_next_cron_time", 9 | ] 10 | 11 | from .args import TaskArg, TaskKwarg 12 | from .task import TaskType, Task, get_scheduled_task, run_task, get_next_cron_time 13 | -------------------------------------------------------------------------------- /scheduler/models/args.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from typing import Callable 3 | 4 | from django.contrib.contenttypes.fields import GenericForeignKey 5 | from django.contrib.contenttypes.models import ContentType 6 | from django.core.exceptions import ValidationError 7 | from django.db import models 8 | from django.utils.translation import gettext_lazy as _ 9 | 10 | from scheduler.helpers import utils 11 | 12 | ARG_TYPE_TYPES_DICT = { 13 | "str": str, 14 | "int": int, 15 | "bool": bool, 16 | "datetime": datetime, 17 | "callable": Callable, 18 | } 19 | 20 | 21 | class 
BaseTaskArg(models.Model): 22 | class ArgType(models.TextChoices): 23 | STR = "str", _("string") 24 | INT = "int", _("int") 25 | BOOL = "bool", _("boolean") 26 | DATETIME = "datetime", _("datetime") 27 | CALLABLE = "callable", _("callable") 28 | 29 | arg_type = models.CharField( 30 | _("Argument Type"), 31 | max_length=12, 32 | choices=ArgType.choices, 33 | default=ArgType.STR, 34 | ) 35 | val = models.CharField(_("Argument Value"), blank=True, max_length=255) 36 | content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) 37 | object_id = models.PositiveIntegerField() 38 | content_object = GenericForeignKey() 39 | 40 | def clean(self): 41 | if self.arg_type not in ARG_TYPE_TYPES_DICT: 42 | raise ValidationError( 43 | { 44 | "arg_type": ValidationError( 45 | _(f"Could not parse {self.arg_type}, options are: {ARG_TYPE_TYPES_DICT.keys()}"), code="invalid" 46 | ) 47 | } 48 | ) 49 | try: 50 | if self.arg_type == "callable": 51 | utils.callable_func(self.val) 52 | elif self.arg_type == "datetime": 53 | datetime.fromisoformat(self.val) 54 | elif self.arg_type == "bool": 55 | if self.val.lower() not in {"true", "false"}: 56 | raise ValidationError 57 | elif self.arg_type == "int": 58 | int(self.val) 59 | except Exception: 60 | raise ValidationError( 61 | {"arg_type": ValidationError(_(f"Could not parse {self.val} as {self.arg_type}"), code="invalid")} 62 | ) 63 | 64 | def save(self, **kwargs): 65 | super(BaseTaskArg, self).save(**kwargs) 66 | self.content_object.save() 67 | 68 | def delete(self, **kwargs): 69 | super(BaseTaskArg, self).delete(**kwargs) 70 | self.content_object.save() 71 | 72 | def value(self): 73 | if self.arg_type == "callable": 74 | res = utils.callable_func(self.val)() 75 | elif self.arg_type == "datetime": 76 | res = datetime.fromisoformat(self.val) 77 | elif self.arg_type == "bool": 78 | res = self.val.lower() == "true" 79 | else: 80 | res = ARG_TYPE_TYPES_DICT[self.arg_type](self.val) 81 | return res 82 | 83 | class Meta: 84 | abstract 
= True 85 | ordering = ["id"] 86 | 87 | 88 | class TaskArg(BaseTaskArg): 89 | def __str__(self): 90 | return f"TaskArg[arg_type={self.arg_type},value={self.value()}]" 91 | 92 | 93 | class TaskKwarg(BaseTaskArg): 94 | key = models.CharField(max_length=255) 95 | 96 | def __str__(self): 97 | key, value = self.value() 98 | return f"TaskKwarg[key={key},arg_type={self.arg_type},value={self.val}]" 99 | 100 | def value(self): 101 | return self.key, super(TaskKwarg, self).value() 102 | -------------------------------------------------------------------------------- /scheduler/models/ephemeral_models.py: -------------------------------------------------------------------------------- 1 | from django.db import models 2 | 3 | 4 | class Queue(models.Model): 5 | """Placeholder model with no database table, but with django admin page and contenttype permission""" 6 | 7 | class Meta: 8 | managed = False # not in Django's database 9 | default_permissions = () 10 | permissions = [["view", "Access admin page"]] 11 | verbose_name_plural = " Queues" 12 | 13 | 14 | class Worker(models.Model): 15 | """Placeholder model with no database table, but with django admin page and contenttype permission""" 16 | 17 | class Meta: 18 | managed = False # not in Django's database 19 | default_permissions = () 20 | permissions = [["view", "Access admin page"]] 21 | verbose_name_plural = " Workers" 22 | -------------------------------------------------------------------------------- /scheduler/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/6ae0b16b8b818786cc4058794571d5bb4cc48ab8/scheduler/py.typed -------------------------------------------------------------------------------- /scheduler/redis_models/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = [ 2 | "Result", 3 | "ResultType", 4 | "as_str", 5 | "SchedulerLock", 6 | 
"WorkerModel", 7 | "DequeueTimeout", 8 | "KvLock", 9 | "JobStatus", 10 | "JobModel", 11 | "JobNamesRegistry", 12 | "FinishedJobRegistry", 13 | "ActiveJobRegistry", 14 | "FailedJobRegistry", 15 | "CanceledJobRegistry", 16 | "ScheduledJobRegistry", 17 | "QueuedJobRegistry", 18 | ] 19 | 20 | from .base import as_str 21 | from .job import JobStatus, JobModel 22 | from .lock import SchedulerLock, KvLock 23 | from .registry.base_registry import DequeueTimeout, JobNamesRegistry 24 | from .registry.queue_registries import ( 25 | FinishedJobRegistry, 26 | ActiveJobRegistry, 27 | FailedJobRegistry, 28 | CanceledJobRegistry, 29 | ScheduledJobRegistry, 30 | QueuedJobRegistry, 31 | ) 32 | from .result import Result, ResultType 33 | from .worker import WorkerModel 34 | -------------------------------------------------------------------------------- /scheduler/redis_models/lock.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Any 2 | 3 | from scheduler.types import ConnectionType 4 | 5 | 6 | class KvLock(object): 7 | def __init__(self, name: str) -> None: 8 | self.name = name 9 | self.acquired = False 10 | 11 | @property 12 | def _locking_key(self) -> str: 13 | return f"_lock:{self.name}" 14 | 15 | def acquire(self, val: Any, connection: ConnectionType, expire: Optional[int] = None) -> bool: 16 | self.acquired = connection.set(self._locking_key, val, nx=True, ex=expire) 17 | return self.acquired 18 | 19 | def expire(self, connection: ConnectionType, expire: Optional[int] = None) -> bool: 20 | return connection.expire(self._locking_key, expire) 21 | 22 | def release(self, connection: ConnectionType): 23 | connection.delete(self._locking_key) 24 | 25 | def value(self, connection: ConnectionType) -> Any: 26 | return connection.get(self._locking_key) 27 | 28 | 29 | class SchedulerLock(KvLock): 30 | def __init__(self, queue_name: str) -> None: 31 | super().__init__(f"lock:scheduler:{queue_name}") 32 | 33 | 34 | class 
QueueLock(KvLock): 35 | def __init__(self, queue_name: str) -> None: 36 | super().__init__(f"queue:{queue_name}") 37 | -------------------------------------------------------------------------------- /scheduler/redis_models/registry/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/6ae0b16b8b818786cc4058794571d5bb4cc48ab8/scheduler/redis_models/registry/__init__.py -------------------------------------------------------------------------------- /scheduler/redis_models/result.py: -------------------------------------------------------------------------------- 1 | import dataclasses 2 | from datetime import datetime 3 | from enum import Enum 4 | from typing import Optional, Any, ClassVar, List 5 | 6 | from scheduler.helpers.utils import utcnow 7 | from scheduler.redis_models.base import StreamModel, decode_dict 8 | from scheduler.types import ConnectionType, Self 9 | 10 | 11 | class ResultType(Enum): 12 | SUCCESSFUL = "successful" 13 | FAILED = "failed" 14 | STOPPED = "stopped" 15 | 16 | 17 | @dataclasses.dataclass(slots=True, kw_only=True) 18 | class Result(StreamModel): 19 | parent: str 20 | type: ResultType 21 | worker_name: str 22 | ttl: Optional[int] = 0 23 | name: Optional[str] = None 24 | created_at: datetime = dataclasses.field(default_factory=utcnow) 25 | return_value: Optional[Any] = None 26 | exc_string: Optional[str] = None 27 | 28 | _list_key: ClassVar[str] = ":job-results:" 29 | _children_key_template: ClassVar[str] = ":job-results:{}:" 30 | _element_key_template: ClassVar[str] = ":job-results:{}" 31 | 32 | @classmethod 33 | def create( 34 | cls, 35 | connection: ConnectionType, 36 | job_name: str, 37 | worker_name: str, 38 | _type: ResultType, 39 | ttl: int, 40 | return_value: Any = None, 41 | exc_string: Optional[str] = None, 42 | ) -> Self: 43 | result = cls( 44 | parent=job_name, 45 | ttl=ttl, 46 | type=_type, 47 | 
return_value=return_value, 48 | exc_string=exc_string, 49 | worker_name=worker_name, 50 | ) 51 | result.save(connection) 52 | return result 53 | 54 | @classmethod 55 | def fetch_latest(cls, connection: ConnectionType, job_name: str) -> Optional["Result"]: 56 | """Returns the latest result for given job_name. 57 | 58 | :param connection: Broker connection. 59 | :param job_name: Job name. 60 | :return: Result instance or None if no result is available. 61 | """ 62 | response: List[Any] = connection.xrevrange(cls._children_key_template.format(job_name), "+", "-", count=1) 63 | if not response: 64 | return None 65 | result_id, payload = response[0] 66 | res = cls.deserialize(decode_dict(payload, set())) 67 | return res 68 | -------------------------------------------------------------------------------- /scheduler/settings.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from typing import List, Dict 3 | 4 | from django.conf import settings 5 | from django.core.exceptions import ImproperlyConfigured 6 | 7 | from scheduler.types import SchedulerConfiguration, QueueConfiguration 8 | 9 | logger = logging.getLogger("scheduler") 10 | logging.basicConfig(level=logging.DEBUG) 11 | 12 | _QUEUES: Dict[str, QueueConfiguration] = dict() 13 | SCHEDULER_CONFIG: SchedulerConfiguration = SchedulerConfiguration() 14 | 15 | 16 | class QueueNotFoundError(Exception): 17 | pass 18 | 19 | 20 | def conf_settings(): 21 | global _QUEUES 22 | global SCHEDULER_CONFIG 23 | 24 | app_queues = getattr(settings, "SCHEDULER_QUEUES", None) 25 | if app_queues is None or not isinstance(app_queues, dict): 26 | raise ImproperlyConfigured("You have to define SCHEDULER_QUEUES in settings.py as dict") 27 | 28 | for queue_name, queue_config in app_queues.items(): 29 | if isinstance(queue_config, QueueConfiguration): 30 | _QUEUES[queue_name] = queue_config 31 | elif isinstance(queue_config, dict): 32 | _QUEUES[queue_name] = 
import logging
from typing import List, Dict

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured

from scheduler.types import SchedulerConfiguration, QueueConfiguration

logger = logging.getLogger("scheduler")
logging.basicConfig(level=logging.DEBUG)

# Populated once by conf_settings() at import time.
_QUEUES: Dict[str, QueueConfiguration] = dict()
SCHEDULER_CONFIG: SchedulerConfiguration = SchedulerConfiguration()


class QueueNotFoundError(Exception):
    """Raised when a queue name is not present in SCHEDULER_QUEUES."""

    pass


def conf_settings():
    """Load SCHEDULER_QUEUES and SCHEDULER_CONFIG from Django settings.

    Raises ImproperlyConfigured when either setting is missing or malformed.
    """
    global _QUEUES
    global SCHEDULER_CONFIG

    app_queues = getattr(settings, "SCHEDULER_QUEUES", None)
    # isinstance(None, dict) is False, so this also covers the missing case.
    if not isinstance(app_queues, dict):
        raise ImproperlyConfigured("You have to define SCHEDULER_QUEUES in settings.py as dict")

    for queue_name, queue_config in app_queues.items():
        if isinstance(queue_config, QueueConfiguration):
            _QUEUES[queue_name] = queue_config
        elif isinstance(queue_config, dict):
            _QUEUES[queue_name] = QueueConfiguration(**queue_config)
        else:
            raise ImproperlyConfigured(f"Queue {queue_name} configuration should be a QueueConfiguration or dict")

    user_settings = getattr(settings, "SCHEDULER_CONFIG", {})
    if isinstance(user_settings, SchedulerConfiguration):
        SCHEDULER_CONFIG = user_settings  # type: ignore
        return
    if not isinstance(user_settings, dict):
        raise ImproperlyConfigured("SCHEDULER_CONFIG should be a SchedulerConfiguration or dict")
    for k, v in user_settings.items():
        # Reject keys that SchedulerConfiguration does not declare.
        if k not in SCHEDULER_CONFIG.__annotations__:
            raise ImproperlyConfigured(f"Unknown setting {k} in SCHEDULER_CONFIG")
        setattr(SCHEDULER_CONFIG, k, v)


conf_settings()


def get_queue_names() -> List[str]:
    """Names of all configured queues."""
    return list(_QUEUES)


def get_queue_configuration(queue_name: str) -> QueueConfiguration:
    """Configuration for ``queue_name``; raises QueueNotFoundError when unknown."""
    if queue_name not in _QUEUES:
        raise QueueNotFoundError(f"Queue {queue_name} not found, queues={_QUEUES.keys()}")
    return _QUEUES[queue_name]
/scheduler/templates/admin/scheduler/change_form.html: -------------------------------------------------------------------------------- 1 | {% extends "admin/change_form.html" %} 2 | {% load i18n %} 3 | 4 | {% block after_related_objects %} 5 | {% include 'admin/scheduler/jobs-list.partial.html' %} 6 | {% endblock %} -------------------------------------------------------------------------------- /scheduler/templates/admin/scheduler/change_list.html: -------------------------------------------------------------------------------- 1 | {% extends 'admin/change_list.html' %} 2 | {% load scheduler_tags %} 3 | 4 | {% block object-tools %} 5 | {{ block.super }} 6 | {% endblock %} -------------------------------------------------------------------------------- /scheduler/templates/admin/scheduler/confirm_action.html: -------------------------------------------------------------------------------- 1 | {% extends "admin/scheduler/scheduler_base.html" %} 2 | {% load scheduler_tags %} 3 | 4 | {% block breadcrumbs %} 5 | 11 | {% endblock %} 12 | 13 | {% block content_title %}

Are you sure?

{% endblock %} 14 | 15 | {% block content %} 16 |
17 |

18 | Are you sure you want to {{ action|capfirst }} the {{ total_jobs }} selected jobs from 19 | {{ queue.name }} 20 | ? These jobs are selected: 21 |

22 |
    23 | {% for job in jobs %} 24 |
  • 25 | {{ job.name }} 26 | {{ job | show_func_name }} 27 |
  • 28 | {% endfor %} 29 |
30 |
31 | {% csrf_token %} 32 |
33 | {% for job in jobs %} 34 | 35 | {% endfor %} 36 | 37 | 38 | 39 |
40 |
41 |
42 | {% endblock %} 43 | -------------------------------------------------------------------------------- /scheduler/templates/admin/scheduler/jobs-list-with-tasks.partial.html: -------------------------------------------------------------------------------- 1 | {% load scheduler_tags i18n %} 2 | {% if not add %} 3 |
4 |

Job executions

5 |
6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | {% for exec in executions %} 22 | 23 | 26 | 33 | 36 | 39 | 42 | 45 | 48 | 51 | 54 | 55 | {% endfor %} 56 | 57 |
IDScheduled TaskSTATUSCreated atEnqueued atStarted atRan forWorker nameResult
24 | {{ exec.name }} 25 | 27 | {% if exec.scheduled_task_id %} 28 | 29 | {{ exec|job_scheduled_task }} 30 | 31 | {% endif %} 32 | 34 | {{ exec|job_status }} 35 | 37 | {{ exec.created_at|date:"Y-m-d, H:i:s"|default:"-" }} 38 | 40 | {{ exec.enqueued_at|date:"Y-m-d, H:i:s"|default:"-" }} 41 | 43 | {{ exec.started_at|date:"Y-m-d, H:i:s"|default:"-" }} 44 | 46 | {{ exec|job_runtime }} 47 | 49 | {{ exec.worker_name|default:"-" }} 50 | 52 | {{ exec|job_result|default:"-" }} 53 |
58 |
59 |

60 | {% if pagination_required %} 61 | {% for i in page_range %} 62 | {% if i == executions.paginator.ELLIPSIS %} 63 | {{ executions.paginator.ELLIPSIS }} 64 | {% elif i == executions.number %} 65 | {{ i }} 66 | {% else %} 67 | {{ i }} 69 | {% endif %} 70 | {% endfor %} 71 | {{ executions.paginator.count }} {% blocktranslate count counter=executions.paginator.count %}entry 72 | {% plural %}entries{% endblocktranslate %} 73 | {% endif %} 74 |

75 |
76 | {% endif %} -------------------------------------------------------------------------------- /scheduler/templates/admin/scheduler/jobs-list.partial.html: -------------------------------------------------------------------------------- 1 | {% load scheduler_tags i18n %} 2 | {% if not add %} 3 |
4 |

Job executions

5 |
6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | {% for exec in executions %} 21 | 22 | 30 | 33 | 36 | 39 | 42 | 45 | 48 | 51 | 52 | {% endfor %} 53 | 54 |
IDSTATUSCreated atEnqueued atStarted atRan forWorker nameResult
23 | {{ exec.name }} 24 | {% if exec.scheduled_task_id %} 25 | 26 | Go to scheduled task 27 | 28 | {% endif %} 29 | 31 | {{ exec|job_status }} 32 | 34 | {{ exec.created_at|date:"Y-m-d, H:i:s"|default:"-" }} 35 | 37 | {{ exec.enqueued_at|date:"Y-m-d, H:i:s"|default:"-" }} 38 | 40 | {{ exec.started_at|date:"Y-m-d, H:i:s"|default:"-" }} 41 | 43 | {{ exec|job_runtime }} 44 | 46 | {{ exec.worker_name|default:"-" }} 47 | 49 | {{ exec|job_result|default:"-" }} 50 |
55 |
56 |

57 | {% if pagination_required %} 58 | {% for i in page_range %} 59 | {% if i == executions.paginator.ELLIPSIS %} 60 | {{ executions.paginator.ELLIPSIS }} 61 | {% elif i == executions.number %} 62 | {{ i }} 63 | {% else %} 64 | {{ i }} 66 | {% endif %} 67 | {% endfor %} 68 | {{ executions.paginator.count }} {% blocktranslate count counter=executions.paginator.count %}entry 69 | {% plural %}entries{% endblocktranslate %} 70 | {% endif %} 71 |

72 |
73 | {% endif %} -------------------------------------------------------------------------------- /scheduler/templates/admin/scheduler/queue_workers.html: -------------------------------------------------------------------------------- 1 | {% extends "admin/scheduler/scheduler_base.html" %} 2 | 3 | {% load static scheduler_tags l10n %} 4 | 5 | {% block title %}Workers in {{ queue.name }} {{ block.super }}{% endblock %} 6 | 7 | {% block extrastyle %} 8 | {{ block.super }} 9 | 10 | {% endblock %} 11 | 12 | 13 | {% block breadcrumbs %} 14 | 19 | {% endblock %} 20 | 21 | {% block content_title %}

Queue {{ queue.name }} workers

{% endblock %} 22 | 23 | {% block content %} 24 | 25 |
26 |
27 | {% include 'admin/scheduler/workers-list.partial.html' %} 28 |
29 |
30 | 31 | {% endblock %} 32 | -------------------------------------------------------------------------------- /scheduler/templates/admin/scheduler/scheduler_base.html: -------------------------------------------------------------------------------- 1 | {% extends "admin/base_site.html" %} 2 | {% load scheduler_tags %} 3 | 4 | {% load static %} 5 | 6 | {% block extrastyle %} 7 | {{ block.super }} 8 | 17 | 18 | {% endblock %} 19 | 20 | {% block extrahead %} 21 | {{ block.super }} 22 | 23 | {% endblock %} -------------------------------------------------------------------------------- /scheduler/templates/admin/scheduler/single_job_action.html: -------------------------------------------------------------------------------- 1 | {% extends "admin/scheduler/scheduler_base.html" %} 2 | {% load scheduler_tags %} 3 | 4 | {% block breadcrumbs %} 5 | 12 | {% endblock %} 13 | 14 | {% block content_title %}

Are you sure?

{% endblock %} 15 | 16 | {% block content %} 17 | 18 |
19 |

20 | Are you sure you want to {{ action }} 21 | 22 | {{ job.name }} ({{ job|show_func_name }}) 23 | 24 | from 25 | {{ queue.name }}? 26 | This action can not be undone. 27 |
28 | {% if job.is_scheduled_task %} 29 | Note: This scheduled job will be scheduled again if it is enabled 30 | {% endif %} 31 |

32 |
33 | {% csrf_token %} 34 |
35 | 36 |
37 |
38 |
39 | 40 | {% endblock %} 41 | -------------------------------------------------------------------------------- /scheduler/templates/admin/scheduler/stats.html: -------------------------------------------------------------------------------- 1 | {% extends "admin/base_site.html" %} 2 | 3 | {% block title %}Queues {{ block.super }}{% endblock %} 4 | 5 | {% block extrastyle %} 6 | {{ block.super }} 7 | 10 | {% endblock %} 11 | 12 | {% block content_title %}

Tasks Queues

{% endblock %} 13 | 14 | {% block breadcrumbs %} 15 | 19 | {% endblock %} 20 | 21 | {% block content %} 22 | 23 |
24 |
25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | {% if queue.scheduler_pid is not False %} 41 | 42 | {% endif %} 43 | 44 | 45 | 46 | {% for queue in queues %} 47 | 48 | 53 | 58 | 63 | 68 | 73 | 78 | 83 | 84 | 89 | 90 | 91 | 92 | {% if queue.scheduler_pid is not False %} 93 | 94 | {% endif %} 95 | 96 | {% endfor %} 97 | 98 |
NameScheduled JobsQueued JobsActive JobsFinished JobsFailed JobsCanceled JobsOldest Queued JobWorkersHostPortDBScheduler PID
49 | 50 | {{ queue.name }} 51 | 52 | 54 | 55 | {{ queue.scheduled_jobs }} 56 | 57 | 59 | 60 | {{ queue.queued_jobs }} 61 | 62 | 64 | 65 | {{ queue.started_jobs }} 66 | 67 | 69 | 70 | {{ queue.finished_jobs }} 71 | 72 | 74 | 75 | {{ queue.failed_jobs }} 76 | 77 | 79 | 80 | {{ queue.canceled_jobs }} 81 | 82 | {{ queue.oldest_job_timestamp }} 85 | 86 | {{ queue.workers }} 87 | 88 | {{ queue.connection_kwargs.host }}{{ queue.connection_kwargs.port }}{{ queue.connection_kwargs.db }}{{ queue.scheduler_pid|default_if_none:"Inactive" }}
99 |
100 | View as JSON 101 |
102 |
103 | 104 | {% endblock %} 105 | -------------------------------------------------------------------------------- /scheduler/templates/admin/scheduler/worker_details.html: -------------------------------------------------------------------------------- 1 | {% extends 'admin/scheduler/scheduler_base.html' %} 2 | 3 | {% block breadcrumbs %} 4 | 9 | {% endblock %} 10 | 11 | {% block content_title %}

Worker Info

{% endblock %} 12 | 13 | {% block content %} 14 |
15 |
16 |
17 |
18 |
19 | 20 |
{{ worker.name }}
21 |
22 |
23 |
24 | 25 |
{{ worker.pid }}
26 |
27 |
28 |
29 | 30 |
{{ worker.state.value |capfirst }}
31 |
32 |
33 |
34 | 35 |
{{ worker.birth|date:"Y-m-d, H:i:s" }}
36 |
37 |
38 |
39 |
40 |
41 |
42 | 43 |
44 | {% for queue_name in worker.queue_names %} 45 | {{ queue_name }} 46 | {% endfor %} 47 |
48 |
49 |
50 |
51 | 52 |
53 |
54 |
55 |
56 |
57 | 58 |
59 | {% if current_job %} 60 | {{ current_job.func_name }} 61 | ({{ current_job.name }}) 62 | {% else %} 63 | No current job 64 | {% endif %} 65 |
66 |
67 |
68 | 69 |
70 |
71 | 72 |
{{ worker.successful_job_count|default:0 }}
73 |
74 |
75 | 76 |
77 |
78 | 79 |
{{ worker.failed_job_count|default:0 }}
80 |
81 |
82 | 83 |
84 |
85 | 86 |
{{ worker.total_working_time_ms|default:0|floatformat }}ms
87 |
88 |
89 |
90 |
91 |
92 | {% include 'admin/scheduler/jobs-list-with-tasks.partial.html' %} 93 |
94 | 95 | {% endblock %} 96 | -------------------------------------------------------------------------------- /scheduler/templates/admin/scheduler/workers-list.partial.html: -------------------------------------------------------------------------------- 1 | {% load scheduler_tags %} 2 | {% load l10n %} 3 |
4 | 5 | 6 | 7 | 10 | 13 | 16 | 19 | 22 | 25 | 28 | 31 | 34 | 37 | 38 | 39 | 40 | {% for worker in workers %} 41 | 42 | 47 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | {% endfor %} 63 | 64 |
8 |
Name
9 |
11 |
Queues
12 |
14 |
State
15 |
17 |
Birth
18 |
20 |
Hostname
21 |
23 |
PID
24 |
26 |
Working time
27 |
29 |
Successful jobs
30 |
32 |
Failed jobs
33 |
35 |
Scheduler
36 |
43 | 44 | {{ worker.name }} 45 | 46 | 48 | {% for queue_name in worker.queue_names %} 49 | {{ queue_name }} 50 | {% if not forloop.last %},{% endif %} 51 | {% endfor %} 52 | {{ worker.state.value | capfirst }}{{ worker.birth | date:"Y-m-d, H:i:s" }}{{ worker.hostname }}{{ worker.pid | unlocalize }}{{ worker.total_working_time_ms | default:0 | floatformat }} secs{{ worker.successful_job_count | default:0 }}{{ worker.failed_job_count | default:0 }}{{ worker.has_scheduler }}
65 |
-------------------------------------------------------------------------------- /scheduler/templates/admin/scheduler/workers_list.html: -------------------------------------------------------------------------------- 1 | {% extends "admin/scheduler/scheduler_base.html" %} 2 | 3 | {% load static scheduler_tags l10n %} 4 | 5 | {% block title %}Workers in {{ queue.name }} {{ block.super }}{% endblock %} 6 | 7 | {% block extrastyle %} 8 | {{ block.super }} 9 | 10 | {% endblock %} 11 | 12 | 13 | {% block breadcrumbs %} 14 | 18 | {% endblock %} 19 | 20 | {% block content_title %}

{{ workers|length }} Tasks Workers

{% endblock %} 21 | 22 | {% block content %} 23 | 24 |
25 |
26 |
27 | {% csrf_token %} 28 | {% include 'admin/scheduler/workers-list.partial.html' %} 29 |
30 |
31 |
32 | 33 | {% endblock %} 34 | -------------------------------------------------------------------------------- /scheduler/templatetags/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/6ae0b16b8b818786cc4058794571d5bb4cc48ab8/scheduler/templatetags/__init__.py -------------------------------------------------------------------------------- /scheduler/templatetags/scheduler_tags.py: -------------------------------------------------------------------------------- 1 | from typing import Dict, Optional 2 | 3 | from django import template 4 | from django.utils.safestring import mark_safe 5 | 6 | from scheduler.helpers.queues import Queue 7 | from scheduler.models import Task, get_scheduled_task 8 | from scheduler.models.task import run_task 9 | from scheduler.redis_models import Result, JobModel 10 | from scheduler.views.helpers import get_queue 11 | 12 | register = template.Library() 13 | 14 | 15 | @register.filter 16 | def show_func_name(job: JobModel) -> str: 17 | try: 18 | res = job.func_name 19 | if job.func == run_task: 20 | task = get_scheduled_task(*job.args) 21 | res = task.function_string() 22 | return mark_safe(res) 23 | except Exception as e: 24 | return repr(e) 25 | 26 | 27 | @register.filter 28 | def get_item(dictionary: Dict, key): 29 | return dictionary.get(key) 30 | 31 | 32 | @register.filter 33 | def scheduled_task(job: JobModel) -> Task: 34 | django_scheduled_task = get_scheduled_task(*job.args) 35 | return django_scheduled_task.get_absolute_url() 36 | 37 | 38 | @register.filter 39 | def job_result(job: JobModel) -> Optional[str]: 40 | queue = get_queue(job.queue_name) 41 | result = Result.fetch_latest(queue.connection, job.name) 42 | return result.type.name.capitalize() if result is not None else None 43 | 44 | 45 | @register.filter 46 | def job_scheduled_task(job: JobModel) -> Optional[str]: 47 | task = 
@register.filter
def job_scheduled_task(job: JobModel) -> Optional[str]:
    """Name of the Task that scheduled this job, or None when it no longer exists."""
    task = Task.objects.filter(id=job.scheduled_task_id).first()
    return task.name if task is not None else None


@register.filter
def job_status(job: JobModel) -> str:
    """Capitalized job status for display."""
    result = job.status
    return result.capitalize()


@register.filter
def job_runtime(job: JobModel) -> str:
    """Human-readable runtime of a job execution.

    Returns "<n>ms" for finished jobs, "Still running" for jobs that started
    but have not ended, and "-" for jobs that never started.
    """
    ended_at = job.ended_at
    if ended_at:
        runtime = job.ended_at - job.started_at
        # BUG FIX: timedelta.microseconds holds only the sub-second component,
        # so any run >= 1 second was under-reported (a 2.5s run showed 500ms).
        # total_seconds() covers the whole span.
        return f"{int(runtime.total_seconds() * 1000)}ms"
    elif job.started_at:
        return "Still running"
    else:
        return "-"


@register.filter
def job_scheduled_time(job: JobModel, queue: Queue):
    """Scheduled execution time registered for this job in the queue."""
    return queue.scheduled_job_registry.get_scheduled_time(job.name)
"localhost", 36 | "PORT": 6379, 37 | "DB": 1, 38 | "ASYNC": False, 39 | }, 40 | "url": { 41 | "URL": "redis://username:password@host:1234/", 42 | "DB": 4, 43 | }, 44 | "url_with_db": { 45 | "URL": "redis://username:password@host:1234/5", 46 | }, 47 | "url_default_db": { 48 | "URL": "redis://username:password@host:1234", 49 | }, 50 | "django_tasks_scheduler_test": { 51 | "HOST": "localhost", 52 | "PORT": 6379, 53 | "DB": 0, 54 | }, 55 | "scheduler_scheduler_active_test": { 56 | "HOST": "localhost", 57 | "PORT": 6379, 58 | "DB": 0, 59 | "ASYNC": False, 60 | }, 61 | "scheduler_scheduler_inactive_test": { 62 | "HOST": "localhost", 63 | "PORT": 6379, 64 | "DB": 0, 65 | "ASYNC": False, 66 | }, 67 | "worker_scheduler_active_test": { 68 | "HOST": "localhost", 69 | "PORT": 6379, 70 | "DB": 0, 71 | "ASYNC": False, 72 | }, 73 | "worker_scheduler_inactive_test": { 74 | "HOST": "localhost", 75 | "PORT": 6379, 76 | "DB": 0, 77 | "ASYNC": False, 78 | }, 79 | "django_tasks_scheduler_test2": { 80 | "HOST": "localhost", 81 | "PORT": 6379, 82 | "DB": 0, 83 | }, 84 | "test_scheduler": { 85 | "HOST": "localhost", 86 | "PORT": 6379, 87 | "DB": 0, 88 | }, 89 | } 90 | if os.getenv("FAKEREDIS", "False") == "True": # pragma: no cover 91 | for name, queue_settings in settings.SCHEDULER_QUEUES: # pragma: no cover 92 | queue_settings["BROKER"] = "fakeredis" # pragma: no cover 93 | 94 | conf_settings() 95 | -------------------------------------------------------------------------------- /scheduler/tests/jobs.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from time import sleep 3 | 4 | from scheduler.helpers.queues import get_queue 5 | 6 | _counter = 0 7 | 8 | 9 | def arg_callable(): 10 | global _counter 11 | _counter += 1 12 | return _counter 13 | 14 | 15 | def test_args_kwargs(*args, **kwargs): 16 | func = "test_args_kwargs({})" 17 | args_list = [repr(arg) for arg in args] 18 | kwargs_list = [f"{k}={v}" for (k, v) in kwargs.items()] 
def two_seconds_job():
    """Sleep two seconds, then log the shared call counter."""
    sleep(2)
    logging.info(f"Job {_counter}")


def long_job():
    """Sleep ~17 minutes; exercises long-running-job handling in tests."""
    sleep(1000)
    logging.info(f"Job {_counter}")


# Deliberately not callable: used to test validation of job targets.
test_non_callable = "I am a teapot"


def failing_job():
    """Always raise ValueError, for failure-path tests."""
    raise ValueError


def test_job():
    """Trivial job returning a constant result."""
    return 1 + 1


def enqueue_jobs():
    """Enqueue 20 numbered trivial jobs on the default queue."""
    queue = get_queue()
    for i in range(20):
        queue.create_and_enqueue_job(test_job, name=f"job_{i:03}", args=())
class TestConfSettings(SchedulerBaseCase):
    """Error paths of scheduler.settings.conf_settings()."""

    @override_settings(SCHEDULER_CONFIG=[])
    def test_conf_settings__bad_scheduler_config(self):
        # Import inside the test so the overridden settings are in effect.
        from scheduler import settings

        with self.assertRaises(ImproperlyConfigured) as ctx:
            settings.conf_settings()

        self.assertEqual(str(ctx.exception), "SCHEDULER_CONFIG should be a SchedulerConfiguration or dict")

    @override_settings(SCHEDULER_QUEUES=[])
    def test_conf_settings__bad_scheduler_queues_config(self):
        from scheduler import settings

        with self.assertRaises(ImproperlyConfigured) as ctx:
            settings.conf_settings()

        self.assertEqual(str(ctx.exception), "You have to define SCHEDULER_QUEUES in settings.py as dict")

    @override_settings(SCHEDULER_QUEUES={"default": []})
    def test_conf_settings__bad_queue_config(self):
        from scheduler import settings

        with self.assertRaises(ImproperlyConfigured) as ctx:
            settings.conf_settings()

        self.assertEqual(str(ctx.exception), "Queue default configuration should be a QueueConfiguration or dict")

    @override_settings(SCHEDULER_CONFIG={"UNKNOWN_SETTING": 10})
    def test_conf_settings__unknown_setting(self):
        from scheduler import settings

        with self.assertRaises(ImproperlyConfigured) as ctx:
            settings.conf_settings()

        self.assertEqual(str(ctx.exception), "Unknown setting UNKNOWN_SETTING in SCHEDULER_CONFIG")
settings.SCHEDULER_CONFIG.DEFAULT_JOB_TIMEOUT, 66 | ) 67 | 68 | def test_job_decorator_timeout(self): 69 | test_job_timeout.delay() 70 | self._assert_job_with_func_and_props( 71 | "default", 72 | test_job_timeout, 73 | settings.SCHEDULER_CONFIG.DEFAULT_SUCCESS_TTL, 74 | 1, 75 | ) 76 | 77 | def test_job_decorator_result_ttl(self): 78 | test_job_result_ttl.delay() 79 | self._assert_job_with_func_and_props( 80 | "default", 81 | test_job_result_ttl, 82 | 1, 83 | settings.SCHEDULER_CONFIG.DEFAULT_JOB_TIMEOUT, 84 | ) 85 | 86 | def test_job_decorator_different_queue(self): 87 | test_job_diff_queue.delay() 88 | self._assert_job_with_func_and_props( 89 | "django_tasks_scheduler_test", 90 | test_job_diff_queue, 91 | settings.SCHEDULER_CONFIG.DEFAULT_SUCCESS_TTL, 92 | settings.SCHEDULER_CONFIG.DEFAULT_JOB_TIMEOUT, 93 | ) 94 | 95 | def _assert_job_with_func_and_props(self, queue_name, expected_func, expected_result_ttl, expected_timeout): 96 | queue = get_queue(queue_name) 97 | jobs = JobModel.get_many(queue.queued_job_registry.all(), queue.connection) 98 | self.assertEqual(1, len(jobs)) 99 | 100 | j = jobs[0] 101 | self.assertEqual(j.func, expected_func) 102 | self.assertEqual(j.success_ttl, expected_result_ttl) 103 | self.assertEqual(j.timeout, expected_timeout) 104 | 105 | def test_job_decorator_bad_queue(self): 106 | with self.assertRaises(settings.QueueNotFoundError): 107 | 108 | @job("bad-queue") 109 | def test_job_bad_queue(): 110 | time.sleep(1) 111 | return 1 + 1 112 | 113 | def test_job_decorator_delay_with_param(self): 114 | queue_name = "default" 115 | long_running_func.delay(MyClass()) 116 | 117 | worker = create_worker(queue_name, burst=True) 118 | worker.work() 119 | 120 | jobs_list = worker.queues[0].get_all_jobs() 121 | self.assertEqual(1, len(jobs_list)) 122 | job = jobs_list[0] 123 | self.assertEqual(job.func, long_running_func) 124 | self.assertEqual(job.kwargs, {}) 125 | self.assertEqual(job.status, JobStatus.FINISHED) 126 | self.assertEqual(job.args, 
class DeleteFailedExecutionsTest(BaseTestCase):
    """End-to-end check of the delete_failed_executions management command."""

    def test_delete_failed_executions__delete_jobs(self):
        queue = get_queue("default")
        # Running the command on an empty registry must be a no-op.
        call_command("delete_failed_executions", queue="default")

        queue.create_and_enqueue_job(failing_job)
        self.assertEqual(1, len(queue.queued_job_registry))

        # A burst worker processes the job, which lands in the failed registry.
        burst_worker = create_worker("default", burst=True)
        burst_worker.work()
        self.assertEqual(1, len(queue.failed_job_registry))

        # The command must clear the failed registry.
        call_command("delete_failed_executions", queue="default")
        self.assertEqual(0, len(queue.failed_job_registry))
task_factory 12 | from scheduler.models import TaskType 13 | 14 | 15 | class ExportTest(TestCase): 16 | def setUp(self) -> None: 17 | super().setUp() 18 | self.tmpfile = tempfile.NamedTemporaryFile() 19 | 20 | def tearDown(self) -> None: 21 | super().tearDown() 22 | os.remove(self.tmpfile.name) 23 | 24 | def test_export__should_export_job(self): 25 | tasks = list() 26 | tasks.append(task_factory(TaskType.ONCE, enabled=True)) 27 | tasks.append(task_factory(TaskType.REPEATABLE, enabled=True)) 28 | 29 | # act 30 | call_command("export", filename=self.tmpfile.name) 31 | # assert 32 | result = json.load(self.tmpfile) 33 | self.assertEqual(len(tasks), len(result)) 34 | self.assertEqual(result[0], tasks[0].to_dict()) 35 | self.assertEqual(result[1], tasks[1].to_dict()) 36 | 37 | def test_export__should_export_enabled_jobs_only(self): 38 | tasks = list() 39 | tasks.append(task_factory(TaskType.ONCE, enabled=True)) 40 | tasks.append(task_factory(TaskType.REPEATABLE, enabled=False)) 41 | 42 | # act 43 | call_command("export", filename=self.tmpfile.name, enabled=True) 44 | # assert 45 | result = json.load(self.tmpfile) 46 | self.assertEqual(len(tasks) - 1, len(result)) 47 | self.assertEqual(result[0], tasks[0].to_dict()) 48 | 49 | def test_export__should_export_job_yaml_without_yaml_lib(self): 50 | tasks = list() 51 | tasks.append(task_factory(TaskType.ONCE, enabled=True)) 52 | tasks.append(task_factory(TaskType.REPEATABLE, enabled=True)) 53 | 54 | # act 55 | with mock.patch.dict("sys.modules", {"yaml": None}): 56 | with self.assertRaises(SystemExit) as cm: 57 | call_command("export", filename=self.tmpfile.name, format="yaml") 58 | self.assertEqual(cm.exception.code, 1) 59 | 60 | def test_export__should_export_job_yaml_green(self): 61 | tasks = list() 62 | tasks.append(task_factory(TaskType.ONCE, enabled=True)) 63 | tasks.append(task_factory(TaskType.REPEATABLE, enabled=True)) 64 | tasks.append(task_factory(TaskType.CRON, enabled=True)) 65 | 66 | # act 67 | 
call_command("export", filename=self.tmpfile.name, format="yaml") 68 | # assert 69 | result = yaml.load(self.tmpfile, yaml.SafeLoader) 70 | self.assertEqual(len(tasks), len(result)) 71 | self.assertEqual(result[0], tasks[0].to_dict()) 72 | self.assertEqual(result[1], tasks[1].to_dict()) 73 | self.assertEqual(result[2], tasks[2].to_dict()) 74 | -------------------------------------------------------------------------------- /scheduler/tests/test_mgmt_commands/test_run_job.py: -------------------------------------------------------------------------------- 1 | from django.core.management import call_command 2 | from django.test import TestCase 3 | 4 | from scheduler.helpers.queues import get_queue 5 | from scheduler.redis_models import JobModel 6 | from scheduler.tests import conf # noqa 7 | from scheduler.tests.jobs import test_job 8 | 9 | 10 | class RunJobTest(TestCase): 11 | def test_run_job__should_schedule_job(self): 12 | queue = get_queue("default") 13 | queue.queued_job_registry.empty() 14 | func_name = f"{test_job.__module__}.{test_job.__name__}" 15 | # act 16 | call_command("run_job", func_name, queue="default") 17 | # assert 18 | job_list = JobModel.get_many(queue.queued_job_registry.all(), queue.connection) 19 | self.assertEqual(1, len(job_list)) 20 | self.assertEqual(func_name + "()", job_list[0].get_call_string()) 21 | -------------------------------------------------------------------------------- /scheduler/tests/test_mgmt_commands/test_scheduler_stats.py: -------------------------------------------------------------------------------- 1 | import json 2 | import sys 3 | from io import StringIO 4 | 5 | import yaml 6 | from django.core.management import call_command 7 | from django.test import TestCase, override_settings 8 | 9 | from scheduler import settings 10 | from scheduler.helpers.queues import get_queue 11 | 12 | 13 | @override_settings(SCHEDULER_QUEUES=dict(default={"HOST": "localhost", "PORT": 6379, "DB": 0})) 14 | class 
SchedulerStatsTest(TestCase): 15 | EXPECTED_OUTPUT = { 16 | "queues": [ 17 | { 18 | "canceled_jobs": 0, 19 | "failed_jobs": 0, 20 | "finished_jobs": 0, 21 | "name": "default", 22 | "oldest_job_timestamp": None, 23 | "queued_jobs": 0, 24 | "scheduled_jobs": 0, 25 | "scheduler_pid": None, 26 | "started_jobs": 0, 27 | "workers": 0, 28 | } 29 | ] 30 | } 31 | OLD_QUEUES = None 32 | 33 | def setUp(self): 34 | super(SchedulerStatsTest, self).setUp() 35 | SchedulerStatsTest.OLD_QUEUES = settings._QUEUES 36 | settings._QUEUES = dict() 37 | settings.conf_settings() 38 | get_queue("default").connection.flushall() 39 | 40 | def tearDown(self): 41 | super(SchedulerStatsTest, self).tearDown() 42 | settings._QUEUES = SchedulerStatsTest.OLD_QUEUES 43 | 44 | def test_scheduler_stats__json_output(self): 45 | test_stdout = StringIO() 46 | sys.stdout = test_stdout 47 | # act 48 | call_command("scheduler_stats", "-j") 49 | # assert 50 | res = test_stdout.getvalue() 51 | self.assertEqual(json.loads(res), SchedulerStatsTest.EXPECTED_OUTPUT) 52 | 53 | def test_scheduler_stats__yaml_output(self): 54 | # arrange 55 | test_stdout = StringIO() 56 | sys.stdout = test_stdout 57 | # act 58 | call_command("scheduler_stats", "-y") 59 | # assert 60 | res = test_stdout.getvalue() 61 | self.assertEqual(yaml.load(res, yaml.SafeLoader), SchedulerStatsTest.EXPECTED_OUTPUT) 62 | 63 | def test_scheduler_stats__plain_text_output(self): 64 | test_stdout = StringIO() 65 | sys.stdout = test_stdout 66 | # act 67 | call_command("scheduler_stats", "--no-color") 68 | # assert 69 | res = test_stdout.getvalue() 70 | self.assertEqual( 71 | res, 72 | """ 73 | Django-Scheduler CLI Dashboard 74 | 75 | -------------------------------------------------------------------------------- 76 | | Name | Queued | Active | Finished | Canceled | Workers | 77 | -------------------------------------------------------------------------------- 78 | | default | 0 | 0 | 0 | 0 | 0 | 79 | 
-------------------------------------------------------------------------------- 80 | """, 81 | ) 82 | 83 | def test_scheduler_stats__bad_args(self): 84 | # arrange 85 | sys.stderr = StringIO() 86 | sys.stdout = StringIO() 87 | # act 88 | with self.assertRaises(SystemExit): 89 | call_command("scheduler_stats", "-y", "-j") 90 | # assert 91 | res = sys.stdout.getvalue() 92 | self.assertEqual(res, """""") 93 | err = sys.stderr.getvalue() 94 | self.assertEqual(err, """Aborting. Cannot output as both json and yaml\n""") 95 | -------------------------------------------------------------------------------- /scheduler/tests/test_multiprocess/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/6ae0b16b8b818786cc4058794571d5bb4cc48ab8/scheduler/tests/test_multiprocess/__init__.py -------------------------------------------------------------------------------- /scheduler/tests/test_multiprocess/test_integrity.py: -------------------------------------------------------------------------------- 1 | from time import sleep 2 | 3 | from django.test import tag 4 | from django.urls import reverse 5 | 6 | from scheduler.helpers.queues import get_queue 7 | from scheduler.redis_models import JobStatus, JobModel, WorkerModel 8 | from scheduler.tests.jobs import long_job 9 | from .. 
import testtools 10 | from ..test_views.base import BaseTestCase 11 | 12 | 13 | @tag("multiprocess") 14 | class MultiProcessTest(BaseTestCase): 15 | def test_cancel_job_after_it_started(self): 16 | # arrange 17 | queue = get_queue("django_tasks_scheduler_test") 18 | job = queue.create_and_enqueue_job(long_job) 19 | self.assertTrue(job.is_queued) 20 | process, worker_name = testtools.run_worker_in_process("django_tasks_scheduler_test") 21 | sleep(0.2) 22 | job = JobModel.get(job.name, connection=queue.connection) 23 | self.assertEqual(JobStatus.STARTED, job.status) 24 | # act 25 | res = self.client.post(reverse("job_detail_action", args=[job.name, "cancel"]), {"post": "yes"}, follow=True) 26 | 27 | # assert 28 | self.assertEqual(200, res.status_code) 29 | job = JobModel.get(job.name, connection=queue.connection) 30 | self.assertEqual(JobStatus.STOPPED, job.status) 31 | self.assertNotIn(job.name, queue.queued_job_registry.all()) 32 | sleep(0.2) 33 | process.terminate() 34 | process.join(2) 35 | process.kill() 36 | worker_model = WorkerModel.get(worker_name, connection=queue.connection) 37 | self.assertEqual(0, worker_model.completed_jobs) 38 | self.assertEqual(0, worker_model.failed_job_count) 39 | self.assertEqual(0, worker_model.successful_job_count) 40 | self.assertIsNotNone(worker_model.shutdown_requested_date) 41 | -------------------------------------------------------------------------------- /scheduler/tests/test_redis_models.py: -------------------------------------------------------------------------------- 1 | from django.urls import reverse 2 | 3 | from scheduler.tests.testtools import SchedulerBaseCase 4 | 5 | 6 | class TestWorkerAdmin(SchedulerBaseCase): 7 | def test_admin_list_view(self): 8 | # arrange 9 | self.client.login(username="admin", password="admin") 10 | model = "worker" 11 | url = reverse(f"admin:scheduler_{model}_changelist") 12 | 13 | # act 14 | res = self.client.get(url) 15 | # assert 16 | self.assertEqual(200, res.status_code) 17 | 18 | 
19 | class TestQueueAdmin(SchedulerBaseCase): 20 | def test_admin_list_view(self): 21 | # arrange 22 | self.client.login(username="admin", password="admin") 23 | model = "queue" 24 | url = reverse(f"admin:scheduler_{model}_changelist") 25 | 26 | # act 27 | res = self.client.get(url) 28 | # assert 29 | self.assertEqual(200, res.status_code) 30 | -------------------------------------------------------------------------------- /scheduler/tests/test_settings.py: -------------------------------------------------------------------------------- 1 | import dataclasses 2 | 3 | from django.conf import settings 4 | from django.core.exceptions import ImproperlyConfigured 5 | 6 | from scheduler.settings import conf_settings 7 | from scheduler.tests.testtools import SchedulerBaseCase 8 | from scheduler.types import Broker, SchedulerConfiguration 9 | 10 | 11 | class TestWorkerAdmin(SchedulerBaseCase): 12 | 13 | def setUp(self): 14 | from scheduler.settings import SCHEDULER_CONFIG 15 | self.old_settings = SCHEDULER_CONFIG 16 | 17 | def tearDown(self): 18 | from scheduler import settings as scheduler_settings 19 | scheduler_settings.SCHEDULER_CONFIG = self.old_settings 20 | 21 | def test_scheduler_config_as_dict(self): 22 | from scheduler.settings import SCHEDULER_CONFIG 23 | settings.SCHEDULER_CONFIG = dict( 24 | EXECUTIONS_IN_PAGE=SCHEDULER_CONFIG.EXECUTIONS_IN_PAGE + 1, 25 | SCHEDULER_INTERVAL=SCHEDULER_CONFIG.SCHEDULER_INTERVAL + 1, 26 | BROKER=Broker.REDIS, 27 | CALLBACK_TIMEOUT=SCHEDULER_CONFIG.SCHEDULER_INTERVAL + 1, 28 | 29 | DEFAULT_SUCCESS_TTL=SCHEDULER_CONFIG.DEFAULT_SUCCESS_TTL + 1, 30 | DEFAULT_FAILURE_TTL=SCHEDULER_CONFIG.DEFAULT_FAILURE_TTL + 1, 31 | DEFAULT_JOB_TTL=SCHEDULER_CONFIG.DEFAULT_JOB_TTL + 1, 32 | DEFAULT_JOB_TIMEOUT=SCHEDULER_CONFIG.DEFAULT_JOB_TIMEOUT + 1, 33 | # General configuration values 34 | DEFAULT_WORKER_TTL=SCHEDULER_CONFIG.DEFAULT_WORKER_TTL + 1, 35 | DEFAULT_MAINTENANCE_TASK_INTERVAL=SCHEDULER_CONFIG.DEFAULT_MAINTENANCE_TASK_INTERVAL + 1, 36 | 
DEFAULT_JOB_MONITORING_INTERVAL=SCHEDULER_CONFIG.DEFAULT_JOB_MONITORING_INTERVAL + 1, 37 | SCHEDULER_FALLBACK_PERIOD_SECS=SCHEDULER_CONFIG.SCHEDULER_FALLBACK_PERIOD_SECS + 1, 38 | ) 39 | conf_settings() 40 | from scheduler.settings import SCHEDULER_CONFIG 41 | for key, value in settings.SCHEDULER_CONFIG.items(): 42 | self.assertEqual(getattr(SCHEDULER_CONFIG, key), value) 43 | 44 | def test_scheduler_config_as_data_class(self): 45 | from scheduler.settings import SCHEDULER_CONFIG 46 | self.assertEqual(SCHEDULER_CONFIG.EXECUTIONS_IN_PAGE, 20) 47 | settings.SCHEDULER_CONFIG = SchedulerConfiguration( 48 | EXECUTIONS_IN_PAGE=1, 49 | SCHEDULER_INTERVAL=60, 50 | BROKER=Broker.REDIS, 51 | CALLBACK_TIMEOUT=1111, 52 | 53 | DEFAULT_SUCCESS_TTL=1111, 54 | DEFAULT_FAILURE_TTL=111111, 55 | DEFAULT_JOB_TTL=1111, 56 | DEFAULT_JOB_TIMEOUT=11111, 57 | # General configuration values 58 | DEFAULT_WORKER_TTL=11111, 59 | DEFAULT_MAINTENANCE_TASK_INTERVAL=111, 60 | DEFAULT_JOB_MONITORING_INTERVAL=1111, 61 | SCHEDULER_FALLBACK_PERIOD_SECS=1111, 62 | ) 63 | conf_settings() 64 | from scheduler.settings import SCHEDULER_CONFIG 65 | for key, value in dataclasses.asdict(settings.SCHEDULER_CONFIG).items(): 66 | self.assertEqual(getattr(SCHEDULER_CONFIG, key), value) 67 | 68 | def test_scheduler_config_as_dict_bad_param(self): 69 | settings.SCHEDULER_CONFIG = dict( 70 | EXECUTIONS_IN_PAGE=1, 71 | SCHEDULER_INTERVAL=60, 72 | BROKER=Broker.REDIS, 73 | CALLBACK_TIMEOUT=1111, 74 | 75 | DEFAULT_SUCCESS_TTL=1111, 76 | DEFAULT_FAILURE_TTL=111111, 77 | DEFAULT_JOB_TTL=1111, 78 | DEFAULT_JOB_TIMEOUT=11111, 79 | # General configuration values 80 | DEFAULT_WORKER_TTL=11111, 81 | DEFAULT_MAINTENANCE_TASK_INTERVAL=111, 82 | DEFAULT_JOB_MONITORING_INTERVAL=1111, 83 | SCHEDULER_FALLBACK_PERIOD_SECS=1111, 84 | BAD_PARAM='bad_value', # This should raise an error 85 | ) 86 | self.assertRaises(ImproperlyConfigured, conf_settings) 87 | 
-------------------------------------------------------------------------------- /scheduler/tests/test_task_types/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/6ae0b16b8b818786cc4058794571d5bb4cc48ab8/scheduler/tests/test_task_types/__init__.py -------------------------------------------------------------------------------- /scheduler/tests/test_task_types/test_cron_task.py: -------------------------------------------------------------------------------- 1 | from django.core.exceptions import ValidationError 2 | 3 | from scheduler import settings 4 | from scheduler.helpers.queues import get_queue 5 | from scheduler.models import TaskType 6 | from scheduler.redis_models import JobModel 7 | from scheduler.tests.test_task_types.test_task_model import BaseTestCases 8 | from scheduler.tests.testtools import task_factory 9 | from scheduler.worker import create_worker 10 | 11 | 12 | class TestCronTask(BaseTestCases.TestBaseTask): 13 | task_type = TaskType.CRON 14 | 15 | def setUp(self) -> None: 16 | super().setUp() 17 | self.queue_name = settings.get_queue_names()[0] 18 | 19 | def test_clean(self): 20 | task = task_factory(self.task_type) 21 | task.cron_string = "* * * * *" 22 | task.queue = self.queue_name 23 | task.callable = "scheduler.tests.jobs.test_job" 24 | self.assertIsNone(task.clean()) 25 | 26 | def test_clean_cron_string_invalid(self): 27 | task = task_factory(self.task_type) 28 | task.cron_string = "not-a-cron-string" 29 | task.queue = self.queue_name 30 | task.callable = "scheduler.tests.jobs.test_job" 31 | with self.assertRaises(ValidationError): 32 | task.clean_cron_string() 33 | 34 | def test_check_rescheduled_after_execution(self): 35 | task = task_factory(self.task_type) 36 | queue = task.rqueue 37 | first_run_id = task.job_name 38 | entry = JobModel.get(first_run_id, connection=queue.connection) 39 | self.assertIsNotNone(entry) 40 | 
queue.run_sync(entry) 41 | task.refresh_from_db() 42 | self.assertEqual(task.failed_runs, 0) 43 | self.assertIsNone(task.last_failed_run) 44 | self.assertEqual(task.successful_runs, 1) 45 | self.assertIsNotNone(task.last_successful_run) 46 | self.assertTrue(task.is_scheduled()) 47 | self.assertNotEqual(task.job_name, first_run_id) 48 | 49 | def test_check_rescheduled_after_failed_execution(self): 50 | task = task_factory(self.task_type, callable_name="scheduler.tests.jobs.failing_job") 51 | queue = task.rqueue 52 | first_run_id = task.job_name 53 | entry = JobModel.get(first_run_id, connection=queue.connection) 54 | queue.run_sync(entry) 55 | task.refresh_from_db() 56 | self.assertEqual(task.failed_runs, 1) 57 | self.assertIsNotNone(task.last_failed_run) 58 | self.assertEqual(task.successful_runs, 0) 59 | self.assertIsNone(task.last_successful_run) 60 | self.assertTrue(task.is_scheduled()) 61 | self.assertNotEqual(task.job_name, first_run_id) 62 | 63 | def test_cron_task_enqueuing_jobs(self): 64 | queue = get_queue() 65 | prev_queued = queue.scheduled_job_registry.count(connection=queue.connection) 66 | prev_finished = queue.finished_job_registry.count(connection=queue.connection) 67 | 68 | task = task_factory(self.task_type, callable_name="scheduler.tests.jobs.enqueue_jobs") 69 | self.assertEqual(prev_queued + 1, queue.scheduled_job_registry.count(connection=queue.connection)) 70 | first_run_id = task.job_name 71 | entry = JobModel.get(first_run_id, connection=queue.connection) 72 | queue.run_sync(entry) 73 | self.assertEqual(20, len(queue.queued_job_registry)) 74 | self.assertEqual(prev_finished + 1, queue.finished_job_registry.count(connection=queue.connection)) 75 | worker = create_worker("default", fork_job_execution=False, burst=True) 76 | worker.work() 77 | self.assertEqual(prev_finished + 21, queue.finished_job_registry.count(connection=queue.connection)) 78 | worker.refresh(update_queues=True) 79 | self.assertEqual(20, worker._model.successful_job_count) 
80 | self.assertEqual(0, worker._model.failed_job_count) 81 | -------------------------------------------------------------------------------- /scheduler/tests/test_task_types/test_once_task.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta, datetime 2 | 3 | from django.core.exceptions import ValidationError 4 | from django.utils import timezone 5 | 6 | from scheduler import settings 7 | from scheduler.models import TaskType 8 | from scheduler.tests.test_task_types.test_task_model import BaseTestCases 9 | from scheduler.tests.testtools import task_factory 10 | 11 | 12 | class TestScheduledTask(BaseTestCases.TestSchedulableTask): 13 | task_type = TaskType.ONCE 14 | queue_name = settings.get_queue_names()[0] 15 | 16 | def test_clean(self): 17 | job = task_factory(self.task_type) 18 | job.queue = self.queue_name 19 | job.callable = "scheduler.tests.jobs.test_job" 20 | self.assertIsNone(job.clean()) 21 | 22 | def test_create_without_date__fail(self): 23 | task = task_factory(self.task_type, scheduled_time=None, instance_only=True) 24 | self.assertIsNone(task.scheduled_time) 25 | with self.assertRaises(Exception) as cm: 26 | task.clean() 27 | self.assertTrue(isinstance(cm.exception, ValidationError)) 28 | self.assertEqual(str(cm.exception), "{'scheduled_time': ['Scheduled time is required']}") 29 | 30 | def test_create_with_date_in_the_past__fail(self): 31 | task = task_factory(self.task_type, scheduled_time=datetime.now() - timedelta(days=1), instance_only=True) 32 | with self.assertRaises(Exception) as cm: 33 | task.clean() 34 | self.assertTrue(isinstance(cm.exception, ValidationError)) 35 | self.assertEqual(str(cm.exception), "{'scheduled_time': ['Scheduled time must be in the future']}") 36 | 37 | def test_unschedulable_old_job(self): 38 | job = task_factory(self.task_type, scheduled_time=timezone.now() - timedelta(hours=1)) 39 | self.assertFalse(job.is_scheduled()) 40 | 
-------------------------------------------------------------------------------- /scheduler/tests/test_views/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/6ae0b16b8b818786cc4058794571d5bb4cc48ab8/scheduler/tests/test_views/__init__.py -------------------------------------------------------------------------------- /scheduler/tests/test_views/base.py: -------------------------------------------------------------------------------- 1 | from django.contrib.auth.models import User 2 | from django.test import TestCase 3 | from django.test.client import Client 4 | 5 | from scheduler.helpers.queues import get_queue 6 | from scheduler.tests import conf # noqa 7 | 8 | 9 | class BaseTestCase(TestCase): 10 | def setUp(self): 11 | self.user = User.objects.create_superuser("user", password="pass") 12 | self.client = Client() 13 | self.client.login(username=self.user.username, password="pass") 14 | get_queue("django_tasks_scheduler_test").connection.flushall() 15 | -------------------------------------------------------------------------------- /scheduler/tests/test_views/test_queue_registry_jobs.py: -------------------------------------------------------------------------------- 1 | import time 2 | from datetime import datetime 3 | 4 | from django.urls import reverse 5 | 6 | from scheduler.helpers.queues import get_queue 7 | from scheduler.tests.jobs import test_job 8 | from scheduler.tests.test_views.base import BaseTestCase 9 | 10 | 11 | class QueueRegistryJobsViewTest(BaseTestCase): 12 | def test_queue_jobs_unknown_registry(self): 13 | queue_name = "default" 14 | res = self.client.get(reverse("queue_registry_jobs", args=[queue_name, "unknown"]), follow=True) 15 | self.assertEqual(404, res.status_code) 16 | 17 | def test_queue_jobs_unknown_queue(self): 18 | res = self.client.get(reverse("queue_registry_jobs", args=["UNKNOWN", "queued"])) 19 | 
self.assertEqual(404, res.status_code) 20 | 21 | def test_queued_jobs(self): 22 | """Jobs in queue are displayed properly""" 23 | queue = get_queue("default") 24 | job = queue.create_and_enqueue_job(test_job) 25 | queue_name = "default" 26 | res = self.client.get(reverse("queue_registry_jobs", args=[queue_name, "queued"])) 27 | self.assertEqual(res.context["jobs"], [job]) 28 | 29 | def test_finished_jobs(self): 30 | """Ensure that finished jobs page works properly.""" 31 | queue = get_queue("django_tasks_scheduler_test") 32 | queue_name = "django_tasks_scheduler_test" 33 | 34 | job = queue.create_and_enqueue_job(test_job) 35 | registry = queue.finished_job_registry 36 | registry.add(queue.connection, job.name, time.time() + 2) 37 | res = self.client.get(reverse("queue_registry_jobs", args=[queue_name, "finished"])) 38 | self.assertEqual(res.context["jobs"], [job]) 39 | 40 | def test_failed_jobs(self): 41 | """Ensure that failed jobs page works properly.""" 42 | queue = get_queue("django_tasks_scheduler_test") 43 | queue_name = "django_tasks_scheduler_test" 44 | 45 | # Test that page doesn't fail when FailedJobRegistry is empty 46 | res = self.client.get(reverse("queue_registry_jobs", args=[queue_name, "failed"])) 47 | self.assertEqual(res.status_code, 200) 48 | 49 | job = queue.create_and_enqueue_job(test_job) 50 | registry = queue.failed_job_registry 51 | registry.add(queue.connection, job.name, time.time() + 20) 52 | res = self.client.get(reverse("queue_registry_jobs", args=[queue_name, "failed"])) 53 | self.assertEqual(res.context["jobs"], [job]) 54 | 55 | def test_scheduled_jobs(self): 56 | """Ensure that scheduled jobs page works properly.""" 57 | queue = get_queue("django_tasks_scheduler_test") 58 | queue_name = "django_tasks_scheduler_test" 59 | 60 | # Test that page doesn't fail when ScheduledJobRegistry is empty 61 | res = self.client.get(reverse("queue_registry_jobs", args=[queue_name, "scheduled"])) 62 | self.assertEqual(res.status_code, 200) 63 | 64 | 
job = queue.create_and_enqueue_job(test_job, when=datetime.now()) 65 | res = self.client.get(reverse("queue_registry_jobs", args=[queue_name, "scheduled"])) 66 | self.assertEqual(res.context["jobs"], [job]) 67 | 68 | def test_scheduled_jobs_registry_removal(self): 69 | """Ensure that non-existing job is being deleted from registry by view""" 70 | queue = get_queue("django_tasks_scheduler_test") 71 | queue_name = "django_tasks_scheduler_test" 72 | 73 | registry = queue.scheduled_job_registry 74 | job = queue.create_and_enqueue_job(test_job, when=datetime.now()) 75 | self.assertEqual(len(registry), 1) 76 | 77 | queue.delete_job(job.name) 78 | res = self.client.get(reverse("queue_registry_jobs", args=[queue_name, "scheduled"])) 79 | self.assertEqual(res.context["jobs"], []) 80 | 81 | self.assertEqual(len(registry), 0) 82 | 83 | def test_started_jobs(self): 84 | """Ensure that active jobs page works properly.""" 85 | queue = get_queue("django_tasks_scheduler_test") 86 | queue_name = "django_tasks_scheduler_test" 87 | 88 | job = queue.create_and_enqueue_job(test_job) 89 | registry = queue.active_job_registry 90 | registry.add(queue.connection, job.name, time.time() + 20) 91 | res = self.client.get(reverse("queue_registry_jobs", args=[queue_name, "active"])) 92 | self.assertEqual(res.context["jobs"], [job]) 93 | -------------------------------------------------------------------------------- /scheduler/tests/test_views/test_workers_view.py: -------------------------------------------------------------------------------- 1 | from django.urls import reverse 2 | 3 | from scheduler.worker import create_worker 4 | from scheduler.tests import conf # noqa 5 | from scheduler.tests.test_views.base import BaseTestCase 6 | 7 | 8 | class TestViewWorkers(BaseTestCase): 9 | def test_workers_home(self): 10 | res = self.client.get(reverse("workers_home")) 11 | prev_workers = res.context["workers"] 12 | worker1 = create_worker("django_tasks_scheduler_test") 13 | worker1.worker_start() 14 
| worker2 = create_worker("test3") 15 | worker2.worker_start() 16 | 17 | res = self.client.get(reverse("workers_home")) 18 | self.assertEqual(res.context["workers"], prev_workers + [worker1._model, worker2._model]) 19 | -------------------------------------------------------------------------------- /scheduler/tests/test_worker/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/6ae0b16b8b818786cc4058794571d5bb4cc48ab8/scheduler/tests/test_worker/__init__.py -------------------------------------------------------------------------------- /scheduler/tests/test_worker/test_scheduler.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | 3 | import time_machine 4 | from django.utils import timezone 5 | 6 | from scheduler.settings import SCHEDULER_CONFIG 7 | from scheduler.worker import create_worker 8 | from scheduler.models import TaskType 9 | from scheduler.tests.testtools import SchedulerBaseCase, task_factory 10 | from scheduler.worker import WorkerScheduler 11 | 12 | 13 | class TestWorkerScheduler(SchedulerBaseCase): 14 | def test_create_worker_with_scheduler__scheduler_started(self): 15 | SCHEDULER_CONFIG.SCHEDULER_INTERVAL = 1 16 | worker = create_worker("default", name="test", burst=True, with_scheduler=True) 17 | worker.bootstrap() 18 | self.assertIsNotNone(worker.scheduler) 19 | worker.stop_scheduler() 20 | self.assertIsNone(worker.scheduler) 21 | 22 | def test_scheduler_schedules_tasks(self): 23 | with time_machine.travel(0.0, tick=False) as traveller: 24 | # arrange 25 | task = task_factory(TaskType.ONCE, scheduled_time=timezone.now() + timedelta(milliseconds=40)) 26 | self.assertIsNotNone(task.job_name) 27 | self.assertNotIn(task.job_name, task.rqueue.queued_job_registry) 28 | self.assertIn(task.job_name, task.rqueue.scheduled_job_registry) 29 | 30 | scheduler = 
WorkerScheduler([task.rqueue], worker_name="fake-worker", connection=task.rqueue.connection) 31 | 32 | # act 33 | traveller.move_to(50) 34 | scheduler._acquire_locks() 35 | scheduler.enqueue_scheduled_jobs() 36 | 37 | # assert 38 | self.assertIsNotNone(task.job_name) 39 | self.assertIn(task.job_name, task.rqueue.queued_job_registry) 40 | self.assertNotIn(task.job_name, task.rqueue.scheduled_job_registry) 41 | -------------------------------------------------------------------------------- /scheduler/tests/test_worker/test_worker_commands.py: -------------------------------------------------------------------------------- 1 | from scheduler.helpers.queues import get_queue 2 | from scheduler.tests.jobs import test_job 3 | from ..test_views.base import BaseTestCase 4 | from ...redis_models import JobModel 5 | from ...worker import create_worker 6 | from ...worker.commands import send_command 7 | from ...worker.commands.suspend_worker import SuspendWorkCommand 8 | 9 | 10 | class WorkerCommandsTest(BaseTestCase): 11 | def test_stop_worker_command__green(self): 12 | # Arrange 13 | worker_name = "test" 14 | queue = get_queue("default") 15 | job = queue.create_and_enqueue_job(test_job) 16 | self.assertTrue(job.is_queued) 17 | worker = create_worker("default", name=worker_name, burst=True, with_scheduler=False) 18 | worker.worker_start() 19 | # Act 20 | send_command(queue.connection, SuspendWorkCommand(worker_name=worker_name)) 21 | worker.work() 22 | 23 | # Assert 24 | self.assertTrue(job.is_queued) 25 | self.assertTrue(worker._model.is_suspended) 26 | 27 | def test_stop_worker_command__bad_worker_name(self): 28 | # Arrange 29 | worker_name = "test" 30 | queue = get_queue("default") 31 | job = queue.create_and_enqueue_job(test_job) 32 | self.assertTrue(job.is_queued) 33 | worker = create_worker("default", name=worker_name, burst=True, with_scheduler=False) 34 | worker.bootstrap() 35 | # Act 36 | send_command(queue.connection, SuspendWorkCommand(worker_name=worker_name + 
"1")) 37 | worker.work() 38 | 39 | # Assert 40 | self.assertFalse(worker._model.is_suspended) 41 | job = JobModel.get(job.name, connection=queue.connection) 42 | self.assertFalse(job.is_queued) 43 | -------------------------------------------------------------------------------- /scheduler/tests/test_worker/test_worker_commands_multiprocess.py: -------------------------------------------------------------------------------- 1 | from time import sleep 2 | 3 | from django.test import tag 4 | 5 | from scheduler.helpers.queues import get_queue 6 | from scheduler.redis_models import JobStatus, JobModel, WorkerModel 7 | from scheduler.tests.jobs import long_job, two_seconds_job 8 | from .. import testtools 9 | from ..test_views.base import BaseTestCase 10 | from ...worker.commands import KillWorkerCommand, send_command, StopJobCommand 11 | 12 | 13 | @tag("multiprocess") 14 | class WorkerCommandsTest(BaseTestCase): 15 | def test_kill_job_command__current_job(self): 16 | # Arrange 17 | queue = get_queue("django_tasks_scheduler_test") 18 | job = queue.create_and_enqueue_job(long_job) 19 | self.assertTrue(job.is_queued) 20 | process, worker_name = testtools.run_worker_in_process("django_tasks_scheduler_test") 21 | sleep(0.1) 22 | job = JobModel.get(job.name, connection=queue.connection) 23 | self.assertEqual(JobStatus.STARTED, job.status) 24 | 25 | # Act 26 | send_command(queue.connection, StopJobCommand(worker_name=worker_name, job_name=job.name)) 27 | 28 | # Assert 29 | 30 | process.terminate() 31 | process.join(2) 32 | process.kill() 33 | 34 | job = JobModel.get(job.name, connection=queue.connection) 35 | worker_model = WorkerModel.get(worker_name, connection=queue.connection) 36 | self.assertEqual(job.name, worker_model.stopped_job_name) 37 | self.assertEqual(job.name, worker_model.current_job_name) 38 | self.assertEqual(0, worker_model.completed_jobs) 39 | self.assertEqual(0, worker_model.failed_job_count) 40 | self.assertEqual(0, worker_model.successful_job_count) 41 | 
self.assertEqual(JobStatus.STOPPED, job.status) 42 | self.assertNotIn(job.name, queue.queued_job_registry.all()) 43 | 44 | def test_kill_job_command__different_job(self): 45 | # Arrange 46 | queue = get_queue("django_tasks_scheduler_test") 47 | job = queue.create_and_enqueue_job(two_seconds_job) 48 | self.assertTrue(job.is_queued) 49 | process, worker_name = testtools.run_worker_in_process("django_tasks_scheduler_test") 50 | sleep(0.2) 51 | job = JobModel.get(job.name, connection=queue.connection) 52 | self.assertEqual(JobStatus.STARTED, job.status) 53 | 54 | # Act 55 | send_command(queue.connection, StopJobCommand(worker_name=worker_name, job_name=job.name + "1")) 56 | sleep(0.1) 57 | process.kill() 58 | process.join() 59 | # Assert 60 | job = JobModel.get(job.name, connection=queue.connection) 61 | self.assertEqual(JobStatus.STARTED, job.status) 62 | self.assertNotIn(job.name, queue.queued_job_registry.all()) 63 | worker_model = WorkerModel.get(worker_name, connection=queue.connection) 64 | self.assertEqual(0, worker_model.completed_jobs) 65 | self.assertEqual(0, worker_model.failed_job_count) 66 | self.assertEqual(0, worker_model.successful_job_count) 67 | self.assertIsNone(worker_model.stopped_job_name) 68 | self.assertEqual(job.name, worker_model.current_job_name) 69 | 70 | def test_kill_worker_command(self): 71 | queue = get_queue("django_tasks_scheduler_test") 72 | process, worker_name = testtools.run_worker_in_process("django_tasks_scheduler_test") 73 | sleep(0.1) 74 | # act 75 | send_command(queue.connection, KillWorkerCommand(worker_name=worker_name)) 76 | # assert 77 | sleep(0.2) 78 | process.kill() 79 | process.join() 80 | worker_model = WorkerModel.get(worker_name, connection=queue.connection) 81 | self.assertEqual(0, worker_model.completed_jobs) 82 | self.assertEqual(0, worker_model.failed_job_count) 83 | self.assertEqual(0, worker_model.successful_job_count) 84 | self.assertIsNotNone(worker_model.shutdown_requested_date) 85 | 
-------------------------------------------------------------------------------- /scheduler/tests/test_worker/test_worker_creation.py: -------------------------------------------------------------------------------- 1 | import os 2 | import uuid 3 | 4 | from scheduler import settings 5 | from scheduler.redis_models import WorkerModel 6 | from scheduler.worker import create_worker 7 | from scheduler.tests import conf # noqa 8 | from scheduler.tests.testtools import SchedulerBaseCase 9 | from scheduler.worker.worker import QueueConnectionDiscrepancyError 10 | 11 | 12 | class TestWorker(SchedulerBaseCase): 13 | def test_create_worker__two_workers_same_queue(self): 14 | worker1 = create_worker("default", "django_tasks_scheduler_test") 15 | worker1.worker_start() 16 | worker2 = create_worker("default") 17 | worker2.worker_start() 18 | hostname = os.uname()[1] 19 | self.assertEqual(f"{hostname}-worker.1", worker1.name) 20 | self.assertEqual(f"{hostname}-worker.2", worker2.name) 21 | 22 | def test_create_worker__worker_with_queues_different_connection(self): 23 | with self.assertRaises(QueueConnectionDiscrepancyError): 24 | create_worker("default", "test1") 25 | 26 | def test_create_worker__with_name(self): 27 | name = uuid.uuid4().hex 28 | worker1 = create_worker("default", name=name) 29 | self.assertEqual(name, worker1.name) 30 | 31 | def test_create_worker__with_name_containing_slash(self): 32 | name = uuid.uuid4().hex[-4:] + "/" + uuid.uuid4().hex[-4:] 33 | worker1 = create_worker("default", name=name) 34 | self.assertEqual(name.replace("/", "."), worker1.name) 35 | 36 | def test_create_worker__scheduler_interval(self): 37 | prev = settings.SCHEDULER_CONFIG.SCHEDULER_INTERVAL 38 | settings.SCHEDULER_CONFIG.SCHEDULER_INTERVAL = 1 39 | worker = create_worker("default", name="test", burst=True, with_scheduler=True) 40 | worker.bootstrap() 41 | self.assertEqual(worker.name, "test") 42 | self.assertEqual(worker.scheduler.interval, 1) 43 | 
import ctypes
import signal
import threading


class BaseTimeoutException(Exception):
    """Base exception for timeouts."""

    pass


class JobTimeoutException(BaseTimeoutException):
    """Raised when a job takes longer to complete than the allowed maximum timeout value."""

    pass


class JobExecutionMonitorTimeoutException(BaseTimeoutException):
    """Raised when waiting for a job-execution-process exiting takes longer than the maximum timeout value."""

    pass


class BaseDeathPenalty:
    """Context-manager contract for enforcing job timeouts.

    Entering the context arms the timeout mechanism; leaving it disarms it.
    Subclasses supply the actual arming/disarming strategy.
    """

    def __init__(self, timeout, exception=BaseTimeoutException, **kwargs):
        self._timeout = timeout
        self._exception = exception

    def __enter__(self):
        self.setup_death_penalty()

    def __exit__(self, exc_type, exc_value, traceback):
        # The with-body is done, so disarm the penalty immediately.
        try:
            self.cancel_death_penalty()
        except BaseTimeoutException:
            # Rare race: the alarm fired after the body finished but before
            # cancellation took effect. The body completed, so ignore it.
            pass

        # Returning False keeps exception propagation intact: errors raised in
        # the with-body (including BaseTimeoutException) reach the caller.
        return False

    def setup_death_penalty(self):
        raise NotImplementedError()

    def cancel_death_penalty(self):
        raise NotImplementedError()


class UnixSignalDeathPenalty(BaseDeathPenalty):
    """Timeout enforcement based on SIGALRM (Unix only, main thread only)."""

    def handle_death_penalty(self, signum, frame) -> None:
        raise self._exception(f"Task exceeded maximum timeout value ({self._timeout} seconds)")

    def setup_death_penalty(self) -> None:
        """Install the SIGALRM handler and schedule an alarm after the timeout (seconds)."""
        signal.signal(signal.SIGALRM, self.handle_death_penalty)
        signal.alarm(self._timeout)

    def cancel_death_penalty(self) -> None:
        """Clear any pending alarm and restore default SIGALRM handling."""
        signal.alarm(0)
        signal.signal(signal.SIGALRM, signal.SIG_DFL)


class TimerDeathPenalty(BaseDeathPenalty):
    """Timeout enforcement based on a threading.Timer.

    Works without SIGALRM by asynchronously injecting the timeout exception
    into the thread that armed the penalty.
    """

    def __init__(self, timeout, exception=JobTimeoutException, **kwargs):
        super().__init__(timeout, exception, **kwargs)
        self._target_thread_id = threading.current_thread().ident
        self._timer = None

        # PyThreadState_SetAsyncExc only accepts an exception *class*, so bake
        # the message into the class by monkey-patching __init__ ahead of time.
        def init_with_message(self, *args, **kwargs):  # noqa
            super(exception, self).__init__("Task exceeded maximum timeout value ({0} seconds)".format(timeout))

        self._exception.__init__ = init_with_message

    def new_timer(self):
        """Return a fresh timer (timers are single-use)."""
        return threading.Timer(self._timeout, self.handle_death_penalty)

    def handle_death_penalty(self):
        """Raise the timeout exception asynchronously in the target thread.

        Reference http://docs.python.org/c-api/init.html#PyThreadState_SetAsyncExc for more info.
        """
        affected = ctypes.pythonapi.PyThreadState_SetAsyncExc(
            ctypes.c_long(self._target_thread_id), ctypes.py_object(self._exception)
        )
        if affected == 0:
            raise ValueError(f"Invalid thread ID {self._target_thread_id}")
        if affected > 1:
            # More than one thread state was modified - undo and fail loudly.
            ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(self._target_thread_id), 0)
            raise SystemError("PyThreadState_SetAsyncExc failed")

    def setup_death_penalty(self):
        """Start the countdown timer (no-op for non-positive timeouts)."""
        if self._timeout <= 0:
            return
        self._timer = self.new_timer()
        self._timer.start()

    def cancel_death_penalty(self):
        """Stop and discard the countdown timer (no-op for non-positive timeouts)."""
        if self._timeout <= 0:
            return
        self._timer.cancel()
        self._timer = None
# This is a helper module to obfuscate types used by different broker implementations. 2 | from collections import namedtuple 3 | from typing import Any, Callable, TypeVar, Union 4 | from typing import Dict, Tuple 5 | 6 | import redis 7 | 8 | try: 9 | import valkey 10 | except ImportError: 11 | valkey = redis 12 | valkey.Valkey = redis.Redis 13 | valkey.StrictValkey = redis.StrictRedis 14 | 15 | from .settings_types import Broker 16 | 17 | ConnectionErrorTypes = (redis.ConnectionError, valkey.ConnectionError) 18 | ResponseErrorTypes = (redis.ResponseError, valkey.ResponseError) 19 | TimeoutErrorTypes = (redis.TimeoutError, valkey.TimeoutError) 20 | WatchErrorTypes = (redis.WatchError, valkey.WatchError) 21 | ConnectionType = Union[redis.Redis, valkey.Valkey] 22 | PipelineType = Union[redis.client.Pipeline, valkey.client.Pipeline] 23 | SentinelType = Union[redis.sentinel.Sentinel, valkey.sentinel.Sentinel] 24 | FunctionReferenceType = TypeVar("FunctionReferenceType", str, Callable[..., Any]) 25 | 26 | BrokerMetaDataType = namedtuple("BrokerMetaDataType", ["connection_type", "sentinel_type"]) 27 | 28 | BrokerMetaData: Dict[Tuple[Broker, bool], BrokerMetaDataType] = { 29 | # Map of (Broker, Strict flag) => Connection Class, Sentinel Class 30 | (Broker.REDIS, False): BrokerMetaDataType(redis.Redis, redis.sentinel.Sentinel), 31 | (Broker.VALKEY, False): BrokerMetaDataType(valkey.Valkey, valkey.sentinel.Sentinel), 32 | (Broker.REDIS, True): BrokerMetaDataType(redis.StrictRedis, redis.sentinel.Sentinel), 33 | (Broker.VALKEY, True): BrokerMetaDataType(valkey.StrictValkey, valkey.sentinel.Sentinel), 34 | } 35 | 36 | TASK_TYPES = ["OnceTaskType", "RepeatableTaskType", "CronTaskType"] 37 | -------------------------------------------------------------------------------- /scheduler/types/settings_types.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from dataclasses import dataclass 3 | from enum import Enum 4 | from 
class Broker(Enum):
    # Broker backend used for queue storage; FAKEREDIS is an in-memory
    # stand-in (used by the test configuration).
    REDIS = "redis"
    FAKEREDIS = "fakeredis"
    VALKEY = "valkey"


def _token_validation(token: str) -> bool:
    """Default TOKEN_VALIDATION_METHOD for SchedulerConfiguration: reject every token.

    Deployments exposing token-authenticated endpoints should override
    SCHEDULER_CONFIG.TOKEN_VALIDATION_METHOD with a real validator.
    """
    return False
42 | SCHEDULER_FALLBACK_PERIOD_SECS: int = 120 # Period (secs) to wait before requiring to reacquire locks 43 | DEATH_PENALTY_CLASS: Type[BaseDeathPenalty] = UnixSignalDeathPenalty 44 | 45 | 46 | @dataclass(slots=True, frozen=True, kw_only=True) 47 | class QueueConfiguration: 48 | __CONNECTION_FIELDS__ = { 49 | "URL", 50 | "DB", 51 | "UNIX_SOCKET_PATH", 52 | "HOST", 53 | "PORT", 54 | "PASSWORD", 55 | "SENTINELS", 56 | "MASTER_NAME", 57 | "CONNECTION_KWARGS", 58 | } 59 | DB: Optional[int] = None 60 | # Redis connection parameters, either UNIX_SOCKET_PATH/URL/separate params (HOST, PORT, PASSWORD) should be provided 61 | UNIX_SOCKET_PATH: Optional[str] = None 62 | URL: Optional[str] = None 63 | HOST: Optional[str] = None 64 | PORT: Optional[int] = None 65 | USERNAME: Optional[str] = None 66 | PASSWORD: Optional[str] = None 67 | 68 | ASYNC: Optional[bool] = True 69 | 70 | SENTINELS: Optional[List[Tuple[str, int]]] = None 71 | SENTINEL_KWARGS: Optional[Dict[str, str]] = None 72 | MASTER_NAME: Optional[str] = None 73 | CONNECTION_KWARGS: Optional[Dict[str, Any]] = None 74 | 75 | def __post_init__(self): 76 | if not any((self.URL, self.UNIX_SOCKET_PATH, self.HOST, self.SENTINELS)): 77 | raise ValueError(f"At least one of URL, UNIX_SOCKET_PATH, HOST must be provided: {self}") 78 | if sum((self.URL is not None, self.UNIX_SOCKET_PATH is not None, self.HOST is not None)) > 1: 79 | raise ValueError(f"Only one of URL, UNIX_SOCKET_PATH, HOST should be provided: {self}") 80 | if self.HOST is not None and (self.PORT is None or self.DB is None): 81 | raise ValueError(f"HOST requires PORT and DB: {self}") 82 | 83 | def same_connection_params(self, other: Self) -> bool: 84 | for field in self.__CONNECTION_FIELDS__: 85 | if getattr(self, field) != getattr(other, field): 86 | return False 87 | return True 88 | -------------------------------------------------------------------------------- /scheduler/urls.py: 
from django.urls import path

from . import views

# NOTE(review): the angle-bracket path converters were reconstructed from the
# view signatures (queue_name/registry_name/action/job_name/name are all
# strings) - the original markup was stripped in transit; confirm upstream.
urlpatterns = [
    path("queues/", views.stats, name="queues_home"),
    path("queues/stats.json", views.stats_json, name="queues_home_json"),
    path("queues/<str:queue_name>/workers/", views.queue_workers, name="queue_workers"),
    path("queues/<str:queue_name>/<str:registry_name>/jobs", views.list_registry_jobs, name="queue_registry_jobs"),
    path(
        "queues/<str:queue_name>/<str:registry_name>/<str:action>/",
        views.queue_registry_actions,
        name="queue_registry_action",
    ),
    path("queues/<str:queue_name>/confirm-action/", views.queue_confirm_job_action, name="queue_confirm_job_action"),
    path("queues/<str:queue_name>/actions/", views.queue_job_actions, name="queue_job_actions"),
]

urlpatterns += [
    path("workers/", views.workers_list, name="workers_home"),
    path("workers/<str:name>/", views.worker_details, name="worker_details"),
    path("jobs/<str:job_name>/", views.job_detail, name="job_details"),
    path("jobs/<str:job_name>/<str:action>/", views.job_action, name="job_detail_action"),
]
from typing import Optional, List
from typing import Tuple
from urllib.parse import urlparse

from django.contrib import messages
from django.http import Http404
from django.http import HttpRequest
from django.urls import resolve
from django.utils.http import url_has_allowed_host_and_scheme

from scheduler.helpers.queues import Queue
from scheduler.helpers.queues import get_queue as get_queue_base
from scheduler.redis_models import JobModel
from scheduler.settings import QueueNotFoundError
from scheduler.settings import get_queue_names, logger

# Queue names that raised on access; _find_job skips them on later calls.
_QUEUES_WITH_BAD_CONFIGURATION = set()


def get_queue(queue_name: str) -> Queue:
    """Resolve a queue by name, translating configuration errors to HTTP 404."""
    try:
        return get_queue_base(queue_name)
    except QueueNotFoundError as e:
        logger.error(e)
        raise Http404(e)


def _find_job(job_name: str) -> Tuple[Optional[Queue], Optional[JobModel]]:
    """Locate a job by name across all configured queues.

    Returns (queue, job) for the first queue whose connection holds the job,
    or (None, None) when no queue does. Queues that raise are remembered in
    _QUEUES_WITH_BAD_CONFIGURATION and skipped afterwards.
    """
    for queue_name in get_queue_names():
        if queue_name in _QUEUES_WITH_BAD_CONFIGURATION:
            continue
        try:
            candidate_queue = get_queue(queue_name)
            candidate_job = JobModel.get(job_name, connection=candidate_queue.connection)
            if candidate_job is not None and candidate_job.queue_name == queue_name:
                return candidate_queue, candidate_job
        except Exception as e:
            _QUEUES_WITH_BAD_CONFIGURATION.add(queue_name)
            logger.debug(f"Queue {queue_name} added to bad configuration - Got exception: {e}")
    return None, None


def _check_next_url(request: HttpRequest, default_next_url: str) -> str:
    """Sanitize the POSTed next_url, falling back to *default_next_url*.

    Rejects absolute URLs (any scheme or netloc) and anything Django cannot
    resolve, warning the user whenever the follow-up URL is discarded.
    """
    next_url = request.POST.get("next_url", default_next_url).replace("\\", "")
    parsed = urlparse(next_url)
    if not url_has_allowed_host_and_scheme(next_url, allowed_hosts=None) or parsed.netloc or parsed.scheme:
        messages.warning(request, "Bad followup URL")
        next_url = default_next_url
    try:
        resolve(next_url)
    except Exception:
        messages.warning(request, "Bad followup URL")
        next_url = default_next_url
    return next_url


def _enqueue_multiple_jobs(queue: Queue, job_names: List[str], at_front: bool = False) -> int:
    """Re-enqueue the named jobs on *queue* inside a single pipeline.

    Job names with no backing model are skipped; returns the number of jobs
    actually enqueued.
    """
    jobs = JobModel.get_many(job_names, connection=queue.connection)
    enqueued_count = 0
    with queue.connection.pipeline() as pipe:
        for job in jobs:
            if job is None:
                continue
            job.save(connection=pipe)
            queue.enqueue_job(job, connection=pipe, at_front=at_front)
            enqueued_count += 1
        pipe.execute()
    return enqueued_count
@never_cache
@staff_member_required
def job_action(request: HttpRequest, job_name: str, action: str) -> HttpResponse:
    """Render a confirmation page (GET) or apply *action* to a job (POST).

    Supported actions are the JobDetailAction values (delete/enqueue/cancel);
    anything else yields a 400 response.
    """
    queue, job = _find_job(job_name)
    if job is None:
        messages.warning(request, f"Job {escape(job_name)} does not exist, maybe its TTL has passed")
        return redirect("queues_home")
    if action not in [item.value for item in JobDetailAction]:
        return HttpResponseBadRequest(f"Action {escape(action)} is not supported")

    if request.method != "POST":
        # GET: show the confirmation page for the requested action.
        context = {
            **admin.site.each_context(request),
            "job": job,
            "queue": queue,
            "action": action,
        }
        return render(request, "admin/scheduler/single_job_action.html", context)

    try:
        if action == JobDetailAction.DELETE:
            queue.delete_job(job.name)
            messages.info(request, f"You have successfully deleted {job.name}")
            return redirect("queue_registry_jobs", queue.name, "queued")
        if action == JobDetailAction.ENQUEUE:
            # Keep the job model alive while moving it back onto the queue.
            queue.delete_job(job.name, expire_job_model=False)
            queue.enqueue_job(job)
            messages.info(request, f"You have successfully enqueued {job.name}")
            return redirect("job_details", job_name)
        if action == JobDetailAction.CANCEL:
            # Ask the executing worker to stop the job, then cancel it in the queue.
            send_command(
                connection=queue.connection, command=StopJobCommand(job_name=job.name, worker_name=job.worker_name)
            )
            queue.cancel_job(job.name)
            messages.info(request, f"You have successfully cancelled {job.name}")
            return redirect("job_details", job_name)
    except InvalidJobOperation as e:
        logger.warning(f"Could not perform action: {e}")
        messages.warning(request, f"Could not perform action: {e}")
    return redirect("job_details", job_name)
@never_cache
@staff_member_required
def queue_confirm_job_action(request: HttpRequest, queue_name: str) -> HttpResponse:
    """Show the confirmation page before running an action on selected jobs.

    Expects a POST carrying an `action` (a QueueJobAction value) and the
    selected job names under `_selected_action`; anything else redirects back
    to the sanitized next_url.
    """
    queue = get_queue(queue_name)
    next_url = _check_next_url(request, reverse("queue_registry_jobs", args=[queue_name, "queued"]))
    action = request.POST.get("action", None)
    job_names = request.POST.getlist("_selected_action", None)
    # Check membership against the enum *values*: `action in QueueJobAction`
    # raises TypeError for non-member operands on Python < 3.12 (the
    # instance-level __contains__ on the enum does not apply to the class),
    # and this form matches the check used in queue_job_actions above.
    if (
        request.method != "POST"
        or action is None
        or job_names is None
        or action not in [item.value for item in QueueJobAction]
    ):
        return redirect(next_url)

    # confirm action
    context_data = {
        **admin.site.each_context(request),
        "action": action,
        "jobs": [JobModel.get(job_name, connection=queue.connection) for job_name in job_names],
        "total_jobs": len(job_names),
        "queue": queue,
        "next_url": next_url,
        "action_url": reverse(
            "queue_job_actions",
            args=[
                queue_name,
            ],
        ),
    }
    return render(request, "admin/scheduler/confirm_action.html", context_data)
@never_cache
@staff_member_required
def queue_registry_actions(request: HttpRequest, queue_name: str, registry_name: str, action: str) -> HttpResponse:
    """Confirm (GET) or apply (POST) an action on every job in a registry.

    Supported actions are the QueueRegistryActions values (empty/requeue);
    unknown actions redirect back, unknown registries return 404.
    """
    queue = get_queue(queue_name)
    registry = queue.get_registry(registry_name)
    if registry is None:
        return HttpResponseNotFound()
    next_url = _check_next_url(request, reverse("queue_registry_jobs", args=[queue_name, registry_name]))
    if action not in [item.value for item in QueueRegistryActions]:
        return redirect(next_url)
    if request.method == "POST":
        if action == QueueRegistryActions.EMPTY.value:
            _clear_registry(request, queue, registry_name, registry)
        elif action == QueueRegistryActions.REQUEUE.value:
            _requeue_job_names(request, queue, registry_name)
        return redirect("queue_registry_jobs", queue_name, registry_name)
    # GET: render the confirmation page listing the affected jobs.
    jobs_in_registry = JobModel.get_many(registry.all(), connection=queue.connection)
    context_data = {
        **admin.site.each_context(request),
        "queue": queue,
        "total_jobs": len(registry),
        "action": action,
        "jobs": jobs_in_registry,
        "next_url": next_url,
        "action_url": reverse("queue_registry_action", args=[queue_name, registry_name, action]),
    }
    return render(request, "admin/scheduler/confirm_action.html", context_data)
def get_worker_executions(worker: WorkerModel) -> List[JobModel]:
    """Collect every job across the worker's queues executed by this worker."""
    return [
        job
        for queue_name in worker.queue_names
        for job in get_queue(queue_name).get_all_jobs()
        if job.worker_name == worker.name
    ]


@never_cache
@staff_member_required
def worker_details(request: HttpRequest, name: str) -> HttpResponse:
    """Render the admin details page for a single worker.

    Raises Http404 when no worker with the given name exists.
    """
    worker = next((w for w in get_all_workers() if w.name == name), None)
    if worker is None:
        raise Http404(f"Couldn't find worker with this ID: {name}")

    execution_list = get_worker_executions(worker)
    paginator = Paginator(execution_list, SCHEDULER_CONFIG.EXECUTIONS_IN_PAGE)
    page_obj = paginator.get_page(request.GET.get("p", 1))
    page_range = paginator.get_elided_page_range(page_obj.number)
    current_job = None
    if worker.current_job_name is not None:
        # Look the current job up via the first queue's connection - assumes all
        # of the worker's queues share one broker connection (TODO confirm).
        connection = get_queue(worker.queue_names[0]).connection
        current_job = JobModel.get(worker.current_job_name, connection=connection)
    context_data = {
        **admin.site.each_context(request),
        "worker": worker,
        "queue_names": ", ".join(worker.queue_names),
        "current_job": current_job,
        "executions": page_obj,
        "page_range": page_range,
        "page_var": "p",
    }
    return render(request, "admin/scheduler/worker_details.html", context_data)
import errno
import os
import signal
from typing import Optional

from scheduler.types import ConnectionType
from scheduler.redis_models import WorkerModel
from scheduler.settings import logger
from scheduler.worker.commands.worker_commands import WorkerCommand


class KillWorkerCommand(WorkerCommand):
    """kill-worker command"""

    command_name = "kill-worker"

    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        # PID of the worker's main process; resolved from the worker model
        # when the command is processed.
        self.worker_pid: Optional[int] = None

    def process_command(self, connection: ConnectionType) -> None:
        """Terminate the target worker's whole process group with SIGTERM.

        Raises ValueError when the worker model has no recorded PID. A process
        that is already gone (ESRCH) is treated as success.
        """
        # Imported here to avoid a circular import at module load time.
        from scheduler.worker import Worker

        logger.info("Received kill-worker command.")
        worker_model = WorkerModel.get(self.worker_name, connection)
        self.worker_pid = worker_model.pid
        if self.worker_pid is None:
            raise ValueError("Worker PID is not set")
        logger.info(f"Killing worker main process {self.worker_pid}...")
        try:
            # Flag the worker as stopping before signalling its process group.
            Worker.from_model(worker_model).request_stop(signal.SIGTERM, None)
            os.killpg(os.getpgid(self.worker_pid), signal.SIGTERM)
            logger.info(f"Killed worker main process pid {self.worker_pid}")
        except OSError as e:
            if e.errno == errno.ESRCH:
                logger.debug(
                    f"Worker main process for {self.worker_name}:{self.worker_pid} already dead"
                )  # "No such process" is fine with us
            else:
                raise


# --- scheduler/worker/commands/shutdown.py ---
import os
import signal

from scheduler.types import ConnectionType
from scheduler.settings import logger
from scheduler.worker.commands.worker_commands import WorkerCommand


class ShutdownCommand(WorkerCommand):
    """shutdown command"""

    command_name = "shutdown"

    def process_command(self, connection: ConnectionType) -> None:
        # Shut down this worker by sending SIGINT to its own process; the
        # worker's signal handling takes it from there.
        logger.info("Received shutdown command, sending SIGINT signal.")
        pid = os.getpid()
        os.kill(pid, signal.SIGINT)
StopJobCommand(WorkerCommand): 12 | """stop-job command""" 13 | 14 | command_name = "stop-job" 15 | 16 | def __init__(self, *args, job_name: str, worker_name: str, **kwargs) -> None: 17 | super().__init__(*args, worker_name=worker_name, **kwargs) 18 | self.job_name = job_name 19 | if self.job_name is None: 20 | raise WorkerCommandError("job_name for kill-job command is required") 21 | 22 | def command_payload(self) -> Dict[str, Any]: 23 | return super().command_payload(job_name=self.job_name) 24 | 25 | def process_command(self, connection: ConnectionType) -> None: 26 | logger.debug(f"Received command to stop job {self.job_name}") 27 | worker_model = WorkerModel.get(self.worker_name, connection) 28 | job_model = JobModel.get(self.job_name, connection) 29 | if worker_model is None: 30 | logger.error(f"Worker {self.worker_name} not found") 31 | return 32 | if job_model is None: 33 | logger.error(f"Job {self.job_name} not found") 34 | return 35 | if worker_model.pid == worker_model.job_execution_process_pid: 36 | logger.warning(f"Job execution process ID and worker process id {worker_model.pid} are equal, skipping") 37 | return 38 | if not worker_model.job_execution_process_pid: 39 | logger.error(f"Worker {self.worker_name} has no job execution process") 40 | return 41 | if worker_model.current_job_name != self.job_name: 42 | logger.info( 43 | f"{self.worker_name} working on job {worker_model.current_job_name}, " 44 | f"not on {self.job_name}, kill-job command ignored." 
class SuspendWorkCommand(WorkerCommand):
    """Suspend worker command: mark a worker as suspended so it stops picking up jobs."""

    command_name = "suspend"

    def process_command(self, connection: ConnectionType) -> None:
        """Set the worker's ``is_suspended`` flag, if the worker exists and is not already suspended."""
        logger.debug(f"Received command to suspend worker {self.worker_name}")
        worker_model = WorkerModel.get(self.worker_name, connection)
        if worker_model is None:
            logger.warning(f"Worker {self.worker_name} not found")
            # Bug fix: previously fell through and raised AttributeError on None below.
            return
        if worker_model.is_suspended:
            logger.warning(f"Worker {self.worker_name} already suspended")
            return
        worker_model.set_field("is_suspended", True, connection=connection)
        logger.info(f"Worker {self.worker_name} suspended")
class ResumeWorkCommand(WorkerCommand):
    """Resume worker command: clear a worker's suspended flag so it resumes picking up jobs."""

    command_name = "resume"

    def process_command(self, connection: ConnectionType) -> None:
        """Clear the worker's ``is_suspended`` flag, if the worker exists and is suspended."""
        logger.debug(f"Received command to resume worker {self.worker_name}")
        worker_model = WorkerModel.get(self.worker_name, connection)
        if worker_model is None:
            logger.warning(f"Worker {self.worker_name} not found")
            # Bug fix: previously fell through and raised AttributeError on None below.
            return
        if not worker_model.is_suspended:
            logger.warning(f"Worker {self.worker_name} not suspended and therefore can't be resumed")
            return
        worker_model.set_field("is_suspended", False, connection=connection)
        logger.info(f"Worker {self.worker_name} resumed")
NotImplementedError(f"{cls.__name__} must have a name attribute") 49 | WorkerCommand._registry[cls.command_name] = cls 50 | 51 | @classmethod 52 | def from_payload(cls, payload: Dict[str, Any]) -> Type[Self]: 53 | command_name = payload.get("command") 54 | command_class = WorkerCommand._registry.get(command_name) 55 | if command_class is None: 56 | raise WorkerCommandError(f"Invalid command: {command_name}") 57 | return command_class(**payload) 58 | 59 | 60 | def send_command(connection: ConnectionType, command: WorkerCommand) -> None: 61 | """Send a command to the worker""" 62 | payload = command.command_payload() 63 | connection.publish(payload["channel_name"], json.dumps(payload)) 64 | 65 | 66 | class WorkerCommandsChannelListener(object): 67 | def __init__(self, connection: ConnectionType, worker_name: str) -> None: 68 | self.connection = connection 69 | self.pubsub_channel_name = WorkerCommandsChannelListener._commands_channel(worker_name) 70 | 71 | @staticmethod 72 | def _commands_channel(worker_name: str) -> str: 73 | return _PUBSUB_CHANNEL_TEMPLATE.format(worker_name) 74 | 75 | def start(self): 76 | """Subscribe to this worker's channel""" 77 | logger.info(f"Subscribing to channel {self.pubsub_channel_name}") 78 | self.pubsub = self.connection.pubsub() 79 | self.pubsub.subscribe(**{self.pubsub_channel_name: self.handle_payload}) 80 | self.pubsub_thread = self.pubsub.run_in_thread(sleep_time=0.2, daemon=True) 81 | 82 | def stop(self): 83 | """Unsubscribe from pubsub channel""" 84 | if self.pubsub_thread: 85 | logger.info(f"Unsubscribing from channel {self.pubsub_channel_name}") 86 | self.pubsub_thread.stop() 87 | self.pubsub_thread.join() 88 | self.pubsub.unsubscribe() 89 | self.pubsub.close() 90 | 91 | def handle_payload(self, payload: str) -> None: 92 | """Handle commands""" 93 | command = WorkerCommand.from_payload(json.loads(payload["data"])) 94 | logger.debug(f"Received command: {command}") 95 | command.process_command(self.connection) 96 | 
#!/usr/bin/env python
"""Command-line entry point for the Django test project."""
import os
import sys


def _main() -> None:
    """Configure the settings module, then delegate to Django's management CLI."""
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproject.settings")

    # Imported lazily so DJANGO_SETTINGS_MODULE is set before Django loads.
    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)


if __name__ == "__main__":
    _main()
@cache_page(timeout=500)
def my_view(request):
    """Trivial cached test view; always responds with the same body."""
    response = HttpResponse("Yeah")
    return response