├── scheduler
├── py.typed
├── tests
│ ├── __init__.py
│ ├── test_views
│ │ ├── __init__.py
│ │ ├── base.py
│ │ └── test_workers_view.py
│ ├── test_task_types
│ │ ├── __init__.py
│ │ ├── test_cron_task.py
│ │ └── test_once_task.py
│ ├── test_worker
│ │ ├── __init__.py
│ │ ├── test_scheduler.py
│ │ ├── test_worker_creation.py
│ │ ├── test_worker_commands.py
│ │ └── test_worker_commands_multiprocess.py
│ ├── test_mgmt_commands
│ │ ├── __init__.py
│ │ ├── test_run_job.py
│ │ ├── test_delete_failed_executions.py
│ │ ├── test_export.py
│ │ └── test_scheduler_stats.py
│ ├── test_multiprocess
│ │ ├── __init__.py
│ │ └── test_integrity.py
│ ├── test_redis_models.py
│ ├── jobs.py
│ ├── test_admin_permissions.py
│ ├── conf.py
│ ├── test_settings.py
│ ├── test_internals.py
│ └── test_job_decorator.py
├── helpers
│ ├── __init__.py
│ ├── queues
│ │ ├── __init__.py
│ │ └── getters.py
│ ├── utils.py
│ └── callback.py
├── management
│ ├── __init__.py
│ └── commands
│ │ ├── __init__.py
│ │ ├── delete_failed_executions.py
│ │ ├── run_job.py
│ │ ├── export.py
│ │ └── scheduler_stats.py
├── migrations
│ ├── __init__.py
│ ├── 0021_remove_task_job_id_task_job_name.py
│ ├── 0015_rename_cronjob_crontask_and_more.py
│ ├── 0010_queue.py
│ ├── 0006_auto_20230118_1640.py
│ ├── 0020_remove_repeatabletask_new_task_id_and_more.py
│ ├── 0004_cronjob_at_front_repeatablejob_at_front_and_more.py
│ ├── 0002_alter_cronjob_id_alter_repeatablejob_id_and_more.py
│ ├── 0005_alter_cronjob_at_front_alter_repeatablejob_at_front_and_more.py
│ ├── 0012_alter_cronjob_name_alter_repeatablejob_name_and_more.py
│ ├── 0018_alter_crontask_queue_alter_repeatabletask_queue_and_more.py
│ ├── 0016_rename_jobarg_taskarg_rename_jobkwarg_taskkwarg_and_more.py
│ ├── 0007_add_result_ttl.py
│ ├── 0014_alter_cronjob_created_alter_cronjob_modified_and_more.py
│ ├── 0009_alter_jobarg_arg_type_alter_jobarg_val_and_more.py
│ ├── 0013_alter_cronjob_queue_alter_repeatablejob_queue_and_more.py
│ ├── 0008_rename_str_val_jobarg_val_and_more.py
│ ├── 0017_remove_crontask_repeat_crontask_failed_runs_and_more.py
│ └── 0003_auto_20220329_2107.py
├── templatetags
│ ├── __init__.py
│ └── scheduler_tags.py
├── redis_models
│ ├── registry
│ │ └── __init__.py
│ ├── __init__.py
│ ├── lock.py
│ └── result.py
├── templates
│ └── admin
│ │ └── scheduler
│ │ ├── change_list.html
│ │ ├── change_form.html
│ │ ├── scheduler_base.html
│ │ ├── queue_workers.html
│ │ ├── workers_list.html
│ │ ├── single_job_action.html
│ │ ├── confirm_action.html
│ │ ├── workers-list.partial.html
│ │ ├── jobs-list.partial.html
│ │ ├── jobs-list-with-tasks.partial.html
│ │ ├── worker_details.html
│ │ └── stats.html
├── admin
│ ├── __init__.py
│ └── ephemeral_models.py
├── worker
│ ├── __init__.py
│ └── commands
│ │ ├── __init__.py
│ │ ├── shutdown.py
│ │ ├── kill_worker.py
│ │ ├── suspend_worker.py
│ │ ├── stop_job.py
│ │ └── worker_commands.py
├── __init__.py
├── apps.py
├── models
│ ├── __init__.py
│ ├── ephemeral_models.py
│ └── args.py
├── views
│ ├── __init__.py
│ ├── worker_views.py
│ ├── helpers.py
│ ├── queue_registry_actions.py
│ ├── job_views.py
│ └── queue_job_actions.py
├── types
│ ├── __init__.py
│ ├── broker_types.py
│ └── settings_types.py
├── static
│ └── admin
│ │ └── js
│ │ └── select-fields.js
├── urls.py
├── settings.py
└── decorators.py
├── .github
├── FUNDING.yml
├── zizmor.yml
├── dependabot.yml
├── ISSUE_TEMPLATE
│ ├── feature_request.md
│ └── bug_report.md
├── release-drafter.yml
├── workflows
│ ├── publish-documentation.yml
│ ├── test-workflow.yml
│ └── publish.yml
└── actions
│ └── test-coverage
│ └── action.yml
├── testproject
├── testproject
│ ├── __init__.py
│ ├── wsgi.py
│ ├── views.py
│ └── urls.py
└── manage.py
├── docs
├── requirements.txt
├── media
│ ├── add-args.jpg
│ ├── admin-job-details.jpg
│ ├── admin-queues-list.jpg
│ ├── admin-tasks-list.jpg
│ ├── add-scheduled-task.jpg
│ ├── admin-queue-registry.jpg
│ ├── admin-task-details.jpg
│ ├── admin-worker-details.jpg
│ └── admin-workers-list.jpg
├── migrate_to_v3.md
├── drt-model.md
├── installation.md
└── configuration.md
├── .readthedocs.yaml
├── SECURITY.md
├── .gitignore
├── .pre-commit-config.yaml
├── LICENSE
├── mkdocs.yml
└── README.md
/scheduler/py.typed:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/scheduler/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/scheduler/helpers/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/scheduler/management/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/scheduler/migrations/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | github: cunla
2 |
--------------------------------------------------------------------------------
/scheduler/templatetags/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/scheduler/tests/test_views/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/testproject/testproject/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/scheduler/management/commands/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/scheduler/redis_models/registry/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/scheduler/tests/test_task_types/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/scheduler/tests/test_worker/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/scheduler/tests/test_mgmt_commands/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/scheduler/tests/test_multiprocess/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | mkdocs==1.6.1
2 | mkdocs-material==9.7.0
3 |
--------------------------------------------------------------------------------
/docs/media/add-args.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/HEAD/docs/media/add-args.jpg
--------------------------------------------------------------------------------
/docs/media/admin-job-details.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/HEAD/docs/media/admin-job-details.jpg
--------------------------------------------------------------------------------
/docs/media/admin-queues-list.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/HEAD/docs/media/admin-queues-list.jpg
--------------------------------------------------------------------------------
/docs/media/admin-tasks-list.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/HEAD/docs/media/admin-tasks-list.jpg
--------------------------------------------------------------------------------
/docs/media/add-scheduled-task.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/HEAD/docs/media/add-scheduled-task.jpg
--------------------------------------------------------------------------------
/docs/media/admin-queue-registry.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/HEAD/docs/media/admin-queue-registry.jpg
--------------------------------------------------------------------------------
/docs/media/admin-task-details.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/HEAD/docs/media/admin-task-details.jpg
--------------------------------------------------------------------------------
/docs/media/admin-worker-details.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/HEAD/docs/media/admin-worker-details.jpg
--------------------------------------------------------------------------------
/docs/media/admin-workers-list.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/django-commons/django-tasks-scheduler/HEAD/docs/media/admin-workers-list.jpg
--------------------------------------------------------------------------------
/scheduler/templates/admin/scheduler/change_list.html:
--------------------------------------------------------------------------------
1 | {% extends 'admin/change_list.html' %}
2 | {% load scheduler_tags %}
3 |
4 | {% block object-tools %}
5 | {{ block.super }}
6 | {% endblock %}
7 |
--------------------------------------------------------------------------------
/scheduler/admin/__init__.py:
--------------------------------------------------------------------------------
1 | from .ephemeral_models import QueueAdmin, WorkerAdmin
2 | from .task_admin import TaskAdmin
3 |
4 | __all__ = [
5 | "QueueAdmin",
6 | "WorkerAdmin",
7 | "TaskAdmin",
8 | ]
9 |
--------------------------------------------------------------------------------
/scheduler/worker/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = [
2 | "Worker",
3 | "create_worker",
4 | "WorkerScheduler",
5 | ]
6 |
7 | from .scheduler import WorkerScheduler
8 | from .worker import Worker, create_worker
9 |
--------------------------------------------------------------------------------
/scheduler/__init__.py:
--------------------------------------------------------------------------------
1 | import importlib.metadata
2 |
3 | __version__ = importlib.metadata.version("django-tasks-scheduler")
4 |
5 | __all__ = [
6 | "job",
7 | ]
8 |
9 | from scheduler.decorators import job
10 |
--------------------------------------------------------------------------------
/scheduler/templates/admin/scheduler/change_form.html:
--------------------------------------------------------------------------------
1 | {% extends "admin/change_form.html" %}
2 | {% load i18n %}
3 |
4 | {% block after_related_objects %}
5 | {% include 'admin/scheduler/jobs-list.partial.html' %}
6 | {% endblock %}
7 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | version: 2
2 | build:
3 | os: "ubuntu-20.04"
4 | tools:
5 | python: "3.12"
6 |
7 | mkdocs:
8 | configuration: mkdocs.yml
9 | fail_on_warning: false
10 |
11 | python:
12 | install:
13 | - requirements: docs/requirements.txt
14 |
--------------------------------------------------------------------------------
/scheduler/helpers/queues/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = [
2 | "Queue",
3 | "InvalidJobOperation",
4 | "get_queue",
5 | "get_all_workers",
6 | "queue_perform_job",
7 | ]
8 |
9 | from .getters import get_queue, get_all_workers
10 | from .queue_logic import Queue, InvalidJobOperation, queue_perform_job
11 |
--------------------------------------------------------------------------------
/testproject/manage.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import os
3 | import sys
4 |
5 | if __name__ == "__main__":
6 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproject.settings")
7 |
8 | from django.core.management import execute_from_command_line
9 |
10 | execute_from_command_line(sys.argv)
11 |
--------------------------------------------------------------------------------
/scheduler/apps.py:
--------------------------------------------------------------------------------
1 | from django.apps import AppConfig
2 | from django.utils.translation import gettext_lazy as _
3 |
4 |
5 | class SchedulerConfig(AppConfig):
6 | default_auto_field = "django.db.models.AutoField"
7 | name = "scheduler"
8 | verbose_name = _("Tasks Scheduler")
9 |
10 | def ready(self):
11 | pass
12 |
--------------------------------------------------------------------------------
/scheduler/models/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = [
2 | "Task",
3 | "TaskType",
4 | "TaskArg",
5 | "TaskKwarg",
6 | "get_scheduled_task",
7 | "run_task",
8 | "get_next_cron_time",
9 | ]
10 |
11 | from .args import TaskArg, TaskKwarg
12 | from .task import TaskType, Task, get_scheduled_task, run_task, get_next_cron_time
13 |
--------------------------------------------------------------------------------
/.github/zizmor.yml:
--------------------------------------------------------------------------------
1 | rules:
2 | unpinned-images:
3 | ignore:
4 | - 'test.yml'
5 | - 'test-dragonfly.yml'
6 | unpinned-uses:
7 | config:
8 | policies:
9 | actions/*: any
10 | astral-sh/*: any
11 | pypa/gh-action-pypi-publish: any
12 | github-env:
13 | ignore:
14 | - 'action.yml:36:7'
15 | - 'action.yml:28:7'
16 |
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Security Policy
2 |
3 | ## Supported Versions
4 |
5 | | Version | Supported |
6 | |----------|--------------------|
7 | | 4.latest | :white_check_mark: |
8 |
9 | ## Reporting a Vulnerability
10 |
11 | To report a security vulnerability, please use the
12 | [Tidelift security contact](https://tidelift.com/security).
13 | Tidelift will coordinate the fix and disclosure.
14 |
--------------------------------------------------------------------------------
/scheduler/worker/commands/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = [
2 | "WorkerCommandsChannelListener",
3 | "StopJobCommand",
4 | "ShutdownCommand",
5 | "KillWorkerCommand",
6 | "WorkerCommandError",
7 | "send_command",
8 | ]
9 |
10 | from .kill_worker import KillWorkerCommand
11 | from .shutdown import ShutdownCommand
12 | from .stop_job import StopJobCommand
13 | from .worker_commands import WorkerCommandsChannelListener, WorkerCommandError, send_command
14 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # To get started with Dependabot version updates, you'll need to specify which
2 | # package ecosystems to update and where the package manifests are located.
3 | # Please see the documentation for all configuration options:
4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
5 |
6 | version: 2
7 | updates:
8 | - package-ecosystem: "pip"
9 | directory: "/"
10 | schedule:
11 | interval: "daily"
12 |
--------------------------------------------------------------------------------
/testproject/testproject/wsgi.py:
--------------------------------------------------------------------------------
1 | """
2 | WSGI config for testproject project.
3 |
4 | It exposes the WSGI callable as a module-level variable named ``application``.
5 |
6 | For more information on this file, see
 7 | https://docs.djangoproject.com/en/stable/howto/deployment/wsgi/
8 | """
9 |
10 | import os
11 |
12 | from django.core.wsgi import get_wsgi_application
13 |
14 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproject.settings")
15 |
16 | application = get_wsgi_application()
17 |
--------------------------------------------------------------------------------
/scheduler/worker/commands/shutdown.py:
--------------------------------------------------------------------------------
1 | import os
2 | import signal
3 |
4 | from scheduler.types import ConnectionType
5 | from scheduler.settings import logger
6 | from scheduler.worker.commands.worker_commands import WorkerCommand
7 |
8 |
9 | class ShutdownCommand(WorkerCommand):
10 | """shutdown command"""
11 |
12 | command_name = "shutdown"
13 |
14 | def process_command(self, connection: ConnectionType) -> None:
15 | logger.info("Received shutdown command, sending SIGINT signal.")
16 | pid = os.getpid()
17 | os.kill(pid, signal.SIGINT)
18 |
--------------------------------------------------------------------------------
/scheduler/tests/test_views/base.py:
--------------------------------------------------------------------------------
1 | from django.contrib.auth.models import User
2 | from django.test import TestCase
3 | from django.test.client import Client
4 |
5 | from scheduler.helpers.queues import get_queue
6 | from scheduler.tests import conf # noqa
7 |
8 |
9 | class BaseTestCase(TestCase):
10 | def setUp(self):
11 | self.user = User.objects.create_superuser("user", password="pass")
12 | self.client = Client()
13 | self.client.login(username=self.user.username, password="pass")
14 | get_queue("django_tasks_scheduler_test").connection.flushall()
15 |
--------------------------------------------------------------------------------
/testproject/testproject/views.py:
--------------------------------------------------------------------------------
1 | from django.http import HttpResponse
2 | from django.views.decorators.cache import cache_page
3 |
4 | from scheduler import job
5 | import time
6 |
7 | @cache_page(timeout=500)
8 | def my_view(request):
9 | return HttpResponse("Yeah")
10 |
11 | @job("low")
12 | def long_running_func():
13 | print("start the function")
14 | time.sleep(30)
15 | print("function finished")
16 |
17 |
18 | def run_job(request):
19 | if request.method == "GET":
20 | print("got a GET-request")
21 | long_running_func.delay()
22 | return HttpResponse("OK - got a GET request")
23 | return HttpResponse(status=405)
24 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__/
2 | *.py[cod]
3 | *$py.class
4 | tags
5 | *.so
6 |
7 | .Python
8 | .venv/
9 | docker-compose.yml
10 | env/
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | *.egg-info/
23 | .installed.cfg
24 | *.egg
25 | *.manifest
26 | *.spec
27 |
28 | pip-log.txt
29 | pip-delete-this-directory.txt
30 | htmlcov/
31 | .tox/
32 | .coverage
33 | .coverage.*
34 | .cache
35 | nosetests.xml
36 | coverage.xml
37 | *,cover
38 | .hypothesis/
39 | *.mo
40 | *.pot
41 | *.log
42 | docs/_build/
43 | target/
44 |
45 | .ipynb_checkpoints
46 | .idea
47 | *.sqlite3
48 | .DS_Store
49 | *.iml
50 |
--------------------------------------------------------------------------------
/scheduler/views/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = [
2 | "job_detail",
3 | "job_action",
4 | "stats",
5 | "stats_json",
6 | "queue_registry_actions",
7 | "queue_confirm_job_action",
8 | "queue_workers",
9 | "queue_job_actions",
10 | "list_registry_jobs",
11 | "workers_list",
12 | "worker_details",
13 | "get_statistics",
14 | ]
15 |
16 | from .job_views import job_detail, job_action
17 | from .queue_job_actions import queue_job_actions, queue_confirm_job_action
18 | from .queue_registry_actions import queue_registry_actions
19 | from .queue_views import stats, stats_json, queue_workers, list_registry_jobs, get_statistics
20 | from .worker_views import workers_list, worker_details
21 |
--------------------------------------------------------------------------------
/scheduler/templates/admin/scheduler/scheduler_base.html:
--------------------------------------------------------------------------------
1 | {% extends "admin/base_site.html" %}
2 | {% load scheduler_tags %}
3 |
4 | {% load static %}
5 |
6 | {% block extrastyle %}
7 | {{ block.super }}
8 |
17 |
18 | {% endblock %}
19 |
20 | {% block extrahead %}
21 | {{ block.super }}
22 |
23 | {% endblock %}
24 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: enhancement
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/scheduler/helpers/utils.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import importlib
3 | import time
4 | from typing import Callable, Any
5 |
6 |
7 | def current_timestamp() -> int:
8 | """Returns current UTC timestamp in secs"""
9 | return int(time.time())
10 |
11 |
12 | def utcnow() -> datetime.datetime:
13 | """Return now in UTC"""
14 | return datetime.datetime.now(datetime.timezone.utc)
15 |
16 |
17 | def callable_func(callable_str: str) -> Callable[[Any], Any]:
18 | path = callable_str.split(".")
19 | module = importlib.import_module(".".join(path[:-1]))
20 | func: Callable[[Any], Any] = getattr(module, path[-1])
21 | if not callable(func):
22 | raise TypeError(f"'{callable_str}' is not callable")
23 | return func
24 |
--------------------------------------------------------------------------------
/scheduler/migrations/0021_remove_task_job_id_task_job_name.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 5.1.7 on 2025-03-24 14:30
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('scheduler', '0020_remove_repeatabletask_new_task_id_and_more'),
10 | ]
11 |
12 | operations = [
13 | migrations.RemoveField(
14 | model_name='task',
15 | name='job_id',
16 | ),
17 | migrations.AddField(
18 | model_name='task',
19 | name='job_name',
20 | field=models.CharField(blank=True, editable=False, help_text='Current job_name on queue', max_length=128, null=True, verbose_name='job name'),
21 | ),
22 | ]
23 |
--------------------------------------------------------------------------------
/scheduler/migrations/0015_rename_cronjob_crontask_and_more.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 4.2.5 on 2023-10-08 16:41
2 |
3 | from django.db import migrations
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('scheduler', '0014_alter_cronjob_created_alter_cronjob_modified_and_more'),
10 | ]
11 |
12 | operations = [
13 | migrations.RenameModel(
14 | old_name='CronJob',
15 | new_name='CronTask',
16 | ),
17 | migrations.RenameModel(
18 | old_name='RepeatableJob',
19 | new_name='RepeatableTask',
20 | ),
21 | migrations.RenameModel(
22 | old_name='ScheduledJob',
23 | new_name='ScheduledTask',
24 | ),
25 | ]
26 |
--------------------------------------------------------------------------------
/scheduler/tests/test_views/test_workers_view.py:
--------------------------------------------------------------------------------
1 | from django.urls import reverse
2 |
3 | from scheduler.worker import create_worker
4 | from scheduler.tests import conf # noqa
5 | from scheduler.tests.test_views.base import BaseTestCase
6 |
7 |
8 | class TestViewWorkers(BaseTestCase):
9 | def test_workers_home(self):
10 | res = self.client.get(reverse("workers_home"))
11 | prev_workers = res.context["workers"]
12 | worker1 = create_worker("django_tasks_scheduler_test")
13 | worker1.worker_start()
14 | worker2 = create_worker("test3")
15 | worker2.worker_start()
16 |
17 | res = self.client.get(reverse("workers_home"))
18 | self.assertEqual(res.context["workers"], prev_workers + [worker1._model, worker2._model])
19 |
--------------------------------------------------------------------------------
/scheduler/migrations/0010_queue.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 4.1.7 on 2023-03-16 20:59
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 | dependencies = [
8 | ('scheduler', '0009_alter_jobarg_arg_type_alter_jobarg_val_and_more'),
9 | ]
10 |
11 | operations = [
12 | migrations.CreateModel(
13 | name='Queue',
14 | fields=[
15 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
16 | ],
17 | options={
18 | 'permissions': [['view', 'Access admin page']],
19 | 'managed': False,
20 | 'default_permissions': (),
21 | },
22 | ),
23 | ]
24 |
--------------------------------------------------------------------------------
/.github/release-drafter.yml:
--------------------------------------------------------------------------------
1 | name-template: 'v$RESOLVED_VERSION 🌈'
2 | tag-template: 'v$RESOLVED_VERSION'
3 | categories:
4 | - title: '🚀 Features'
5 | labels:
6 | - 'feature'
7 | - 'enhancement'
8 | - title: '🐛 Bug Fixes'
9 | labels:
10 | - 'fix'
11 | - 'bugfix'
12 | - 'bug'
13 | - title: '🧰 Maintenance'
14 | label: 'chore'
15 | change-template: '- $TITLE @$AUTHOR (#$NUMBER)'
16 | change-title-escapes: '\<*_&' # You can add # and @ to disable mentions, and add ` to disable code blocks.
17 | version-resolver:
18 | major:
19 | labels:
20 | - 'major'
21 | minor:
22 | labels:
23 | - 'minor'
24 | patch:
25 | labels:
26 | - 'patch'
27 | default: patch
28 | template: |
29 | ## Changes
30 |
31 | $CHANGES
32 |
--------------------------------------------------------------------------------
/scheduler/models/ephemeral_models.py:
--------------------------------------------------------------------------------
1 | from django.db import models
2 |
3 |
4 | class Queue(models.Model):
5 | """Placeholder model with no database table, but with django admin page and contenttype permission"""
6 |
7 | class Meta:
8 | managed = False # not in Django's database
9 | default_permissions = ()
10 | permissions = [["view", "Access admin page"]]
11 | verbose_name_plural = " Queues"
12 |
13 |
14 | class Worker(models.Model):
15 | """Placeholder model with no database table, but with django admin page and contenttype permission"""
16 |
17 | class Meta:
18 | managed = False # not in Django's database
19 | default_permissions = ()
20 | permissions = [["view", "Access admin page"]]
21 | verbose_name_plural = " Workers"
22 |
--------------------------------------------------------------------------------
/scheduler/types/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = [
2 | "ConnectionErrorTypes",
3 | "ResponseErrorTypes",
4 | "TimeoutErrorTypes",
5 | "WatchErrorTypes",
6 | "ConnectionType",
7 | "PipelineType",
8 | "SentinelType",
9 | "FunctionReferenceType",
10 | "BrokerMetaData",
11 | "TASK_TYPES",
12 | "Broker",
13 | "SchedulerConfiguration",
14 | "QueueConfiguration",
15 | "Self",
16 | ]
17 |
18 | from .broker_types import (
19 | ConnectionErrorTypes,
20 | ResponseErrorTypes,
21 | TimeoutErrorTypes,
22 | WatchErrorTypes,
23 | ConnectionType,
24 | PipelineType,
25 | SentinelType,
26 | FunctionReferenceType,
27 | BrokerMetaData,
28 | TASK_TYPES,
29 | )
30 | from .settings_types import Broker, SchedulerConfiguration, QueueConfiguration, Self
31 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: bug
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Steps to reproduce the behavior:
15 | 1. Go to '...'
16 | 2. Click on '....'
17 | 3. Scroll down to '....'
18 | 4. See error
19 |
20 | **Expected behavior**
21 | A clear and concise description of what you expected to happen.
22 |
23 | **Screenshots**
24 | If applicable, add screenshots to help explain your problem.
25 |
26 | **Desktop (please complete the following information):**
27 | - OS: [e.g. iOS]
28 | - python version
29 | - django version
30 | - requirements.txt?
31 |
32 | **Additional context**
33 | Add any other context about the problem here.
34 |
--------------------------------------------------------------------------------
/scheduler/migrations/0006_auto_20230118_1640.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 4.1.5 on 2023-01-18 16:40
2 |
3 | from django.db import migrations
4 |
5 |
6 | def forwards_func(apps, schema_editor):
7 | pass
8 |
9 |
10 | def reverse_func(apps, schema_editor):
 11 | # The forward migration is a no-op; reversing deletes the
 12 | # 'Job scheduling jobs' CronJob created by an earlier migration.
13 | cronjob_model = apps.get_model(app_label='scheduler', model_name='CronJob')
14 | db_alias = schema_editor.connection.alias
15 | cronjob_model.objects.using(db_alias).filter(name='Job scheduling jobs').delete()
16 |
17 |
18 | class Migration(migrations.Migration):
19 | dependencies = [
20 | ('scheduler', '0005_alter_cronjob_at_front_alter_repeatablejob_at_front_and_more'),
21 | ]
22 |
23 | operations = [
24 | migrations.RunPython(forwards_func, reverse_func),
25 | ]
26 |
--------------------------------------------------------------------------------
/scheduler/migrations/0020_remove_repeatabletask_new_task_id_and_more.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 5.1.6 on 2025-02-05 15:40
2 |
3 | from django.db import migrations
4 |
5 |
class Migration(migrations.Migration):
    """Remove the legacy task models (CronTask, RepeatableTask, ScheduledTask).

    Drops the temporary ``new_task_id`` helper columns first, then deletes the
    legacy models — presumably superseded by the unified ``Task`` model
    introduced in migration 0019 (see the dependency name).
    """

    dependencies = [
        ("scheduler", "0019_task_crontask_new_task_id_repeatabletask_new_task_id_and_more"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="repeatabletask",
            name="new_task_id",
        ),
        migrations.RemoveField(
            model_name="scheduledtask",
            name="new_task_id",
        ),
        migrations.DeleteModel(
            name="CronTask",
        ),
        migrations.DeleteModel(
            name="RepeatableTask",
        ),
        migrations.DeleteModel(
            name="ScheduledTask",
        ),
    ]
31 |
--------------------------------------------------------------------------------
/scheduler/static/admin/js/select-fields.js:
--------------------------------------------------------------------------------
(function ($) {
    $(function () {
        // Map each task-type value to the admin form rows that should only be
        // visible while that type is selected.
        const taskTypeRows = {
            "CronTaskType": $(".tasktype-CronTaskType"),
            "RepeatableTaskType": $(".tasktype-RepeatableTaskType"),
            "OnceTaskType": $(".tasktype-OnceTaskType"),
        };
        const taskTypeField = $('#id_task_type');

        // Show only the rows belonging to the selected task type; hide the rest.
        // (Removed a leftover console.log debugging statement.)
        function toggleTaskTypeRows(value) {
            for (const [taskType, rows] of Object.entries(taskTypeRows)) {
                if (taskType === value) {
                    rows.show();
                } else {
                    rows.hide();
                }
            }
        }

        // Apply once on page load, then again on every selection change.
        toggleTaskTypeRows(taskTypeField.val());

        taskTypeField.change(function () {
            toggleTaskTypeRows($(this).val());
        });
    });
})(django.jQuery);
28 |
--------------------------------------------------------------------------------
/scheduler/tests/test_mgmt_commands/test_run_job.py:
--------------------------------------------------------------------------------
1 | from django.core.management import call_command
2 | from django.test import TestCase
3 |
4 | from scheduler.helpers.queues import get_queue
5 | from scheduler.redis_models import JobModel
6 | from scheduler.tests import conf # noqa
7 | from scheduler.tests.jobs import test_job
8 |
9 |
class RunJobTest(TestCase):
    """Tests for the ``run_job`` management command."""

    def test_run_job__should_schedule_job(self):
        # arrange: start from an empty "default" queue
        default_queue = get_queue("default")
        default_queue.queued_job_registry.empty(default_queue.connection)
        expected_func = f"{test_job.__module__}.{test_job.__name__}"
        # act
        call_command("run_job", expected_func, queue="default")
        # assert: exactly one job was queued, calling the requested function with no args
        queued_names = default_queue.queued_job_registry.all(default_queue.connection)
        jobs = JobModel.get_many(queued_names, default_queue.connection)
        self.assertEqual(1, len(jobs))
        self.assertEqual(f"{expected_func}()", jobs[0].get_call_string())
21 |
--------------------------------------------------------------------------------
/scheduler/redis_models/__init__.py:
--------------------------------------------------------------------------------
# Public API of scheduler.redis_models: job/worker/result models, the job-name
# registries, and redis-backed locks. Implementations are re-exported from the
# submodule imports below.
__all__ = [
    "Result",
    "ResultType",
    "as_str",
    "SchedulerLock",
    "WorkerModel",
    "DequeueTimeout",
    "KvLock",
    "JobStatus",
    "JobModel",
    "JobNamesRegistry",
    "FinishedJobRegistry",
    "ActiveJobRegistry",
    "FailedJobRegistry",
    "CanceledJobRegistry",
    "ScheduledJobRegistry",
    "QueuedJobRegistry",
]
19 |
20 | from .base import as_str
21 | from .job import JobStatus, JobModel
22 | from .lock import SchedulerLock, KvLock
23 | from .registry.base_registry import DequeueTimeout, JobNamesRegistry
24 | from .registry.queue_registries import (
25 | FinishedJobRegistry,
26 | ActiveJobRegistry,
27 | FailedJobRegistry,
28 | CanceledJobRegistry,
29 | ScheduledJobRegistry,
30 | QueuedJobRegistry,
31 | )
32 | from .result import Result, ResultType
33 | from .worker import WorkerModel
34 |
--------------------------------------------------------------------------------
/scheduler/tests/test_redis_models.py:
--------------------------------------------------------------------------------
1 | from django.urls import reverse
2 |
3 | from scheduler.tests.testtools import SchedulerBaseCase
4 |
5 |
class TestWorkerAdmin(SchedulerBaseCase):
    """Smoke test: the Worker admin changelist renders without error."""

    def test_admin_list_view(self):
        # arrange
        self.client.login(username="admin", password="admin")
        changelist_url = reverse("admin:scheduler_worker_changelist")
        # act
        response = self.client.get(changelist_url)
        # assert
        self.assertEqual(200, response.status_code)
17 |
18 |
class TestQueueAdmin(SchedulerBaseCase):
    """Smoke test: the Queue admin changelist renders without error."""

    def test_admin_list_view(self):
        # arrange
        self.client.login(username="admin", password="admin")
        changelist_url = reverse("admin:scheduler_queue_changelist")
        # act
        response = self.client.get(changelist_url)
        # assert
        self.assertEqual(200, response.status_code)
30 |
--------------------------------------------------------------------------------
/scheduler/migrations/0004_cronjob_at_front_repeatablejob_at_front_and_more.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 4.1.4 on 2022-12-18 18:47
2 |
3 | from django.db import migrations, models
4 |
5 |
class Migration(migrations.Migration):
    """Add the ``at_front`` boolean flag to CronJob, RepeatableJob and ScheduledJob."""

    dependencies = [
        ('scheduler', '0003_auto_20220329_2107'),
    ]

    operations = [
        migrations.AddField(
            model_name='cronjob',
            name='at_front',
            field=models.BooleanField(default=False, verbose_name='At front'),
        ),
        migrations.AddField(
            model_name='repeatablejob',
            name='at_front',
            field=models.BooleanField(default=False, verbose_name='At front'),
        ),
        migrations.AddField(
            model_name='scheduledjob',
            name='at_front',
            field=models.BooleanField(default=False, verbose_name='At front'),
        ),
    ]
28 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: v6.0.0
4 | hooks:
5 | - id: check-json
6 | - id: check-xml
7 | - id: check-yaml
8 | args: [--unsafe] # allow !!python/name:... tags in mkdocs.yml
9 | - id: end-of-file-fixer
10 | - id: trailing-whitespace
11 |
12 | - repo: https://github.com/codespell-project/codespell
13 | rev: v2.4.1
14 | hooks:
15 | - id: codespell # See pyproject.toml for args
16 | additional_dependencies:
17 | - tomli
18 |
19 | - repo: https://github.com/astral-sh/ruff-pre-commit
20 | rev: v0.14.8
21 | hooks:
22 | - id: ruff-check
23 | args: [--fix]
24 | - id: ruff-format
25 |
26 | - repo: https://github.com/tox-dev/pyproject-fmt
27 | rev: v2.11.1
28 | hooks:
29 | - id: pyproject-fmt
30 |
31 | - repo: https://github.com/abravalheri/validate-pyproject
32 | rev: v0.24.1
33 | hooks:
34 | - id: validate-pyproject
35 |
--------------------------------------------------------------------------------
/scheduler/tests/test_mgmt_commands/test_delete_failed_executions.py:
--------------------------------------------------------------------------------
1 | from django.core.management import call_command
2 |
3 | from scheduler.helpers.queues import get_queue
4 | from scheduler.tests import conf # noqa
5 | from scheduler.tests.jobs import failing_job
6 | from scheduler.worker import create_worker
7 | from scheduler.tests.test_views.base import BaseTestCase
8 |
9 |
class DeleteFailedExecutionsTest(BaseTestCase):
    """Tests for the ``delete_failed_executions`` management command."""

    def test_delete_failed_executions__delete_jobs(self):
        default_queue = get_queue("default")
        # Running the command on a clean queue should be a harmless no-op.
        call_command("delete_failed_executions", queue="default")
        # Enqueue a job that always raises and process it in burst mode.
        default_queue.create_and_enqueue_job(failing_job)
        self.assertEqual(1, default_queue.queued_job_registry.count(default_queue.connection))
        burst_worker = create_worker("default", burst=True)
        burst_worker.work()
        self.assertEqual(1, default_queue.failed_job_registry.count(default_queue.connection))
        # The command should remove the failed execution from the registry.
        call_command("delete_failed_executions", queue="default")
        self.assertEqual(0, default_queue.failed_job_registry.count(default_queue.connection))
21 |
--------------------------------------------------------------------------------
/testproject/testproject/urls.py:
--------------------------------------------------------------------------------
1 | """testproject URL Configuration
2 |
3 | The `urlpatterns` list routes URLs to views. For more information please see:
4 | https://docs.djangoproject.com/en/1.9/topics/http/urls/
5 | Examples:
6 | Function views
7 | 1. Add an import: from my_app import views
8 | 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
9 | Class-based views
10 | 1. Add an import: from other_app.views import Home
11 | 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
12 | Including another URLconf
13 | 1. Import the `include()` function: from django.conf.urls import url, include
14 | 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
15 | """
16 |
17 | from django.contrib import admin
18 | from django.urls import path, include
19 |
20 | from . import views
21 |
22 | urlpatterns = [
23 | path("admin/", admin.site.urls),
24 | path("scheduler/", include("scheduler.urls")),
25 | path("test-view/", views.my_view),
26 | path("run-job/", views.run_job),
27 | ]
28 |
--------------------------------------------------------------------------------
/scheduler/migrations/0002_alter_cronjob_id_alter_repeatablejob_id_and_more.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 4.0.1 on 2022-01-06 20:40
2 |
3 | from django.db import migrations, models
4 |
5 |
class Migration(migrations.Migration):
    """Switch the primary keys of CronJob/RepeatableJob/ScheduledJob to BigAutoField."""

    dependencies = [
        ('scheduler', '0001_initial_squashed_0005_added_result_ttl'),
    ]

    operations = [
        migrations.AlterField(
            model_name='cronjob',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        ),
        migrations.AlterField(
            model_name='repeatablejob',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        ),
        migrations.AlterField(
            model_name='scheduledjob',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        ),
    ]
28 |
--------------------------------------------------------------------------------
/scheduler/tests/jobs.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from time import sleep
3 |
4 | from scheduler.helpers.queues import get_queue
5 |
# Module-level counter shared across arg_callable() invocations.
_counter = 0


def arg_callable():
    """Increment the module-level counter and return its new value."""
    global _counter
    _counter = _counter + 1
    return _counter
13 |
14 |
def test_args_kwargs(*args, **kwargs):
    """Return a string describing the call: ``test_args_kwargs(<args>, <kwargs>)``.

    Positional arguments are rendered with ``repr``; keyword arguments as ``k=v``.
    """
    rendered = [repr(a) for a in args]
    rendered += [f"{k}={v}" for k, v in kwargs.items()]
    return "test_args_kwargs({})".format(", ".join(rendered))
20 |
21 |
def two_seconds_job():
    """Job fixture that takes ~2 seconds, then logs the shared counter value."""
    sleep(2)
    logging.info(f"Job {_counter}")  # noqa: LOG015
25 |
26 |
def long_job():
    """Job fixture that blocks for ~1000 seconds — used to simulate a long-running job."""
    sleep(1000)
    logging.info(f"Job {_counter}")  # noqa: LOG015
30 |
31 |
32 | test_non_callable = "I am a teapot"
33 |
34 |
def failing_job():
    """Job fixture that always fails by raising ValueError."""
    raise ValueError()
37 |
38 |
def test_job():
    """Trivial job fixture used as a scheduling target; always returns 2."""
    result = 1 + 1
    return result
41 |
42 |
def enqueue_jobs():
    """Enqueue 20 trivial jobs, named job_000 through job_019, on the default queue."""
    queue = get_queue()
    for index in range(20):
        queue.create_and_enqueue_job(test_job, name=f"job_{index:03}", args=())
47 |
--------------------------------------------------------------------------------
/scheduler/migrations/0005_alter_cronjob_at_front_alter_repeatablejob_at_front_and_more.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 4.1.5 on 2023-01-13 22:35
2 |
3 | from django.db import migrations, models
4 |
5 |
class Migration(migrations.Migration):
    """Relax ``at_front`` on all job models: allow blank and NULL values."""

    dependencies = [
        ('scheduler', '0004_cronjob_at_front_repeatablejob_at_front_and_more'),
    ]

    operations = [
        migrations.AlterField(
            model_name='cronjob',
            name='at_front',
            field=models.BooleanField(blank=True, default=False, null=True, verbose_name='At front'),
        ),
        migrations.AlterField(
            model_name='repeatablejob',
            name='at_front',
            field=models.BooleanField(blank=True, default=False, null=True, verbose_name='At front'),
        ),
        migrations.AlterField(
            model_name='scheduledjob',
            name='at_front',
            field=models.BooleanField(blank=True, default=False, null=True, verbose_name='At front'),
        ),
    ]
28 |
--------------------------------------------------------------------------------
/scheduler/templates/admin/scheduler/queue_workers.html:
--------------------------------------------------------------------------------
1 | {% extends "admin/scheduler/scheduler_base.html" %}
2 |
3 | {% load static scheduler_tags l10n %}
4 |
5 | {% block title %}Workers in {{ queue.name }} {{ block.super }}{% endblock %}
6 |
7 | {% block extrastyle %}
8 | {{ block.super }}
9 |
10 | {% endblock %}
11 |
12 |
13 | {% block breadcrumbs %}
14 |
19 | {% endblock %}
20 |
21 | {% block content_title %}Queue {{ queue.name }} workers {% endblock %}
22 |
23 | {% block content %}
24 |
25 |
26 |
27 | {% include 'admin/scheduler/workers-list.partial.html' %}
28 |
29 |
30 |
31 | {% endblock %}
32 |
--------------------------------------------------------------------------------
/scheduler/migrations/0012_alter_cronjob_name_alter_repeatablejob_name_and_more.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 4.2 on 2023-04-18 19:08
2 |
3 | from django.db import migrations, models
4 |
5 |
class Migration(migrations.Migration):
    """Make job names unique and cap them at 128 characters on all job models."""

    dependencies = [
        ('scheduler', '0011_worker_alter_queue_options_alter_cronjob_at_front_and_more'),
    ]

    operations = [
        migrations.AlterField(
            model_name='cronjob',
            name='name',
            field=models.CharField(help_text='Name of the job.', max_length=128, unique=True, verbose_name='name'),
        ),
        migrations.AlterField(
            model_name='repeatablejob',
            name='name',
            field=models.CharField(help_text='Name of the job.', max_length=128, unique=True, verbose_name='name'),
        ),
        migrations.AlterField(
            model_name='scheduledjob',
            name='name',
            field=models.CharField(help_text='Name of the job.', max_length=128, unique=True, verbose_name='name'),
        ),
    ]
28 |
--------------------------------------------------------------------------------
/scheduler/templates/admin/scheduler/workers_list.html:
--------------------------------------------------------------------------------
1 | {% extends "admin/scheduler/scheduler_base.html" %}
2 |
3 | {% load static scheduler_tags l10n %}
4 |
5 | {% block title %}Workers in {{ queue.name }} {{ block.super }}{% endblock %}
6 |
7 | {% block extrastyle %}
8 | {{ block.super }}
9 |
10 | {% endblock %}
11 |
12 |
13 | {% block breadcrumbs %}
14 |
18 | {% endblock %}
19 |
20 | {% block content_title %}{{ workers|length }} Tasks Workers {% endblock %}
21 |
22 | {% block content %}
23 |
24 |
32 |
33 | {% endblock %}
34 |
--------------------------------------------------------------------------------
/.github/workflows/publish-documentation.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | name: Generate and publish documentation
4 |
5 | on:
6 | release:
7 | types: [published]
8 | workflow_dispatch:
9 |
10 | jobs:
11 | publish_documentation:
12 | runs-on: ubuntu-latest
13 | permissions:
14 | contents: write
15 | environment:
16 | name: pypi
url: https://pypi.org/p/django-tasks-scheduler  # FIXME: was "fakeredis" — likely copy-pasted from another project; confirm the package name
18 | steps:
19 | - uses: actions/checkout@v5
20 | with:
21 | persist-credentials: false
22 | - name: Set up Python
23 | uses: actions/setup-python@v6
24 | with:
25 | python-version: "3.13"
26 | - name: Configure Git Credentials
27 | run: |
28 | git config user.name github-actions[bot]
29 | git config user.email 41898282+github-actions[bot]@users.noreply.github.com
30 | - name: Publish documentation
31 | env:
32 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
33 | GOOGLE_ANALYTICS_KEY: ${{ secrets.GOOGLE_ANALYTICS_KEY }}
34 | run: |
35 | pip install -r docs/requirements.txt
36 | mkdocs gh-deploy --force
37 | mkdocs --version
38 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022- Daniel Moran, (Before 2016 - iStrategyLabs, LLC)
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/scheduler/urls.py:
--------------------------------------------------------------------------------
from django.urls import path

from . import views

# NOTE(review): the route strings below contain empty path segments
# (e.g. "queues//workers/"). The angle-bracket converters such as
# <str:...> appear to have been stripped from this copy of the file
# (markup-unaware extraction). Restore the converters from upstream
# before relying on these patterns — as written, the URL keyword
# arguments the views expect are missing.
urlpatterns = [
    path("queues/", views.stats, name="queues_home"),
    path("queues/stats.json", views.stats_json, name="queues_home_json"),
    path("queues//workers/", views.queue_workers, name="queue_workers"),
    path("queues///jobs", views.list_registry_jobs, name="queue_registry_jobs"),
    path(
        "queues////",
        views.queue_registry_actions,
        name="queue_registry_action",
    ),
    path("queues//confirm-action/", views.queue_confirm_job_action, name="queue_confirm_job_action"),
    path("queues//actions/", views.queue_job_actions, name="queue_job_actions"),
]

urlpatterns += [
    path("workers/", views.workers_list, name="workers_home"),
    path("workers//", views.worker_details, name="worker_details"),
    path("jobs//", views.job_detail, name="job_details"),
    path("jobs///", views.job_action, name="job_detail_action"),
]
25 |
--------------------------------------------------------------------------------
/scheduler/migrations/0018_alter_crontask_queue_alter_repeatabletask_queue_and_more.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 5.1b1 on 2024-06-29 14:21
2 |
3 | from django.db import migrations, models
4 |
5 |
class Migration(migrations.Migration):
    """Intentionally empty migration.

    The AlterField operations below are kept commented out — presumably
    disabled on purpose (they reference a callable ``choices`` from
    ``scheduler.models.old_scheduled_task``); confirm before re-enabling.
    The migration itself is retained so the dependency chain stays linear.
    """

    dependencies = [
        ('scheduler', '0017_remove_crontask_repeat_crontask_failed_runs_and_more'),
    ]

    operations = [
        # migrations.AlterField(
        #     model_name='crontask',
        #     name='queue',
        #     field=models.CharField(choices=scheduler.models.old_scheduled_task.get_queue_choices, help_text='Queue name', max_length=255, verbose_name='queue'),
        # ),
        # migrations.AlterField(
        #     model_name='repeatabletask',
        #     name='queue',
        #     field=models.CharField(choices=scheduler.models.old_scheduled_task.get_queue_choices, help_text='Queue name', max_length=255, verbose_name='queue'),
        # ),
        # migrations.AlterField(
        #     model_name='scheduledtask',
        #     name='queue',
        #     field=models.CharField(choices=scheduler.models.old_scheduled_task.get_queue_choices, help_text='Queue name', max_length=255, verbose_name='queue'),
        # ),
    ]
29 |
--------------------------------------------------------------------------------
/scheduler/redis_models/lock.py:
--------------------------------------------------------------------------------
1 | from typing import Optional, Any
2 |
3 | from scheduler.types import ConnectionType
4 |
5 |
class KvLock(object):
    """A simple lock stored as a key-value pair under ``_lock:<name>``.

    Acquisition uses SET with NX, so only one holder can create the key at a
    time. The lock is not owner-checked: any caller may release or inspect it.
    """

    def __init__(self, name: str) -> None:
        self.name = name
        self.acquired = False  # True after a successful acquire() by this instance

    @property
    def _locking_key(self) -> str:
        # Key under which the lock value is stored.
        return f"_lock:{self.name}"

    def acquire(self, val: Any, connection: ConnectionType, expire: Optional[int] = None) -> bool:
        """Try to acquire the lock; return True on success, False when already held.

        :param val: value stored under the lock key (e.g. to identify the holder).
        :param connection: broker connection.
        :param expire: optional TTL in seconds after which the lock auto-expires.
        """
        # SET NX returns a truthy value on success and None when the key already
        # exists; coerce to bool so the return matches the declared annotation
        # (previously this returned None on contention).
        self.acquired = bool(connection.set(self._locking_key, val, nx=True, ex=expire))
        return self.acquired

    def expire(self, connection: ConnectionType, expire: Optional[int] = None) -> bool:
        """Set or refresh the TTL (seconds) of the lock key."""
        return connection.expire(self._locking_key, expire)

    def release(self, connection: ConnectionType) -> None:
        """Release the lock unconditionally (no ownership check)."""
        connection.delete(self._locking_key)

    def value(self, connection: ConnectionType) -> Any:
        """Return the value stored under the lock key, or None when not held."""
        return connection.get(self._locking_key)
27 |
28 |
class SchedulerLock(KvLock):
    """Lock guarding the scheduler of one queue (key ``_lock:lock:scheduler:<queue>``)."""

    def __init__(self, queue_name: str) -> None:
        super().__init__(f"lock:scheduler:{queue_name}")
32 |
33 |
class QueueLock(KvLock):
    """Lock for a single queue (key ``_lock:queue:<queue>``)."""

    def __init__(self, queue_name: str) -> None:
        super().__init__(f"queue:{queue_name}")
37 |
--------------------------------------------------------------------------------
/scheduler/migrations/0016_rename_jobarg_taskarg_rename_jobkwarg_taskkwarg_and_more.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 4.2.5 on 2023-10-08 16:48
2 |
3 | from django.db import migrations
4 |
5 |
class Migration(migrations.Migration):
    """Rename JobArg/JobKwarg to TaskArg/TaskKwarg and refresh task model verbose names."""

    dependencies = [
        ('contenttypes', '0002_remove_content_type_name'),
        ('scheduler', '0015_rename_cronjob_crontask_and_more'),
    ]

    operations = [
        migrations.RenameModel(
            old_name='JobArg',
            new_name='TaskArg',
        ),
        migrations.RenameModel(
            old_name='JobKwarg',
            new_name='TaskKwarg',
        ),
        migrations.AlterModelOptions(
            name='crontask',
            options={'ordering': ('name',), 'verbose_name': 'Cron Task', 'verbose_name_plural': 'Cron Tasks'},
        ),
        migrations.AlterModelOptions(
            name='repeatabletask',
            options={'ordering': ('name',), 'verbose_name': 'Repeatable Task', 'verbose_name_plural': 'Repeatable Tasks'},
        ),
        migrations.AlterModelOptions(
            name='scheduledtask',
            options={'ordering': ('name',), 'verbose_name': 'Scheduled Task', 'verbose_name_plural': 'Scheduled Tasks'},
        ),
    ]
35 |
--------------------------------------------------------------------------------
/scheduler/migrations/0007_add_result_ttl.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 4.1.5 on 2023-01-20 22:32
2 |
3 | from django.db import migrations, models
4 |
5 |
class Migration(migrations.Migration):
    """Add ``result_ttl`` to CronJob and cap the queue name at 16 chars on all job models."""

    dependencies = [
        ('scheduler', '0006_auto_20230118_1640'),
    ]

    operations = [
        migrations.AddField(
            model_name='cronjob',
            name='result_ttl',
            field=models.IntegerField(blank=True,
                                      help_text='The TTL value (in seconds) of the job result. -1: Result never expires, you should delete jobs manually. 0: Result gets deleted immediately. >0: Result expires after n seconds.',
                                      null=True, verbose_name='result ttl'),
        ),
        migrations.AlterField(
            model_name='cronjob',
            name='queue',
            field=models.CharField(help_text='Queue name', max_length=16, verbose_name='queue'),
        ),
        migrations.AlterField(
            model_name='repeatablejob',
            name='queue',
            field=models.CharField(help_text='Queue name', max_length=16, verbose_name='queue'),
        ),
        migrations.AlterField(
            model_name='scheduledjob',
            name='queue',
            field=models.CharField(help_text='Queue name', max_length=16, verbose_name='queue'),
        ),
    ]
35 |
--------------------------------------------------------------------------------
/scheduler/management/commands/delete_failed_executions.py:
--------------------------------------------------------------------------------
1 | import click
2 | from django.core.management import CommandParser
3 | from django.core.management.base import BaseCommand
4 |
5 | from scheduler.helpers.queues import get_queue
6 | from scheduler.redis_models import JobModel
7 |
8 |
class Command(BaseCommand):
    """Delete failed jobs from a queue, optionally filtered by function name."""

    help = "Delete failed jobs from Django queue."

    def add_arguments(self, parser: CommandParser) -> None:
        parser.add_argument("--queue", "-q", dest="queue", default="default", help="Specify the queue [default]")
        parser.add_argument("-f", "--func", help='optional job function name, e.g. "app.tasks.func"')
        parser.add_argument("--dry-run", action="store_true", help="Do not actually delete failed jobs")

    def handle(self, *args, **options):
        queue = get_queue(options.get("queue", "default"))
        job_names = queue.failed_job_registry.all(queue.connection)
        jobs = JobModel.get_many(job_names, connection=queue.connection)
        # Apply the optional function-name filter BEFORE deleting anything.
        if func_name := options.get("func"):
            jobs = [job for job in jobs if job.func_name == func_name]
        dry_run = options.get("dry_run", False)
        click.echo(f"Found {len(jobs)} failed jobs")
        # BUG FIX: the deletion loop previously iterated over the unfiltered
        # `job_names`, so with --func it deleted jobs outside the filter while
        # reporting only the filtered count.
        for job in jobs:
            click.echo(f"Deleting {job.name}")
            if not dry_run:
                queue.delete_job(job.name)
        click.echo(f"Deleted {len(jobs)} failed jobs")
30 |
--------------------------------------------------------------------------------
/scheduler/migrations/0014_alter_cronjob_created_alter_cronjob_modified_and_more.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 4.2.5 on 2023-09-08 20:16
2 |
3 | from django.db import migrations, models
4 |
5 |
class Migration(migrations.Migration):
    """Make created/modified timestamps framework-managed (auto_now_add / auto_now)."""

    dependencies = [
        ("scheduler", "0013_alter_cronjob_queue_alter_repeatablejob_queue_and_more"),
    ]

    operations = [
        migrations.AlterField(
            model_name="cronjob",
            name="created",
            field=models.DateTimeField(auto_now_add=True),
        ),
        migrations.AlterField(
            model_name="cronjob",
            name="modified",
            field=models.DateTimeField(auto_now=True),
        ),
        migrations.AlterField(
            model_name="repeatablejob",
            name="created",
            field=models.DateTimeField(auto_now_add=True),
        ),
        migrations.AlterField(
            model_name="repeatablejob",
            name="modified",
            field=models.DateTimeField(auto_now=True),
        ),
        migrations.AlterField(
            model_name="scheduledjob",
            name="created",
            field=models.DateTimeField(auto_now_add=True),
        ),
        migrations.AlterField(
            model_name="scheduledjob",
            name="modified",
            field=models.DateTimeField(auto_now=True),
        ),
    ]
43 |
--------------------------------------------------------------------------------
/scheduler/templates/admin/scheduler/single_job_action.html:
--------------------------------------------------------------------------------
1 | {% extends "admin/scheduler/scheduler_base.html" %}
2 | {% load scheduler_tags %}
3 |
4 | {% block breadcrumbs %}
5 |
12 | {% endblock %}
13 |
14 | {% block content_title %}Are you sure? {% endblock %}
15 |
16 | {% block content %}
17 |
18 |
19 |
20 | Are you sure you want to {{ action }}
21 |
22 | {{ job.name }} ({{ job|show_func_name }})
23 |
24 | from
25 | {{ queue.name }} ?
26 | This action can not be undone.
27 |
28 | {% if job.is_scheduled_task %}
29 | Note: This scheduled job will be scheduled again if it is enabled
30 | {% endif %}
31 |
32 |
38 |
39 |
40 | {% endblock %}
41 |
--------------------------------------------------------------------------------
/.github/workflows/test-workflow.yml:
--------------------------------------------------------------------------------
1 | name: Test workflow
2 |
3 | on:
4 | workflow_dispatch:
5 | inputs:
6 | environment:
7 | description: 'Environment to run the workflow in'
8 | required: true
9 | type: environment
10 | reason:
11 | description: 'Reason for running the workflow'
12 | required: true
13 | type: string
14 | environment-optional:
15 | description: 'Environment to run the workflow in'
16 | required: false
17 | type: environment
18 | choices-input-optional:
19 | type: choice
20 | required: false
21 | options:
22 | - option 1
23 | - option 2
24 | description: "Choice input"
25 | number-input-optional:
26 | type: number
27 | required: false
28 | description: "Number input"
29 |
30 | jobs:
31 | ruff:
32 | runs-on: ubuntu-latest
33 | name: "ruff on code"
34 | permissions:
35 | contents: read
36 | steps:
37 | - uses: actions/checkout@v5
38 | with:
39 | persist-credentials: false
40 | - name: print all inputs
41 | run: |
42 | echo "environment: ${{ github.event.inputs.environment }}"
43 | echo "reason: ${{ github.event.inputs.reason }}"
44 | echo "environment-optional: ${{ github.event.inputs.environment-optional }}"
45 | echo "choices-input-optional: ${{ github.event.inputs.choices-input-optional }}"
46 | echo "number-input-optional: ${{ github.event.inputs.number-input-optional }}"
47 |
--------------------------------------------------------------------------------
/scheduler/migrations/0009_alter_jobarg_arg_type_alter_jobarg_val_and_more.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 4.1.7 on 2023-03-12 19:53
2 |
3 | from django.db import migrations, models
4 |
5 |
class Migration(migrations.Migration):
    """Constrain JobArg/JobKwarg ``arg_type`` to fixed choices and make ``val`` blankable."""

    dependencies = [
        ('scheduler', '0008_rename_str_val_jobarg_val_and_more'),
    ]

    operations = [
        migrations.AlterField(
            model_name='jobarg',
            name='arg_type',
            field=models.CharField(
                choices=[('str', 'string'), ('int', 'int'), ('bool', 'boolean'), ('datetime', 'datetime'),
                         ('callable', 'callable')], default='str', max_length=12, verbose_name='Argument Type'),
        ),
        migrations.AlterField(
            model_name='jobarg',
            name='val',
            field=models.CharField(blank=True, max_length=255, verbose_name='Argument Value'),
        ),
        migrations.AlterField(
            model_name='jobkwarg',
            name='arg_type',
            field=models.CharField(
                choices=[('str', 'string'), ('int', 'int'), ('bool', 'boolean'), ('datetime', 'datetime'),
                         ('callable', 'callable')], default='str', max_length=12, verbose_name='Argument Type'),
        ),
        migrations.AlterField(
            model_name='jobkwarg',
            name='val',
            field=models.CharField(blank=True, max_length=255, verbose_name='Argument Value'),
        ),
    ]
37 |
--------------------------------------------------------------------------------
/scheduler/migrations/0013_alter_cronjob_queue_alter_repeatablejob_queue_and_more.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 4.2.1 on 2023-05-11 16:40
2 |
3 | from django.db import migrations, models
4 |
5 |
class Migration(migrations.Migration):
    """Turn ``queue`` into a choice field (default/low/high) with max_length 255 on all job models."""

    dependencies = [
        ("scheduler", "0012_alter_cronjob_name_alter_repeatablejob_name_and_more")
    ]

    operations = [
        migrations.AlterField(
            model_name="cronjob",
            name="queue",
            field=models.CharField(
                choices=[("default", "default"), ("low", "low"), ("high", "high")],
                help_text="Queue name",
                max_length=255,
                verbose_name="queue",
            ),
        ),
        migrations.AlterField(
            model_name="repeatablejob",
            name="queue",
            field=models.CharField(
                choices=[("default", "default"), ("low", "low"), ("high", "high")],
                help_text="Queue name",
                max_length=255,
                verbose_name="queue",
            ),
        ),
        migrations.AlterField(
            model_name="scheduledjob",
            name="queue",
            field=models.CharField(
                choices=[("default", "default"), ("low", "low"), ("high", "high")],
                help_text="Queue name",
                max_length=255,
                verbose_name="queue",
            ),
        ),
    ]
44 |
--------------------------------------------------------------------------------
/scheduler/worker/commands/kill_worker.py:
--------------------------------------------------------------------------------
1 | import errno
2 | import os
3 | import signal
4 | from typing import Any
5 |
6 | from scheduler.redis_models import WorkerModel
7 | from scheduler.settings import logger
8 | from scheduler.types import ConnectionType
9 | from scheduler.worker.commands.worker_commands import WorkerCommand
10 |
11 |
class KillWorkerCommand(WorkerCommand):
    """kill-worker command.

    Asks the target worker to stop (``request_stop`` with SIGTERM) and then
    sends SIGTERM to the worker's whole process group so child processes are
    terminated as well.
    """

    command_name = "kill-worker"

    def process_command(self, connection: ConnectionType) -> None:
        """Kill the worker named ``self.worker_name``.

        :param connection: broker connection used to look up the worker model.
        :raises ValueError: if the worker is unknown or has no recorded PID.
        """
        from scheduler.worker import Worker

        logger.info(f"Received kill-worker command for {self.worker_name}")
        worker_model = WorkerModel.get(self.worker_name, connection)
        # Distinguish "worker not found" from "worker found but PID missing" —
        # previously both raised the misleading "Worker PID is not set".
        if worker_model is None:
            raise ValueError(f"Worker {self.worker_name} not found")
        if worker_model.pid is None:
            raise ValueError("Worker PID is not set")
        logger.info(f"Killing worker main process {worker_model.pid}...")
        try:
            Worker.from_model(worker_model).request_stop(signal.SIGTERM, None)
            os.killpg(os.getpgid(worker_model.pid), signal.SIGTERM)
            logger.info(f"Killed worker main process pid {worker_model.pid}")
        except OSError as e:
            # ESRCH ("No such process") means the worker already died — fine.
            if e.errno == errno.ESRCH:
                logger.debug(
                    f"Worker main process for {self.worker_name}:{worker_model.pid} already dead"
                )
            else:
                raise
39 |
--------------------------------------------------------------------------------
/scheduler/management/commands/run_job.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 |
3 | import click
4 | from django.core.management.base import BaseCommand, CommandParser
5 |
6 | from scheduler.helpers.queues import get_queue
7 |
8 |
class Command(BaseCommand):
    """
    Queues the function given with the first argument with the
    parameters given with the rest of the argument list.
    """

    help = __doc__
    args = ""

    def add_arguments(self, parser: CommandParser) -> None:
        """Register CLI options: target queue, timeouts, callable and its args."""
        parser.add_argument("--queue", "-q", dest="queue", default="default", help="Specify the queue [default]")
        parser.add_argument("--timeout", "-t", type=int, dest="timeout", help="A timeout in seconds")
        parser.add_argument(
            "--result-ttl", "-r", type=int, dest="result_ttl", help="Time to store job results in seconds"
        )
        parser.add_argument(
            "callable",
            help="Method to call",
        )
        parser.add_argument("args", nargs="*", help="Args for callable")

    def handle(self, **options: Any) -> None:
        """Enqueue the requested callable on the chosen queue and report the job name."""
        verbosity = int(options.get("verbosity", 1))
        queue = get_queue(options.get("queue"))
        # when=None → enqueue immediately rather than scheduling for later.
        job = queue.create_and_enqueue_job(
            options.get("callable"),
            args=options.get("args"),
            timeout=options.get("timeout"),
            result_ttl=options.get("result_ttl"),
            when=None,
        )
        if verbosity:
            click.echo(f"Job {job.name} created")
40 |
--------------------------------------------------------------------------------
/scheduler/tests/test_admin_permissions.py:
--------------------------------------------------------------------------------
1 | from django.contrib import admin
2 | from django.contrib.auth.models import Permission, User
3 | from django.test import RequestFactory, TestCase
4 |
5 | from scheduler.admin.ephemeral_models import QueueAdmin
6 | from scheduler.models.ephemeral_models import Queue
7 |
8 |
class TestImmutableAdminModulePermission(TestCase):
    """QueueAdmin module visibility must follow the user's scheduler-app permissions."""

    def setUp(self) -> None:
        self.request_factory = RequestFactory()
        self.admin = QueueAdmin(Queue, admin.site)

    def _make_staff_user(self, username: str = "staff", with_scheduler_perm: bool = False) -> User:
        staff_user = User.objects.create_user(username=username, password="pwd", is_staff=True)
        if with_scheduler_perm:
            # Any permission in the 'scheduler' app should grant module perms
            view_task_perm = Permission.objects.get(codename="view_task", content_type__app_label="scheduler")
            staff_user.user_permissions.add(view_task_perm)
        return staff_user

    def _request_as(self, user: User):
        # Build a GET request authenticated as the given user.
        request = self.request_factory.get("/")
        request.user = user
        return request

    def test_has_module_permission_without_any_scheduler_perms_is_false(self) -> None:
        request = self._request_as(self._make_staff_user("no_perm", with_scheduler_perm=False))
        self.assertIs(self.admin.has_module_permission(request), False)

    def test_has_module_permission_with_any_scheduler_perm_is_true(self) -> None:
        request = self._request_as(self._make_staff_user("with_perm", with_scheduler_perm=True))
        self.assertIs(self.admin.has_module_permission(request), True)
35 |
--------------------------------------------------------------------------------
/scheduler/helpers/callback.py:
--------------------------------------------------------------------------------
1 | import inspect
2 | from typing import Union, Callable, Any, Optional
3 |
4 | from scheduler.helpers.utils import callable_func
5 | from scheduler.helpers.timeouts import JobTimeoutException
6 |
7 |
class CallbackSetupError(Exception):
    """Raised when a Callback is constructed with an invalid function or timeout."""
    pass
10 |
11 |
class Callback:
    """A callable wrapper (function or dotted-path string) executed under a timeout.

    Invoking the instance runs the wrapped function inside the configured
    death-penalty context, raising ``JobTimeoutException`` when the timeout
    elapses.
    """

    def __init__(self, func: Union[str, Callable[..., Any]], timeout: Optional[int] = None):
        """Validate and store the callback target and its timeout.

        :param func: a function/builtin, or a dotted-path string resolved via
            ``callable_func``.
        :param timeout: seconds; falls back to ``SCHEDULER_CONFIG.CALLBACK_TIMEOUT``
            when falsy.
        :raises CallbackSetupError: on a bad timeout or unresolvable/invalid func.
        """
        # Imported lazily to avoid a circular import with scheduler.settings.
        from scheduler.settings import SCHEDULER_CONFIG

        self.timeout = timeout or SCHEDULER_CONFIG.CALLBACK_TIMEOUT
        # The check rejects only negatives (0 is allowed), so the message says
        # "non-negative" — the previous "positive" wording contradicted the code.
        if not isinstance(self.timeout, int) or self.timeout < 0:
            raise CallbackSetupError(f"Callback `timeout` must be a non-negative int, but received {self.timeout}")
        if not isinstance(func, str) and not inspect.isfunction(func) and not inspect.isbuiltin(func):
            raise CallbackSetupError(f"Callback `func` must be a string or function, received {func}")
        if isinstance(func, str):
            try:
                func_str = func
                func = callable_func(func)
            except (TypeError, AttributeError, ModuleNotFoundError, ValueError) as e:
                # Chain the original resolution error for easier debugging.
                raise CallbackSetupError(f"Callback `func` is not callable: {func_str}") from e
        self.func: Callable[..., Any] = func

    @property
    def name(self) -> str:
        """Fully qualified name of the wrapped function (module.qualname)."""
        return f"{self.func.__module__}.{self.func.__qualname__}"

    def __call__(self, *args: Any, **kwargs: Any) -> Any:
        """Run the wrapped function, enforcing the timeout via the death-penalty class."""
        from scheduler.settings import SCHEDULER_CONFIG

        with SCHEDULER_CONFIG.DEATH_PENALTY_CLASS(self.timeout, JobTimeoutException):
            return self.func(*args, **kwargs)
38 |
--------------------------------------------------------------------------------
/.github/actions/test-coverage/action.yml:
--------------------------------------------------------------------------------
1 | name: Run Tests with coverage
2 | description: 'Run tests with coverage and publish results to PR'
3 | inputs:
4 | pythonVer:
5 | description: 'python version'
6 | required: true
7 | djangoVer:
8 | description: 'django version'
9 | required: true
10 | repoToken:
11 | description: 'Token for PR comment'
12 | required: true
13 | outputs:
14 | coverage:
15 | description: "Coverage"
16 | value: ${{ steps.json-report.outputs.coverage }}
17 | runs:
18 | using: "composite"
19 | steps:
20 | - name: Run regular tests with coverage
21 | shell: bash
22 | run: |
23 | cd testproject
24 | uv run coverage run manage.py test --exclude-tag multiprocess scheduler
25 | - name: Coverage report
26 | id: coverage_report
27 | shell: bash
28 | run: |
29 | mv testproject/.coverage .
echo 'REPORT<<EOF' >> $GITHUB_ENV
31 | uv run coverage report >> $GITHUB_ENV
32 | echo 'EOF' >> $GITHUB_ENV
33 | - name: json report
34 | id: json-report
35 | shell: bash
36 | run: |
37 | uv run coverage json
echo "coverage=$(jq '.totals.percent_covered_display|tonumber' coverage.json)" >> $GITHUB_OUTPUT
39 | - uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc
40 | if: ${{ github.event_name == 'pull_request' }}
41 | with:
42 | message: |
43 | Coverage report python v${{ inputs.pythonVer }} django v${{ inputs.djangoVer }}
44 | ```
45 | ${{ env.REPORT }}
46 | ```
47 | repo-token: ${{ inputs.repoToken }}
48 | allow-repeats: true
49 | update-only: true
50 |
--------------------------------------------------------------------------------
/scheduler/types/broker_types.py:
--------------------------------------------------------------------------------
1 | # This is a helper module to obfuscate types used by different broker implementations.
2 | from collections import namedtuple
3 | from typing import Any, Callable, TypeVar, Union
4 | from typing import Dict, Tuple
5 |
6 | import redis
7 |
try:
    import valkey
except ImportError:
    # Valkey is optional: fall back to redis and alias the valkey client names
    # so the Union type aliases below still resolve.
    valkey = redis
    valkey.Valkey = redis.Redis
    valkey.StrictValkey = redis.StrictRedis

from .settings_types import Broker

# Exception tuples usable directly in `except` clauses, covering both brokers.
ConnectionErrorTypes = (redis.ConnectionError, valkey.ConnectionError)
ResponseErrorTypes = (redis.ResponseError, valkey.ResponseError)
TimeoutErrorTypes = (redis.TimeoutError, valkey.TimeoutError)
WatchErrorTypes = (redis.WatchError, valkey.WatchError)
# Broker-agnostic type aliases for connections, pipelines, and sentinels.
ConnectionType = Union[redis.Redis, valkey.Valkey]
PipelineType = Union[redis.client.Pipeline, valkey.client.Pipeline]
SentinelType = Union[redis.sentinel.Sentinel, valkey.sentinel.Sentinel]
# A job target: either a dotted-path string or an actual callable.
FunctionReferenceType = TypeVar("FunctionReferenceType", str, Callable[..., Any])

BrokerMetaDataType = namedtuple("BrokerMetaDataType", ["connection_type", "sentinel_type"])

BrokerMetaData: Dict[Tuple[Broker, bool], BrokerMetaDataType] = {
    # Map of (Broker, Strict flag) => Connection Class, Sentinel Class
    (Broker.REDIS, False): BrokerMetaDataType(redis.Redis, redis.sentinel.Sentinel),
    (Broker.VALKEY, False): BrokerMetaDataType(valkey.Valkey, valkey.sentinel.Sentinel),
    (Broker.REDIS, True): BrokerMetaDataType(redis.StrictRedis, redis.sentinel.Sentinel),
    (Broker.VALKEY, True): BrokerMetaDataType(valkey.StrictValkey, valkey.sentinel.Sentinel),
}

TASK_TYPES = ["OnceTaskType", "RepeatableTaskType", "CronTaskType"]
37 |
--------------------------------------------------------------------------------
/docs/migrate_to_v3.md:
--------------------------------------------------------------------------------
1 | Migration from v2 to v3
2 | =======================
3 |
4 | Version 3.0.0 introduced a major design change. Instead of three separate models, there is one new `Task` model. The
5 | goal is to have one centralized admin view for all your scheduled tasks, regardless of the scheduling type.
6 |
7 | You need to migrate the scheduled tasks using the old models (`ScheduledTask`, `RepeatableTask`, `CronTask`) to the new
8 | model. It can be done using the export/import commands provided.
9 |
10 | After upgrading to django-tasks-scheduler v3.0.0, you will notice you are not able to create new scheduled tasks in the
11 | old models, that is intentional. In the next version of django-tasks-scheduler (v3.1), the old models will be deleted,
12 | so make sure you migrate your old models.
13 |
14 | !!! Note
15 | While we tested different scenarios heavily and left the code for old tasks, we could not account for all different
16 | use cases, therefore, please [open an issue][issues] if you encounter any.
17 |
18 | There are two ways to migrate your existing scheduled tasks:
19 |
20 | # Using the admin views of the old models
21 |
22 | If you go to the admin view of the old models, you will notice there is a new action in the actions drop down menu for
23 | migrating the selected tasks. Use it, and you will also have a link to the new task to compare the migration result.
24 |
Note that once you migrate using this method, the old task will be disabled automatically.
26 |
27 | # Export/Import management commands
28 |
29 | Run in your project directory:
30 |
31 | ```shell
32 | python manage.py export > scheduled_tasks.json
33 | python manage.py import --filename scheduled_tasks.json
34 | ```
35 |
36 | [issues]: https://github.com/django-commons/django-tasks-scheduler/issues
37 |
--------------------------------------------------------------------------------
/scheduler/templates/admin/scheduler/confirm_action.html:
--------------------------------------------------------------------------------
1 | {% extends "admin/scheduler/scheduler_base.html" %}
2 | {% load scheduler_tags %}
3 |
4 | {% block breadcrumbs %}
5 |
11 | {% endblock %}
12 |
13 | {% block content_title %}Are you sure? {% endblock %}
14 |
15 | {% block content %}
16 |
17 |
18 | Are you sure you want to {{ action|capfirst }} the {{ total_jobs }} selected jobs from
19 | {{ queue.name }}
20 | ? These jobs are selected:
21 |
22 |
23 | {% for job in jobs %}
24 |
25 | {{ job.name }}
26 | {{ job | show_func_name }}
27 |
28 | {% endfor %}
29 |
30 |
41 |
42 | {% endblock %}
43 |
--------------------------------------------------------------------------------
/scheduler/worker/commands/suspend_worker.py:
--------------------------------------------------------------------------------
1 | from scheduler.redis_models import WorkerModel
2 | from scheduler.settings import logger
3 | from scheduler.types import ConnectionType
4 | from scheduler.worker.commands.worker_commands import WorkerCommand
5 |
6 |
class SuspendWorkCommand(WorkerCommand):
    """Suspend worker command: flags the target worker's model as suspended."""

    command_name = "suspend"

    def process_command(self, connection: ConnectionType) -> None:
        """Mark the worker as suspended, skipping unknown or already-suspended workers."""
        logger.debug(f"Received command to suspend worker {self.worker_name}")
        model = WorkerModel.get(self.worker_name, connection)
        if model is None:
            logger.warning(f"Worker {self.worker_name} not found")
        elif model.is_suspended:
            logger.warning(f"Worker {self.worker_name} already suspended")
        else:
            model.set_field("is_suspended", True, connection=connection)
            logger.info(f"Worker {self.worker_name} suspended")
23 |
24 |
class ResumeWorkCommand(WorkerCommand):
    """Resume worker command: clears the suspended flag on the target worker's model."""

    command_name = "resume"

    def process_command(self, connection: ConnectionType) -> None:
        """Clear the suspended flag, skipping unknown or non-suspended workers."""
        logger.debug(f"Received command to resume worker {self.worker_name}")
        model = WorkerModel.get(self.worker_name, connection)
        if model is None:
            logger.warning(f"Worker {self.worker_name} not found")
        elif not model.is_suspended:
            logger.warning(f"Worker {self.worker_name} not suspended and therefore can't be resumed")
        else:
            model.set_field("is_suspended", False, connection=connection)
            logger.info(f"Worker {self.worker_name} resumed")
41 |
--------------------------------------------------------------------------------
/scheduler/tests/test_multiprocess/test_integrity.py:
--------------------------------------------------------------------------------
1 | from time import sleep
2 |
3 | from django.test import tag
4 | from django.urls import reverse
5 |
6 | from scheduler.helpers.queues import get_queue
7 | from scheduler.redis_models import JobStatus, JobModel, WorkerModel
8 | from scheduler.tests.jobs import long_job
9 | from .. import testtools
10 | from ..test_views.base import BaseTestCase
11 |
12 |
@tag("multiprocess")
class MultiProcessTest(BaseTestCase):
    """Integration test that runs a real worker in a separate process and
    cancels a started job through the job-detail view."""

    def test_cancel_job_after_it_started(self):
        # arrange
        queue = get_queue("django_tasks_scheduler_test")
        job = queue.create_and_enqueue_job(long_job)
        self.assertTrue(job.is_queued)
        process, worker_name = testtools.run_worker_in_process("django_tasks_scheduler_test")
        sleep(0.2)  # give the worker process time to pick the job up
        job = JobModel.get(job.name, connection=queue.connection)
        self.assertEqual(JobStatus.STARTED, job.status)
        # act
        res = self.client.post(reverse("job_detail_action", args=[job.name, "cancel"]), {"post": "yes"}, follow=True)

        # assert
        sleep(0.2)  # allow the cancel request to propagate to the worker
        process.terminate()
        process.join(2)
        process.kill()
        self.assertEqual(200, res.status_code)
        job = JobModel.get(job.name, connection=queue.connection)
        self.assertEqual(JobStatus.STOPPED, job.status)
        self.assertNotIn(job.name, queue.queued_job_registry.all(queue.connection))
        worker_model = WorkerModel.get(worker_name, connection=queue.connection)
        # The job was cancelled mid-run, so no completion counters should move.
        self.assertEqual(0, worker_model.completed_jobs)
        self.assertEqual(0, worker_model.failed_job_count)
        self.assertEqual(0, worker_model.successful_job_count)
        self.assertIsNotNone(worker_model.shutdown_requested_date)
41 |
--------------------------------------------------------------------------------
/scheduler/admin/ephemeral_models.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 |
3 | from django.contrib import admin
4 | from django.http import HttpResponse, HttpRequest
5 |
6 | from scheduler import views
7 | from scheduler.models.ephemeral_models import Queue, Worker
8 |
9 |
class ImmutableAdmin(admin.ModelAdmin):
    """Base admin for ephemeral models: additions blocked, changes allowed,
    module visibility follows the user's scheduler-app permissions."""

    def has_add_permission(self, request: HttpRequest) -> bool:
        return False  # Hide the admin "+ Add" link for Queues

    def has_change_permission(self, request: HttpRequest, obj: Any = None) -> bool:
        return True

    def has_module_permission(self, request: HttpRequest) -> bool:
        """Returns True if the given request has any permission in the given app label.

        Can be overridden by the user in subclasses. In such case, it should return True if the given request has
        permission to view the module on the admin index page and access the module's index page. Overriding it does
        not restrict access to the add, change or delete views. Use `ModelAdmin.has_(add|change|delete)_permission` for
        that.
        """
        return request.user.has_module_perms("scheduler")  # type: ignore
26 |
27 |
@admin.register(Queue)
class QueueAdmin(ImmutableAdmin):
    """Admin View for queues"""

    def changelist_view(self, request: HttpRequest, extra_context: Any = None) -> HttpResponse:
        """The 'change list' admin view for this model."""
        # Replace the default change list with the scheduler stats page.
        stats_response = views.stats(request)
        return stats_response
35 |
36 |
@admin.register(Worker)
class WorkerAdmin(ImmutableAdmin):
    """Admin View for workers"""

    def changelist_view(self, request: HttpRequest, extra_context: Any = None) -> HttpResponse:
        """The 'change list' admin view for this model."""
        # Replace the default change list with the scheduler workers page.
        workers_response = views.workers_list(request)
        return workers_response
44 |
--------------------------------------------------------------------------------
/scheduler/tests/test_worker/test_scheduler.py:
--------------------------------------------------------------------------------
1 | from datetime import timedelta
2 |
3 | import time_machine
4 | from django.utils import timezone
5 |
6 | from scheduler.models import TaskType
7 | from scheduler.settings import SCHEDULER_CONFIG
8 | from scheduler.tests.testtools import SchedulerBaseCase, task_factory
9 | from scheduler.worker import WorkerScheduler
10 | from scheduler.worker import create_worker
11 |
12 |
class TestWorkerScheduler(SchedulerBaseCase):
    """Tests for worker-embedded scheduler startup and job scheduling."""

    def test_create_worker_with_scheduler__scheduler_started(self):
        # Fix: the previous version mutated the global SCHEDULER_INTERVAL and
        # never restored it, leaking state into later tests.
        original_interval = SCHEDULER_CONFIG.SCHEDULER_INTERVAL
        self.addCleanup(setattr, SCHEDULER_CONFIG, "SCHEDULER_INTERVAL", original_interval)
        SCHEDULER_CONFIG.SCHEDULER_INTERVAL = 1
        worker = create_worker("default", name="test", burst=True, with_scheduler=True)
        worker.bootstrap()
        self.assertIsNotNone(worker.scheduler)
        worker.stop_scheduler()
        self.assertIsNone(worker.scheduler)

    def test_scheduler_schedules_tasks(self):
        with time_machine.travel(0.0, tick=False) as traveller:
            # arrange: a one-off task due in 50s sits in the scheduled registry only
            task = task_factory(TaskType.ONCE, scheduled_time=timezone.now() + timedelta(seconds=50))
            self.assertIsNotNone(task.job_name)
            self.assertFalse(task.rqueue.queued_job_registry.exists(task.rqueue.connection, task.job_name))
            self.assertTrue(task.rqueue.scheduled_job_registry.exists(task.rqueue.connection, task.job_name))

            scheduler = WorkerScheduler([task.rqueue], worker_name="fake-worker")

            # act: jump past the due time and run one scheduling pass
            traveller.move_to(50)
            scheduler._acquire_locks()
            scheduler.enqueue_scheduled_jobs()

            # assert: the job moved from the scheduled registry to the queued one
            self.assertIsNotNone(task.job_name)
            self.assertTrue(task.rqueue.queued_job_registry.exists(task.rqueue.connection, task.job_name))
            self.assertFalse(task.rqueue.scheduled_job_registry.exists(task.rqueue.connection, task.job_name))
41 |
--------------------------------------------------------------------------------
/scheduler/management/commands/export.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from typing import Any
3 |
4 | import click
5 | from django.core.management.base import BaseCommand, CommandParser
6 |
7 | from scheduler.models import Task
8 |
9 |
class Command(BaseCommand):
    """Export all scheduled jobs"""

    help = __doc__

    def add_arguments(self, parser: CommandParser) -> None:
        """Register CLI options: output format, enabled-only filter, target file."""
        parser.add_argument(
            "-o",
            "--output",
            action="store",
            choices=["json", "yaml"],
            default="json",
            dest="format",
            help="format of output",
        )

        parser.add_argument(
            "-e",
            "--enabled",
            action="store_true",
            dest="enabled",
            help="Export only enabled jobs",
        )
        parser.add_argument(
            "-f",
            "--filename",
            action="store",
            dest="filename",
            help="File name to load (otherwise writes to standard output)",
        )

    def handle(self, *args: Any, **options: Any) -> None:
        """Serialize all (or only enabled) tasks as JSON or YAML to a file or stdout."""
        filename = options.get("filename")
        file = open(filename, "w") if filename else sys.stdout
        # Fix: the output file was previously opened and never closed (leak).
        try:
            tasks = Task.objects.all()
            if options.get("enabled"):
                tasks = tasks.filter(enabled=True)
            res = [task.to_dict() for task in tasks]

            if options.get("format") == "json":
                import json

                click.echo(json.dumps(res, indent=2, default=str), file=file)
                return

            if options.get("format") == "yaml":
                try:
                    import yaml
                except ImportError:
                    click.echo("Aborting. LibYAML is not installed.")
                    sys.exit(1)  # explicit sys.exit instead of the interactive-only exit()
                # Disable YAML alias
                yaml.Dumper.ignore_aliases = lambda *x: True
                click.echo(yaml.dump(res, default_flow_style=False), file=file)
                return
        finally:
            if file is not sys.stdout:
                file.close()
65 |
--------------------------------------------------------------------------------
/scheduler/migrations/0008_rename_str_val_jobarg_val_and_more.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 4.1.7 on 2023-02-19 22:12
2 |
3 | from django.db import migrations, models
4 |
5 |
class Migration(migrations.Migration):
    # Auto-generated migration (Django 4.1.7): collapses the per-type value
    # columns (str/int/bool/datetime) on JobArg/JobKwarg into a single `val`
    # field, keeping `arg_type` as the discriminator. Do not hand-edit.
    dependencies = [
        ('scheduler', '0007_add_result_ttl'),
    ]

    operations = [
        migrations.RenameField(
            model_name='jobarg',
            old_name='str_val',
            new_name='val',
        ),
        migrations.RenameField(
            model_name='jobkwarg',
            old_name='str_val',
            new_name='val',
        ),
        migrations.RemoveField(
            model_name='jobarg',
            name='bool_val',
        ),
        migrations.RemoveField(
            model_name='jobarg',
            name='datetime_val',
        ),
        migrations.RemoveField(
            model_name='jobarg',
            name='int_val',
        ),
        migrations.RemoveField(
            model_name='jobkwarg',
            name='bool_val',
        ),
        migrations.RemoveField(
            model_name='jobkwarg',
            name='datetime_val',
        ),
        migrations.RemoveField(
            model_name='jobkwarg',
            name='int_val',
        ),
        migrations.AlterField(
            model_name='jobarg',
            name='arg_type',
            field=models.CharField(choices=[('str_val', 'string'), ('int_val', 'int'), ('bool_val', 'boolean'),
                                            ('datetime_val', 'Datetime'), ('callable', 'Callable')], default='str_val',
                                   max_length=12, verbose_name='Argument Type'),
        ),
        migrations.AlterField(
            model_name='jobkwarg',
            name='arg_type',
            field=models.CharField(choices=[('str_val', 'string'), ('int_val', 'int'), ('bool_val', 'boolean'),
                                            ('datetime_val', 'Datetime'), ('callable', 'Callable')], default='str_val',
                                   max_length=12, verbose_name='Argument Type'),
        ),
    ]
61 |
--------------------------------------------------------------------------------
/scheduler/templatetags/scheduler_tags.py:
--------------------------------------------------------------------------------
1 | from typing import Dict, Optional
2 |
3 | from django import template
4 | from django.utils.safestring import mark_safe
5 |
6 | from scheduler.helpers.queues import Queue
7 | from scheduler.models import Task, get_scheduled_task
8 | from scheduler.models.task import run_task
9 | from scheduler.redis_models import Result, JobModel
10 | from scheduler.views.helpers import get_queue
11 |
12 | register = template.Library()
13 |
14 |
@register.filter
def show_func_name(job: JobModel) -> str:
    """Template filter: the job's callable name, or the wrapped task's function
    string when the job is a scheduled-task runner."""
    try:
        display = job.func_name
        if job.func == run_task:
            # run_task wraps a Task; show the underlying task's function instead.
            display = get_scheduled_task(*job.args).function_string()
        return mark_safe(display)
    except Exception as e:  # broad on purpose: template rendering must not raise
        return repr(e)
25 |
26 |
@register.filter
def get_item(dictionary: Dict, key):
    # Template-friendly dict lookup; returns None when the key is missing.
    return dictionary.get(key)
30 |
31 |
@register.filter
def scheduled_task(job: JobModel) -> Optional[str]:
    """Absolute URL of the Task that scheduled this job, or None.

    Note: despite the name, this returns ``get_absolute_url()`` (a URL string),
    not the Task instance — the previous ``Optional[Task]`` annotation was wrong.
    """
    try:
        django_scheduled_task = get_scheduled_task(*job.args)
        return django_scheduled_task.get_absolute_url()
    except ValueError:
        return None
39 |
40 |
@register.filter
def job_result(job: JobModel) -> Optional[str]:
    """Capitalized name of the job's latest result type, or None if no result yet."""
    latest = Result.fetch_latest(get_queue(job.queue_name).connection, job.name)
    if latest is None:
        return None
    return latest.type.name.capitalize()
46 |
47 |
@register.filter
def job_scheduled_task(job: JobModel) -> Optional[str]:
    """Name of the Task row that scheduled this job, or None when it no longer exists."""
    matching_task = Task.objects.filter(id=job.scheduled_task_id).first()
    if matching_task is None:
        return None
    return matching_task.name
52 |
53 |
@register.filter
def job_status(job: JobModel):
    """The job's status string, capitalized for display."""
    return job.status.capitalize()
58 |
59 |
@register.filter
def job_runtime(job: JobModel):
    """Job runtime in milliseconds, "Still running", or "-" when not started.

    Bug fix: the previous code used only ``runtime.microseconds`` — the
    sub-second component of the timedelta — so any runtime of one second or
    more was reported wrong (e.g. 2.5s displayed as 500ms). Use
    ``total_seconds()`` to account for the whole duration.
    """
    if job.ended_at:
        runtime = job.ended_at - job.started_at
        return f"{int(runtime.total_seconds() * 1000)}ms"
    elif job.started_at:
        return "Still running"
    else:
        return "-"
70 |
71 |
@register.filter
def job_scheduled_time(job: JobModel, queue: Queue):
    # Scheduled execution time for the job, read from the queue's scheduled registry.
    return queue.scheduled_job_registry.get_scheduled_time(queue.connection, job.name)
75 |
--------------------------------------------------------------------------------
/docs/drt-model.md:
--------------------------------------------------------------------------------
1 | # Worker related flows
2 |
3 | Running `python manage.py scheduler_worker --name 'X' --queues high default low`
4 |
5 | ## Register new worker for queues
6 | ```mermaid
7 | sequenceDiagram
8 | autonumber
9 |
10 | participant worker as WorkerProcess
11 |
12 | participant qlist as QueueHash name -> key
13 | participant wlist as WorkerList
14 | participant wkey as WorkerKey
15 | participant queue as QueueKey
16 | participant job as JobHash
17 |
18 |
19 | note over worker,qlist: Checking sanity
20 |
21 | break when a queue-name in the args is not in queue-list
22 | worker ->>+ qlist: Query queue names
23 | qlist -->>- worker: All queue names
24 | worker ->> worker: check that queue names exists in the system
25 | end
26 |
27 | note over worker,wkey: register
28 | worker ->> wkey: Create workerKey with all info (new id, queues, status)
29 | worker ->> wlist: Add new worker to list, last heartbeat set to now()
30 | ```
31 |
32 | ## Work (execute jobs on queues)
33 |
34 | ```mermaid
35 | sequenceDiagram
36 | autonumber
37 |
38 | participant worker as WorkerProcess
39 |
40 | participant qlist as QueueHash name -> key
41 | participant wlist as WorkerList
42 | participant wkey as WorkerKey
43 | participant queue as QueueKey
44 | participant job as JobHash
45 |
46 | loop Until death
47 | worker ->> wlist: Update last heartbeat
48 | note over worker,job: Find next job
49 |
50 | loop over queueKeys until job to run is found or all queues are empty
51 | worker ->>+ queue: get next job name and remove it or None (zrange+zpop)
52 | queue -->>- worker: job name / nothing
53 | end
54 |
55 | note over worker,job: Execute job or sleep
56 | critical [job is found]
57 | worker ->> wkey: Update worker status to busy
58 | worker ->>+ job: query job data
59 | job -->>- worker: job data
60 |
61 | worker ->> job: update job status to running
62 | worker ->> worker: execute job
63 | worker ->> job: update job status to done/failed
64 | worker ->> wkey: Update worker status to idle
65 | option No job pending
66 | worker ->> worker: sleep
67 | end
68 | end
69 | ```
70 |
71 | # Scheduler flows
72 |
--------------------------------------------------------------------------------
/scheduler/settings.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from typing import List, Dict
3 |
4 | from django.conf import settings
5 | from django.core.exceptions import ImproperlyConfigured
6 |
7 | from scheduler.types import SchedulerConfiguration, QueueConfiguration
8 | from typing_extensions import get_annotations
9 |
10 | logger = logging.getLogger("scheduler")
11 |
12 | _QUEUES: Dict[str, QueueConfiguration] = {}
13 | SCHEDULER_CONFIG: SchedulerConfiguration = SchedulerConfiguration()
14 |
15 |
class QueueNotFoundError(Exception):
    """Raised when a requested queue name is not present in SCHEDULER_QUEUES."""
    pass
18 |
19 |
def conf_settings():
    """Load scheduler configuration from Django settings into module globals.

    Populates ``_QUEUES`` from ``settings.SCHEDULER_QUEUES`` and
    ``SCHEDULER_CONFIG`` from ``settings.SCHEDULER_CONFIG``.

    Raises:
        ImproperlyConfigured: if SCHEDULER_QUEUES is missing/not a dict, a queue
            entry has the wrong type, SCHEDULER_CONFIG has the wrong type, or an
            unknown SCHEDULER_CONFIG key is supplied.
    """
    global _QUEUES
    global SCHEDULER_CONFIG

    app_queues = getattr(settings, "SCHEDULER_QUEUES", None)
    if app_queues is None or not isinstance(app_queues, dict):
        raise ImproperlyConfigured("You have to define SCHEDULER_QUEUES in settings.py as dict")

    # Normalize every queue entry to a QueueConfiguration instance.
    for queue_name, queue_config in app_queues.items():
        if isinstance(queue_config, QueueConfiguration):
            _QUEUES[queue_name] = queue_config
        elif isinstance(queue_config, dict):
            _QUEUES[queue_name] = QueueConfiguration(**queue_config)
        else:
            raise ImproperlyConfigured(f"Queue {queue_name} configuration should be a QueueConfiguration or dict")

    user_settings = getattr(settings, "SCHEDULER_CONFIG", {})
    if isinstance(user_settings, SchedulerConfiguration):
        # A full SchedulerConfiguration instance replaces the default wholesale.
        SCHEDULER_CONFIG = user_settings  # type: ignore
        return
    if not isinstance(user_settings, dict):
        raise ImproperlyConfigured("SCHEDULER_CONFIG should be a SchedulerConfiguration or dict")

    # Use `type(obj)` because Python 3.14+ `annotationlib.get_annotations()` works only on classes/functions/modules.
    # It reads __annotations__ or __annotate__; instances without annotations will fail.
    annotations = get_annotations(type(SCHEDULER_CONFIG))
    # Dict form: overlay each key onto the default config, validating key names.
    for k, v in user_settings.items():
        if k not in annotations:
            raise ImproperlyConfigured(f"Unknown setting {k} in SCHEDULER_CONFIG")
        setattr(SCHEDULER_CONFIG, k, v)
50 |
51 |
52 | conf_settings()
53 |
54 |
def get_queue_names() -> List[str]:
    """Names of all configured queues, in configuration order."""
    return [name for name in _QUEUES]
57 |
58 |
def get_queue_configuration(queue_name: str) -> QueueConfiguration:
    """Return the configuration for ``queue_name``.

    Raises QueueNotFoundError when the queue is not configured.
    """
    try:
        return _QUEUES[queue_name]
    except KeyError:
        raise QueueNotFoundError(f"Queue {queue_name} not found, queues={_QUEUES.keys()}") from None
63 |
--------------------------------------------------------------------------------
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
1 | name: Release
2 |
3 | on:
4 | release:
5 | types: [published]
6 |
7 | env:
8 | # Change these for your project's URLs
9 | PYPI_URL: https://pypi.org/p/django-tasks-scheduler
10 | PYPI_TEST_URL: https://test.pypi.org/p/django-tasks-scheduler
11 |
12 | jobs:
13 | build:
14 | name: Build distribution 📦
15 | runs-on: ubuntu-latest
16 | permissions:
17 | id-token: write # IMPORTANT: this permission is mandatory for trusted publishing
18 | steps:
19 | - uses: actions/checkout@v5
20 | with:
21 | persist-credentials: false
22 | - name: Set up Python
23 | uses: actions/setup-python@v6
24 | with:
25 | python-version: "3.13"
26 | - name: Install pypa/build
27 | run:
28 | python3 -m pip install build --user
29 | - name: Build a binary wheel and a source tarball
30 | run: python3 -m build
31 | - name: Store the distribution packages
32 | uses: actions/upload-artifact@v4
33 | with:
34 | name: python-package-distributions
35 | path: dist/
36 |
37 | publish-to-pypi:
38 | name: >-
39 | Publish Python 🐍 distribution 📦 to PyPI
40 | if: startsWith(github.ref, 'refs/tags/') # only publish to PyPI on tag pushes
41 | needs:
42 | - build
43 | runs-on: ubuntu-latest
44 | environment:
45 | name: pypi
46 | url: ${{ env.PYPI_URL }}
47 | permissions:
48 | id-token: write # IMPORTANT: mandatory for trusted publishing
49 | steps:
50 | - name: Download all the dists
51 | uses: actions/download-artifact@v5
52 | with:
53 | name: python-package-distributions
54 | path: dist/
55 | - name: Publish distribution 📦 to PyPI
56 | uses: pypa/gh-action-pypi-publish@v1.13.0
57 |
58 | publish-to-testpypi:
59 | name: Publish Python 🐍 distribution 📦 to TestPyPI
60 | needs:
61 | - build
62 | runs-on: ubuntu-latest
63 |
64 | environment:
65 | name: testpypi
66 | url: ${{ env.PYPI_TEST_URL }}
67 |
68 | permissions:
69 | id-token: write # IMPORTANT: mandatory for trusted publishing
70 |
71 | steps:
72 | - name: Download all the dists
73 | uses: actions/download-artifact@v5
74 | with:
75 | name: python-package-distributions
76 | path: dist/
77 | - name: Publish distribution 📦 to TestPyPI
78 | uses: pypa/gh-action-pypi-publish@v1.13.0
79 | with:
80 | repository-url: https://test.pypi.org/legacy/
81 | skip-existing: true
82 |
--------------------------------------------------------------------------------
/scheduler/redis_models/result.py:
--------------------------------------------------------------------------------
1 | import dataclasses
2 | from datetime import datetime
3 | from enum import Enum
4 | from typing import Optional, Any, ClassVar, List
5 |
6 | from scheduler.helpers.utils import utcnow
7 | from scheduler.redis_models.base import StreamModel, decode_dict
8 | from scheduler.settings import logger
9 | from scheduler.types import ConnectionType, Self
10 |
11 |
class ResultType(Enum):
    """Terminal outcome recorded for a job execution."""

    SUCCESSFUL = "successful"
    FAILED = "failed"
    STOPPED = "stopped"
16 |
17 |
@dataclasses.dataclass(slots=True, kw_only=True)
class Result(StreamModel):
    """A single execution result of a job, persisted via the broker stream API.

    Results for one job share the stream keyed by ``_children_key_template``,
    so the newest result can be read back with ``fetch_latest``.
    """

    parent: str  # name of the job this result belongs to
    type: ResultType  # outcome of the execution (successful/failed/stopped)
    worker_name: str  # worker that produced this result
    ttl: Optional[int] = 0  # time-to-live for the stored result — TODO confirm units with StreamModel.save
    name: Optional[str] = None
    created_at: datetime = dataclasses.field(default_factory=utcnow)
    return_value: Optional[Any] = None  # value returned by the job, when it succeeded
    exc_string: Optional[str] = None  # formatted exception text, when it failed

    # Broker key layouts used by the StreamModel base for storage/lookup.
    _list_key: ClassVar[str] = ":job-results:"
    _children_key_template: ClassVar[str] = ":job-results:{}:"
    _element_key_template: ClassVar[str] = ":job-results:{}"

    @classmethod
    def create(
        cls,
        connection: ConnectionType,
        job_name: str,
        worker_name: Optional[str],
        _type: ResultType,
        ttl: int,
        return_value: Any = None,
        exc_string: Optional[str] = None,
    ) -> Self:
        """Build a Result for *job_name* and persist it immediately.

        :param connection: Broker connection used to save the result.
        :param job_name: Name of the job the result belongs to.
        :param worker_name: Worker that ran the job; when None a warning is
            logged and "unknown_worker" is recorded instead.
        :param _type: Outcome of the execution.
        :param ttl: Time-to-live for the stored result.
        :param return_value: Value returned by the job, if any.
        :param exc_string: Formatted exception string for failed jobs.
        :return: The saved Result instance.
        """
        if worker_name is None:
            logger.warning(f"Job {job_name} has no worker name, will save result with 'unknown_worker'")
            worker_name = "unknown_worker"
        result = cls(
            parent=job_name,
            ttl=ttl,
            type=_type,
            return_value=return_value,
            exc_string=exc_string,
            worker_name=worker_name,
        )
        result.save(connection)
        return result

    @classmethod
    def fetch_latest(cls, connection: ConnectionType, job_name: str) -> Optional["Result"]:
        """Returns the latest result for given job_name.

        :param connection: Broker connection.
        :param job_name: Job name.
        :return: Result instance or None if no result is available.
        """
        # XREVRANGE from "+" to "-" with count=1 yields only the newest entry.
        response: List[Any] = connection.xrevrange(cls._children_key_template.format(job_name), "+", "-", count=1)
        if not response:
            return None
        result_id, payload = response[0]
        res = cls.deserialize(decode_dict(payload, set()))
        return res
72 |
--------------------------------------------------------------------------------
/scheduler/templates/admin/scheduler/workers-list.partial.html:
--------------------------------------------------------------------------------
1 | {% load scheduler_tags %}
2 | {% load l10n %}
3 |
4 |
5 |
6 |
7 |
8 | Name
9 |
10 |
11 | Queues
12 |
13 |
14 | State
15 |
16 |
17 | Birth
18 |
19 |
20 | Hostname
21 |
22 |
23 | PID
24 |
25 |
26 | Working time
27 |
28 |
29 | Successful jobs
30 |
31 |
32 | Failed jobs
33 |
34 |
35 | Scheduler
36 |
37 |
38 |
39 |
40 | {% for worker in workers %}
41 |
42 |
43 |
44 | {{ worker.name }}
45 |
46 |
47 |
48 | {% for queue_name in worker.queue_names %}
49 | {{ queue_name }}
50 | {% if not forloop.last %},{% endif %}
51 | {% endfor %}
52 |
53 | {{ worker.state.value | capfirst }}
54 | {{ worker.birth | date:"Y-m-d, H:i:s" }}
55 | {{ worker.hostname }}
56 | {{ worker.pid | unlocalize }}
57 | {{ worker.total_working_time_ms | default:0 | floatformat }} secs
58 | {{ worker.successful_job_count | default:0 }}
59 | {{ worker.failed_job_count | default:0 }}
60 | {{ worker.has_scheduler }}
61 |
62 | {% endfor %}
63 |
64 |
65 |
66 |
--------------------------------------------------------------------------------
/scheduler/tests/test_worker/test_worker_creation.py:
--------------------------------------------------------------------------------
1 | import os
2 | import uuid
3 |
4 | from scheduler import settings
5 | from scheduler.redis_models import WorkerModel
6 | from scheduler.worker import create_worker
7 | from scheduler.tests import conf # noqa
8 | from scheduler.tests.testtools import SchedulerBaseCase
9 | from scheduler.worker.worker import QueueConnectionDiscrepancyError
10 |
11 |
class TestWorker(SchedulerBaseCase):
    """Tests for worker creation via `create_worker`."""

    def test_create_worker__two_workers_same_queue(self):
        # Worker names are auto-generated from the hostname plus a running index.
        first = create_worker("default", "django_tasks_scheduler_test")
        first.worker_start()
        second = create_worker("default")
        second.worker_start()
        hostname = os.uname()[1]
        self.assertEqual(f"{hostname}-worker.1", first.name)
        self.assertEqual(f"{hostname}-worker.2", second.name)

    def test_create_worker__worker_with_queues_different_connection(self):
        # "default" and "test1" point at different broker connections.
        with self.assertRaises(QueueConnectionDiscrepancyError):
            create_worker("default", "test1")

    def test_create_worker__with_name(self):
        custom_name = uuid.uuid4().hex
        worker = create_worker("default", name=custom_name)
        self.assertEqual(custom_name, worker.name)

    def test_create_worker__with_name_containing_slash(self):
        # Slashes in worker names are replaced with dots.
        raw_name = uuid.uuid4().hex[-4:] + "/" + uuid.uuid4().hex[-4:]
        worker = create_worker("default", name=raw_name)
        self.assertEqual(raw_name.replace("/", "."), worker.name)

    def test_create_worker__scheduler_interval(self):
        previous_interval = settings.SCHEDULER_CONFIG.SCHEDULER_INTERVAL
        settings.SCHEDULER_CONFIG.SCHEDULER_INTERVAL = 1
        worker = create_worker("default", name="test", burst=True, with_scheduler=True)
        worker.bootstrap()
        self.assertEqual(worker.name, "test")
        self.assertEqual(worker.scheduler.interval, 1)
        settings.SCHEDULER_CONFIG.SCHEDULER_INTERVAL = previous_interval
        worker.teardown()

    def test_create_worker__cleanup(self):
        worker = create_worker("default", name="test", burst=True, with_scheduler=False)
        worker.bootstrap()
        # Simulate a dead worker: its key is deleted but it is still registered.
        worker.connection.delete(WorkerModel.key_for(worker.name))
        self.assertIn(worker.name, WorkerModel.all_names(worker.connection))
        # act
        WorkerModel.cleanup(worker.connection, "default")
        # assert
        self.assertNotIn(worker.name, WorkerModel.all_names(worker.connection))
57 |
--------------------------------------------------------------------------------
/scheduler/tests/conf.py:
--------------------------------------------------------------------------------
1 | # Settings for tests
2 | import os
3 |
4 | from django.conf import settings
5 |
6 | from scheduler.settings import conf_settings
7 |
8 | settings.SCHEDULER_QUEUES = {
9 | "default": {"HOST": "localhost", "PORT": 6379, "DB": 0},
10 | "test": {"HOST": "localhost", "PORT": 1, "DB": 1},
11 | "sentinel": {
12 | "SENTINELS": [("localhost", 26736), ("localhost", 26737)],
13 | "MASTER_NAME": "testmaster",
14 | "DB": 1,
15 | "USERNAME": "redis-user",
16 | "PASSWORD": "secret",
17 | "SENTINEL_KWARGS": {},
18 | },
19 | "test1": {
20 | "HOST": "localhost",
21 | "PORT": 1,
22 | "DB": 1,
23 | },
24 | "test2": {
25 | "HOST": "localhost",
26 | "PORT": 1,
27 | "DB": 1,
28 | },
29 | "test3": {
30 | "HOST": "localhost",
31 | "PORT": 6379,
32 | "DB": 1,
33 | },
34 | "async": {
35 | "HOST": "localhost",
36 | "PORT": 6379,
37 | "DB": 1,
38 | "ASYNC": False,
39 | },
40 | "url": {
41 | "URL": "redis://username:password@host:1234/",
42 | "DB": 4,
43 | },
44 | "url_with_db": {
45 | "URL": "redis://username:password@host:1234/5",
46 | },
47 | "url_default_db": {
48 | "URL": "redis://username:password@host:1234",
49 | },
50 | "django_tasks_scheduler_test": {
51 | "HOST": "localhost",
52 | "PORT": 6379,
53 | "DB": 0,
54 | },
55 | "scheduler_scheduler_active_test": {
56 | "HOST": "localhost",
57 | "PORT": 6379,
58 | "DB": 0,
59 | "ASYNC": False,
60 | },
61 | "scheduler_scheduler_inactive_test": {
62 | "HOST": "localhost",
63 | "PORT": 6379,
64 | "DB": 0,
65 | "ASYNC": False,
66 | },
67 | "worker_scheduler_active_test": {
68 | "HOST": "localhost",
69 | "PORT": 6379,
70 | "DB": 0,
71 | "ASYNC": False,
72 | },
73 | "worker_scheduler_inactive_test": {
74 | "HOST": "localhost",
75 | "PORT": 6379,
76 | "DB": 0,
77 | "ASYNC": False,
78 | },
79 | "django_tasks_scheduler_test2": {
80 | "HOST": "localhost",
81 | "PORT": 6379,
82 | "DB": 0,
83 | },
84 | "test_scheduler": {
85 | "HOST": "localhost",
86 | "PORT": 6379,
87 | "DB": 0,
88 | },
89 | }
if os.getenv("FAKEREDIS", "False") == "True":  # pragma: no cover
    # BUG FIX: iterating a dict directly yields only its keys, so unpacking
    # each key string into (name, queue_settings) raised ValueError and the
    # BROKER override was never applied. Iterate .items() to get the pairs.
    for name, queue_settings in settings.SCHEDULER_QUEUES.items():  # pragma: no cover
        queue_settings["BROKER"] = "fakeredis"  # pragma: no cover
93 |
94 | conf_settings()
95 |
--------------------------------------------------------------------------------
/scheduler/views/worker_views.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 |
3 | from django.contrib import admin
4 | from django.contrib.admin.views.decorators import staff_member_required
5 | from django.core.paginator import Paginator
6 | from django.http import HttpResponse, HttpRequest, Http404
7 | from django.shortcuts import render
8 | from django.views.decorators.cache import never_cache
9 |
10 | from scheduler.helpers.queues import get_all_workers
11 | from scheduler.redis_models import WorkerModel, JobModel
12 | from scheduler.settings import SCHEDULER_CONFIG
13 | from scheduler.views.helpers import get_queue
14 |
15 |
def get_worker_executions(worker: WorkerModel) -> List[JobModel]:
    """Collect the jobs, across all of the worker's queues, that it executed."""
    executions: List[JobModel] = []
    for queue_name in worker.queue_names:
        queue = get_queue(queue_name)
        executions.extend(job for job in queue.get_all_jobs() if job.worker_name == worker.name)
    return executions
24 |
25 |
@never_cache  # type: ignore
@staff_member_required  # type: ignore
def worker_details(request: HttpRequest, name: str) -> HttpResponse:
    """Admin view showing one worker's details and a paginated execution list.

    :param request: The HTTP request; the ``p`` GET parameter selects the page.
    :param name: Name of the worker to display.
    :raises Http404: If no worker with the given name exists.
    """
    workers = get_all_workers()
    worker = next((w for w in workers if w.name == name), None)

    if worker is None:
        raise Http404(f"Couldn't find worker with this ID: {name}")

    execution_list = get_worker_executions(worker)
    paginator = Paginator(execution_list, SCHEDULER_CONFIG.EXECUTIONS_IN_PAGE)
    page_number = request.GET.get("p", 1)
    page_obj = paginator.get_page(page_number)
    page_range = paginator.get_elided_page_range(page_obj.number)
    current_job = None
    if worker.current_job_name is not None:
        # NOTE(review): the current job is fetched via the worker's first
        # queue's connection — assumes all its queues share one broker; confirm.
        queue = get_queue(worker.queue_names[0])
        current_job = JobModel.get(worker.current_job_name, connection=queue.connection)
    context_data = {
        **admin.site.each_context(request),
        "worker": worker,
        "queue_names": ", ".join(worker.queue_names),
        "current_job": current_job,
        "executions": page_obj,
        "page_range": page_range,
        "page_var": "p",
    }
    return render(request, "admin/scheduler/worker_details.html", context_data)
54 |
55 |
@never_cache  # type: ignore
@staff_member_required  # type: ignore
def workers_list(request: HttpRequest) -> HttpResponse:
    """Admin view rendering every worker known across the configured queues."""
    context_data = dict(admin.site.each_context(request))
    context_data["workers"] = list(get_all_workers())
    return render(request, "admin/scheduler/workers_list.html", context_data)
67 |
--------------------------------------------------------------------------------
/scheduler/migrations/0017_remove_crontask_repeat_crontask_failed_runs_and_more.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 5.0.1 on 2024-01-10 17:39
2 |
3 | from django.db import migrations, models
4 |
5 |
class Migration(migrations.Migration):
    """Drop ``CronTask.repeat`` and add per-task run statistics.

    Adds ``failed_runs``/``successful_runs`` counters and
    ``last_failed_run``/``last_successful_run`` timestamps to both
    ``CronTask`` and ``RepeatableTask``.
    """

    dependencies = [
        ('scheduler', '0016_rename_jobarg_taskarg_rename_jobkwarg_taskkwarg_and_more'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='crontask',
            name='repeat',
        ),
        migrations.AddField(
            model_name='crontask',
            name='failed_runs',
            field=models.PositiveIntegerField(default=0, help_text='Number of times the task has failed', verbose_name='failed runs'),
        ),
        migrations.AddField(
            model_name='crontask',
            name='last_failed_run',
            field=models.DateTimeField(blank=True, help_text='Last time the task has failed', null=True, verbose_name='last failed run'),
        ),
        migrations.AddField(
            model_name='crontask',
            name='last_successful_run',
            field=models.DateTimeField(blank=True, help_text='Last time the task has succeeded', null=True, verbose_name='last successful run'),
        ),
        migrations.AddField(
            model_name='crontask',
            name='successful_runs',
            field=models.PositiveIntegerField(default=0, help_text='Number of times the task has succeeded', verbose_name='successful runs'),
        ),
        migrations.AddField(
            model_name='repeatabletask',
            name='failed_runs',
            field=models.PositiveIntegerField(default=0, help_text='Number of times the task has failed', verbose_name='failed runs'),
        ),
        migrations.AddField(
            model_name='repeatabletask',
            name='last_failed_run',
            field=models.DateTimeField(blank=True, help_text='Last time the task has failed', null=True, verbose_name='last failed run'),
        ),
        migrations.AddField(
            model_name='repeatabletask',
            name='last_successful_run',
            field=models.DateTimeField(blank=True, help_text='Last time the task has succeeded', null=True, verbose_name='last successful run'),
        ),
        migrations.AddField(
            model_name='repeatabletask',
            name='successful_runs',
            field=models.PositiveIntegerField(default=0, help_text='Number of times the task has succeeded', verbose_name='successful runs'),
        ),
    ]
58 |
--------------------------------------------------------------------------------
/scheduler/worker/commands/stop_job.py:
--------------------------------------------------------------------------------
1 | import os
2 | import signal
3 | from typing import Dict, Any
4 |
5 | from scheduler.redis_models import WorkerModel, JobModel
6 | from scheduler.settings import logger
7 | from scheduler.types import ConnectionType
8 | from scheduler.worker.commands.worker_commands import WorkerCommand, WorkerCommandError
9 |
10 |
class StopJobCommand(WorkerCommand):
    """stop-job command"""

    command_name = "stop-job"

    def __init__(self, *args: Any, job_name: str, worker_name: str, **kwargs: Any) -> None:
        """Create a stop-job command targeting *job_name* on *worker_name*.

        :raises WorkerCommandError: If ``job_name`` is None.
        """
        super().__init__(*args, worker_name=worker_name, **kwargs)
        self.job_name = job_name
        if self.job_name is None:
            # NOTE(review): message says "kill-job" though command_name is
            # "stop-job" — looks like a copy/paste remnant; confirm wording.
            raise WorkerCommandError("job_name for kill-job command is required")

    def command_payload(self, **kwargs: Any) -> Dict[str, Any]:
        """Return the broker payload for this command, including the job name."""
        return super().command_payload(job_name=self.job_name)

    def process_command(self, connection: ConnectionType) -> None:
        """Handle a stop-job command on the receiving worker.

        Validates that the worker and job exist, that the worker runs jobs in a
        separate process, and that it is currently executing the targeted job;
        then sends SIGTERM to the job-execution process group.
        """
        logger.debug(f"Received command to stop job {self.job_name}")
        worker_model = WorkerModel.get(self.worker_name, connection)
        job_model = JobModel.get(self.job_name, connection)
        if worker_model is None:
            logger.error(f"Worker {self.worker_name} not found")
            return
        if job_model is None:
            logger.error(f"Job {self.job_name} not found")
            return
        if not worker_model.job_execution_process_pid:
            logger.error(f"Worker {self.worker_name} has no job execution process")
            return
        if worker_model.pid == worker_model.job_execution_process_pid:
            # Job runs inside the worker's own process — killing it would kill the worker.
            logger.warning(f"Job execution process ID and worker process id {worker_model.pid} are equal, skipping")
            return
        if worker_model.current_job_name != self.job_name:
            logger.info(
                f"{self.worker_name} working on job {worker_model.current_job_name}, "
                f"not on {self.job_name}, kill-job command ignored."
            )
            return
        # Record which job is being stopped on the worker model before signaling.
        worker_model.set_field("stopped_job_name", self.job_name, connection)
        try:
            pgid = os.getpgid(worker_model.job_execution_process_pid)
            logger.debug(
                f"worker_pid {worker_model.pid}, job_execution_process {worker_model.job_execution_process_pid}"
            )
            if pgid == worker_model.pid:
                logger.error("No separate process for job execution, skipping")
                return
            # Signal the whole process group so any children die with the job.
            os.killpg(pgid, signal.SIGTERM)
        except ProcessLookupError as e:
            logger.error(f"Error killing job {self.job_name}: {e}")
59 |
--------------------------------------------------------------------------------
/scheduler/views/helpers.py:
--------------------------------------------------------------------------------
1 | from typing import Optional, List
2 | from typing import Tuple
3 | from urllib.parse import urlparse
4 |
5 | from django.contrib import messages
6 | from django.http import Http404
7 | from django.http import HttpRequest
8 | from django.urls import resolve
9 | from django.utils.http import url_has_allowed_host_and_scheme
10 |
11 | from scheduler.helpers.queues import Queue
12 | from scheduler.helpers.queues import get_queue as get_queue_base
13 | from scheduler.redis_models import JobModel
14 | from scheduler.settings import QueueNotFoundError
15 | from scheduler.settings import get_queue_names, logger
16 |
17 | _QUEUES_WITH_BAD_CONFIGURATION = set()
18 |
19 |
def get_queue(queue_name: str) -> Queue:
    """Resolve *queue_name* to a Queue, converting config errors to 404s."""
    try:
        queue = get_queue_base(queue_name)
    except QueueNotFoundError as exc:
        logger.error(exc)
        raise Http404(exc)
    return queue
26 |
27 |
def _find_job(job_name: str) -> Tuple[Optional[Queue], Optional[JobModel]]:
    """Search every configured queue for a job named *job_name*.

    Queues that previously raised are remembered in
    ``_QUEUES_WITH_BAD_CONFIGURATION`` and skipped on subsequent calls.

    :param job_name: Name of the job to look for.
    :return: ``(queue, job)`` when found, ``(None, None)`` otherwise.
    """
    for queue_name in get_queue_names():
        if queue_name in _QUEUES_WITH_BAD_CONFIGURATION:
            continue
        try:
            queue = get_queue(queue_name)
            job = JobModel.get(job_name, connection=queue.connection)
            if job is not None and job.queue_name == queue_name:
                return queue, job
        except Exception as e:
            # Remember misconfigured queues so later lookups skip them quickly.
            # (Removed a redundant `pass` that followed the logging call.)
            _QUEUES_WITH_BAD_CONFIGURATION.add(queue_name)
            logger.debug(f"Queue {queue_name} added to bad configuration - Got exception: {e}")
    return None, None
43 |
44 |
def _check_next_url(request: HttpRequest, default_next_url: str) -> str:
    """Validate the POSTed ``next_url``, falling back to *default_next_url*.

    Rejects URLs with a netloc or scheme (only local paths are allowed) and
    URLs that do not resolve to a known view.
    """
    next_url: str = request.POST.get("next_url", default_next_url)
    # Strip backslashes, which some browsers treat as forward slashes.
    next_url = next_url.replace("\\", "")
    parsed = urlparse(next_url)  # parse once instead of twice (was computed per check)
    if not url_has_allowed_host_and_scheme(next_url, allowed_hosts=None) or parsed.netloc or parsed.scheme:
        messages.warning(request, "Bad followup URL")
        next_url = default_next_url
    try:
        resolve(next_url)
    except Exception:
        messages.warning(request, "Bad followup URL")
        next_url = default_next_url
    return next_url
61 |
62 |
def _enqueue_multiple_jobs(queue: Queue, job_names: List[str], at_front: bool = False) -> int:
    """Re-enqueue the named jobs on *queue* in one pipeline.

    :param queue: Queue to enqueue the jobs on.
    :param job_names: Names of the jobs to requeue; unknown names are skipped.
    :param at_front: Whether to place the jobs at the front of the queue.
    :return: Number of jobs actually enqueued.
    """
    fetched = JobModel.get_many(job_names, connection=queue.connection)
    existing_jobs = [job for job in fetched if job is not None]
    with queue.connection.pipeline() as pipe:
        for job in existing_jobs:
            job.save(connection=pipe)
            queue.enqueue_job(job, pipeline=pipe, at_front=at_front)
        pipe.execute()
    return len(existing_jobs)
75 |
--------------------------------------------------------------------------------
/scheduler/templates/admin/scheduler/jobs-list.partial.html:
--------------------------------------------------------------------------------
1 | {% load scheduler_tags i18n %}
2 | {% if not add %}
3 |
4 | Job executions
5 |
6 |
7 |
8 |
9 | ID
10 | STATUS
11 | Created at
12 | Enqueued at
13 | Started at
14 | Ran for
15 | Worker name
16 | Result
17 |
18 |
19 |
20 | {% for exec in executions %}
21 |
22 |
23 | {{ exec.name }}
24 | {% if exec.scheduled_task_id %}
25 |
26 | Go to scheduled task
27 |
28 | {% endif %}
29 |
30 | {{ exec|job_status }}
31 | {{ exec.created_at|date:"Y-m-d, H:i:s"|default:"-" }}
32 | {{ exec.enqueued_at|date:"Y-m-d, H:i:s"|default:"-" }}
33 | {{ exec.started_at|date:"Y-m-d, H:i:s"|default:"-" }}
34 | {{ exec|job_runtime }}
35 | {{ exec.worker_name|default:"-" }}
36 | {{ exec|job_result|default:"-" }}
37 |
38 | {% endfor %}
39 |
40 |
41 |
42 |
43 | {% if pagination_required %}
44 | {% for i in page_range %}
45 | {% if i == executions.paginator.ELLIPSIS %}
46 | {{ executions.paginator.ELLIPSIS }}
47 | {% elif i == executions.number %}
48 | {{ i }}
49 | {% else %}
50 | {{ i }}
52 | {% endif %}
53 | {% endfor %}
54 | {% endif %}
55 | {{ executions.paginator.count }} {% blocktranslate count counter=executions.paginator.count %}entry{% plural %}entries{% endblocktranslate %}
56 |
57 |
58 | {% endif %}
59 |
--------------------------------------------------------------------------------
/scheduler/tests/test_mgmt_commands/test_export.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import tempfile
4 | from unittest import mock
5 |
6 | import yaml
7 | from django.core.management import call_command
8 | from django.test import TestCase
9 |
10 | from scheduler.tests import conf # noqa
11 | from scheduler.tests.testtools import task_factory
12 | from scheduler.models import TaskType
13 |
14 |
class ExportTest(TestCase):
    """Tests for the `export` management command."""

    def setUp(self) -> None:
        super().setUp()
        # FIX: delete=False — tearDown removes the file explicitly; with the
        # default delete=True the file object's close (at GC time) would try
        # to unlink the already-removed file and raise FileNotFoundError.
        self.tmpfile = tempfile.NamedTemporaryFile(delete=False)

    def tearDown(self) -> None:
        super().tearDown()
        self.tmpfile.close()
        os.remove(self.tmpfile.name)

    def test_export__should_export_job(self):
        tasks = []
        tasks.append(task_factory(TaskType.ONCE, enabled=True))
        tasks.append(task_factory(TaskType.REPEATABLE, enabled=True))

        # act
        call_command("export", filename=self.tmpfile.name)
        # assert
        result = json.load(self.tmpfile)
        self.assertEqual(len(tasks), len(result))
        self.assertEqual(result[0], tasks[0].to_dict())
        self.assertEqual(result[1], tasks[1].to_dict())

    def test_export__should_export_enabled_jobs_only(self):
        tasks = []
        tasks.append(task_factory(TaskType.ONCE, enabled=True))
        tasks.append(task_factory(TaskType.REPEATABLE, enabled=False))

        # act
        call_command("export", filename=self.tmpfile.name, enabled=True)
        # assert: only the enabled task is exported
        result = json.load(self.tmpfile)
        self.assertEqual(len(tasks) - 1, len(result))
        self.assertEqual(result[0], tasks[0].to_dict())

    def test_export__should_export_job_yaml_without_yaml_lib(self):
        tasks = []
        tasks.append(task_factory(TaskType.ONCE, enabled=True))
        tasks.append(task_factory(TaskType.REPEATABLE, enabled=True))

        # act: hide the yaml module so the command's import fails
        with mock.patch.dict("sys.modules", {"yaml": None}):
            with self.assertRaises(SystemExit) as cm:
                call_command("export", filename=self.tmpfile.name, format="yaml")
            self.assertEqual(cm.exception.code, 1)

    def test_export__should_export_job_yaml_green(self):
        tasks = []
        tasks.append(task_factory(TaskType.ONCE, enabled=True))
        tasks.append(task_factory(TaskType.REPEATABLE, enabled=True))
        tasks.append(task_factory(TaskType.CRON, enabled=True))

        # act
        call_command("export", filename=self.tmpfile.name, format="yaml")
        # assert
        result = yaml.load(self.tmpfile, yaml.SafeLoader)
        self.assertEqual(len(tasks), len(result))
        self.assertEqual(result[0], tasks[0].to_dict())
        self.assertEqual(result[1], tasks[1].to_dict())
        self.assertEqual(result[2], tasks[2].to_dict())
74 |
--------------------------------------------------------------------------------
/scheduler/templates/admin/scheduler/jobs-list-with-tasks.partial.html:
--------------------------------------------------------------------------------
1 | {% load scheduler_tags i18n %}
2 | {% if not add %}
3 |
4 | Job executions
5 |
6 |
7 |
8 |
9 | ID
10 | Scheduled Task
11 | STATUS
12 | Created at
13 | Enqueued at
14 | Started at
15 | Ran for
16 | Worker name
17 | Result
18 |
19 |
20 |
21 | {% for exec in executions %}
22 |
23 |
24 | {{ exec.name }}
25 |
26 |
27 | {% if exec.scheduled_task_id %}
28 |
29 | {{ exec|job_scheduled_task }}
30 |
31 | {% endif %}
32 |
33 | {{ exec|job_status }}
34 | {{ exec.created_at|date:"Y-m-d, H:i:s"|default:"-" }}
35 | {{ exec.enqueued_at|date:"Y-m-d, H:i:s"|default:"-" }}
36 | {{ exec.started_at|date:"Y-m-d, H:i:s"|default:"-" }}
37 | {{ exec|job_runtime }}
38 | {{ exec.worker_name|default:"-" }}
39 | {{ exec|job_result|default:"-" }}
40 |
41 | {% endfor %}
42 |
43 |
44 |
45 |
46 | {% if pagination_required %}
47 | {% for i in page_range %}
48 | {% if i == executions.paginator.ELLIPSIS %}
49 | {{ executions.paginator.ELLIPSIS }}
50 | {% elif i == executions.number %}
51 | {{ i }}
52 | {% else %}
53 | {{ i }}
55 | {% endif %}
56 | {% endfor %}
57 | {% endif %}
58 | {{ executions.paginator.count }} {% blocktranslate count counter=executions.paginator.count %}entry{% plural %}entries{% endblocktranslate %}
59 |
60 |
61 | {% endif %}
62 |
--------------------------------------------------------------------------------
/mkdocs.yml:
--------------------------------------------------------------------------------
1 | ---
2 | site_name: django-tasks-scheduler
3 | site_author: Daniel Moran
4 | site_description: >-
5 | Documentation for django-tasks-scheduler django library
6 | # Repository
7 | repo_name: django-commons/django-tasks-scheduler
8 | repo_url: https://github.com/django-commons/django-tasks-scheduler
9 |
10 | # Copyright
11 | copyright: Copyright © 2022 - 2023 Daniel Moran
12 |
13 | extra:
14 | generator: false
15 | analytics:
16 | provider: google
17 | property: G-GJBJBKXT19
18 |
19 | markdown_extensions:
20 | - abbr
21 | - admonition
22 | - attr_list
23 | - def_list
24 | - footnotes
25 | - md_in_html
26 | - pymdownx.arithmatex:
27 | generic: true
28 | - pymdownx.betterem:
29 | smart_enable: all
30 | - pymdownx.caret
31 | - pymdownx.details
32 | - pymdownx.emoji:
33 | emoji_generator: !!python/name:material.extensions.emoji.to_svg
34 | emoji_index: !!python/name:material.extensions.emoji.twemoji
35 | - pymdownx.highlight:
36 | anchor_linenums: true
37 | - pymdownx.inlinehilite
38 | - pymdownx.keys
39 | - pymdownx.magiclink:
40 | repo_url_shorthand: true
41 | user: django-commons
42 | repo: django-tasks-scheduler
43 | - pymdownx.mark
44 | - pymdownx.smartsymbols
45 | - pymdownx.superfences:
46 | custom_fences:
47 | - name: mermaid
48 | class: mermaid
49 | format: !!python/name:pymdownx.superfences.fence_code_format
50 | - pymdownx.tabbed:
51 | alternate_style: true
52 | - pymdownx.tasklist:
53 | custom_checkbox: true
54 | - pymdownx.tilde
55 | - toc:
56 | permalink: true
57 | toc_depth: 3
58 |
59 |
60 | theme:
61 | name: material
62 | palette:
63 | - scheme: default
64 | primary: indigo
65 | accent: indigo
66 | toggle:
67 | icon: material/brightness-7
68 | name: Switch to dark mode
69 | - scheme: slate
70 | primary: indigo
71 | accent: indigo
72 | toggle:
73 | icon: material/brightness-4
74 | name: Switch to light mode
75 | features:
76 | # - announce.dismiss
77 | - content.action.edit
78 | - content.action.view
79 | - content.code.annotate
80 | - content.code.copy
81 | # - content.tabs.link
82 | - content.tooltips
83 | # - header.autohide
84 | # - navigation.expand
85 | - navigation.footer
86 | - navigation.indexes
87 | # - navigation.instant
88 | # - navigation.prune
89 | - navigation.sections
90 | # - navigation.tabs.sticky
91 | - navigation.tracking
92 | - search.highlight
93 | - search.share
94 | - search.suggest
95 | - toc.follow
96 | # - toc.integrate
97 | highlightjs: true
98 | hljs_languages:
99 | - yaml
100 | - django
101 |
102 | nav:
103 | - Home: index.md
104 | - Migrate v2 to v3: migrate_to_v3.md
105 | - Installation: installation.md
106 | - Configuration: configuration.md
107 | - Usage: usage.md
108 | - Management commands: commands.md
109 | - Change log: changelog.md
110 |
--------------------------------------------------------------------------------
/scheduler/helpers/queues/getters.py:
--------------------------------------------------------------------------------
1 | from typing import Set
2 |
3 | from scheduler.redis_models.worker import WorkerModel
4 | from scheduler.settings import SCHEDULER_CONFIG, get_queue_names, get_queue_configuration, logger
5 | from scheduler.types import ConnectionErrorTypes, BrokerMetaData, Broker, ConnectionType, QueueConfiguration
6 | from .queue_logic import Queue
7 |
8 | _BAD_QUEUE_CONFIGURATION = set()
9 |
10 |
def _get_connection(config: QueueConfiguration, use_strict_broker: bool = False) -> ConnectionType:
    """Returns a Broker connection to use based on parameters in SCHEDULER_QUEUES"""
    # Connection source precedence: fakeredis override, URL, unix socket,
    # sentinel, then plain host/port.
    if SCHEDULER_CONFIG.BROKER == Broker.FAKEREDIS:
        # Imported lazily so fakeredis is only required when actually configured.
        import fakeredis

        broker_cls = fakeredis.FakeRedis if not use_strict_broker else fakeredis.FakeStrictRedis
    else:
        broker_cls = BrokerMetaData[(SCHEDULER_CONFIG.BROKER, use_strict_broker)].connection_type
    if config.URL:
        return broker_cls.from_url(config.URL, db=config.DB, **(config.CONNECTION_KWARGS or {}))
    if config.UNIX_SOCKET_PATH:
        return broker_cls(unix_socket_path=config.UNIX_SOCKET_PATH, db=config.DB)

    if config.SENTINELS:
        # Credentials are passed both to the sentinels and to the master connection.
        connection_kwargs = {
            "db": config.DB,
            "password": config.PASSWORD,
            "username": config.USERNAME,
        }
        connection_kwargs.update(config.CONNECTION_KWARGS or {})
        sentinel_kwargs = config.SENTINEL_KWARGS or {}
        SentinelClass = BrokerMetaData[(SCHEDULER_CONFIG.BROKER, use_strict_broker)].sentinel_type
        sentinel = SentinelClass(config.SENTINELS, sentinel_kwargs=sentinel_kwargs, **connection_kwargs)
        # Resolve the current master for the configured service name.
        return sentinel.master_for(  # type: ignore
            service_name=config.MASTER_NAME,
            redis_class=broker_cls,
        )

    return broker_cls(
        host=config.HOST,
        port=config.PORT,
        db=config.DB,
        username=config.USERNAME,
        password=config.PASSWORD,
        **(config.CONNECTION_KWARGS or {}),
    )
47 |
48 |
def get_queue_connection(queue_name: str) -> ConnectionType:
    """Create a broker connection from the named queue's configuration."""
    return _get_connection(get_queue_configuration(queue_name))
53 |
54 |
def get_queue(name: str = "default") -> Queue:
    """Return a Queue built from the ``SCHEDULER_QUEUES`` entry for *name*."""
    config = get_queue_configuration(name)
    return Queue(name=name, connection=_get_connection(config), is_async=config.ASYNC)
61 |
62 |
def get_all_workers() -> Set[WorkerModel]:
    """Gather the WorkerModel entries registered on every configured queue.

    Queues whose broker cannot be reached are added to
    ``_BAD_QUEUE_CONFIGURATION`` and skipped on subsequent calls.
    """
    workers: Set[WorkerModel] = set()
    for queue_name in get_queue_names():
        if queue_name in _BAD_QUEUE_CONFIGURATION:
            continue
        connection = _get_connection(get_queue_configuration(queue_name))
        try:
            workers.update(WorkerModel.all(connection=connection))
        except ConnectionErrorTypes as e:
            logger.error(f"Could not connect for queue {queue_name}: {e}")
            _BAD_QUEUE_CONFIGURATION.add(queue_name)
    return workers
78 |
--------------------------------------------------------------------------------
/docs/installation.md:
--------------------------------------------------------------------------------
1 | # Installation
2 |
3 | 1. Use pip to install:
4 | ```shell
5 | pip install django-tasks-scheduler
6 | ```
7 |
8 | 2. In `settings.py`, add `scheduler` to `INSTALLED_APPS`:
9 | ```python
10 | INSTALLED_APPS = [
11 | # ...
12 | 'scheduler',
13 | # ...
14 | ]
15 | ```
16 |
17 | 3. Configure your queues.
18 | Add at least one Redis Queue to your `settings.py`.
   Note that the usage of `QueueConfiguration` is optional; you can use a simple dictionary, but `QueueConfiguration`
   helps prevent configuration errors.
21 | ```python
22 | import os
23 | from typing import Dict
24 | from scheduler.types import QueueConfiguration
25 |
26 | SCHEDULER_QUEUES: Dict[str, QueueConfiguration] = {
27 | 'default': QueueConfiguration(
28 | HOST='localhost',
29 | PORT=6379,
30 | USERNAME='some-user',
31 | PASSWORD='some-password',
32 | CONNECTION_KWARGS={ # Eventual additional Broker connection arguments
33 | 'ssl_cert_reqs': 'required',
34 | 'ssl': True,
35 | },
36 | ),
37 | 'with-sentinel': QueueConfiguration(
38 | SENTINELS= [('localhost', 26736), ('localhost', 26737)],
39 | MASTER_NAME= 'redismaster',
40 | DB= 0,
41 | USERNAME= 'redis-user',
42 | PASSWORD= 'secret',
43 | CONNECTION_KWARGS= {
44 | 'ssl': True},
45 | SENTINEL_KWARGS= {
46 | 'username': 'sentinel-user',
47 | 'password': 'secret',
48 | }),
49 | 'high': QueueConfiguration(URL=os.getenv('REDISTOGO_URL', 'redis://localhost:6379/0')),
50 | 'low': QueueConfiguration(HOST='localhost', PORT=6379, DB=0, ASYNC=False),
51 | }
52 | ```
53 |
54 | 4. Optional: Configure default values for queuing jobs from code:
55 | ```python
56 | from scheduler.types import SchedulerConfiguration, Broker
57 |
58 | SCHEDULER_CONFIG = SchedulerConfiguration(
59 | EXECUTIONS_IN_PAGE=20,
60 | SCHEDULER_INTERVAL=10,
61 | BROKER=Broker.REDIS,
62 | CALLBACK_TIMEOUT=60, # Callback timeout in seconds (success/failure/stopped)
63 | # Default values, can be overridden per task/job
64 | DEFAULT_SUCCESS_TTL=10 * 60, # Time To Live (TTL) in seconds to keep successful job results
65 | DEFAULT_FAILURE_TTL=365 * 24 * 60 * 60, # Time To Live (TTL) in seconds to keep job failure information
66 | DEFAULT_JOB_TTL=10 * 60, # Time To Live (TTL) in seconds to keep job information
67 | DEFAULT_JOB_TIMEOUT=5 * 60, # timeout (seconds) for a job
68 | # General configuration values
69 | DEFAULT_WORKER_TTL=10 * 60, # Time To Live (TTL) in seconds to keep worker information after last heartbeat
70 | DEFAULT_MAINTENANCE_TASK_INTERVAL=10 * 60, # The interval to run maintenance tasks in seconds. 10 minutes.
71 | DEFAULT_JOB_MONITORING_INTERVAL=30, # The interval to monitor jobs in seconds.
72 | SCHEDULER_FALLBACK_PERIOD_SECS=120, # Period (secs) to wait before requiring to reacquire locks
73 | )
74 | ```
75 |
76 | 5. Add `scheduler.urls` to your django application `urls.py`:
77 | ```python
78 | from django.urls import path, include
79 |
80 | urlpatterns = [
81 | # ...
82 | path('scheduler/', include('scheduler.urls')),
83 | ]
84 | ```
85 |
6. Run migrations to generate the Django models:
87 | ```shell
88 | python manage.py migrate
89 | ```
90 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Django Tasks Scheduler
2 | ===================
3 | [](https://github.com/django-commons/django-tasks-scheduler/actions/workflows/test.yml)
4 | 
5 | [](https://pypi.org/project/django-tasks-scheduler/)
6 |
7 | Documentation can be found in https://django-tasks-scheduler.readthedocs.io/
8 |
9 | ## Introduction Video
10 |
11 | [](https://www.youtube.com/watch?v=Brfavid_fxw)
12 |
13 | Watch this introduction video to learn about django-tasks-scheduler and its features.
14 |
15 | # Usage
16 |
17 | 1. Update `settings.py` to include scheduler configuration:
18 |
19 | ```python
20 | import os
21 | from typing import Dict
22 | from scheduler.types import SchedulerConfiguration, Broker, QueueConfiguration
23 |
24 | INSTALLED_APPS = [
25 | # ...
26 | 'scheduler',
27 | # ...
28 | ]
29 | SCHEDULER_CONFIG = SchedulerConfiguration(
30 | EXECUTIONS_IN_PAGE=20,
31 | SCHEDULER_INTERVAL=10,
32 | BROKER=Broker.REDIS,
33 | CALLBACK_TIMEOUT=60, # Callback timeout in seconds (success/failure/stopped)
34 | # Default values, can be overridden per task/job
35 | DEFAULT_SUCCESS_TTL=10 * 60, # Time To Live (TTL) in seconds to keep successful job results
36 | DEFAULT_FAILURE_TTL=365 * 24 * 60 * 60, # Time To Live (TTL) in seconds to keep job failure information
37 | DEFAULT_JOB_TTL=10 * 60, # Time To Live (TTL) in seconds to keep job information
38 | DEFAULT_JOB_TIMEOUT=5 * 60, # timeout (seconds) for a job
39 | # General configuration values
40 | DEFAULT_WORKER_TTL=10 * 60, # Time To Live (TTL) in seconds to keep worker information after last heartbeat
41 | DEFAULT_MAINTENANCE_TASK_INTERVAL=10 * 60, # The interval to run maintenance tasks in seconds. 10 minutes.
42 | DEFAULT_JOB_MONITORING_INTERVAL=30, # The interval to monitor jobs in seconds.
43 | SCHEDULER_FALLBACK_PERIOD_SECS=120, # Period (secs) to wait before requiring to reacquire locks
44 | )
45 | SCHEDULER_QUEUES: Dict[str, QueueConfiguration] = {
46 | 'default': QueueConfiguration(URL='redis://localhost:6379/0'),
47 | }
48 | ```
49 |
50 | 2. Update `urls.py` to include scheduler urls:
51 |
52 | ```python
53 | from django.urls import path, include
54 |
55 | urlpatterns = [
56 | # ...
57 | path('scheduler/', include('scheduler.urls')),
58 | ]
59 | ```
60 |
61 | 3. Run migrations:
62 |
63 | ```bash
64 | python manage.py migrate
65 | ```
66 |
67 | 4. Check out the admin views:
68 | 
69 |
70 |
71 | # Local development environment
72 |
73 | You can install [`pre-commit` hook](https://pre-commit.com/) in the repo to add it as a git hook by
74 | running: `pre-commit install`. It is configured to check all change files based on configuration in
75 | `.pre-commit-config.yaml`.
76 |
77 | # Sponsor
78 |
79 | django-tasks-scheduler is developed for free.
80 |
81 | You can support this project by becoming a sponsor using [this link](https://github.com/sponsors/cunla).
82 |
83 | # Contributing
84 |
85 | Interested in contributing, providing suggestions, or submitting bugs? See
86 | guidelines [at this link](.github/CONTRIBUTING.md).
87 |
--------------------------------------------------------------------------------
/scheduler/tests/test_worker/test_worker_commands.py:
--------------------------------------------------------------------------------
1 | import json
2 | from threading import Thread
3 | from time import sleep
4 | from unittest import mock
5 |
6 | from scheduler.helpers.queues import get_queue
7 | from scheduler.tests.jobs import test_job, two_seconds_job
8 | from ..test_views.base import BaseTestCase
9 | from ...helpers.callback import Callback
10 | from ...redis_models import JobModel, JobStatus, WorkerModel
11 | from ...worker import create_worker
12 | from ...worker.commands import send_command, StopJobCommand
13 | from ...worker.commands.suspend_worker import SuspendWorkCommand
14 |
15 |
16 | def _callback_func():
17 | pass
18 |
19 |
def callback_func():
    """No-op callback fixture attached to jobs via `Callback` in tests."""
    return None
22 |
23 |
class WorkerCommandsTest(BaseTestCase):
    """Tests for commands delivered to workers over the broker channel (suspend worker, stop job)."""

    def test_stop_worker_command__green(self):
        """A SuspendWorkCommand addressed to this worker suspends it, leaving the queued job untouched."""
        # Arrange
        worker_name = "test"
        queue = get_queue("default")
        job = queue.create_and_enqueue_job(test_job)
        self.assertTrue(job.is_queued)
        worker = create_worker("default", name=worker_name, burst=True, with_scheduler=False)
        worker.worker_start()
        # Act
        send_command(queue.connection, SuspendWorkCommand(worker_name=worker_name))
        worker.work()

        # Assert: worker suspended instead of processing the queued job.
        self.assertTrue(job.is_queued)
        self.assertTrue(worker._model.is_suspended)

    def test_stop_worker_command__bad_worker_name(self):
        """A SuspendWorkCommand addressed to a different worker name is ignored, so the job runs."""
        # Arrange
        worker_name = "test"
        queue = get_queue("default")
        job = queue.create_and_enqueue_job(test_job)
        self.assertTrue(job.is_queued)
        worker = create_worker("default", name=worker_name, burst=True, with_scheduler=False)
        worker.bootstrap()
        # Act: the command targets a non-existing worker name ("test1").
        send_command(queue.connection, SuspendWorkCommand(worker_name=worker_name + "1"))
        worker.work()

        # Assert: not suspended, and the job was taken off the queue.
        self.assertFalse(worker._model.is_suspended)
        job = JobModel.get(job.name, connection=queue.connection)
        self.assertFalse(job.is_queued)

    @mock.patch("scheduler.redis_models.job.JobModel.call_stopped_callback")
    def test_stop_job_command__success(self, mock_stopped_callback):
        """StopJobCommand stops the currently-running job and triggers its stopped-callback."""
        # Arrange
        worker_name = "test"
        queue = get_queue("default")
        job = queue.create_and_enqueue_job(two_seconds_job, on_stopped=Callback(callback_func))
        self.assertTrue(job.is_queued)
        worker = create_worker("default", name=worker_name, burst=True, with_scheduler=False)
        worker.bootstrap()

        # Act: run the worker in a thread, then deliver the stop-job payload
        # straight to the command listener while the two-second job is executing.
        t = Thread(target=worker.work, args=(0,), name="worker-thread")
        t.start()
        sleep(0.1)
        command = StopJobCommand(worker_name=worker_name, job_name=job.name)
        command_payload = json.dumps(command.command_payload())
        worker._command_listener.handle_payload({"data": command_payload})
        worker.monitor_job_execution_process(job, queue)

        # Assert: re-fetch job and worker state from the broker.
        job = JobModel.get(job.name, connection=queue.connection)
        worker = WorkerModel.get(worker.name, connection=queue.connection)
        self.assertEqual(worker.stopped_job_name, job.name)
        self.assertIsNone(worker.current_job_name)
        self.assertEqual(job.status, JobStatus.STOPPED)
        t.join()
        mock_stopped_callback.assert_called()
85 |
--------------------------------------------------------------------------------
/scheduler/tests/test_mgmt_commands/test_scheduler_stats.py:
--------------------------------------------------------------------------------
1 | import json
2 | import sys
3 | from io import StringIO
4 |
5 | import yaml
6 | from django.core.management import call_command
7 | from django.test import TestCase, override_settings
8 |
9 | from scheduler import settings
10 | from scheduler.helpers.queues import get_queue
11 |
12 |
@override_settings(SCHEDULER_QUEUES={"default": {"HOST": "localhost", "PORT": 6379, "DB": 0}})
class SchedulerStatsTest(TestCase):
    """Tests for the `scheduler_stats` management command output formats (json, yaml, plain text, bad args)."""

    # Stats expected for a single, empty "default" queue with no workers.
    EXPECTED_OUTPUT = {
        "queues": [
            {
                "canceled_jobs": 0,
                "failed_jobs": 0,
                "finished_jobs": 0,
                "name": "default",
                "oldest_job_timestamp": None,
                "queued_jobs": 0,
                "scheduled_jobs": 0,
                "scheduler_pid": None,
                "started_jobs": 0,
                "workers": 0,
            }
        ]
    }
    # Saved queue configuration, restored in tearDown.
    OLD_QUEUES = None

    def setUp(self):
        """Reset the queue configuration from the overridden settings and flush the broker."""
        super(SchedulerStatsTest, self).setUp()
        SchedulerStatsTest.OLD_QUEUES = settings._QUEUES
        settings._QUEUES = {}
        settings.conf_settings()
        get_queue("default").connection.flushall()

    def tearDown(self):
        super(SchedulerStatsTest, self).tearDown()
        settings._QUEUES = SchedulerStatsTest.OLD_QUEUES

    # NOTE(review): these tests rebind sys.stdout/sys.stderr without restoring
    # them afterwards - confirm this is acceptable for the test runner.
    def test_scheduler_stats__json_output(self):
        test_stdout = StringIO()
        sys.stdout = test_stdout
        # act
        call_command("scheduler_stats", "-j")
        # assert
        res = test_stdout.getvalue()
        self.assertEqual(json.loads(res), SchedulerStatsTest.EXPECTED_OUTPUT)

    def test_scheduler_stats__yaml_output(self):
        # arrange
        test_stdout = StringIO()
        sys.stdout = test_stdout
        # act
        call_command("scheduler_stats", "-y")
        # assert
        res = test_stdout.getvalue()
        self.assertEqual(yaml.load(res, yaml.SafeLoader), SchedulerStatsTest.EXPECTED_OUTPUT)

    def test_scheduler_stats__plain_text_output(self):
        test_stdout = StringIO()
        sys.stdout = test_stdout
        # act
        call_command("scheduler_stats", "--no-color")
        # assert: the exact dashboard rendering, including table borders.
        res = test_stdout.getvalue()
        self.assertEqual(
            res,
            """
Django-Scheduler CLI Dashboard

--------------------------------------------------------------------------------
| Name             |    Queued |    Active |  Finished |  Canceled |   Workers |
--------------------------------------------------------------------------------
| default          |         0 |         0 |         0 |         0 |         0 |
--------------------------------------------------------------------------------
""",
        )

    def test_scheduler_stats__bad_args(self):
        """Passing both -y and -j is rejected with a message on stderr and a SystemExit."""
        # arrange
        sys.stderr = StringIO()
        sys.stdout = StringIO()
        # act
        with self.assertRaises(SystemExit):
            call_command("scheduler_stats", "-y", "-j")
        # assert
        res = sys.stdout.getvalue()
        self.assertEqual(res, """""")
        err = sys.stderr.getvalue()
        self.assertEqual(err, """Aborting. Cannot output as both json and yaml\n""")
95 |
--------------------------------------------------------------------------------
/scheduler/models/args.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from typing import Callable, Any, Tuple, Dict, Type
3 |
4 | from django.contrib.contenttypes.fields import GenericForeignKey
5 | from django.contrib.contenttypes.models import ContentType
6 | from django.core.exceptions import ValidationError
7 | from django.db import models
8 | from django.utils.translation import gettext_lazy as _
9 |
10 | from scheduler.helpers import utils
11 |
# Maps each ArgType choice value to the Python type used to parse the stored string.
# NOTE(review): "callable" maps to typing.Callable, which cannot be called as a
# constructor - BaseTaskArg.value() special-cases "callable" (and "datetime"/"bool")
# before falling back to this dict, so only "str"/"int" are constructed from it.
ARG_TYPE_TYPES_DICT: Dict[str, Type] = {
    "str": str,
    "int": int,
    "bool": bool,
    "datetime": datetime,
    "callable": Callable,
}
19 |
20 |
class BaseTaskArg(models.Model):
    """Abstract base for a task argument stored as a string plus its declared type.

    The raw value is kept in `val`; `arg_type` says how `value()` should parse it
    back into a Python object. The owning task is referenced through a generic
    foreign key (`content_type` / `object_id` / `content_object`).
    """

    class ArgType(models.TextChoices):
        STR = "str", _("string")
        INT = "int", _("int")
        BOOL = "bool", _("boolean")
        DATETIME = "datetime", _("datetime")
        CALLABLE = "callable", _("callable")

    arg_type = models.CharField(
        _("Argument Type"),
        max_length=12,
        choices=ArgType.choices,
        default=ArgType.STR,
    )
    val = models.CharField(_("Argument Value"), blank=True, max_length=255)
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey()

    def clean(self) -> None:
        """Validate that `val` can be parsed according to `arg_type`.

        Raises ValidationError (keyed on "arg_type") when the type is unknown or
        the value fails to parse as that type.
        """
        if self.arg_type not in ARG_TYPE_TYPES_DICT:
            msg = _("Could not parse %s, options are: %s") % (self.arg_type, ARG_TYPE_TYPES_DICT.keys())
            raise ValidationError({"arg_type": ValidationError(msg, code="invalid")})
        try:
            if self.arg_type == "callable":
                # Resolve the dotted path without calling it.
                utils.callable_func(self.val)
            elif self.arg_type == "datetime":
                datetime.fromisoformat(self.val)
            elif self.arg_type == "bool":
                if self.val.lower() not in {"true", "false"}:
                    raise ValidationError
            elif self.arg_type == "int":
                int(self.val)
        except Exception:
            msg = _("Could not parse %s as %s") % (self.val, self.arg_type)
            raise ValidationError({"arg_type": ValidationError(msg, code="invalid")})

    def save(self, **kwargs: Any) -> None:
        super(BaseTaskArg, self).save(**kwargs)
        # Re-save the owning task after a change - presumably to refresh its
        # scheduled job with the new arguments; confirm against task model.
        self.content_object.save()

    def delete(self, **kwargs: Any) -> None:
        super(BaseTaskArg, self).delete(**kwargs)
        self.content_object.save()

    def value(self) -> Any:
        """Parse `val` according to `arg_type` and return the resulting Python object."""
        if self.arg_type == "callable":
            # The stored string is a dotted path; resolve it and call it,
            # so the *result* of the callable is used as the argument value.
            res = utils.callable_func(self.val)()
        elif self.arg_type == "datetime":
            res = datetime.fromisoformat(self.val)
        elif self.arg_type == "bool":
            res = self.val.lower() == "true"
        else:
            # "str"/"int": construct directly from the raw string.
            res = ARG_TYPE_TYPES_DICT[self.arg_type](self.val)
        return res

    class Meta:
        abstract = True
        ordering = ["id"]
80 |
81 |
class TaskArg(BaseTaskArg):
    """A positional argument of a scheduled task."""

    def __str__(self) -> str:
        return "TaskArg[arg_type={},value={}]".format(self.arg_type, self.value())
85 |
86 |
class TaskKwarg(BaseTaskArg):
    """A keyword argument of a scheduled task; `key` holds the keyword name."""

    key = models.CharField(max_length=255)

    def __str__(self) -> str:
        # Bug fix: the parsed value was unpacked but the raw string `self.val`
        # was printed under the "value=" label (leaving `value` unused). Print
        # the parsed value, consistent with TaskArg.__str__.
        key, value = self.value()
        return f"TaskKwarg[key={key},arg_type={self.arg_type},value={value}]"

    def value(self) -> Tuple[str, Any]:
        """Return the (key, parsed value) pair for building the task's kwargs."""
        return self.key, super(TaskKwarg, self).value()
96 |
--------------------------------------------------------------------------------
/scheduler/views/queue_registry_actions.py:
--------------------------------------------------------------------------------
1 | """list_registry_jobs actions on all jobs in the registry"""
2 |
3 | from enum import Enum
4 |
5 | from django.contrib import admin, messages
6 | from django.contrib.admin.views.decorators import staff_member_required
7 | from django.http import HttpResponse, HttpRequest, HttpResponseNotFound
8 | from django.shortcuts import render, redirect
9 | from django.urls import reverse
10 | from django.views.decorators.cache import never_cache
11 |
12 | from scheduler.helpers.queues import Queue
13 | from scheduler.helpers.queues.queue_logic import NoSuchRegistryError
14 | from scheduler.redis_models import JobModel, JobNamesRegistry
15 | from scheduler.settings import logger
16 | from scheduler.types import ResponseErrorTypes
17 | from scheduler.views.helpers import get_queue, _check_next_url, _enqueue_multiple_jobs
18 |
19 |
class QueueRegistryActions(Enum):
    """Bulk actions that can be applied to all jobs in a queue registry."""

    EMPTY = "empty"  # delete every job in the registry
    REQUEUE = "requeue"  # re-enqueue every job in the registry
23 |
24 |
def _clear_registry(request: HttpRequest, queue: Queue, registry_name: str, registry: JobNamesRegistry) -> None:
    """Delete every job in `registry` (and its JobModel), flashing a result message.

    Broker errors are flashed to the user and then re-raised.
    """
    try:
        for name in registry.all(queue.connection):
            registry.delete(queue.connection, name)
            model = JobModel.get(name, connection=queue.connection)
            if model is not None:
                model.delete(connection=queue.connection)
        messages.info(request, f"You have successfully cleared the {registry_name} jobs in queue {queue.name}")
    except ResponseErrorTypes as e:
        messages.error(request, f"error: {e}")
        raise e
37 |
38 |
def _requeue_job_names(request: HttpRequest, queue: Queue, registry_name: str) -> None:
    """Re-enqueue every job currently in the named registry, flashing a count message."""
    try:
        registry = queue.get_registry(registry_name)
    except NoSuchRegistryError:
        logger.error(f"No registry named {registry_name}")
        return
    requeued_count = _enqueue_multiple_jobs(queue, registry.all(queue.connection))
    messages.info(request, f"You have successfully re-queued {requeued_count} jobs!")
48 |
49 |
@never_cache  # type: ignore
@staff_member_required  # type: ignore
def queue_registry_actions(request: HttpRequest, queue_name: str, registry_name: str, action: str) -> HttpResponse:
    """Confirm (GET) or perform (POST) a bulk action on all jobs in a queue registry.

    GET renders a confirmation page listing the affected jobs; POST performs the
    action ("empty" or "requeue") and redirects back to the registry's job list.
    Returns 404 when the registry does not exist.
    """
    queue = get_queue(queue_name)
    try:
        registry = queue.get_registry(registry_name)
    except NoSuchRegistryError:
        return HttpResponseNotFound()
    next_url = _check_next_url(request, reverse("queue_registry_jobs", args=[queue_name, registry_name]))
    if action not in [item.value for item in QueueRegistryActions]:
        # Unknown action: bounce back without doing anything.
        return redirect(next_url)
    if request.method == "POST":
        if action == QueueRegistryActions.EMPTY.value:
            _clear_registry(request, queue, registry_name, registry)
        elif action == QueueRegistryActions.REQUEUE.value:
            _requeue_job_names(request, queue, registry_name)
        return redirect("queue_registry_jobs", queue_name, registry_name)
    # GET: build the confirmation page context with the jobs that would be affected.
    job_names = registry.all(queue.connection)
    job_list = JobModel.get_many(job_names, connection=queue.connection)
    context_data = {
        **admin.site.each_context(request),
        "queue": queue,
        "total_jobs": registry.count(queue.connection),
        "action": action,
        "jobs": job_list,
        "next_url": next_url,
        "action_url": reverse("queue_registry_action", args=[queue_name, registry_name, action]),
    }
    return render(request, "admin/scheduler/confirm_action.html", context_data)
79 |
--------------------------------------------------------------------------------
/scheduler/types/settings_types.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from dataclasses import dataclass
3 | from enum import Enum
4 | from typing import Callable, Dict, Optional, List, Tuple, Any, Type, ClassVar, Set
5 |
6 | from scheduler.helpers.timeouts import BaseDeathPenalty, UnixSignalDeathPenalty
7 |
8 | if sys.version_info >= (3, 11):
9 | from typing import Self
10 | else:
11 | from typing_extensions import Self
12 |
13 |
class Broker(Enum):
    """Supported broker backends."""

    REDIS = "redis"
    FAKEREDIS = "fakeredis"  # in-memory fake broker, useful for tests
    VALKEY = "valkey"
18 |
19 |
20 | def _token_validation(token: str) -> bool:
21 | return False
22 |
23 |
@dataclass(slots=True, kw_only=True)
class SchedulerConfiguration:
    """Configuration for django-tasks-scheduler"""

    EXECUTIONS_IN_PAGE: int = 20
    SCHEDULER_INTERVAL: int = 10
    BROKER: Broker = Broker.REDIS
    # NOTE(review): the default validator (_token_validation) rejects every token.
    TOKEN_VALIDATION_METHOD: Callable[[str], bool] = _token_validation
    CALLBACK_TIMEOUT: int = 60  # Callback timeout in seconds (success/failure/stopped)
    # Default values, can be overridden per task
    DEFAULT_SUCCESS_TTL: int = 10 * 60  # Time To Live (TTL) in seconds to keep successful job results
    DEFAULT_FAILURE_TTL: int = 365 * 24 * 60 * 60  # Time To Live (TTL) in seconds to keep job failure information
    DEFAULT_JOB_TTL: int = 10 * 60  # Time To Live (TTL) in seconds to keep job information
    DEFAULT_JOB_TIMEOUT: int = 5 * 60  # timeout (seconds) for a job
    # General configuration values
    DEFAULT_WORKER_TTL: int = 10 * 60  # Time To Live (TTL) in seconds to keep worker information after last heartbeat
    DEFAULT_MAINTENANCE_TASK_INTERVAL: int = 10 * 60  # The interval to run maintenance tasks in seconds. 10 minutes.
    DEFAULT_JOB_MONITORING_INTERVAL: int = 30  # The interval to monitor jobs in seconds.
    SCHEDULER_FALLBACK_PERIOD_SECS: int = 120  # Period (secs) to wait before requiring to reacquire locks
    DEATH_PENALTY_CLASS: Type[BaseDeathPenalty] = UnixSignalDeathPenalty
44 |
45 |
46 | @dataclass(slots=True, frozen=True, kw_only=True)
47 | class QueueConfiguration:
48 | __CONNECTION_FIELDS__: ClassVar[Set[str]] = {
49 | "URL",
50 | "DB",
51 | "UNIX_SOCKET_PATH",
52 | "HOST",
53 | "PORT",
54 | "PASSWORD",
55 | "SENTINELS",
56 | "MASTER_NAME",
57 | "CONNECTION_KWARGS",
58 | }
59 | DB: Optional[int] = None
60 | # Redis connection parameters, either UNIX_SOCKET_PATH/URL/separate params (HOST, PORT, PASSWORD) should be provided
61 | UNIX_SOCKET_PATH: Optional[str] = None
62 | URL: Optional[str] = None
63 | HOST: Optional[str] = None
64 | PORT: Optional[int] = None
65 | USERNAME: Optional[str] = None
66 | PASSWORD: Optional[str] = None
67 |
68 | ASYNC: bool = True
69 |
70 | SENTINELS: Optional[List[Tuple[str, int]]] = None
71 | SENTINEL_KWARGS: Optional[Dict[str, str]] = None
72 | MASTER_NAME: Optional[str] = None
73 | CONNECTION_KWARGS: Optional[Dict[str, Any]] = None
74 |
75 | def __post_init__(self):
76 | if not any((self.URL, self.UNIX_SOCKET_PATH, self.HOST, self.SENTINELS)):
77 | raise ValueError(f"At least one of URL, UNIX_SOCKET_PATH, HOST must be provided: {self}")
78 | if sum((self.URL is not None, self.UNIX_SOCKET_PATH is not None, self.HOST is not None)) > 1:
79 | raise ValueError(f"Only one of URL, UNIX_SOCKET_PATH, HOST should be provided: {self}")
80 | if self.HOST is not None and (self.PORT is None or self.DB is None):
81 | raise ValueError(f"HOST requires PORT and DB: {self}")
82 |
83 | def same_connection_params(self, other: Self) -> bool:
84 | for field in self.__CONNECTION_FIELDS__:
85 | if getattr(self, field) != getattr(other, field):
86 | return False
87 | return True
88 |
--------------------------------------------------------------------------------
/scheduler/tests/test_task_types/test_cron_task.py:
--------------------------------------------------------------------------------
1 | from django.core.exceptions import ValidationError
2 |
3 | from scheduler import settings
4 | from scheduler.helpers.queues import get_queue
5 | from scheduler.models import TaskType
6 | from scheduler.redis_models import JobModel
7 | from scheduler.tests.test_task_types.test_task_model import BaseTestCases
8 | from scheduler.tests.testtools import task_factory
9 | from scheduler.worker import create_worker
10 |
11 |
class TestCronTask(BaseTestCases.TestBaseTask):
    """Cron-task specific tests, on top of the shared base-task test cases."""

    task_type = TaskType.CRON

    def setUp(self) -> None:
        super().setUp()
        # Use the first configured queue name for all tasks created in this class.
        self.queue_name = settings.get_queue_names()[0]

    def test_clean(self):
        """A valid cron string, queue and callable pass model validation."""
        task = task_factory(self.task_type)
        task.cron_string = "* * * * *"
        task.queue = self.queue_name
        task.callable = "scheduler.tests.jobs.test_job"
        self.assertIsNone(task.clean())

    def test_clean_cron_string_invalid(self):
        """An unparsable cron string fails validation."""
        task = task_factory(self.task_type)
        task.cron_string = "not-a-cron-string"
        task.queue = self.queue_name
        task.callable = "scheduler.tests.jobs.test_job"
        with self.assertRaises(ValidationError):
            task.clean_cron_string()

    def test_check_rescheduled_after_execution(self):
        """After a successful run, stats are updated and a new job is scheduled."""
        task = task_factory(self.task_type)
        queue = task.rqueue
        first_run_id = task.job_name
        entry = JobModel.get(first_run_id, connection=queue.connection)
        self.assertIsNotNone(entry)
        queue.run_sync(entry)
        task.refresh_from_db()
        self.assertEqual(task.failed_runs, 0)
        self.assertIsNone(task.last_failed_run)
        self.assertEqual(task.successful_runs, 1)
        self.assertIsNotNone(task.last_successful_run)
        self.assertTrue(task.is_scheduled())
        # A fresh job name proves the task was rescheduled, not re-run in place.
        self.assertNotEqual(task.job_name, first_run_id)

    def test_check_rescheduled_after_failed_execution(self):
        """A failing run updates failure stats but still reschedules the task."""
        task = task_factory(self.task_type, callable_name="scheduler.tests.jobs.failing_job")
        queue = task.rqueue
        first_run_id = task.job_name
        entry = JobModel.get(first_run_id, connection=queue.connection)
        queue.run_sync(entry)
        task.refresh_from_db()
        self.assertEqual(task.failed_runs, 1)
        self.assertIsNotNone(task.last_failed_run)
        self.assertEqual(task.successful_runs, 0)
        self.assertIsNone(task.last_successful_run)
        self.assertTrue(task.is_scheduled())
        self.assertNotEqual(task.job_name, first_run_id)

    def test_cron_task_enqueuing_jobs(self):
        """A cron task whose callable enqueues 20 jobs; a burst worker then processes them all."""
        queue = get_queue()
        prev_queued = queue.scheduled_job_registry.count(connection=queue.connection)
        prev_finished = queue.finished_job_registry.count(connection=queue.connection)

        task = task_factory(self.task_type, callable_name="scheduler.tests.jobs.enqueue_jobs")
        self.assertEqual(prev_queued + 1, queue.scheduled_job_registry.count(connection=queue.connection))
        first_run_id = task.job_name
        entry = JobModel.get(first_run_id, connection=queue.connection)
        queue.run_sync(entry)
        # The task itself finished (+1) and enqueued 20 more jobs.
        self.assertEqual(20, queue.queued_job_registry.count(queue.connection))
        self.assertEqual(prev_finished + 1, queue.finished_job_registry.count(queue.connection))
        worker = create_worker("default", fork_job_execution=False, burst=True)
        worker.work()
        self.assertEqual(prev_finished + 21, queue.finished_job_registry.count(queue.connection))
        worker.refresh(update_queues=True)
        self.assertEqual(20, worker._model.successful_job_count)
        self.assertEqual(0, worker._model.failed_job_count)
81 |
--------------------------------------------------------------------------------
/scheduler/views/job_views.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 | from html import escape
3 |
4 | from django.contrib import admin, messages
5 | from django.contrib.admin.views.decorators import staff_member_required
6 | from django.http import HttpResponse, HttpRequest
7 | from django.http.response import HttpResponseBadRequest
8 | from django.shortcuts import render, redirect
9 | from django.views.decorators.cache import never_cache
10 |
11 | from scheduler.helpers.queues import InvalidJobOperation
12 | from scheduler.redis_models import Result
13 | from scheduler.settings import logger
14 | from scheduler.views.helpers import _find_job
15 | from scheduler.worker.commands import send_command, StopJobCommand
16 |
17 |
class JobDetailAction(str, Enum):
    """Actions supported by `job_action`; the str mixin allows direct comparison with URL path strings."""

    DELETE = "delete"
    ENQUEUE = "enqueue"
    CANCEL = "cancel"
22 |
23 |
@never_cache  # type: ignore
@staff_member_required  # type: ignore
def job_detail(request: HttpRequest, job_name: str) -> HttpResponse:
    """Render the detail page for a single job, including its latest and historical results.

    Redirects to the queues overview with a warning when the job no longer exists.
    """
    queue, job = _find_job(job_name)
    if job is None or queue is None:
        messages.warning(request, f"Job {escape(job_name)} does not exist, maybe its TTL has passed")
        return redirect("queues_home")
    try:
        # Accessing func_name exercises deserialization of the job payload;
        # any failure means the stored job data is unreadable.
        job.func_name
        data_is_valid = True
    except Exception:
        data_is_valid = False

    try:
        last_result = Result.fetch_latest(queue.connection, job.name)
    except AttributeError:
        last_result = None

    context_data = {
        **admin.site.each_context(request),
        "job": job,
        "last_result": last_result,
        "results": Result.all(connection=queue.connection, parent=job.name),
        "queue": queue,
        "data_is_valid": data_is_valid,
    }
    return render(request, "admin/scheduler/job_detail.html", context_data)
51 |
52 |
@never_cache  # type: ignore
@staff_member_required  # type: ignore
def job_action(request: HttpRequest, job_name: str, action: str) -> HttpResponse:
    """Confirm (GET) or perform (POST) a single-job action: delete, enqueue, or cancel.

    Returns 400 for unsupported actions and redirects with a warning when the
    job no longer exists or the operation is invalid for the job's state.
    """
    queue, job = _find_job(job_name)
    if job is None or queue is None:
        messages.warning(request, f"Job {escape(job_name)} does not exist, maybe its TTL has passed")
        return redirect("queues_home")
    if action not in [item.value for item in JobDetailAction]:
        return HttpResponseBadRequest(f"Action {escape(action)} is not supported")

    if request.method != "POST":
        # GET: show the confirmation page for the requested action.
        context_data = {
            **admin.site.each_context(request),
            "job": job,
            "queue": queue,
            "action": action,
        }
        return render(request, "admin/scheduler/single_job_action.html", context_data)

    try:
        # str-Enum members compare equal to their plain-string values.
        if action == JobDetailAction.DELETE:
            queue.delete_job(job.name)
            messages.info(request, f"You have successfully deleted {job.name}")
            return redirect("queue_registry_jobs", queue.name, "queued")
        elif action == JobDetailAction.ENQUEUE:
            # Remove the job from its current registry (keeping the model) before re-enqueueing.
            queue.delete_job(job.name, expire_job_model=False)
            queue.enqueue_job(job)
            messages.info(request, f"You have successfully enqueued {job.name}")
            return redirect("job_details", job_name)
        elif action == JobDetailAction.CANCEL:
            # Tell the worker running the job to stop it, then mark it canceled.
            send_command(
                connection=queue.connection, command=StopJobCommand(job_name=job.name, worker_name=job.worker_name)
            )
            queue.cancel_job(job.name)
            messages.info(request, f"You have successfully cancelled {job.name}")
            return redirect("job_details", job_name)
    except InvalidJobOperation as e:
        logger.warning(f"Could not perform action: {e}")
        messages.warning(request, f"Could not perform action: {e}")
    return redirect("job_details", job_name)
93 |
--------------------------------------------------------------------------------
/scheduler/decorators.py:
--------------------------------------------------------------------------------
1 | from functools import wraps
2 | from typing import Any, Callable, Dict, Optional, Union, List
3 |
4 | from scheduler.helpers.callback import Callback
5 | from scheduler.types import ConnectionType
6 |
# Registry of @job-decorated callables' names; starts empty here and is
# presumably populated by the decorator below - confirm against the full file.
JOB_METHODS_LIST: List[str] = []
8 |
9 |
class job:
    """Decorator that adds a ``delay`` method to the decorated function.

    Calling ``delay`` creates a job and enqueues it on the configured queue.
    ``queue`` is optional: it can be a ``Queue`` instance or a queue name (str),
    and defaults to the "default" queue when omitted. For example::


        >>> @job(queue='default')
        >>> def simple_add(x, y):
        >>>     return x + y
        >>> ...
        >>> # Puts `simple_add` function into queue
        >>> simple_add.delay(1, 2)
    """

    def __init__(
        self,
        queue: Union["Queue", str, None] = None,  # noqa: F821
        connection: Optional[ConnectionType] = None,
        timeout: Optional[int] = None,
        result_ttl: Optional[int] = None,
        job_info_ttl: Optional[int] = None,
        at_front: bool = False,
        meta: Optional[Dict[Any, Any]] = None,
        description: Optional[str] = None,
        on_failure: Optional[Union[Callback, Callable[..., Any]]] = None,
        on_success: Optional[Union[Callback, Callable[..., Any]]] = None,
        on_stopped: Optional[Union[Callback, Callable[..., Any]]] = None,
    ):
        """
        :param queue: The queue to use: a Queue instance, a queue name (str), or None for the "default" queue
        :param connection: Broker Connection
        :param timeout: Job timeout
        :param result_ttl: Result time to live
        :param job_info_ttl: Time to live for job info
        :param at_front: Whether to enqueue the job at front of the queue
        :param meta: Arbitrary metadata about the job
        :param description: Job description
        :param on_failure: Callable to run on failure
        :param on_success: Callable to run on success
        :param on_stopped: Callable to run when stopped
        """
        from scheduler.helpers.queues import get_queue

        if queue is None:
            queue = "default"
        # Resolve the queue once at decoration time; self.queue is always a Queue afterwards.
        self.queue = get_queue(queue) if isinstance(queue, str) else queue
        self.connection = connection
        self.timeout = timeout
        self.result_ttl = result_ttl
        self.job_info_ttl = job_info_ttl
        self.meta = meta
        self.at_front = at_front
        self.description = description
        self.on_success = on_success
        self.on_failure = on_failure
        self.on_stopped = on_stopped

    def __call__(self, f):
        """Attach a ``delay`` method to ``f`` and register it in JOB_METHODS_LIST."""

        @wraps(f)
        def delay(*args, **kwargs):
            # Per-call overrides, consumed before the remaining kwargs reach the job function.
            job_name = kwargs.pop("job_name", None)
            at_front = kwargs.pop("at_front", False)

            if not at_front:
                at_front = self.at_front

            # self.queue was resolved to a Queue instance in __init__; no need to re-resolve here.
            return self.queue.create_and_enqueue_job(
                f,
                args=args,
                kwargs=kwargs,
                timeout=self.timeout,
                result_ttl=self.result_ttl,
                job_info_ttl=self.job_info_ttl,
                name=job_name,
                at_front=at_front,
                meta=self.meta,
                description=self.description,
                on_failure=self.on_failure,
                on_success=self.on_success,
                on_stopped=self.on_stopped,
                when=None,
            )

        JOB_METHODS_LIST.append(f"{f.__module__}.{f.__name__}")
        f.delay = delay
        return f
99 |
--------------------------------------------------------------------------------
/scheduler/tests/test_worker/test_worker_commands_multiprocess.py:
--------------------------------------------------------------------------------
1 | from time import sleep
2 |
3 | from django.test import tag
4 |
5 | from scheduler.helpers.queues import get_queue
6 | from scheduler.redis_models import JobStatus, JobModel, WorkerModel
7 | from scheduler.tests.jobs import long_job, two_seconds_job
8 | from .. import testtools
9 | from ..test_views.base import BaseTestCase
10 | from ...worker.commands import KillWorkerCommand, send_command, StopJobCommand
11 |
12 |
@tag("multiprocess")
class WorkerCommandsTest(BaseTestCase):
    """Integration tests that send pubsub commands to a worker running in a separate process.

    These tests are timing-sensitive: they rely on short sleeps to let the out-of-process
    worker pick up jobs and react to commands.
    """

    def test_kill_job_command__current_job(self):
        """A StopJobCommand targeting the worker's current job stops it and records it on the worker."""
        # Arrange
        queue = get_queue("django_tasks_scheduler_test")
        job = queue.create_and_enqueue_job(long_job)
        self.assertTrue(job.is_queued)
        process, worker_name = testtools.run_worker_in_process("django_tasks_scheduler_test")
        sleep(0.1)  # give the worker process time to dequeue and start the job
        job = JobModel.get(job.name, connection=queue.connection)
        self.assertEqual(JobStatus.STARTED, job.status)

        # Act
        send_command(queue.connection, StopJobCommand(worker_name=worker_name, job_name=job.name))
        process.terminate()
        process.join(2)
        process.kill()  # ensure the worker process is gone even if join timed out

        # Assert: the job is stopped, tracked as the worker's stopped job, and no work was counted.
        job = JobModel.get(job.name, connection=queue.connection)
        worker_model = WorkerModel.get(worker_name, connection=queue.connection)
        self.assertEqual(job.name, worker_model.stopped_job_name)
        self.assertEqual(job.name, worker_model.current_job_name)
        self.assertEqual(0, worker_model.completed_jobs)
        self.assertEqual(0, worker_model.failed_job_count)
        self.assertEqual(0, worker_model.successful_job_count)
        self.assertEqual(JobStatus.STOPPED, job.status)
        self.assertNotIn(job.name, queue.queued_job_registry.all(queue.connection))

    def test_kill_job_command__different_job(self):
        """A StopJobCommand naming a different job must not affect the worker's current job."""
        # Arrange
        queue = get_queue("django_tasks_scheduler_test")
        job = queue.create_and_enqueue_job(two_seconds_job)
        self.assertTrue(job.is_queued)
        process, worker_name = testtools.run_worker_in_process("django_tasks_scheduler_test")
        sleep(0.2)  # give the worker process time to dequeue and start the job
        job = JobModel.get(job.name, connection=queue.connection)
        self.assertEqual(JobStatus.STARTED, job.status)

        # Act: stop command names a non-existent job (current job name + "1")
        send_command(queue.connection, StopJobCommand(worker_name=worker_name, job_name=job.name + "1"))
        sleep(0.1)
        process.kill()
        process.join()
        # Assert: the current job is still running and no stopped job was recorded.
        job = JobModel.get(job.name, connection=queue.connection)
        self.assertEqual(JobStatus.STARTED, job.status)
        self.assertNotIn(job.name, queue.queued_job_registry.all(queue.connection))
        worker_model = WorkerModel.get(worker_name, connection=queue.connection)
        self.assertEqual(0, worker_model.completed_jobs)
        self.assertEqual(0, worker_model.failed_job_count)
        self.assertEqual(0, worker_model.successful_job_count)
        self.assertIsNone(worker_model.stopped_job_name)
        self.assertEqual(job.name, worker_model.current_job_name)

    def test_kill_worker_command(self):
        """A KillWorkerCommand makes the worker record a shutdown request."""
        queue = get_queue("django_tasks_scheduler_test")
        process, worker_name = testtools.run_worker_in_process("django_tasks_scheduler_test")
        sleep(0.1)  # let the worker subscribe to its command channel
        # act
        send_command(queue.connection, KillWorkerCommand(worker_name=worker_name))
        # assert
        sleep(0.2)  # allow the command to be processed before force-killing the process
        process.kill()
        process.join()
        worker_model = WorkerModel.get(worker_name, connection=queue.connection)
        self.assertEqual(0, worker_model.completed_jobs)
        self.assertEqual(0, worker_model.failed_job_count)
        self.assertEqual(0, worker_model.successful_job_count)
        self.assertIsNotNone(worker_model.shutdown_requested_date)
83 |
--------------------------------------------------------------------------------
/scheduler/worker/commands/worker_commands.py:
--------------------------------------------------------------------------------
1 | import json
2 | from abc import ABC
3 | from datetime import datetime, timezone
4 | from typing import Type, Dict, Any
5 |
6 | from scheduler.settings import logger
7 | from scheduler.types import ConnectionType, Self
8 |
# Pubsub channel name pattern; formatted with the worker name.
_PUBSUB_CHANNEL_TEMPLATE: str = ":workers:pubsub:{}"
# Maps command_name -> command class; populated by WorkerCommand.__init_subclass__.
_WORKER_COMMANDS_REGISTRY: Dict[str, Type["WorkerCommand"]] = {}
11 |
12 |
class WorkerCommandError(Exception):
    """Raised when a worker-command payload is malformed or names an unknown command."""
15 |
16 |
class WorkerCommand(ABC):
    """Abstract class for commands to be sent to a worker and processed by worker.

    Concrete subclasses must set ``command_name``; they are registered automatically
    in ``_WORKER_COMMANDS_REGISTRY`` so ``from_payload`` can reconstruct them.
    """

    command_name: str = ""

    def __init__(self, *args: Any, worker_name: str, **kwargs: Any) -> None:
        # Extra positional/keyword args (e.g. payload bookkeeping keys) are accepted and ignored.
        self.worker_name: str = worker_name

    def command_payload(self, **kwargs: Any) -> Dict[str, Any]:
        """Build the JSON-serializable payload published to the worker's command channel.

        :param kwargs: Extra key/value pairs merged into the payload.
        """
        commands_channel = WorkerCommandsChannelListener._commands_channel(self.worker_name)
        payload = {
            "command": self.command_name,
            "worker_name": self.worker_name,
            "channel_name": commands_channel,
            "created_at": datetime.now(tz=timezone.utc).isoformat(),
        }
        if kwargs:
            payload.update(kwargs)
        return payload

    def __str__(self) -> str:
        return f"{self.command_name}[{self.command_payload()}]"

    def process_command(self, connection: ConnectionType) -> None:
        """Execute the command on the worker side; implemented by each subclass."""
        raise NotImplementedError

    def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None:
        # Auto-register every subclass by its command_name. Note: this hook is only
        # invoked for subclasses, never for WorkerCommand itself.
        super().__init_subclass__()
        if not cls.command_name:
            raise NotImplementedError(f"{cls.__name__} must have a command_name attribute")
        _WORKER_COMMANDS_REGISTRY[cls.command_name] = cls

    @classmethod
    def from_payload(cls, payload: Dict[str, Any]) -> "WorkerCommand":
        """Reconstruct a command *instance* from a decoded pubsub payload.

        :raises WorkerCommandError: if the payload has no ``command`` key or names an unknown command.
        """
        command_name = payload.get("command")
        if command_name is None:
            raise WorkerCommandError("Payload must contain 'command' key")
        command_class = _WORKER_COMMANDS_REGISTRY.get(command_name)
        if command_class is None:
            raise WorkerCommandError(f"Invalid command: {command_name}")
        return command_class(**payload)
60 |
61 |
def send_command(connection: ConnectionType, command: WorkerCommand) -> None:
    """Publish ``command`` to its worker's pubsub channel over ``connection``."""
    message = command.command_payload()
    connection.publish(message["channel_name"], json.dumps(message))
66 |
67 |
class WorkerCommandsChannelListener(object):
    """Subscribes to a worker's pubsub command channel and dispatches incoming commands."""

    def __init__(self, connection: ConnectionType, worker_name: str) -> None:
        self.connection = connection
        self.pubsub_channel_name = WorkerCommandsChannelListener._commands_channel(worker_name)
        # Set in start(); initialized to None so stop() is safe before start()
        # (previously stop() raised AttributeError if start() was never called).
        self.pubsub = None
        self.pubsub_thread = None

    @staticmethod
    def _commands_channel(worker_name: str) -> str:
        """Return the pubsub channel name for ``worker_name``."""
        return _PUBSUB_CHANNEL_TEMPLATE.format(worker_name)

    def start(self) -> None:
        """Subscribe to this worker's channel"""
        logger.info(f"Subscribing to channel {self.pubsub_channel_name}")
        self.pubsub = self.connection.pubsub()
        self.pubsub.subscribe(**{self.pubsub_channel_name: self.handle_payload})
        # Messages are handled by handle_payload in a background daemon thread.
        self.pubsub_thread = self.pubsub.run_in_thread(sleep_time=0.2, daemon=True)

    def stop(self) -> None:
        """Unsubscribe from pubsub channel; a no-op if the listener was never started."""
        if self.pubsub_thread:
            logger.info(f"Unsubscribing from channel {self.pubsub_channel_name}")
            self.pubsub_thread.stop()
            self.pubsub_thread.join()
            self.pubsub.unsubscribe()
            self.pubsub.close()

    def handle_payload(self, payload: Dict[str, Any]) -> None:
        """Handle commands: decode a pubsub message dict and execute the command it carries."""
        command = WorkerCommand.from_payload(json.loads(payload["data"]))
        logger.debug(f"Received command: {command}")
        command.process_command(self.connection)
98 |
--------------------------------------------------------------------------------
/scheduler/tests/test_settings.py:
--------------------------------------------------------------------------------
1 | import dataclasses
2 |
3 | from django.conf import settings
4 | from django.core.exceptions import ImproperlyConfigured
5 |
6 | from scheduler.settings import conf_settings
7 | from scheduler.tests.testtools import SchedulerBaseCase
8 | from scheduler.types import Broker, SchedulerConfiguration
9 |
10 |
class TestWorkerAdmin(SchedulerBaseCase):
    """Tests that scheduler.settings.conf_settings() applies SCHEDULER_CONFIG given as a dict,
    a SchedulerConfiguration dataclass, or rejects it when it contains an unknown key."""

    def setUp(self):
        from scheduler.settings import SCHEDULER_CONFIG

        # Keep the original configuration object so tearDown can restore it after each test.
        self.old_settings = SCHEDULER_CONFIG

    def tearDown(self):
        from scheduler import settings as scheduler_settings

        scheduler_settings.SCHEDULER_CONFIG = self.old_settings

    def test_scheduler_config_as_dict(self):
        """Every key of a dict-style SCHEDULER_CONFIG should be applied by conf_settings()."""
        from scheduler.settings import SCHEDULER_CONFIG

        # Each value differs from the current config (+1) so the assertions below
        # prove conf_settings() actually changed them.
        settings.SCHEDULER_CONFIG = {
            "EXECUTIONS_IN_PAGE": SCHEDULER_CONFIG.EXECUTIONS_IN_PAGE + 1,
            "SCHEDULER_INTERVAL": SCHEDULER_CONFIG.SCHEDULER_INTERVAL + 1,
            "BROKER": Broker.REDIS,
            # NOTE(review): derived from SCHEDULER_INTERVAL — looks like a copy-paste of the
            # line above; presumably CALLBACK_TIMEOUT + 1 was intended. Confirm before changing.
            "CALLBACK_TIMEOUT": SCHEDULER_CONFIG.SCHEDULER_INTERVAL + 1,
            "DEFAULT_SUCCESS_TTL": SCHEDULER_CONFIG.DEFAULT_SUCCESS_TTL + 1,
            "DEFAULT_FAILURE_TTL": SCHEDULER_CONFIG.DEFAULT_FAILURE_TTL + 1,
            "DEFAULT_JOB_TTL": SCHEDULER_CONFIG.DEFAULT_JOB_TTL + 1,
            "DEFAULT_JOB_TIMEOUT": SCHEDULER_CONFIG.DEFAULT_JOB_TIMEOUT + 1,
            # General configuration values
            "DEFAULT_WORKER_TTL": SCHEDULER_CONFIG.DEFAULT_WORKER_TTL + 1,
            "DEFAULT_MAINTENANCE_TASK_INTERVAL": SCHEDULER_CONFIG.DEFAULT_MAINTENANCE_TASK_INTERVAL + 1,
            "DEFAULT_JOB_MONITORING_INTERVAL": SCHEDULER_CONFIG.DEFAULT_JOB_MONITORING_INTERVAL + 1,
            "SCHEDULER_FALLBACK_PERIOD_SECS": SCHEDULER_CONFIG.SCHEDULER_FALLBACK_PERIOD_SECS + 1,
        }
        conf_settings()
        # Re-import to pick up the module-level SCHEDULER_CONFIG rebound by conf_settings().
        from scheduler.settings import SCHEDULER_CONFIG

        for key, value in settings.SCHEDULER_CONFIG.items():
            self.assertEqual(getattr(SCHEDULER_CONFIG, key), value)

    def test_scheduler_config_as_data_class(self):
        """A SchedulerConfiguration dataclass SCHEDULER_CONFIG should be applied field-for-field."""
        from scheduler.settings import SCHEDULER_CONFIG

        self.assertEqual(SCHEDULER_CONFIG.EXECUTIONS_IN_PAGE, 20)
        settings.SCHEDULER_CONFIG = SchedulerConfiguration(
            EXECUTIONS_IN_PAGE=1,
            SCHEDULER_INTERVAL=60,
            BROKER=Broker.REDIS,
            CALLBACK_TIMEOUT=1111,
            DEFAULT_SUCCESS_TTL=1111,
            DEFAULT_FAILURE_TTL=111111,
            DEFAULT_JOB_TTL=1111,
            DEFAULT_JOB_TIMEOUT=11111,
            # General configuration values
            DEFAULT_WORKER_TTL=11111,
            DEFAULT_MAINTENANCE_TASK_INTERVAL=111,
            DEFAULT_JOB_MONITORING_INTERVAL=1111,
            SCHEDULER_FALLBACK_PERIOD_SECS=1111,
        )
        conf_settings()
        # Re-import to pick up the module-level SCHEDULER_CONFIG rebound by conf_settings().
        from scheduler.settings import SCHEDULER_CONFIG

        for key, value in dataclasses.asdict(settings.SCHEDULER_CONFIG).items():
            self.assertEqual(getattr(SCHEDULER_CONFIG, key), value)

    def test_scheduler_config_as_dict_bad_param(self):
        """An unknown key in a dict-style SCHEDULER_CONFIG makes conf_settings() raise."""
        settings.SCHEDULER_CONFIG = {
            "EXECUTIONS_IN_PAGE": 1,
            "SCHEDULER_INTERVAL": 60,
            "BROKER": Broker.REDIS,
            "CALLBACK_TIMEOUT": 1111,
            "DEFAULT_SUCCESS_TTL": 1111,
            "DEFAULT_FAILURE_TTL": 111111,
            "DEFAULT_JOB_TTL": 1111,
            "DEFAULT_JOB_TIMEOUT": 11111,
            # General configuration values
            "DEFAULT_WORKER_TTL": 11111,
            "DEFAULT_MAINTENANCE_TASK_INTERVAL": 111,
            "DEFAULT_JOB_MONITORING_INTERVAL": 1111,
            "SCHEDULER_FALLBACK_PERIOD_SECS": 1111,
            "BAD_PARAM": "bad_value",  # This should raise an error
        }
        self.assertRaises(ImproperlyConfigured, conf_settings)
89 |
--------------------------------------------------------------------------------
/scheduler/views/queue_job_actions.py:
--------------------------------------------------------------------------------
1 | """list_registry_jobs actions on multiple selected jobs"""
2 |
3 | from enum import Enum
4 |
5 | from django.contrib import admin, messages
6 | from django.contrib.admin.views.decorators import staff_member_required
7 | from django.http import HttpResponse, HttpRequest
8 | from django.shortcuts import render, redirect
9 | from django.urls import reverse
10 | from django.views.decorators.cache import never_cache
11 |
12 | from scheduler.redis_models import JobModel
13 | from scheduler.settings import logger
14 | from scheduler.views.helpers import get_queue, _check_next_url, _enqueue_multiple_jobs
15 | from scheduler.worker.commands import StopJobCommand, send_command
16 |
17 |
class QueueJobAction(Enum):
    """Bulk actions that can be applied to jobs selected in the queue registry view."""

    DELETE = "delete"
    REQUEUE = "requeue"
    STOP = "stop"

    def __contains__(self, item: str) -> bool:
        # Value-level membership on an *instance*, e.g. `"delete" in QueueJobAction.DELETE`.
        return any(item == member.value for member in type(self))
25 |
26 |
@never_cache  # type: ignore
@staff_member_required  # type: ignore
def queue_job_actions(request: HttpRequest, queue_name: str) -> HttpResponse:
    """Apply a bulk action (delete/requeue/stop) to the jobs selected in the queue view,
    then redirect back to the registry listing."""
    queue = get_queue(queue_name)
    next_url = _check_next_url(request, reverse("queue_registry_jobs", args=[queue_name, "queued"]))
    action = request.POST.get("action", False)
    job_names = request.POST.get("job_names", False)
    # Guard: only handle a POST carrying both an action and at least one job name.
    valid_actions = {item.value for item in QueueJobAction}
    if request.method != "POST" or not action or not job_names or action not in valid_actions:
        return redirect(next_url)
    job_names = request.POST.getlist("job_names")
    if action == QueueJobAction.DELETE.value:
        for job in JobModel.get_many(job_names, connection=queue.connection):
            if job is not None:  # skip names whose job model no longer exists
                queue.delete_job(job.name)
        messages.info(request, f"You have successfully deleted {len(job_names)} jobs!")
    elif action == QueueJobAction.REQUEUE.value:
        requeued_jobs_count = _enqueue_multiple_jobs(queue, job_names)
        messages.info(request, f"You have successfully re-queued {requeued_jobs_count}/{len(job_names)} jobs!")
    elif action == QueueJobAction.STOP.value:
        cancelled_jobs = 0
        for job in JobModel.get_many(job_names, connection=queue.connection):
            if job is None:
                continue
            try:
                # Tell the worker running the job to stop it, then cancel it in the queue.
                send_command(
                    connection=queue.connection,
                    command=StopJobCommand(job_name=job.name, worker_name=job.worker_name),
                )
                queue.cancel_job(job.name)
                cancelled_jobs += 1
            except Exception as e:
                # Best-effort: a job that cannot be stopped is logged and skipped.
                logger.warning(f"Could not stop job: {e}")
        messages.info(request, f"You have successfully stopped {cancelled_jobs} jobs!")
    return redirect(next_url)
63 |
64 |
@never_cache  # type: ignore
@staff_member_required  # type: ignore
def queue_confirm_job_action(request: HttpRequest, queue_name: str) -> HttpResponse:
    """Render the confirmation page shown before a bulk action is applied to selected jobs.

    Redirects back to the registry listing unless the request is a POST carrying a
    supported action and at least one selected job.
    """
    queue = get_queue(queue_name)
    next_url = _check_next_url(request, reverse("queue_registry_jobs", args=[queue_name, "queued"]))
    action = request.POST.get("action", None)
    job_names = request.POST.getlist("_selected_action", None)
    # Compare against the enum *values*: `action in QueueJobAction` uses the enum metaclass,
    # which raises TypeError for non-member strings on Python < 3.12. This also matches the
    # check in queue_job_actions().
    if (
        request.method != "POST"
        or action is None
        or not job_names
        or action not in [item.value for item in QueueJobAction]
    ):
        return redirect(next_url)

    # confirm action
    context_data = {
        **admin.site.each_context(request),
        "action": action,
        "jobs": [JobModel.get(job_name, connection=queue.connection) for job_name in job_names],
        "total_jobs": len(job_names),
        "queue": queue,
        "next_url": next_url,
        "action_url": reverse(
            "queue_job_actions",
            args=[
                queue_name,
            ],
        ),
    }
    return render(request, "admin/scheduler/confirm_action.html", context_data)
91 |
--------------------------------------------------------------------------------
/docs/configuration.md:
--------------------------------------------------------------------------------
1 | # Configure your django-tasks-scheduler
2 |
3 | ## settings.py
4 |
All settings for the scheduler can be configured in a single dictionary in `settings.py`:
6 |
7 | ```python
8 | import os
9 | from typing import Dict
10 | from scheduler.types import SchedulerConfiguration, Broker, QueueConfiguration
11 |
12 | SCHEDULER_CONFIG = SchedulerConfiguration(
13 | EXECUTIONS_IN_PAGE=20,
14 | SCHEDULER_INTERVAL=10,
15 | BROKER=Broker.REDIS,
16 | CALLBACK_TIMEOUT=60, # Callback timeout in seconds (success/failure/stopped)
17 | # Default values, can be overridden per task/job
18 | DEFAULT_SUCCESS_TTL=10 * 60, # Time To Live (TTL) in seconds to keep successful job results
19 | DEFAULT_FAILURE_TTL=365 * 24 * 60 * 60, # Time To Live (TTL) in seconds to keep job failure information
20 | DEFAULT_JOB_TTL=10 * 60, # Time To Live (TTL) in seconds to keep job information
21 | DEFAULT_JOB_TIMEOUT=5 * 60, # timeout (seconds) for a job
22 | # General configuration values
23 | DEFAULT_WORKER_TTL=10 * 60, # Time To Live (TTL) in seconds to keep worker information after last heartbeat
24 | DEFAULT_MAINTENANCE_TASK_INTERVAL=10 * 60, # The interval to run maintenance tasks in seconds. 10 minutes.
25 | DEFAULT_JOB_MONITORING_INTERVAL=30, # The interval to monitor jobs in seconds.
26 | SCHEDULER_FALLBACK_PERIOD_SECS=120, # Period (secs) to wait before requiring to reacquire locks
27 | )
28 | SCHEDULER_QUEUES: Dict[str, QueueConfiguration] = {
29 | 'default': QueueConfiguration(
30 | HOST='localhost',
31 | PORT=6379,
32 | USERNAME='some-user',
33 | PASSWORD='some-password',
34 | CONNECTION_KWARGS={ # Eventual additional Broker connection arguments
35 | 'ssl_cert_reqs': 'required',
36 | 'ssl': True,
37 | },
38 | ),
39 | 'high': QueueConfiguration(URL=os.getenv('REDISTOGO_URL', 'redis://localhost:6379/0')),
40 | 'low': QueueConfiguration(HOST='localhost', PORT=6379, DB=0, ASYNC=False),
41 | }
42 | ```
43 |
44 | ### SCHEDULER_CONFIG: `EXECUTIONS_IN_PAGE`
45 |
46 | Number of job executions to show in a page in a ScheduledJob admin view.
47 |
48 | Default: `20`.
49 |
50 | ### SCHEDULER_CONFIG: `SCHEDULER_INTERVAL`
51 |
52 | Default scheduler interval, a scheduler is a subprocess of a worker and
53 | will check which job executions are pending.
54 |
55 | Default: `10` (10 seconds).
56 |
57 | ### SCHEDULER_CONFIG: `BROKER`
58 |
59 | ### SCHEDULER_CONFIG: `CALLBACK_TIMEOUT`
60 |
61 | ### SCHEDULER_CONFIG: `DEFAULT_SUCCESS_TTL`
62 |
63 | Default time to live for job execution result when it is successful.
64 |
65 | Default: `600` (10 minutes).
66 |
67 | ### SCHEDULER_CONFIG: `DEFAULT_FAILURE_TTL`
68 |
Default time to live for a job execution result when it has failed.

Default: `31536000` (1 year, matching the example above).
72 |
73 | ### SCHEDULER_CONFIG: `DEFAULT_JOB_TTL`
74 |
Default time to live (TTL) in seconds to keep job information.
76 |
77 | Default: `300` (5 minutes).
78 |
79 | ### SCHEDULER_CONFIG: `DEFAULT_JOB_TIMEOUT`
80 |
Timeout (seconds) for a job.
82 |
83 | Default: `300` (5 minutes).
84 |
85 | ### SCHEDULER_CONFIG: `DEFAULT_WORKER_TTL`
86 |
87 | Time To Live (TTL) in seconds to keep worker information after last heartbeat.
88 | Default: `600` (10 minutes).
89 |
90 | ### SCHEDULER_CONFIG: `DEFAULT_MAINTENANCE_TASK_INTERVAL`
91 |
92 | The interval to run worker maintenance tasks in seconds.
Default: `600` (10 minutes).
94 |
95 | ### SCHEDULER_CONFIG: `DEFAULT_JOB_MONITORING_INTERVAL`
96 |
97 | The interval to monitor jobs in seconds.
98 |
99 | ### SCHEDULER_CONFIG: `SCHEDULER_FALLBACK_PERIOD_SECS`
100 |
101 | Period (secs) to wait before requiring to reacquire locks.
102 |
103 | ### SCHEDULER_CONFIG: `TOKEN_VALIDATION_METHOD`
104 |
105 | Method to validate request `Authorization` header with.
106 | Enables checking stats using API token.
107 |
108 | Default: no tokens allowed.
109 |
110 | ### `SCHEDULER_QUEUES`
111 |
112 | You can configure the queues to work with.
113 | That way you can have different workers listening to different queues.
114 |
115 | Different queues can use different redis servers/connections.
116 |
--------------------------------------------------------------------------------
/scheduler/templates/admin/scheduler/worker_details.html:
--------------------------------------------------------------------------------
1 | {% extends 'admin/scheduler/scheduler_base.html' %}
2 |
3 | {% block breadcrumbs %}
4 |
9 | {% endblock %}
10 |
11 | {% block content_title %}Worker Info {% endblock %}
12 |
13 | {% block content %}
14 |
15 |
16 |
40 |
50 |
51 |
52 |
53 |
91 |
92 | {% include 'admin/scheduler/jobs-list-with-tasks.partial.html' %}
93 |
94 |
95 | {% endblock %}
96 |
--------------------------------------------------------------------------------
/scheduler/tests/test_internals.py:
--------------------------------------------------------------------------------
1 | from datetime import timedelta, timezone as dt_timezone
2 |
3 | from django.core.exceptions import ImproperlyConfigured
4 | from django.test import override_settings
5 | from django.utils import timezone
6 |
7 | from scheduler.helpers.callback import Callback, CallbackSetupError
8 | from scheduler.models import TaskType, get_scheduled_task, get_next_cron_time
9 | from scheduler.tests.testtools import SchedulerBaseCase, task_factory
10 |
11 |
class TestInternals(SchedulerBaseCase):
    """Tests for scheduler internals: scheduled-task lookup, model updates, and Callback validation."""

    def test_get_scheduled_job(self):
        """get_scheduled_task resolves an existing task and raises ValueError otherwise."""
        task = task_factory(TaskType.ONCE, scheduled_time=timezone.now() + timedelta(hours=1))
        self.assertEqual(task, get_scheduled_task(TaskType.ONCE, task.id))
        with self.assertRaises(ValueError):
            get_scheduled_task(task.task_type, task.id + 1)  # existing type, missing id
        with self.assertRaises(ValueError):
            get_scheduled_task("UNKNOWN_JOBTYPE", task.id)  # unknown task type

    def test_task_update(self):
        """Saving a task with update_fields succeeds."""
        task = task_factory(TaskType.ONCE)
        task.name = "new_name"
        task.save(update_fields=["name"])

    def test_callback_bad_arguments(self):
        """Callback rejects bad timeouts, non-callables, and non-string/function func values."""
        bad_cases = [
            (
                ("scheduler.tests.jobs.test_job", "1m"),
                "Callback `timeout` must be a positive int, but received 1m",
            ),
            (
                ("scheduler.tests.jobs.non_existing_method",),
                "Callback `func` is not callable: scheduler.tests.jobs.non_existing_method",
            ),
            (
                ("scheduler.tests.non_existing_module.non_existing_method",),
                "Callback `func` is not callable: scheduler.tests.non_existing_module.non_existing_method",
            ),
            (
                ("non_existing_method",),
                "Callback `func` is not callable: non_existing_method",
            ),
            (
                (1,),
                "Callback `func` must be a string or function, received 1",
            ),
        ]
        for callback_args, expected_message in bad_cases:
            with self.assertRaises(CallbackSetupError) as ctx:
                Callback(*callback_args)
            self.assertEqual(expected_message, str(ctx.exception))
45 |
46 |
class TestConfSettings(SchedulerBaseCase):
    """Tests for conf_settings() validation errors and for get_next_cron_time timezone handling."""

    @override_settings(SCHEDULER_CONFIG=[])
    def test_conf_settings__bad_scheduler_config(self):
        """A SCHEDULER_CONFIG that is neither a dict nor a SchedulerConfiguration is rejected."""
        from scheduler import settings

        with self.assertRaises(ImproperlyConfigured) as cm:
            settings.conf_settings()

        self.assertEqual(str(cm.exception), "SCHEDULER_CONFIG should be a SchedulerConfiguration or dict")

    @override_settings(SCHEDULER_QUEUES=[])
    def test_conf_settings__bad_scheduler_queues_config(self):
        """SCHEDULER_QUEUES must be a dict; anything else is rejected."""
        from scheduler import settings

        with self.assertRaises(ImproperlyConfigured) as cm:
            settings.conf_settings()

        self.assertEqual(str(cm.exception), "You have to define SCHEDULER_QUEUES in settings.py as dict")

    @override_settings(SCHEDULER_QUEUES={"default": []})
    def test_conf_settings__bad_queue_config(self):
        """Each queue entry must be a QueueConfiguration or dict; a list is rejected."""
        from scheduler import settings

        with self.assertRaises(ImproperlyConfigured) as cm:
            settings.conf_settings()

        self.assertEqual(str(cm.exception), "Queue default configuration should be a QueueConfiguration or dict")

    @override_settings(SCHEDULER_CONFIG={"UNKNOWN_SETTING": 10})
    def test_conf_settings__unknown_setting(self):
        """An unknown key inside a dict-style SCHEDULER_CONFIG is rejected by name."""
        from scheduler import settings

        with self.assertRaises(ImproperlyConfigured) as cm:
            settings.conf_settings()

        self.assertEqual(str(cm.exception), "Unknown setting UNKNOWN_SETTING in SCHEDULER_CONFIG")

    @override_settings(USE_TZ=True, TIME_ZONE="EST")
    def test_get_next_cron_time(self):
        """get_next_cron_time returns a future, UTC-aware datetime even with a non-UTC active timezone."""
        next_cron_time = get_next_cron_time("0 0 * * *")
        self.assertIsNotNone(next_cron_time)
        self.assertTrue(next_cron_time > timezone.now())
        self.assertEqual(dt_timezone.utc, next_cron_time.tzinfo)
90 |
--------------------------------------------------------------------------------
/scheduler/templates/admin/scheduler/stats.html:
--------------------------------------------------------------------------------
1 | {% extends "admin/base_site.html" %}
2 |
3 | {% block title %}Queues {{ block.super }}{% endblock %}
4 |
5 | {% block extrastyle %}
6 | {{ block.super }}
7 |
10 | {% endblock %}
11 |
12 | {% block content_title %}Tasks Queues {% endblock %}
13 |
14 | {% block breadcrumbs %}
15 |
19 | {% endblock %}
20 |
21 | {% block content %}
22 |
23 |
103 |
104 | {% endblock %}
105 |
--------------------------------------------------------------------------------
/scheduler/migrations/0003_auto_20220329_2107.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.2.12 on 2022-03-29 21:07
2 |
3 | import django.db.models.deletion
4 | from django.db import migrations, models
5 |
6 |
class Migration(migrations.Migration):
    """Auto-generated migration: normalizes id/choice/help-text fields on the original
    *job models and introduces the JobKwarg and JobArg generic-relation models.
    Generated code — do not edit by hand."""

    dependencies = [
        ('contenttypes', '0002_remove_content_type_name'),
        ('scheduler', '0002_alter_cronjob_id_alter_repeatablejob_id_and_more'),
    ]

    operations = [
        migrations.AlterField(
            model_name='cronjob',
            name='cron_string',
            field=models.CharField(help_text='Define the schedule in a crontab like syntax. Times are in UTC.',
                                   max_length=64, verbose_name='cron string'),
        ),
        migrations.AlterField(
            model_name='cronjob',
            name='id',
            field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        ),
        migrations.AlterField(
            model_name='repeatablejob',
            name='id',
            field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        ),
        migrations.AlterField(
            model_name='repeatablejob',
            name='interval_unit',
            field=models.CharField(
                choices=[('seconds', 'seconds'), ('minutes', 'minutes'), ('hours', 'hours'), ('days', 'days'),
                         ('weeks', 'weeks')], default='hours', max_length=12, verbose_name='interval unit'),
        ),
        migrations.AlterField(
            model_name='scheduledjob',
            name='id',
            field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        ),
        migrations.CreateModel(
            name='JobKwarg',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('arg_type', models.CharField(
                    choices=[('str_val', 'string'), ('int_val', 'int'), ('bool_val', 'boolean'),
                             ('datetime_val', 'Datetime')], default='str_val', max_length=12,
                    verbose_name='Argument Type')),
                ('str_val', models.CharField(blank=True, max_length=255, verbose_name='String Value')),
                ('int_val', models.IntegerField(blank=True, null=True, verbose_name='Int Value')),
                ('bool_val', models.BooleanField(default=False, verbose_name='Boolean Value')),
                ('datetime_val', models.DateTimeField(blank=True, null=True, verbose_name='Datetime Value')),
                ('object_id', models.PositiveIntegerField()),
                ('key', models.CharField(max_length=255)),
                ('content_type',
                 models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
            ],
            options={
                'ordering': ['id'],
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='JobArg',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('arg_type', models.CharField(
                    choices=[('str_val', 'string'), ('int_val', 'int'), ('bool_val', 'boolean'),
                             ('datetime_val', 'Datetime')], default='str_val', max_length=12,
                    verbose_name='Argument Type')),
                ('str_val', models.CharField(blank=True, max_length=255, verbose_name='String Value')),
                ('int_val', models.IntegerField(blank=True, null=True, verbose_name='Int Value')),
                ('bool_val', models.BooleanField(default=False, verbose_name='Boolean Value')),
                ('datetime_val', models.DateTimeField(blank=True, null=True, verbose_name='Datetime Value')),
                ('object_id', models.PositiveIntegerField()),
                ('content_type',
                 models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
            ],
            options={
                'ordering': ['id'],
                'abstract': False,
            },
        ),
    ]
86 |
--------------------------------------------------------------------------------
/scheduler/tests/test_task_types/test_once_task.py:
--------------------------------------------------------------------------------
1 | from datetime import timedelta, datetime
2 |
3 | import bs4
4 | import time_machine
5 | from django.core.exceptions import ValidationError
6 | from django.urls import reverse
7 | from django.utils import timezone
8 |
9 | from scheduler import settings
10 | from scheduler.helpers.queues import get_queue
11 | from scheduler.models import TaskType
12 | from scheduler.redis_models import JobModel
13 | from scheduler.tests.test_task_types.test_task_model import BaseTestCases
14 | from scheduler.tests.testtools import task_factory
15 |
16 |
class TestScheduledOnceTask(BaseTestCases.TestSchedulableTask):
    """Tests specific to tasks scheduled to run exactly once."""

    task_type = TaskType.ONCE
    queue_name = settings.get_queue_names()[0]

    def test_clean(self):
        once_task = task_factory(self.task_type)
        once_task.queue = self.queue_name
        once_task.callable = "scheduler.tests.jobs.test_job"
        # A fully valid task passes model validation without raising.
        self.assertIsNone(once_task.clean())

    @time_machine.travel(datetime(2016, 12, 25))
    def test_admin_changelist_view__has_timezone_data(self):
        # arrange
        self.client.login(username="admin", password="admin")
        task_factory(self.task_type)
        changelist_url = reverse("admin:scheduler_task_changelist")
        # act
        response = self.client.get(changelist_url)
        # assert
        self.assertContains(response, "Run once: Dec. 26, 2016, midnight", count=1, status_code=200)

    @time_machine.travel(datetime(2016, 12, 25))
    def test_admin_change_view__has_execution_list(self):
        # arrange
        self.client.login(username="admin", password="admin")
        once_task = task_factory(self.task_type)
        change_url = reverse("admin:scheduler_task_change", args=(once_task.id,))
        # act
        response = self.client.get(change_url)
        # assert
        self.assertEqual(200, response.status_code)
        self.assertContains(response, "Job executions")
        self.assertFalse(response.context["pagination_required"])
        self.assertEqual(response.context["executions"].paginator.count, 1)
        # The rendered page contains exactly one result table and one counter span.
        soup = bs4.BeautifulSoup(response.content, "html.parser")
        self.assertEqual(1, len(soup.find_all("table", {"id": "result_list"})))
        counters = soup.find_all("span", {"id": "counter"})
        self.assertEqual(1, len(counters))
        self.assertEqual("1 entry", counters[0].text.strip())

    @time_machine.travel(datetime(2016, 12, 25))
    def test_admin_change_view__has_empty_execution_list(self):
        # arrange
        self.client.login(username="admin", password="admin")
        once_task = task_factory(self.task_type)
        queue = get_queue(self.queue_name)
        # Remove the job backing the task so the execution list renders empty.
        scheduled_job = JobModel.get(once_task.job_name, connection=queue.connection)
        JobModel.delete(scheduled_job, connection=queue.connection)
        change_url = reverse("admin:scheduler_task_change", args=(once_task.id,))
        # act
        response = self.client.get(change_url)
        # assert
        self.assertContains(response, "Job executions")
        self.assertContains(response, "")
        self.assertContains(response, "0", status_code=200)
        self.assertFalse(response.context["pagination_required"])
        self.assertEqual(response.context["executions"].paginator.count, 0)

    def test_create_without_date__fail(self):
        once_task = task_factory(self.task_type, scheduled_time=None, instance_only=True)
        self.assertIsNone(once_task.scheduled_time)
        with self.assertRaises(Exception) as cm:
            once_task.clean()
        self.assertTrue(isinstance(cm.exception, ValidationError))
        self.assertEqual(str(cm.exception), "{'scheduled_time': ['Scheduled time is required']}")

    def test_create_with_date_in_the_past__fail(self):
        yesterday = datetime.now() - timedelta(days=1)
        once_task = task_factory(self.task_type, scheduled_time=yesterday, instance_only=True)
        with self.assertRaises(Exception) as cm:
            once_task.clean()
        self.assertTrue(isinstance(cm.exception, ValidationError))
        self.assertEqual(str(cm.exception), "{'scheduled_time': ['Scheduled time must be in the future']}")

    def test_unschedulable_old_job(self):
        # Saving with clean=False bypasses validation, but a past time is not scheduled.
        an_hour_ago = timezone.now() - timedelta(hours=1)
        once_task = task_factory(self.task_type, scheduled_time=an_hour_ago, instance_only=True)
        once_task.save(clean=False)
        self.assertFalse(once_task.is_scheduled())
95 |
--------------------------------------------------------------------------------
/scheduler/management/commands/scheduler_stats.py:
--------------------------------------------------------------------------------
1 | import time
2 | from typing import Any, Dict, List, Optional
3 |
4 | import click
5 | from django.core.management.base import BaseCommand, CommandParser
6 |
7 | from scheduler.views import get_statistics
8 |
9 | ANSI_LIGHT_GREEN = "\033[1;32m"
10 | ANSI_LIGHT_WHITE = "\033[1;37m"
11 | ANSI_RESET = "\033[0m"
12 |
13 | KEYS = ("queued_jobs", "started_jobs", "finished_jobs", "canceled_jobs", "workers")
14 |
15 |
class Command(BaseCommand):
    """Print statistics"""

    help = __doc__

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super(Command, self).__init__(*args, **kwargs)
        # Total width of the dashboard table, in characters.
        self.table_width = 80
        # Polling interval in seconds; None/0 means print once and exit.
        self.interval: Optional[float] = None

    def add_arguments(self, parser: CommandParser) -> None:
        """Register output-format flags (--json/--yaml) and the --interval poll option."""
        parser.add_argument(
            "-j",
            "--json",
            action="store_true",
            dest="json",
            help="Output statistics as JSON",
        )

        parser.add_argument(
            "-y",
            "--yaml",
            action="store_true",
            dest="yaml",
            help="Output statistics as YAML",
        )

        parser.add_argument(
            "-i",
            "--interval",
            dest="interval",
            type=float,
            help="Poll statistics every N seconds",
        )

    def _print_separator(self) -> None:
        click.echo("-" * self.table_width)

    def _print_stats_dashboard(
        self,
        statistics: Dict[str, List[Dict[str, Any]]],
        prev_stats: Optional[Dict[str, List[Dict[str, Any]]]] = None,
        with_color: bool = True,
    ) -> None:
        """Render one table of per-queue statistics.

        When ``prev_stats`` is provided, columns whose value changed since the
        previous poll are highlighted in green; unchanged columns are white.
        When ``with_color`` is False, no ANSI escape codes are emitted at all.
        """
        if self.interval:
            click.clear()
        click.echo()
        click.echo("Django-Scheduler CLI Dashboard")
        click.echo()
        self._print_separator()
        click.echo(f"| {'Name':<16} | Queued | Active | Finished | Canceled | Workers |")
        self._print_separator()
        for ind, queue in enumerate(statistics["queues"]):
            vals = [queue[k] for k in KEYS]
            # Pick a color prefix per column.
            # BUGFIX: the no-color assignment used to be unconditionally
            # overwritten by the following if/else, so ANSI codes were still
            # built even when color was disabled (masked only by click
            # stripping them). `elif` makes the no-color branch effective.
            if not with_color:
                colors = ["" for _ in KEYS]
            elif prev_stats and len(prev_stats["queues"]) > ind:
                prev = prev_stats["queues"][ind]
                prev_vals = tuple(prev[k] for k in KEYS)
                colors = [
                    ANSI_LIGHT_GREEN if vals[i] != prev_vals[i] else ANSI_LIGHT_WHITE for i in range(len(prev_vals))
                ]
            else:
                colors = [ANSI_LIGHT_WHITE for _ in range(len(vals))]
            # Only emit the reset code when we emitted a color code.
            reset = ANSI_RESET if with_color else ""
            to_print = " | ".join([f"{colors[i]}{vals[i]:9}{reset}" for i in range(len(vals))])
            click.echo(f"| {queue['name']:<16} | {to_print} |", color=with_color)

        self._print_separator()

        if self.interval:
            click.echo()
            click.echo("Press 'Ctrl+c' to quit")

    def handle(self, *args: Any, **options: Any) -> None:
        """Entry point: dump stats as JSON/YAML, or render the dashboard (optionally polling)."""
        if options.get("json") and options.get("yaml"):
            click.secho("Aborting. Cannot output as both json and yaml", err=True, fg="red")
            raise SystemExit(1)
        if options.get("json"):
            import json

            click.secho(
                json.dumps(get_statistics(), indent=2),
            )
            return

        if options.get("yaml"):
            try:
                import yaml
            except ImportError:
                # yaml is an optional dependency; bail out gracefully.
                click.secho("Aborting. yaml not supported", err=True, fg="red")
                return

            click.secho(yaml.dump(get_statistics(), default_flow_style=False))
            return

        self.interval = options.get("interval")

        # No (or invalid) interval: print a single snapshot and exit.
        if not self.interval or self.interval < 0:
            self._print_stats_dashboard(get_statistics(), with_color=not options.get("no_color"))
            return

        try:
            prev = None
            while True:
                statistics = get_statistics()
                self._print_stats_dashboard(statistics, prev, with_color=not options.get("no_color"))
                prev = statistics
                time.sleep(self.interval)
        except KeyboardInterrupt:
            pass
127 |
--------------------------------------------------------------------------------
/scheduler/tests/test_job_decorator.py:
--------------------------------------------------------------------------------
1 | import threading
2 | import time
3 |
4 | from django.test import TestCase
5 |
6 | from scheduler import settings
7 | from scheduler.helpers.queues import get_queue
8 | from . import conf # noqa
9 | from ..decorators import JOB_METHODS_LIST, job
10 | from ..redis_models import JobStatus
11 | from ..redis_models.job import JobModel
12 | from ..worker import create_worker
13 |
14 |
@job()
def test_job():
    """Job on the default queue: sleep briefly, then return a constant."""
    time.sleep(1)
    return 2
19 |
20 |
@job("django_tasks_scheduler_test")
def test_job_diff_queue():
    """Job registered on a non-default queue."""
    time.sleep(1)
    return 2
25 |
26 |
@job(timeout=1)
def test_job_timeout():
    """Job with a 1-second timeout."""
    time.sleep(1)
    return 2
31 |
32 |
@job(result_ttl=1)
def test_job_result_ttl():
    """Job whose result is kept for only one second."""
    return 2
36 |
37 |
class MyClass:
    """Minimal argument type for jobs: any two instances compare equal."""

    def run(self):
        print("Hello")

    def __eq__(self, other):
        # Equal to any other MyClass instance, unequal to everything else.
        return isinstance(other, MyClass)
46 |
47 |
@job()
def func_with_param(x):
    """Job that invokes ``run()`` on its single argument."""
    x.run()
51 |
52 |
@job(timeout=1)
def long_running_func():
    """Job that sleeps far past its 1-second timeout, so a worker fails it."""
    time.sleep(1000)
56 |
57 |
class JobDecoratorTest(TestCase):
    """Tests for the ``@job`` decorator and its ``delay()`` entry point."""

    def setUp(self) -> None:
        # Each test starts against an empty broker.
        get_queue("default").connection.flushall()

    def test_all_job_methods_registered(self):
        self.assertEqual(7, len(JOB_METHODS_LIST))

    def test_job_decorator_no_params(self):
        test_job.delay()
        self._assert_job_with_func_and_props(
            "default",
            test_job,
            settings.SCHEDULER_CONFIG.DEFAULT_SUCCESS_TTL,
            settings.SCHEDULER_CONFIG.DEFAULT_JOB_TIMEOUT,
        )

    def test_job_decorator_timeout(self):
        test_job_timeout.delay()
        self._assert_job_with_func_and_props(
            "default",
            test_job_timeout,
            settings.SCHEDULER_CONFIG.DEFAULT_SUCCESS_TTL,
            1,
        )

    def test_job_decorator_result_ttl(self):
        test_job_result_ttl.delay()
        self._assert_job_with_func_and_props(
            "default",
            test_job_result_ttl,
            1,
            settings.SCHEDULER_CONFIG.DEFAULT_JOB_TIMEOUT,
        )

    def test_job_decorator_different_queue(self):
        test_job_diff_queue.delay()
        self._assert_job_with_func_and_props(
            "django_tasks_scheduler_test",
            test_job_diff_queue,
            settings.SCHEDULER_CONFIG.DEFAULT_SUCCESS_TTL,
            settings.SCHEDULER_CONFIG.DEFAULT_JOB_TIMEOUT,
        )

    def _assert_job_with_func_and_props(self, queue_name, expected_func, expected_result_ttl, expected_timeout):
        """Assert exactly one queued job exists with the given callable, result TTL and timeout."""
        queue = get_queue(queue_name)
        queued = JobModel.get_many(queue.queued_job_registry.all(queue.connection), queue.connection)
        self.assertEqual(1, len(queued))

        single_job = queued[0]
        self.assertEqual(single_job.func, expected_func)
        self.assertEqual(single_job.success_ttl, expected_result_ttl)
        self.assertEqual(single_job.timeout, expected_timeout)

    def test_job_decorator_bad_queue(self):
        # Decorating with an unknown queue name fails at decoration time.
        with self.assertRaises(settings.QueueNotFoundError):

            @job("bad-queue")
            def test_job_bad_queue():
                return 1 + 1

    def test_job_decorator_delay_with_param(self):
        queue_name = "default"
        func_with_param.delay(MyClass())

        worker = create_worker(queue_name, burst=True)
        worker.work()

        executed_jobs = worker.queues[0].get_all_jobs()
        self.assertEqual(1, len(executed_jobs))
        # Named ``executed`` (not ``job``) to avoid shadowing the imported decorator.
        executed = executed_jobs[0]
        self.assertEqual(executed.func, func_with_param)
        self.assertEqual(executed.kwargs, {})
        self.assertEqual(executed.status, JobStatus.FINISHED)
        self.assertEqual(executed.args, (MyClass(),))

    def test_job_decorator_delay_with_param_worker_thread(self):
        queue_name = "default"

        long_running_func.delay()

        worker = create_worker(queue_name, burst=True)
        worker_thread = threading.Thread(target=worker.work)
        worker_thread.start()
        worker_thread.join()

        remaining_jobs = get_queue(queue_name).get_all_jobs()
        self.assertEqual(1, len(remaining_jobs))
        failed = remaining_jobs[0]
        self.assertEqual(failed.func, long_running_func)
        self.assertEqual(failed.kwargs, {})
        # The sleep outlives the 1-second timeout, so the job ends up FAILED.
        self.assertEqual(failed.status, JobStatus.FAILED)
149 |
--------------------------------------------------------------------------------