├── .gitignore ├── .travis.yml ├── CHANGES.md ├── LICENSE ├── README.md ├── example ├── Dockerfile ├── add_task.py ├── docker-compose.yml ├── rem_task.py ├── requirements.txt └── tasks.py ├── install.sh ├── redisbeat ├── __init__.py ├── constants.py └── scheduler.py ├── setup.py ├── t ├── __init__.py └── unit │ ├── __init__.py │ └── test_scheduler.py └── uninstall.sh /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | .* 3 | ======= 4 | # Byte-compiled / optimized / DLL files 5 | __pycache__/ 6 | *.py[cod] 7 | *$py.class 8 | 9 | # C extensions 10 | *.so 11 | 12 | # Distribution / packaging 13 | .Python 14 | env/ 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | *.egg-info/ 27 | .installed.cfg 28 | *.egg 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *,cover 49 | .hypothesis/ 50 | 51 | # Translations 52 | *.mo 53 | *.pot 54 | 55 | # Django stuff: 56 | *.log 57 | local_settings.py 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # IPython Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # dotenv 82 | .env 83 | 84 | # virtualenv 85 | venv/ 86 | ENV/ 87 | 88 | # Spyder project settings 89 | .spyderproject 90 | 91 | # Rope project settings 92 | .ropeproject 93 | *.log 94 | *.out 95 | *.pid 96 | *.rdb 97 | *.db 98 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - '2.7' 4 | - '3.6' 5 | install: 6 | - pip install -r example/requirements.txt 7 | script: python setup.py test 8 | deploy: 9 | - provider: pypi 10 | user: yylucifer 11 | password: ${PYPI_PASSWORD} 12 | on: 13 | python: 2.7 14 | tags: true 15 | branch: master -------------------------------------------------------------------------------- /CHANGES.md: -------------------------------------------------------------------------------- 1 | v0.0.1 initial release 2 | 3 | v0.0.2 Remove task when manually remove task by delete task in redis. 
4 | 5 | v0.0.6 fix: tick error 6 | 7 | v0.0.7 load entries from CELERYBEAT_SCHEDULE thanks @sibson 8 | Prefix as Parameter thanks @asmodehn 9 | 10 | v0.0.8 - v0.1.1 no new feature 11 | 12 | v0.1.2 celerybeat-redis can now run in multiple node, only one active, and other nodes are stand by 13 | v0.1.3 support python3 14 | v0.1.5 Refactor for celery 3.1(thanks asmodehn), add ttl for redis lock 15 | 16 | v1.1.0 Fix remove task error. 17 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016 Tyrael 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Introduction 2 | 3 | `redisbeat` is a [Celery Beat Scheduler](http://celery.readthedocs.org/en/latest/userguide/periodic-tasks.html) that stores periodic tasks and their status in a [Redis Datastore](https://redis.io/). 4 | 5 | Tasks can be added, removed or modified without restarting celery using `redisbeat`. 6 | 7 | You can also add scheduled tasks dynamically whenever you need to. 8 | 9 | 10 | # Features 11 | 12 | 1. Full-featured celery-beat scheduler. 13 | 2. Dynamically add/remove/modify tasks. 14 | 15 | 16 | # Installation 17 | 18 | `redisbeat` can be easily installed using setuptools or pip. 19 | 20 | # pip install redisbeat 21 | 22 | or you can install from source by cloning this repository: 23 | 24 | # git clone https://github.com/liuliqiang/redisbeat.git 25 | # cd redisbeat 26 | # python setup.py install 27 | 28 | # Docker-compose demo 29 | 30 | `redisbeat` provides a Docker demo in the example folder that you can use: 31 | 32 | ``` 33 | # cd redisbeat/example 34 | # docker-compose up -d 35 | ``` 36 | 37 | After you have compose running, you can easily see it working with the following commands: 38 | 39 | 1. Celery worker logs 40 | 41 | ``` 42 | # docker-compose logs worker 43 | ``` 44 | 45 | 2. Celery beat logs 46 | 47 | ``` 48 | # docker-compose logs beat 49 | ``` 50 | 51 | 3. Dynamically add the task `sub` 52 | 53 | ``` 54 | # docker exec -it beat python add_task.py 55 | ``` 56 | 57 | 4. Dynamically remove the task `sub` 58 | 59 | ``` 60 | # docker exec -it beat python rem_task.py 61 | ``` 62 | 63 | # Running demo locally without Docker 64 | 65 | If you want to try locally you can install the requirements from pip, and run it as a python project changing the url of redis from 'redis' to 'localhost' in tasks.py Celery instance and config: 66 | 67 | ```python 68 | #(...) 
69 | app = Celery('tasks', backend='redis://redis:6379', 70 | broker='redis://redis:6379') 71 | 72 | app.conf.update( 73 | CELERY_REDIS_SCHEDULER_URL = 'redis://redis:6379', 74 | #(...) 75 | ``` 76 | 77 | Commands to start worker and beat: 78 | 79 | ``` 80 | # celery worker -A tasks -l INFO 81 | # celery beat -A tasks -S redisbeat.RedisScheduler -l INFO 82 | ``` 83 | 84 | # Configuration and Usage 85 | 86 | Configuration for `redisbeat` is similar to the original celery configuration for beat. 87 | You can configure `redisbeat` as: 88 | 89 | 90 | ```python 91 | # encoding: utf-8 92 | 93 | from datetime import timedelta 94 | from celery.schedules import crontab 95 | from celery import Celery 96 | 97 | app = Celery('tasks', backend='redis://localhost:6379', 98 | broker='redis://localhost:6379') 99 | 100 | app.conf.update( 101 | CELERYBEAT_SCHEDULE={ 102 | 'perminute': { 103 | 'task': 'tasks.add', 104 | 'schedule': timedelta(seconds=3), 105 | 'args': (1, 1) 106 | } 107 | } 108 | ) 109 | 110 | @app.task 111 | def add(x, y): 112 | return x + y 113 | 114 | @app.task 115 | def sub(x, y): 116 | return x - y 117 | ``` 118 | 119 | when you want to add a new task dynamically, you can try this code such like in `__main__`: 120 | 121 | ```python 122 | #!/usr/bin/env python 123 | # encoding: utf-8 124 | from datetime import timedelta 125 | from celery import Celery 126 | from redisbeat.scheduler import RedisScheduler 127 | 128 | 129 | app = Celery('tasks', backend='redis://localhost:6379', 130 | broker='redis://localhost:6379') 131 | 132 | app.conf.update( 133 | CELERYBEAT_SCHEDULE={ 134 | 'perminute': { 135 | 'task': 'tasks.add', 136 | 'schedule': timedelta(seconds=3), 137 | 'args': (1, 1) 138 | } 139 | } 140 | ) 141 | 142 | @app.task 143 | def add(x, y): 144 | return x + y 145 | 146 | @app.task 147 | def sub(x, y): 148 | return x - y 149 | 150 | if __name__ == "__main__": 151 | schduler = RedisScheduler(app=app) 152 | schduler.add(**{ 153 | 'name': 'sub-perminute', 154 | 
'task': 'tasks.sub', 155 | 'schedule': timedelta(seconds=3), 156 | 'args': (1, 1) 157 | }) 158 | ``` 159 | 160 | Adding a new task takes just two steps: 161 | 162 | 1. Initialize a `RedisScheduler` object from the Celery app 163 | 2. Add new tasks via the `RedisScheduler` object 164 | 165 | 166 | Or you can define settings in your celery configuration file, similar to other configurations. 167 | 168 | ```python 169 | CELERY_BEAT_SCHEDULER = 'redisbeat.RedisScheduler' 170 | CELERY_REDIS_SCHEDULER_URL = 'redis://localhost:6379/1' 171 | CELERY_REDIS_SCHEDULER_KEY = 'celery:beat:order_tasks' 172 | CELERYBEAT_SCHEDULE = { 173 | 'perminute': { 174 | 'task': 'tasks.add', 175 | 'schedule': timedelta(seconds=3), 176 | 'args': (1, 1) 177 | } 178 | } 179 | ``` 180 | 181 | ### Multiple node support 182 | 183 | When running `redisbeat` in a multi-node deployment, it uses a Redis lock to prevent the same task from being executed multiple times. 184 | 185 | ```python 186 | CELERY_REDIS_MULTI_NODE_MODE = True 187 | CELERY_REDIS_SCHEDULER_LOCK_TTL = 30 188 | ``` 189 | 190 | This is an experimental feature; to use `redisbeat` in a production environment, set `CELERY_REDIS_MULTI_NODE_MODE = False` and `redisbeat` will not use this feature. 191 | 192 | 193 | -------------------------------------------------------------------------------- /example/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.6-alpine 2 | WORKDIR /usr/src/app 3 | 4 | RUN apk add --no-cache --virtual \ 5 | .build-deps \ 6 | gcc \ 7 | g++ \ 8 | musl-dev \ 9 | python3-dev 10 | 11 | RUN rm -rf /tmp/* /var/cache/apk/* 12 | 13 | COPY ./requirements.txt /usr/src/app 14 | 15 | RUN pip install -r requirements.txt 16 | 17 | COPY . 
/usr/src/app 18 | -------------------------------------------------------------------------------- /example/add_task.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # encoding: utf-8 3 | from datetime import timedelta 4 | import logging 5 | import sys 6 | 7 | from redisbeat.scheduler import RedisScheduler 8 | 9 | from tasks import app 10 | 11 | root = logging.getLogger() 12 | root.setLevel(logging.DEBUG) 13 | 14 | handler = logging.StreamHandler(sys.stdout) 15 | handler.setLevel(logging.DEBUG) 16 | formatter = logging.Formatter( 17 | '%(asctime)s - %(name)s - %(levelname)s - %(message)s') 18 | handler.setFormatter(formatter) 19 | root.addHandler(handler) 20 | 21 | if __name__ == "__main__": 22 | schduler = RedisScheduler(app=app, skip_init=True) 23 | schduler.add(**{ 24 | 'name': 'sub-perminute', 25 | 'task': 'tasks.sub', 26 | 'schedule': timedelta(seconds=3), 27 | 'args': (1, 1) 28 | }) 29 | -------------------------------------------------------------------------------- /example/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | 3 | services: 4 | 5 | worker: 6 | container_name: worker 7 | hostname: worker 8 | restart: unless-stopped 9 | build: 10 | context: . 11 | command: [ "celery", "worker", "-A", "tasks", "-l", "INFO" ] 12 | depends_on: 13 | - redis 14 | stdin_open: true 15 | tty: true 16 | 17 | beat: 18 | container_name: beat 19 | hostname: beat 20 | restart: unless-stopped 21 | build: 22 | context: . 
23 | command: 24 | [ 25 | "celery", 26 | "beat", 27 | "-A", 28 | "tasks", 29 | "-S", 30 | "redisbeat.RedisScheduler", 31 | "-l", 32 | "INFO" 33 | ] 34 | depends_on: 35 | - worker 36 | - redis 37 | stdin_open: true 38 | tty: true 39 | 40 | redis: 41 | container_name: redis 42 | hostname: redis 43 | restart: unless-stopped 44 | image: redis 45 | -------------------------------------------------------------------------------- /example/rem_task.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # encoding: utf-8 3 | from redisbeat.scheduler import RedisScheduler 4 | 5 | from tasks import app 6 | 7 | 8 | if __name__ == "__main__": 9 | schduler = RedisScheduler(app=app, skip_init=True) 10 | result = schduler.remove('sub-perminute') 11 | print("rem result: ", result) 12 | -------------------------------------------------------------------------------- /example/requirements.txt: -------------------------------------------------------------------------------- 1 | amqp==2.5.1 2 | billiard==3.6.1.0 3 | celery==4.3.0 4 | kombu==4.6.3 5 | pytz==2019.2 6 | redis==3.3.8 7 | jsonpickle==1.4.2 8 | redisbeat==1.2.6 9 | vine==1.3.0 10 | wheel==0.24.0 11 | -------------------------------------------------------------------------------- /example/tasks.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # encoding: utf-8 3 | from datetime import timedelta 4 | import os 5 | 6 | from celery import Celery 7 | 8 | 9 | redis_url = 'redis://redis:6379' 10 | hostname = os.getenv("HOSTNAME") 11 | if hostname != "beat" and hostname != "worker": 12 | redis_url = 'redis://localhost:6379' 13 | 14 | 15 | app = Celery('tasks', backend=redis_url, broker=redis_url) 16 | 17 | app.conf.update(CELERY_REDIS_SCHEDULER_URL = redis_url) 18 | 19 | if hostname == "beat": 20 | app.conf.update( 21 | CELERYBEAT_SCHEDULE={ 22 | 'perminute': { 23 | 'task': 'tasks.add', 24 | 'schedule': 
timedelta(seconds=3), 25 | 'args': (1, 1) 26 | } 27 | } 28 | ) 29 | 30 | 31 | @app.task 32 | def add(x, y): 33 | return x + y 34 | 35 | 36 | @app.task 37 | def sub(x, y): 38 | return x - y 39 | 40 | -------------------------------------------------------------------------------- /install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | python setup.py install --record files.txt 4 | -------------------------------------------------------------------------------- /redisbeat/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | from .scheduler import RedisScheduler 4 | 5 | __all__ = [ 6 | 'RedisScheduler', 7 | ] -------------------------------------------------------------------------------- /redisbeat/constants.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # Copyright 2017-2020 Liqiang Lau 4 | """ 5 | @desc: ` 6 | @author: liqiang liu 7 | @contact: liqiangliu443@gmail.com 8 | @site: https://liqiang.io 9 | @created at: 2020/3/7 10 | """ 11 | 12 | INIT_POLICY_RESET = "RESET" 13 | INIT_POLICY_DEFAULT = "DEFAULT" 14 | INIT_POLICY_IMMEDIATELY = "IMMEDIATELY" 15 | INIT_POLICY_FAST_FORWARD = "FAST_FORWARD" 16 | 17 | INIT_POLICIES = [ 18 | INIT_POLICY_RESET, 19 | INIT_POLICY_DEFAULT, 20 | INIT_POLICY_IMMEDIATELY, 21 | INIT_POLICY_FAST_FORWARD, 22 | ] -------------------------------------------------------------------------------- /redisbeat/scheduler.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Copyright 2014 Kong Luoxing 3 | 4 | # Licensed under the Apache License, Version 2.0 (the 'License'); you may not 5 | # use this file except in compliance with the License. 
You may obtain a copy 6 | # of the License at http://www.apache.org/licenses/LICENSE-2.0 7 | ################################################################ 8 | # Copyright 2015-2024 Liqiang Liu 9 | 10 | # Licensed under the Apache License, Version 2.0 (the 'License'); you may not 11 | # use this file except in compliance with the License. You may obtain a copy 12 | # of the License at http://www.apache.org/licenses/LICENSE-2.0 13 | 14 | from functools import partial 15 | import jsonpickle 16 | import sys 17 | from time import mktime 18 | import traceback 19 | 20 | from celery.beat import Scheduler 21 | from celery import current_app 22 | from celery.utils.log import get_logger 23 | from redis import StrictRedis 24 | from redis.sentinel import Sentinel 25 | from redis.exceptions import LockError 26 | try: 27 | import urllib.parse as urlparse 28 | except ImportError: 29 | import urlparse 30 | 31 | logger = get_logger(__name__) 32 | debug, linfo, error, warning = (logger.debug, logger.info, logger.error, 33 | logger.warning) 34 | try: 35 | MAXINT = sys.maxint 36 | except AttributeError: 37 | # python3 38 | MAXINT = sys.maxsize 39 | 40 | 41 | class RedisScheduler(Scheduler): 42 | def __init__(self, *args, **kwargs): 43 | app = kwargs['app'] 44 | self.skip_init = kwargs.get('skip_init', False) 45 | self.key = app.conf.get("CELERY_REDIS_SCHEDULER_KEY", 46 | "celery:beat:order_tasks") 47 | self.max_interval = 2 # default max interval is 2 seconds 48 | self.schedule_url = app.conf.get("CELERY_REDIS_SCHEDULER_URL", 49 | "redis://localhost:6379") 50 | # using sentinels 51 | # supports 'sentinel://:pass@host:port/db 52 | if self.schedule_url.startswith('sentinel://'): 53 | self.broker_transport_options = app.conf.get( 54 | "CELERY_BROKER_TRANSPORT_OPTIONS", {"master_name": "mymaster"}) 55 | self.rdb = self.sentinel_connect( 56 | self.broker_transport_options['master_name']) 57 | else: 58 | self.rdb = StrictRedis.from_url(self.schedule_url) 59 | Scheduler.__init__(self, 
*args, **kwargs) 60 | app.add_task = partial(self.add, self) 61 | 62 | self.multi_node = app.conf.get("CELERY_REDIS_MULTI_NODE_MODE", False) 63 | # how long we should hold on to the redis lock in seconds 64 | if self.multi_node: 65 | self.lock_ttl = current_app.conf.get( 66 | "CELERY_REDIS_SCHEDULER_LOCK_TTL", 30) 67 | self._lock_acquired = False 68 | self._lock = self.rdb.lock( 69 | 'celery:beat:task_lock', timeout=self.lock_ttl) 70 | self._lock_acquired = self._lock.acquire(blocking=False) 71 | 72 | def _remove_db(self): 73 | linfo("remove db now") 74 | self.rdb.delete(self.key) 75 | 76 | def _when(self, entry, next_time_to_run): 77 | return mktime(entry.schedule.now().timetuple()) + (self.adjust(next_time_to_run) or 0) 78 | 79 | def setup_schedule(self): 80 | debug("setup schedule, skip_init: %s", self.skip_init) 81 | if self.skip_init: 82 | return 83 | # init entries 84 | self.merge_inplace(self.app.conf.CELERYBEAT_SCHEDULE) 85 | tasks = [jsonpickle.decode(entry) 86 | for entry in self.rdb.zrange(self.key, 0, -1)] 87 | linfo('Current schedule:\n' + '\n'.join( 88 | str('task: ' + entry.task + '; each: ' + repr(entry.schedule)) 89 | for entry in tasks)) 90 | 91 | def merge_inplace(self, tasks): 92 | old_entries = self.rdb.zrangebyscore( 93 | self.key, 0, MAXINT, withscores=True) 94 | old_entries_dict = dict({}) 95 | for task, score in old_entries: 96 | if not task: 97 | break 98 | debug("ready to load old_entries: %s", str(task)) 99 | entry = jsonpickle.decode(task) 100 | old_entries_dict[entry.name] = (entry, score) 101 | debug("old_entries: %s", old_entries_dict) 102 | 103 | self.rdb.delete(self.key) 104 | 105 | for key in tasks: 106 | last_run_at = 0 107 | e = self.Entry(**dict(tasks[key], name=key, app=self.app)) 108 | if key in old_entries_dict: 109 | # replace entry and remain old score 110 | last_run_at = old_entries_dict[key][1] 111 | del old_entries_dict[key] 112 | self.rdb.zadd(self.key, {jsonpickle.encode(e): min( 113 | last_run_at, self._when(e, 
e.is_due()[1]) or 0)}) 114 | debug("old_entries: %s", old_entries_dict) 115 | for key, tasks in old_entries_dict.items(): 116 | debug("key: %s", key) 117 | debug("tasks: %s", tasks) 118 | debug("zadd: %s", self.rdb.zadd( 119 | self.key, {jsonpickle.encode(tasks[0]): tasks[1]})) 120 | debug(self.rdb.zrange(self.key, 0, -1)) 121 | 122 | def is_due(self, entry): 123 | return entry.is_due() 124 | 125 | def adjust(self, n, drift=-0.010): 126 | if n and n > 0: 127 | return n + drift 128 | return n 129 | 130 | def add(self, **kwargs): 131 | e = self.Entry(app=current_app, **kwargs) 132 | self.rdb.zadd(self.key, {jsonpickle.encode( 133 | e): self._when(e, e.is_due()[1]) or 0}) 134 | return True 135 | 136 | def remove(self, task_key): 137 | tasks = self.rdb.zrange(self.key, 0, -1) or [] 138 | for idx, task in enumerate(tasks): 139 | entry = jsonpickle.decode(task) 140 | if entry.name == task_key: 141 | self.rdb.zremrangebyrank(self.key, idx, idx) 142 | return True 143 | else: 144 | return False 145 | 146 | def list(self): 147 | return [jsonpickle.decode(entry) for entry in self.rdb.zrange(self.key, 0, -1)] 148 | 149 | def get(self, task_key): 150 | tasks = self.rdb.zrange(self.key, 0, -1) or [] 151 | for idx, task in enumerate(tasks): 152 | entry = jsonpickle.decode(task) 153 | if entry.name == task_key: 154 | return entry 155 | else: 156 | return None 157 | 158 | def tick(self): 159 | tasks = self.rdb.zrangebyscore( 160 | self.key, 0, 161 | self.adjust(mktime(self.app.now().timetuple()), drift=0.010), 162 | withscores=True) or [] 163 | 164 | next_times = [self.max_interval, ] 165 | 166 | for task, score in tasks: 167 | entry = jsonpickle.decode(task) 168 | is_due, next_time_to_run = self.is_due(entry) 169 | 170 | next_times.append(next_time_to_run) 171 | if is_due: 172 | next_entry = self.reserve(entry) 173 | try: 174 | linfo("add task entry: %s to publisher", entry.name) 175 | result = self.apply_async(entry) 176 | except Exception as exc: 177 | error('Message Error: 
%s\n%s', 178 | exc, traceback.format_stack(), exc_info=True) 179 | else: 180 | debug('%s sent. id->%s', entry.task, result.id) 181 | self.rdb.zrem(self.key, task) 182 | self.rdb.zadd(self.key, {jsonpickle.encode( 183 | next_entry): self._when(next_entry, next_time_to_run) or 0}) 184 | 185 | next_task = self.rdb.zrangebyscore( 186 | self.key, 0, MAXINT, withscores=True, num=1, start=0) 187 | if not next_task: 188 | linfo("no next task found") 189 | return min(next_times) 190 | entry = jsonpickle.decode(next_task[0][0]) 191 | next_times.append(self.is_due(entry)[1]) 192 | 193 | return min(next_times) 194 | 195 | def close(self): 196 | # it would be call after cycle end 197 | if self.multi_node: 198 | try: 199 | self._lock.release() 200 | except LockError: 201 | pass 202 | self.sync() 203 | 204 | def sentinel_connect(self, master_name): 205 | url = urlparse.urlparse(self.schedule_url) 206 | 207 | def parse_host(s): 208 | if ':' in s: 209 | host, port = s.split(':', 1) 210 | port = int(port) 211 | else: 212 | host = s 213 | port = 26379 214 | 215 | return host, port 216 | 217 | if '@' in url.netloc: 218 | auth, hostspec = url.netloc.split('@', 1) 219 | else: 220 | auth = None 221 | hostspec = url.netloc 222 | 223 | if auth and ':' in auth: 224 | _, password = auth.split(':', 1) 225 | else: 226 | password = None 227 | path = url.path 228 | if path.startswith('/'): 229 | path = path[1:] 230 | hosts = [parse_host(s) for s in hostspec.split(',')] 231 | sentinel = Sentinel(hosts, password=password, db=path) 232 | master = sentinel.master_for(master_name) 233 | return master 234 | 235 | @property 236 | def info(self): 237 | # return infomation about Schedule 238 | return ' . 
db -> {self.schedule_url}, key -> {self.key}'.format(self=self) -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import sys 3 | try: 4 | import setuptools 5 | import setuptools.command.test 6 | from setuptools import find_packages, setup 7 | except ImportError: 8 | from distutils.core import setup, find_packages 9 | # To use a consistent encoding 10 | 11 | # Get the long description from the README file 12 | long_desc = """ 13 | Redis Scheduler For Celery, Support Add Task Dynamic 14 | See more @ https//liqiang.io/opensources/redisbeat 15 | """ 16 | 17 | 18 | class pytest(setuptools.command.test.test): 19 | user_options = [('pytest-args=', 'a', 'Arguments to pass to py.test')] 20 | 21 | def initialize_options(self): 22 | setuptools.command.test.test.initialize_options(self) 23 | self.pytest_args = [] 24 | 25 | def run_tests(self): 26 | import pytest as _pytest 27 | sys.exit(_pytest.main(self.pytest_args)) 28 | 29 | 30 | 31 | 32 | setup( 33 | name="redisbeat", 34 | 35 | # Versions should comply with PEP440. For a discussion on single-sourcing 36 | # the version across setup.py and the project code, see 37 | # https://packaging.python.org/en/latest/single_source_version.html 38 | version="1.2.7", 39 | 40 | description="Redis Scheduler For Celery, Support Add Task Dynamic", 41 | long_description="", 42 | 43 | # Author details 44 | author="Liqiang Liu", 45 | author_email="liqianglau@outlook.com", 46 | # The project's main homepage. 47 | url="https://liqiang.io/opensources/redisbeat", 48 | 49 | # Choose your license 50 | license='MIT', 51 | 52 | cmdclass={'test': pytest}, 53 | 54 | # See https://pypi.python.org/pypi?%3Aaction=list_classifiers 55 | classifiers=[ 56 | # How mature is this project? 
Common values are 57 | # 3 - Alpha 58 | # 4 - Beta 59 | # 5 - Production/Stable 60 | 'Development Status :: 3 - Alpha', 61 | 62 | # Indicate who your project is intended for 63 | 'Intended Audience :: Developers', 64 | 'Topic :: Software Development :: Libraries', 65 | 66 | # Pick your license as you wish (should match "license" above) 67 | 'License :: OSI Approved :: MIT License', 68 | 69 | # Specify the Python versions you support here. In particular, ensure 70 | # that you indicate whether you support Python 2, Python 3 or both. 71 | 'Programming Language :: Python :: 2', 72 | 'Programming Language :: Python :: 2.7', 73 | 'Programming Language :: Python :: 3', 74 | 'Programming Language :: Python :: 3.5', 75 | 'Programming Language :: Python :: 3.6', 76 | 'Programming Language :: Python :: 3.9', 77 | ], 78 | 79 | # What does your project relate to? 80 | keywords="celery scheduler redis beat", 81 | 82 | # You can just specify the packages manually here if your project is 83 | # simple. Or you can use find_packages(). 84 | packages=find_packages(exclude=['contrib', 'docs', 'tests']), 85 | 86 | # Alternatively, if you want to distribute just a my_module.py, uncomment 87 | # this: 88 | # py_modules=["my_module"], 89 | 90 | # List run-time dependencies here. These will be installed by pip when 91 | # your project is installed. 
For an analysis of "install_requires" vs pip's 92 | # requirements files see: 93 | # https://packaging.python.org/en/latest/requirements.html 94 | install_requires=[ 95 | 'jsonpickle==3.0.0', 96 | ] 97 | ) 98 | -------------------------------------------------------------------------------- /t/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/liuliqiang/redisbeat/713184034706de9d0915736e490e59e880a00f26/t/__init__.py -------------------------------------------------------------------------------- /t/unit/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/liuliqiang/redisbeat/713184034706de9d0915736e490e59e880a00f26/t/unit/__init__.py -------------------------------------------------------------------------------- /t/unit/test_scheduler.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # encoding: utf-8 3 | from datetime import timedelta 4 | import os 5 | import unittest 6 | 7 | from celery import Celery 8 | from redis import StrictRedis 9 | 10 | from redisbeat import RedisScheduler 11 | 12 | 13 | redis_key = "celery:beat:order_tasks" 14 | min_redis_score = 0 15 | max_redis_score = 10000000000000000 16 | 17 | class TestStringMethods(unittest.TestCase): 18 | def setUp(self) -> None: 19 | super().setUp() 20 | self.redis_url = 'redis://localhost:6379' 21 | self.redis_cli = StrictRedis.from_url(self.redis_url) 22 | self.redis_cli.zpopmin(redis_key, count=1000) 23 | 24 | def test_redisbeat(self): 25 | app = Celery('tasks', backend=self.redis_url, broker=self.redis_url) 26 | 27 | app.conf.update(CELERY_REDIS_SCHEDULER_URL = self.redis_url) 28 | app.conf.update( 29 | CELERYBEAT_SCHEDULE={ 30 | 'perminute': { 31 | 'task': 'tasks.add', 32 | 'schedule': timedelta(seconds=3), 33 | 'args': (1, 1) 34 | } 35 | } 36 | ) 37 | 38 | RedisScheduler(app=app) 39 | 
results = self.redis_cli.zrangebyscore(redis_key, min_redis_score, max_redis_score, withscores=True) 40 | for result in results: 41 | print(result) 42 | self.assertEqual(len(results), 1) 43 | -------------------------------------------------------------------------------- /uninstall.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cat files.txt | xargs rm -rf 4 | --------------------------------------------------------------------------------