├── tests └── __init__.py ├── changelog.md ├── screenshot └── sqlite.png ├── celery_sqlalchemy_scheduler ├── __init__.py ├── session.py ├── tzcrontab.py ├── models.py └── schedulers.py ├── Pipfile ├── LICENSE ├── .gitignore ├── setup.py ├── examples └── base │ └── tasks.py ├── README-zh.md ├── README.md └── Pipfile.lock /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /changelog.md: -------------------------------------------------------------------------------- 1 | # Change history 2 | 3 | ## v0.3.0 4 | 5 | - Support Celery 5.0.1 6 | -------------------------------------------------------------------------------- /screenshot/sqlite.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AngelLiang/celery-sqlalchemy-scheduler/HEAD/screenshot/sqlite.png -------------------------------------------------------------------------------- /celery_sqlalchemy_scheduler/__init__.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # flake8:noqa 3 | 4 | from .session import SessionManager 5 | from .models import ( 6 | PeriodicTask, PeriodicTaskChanged, 7 | CrontabSchedule, IntervalSchedule, 8 | SolarSchedule, 9 | ) 10 | from .schedulers import DatabaseScheduler 11 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | name = "pypi" 3 | url = "https://pypi.org/simple" 4 | verify_ssl = true 5 | 6 | [dev-packages] 7 | autopep8 = "*" 8 | flake8 = "*" 9 | twine = "*" 10 | 11 | [packages] 12 | celery = ">=4.2" 13 | python-dotenv = "*" 14 | sqlalchemy = ">=1.3.0" 15 | mysql-connector = "*" 16 | celery-sqlalchemy-scheduler = {editable = true,path = "."} 17 | 18 | [requires] 19 | python_version = 
"3" 20 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 AngelLiang 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .nox/ 42 | .coverage 43 | .coverage.* 44 | .cache 45 | nosetests.xml 46 | coverage.xml 47 | *.cover 48 | .hypothesis/ 49 | .pytest_cache/ 50 | 51 | # Translations 52 | *.mo 53 | *.pot 54 | 55 | # Django stuff: 56 | *.log 57 | local_settings.py 58 | db.sqlite3 59 | 60 | # Flask stuff: 61 | instance/ 62 | .webassets-cache 63 | 64 | # Scrapy stuff: 65 | .scrapy 66 | 67 | # Sphinx documentation 68 | docs/_build/ 69 | 70 | # PyBuilder 71 | target/ 72 | 73 | # Jupyter Notebook 74 | .ipynb_checkpoints 75 | 76 | # IPython 77 | profile_default/ 78 | ipython_config.py 79 | 80 | # pyenv 81 | .python-version 82 | 83 | # celery beat schedule file 84 | celerybeat-schedule 85 | 86 | # SageMath parsed files 87 | *.sage.py 88 | 89 | # Environments 90 | .env 91 | .venv 92 | env/ 93 | venv/ 94 | ENV/ 95 | env.bak/ 96 | venv.bak/ 97 | 98 | # Spyder project settings 99 | .spyderproject 100 | .spyproject 101 | 102 | # Rope project settings 103 | .ropeproject 104 | 105 | # mkdocs documentation 106 | /site 107 | 108 | # mypy 109 | .mypy_cache/ 110 | .dmypy.json 111 | dmypy.json 112 | 
# coding=utf-8
"""SQLAlchemy session handling for the scheduler."""

from contextlib import contextmanager

from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import NullPool

from kombu.utils.compat import register_after_fork

ModelBase = declarative_base()


@contextmanager
def session_cleanup(session):
    """Roll back *session* on any error and always close it afterwards."""
    try:
        yield
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()


def _after_fork_cleanup_session(session):
    # Module-level function so ``register_after_fork`` gets a picklable hook.
    session._after_fork()


class SessionManager(object):
    """Create and cache SQLAlchemy engines and session factories per db uri."""

    def __init__(self):
        self._engines = {}
        self._sessions = {}
        self.forked = False
        self.prepared = False
        if register_after_fork is not None:
            register_after_fork(self, _after_fork_cleanup_session)

    def _after_fork(self):
        # Called in the child process; from now on engines may be cached.
        self.forked = True

    def get_engine(self, dburi, **kwargs):
        """Return an engine for *dburi*, cached once the process has forked.

        Before forking, a fresh ``NullPool`` engine is handed out every call
        so no pooled connections are shared across the fork boundary.
        """
        if not self.forked:
            return create_engine(dburi, poolclass=NullPool)
        try:
            return self._engines[dburi]
        except KeyError:
            engine = create_engine(dburi, **kwargs)
            self._engines[dburi] = engine
            return engine

    def create_session(self, dburi, short_lived_sessions=False, **kwargs):
        """Return ``(engine, session_factory)`` for *dburi*.

        With ``short_lived_sessions`` a new factory is built on every call;
        otherwise (post-fork) the factory is cached per uri.
        """
        engine = self.get_engine(dburi, **kwargs)
        if not self.forked:
            return engine, sessionmaker(bind=engine)
        if short_lived_sessions or dburi not in self._sessions:
            self._sessions[dburi] = sessionmaker(bind=engine)
        return engine, self._sessions[dburi]

    def prepare_models(self, engine):
        """Create all scheduler tables; runs at most once per manager."""
        if not self.prepared:
            ModelBase.metadata.create_all(engine)
            self.prepared = True

    def session_factory(self, dburi, **kwargs):
        """Return a ready-to-use session with the schema created."""
        engine, Session = self.create_session(dburi, **kwargs)
        self.prepare_models(engine)
        return Session()
# coding=utf-8
"""Timezone aware Cron schedule implementation."""

import pytz
from collections import namedtuple
import datetime as dt

from celery import schedules


schedstate = namedtuple('schedstate', ('is_due', 'next'))


class TzAwareCrontab(schedules.crontab):
    """Timezone Aware Crontab.

    Behaves like :class:`celery.schedules.crontab` but evaluates "now"
    in the timezone given by *tz* instead of the app's timezone.
    """

    def __init__(
        self, minute='*', hour='*', day_of_week='*',
        day_of_month='*', month_of_year='*', tz=pytz.utc, app=None
    ):
        """Overwrite Crontab constructor to include a timezone argument."""
        self.tz = tz

        nowfun = self.nowfunc

        super(TzAwareCrontab, self).__init__(
            minute=minute, hour=hour, day_of_week=day_of_week,
            day_of_month=day_of_month, month_of_year=month_of_year,
            nowfun=nowfun, app=app
        )

    def nowfunc(self):
        # Current time as an aware datetime converted into the schedule tz.
        return self.tz.normalize(
            pytz.utc.localize(dt.datetime.utcnow())
        )

    def is_due(self, last_run_at):
        """Calculate when the next run will take place.

        Return tuple of (is_due, next_time_to_check).
        The last_run_at argument needs to be timezone aware.
        """
        # convert last_run_at to the schedule timezone
        last_run_at = last_run_at.astimezone(self.tz)

        rem_delta = self.remaining_estimate(last_run_at)
        rem = max(rem_delta.total_seconds(), 0)
        due = rem == 0
        if due:
            rem_delta = self.remaining_estimate(self.now())
            rem = max(rem_delta.total_seconds(), 0)
        return schedstate(due, rem)

    def __repr__(self):
        # BUG FIX: this previously returned '"""""".format(self)', i.e. the
        # empty string for every instance.  Restore an informative repr.
        return (
            '<TzAwareCrontab: {0._orig_minute} {0._orig_hour} '
            '{0._orig_day_of_week} {0._orig_day_of_month} '
            '{0._orig_month_of_year} (m/h/dw/dm/my), {0.tz}>'
        ).format(self)

    # Needed to support pickling.
    def __reduce__(self):
        return (self.__class__, (self._orig_minute,
                                 self._orig_hour,
                                 self._orig_day_of_week,
                                 self._orig_day_of_month,
                                 self._orig_month_of_year,
                                 self.tz), None)

    def __eq__(self, other):
        if isinstance(other, schedules.crontab):
            return (other.month_of_year == self.month_of_year
                    and other.day_of_month == self.day_of_month
                    and other.day_of_week == self.day_of_week
                    and other.hour == self.hour
                    and other.minute == self.minute
                    and other.tz == self.tz)
        return NotImplemented
"""
celery-sqlalchemy-scheduler
~~~~~~~~~~~~~~
A Scheduler Based SQLalchemy For Celery.
:Copyright (c) 2018 AngelLiang
:license: MIT, see LICENSE for more details.
"""
from os import path
# ``codecs.open`` for a consistent encoding across Python versions.
from codecs import open
try:
    from setuptools import find_packages, setup
except ImportError:
    from distutils.core import setup, find_packages

basedir = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(basedir, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    name="celery_sqlalchemy_scheduler",

    # Versions should comply with PEP440. For a discussion on single-sourcing
    # the version across setup.py and the project code, see
    # https://packaging.python.org/en/latest/single_source_version.html
    version="0.3.0",
    # The project's main homepage.
    url="https://github.com/AngelLiang/celery-sqlalchemy-scheduler",
    # Choose your license
    license='MIT',

    description="A Scheduler Based SQLalchemy For Celery",
    long_description=long_description,
    long_description_content_type='text/markdown',

    platforms='any',
    # Author details
    author="AngelLiang",
    author_email='yannanxiu@126.com',
    # NOTE(fix): removed the invalid ``home_page=`` keyword — ``setup()`` has
    # no such parameter (setuptools reports "Unknown distribution option");
    # the project home page is already supplied via ``url=`` above.

    # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        # How mature is this project? Common values are
        #   3 - Alpha
        #   4 - Beta
        #   5 - Production/Stable
        'Development Status :: 3 - Alpha',

        # Indicate who your project is intended for
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Libraries',

        # Pick your license as you wish (should match "license" above)
        'License :: OSI Approved :: MIT License',

        # Specify the Python versions you support here.
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
    ],

    # What does your project relate to?
    keywords="celery scheduler sqlalchemy beat",

    # You can just specify the packages manually here if your project is
    # simple. Or you can use find_packages().
    packages=find_packages(exclude=['contrib', 'docs', 'tests']),

    # List run-time dependencies here. These will be installed by pip when
    # your project is installed. For an analysis of "install_requires" vs
    # pip's requirements files see:
    # https://packaging.python.org/en/latest/requirements.html
    install_requires=[
        'celery>=4.2',
        'sqlalchemy',
    ],
    zip_safe=False,
)
# coding=utf-8
"""
Ready::

    $ pipenv install

Run Worker::

    # console 1 , in pipenv shell
    $ pipenv shell
    $ cd examples/base

    # Celery < 5.0
    $ celery worker -A tasks:celery -l info

    # Celery >= 5.0
    $ celery -A tasks:celery worker -l info

Run Beat::

    # console 2, in pipenv shell
    $ pipenv shell
    $ cd examples/base

    # Celery < 5.0
    $ celery beat -A tasks:celery -S tasks:DatabaseScheduler -l info

    # Celery >= 5.0
    $ celery -A tasks:celery beat -S tasks:DatabaseScheduler -l info

Console 3::

    # console 3, in pipenv shell
    $ pipenv shell
    $ cd examples/base
    $ python -m doctest tasks.py


>>> import json
>>> from celery_sqlalchemy_scheduler.models import PeriodicTask, IntervalSchedule
>>> from celery_sqlalchemy_scheduler.session import SessionManager

>>> beat_dburi = 'sqlite:///schedule.db'
>>> session_manager = SessionManager()
>>> engine, Session = session_manager.create_session(beat_dburi)
>>> session = Session()

# Disable 'echo-every-3-seconds' task
>>> task = session.query(PeriodicTask).filter_by(name='echo-every-3-seconds').first()
>>> task.enabled = False
>>> session.add(task)
>>> session.commit()


>>> schedule = session.query(IntervalSchedule).filter_by(every=10, period=IntervalSchedule.SECONDS).first()
>>> if not schedule:
...     schedule = IntervalSchedule(every=10, period=IntervalSchedule.SECONDS)
...     session.add(schedule)
...     session.commit()

# Add 'add-every-10s' task
>>> task = PeriodicTask(
...     interval=schedule,
...     name='add-every-10s',
...     task='tasks.add',  # name of task.
...     args=json.dumps([1, 5])
... )
>>> session.add(task)
>>> session.commit()
>>> print('Add ' + task.name)
Add add-every-10s

>>> task.args=json.dumps([10, 2])
>>> session.add(task)
>>> session.commit()
"""
import os
import time
import platform
import datetime as dt
from datetime import timedelta

from celery import Celery
from celery import schedules

from celery_sqlalchemy_scheduler.schedulers import DatabaseScheduler  # noqa

# Load environment variables from an optional .env file; must happen before
# the os.getenv() calls below so overrides are picked up.
from dotenv import load_dotenv
dotenv_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), '.env')
if os.path.exists(dotenv_path):
    load_dotenv(dotenv_path, override=True)

# Convenient knobs for testing: create an .env file under examples/base
# and set these variables there to override the defaults.
ECHO_EVERY_MINUTE = os.getenv('ECHO_EVERY_MINUTE', '0')
ECHO_EVERY_HOUR = os.getenv('ECHO_EVERY_HOUR', '8')

if platform.system() == 'Windows':
    # Must set this environment variable on Windows for celery,
    # or else celery may not work.
    os.environ['FORKED_BY_MULTIPROCESSING'] = '1'

# rabbitmq broker / rpc result backend
backend = 'rpc://'
broker_url = 'amqp://guest:guest@127.0.0.1:5672//'


# This schedule is written back to the database, so it will be reset
# after every celery beat restart.
beat_schedule = {
    'echo-every-3-seconds': {
        'task': 'tasks.echo',
        'schedule': timedelta(seconds=3),
        'args': ('hello', ),
        'options': {
            'expires': dt.datetime.utcnow() + timedelta(seconds=10)  # right
            # 'expires': dt.datetime.now() + timedelta(seconds=30)  # error
            # 'expires': 10  # right
        }
    },
    'add-every-minutes': {
        'task': 'tasks.add',
        'schedule': schedules.crontab('*', '*', '*'),
        'args': (1, 2)
    },
    'echo-every-hours': {
        'task': 'tasks.echo',
        'schedule': schedules.crontab(ECHO_EVERY_MINUTE, '*', '*'),
        'args': ('echo-every-hours',)
    },
    'echo-every-days': {
        'task': 'tasks.echo',
        'schedule': schedules.crontab(ECHO_EVERY_MINUTE, ECHO_EVERY_HOUR, '*'),
        'args': ('echo-every-days',)
    },
}

beat_scheduler = 'celery_sqlalchemy_scheduler.schedulers:DatabaseScheduler'

beat_sync_every = 0

# The maximum number of seconds beat can sleep between checking the schedule.
# default: 0
beat_max_loop_interval = 10

# Database uri for celery_sqlalchemy_scheduler (not a built-in celery
# setting — it is read by DatabaseScheduler).
beat_dburi = 'sqlite:///schedule.db'
# beat_dburi = 'mysql+mysqlconnector://root:root@127.0.0.1/celery-schedule'

timezone = 'Asia/Shanghai'

# Prevent memory leaks: by default each worker process is recycled
# after running 10 tasks, releasing any accumulated memory.
worker_max_tasks_per_child = 10

celery = Celery('tasks',
                backend=backend,
                broker=broker_url)

config = {
    'beat_schedule': beat_schedule,
    # 'beat_scheduler': beat_scheduler,  # passed on the command line (-S), so not hard-coded here
    'beat_max_loop_interval': beat_max_loop_interval,
    'beat_dburi': beat_dburi,

    'timezone': timezone,
    'worker_max_tasks_per_child': worker_max_tasks_per_child
}

celery.conf.update(config)


@celery.task
def add(x, y):
    # Trivial demo task used by the interval/crontab schedules above.
    return x + y


@celery.task
def echo(data):
    # Trivial demo task: just print the payload in the worker log.
    print(data)


if __name__ == "__main__":
    celery.start()
    # import doctest
    # doctest.testmod()
-------------------------------------------------------------------------------- 1 | # celery-sqlalchemy-scheduler 2 | 3 | 一个基于 sqlalchemy 的 scheduler,作为 celery 定时任务的辅助工具。 4 | 5 | ## 快速开始 6 | 7 | [中文文档](/README-zh.md) [English](/README.md) 8 | 9 | ### 依赖 10 | 11 | - Python 3 12 | - celery >= 4.2 13 | - sqlalchemy 14 | 15 | 首先必须安装 `celery` 和 `sqlalchemy`, 并且`celery`应该大于等于 4.2 版本。 16 | 17 | ``` 18 | $ pip install sqlalchemy celery 19 | ``` 20 | 21 | ### 安装 22 | 23 | 通过 PyPi 安装: 24 | 25 | ``` 26 | $ pip install celery-sqlalchemy-scheduler 27 | ``` 28 | 29 | 通过 github 仓库进行安装: 30 | 31 | ``` 32 | $ git clone git@github.com:AngelLiang/celery-sqlalchemy-scheduler.git 33 | $ cd celery-sqlalchemy-scheduler 34 | $ python setup.py install 35 | ``` 36 | 37 | ## 使用示例 38 | 39 | 安装`celery_sqlalchemy_scheduler`之后,你可以查看`examples`目录下的代码: 40 | 41 | 1. 启动 celery worker: 42 | 43 | ``` 44 | $ celery worker -A tasks -l info 45 | ``` 46 | 47 | 2. 使用`DatabaseScheduler`作为 scheduler 启动 celery beat: 48 | 49 | ``` 50 | $ celery beat -A tasks -S celery_sqlalchemy_scheduler.schedulers:DatabaseScheduler -l info 51 | ``` 52 | 53 | ## 使用说明 54 | 55 | beat 启动之后,默认会在当前目录下生成名称为`schedule.db`的 sqlite 数据库。Windows 下可以使用 SQLiteStudio.exe 工具打开查看里面的数据。 56 | 57 | ![sqlite](screenshot/sqlite.png) 58 | 59 | ### 数据库同步 scheduler 到 beat 60 | 61 | 当需要更新 scheduler,只需要修改`schedule.db`相关数据即可。修改好数据库的 scheduler 后,`celery_sqlalchemy_scheduler`并不会马上同步数据库的数据到 beat,我们最后还需要修改`celery_periodic_task_changed`表的第一条数据,只需要把`last_update`字段更新到最新的时间即可。当 beat 在下一个“心跳”之后,就会同步数据库的数据到 beat。 62 | 63 | ## 配置数据库 64 | 65 | 在配置 Celery 的时候,可以设置 sqlalchemy 数据库的路径,示例如下: 66 | 67 | ```Python 68 | from celery import Celery 69 | 70 | celery = Celery('tasks') 71 | 72 | beat_dburi = 'sqlite:///schedule.db' 73 | 74 | celery.conf.update( 75 | {'beat_dburi': beat_dburi} 76 | ) 77 | ``` 78 | 79 | 当然,你可以改为使用 MySQL 或 PostgreSQL: 80 | 81 | ```Python 82 | # MySQL: `pip install mysql-connector` 83 | beat_dburi = 
'mysql+mysqlconnector://root:root@127.0.0.1:3306/celery-schedule' 84 | 85 | # PostgreSQL: `pip install psycopg2` 86 | beat_dburi = 'postgresql+psycopg2://postgres:postgres@127.0.0.1:5432/celery-schedule' 87 | ``` 88 | 89 | ## 示例代码一 90 | 91 | 可以查看 `examples/base/tasks.py` 代码获取相关细节说明 92 | 93 | 相关讨论:https://github.com/AngelLiang/celery-sqlalchemy-scheduler/issues/15#issuecomment-625624088 94 | 95 | 开一个console,启动 Celery Worker 96 | 97 | $ pipenv shell 98 | $ cd examples/base 99 | 100 | # Celery < 5.0 101 | $ celery worker -A tasks:celery -l info 102 | 103 | # Celery >= 5.0 104 | $ celery -A tasks:celery worker -l info 105 | 106 | 开第二个console,启动 Celery Beat 107 | 108 | $ pipenv shell 109 | $ cd examples/base 110 | 111 | # Celery < 5.0 112 | $ celery beat -A tasks:celery -S tasks:DatabaseScheduler -l info 113 | 114 | # Celery >= 5.0 115 | $ celery -A tasks:celery beat -S tasks:DatabaseScheduler -l info 116 | 117 | ## 示例代码二 118 | 119 | ### `examples/base/tasks.py` 120 | 121 | ```python 122 | # coding=utf-8 123 | """ 124 | Ready:: 125 | 126 | $ pipenv install 127 | $ pipenv shell 128 | $ python setup.py install 129 | 130 | Run Worker:: 131 | 132 | $ cd examples/base 133 | $ celery worker -A tasks:celery -l info 134 | 135 | Run Beat:: 136 | 137 | $ cd examples/base 138 | $ celery beat -A tasks:celery -S tasks:DatabaseScheduler -l info 139 | 140 | """ 141 | import os 142 | import platform 143 | from datetime import timedelta 144 | from celery import Celery 145 | from celery import schedules 146 | 147 | from celery_sqlalchemy_scheduler.schedulers import DatabaseScheduler # noqa 148 | 149 | if platform.system() == 'Windows': 150 | # Celery在Windows环境下运行需要设置这个变量,否则调用任务会报错 151 | os.environ['FORKED_BY_MULTIPROCESSING'] = '1' 152 | 153 | backend = 'rpc://' 154 | broker_url = 'amqp://guest:guest@localhost:5672//' 155 | 156 | # 如果数据库修改了下面的schedule,beat重启后数据库会被下面的配置覆盖 157 | beat_schedule = { 158 | 'echo-every-3-seconds': { 159 | 'task': 'tasks.echo', 160 | 'schedule': 
timedelta(seconds=3), 161 | 'args': ('hello', ) 162 | }, 163 | 'add-every-minutes': { 164 | 'task': 'tasks.add', 165 | 'schedule': schedules.crontab('*', '*', '*'), 166 | 'args': (1, 2) 167 | }, 168 | } 169 | 170 | beat_scheduler = 'celery_sqlalchemy_scheduler.schedulers:DatabaseScheduler' 171 | 172 | beat_sync_every = 0 173 | 174 | # The maximum number of seconds beat can sleep between checking the schedule. 175 | # default: 0 176 | beat_max_loop_interval = 10 177 | 178 | # 非celery和beat的配置,配置beat_dburi数据库路径 179 | beat_dburi = 'sqlite:///schedule.db' 180 | # OR 181 | # beat_dburi = 'mysql+mysqlconnector://root:root@127.0.0.1/celery-schedule' 182 | 183 | # 配置时区 184 | timezone = 'Asia/Shanghai' 185 | 186 | # 默认每个worker跑完10个任务后,自我销毁程序重建来释放内存 187 | # 防止内存泄漏 188 | worker_max_tasks_per_child = 10 189 | 190 | celery = Celery('tasks', 191 | backend=backend, 192 | broker=broker_url) 193 | 194 | config = dict( 195 | beat_schedule=beat_schedule, 196 | beat_scheduler=beat_scheduler, 197 | beat_max_loop_interval=beat_max_loop_interval, 198 | beat_dburi=beat_dburi, 199 | 200 | timezone=timezone, 201 | worker_max_tasks_per_child=worker_max_tasks_per_child 202 | ) 203 | 204 | celery.conf.update(config) 205 | 206 | 207 | @celery.task 208 | def add(x, y): 209 | return x + y 210 | 211 | 212 | @celery.task 213 | def echo(data): 214 | print(data) 215 | 216 | 217 | if __name__ == "__main__": 218 | celery.start() 219 | 220 | ``` 221 | 222 | ## 参考 223 | 224 | 本工具主要参考了以下资料和源码: 225 | 226 | - [django-celery-beat](https://github.com/celery/django-celery-beat) 227 | - [celerybeatredis](https://github.com/liuliqiang/celerybeatredis) 228 | - [celery](https://github.com/celery/celery) 229 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # celery-sqlalchemy-scheduler 2 | 3 | A Scheduler Based Sqlalchemy for Celery. 
4 | 5 | > NOTE: At first I developed this project for flask with celery to change scheduler from database, like [django-celery-beat](https://github.com/celery/django-celery-beat) for django. And now I maybe haven't time to develop for new feature. No new feature develop plan for it. Just fix bugs. If someone found some bugs, welcome to issue or PR. Thank you for your attention. 6 | 7 | ## Getting Started 8 | 9 | [English](/README.md) [中文文档](/README-zh.md) 10 | 11 | ### Prerequisites 12 | 13 | - Python 3 14 | - celery >= 4.2 15 | - sqlalchemy 16 | 17 | First you must install `celery` and `sqlalchemy`, and `celery` should be >=4.2.0. 18 | 19 | ``` 20 | $ pip install sqlalchemy celery 21 | ``` 22 | 23 | ### Installing 24 | 25 | Install from PyPi: 26 | 27 | ``` 28 | $ pip install celery-sqlalchemy-scheduler 29 | ``` 30 | 31 | Install from source by cloning this repository: 32 | 33 | ``` 34 | $ git clone git@github.com:AngelLiang/celery-sqlalchemy-scheduler.git 35 | $ cd celery-sqlalchemy-scheduler 36 | $ python setup.py install 37 | ``` 38 | 39 | ## Usage 40 | 41 | After you have installed `celery_sqlalchemy_scheduler`, you can easily start with following steps: 42 | 43 | This is a demo for exmaple, you can check the code in `examples` directory 44 | 45 | 1. start celery worker 46 | 47 | ``` 48 | $ celery worker -A tasks -l info 49 | ``` 50 | 51 | 2. start the celery beat with `DatabaseScheduler` as scheduler: 52 | 53 | ``` 54 | $ celery beat -A tasks -S celery_sqlalchemy_scheduler.schedulers:DatabaseScheduler -l info 55 | ``` 56 | 57 | ## Description 58 | 59 | After the celery beat is started, by default it create a sqlite database(`schedule.db`) in current folder. You can use `SQLiteStudio.exe` to inspect it. 60 | 61 | ![sqlite](screenshot/sqlite.png) 62 | 63 | When you want to update scheduler, you can update the data in `schedule.db`. But `celery_sqlalchemy_scheduler` don't update the scheduler immediately. 
Then you should update the `last_update` field of the first row in the `celery_periodic_task_changed` table to the current datetime. Finally, celery beat will reload the schedule at its next wake-up time. 64 | 65 | ### Database Configuration 66 | 67 | You can configure the sqlalchemy db uri when you configure celery, for example: 68 | 69 | ```Python 70 | from celery import Celery 71 | 72 | celery = Celery('tasks') 73 | 74 | beat_dburi = 'sqlite:///schedule.db' 75 | 76 | celery.conf.update( 77 | {'beat_dburi': beat_dburi} 78 | ) 79 | ``` 80 | 81 | Also, you can use MySQL or PostgreSQL. 82 | 83 | ```Python 84 | # MySQL: `pip install mysql-connector` 85 | beat_dburi = 'mysql+mysqlconnector://root:root@127.0.0.1:3306/celery-schedule' 86 | 87 | # PostgreSQL: `pip install psycopg2` 88 | beat_dburi = 'postgresql+psycopg2://postgres:postgres@127.0.0.1:5432/celery-schedule' 89 | ``` 90 | 91 | ## Example Code 1 92 | 93 | View `examples/base/tasks.py` for details. 94 | 95 | How to quickstart: https://github.com/AngelLiang/celery-sqlalchemy-scheduler/issues/15#issuecomment-625624088 96 | 97 | Run Worker in console 1 98 | 99 | $ pipenv shell 100 | $ cd examples/base 101 | 102 | # Celery < 5.0 103 | $ celery worker -A tasks:celery -l info 104 | 105 | # Celery >= 5.0 106 | $ celery -A tasks:celery worker -l info 107 | 108 | Run Beat in console 2 109 | 110 | $ pipenv shell 111 | $ cd examples/base 112 | 113 | # Celery < 5.0 114 | $ celery beat -A tasks:celery -S tasks:DatabaseScheduler -l info 115 | 116 | # Celery >= 5.0 117 | $ celery -A tasks:celery beat -S tasks:DatabaseScheduler -l info 118 | 119 | ## Example Code 2 120 | 121 | ### Example creating interval-based periodic task 122 | 123 | To create a periodic task executing at an interval you must first 124 | create the interval object: 125 | 126 | ```python 127 | >>> from celery_sqlalchemy_scheduler.models import PeriodicTask, IntervalSchedule 128 | >>> from celery_sqlalchemy_scheduler.session import SessionManager 129 | >>> from celeryconfig
import beat_dburi 130 | >>> session_manager = SessionManager() 131 | >>> engine, Session = session_manager.create_session(beat_dburi) 132 | >>> session = Session() 133 | 134 | # executes every 10 seconds. 135 | >>> schedule = session.query(IntervalSchedule).filter_by(every=10, period=IntervalSchedule.SECONDS).first() 136 | >>> if not schedule: 137 | ... schedule = IntervalSchedule(every=10, period=IntervalSchedule.SECONDS) 138 | ... session.add(schedule) 139 | ... session.commit() 140 | ``` 141 | 142 | That's all the fields you need: a period type and the frequency. 143 | 144 | You can choose between a specific set of periods: 145 | 146 | - `IntervalSchedule.DAYS` 147 | - `IntervalSchedule.HOURS` 148 | - `IntervalSchedule.MINUTES` 149 | - `IntervalSchedule.SECONDS` 150 | - `IntervalSchedule.MICROSECONDS` 151 | 152 | _note_: 153 | 154 | If you have multiple periodic tasks executing every 10 seconds, 155 | then they should all point to the same schedule object. 156 | 157 | Now that we have defined the schedule object, we can create the periodic task 158 | entry: 159 | 160 | ```python 161 | >>> task = PeriodicTask( 162 | ... interval=schedule, # we created this above. 163 | ... name='Importing contacts', # simply describes this periodic task. 164 | ... task='proj.tasks.import_contacts', # name of task. 165 | ... ) 166 | >>> session.add(task) 167 | >>> session.commit() 168 | ``` 169 | 170 | Note that this is a very basic example, you can also specify the 171 | arguments and keyword arguments used to execute the task, the `queue` to 172 | send it to[\*], and set an expiry time. 173 | 174 | Here\'s an example specifying the arguments, note how JSON serialization 175 | is required: 176 | 177 | >>> import json 178 | >>> from datetime import datetime, timedelta 179 | 180 | >>> periodic_task = PeriodicTask( 181 | ... interval=schedule, # we created this above. 182 | ... name='Importing contacts', # simply describes this periodic task. 183 | ... 
task='proj.tasks.import_contacts', # name of task. 184 | ... args=json.dumps(['arg1', 'arg2']), 185 | ... kwargs=json.dumps({ 186 | ... 'be_careful': True, 187 | ... }), 188 | ... expires=datetime.utcnow() + timedelta(seconds=30) 189 | ... ) 190 | ... session.add(periodic_task) 191 | ... session.commit() 192 | 193 | ### Example creating crontab-based periodic task 194 | 195 | A crontab schedule has the fields: `minute`, `hour`, `day_of_week`, 196 | `day_of_month` and `month_of_year`, so if you want the equivalent of a 197 | `30 * * * *` (execute every 30 minutes) crontab entry you specify: 198 | 199 | >>> from celery_sqlalchemy_scheduler.models import PeriodicTask, CrontabSchedule 200 | >>> schedule = CrontabSchedule( 201 | ... minute='30', 202 | ... hour='*', 203 | ... day_of_week='*', 204 | ... day_of_month='*', 205 | ... month_of_year='*', 206 | ... timezone='UTC', 207 | ... ) 208 | 209 | The crontab schedule is linked to a specific timezone using the 210 | 'timezone' input parameter. 211 | 212 | Then to create a periodic task using this schedule, use the same 213 | approach as the interval-based periodic task earlier in this document, 214 | but instead of `interval=schedule`, specify `crontab=schedule`: 215 | 216 | >>> periodic_task = PeriodicTask( 217 | ... crontab=schedule, 218 | ... name='Importing contacts', 219 | ... task='proj.tasks.import_contacts', 220 | ... ) 221 | ... session.add(periodic_task) 222 | ... session.commit() 223 | 224 | ### Temporarily disable a periodic task 225 | 226 | You can use the `enabled` flag to temporarily disable a periodic task: 227 | 228 | >>> periodic_task.enabled = False 229 | >>> session.add(periodic_task) 230 | >>> session.commit() 231 | 232 | > Note: If you want to delete `PeriodicTask`, don't use `.delete()` method on a query 233 | > such as `db.session.query(PeriodicTask).filter(PeriodicTask.id == task_id).delete()`. 234 | > Because it doesn't trigger the `after_delete` event listener and result in Error. 
235 | > The correct deletion method is using session to delete `PeriodicTask` object. 236 | 237 | >>> db.session.delete(db.session.query(PeriodicTask).get(task_id)) 238 | >>> db.session.commit() 239 | 240 | ### Example running periodic tasks 241 | 242 | The periodic tasks still need 'workers' to execute them. So make sure 243 | the default **Celery** package is installed. (If not installed, please 244 | follow the installation instructions here: 245 | ) 246 | 247 | Both the worker and beat services need to be running at the same time. 248 | 249 | 1. Start a Celery worker service (specify your project name): 250 | 251 | $ celery -A [project-name] worker --loglevel=info 252 | 253 | 2. As a separate process, start the beat service (specify the 254 | scheduler): 255 | 256 | $ celery -A [project-name] beat -l info --scheduler celery_sqlalchemy_scheduler.schedulers:DatabaseScheduler 257 | 258 | ## Acknowledgments 259 | 260 | - [django-celery-beat](https://github.com/celery/django-celery-beat) 261 | - [celerybeatredis](https://github.com/liuliqiang/celerybeatredis) 262 | - [celery](https://github.com/celery/celery) 263 | -------------------------------------------------------------------------------- /celery_sqlalchemy_scheduler/models.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | 3 | import datetime as dt 4 | import pytz 5 | 6 | import sqlalchemy as sa 7 | from sqlalchemy import func 8 | from sqlalchemy.event import listen 9 | from sqlalchemy.orm import relationship, foreign, remote 10 | from sqlalchemy.sql import select, insert, update 11 | 12 | from celery import schedules 13 | from celery.utils.log import get_logger 14 | 15 | from .tzcrontab import TzAwareCrontab 16 | from .session import ModelBase 17 | 18 | 19 | logger = get_logger('celery_sqlalchemy_scheduler.models') 20 | 21 | 22 | def cronexp(field): 23 | """Representation of cron expression.""" 24 | return field and str(field).replace(' ', '') or '*' 
25 | 26 | 27 | class ModelMixin(object): 28 | 29 | @classmethod 30 | def create(cls, **kw): 31 | return cls(**kw) 32 | 33 | def update(self, **kw): 34 | for attr, value in kw.items(): 35 | setattr(self, attr, value) 36 | return self 37 | 38 | 39 | class IntervalSchedule(ModelBase, ModelMixin): 40 | __tablename__ = 'celery_interval_schedule' 41 | __table_args__ = {'sqlite_autoincrement': True} 42 | 43 | DAYS = 'days' 44 | HOURS = 'hours' 45 | MINUTES = 'minutes' 46 | SECONDS = 'seconds' 47 | MICROSECONDS = 'microseconds' 48 | 49 | id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) 50 | 51 | every = sa.Column(sa.Integer, nullable=False) 52 | period = sa.Column(sa.String(24)) 53 | 54 | def __repr__(self): 55 | if self.every == 1: 56 | return 'every {0}'.format(self.period_singular) 57 | return 'every {0} {1}'.format(self.every, self.period) 58 | 59 | @property 60 | def schedule(self): 61 | return schedules.schedule( 62 | dt.timedelta(**{self.period: self.every}), 63 | # nowfun=lambda: make_aware(now()) 64 | # nowfun=dt.datetime.now 65 | ) 66 | 67 | @classmethod 68 | def from_schedule(cls, session, schedule, period=SECONDS): 69 | every = max(schedule.run_every.total_seconds(), 0) 70 | model = session.query(IntervalSchedule).filter_by( 71 | every=every, period=period).first() 72 | if not model: 73 | model = cls(every=every, period=period) 74 | session.add(model) 75 | session.commit() 76 | return model 77 | 78 | @property 79 | def period_singular(self): 80 | return self.period[:-1] 81 | 82 | 83 | class CrontabSchedule(ModelBase, ModelMixin): 84 | __tablename__ = 'celery_crontab_schedule' 85 | __table_args__ = {'sqlite_autoincrement': True} 86 | 87 | id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) 88 | minute = sa.Column(sa.String(60 * 4), default='*') 89 | hour = sa.Column(sa.String(24 * 4), default='*') 90 | day_of_week = sa.Column(sa.String(64), default='*') 91 | day_of_month = sa.Column(sa.String(31 * 4), default='*') 92 | month_of_year 
= sa.Column(sa.String(64), default='*') 93 | timezone = sa.Column(sa.String(64), default='UTC') 94 | 95 | def __repr__(self): 96 | return '{0} {1} {2} {3} {4} (m/h/d/dM/MY) {5}'.format( 97 | cronexp(self.minute), cronexp(self.hour), 98 | cronexp(self.day_of_week), cronexp(self.day_of_month), 99 | cronexp(self.month_of_year), str(self.timezone) 100 | ) 101 | 102 | @property 103 | def schedule(self): 104 | return TzAwareCrontab( 105 | minute=self.minute, 106 | hour=self.hour, day_of_week=self.day_of_week, 107 | day_of_month=self.day_of_month, 108 | month_of_year=self.month_of_year, 109 | tz=pytz.timezone(self.timezone) 110 | ) 111 | 112 | @classmethod 113 | def from_schedule(cls, session, schedule): 114 | spec = { 115 | 'minute': schedule._orig_minute, 116 | 'hour': schedule._orig_hour, 117 | 'day_of_week': schedule._orig_day_of_week, 118 | 'day_of_month': schedule._orig_day_of_month, 119 | 'month_of_year': schedule._orig_month_of_year, 120 | } 121 | if schedule.tz: 122 | spec.update({ 123 | 'timezone': schedule.tz.zone 124 | }) 125 | model = session.query(CrontabSchedule).filter_by(**spec).first() 126 | if not model: 127 | model = cls(**spec) 128 | session.add(model) 129 | session.commit() 130 | return model 131 | 132 | 133 | class SolarSchedule(ModelBase, ModelMixin): 134 | __tablename__ = 'celery_solar_schedule' 135 | __table_args__ = {'sqlite_autoincrement': True} 136 | 137 | id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) 138 | 139 | event = sa.Column(sa.String(24)) 140 | latitude = sa.Column(sa.Float()) 141 | longitude = sa.Column(sa.Float()) 142 | 143 | @property 144 | def schedule(self): 145 | return schedules.solar( 146 | self.event, 147 | self.latitude, 148 | self.longitude, 149 | nowfun=dt.datetime.now 150 | ) 151 | 152 | @classmethod 153 | def from_schedule(cls, session, schedule): 154 | spec = { 155 | 'event': schedule.event, 156 | 'latitude': schedule.lat, 157 | 'longitude': schedule.lon 158 | } 159 | model = 
session.query(SolarSchedule).filter_by(**spec).first() 160 | if not model: 161 | model = cls(**spec) 162 | session.add(model) 163 | session.commit() 164 | return model 165 | 166 | def __repr__(self): 167 | return '{0} ({1}, {2})'.format( 168 | self.event, 169 | self.latitude, 170 | self.longitude 171 | ) 172 | 173 | 174 | class PeriodicTaskChanged(ModelBase, ModelMixin): 175 | """Helper table for tracking updates to periodic tasks.""" 176 | 177 | __tablename__ = 'celery_periodic_task_changed' 178 | 179 | id = sa.Column(sa.Integer, primary_key=True) 180 | last_update = sa.Column( 181 | sa.DateTime(timezone=True), nullable=False, default=dt.datetime.now) 182 | 183 | @classmethod 184 | def changed(cls, mapper, connection, target): 185 | """ 186 | :param mapper: the Mapper which is the target of this event 187 | :param connection: the Connection being used 188 | :param target: the mapped instance being persisted 189 | """ 190 | if not target.no_changes: 191 | cls.update_changed(mapper, connection, target) 192 | 193 | @classmethod 194 | def update_changed(cls, mapper, connection, target): 195 | """ 196 | :param mapper: the Mapper which is the target of this event 197 | :param connection: the Connection being used 198 | :param target: the mapped instance being persisted 199 | """ 200 | s = connection.execute(select([PeriodicTaskChanged]). 201 | where(PeriodicTaskChanged.id == 1).limit(1)) 202 | if not s: 203 | s = connection.execute(insert(PeriodicTaskChanged), 204 | last_update=dt.datetime.now()) 205 | else: 206 | s = connection.execute(update(PeriodicTaskChanged). 207 | where(PeriodicTaskChanged.id == 1). 
208 | values(last_update=dt.datetime.now())) 209 | 210 | @classmethod 211 | def last_change(cls, session): 212 | periodic_tasks = session.query(PeriodicTaskChanged).get(1) 213 | if periodic_tasks: 214 | return periodic_tasks.last_update 215 | 216 | 217 | class PeriodicTask(ModelBase, ModelMixin): 218 | 219 | __tablename__ = 'celery_periodic_task' 220 | __table_args__ = {'sqlite_autoincrement': True} 221 | 222 | id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) 223 | # name 224 | name = sa.Column(sa.String(255), unique=True) 225 | # task name 226 | task = sa.Column(sa.String(255)) 227 | 228 | # not use ForeignKey 229 | interval_id = sa.Column(sa.Integer) 230 | interval = relationship( 231 | IntervalSchedule, 232 | uselist=False, 233 | primaryjoin=foreign(interval_id) == remote(IntervalSchedule.id) 234 | ) 235 | 236 | crontab_id = sa.Column(sa.Integer) 237 | crontab = relationship( 238 | CrontabSchedule, 239 | uselist=False, 240 | primaryjoin=foreign(crontab_id) == remote(CrontabSchedule.id) 241 | ) 242 | 243 | solar_id = sa.Column(sa.Integer) 244 | solar = relationship( 245 | SolarSchedule, 246 | uselist=False, 247 | primaryjoin=foreign(solar_id) == remote(SolarSchedule.id) 248 | ) 249 | 250 | args = sa.Column(sa.Text(), default='[]') 251 | kwargs = sa.Column(sa.Text(), default='{}') 252 | # queue for celery 253 | queue = sa.Column(sa.String(255)) 254 | # exchange for celery 255 | exchange = sa.Column(sa.String(255)) 256 | # routing_key for celery 257 | routing_key = sa.Column(sa.String(255)) 258 | priority = sa.Column(sa.Integer()) 259 | expires = sa.Column(sa.DateTime(timezone=True)) 260 | 261 | # 只执行一次 262 | one_off = sa.Column(sa.Boolean(), default=False) 263 | start_time = sa.Column(sa.DateTime(timezone=True)) 264 | enabled = sa.Column(sa.Boolean(), default=True) 265 | last_run_at = sa.Column(sa.DateTime(timezone=True)) 266 | total_run_count = sa.Column(sa.Integer(), nullable=False, default=0) 267 | # 修改时间 268 | date_changed = 
sa.Column(sa.DateTime(timezone=True), 269 | default=func.now(), onupdate=func.now()) 270 | description = sa.Column(sa.Text(), default='') 271 | 272 | no_changes = False 273 | 274 | def __repr__(self): 275 | fmt = '{0.name}: {{no schedule}}' 276 | if self.interval: 277 | fmt = '{0.name}: {0.interval}' 278 | elif self.crontab: 279 | fmt = '{0.name}: {0.crontab}' 280 | elif self.solar: 281 | fmt = '{0.name}: {0.solar}' 282 | return fmt.format(self) 283 | 284 | @property 285 | def task_name(self): 286 | return self.task 287 | 288 | @task_name.setter 289 | def task_name(self, value): 290 | self.task = value 291 | 292 | @property 293 | def schedule(self): 294 | if self.interval: 295 | return self.interval.schedule 296 | elif self.crontab: 297 | return self.crontab.schedule 298 | elif self.solar: 299 | return self.solar.schedule 300 | raise ValueError('{} schedule is None!'.format(self.name)) 301 | 302 | 303 | listen(PeriodicTask, 'after_insert', PeriodicTaskChanged.update_changed) 304 | listen(PeriodicTask, 'after_delete', PeriodicTaskChanged.update_changed) 305 | listen(PeriodicTask, 'after_update', PeriodicTaskChanged.changed) 306 | listen(IntervalSchedule, 'after_insert', PeriodicTaskChanged.update_changed) 307 | listen(IntervalSchedule, 'after_delete', PeriodicTaskChanged.update_changed) 308 | listen(IntervalSchedule, 'after_update', PeriodicTaskChanged.update_changed) 309 | listen(CrontabSchedule, 'after_insert', PeriodicTaskChanged.update_changed) 310 | listen(CrontabSchedule, 'after_delete', PeriodicTaskChanged.update_changed) 311 | listen(CrontabSchedule, 'after_update', PeriodicTaskChanged.update_changed) 312 | listen(SolarSchedule, 'after_insert', PeriodicTaskChanged.update_changed) 313 | listen(SolarSchedule, 'after_delete', PeriodicTaskChanged.update_changed) 314 | listen(SolarSchedule, 'after_update', PeriodicTaskChanged.update_changed) 315 | -------------------------------------------------------------------------------- 
/celery_sqlalchemy_scheduler/schedulers.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | 3 | import logging 4 | import datetime as dt 5 | from multiprocessing.util import Finalize 6 | 7 | import sqlalchemy 8 | from celery import current_app 9 | from celery import schedules 10 | from celery.beat import Scheduler, ScheduleEntry 11 | from celery.utils.log import get_logger 12 | from celery.utils.time import maybe_make_aware 13 | from kombu.utils.encoding import safe_repr, safe_str 14 | from kombu.utils.json import dumps, loads 15 | 16 | from .session import session_cleanup 17 | from .session import SessionManager 18 | from .models import ( 19 | PeriodicTask, PeriodicTaskChanged, 20 | CrontabSchedule, IntervalSchedule, 21 | SolarSchedule, 22 | ) 23 | 24 | # This scheduler must wake up more frequently than the 25 | # regular of 5 minutes because it needs to take external 26 | # changes to the schedule into account. 27 | DEFAULT_MAX_INTERVAL = 5 # seconds 28 | 29 | DEFAULT_BEAT_DBURI = 'sqlite:///schedule.db' 30 | 31 | ADD_ENTRY_ERROR = """\ 32 | Cannot add entry %r to database schedule: %r. 
Contents: %r 33 | """ 34 | 35 | 36 | session_manager = SessionManager() 37 | # session = session_manager() 38 | 39 | 40 | logger = get_logger('celery_sqlalchemy_scheduler.schedulers') 41 | 42 | 43 | class ModelEntry(ScheduleEntry): 44 | """Scheduler entry taken from database row.""" 45 | 46 | model_schedules = ( 47 | # (schedule_type, model_type, model_field) 48 | (schedules.crontab, CrontabSchedule, 'crontab'), 49 | (schedules.schedule, IntervalSchedule, 'interval'), 50 | (schedules.solar, SolarSchedule, 'solar'), 51 | ) 52 | save_fields = ['last_run_at', 'total_run_count', 'no_changes'] 53 | 54 | def __init__(self, model, Session, app=None, **kw): 55 | """Initialize the model entry.""" 56 | self.app = app or current_app._get_current_object() 57 | self.session = kw.get('session') 58 | self.Session = Session 59 | 60 | self.model = model 61 | self.name = model.name 62 | self.task = model.task 63 | 64 | try: 65 | self.schedule = model.schedule 66 | logger.debug('schedule: {}'.format(self.schedule)) 67 | except Exception as e: 68 | logger.error(e) 69 | logger.error( 70 | 'Disabling schedule %s that was removed from database', 71 | self.name, 72 | ) 73 | self._disable(model) 74 | 75 | try: 76 | self.args = loads(model.args or '[]') 77 | self.kwargs = loads(model.kwargs or '{}') 78 | except ValueError as exc: 79 | logger.exception( 80 | 'Removing schedule %s for argument deseralization error: %r', 81 | self.name, exc, 82 | ) 83 | self._disable(model) 84 | 85 | self.options = {} 86 | for option in ['queue', 'exchange', 'routing_key', 'expires', 87 | 'priority']: 88 | value = getattr(model, option) 89 | if value is None: 90 | continue 91 | self.options[option] = value 92 | 93 | self.total_run_count = model.total_run_count 94 | self.enabled = model.enabled 95 | 96 | if not model.last_run_at: 97 | model.last_run_at = self._default_now() 98 | self.last_run_at = model.last_run_at 99 | 100 | # 因为从数据库读取的 last_run_at 可能没有时区信息,所以这里必须加上时区信息 101 | self.last_run_at = 
self.last_run_at.replace(tzinfo=self.app.timezone) 102 | 103 | # self.options['expires'] 同理 104 | # if 'expires' in self.options: 105 | # expires = self.options['expires'] 106 | # self.options['expires'] = expires.replace(tzinfo=self.app.timezone) 107 | 108 | def _disable(self, model): 109 | model.no_changes = True 110 | self.model.enabled = self.enabled = model.enabled = False 111 | if self.session: 112 | self.session.add(model) 113 | self.session.commit() 114 | else: 115 | session = self.Session() 116 | with session_cleanup(session): 117 | session.add(model) 118 | session.commit() 119 | 120 | # obj = session.query(PeriodicTask).get(model.id) 121 | # obj.enable = model.enabled 122 | # session.add(obj) 123 | # session.commit() 124 | 125 | def is_due(self): 126 | if not self.model.enabled: 127 | # 5 second delay for re-enable. 128 | return schedules.schedstate(False, 5.0) 129 | 130 | # START DATE: only run after the `start_time`, if one exists. 131 | if self.model.start_time is not None: 132 | now = maybe_make_aware(self._default_now()) 133 | start_time = self.model.start_time.replace( 134 | tzinfo=self.app.timezone) 135 | if now < start_time: 136 | # The datetime is before the start date - don't run. 
137 | _, delay = self.schedule.is_due(self.last_run_at) 138 | # use original delay for re-check 139 | return schedules.schedstate(False, delay) 140 | 141 | # ONE OFF TASK: Disable one off tasks after they've ran once 142 | if self.model.one_off and self.model.enabled \ 143 | and self.model.total_run_count > 0: 144 | self.model.enabled = False # disable 145 | self.model.total_run_count = 0 # Reset 146 | self.model.no_changes = False # Mark the model entry as changed 147 | save_fields = ('enabled',) # the additional fields to save 148 | self.save(save_fields) 149 | 150 | return schedules.schedstate(False, None) # Don't recheck 151 | 152 | return self.schedule.is_due(self.last_run_at) 153 | 154 | def _default_now(self): 155 | now = self.app.now() 156 | # The PyTZ datetime must be localised for the Django-Celery-Beat 157 | # scheduler to work. Keep in mind that timezone arithmatic 158 | # with a localized timezone may be inaccurate. 159 | # return now.tzinfo.localize(now.replace(tzinfo=None)) 160 | return now.replace(tzinfo=self.app.timezone) 161 | 162 | def __next__(self): 163 | # should be use `self._default_now()` or `self.app.now()` ? 164 | self.model.last_run_at = self.app.now() 165 | self.model.total_run_count += 1 166 | self.model.no_changes = True 167 | return self.__class__(self.model, Session=self.Session) 168 | next = __next__ # for 2to3 169 | 170 | def save(self, fields=tuple()): 171 | """ 172 | :params fields: tuple, the additional fields to save 173 | """ 174 | # TODO: 175 | session = self.Session() 176 | with session_cleanup(session): 177 | # Object may not be synchronized, so only 178 | # change the fields we care about. 
179 | obj = session.query(PeriodicTask).get(self.model.id) 180 | 181 | for field in self.save_fields: 182 | setattr(obj, field, getattr(self.model, field)) 183 | for field in fields: 184 | setattr(obj, field, getattr(self.model, field)) 185 | session.add(obj) 186 | session.commit() 187 | 188 | @classmethod 189 | def to_model_schedule(cls, session, schedule): 190 | for schedule_type, model_type, model_field in cls.model_schedules: 191 | # change to schedule 192 | schedule = schedules.maybe_schedule(schedule) 193 | if isinstance(schedule, schedule_type): 194 | # TODO: 195 | model_schedule = model_type.from_schedule(session, schedule) 196 | return model_schedule, model_field 197 | raise ValueError( 198 | 'Cannot convert schedule type {0!r} to model'.format(schedule)) 199 | 200 | @classmethod 201 | def from_entry(cls, name, Session, app=None, **entry): 202 | """ 203 | 204 | **entry sample: 205 | 206 | {'task': 'celery.backend_cleanup', 207 | 'schedule': schedules.crontab('0', '4', '*'), 208 | 'options': {'expires': 43200}} 209 | 210 | """ 211 | session = Session() 212 | with session_cleanup(session): 213 | periodic_task = session.query( 214 | PeriodicTask).filter_by(name=name).first() 215 | if not periodic_task: 216 | periodic_task = PeriodicTask(name=name) 217 | temp = cls._unpack_fields(session, **entry) 218 | periodic_task.update(**temp) 219 | session.add(periodic_task) 220 | try: 221 | session.commit() 222 | except sqlalchemy.exc.IntegrityError as exc: 223 | logger.error(exc) 224 | session.rollback() 225 | except Exception as exc: 226 | logger.error(exc) 227 | session.rollback() 228 | res = cls(periodic_task, app=app, Session=Session, session=session) 229 | return res 230 | 231 | @classmethod 232 | def _unpack_fields(cls, session, schedule, 233 | args=None, kwargs=None, relative=None, options=None, 234 | **entry): 235 | """ 236 | 237 | **entry sample: 238 | 239 | {'task': 'celery.backend_cleanup', 240 | 'schedule': , 241 | 'options': {'expires': 43200}} 242 | 243 | 
""" 244 | model_schedule, model_field = cls.to_model_schedule(session, schedule) 245 | entry.update( 246 | # the model_id which to relationship 247 | {model_field + '_id': model_schedule.id}, 248 | args=dumps(args or []), 249 | kwargs=dumps(kwargs or {}), 250 | **cls._unpack_options(**options or {}) 251 | ) 252 | return entry 253 | 254 | @classmethod 255 | def _unpack_options(cls, queue=None, exchange=None, routing_key=None, 256 | priority=None, one_off=None, expires=None, **kwargs): 257 | data = { 258 | 'queue': queue, 259 | 'exchange': exchange, 260 | 'routing_key': routing_key, 261 | 'priority': priority, 262 | 'one_off': one_off, 263 | } 264 | if expires: 265 | if isinstance(expires, int): 266 | expires = dt.datetime.utcnow() + dt.timedelta(seconds=expires) 267 | elif isinstance(expires, dt.datetime): 268 | pass 269 | else: 270 | raise ValueError('expires value error') 271 | data['expires'] = expires 272 | return data 273 | 274 | def __repr__(self): 275 | return ''.format( 276 | safe_str(self.name), self.task, safe_repr(self.args), 277 | safe_repr(self.kwargs), self.schedule, 278 | ) 279 | 280 | 281 | class DatabaseScheduler(Scheduler): 282 | 283 | Entry = ModelEntry 284 | Model = PeriodicTask 285 | Changes = PeriodicTaskChanged 286 | 287 | _schedule = None 288 | _last_timestamp = None 289 | _initial_read = True 290 | _heap_invalidated = False 291 | 292 | def __init__(self, *args, **kwargs): 293 | """Initialize the database scheduler.""" 294 | self.app = kwargs['app'] 295 | self.dburi = kwargs.get('dburi') or self.app.conf.get( 296 | 'beat_dburi') or DEFAULT_BEAT_DBURI 297 | self.engine, self.Session = session_manager.create_session(self.dburi) 298 | session_manager.prepare_models(self.engine) 299 | 300 | self._dirty = set() 301 | Scheduler.__init__(self, *args, **kwargs) 302 | self._finalize = Finalize(self, self.sync, exitpriority=5) 303 | self.max_interval = (kwargs.get('max_interval') or 304 | self.app.conf.beat_max_loop_interval or 305 | 
DEFAULT_MAX_INTERVAL) 306 | 307 | def setup_schedule(self): 308 | """override""" 309 | logger.info('setup_schedule') 310 | self.install_default_entries(self.schedule) 311 | self.update_from_dict(self.app.conf.beat_schedule) 312 | 313 | def all_as_schedule(self): 314 | # TODO: 315 | session = self.Session() 316 | with session_cleanup(session): 317 | logger.debug('DatabaseScheduler: Fetching database schedule') 318 | # get all enabled PeriodicTask 319 | models = session.query(self.Model).filter_by(enabled=True).all() 320 | s = {} 321 | for model in models: 322 | try: 323 | s[model.name] = self.Entry(model, 324 | app=self.app, 325 | Session=self.Session, 326 | session=session) 327 | except ValueError: 328 | pass 329 | return s 330 | 331 | def schedule_changed(self): 332 | session = self.Session() 333 | with session_cleanup(session): 334 | changes = session.query(self.Changes).get(1) 335 | if not changes: 336 | changes = self.Changes(id=1) 337 | session.add(changes) 338 | session.commit() 339 | return False 340 | 341 | last, ts = self._last_timestamp, changes.last_update 342 | try: 343 | if ts and ts > (last if last else ts): 344 | return True 345 | finally: 346 | self._last_timestamp = ts 347 | return False 348 | 349 | def reserve(self, entry): 350 | """override 351 | 352 | It will be called in parent class. 353 | """ 354 | new_entry = next(entry) 355 | # Need to store entry by name, because the entry may change 356 | # in the mean time. 
357 | self._dirty.add(new_entry.name) 358 | return new_entry 359 | 360 | def sync(self): 361 | """override""" 362 | logger.info('Writing entries...') 363 | _tried = set() 364 | _failed = set() 365 | try: 366 | while self._dirty: 367 | name = self._dirty.pop() 368 | try: 369 | self.schedule[name].save() # save to database 370 | logger.debug( 371 | '{name} save to database'.format(name=name)) 372 | _tried.add(name) 373 | except (KeyError) as exc: 374 | logger.error(exc) 375 | _failed.add(name) 376 | except sqlalchemy.exc.IntegrityError as exc: 377 | logger.exception('Database error while sync: %r', exc) 378 | except Exception as exc: 379 | logger.exception(exc) 380 | finally: 381 | # retry later, only for the failed ones 382 | self._dirty |= _failed 383 | 384 | def update_from_dict(self, mapping): 385 | s = {} 386 | for name, entry_fields in mapping.items(): 387 | # {'task': 'celery.backend_cleanup', 388 | # 'schedule': schedules.crontab('0', '4', '*'), 389 | # 'options': {'expires': 43200}} 390 | try: 391 | entry = self.Entry.from_entry( 392 | name, Session=self.Session, app=self.app, 393 | **entry_fields) 394 | if entry.model.enabled: 395 | s[name] = entry 396 | except Exception as exc: 397 | logger.error(ADD_ENTRY_ERROR, name, exc, entry_fields) 398 | 399 | # update self.schedule 400 | self.schedule.update(s) 401 | 402 | def install_default_entries(self, data): 403 | entries = {} 404 | if self.app.conf.result_expires: 405 | entries.setdefault( 406 | 'celery.backend_cleanup', { 407 | 'task': 'celery.backend_cleanup', 408 | 'schedule': schedules.crontab('0', '4', '*'), 409 | 'options': {'expires': 12 * 3600}, 410 | }, 411 | ) 412 | self.update_from_dict(entries) 413 | 414 | def schedules_equal(self, *args, **kwargs): 415 | if self._heap_invalidated: 416 | self._heap_invalidated = False 417 | return False 418 | return super(DatabaseScheduler, self).schedules_equal(*args, **kwargs) 419 | 420 | @property 421 | def schedule(self): 422 | initial = update = False 423 | if 
self._initial_read: 424 | logger.debug('DatabaseScheduler: initial read') 425 | initial = update = True 426 | self._initial_read = False 427 | elif self.schedule_changed(): 428 | # when you updated the `PeriodicTasks` model's `last_update` field 429 | logger.info('DatabaseScheduler: Schedule changed.') 430 | update = True 431 | 432 | if update: 433 | self.sync() 434 | self._schedule = self.all_as_schedule() 435 | # the schedule changed, invalidate the heap in Scheduler.tick 436 | if not initial: 437 | self._heap = [] 438 | self._heap_invalidated = True 439 | if logger.isEnabledFor(logging.DEBUG): 440 | logger.debug('Current schedule:\n%s', '\n'.join( 441 | repr(entry) for entry in self._schedule.values()), 442 | ) 443 | # logger.debug(self._schedule) 444 | return self._schedule 445 | 446 | @property 447 | def info(self): 448 | """override""" 449 | # return infomation about Schedule 450 | return ' . db -> {self.dburi}'.format(self=self) 451 | -------------------------------------------------------------------------------- /Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "98d02ee2e76a1ec2274050c7683c20b0633e4c5bea72b4f05943f112bd1bb0a3" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": { 8 | "python_version": "3" 9 | }, 10 | "sources": [ 11 | { 12 | "name": "pypi", 13 | "url": "https://pypi.org/simple", 14 | "verify_ssl": true 15 | } 16 | ] 17 | }, 18 | "default": { 19 | "amqp": { 20 | "hashes": [ 21 | "sha256:03e16e94f2b34c31f8bf1206d8ddd3ccaa4c315f7f6a1879b7b1210d229568c2", 22 | "sha256:493a2ac6788ce270a2f6a765b017299f60c1998f5a8617908ee9be082f7300fb" 23 | ], 24 | "version": "==5.0.6" 25 | }, 26 | "billiard": { 27 | "hashes": [ 28 | "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547", 29 | "sha256:87103ea78fa6ab4d5c751c4909bcff74617d985de7fa8b672cf8618afd5a875b" 30 | ], 31 | "version": "==3.6.4.0" 32 | }, 33 | "celery": { 34 | "hashes": [ 35 | 
"sha256:5e8d364e058554e83bbb116e8377d90c79be254785f357cb2cec026e79febe13", 36 | "sha256:f4efebe6f8629b0da2b8e529424de376494f5b7a743c321c8a2ddc2b1414921c" 37 | ], 38 | "index": "pypi", 39 | "version": "==5.0.5" 40 | }, 41 | "celery-sqlalchemy-scheduler": { 42 | "editable": true, 43 | "path": "." 44 | }, 45 | "click": { 46 | "hashes": [ 47 | "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", 48 | "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" 49 | ], 50 | "version": "==7.1.2" 51 | }, 52 | "click-didyoumean": { 53 | "hashes": [ 54 | "sha256:112229485c9704ff51362fe34b2d4f0b12fc71cc20f6d2b3afabed4b8bfa6aeb" 55 | ], 56 | "version": "==0.0.3" 57 | }, 58 | "click-plugins": { 59 | "hashes": [ 60 | "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b", 61 | "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8" 62 | ], 63 | "version": "==1.1.1" 64 | }, 65 | "click-repl": { 66 | "hashes": [ 67 | "sha256:9c4c3d022789cae912aad8a3f5e1d7c2cdd016ee1225b5212ad3e8691563cda5", 68 | "sha256:b9f29d52abc4d6059f8e276132a111ab8d94980afe6a5432b9d996544afa95d5" 69 | ], 70 | "version": "==0.1.6" 71 | }, 72 | "greenlet": { 73 | "hashes": [ 74 | "sha256:0a77691f0080c9da8dfc81e23f4e3cffa5accf0f5b56478951016d7cfead9196", 75 | "sha256:0ddd77586553e3daf439aa88b6642c5f252f7ef79a39271c25b1d4bf1b7cbb85", 76 | "sha256:111cfd92d78f2af0bc7317452bd93a477128af6327332ebf3c2be7df99566683", 77 | "sha256:122c63ba795fdba4fc19c744df6277d9cfd913ed53d1a286f12189a0265316dd", 78 | "sha256:181300f826625b7fd1182205b830642926f52bd8cdb08b34574c9d5b2b1813f7", 79 | "sha256:1a1ada42a1fd2607d232ae11a7b3195735edaa49ea787a6d9e6a53afaf6f3476", 80 | "sha256:1bb80c71de788b36cefb0c3bb6bfab306ba75073dbde2829c858dc3ad70f867c", 81 | "sha256:1d1d4473ecb1c1d31ce8fd8d91e4da1b1f64d425c1dc965edc4ed2a63cfa67b2", 82 | "sha256:292e801fcb3a0b3a12d8c603c7cf340659ea27fd73c98683e75800d9fd8f704c", 83 | 
"sha256:2c65320774a8cd5fdb6e117c13afa91c4707548282464a18cf80243cf976b3e6", 84 | "sha256:4365eccd68e72564c776418c53ce3c5af402bc526fe0653722bc89efd85bf12d", 85 | "sha256:5352c15c1d91d22902582e891f27728d8dac3bd5e0ee565b6a9f575355e6d92f", 86 | "sha256:58ca0f078d1c135ecf1879d50711f925ee238fe773dfe44e206d7d126f5bc664", 87 | "sha256:5d4030b04061fdf4cbc446008e238e44936d77a04b2b32f804688ad64197953c", 88 | "sha256:5d69bbd9547d3bc49f8a545db7a0bd69f407badd2ff0f6e1a163680b5841d2b0", 89 | "sha256:5f297cb343114b33a13755032ecf7109b07b9a0020e841d1c3cedff6602cc139", 90 | "sha256:62afad6e5fd70f34d773ffcbb7c22657e1d46d7fd7c95a43361de979f0a45aef", 91 | "sha256:647ba1df86d025f5a34043451d7c4a9f05f240bee06277a524daad11f997d1e7", 92 | "sha256:719e169c79255816cdcf6dccd9ed2d089a72a9f6c42273aae12d55e8d35bdcf8", 93 | "sha256:7cd5a237f241f2764324396e06298b5dee0df580cf06ef4ada0ff9bff851286c", 94 | "sha256:875d4c60a6299f55df1c3bb870ebe6dcb7db28c165ab9ea6cdc5d5af36bb33ce", 95 | "sha256:90b6a25841488cf2cb1c8623a53e6879573010a669455046df5f029d93db51b7", 96 | "sha256:94620ed996a7632723a424bccb84b07e7b861ab7bb06a5aeb041c111dd723d36", 97 | "sha256:b5f1b333015d53d4b381745f5de842f19fe59728b65f0fbb662dafbe2018c3a5", 98 | "sha256:c5b22b31c947ad8b6964d4ed66776bcae986f73669ba50620162ba7c832a6b6a", 99 | "sha256:c93d1a71c3fe222308939b2e516c07f35a849c5047f0197442a4d6fbcb4128ee", 100 | "sha256:cdb90267650c1edb54459cdb51dab865f6c6594c3a47ebd441bc493360c7af70", 101 | "sha256:cfd06e0f0cc8db2a854137bd79154b61ecd940dce96fad0cba23fe31de0b793c", 102 | "sha256:d3789c1c394944084b5e57c192889985a9f23bd985f6d15728c745d380318128", 103 | "sha256:da7d09ad0f24270b20f77d56934e196e982af0d0a2446120cb772be4e060e1a2", 104 | "sha256:df3e83323268594fa9755480a442cabfe8d82b21aba815a71acf1bb6c1776218", 105 | "sha256:df8053867c831b2643b2c489fe1d62049a98566b1646b194cc815f13e27b90df", 106 | "sha256:e1128e022d8dce375362e063754e129750323b67454cac5600008aad9f54139e", 107 | 
"sha256:e6e9fdaf6c90d02b95e6b0709aeb1aba5affbbb9ccaea5502f8638e4323206be", 108 | "sha256:eac8803c9ad1817ce3d8d15d1bb82c2da3feda6bee1153eec5c58fa6e5d3f770", 109 | "sha256:eb333b90036358a0e2c57373f72e7648d7207b76ef0bd00a4f7daad1f79f5203", 110 | "sha256:ed1d1351f05e795a527abc04a0d82e9aecd3bdf9f46662c36ff47b0b00ecaf06", 111 | "sha256:f3dc68272990849132d6698f7dc6df2ab62a88b0d36e54702a8fd16c0490e44f", 112 | "sha256:f59eded163d9752fd49978e0bab7a1ff21b1b8d25c05f0995d140cc08ac83379", 113 | "sha256:f5e2d36c86c7b03c94b8459c3bd2c9fe2c7dab4b258b8885617d44a22e453fb7", 114 | "sha256:f6f65bf54215e4ebf6b01e4bb94c49180a589573df643735107056f7a910275b", 115 | "sha256:f8450d5ef759dbe59f84f2c9f77491bb3d3c44bc1a573746daf086e70b14c243", 116 | "sha256:f97d83049715fd9dec7911860ecf0e17b48d8725de01e45de07d8ac0bd5bc378" 117 | ], 118 | "markers": "python_version >= '3'", 119 | "version": "==1.0.0" 120 | }, 121 | "importlib-metadata": { 122 | "hashes": [ 123 | "sha256:19192b88d959336bfa6bdaaaef99aeafec179eca19c47c804e555703ee5f07ef", 124 | "sha256:2e881981c9748d7282b374b68e759c87745c25427b67ecf0cc67fb6637a1bff9" 125 | ], 126 | "markers": "python_version < '3.8'", 127 | "version": "==4.0.0" 128 | }, 129 | "kombu": { 130 | "hashes": [ 131 | "sha256:6dc509178ac4269b0e66ab4881f70a2035c33d3a622e20585f965986a5182006", 132 | "sha256:f4965fba0a4718d47d470beeb5d6446e3357a62402b16c510b6a2f251e05ac3c" 133 | ], 134 | "version": "==5.0.2" 135 | }, 136 | "mysql-connector": { 137 | "hashes": [ 138 | "sha256:1733e6ce52a049243de3264f1fbc22a852cb35458c4ad739ba88189285efdf32" 139 | ], 140 | "index": "pypi", 141 | "version": "==2.2.9" 142 | }, 143 | "prompt-toolkit": { 144 | "hashes": [ 145 | "sha256:bf00f22079f5fadc949f42ae8ff7f05702826a97059ffcc6281036ad40ac6f04", 146 | "sha256:e1b4f11b9336a28fa11810bc623c357420f69dfdb6d2dac41ca2c21a55c033bc" 147 | ], 148 | "version": "==3.0.18" 149 | }, 150 | "python-dotenv": { 151 | "hashes": [ 152 | "sha256:471b782da0af10da1a80341e8438fca5fadeba2881c54360d5fd8d03d03a4f4a", 153 
| "sha256:49782a97c9d641e8a09ae1d9af0856cc587c8d2474919342d5104d85be9890b2" 154 | ], 155 | "index": "pypi", 156 | "version": "==0.17.0" 157 | }, 158 | "pytz": { 159 | "hashes": [ 160 | "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da", 161 | "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798" 162 | ], 163 | "version": "==2021.1" 164 | }, 165 | "six": { 166 | "hashes": [ 167 | "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", 168 | "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" 169 | ], 170 | "version": "==1.15.0" 171 | }, 172 | "sqlalchemy": { 173 | "hashes": [ 174 | "sha256:065ac7331b87494a86bf3dc4430c1ee7779d6dc532213c528394ddd00804e518", 175 | "sha256:099e63ffad329989080c533896267c40f9cb38ed5704168f7dae3afdda121e10", 176 | "sha256:0d8aab144cf8d31c1ac834802c7df4430248f74bd8b3ed3149f9c9eec0eafe50", 177 | "sha256:230b210fc6d1af5d555d1d04ff9bd4259d6ab82b020369724ab4a1c805a32dd3", 178 | "sha256:25aaf0bec9eadde9789e3c0178c718ae6923b57485fdeae85999bc3089d9b871", 179 | "sha256:29816a338982c30dd7ee76c4e79f17d5991abb1b6561e9f1d72703d030a79c86", 180 | "sha256:2e1b8d31c97a2b91aea8ed8299ad360a32d60728a89f2aac9c98eef07a633a0e", 181 | "sha256:343c679899afdc4952ac659dc46f2075a2bd4fba87ca0df264be838eecd02096", 182 | "sha256:386f215248c3fb2fab9bb77f631bc3c6cd38354ca2363d241784f8297d16b80a", 183 | "sha256:457a1652bc1c5f832165ff341380b3742bfb98b9ceca24576350992713ad700f", 184 | "sha256:4e554872766d2783abf0a11704536596e8794229fb0fa63d311a74caae58c6c5", 185 | "sha256:4edff2b4101a1c442fb1b17d594a5fdf99145f27c5eaffae12c26aef2bb2bf65", 186 | "sha256:690fbca2a208314504a2ab46d3e7dae320247fcb1967863b9782a70bf49fc600", 187 | "sha256:6c6090d73820dcf04549f0b6e80f67b46c8191f0e40bf09c6d6f8ece2464e8b6", 188 | "sha256:7bdb0f972bc35054c05088e91cec8fa810c3aa565b690bae75c005ee430e12e8", 189 | "sha256:815a8cdf9c0fa504d0bfbe83fb3e596b7663fc828b73259a20299c01330467aa", 190 | 
"sha256:a28c7b96bc5beef585172ca9d79068ae7fa2527feaa26bd63371851d7894c66f", 191 | "sha256:a8763fe4de02f746666161b130cc3e5d1494a6f5475f5622f05251739fc22e55", 192 | "sha256:b0266e133d819d33b555798822606e876187a96798e2d8c9b7f85e419d73ef94", 193 | "sha256:bb97aeaa699c43da62e35856ab56e5154d062c09a3593a2c12c67d6a21059920", 194 | "sha256:bce6eaf7b9a3a445911e225570b8fd26b7e98654ac9f308a8a52addb64a2a488", 195 | "sha256:c4485040d86d4b3d9aa509fd3c492de3687d9bf52fb85d66b33912ad068a088c", 196 | "sha256:c6f228b79fd757d9ca539c9958190b3a44308f743dc7d83575aa0891033f6c86", 197 | "sha256:cde2cf3ee76e8c538f2f43f5cf9252ad53404fc350801191128bab68f335a8b2", 198 | "sha256:cfa4a336de7d32ae30b54f7b8ec888fb5c6313a1b7419a9d7b3f49cdd83012a3", 199 | "sha256:cfbf2cf8e8ef0a1d23bfd0fa387057e6e522d55e43821f1d115941d913ee7762", 200 | "sha256:e26791ac43806dec1f18d328596db87f1b37f9d8271997dd1233054b4c377f51", 201 | "sha256:e7d262415e4adf148441bd9f10ae4e5498d6649962fabc62a64ec7b4891d56c5", 202 | "sha256:e9e95568eafae18ac40d00694b82dc3febe653f81eee83204ef248563f39696d", 203 | "sha256:ec7c33e22beac16b4c5348c41cd94cfee056152e55a0efc62843deebfc53fcb4", 204 | "sha256:f239778cf03cd46da4962636501f6dea55af9b4684cd7ceee104ad4f0290e878", 205 | "sha256:f31757972677fbe9132932a69a4f23db59187a072cc26427f56a3082b46b6dac", 206 | "sha256:fbdcf9019e92253fc6aa0bcd5937302664c3a4d53884c425c0caa994e56c4421", 207 | "sha256:fc82688695eacf77befc3d839df2bc7ff314cd1d547f120835acdcbac1a480b8" 208 | ], 209 | "index": "pypi", 210 | "version": "==1.4.9" 211 | }, 212 | "typing-extensions": { 213 | "hashes": [ 214 | "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918", 215 | "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c", 216 | "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f" 217 | ], 218 | "markers": "python_version < '3.8'", 219 | "version": "==3.7.4.3" 220 | }, 221 | "vine": { 222 | "hashes": [ 223 | 
"sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30", 224 | "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e" 225 | ], 226 | "version": "==5.0.0" 227 | }, 228 | "wcwidth": { 229 | "hashes": [ 230 | "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784", 231 | "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83" 232 | ], 233 | "version": "==0.2.5" 234 | }, 235 | "zipp": { 236 | "hashes": [ 237 | "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76", 238 | "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098" 239 | ], 240 | "version": "==3.4.1" 241 | } 242 | }, 243 | "develop": { 244 | "autopep8": { 245 | "hashes": [ 246 | "sha256:5454e6e9a3d02aae38f866eec0d9a7de4ab9f93c10a273fb0340f3d6d09f7514", 247 | "sha256:f01b06a6808bc31698db907761e5890eb2295e287af53f6693b39ce55454034a" 248 | ], 249 | "index": "pypi", 250 | "version": "==1.5.6" 251 | }, 252 | "bleach": { 253 | "hashes": [ 254 | "sha256:6123ddc1052673e52bab52cdc955bcb57a015264a1c57d37bea2f6b817af0125", 255 | "sha256:98b3170739e5e83dd9dc19633f074727ad848cbedb6026708c8ac2d3b697a433" 256 | ], 257 | "version": "==3.3.0" 258 | }, 259 | "certifi": { 260 | "hashes": [ 261 | "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c", 262 | "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830" 263 | ], 264 | "version": "==2020.12.5" 265 | }, 266 | "chardet": { 267 | "hashes": [ 268 | "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", 269 | "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" 270 | ], 271 | "version": "==4.0.0" 272 | }, 273 | "colorama": { 274 | "hashes": [ 275 | "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b", 276 | "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2" 277 | ], 278 | "version": "==0.4.4" 279 | }, 280 | "docutils": { 281 | "hashes": [ 282 | 
"sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125", 283 | "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61" 284 | ], 285 | "version": "==0.17.1" 286 | }, 287 | "flake8": { 288 | "hashes": [ 289 | "sha256:1aa8990be1e689d96c745c5682b687ea49f2e05a443aff1f8251092b0014e378", 290 | "sha256:3b9f848952dddccf635be78098ca75010f073bfe14d2c6bda867154bea728d2a" 291 | ], 292 | "index": "pypi", 293 | "version": "==3.9.1" 294 | }, 295 | "idna": { 296 | "hashes": [ 297 | "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", 298 | "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" 299 | ], 300 | "version": "==2.10" 301 | }, 302 | "importlib-metadata": { 303 | "hashes": [ 304 | "sha256:19192b88d959336bfa6bdaaaef99aeafec179eca19c47c804e555703ee5f07ef", 305 | "sha256:2e881981c9748d7282b374b68e759c87745c25427b67ecf0cc67fb6637a1bff9" 306 | ], 307 | "markers": "python_version < '3.8'", 308 | "version": "==4.0.0" 309 | }, 310 | "keyring": { 311 | "hashes": [ 312 | "sha256:045703609dd3fccfcdb27da201684278823b72af515aedec1a8515719a038cb8", 313 | "sha256:8f607d7d1cc502c43a932a275a56fe47db50271904513a379d39df1af277ac48" 314 | ], 315 | "version": "==23.0.1" 316 | }, 317 | "mccabe": { 318 | "hashes": [ 319 | "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", 320 | "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" 321 | ], 322 | "version": "==0.6.1" 323 | }, 324 | "packaging": { 325 | "hashes": [ 326 | "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5", 327 | "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a" 328 | ], 329 | "version": "==20.9" 330 | }, 331 | "pkginfo": { 332 | "hashes": [ 333 | "sha256:029a70cb45c6171c329dfc890cde0879f8c52d6f3922794796e06f577bb03db4", 334 | "sha256:9fdbea6495622e022cc72c2e5e1b735218e4ffb2a2a69cde2694a6c1f16afb75" 335 | ], 336 | "version": "==1.7.0" 337 | }, 338 | "pycodestyle": { 
339 | "hashes": [ 340 | "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068", 341 | "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef" 342 | ], 343 | "version": "==2.7.0" 344 | }, 345 | "pyflakes": { 346 | "hashes": [ 347 | "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3", 348 | "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db" 349 | ], 350 | "version": "==2.3.1" 351 | }, 352 | "pygments": { 353 | "hashes": [ 354 | "sha256:2656e1a6edcdabf4275f9a3640db59fd5de107d88e8663c5d4e9a0fa62f77f94", 355 | "sha256:534ef71d539ae97d4c3a4cf7d6f110f214b0e687e92f9cb9d2a3b0d3101289c8" 356 | ], 357 | "version": "==2.8.1" 358 | }, 359 | "pyparsing": { 360 | "hashes": [ 361 | "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", 362 | "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" 363 | ], 364 | "version": "==2.4.7" 365 | }, 366 | "pywin32-ctypes": { 367 | "hashes": [ 368 | "sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942", 369 | "sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98" 370 | ], 371 | "markers": "sys_platform == 'win32'", 372 | "version": "==0.2.0" 373 | }, 374 | "readme-renderer": { 375 | "hashes": [ 376 | "sha256:63b4075c6698fcfa78e584930f07f39e05d46f3ec97f65006e430b595ca6348c", 377 | "sha256:92fd5ac2bf8677f310f3303aa4bce5b9d5f9f2094ab98c29f13791d7b805a3db" 378 | ], 379 | "version": "==29.0" 380 | }, 381 | "requests": { 382 | "hashes": [ 383 | "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", 384 | "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e" 385 | ], 386 | "version": "==2.25.1" 387 | }, 388 | "requests-toolbelt": { 389 | "hashes": [ 390 | "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f", 391 | "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0" 392 | ], 393 | "version": "==0.9.1" 394 | }, 
395 | "rfc3986": { 396 | "hashes": [ 397 | "sha256:112398da31a3344dc25dbf477d8df6cb34f9278a94fee2625d89e4514be8bb9d", 398 | "sha256:af9147e9aceda37c91a05f4deb128d4b4b49d6b199775fd2d2927768abdc8f50" 399 | ], 400 | "version": "==1.4.0" 401 | }, 402 | "six": { 403 | "hashes": [ 404 | "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", 405 | "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" 406 | ], 407 | "version": "==1.15.0" 408 | }, 409 | "toml": { 410 | "hashes": [ 411 | "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", 412 | "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" 413 | ], 414 | "version": "==0.10.2" 415 | }, 416 | "tqdm": { 417 | "hashes": [ 418 | "sha256:daec693491c52e9498632dfbe9ccfc4882a557f5fa08982db1b4d3adbe0887c3", 419 | "sha256:ebdebdb95e3477ceea267decfc0784859aa3df3e27e22d23b83e9b272bf157ae" 420 | ], 421 | "version": "==4.60.0" 422 | }, 423 | "twine": { 424 | "hashes": [ 425 | "sha256:16f706f2f1687d7ce30e7effceee40ed0a09b7c33b9abb5ef6434e5551565d83", 426 | "sha256:a56c985264b991dc8a8f4234eb80c5af87fa8080d0c224ad8f2cd05a2c22e83b" 427 | ], 428 | "index": "pypi", 429 | "version": "==3.4.1" 430 | }, 431 | "typing-extensions": { 432 | "hashes": [ 433 | "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918", 434 | "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c", 435 | "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f" 436 | ], 437 | "markers": "python_version < '3.8'", 438 | "version": "==3.7.4.3" 439 | }, 440 | "urllib3": { 441 | "hashes": [ 442 | "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df", 443 | "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937" 444 | ], 445 | "version": "==1.26.4" 446 | }, 447 | "webencodings": { 448 | "hashes": [ 449 | "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", 450 | 
"sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923" 451 | ], 452 | "version": "==0.5.1" 453 | }, 454 | "zipp": { 455 | "hashes": [ 456 | "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76", 457 | "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098" 458 | ], 459 | "version": "==3.4.1" 460 | } 461 | } 462 | } 463 | --------------------------------------------------------------------------------