├── .dockerignore
├── .env
├── .gitignore
├── Dockerfile
├── LICENSE
├── Pipfile
├── Pipfile.lock
├── README.md
├── app
│   ├── __init__.py
│   ├── common
│   │   ├── __init__.py
│   │   ├── logger.py
│   │   └── resp_code.py
│   ├── config.py
│   ├── database.py
│   ├── main.py
│   ├── route.py
│   └── state.py
├── job
│   ├── __init__.py
│   ├── api
│   │   ├── __init__.py
│   │   └── v1
│   │       ├── __init__.py
│   │       ├── job.py
│   │       ├── record.py
│   │       └── task.py
│   ├── listener.py
│   ├── models.py
│   ├── route.py
│   ├── schemas.py
│   └── tasks.py
├── requirements.txt
├── rpc
│   ├── __init__.py
│   └── client.py
├── rpc_server.py
├── run.py
├── run.sh
├── scheduler
│   ├── __init__.py
│   ├── listener.py
│   ├── schedulers
│   │   ├── __init__.py
│   │   ├── asyncio.py
│   │   └── gevent.py
│   ├── schema.py
│   ├── stores
│   │   ├── __init__.py
│   │   └── sqlachemy.py
│   └── utils.py
├── setup.cfg
├── sources.list
├── task
│   └── test.py
├── tests
│   └── __init__.py
└── utils
    ├── __init__.py
    └── common.py
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | .venv
--------------------------------------------------------------------------------
/.env:
--------------------------------------------------------------------------------
1 | PROJECT_NAME=CronJob
2 | BACKEND_CORS_ORIGINS=["*"]
3 | 
4 | DEBUG=True
5 | DB_HOST=172.17.0.1
6 | DB_USER=root
7 | DB_PASSWORD=cwx568319
8 | DB_NAME=cronJob
9 | DB_PORT=3306
10 | 
11 | RPC_URL=tcp://0.0.0.0:4242
12 | RPC_POOL_SIZE=3
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 | 
6 | # C extensions
7 | *.so
8 | 
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 | 
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 | 
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 | 
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 | cover/
54 | 
55 | # Translations
56 | *.mo
57 | *.pot
58 | 
59 | # Django stuff:
60 | *.log
61 | local_settings.py
62 | db.sqlite3
63 | db.sqlite3-journal
64 | 
65 | # Flask stuff:
66 | instance/
67 | .webassets-cache
68 | 
69 | # Scrapy stuff:
70 | .scrapy
71 | 
72 | # Sphinx documentation
73 | docs/_build/
74 | 
75 | # PyBuilder
76 | .pybuilder/
77 | target/
78 | 
79 | # Jupyter Notebook
80 | .ipynb_checkpoints
81 | 
82 | # IPython
83 | profile_default/
84 | ipython_config.py
85 | 
86 | # pyenv
87 | # For a library or package, you might want to ignore these files since the code is
88 | # intended to run in multiple environments; otherwise, check them in:
89 | # .python-version
90 | 
91 | # pipenv
92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
95 | # install all needed dependencies.
96 | #Pipfile.lock
97 | 
98 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
99 | __pypackages__/
100 | 
101 | # Celery stuff
102 | celerybeat-schedule
103 | celerybeat.pid
104 | 
105 | # SageMath parsed files
106 | *.sage.py
107 | 
108 | # Environments
109 | .env
110 | .venv
111 | env/
112 | venv/
113 | ENV/
114 | env.bak/
115 | venv.bak/
116 | 
117 | # Spyder project settings
118 | .spyderproject
119 | .spyproject
120 | 
121 | # Rope project settings
122 | .ropeproject
123 | 
124 | # mkdocs documentation
125 | /site
126 | 
127 | # mypy
128 | .mypy_cache/
129 | .dmypy.json
130 | dmypy.json
131 | 
132 | # Pyre type checker
133 | .pyre/
134 | 
135 | # pytype static type analyzer
136 | .pytype/
137 | 
138 | # Cython debug symbols
139 | cython_debug/
140 | 
141 | # Text Editor
142 | .vscode
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:20.04
2 | 
3 | ENV LANG=C.UTF-8
4 | ENV DEBIAN_FRONTEND=noninteractive
5 | 
6 | 
7 | 
8 | # set default environment variables
9 | ENV DB_HOST=101.34.19.90
10 | 
11 | ADD sources.list /etc/apt
12 | 
13 | WORKDIR /home/cronjob
14 | ADD . /home/cronjob
15 | 
16 | RUN apt-get update && \
17 |     apt-get install -y vim && \
18 |     apt-get -y install python3.8 python3.8-dev && \
19 |     apt-get -y install python3-distutils && \
20 |     apt-get -y install python3-pip && \
21 |     python3 -m pip config set global.index-url http://mirrors.aliyun.com/pypi/simple && \
22 |     python3 -m pip config set install.trusted-host mirrors.aliyun.com && \
23 |     python3 -m pip install pipenv && \
24 |     cd /home/cronjob && \
25 |     pipenv install
26 | 
27 | ENTRYPOINT ["bash", "run.sh"]
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 | 
3 | Copyright (c) 2021 AnsGoo
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.douban.com/simple" 3 | verify_ssl = false 4 | name = "pypi" 5 | 6 | [packages] 7 | fastapi = "==0.61.1" 8 | pydantic = {extras = ["dotenv"], version = "*"} 9 | pymysql = "==1.0.2" 10 | apscheduler = "==3.6.3" 11 | ujson = "==4.0.1" 12 | loguru = "==0.5.3" 13 | uvicorn = "==0.13.3" 14 | dateutils = "==0.6.12" 15 | sqlalchemy = "1.4.20" 16 | zerorpc = "*" 17 | 18 | [dev-packages] 19 | pdir2 = "*" 20 | 21 | [requires] 22 | python_version = "3.8" 23 | 24 | [scripts] 25 | rpc = "python rpc_server.py" 26 | server = "uvicorn app.main:app --host 0.0.0.0 --port 8000" 27 | dev = "uvicorn app.main:app --host 0.0.0.0 --reload" 28 | -------------------------------------------------------------------------------- /Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "118eeabe09a5d80a6331f06cf22a74ba2aa4856bb3a9ceef92c93e9fd7fa0232" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": { 8 | "python_version": "3.8" 9 | }, 10 | "sources": [ 11 | { 12 | "name": "pypi", 13 | "url": "https://pypi.douban.com/simple", 14 | "verify_ssl": false 15 | } 16 | ] 17 | }, 18 | "default": { 19 | "apscheduler": { 20 | "hashes": [ 21 | "sha256:3bb5229eed6fbbdafc13ce962712ae66e175aa214c69bed35a06bffcf0c5e244", 22 | "sha256:e8b1ecdb4c7cb2818913f766d5898183c7cb8936680710a4d3a966e02262e526" 23 | ], 24 | "index": "pypi", 25 | "version": "==3.6.3" 26 | }, 27 | "backports.zoneinfo": { 28 | "hashes": [ 29 | "sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf", 30 | "sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328", 31 | "sha256:1c5742112073a563c81f786e77514969acb58649bcdf6cdf0b4ed31a348d4546", 32 | "sha256:4a0f800587060bf8880f954dbef70de6c11bbe59c673c3d818921f042f9954a6", 33 | "sha256:5c144945a7752ca544b4b78c8c41544cdfaf9786f25fe5ffb10e838e19a27570", 34 | "sha256:7b0a64cda4145548fed9efc10322770f929b944ce5cee6c0dfe0c87bf4c0c8c9", 35 | "sha256:8439c030a11780786a2002261569bdf362264f605dfa4d65090b64b05c9f79a7", 36 | "sha256:8961c0f32cd0336fb8e8ead11a1f8cd99ec07145ec2931122faaac1c8f7fd987", 37 | "sha256:89a48c0d158a3cc3f654da4c2de1ceba85263fafb861b98b59040a5086259722", 38 | "sha256:a76b38c52400b762e48131494ba26be363491ac4f9a04c1b7e92483d169f6582", 39 | "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc", 40 | "sha256:e55b384612d93be96506932a786bbcde5a2db7a9e6a4bb4bffe8b733f5b9036b", 41 | "sha256:e81b76cace8eda1fca50e345242ba977f9be6ae3945af8d46326d776b4cf78d1", 42 | "sha256:e8236383a20872c0cdf5a62b554b27538db7fa1bbec52429d8d106effbaeca08", 43 | "sha256:f04e857b59d9d1ccc39ce2da1021d196e47234873820cbeaad210724b1ee28ac", 44 | "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2" 45 | ], 46 | "markers": "python_version >= '3.6' and python_version < '3.9'", 47 | "version": "==0.2.1" 48 | }, 49 | "click": { 50 | "hashes": [ 51 | "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", 52 | "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" 53 | ], 54 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", 55 | "version": "==7.1.2" 56 | }, 57 | "dateutils": { 58 | "hashes": [ 59 | "sha256:03dd90bcb21541bd4eb4b013637e4f1b5f944881c46cc6e4b67a6059e370e3f1", 60 | 
"sha256:f33b6ab430fa4166e7e9cb8b21ee9f6c9843c48df1a964466f52c79b2a8d53b3" 61 | ], 62 | "index": "pypi", 63 | "version": "==0.6.12" 64 | }, 65 | "fastapi": { 66 | "hashes": [ 67 | "sha256:61ed73b4304413a2ea618d1b95ea866ee386e0e62dd8659c4f5059286f4a39c2", 68 | "sha256:6cc31bb555dd8ca956d1d227477d661e4ac012337242a41d36214ffbda78bfe9" 69 | ], 70 | "index": "pypi", 71 | "version": "==0.61.1" 72 | }, 73 | "future": { 74 | "hashes": [ 75 | "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d" 76 | ], 77 | "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", 78 | "version": "==0.18.2" 79 | }, 80 | "gevent": { 81 | "hashes": [ 82 | "sha256:018f93de7d5318d2fb440f846839a4464738468c3476d5c9cf7da45bb71c18bd", 83 | "sha256:0d581f22a5be6281b11ad6309b38b18f0638cf896931223cbaa5adb904826ef6", 84 | "sha256:1472012493ca1fac103f700d309cb6ef7964dcdb9c788d1768266e77712f5e49", 85 | "sha256:172caa66273315f283e90a315921902cb6549762bdcb0587fd60cb712a9d6263", 86 | "sha256:17b68f4c9e20e47ad49fe797f37f91d5bbeace8765ce2707f979a8d4ec197e4d", 87 | "sha256:1ca01da176ee37b3527a2702f7d40dbc9ffb8cfc7be5a03bfa4f9eec45e55c46", 88 | "sha256:1d543c9407a1e4bca11a8932916988cfb16de00366de5bf7bc9e7a3f61e60b18", 89 | "sha256:1e1286a76f15b5e15f1e898731d50529e249529095a032453f2c101af3fde71c", 90 | "sha256:1e955238f59b2947631c9782a713280dd75884e40e455313b5b6bbc20b92ff73", 91 | "sha256:1f001cac0ba8da76abfeb392a3057f81fab3d67cc916c7df8ea977a44a2cc989", 92 | "sha256:1ff3796692dff50fec2f381b9152438b221335f557c4f9b811f7ded51b7a25a1", 93 | "sha256:2929377c8ebfb6f4d868d161cd8de2ea6b9f6c7a5fcd4f78bcd537319c16190b", 94 | "sha256:319d8b1699b7b8134de66d656cd739b308ab9c45ace14d60ae44de7775b456c9", 95 | "sha256:323b207b281ba0405fea042067fa1a61662e5ac0d574ede4ebbda03efd20c350", 96 | "sha256:3b7eae8a0653ba95a224faaddf629a913ace408edb67384d3117acf42d7dcf89", 97 | "sha256:4114f0f439f0b547bb6f1d474fee99ddb46736944ad2207cef3771828f6aa358", 98 | "sha256:4197d423e198265eef39a0dea286ef389da9148e070310f34455ecee8172c391", 99 | "sha256:494c7f29e94df9a1c3157d67bb7edfa32a46eed786e04d9ee68d39f375e30001", 100 | "sha256:4e2f008c82dc54ec94f4de12ca6feea60e419babb48ec145456907ae61625aa4", 101 | "sha256:53ee7f170ed42c7561fe8aff5d381dc9a4124694e70580d0c02fba6aafc0ea37", 102 | "sha256:54f4bfd74c178351a4a05c5c7df6f8a0a279ff6f392b57608ce0e83c768207f9", 103 | "sha256:58898dbabb5b11e4d0192aae165ad286dc6742c543e1be9d30dc82753547c508", 104 | "sha256:59b47e81b399d49a5622f0f503c59f1ce57b7705306ea0196818951dfc2f36c8", 105 | "sha256:5aa99e4882a9e909b4756ee799c6fa0f79eb0542779fad4cc60efa23ec1b2aa8", 106 | "sha256:6c04ee32c11e9fcee47c1b431834878dc987a7a2cc4fe126ddcae3bad723ce89", 107 | "sha256:84c517e33ed604fa06b7d756dc0171169cc12f7fdd68eb7b17708a62eebf4516", 108 | "sha256:8729129edef2637a8084258cb9ec4e4d5ca45d97ac77aa7a6ff19ccb530ab731", 109 | "sha256:877abdb3a669576b1d51ce6a49b7260b2a96f6b2424eb93287e779a3219d20ba", 110 | "sha256:8c192d2073e558e241f0b592c1e2b34127a4481a5be240cad4796533b88b1a98", 111 | "sha256:8f2477e7b0a903a01485c55bacf2089110e5f767014967ba4b287ff390ae2638", 112 | "sha256:96c56c280e3c43cfd075efd10b250350ed5ffd3c1514ec99a080b1b92d7c8374", 113 | "sha256:97cd42382421779f5d82ec5007199e8a84aa288114975429e4fd0a98f2290f10", 114 | "sha256:98bc510e80f45486ef5b806a1c305e0e89f0430688c14984b0dbdec03331f48b", 115 | "sha256:990d7069f14dc40674e0d5cb43c68fd3bad8337048613b9bb94a0c4180ffc176", 116 | "sha256:9d85574eb729f981fea9a78998725a06292d90a3ed50ddca74530c3148c0be41", 117 | 
"sha256:a2237451c721a0f874ef89dbb4af4fdc172b76a964befaa69deb15b8fff10f49", 118 | "sha256:a47a4e77e2bc668856aad92a0b8de7ee10768258d93cd03968e6c7ba2e832f76", 119 | "sha256:a5488eba6a568b4d23c072113da4fc0feb1b5f5ede7381656dc913e0d82204e2", 120 | "sha256:ae90226074a6089371a95f20288431cd4b3f6b0b096856afd862e4ac9510cddd", 121 | "sha256:b43d500d7d3c0e03070dee813335bb5315215aa1cf6a04c61093dfdd718640b3", 122 | "sha256:b6c144e08dfad4106effc043a026e5d0c0eff6ad031904c70bf5090c63f3a6a7", 123 | "sha256:d21ad79cca234cdbfa249e727500b0ddcbc7adfff6614a96e6eaa49faca3e4f2", 124 | "sha256:d82081656a5b9a94d37c718c8646c757e1617e389cdc533ea5e6a6f0b8b78545", 125 | "sha256:da4183f0b9d9a1e25e1758099220d32c51cc2c6340ee0dea3fd236b2b37598e4", 126 | "sha256:db562a8519838bddad0c439a2b12246bab539dd50e299ea7ff3644274a33b6a5", 127 | "sha256:ddaa3e310a8f1a45b5c42cf50b54c31003a3028e7d4e085059090ea0e7a5fddd", 128 | "sha256:ed7f16613eebf892a6a744d7a4a8f345bc6f066a0ff3b413e2479f9c0a180193", 129 | "sha256:efc003b6c1481165af61f0aeac248e0a9ac8d880bb3acbe469b448674b2d5281", 130 | "sha256:f01c9adbcb605364694b11dcd0542ec468a29ac7aba2fb5665dc6caf17ba4d7e", 131 | "sha256:f23d0997149a816a2a9045af29c66f67f405a221745b34cefeac5769ed451db8", 132 | "sha256:f3329bedbba4d3146ae58c667e0f9ac1e6f1e1e6340c7593976cdc60aa7d1a47", 133 | "sha256:f7ed2346eb9dc4344f9cb0d7963ce5b74fe16fdd031a2809bb6c2b6eba7ebcd5" 134 | ], 135 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", 136 | "version": "==22.10.2" 137 | }, 138 | "greenlet": { 139 | "hashes": [ 140 | "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9", 141 | "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9", 142 | "sha256:04957dc96669be041e0c260964cfef4c77287f07c40452e61abe19d647505581", 143 | "sha256:0722c9be0797f544a3ed212569ca3fe3d9d1a1b13942d10dd6f0e8601e484d26", 144 | "sha256:097e3dae69321e9100202fc62977f687454cd0ea147d0fd5a766e57450c569fd", 145 | "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2", 146 | "sha256:13ba6e8e326e2116c954074c994da14954982ba2795aebb881c07ac5d093a58a", 147 | "sha256:13ebf93c343dd8bd010cd98e617cb4c1c1f352a0cf2524c82d3814154116aa82", 148 | "sha256:1407fe45246632d0ffb7a3f4a520ba4e6051fc2cbd61ba1f806900c27f47706a", 149 | "sha256:1bf633a50cc93ed17e494015897361010fc08700d92676c87931d3ea464123ce", 150 | "sha256:2d0bac0385d2b43a7bd1d651621a4e0f1380abc63d6fb1012213a401cbd5bf8f", 151 | "sha256:3001d00eba6bbf084ae60ec7f4bb8ed375748f53aeaefaf2a37d9f0370558524", 152 | "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48", 153 | "sha256:38255a3f1e8942573b067510f9611fc9e38196077b0c8eb7a8c795e105f9ce77", 154 | "sha256:3d75b8d013086b08e801fbbb896f7d5c9e6ccd44f13a9241d2bf7c0df9eda928", 155 | "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e", 156 | "sha256:42e602564460da0e8ee67cb6d7236363ee5e131aa15943b6670e44e5c2ed0f67", 157 | "sha256:4aeaebcd91d9fee9aa768c1b39cb12214b30bf36d2b7370505a9f2165fedd8d9", 158 | "sha256:4c8b1c43e75c42a6cafcc71defa9e01ead39ae80bd733a2608b297412beede68", 159 | "sha256:4d37990425b4687ade27810e3b1a1c37825d242ebc275066cfee8cb6b8829ccd", 160 | "sha256:4f09b0010e55bec3239278f642a8a506b91034f03a4fb28289a7d448a67f1515", 161 | "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5", 162 | "sha256:5067920de254f1a2dee8d3d9d7e4e03718e8fd2d2d9db962c8c9fa781ae82a39", 163 | "sha256:56961cfca7da2fdd178f95ca407fa330c64f33289e1804b592a77d5593d9bd94", 164 | 
"sha256:5a8e05057fab2a365c81abc696cb753da7549d20266e8511eb6c9d9f72fe3e92", 165 | "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e", 166 | "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726", 167 | "sha256:6f61d71bbc9b4a3de768371b210d906726535d6ca43506737682caa754b956cd", 168 | "sha256:72b00a8e7c25dcea5946692a2485b1a0c0661ed93ecfedfa9b6687bd89a24ef5", 169 | "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764", 170 | "sha256:81b0ea3715bf6a848d6f7149d25bf018fd24554a4be01fcbbe3fdc78e890b955", 171 | "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608", 172 | "sha256:8dca09dedf1bd8684767bc736cc20c97c29bc0c04c413e3276e0962cd7aeb148", 173 | "sha256:974a39bdb8c90a85982cdb78a103a32e0b1be986d411303064b28a80611f6e51", 174 | "sha256:9e112e03d37987d7b90c1e98ba5e1b59e1645226d78d73282f45b326f7bddcb9", 175 | "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d", 176 | "sha256:9ed358312e63bf683b9ef22c8e442ef6c5c02973f0c2a939ec1d7b50c974015c", 177 | "sha256:9f2c221eecb7ead00b8e3ddb913c67f75cba078fd1d326053225a3f59d850d72", 178 | "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1", 179 | "sha256:a4c0757db9bd08470ff8277791795e70d0bf035a011a528ee9a5ce9454b6cba2", 180 | "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23", 181 | "sha256:b1992ba9d4780d9af9726bbcef6a1db12d9ab1ccc35e5773685a24b7fb2758eb", 182 | "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6", 183 | "sha256:b5e83e4de81dcc9425598d9469a624826a0b1211380ac444c7c791d4a2137c19", 184 | "sha256:be35822f35f99dcc48152c9839d0171a06186f2d71ef76dc57fa556cc9bf6b45", 185 | "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000", 186 | "sha256:c140e7eb5ce47249668056edf3b7e9900c6a2e22fb0eaf0513f18a1b2c14e1da", 187 | "sha256:c6a08799e9e88052221adca55741bf106ec7ea0710bca635c208b751f0d5b617", 188 | "sha256:cb242fc2cda5a307a7698c93173d3627a2a90d00507bccf5bc228851e8304963", 189 | "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7", 190 | "sha256:cd4ccc364cf75d1422e66e247e52a93da6a9b73cefa8cad696f3cbbb75af179d", 191 | "sha256:d21681f09e297a5adaa73060737e3aa1279a13ecdcfcc6ef66c292cb25125b2d", 192 | "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0", 193 | "sha256:d566b82e92ff2e09dd6342df7e0eb4ff6275a3f08db284888dcd98134dbd4243", 194 | "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce", 195 | "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6", 196 | "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a", 197 | "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1", 198 | "sha256:f6327b6907b4cb72f650a5b7b1be23a2aab395017aa6f1adb13069d66360eb3f", 199 | "sha256:fb412b7db83fe56847df9c47b6fe3f13911b06339c2aa02dcc09dce8bbf582cd" 200 | ], 201 | "markers": "python_version >= '3' and platform_python_implementation == 'CPython' and platform_machine == 'aarch64' or (platform_machine == 'ppc64le' or (platform_machine == 'x86_64' or (platform_machine == 'amd64' or (platform_machine == 'AMD64' or (platform_machine == 'win32' or platform_machine == 'WIN32')))))", 202 | "version": "==2.0.1" 203 | }, 204 | "h11": { 205 | "hashes": [ 206 | "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", 207 | "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761" 208 | ], 209 | "markers": "python_version >= '3.7'", 210 | "version": "==0.14.0" 
211 | }, 212 | "loguru": { 213 | "hashes": [ 214 | "sha256:b28e72ac7a98be3d28ad28570299a393dfcd32e5e3f6a353dec94675767b6319", 215 | "sha256:f8087ac396b5ee5f67c963b495d615ebbceac2796379599820e324419d53667c" 216 | ], 217 | "index": "pypi", 218 | "version": "==0.5.3" 219 | }, 220 | "msgpack": { 221 | "hashes": [ 222 | "sha256:002b5c72b6cd9b4bafd790f364b8480e859b4712e91f43014fe01e4f957b8467", 223 | "sha256:0a68d3ac0104e2d3510de90a1091720157c319ceeb90d74f7b5295a6bee51bae", 224 | "sha256:0df96d6eaf45ceca04b3f3b4b111b86b33785683d682c655063ef8057d61fd92", 225 | "sha256:0dfe3947db5fb9ce52aaea6ca28112a170db9eae75adf9339a1aec434dc954ef", 226 | "sha256:0e3590f9fb9f7fbc36df366267870e77269c03172d086fa76bb4eba8b2b46624", 227 | "sha256:11184bc7e56fd74c00ead4f9cc9a3091d62ecb96e97653add7a879a14b003227", 228 | "sha256:112b0f93202d7c0fef0b7810d465fde23c746a2d482e1e2de2aafd2ce1492c88", 229 | "sha256:1276e8f34e139aeff1c77a3cefb295598b504ac5314d32c8c3d54d24fadb94c9", 230 | "sha256:1576bd97527a93c44fa856770197dec00d223b0b9f36ef03f65bac60197cedf8", 231 | "sha256:1e91d641d2bfe91ba4c52039adc5bccf27c335356055825c7f88742c8bb900dd", 232 | "sha256:26b8feaca40a90cbe031b03d82b2898bf560027160d3eae1423f4a67654ec5d6", 233 | "sha256:2999623886c5c02deefe156e8f869c3b0aaeba14bfc50aa2486a0415178fce55", 234 | "sha256:2a2df1b55a78eb5f5b7d2a4bb221cd8363913830145fad05374a80bf0877cb1e", 235 | "sha256:2bb8cdf50dd623392fa75525cce44a65a12a00c98e1e37bf0fb08ddce2ff60d2", 236 | "sha256:2cc5ca2712ac0003bcb625c96368fd08a0f86bbc1a5578802512d87bc592fe44", 237 | "sha256:35bc0faa494b0f1d851fd29129b2575b2e26d41d177caacd4206d81502d4c6a6", 238 | "sha256:3c11a48cf5e59026ad7cb0dc29e29a01b5a66a3e333dc11c04f7e991fc5510a9", 239 | "sha256:449e57cc1ff18d3b444eb554e44613cffcccb32805d16726a5494038c3b93dab", 240 | "sha256:462497af5fd4e0edbb1559c352ad84f6c577ffbbb708566a0abaaa84acd9f3ae", 241 | "sha256:4733359808c56d5d7756628736061c432ded018e7a1dff2d35a02439043321aa", 242 | "sha256:48f5d88c99f64c456413d74a975bd605a9b0526293218a3b77220a2c15458ba9", 243 | "sha256:49565b0e3d7896d9ea71d9095df15b7f75a035c49be733051c34762ca95bbf7e", 244 | "sha256:4ab251d229d10498e9a2f3b1e68ef64cb393394ec477e3370c457f9430ce9250", 245 | "sha256:4d5834a2a48965a349da1c5a79760d94a1a0172fbb5ab6b5b33cbf8447e109ce", 246 | "sha256:4dea20515f660aa6b7e964433b1808d098dcfcabbebeaaad240d11f909298075", 247 | "sha256:545e3cf0cf74f3e48b470f68ed19551ae6f9722814ea969305794645da091236", 248 | "sha256:63e29d6e8c9ca22b21846234913c3466b7e4ee6e422f205a2988083de3b08cae", 249 | "sha256:6916c78f33602ecf0509cc40379271ba0f9ab572b066bd4bdafd7434dee4bc6e", 250 | "sha256:6a4192b1ab40f8dca3f2877b70e63799d95c62c068c84dc028b40a6cb03ccd0f", 251 | "sha256:6c9566f2c39ccced0a38d37c26cc3570983b97833c365a6044edef3574a00c08", 252 | "sha256:76ee788122de3a68a02ed6f3a16bbcd97bc7c2e39bd4d94be2f1821e7c4a64e6", 253 | "sha256:7760f85956c415578c17edb39eed99f9181a48375b0d4a94076d84148cf67b2d", 254 | "sha256:77ccd2af37f3db0ea59fb280fa2165bf1b096510ba9fe0cc2bf8fa92a22fdb43", 255 | "sha256:81fc7ba725464651190b196f3cd848e8553d4d510114a954681fd0b9c479d7e1", 256 | "sha256:85f279d88d8e833ec015650fd15ae5eddce0791e1e8a59165318f371158efec6", 257 | "sha256:9667bdfdf523c40d2511f0e98a6c9d3603be6b371ae9a238b7ef2dc4e7a427b0", 258 | "sha256:a75dfb03f8b06f4ab093dafe3ddcc2d633259e6c3f74bb1b01996f5d8aa5868c", 259 | "sha256:ac5bd7901487c4a1dd51a8c58f2632b15d838d07ceedaa5e4c080f7190925bff", 260 | "sha256:aca0f1644d6b5a73eb3e74d4d64d5d8c6c3d577e753a04c9e9c87d07692c58db", 261 | "sha256:b17be2478b622939e39b816e0aa8242611cc8d3583d1cd8ec31b249f04623243", 262 
| "sha256:c1683841cd4fa45ac427c18854c3ec3cd9b681694caf5bff04edb9387602d661", 263 | "sha256:c23080fdeec4716aede32b4e0ef7e213c7b1093eede9ee010949f2a418ced6ba", 264 | "sha256:d5b5b962221fa2c5d3a7f8133f9abffc114fe218eb4365e40f17732ade576c8e", 265 | "sha256:d603de2b8d2ea3f3bcb2efe286849aa7a81531abc52d8454da12f46235092bcb", 266 | "sha256:e83f80a7fec1a62cf4e6c9a660e39c7f878f603737a0cdac8c13131d11d97f52", 267 | "sha256:eb514ad14edf07a1dbe63761fd30f89ae79b42625731e1ccf5e1f1092950eaa6", 268 | "sha256:eba96145051ccec0ec86611fe9cf693ce55f2a3ce89c06ed307de0e085730ec1", 269 | "sha256:ed6f7b854a823ea44cf94919ba3f727e230da29feb4a99711433f25800cf747f", 270 | "sha256:f0029245c51fd9473dc1aede1160b0a29f4a912e6b1dd353fa6d317085b219da", 271 | "sha256:f5d869c18f030202eb412f08b28d2afeea553d6613aee89e200d7aca7ef01f5f", 272 | "sha256:fb62ea4b62bfcb0b380d5680f9a4b3f9a2d166d9394e9bbd9666c0ee09a3645c", 273 | "sha256:fcb8a47f43acc113e24e910399376f7277cf8508b27e5b88499f053de6b115a8" 274 | ], 275 | "version": "==1.0.4" 276 | }, 277 | "pydantic": { 278 | "extras": [ 279 | "dotenv" 280 | ], 281 | "hashes": [ 282 | "sha256:05e00dbebbe810b33c7a7362f231893183bcc4251f3f2ff991c31d5c08240c42", 283 | "sha256:06094d18dd5e6f2bbf93efa54991c3240964bb663b87729ac340eb5014310624", 284 | "sha256:0b959f4d8211fc964772b595ebb25f7652da3f22322c007b6fed26846a40685e", 285 | "sha256:19b3b9ccf97af2b7519c42032441a891a5e05c68368f40865a90eb88833c2559", 286 | "sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709", 287 | "sha256:1ee433e274268a4b0c8fde7ad9d58ecba12b069a033ecc4645bb6303c062d2e9", 288 | "sha256:216f3bcbf19c726b1cc22b099dd409aa371f55c08800bcea4c44c8f74b73478d", 289 | "sha256:2d0567e60eb01bccda3a4df01df677adf6b437958d35c12a3ac3e0f078b0ee52", 290 | "sha256:2e05aed07fa02231dbf03d0adb1be1d79cabb09025dd45aa094aa8b4e7b9dcda", 291 | "sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912", 292 | "sha256:355639d9afc76bcb9b0c3000ddcd08472ae75318a6eb67a15866b87e2efa168c", 293 | "sha256:37c90345ec7dd2f1bcef82ce49b6235b40f282b94d3eec47e801baf864d15525", 294 | "sha256:4b8795290deaae348c4eba0cebb196e1c6b98bdbe7f50b2d0d9a4a99716342fe", 295 | "sha256:5760e164b807a48a8f25f8aa1a6d857e6ce62e7ec83ea5d5c5a802eac81bad41", 296 | "sha256:6eb843dcc411b6a2237a694f5e1d649fc66c6064d02b204a7e9d194dff81eb4b", 297 | "sha256:7b5ba54d026c2bd2cb769d3468885f23f43710f651688e91f5fb1edcf0ee9283", 298 | "sha256:7c2abc4393dea97a4ccbb4ec7d8658d4e22c4765b7b9b9445588f16c71ad9965", 299 | "sha256:81a7b66c3f499108b448f3f004801fcd7d7165fb4200acb03f1c2402da73ce4c", 300 | "sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410", 301 | "sha256:9300fcbebf85f6339a02c6994b2eb3ff1b9c8c14f502058b5bf349d42447dcf5", 302 | "sha256:9cabf4a7f05a776e7793e72793cd92cc865ea0e83a819f9ae4ecccb1b8aa6116", 303 | "sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98", 304 | "sha256:a4c805731c33a8db4b6ace45ce440c4ef5336e712508b4d9e1aafa617dc9907f", 305 | "sha256:ae544c47bec47a86bc7d350f965d8b15540e27e5aa4f55170ac6a75e5f73b644", 306 | "sha256:b97890e56a694486f772d36efd2ba31612739bc6f3caeee50e9e7e3ebd2fdd13", 307 | "sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd", 308 | "sha256:bedf309630209e78582ffacda64a21f96f3ed2e51fbf3962d4d488e503420254", 309 | "sha256:c1ba1afb396148bbc70e9eaa8c06c1716fdddabaf86e7027c5988bae2a829ab6", 310 | "sha256:c33602f93bfb67779f9c507e4d69451664524389546bacfe1bee13cae6dc7488", 311 | "sha256:c4aac8e7103bf598373208f6299fa9a5cfd1fc571f2d40bf1dd1955a63d6eeb5", 312 | 
"sha256:c6f981882aea41e021f72779ce2a4e87267458cc4d39ea990729e21ef18f0f8c", 313 | "sha256:cc78cc83110d2f275ec1970e7a831f4e371ee92405332ebfe9860a715f8336e1", 314 | "sha256:d49f3db871575e0426b12e2f32fdb25e579dea16486a26e5a0474af87cb1ab0a", 315 | "sha256:dd3f9a40c16daf323cf913593083698caee97df2804aa36c4b3175d5ac1b92a2", 316 | "sha256:e0bedafe4bc165ad0a56ac0bd7695df25c50f76961da29c050712596cf092d6d", 317 | "sha256:e9069e1b01525a96e6ff49e25876d90d5a563bc31c658289a8772ae186552236" 318 | ], 319 | "index": "pypi", 320 | "version": "==1.10.2" 321 | }, 322 | "pymysql": { 323 | "hashes": [ 324 | "sha256:41fc3a0c5013d5f039639442321185532e3e2c8924687abe6537de157d403641", 325 | "sha256:816927a350f38d56072aeca5dfb10221fe1dc653745853d30a216637f5d7ad36" 326 | ], 327 | "index": "pypi", 328 | "version": "==1.0.2" 329 | }, 330 | "python-dateutil": { 331 | "hashes": [ 332 | "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", 333 | "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" 334 | ], 335 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 336 | "version": "==2.8.2" 337 | }, 338 | "python-dotenv": { 339 | "hashes": [ 340 | "sha256:1684eb44636dd462b66c3ee016599815514527ad99965de77f43e0944634a7e5", 341 | "sha256:b77d08274639e3d34145dfa6c7008e66df0f04b7be7a75fd0d5292c191d79045" 342 | ], 343 | "version": "==0.21.0" 344 | }, 345 | "pytz": { 346 | "hashes": [ 347 | "sha256:222439474e9c98fced559f1709d89e6c9cbf8d79c794ff3eb9f8800064291427", 348 | "sha256:e89512406b793ca39f5971bc999cc538ce125c0e51c27941bef4568b460095e2" 349 | ], 350 | "version": "==2022.6" 351 | }, 352 | "pytz-deprecation-shim": { 353 | "hashes": [ 354 | "sha256:8314c9692a636c8eb3bda879b9f119e350e93223ae83e70e80c31675a0fdc1a6", 355 | "sha256:af097bae1b616dde5c5744441e2ddc69e74dfdcb0c263129610d85b87445a59d" 356 | ], 357 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", 358 | "version": "==0.1.0.post0" 359 | }, 360 | "pyzmq": { 361 | "hashes": [ 362 | "sha256:0108358dab8c6b27ff6b985c2af4b12665c1bc659648284153ee501000f5c107", 363 | "sha256:07bec1a1b22dacf718f2c0e71b49600bb6a31a88f06527dfd0b5aababe3fa3f7", 364 | "sha256:0e8f482c44ccb5884bf3f638f29bea0f8dc68c97e38b2061769c4cb697f6140d", 365 | "sha256:0ec91f1bad66f3ee8c6deb65fa1fe418e8ad803efedd69c35f3b5502f43bd1dc", 366 | "sha256:0f14cffd32e9c4c73da66db97853a6aeceaac34acdc0fae9e5bbc9370281864c", 367 | "sha256:15975747462ec49fdc863af906bab87c43b2491403ab37a6d88410635786b0f4", 368 | "sha256:1724117bae69e091309ffb8255412c4651d3f6355560d9af312d547f6c5bc8b8", 369 | "sha256:1a7c280185c4da99e0cc06c63bdf91f5b0b71deb70d8717f0ab870a43e376db8", 370 | "sha256:1b7928bb7580736ffac5baf814097be342ba08d3cfdfb48e52773ec959572287", 371 | "sha256:2032d9cb994ce3b4cba2b8dfae08c7e25bc14ba484c770d4d3be33c27de8c45b", 372 | "sha256:20e7eeb1166087db636c06cae04a1ef59298627f56fb17da10528ab52a14c87f", 373 | "sha256:216f5d7dbb67166759e59b0479bca82b8acf9bed6015b526b8eb10143fb08e77", 374 | "sha256:28b119ba97129d3001673a697b7cce47fe6de1f7255d104c2f01108a5179a066", 375 | "sha256:3104f4b084ad5d9c0cb87445cc8cfd96bba710bef4a66c2674910127044df209", 376 | "sha256:3e6192dbcefaaa52ed81be88525a54a445f4b4fe2fffcae7fe40ebb58bd06bfd", 377 | "sha256:42d4f97b9795a7aafa152a36fe2ad44549b83a743fd3e77011136def512e6c2a", 378 | "sha256:44e706bac34e9f50779cb8c39f10b53a4d15aebb97235643d3112ac20bd577b4", 379 | "sha256:47b11a729d61a47df56346283a4a800fa379ae6a85870d5a2e1e4956c828eedc", 380 | 
"sha256:4854f9edc5208f63f0841c0c667260ae8d6846cfa233c479e29fdc85d42ebd58", 381 | "sha256:48f721f070726cd2a6e44f3c33f8ee4b24188e4b816e6dd8ba542c8c3bb5b246", 382 | "sha256:52afb0ac962963fff30cf1be775bc51ae083ef4c1e354266ab20e5382057dd62", 383 | "sha256:54d8b9c5e288362ec8595c1d98666d36f2070fd0c2f76e2b3c60fbad9bd76227", 384 | "sha256:5bd3d7dfd9cd058eb68d9a905dec854f86649f64d4ddf21f3ec289341386c44b", 385 | "sha256:613010b5d17906c4367609e6f52e9a2595e35d5cc27d36ff3f1b6fa6e954d944", 386 | "sha256:624321120f7e60336be8ec74a172ae7fba5c3ed5bf787cc85f7e9986c9e0ebc2", 387 | "sha256:65c94410b5a8355cfcf12fd600a313efee46ce96a09e911ea92cf2acf6708804", 388 | "sha256:6640f83df0ae4ae1104d4c62b77e9ef39be85ebe53f636388707d532bee2b7b8", 389 | "sha256:687700f8371643916a1d2c61f3fdaa630407dd205c38afff936545d7b7466066", 390 | "sha256:77c2713faf25a953c69cf0f723d1b7dd83827b0834e6c41e3fb3bbc6765914a1", 391 | "sha256:78068e8678ca023594e4a0ab558905c1033b2d3e806a0ad9e3094e231e115a33", 392 | "sha256:7a23ccc1083c260fa9685c93e3b170baba45aeed4b524deb3f426b0c40c11639", 393 | "sha256:7abddb2bd5489d30ffeb4b93a428130886c171b4d355ccd226e83254fcb6b9ef", 394 | "sha256:80093b595921eed1a2cead546a683b9e2ae7f4a4592bb2ab22f70d30174f003a", 395 | "sha256:8242543c522d84d033fe79be04cb559b80d7eb98ad81b137ff7e0a9020f00ace", 396 | "sha256:838812c65ed5f7c2bd11f7b098d2e5d01685a3f6d1f82849423b570bae698c00", 397 | "sha256:83ea1a398f192957cb986d9206ce229efe0ee75e3c6635baff53ddf39bd718d5", 398 | "sha256:8421aa8c9b45ea608c205db9e1c0c855c7e54d0e9c2c2f337ce024f6843cab3b", 399 | "sha256:858375573c9225cc8e5b49bfac846a77b696b8d5e815711b8d4ba3141e6e8879", 400 | "sha256:86de64468cad9c6d269f32a6390e210ca5ada568c7a55de8e681ca3b897bb340", 401 | "sha256:87f7ac99b15270db8d53f28c3c7b968612993a90a5cf359da354efe96f5372b4", 402 | "sha256:8bad8210ad4df68c44ff3685cca3cda448ee46e20d13edcff8909eba6ec01ca4", 403 | "sha256:8bb4af15f305056e95ca1bd086239b9ebc6ad55e9f49076d27d80027f72752f6", 404 | "sha256:8c78bfe20d4c890cb5580a3b9290f700c570e167d4cdcc55feec07030297a5e3", 405 | "sha256:8f3f3154fde2b1ff3aa7b4f9326347ebc89c8ef425ca1db8f665175e6d3bd42f", 406 | "sha256:94010bd61bc168c103a5b3b0f56ed3b616688192db7cd5b1d626e49f28ff51b3", 407 | "sha256:941fab0073f0a54dc33d1a0460cb04e0d85893cb0c5e1476c785000f8b359409", 408 | "sha256:9dca7c3956b03b7663fac4d150f5e6d4f6f38b2462c1e9afd83bcf7019f17913", 409 | "sha256:a180dbd5ea5d47c2d3b716d5c19cc3fb162d1c8db93b21a1295d69585bfddac1", 410 | "sha256:a2712aee7b3834ace51738c15d9ee152cc5a98dc7d57dd93300461b792ab7b43", 411 | "sha256:a435ef8a3bd95c8a2d316d6e0ff70d0db524f6037411652803e118871d703333", 412 | "sha256:abb756147314430bee5d10919b8493c0ccb109ddb7f5dfd2fcd7441266a25b75", 413 | "sha256:abe6eb10122f0d746a0d510c2039ae8edb27bc9af29f6d1b05a66cc2401353ff", 414 | "sha256:acbd0a6d61cc954b9f535daaa9ec26b0a60a0d4353c5f7c1438ebc88a359a47e", 415 | "sha256:ae08ac90aa8fa14caafc7a6251bd218bf6dac518b7bff09caaa5e781119ba3f2", 416 | "sha256:ae61446166983c663cee42c852ed63899e43e484abf080089f771df4b9d272ef", 417 | "sha256:afe1f3bc486d0ce40abb0a0c9adb39aed3bbac36ebdc596487b0cceba55c21c1", 418 | "sha256:b946da90dc2799bcafa682692c1d2139b2a96ec3c24fa9fc6f5b0da782675330", 419 | "sha256:b947e264f0e77d30dcbccbb00f49f900b204b922eb0c3a9f0afd61aaa1cedc3d", 420 | "sha256:bb5635c851eef3a7a54becde6da99485eecf7d068bd885ac8e6d173c4ecd68b0", 421 | "sha256:bcbebd369493d68162cddb74a9c1fcebd139dfbb7ddb23d8f8e43e6c87bac3a6", 422 | "sha256:c31805d2c8ade9b11feca4674eee2b9cce1fec3e8ddb7bbdd961a09dc76a80ea", 423 | 
"sha256:c8840f064b1fb377cffd3efeaad2b190c14d4c8da02316dae07571252d20b31f", 424 | "sha256:ccb94342d13e3bf3ffa6e62f95b5e3f0bc6bfa94558cb37f4b3d09d6feb536ff", 425 | "sha256:d66689e840e75221b0b290b0befa86f059fb35e1ee6443bce51516d4d61b6b99", 426 | "sha256:dabf1a05318d95b1537fd61d9330ef4313ea1216eea128a17615038859da3b3b", 427 | "sha256:db03704b3506455d86ec72c3358a779e9b1d07b61220dfb43702b7b668edcd0d", 428 | "sha256:de4217b9eb8b541cf2b7fde4401ce9d9a411cc0af85d410f9d6f4333f43640be", 429 | "sha256:df0841f94928f8af9c7a1f0aaaffba1fb74607af023a152f59379c01c53aee58", 430 | "sha256:dfb992dbcd88d8254471760879d48fb20836d91baa90f181c957122f9592b3dc", 431 | "sha256:e7e66b4e403c2836ac74f26c4b65d8ac0ca1eef41dfcac2d013b7482befaad83", 432 | "sha256:e8012bce6836d3f20a6c9599f81dfa945f433dab4dbd0c4917a6fb1f998ab33d", 433 | "sha256:f01de4ec083daebf210531e2cca3bdb1608dbbbe00a9723e261d92087a1f6ebc", 434 | "sha256:f0d945a85b70da97ae86113faf9f1b9294efe66bd4a5d6f82f2676d567338b66", 435 | "sha256:fa0ae3275ef706c0309556061185dd0e4c4cd3b7d6f67ae617e4e677c7a41e2e" 436 | ], 437 | "markers": "python_version >= '3.6'", 438 | "version": "==24.0.1" 439 | }, 440 | "six": { 441 | "hashes": [ 442 | "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", 443 | "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" 444 | ], 445 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 446 | "version": "==1.16.0" 447 | }, 448 | "sqlalchemy": { 449 | "hashes": [ 450 | "sha256:0be9b479c5806cece01f1581726573a8d6515f8404e082c375b922c45cfc2a7b", 451 | "sha256:17aee7bfcef7bf0dea92f10e5dfdd67418dcf6fe0759f520e168b605855c003e", 452 | "sha256:21f3df74a0ab39e1255e94613556e33c1dc3b454059fe0b365ec3bbb9ed82e4a", 453 | "sha256:237067ba0ef45a518b64606e1807f7229969ad568288b110ed5f0ca714a3ed3a", 454 | "sha256:2dda5f96719ae89b3ec0f1b79698d86eb9aecb1d54e990abb3fdd92c04b46a90", 455 | "sha256:393f51a09778e8984d735b59a810731394308b4038acdb1635397c2865dae2b6", 456 | "sha256:3ca21b35b714ce36f4b8d1ee8d15f149db8eb43a472cf71600bf18dae32286e7", 457 | "sha256:3cbdbed8cdcae0f83640a9c44fa02b45a6c61e149c58d45a63c9581aba62850f", 458 | "sha256:3eba07f740488c3a125f17c092a81eeae24a6c7ec32ac9dbc52bf7afaf0c4f16", 459 | "sha256:3f68eab46649504eb95be36ca529aea16cd199f080726c28cbdbcbf23d20b2a2", 460 | "sha256:4c56e6899fa6e767e4be5d106941804a4201c5cb9620a409c0b80448ec70b656", 461 | "sha256:53f90a2374f60e703c94118d21533765412da8225ba98659de7dd7998641ab17", 462 | "sha256:595b185041a4dc5c685283ea98c2f67bbfa47bb28e4a4f5b27ebf40684e7a9f8", 463 | "sha256:65a0ad931944fcb0be12a8e0ac322dbd3ecf17c53f088bc10b6da8f0caac287b", 464 | "sha256:68e0cd5d32a32c4395168d42f2fefbb03b817ead3a8f3704b8bd5697c0b26c24", 465 | "sha256:6a06c2506c41926d2769f7968759995f2505e31c5b5a0821e43ca5a3ddb0e8ae", 466 | "sha256:6d7e1b28342b45f19e3dea7873a9479e4a57e15095a575afca902e517fb89652", 467 | "sha256:6f0ea4d7348feb5e5d0bf317aace92e28398fa9a6e38b7be9ec1f31aad4a8039", 468 | "sha256:7313e4acebb9ae88dbde14a8a177467a7625b7449306c03a3f9f309b30e163d0", 469 | "sha256:7cf7c7adbf4417e3f46fc5a2dbf8395a5a69698217337086888f79700a12e93a", 470 | "sha256:80ead36fb1d676cc019586ffdc21c7e906ce4bf243fe4021e4973dae332b6038", 471 | "sha256:9470633395e5f24d6741b4c8a6e905bce405a28cf417bba4ccbaadf3dab0111d", 472 | "sha256:94c0093678001f5d79f2dcbf3104c54d6c89e41ab50d619494c503a4d3f1aef2", 473 | "sha256:95f4f8d62589755b507218f2e3189475a4c1f5cc9db2aec772071a7dc6cd5726", 474 | "sha256:9c857676d810ca196be73c98eb839125d6fa849bfa3589be06201a6517f9961c", 475 | 
"sha256:a22208c1982f1fe2ae82e5e4c3d4a6f2445a7a0d65fb7983a3d7cbbe3983f5a4", 476 | "sha256:ad5f966623905ee33694680dda1b735544c99c7638f216045d21546d3d8c6f5b", 477 | "sha256:ae1ed1ebc407d2f66c6f0ec44ef7d56e3f455859df5494680e2cf89dad8e3ae0", 478 | "sha256:afd1ac99179d1864a68c06b31263a08ea25a49df94e272712eb2824ef151e294", 479 | "sha256:b6a337a2643a41476fb6262059b8740f4b9a2ec29bf00ffb18c18c080f6e0aed", 480 | "sha256:b737fbeb2f78926d1f59964feb287bbbd050e7904766f87c8ce5cfb86e6d840c", 481 | "sha256:c46322354c58d4dc039a2c982d28284330f8919f31206894281f4b595b9d8dbe", 482 | "sha256:c7e3b9e01fdbe1ce3a165cc7e1ff52b24813ee79c6df6dee0d1e13888a97817e", 483 | "sha256:c9aa372b295a36771cffc226b6517df3011a7d146ac22d19fa6a75f1cdf9d7e6", 484 | "sha256:d3b6d4588994da73567bb00af9d7224a16c8027865a8aab53ae9be83f9b7cbd1", 485 | "sha256:d3b9ac11f36ab9a726097fba7c7f6384f0129aedb017f1d4d1d4fce9052a1320", 486 | "sha256:d654870a66027af3a26df1372cf7f002e161c6768ebe4c9c6fdc0da331cb5173", 487 | "sha256:d8080bc51a775627865e0f1dbfc0040ff4ace685f187f6036837e1727ba2ed10", 488 | "sha256:da60b98b0f6f0df9fbf8b72d67d13b73aa8091923a48af79a951d4088530a239", 489 | "sha256:f5e8ed9cde48b76318ab989deeddc48f833d2a6a7b7c393c49b704f67dedf01d", 490 | "sha256:f8e5443295b218b08bef8eb85d31b214d184b3690d99a33b7bd8e5591e2b0aa1" 491 | ], 492 | "index": "pypi", 493 | "version": "==1.4.44" 494 | }, 495 | "starlette": { 496 | "hashes": [ 497 | "sha256:bd2ffe5e37fb75d014728511f8e68ebf2c80b0fa3d04ca1479f4dc752ae31ac9", 498 | "sha256:ebe8ee08d9be96a3c9f31b2cb2a24dbdf845247b745664bd8a3f9bd0c977fdbc" 499 | ], 500 | "markers": "python_version >= '3.6'", 501 | "version": "==0.13.6" 502 | }, 503 | "typing-extensions": { 504 | "hashes": [ 505 | "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa", 506 | "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e" 507 | ], 508 | "markers": "python_version >= '3.7'", 509 | "version": "==4.4.0" 510 | }, 511 | "tzdata": { 512 | "hashes": [ 513 | "sha256:04a680bdc5b15750c39c12a448885a51134a27ec9af83667663f0b3a1bf3f342", 514 | "sha256:91f11db4503385928c15598c98573e3af07e7229181bee5375bd30f1695ddcae" 515 | ], 516 | "markers": "python_version >= '3.6'", 517 | "version": "==2022.6" 518 | }, 519 | "tzlocal": { 520 | "hashes": [ 521 | "sha256:89885494684c929d9191c57aa27502afc87a579be5cdd3225c77c463ea043745", 522 | "sha256:ee5842fa3a795f023514ac2d801c4a81d1743bbe642e3940143326b3a00addd7" 523 | ], 524 | "markers": "python_version >= '3.6'", 525 | "version": "==4.2" 526 | }, 527 | "ujson": { 528 | "hashes": [ 529 | "sha256:078808c385036cba73cad96f498310c61e9b5ae5ac9ea01e7c3996ece544b556", 530 | "sha256:0a2e1b211714eb1ec0772a013ec9967f8f95f21c84e8f46382e9f8a32ae781fe", 531 | "sha256:0f412c3f59b1ab0f40018235224ca0cf29232d0201ff5085618565a8a9c810ed", 532 | "sha256:26cf6241b36ff5ce4539ae687b6b02673109c5e3efc96148806a7873eaa229d3", 533 | "sha256:2b2d9264ac76aeb11f590f7a1ccff0689ba1313adacbb6d38d3b15f21a392897", 534 | "sha256:4f12b0b4e235b35d49f15227b0a827e614c52dda903c58a8f5523936c233dfc7", 535 | "sha256:4fe8c6112b732cba5a722f7cbe22f18d405f6f44415794a5b46473a477635233", 536 | "sha256:51480048373cf97a6b97fcd70c3586ca0a31f27e22ab680fb14c1f22bedbf743", 537 | "sha256:568bb3e7f035006147af4ce3a9ced7d126c92e1a8607c7b2266007b1c1162c53", 538 | "sha256:5fe1536465b1c86e32a47113abd3178001b7c2dcd61f95f336fe2febf4661e74", 539 | "sha256:71703a269f074ff65b9d7746662e4b3e76a4af443e532218af1e8ce15d9b1e7b", 540 | "sha256:7a1545ac2476db4cc1f0f236603ccbb50991fc1bba480cda1bc06348cc2a2bf0", 541 | 
"sha256:a5200a68f1dcf3ce275e1cefbcfa3914b70c2b5e2f71c2e31556aa1f7244c845", 542 | "sha256:a618af22407baeadb3f046f81e7a5ee5e9f8b0b716d2b564f92276a54d26a823", 543 | "sha256:a79bca47eafb31c74b38e68623bc9b2bb930cb48fab1af31c8f2cb68cf473421", 544 | "sha256:b87379a3f8046d6d111762d81f3384bf38ab24b1535c841fe867a4a097d84523", 545 | "sha256:bd4c77aee3ffb920e2dbc21a9e0c7945a400557ce671cfd57dbd569f5ebc619d", 546 | "sha256:c354c1617b0a4378b6279d0cd511b769500cf3fa7c42e8e004cbbbb6b4c2a875", 547 | "sha256:c604024bd853b5df6be7d933e934da8dd139e6159564db7c55b92a9937678093", 548 | "sha256:e7ab24942b2d57920d75b817b8eead293026db003247e26f99506bdad86c61b4", 549 | "sha256:f8a60928737a9a47e692fcd661ef2b5d75ba22c7c930025bd95e338f2a6e15bc" 550 | ], 551 | "index": "pypi", 552 | "version": "==4.0.1" 553 | }, 554 | "uvicorn": { 555 | "hashes": [ 556 | "sha256:1079c50a06f6338095b4f203e7861dbff318dde5f22f3a324fc6e94c7654164c", 557 | "sha256:ef1e0bb5f7941c6fe324e06443ddac0331e1632a776175f87891c7bd02694355" 558 | ], 559 | "index": "pypi", 560 | "version": "==0.13.3" 561 | }, 562 | "zerorpc": { 563 | "hashes": [ 564 | "sha256:0a91ab78db756aec25d5c7c326f8d0dcdf852144acba2cced2cfb2e736c51628", 565 | "sha256:d2ee247a566fc703f29c277d767f6f61f1e12f76d0402faea4bd815f32cbf37f" 566 | ], 567 | "index": "pypi", 568 | "version": "==0.6.3" 569 | }, 570 | "zope.event": { 571 | "hashes": [ 572 | "sha256:2666401939cdaa5f4e0c08cf7f20c9b21423b95e88f4675b1443973bdb080c42", 573 | "sha256:5e76517f5b9b119acf37ca8819781db6c16ea433f7e2062c4afc2b6fbedb1330" 574 | ], 575 | "version": "==4.5.0" 576 | }, 577 | "zope.interface": { 578 | "hashes": [ 579 | "sha256:008b0b65c05993bb08912f644d140530e775cf1c62a072bf9340c2249e613c32", 580 | "sha256:0217a9615531c83aeedb12e126611b1b1a3175013bbafe57c702ce40000eb9a0", 581 | "sha256:0fb497c6b088818e3395e302e426850f8236d8d9f4ef5b2836feae812a8f699c", 582 | "sha256:17ebf6e0b1d07ed009738016abf0d0a0f80388e009d0ac6e0ead26fc162b3b9c", 583 | "sha256:311196634bb9333aa06f00fc94f59d3a9fddd2305c2c425d86e406ddc6f2260d", 584 | "sha256:3218ab1a7748327e08ef83cca63eea7cf20ea7e2ebcb2522072896e5e2fceedf", 585 | "sha256:404d1e284eda9e233c90128697c71acffd55e183d70628aa0bbb0e7a3084ed8b", 586 | "sha256:4087e253bd3bbbc3e615ecd0b6dd03c4e6a1e46d152d3be6d2ad08fbad742dcc", 587 | "sha256:40f4065745e2c2fa0dff0e7ccd7c166a8ac9748974f960cd39f63d2c19f9231f", 588 | "sha256:5334e2ef60d3d9439c08baedaf8b84dc9bb9522d0dacbc10572ef5609ef8db6d", 589 | "sha256:604cdba8f1983d0ab78edc29aa71c8df0ada06fb147cea436dc37093a0100a4e", 590 | "sha256:6373d7eb813a143cb7795d3e42bd8ed857c82a90571567e681e1b3841a390d16", 591 | "sha256:655796a906fa3ca67273011c9805c1e1baa047781fca80feeb710328cdbed87f", 592 | "sha256:65c3c06afee96c654e590e046c4a24559e65b0a87dbff256cd4bd6f77e1a33f9", 593 | "sha256:696f3d5493eae7359887da55c2afa05acc3db5fc625c49529e84bd9992313296", 594 | "sha256:6e972493cdfe4ad0411fd9abfab7d4d800a7317a93928217f1a5de2bb0f0d87a", 595 | "sha256:7579960be23d1fddecb53898035a0d112ac858c3554018ce615cefc03024e46d", 596 | "sha256:765d703096ca47aa5d93044bf701b00bbce4d903a95b41fff7c3796e747b1f1d", 597 | "sha256:7e66f60b0067a10dd289b29dceabd3d0e6d68be1504fc9d0bc209cf07f56d189", 598 | "sha256:8a2ffadefd0e7206adc86e492ccc60395f7edb5680adedf17a7ee4205c530df4", 599 | "sha256:959697ef2757406bff71467a09d940ca364e724c534efbf3786e86eee8591452", 600 | "sha256:9d783213fab61832dbb10d385a319cb0e45451088abd45f95b5bb88ed0acca1a", 601 | "sha256:a16025df73d24795a0bde05504911d306307c24a64187752685ff6ea23897cb0", 602 | 
"sha256:a2ad597c8c9e038a5912ac3cf166f82926feff2f6e0dabdab956768de0a258f5", 603 | "sha256:bfee1f3ff62143819499e348f5b8a7f3aa0259f9aca5e0ddae7391d059dce671", 604 | "sha256:d169ccd0756c15bbb2f1acc012f5aab279dffc334d733ca0d9362c5beaebe88e", 605 | "sha256:d514c269d1f9f5cd05ddfed15298d6c418129f3f064765295659798349c43e6f", 606 | "sha256:d692374b578360d36568dd05efb8a5a67ab6d1878c29c582e37ddba80e66c396", 607 | "sha256:dbaeb9cf0ea0b3bc4b36fae54a016933d64c6d52a94810a63c00f440ecb37dd7", 608 | "sha256:dc26c8d44472e035d59d6f1177eb712888447f5799743da9c398b0339ed90b1b", 609 | "sha256:e1574980b48c8c74f83578d1e77e701f8439a5d93f36a5a0af31337467c08fcf", 610 | "sha256:e74a578172525c20d7223eac5f8ad187f10940dac06e40113d62f14f3adb1e8f", 611 | "sha256:e945de62917acbf853ab968d8916290548df18dd62c739d862f359ecd25842a6", 612 | "sha256:f0980d44b8aded808bec5059018d64692f0127f10510eca71f2f0ace8fb11188", 613 | "sha256:f98d4bd7bbb15ca701d19b93263cc5edfd480c3475d163f137385f49e5b3a3a7", 614 | "sha256:fb68d212efd057596dee9e6582daded9f8ef776538afdf5feceb3059df2d2e7b" 615 | ], 616 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", 617 | "version": "==5.5.2" 618 | } 619 | }, 620 | "develop": { 621 | "pdir2": { 622 | "hashes": [ 623 | "sha256:752b32f0e438ce4bbccf2e5c2e69b522a216afcc95dce20929064ecb76f2db88", 624 | "sha256:b6c34d0e49aeb44eb977ab86edd59fd301fcd5caa498931bd6aa11125cc75f4e" 625 | ], 626 | "index": "pypi", 627 | "version": "==0.3.6" 628 | }, 629 | "typing-extensions": { 630 | "hashes": [ 631 | "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa", 632 | "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e" 633 | ], 634 | "markers": "python_version >= '3.7'", 635 | "version": "==4.4.0" 636 | } 637 | } 638 | } 639 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # CronJob 2 | 3 | 本项目采用[FastAPI](https://github.com/tiangolo/fastapi) + [APScheduler](https://github.com/topics/apscheduler) + [ZeroRPC](https://github.com/0rpc/zerorpc-python)开发轻量级定时调度平台 4 | 5 | ![](https://img.shields.io/github/license/AnsGoo/cronJob?style=for-the-badge) 6 | ![](https://img.shields.io/github/stars/AnsGoo/cronJob?style=for-the-badge) 7 | ![](https://img.shields.io/github/issues/AnsGoo/cronJob?style=for-the-badge) 8 | ![](https://img.shields.io/github/forks/AnsGoo/cronJob?style=for-the-badge) 9 | 10 | ## 特点 11 | 12 | - 完全兼容Crontab 13 | - 支持秒级定时任务 14 | - 作业任务可搜索、暂停、编辑、删除 15 | - 作业任务持久化存储、三种不同触发器类型作业动态添加 16 | - 采用RPC方式规避了APScheduler在多进程部署的情况下,任务被多次调度的问题 17 | - 提供图形化的管理页面 18 | 19 | ## TODO 20 | 21 | - [ ] WEB UI页面优化[cronJobFront](https://github.com/AnsGoo/cronJobFront) 22 | 23 | ## 使用 24 | 25 | - 安装虚拟环境管理器 26 | 27 | ```shell 28 | pip install pipenv 29 | ``` 30 | 31 | - 获取代码并激活虚拟环境 32 | 33 | ```shell 34 | git clone https://github.com/AnsGoo/cronJob.git 35 | pipenv shell 36 | 37 | ``` 38 | 39 | - 安装依赖 40 | 41 | ```shell 42 | 43 | pipenv sync 44 | 45 | ``` 46 | 47 | - 运行 48 | 49 | ```shell 50 | 51 | // 开发 52 | pipenv run dev --host=0.0.0.0 --port=8000 --reload 53 | pipenv run rpc 54 | 55 | // 生产 56 | 57 | pipenv run server--workers=4 --host=0.0.0.0 --port=8000 58 | pipenv run rpc 59 | 60 | ``` 61 | - 部署 62 | 63 | 制作镜像 64 | 65 | ```shell 66 | docker build cronjob:v1 . 
67 | ```
68 | 
69 | Run the container
70 | 
71 | ```shell
72 | docker run -p 8000:8000 -p 4242:4242 --name cronjob -it cronjob:v1
73 | ```
74 | - Develop tasks
75 | 
76 | Define a method whose name starts with `task` on the `Task` class in `job/tasks.py`, and the scheduler can then invoke it, for example (a sketch of scheduling such a task through the API follows below):
77 | 
78 | ```python
79 | class Task(BaseTask):
80 |     def task_test(self) -> None:
81 |         '''
82 |         test task
83 |         :return:
84 |         '''
85 |         print('test')
86 | 
87 | ```
88 | 
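89 | - Schedule a task
90 | 
91 | A defined task can then be scheduled through the HTTP API exposed under `/v1/jobs`. A minimal sketch using the `requests` library (not a project dependency); the payload field names here are assumptions for illustration — `job/schemas.py` (`JobSchema`) is the authoritative schema:
92 | 
93 | ```python
94 | import requests
95 | 
96 | # NOTE: illustrative payload only; the field names are assumed, not confirmed.
97 | job = {
98 |     'name': 'demo',         # job name
99 |     'func': 'task_test',    # task method defined on the Task class
100 |     'trigger': 'interval',  # one of: date / cron / interval
101 |     'seconds': 10,          # trigger argument for the interval trigger
102 | }
103 | resp = requests.post('http://localhost:8000/v1/jobs', json=job)
104 | print(resp.status_code, resp.json())
105 | ```
106 | 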
107 | 
108 | ## License
109 | 
110 | This project is licensed under the terms of the MIT license.
111 | 
--------------------------------------------------------------------------------
/app/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AnsGoo/cronJob/c2564d8eb3903d782dc770d6eec92dd152c3c69f/app/__init__.py
--------------------------------------------------------------------------------
/app/common/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AnsGoo/cronJob/c2564d8eb3903d782dc770d6eec92dd152c3c69f/app/common/__init__.py
--------------------------------------------------------------------------------
/app/common/logger.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | # @Time : 2020/10/15 16:58
4 | # @Author : CoderCharm
5 | # @File : logger.py
6 | # @Software: PyCharm
7 | # @Github : github/CoderCharm
8 | # @Email : wg_python@163.com
9 | # @Desc :
10 | """
11 | Log file configuration; reference:
12 | https://github.com/Delgan/loguru
13 | # The original plan was to mount the log object on the app like Flask does; the author recommends using the global object directly
14 | https://github.com/tiangolo/fastapi/issues/81#issuecomment-473677039
15 | """
16 | 
17 | import os
18 | import time
19 | from loguru import logger
20 | 
21 | from app.config import BASE_DIR
22 | log_path = BASE_DIR.joinpath('logs')
23 | 
24 | if not log_path.exists():
25 |     os.mkdir(log_path)
26 | 
27 | log_file = log_path.joinpath(f'{time.strftime("%Y-%m-%d")}.log')
28 | rpc_log_file = log_path.joinpath(f'rpc_{time.strftime("%Y-%m-%d")}.log')
29 | scheduler_log_file = log_path.joinpath(f'scheduler_{time.strftime("%Y-%m-%d")}.log')
30 | def rpc_filter(record):
31 |     if 'rpc' in record['name']:
32 |         return True
33 |     else:
34 |         return False
35 | 
36 | def scheduler_filter(record):
37 |     if 'job.listener' in record['name']:
38 |         return True
39 |     else:
40 |         return False
41 | 
42 | def app_filter(record):
43 |     if 'job.listener' in record['name']:
44 |         return False
45 |     elif 'rpc' in record['name']:
46 |         return False
47 |     else:
48 |         return True
49 | # basic log configuration
50 | logger.add(log_file, level='INFO', rotation="00:00", retention="5 days", enqueue=True, filter=app_filter)
51 | logger.add(rpc_log_file, level='INFO', rotation="00:00", retention="5 days", enqueue=True, filter=rpc_filter)
52 | logger.add(scheduler_log_file, level='INFO', rotation="00:00", retention="5 days", enqueue=True, filter=scheduler_filter)
53 | __all__ = ["logger"]
--------------------------------------------------------------------------------
/app/common/resp_code.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | # @Time : 2020/9/22 13:32
4 | # @Author : CoderCharm
5 | # @File : response_code.py
6 | # @Software: PyCharm
7 | # @Github : github/CoderCharm
8 | # @Email : wg_python@163.com
9 | # @Desc :
10 | """
11 | Unified response status codes
12 | """
13 | from typing import Any, Union
14 | 
15 | from fastapi import status
16 | from fastapi.responses import JSONResponse, Response
17 | from fastapi.encoders import jsonable_encoder
18 | 
19 | 
20 | def resp_200(data: Union[list, dict, str, Any] = {}) -> Response:
21 |     return JSONResponse(
22 |         status_code=status.HTTP_200_OK,
23 |         content=jsonable_encoder(data)
24 |     )
25 | 
26 | 
27 | def resp_201(data: Union[list, dict, str, Any] = {}) -> Response:
28 |     return JSONResponse(
29 |         status_code=status.HTTP_201_CREATED,
30 |         content=jsonable_encoder(data)
31 |     )
32 | 
33 | 
34 | def resp_202(data: Union[list, dict, str, Any] = {}) -> Response:
35 |     return JSONResponse(
36 |         status_code=status.HTTP_202_ACCEPTED,
37 |         content=jsonable_encoder(data)
38 |     )
39 | 
40 | 
41 | def resp_400(message='BAD REQUEST') -> Response:
42 |     return JSONResponse(
43 |         status_code=status.HTTP_400_BAD_REQUEST,
44 |         content=message
45 |     )
46 | 
47 | 
48 | def resp_404(*, message='NOT FOUND') -> Response:
49 |     return JSONResponse(
50 |         status_code=status.HTTP_404_NOT_FOUND,
51 |         content=message
52 |     )
53 | 
--------------------------------------------------------------------------------
/app/config.py:
--------------------------------------------------------------------------------
1 | 
2 | from pathlib import Path
3 | from urllib.parse import quote_plus
4 | from typing import Any, Dict, List, Mapping, Optional, Union
5 | 
6 | from pydantic import AnyHttpUrl, BaseSettings, validator, AnyUrl
7 | 
8 | from scheduler.schema import SchedulerConfig, JobStore, JobExecutorPool
9 | 
10 | 
11 | BASE_DIR = Path.cwd()
12 | 
13 | class MySQLDSN(AnyUrl):
14 | 
15 |     allowed_schemes = {'mysql+pymysql', 'mysql+mysqldb'}
16 |     user_required = True
17 | 
18 |     @classmethod
19 |     def build(cls,
20 |         scheme: str,
21 |         user: str = '',
22 |         password: str = '',
23 |         dbname: str = '',
24 |         host: str = '',
25 |         port: str = '3306',
26 |         **kwargs
27 |     ) -> str:
28 |         option_list = [key + '=' + value for key, value in kwargs.items()]
29 |         option_str = '&'.join(option_list)
30 |         if not option_str:
31 |             return '%s://%s:%s@%s:%s/%s' % (scheme, user, quote_plus(password), host, port, dbname)
32 |         else:
33 |             return '%s://%s:%s@%s:%s/%s?%s' % (scheme, user, quote_plus(password), host, port, dbname, option_str)
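34 | 
35 | # A worked example of what MySQLDSN.build produces (values are illustrative):
36 | #   MySQLDSN.build(scheme='mysql+pymysql', user='root', password='p@ss',
37 | #                  host='127.0.0.1', dbname='cronJob', charset='utf8mb4')
38 | #   -> 'mysql+pymysql://root:p%40ss@127.0.0.1:3306/cronJob?charset=utf8mb4'
39 | # quote_plus escapes characters like '@' so the DSN stays parseable.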
40 | 
41 | class Settings(BaseSettings):
42 |     DEBUG: bool = False
43 |     PROJECT_NAME: str = ''
44 |     BACKEND_CORS_ORIGINS: List[Union[AnyHttpUrl, str]] = []
45 | 
46 |     @validator("BACKEND_CORS_ORIGINS", pre=True)
47 |     def assemble_cors_origins(cls, v: Union[str, List[str]]) -> Union[List[str], str]:
48 |         if isinstance(v, str) and not v.startswith("["):
49 |             return [i.strip() for i in v.split(",")]
50 |         elif isinstance(v, (list, str)):
51 |             return v
52 |         raise ValueError(v)
53 | 
54 |     DB_HOST: str = ''
55 |     DB_USER: str = ''
56 |     DB_PASSWORD: str = ''
57 |     DB_NAME: str = ''
58 |     DB_PORT: str = ''
59 |     DATABASE_URI: Optional[str] = None
60 | 
61 |     @validator("DATABASE_URI", pre=True)
62 |     def assemble_db_connection(cls, v: Optional[str], values: Dict[str, Any]) -> Any:
63 |         if isinstance(v, str):
64 |             return v
65 | 
66 |         return MySQLDSN.build(
67 |             host=values.get("DB_HOST", ""),
68 |             dbname=values.get('DB_NAME', ""),
69 |             scheme="mysql+pymysql",
70 |             user=values.get("DB_USER", 'root'),
71 |             password=values.get("DB_PASSWORD", ""),
72 |             port=values.get("DB_PORT", '3306')
73 |         )
74 | 
75 |     SCHEDULER_CONFIG: Optional[SchedulerConfig] = None
76 | 
77 |     @validator("SCHEDULER_CONFIG", pre=True)
78 |     def init_scheduler(cls, v: Optional[SchedulerConfig], values: Mapping[str, Any]) -> SchedulerConfig:
79 |         url = MySQLDSN.build(
80 |             host=values.get("DB_HOST", ""),
81 |             dbname=values.get('DB_NAME', ""),
82 |             scheme="mysql+pymysql",
83 |             user=values.get("DB_USER", 'root'),
84 |             password=values.get("DB_PASSWORD", ""),
85 |             port=values.get("DB_PORT", '3306')
86 |         )
87 | 
88 |         return SchedulerConfig(
89 |             executors={
90 |                 'default': JobExecutorPool(type='thread', size=3).build(),
91 |                 'processpool': JobExecutorPool(type='process', size=3).build()
92 |             },
93 |             default={
94 |                 'coalesce': False,
95 |                 'max_instances': 1
96 |             },
97 |             stores={
98 |                 'default': JobStore(type='sqlalchemy', url=url).build()
99 |             }
100 |         )
101 | 
102 |     RPC_URL: str = ''
103 |     RPC_POOL_SIZE: int = 1
104 | 
105 |     TASK_SCRIPT_PATH: Path = BASE_DIR.joinpath('task')
106 | 
107 |     class Config:
108 |         case_sensitive = True
109 |         env_file = ".env"
110 | 
111 | 
112 | settings = Settings()
--------------------------------------------------------------------------------
/app/database.py:
--------------------------------------------------------------------------------
1 | from typing import Generator
2 | 
3 | from sqlalchemy import create_engine
4 | from sqlalchemy.ext.declarative import as_declarative, declared_attr
5 | from sqlalchemy.orm import sessionmaker
6 | 
7 | from app.config import settings
8 | 
9 | engine = create_engine(settings.DATABASE_URI, pool_pre_ping=True)
10 | SessionLocal = sessionmaker(bind=engine)
11 | 
12 | 
13 | @as_declarative()
14 | class Base:
15 | 
16 |     @declared_attr
17 |     def __tablename__(cls) -> str:
18 |         return cls.__name__.lower()
19 | 
20 | 
21 | def get_db() -> Generator:
22 |     """
23 |     Get a sqlalchemy session object
24 |     :return:
25 |     """
26 |     try:
27 |         db = SessionLocal()
28 |         yield db
29 |     finally:
30 |         db.close()
31 | 
32 | db = SessionLocal()
33 | 
34 | def create_table() -> None:
35 |     Base.metadata.create_all(engine)
36 | 
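37 | # Usage sketch (illustrative, not part of the original file): endpoints receive a
38 | # per-request session through FastAPI dependency injection, e.g.
39 | #   def list_jobs(db: Session = Depends(get_db)): ...
40 | # get_db yields a fresh session per request and always closes it afterwards.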
73 | 
74 | def register_scheduler(app: FastAPI) -> None:
75 |     """
76 |     Register the scheduler object.
77 |     :param app:
78 |     :return:
79 |     """
80 |     @app.on_event("startup")
81 |     async def load_schedule_or_create_blank():
82 |         create_table()
83 | 
84 |     @app.on_event('shutdown')
85 |     async def shutdown_schedule():
86 |         """
87 |         Shut down the scheduler object.
88 |         :return:
89 |         """
90 |         pass
91 | 
92 | app = get_application()
93 | 
94 | 
--------------------------------------------------------------------------------
/app/route.py:
--------------------------------------------------------------------------------
1 | from fastapi import APIRouter
2 | 
3 | from job.route import router as job_router
4 | 
5 | router = APIRouter()
6 | router.include_router(job_router, prefix='/v1')
7 | 
--------------------------------------------------------------------------------
/app/state.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 | from starlette.datastructures import State
3 | 
4 | 
5 | class DefaultState:
6 | 
7 |     state = State()
8 | 
9 |     def get(self, key: str, value: Any = None) -> Any:
10 |         if hasattr(self.state, key):
11 |             return getattr(self.state, key)
12 |         else:
13 |             if value is None:
14 |                 raise Exception('state has no attribute %s' % key)
15 |             else:
16 |                 return value
17 | 
18 | 
19 |     def set(self, key: str, value: Any) -> None:
20 |         if hasattr(self.state, key):
21 |             raise Exception('state already has attribute %s' % key)
22 |         else:
23 |             setattr(self.state, key, value)
24 | 
25 |     def update(self, key: str, value: Any) -> None:
26 |         if hasattr(self.state, key):
27 |             setattr(self.state, key, value)
28 | 
29 | 
30 | default_state = DefaultState()
31 | 
--------------------------------------------------------------------------------
/job/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AnsGoo/cronJob/c2564d8eb3903d782dc770d6eec92dd152c3c69f/job/__init__.py
--------------------------------------------------------------------------------
/job/api/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AnsGoo/cronJob/c2564d8eb3903d782dc770d6eec92dd152c3c69f/job/api/__init__.py
--------------------------------------------------------------------------------
/job/api/v1/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AnsGoo/cronJob/c2564d8eb3903d782dc770d6eec92dd152c3c69f/job/api/v1/__init__.py
--------------------------------------------------------------------------------
/job/api/v1/job.py:
--------------------------------------------------------------------------------
1 | 
2 | 
3 | from typing import Optional
4 | from pydantic import ValidationError
5 | from fastapi import APIRouter, Path, Query, Body, status, Response
6 | from fastapi.exceptions import HTTPException
7 | from zerorpc.core import Client
8 | from app.common.resp_code import resp_200, resp_201, resp_202, resp_400
9 | from app.common.logger import logger
10 | from job.schemas import JobSchema, TriggerSchema, JobQueryParams
11 | from apscheduler.job import Job
12 | from utils.common import remove_none
13 | from rpc.client import get_client
14 | from app.config import settings
15 | 
16 | router = APIRouter()
17 | 
18 | def _get_job(job_id: str, client: Client) -> Job:
19 |     job = client.get_job(job_id)
20 |     if job:
21 |         return job
22 |     else:
23 |         raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail='%s not found' % job_id)
24 | 
25 | 
26 | @router.get("/jobs", summary="List all jobs")
27 | async def get_jobs(
28 |     state: Optional[str] = Query(None, title='job state', description='one of RUNNING, STOP'),
29 |     name: Optional[str] = Query(None, title='job name'),
30 |     trigger: Optional[str] = Query(None, title='trigger type', description='one of date, cron, interval'),
31 |     func: Optional[str] = Query(None, title='task name', description='name of the task function')
32 | ) -> Response:
33 |     """
34 |     List all jobs.
35 |     :return:
36 |     """
37 |     try:
38 |         query_conditions = JobQueryParams(state=state, name=name, trigger=trigger, func=func)
39 |     except ValidationError as e:
40 |         return resp_400(e.json())
41 |     with get_client(settings.RPC_URL) as client:
42 |         conditions = remove_none(query_conditions.dict())
43 |         jobs = client.query_jobs('default', conditions)
44 |         return resp_200(data=jobs)
45 | 
46 | 
47 | @router.get("/jobs/{job_id}", summary="Get a single job")
48 | async def get_job(
49 |     job_id: str = Path(..., title="job id")
50 | ) -> Response:
51 |     with get_client(settings.RPC_URL) as client:
52 |         job = _get_job(job_id=job_id, client=client)
53 |         return resp_200(job)
54 | 
55 | 
56 | # cron is the most flexible trigger type; it supports crontab-style expressions
57 | @router.post("/jobs", summary='Add a job')
58 | async def add_job(job: JobSchema) -> Response:
59 |     with get_client(settings.RPC_URL) as client:
60 |         instance, msg = client.add_job(job.to_dict())
61 |         if instance:
62 |             return resp_201(instance)
63 |         else:
64 |             return resp_400(msg)
65 | 
66 | 
67 | @router.delete("/job/{job_id}", summary="Remove a job")
68 | async def remove_job(
69 |     job_id: str = Path(..., title="job id")
70 | ) -> Response:
71 |     try:
72 |         with get_client(settings.RPC_URL) as client:
73 |             client.remove_job(job_id)
74 |     except Exception as e:
75 |         logger.error(e)
76 |         return resp_400()
77 |     return resp_202()
78 | 
79 | @router.get("/job/{job_id}/pause", summary="Pause a job")
80 | async def pause_job(
81 |     job_id: str = Path(..., title="job id")
82 | ) -> Response:
83 |     with get_client(settings.RPC_URL) as client:
84 |         _get_job(job_id=job_id, client=client)
85 |         job = client.pause_job(job_id)
86 |         return resp_202(job)
87 | 
88 | 
89 | @router.get("/job/{job_id}/resume", summary="Resume a job")
90 | async def resume_job(
91 |     job_id: str = Path(..., title="job id")
92 | ) -> Response:
93 |     with get_client(settings.RPC_URL) as client:
94 |         _get_job(job_id=job_id, client=client)
95 |         job = client.resume_job(job_id)
96 |         return resp_202(job)
97 | 
98 | 
99 | @router.put("/job/{job_id}/reschedule", summary="Reschedule a job")
100 | async def reschedule_job(
101 |     trigger: TriggerSchema,
102 |     job_id: str = Path(..., title="job id"),
103 |     jobstore: Optional[str] = Body('default')
104 | ) -> Response:
105 |     with get_client(settings.RPC_URL) as client:
106 |         _get_job(job_id=job_id, client=client)
107 |         job, result = client.reschedule_job(job_id, trigger.dict(), jobstore)
108 |         return resp_202(job)
109 | 
110 | 
111 | @router.get("/job/stores", summary="List job stores")
112 | async def get_stores() -> Response:
113 |     with get_client(settings.RPC_URL) as client:
114 |         stores = client.get_stores()
115 |         return resp_200(data=[{'name': item} for item in stores])
116 | 
117 | 
118 | @router.get("/job/executors", summary="List executors")
119 | async def get_executors() -> Response:
120 |     with get_client(settings.RPC_URL) as client:
121 |         executors = client.get_executors()
122 |         return resp_200(data=[{'name': item} for item in executors])
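For orientation, a minimal sketch of driving the job endpoints above over HTTP. The host and port are assumptions (uvicorn defaults, plus the '/v1' prefix mounted in app/route.py); the task path comes from task/test.py in this repo:

```python
import requests

payload = {
    "func": "task.test.TestTask.task_test",   # key format produced by job/tasks.py
    "name": "demo-job",
    "trigger": {
        "trigger": "interval",
        "run_time": "0 5 0 0 0"   # seconds minutes hours days weeks -> every 5 minutes
    }
}

# POST /v1/jobs creates the job through the RPC service
resp = requests.post("http://127.0.0.1:8000/v1/jobs", json=payload)
job = resp.json()
print(resp.status_code, job)

# pause and resume by job id
requests.get("http://127.0.0.1:8000/v1/job/%s/pause" % job["id"])
requests.get("http://127.0.0.1:8000/v1/job/%s/resume" % job["id"])
```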
--------------------------------------------------------------------------------
/job/api/v1/record.py:
--------------------------------------------------------------------------------
1 | 
2 | from typing import Optional
3 | from fastapi import APIRouter, Query, Response
4 | 
5 | from app.common.resp_code import resp_200
6 | from job.models import JobRecord
7 | from app.database import db
8 | from utils.common import remove_none
9 | 
10 | router = APIRouter()
11 | 
12 | @router.get("/records", summary="List all job records")
13 | async def get_records(
14 |     result: Optional[str] = Query(None, title='job result', description='one of SUCCESS, FAILED, MISSED'),
15 |     name: Optional[str] = Query(None, title='job name'),
16 |     trigger: Optional[str] = Query(None, title='trigger type', description='one of date, cron, interval'),
17 |     page: Optional[int] = Query(None, title='page number', description='page number'),
18 |     page_size: Optional[int] = Query(default=20, title='page size', description='records per page')
19 | ) -> Response:
20 |     """
21 |     List all job records.
22 |     :return:
23 |     """
24 | 
25 |     query_conditions = remove_none({
26 |         'result': result,
27 |         'name': name,
28 |         'trigger': trigger
29 |     })
30 |     queryset = db.query(JobRecord).filter_by(**query_conditions)
31 |     if page and page_size:
32 |         offset = (page - 1) * page_size
33 |         total = queryset.count()
34 |         queryset = queryset.offset(offset).limit(page_size)
35 |         data = [job.to_json() for job in queryset]
36 |         return resp_200({'total': total, 'results': data})
37 | 
38 |     data = [job.to_json() for job in queryset]
39 |     return resp_200(data)
--------------------------------------------------------------------------------
/job/api/v1/task.py:
--------------------------------------------------------------------------------
1 | from fastapi import APIRouter, Response
2 | 
3 | from app.common.resp_code import resp_200
4 | from job.tasks import task_list
5 | 
6 | router = APIRouter()
7 | 
8 | @router.get("/tasks", summary="List available tasks")
9 | async def get_tasks() -> Response:
10 |     data = []
11 |     for name, task in task_list.task_dict.items():
12 |         data.append(
13 |             {
14 |                 'name': name,
15 |                 'desc': task.__doc__
16 |             }
17 |         )
18 |     return resp_200(data=data)
19 | 
20 | @router.post("/reload", summary="Reload tasks")
21 | async def reload_task() -> Response:
22 |     """
23 |     Reload all tasks.
24 |     """
25 |     task_list.load_task()
26 |     return resp_200()
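When both pagination parameters are present, the records endpoint returns a `{'total', 'results'}` envelope. A short sketch, with the same host/port assumptions as the earlier example:

```python
import requests

resp = requests.get(
    "http://127.0.0.1:8000/v1/records",
    params={"result": "SUCCESS", "page": 1, "page_size": 20},
)
body = resp.json()
print(body["total"], len(body["results"]))
```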
--------------------------------------------------------------------------------
/job/listener.py:
--------------------------------------------------------------------------------
1 | import json
2 | from apscheduler.job import Job
3 | from apscheduler.events import EVENT_JOB_MISSED, EVENT_JOB_ERROR, EVENT_JOB_EXECUTED, JobEvent
4 | from scheduler.utils import get_job_trigger_name
5 | from scheduler.listener import JobBaseListener
6 | from app.database import db
7 | from app.common.logger import logger
8 | from .models import JobRecord
9 | 
10 | 
11 | class CronJobListener(JobBaseListener):
12 | 
13 |     def save_record(self, event: JobEvent, job: Job) -> None:
14 |         result = None
15 |         if event.code == EVENT_JOB_EXECUTED:
16 |             result = 'SUCCESS'
17 |         elif event.code == EVENT_JOB_ERROR:
18 |             result = 'FAILED'
19 |         elif event.code == EVENT_JOB_MISSED:
20 |             result = 'MISSED'
21 | 
22 |         data = {
23 |             'job_id': job.id,
24 |             'name': job.name,
25 |             'args': json.dumps(job.args),
26 |             'kwargs': json.dumps(job.kwargs),
27 |             'trigger': get_job_trigger_name(job.trigger),
28 |             'result': result,
29 |             'out': event.traceback,
30 |             'runtime': event.scheduled_run_time
31 |         }
32 |         record = JobRecord(**data)
33 |         db.add(record)
34 |         db.commit()
35 |         db.flush()
36 | 
37 |     def job_listener(self, event: JobEvent) -> None:
38 |         job = self.schedule.get_job(event.job_id)
39 |         self.save_record(event, job)
40 |         if event.code == EVENT_JOB_EXECUTED:
41 |             logger.info('job[%s] %s run SUCCESS at %s' % (event.job_id, job.name, event.scheduled_run_time))
42 |         elif event.code == EVENT_JOB_ERROR:
43 |             logger.error('job[%s] %s run FAILED at %s, error info: \n %s' % (event.job_id, job.name, event.scheduled_run_time, event.traceback))
44 |         elif event.code == EVENT_JOB_MISSED:
45 |             logger.warning('job[%s] %s run MISSED at %s' % (event.job_id, job.name, event.scheduled_run_time))
46 | 
--------------------------------------------------------------------------------
/job/models.py:
--------------------------------------------------------------------------------
1 | import json
2 | from typing import Dict
3 | from sqlalchemy import Column, Integer, String, Text, Enum, DateTime
4 | from app.database import Base
5 | 
6 | 
7 | class JobRecord(Base):
8 |     id = Column(Integer, primary_key=True, autoincrement=True, index=True)
9 |     job_id = Column(String(191))
10 |     name = Column(String(255))
11 |     args = Column(Text, nullable=True)
12 |     kwargs = Column(Text, nullable=True)
13 |     trigger = Column(Enum('date', 'cron', 'interval'))
14 |     result = Column(Enum('SUCCESS', 'FAILED', 'MISSED'))
15 |     out = Column(Text, nullable=True, server_default=None)
16 |     runtime = Column(DateTime)
17 | 
18 |     def to_json(self) -> Dict:
19 |         return {
20 |             'id': self.id,
21 |             'name': self.name,
22 |             'args': json.loads(self.args) if self.args else [],
23 |             'kwargs': json.loads(self.kwargs) if self.kwargs else dict(),
24 |             'trigger': self.trigger,
25 |             'result': self.result,
26 |             'out': self.out,
27 |             'runtime': self.runtime
28 |         }
29 | 
--------------------------------------------------------------------------------
/job/route.py:
--------------------------------------------------------------------------------
1 | from fastapi import APIRouter
2 | from .api.v1.job import router as job_router
3 | from .api.v1.record import router as record_router
4 | from .api.v1.task import router as task_router
5 | 
6 | router = APIRouter()
7 | 
8 | router.include_router(job_router, tags=["job"])
9 | router.include_router(record_router, tags=["record"])
10 | router.include_router(task_router, tags=["task"])
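Because app/route.py mounts this router under '/v1', the final paths are /v1/jobs, /v1/records, /v1/tasks and so on. A sketch with FastAPI's TestClient; note that it runs the startup hook, so the configured database must be reachable (and the RPC service too, for the job endpoints):

```python
from fastapi.testclient import TestClient
from app.main import app

client = TestClient(app)
print(client.get("/v1/tasks").json())   # the task endpoints need no RPC round-trip
```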
--------------------------------------------------------------------------------
/job/schemas.py:
--------------------------------------------------------------------------------
1 | from datetime import date, datetime
2 | from typing import Optional, List, Dict, Union, Any, Tuple
3 | from apscheduler.triggers.cron.fields import (
4 |     BaseField, MonthField, DayOfMonthField, DayOfWeekField, DEFAULT_VALUES)
5 | from apscheduler.util import undefined
6 | from apscheduler.triggers.cron import CronTrigger
7 | from apscheduler.triggers.date import DateTrigger
8 | from apscheduler.triggers.interval import IntervalTrigger
9 | 
10 | from pydantic import BaseModel, validator, conint
11 | 
12 | from .tasks import task_list
13 | 
14 | class TriggerSchema(BaseModel):
15 |     trigger: str
16 |     run_time: str
17 |     start_date: Optional[Union[datetime, date]] = None
18 |     end_date: Optional[Union[datetime, date]] = None
19 |     timezone: Optional[str] = None
20 |     jitter: Optional[int] = None
21 | 
22 |     @validator('trigger', pre=True)
23 |     def validate_trigger(cls, trigger: str, values: Optional[Dict]) -> str:
24 |         if trigger in ['date', 'interval', 'cron']:
25 |             return trigger
26 |         else:
27 |             raise ValueError('trigger must be one of date, interval or cron')
28 | 
29 | 
30 |     @validator('run_time')
31 |     def validate_run_time(cls, run_time: str, values: Dict) -> str:
32 |         if values['trigger'] == 'date':
33 |             try:
34 |                 datetime.strptime(run_time, '%Y-%m-%d %H:%M:%S')
35 |                 return run_time
36 |             except ValueError:
37 |                 try:
38 |                     datetime.strptime(run_time, '%Y-%m-%d')
39 |                     return run_time
40 |                 except ValueError:
41 |                     raise ValueError('run_time must be a date or datetime string')
42 |         elif values['trigger'] == 'interval':
43 |             datas = run_time.split()
44 |             if len(datas) == 5:
45 |                 for item in datas:
46 |                     if not item.isdigit():
47 |                         raise ValueError('interval fields must be integers')
48 |                 return run_time
49 |             else:
50 |                 raise ValueError('run_time must look like "0 5 0 0 0" (seconds minutes hours days weeks)')
51 | 
52 |         elif values['trigger'] == 'cron':
53 |             times = run_time.split()
54 |             try:
55 |                 if len(times) in [6, 7]:
56 |                     second = BaseField('second', times[0])
57 |                     minute = BaseField('minute', times[1])
58 |                     hour = BaseField('hour', times[2])
59 |                     day = DayOfMonthField('day', times[3])
60 |                     month = MonthField('month', times[4])
61 |                     day_of_week = DayOfWeekField('day_of_week', times[5])
62 |                     if len(times) == 7:
63 |                         year = BaseField('year', times[6])
64 |                     return run_time
65 |                 raise ValueError('cron run_time must have 6 or 7 fields')
66 |             except Exception as e:
67 |                 raise ValueError(str(e))
68 | 
69 |     def build(self) -> Union[DateTrigger, IntervalTrigger, CronTrigger]:
70 |         if self.trigger == 'date':
71 |             try:
72 |                 rundate = datetime.strptime(self.run_time, '%Y-%m-%d %H:%M:%S')
73 |             except ValueError:
74 |                 try:
75 |                     rundate = datetime.strptime(self.run_time, '%Y-%m-%d')
76 |                 except ValueError:
77 |                     raise ValueError('run_time must be a date or datetime string')
78 | 
79 |             return DateTrigger(run_date=rundate, timezone=self.timezone)
80 | 
81 |         elif self.trigger == 'interval':
82 |             datas = self.run_time.split()
83 |             data = [int(item) for item in datas]
84 |             return IntervalTrigger(
85 |                 seconds=data[0],
86 |                 minutes=data[1],
87 |                 hours=data[2],
88 |                 days=data[3],
89 |                 weeks=data[4],
90 |                 start_date=self.start_date,
91 |                 end_date=self.end_date,
92 |                 timezone=self.timezone,
93 |                 jitter=self.jitter
94 |             )
95 | 
96 |         elif self.trigger == 'cron':
97 |             times = self.run_time.split()
98 |             if len(times) == 6:
99 |                 return CronTrigger(
100 |                     day_of_week=times[5],
101 |                     month=times[4],
102 |                     day=times[3],
103 |                     hour=times[2],
104 |                     minute=times[1],
105 |                     second=times[0],
106 |                     start_date=self.start_date,
107 |                     end_date=self.end_date,
108 |                     timezone=self.timezone,
109 |                     jitter=self.jitter
110 |                 )
111 |             elif len(times) == 7:
112 |                 return CronTrigger(
113 |                     year=times[6],
114 |                     day_of_week=times[5],
115 |                     month=times[4],
116 |                     day=times[3],
117 |                     hour=times[2],
118 |                     minute=times[1],
119 |                     second=times[0],
120 |                     start_date=self.start_date,
121 |                     end_date=self.end_date,
122 |                     timezone=self.timezone,
123 |                     jitter=self.jitter
124 |                 )
125 | 
126 | 
127 | class JobSchema(BaseModel):
128 |     func: str
129 |     name: str
130 |     trigger: TriggerSchema
131 |     max_instances: Optional[int] = 1
132 |     args: Optional[List] = list()
133 |     kwargs: Optional[Dict] = dict()
134 |     next_run_time: Optional[Union[datetime, date]]
135 |     jobstore: Optional[str] = 'default'
136 |     executor: Optional[str] = 'default'
137 |     replace_existing: Optional[bool] = False
138 |     misfire_grace_time: Optional[int]
139 |     coalesce: Optional[bool]
140 | 
141 |     def to_dict(self) -> Dict[str, Any]:
142 |         data = self.dict()
143 |         keys = list(data.keys())
144 |         for k in keys:
145 |             if data[k] == undefined:
146 |                 data.pop(k)
147 | 
148 |         data["func"] = self.func[0]
149 |         return data
150 | 
151 |     @validator('next_run_time', always=True)
152 |     def validate_next_run_time(cls, next_run_time: datetime, values: Optional[Dict]) -> Any:
153 |         if next_run_time:
154 |             return next_run_time
155 |         else:
156 |             return undefined
157 | 
158 |     @validator('misfire_grace_time', always=True)
159 |     def validate_misfire_grace_time(cls, misfire_grace_time: int, values: Optional[Dict]) -> Any:
160 |         '''
161 |         If a job was due to run at 14:00 but for some reason was not dispatched, and its run
162 |         instance is only submitted at 14:01, the gap between the planned run time and the
163 |         current time (1 minute here) is checked against this limit; if the gap exceeds the
164 |         limit (say 30 seconds), the run instance is not executed.
165 |         '''
166 |         if misfire_grace_time:
167 |             return misfire_grace_time
168 |         else:
169 |             return undefined
170 | 
171 | 
172 |     @validator('coalesce', always=True)
173 |     def validate_coalesce(cls, coalesce: bool, values: Optional[Dict]) -> Any:
174 |         '''
175 |         If a job has accumulated several missed runs (say the system was down for 5 minutes
176 |         and the job fires every minute, so 5 runs were "planned" but never happened), then
177 |         with coalesce=True only one run, the last one, is submitted to the executor next
178 |         time; with False all 5 may run (not guaranteed, since other settings such as
179 |         misfire_grace_time also apply).
180 |         '''
181 |         if coalesce:
182 |             return coalesce
183 |         else:
184 |             return undefined
185 | 
186 | 
187 |     @validator('max_instances', always=True)
188 |     def validate_max_instances(cls, max_instances: int, values: Optional[Dict]) -> int:
189 |         '''
190 |         The maximum number of instances of the same job running at the same time. For a job
191 |         that takes 10 minutes and fires every minute, with max_instances=5 no new instance
192 |         starts during minutes 6-10, because 5 instances are already running.
193 |         '''
194 |         if max_instances is None:
195 |             return undefined
196 |         else:
197 |             return max_instances
198 | 
199 |     @validator('func')
200 |     def validate_func(cls, func: str, values: Optional[Dict]) -> Tuple:
201 |         if func in task_list.task_dict:
202 |             return func, task_list.task_dict[func]
203 |         else:
204 |             raise ValueError('task %s not found' % func)
205 | 
206 | 
207 | class JobQueryParams(BaseModel):
208 |     state: Optional[str]
209 |     name: Optional[str]
210 |     trigger: Optional[str]
211 |     func: Optional[str]
212 | 
213 | 
214 |     @validator('state', pre=True)
215 |     def validate_state(cls, state: str, values: Optional[Dict]) -> Optional[bool]:
216 |         if state == 'RUNNING':
217 |             return True
218 |         elif state == 'STOP':
219 |             return False
220 |         else:
221 |             return None
222 | 
223 | class RecordQueryParams(BaseModel):
224 |     result: Optional[str]
225 |     name: Optional[str]
226 |     trigger: Optional[str]
227 |     page: Optional[conint(gt=0)]
228 |     page_size: Optional[conint(gt=0)]
229 | 
230 | 
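TriggerSchema validates the run_time string and turns it into the matching APScheduler trigger. A small sketch with illustrative values:

```python
from job.schemas import TriggerSchema

# cron: second minute hour day month day_of_week (optionally a 7th year field)
t = TriggerSchema(trigger='cron', run_time='0 */5 * * * *')
print(t.build())    # CronTrigger firing at second 0 of every 5th minute

# interval: seconds minutes hours days weeks
t2 = TriggerSchema(trigger='interval', run_time='30 0 0 0 0')
print(t2.build())   # IntervalTrigger every 30 seconds
```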
--------------------------------------------------------------------------------
/job/tasks.py:
--------------------------------------------------------------------------------
1 | 
2 | import inspect
3 | import importlib
4 | import threading
5 | from app.config import settings
6 | 
7 | class Singleton(type):
8 |     _instance_lock = threading.Lock()
9 |     def __init__(self, *args, **kwargs):
10 |         super().__init__(*args, **kwargs)
11 |     def __call__(cls, *args, **kwargs):
12 |         with cls._instance_lock:
13 |             if not hasattr(cls, '_instance'):
14 |                 cls._instance = super().__call__(*args, **kwargs)
15 |         return cls._instance
16 | 
17 | 
18 | class TaskCollection(metaclass=Singleton):
19 |     def __init__(self) -> None:
20 |         self.task_dict = {}
21 |         self.load_task()
22 | 
23 |     def load_task(self):
24 |         for file in settings.TASK_SCRIPT_PATH.glob('*.py'):
25 |             module_key = f'task.{file.stem}'
26 |             module = importlib.import_module(module_key)
27 |             class_list = inspect.getmembers(module, inspect.isclass)
28 |             for cs in class_list:
29 |                 if 'Task' in cs[0]:
30 |                     # build the key from fresh variables so names don't accumulate
31 |                     # across loop iterations
32 |                     class_key = f'{module_key}.{cs[0]}'
33 |                     func_list = inspect.getmembers(cs[1], inspect.isfunction)
34 |                     for func in func_list:
35 |                         if func[0].startswith('task_'):
36 |                             self.task_dict[f'{class_key}.{func[0]}'] = func[1]
37 | 
38 | task_list = TaskCollection()
39 | __all__ = ["task_list"]
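With this loader, the task in task/test.py registers under a dotted module.Class.function key. A sketch of a lookup (it assumes the project settings load, since importing job.tasks builds the collection):

```python
from job.tasks import task_list

print(sorted(task_list.task_dict))   # e.g. ['task.test.TestTask.task_test', 'task.test.TestTask.task_test1']
fn = task_list.task_dict['task.test.TestTask.task_test']
fn()                                 # runs the test task directly
```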
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | APScheduler==3.6.3
2 | dateutils==0.6.12
3 | fastapi==0.61.1
4 | loguru==0.5.3
5 | pydantic==1.8.2
6 | PyMySQL==1.0.2
7 | python-dateutil==2.8.1
8 | python-dotenv==0.17.1
9 | SQLAlchemy==1.4.18
10 | ujson==4.0.1
11 | uvicorn==0.13.3
12 | zerorpc==0.6.3
13 | 
--------------------------------------------------------------------------------
/rpc/__init__.py:
--------------------------------------------------------------------------------
1 | import uuid
2 | from enum import Enum
3 | from pydantic import ValidationError
4 | from typing import Dict, List, Optional, Tuple
5 | from scheduler.utils import job_to_dict
6 | from job.schemas import JobSchema, TriggerSchema
7 | from app.common.logger import logger
8 | 
9 | 
10 | class Result(Enum):
11 |     success = 'Success'
12 |     fail = 'fail'
13 | 
14 | 
15 | class SchedulerService:
16 |     def __init__(self, scheduler) -> None:
17 |         self.scheduler = scheduler
18 | 
19 |     def add_job(self, data: Dict) -> Tuple:
20 |         job_id = str(uuid.uuid1())
21 |         try:
22 |             schema = JobSchema(**data)
23 |         except ValidationError as e:
24 |             logger.warning('create job failed: {}', e.json())
25 |             return None, e.json()
26 | 
27 |         job = self.scheduler.add_job(
28 |             func=schema.func[1],
29 |             trigger=schema.trigger.build(),
30 |             args=schema.args,
31 |             kwargs=schema.kwargs,
32 |             id=job_id,
33 |             name=schema.name,
34 |             misfire_grace_time=schema.misfire_grace_time,
35 |             coalesce=schema.coalesce,
36 |             max_instances=schema.max_instances,
37 |             next_run_time=schema.next_run_time,
38 |             jobstore=schema.jobstore,
39 |             executor=schema.executor,
40 |             replace_existing=schema.replace_existing
41 |         )
42 |         return job_to_dict(job), Result.success.value
43 | 
44 |     def reschedule_job(self, job_id: str, trigger: Dict, jobstore=None) -> Tuple:
45 |         try:
46 |             schema = TriggerSchema(**trigger)
47 |         except ValidationError as e:
48 |             return None, e.json()
49 | 
50 |         job = self.scheduler.reschedule_job(job_id, jobstore, trigger=schema.build())
51 |         return job_to_dict(job), Result.success.value
52 | 
53 |     def pause_job(self, job_id: str, jobstore: str = None) -> Dict:
54 |         logger.info('job[%s] has been paused' % job_id)
55 |         job = self.scheduler.pause_job(job_id, jobstore)
56 |         return job_to_dict(job)
57 | 
58 |     def resume_job(self, job_id: str, jobstore: str = None) -> Dict:
59 |         logger.info('job[%s] has been resumed' % job_id)
60 |         job = self.scheduler.resume_job(job_id, jobstore)
61 |         return job_to_dict(job)
62 | 
63 |     def remove_job(self, job_id: str, jobstore: str = None) -> Dict:
64 |         logger.info('job[%s] has been removed' % job_id)
65 |         self.scheduler.remove_job(job_id, jobstore)
66 |         return {"result": Result.success.value}
67 | 
68 |     def get_job(self, job_id: str) -> Optional[Dict]:
69 |         job = self.scheduler.get_job(job_id)
70 |         if job:
71 |             return job_to_dict(job)
72 |         return None
73 | 
74 |     def get_jobs(self, jobstore: str = None) -> List[Dict]:
75 |         jobs = self.scheduler.get_jobs(jobstore)
76 |         data = [job_to_dict(job) for job in jobs]
77 |         return data
78 | 
79 |     def query_jobs(self, jobstore: str, conditions: Dict = dict()) -> List[Dict]:
80 |         jobs = self.scheduler.query_jobs(jobstore=jobstore, conditions=conditions)
81 |         data = [job_to_dict(job) for job in jobs]
82 |         return data
83 | 
84 |     def get_stores(self) -> List[str]:
85 |         return list(self.scheduler._jobstores.keys())
86 | 
87 |     def get_executors(self) -> List[str]:
88 |         return list(self.scheduler._executors.keys())
--------------------------------------------------------------------------------
/rpc/client.py:
--------------------------------------------------------------------------------
1 | from typing import Generator
2 | import zerorpc
3 | import contextlib
4 | 
5 | @contextlib.contextmanager
6 | def get_client(url: str) -> Generator:
7 |     client = zerorpc.Client(timeout=60, heartbeat=60)
8 |     client.connect(url)
9 |     try:
10 |         yield client
11 |     finally:
12 |         # close even when the caller raises, so connections never leak
13 |         client.close()
--------------------------------------------------------------------------------
/rpc_server.py:
--------------------------------------------------------------------------------
1 | import zerorpc
2 | from apscheduler.events import EVENT_JOB_MISSED, EVENT_JOB_ERROR, EVENT_JOB_EXECUTED
3 | from scheduler.schedulers.gevent import ExtendGeventScheduler
4 | from job.listener import CronJobListener
5 | from app.common.logger import logger
6 | from app.config import settings
7 | from rpc import SchedulerService
8 | 
9 | 
10 | config = settings.SCHEDULER_CONFIG
11 | scheduler = ExtendGeventScheduler(
12 |     jobstores=config.stores,
13 |     executors=config.executors,
14 |     job_defaults=config.default
15 | )
16 | listener = CronJobListener(schedule=scheduler).job_listener
17 | scheduler.add_listener(listener, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR | EVENT_JOB_MISSED)
18 | scheduler.start()
19 | try:
20 |     logger.info('scheduler has been started')
21 |     server = zerorpc.Server(SchedulerService(scheduler=scheduler), pool_size=settings.RPC_POOL_SIZE, heartbeat=60)
22 |     server.bind(settings.RPC_URL)
23 |     logger.info('RPC server is running at %s' % settings.RPC_URL)
24 |     server.run()
25 | except KeyboardInterrupt:
26 |     server.stop()
27 |     logger.warning('stop RPC Server')
28 | 
--------------------------------------------------------------------------------
/run.py:
--------------------------------------------------------------------------------
1 | from app.main import app
2 | 
3 | 
4 | if __name__ == '__main__':
5 |     import uvicorn
6 |     # reload requires an import string rather than an app object
7 |     uvicorn.run('app.main:app', reload=True)
--------------------------------------------------------------------------------
/run.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | pipenv run server &
4 | pipenv run rpc
--------------------------------------------------------------------------------
/scheduler/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AnsGoo/cronJob/c2564d8eb3903d782dc770d6eec92dd152c3c69f/scheduler/__init__.py
--------------------------------------------------------------------------------
/scheduler/listener.py:
--------------------------------------------------------------------------------
1 | 
2 | from apscheduler.job import Job
3 | from apscheduler.events import JobEvent
4 | 
5 | class JobBaseListener:
6 | 
7 |     def __init__(self, schedule) -> None:
8 |         self.schedule = schedule
9 | 
10 | 
11 |     def save_record(self, event: JobEvent, job: Job) -> None:
12 |         pass
13 | 
14 |     def job_listener(self, event: JobEvent) -> None:
15 |         pass
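The HTTP app never touches APScheduler directly; everything goes through this zerorpc service. A minimal sketch of calling it with the context-managed client, assuming rpc_server.py is already running:

```python
from app.config import settings
from rpc.client import get_client

with get_client(settings.RPC_URL) as client:
    print(client.get_stores())      # e.g. ['default']
    print(client.get_executors())   # e.g. ['default', 'processpool']
    print(client.get_jobs('default'))
```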
--------------------------------------------------------------------------------
1 | from .asyncio import ExtendAsyncIOScheduler
2 | from .gevent import ExtendGeventScheduler
--------------------------------------------------------------------------------
/scheduler/schedulers/asyncio.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 | import six
3 | from typing import Optional
4 | 
5 | from apscheduler.schedulers.asyncio import AsyncIOScheduler
6 | from apscheduler.schedulers.base import STATE_STOPPED
7 | 
8 | 
9 | class ExtendAsyncIOScheduler(AsyncIOScheduler):
10 |     def query_jobs(self, jobstore: str = 'default', conditions: Optional[dict] = None):
11 |         conditions = conditions or {}
12 |         with self._jobstores_lock:
13 |             jobs = []
14 |             if self.state == STATE_STOPPED:
15 |                 for job, alias, replace_existing in self._pending_jobs:
16 |                     if jobstore is None or alias == jobstore:
17 |                         jobs.append(job)
18 |             else:
19 |                 for alias, store in six.iteritems(self._jobstores):
20 |                     if jobstore is None or alias == jobstore:
21 |                         jobs.extend(store.query_jobs(**conditions))
22 |             return jobs
--------------------------------------------------------------------------------
/scheduler/schedulers/gevent.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 | import six
3 | from typing import Optional
4 | 
5 | from apscheduler.schedulers.gevent import GeventScheduler
6 | from apscheduler.schedulers.base import STATE_STOPPED
7 | 
8 | 
9 | class ExtendGeventScheduler(GeventScheduler):
10 |     def query_jobs(self, jobstore: str = 'default', conditions: Optional[dict] = None):
11 |         conditions = conditions or {}
12 |         with self._jobstores_lock:
13 |             jobs = []
14 |             if self.state == STATE_STOPPED:
15 |                 for job, alias, replace_existing in self._pending_jobs:
16 |                     if jobstore is None or alias == jobstore:
17 |                         jobs.append(job)
18 |             else:
19 |                 for alias, store in six.iteritems(self._jobstores):
20 |                     if jobstore is None or alias == jobstore:
21 |                         jobs.extend(store.query_jobs(**conditions))
22 |             return jobs
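The query_jobs extension filters persisted jobs by column values; 'state' maps onto next_run_time in the job store below. A fragment, assuming `scheduler` is an ExtendGeventScheduler instance like the one built in rpc_server.py:

```python
# running cron jobs only: state=True selects rows with a next_run_time
jobs = scheduler.query_jobs(jobstore='default', conditions={'trigger': 'cron', 'state': True})
for job in jobs:
    print(job.id, job.name, job.next_run_time)
```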
--------------------------------------------------------------------------------
/scheduler/schema.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict, Mapping, Optional
2 | from pydantic import BaseModel, validator
3 | 
4 | from apscheduler.executors.pool import BasePoolExecutor, ThreadPoolExecutor, ProcessPoolExecutor
5 | from apscheduler.jobstores.base import BaseJobStore
6 | from scheduler.stores import ExtendSQLAlchemyJobStore
7 | 
8 | from enum import Enum
9 | 
10 | class ExecutorPool(Enum):
11 |     thread = 'thread'
12 |     process = 'process'
13 | 
14 | 
15 | class JobExecutorPool(BaseModel):
16 |     type: str = 'thread'
17 |     size: int = 1
18 | 
19 |     @validator('size')
20 |     def validate_size(cls, size: int, values: Optional[Dict]) -> int:
21 |         if size > 0:
22 |             return size
23 |         else:
24 |             raise ValueError('pool size must be at least 1')
25 | 
26 | 
27 |     def build(self) -> BasePoolExecutor:
28 |         if self.type == 'thread':
29 |             return ThreadPoolExecutor(self.size)
30 |         elif self.type == 'process':
31 |             return ProcessPoolExecutor(self.size)
32 |         else:
33 |             raise NotImplementedError('%s executor is not implemented' % self.type)
34 | 
35 | 
36 | class Store(Enum):
37 |     sqlalchemy = 'sqlalchemy'
38 |     redis = 'redis'
39 |     mongo = 'mongo'
40 | 
41 | 
42 | class JobStore(BaseModel):
43 |     type: str = 'sqlalchemy'
44 |     url: str
45 | 
46 |     def build(self) -> BaseJobStore:
47 |         if self.type == 'sqlalchemy':
48 |             return ExtendSQLAlchemyJobStore(self.url)
49 |         else:
50 |             raise NotImplementedError('%s store is not implemented' % self.type)
51 | 
52 | class Default(BaseModel):
53 |     coalesce: bool = False
54 |     max_instances: int = 1
55 | 
56 | class SchedulerConfig(BaseModel):
57 |     executors: Mapping[str, Any]
58 |     default: Mapping[str, Any]
59 |     stores: Mapping[str, Any]
60 | 
61 | 
62 |     @validator('executors', pre=True)
63 |     def validate_executors(cls, executors: Mapping[str, JobExecutorPool], values: Optional[Dict]) -> Mapping[str, JobExecutorPool]:
64 |         if 'default' in executors:
65 |             return executors
66 |         else:
67 |             raise ValueError('a default executor must exist')
68 | 
69 | 
70 |     @validator('stores', pre=True)
71 |     def validate_stores(cls, stores: Mapping[str, JobStore], values: Optional[Dict]) -> Mapping[str, JobStore]:
72 |         if 'default' in stores:
73 |             return stores
74 |         else:
75 |             raise ValueError('a default store must exist')
--------------------------------------------------------------------------------
/scheduler/stores/__init__.py:
--------------------------------------------------------------------------------
1 | from .sqlachemy import ExtendSQLAlchemyJobStore
--------------------------------------------------------------------------------
/scheduler/stores/sqlachemy.py:
--------------------------------------------------------------------------------
1 | 
2 | from __future__ import absolute_import
3 | import pickle
4 | from apscheduler.jobstores.base import JobLookupError, ConflictingIdError
5 | from apscheduler.util import maybe_ref, datetime_to_utc_timestamp
6 | from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
7 | from apscheduler.job import Job
8 | 
9 | try:
10 |     from sqlalchemy import (
11 |         create_engine, Table, Column, MetaData, Unicode, Float, LargeBinary, String, Enum)
12 |     from sqlalchemy.orm import sessionmaker
13 |     from sqlalchemy.orm.query import Query
14 |     from sqlalchemy.exc import IntegrityError
15 | except ImportError:  # pragma: nocover
16 |     raise ImportError('SQLAlchemyJobStore requires SQLAlchemy installed')
17 | 
18 | from scheduler.utils import get_job_trigger_name
19 | 
20 | 
21 | 
22 | 
23 | class ExtendSQLAlchemyJobStore(SQLAlchemyJobStore):
24 |     def __init__(self, url=None, engine=None, tablename='apscheduler_jobs', metadata=None,
25 |                  pickle_protocol=pickle.HIGHEST_PROTOCOL, tableschema=None, engine_options=None):
26 |         self.pickle_protocol = pickle_protocol
27 |         metadata = maybe_ref(metadata) or MetaData()
28 |         if engine:
29 |             self.engine = maybe_ref(engine)
30 |         elif url:
31 |             self.engine = create_engine(url, **(engine_options or {}))
32 |         else:
33 |             raise ValueError('Need either "engine" or "url" defined')
34 | 
35 |         sessionLocal = sessionmaker(bind=self.engine, autocommit=True, autoflush=True)
36 |         self.db = sessionLocal()
37 |         self.jobs_t = Table(
38 |             tablename, metadata,
39 |             Column('id', Unicode(191, _warn_on_bytestring=False), primary_key=True),
40 |             Column('name', String(256)),
41 |             Column('func', String(256)),
42 |             Column('trigger', Enum('date', 'cron', 'interval')),
43 |             Column('next_run_time', Float(25), index=True),
44 |             Column('job_state', LargeBinary, nullable=False),
45 |             schema=tableschema
46 |         )
47 | 
48 |     def add_job(self, job: Job) -> None:
49 |         insert = self.jobs_t.insert().values(**{
50 |             'id': job.id,
51 |             'name': job.name,
52 |             'trigger': get_job_trigger_name(job.trigger),
53 |             'func': job.func.__name__,
54 |             'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
55 |             'job_state': 
pickle.dumps(job.__getstate__(), self.pickle_protocol) 56 | }) 57 | try: 58 | self.engine.execute(insert) 59 | except IntegrityError: 60 | raise ConflictingIdError(job.id) 61 | 62 | def update_job(self, job: Job) -> None: 63 | update = self.jobs_t.update().values(**{ 64 | 'name': job.name, 65 | 'trigger': get_job_trigger_name(job.trigger), 66 | 'func': job.func.__name__, 67 | 'next_run_time': datetime_to_utc_timestamp(job.next_run_time), 68 | 'job_state': pickle.dumps(job.__getstate__(), self.pickle_protocol) 69 | }).where(self.jobs_t.c.id == job.id) 70 | result = self.engine.execute(update) 71 | if result.rowcount == 0: 72 | raise JobLookupError(job.id) 73 | 74 | def query_jobs(self, **conditions) -> Query: 75 | jobs = self._query_jobs(**conditions) 76 | self._fix_paused_jobs_sorting(jobs) 77 | return jobs 78 | 79 | def _query_jobs(self, **conditions) -> Query: 80 | jobs = [] 81 | queryset = self.db.query(self.jobs_t).order_by(self.jobs_t.c.next_run_time) 82 | state = conditions.pop('state', None) 83 | if conditions: 84 | queryset = queryset.filter_by(**conditions) 85 | if state is not None: 86 | if state: 87 | queryset = queryset.filter(self.jobs_t.c.next_run_time.isnot(None)) 88 | else: 89 | queryset = queryset.filter(self.jobs_t.c.next_run_time==None) 90 | queryset = queryset.all() 91 | failed_job_ids = set() 92 | for row in queryset: 93 | try: 94 | jobs.append(self._reconstitute_job(row.job_state)) 95 | except BaseException: 96 | self._logger.exception('Unable to restore job "%s" -- removing it', row.id) 97 | failed_job_ids.add(row.id) 98 | 99 | # Remove all the jobs we failed to restore 100 | if failed_job_ids: 101 | delete = self.jobs_t.delete().where(self.jobs_t.c.id.in_(failed_job_ids)) 102 | self.engine.execute(delete) 103 | 104 | return jobs 105 | -------------------------------------------------------------------------------- /scheduler/utils.py: -------------------------------------------------------------------------------- 1 | 2 | import six 3 | import dateutil.parser 4 | from datetime import datetime 5 | from typing import Union, Dict, Tuple 6 | 7 | 8 | from collections import OrderedDict 9 | from apscheduler.triggers.cron import CronTrigger 10 | from apscheduler.triggers.date import DateTrigger 11 | from apscheduler.triggers.interval import IntervalTrigger 12 | from apscheduler.job import Job 13 | 14 | 15 | def get_job_trigger_name(trigger: Union[CronTrigger, IntervalTrigger, DateTrigger]) -> str: 16 | if isinstance(trigger, DateTrigger): 17 | return 'date' 18 | elif isinstance(trigger, IntervalTrigger): 19 | return 'interval' 20 | elif isinstance(trigger, CronTrigger): 21 | return 'cron' 22 | 23 | 24 | def job_to_dict(job:Job) -> Dict: 25 | """Converts a job to an OrderedDict.""" 26 | data = OrderedDict() 27 | data['id'] = job.id 28 | data['name'] = job.name 29 | data['func'] = job.func_ref 30 | data['args'] = job.args 31 | data['kwargs'] = job.kwargs 32 | data['jobstore'] = job._jobstore_alias 33 | 34 | data.update(trigger_to_dict(job.trigger)) 35 | 36 | if not job.pending: 37 | data['misfire_grace_time'] = job.misfire_grace_time 38 | data['max_instances'] = job.max_instances 39 | data['next_run_time'] = None if job.next_run_time is None else job.next_run_time.strftime("%Y-%m-%d %H:%M:%S") 40 | 41 | return data 42 | 43 | 44 | def pop_trigger(data:Dict) -> Tuple[str]: 45 | """Pops trigger and trigger args from a given dict.""" 46 | 47 | trigger_name = data.pop('trigger') 48 | trigger_args = {} 49 | 50 | if trigger_name == 'date': 51 | trigger_arg_names = ('run_date', 
'timezone')
52 |     elif trigger_name == 'interval':
53 |         trigger_arg_names = ('weeks', 'days', 'hours', 'minutes', 'seconds', 'start_date', 'end_date', 'timezone')
54 |     elif trigger_name == 'cron':
55 |         trigger_arg_names = ('year', 'month', 'day', 'week', 'day_of_week', 'hour', 'minute', 'second', 'start_date', 'end_date', 'timezone')
56 |     else:
57 |         raise Exception('Trigger %s is not supported.' % trigger_name)
58 | 
59 |     for arg_name in trigger_arg_names:
60 |         if arg_name in data:
61 |             trigger_args[arg_name] = data.pop(arg_name)
62 | 
63 |     return trigger_name, trigger_args
64 | 
65 | 
66 | def trigger_to_dict(trigger: Union[DateTrigger, IntervalTrigger, CronTrigger]) -> Dict:
67 |     """Converts a trigger to an OrderedDict."""
68 | 
69 |     data = OrderedDict()
70 | 
71 |     if isinstance(trigger, DateTrigger):
72 |         data['trigger'] = 'date'
73 |         data['run_date'] = trigger.run_date.strftime("%Y-%m-%d %H:%M:%S")
74 |     elif isinstance(trigger, IntervalTrigger):
75 |         data['trigger'] = 'interval'
76 |         data['start_date'] = trigger.start_date.strftime("%Y-%m-%d %H:%M:%S")
77 | 
78 |         if trigger.end_date:
79 |             data['end_date'] = trigger.end_date.strftime("%Y-%m-%d %H:%M:%S")
80 |         times = [str(item) for item in extract_timedelta(trigger.interval)]
81 |         times.reverse()
82 |         data['run_time'] = ' '.join(times)
83 | 
84 |     elif isinstance(trigger, CronTrigger):
85 |         data['trigger'] = 'cron'
86 | 
87 |         if trigger.start_date:
88 |             data['start_date'] = trigger.start_date.strftime("%Y-%m-%d %H:%M:%S")
89 | 
90 |         if trigger.end_date:
91 |             data['end_date'] = trigger.end_date.strftime("%Y-%m-%d %H:%M:%S")
92 |         # work on a copy of the fields so the trigger itself is not mutated
93 |         fields = list(trigger.fields)
94 |         fields.reverse()
95 |         cron = dict()
96 |         for field in fields:
97 |             if not field.is_default:
98 |                 cron[field.name] = str(field)
99 |         names = ['second', 'minute', 'hour', 'day', 'month', 'day_of_week', 'year']
100 |         times = [cron[name] for name in names if cron.get(name)]
101 |         data['run_time'] = ' '.join(times)
102 |     else:
103 |         data['trigger'] = str(trigger)
104 | 
105 |     return data
106 | 
107 | 
108 | def fix_job_def(job_def: Dict) -> None:
109 |     """
110 |     Replaces datetime strings in a job definition with datetime objects, in place.
111 |     """
112 |     if six.PY2 and isinstance(job_def.get('func'), six.text_type):
113 |         job_def['func'] = str(job_def.get('func'))
114 | 
115 |     if isinstance(job_def.get('start_date'), six.string_types):
116 |         job_def['start_date'] = dateutil.parser.parse(job_def.get('start_date'))
117 | 
118 |     if isinstance(job_def.get('end_date'), six.string_types):
119 |         job_def['end_date'] = dateutil.parser.parse(job_def.get('end_date'))
120 | 
121 |     if isinstance(job_def.get('run_date'), six.string_types):
122 |         job_def['run_date'] = dateutil.parser.parse(job_def.get('run_date'))
123 | 
124 |     if isinstance(job_def.get('trigger'), dict):
125 |         trigger = job_def.pop('trigger')
126 |         job_def['trigger'] = trigger.pop('type', 'date')
127 |         job_def.update(trigger)
128 | 
129 | 
130 | from datetime import timedelta  # the module-level import only brings in datetime
131 | 
132 | def extract_timedelta(delta: timedelta) -> Tuple[int, int, int, int, int]:
133 |     w, d = divmod(delta.days, 7)
134 |     mm, ss = divmod(delta.seconds, 60)
135 |     hh, mm = divmod(mm, 60)
136 |     return w, d, hh, mm, ss
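trigger_to_dict is effectively the inverse of TriggerSchema.build: it folds a trigger's non-default fields back into the run_time string format. A sketch:

```python
from apscheduler.triggers.cron import CronTrigger
from scheduler.utils import trigger_to_dict

trig = CronTrigger(second='0', minute='*/5')
print(trigger_to_dict(trig))   # e.g. OrderedDict([('trigger', 'cron'), ('run_time', '0 */5')])
```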
109 | """ 110 | if six.PY2 and isinstance(job_def.get('func'), six.text_type): 111 | job_def['func'] = str(job_def.get('func')) 112 | 113 | if isinstance(job_def.get('start_date'), six.string_types): 114 | job_def['start_date'] = dateutil.parser.parse(job_def.get('start_date')) 115 | 116 | if isinstance(job_def.get('end_date'), six.string_types): 117 | job_def['end_date'] = dateutil.parser.parse(job_def.get('end_date')) 118 | 119 | if isinstance(job_def.get('run_date'), six.string_types): 120 | job_def['run_date'] = dateutil.parser.parse(job_def.get('run_date')) 121 | 122 | if isinstance(job_def.get('trigger'), dict): 123 | trigger = job_def.pop('trigger') 124 | job_def['trigger'] = trigger.pop('type', 'date') 125 | job_def.update(trigger) 126 | 127 | 128 | def extract_timedelta(delta: datetime) -> Tuple[int]: 129 | w, d = divmod(delta.days, 7) 130 | mm, ss = divmod(delta.seconds, 60) 131 | hh, mm = divmod(mm, 60) 132 | return w, d, hh, mm, ss -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [isort] 2 | profile = black 3 | known_first_party = app 4 | 5 | [flake8] 6 | max-complexity = 7 7 | statistics = True 8 | max-line-length = 88 9 | ignore = W503,E203 10 | per-file-ignores = 11 | __init__.py: F401 12 | 13 | [mypy] 14 | plugins = pydantic.mypy 15 | ignore_missing_imports = True 16 | follow_imports = skip 17 | strict_optional = True 18 | -------------------------------------------------------------------------------- /sources.list: -------------------------------------------------------------------------------- 1 | deb http://mirrors.aliyun.com/ubuntu/ focal main restricted universe multiverse 2 | deb-src http://mirrors.aliyun.com/ubuntu/ focal main restricted universe multiverse 3 | 4 | deb http://mirrors.aliyun.com/ubuntu/ focal-security main restricted universe multiverse 5 | deb-src http://mirrors.aliyun.com/ubuntu/ focal-security main restricted universe multiverse 6 | 7 | deb http://mirrors.aliyun.com/ubuntu/ focal-updates main restricted universe multiverse 8 | deb-src http://mirrors.aliyun.com/ubuntu/ focal-updates main restricted universe multiverse 9 | 10 | deb http://mirrors.aliyun.com/ubuntu/ focal-proposed main restricted universe multiverse 11 | deb-src http://mirrors.aliyun.com/ubuntu/ focal-proposed main restricted universe multiverse 12 | 13 | deb http://mirrors.aliyun.com/ubuntu/ focal-backports main restricted universe multiverse 14 | deb-src http://mirrors.aliyun.com/ubuntu/ focal-backports main restricted universe multiverse -------------------------------------------------------------------------------- /task/test.py: -------------------------------------------------------------------------------- 1 | import requests 2 | 3 | class TestTask: 4 | @staticmethod 5 | def task_test(*args, **kwargs) -> None: 6 | ''' 7 | 测试任务 8 | :return: 9 | ''' 10 | print('执行test') 11 | 12 | @staticmethod 13 | def task_test1(*args, **kwargs) -> None: 14 | ''' 15 | 测试任务1 16 | :return: 17 | ''' 18 | print('执行结果test1') -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AnsGoo/cronJob/c2564d8eb3903d782dc770d6eec92dd152c3c69f/tests/__init__.py -------------------------------------------------------------------------------- /utils/__init__.py: 
--------------------------------------------------------------------------------
/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/AnsGoo/cronJob/c2564d8eb3903d782dc770d6eec92dd152c3c69f/utils/__init__.py
--------------------------------------------------------------------------------
/utils/common.py:
--------------------------------------------------------------------------------
1 | from typing import Dict
2 | 
3 | def remove_none(data: Dict) -> Dict:
4 |     # iterate over a snapshot of the keys so deleting during the loop is safe
5 |     keys = list(data.keys())
6 |     for key in keys:
7 |         if data[key] is None:
8 |             del data[key]
9 |     return data
--------------------------------------------------------------------------------
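remove_none mutates its argument in place and also returns it, which is why the API handlers above can use it inline on query-parameter dicts. A sketch:

```python
from utils.common import remove_none

params = {'name': 'demo', 'trigger': None, 'func': None}
print(remove_none(params))   # {'name': 'demo'}
```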