├── .github ├── FUNDING.yml └── workflows │ └── release.yml ├── .gitignore ├── CHANGELOG.md ├── LICENSE.md ├── README.md ├── chancy ├── __init__.py ├── app.py ├── cli │ ├── __init__.py │ ├── cli.py │ ├── misc.py │ ├── queue.py │ └── worker.py ├── contrib │ ├── __init__.py │ └── django │ │ ├── __init__.py │ │ ├── admin.py │ │ ├── apps.py │ │ ├── auth.py │ │ └── models.py ├── errors.py ├── executors │ ├── __init__.py │ ├── asyncex.py │ ├── base.py │ ├── process.py │ ├── sub.py │ └── thread.py ├── hub.py ├── job.py ├── migrate.py ├── migrations │ ├── v1.py │ ├── v2.py │ ├── v3.py │ ├── v4.py │ └── v5.py ├── plugin.py ├── plugins │ ├── __init__.py │ ├── api │ │ ├── __init__.py │ │ ├── auth.py │ │ ├── core.py │ │ ├── plugin.py │ │ └── ui │ │ │ ├── README.md │ │ │ ├── eslint.config.js │ │ │ ├── index.html │ │ │ ├── package-lock.json │ │ │ ├── package.json │ │ │ ├── public │ │ │ └── logo_small.png │ │ │ ├── src │ │ │ ├── App.css │ │ │ ├── Layout.tsx │ │ │ ├── components │ │ │ │ ├── Loading.tsx │ │ │ │ ├── MetricCharts.tsx │ │ │ │ └── UpdatingTime.tsx │ │ │ ├── hooks │ │ │ │ ├── useCrons.tsx │ │ │ │ ├── useJobs.tsx │ │ │ │ ├── useMetrics.tsx │ │ │ │ ├── useQueues.tsx │ │ │ │ ├── useServerConfiguration.tsx │ │ │ │ ├── useSessionStorage.tsx │ │ │ │ ├── useWorkers.tsx │ │ │ │ └── useWorkflows.tsx │ │ │ ├── index.scss │ │ │ ├── main.tsx │ │ │ ├── pages │ │ │ │ ├── Crons.tsx │ │ │ │ ├── Jobs.tsx │ │ │ │ ├── Metrics.tsx │ │ │ │ ├── Queues.tsx │ │ │ │ ├── Workers.tsx │ │ │ │ ├── WorkflowChart.tsx │ │ │ │ └── Workflows.tsx │ │ │ ├── utils.tsx │ │ │ └── vite-env.d.ts │ │ │ ├── tsconfig.app.json │ │ │ ├── tsconfig.json │ │ │ ├── tsconfig.node.json │ │ │ └── vite.config.ts │ ├── cron │ │ ├── __init__.py │ │ ├── api.py │ │ ├── django │ │ │ ├── admin.py │ │ │ ├── apps.py │ │ │ └── models.py │ │ └── migrations │ │ │ ├── __init__.py │ │ │ ├── v1.py │ │ │ └── v2.py │ ├── leadership.py │ ├── metrics │ │ ├── __init__.py │ │ ├── api.py │ │ ├── metrics.py │ │ └── migrations │ │ │ ├── 
__init__.py │ │ │ └── v1.py │ ├── pruner.py │ ├── recovery.py │ ├── reprioritize.py │ ├── retry.py │ ├── sentry.py │ └── workflow │ │ ├── __init__.py │ │ ├── api.py │ │ ├── django │ │ ├── __init__.py │ │ ├── admin.py │ │ ├── apps.py │ │ └── models.py │ │ └── migrations │ │ ├── __init__.py │ │ ├── v1.py │ │ └── v2.py ├── queue.py ├── rule.py ├── utils.py └── worker.py ├── docker-compose.yml ├── docs ├── Makefile ├── _static │ └── workflow.dot ├── chancy.app.rst ├── chancy.errors.rst ├── chancy.executors.asyncex.rst ├── chancy.executors.base.rst ├── chancy.executors.process.rst ├── chancy.executors.rst ├── chancy.executors.sub.rst ├── chancy.executors.thread.rst ├── chancy.hub.rst ├── chancy.job.rst ├── chancy.migrate.rst ├── chancy.plugin.rst ├── chancy.plugins.api.rst ├── chancy.plugins.cron.rst ├── chancy.plugins.leadership.rst ├── chancy.plugins.metrics.rst ├── chancy.plugins.pruner.rst ├── chancy.plugins.recovery.rst ├── chancy.plugins.reprioritize.rst ├── chancy.plugins.retry.rst ├── chancy.plugins.rst ├── chancy.plugins.sentry.rst ├── chancy.plugins.workflow.rst ├── chancy.queue.rst ├── chancy.rst ├── chancy.rule.rst ├── chancy.utils.rst ├── chancy.worker.rst ├── conf.py ├── design.rst ├── faq.rst ├── howto │ ├── celery.rst │ ├── context.rst │ ├── django.rst │ ├── fastapi.rst │ ├── index.rst │ ├── jobs.rst │ ├── log.rst │ └── retry.rst ├── index.rst ├── make.bat └── similar.rst ├── misc ├── logo.png ├── logo_small.png ├── ux_job_failed.png ├── ux_jobs.png ├── ux_queue.png ├── ux_worker.png └── ux_workflow.png ├── pyproject.toml └── tests ├── conftest.py ├── contrib └── django │ ├── conftest.py │ ├── settings.py │ ├── test_connection.py │ └── test_models.py ├── plugins ├── test_cron.py ├── test_leadership.py ├── test_pruner.py ├── test_reprioritization.py ├── test_retry.py └── test_workflow.py ├── test_explicit_pool.py ├── test_jobs.py ├── test_queues.py ├── test_scale.py └── test_worker.py /.github/FUNDING.yml: 
-------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: TkTech 4 | patreon: # Replace with a single Patreon username 5 | open_collective: # Replace with a single Open Collective username 6 | ko_fi: # Replace with a single Ko-fi username 7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel 8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry 9 | liberapay: # 10 | issuehunt: # Replace with a single IssueHunt username 11 | otechie: # Replace with a single Otechie username 12 | custom: # Replace with a single custom sponsorship URL 13 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Test and Release 2 | 3 | on: 4 | release: 5 | types: 6 | - published 7 | pull_request: 8 | push: 9 | branches: 10 | - main 11 | - dev 12 | 13 | jobs: 14 | build: 15 | name: Build Package 16 | runs-on: ubuntu-latest 17 | 18 | steps: 19 | - uses: actions/checkout@v4.1.6 20 | 21 | - name: Set up Python 22 | uses: actions/setup-python@v5.1.0 23 | with: 24 | python-version: "3.13" 25 | 26 | - name: Install uv 27 | run: pip install uv 28 | 29 | - name: Installing dependencies (Node) 30 | working-directory: chancy/plugins/api/ui 31 | run: npm install 32 | 33 | - name: Building UI 34 | working-directory: chancy/plugins/api/ui 35 | run: npm run build 36 | 37 | - name: Checking for code smells 38 | run: uvx ruff check 39 | 40 | - name: Checking for formatting issues 41 | run: uvx ruff format --check 42 | 43 | - name: Build package 44 | run: uv build 45 | 46 | - name: Upload wheel artifact 47 | uses: actions/upload-artifact@v4 48 | with: 49 | name: wheel-package 50 | path: dist/*.whl 51 | retention-days: 1 52 | 53 | - name: Upload source artifact 54 | uses: 
actions/upload-artifact@v4 55 | with: 56 | name: source-package 57 | path: dist/*.tar.gz 58 | retention-days: 1 59 | 60 | test: 61 | name: "${{ matrix.os }} - Py${{ matrix.python-version }} - PG${{ matrix.pg }}" 62 | needs: [build] 63 | runs-on: ${{ matrix.os }} 64 | timeout-minutes: 30 65 | strategy: 66 | fail-fast: true 67 | matrix: 68 | python-version: ["3.11", "3.12", "3.13"] 69 | os: ["ubuntu-24.04", "macos-14", "windows-2022"] 70 | pg: ["14", "15", "16", "17"] 71 | env: 72 | OS: ${{ matrix.os }} 73 | PYTHON: ${{ matrix.python-version }} 74 | 75 | steps: 76 | - uses: actions/checkout@v4.1.6 77 | 78 | - uses: ikalnytskyi/action-setup-postgres@v7 79 | with: 80 | username: postgres 81 | password: localtest 82 | database: postgres 83 | port: 8190 84 | postgres-version: ${{ matrix.pg }} 85 | id: postgres 86 | 87 | - name: Set up Python ${{ matrix.python-version }} 88 | uses: actions/setup-python@v5.1.0 89 | with: 90 | python-version: ${{ matrix.python-version }} 91 | 92 | - name: Installing uv 93 | run: pip install uv 94 | 95 | - name: Installing dependencies (Python) 96 | run: uv sync --all-extras 97 | 98 | - name: Running tests 99 | run: | 100 | uv run pytest -s -vvvvv --cov=chancy --cov-report=xml 101 | 102 | - name: Uploading coverage 103 | uses: codecov/codecov-action@v4 104 | with: 105 | env_vars: OS,PYTHON 106 | fail_ci_if_error: true 107 | flags: unittests 108 | token: ${{ secrets.CODECOV_TOKEN }} 109 | verbose: true 110 | slug: TkTech/chancy 111 | 112 | docs: 113 | name: Building Documentation 114 | needs: [build] 115 | runs-on: ubuntu-latest 116 | 117 | steps: 118 | - uses: actions/checkout@v4.1.6 119 | 120 | - name: Set up Python 121 | uses: actions/setup-python@v5.1.0 122 | with: 123 | python-version: "3.13" 124 | 125 | - name: Installing dependencies 126 | run: sudo apt-get install graphviz 127 | 128 | - name: Installing uv 129 | run: pip install uv 130 | 131 | - name: Installing dependencies (Python) 132 | run: uv sync --all-extras 133 | 134 | - name: 
Building docs 135 | run: | 136 | cd docs && uv run make clean html 137 | 138 | - name: Publishing documentation 139 | if: github.event_name == 'release' 140 | run: | 141 | uv run ghp-import -f -n -p docs/_build/html 142 | 143 | release: 144 | name: Release to PyPI 145 | needs: [test, docs] 146 | runs-on: ubuntu-latest 147 | if: github.event_name == 'release' 148 | permissions: 149 | contents: read 150 | id-token: write 151 | 152 | steps: 153 | - name: Download all artifacts 154 | uses: actions/download-artifact@v4 155 | with: 156 | merge-multiple: true 157 | path: dist 158 | 159 | - name: Set up Python 160 | uses: actions/setup-python@v5.1.0 161 | with: 162 | python-version: "3.13" 163 | 164 | - name: Install uv 165 | run: pip install uv 166 | 167 | - name: Publishing to PyPI 168 | run: | 169 | uv publish -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ### Python template 2 | # Byte-compiled / optimized / DLL files 3 | __pycache__/ 4 | *.py[cod] 5 | *$py.class 6 | 7 | # C extensions 8 | *.so 9 | 10 | # Distribution / packaging 11 | .Python 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | cover/ 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | db.sqlite3 63 | db.sqlite3-journal 64 | 65 | # Flask stuff: 66 | instance/ 67 | .webassets-cache 68 | 69 | # Scrapy stuff: 70 | .scrapy 71 | 72 | # Sphinx documentation 73 | docs/_build/ 74 | 75 | # PyBuilder 76 | .pybuilder/ 77 | target/ 78 | 79 | # Jupyter Notebook 80 | .ipynb_checkpoints 81 | 82 | # IPython 83 | profile_default/ 84 | ipython_config.py 85 | 86 | # pyenv 87 | # For a library or package, you might want to ignore these files since the code is 88 | # intended to run in multiple environments; otherwise, check them in: 89 | # .python-version 90 | 91 | # pipenv 92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 95 | # install all needed dependencies. 96 | #Pipfile.lock 97 | 98 | # poetry 99 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 100 | # This is especially recommended for binary packages to ensure reproducibility, and is more 101 | # commonly ignored for libraries. 102 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 103 | #poetry.lock 104 | 105 | # pdm 106 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 
107 | #pdm.lock 108 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 109 | # in version control. 110 | # https://pdm.fming.dev/#use-with-ide 111 | .pdm.toml 112 | 113 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 114 | __pypackages__/ 115 | 116 | # Celery stuff 117 | celerybeat-schedule 118 | celerybeat.pid 119 | 120 | # SageMath parsed files 121 | *.sage.py 122 | 123 | # Environments 124 | .env 125 | .venv 126 | env/ 127 | venv/ 128 | ENV/ 129 | env.bak/ 130 | venv.bak/ 131 | 132 | # Spyder project settings 133 | .spyderproject 134 | .spyproject 135 | 136 | # Rope project settings 137 | .ropeproject 138 | 139 | # mkdocs documentation 140 | /site 141 | 142 | # mypy 143 | .mypy_cache/ 144 | .dmypy.json 145 | dmypy.json 146 | 147 | # Pyre type checker 148 | .pyre/ 149 | 150 | # pytype static type analyzer 151 | .pytype/ 152 | 153 | # Cython debug symbols 154 | cython_debug/ 155 | 156 | # PyCharm 157 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 158 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 159 | # and can be added to the global gitignore or merged into this file. For a more nuclear 160 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
161 | #.idea/ 162 | 163 | # PyCharm 164 | .idea/ 165 | 166 | chancy/plugins/api/ui/node_modules 167 | # rollup-visualizer output 168 | chancy/plugins/api/ui/stats.html 169 | # uv 170 | uv.lock -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | Copyright (c) 2024 Tyler Kennedy 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of 4 | this software and associated documentation files (the "Software"), to deal in 5 | the Software without restriction, including without limitation the rights to 6 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software is furnished to do so, 8 | subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all 11 | copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 15 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 16 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 17 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 18 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Chancy 2 | 3 | ![Chancy Logo](misc/logo_small.png) 4 | 5 | Chancy is a distributed task queue and scheduler for Python built on top of 6 | Postgres. 
7 | 8 | ![MIT License](https://img.shields.io/github/license/tktech/chancy) 9 | ![Codecov](https://img.shields.io/codecov/c/github/TkTech/chancy) 10 | ![PyPI Version](https://img.shields.io/pypi/v/chancy) 11 | ![Python Version](https://img.shields.io/pypi/pyversions/chancy) 12 | ![OS Platforms](https://img.shields.io/badge/OS-Linux%20|%20macOS%20|%20Windows-blue) 13 | ![PostgreSQL Versions](https://img.shields.io/badge/PostgreSQL-%2014%20|%2015%20|%2016%20|%2017-blue) 14 | 15 | ## Key Features 16 | 17 | - **Robust Jobs** - support for priorities, retries, timeouts, scheduling, 18 | global rate limits, memory limits, global uniqueness, error 19 | capture, cancellation, and more 20 | - **Minimal dependencies** - Core functionality requires only psycopg3 - which 21 | can be installed side-by-side with psycopg2. 22 | - **Minimal infrastructure** - No need to run a separate service like 23 | RabbitMQ or redis. Every feature is built on top of Postgres. No need 24 | for separate monitoring services like Flower or schedulers like Celery 25 | Beat - everything is built-in to the worker. 26 | - **Plugins** - Several plugins including a dashboard, workflows, cron jobs, 27 | and more. 28 | - **Flexible** - A single worker can handle many queues and mix threads, 29 | processes, sub-interpreters, and asyncio jobs, allowing powerful workflows 30 | that use the optimal concurrency model for each job. Queues can be created, 31 | deleted, modified, and paused at runtime. 32 | - **async-first** - Internals designed from the ground up to be async-first, 33 | but has minimal sync APIs for easy integration with existing non-async 34 | codebases. 35 | - **Transactional enqueueing** - Atomically enqueue jobs and the data they 36 | depend on in a single transaction. 37 | - **Performant** - Used in production environments to process millions of 38 | jobs per day. 39 | - **Portable** - Supports Linux, OS X, and Windows. 40 | - **100% open & free** - no enterprise tiers or paid features. 
41 | 42 | ## Documentation 43 | 44 | Check out the getting-started guide and the API documentation at 45 | https://tkte.ch/chancy/. 46 | 47 | ## Screenshots 48 | 49 | Chancy comes with an optional dashboard that provides a basic 50 | look into the status of your queues: 51 | 52 | ![Workflows](misc/ux_workflow.png) 53 | ![Queue Details](misc/ux_queue.png) 54 | ![Jobs](misc/ux_jobs.png) 55 | ![Job](misc/ux_job_failed.png) -------------------------------------------------------------------------------- /chancy/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = ( 2 | "Chancy", 3 | "Worker", 4 | "Queue", 5 | "Job", 6 | "QueuedJob", 7 | "Limit", 8 | "Reference", 9 | "job", 10 | ) 11 | 12 | from chancy.app import Chancy 13 | from chancy.queue import Queue 14 | from chancy.worker import Worker 15 | from chancy.job import Limit, Job, QueuedJob, Reference, job 16 | -------------------------------------------------------------------------------- /chancy/cli/__init__.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from functools import wraps 3 | 4 | 5 | def run_async_command(f): 6 | """ 7 | Decorator to run a coroutine as a Click command. 8 | """ 9 | 10 | @wraps(f) 11 | def wrapper(*args, **kwargs): 12 | return asyncio.run(f(*args, **kwargs)) 13 | 14 | return wrapper 15 | -------------------------------------------------------------------------------- /chancy/cli/cli.py: -------------------------------------------------------------------------------- 1 | from typing import Callable 2 | 3 | import click 4 | 5 | from chancy import Chancy 6 | from chancy.utils import import_string 7 | 8 | 9 | @click.group() 10 | @click.option( 11 | "--app", 12 | "-a", 13 | help="The import path for the Chancy app.", 14 | required=True, 15 | ) 16 | @click.pass_context 17 | def cli(ctx: click.Context, app: str): 18 | """ 19 | Command line interface for Chancy. 
20 | """ 21 | ctx.ensure_object(dict) 22 | 23 | # Might be a Chancy instance or a function we need to call to get one. 24 | chancy_app: Chancy | Callable[[], Chancy] = import_string(app) 25 | 26 | if isinstance(chancy_app, Chancy): 27 | ctx.obj["app"] = chancy_app 28 | else: 29 | ctx.obj["app"] = chancy_app() 30 | 31 | 32 | def main(): 33 | from chancy.cli import misc, queue, worker 34 | 35 | cli.add_command(misc.misc_group) 36 | cli.add_command(queue.queue_group) 37 | cli.add_command(worker.worker_group) 38 | 39 | cli() 40 | 41 | 42 | if __name__ == "__main__": 43 | main() 44 | -------------------------------------------------------------------------------- /chancy/cli/misc.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import code 3 | 4 | import click 5 | from psycopg import AsyncCursor 6 | from psycopg.rows import DictRow, dict_row 7 | 8 | from chancy import Chancy, Worker, Job, QueuedJob, Limit, Reference, Queue 9 | from chancy.cli import run_async_command 10 | from chancy.migrate import Migrator 11 | 12 | 13 | @click.group(name="misc") 14 | def misc_group(): 15 | """ 16 | Miscellaneous commands. 17 | """ 18 | pass 19 | 20 | 21 | @misc_group.command() 22 | @click.option( 23 | "--to-version", 24 | "-t", 25 | help="The version to migrate to.", 26 | type=int, 27 | ) 28 | @click.option( 29 | "--plugin", 30 | "-p", 31 | help="Migrate only the specified plugin.", 32 | type=str, 33 | ) 34 | @click.pass_context 35 | @run_async_command 36 | async def migrate( 37 | ctx: click.Context, to_version: int | None, plugin: str | None 38 | ): 39 | """ 40 | Migrate the database's core tables and any configured plugins up or down 41 | to the desired version. 42 | 43 | By default, migrates to the latest version, but can be controlled with 44 | the `--to-version` option. 45 | 46 | Use the `--plugin` option to migrate only a specific plugin instead of all plugins. 
47 | """ 48 | chancy: Chancy = ctx.obj["app"] 49 | 50 | async with chancy: 51 | if plugin: 52 | matching_plugins = [ 53 | p 54 | for p in chancy.plugins.values() 55 | if p.__class__.__name__.lower() == plugin.lower() 56 | ] 57 | 58 | if not matching_plugins: 59 | raise click.ClickException( 60 | f"No plugin found with name '{plugin}'" 61 | ) 62 | 63 | if len(matching_plugins) > 1: 64 | raise click.ClickException( 65 | f"Multiple plugins found with name '{plugin}'. Please use a more specific name." 66 | ) 67 | 68 | await matching_plugins[0].migrate(chancy, to_version=to_version) 69 | else: 70 | await chancy.migrate(to_version=to_version) 71 | 72 | 73 | async def _check_migrations(migrator: Migrator, cursor: AsyncCursor[DictRow]): 74 | """ 75 | Check the migrations for a migrator. 76 | """ 77 | good = "✓" 78 | bad = "✗" 79 | 80 | all_migrations = migrator.discover_all_migrations() 81 | current_version = await migrator.get_current_version(cursor) 82 | 83 | for i, migration in enumerate(all_migrations, 1): 84 | is_applied = i <= current_version 85 | click.echo( 86 | f"| |- [{good if is_applied else bad}] {migration.__class__.__name__} " 87 | ) 88 | 89 | 90 | @misc_group.command() 91 | @click.pass_context 92 | @run_async_command 93 | async def check_migrations(ctx: click.Context): 94 | """ 95 | Check the current migration status of the database. 
96 | """ 97 | chancy: Chancy = ctx.obj["app"] 98 | 99 | async with chancy: 100 | migrator = Migrator("chancy", "chancy.migrations", prefix=chancy.prefix) 101 | async with chancy.pool.connection() as conn: 102 | async with conn.cursor(row_factory=dict_row) as cursor: 103 | click.echo("Chancy Core") 104 | await _check_migrations(migrator, cursor) 105 | 106 | for plugin in chancy.plugins.values(): 107 | migrator = plugin.migrator(chancy) 108 | if migrator is None: 109 | continue 110 | 111 | click.echo(f"|-{plugin.__class__.__name__} Plugin") 112 | await _check_migrations(migrator, cursor) 113 | 114 | 115 | @misc_group.command() 116 | @click.pass_context 117 | @run_async_command 118 | async def shell(ctx): 119 | """ 120 | Start an interactive shell with the Chancy app instance and common 121 | imports already available. 122 | """ 123 | chancy: Chancy = ctx.obj["app"] 124 | 125 | async with chancy: 126 | code.interact( 127 | local={ 128 | "chancy": chancy, 129 | "Worker": Worker, 130 | "Job": Job, 131 | "QueuedJob": QueuedJob, 132 | "Limit": Limit, 133 | "Reference": Reference, 134 | "Queue": Queue, 135 | "asyncio": asyncio, 136 | } 137 | ) 138 | -------------------------------------------------------------------------------- /chancy/cli/queue.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | import click 4 | from click import Context 5 | 6 | from chancy import Chancy, Job, Queue 7 | from chancy.cli import run_async_command 8 | 9 | 10 | @click.group(name="queue") 11 | def queue_group(): 12 | """ 13 | Queue management commands. 
14 | """ 15 | pass 16 | 17 | 18 | @queue_group.command() 19 | @click.argument("job") 20 | @click.option( 21 | "--queue", 22 | "-q", 23 | help="The queue to push the job to.", 24 | default="default", 25 | ) 26 | @click.option( 27 | "--priority", 28 | "-p", 29 | help="The job's priority.", 30 | default=0, 31 | type=int, 32 | ) 33 | @click.option( 34 | "--unique-key", 35 | "-u", 36 | help="The job's unique key.", 37 | ) 38 | @click.option( 39 | "--max-attempts", 40 | "-m", 41 | help="The maximum number of attempts to make.", 42 | type=int, 43 | default=1, 44 | ) 45 | @click.option( 46 | "--kwargs", 47 | "-k", 48 | help="A JSON blob containing the keyword arguments to pass to the job.", 49 | ) 50 | @click.pass_context 51 | @run_async_command 52 | async def push( 53 | ctx: Context, 54 | job: str, 55 | queue: str, 56 | priority: int, 57 | unique_key: str, 58 | max_attempts: int, 59 | kwargs: str | None = None, 60 | ): 61 | """ 62 | Push a job. 63 | 64 | Note that this method of pushing a job ignores any defaults that might be 65 | defined on the job using the @job() decorator, such as the queue name. 66 | While this can be an inconvenience, it allows pushing jobs without having 67 | to import any project-specific code. 68 | """ 69 | chancy: Chancy = ctx.obj["app"] 70 | 71 | if kwargs is not None: 72 | kwargs = json.loads(kwargs) 73 | 74 | async with chancy: 75 | await chancy.push( 76 | Job( 77 | func=job, 78 | queue=queue, 79 | priority=priority, 80 | unique_key=unique_key, 81 | max_attempts=max_attempts, 82 | kwargs=kwargs, 83 | ) 84 | ) 85 | 86 | 87 | @queue_group.command("list") 88 | @click.pass_context 89 | @run_async_command 90 | async def list_queues(ctx: click.Context): 91 | """ 92 | List all the queues. 
93 | """ 94 | chancy: Chancy = ctx.obj["app"] 95 | 96 | async with chancy: 97 | queues = await chancy.get_all_queues() 98 | 99 | for queue in queues: 100 | click.echo(f"{queue!r}") 101 | 102 | 103 | @queue_group.command("declare") 104 | @click.argument("name") 105 | @click.option( 106 | "--concurrency", 107 | "-c", 108 | default=1, 109 | type=int, 110 | help="The number of simultaneous jobs to process.", 111 | ) 112 | @click.option( 113 | "--executor", 114 | "-e", 115 | default="chancy.executors.process.ProcessExecutor", 116 | help="The executor to use.", 117 | ) 118 | @click.option( 119 | "--rate-limit", "-r", type=int, help="The global queue rate limit." 120 | ) 121 | @click.option( 122 | "--rate-limit-window", 123 | "-w", 124 | type=int, 125 | help="The global queue rate limit window.", 126 | ) 127 | @click.option( 128 | "--polling-interval", 129 | "-p", 130 | help="The interval to poll the queue for new jobs.", 131 | type=int, 132 | default=5, 133 | ) 134 | @click.option( 135 | "--tags", 136 | "-t", 137 | help="Extra tags to apply to the queue.", 138 | multiple=True, 139 | ) 140 | @click.pass_context 141 | @run_async_command 142 | async def declare_queue( 143 | ctx: click.Context, 144 | name: str, 145 | concurrency: int | None, 146 | executor: str | None, 147 | rate_limit: int | None, 148 | rate_limit_window: int | None, 149 | polling_interval: int | None, 150 | tags: list[str] | None, 151 | ): 152 | """ 153 | Declare a new queue. 
154 | """ 155 | chancy: Chancy = ctx.obj["app"] 156 | 157 | async with chancy: 158 | await chancy.declare( 159 | Queue( 160 | name, 161 | concurrency=concurrency, 162 | executor=executor, 163 | rate_limit=rate_limit, 164 | rate_limit_window=rate_limit_window, 165 | polling_interval=polling_interval, 166 | tags=set(tags) if tags else {r".*"}, 167 | ) 168 | ) 169 | 170 | 171 | @queue_group.command("delete") 172 | @click.argument("name") 173 | @click.pass_context 174 | @run_async_command 175 | async def delete_queue(ctx: click.Context, name: str): 176 | """ 177 | Delete a queue. 178 | """ 179 | chancy: Chancy = ctx.obj["app"] 180 | 181 | if click.confirm(f"Are you sure you want to delete the queue {name}?"): 182 | async with chancy: 183 | await chancy.delete_queue(name) 184 | -------------------------------------------------------------------------------- /chancy/cli/worker.py: -------------------------------------------------------------------------------- 1 | import secrets 2 | 3 | import click 4 | 5 | from chancy import Chancy, Worker 6 | from chancy.cli import run_async_command 7 | from chancy.errors import MigrationsNeededError 8 | 9 | 10 | @click.group(name="worker") 11 | def worker_group(): 12 | """ 13 | Worker management commands. 14 | """ 15 | pass 16 | 17 | 18 | @worker_group.command("start") 19 | @click.option("--worker-id", "-w", help="The worker ID to use.") 20 | @click.option( 21 | "--tags", 22 | "-t", 23 | help="Extra tags to apply to the worker.", 24 | multiple=True, 25 | ) 26 | @click.pass_context 27 | @run_async_command 28 | async def worker_command( 29 | ctx: click.Context, worker_id: str | None, tags: list[str] | None 30 | ): 31 | """ 32 | Start a worker. 
33 | """ 34 | chancy: Chancy = ctx.obj["app"] 35 | 36 | async with chancy: 37 | try: 38 | async with Worker( 39 | chancy, 40 | worker_id=worker_id, 41 | tags=set(tags) if tags else None, 42 | ) as worker: 43 | await worker.wait_for_shutdown() 44 | except MigrationsNeededError: 45 | click.echo( 46 | "The database is not up to date and is missing migrations.\n" 47 | "Please run `chancy misc migrate` to update the database.\n" 48 | "You can check the current migration status with" 49 | " `chancy misc check-migrations`." 50 | ) 51 | return 1 52 | 53 | 54 | @worker_group.command("web") 55 | @click.option("--host", "-h", help="The host to bind to.", default="localhost") 56 | @click.option("--port", "-p", help="The port to bind to.", default=8000) 57 | @click.option( 58 | "--debug", "-d", help="Run the server in debug mode.", is_flag=True 59 | ) 60 | @click.option( 61 | "--allow-origin", 62 | "-o", 63 | help="A list of allowed origins.", 64 | multiple=True, 65 | default=lambda: ["*"], 66 | ) 67 | @click.pass_context 68 | @run_async_command 69 | async def web_command( 70 | ctx: click.Context, 71 | host: str, 72 | port: int, 73 | debug: bool, 74 | allow_origin: list[str], 75 | ): 76 | """ 77 | Start the Chancy dashboard. 78 | """ 79 | from chancy.plugins.api import Api 80 | from chancy.plugins.api.auth import SimpleAuthBackend 81 | 82 | chancy: Chancy = ctx.obj["app"] 83 | 84 | async with chancy: 85 | worker = Worker(chancy, tags=set()) 86 | 87 | # The metrics plugin needs to be running to pull in cluster-wide 88 | # metrics. 89 | if metrics := chancy.plugins.get("chancy.metrics"): 90 | worker.manager.add("metrics", metrics.run(worker, chancy)) 91 | 92 | if not (api := chancy.plugins.get("chancy.api")): 93 | chancy.log.info( 94 | "No API plugin was configured on the Chancy application," 95 | " falling back to the default API." 
96 | ) 97 | auth = SimpleAuthBackend({"admin": secrets.token_urlsafe(32)}) 98 | api = Api( 99 | host=host, 100 | port=port, 101 | allow_origins=allow_origin, 102 | debug=debug, 103 | authentication_backend=auth, 104 | ) 105 | chancy.log.warning( 106 | f"No username or password was provided for the API, defaulting" 107 | f" to 'admin' with a random password: {auth.users['admin']}" 108 | ) 109 | 110 | worker.manager.add("api", api.run(worker, chancy)) 111 | await worker.wait_for_shutdown() 112 | -------------------------------------------------------------------------------- /chancy/contrib/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | The contrib directory contains code that is not part of the core functionality 3 | of Chancy, but is useful for developers who are using Chancy. 4 | """ 5 | -------------------------------------------------------------------------------- /chancy/contrib/django/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Utilities for integrating Chancy with Django. 3 | """ 4 | -------------------------------------------------------------------------------- /chancy/contrib/django/admin.py: -------------------------------------------------------------------------------- 1 | from django.contrib import admin 2 | 3 | from chancy.contrib.django.models import Job, Worker, Queue 4 | 5 | 6 | class ReadOnlyAdmin(admin.ModelAdmin): 7 | def has_add_permission(self, request): 8 | return False 9 | 10 | def has_delete_permission(self, request, obj=None): 11 | return False 12 | 13 | # Oddly, this prevents the user from viewing the object. 
14 | # def has_change_permission(self, request, obj=None): 15 | # return False 16 | 17 | 18 | @admin.register(Job) 19 | class JobAdmin(admin.ModelAdmin): 20 | list_display = ("id", "queue", "func", "state", "attempts", "max_attempts") 21 | search_fields = ("id", "queue", "func") 22 | 23 | 24 | @admin.register(Worker) 25 | class WorkerAdmin(ReadOnlyAdmin): 26 | list_display = ("worker_id", "tags", "queues") 27 | search_fields = ("worker_id",) 28 | 29 | 30 | @admin.register(Queue) 31 | class QueueAdmin(admin.ModelAdmin): 32 | list_display = ("name", "state", "executor") 33 | search_fields = ("name",) 34 | list_filter = ("state",) 35 | -------------------------------------------------------------------------------- /chancy/contrib/django/apps.py: -------------------------------------------------------------------------------- 1 | from django.apps import AppConfig 2 | 3 | 4 | class ChancyConfig(AppConfig): 5 | name = "chancy.contrib.django" 6 | label = "chancy" 7 | -------------------------------------------------------------------------------- /chancy/contrib/django/auth.py: -------------------------------------------------------------------------------- 1 | from django.contrib.auth import aauthenticate 2 | from django.contrib.auth.models import User 3 | from starlette.authentication import AuthCredentials, BaseUser, SimpleUser 4 | from starlette.requests import Request, HTTPConnection 5 | 6 | from chancy.plugins.api import AuthBackend 7 | 8 | 9 | class DjangoAuthBackend(AuthBackend): 10 | async def login( 11 | self, request: Request, username: str, password: str 12 | ) -> bool: 13 | user: User | None = await aauthenticate( 14 | username=username, password=password 15 | ) 16 | if user is not None and user.is_superuser: 17 | request.session["username"] = username 18 | return True 19 | return False 20 | 21 | async def logout(self, request: Request) -> None: 22 | request.session.pop("username", None) 23 | 24 | async def authenticate( 25 | self, conn: HTTPConnection 26 | 
) -> tuple[AuthCredentials, BaseUser] | None: 27 | username = conn.session.get("username") 28 | if username is not None: 29 | return AuthCredentials(["authenticated"]), SimpleUser(username) 30 | return None 31 | -------------------------------------------------------------------------------- /chancy/contrib/django/models.py: -------------------------------------------------------------------------------- 1 | """ 2 | Unmanaged Django models that match the Chancy tables. 3 | 4 | Makes the assumption that the Django default database is the same as the Chancy 5 | database. 6 | """ 7 | 8 | __all__ = ("Job", "Worker", "Queue") 9 | 10 | from django.contrib.postgres.fields import ArrayField 11 | from django.db import models 12 | from django.conf import settings 13 | 14 | from chancy.utils import chancy_uuid 15 | 16 | PREFIX = getattr(settings, "CHANCY_PREFIX", "chancy_") 17 | 18 | 19 | class Job(models.Model): 20 | id = models.UUIDField(primary_key=True, default=chancy_uuid, editable=False) 21 | queue = models.TextField() 22 | func = models.TextField() 23 | kwargs = models.JSONField(db_default="{}", blank=True) 24 | limits = models.JSONField(db_default="[]", blank=True) 25 | meta = models.JSONField(db_default="{}", blank=True) 26 | state = models.CharField(max_length=25, default="pending") 27 | priority = models.IntegerField(default=0) 28 | attempts = models.IntegerField(default=0) 29 | max_attempts = models.IntegerField(default=1) 30 | taken_by = models.TextField(null=True) 31 | created_at = models.DateTimeField(auto_now_add=True) 32 | started_at = models.DateTimeField(null=True) 33 | completed_at = models.DateTimeField(null=True) 34 | scheduled_at = models.DateTimeField(auto_now_add=True) 35 | unique_key = models.TextField(null=True) 36 | errors = models.JSONField(db_default="[]", blank=True) 37 | 38 | class Meta: 39 | managed = False 40 | db_table = f"{PREFIX}jobs" 41 | 42 | 43 | class Worker(models.Model): 44 | worker_id = models.TextField(primary_key=True) 45 | tags = 
ArrayField(models.TextField(), default=list, blank=True) 46 | queues = ArrayField(models.TextField(), default=list, blank=True) 47 | 48 | last_seen = models.DateTimeField(auto_now=True) 49 | expires_at = models.DateTimeField() 50 | 51 | class Meta: 52 | managed = False 53 | db_table = f"{PREFIX}workers" 54 | 55 | 56 | class Queue(models.Model): 57 | name = models.TextField(primary_key=True) 58 | state = models.TextField(default="active") 59 | concurrency = models.IntegerField(null=True) 60 | tags = ArrayField(models.TextField(), default=list) 61 | executor = models.TextField( 62 | default="chancy.executors.process.ProcessExecutor" 63 | ) 64 | executor_options = models.JSONField(db_default="{}", blank=True) 65 | polling_interval = models.IntegerField(default=5) 66 | rate_limit = models.IntegerField(null=True, blank=True) 67 | rate_limit_window = models.IntegerField(null=True, blank=True) 68 | created_at = models.DateTimeField(auto_now_add=True) 69 | 70 | class Meta: 71 | managed = False 72 | db_table = f"{PREFIX}queues" 73 | -------------------------------------------------------------------------------- /chancy/errors.py: -------------------------------------------------------------------------------- 1 | class MigrationsNeededError(Exception): 2 | """ 3 | Raised when the database is out of date. 4 | """ 5 | -------------------------------------------------------------------------------- /chancy/executors/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Executors 3 | ========= 4 | 5 | This package contains the built-in executors for Chancy. Executors are 6 | responsible for managing the execution of jobs after they've been retrieved 7 | from a queue. A single worker can have multiple queues, each using its own 8 | executor, allowing you to run jobs in the way that best suits their needs. 
9 | 10 | Capabilities 11 | ------------ 12 | 13 | Not all executors have the same capabilities, typically due to limits in how 14 | threads are implemented in Python. The following table summarizes the 15 | capabilities of each executor: 16 | 17 | .. list-table:: Executor Capabilities 18 | :header-rows: 1 19 | :widths: 20 15 15 15 20 | 21 | * - Executor 22 | - Cancellation [#f1]_ 23 | - Timeouts 24 | - Memory Limits 25 | * - ProcessExecutor 26 | - ✓ 27 | - ✓ 28 | - ✓ 29 | * - AsyncExecutor 30 | - ✓ 31 | - ✓ 32 | - ✗ 33 | * - ThreadedExecutor 34 | - ✗ 35 | - ✓ 36 | - ✗ 37 | * - SubInterpreter 38 | - ✗ 39 | - ✓ 40 | - ✗ 41 | 42 | .. [#f1] Cancellation is always possible before a job is started. Cancellation 43 | here refers to the ability to stop a job that is actively running. 44 | 45 | ProcessExecutor (Default) 46 | ~~~~~~~~~~~~~~~~~~~~~~~~~ 47 | :class:`chancy.executors.process.ProcessExecutor` 48 | Optimized for CPU-bound jobs, uses separate processes for true parallelism. 49 | 50 | AsyncExecutor 51 | ~~~~~~~~~~~~~ 52 | :class:`chancy.executors.asyncex.AsyncExecutor` 53 | Optimized for IO-bound jobs like API calls and database operations. Uses 54 | asyncio for efficient concurrent execution. Thousands of jobs can be 55 | executed concurrently with limited resources. 56 | 57 | ThreadedExecutor 58 | ~~~~~~~~~~~~~~~~ 59 | :class:`chancy.executors.thread.ThreadedExecutor` 60 | Suitable for IO-bound jobs that can't use asyncio. Uses threads for concurrent 61 | execution. 62 | 63 | SubInterpreterExecutor 64 | ~~~~~~~~~~~~~~~~~~~~~~ 65 | :class:`chancy.executors.sub.SubInterpreterExecutor` 66 | Experimental executor using Python sub-interpreters. Provides GIL avoidance 67 | with lower overhead than processes. 68 | 69 | 70 | Custom Executors 71 | ~~~~~~~~~~~~~~~~ 72 | You can implement your own executor by subclassing the 73 | :class:`chancy.executors.base.Executor` class and implementing the 74 | :meth:`~chancy.executors.base.Executor.push` method. 
75 | """ 76 | -------------------------------------------------------------------------------- /chancy/executors/asyncex.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from asyncio import CancelledError 3 | 4 | from chancy import Reference 5 | from chancy.executors.base import Executor 6 | from chancy.job import QueuedJob, Limit 7 | 8 | 9 | class AsyncExecutor(Executor): 10 | """ 11 | An Executor which uses asyncio to run its jobs in the main event loop. 12 | 13 | This executor is useful for running large numbers of IO-bound jobs, as it 14 | can run many jobs concurrently without blocking the main event loop and 15 | without the high overhead of new processes or threads. However, it is not 16 | suitable for CPU-bound jobs, as it will block the main event loop and 17 | prevent other jobs & queues from running. 18 | 19 | To use this executor, simply pass the import path to this class in the 20 | ``executor`` field of your queue configuration or use the 21 | :class:`~chancy.app.Chancy.Executor` shortcut: 22 | 23 | .. 
code-block:: python 24 | 25 | async with Chancy("postgresql://localhost/postgres") as chancy: 26 | await chancy.declare( 27 | Queue( 28 | name="default", 29 | executor=Chancy.Executor.Async, 30 | ) 31 | ) 32 | """ 33 | 34 | def __init__(self, worker, queue): 35 | super().__init__(worker, queue) 36 | self.jobs: dict[asyncio.Task, QueuedJob] = {} 37 | 38 | async def push(self, job: QueuedJob): 39 | job = await self.on_job_starting(job) 40 | task = asyncio.create_task(self._job_wrapper(job)) 41 | self.jobs[task] = job 42 | task.add_done_callback(self.jobs.pop) 43 | 44 | def __len__(self): 45 | return len(self.jobs) 46 | 47 | async def _job_wrapper(self, job: QueuedJob): 48 | try: 49 | func, kwargs = Executor.get_function_and_kwargs(job) 50 | if not asyncio.iscoroutinefunction(func): 51 | raise ValueError( 52 | f"Function {job.func!r} is not an async function, which is" 53 | f" required for the AsyncExecutor. Please use the" 54 | f" ThreadedExecutor or ProcessExecutor instead." 55 | ) 56 | 57 | timeout = next( 58 | ( 59 | limit.value 60 | for limit in job.limits 61 | if limit.type_ == Limit.Type.TIME 62 | ), 63 | None, 64 | ) 65 | 66 | async with asyncio.timeout(timeout): 67 | result = await func(**kwargs) 68 | await self.on_job_completed(job=job, result=result) 69 | except (Exception, CancelledError) as exc: 70 | await self.on_job_completed(job=job, exc=exc, result=None) 71 | 72 | async def cancel(self, ref: Reference): 73 | for task, job in self.jobs.items(): 74 | if job.id == ref.identifier: 75 | task.cancel() 76 | return 77 | 78 | async def stop(self): 79 | """ 80 | Stop the executor, giving it a chance to clean up any resources it 81 | may have allocated to running jobs. 82 | """ 83 | for task in self.jobs: 84 | task.cancel() 85 | await asyncio.gather(*self.jobs) 86 | 87 | def get_default_concurrency(self): 88 | """ 89 | Get the default concurrency level for this executor. 90 | 91 | This method is called when the queue's concurrency level is set to 92 | None. 
It should return the number of jobs that can be processed 93 | concurrently by this executor. 94 | 95 | By default, returns 100. 96 | """ 97 | return 100 98 | -------------------------------------------------------------------------------- /chancy/executors/thread.py: -------------------------------------------------------------------------------- 1 | import os 2 | import threading 3 | from concurrent.futures import ThreadPoolExecutor, Future 4 | import asyncio 5 | import functools 6 | from typing import Any 7 | 8 | from chancy.executors.base import Executor, ConcurrentExecutor 9 | from chancy.job import QueuedJob, Limit 10 | 11 | 12 | class ThreadedExecutor(ConcurrentExecutor): 13 | """ 14 | An Executor which uses a thread pool to run its jobs. 15 | 16 | This executor is useful for running I/O-bound jobs concurrently without the 17 | overhead of separate processes. It's not suitable for CPU-bound tasks due 18 | to Python's Global SubInterpreter Lock (GIL). 19 | 20 | When working with existing asyncio code, it's often easier and more 21 | efficient to use the :class:`~chancy.executors.asyncex.AsyncExecutor` 22 | instead, as it can run a very large number of jobs concurrently. 23 | 24 | To use this executor, simply pass the import path to this class in the 25 | ``executor`` field of your queue configuration or use the 26 | :class:`~chancy.app.Chancy.Executor` shortcut: 27 | 28 | .. code-block:: python 29 | 30 | async with Chancy("postgresql://localhost/postgres") as chancy: 31 | await chancy.declare( 32 | Queue( 33 | name="default", 34 | executor=Chancy.Executor.Threaded, 35 | ) 36 | ) 37 | 38 | :param worker: The worker instance associated with this executor. 39 | :param queue: The queue that this executor is associated with. 
    def job_wrapper(self, job: QueuedJob) -> tuple[QueuedJob, Any]:
        """
        This is the function that is actually started by the thread pool
        executor. It's responsible for setting up necessary limits,
        running the job, and returning the result.

        :param job: The queued job to execute on this worker thread.
        :return: A ``(job, result)`` tuple so the completion callback can
            associate the result with the job that produced it.
        """
        func, kwargs = Executor.get_function_and_kwargs(job)

        # Only the first TIME limit (if any) on the job is honoured here.
        time_limit = next(
            (
                limit.value
                for limit in job.limits
                if limit.type_ == Limit.Type.TIME
            ),
            None,
        )

        timer = None
        if time_limit:
            # NOTE(review): threading.Timer invokes _timeout_handler on the
            # timer's own thread, so the TimeoutError it raises does not
            # interrupt the job running on *this* thread — confirm whether a
            # hard timeout is actually expected here (the process-based
            # executor is the one documented as supporting timeouts).
            timer = threading.Timer(time_limit, self._timeout_handler)
            timer.start()

        try:
            if asyncio.iscoroutinefunction(func):
                # Coroutine jobs get a fresh, private event loop on this
                # worker thread for the duration of the call.
                loop = asyncio.new_event_loop()
                asyncio.set_event_loop(loop)
                try:
                    result = loop.run_until_complete(func(**kwargs))
                finally:
                    loop.run_until_complete(loop.shutdown_asyncgens())
                    loop.close()
            else:
                result = func(**kwargs)
        finally:
            # Always disarm the watchdog, even if the job raised.
            if timer:
                timer.cancel()

        return job, result
def stop(self): 116 | self.pool.shutdown(cancel_futures=True) 117 | await super().stop() 118 | 119 | def get_default_concurrency(self) -> int: 120 | """ 121 | Get the default concurrency level for this executor. 122 | 123 | This method is called when the queue's concurrency level is set to 124 | None. It should return the number of jobs that can be processed 125 | concurrently by this executor. 126 | 127 | On Python 3.13+, defaults to the number of logical CPUs on the system 128 | plus 4. On older versions of Python, defaults to the number of CPUs on 129 | the system plus 4. This mimics the behavior of Python's built-in 130 | ThreadPoolExecutor. 131 | """ 132 | # Only available in 3.13+ 133 | if hasattr(os, "process_cpu_count"): 134 | return min(32, (os.process_cpu_count() or 1) + 4) 135 | return min(32, (os.cpu_count() or 1) + 4) 136 | -------------------------------------------------------------------------------- /chancy/hub.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import inspect 3 | from dataclasses import dataclass 4 | from itertools import chain 5 | from typing import Callable, Awaitable 6 | 7 | 8 | @dataclass 9 | class Event: 10 | name: str 11 | body: dict[str, any] 12 | 13 | 14 | EventCallbackT = Callable[[Event], Awaitable[None]] | Callable[[Event], None] 15 | 16 | 17 | class Hub: 18 | """ 19 | An event hub for registering and emitting events. 20 | """ 21 | 22 | def __init__(self): 23 | # Called when any event is emitted, useful for debugging. 24 | self._wildcard_handlers: list[EventCallbackT] = [] 25 | self._handlers: dict[str, list[EventCallbackT]] = {} 26 | self._waiters: dict[str, set[asyncio.Future]] = {} 27 | 28 | def on(self, event: str, f: EventCallbackT): 29 | """ 30 | Register a handler for an event. 31 | """ 32 | self._handlers.setdefault(event, []).append(f) 33 | 34 | def on_any(self, f: EventCallbackT): 35 | """ 36 | Register a handler for any event. 
37 | """ 38 | self._wildcard_handlers.append(f) 39 | 40 | def remove(self, event: str, handler: EventCallbackT): 41 | """ 42 | Remove a handler from an event. 43 | """ 44 | try: 45 | self._handlers.get(event, []).remove(handler) 46 | except ValueError: 47 | pass 48 | 49 | def remove_on_any(self, handler: EventCallbackT): 50 | """ 51 | Remove a handler from any event. 52 | """ 53 | try: 54 | self._wildcard_handlers.remove(handler) 55 | except ValueError: 56 | pass 57 | 58 | def clear(self): 59 | """ 60 | Clear all handlers. 61 | """ 62 | self._handlers = {} 63 | 64 | for event in self._waiters: 65 | for waiter in self._waiters[event]: 66 | if not waiter.done(): 67 | waiter.cancel() 68 | 69 | self._waiters = {} 70 | 71 | async def emit(self, event: str, body): 72 | """ 73 | Emit an event with the given body. 74 | """ 75 | result = Event(name=event, body=body) 76 | 77 | for handler in chain( 78 | self._handlers.get(event, []), self._wildcard_handlers 79 | ): 80 | if inspect.iscoroutinefunction(handler): 81 | await handler(result) 82 | else: 83 | handler(result) 84 | 85 | # Notify any asyncio waiters that are waiting for this event, used 86 | # to implement wait_for(). 87 | waiters = self._waiters.get(event, []) 88 | for waiter in waiters: 89 | if not waiter.done(): 90 | waiter.set_result(result) 91 | 92 | self._waiters[event] = {w for w in waiters if not w.done()} 93 | 94 | async def wait_for( 95 | self, event: str | list[str], timeout: float | None = None 96 | ) -> list[Event]: 97 | """ 98 | Wait for a specific event to occur. 99 | 100 | :param event: The name of the event to wait for, or a list of events. 101 | :param timeout: The maximum time to wait for the event (in seconds). 
102 | """ 103 | if isinstance(event, str): 104 | event = [event] 105 | 106 | if not event: 107 | raise ValueError("At least one event must be provided.") 108 | 109 | futures = [] 110 | for e in event: 111 | future = asyncio.get_running_loop().create_future() 112 | future.add_done_callback( 113 | lambda _: self._waiters.get(e, set()).discard(future) 114 | ) 115 | self._waiters.setdefault(e, set()).add(future) 116 | futures.append(future) 117 | 118 | done, pending = await asyncio.wait( 119 | futures, 120 | timeout=timeout, 121 | return_when=asyncio.FIRST_COMPLETED, 122 | ) 123 | 124 | for task in pending: 125 | task.cancel() 126 | 127 | return [task.result() for task in done] 128 | -------------------------------------------------------------------------------- /chancy/migrations/v2.py: -------------------------------------------------------------------------------- 1 | from psycopg import AsyncCursor, sql 2 | from psycopg.rows import DictRow 3 | 4 | from chancy.migrate import Migration, Migrator 5 | 6 | 7 | class V2Migration(Migration): 8 | async def up(self, migrator: Migrator, cursor: AsyncCursor[DictRow]): 9 | """ 10 | Adds common indexes. 
from psycopg import sql

from chancy.migrate import Migration


class MakeMaxAttemptsNonNull(Migration):
    """
    Makes the max_attempts column non-null with a default value of 1, which
    matches the default value in the Job class.
    """

    async def up(self, migrator, cursor):
        # First set any existing NULL values to 1 so the NOT NULL
        # constraint below cannot fail on legacy rows.
        await cursor.execute(
            sql.SQL(
                """
                UPDATE {jobs}
                SET max_attempts = 1
                WHERE max_attempts IS NULL
                """
            ).format(jobs=sql.Identifier(f"{migrator.prefix}jobs"))
        )

        # Then alter the column to be non-null with a default
        await cursor.execute(
            sql.SQL(
                """
                ALTER TABLE {jobs}
                ALTER COLUMN max_attempts SET NOT NULL,
                ALTER COLUMN max_attempts SET DEFAULT 1
                """
            ).format(jobs=sql.Identifier(f"{migrator.prefix}jobs"))
        )

    async def down(self, migrator, cursor):
        # Reverses up(): drops both the NOT NULL constraint and the
        # default. Rows updated from NULL to 1 by up() are not restored.
        await cursor.execute(
            sql.SQL(
                """
                ALTER TABLE {jobs}
                ALTER COLUMN max_attempts DROP NOT NULL,
                ALTER COLUMN max_attempts DROP DEFAULT
                """
            ).format(jobs=sql.Identifier(f"{migrator.prefix}jobs"))
        )
9 | """ 10 | 11 | async def up(self, migrator, cursor): 12 | await cursor.execute( 13 | sql.SQL( 14 | """ 15 | ALTER TABLE {queues} 16 | ADD COLUMN resume_at TIMESTAMP WITH TIME ZONE 17 | """ 18 | ).format(queues=sql.Identifier(f"{migrator.prefix}queues")) 19 | ) 20 | 21 | async def down(self, migrator, cursor): 22 | await cursor.execute( 23 | sql.SQL( 24 | """ 25 | ALTER TABLE {queues} 26 | DROP COLUMN resume_at 27 | """ 28 | ).format(queues=sql.Identifier(f"{migrator.prefix}queues")) 29 | ) 30 | -------------------------------------------------------------------------------- /chancy/migrations/v5.py: -------------------------------------------------------------------------------- 1 | from psycopg import sql 2 | 3 | from chancy.migrate import Migration 4 | 5 | 6 | class ConvertToJSONB(Migration): 7 | """ 8 | Convert JSON fields to JSONB for better performance and indexing capabilities. 9 | 10 | This migration updates all JSON fields in core tables to use JSONB type, which provides: 11 | - Better performance for reads 12 | - Ability to index JSON fields 13 | - More efficient storage 14 | """ 15 | 16 | async def up(self, migrator, cursor): 17 | # Update the jobs table 18 | await cursor.execute( 19 | sql.SQL( 20 | """ 21 | ALTER TABLE {jobs} 22 | ALTER COLUMN kwargs TYPE JSONB USING kwargs::JSONB, 23 | ALTER COLUMN limits TYPE JSONB USING limits::JSONB, 24 | ALTER COLUMN meta TYPE JSONB USING meta::JSONB, 25 | ALTER COLUMN errors TYPE JSONB USING errors::JSONB 26 | """ 27 | ).format(jobs=sql.Identifier(f"{migrator.prefix}jobs")) 28 | ) 29 | 30 | # Update the queues table 31 | await cursor.execute( 32 | sql.SQL( 33 | """ 34 | ALTER TABLE {queues} 35 | ALTER COLUMN executor_options TYPE JSONB USING 36 | CASE WHEN executor_options IS NULL THEN NULL 37 | ELSE executor_options::JSONB END 38 | """ 39 | ).format(queues=sql.Identifier(f"{migrator.prefix}queues")) 40 | ) 41 | 42 | async def down(self, migrator, cursor): 43 | # Revert the jobs table 44 | await cursor.execute( 
45 | sql.SQL( 46 | """ 47 | ALTER TABLE {jobs} 48 | ALTER COLUMN kwargs TYPE JSON USING kwargs::JSON, 49 | ALTER COLUMN limits TYPE JSON USING limits::JSON, 50 | ALTER COLUMN meta TYPE JSON USING meta::JSON, 51 | ALTER COLUMN errors TYPE JSON USING errors::JSON 52 | """ 53 | ).format(jobs=sql.Identifier(f"{migrator.prefix}jobs")) 54 | ) 55 | 56 | # Revert the queues table 57 | await cursor.execute( 58 | sql.SQL( 59 | """ 60 | ALTER TABLE {queues} 61 | ALTER COLUMN executor_options TYPE JSON USING 62 | CASE WHEN executor_options IS NULL THEN NULL 63 | ELSE executor_options::JSON END 64 | """ 65 | ).format(queues=sql.Identifier(f"{migrator.prefix}queues")) 66 | ) 67 | -------------------------------------------------------------------------------- /chancy/plugins/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This package contains the various built-in plugins available for Chancy. 3 | Plugins are used to extend the functionality of Chancy, while allowing for 4 | significant customization. 5 | 6 | Some Chancy plugins are enabled by default. You can pass your own into the 7 | list of plugins to customize them or set ``no_default_plugins`` to ``True`` to 8 | disable them completely: 9 | 10 | .. code-block:: python 11 | 12 | async with Chancy(..., no_default_plugins=True, plugins=[]): 13 | ... 14 | 15 | Default Plugins 16 | --------------- 17 | 18 | - :class:`chancy.plugins.workflow.WorkflowPlugin`: This plugin provides a 19 | workflow system for managing complex job dependencies. 20 | - :class:`chancy.plugins.metrics.Metrics`: This plugin collects metrics about 21 | the performance of the Chancy cluster and makes them available to the API, 22 | dashboard, and database. 23 | - :class:`chancy.plugins.leadership.Leadership`: This plugin is responsible for 24 | managing leadership elections between multiple workers. Most other plugins 25 | require this plugin to be enabled. 
- :class:`chancy.plugins.pruner.Pruner`: This plugin is responsible for pruning
  old jobs from the database when they are no longer needed.
- :class:`chancy.plugins.recovery.Recovery`: This plugin is responsible for
  recovering jobs that were interrupted during execution, such as when a
  worker crashes or is restarted.

Optional Plugins
----------------

Chancy also comes with some additional plugins that can be enabled as needed,
and typically require additional dependencies:

- :class:`chancy.plugins.api.Api`: This plugin provides an API and dashboard
  for viewing the state of the Chancy cluster.
- :class:`chancy.plugins.cron.Cron`: This plugin is responsible for running
  jobs at recurring schedules, such as "once an hour" or "every Monday".
- :class:`chancy.plugins.retry.RetryPlugin`: This plugin provides a slightly
  more advanced job retry system with jitter, backoff, and other features.
- :class:`chancy.plugins.reprioritize.Reprioritize`: This plugin increases
  the priority of jobs based on how long they've been in the queue.
- :class:`chancy.plugins.sentry.SentryPlugin`: This plugin sends job exceptions
  to Sentry, along with metadata about the job and worker.
__all__ = ("SimpleAuthBackend", "AuthBackend")

import abc
import hmac

from starlette.authentication import (
    AuthenticationBackend,
    AuthCredentials,
    BaseUser,
    SimpleUser,
)
from starlette.requests import HTTPConnection, Request


class AuthBackend(AuthenticationBackend, abc.ABC):
    """
    Base class for dashboard/API authentication backends.

    Implementations verify credentials (:meth:`login`), tear down session
    state (:meth:`logout`), and perform per-request authentication via
    Starlette's ``AuthenticationBackend`` interface.
    """

    @abc.abstractmethod
    async def login(
        self, request: Request, username: str, password: str
    ) -> bool:
        """Verify credentials and establish a session; True on success."""

    @abc.abstractmethod
    async def logout(self, request: Request) -> None:
        """Tear down any session state established by :meth:`login`."""


class SimpleAuthBackend(AuthBackend):
    """
    A simple authentication backend that uses a dictionary of users and
    passwords.

    :param users: A dictionary of users and their passwords.
    """

    def __init__(self, users: dict[str, str]):
        self.users = users

    async def login(
        self, request: Request, username: str, password: str
    ) -> bool:
        expected = self.users.get(username)
        # hmac.compare_digest performs a constant-time comparison, closing
        # the timing side-channel a plain `==` opens on password checks.
        # Comparing UTF-8 bytes keeps non-ASCII passwords supported
        # (compare_digest rejects non-ASCII str arguments).
        if expected is not None and hmac.compare_digest(
            expected.encode("utf-8"), password.encode("utf-8")
        ):
            request.session["username"] = username
            return True
        return False

    async def logout(self, request: Request) -> None:
        request.session.pop("username", None)

    async def authenticate(
        self, conn: HTTPConnection
    ) -> tuple[AuthCredentials, BaseUser] | None:
        username = conn.session.get("username")
        if username is None:
            return None

        return AuthCredentials(["authenticated"]), SimpleUser(username)
RouteT(typing.TypedDict): 7 | """ 8 | A type hint for a route. 9 | """ 10 | 11 | path: str 12 | endpoint: typing.Callable 13 | methods: str | None 14 | name: str | None 15 | is_websocket: Optional[bool] 16 | 17 | 18 | class ApiPlugin(abc.ABC): 19 | """ 20 | A plugin that provides additional API endpoints. 21 | """ 22 | 23 | def __init__(self, api): 24 | self.api = api 25 | 26 | @abc.abstractmethod 27 | def name(self) -> str: 28 | """ 29 | Get the name of the plugin. 30 | """ 31 | 32 | def routes(self) -> list[RouteT]: 33 | """ 34 | Get a list of routes to add to the API. 35 | """ 36 | return [] 37 | -------------------------------------------------------------------------------- /chancy/plugins/api/ui/README.md: -------------------------------------------------------------------------------- 1 | # chancy-ui 2 | 3 | This directory contains a single-page application (SPA) that uses the Chancy 4 | API to display a simple dashboard of the current state of the Chancy cluster. 5 | 6 | The default API plugin will serve the built version of this project when you 7 | navigate to the root `/` out of the `chancy.plugins.api.dist` directory. 8 | 9 | ## Development 10 | 11 | Install the dependencies by running the following command: 12 | 13 | ```bash 14 | npm install 15 | ``` 16 | 17 | To start the development server, run the following command: 18 | 19 | ```bash 20 | npm run dev 21 | ``` 22 | 23 | This will start a development server that will automatically reload when you 24 | make changes to the source code on `http://localhost:5134`. 25 | 26 | ## Building 27 | 28 | To build the project, run the following command: 29 | 30 | ```bash 31 | npm run build 32 | ``` 33 | 34 | Do not commit the `../dist` directory to the repository. It is automatically 35 | generated when the project is built for release to ensure it's up-to-date. 
36 | -------------------------------------------------------------------------------- /chancy/plugins/api/ui/eslint.config.js: -------------------------------------------------------------------------------- 1 | import js from '@eslint/js' 2 | import globals from 'globals' 3 | import reactHooks from 'eslint-plugin-react-hooks' 4 | import reactRefresh from 'eslint-plugin-react-refresh' 5 | import tseslint from 'typescript-eslint' 6 | 7 | export default tseslint.config( 8 | { ignores: ['dist'] }, 9 | { 10 | extends: [js.configs.recommended, ...tseslint.configs.recommended], 11 | files: ['**/*.{ts,tsx}'], 12 | languageOptions: { 13 | ecmaVersion: 2020, 14 | globals: globals.browser, 15 | }, 16 | plugins: { 17 | 'react-hooks': reactHooks, 18 | 'react-refresh': reactRefresh, 19 | }, 20 | rules: { 21 | ...reactHooks.configs.recommended.rules, 22 | 'react-refresh/only-export-components': [ 23 | 'warn', 24 | { allowConstantExport: true }, 25 | ], 26 | }, 27 | }, 28 | ) 29 | -------------------------------------------------------------------------------- /chancy/plugins/api/ui/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Chancy UI 7 | 8 | 9 |
10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /chancy/plugins/api/ui/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "chancy-ui", 3 | "private": true, 4 | "version": "0.0.0", 5 | "type": "module", 6 | "scripts": { 7 | "dev": "vite", 8 | "build": "tsc -b && vite build", 9 | "lint": "eslint .", 10 | "preview": "vite preview" 11 | }, 12 | "dependencies": { 13 | "@dagrejs/dagre": "^1.1.4", 14 | "@popperjs/core": "^2.11.8", 15 | "@tanstack/react-query": "^5.54.1", 16 | "@xyflow/react": "^12.2.1", 17 | "bootstrap": "^5.3.3", 18 | "react": "^18.3.1", 19 | "react-dom": "^18.3.1", 20 | "react-router-dom": "^6.26.1", 21 | "recharts": "^2.15.1" 22 | }, 23 | "devDependencies": { 24 | "@eslint/js": "^9.9.0", 25 | "@types/react": "^18.3.3", 26 | "@types/react-dom": "^18.3.0", 27 | "@vitejs/plugin-react": "^4.3.1", 28 | "eslint": "^9.9.0", 29 | "eslint-plugin-react-hooks": "^5.1.0-rc.0", 30 | "eslint-plugin-react-refresh": "^0.4.9", 31 | "globals": "^15.9.0", 32 | "rollup-plugin-visualizer": "^5.12.0", 33 | "sass": "^1.78.0", 34 | "typescript": "^5.5.3", 35 | "typescript-eslint": "^8.0.1", 36 | "vite": "^5.4.1" 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /chancy/plugins/api/ui/public/logo_small.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TkTech/chancy/e4d80ea6eea3e0f1ddd5fb0df09cbbc60fd924e0/chancy/plugins/api/ui/public/logo_small.png -------------------------------------------------------------------------------- /chancy/plugins/api/ui/src/App.css: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TkTech/chancy/e4d80ea6eea3e0f1ddd5fb0df09cbbc60fd924e0/chancy/plugins/api/ui/src/App.css -------------------------------------------------------------------------------- 
/chancy/plugins/api/ui/src/components/Loading.tsx: -------------------------------------------------------------------------------- 1 | export function Loading () { 2 | return ( 3 |
4 |
5 | Loading... 6 |
7 |
8 | ); 9 | } -------------------------------------------------------------------------------- /chancy/plugins/api/ui/src/components/UpdatingTime.tsx: -------------------------------------------------------------------------------- 1 | import { relativeTime } from '../utils'; 2 | import React from 'react'; 3 | import { useEffect, useState } from 'react'; 4 | 5 | /** 6 | * Renders a constantly updating time. 7 | * 8 | * @param date 9 | * @constructor 10 | */ 11 | export function UpdatingTime ({ date }: { date: string }) { 12 | const [time, setTime] = React.useState(relativeTime(date)); 13 | 14 | React.useEffect(() => { 15 | const interval = setInterval(() => { 16 | setTime(relativeTime(date)); 17 | }, 1000); 18 | 19 | return () => clearInterval(interval); 20 | }, [date]); 21 | 22 | return time; 23 | } 24 | 25 | export function CountdownTimer({ date, className }: { date: string | undefined, className?: string }) { 26 | const [timeString, setTimeString] = useState(''); 27 | 28 | useEffect(() => { 29 | if (!date) { 30 | setTimeString('-'); 31 | return; 32 | } 33 | 34 | const updateTimer = () => { 35 | const now = new Date().getTime(); 36 | const target = new Date(date).getTime(); 37 | const diff = Math.abs(target - now); 38 | 39 | const hours = Math.floor(diff / (1000 * 60 * 60)); 40 | const minutes = Math.floor((diff % (1000 * 60 * 60)) / (1000 * 60)); 41 | const seconds = Math.floor((diff % (1000 * 60)) / 1000); 42 | 43 | setTimeString( 44 | (now < target ? 
"+" : "-") + 45 | `${hours.toString().padStart(2, '0')}h` + 46 | `${minutes.toString().padStart(2, '0')}m` + 47 | `${seconds.toString().padStart(2, '0')}s` 48 | ); 49 | }; 50 | 51 | updateTimer(); 52 | const interval = setInterval(updateTimer, 1000); 53 | return () => clearInterval(interval); 54 | }, [date]); 55 | 56 | return {timeString}; 57 | } -------------------------------------------------------------------------------- /chancy/plugins/api/ui/src/hooks/useCrons.tsx: -------------------------------------------------------------------------------- 1 | import {useQuery} from '@tanstack/react-query'; 2 | 3 | interface Cron { 4 | unique_key: string; 5 | cron: string, 6 | last_run: string, 7 | next_run: string, 8 | job: { 9 | func: string, 10 | queue: string, 11 | kwargs: unknown, 12 | priority: number, 13 | max_attempts: number, 14 | limits: { 15 | key: string, 16 | value: number 17 | }[] 18 | } 19 | } 20 | 21 | export function useCrons ({ url }: { url: string | null }) { 22 | return useQuery({ 23 | queryKey: ['crons', url], 24 | queryFn: async () => { 25 | const response = await fetch(`${url}/api/v1/crons`); 26 | return await response.json(); 27 | }, 28 | enabled: url !== null 29 | }); 30 | } -------------------------------------------------------------------------------- /chancy/plugins/api/ui/src/hooks/useJobs.tsx: -------------------------------------------------------------------------------- 1 | import {keepPreviousData, useQuery} from '@tanstack/react-query'; 2 | import {useMemo} from 'react'; 3 | 4 | export interface Job { 5 | id: string, 6 | queue: string, 7 | func: string, 8 | kwargs: Record, 9 | limits: { 10 | key: string, 11 | value: number 12 | }[], 13 | meta: Record, 14 | state: string, 15 | priority: number, 16 | attempts: number, 17 | max_attempts: number, 18 | taken_by: string, 19 | created_at: string, 20 | started_at: string, 21 | completed_at: string, 22 | scheduled_at: string, 23 | unique_key: string, 24 | errors: { 25 | traceback: string, 26 | 
attempt: number 27 | }[] 28 | } 29 | 30 | export function useJobs ({ 31 | url, 32 | state, 33 | func 34 | }: { 35 | url: string | null, 36 | state: string | undefined, 37 | func?: string | undefined 38 | }) { 39 | const fullUrl = useMemo(() => { 40 | const params = new URLSearchParams(); 41 | if (state) { 42 | params.append('state', state); 43 | } 44 | if (func) { 45 | params.append('func', func); 46 | } 47 | return `${url}/api/v1/jobs?${params.toString()}`; 48 | }, [url, state, func]); 49 | 50 | return useQuery({ 51 | queryKey: ['jobs', fullUrl], 52 | queryFn: async () => { 53 | const params = new URLSearchParams(); 54 | if (state) { 55 | params.append('state', state); 56 | } 57 | if (func) { 58 | params.append('func', func); 59 | } 60 | 61 | const response = await fetch(fullUrl); 62 | return response.json(); 63 | }, 64 | enabled: url !== null, 65 | refetchInterval: 5000, 66 | placeholderData: keepPreviousData 67 | }); 68 | } 69 | 70 | export function useJob ({ 71 | url, 72 | job_id 73 | }: { 74 | url: string | null, 75 | job_id: string | undefined 76 | }) { 77 | return useQuery({ 78 | queryKey: ['job', url, job_id], 79 | queryFn: async () => { 80 | const response = await fetch(`${url}/api/v1/jobs/${job_id}`); 81 | return response.json(); 82 | }, 83 | enabled: url !== null && job_id !== undefined 84 | }); 85 | } -------------------------------------------------------------------------------- /chancy/plugins/api/ui/src/hooks/useMetrics.tsx: -------------------------------------------------------------------------------- 1 | import { useQuery } from '@tanstack/react-query'; 2 | 3 | export type MetricType = 'counter' | 'gauge' | 'histogram'; 4 | 5 | export interface MetricPoint { 6 | timestamp: string; 7 | value: number | { [key: string]: number }; 8 | } 9 | 10 | export interface MetricData { 11 | data: MetricPoint[]; 12 | type: MetricType; 13 | } 14 | 15 | export interface MetricsOverview { 16 | categories: { 17 | [category: string]: string[]; 18 | }; 19 | count: 
number; 20 | } 21 | 22 | 23 | export function useMetricsOverview({ url }: { url: string | null }) { 24 | return useQuery({ 25 | queryKey: ['metrics-overview', url], 26 | queryFn: async () => { 27 | const response = await fetch(`${url}/api/v1/metrics`); 28 | return response.json(); 29 | }, 30 | enabled: url !== null, 31 | refetchInterval: 10000, 32 | }); 33 | } 34 | 35 | export function useMetricDetail({ 36 | url, 37 | key, 38 | resolution = '5min', 39 | limit = 60, 40 | enabled = true, 41 | worker_id = undefined 42 | }: { 43 | url: string | null; 44 | key: string; 45 | resolution?: string; 46 | limit?: number; 47 | enabled?: boolean; 48 | worker_id?: string; 49 | }) { 50 | return useQuery>({ 51 | queryKey: ['metric-detail', url, key, resolution, limit, worker_id], 52 | queryFn: async () => { 53 | const params = new URLSearchParams({ 54 | resolution, 55 | limit: limit.toString() 56 | }); 57 | 58 | if (worker_id) { 59 | params.append('worker_id', worker_id); 60 | } 61 | 62 | const response = await fetch(`${url}/api/v1/metrics/${key}?${params.toString()}`); 63 | return response.json(); 64 | }, 65 | enabled: enabled, 66 | refetchInterval: 10000, 67 | }); 68 | } 69 | 70 | -------------------------------------------------------------------------------- /chancy/plugins/api/ui/src/hooks/useQueues.tsx: -------------------------------------------------------------------------------- 1 | import {useQuery} from '@tanstack/react-query'; 2 | 3 | interface Queue { 4 | name: string; 5 | concurrency: number; 6 | tags: string[]; 7 | state: string; 8 | executor: string; 9 | executor_options: Record; 10 | polling_interval: number; 11 | rate_limit: number | null; 12 | rate_limit_window: number | null; 13 | resume_at: string | null; 14 | } 15 | 16 | export function useQueues(url: string | null) { 17 | return useQuery({ 18 | queryKey: ['queues', url], 19 | queryFn: async () => { 20 | const response = await fetch(`${url}/api/v1/queues`); 21 | return await response.json(); 22 | }, 23 | 
enabled: url !== null 24 | }); 25 | } -------------------------------------------------------------------------------- /chancy/plugins/api/ui/src/hooks/useServerConfiguration.tsx: -------------------------------------------------------------------------------- 1 | import {useSessionStorage} from './useSessionStorage.tsx'; 2 | import {useQuery} from '@tanstack/react-query'; 3 | import React, {useMemo} from 'react'; 4 | 5 | interface ServerConfiguration { 6 | plugins: string[], 7 | } 8 | 9 | const ServerContext = React.createContext(null); 10 | 11 | export function useServerConfiguration() { 12 | const [host, setHost] = useSessionStorage('settings.host', "http://localhost"); 13 | const [port, setPort] = useSessionStorage('settings.port', 8000); 14 | 15 | const { data, isLoading, refetch } = useQuery({ 16 | queryKey: ['configuration', host, port], 17 | queryFn: async () => { 18 | const response = await fetch(`${host}:${port}/api/v1/configuration`); 19 | return await response.json(); 20 | }, 21 | enabled: false 22 | }); 23 | 24 | const url = useMemo(() => { 25 | if (!host || !port) { 26 | return null; 27 | } 28 | 29 | return `${host}:${port}`; 30 | }, [host, port]); 31 | 32 | return { 33 | configuration: data || null, 34 | isLoading, 35 | setHost, 36 | setPort, 37 | host, 38 | port, 39 | url, 40 | refetch, 41 | } 42 | } 43 | 44 | export function ServerConfigurationProvider({children}: {children: React.ReactNode}) { 45 | const value = useServerConfiguration(); 46 | 47 | return ( 48 | 49 | {children} 50 | 51 | ) 52 | } 53 | 54 | export function useServer() { 55 | return React.useContext(ServerContext); 56 | } -------------------------------------------------------------------------------- /chancy/plugins/api/ui/src/hooks/useSessionStorage.tsx: -------------------------------------------------------------------------------- 1 | import {useState} from 'react'; 2 | 3 | export function useSessionStorage(key: string, initialValue: T) { 4 | const [storedValue, setStoredValue] 
= useState(() => { 5 | try { 6 | const item = window.sessionStorage.getItem(key); 7 | return item ? JSON.parse(item) : initialValue; 8 | } catch (error) { 9 | console.error(error); 10 | return initialValue; 11 | } 12 | }); 13 | 14 | const setValue = (value: T) => { 15 | try { 16 | setStoredValue(value); 17 | window.sessionStorage.setItem(key, JSON.stringify(value)); 18 | } catch (error) { 19 | console.error(error); 20 | } 21 | }; 22 | 23 | return [storedValue, setValue] as const; 24 | } -------------------------------------------------------------------------------- /chancy/plugins/api/ui/src/hooks/useWorkers.tsx: -------------------------------------------------------------------------------- 1 | import {useQuery} from '@tanstack/react-query'; 2 | 3 | export interface Worker { 4 | worker_id: string; 5 | tags: string[]; 6 | queues: string[]; 7 | last_seen: string; 8 | expires_at: string; 9 | is_leader: boolean; 10 | } 11 | 12 | export function useWorkers(url: string | null) { 13 | return useQuery({ 14 | queryKey: ['workers', url], 15 | queryFn: async () => { 16 | const response = await fetch(`${url}/api/v1/workers`); 17 | return await response.json(); 18 | }, 19 | refetchInterval: 10000, 20 | enabled: url !== null 21 | }); 22 | } -------------------------------------------------------------------------------- /chancy/plugins/api/ui/src/hooks/useWorkflows.tsx: -------------------------------------------------------------------------------- 1 | import {useQuery} from '@tanstack/react-query'; 2 | 3 | export interface Step { 4 | step_id: string; 5 | state: string; 6 | job_id: string; 7 | dependencies: string[]; 8 | job: { 9 | func: string, 10 | queue: string, 11 | kwargs: unknown, 12 | priority: number, 13 | max_attempts: number, 14 | limits: { 15 | key: string, 16 | value: number 17 | }[] 18 | } 19 | } 20 | 21 | export interface Workflow { 22 | id: string, 23 | name: string, 24 | state: string, 25 | created_at: string, 26 | updated_at: string, 27 | steps : { 28 | 
[key: string]: Step 29 | } 30 | } 31 | 32 | export function useWorkflow ({ 33 | url, 34 | workflow_id, 35 | options = {} 36 | }: { 37 | url: string | null, 38 | workflow_id: string | undefined, 39 | options?: { 40 | refetchInterval?: number, 41 | } 42 | }) { 43 | return useQuery({ 44 | queryKey: ['workflow', url, workflow_id], 45 | queryFn: async () => { 46 | const response = await fetch(`${url}/api/v1/workflows/${workflow_id}`); 47 | return await response.json(); 48 | }, 49 | enabled: url !== null && workflow_id !== undefined, 50 | ...options 51 | }); 52 | } 53 | 54 | export function useWorkflows ({ 55 | url 56 | }: { 57 | url: string | null 58 | }) { 59 | return useQuery({ 60 | queryKey: ['workflows', url], 61 | queryFn: async () => { 62 | const response = await fetch(`${url}/api/v1/workflows`); 63 | return await response.json(); 64 | }, 65 | enabled: url !== null 66 | }); 67 | } -------------------------------------------------------------------------------- /chancy/plugins/api/ui/src/index.scss: -------------------------------------------------------------------------------- 1 | $enable-rounded: false; 2 | @import "node_modules/bootstrap/scss/bootstrap"; 3 | 4 | 5 | body, 6 | #root { 7 | height: 100vh; 8 | width: 100vw; 9 | } 10 | 11 | [data-bs-theme="dark"] { 12 | --bs-body-bg: #1a1a1a; 13 | --bs-border-color: #2d2d2d; 14 | 15 | #sidebar { 16 | background-color: #1a1c1e; 17 | } 18 | 19 | .nav-pills { 20 | --bs-nav-pills-link-active-bg: #2d2d2d; 21 | --bs-nav-pills-link-active-color: #fff; 22 | } 23 | } 24 | 25 | a { 26 | text-decoration: none; 27 | } -------------------------------------------------------------------------------- /chancy/plugins/api/ui/src/main.tsx: -------------------------------------------------------------------------------- 1 | import { StrictMode } from 'react' 2 | import { createRoot } from 'react-dom/client' 3 | import {QueryClient, QueryClientProvider} from '@tanstack/react-query'; 4 | import { 5 | createBrowserRouter, 6 | redirect, 7 
| RouterProvider 8 | } from 'react-router-dom'; 9 | 10 | import Layout from './Layout.tsx' 11 | import './index.scss' 12 | // @ts-expect-error We need to import this for the side effects 13 | import * as bootstrap from 'bootstrap'; // eslint-disable-line 14 | import {ServerConfigurationProvider} from './hooks/useServerConfiguration.tsx'; 15 | import {Queue, Queues} from './pages/Queues.tsx'; 16 | import {WorkerDetails, Workers} from './pages/Workers.tsx'; 17 | import {Job, Jobs} from './pages/Jobs.tsx'; 18 | import {Cron, Crons} from './pages/Crons.tsx'; 19 | import {Workflow, Workflows} from './pages/Workflows.tsx'; 20 | import {Metrics, MetricDetail} from './pages/Metrics.tsx'; 21 | 22 | const queryClient = new QueryClient(); 23 | 24 | const router = createBrowserRouter([ 25 | { 26 | element: , 27 | children: [ 28 | { path: "/", loader: () => redirect("/jobs") }, 29 | { path: "/queues", element: }, 30 | { path: "/queues/:name", element: }, 31 | { path: "/workers", element: }, 32 | { path: "/workers/:worker_id", element: }, 33 | { path: "/jobs", element: , loader: () => redirect("/jobs/pending") }, 34 | { path: "/jobs/pending", element: }, 35 | { path: "/jobs/running", element: }, 36 | { path: "/jobs/succeeded", element: }, 37 | { path: "/jobs/failed", element: }, 38 | { path: "/jobs/retrying", element: }, 39 | { path: "/jobs/:job_id", element: }, 40 | { path: "/crons", element: }, 41 | { path: "/crons/:cron_id", element: }, 42 | { path: "/workflows", element: }, 43 | { path: "/workflows/:workflow_id", element: }, 44 | { path: "/metrics", element: }, 45 | { path: "/metrics/:metricKey", element: }, 46 | ] 47 | } 48 | ]); 49 | 50 | createRoot(document.getElementById('root')!).render( 51 | 52 | 53 | 54 | 55 | 56 | 57 | , 58 | ) 59 | -------------------------------------------------------------------------------- /chancy/plugins/api/ui/src/pages/Metrics.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState } 
from 'react'; 2 | import { useServerConfiguration } from '../hooks/useServerConfiguration'; 3 | import { Loading } from '../components/Loading'; 4 | import { useMetricsOverview, useMetricDetail } from '../hooks/useMetrics'; 5 | import { Link, useParams } from 'react-router-dom'; 6 | import { MetricChart, ResolutionSelector } from '../components/MetricCharts'; 7 | 8 | const MetricsWrapper = ({ 9 | isLoading, 10 | data, 11 | errorMessage, 12 | children 13 | }: { 14 | isLoading: boolean; 15 | data: unknown; 16 | errorMessage: string; 17 | children: React.ReactNode; 18 | }) => { 19 | if (isLoading) { 20 | return ; 21 | } 22 | 23 | if (!data) { 24 | return
{errorMessage}
; 25 | } 26 | 27 | return <>{children}; 28 | }; 29 | 30 | export function MetricsList() { 31 | const { url } = useServerConfiguration(); 32 | const { data: overview, isLoading } = useMetricsOverview({ url }); 33 | 34 | return ( 35 | 40 |
41 |

Available Metrics

42 |

All raw metrics currently known, synchronized each minute across all workers.

43 | 44 | {overview && overview.categories && Object.entries(overview.categories).length > 0 ? ( 45 | Object.entries(overview.categories) 46 | .sort(([categoryA], [categoryB]) => categoryA.localeCompare(categoryB)) 47 | .map(([category, metrics]) => ( 48 |
49 |
50 |
{category.charAt(0).toUpperCase() + category.slice(1)} Metrics
51 |
52 |
53 |
54 | {metrics 55 | .sort((a, b) => a.localeCompare(b)) 56 | .map(metric => { 57 | const metricKey = `${category}:${metric}`; 58 | 59 | return ( 60 | 65 | {category}: 66 | {metric} 67 | 68 | ); 69 | }) 70 | } 71 |
72 |
73 |
74 | )) 75 | ) : ( 76 |
77 | No metrics available. Make sure the Metrics plugin is enabled and jobs have been processed. 78 |
79 | )} 80 |
81 |
82 | ); 83 | } 84 | 85 | export function MetricDetail() { 86 | const { url } = useServerConfiguration(); 87 | const { metricKey } = useParams<{ metricKey: string }>(); 88 | const [resolution, setResolution] = useState('5min'); 89 | 90 | const { data: metrics, isLoading } = useMetricDetail({ 91 | url, 92 | key: metricKey as string, 93 | resolution 94 | }); 95 | 96 | return ( 97 | 102 |
103 |

104 | {metricKey} 105 |

106 | 107 | 108 | 109 |
110 | {metrics && Object.entries(metrics).map(([subtype, metricData]) => { 111 | return ( 112 |
113 |
114 |
115 |
{subtype}
116 |
117 |
118 | 123 |
124 |
125 |
126 | ); 127 | })} 128 |
129 |
130 |
131 | ); 132 | } 133 | 134 | export function Metrics() { 135 | return ; 136 | } -------------------------------------------------------------------------------- /chancy/plugins/api/ui/src/pages/Workers.tsx: -------------------------------------------------------------------------------- 1 | import {useServerConfiguration} from '../hooks/useServerConfiguration.tsx'; 2 | import {Loading} from '../components/Loading.tsx'; 3 | import {Link, useParams} from 'react-router-dom'; 4 | import {useWorkers, Worker} from '../hooks/useWorkers.tsx'; 5 | import {UpdatingTime} from '../components/UpdatingTime.tsx'; 6 | import {QueueMetrics, ResolutionSelector} from '../components/MetricCharts.tsx'; 7 | import {useState} from 'react'; 8 | 9 | function WorkerInfoTable({ worker } : { worker: Worker }) { 10 | return ( 11 | 12 | 13 | 14 | 15 | 18 | 19 | 20 | 21 | 29 | 30 | 31 | 32 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 |
Worker ID 16 | {worker.worker_id} 17 |
Tags 22 | {worker.is_leader && ( 23 | Leader Node 24 | )} 25 | {worker.tags.map((tag) => ( 26 | {tag} 27 | ))} 28 |
Queues 33 |
34 | {worker.queues.map((queue) => ( 35 | 36 | 37 | {queue} 38 | 39 | 40 | ))} 41 |
42 |
Last Seen
Expires At
54 | ); 55 | } 56 | 57 | export function WorkerDetails () { 58 | const { worker_id } = useParams<{worker_id: string}>(); 59 | const { url } = useServerConfiguration(); 60 | const { data: workers, isLoading } = useWorkers(url); 61 | const [resolution, setResolution] = useState('5min'); 62 | 63 | if (isLoading) return ; 64 | 65 | const worker = workers?.find(worker => worker.worker_id === worker_id); 66 | 67 | if (!worker) { 68 | return ( 69 |
70 |

Worker - {worker_id}

71 |
Worker not found.
72 |
73 | ); 74 | } 75 | 76 | return ( 77 |
78 |

Worker - {worker.worker_id}

79 |

Details

80 | 81 | {worker.queues.length > 0 && ( 82 | <> 83 |

Queue Metrics

84 |

Per-queue metrics are for jobs processed by this worker only.

85 | 86 | 87 | {worker.queues.map(queueName => ( 88 | 95 | ))} 96 | 97 | )} 98 |
99 | ); 100 | } 101 | 102 | export function Workers() { 103 | const {url} = useServerConfiguration(); 104 | const {data: workers, isLoading} = useWorkers(url); 105 | 106 | if (isLoading) return ; 107 | 108 | if (!workers) { 109 | return ( 110 |
111 |

Workers

112 |
Workers not found.
113 |
114 | ); 115 | } 116 | 117 | return ( 118 |
119 |

Workers

120 | {workers.sort((a, b) => a.worker_id.localeCompare(b.worker_id)).map(worker => ( 121 |
122 |

123 | {worker.worker_id} 124 |

125 | 126 |
127 | ))} 128 |
129 | ); 130 | } -------------------------------------------------------------------------------- /chancy/plugins/api/ui/src/pages/WorkflowChart.tsx: -------------------------------------------------------------------------------- 1 | import {useRef} from 'react'; 2 | import type {NodeProps} from '@xyflow/react'; 3 | import {Edge, Handle, MarkerType, Node, NodeTypes, Position, ReactFlow,} from '@xyflow/react'; 4 | import '@xyflow/react/dist/style.css'; 5 | import dagre from '@dagrejs/dagre'; 6 | import {Workflow} from '../hooks/useWorkflows.tsx'; 7 | import {Link} from 'react-router-dom'; 8 | import {statusToColor} from '../utils.tsx'; 9 | 10 | interface WorkflowChartProps { 11 | workflow: Workflow; 12 | } 13 | 14 | type CustomNode = Node<{ 15 | label: string, 16 | jobId: string, 17 | state: string, 18 | job: { 19 | func: string 20 | } 21 | }, 'custom'>; 22 | 23 | const CustomNode = ({ data }: NodeProps) => { 24 | const nodeRef = useRef(null); 25 | 26 | return ( 27 |
30 | 33 |
34 |
{data.label}
35 |
36 | {data.jobId ? ( 37 | 38 | {data.jobId} 39 | 40 | ) : "-"} 41 |
42 |
43 | 46 |
47 | ); 48 | }; 49 | 50 | const nodeTypes: NodeTypes = { 51 | customNode: CustomNode, 52 | }; 53 | 54 | const getLayoutedElements = (nodes: Node[], edges: Edge[], direction = 'LR') => { 55 | const dagreGraph = new dagre.graphlib.Graph(); 56 | dagreGraph.setDefaultEdgeLabel(() => ({})); 57 | 58 | dagreGraph.setGraph({ rankdir: direction }); 59 | 60 | nodes.forEach((node) => { 61 | dagreGraph.setNode(node.id, { width: 350, height: 80 }); 62 | }); 63 | 64 | edges.forEach((edge) => { 65 | dagreGraph.setEdge(edge.source, edge.target); 66 | }); 67 | 68 | dagre.layout(dagreGraph); 69 | 70 | const layoutNodes = nodes.map((node) => { 71 | const nodeWithPosition = dagreGraph.node(node.id); 72 | return { 73 | ...node, 74 | position: { 75 | x: nodeWithPosition.x - nodeWithPosition.width / 2, 76 | y: nodeWithPosition.y - nodeWithPosition.height / 2, 77 | }, 78 | }; 79 | }); 80 | 81 | return { nodes: layoutNodes, edges }; 82 | }; 83 | 84 | const WorkflowChart: React.FC = ({ workflow }) => { 85 | const nodes: Node[] = []; 86 | const edges: Edge[] = []; 87 | 88 | Object.entries(workflow.steps || {}).forEach(([stepId, step]) => { 89 | nodes.push({ 90 | id: stepId, 91 | type: 'customNode', 92 | data: { 93 | label: stepId, 94 | jobId: step.job_id, 95 | state: step.state, 96 | job: step.job, 97 | }, 98 | position: { x: 0, y: 0 }, 99 | }); 100 | 101 | step.dependencies?.forEach((dependencyId) => { 102 | edges.push({ 103 | id: `e${dependencyId}-${stepId}`, 104 | source: dependencyId, 105 | target: stepId, 106 | animated: workflow.steps[stepId].state === null, 107 | style: { 108 | strokeWidth: 2, 109 | }, 110 | markerEnd: { 111 | type: MarkerType.ArrowClosed, 112 | }, 113 | }); 114 | }); 115 | }); 116 | 117 | const { nodes: layoutNodes, edges: layoutEdges } = getLayoutedElements(nodes, edges); 118 | 119 | return ( 120 |
121 | 132 |
133 | ); 134 | }; 135 | 136 | export default WorkflowChart; -------------------------------------------------------------------------------- /chancy/plugins/api/ui/src/pages/Workflows.tsx: -------------------------------------------------------------------------------- 1 | import {useServerConfiguration} from '../hooks/useServerConfiguration.tsx'; 2 | import {useWorkflow, useWorkflows} from '../hooks/useWorkflows.tsx'; 3 | import {Loading} from '../components/Loading.tsx'; 4 | import {Link, useParams} from 'react-router-dom'; 5 | import {UpdatingTime} from '../components/UpdatingTime.tsx'; 6 | import {statusToColor} from '../utils.tsx'; 7 | import WorkflowChart from './WorkflowChart.tsx'; 8 | import {ReactFlowProvider} from '@xyflow/react'; 9 | 10 | 11 | export function Workflow() { 12 | const { url } = useServerConfiguration(); 13 | const { workflow_id } = useParams<{workflow_id: string}>(); 14 | const { data: workflow, isLoading } = useWorkflow({ url, workflow_id, options: {refetchInterval: 5000 } }); 15 | 16 | if (isLoading) return ; 17 | 18 | if (!workflow) { 19 | return ( 20 |
21 |

Workflow - {workflow_id}

22 |
Workflow not found.
23 |
24 | ); 25 | } 26 | 27 | return ( 28 |
29 |

Workflow - {workflow_id}

30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 41 | 42 | 43 | 44 | 47 | 48 | 49 | 50 | 53 | 54 | 55 |
Name{workflow.name}
State 39 | {workflow.state} 40 |
Created 45 | 46 |
Updated 51 | 52 |
56 | {workflow.steps && ( 57 | <> 58 |
59 |
60 | Workflow Visualization 61 |
62 |
63 | 64 | 65 | 66 |
67 |
68 |

Steps

69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | {Object.entries(workflow.steps).map(([step_id, step]) => ( 79 | 80 | 81 | 90 | 99 | 100 | ))} 101 | 102 |
Step IDStateJob ID
{step_id} 82 | {step.state ? ( 83 | 84 | {step.state} 85 | 86 | ) : ( 87 | Waiting 88 | )} 89 | 91 | {step.job_id ? ( 92 | 93 | {step.job_id} 94 | 95 | ) : ( 96 | Waiting for dependencies 97 | )} 98 |
103 | 104 | )} 105 |
106 | ); 107 | } 108 | 109 | export function Workflows() { 110 | const {url} = useServerConfiguration(); 111 | const {data: workflows, isLoading} = useWorkflows({url}); 112 | 113 | if (isLoading) return ; 114 | 115 | return ( 116 |
117 |

Workflows

118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | {workflows?.map(workflow => ( 128 | 129 | 134 | 137 | 140 | 141 | ))} 142 | 143 |
NameStateCreated
130 | 131 | {workflow.name} 132 | 133 | 135 | {workflow.state} 136 | 138 | 139 |
144 |
145 | ); 146 | } -------------------------------------------------------------------------------- /chancy/plugins/api/ui/src/utils.tsx: -------------------------------------------------------------------------------- 1 | export function relativeTime (date: string) { 2 | // Convert a date string to a relative time string 3 | // like "7 minutes ago" or "in 2 days" 4 | const now = new Date(); 5 | const then = new Date(date); 6 | 7 | const diff = Math.abs(now.getTime() - then.getTime()); 8 | if (diff < 1000) return 'just now'; 9 | 10 | const was_past = then < now; 11 | 12 | const seconds = Math.floor(diff / 1000); 13 | const minutes = Math.floor(seconds / 60); 14 | const hours = Math.floor(minutes / 60); 15 | const days = Math.floor(hours / 24); 16 | const weeks = Math.floor(days / 7); 17 | const months = Math.floor(weeks / 4); 18 | const years = Math.floor(months / 12); 19 | 20 | if (years) return `${years} year${years > 1 ? 's' : ''} ${was_past ? 'ago' : 'from now'}`; 21 | if (months) return `${months} month${months > 1 ? 's' : ''} ${was_past ? 'ago' : 'from now'}`; 22 | if (weeks) return `${weeks} week${weeks > 1 ? 's' : ''} ${was_past ? 'ago' : 'from now'}`; 23 | if (days) return `${days} day${days > 1 ? 's' : ''} ${was_past ? 'ago' : 'from now'}`; 24 | if (hours) return `${hours} hour${hours > 1 ? 's' : ''} ${was_past ? 'ago' : 'from now'}`; 25 | if (minutes) return `${minutes} minute${minutes > 1 ? 's' : ''} ${was_past ? 'ago' : 'from now'}`; 26 | return `${seconds} second${seconds > 1 ? 's' : ''} ${was_past ? 
'ago' : 'from now'}`; 27 | } 28 | 29 | export function statusToColor (status: string) { 30 | return { 31 | pending: 'info', 32 | running: 'primary', 33 | succeeded: 'success', 34 | completed: 'success', 35 | failed: 'danger', 36 | retrying: 'warning' 37 | }[status] || 'secondary'; 38 | } -------------------------------------------------------------------------------- /chancy/plugins/api/ui/src/vite-env.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | -------------------------------------------------------------------------------- /chancy/plugins/api/ui/tsconfig.app.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2020", 4 | "useDefineForClassFields": true, 5 | "lib": ["ES2020", "DOM", "DOM.Iterable"], 6 | "module": "ESNext", 7 | "skipLibCheck": true, 8 | 9 | /* Bundler mode */ 10 | "moduleResolution": "bundler", 11 | "allowImportingTsExtensions": true, 12 | "isolatedModules": true, 13 | "moduleDetection": "force", 14 | "noEmit": true, 15 | "jsx": "react-jsx", 16 | 17 | /* Linting */ 18 | "strict": true, 19 | "noUnusedLocals": true, 20 | "noUnusedParameters": true, 21 | "noFallthroughCasesInSwitch": true 22 | }, 23 | "include": ["src"] 24 | } 25 | -------------------------------------------------------------------------------- /chancy/plugins/api/ui/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "files": [], 3 | "references": [ 4 | { "path": "./tsconfig.app.json" }, 5 | { "path": "./tsconfig.node.json" } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /chancy/plugins/api/ui/tsconfig.node.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2022", 4 | "lib": ["ES2023"], 5 | "module": "ESNext", 6 | "skipLibCheck": true, 7 | 8 | /* Bundler mode */ 9 
| "moduleResolution": "bundler", 10 | "allowImportingTsExtensions": true, 11 | "isolatedModules": true, 12 | "moduleDetection": "force", 13 | "noEmit": true, 14 | 15 | /* Linting */ 16 | "strict": true, 17 | "noUnusedLocals": true, 18 | "noUnusedParameters": true, 19 | "noFallthroughCasesInSwitch": true 20 | }, 21 | "include": ["vite.config.ts"] 22 | } 23 | -------------------------------------------------------------------------------- /chancy/plugins/api/ui/vite.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from 'vite' 2 | import react from '@vitejs/plugin-react' 3 | import { visualizer } from "rollup-plugin-visualizer"; 4 | 5 | // https://vitejs.dev/config/ 6 | export default defineConfig({ 7 | plugins: [react(), visualizer()], 8 | build: { 9 | outDir: '../dist', 10 | // Since our dist directory is outside the vite root, 11 | // it doesn't empty by default. 12 | emptyOutDir: true, 13 | } 14 | }) 15 | -------------------------------------------------------------------------------- /chancy/plugins/cron/api.py: -------------------------------------------------------------------------------- 1 | from starlette.authentication import requires 2 | from starlette.responses import Response 3 | 4 | from chancy.plugins.api import ApiPlugin 5 | from chancy.plugins.cron import Cron 6 | from chancy.utils import json_dumps 7 | 8 | 9 | class CronApiPlugin(ApiPlugin): 10 | """ 11 | Provides API endpoints for viewing cron jobs. 12 | """ 13 | 14 | def name(self): 15 | return "cron" 16 | 17 | def routes(self): 18 | return [ 19 | { 20 | "path": "/api/v1/crons", 21 | "endpoint": self.get_cron, 22 | "methods": ["GET"], 23 | "name": "get_cron", 24 | }, 25 | ] 26 | 27 | @staticmethod 28 | @requires(["authenticated"]) 29 | async def get_cron(request, *, chancy, worker): 30 | """ 31 | Get all known cron jobs. 
32 | """ 33 | plugin: Cron = chancy.plugins["chancy.cron"] 34 | 35 | return Response( 36 | json_dumps(list((await plugin.get_schedules(chancy)).values())), 37 | media_type="application/json", 38 | ) 39 | -------------------------------------------------------------------------------- /chancy/plugins/cron/django/admin.py: -------------------------------------------------------------------------------- 1 | from django.contrib import admin 2 | 3 | from chancy.plugins.cron.django.models import Cron 4 | 5 | 6 | @admin.register(Cron) 7 | class CronAdmin(admin.ModelAdmin): 8 | list_display = ("unique_key", "cron", "last_run", "next_run") 9 | search_fields = ("unique_key", "cron") 10 | -------------------------------------------------------------------------------- /chancy/plugins/cron/django/apps.py: -------------------------------------------------------------------------------- 1 | from django.apps import AppConfig 2 | 3 | 4 | class ChancyCronConfig(AppConfig): 5 | name = "chancy.plugins.cron.django" 6 | label = "chancy_cron" 7 | verbose_name = "Chancy Cron" 8 | -------------------------------------------------------------------------------- /chancy/plugins/cron/django/models.py: -------------------------------------------------------------------------------- 1 | from django.db import models 2 | from django.conf import settings 3 | 4 | PREFIX = getattr(settings, "CHANCY_PREFIX", "chancy_") 5 | 6 | 7 | class Cron(models.Model): 8 | unique_key = models.TextField(primary_key=True) 9 | job = models.JSONField(null=False) 10 | cron = models.TextField(null=False) 11 | last_run = models.DateTimeField() 12 | next_run = models.DateTimeField(null=False) 13 | 14 | class Meta: 15 | managed = False 16 | db_table = f"{PREFIX}cron" 17 | -------------------------------------------------------------------------------- /chancy/plugins/cron/migrations/__init__.py: -------------------------------------------------------------------------------- 
class V1Migration(Migration):
    """
    Creates the cron schedule table used by the Cron plugin.

    Rows are keyed by a caller-provided ``unique_key``; ``last_run`` is
    nullable because a schedule may never have fired yet.
    """

    async def up(self, migrator: Migrator, cursor: AsyncCursor[DictRow]):
        table = sql.Identifier(f"{migrator.prefix}cron")
        await cursor.execute(
            sql.SQL(
                """
                CREATE TABLE {table} (
                    unique_key TEXT PRIMARY KEY,
                    job JSON NOT NULL,
                    cron TEXT NOT NULL,
                    last_run TIMESTAMPTZ,
                    next_run TIMESTAMPTZ NOT NULL
                )
                """
            ).format(table=table)
        )

    async def down(self, migrator: Migrator, cursor: AsyncCursor[DictRow]):
        table = sql.Identifier(f"{migrator.prefix}cron")
        await cursor.execute(
            sql.SQL("DROP TABLE {table}").format(table=table)
        )
class ConvertToJSONB(Migration):
    """
    Convert the cron table's ``job`` column from JSON to JSONB.

    JSONB provides better read performance, the ability to index the
    column, and more efficient storage than plain JSON.
    """

    async def up(self, migrator: Migrator, cursor: AsyncCursor[DictRow]):
        cron_table = sql.Identifier(f"{migrator.prefix}cron")
        # USING job::JSONB rewrites the existing values in place.
        await cursor.execute(
            sql.SQL(
                """
                ALTER TABLE {cron}
                ALTER COLUMN job TYPE JSONB USING job::JSONB
                """
            ).format(cron=cron_table)
        )

    async def down(self, migrator: Migrator, cursor: AsyncCursor[DictRow]):
        cron_table = sql.Identifier(f"{migrator.prefix}cron")
        await cursor.execute(
            sql.SQL(
                """
                ALTER TABLE {cron}
                ALTER COLUMN job TYPE JSON USING job::JSON
                """
            ).format(cron=cron_table)
        )
class MetricsApiPlugin(ApiPlugin):
    """
    API plugin for exposing metrics data.

    Registers two read-only endpoints:

    - ``/api/v1/metrics``: metric names grouped by category.
    - ``/api/v1/metrics/{prefix}``: time-series data for a metric prefix.
    """

    def name(self):
        return "metrics"

    def routes(self):
        return [
            {
                "path": "/api/v1/metrics",
                "endpoint": self.get_metrics,
                "methods": ["GET"],
                "name": "get_metrics",
            },
            {
                "path": "/api/v1/metrics/{prefix}",
                "endpoint": self.get_metric_detail,
                "methods": ["GET"],
                "name": "get_metric_detail",
            },
        ]

    @staticmethod
    @requires(["authenticated"])
    async def get_metrics(request: Request, *, chancy, worker):
        """
        Get a list of all available metrics.
        """
        metrics_plugin = chancy.plugins["chancy.metrics"]

        metric_categories = defaultdict(list)

        metrics = await metrics_plugin.get_metrics(chancy)
        for key in metrics.keys():
            # Keys are expected to look like "<category>:<name>[:<extra>]".
            # Skip malformed keys instead of letting parts[1] raise an
            # IndexError and turn the whole endpoint into a 500.
            parts = key.split(":")
            if len(parts) < 2:
                continue
            category, metric_name = parts[0], parts[1]
            if metric_name not in metric_categories[category]:
                metric_categories[category].append(metric_name)

        return Response(
            json_dumps(
                {
                    "categories": metric_categories,
                    "count": len(metrics),
                }
            ),
            media_type="application/json",
        )

    @staticmethod
    @requires(["authenticated"])
    async def get_metric_detail(request: Request, *, chancy, worker):
        """
        Get detailed data for a specific metric.

        Can be filtered by worker_id with the worker_id query parameter.
        The ``resolution`` query parameter selects the aggregation window
        (defaults to ``5min``); an unknown resolution now yields a 400
        response instead of an unhandled AttributeError (500).
        """
        metrics_plugin = chancy.plugins["chancy.metrics"]

        metric_prefix = request.path_params.get("prefix", "")
        resolution = request.query_params.get("resolution", "5min")
        worker_id = request.query_params.get("worker_id")

        metrics = await metrics_plugin.get_metrics(
            chancy,
            metric_prefix=metric_prefix,
            worker_id=worker_id,
        )

        try:
            payload = {
                metric_key: {
                    "data": [
                        {
                            "timestamp": timestamp,
                            "value": value,
                        }
                        for timestamp, value in getattr(
                            metric, f"values_{resolution}"
                        )
                    ],
                    "type": metric.metric_type,
                }
                for metric_key, metric in metrics.items()
            }
        except AttributeError:
            # ``resolution`` is caller-controlled; reject values that don't
            # map to a values_<resolution> series on the metric object.
            return Response(
                json_dumps({"error": f"Unknown resolution: {resolution}"}),
                media_type="application/json",
                status_code=400,
            )

        return Response(
            json_dumps(payload),
            media_type="application/json",
        )
class MetricsInitialMigration(Migration):
    """
    Create the initial tables for the metrics plugin.
    """

    async def up(self, migrator: Migrator, cursor: AsyncCursor[DictRow]):
        """
        Create the metrics table and its supporting index.
        """
        metrics_table = sql.Identifier(f"{migrator.prefix}metrics")
        updated_idx = sql.Identifier(f"{migrator.prefix}metrics_updated_idx")

        # NOTE(review): "values" is a PostgreSQL reserved keyword used here
        # as an unquoted column name — confirm this CREATE TABLE actually
        # runs against the target server versions.
        await cursor.execute(
            sql.SQL(
                """
                CREATE TABLE IF NOT EXISTS {metrics_table} (
                    -- Primary key: metric_key + resolution + worker_id combination
                    metric_key VARCHAR(255) NOT NULL,
                    resolution VARCHAR(10) NOT NULL, -- '1min', '5min', '1hour', etc.
                    worker_id VARCHAR(255) NOT NULL,

                    -- Storage for time-series data
                    -- timestamp -> value mapping, stored in descending order (newest first)
                    timestamps TIMESTAMPTZ[] NOT NULL,
                    values JSONB[] NOT NULL,

                    metric_type VARCHAR(20) NOT NULL,
                    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
                    metadata JSONB NOT NULL DEFAULT '{{}}',

                    PRIMARY KEY (metric_key, resolution, worker_id)
                )
                """
            ).format(metrics_table=metrics_table)
        )

        # updated_at is indexed so stale rows can be pruned efficiently.
        await cursor.execute(
            sql.SQL(
                """
                CREATE INDEX IF NOT EXISTS {metrics_updated_idx}
                ON {metrics_table} (updated_at)
                """
            ).format(
                metrics_table=metrics_table,
                metrics_updated_idx=updated_idx,
            )
        )

    async def down(self, migrator: Migrator, cursor: AsyncCursor[DictRow]):
        """
        Drop the metrics table.
        """
        await cursor.execute(
            sql.SQL("DROP TABLE IF EXISTS {metrics_table}").format(
                metrics_table=sql.Identifier(f"{migrator.prefix}metrics")
            )
        )
class Recovery(Plugin):
    """
    Recovers jobs that appear to be abandoned by a worker.

    .. note::

        This plugin is enabled by default, you only need to provide it in the
        list of plugins to customize its arguments or if ``no_default_plugins``
        is set to ``True``.

    .. code-block:: python

        from chancy.plugins.recovery import Recovery

        async with Chancy(..., plugins=[
            Recovery()
        ]) as chancy:
            ...

    A job is considered abandoned when the worker that took it has not
    updated its heartbeat within the heartbeat timeout — typically because
    the worker was unexpectedly terminated or otherwise lost. Matching jobs
    are transitioned back to the "pending" state and their ``max_attempts``
    counter is incremented by 1 so they can be retried.

    :param poll_interval: The number of seconds between recovery poll
        intervals.
    """

    def __init__(self, *, poll_interval: int = 60):
        super().__init__()
        self.poll_interval = poll_interval

    @staticmethod
    def get_identifier() -> str:
        return "chancy.recovery"

    @staticmethod
    def get_dependencies() -> list[str]:
        # Recovery only runs on the elected leader.
        return ["chancy.leadership"]

    async def run(self, worker: Worker, chancy: Chancy):
        while await self.sleep(self.poll_interval):
            await self.wait_for_leader(worker)
            async with chancy.pool.connection() as conn:
                async with conn.cursor(row_factory=dict_row) as cur:
                    with timed_block() as timer:
                        recovered = await self.recover(worker, chancy, cur)
                        chancy.log.info(
                            f"Recovery recovered {recovered} row(s) from"
                            f" the database. Took {timer.elapsed:.2f}"
                            f" seconds."
                        )
                        await chancy.notify(
                            cur,
                            "recovery.recovered",
                            {
                                "elapsed": timer.elapsed,
                                "rows_recovered": recovered,
                            },
                        )

    @classmethod
    async def recover(
        cls, worker: Worker, chancy: Chancy, cursor: AsyncCursor[DictRow]
    ) -> int:
        """
        Recover jobs that were running when the worker was unexpectedly
        terminated, or has otherwise been lost.

        :param worker: The worker that is running the recovery.
        :param chancy: The Chancy application.
        :param cursor: The cursor to use for database operations.
        :return: The number of rows recovered from the database
        """
        # A running job is recoverable when no live worker (heartbeat seen
        # within the timeout window) still claims it.
        query = sql.SQL(
            """
            UPDATE
                {jobs} cj
            SET
                state = 'pending',
                taken_by = NULL,
                started_at = NULL,
                max_attempts = max_attempts + 1
            WHERE
                NOT EXISTS (
                    SELECT 1
                    FROM {workers} cw
                    WHERE (
                        cw.worker_id = cj.taken_by
                        AND
                        cw.last_seen >= NOW() - INTERVAL '{interval} SECOND'
                    )
                )
                AND state = 'running';
            """
        ).format(
            jobs=sql.Identifier(f"{chancy.prefix}jobs"),
            workers=sql.Identifier(f"{chancy.prefix}workers"),
            interval=sql.Literal(worker.heartbeat_timeout),
        )

        await cursor.execute(query)
        return cursor.rowcount
class Reprioritize(Plugin):
    """
    A plugin that increases the priority of jobs based on how long they've been
    in the queue. This helps prevent job starvation by gradually increasing the
    priority of older jobs.

    .. code-block:: python

        async with Chancy(..., plugins=[
            Reprioritize(
                rule=(
                    (Reprioritize.Rules.Age() > 600) &
                    (Reprioritize.Rules.Queue() == "high-priority")
                ),
                check_interval=300,
                priority_increase=1,
            ),
        ]) as chancy:
            ...

    This means that any job that has been in the queue for more than 10 minutes
    will have its priority increased by 1 every 5 minutes, but only for jobs
    in the high-priority queue.

    :param rule: A SQL-able rule (see :class:`chancy.rule.JobRules`) selecting
        which jobs are eligible for reprioritization.
    :param check_interval: Seconds between reprioritization passes.
    :param priority_increase: Amount added to each matching job's priority.
    :param batch_size: Maximum number of jobs updated per transaction.
    """

    Rules = JobRules

    def __init__(
        self,
        rule,
        *,
        check_interval: int = 300,
        priority_increase: int = 1,
        batch_size: int = 1000,
    ):
        super().__init__()
        self.rule = rule
        self.check_interval = check_interval
        self.priority_increase = priority_increase
        self.batch_size = batch_size

    @staticmethod
    def get_identifier() -> str:
        return "chancy.reprioritize"

    @staticmethod
    def get_dependencies() -> list[str]:
        # Only the elected leader performs reprioritization.
        return ["chancy.leadership"]

    async def run(self, worker, chancy):
        while await self.sleep(self.check_interval):
            await self.wait_for_leader(worker)
            updated = await self.reprioritize_jobs(chancy)
            chancy.log.info(
                f"Reprioritized {updated} jobs by increasing priority"
                f" by {self.priority_increase}"
            )

    async def reprioritize_jobs(self, chancy) -> int:
        """
        Reprioritize jobs based on the rules provided to the plugin.

        Returns the total number of jobs that were updated.
        """
        total_updated = 0

        async with chancy.pool.connection() as conn:
            async with conn.cursor(row_factory=dict_row) as cursor:
                while True:
                    # Update jobs in batches to avoid long-running
                    # transactions; SKIP LOCKED avoids contention with
                    # workers fetching jobs.
                    async with conn.transaction():
                        await cursor.execute(
                            sql.SQL(
                                """
                                WITH jobs_to_update AS (
                                    SELECT
                                        id,
                                        priority
                                    FROM {jobs_table}
                                    WHERE
                                        state IN ('pending', 'retrying')
                                        AND ({rule})
                                    ORDER BY id
                                    LIMIT {batch_size}
                                    FOR UPDATE SKIP LOCKED
                                )
                                UPDATE {jobs_table} j
                                SET
                                    priority = j.priority + {increment}
                                FROM jobs_to_update
                                WHERE j.id = jobs_to_update.id
                                RETURNING j.id
                                """
                            ).format(
                                jobs_table=sql.Identifier(
                                    f"{chancy.prefix}jobs"
                                ),
                                rule=self.rule.to_sql(),
                                # psycopg's sql.SQL.format() only accepts
                                # Composable arguments; wrap the plain ints
                                # in sql.Literal (matches recovery.py).
                                increment=sql.Literal(self.priority_increase),
                                batch_size=sql.Literal(self.batch_size),
                            )
                        )

                        results = await cursor.fetchall()
                        if not results:
                            break

                        total_updated += len(results)

        return total_updated
class RetryPlugin(Plugin):
    """
    Plugin that handles job retries based on settings stored in the Job's
    metadata.

    This plugin can be used as an example for implementing your own retry
    policies.

    Usage:

    .. code-block:: python

        from chancy import job
        from chancy.plugins.retry import RetryPlugin

        @job()
        def job_that_fails():
            raise ValueError("This job should fail.")

        async with Chancy(..., plugins=[RetryPlugin()]) as chancy:
            await chancy.declare(Queue("default"))
            await chancy.push(
                job_that_fails.job.with_max_attempts(3).with_meta({
                    "retry_settings": {
                        "backoff": 2,
                        "backoff_factor": 3,
                        "backoff_limit": 300,
                        "backoff_jitter": [1, 5],
                    }
                })
            )

    The above example will retry the job 3 times, with a starting backoff of 2
    seconds, a backoff factor of 3, a backoff limit of 300 seconds, and a
    random jitter of between 1 and 5 seconds.
    """

    @staticmethod
    def get_identifier() -> str:
        return "chancy.retry_plugin"

    @staticmethod
    def calculate_next_run(job: QueuedJob, retry_settings: dict) -> datetime:
        """
        Compute when the next attempt should run: exponential backoff
        (base * factor ** (attempts - 1)), capped at the limit, plus a
        uniform random jitter.
        """
        base = retry_settings.get("backoff", 1)
        factor = retry_settings.get("backoff_factor", 2.0)
        cap = retry_settings.get("backoff_limit", 300)
        jitter_range = retry_settings.get("backoff_jitter", [1, 5])

        backoff = min(base * factor ** (job.attempts - 1), cap)
        backoff += random.uniform(*jitter_range)

        return datetime.now(timezone.utc) + timedelta(seconds=backoff)

    async def on_job_completed(
        self,
        *,
        job: QueuedJob,
        worker: Worker,
        exc: Exception | None = None,
        result: Any = None,
    ) -> QueuedJob:
        # Only failed/retrying jobs with an exception are eligible.
        failed_states = {job.State.FAILED, job.State.RETRYING}
        if exc is None or job.state not in failed_states:
            return job

        # Out of attempts; leave the job in its terminal state.
        if job.attempts >= job.max_attempts:
            return job

        retry_settings = job.meta.get("retry_settings", {})

        # We don't need to adjust the # of attempts as the base executor
        # will do that for us.
        return dataclasses.replace(
            job,
            state=QueuedJob.State.RETRYING,
            scheduled_at=self.calculate_next_run(job, retry_settings),
            completed_at=None,
        )
class SentryPlugin(Plugin):
    """
    A plugin that sends errors to Sentry.

    Every exception raised during job execution is captured in Sentry
    along with extended metadata about the job: the executing worker, the
    queue, and the job ID.

    This plugin assumes you've already configured sentry-sdk using
    ``sentry_sdk.init()`` at some point, to prevent conflicts with other
    Sentry integrations.

    Requires the sentry-sdk package version 2.0.0 or higher.

    Usage:

    .. code-block:: bash

        pip install sentry-sdk

    .. code-block:: python

        from chancy.plugins.sentry import SentryPlugin

        async with Chancy(..., plugins=[SentryPlugin()]) as chancy:
            pass
    """

    @staticmethod
    def get_identifier() -> str:
        return "chancy.sentry_plugin"

    async def on_job_completed(
        self,
        *,
        worker: Worker,
        job: QueuedJob,
        exc: Exception | None = None,
        result: Any | None = None,
    ) -> QueuedJob:
        # Successful completions pass straight through.
        if not exc:
            return job

        # Capture in an isolated scope so the tags don't leak into
        # unrelated events.
        with sentry_sdk.new_scope() as scope:
            scope.set_tags(
                {
                    "chancy.func": job.func,
                    "chancy.worker": worker.worker_id,
                    "chancy.queue": job.queue,
                }
            )
            scope.set_extra("chancy.job_id", job.id)
            scope.capture_exception(exc)
        return job
class WorkflowApiPlugin(ApiPlugin):
    """
    API plugin for workflows.
    """

    def name(self):
        return "workflow"

    def routes(self):
        # List endpoint plus a detail endpoint keyed by workflow ID.
        return [
            {
                "path": "/api/v1/workflows",
                "endpoint": self.get_workflows,
                "methods": ["GET"],
                "name": "get_workflows",
            },
            {
                "path": "/api/v1/workflows/{id}",
                "endpoint": self.get_workflow,
                "methods": ["GET"],
                "name": "get_workflow",
            },
        ]

    @staticmethod
    @requires(["authenticated"])
    async def get_workflows(request, *, chancy, worker):
        """
        Return every known workflow, newest first.
        """
        query = sql.SQL(
            """
            SELECT
                w.*
            FROM
                {workflows_table} w
            ORDER BY
                created_at DESC;
            """
        ).format(
            workflows_table=sql.Identifier(f"{chancy.prefix}workflows"),
        )

        async with chancy.pool.connection() as conn:
            async with conn.cursor(row_factory=dict_row) as cursor:
                await cursor.execute(query)
                rows = await cursor.fetchall()

        return Response(
            json_dumps(rows),
            media_type="application/json",
        )

    @staticmethod
    @requires(["authenticated"])
    async def get_workflow(request, *, chancy, worker):
        """
        Return a single workflow by ID, or a 404 if it doesn't exist.
        """
        workflow_id = request.path_params["id"]
        workflow = await WorkflowPlugin.fetch_workflow(chancy, workflow_id)

        if workflow is None:
            return Response(
                json_dumps({"error": "Workflow not found"}),
                media_type="application/json",
                status_code=404,
            )

        return Response(
            json_dumps(asdict(workflow)),
            media_type="application/json",
        )
class WorkflowStepInline(admin.TabularInline):
    """
    Read-only inline listing a workflow's steps on the Workflow admin page.
    """

    model = WorkflowStep
    readonly_fields = (
        "id",
        "step_id",
        "dependencies",
        "job_id",
        "created_at",
        "updated_at",
    )
    extra = 0
    # Fix: the InlineModelAdmin option is ``can_delete`` — the previous
    # ``can_deletes`` was silently ignored by Django, leaving step deletion
    # enabled in the admin.
    can_delete = False


@admin.register(Workflow)
class WorkflowAdmin(admin.ModelAdmin):
    """
    Admin for workflows; their steps are shown via the read-only inline.
    """

    list_display = ("id", "name", "state", "created_at", "updated_at")
    search_fields = ("id", "name", "state")
    readonly_fields = ("id", "created_at", "updated_at")
    inlines = [WorkflowStepInline]


@admin.register(WorkflowStep)
class WorkflowStepAdmin(admin.ModelAdmin):
    """
    Standalone admin for individual workflow steps.
    """

    list_display = (
        "id",
        "workflow",
        "step_id",
        "job_id",
        "created_at",
        "updated_at",
    )
    search_fields = (
        "id",
        "workflow__id",
        "workflow__name",
        "step_id",
        "job_id",
    )
    # raw_id widget avoids loading every workflow into a <select>.
    raw_id_fields = ("workflow",)
    readonly_fields = ("id", "created_at", "updated_at")
"""
Unmanaged Django models for the workflow plugin.

These mirror the tables created by the workflow plugin's own migrations
(see chancy/plugins/workflow/migrations); Django never manages their
schema (``managed = False``), it only reads them, e.g. for the admin.
"""

__all__ = ("Workflow", "WorkflowStep")

from django.db import models
from django.conf import settings

from chancy.utils import chancy_uuid

# Table-name prefix shared with the rest of Chancy; overridable via the
# CHANCY_PREFIX Django setting.
PREFIX = getattr(settings, "CHANCY_PREFIX", "chancy_")


class Workflow(models.Model):
    # Primary key is generated application-side via chancy_uuid.
    id = models.UUIDField(primary_key=True, default=chancy_uuid, editable=False)
    name = models.TextField(null=False)
    state = models.TextField(null=False)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        # Schema is owned by the workflow plugin's migrations.
        managed = False
        db_table = f"{PREFIX}workflows"


class WorkflowStep(models.Model):
    id = models.AutoField(primary_key=True)
    workflow = models.ForeignKey(
        Workflow,
        on_delete=models.CASCADE,
        related_name="steps",
        db_column="workflow_id",
    )
    step_id = models.TextField(null=False)
    job_data = models.JSONField(null=False)
    dependencies = models.JSONField(null=False)
    # Null until the step has actually been pushed as a job.
    job_id = models.UUIDField(null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        managed = False
        db_table = f"{PREFIX}workflow_steps"
        # Matches the unique index created in migrations/v1.py.
        unique_together = ("workflow", "step_id")
class V1Migration(Migration):
    """
    Creates the initial ``workflows`` and ``workflow_steps`` tables along
    with their supporting indexes.
    """

    async def up(self, migrator: Migrator, cursor: AsyncCursor[DictRow]):
        workflows = sql.Identifier(f"{migrator.prefix}workflows")
        workflow_steps = sql.Identifier(f"{migrator.prefix}workflow_steps")

        # Create workflows table
        await cursor.execute(
            sql.SQL(
                """
                CREATE TABLE {workflows} (
                    id UUID PRIMARY KEY,
                    name TEXT NOT NULL,
                    state TEXT NOT NULL,
                    created_at TIMESTAMPTZ DEFAULT NOW(),
                    updated_at TIMESTAMPTZ DEFAULT NOW()
                )
                """
            ).format(workflows=workflows)
        )

        # Create workflow_steps table
        await cursor.execute(
            sql.SQL(
                """
                CREATE TABLE {workflow_steps} (
                    id SERIAL PRIMARY KEY,
                    workflow_id UUID REFERENCES {workflows}(id)
                        ON DELETE CASCADE,
                    step_id TEXT NOT NULL,
                    job_data JSON NOT NULL,
                    dependencies JSON NOT NULL,
                    job_id UUID,
                    created_at TIMESTAMPTZ DEFAULT NOW(),
                    updated_at TIMESTAMPTZ DEFAULT NOW()
                )
                """
            ).format(
                workflow_steps=workflow_steps,
                workflows=workflows,
            )
        )

        # Create indexes. The original passed an extra, unused
        # ``workflow_steps_state_idx`` format argument for an index that
        # was never created; it has been removed.
        await cursor.execute(
            sql.SQL(
                """
                CREATE INDEX {workflow_steps_workflow_id_idx} ON {workflow_steps} (workflow_id);
                CREATE INDEX {workflows_state_idx} ON {workflows} (state);
                CREATE UNIQUE INDEX {workflows_steps_unique_idx} ON {workflow_steps} (workflow_id, step_id);
                """
            ).format(
                workflow_steps=workflow_steps,
                workflows=workflows,
                workflow_steps_workflow_id_idx=sql.Identifier(
                    f"{migrator.prefix}workflow_steps_workflow_id_idx"
                ),
                workflows_state_idx=sql.Identifier(
                    f"{migrator.prefix}workflows_state_idx"
                ),
                workflows_steps_unique_idx=sql.Identifier(
                    f"{migrator.prefix}workflows_steps_unique_idx"
                ),
            )
        )

    async def down(self, migrator: Migrator, cursor: AsyncCursor[DictRow]):
        # Steps reference workflows, so drop them first.
        await cursor.execute(
            sql.SQL("DROP TABLE IF EXISTS {table}").format(
                table=sql.Identifier(f"{migrator.prefix}workflow_steps")
            )
        )

        await cursor.execute(
            sql.SQL("DROP TABLE IF EXISTS {table}").format(
                table=sql.Identifier(f"{migrator.prefix}workflows")
            )
        )
class SQLAble:
    """
    Interface for objects that can render themselves as a psycopg
    ``sql.Composable`` fragment.
    """

    def to_sql(self) -> sql.Composable:
        raise NotImplementedError


class Rule(SQLAble):
    """
    A reference to a column (or computed expression) on the job table.

    Comparison operators intentionally return :class:`Condition` objects
    instead of booleans so rules compose into SQL, e.g.
    ``(JobRules.Age() > 600) & (JobRules.Queue() == "default")``.
    Because ``__eq__`` is overridden this way, Rule instances should not
    be used as dict keys or set members.
    """

    def __init__(self, field: str):
        self.field = field

    def __eq__(self, other: Any) -> "Condition":
        return Condition(self.to_sql(), "=", other)

    def __ne__(self, other: Any) -> "Condition":
        return Condition(self.to_sql(), "!=", other)

    def __lt__(self, other: Any) -> "Condition":
        return Condition(self.to_sql(), "<", other)

    def __le__(self, other: Any) -> "Condition":
        return Condition(self.to_sql(), "<=", other)

    def __gt__(self, other: Any) -> "Condition":
        return Condition(self.to_sql(), ">", other)

    def __ge__(self, other: Any) -> "Condition":
        return Condition(self.to_sql(), ">=", other)

    def __or__(self, other: "Condition") -> "OrCondition":
        return OrCondition(self, other)

    def __and__(self, other: "Condition") -> "AndCondition":
        return AndCondition(self, other)

    def contains(self, value: str) -> "Condition":
        """
        Case-insensitive substring match (SQL ``ILIKE '%value%'``).

        NOTE(review): ``%`` and ``_`` in *value* are not escaped and act
        as wildcards — confirm callers never pass untrusted patterns.
        """
        return Condition(self.to_sql(), "ILIKE", f"%{value}%")

    def to_sql(self) -> sql.Composable:
        return sql.Identifier(self.field)


class Condition(SQLAble):
    """
    A single ``<field> <op> <literal>`` comparison; the value is rendered
    with ``sql.Literal`` so it is safely escaped.
    """

    def __init__(self, field: sql.Composable, op: str, value: Any):
        self.field = field
        self.op = op
        self.value = value

    def __or__(self, other: "Condition") -> "OrCondition":
        return OrCondition(self, other)

    def __and__(self, other: "Condition") -> "AndCondition":
        return AndCondition(self, other)

    def to_sql(self) -> sql.Composable:
        return sql.SQL("{field} {op} {value}").format(
            field=self.field,
            op=sql.SQL(self.op),
            value=sql.Literal(self.value),
        )


class OrCondition(SQLAble):
    """
    Logical OR of two SQL-able fragments; both sides are parenthesized.
    """

    def __init__(self, left: SQLAble, right: SQLAble):
        self.left = left
        self.right = right

    def __or__(self, other: SQLAble) -> "OrCondition":
        return OrCondition(self, other)

    def __and__(self, other: SQLAble) -> "AndCondition":
        return AndCondition(self, other)

    def to_sql(self) -> sql.Composable:
        return sql.SQL("({left}) OR ({right})").format(
            left=self.left.to_sql(), right=self.right.to_sql()
        )


class AndCondition(SQLAble):
    """
    Logical AND of two SQL-able fragments; both sides are parenthesized.
    """

    def __init__(self, left: SQLAble, right: SQLAble):
        self.left = left
        self.right = right

    def __or__(self, other: Condition) -> OrCondition:
        return OrCondition(self, other)

    def __and__(self, other: Condition) -> "AndCondition":
        return AndCondition(self, other)

    def to_sql(self) -> sql.Composable:
        return sql.SQL("({left}) AND ({right})").format(
            left=self.left.to_sql(), right=self.right.to_sql()
        )


class JobRules:
    """
    A collection of rules that can be used to filter the main job table.
    """

    class Age(Rule):
        """Seconds since the job was created (computed server-side)."""

        def __init__(self):
            super().__init__("age")

        def to_sql(self) -> sql.Composable:
            return sql.SQL("EXTRACT(EPOCH FROM (NOW() - created_at))")

    class Queue(Rule):
        """The queue the job was pushed to."""

        def __init__(self):
            super().__init__("queue")

    class Job(Rule):
        """The fully-qualified function name of the job."""

        def __init__(self):
            super().__init__("func")

    class State(Rule):
        """The job's current state (e.g. 'pending', 'running')."""

        def __init__(self):
            super().__init__("state")

    class CreatedAt(Rule):
        """When the job row was created."""

        def __init__(self):
            super().__init__("created_at")

    class ScheduledAt(Rule):
        """When the job is scheduled to run."""

        def __init__(self):
            super().__init__("scheduled_at")

    class ID(Rule):
        """The job's unique identifier."""

        def __init__(self):
            super().__init__("id")
3 | services: 4 | db: 5 | image: postgres 6 | restart: always 7 | environment: 8 | POSTGRES_PASSWORD: localtest 9 | POSTGRES_USER: postgres 10 | POSTGRES_DB: postgres 11 | ports: 12 | - "8190:5432" 13 | volumes: 14 | - data:/var/lib/postgresql/data 15 | command: >- 16 | postgres 17 | -c shared_preload_libraries=auto_explain 18 | -c auto_explain.log_min_duration=0 19 | -c auto_explain.log_analyze=true 20 | -c auto_explain.log_buffers=true 21 | -c auto_explain.log_timing=true 22 | -c auto_explain.log_triggers=true 23 | -c auto_explain.log_verbose=true 24 | -c auto_explain.log_nested_statements=true 25 | -c client_min_messages=notice 26 | -c log_statement=all 27 | -c log_min_duration_statement=0 28 | 29 | volumes: 30 | data: -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/_static/workflow.dot: -------------------------------------------------------------------------------- 1 | digraph "example" { 2 | rankdir=TB; 3 | node [shape=box, style="rounded,filled", fontname="Arial"]; 4 | "top" [label="top\n(pending)", fillcolor=lightblue]; 5 | "left" [label="left\n(pending)", fillcolor=lightblue]; 6 | "right" [label="right\n(pending)", fillcolor=lightblue]; 7 | "bottom" [label="bottom\n(pending)", fillcolor=lightblue]; 8 | "top" -> "left"; 9 | "top" -> "right"; 10 | "left" -> "bottom"; 11 | "right" -> "bottom"; 12 | labelloc="t"; 13 | label="Workflow: example\nState: pending"; 14 | } 15 | -------------------------------------------------------------------------------- /docs/chancy.app.rst: -------------------------------------------------------------------------------- 1 | chancy.app module 2 | ================= 3 | 4 | .. automodule:: chancy.app 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/chancy.errors.rst: -------------------------------------------------------------------------------- 1 | chancy.errors module 2 | ==================== 3 | 4 | .. automodule:: chancy.errors 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/chancy.executors.asyncex.rst: -------------------------------------------------------------------------------- 1 | AsyncExecutor 2 | ============= 3 | 4 | .. 
automodule:: chancy.executors.asyncex 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/chancy.executors.base.rst: -------------------------------------------------------------------------------- 1 | Executor 2 | ======== 3 | 4 | .. automodule:: chancy.executors.base 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/chancy.executors.process.rst: -------------------------------------------------------------------------------- 1 | ProcessExecutor 2 | =============== 3 | 4 | .. automodule:: chancy.executors.process 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/chancy.executors.rst: -------------------------------------------------------------------------------- 1 | Executors 2 | ========= 3 | 4 | .. automodule:: chancy.executors 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | Submodules 10 | ---------- 11 | 12 | .. toctree:: 13 | :maxdepth: 1 14 | 15 | chancy.executors.base 16 | chancy.executors.process 17 | chancy.executors.asyncex 18 | chancy.executors.thread 19 | chancy.executors.sub 20 | -------------------------------------------------------------------------------- /docs/chancy.executors.sub.rst: -------------------------------------------------------------------------------- 1 | SubInterpreterExecutor 2 | ====================== 3 | 4 | .. automodule:: chancy.executors.sub 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/chancy.executors.thread.rst: -------------------------------------------------------------------------------- 1 | ThreadedExecutor 2 | ================ 3 | 4 | .. 
automodule:: chancy.executors.thread 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/chancy.hub.rst: -------------------------------------------------------------------------------- 1 | chancy.hub module 2 | ================= 3 | 4 | .. automodule:: chancy.hub 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/chancy.job.rst: -------------------------------------------------------------------------------- 1 | chancy.job module 2 | ================= 3 | 4 | .. automodule:: chancy.job 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/chancy.migrate.rst: -------------------------------------------------------------------------------- 1 | chancy.migrate module 2 | ===================== 3 | 4 | .. automodule:: chancy.migrate 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/chancy.plugin.rst: -------------------------------------------------------------------------------- 1 | chancy.plugin module 2 | ==================== 3 | 4 | .. automodule:: chancy.plugin 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/chancy.plugins.api.rst: -------------------------------------------------------------------------------- 1 | API/Dashboard 2 | ============= 3 | 4 | .. automodule:: chancy.plugins.api 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | .. automodule:: chancy.plugins.api.plugin 10 | :members: 11 | :undoc-members: 12 | :show-inheritance: 13 | 14 | .. 
automodule:: chancy.plugins.api.core 15 | :members: 16 | :undoc-members: 17 | :show-inheritance: 18 | -------------------------------------------------------------------------------- /docs/chancy.plugins.cron.rst: -------------------------------------------------------------------------------- 1 | Cron 2 | ==== 3 | 4 | .. automodule:: chancy.plugins.cron 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | .. automodule:: chancy.plugins.cron.api 10 | :members: 11 | :undoc-members: 12 | :show-inheritance: 13 | -------------------------------------------------------------------------------- /docs/chancy.plugins.leadership.rst: -------------------------------------------------------------------------------- 1 | Leadership 2 | ========== 3 | 4 | .. automodule:: chancy.plugins.leadership 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/chancy.plugins.metrics.rst: -------------------------------------------------------------------------------- 1 | Metrics 2 | ======= 3 | 4 | .. automodule:: chancy.plugins.metrics 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/chancy.plugins.pruner.rst: -------------------------------------------------------------------------------- 1 | Pruner 2 | ====== 3 | 4 | .. automodule:: chancy.plugins.pruner 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/chancy.plugins.recovery.rst: -------------------------------------------------------------------------------- 1 | Recovery 2 | ======== 3 | 4 | .. 
automodule:: chancy.plugins.recovery 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/chancy.plugins.reprioritize.rst: -------------------------------------------------------------------------------- 1 | Reprioritization 2 | ================ 3 | 4 | .. automodule:: chancy.plugins.reprioritize 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/chancy.plugins.retry.rst: -------------------------------------------------------------------------------- 1 | Retry 2 | ===== 3 | 4 | .. automodule:: chancy.plugins.retry 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/chancy.plugins.rst: -------------------------------------------------------------------------------- 1 | Plugins 2 | ======= 3 | 4 | .. automodule:: chancy.plugins 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | Submodules 10 | ---------- 11 | 12 | .. toctree:: 13 | :maxdepth: 4 14 | 15 | chancy.plugins.leadership 16 | chancy.plugins.pruner 17 | chancy.plugins.recovery 18 | chancy.plugins.cron 19 | chancy.plugins.api 20 | chancy.plugins.workflow 21 | chancy.plugins.retry 22 | chancy.plugins.reprioritize 23 | chancy.plugins.sentry 24 | chancy.plugins.metrics -------------------------------------------------------------------------------- /docs/chancy.plugins.sentry.rst: -------------------------------------------------------------------------------- 1 | Sentry 2 | ====== 3 | 4 | .. 
automodule:: chancy.plugins.sentry 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/chancy.plugins.workflow.rst: -------------------------------------------------------------------------------- 1 | Workflows 2 | ========= 3 | 4 | .. autoclass:: chancy.plugins.workflow.WorkflowPlugin 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | .. autoclass:: chancy.plugins.workflow.Workflow 10 | :members: 11 | :undoc-members: 12 | :show-inheritance: 13 | 14 | .. autoclass:: chancy.plugins.workflow.WorkflowStep 15 | :members: 16 | :undoc-members: 17 | :show-inheritance: 18 | 19 | .. autoclass:: chancy.plugins.workflow.Sequence 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | 24 | .. automodule:: chancy.plugins.workflow.api 25 | :members: 26 | :undoc-members: 27 | :show-inheritance: 28 | -------------------------------------------------------------------------------- /docs/chancy.queue.rst: -------------------------------------------------------------------------------- 1 | chancy.queue module 2 | =================== 3 | 4 | .. automodule:: chancy.queue 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/chancy.rst: -------------------------------------------------------------------------------- 1 | Chancy API 2 | ========== 3 | 4 | Subpackages 5 | ----------- 6 | 7 | .. toctree:: 8 | :maxdepth: 4 9 | 10 | chancy.executors 11 | chancy.plugins 12 | 13 | Submodules 14 | ---------- 15 | 16 | .. 
toctree:: 17 | :maxdepth: 4 18 | 19 | chancy.app 20 | chancy.job 21 | chancy.queue 22 | chancy.worker 23 | chancy.plugin 24 | chancy.migrate 25 | chancy.hub 26 | chancy.rule 27 | chancy.utils 28 | chancy.errors -------------------------------------------------------------------------------- /docs/chancy.rule.rst: -------------------------------------------------------------------------------- 1 | chancy.plugins.rule module 2 | ========================== 3 | 4 | .. automodule:: chancy.rule 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/chancy.utils.rst: -------------------------------------------------------------------------------- 1 | chancy.utils module 2 | =================== 3 | 4 | .. automodule:: chancy.utils 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/chancy.worker.rst: -------------------------------------------------------------------------------- 1 | chancy.worker module 2 | ==================== 3 | 4 | .. automodule:: chancy.worker 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 
def linkcode_resolve(domain, info):
    """
    Resolve a GitHub source URL for the ``sphinx.ext.linkcode`` extension.

    :param domain: The documentation domain being linked (e.g. ``"py"``).
    :param info: A dict containing a ``"module"`` key with the dotted
        module path of the documented object.
    :return: The URL of the module's source file on GitHub, or ``None``
        when no source link should be generated.
    """
    if domain != "py":
        return None

    if not info["module"]:
        return None

    # Map the dotted module path to a file path within the repository.
    filename = info["module"].replace(".", "/")
    # BUG FIX: the original f-string had no placeholder, so every link
    # pointed at a constant, nonexistent file and `filename` was unused.
    return f"https://github.com/tktech/chancy/blob/main/{filename}.py"
-------------------------------------------------------------------------------- 1 | Design 2 | ====== 3 | 4 | .. warning:: 5 | 6 | This section is a work in progress. 7 | 8 | Chancy is designed to fit the vast majority of use cases, and to do so in a way 9 | that requires just the availability of a Postgres database. It's not meant to 10 | handle millions of jobs per second, but it should be able to handle thousands 11 | of jobs per second on a cheap VPS and handle workloads typical for 99% of 12 | applications. 13 | 14 | The Application 15 | --------------- 16 | 17 | A Chancy application is made when you define a :class:`~chancy.app.Chancy` 18 | instance. This object is responsible for managing the database connection, 19 | and exposes common functionality like pushing jobs to the queue or 20 | running migrations. The Chancy app itself has no running tasks associated 21 | with it. 22 | 23 | .. code-block:: python 24 | 25 | from chancy import Chancy 26 | 27 | async with Chancy("postgresql://localhost/postgres") as chancy: 28 | pass 29 | 30 | 31 | The Worker 32 | ---------- 33 | 34 | Now when we want to run a job, we do so using a long-running 35 | :class:`~chancy.worker.Worker` process. This worker process is responsible 36 | for pulling jobs from the queue, co-ordinating the execution of the job, and 37 | updating the job status in the database as it progresses. 38 | 39 | .. code-block:: python 40 | 41 | from chancy import Chancy, Worker 42 | 43 | async with Chancy("postgresql://localhost/postgres") as chancy: 44 | async with Worker(chancy) as worker: 45 | await worker.wait_for_shutdown() 46 | 47 | The worker will use Postgres' ``SELECT...FOR UPDATE SKIP LOCKED`` to guarantee 48 | that only one worker is running a given job at a time. If the worker happens 49 | to crash while running a job, the :class:`~chancy.plugins.recovery.Recovery` 50 | plugin will periodically restore them. 
51 | 52 | Each worker also listens for realtime events using Postgres' ``LISTEN/NOTIFY`` 53 | mechanism. This allows the worker to be notified of new jobs being pushed to 54 | the queue (among other things) nearly instantly. In the case that this fails, 55 | the worker will also poll the queue periodically. 56 | 57 | Leadership 58 | ---------- 59 | 60 | Instead of having a hardcoded "coordinator" process or requiring separate 61 | setup for periodic tasks like Celery's "beat" process, all of the workers 62 | in a Chancy application will periodically attempt to become the "leader". 63 | Certain plugins like :doc:`cron ` and 64 | :doc:`workflows ` will only run on the worker that 65 | currently holds leadership to prevent race conditions. 66 | 67 | The default leadership process is handled by the 68 | :class:`~chancy.plugins.leadership.Leadership` plugin, and can easily be 69 | replaced with a custom implementation. 70 | 71 | Multi-mode concurrency 72 | ---------------------- 73 | 74 | Each queue can use its own concurrency model, and multiple queues can mix and 75 | match models in the same worker. By default, queues use the 76 | :class:`~chancy.executors.process.ProcessExecutor` which is similar to the 77 | default in most other task queues. This means you can mix asyncio-based 78 | jobs which are crawling external APIs with a CPU-bound task aggregating 79 | the results and it'll just work. 80 | 81 | 82 | Extendable 83 | ---------- 84 | 85 | Almost all functionality beyond queue management and job fetching in Chancy is 86 | implemented as a plugin. This allows us to easily add new features without 87 | breaking backwards compatibility, and to make it easy to swap out the 88 | underlying implementation as your needs change. Workflows, cron jobs, 89 | job recovery, job pruning, and more are all implemented as swappable plugins. 
90 | 91 | This is especially useful for busy applications where you might need to tweak 92 | queries or behaviors to optimize for your specific use case. 93 | 94 | 95 | Reliable By Default 96 | ------------------- 97 | 98 | In Chancy, jobs are guaranteed to be run *at least once*. In the case of a 99 | worker crash, networking issues, or other failure, the job can be recovered 100 | with the :class:`~chancy.plugins.recovery.Recovery` plugin. This ensures 101 | that jobs are never lost, but care must be taken to ensure that jobs are 102 | idempotent. That is, the job should be able to be run multiple times without 103 | causing any side effects. 104 | 105 | This contrasts to Celery, which may lose jobs in the case of a worker crash 106 | if the `acks_late` setting is left on its default of disabled. -------------------------------------------------------------------------------- /docs/faq.rst: -------------------------------------------------------------------------------- 1 | FAQ 2 | === 3 | 4 | **Q.** Why? 5 | 6 | **A.** For the most part, due to the issues laid out in the 7 | :doc:`Compare with Celery `, which is the current uncontested 8 | incumbent in the Python task queue space. While there are many alternatives, 9 | none of them checked off all of the boxes needed for my existing projects 10 | to migrate off of Celery. 11 | 12 | ----- 13 | 14 | **Q.** Why not sell 'pro' features like Oban Pro and Sidekiq Pro? 15 | 16 | **A.** Chancy is not the product - it's a tool used in my *actual* products. 17 | Having a fully open and community-driven project benefits all of the projects 18 | using it. The more users using workflows, dashboards, etc the more reliable 19 | and robust the project becomes for the revenue-generating products built on it. 20 | 21 | ----- 22 | 23 | **Q.** Why ``Job`` instead of ``Task``? 24 | 25 | **A.** To simplify developer QoL when merging with existing codebases that 26 | already use Celery, Chancy uses the term "job" instead of "task". 
That 27 | way you can import: 28 | 29 | .. code-block:: python 30 | 31 | from chancy import Job 32 | from celery import Task 33 | 34 | ... and progressively migrate your codebase to Chancy without having to 35 | rewrite all of your tasks or alias the imports. 36 | 37 | ----- 38 | 39 | **Q.** Why can't I just do ``job.delay()``? 40 | 41 | **A.** Supporting ``job.delay()`` like Celery does would require global 42 | state to keep track of the currently active Chancy application. Chancy 43 | has **absolutely no global state** as a hard rule, which helps minimize 44 | the risk of bugs when dealing with many different types of concurrency 45 | models in a single application. 46 | 47 | 48 | ----- 49 | 50 | **Q.** Why not use advisory locks to make implementation of some features 51 | easier? 52 | 53 | **A.** Advisory locks don't support any form of namespacing - they're just 54 | a numeric ID. This makes them unsuitable for use in a multi-tenant system 55 | where you might have multiple different applications using the same database. 56 | Advisory locks also have several issues when used with pgbouncer that might 57 | stop us from officially supporting pgbouncer in the future. 58 | 59 | ----- 60 | 61 | **Q.** Why does Chancy not support other databases or message brokers? 62 | 63 | **A.** Chancy is intended to be highly customizable and reliant on the 64 | implementation of features through plugins. Requiring plugins to support 65 | multiple databases or message brokers would make them significantly more 66 | complex or force targeting a lowest common denominator. We'd rather get 67 | have everything we can out of Postgres and have a very narrow focus done 68 | well than to have a broad focus done poorly. 69 | 70 | Celery supports numerous backends and may be a better option of you're 71 | looking for portability, but this comes at the cost of complexity and 72 | limited features - no locking, rate limiting, workflows, etc. 
-------------------------------------------------------------------------------- /docs/howto/celery.rst: -------------------------------------------------------------------------------- 1 | Compare to Celery 2 | ================= 3 | 4 | `Celery`_ is the defacto standard for background task processing in Python, 5 | most commonly using RabbitMQ or Redis as a broker. It is a mature project with a 6 | large user base and a lot of documentation available. 7 | 8 | Celery is a good choice for many use cases, but it is not without its drawbacks, 9 | and Chancy was written specifically to address some of those drawbacks. 10 | 11 | Future Scheduling 12 | ----------------- 13 | 14 | When Celery schedules a task to run in the future using `eta` or `countdown`, 15 | it's pulled into the memory of a currently running worker and the worker's own 16 | scheduler is responsible for executing the task at the right time. 17 | 18 | This is fine if you only have a couple of tasks but can cause severe memory 19 | issues if you have a lot of tasks scheduled to run in the future. Too many 20 | jobs (~65k) also disables QoS for the worker, which can cause other issues. 21 | 22 | Chancy doesn't depend on the worker to schedule jobs in the future. It simply 23 | stores the timestamp it should execute at when it saves the job into the 24 | database and every worker is given a chance to pick up the job when it's time 25 | to run. 26 | 27 | Rate Limiting 28 | ------------- 29 | 30 | Celery has only very basic rate limiting support, which makes it almost 31 | impossible to properly do basic tasks like fetching from an API with a rate 32 | limit. 33 | 34 | Chancy has built-in support for globally rate limiting a queue. 35 | 36 | 37 | Asyncio-first (but everything else too) 38 | --------------------------------------- 39 | 40 | Celery predates the existence of asyncio, and to this day it doesn't have 41 | any support for running asyncio workers nor for executing asyncio tasks. 
42 | When your tasks are I/O bound, this can be a significant drawback, such as when 43 | calling external APIs. 44 | 45 | Chancy is built on top of asyncio and can offer drastically improved resource 46 | utilization for I/O bound tasks using its 47 | :class:`~chancy.executors.asyncex.AsyncExecutor`. 48 | 49 | Introspection 50 | ------------- 51 | 52 | Things go wrong - it's inevitable. When they do, you need to be able to 53 | introspect the state of your workers and queues to figure out what's going on. 54 | Unfortunately, this is often extremely difficult with Celery. 55 | 56 | Since Chancy is just using your existing Postgres database, you can simply 57 | query the database with plain SQL to figure out what's going on. Want to see 58 | how many of each type of job is in the queue? Just run a query. 59 | 60 | .. code-block:: sql 61 | 62 | SELECT func, COUNT(*) FROM chancy_jobs GROUP BY func; 63 | 64 | 65 | Chancy also comes with a built-in :class:`~chancy.plugins.api.Api` plugin that 66 | provides a dashboard for monitoring the state of your workers, queues, 67 | workflows, and cron jobs. No extra setup or services required. 68 | 69 | 70 | Mixed-mode Workers 71 | ------------------ 72 | 73 | Celery has a number of different worker pool implementations, like processes, 74 | gevent, eventlet and threads. However, a single worker process can only *use* 75 | one of these pools at a time. 76 | 77 | In Chancy, every queue can specify its own pool (which we call Executors), 78 | allowing a single worker to mix-and-match pools to optimize for different 79 | types of jobs without having to run multiple workers. One worker can have 80 | thousands of parallel asyncio tasks pulling reports from an external API 81 | while using another core to generate PDFs. 82 | 83 | 84 | .. 
_Celery: https://docs.celeryproject.org/en/stable/ -------------------------------------------------------------------------------- /docs/howto/context.rst: -------------------------------------------------------------------------------- 1 | Get The Job Context 2 | =================== 3 | 4 | Sometimes you need access to details about the job that is currently running. For 5 | example you might want to know the job's ID to log it, or the number of times the 6 | job has been retried. Getting the job context is easy: 7 | 8 | .. code-block:: python 9 | 10 | from chancy import QueuedJob, job 11 | 12 | @job() 13 | def my_job(*, context: QueuedJob): 14 | print(f"Job ID: {context.id}") 15 | print(f"Job attempts: {context.attempts}") 16 | 17 | 18 | That's it! When Chancy runs a job, it checks to see if the type signature for that 19 | job function includes a :class:`chancy.job.QueuedJob` and assumes you want the 20 | context for the job. 21 | 22 | .. tip:: 23 | 24 | The name of the argument doesn't matter, as long as the type is correct. For 25 | example, you could name the argument ``job_context`` instead of ``context``. 26 | 27 | The job context is immutable, *except* for the ``meta`` attribute, which you can 28 | use to store arbitrary data about the job: 29 | 30 | .. code-block:: python 31 | 32 | from chancy import QueuedJob, job 33 | 34 | @job() 35 | def my_job(*, context: QueuedJob): 36 | # This will raise an exception because the job context is 37 | # generally immutable. 38 | context.id = "new_id" 39 | # This will work because the meta attribute is mutable. 40 | context.meta["attempts"] = context.meta.get("attempts", 0) + 1 41 | 42 | 43 | -------------------------------------------------------------------------------- /docs/howto/django.rst: -------------------------------------------------------------------------------- 1 | Use with Django 2 | =============== 3 | 4 | Chancy works with both synchronous and asynchronous Django applications. 
You 5 | shouldn't need to make any changes to your existing code. 6 | 7 | Install chancy: 8 | 9 | .. code-block:: bash 10 | 11 | $ pip install chancy[cli,django] 12 | 13 | 14 | Using Django models and features in Chancy 15 | ------------------------------------------ 16 | 17 | Next to your Django ``settings.py`` module, create a new file called 18 | ``worker.py``. This file will contain the code that defines your chancy 19 | app: 20 | 21 | .. code-block:: python 22 | 23 | import os 24 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "my_application.settings") 25 | import django 26 | django.setup() 27 | from django.conf import settings 28 | 29 | from chancy import Chancy 30 | 31 | chancy_app = Chancy(settings.DATABASES["default"]) 32 | 33 | 34 | And then use the CLI to migrate the database and start a worker process: 35 | 36 | .. code-block:: bash 37 | 38 | chancy --app my_application.worker.chancy_app misc migrate 39 | chancy --app my_application.worker.chancy_app worker start 40 | 41 | 42 | This ensures django is fully setup before processing any jobs. 43 | 44 | 45 | Using Chancy from the django ORM and Admin 46 | ------------------------------------------ 47 | 48 | .. note:: 49 | 50 | This feature is new in version 0.22.0 and seeking feedback on desired 51 | functionality and use cases. 52 | 53 | Chancy can be used from the Django ORM and Admin interface. To do this, you 54 | need to add the following to your Django settings: 55 | 56 | .. code-block:: python 57 | 58 | INSTALLED_APPS = [ 59 | ... 60 | "chancy.contrib.django", 61 | ] 62 | 63 | 64 | This gives you access to the Jobs, Queues, and Workers models. Some plugins, 65 | like the Cron and Workflow plugins, also provide their own django extensions: 66 | 67 | .. code-block:: python 68 | 69 | INSTALLED_APPS = [ 70 | ... 
71 | "chancy.contrib.django", 72 | "chancy.plugins.cron.django", 73 | "chancy.plugins.workflow.django", 74 | ] 75 | 76 | Now you can create new cron jobs in the admin, query the status of your jobs, 77 | workers and workflows from the comfort of the Django ORM. 78 | 79 | .. code-block:: python 80 | 81 | from chancy.contrib.django.models import Job 82 | 83 | j = await chancy.push(test_job) 84 | 85 | orm_job = await Job.objects.aget(id=j.identifier) 86 | 87 | 88 | .. important:: 89 | 90 | The current implementation assumes that the chancy tables live in the same 91 | database as your Django "default" database. 92 | 93 | 94 | Login to Chancy with Django's superusers 95 | ---------------------------------------- 96 | 97 | To use Django's authentication system with Chancy's API and dashboard, 98 | you can use the included ``DjangoAuthBackend`` authentication backend. This 99 | backend supports lets you login to chancy using the username and password 100 | of a Django superuser. 101 | 102 | .. code-block:: python 103 | 104 | import os 105 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "my_application.settings") 106 | import django 107 | django.setup() 108 | from django.conf import settings 109 | from chancy.contrib.django.auth import DjangoAuthBackend 110 | 111 | app = Chancy( 112 | dsn=settings.DATABASES["default"], 113 | plugins=[ 114 | Api( 115 | authentication_backend=DjangoAuthBackend(), 116 | secret_key=settings.SECRET_KEY, 117 | ), 118 | ], 119 | ) -------------------------------------------------------------------------------- /docs/howto/fastapi.rst: -------------------------------------------------------------------------------- 1 | Use with Fastapi 2 | ================ 3 | 4 | While you can keep your Chancy worker separate from your FastAPI application, 5 | you can also run your Chancy worker in the same process as your FastAPI 6 | application by using FastAPI's lifespan events: 7 | 8 | .. 
code-block:: python 9 | 10 | import asyncio 11 | import contextlib 12 | from typing import AsyncIterator 13 | 14 | from fastapi import FastAPI 15 | from chancy import Chancy, Worker, Queue, job 16 | 17 | chancy = Chancy("postgresql://localhost/postgres") 18 | 19 | @contextlib.asynccontextmanager 20 | async def lifespan(app: FastAPI) -> AsyncIterator[None]: 21 | """ 22 | FastAPI lifespan handler that starts and stops the Chancy worker. 23 | This ensures the worker starts when FastAPI starts and shuts down properly. 24 | """ 25 | # Run the database migrations (don't do this in production) 26 | await chancy.migrate() 27 | 28 | # Declare any queues we need. 29 | await chancy.declare(Queue("default")) 30 | 31 | # Start the worker in the background and return control to FastAPI 32 | async with Worker(chancy) as worker: 33 | yield 34 | 35 | 36 | app = FastAPI(lifespan=lifespan) 37 | 38 | 39 | @job(queue="default") 40 | async def send_an_email(): 41 | print("Sending an email") 42 | 43 | 44 | @app.get("/") 45 | async def read_root(): 46 | await chancy.push(send_an_email) 47 | return {"Hello": "World"} 48 | 49 | This can be useful for small applications and simple deployments 50 | (like containers meant for UnRAID) where you don't want to manage multiple 51 | processes or containers. However, for larger applications, it's recommended 52 | to keep your worker separate from your FastAPI application. -------------------------------------------------------------------------------- /docs/howto/index.rst: -------------------------------------------------------------------------------- 1 | How To... 2 | ========= 3 | 4 | .. 
toctree:: 5 | :maxdepth: 2 6 | 7 | jobs.rst 8 | celery.rst 9 | django.rst 10 | fastapi.rst 11 | retry.rst 12 | context.rst 13 | log.rst -------------------------------------------------------------------------------- /docs/howto/jobs.rst: -------------------------------------------------------------------------------- 1 | Make Jobs 2 | ========= 3 | 4 | Jobs are the core of Chancy. They are the functions that are run by your 5 | workers. 6 | 7 | Creating a Job 8 | -------------- 9 | 10 | Use the :func:`~chancy.job.job` decorator to create a job: 11 | 12 | .. code-block:: python 13 | 14 | from chancy import job 15 | 16 | @job() 17 | def greet(): 18 | print(f"Hello world!") 19 | 20 | You can still call this function normally: 21 | 22 | .. code-block:: python 23 | 24 | >>> greet() 25 | Hello world! 26 | 27 | You can also specify the defaults for a job: 28 | 29 | .. code-block:: python 30 | 31 | from chancy import job 32 | 33 | @job(queue="default", priority=1, max_attempts=3, kwargs={"name": "World"}) 34 | def greet(*, name: str): 35 | print(f"Hello, {name}!") 36 | 37 | Jobs are immutable once created - use the `with_` methods on a Job to create 38 | a new job with modified properties: 39 | 40 | .. code-block:: python 41 | 42 | @job(queue="default", priority=1, max_attempts=3, kwargs={"name": "World"}) 43 | def greet(*, name: str): 44 | print(f"Hello, {name}!") 45 | 46 | async with Chancy("postgresql://localhost/postgres") as chancy: 47 | await chancy.push(greet.job.with_kwargs(name="Alice")) 48 | 49 | 50 | Queue a Job 51 | ----------- 52 | 53 | Once you've created a job, push it to the queue: 54 | 55 | .. code-block:: python 56 | 57 | async with Chancy("postgresql://localhost/postgres") as chancy: 58 | await chancy.push(greet) 59 | 60 | Queue multiple jobs at once: 61 | 62 | .. 
code-block:: python 63 | 64 | await chancy.push_many([job1, job2, job3]) 65 | 66 | Push returns a :class:`~chancy.job.Reference` object that can be used to 67 | retrieve the job instance later, or wait for it to complete: 68 | 69 | .. code-block:: python 70 | 71 | reference = await chancy.push(greet) 72 | finished_job = await chancy.wait_for_job(reference) 73 | assert finished_job.state == finished_job.State.SUCCEEDED 74 | 75 | Priority 76 | -------- 77 | 78 | Priority determines the order of execution. The higher the priority, the 79 | sooner the job will be executed: 80 | 81 | .. code-block:: python 82 | 83 | higher_priority_job = greet.job.with_priority(10) 84 | lower_priority_job = greet.job.with_priority(-10) 85 | 86 | Retry Attempts 87 | -------------- 88 | 89 | Specify how many times a job should be retried if it fails: 90 | 91 | .. code-block:: python 92 | 93 | greet.job.with_max_attempts(3) 94 | 95 | Scheduled Execution 96 | ------------------- 97 | 98 | Schedule a job to run some time in the future: 99 | 100 | .. code-block:: python 101 | 102 | from datetime import datetime, timedelta, timezone 103 | 104 | future_job = greet.job.with_scheduled_at( 105 | datetime.now(timezone.utc) + timedelta(hours=1) 106 | ) 107 | 108 | .. note:: 109 | 110 | Scheduled jobs are guaranteed to run *at* or *after* the scheduled time, 111 | but not *exactly* at that time. 112 | 113 | .. tip:: 114 | 115 | If you need recurring jobs, take a look at the 116 | :class:`~chancy.plugins.cron.Cron` plugin. 117 | 118 | Resource Limits 119 | --------------- 120 | 121 | Set memory and time limits for job execution: 122 | 123 | .. code-block:: python 124 | 125 | from chancy import Limit, job 126 | 127 | @job(limits=[ 128 | Limit(Limit.Type.MEMORY, 1024 * 1024 * 1024), 129 | Limit(Limit.Type.TIME, 60), 130 | ]) 131 | def greet(*, name: str): 132 | print(f"Hello, {name}!") 133 | 134 | Not all executors will support all types of limits. 
For example only 135 | the default :class:`~chancy.executors.process.ProcessExecutor` supports 136 | memory limits. 137 | 138 | Unique Jobs 139 | ----------- 140 | 141 | Prevent duplicate job execution by assigning a unique key: 142 | 143 | .. code-block:: python 144 | 145 | from chancy import job 146 | 147 | @job() 148 | def greet(*, name: str): 149 | print(f"Hello, {name}!") 150 | 151 | async with Chancy("postgresql://localhost/postgres") as chancy: 152 | await chancy.push(greet.job.with_unique_key("greet_alice").with_kwargs(name="Alice")) 153 | 154 | 155 | .. note:: 156 | 157 | Unique jobs ensure only one job with the same ``unique_key`` is 158 | queued or running at a time, but any number can be completed or 159 | failed. 160 | -------------------------------------------------------------------------------- /docs/howto/log.rst: -------------------------------------------------------------------------------- 1 | Customize Logging 2 | ================= 3 | 4 | Chancy will setup a default logger if you don't provide one which will log 5 | ``INFO`` and above to the console. If you want to customize the logging, such 6 | as to use an existing application logger, you can pass a logger to the 7 | :class:`~chancy.app.Chancy` constructor: 8 | 9 | .. code-block:: python 10 | 11 | import logging 12 | from chancy import Chancy 13 | 14 | logger = logging.getLogger("my_application") 15 | 16 | chancy_app = Chancy( 17 | settings.my_database_dsn, 18 | log=logger, 19 | ) 20 | 21 | If you want to modify the logger after it's been created, you can get it from 22 | the app: 23 | 24 | .. code-block:: python 25 | 26 | import logging 27 | 28 | chancy_app = Chancy(settings.my_database_dsn) 29 | chancy_app.log.setLevel(logging.DEBUG) 30 | 31 | .. tip:: 32 | 33 | Setting logging to ``DEBUG`` can be pretty useful when developing locally, 34 | since it'll give you stack traces from failed jobs in the console. 
Just 35 | remember to turn it back down to ``INFO`` or ``WARNING`` in production since 36 | you may otherwise accidentally log sensitive information. 37 | 38 | Or you can get it at any time using the normal global logging functions: 39 | 40 | .. code-block:: python 41 | 42 | import logging 43 | 44 | logger = logging.getLogger("chancy") 45 | logger.setLevel(logging.DEBUG) 46 | logger.addHandler(logging.StreamHandler()) 47 | -------------------------------------------------------------------------------- /docs/howto/retry.rst: -------------------------------------------------------------------------------- 1 | Use Advanced Retries 2 | ==================== 3 | 4 | When you create a Job, you can specify a ``max_attempts`` argument to control how 5 | many times the job will be retried when an exception occurs: 6 | 7 | .. code-block:: python 8 | 9 | from chancy import job 10 | 11 | @job(max_attempts=3) 12 | def my_job(): 13 | raise ValueError("This job should fail.") 14 | 15 | This is very simplistic, and sometimes you need more control over how retries are 16 | handled in your application. For example, you might want to retry a job only if a 17 | specific exception is raised, or ensure that a random jitter is applied to the 18 | delay between retries to prevent the thundering herd problem. 19 | 20 | Chancy comes with a :class:`~chancy.plugins.retry.RetryPlugin` plugin that supports 21 | backoff, jitter, exponential backoff, and more: 22 | 23 | .. 
code-block:: python 24 | 25 | from chancy import job 26 | from chancy.plugins.retry import RetryPlugin 27 | 28 | @job() 29 | def job_that_fails(): 30 | raise ValueError("This job should fail.") 31 | 32 | async with Chancy(..., plugins=[RetryPlugin()]) as chancy: 33 | await chancy.declare(Queue("default")) 34 | await chancy.push( 35 | job_that_fails.job.with_max_attempts(3).with_meta({ 36 | "retry_settings": { 37 | "backoff": 2, 38 | "backoff_factor": 3, 39 | "backoff_limit": 300, 40 | "backoff_jitter": [1, 5], 41 | } 42 | }) 43 | ) 44 | 45 | The ``RetryPlugin`` is very simple, being about 60 lines. You can easily use it as 46 | the basis for your own complex retry strategies. -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | %SPHINXBUILD% >NUL 2>NUL 14 | if errorlevel 9009 ( 15 | echo. 16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 17 | echo.installed, then set the SPHINXBUILD environment variable to point 18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 19 | echo.may add the Sphinx directory to PATH. 20 | echo. 
21 | echo.If you don't have Sphinx installed, grab it from 22 | echo.https://www.sphinx-doc.org/ 23 | exit /b 1 24 | ) 25 | 26 | if "%1" == "" goto help 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/similar.rst: -------------------------------------------------------------------------------- 1 | Similar Projects 2 | ---------------- 3 | 4 | With the addition of modern Postgres features like ``LISTEN/NOTIFY`` and 5 | ``SELECT FOR UPDATE...SKIP LOCKED``, postgres-backed task queues have 6 | become a viable alternative to other task queues built on RabbitMQ or 7 | redis like celery_. As such the space is exploding with new projects. 8 | Here are some of the most popular ones if Chancy doesn't fit your 9 | needs: 10 | 11 | .. list-table:: 12 | :header-rows: 1 13 | :widths: 20 20 60 14 | 15 | * - Project 16 | - Language 17 | - Note 18 | * - celery_ 19 | - Python 20 | - The de facto Python task queue 21 | * - procrastinate_ 22 | - Python 23 | - 24 | * - oban_ 25 | - Elixir 26 | - Inspired many of the features in Chancy 27 | * - river_ 28 | - Go 29 | - 30 | * - neoq_ 31 | - Go 32 | - 33 | * - faktory_ 34 | - Go 35 | - 36 | * - pg-boss_ 37 | - Node.js 38 | - 39 | * - graphile_ 40 | - Node.js 41 | - 42 | * - Minion_ 43 | - Perl 44 | - 45 | 46 | .. _celery: https://docs.celeryproject.org/en/stable/ 47 | .. _oban: https://hexdocs.pm/oban/Oban.html 48 | .. _river: https://github.com/riverqueue/river 49 | .. _procrastinate: https://procrastinate.readthedocs.io/ 50 | .. _graphile: https://worker.graphile.org/ 51 | .. _neoq: https://github.com/acaloiaro/neoq 52 | .. _faktory: https://github.com/contribsys/faktory 53 | .. _pg-boss: https://github.com/timgit/pg-boss 54 | ..
_Minion: https://github.com/mojolicious/minion 55 | -------------------------------------------------------------------------------- /misc/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TkTech/chancy/e4d80ea6eea3e0f1ddd5fb0df09cbbc60fd924e0/misc/logo.png -------------------------------------------------------------------------------- /misc/logo_small.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TkTech/chancy/e4d80ea6eea3e0f1ddd5fb0df09cbbc60fd924e0/misc/logo_small.png -------------------------------------------------------------------------------- /misc/ux_job_failed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TkTech/chancy/e4d80ea6eea3e0f1ddd5fb0df09cbbc60fd924e0/misc/ux_job_failed.png -------------------------------------------------------------------------------- /misc/ux_jobs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TkTech/chancy/e4d80ea6eea3e0f1ddd5fb0df09cbbc60fd924e0/misc/ux_jobs.png -------------------------------------------------------------------------------- /misc/ux_queue.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TkTech/chancy/e4d80ea6eea3e0f1ddd5fb0df09cbbc60fd924e0/misc/ux_queue.png -------------------------------------------------------------------------------- /misc/ux_worker.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TkTech/chancy/e4d80ea6eea3e0f1ddd5fb0df09cbbc60fd924e0/misc/ux_worker.png -------------------------------------------------------------------------------- /misc/ux_workflow.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/TkTech/chancy/e4d80ea6eea3e0f1ddd5fb0df09cbbc60fd924e0/misc/ux_workflow.png -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "chancy" 3 | version = "0.24.1" 4 | description = "A simple and flexible job queue for Python" 5 | readme = "README.md" 6 | requires-python = ">=3.11" 7 | authors = [ 8 | { name = "Tyler Kennedy", email = "tk@tkte.ch" } 9 | ] 10 | dependencies = [ 11 | "psycopg[pool]>=3.2.1", 12 | ] 13 | classifiers = [ 14 | "Development Status :: 4 - Beta", 15 | "Intended Audience :: Developers", 16 | "License :: OSI Approved :: MIT License", 17 | 18 | "Programming Language :: Python :: 3", 19 | "Programming Language :: Python :: 3.11", 20 | "Programming Language :: Python :: 3.12", 21 | "Programming Language :: Python :: 3.13", 22 | ] 23 | 24 | [project.urls] 25 | Repository = "https://github.com/tktech/chancy" 26 | Documentation = "https://tkte.ch/chancy/" 27 | 28 | [project.optional-dependencies] 29 | cron = ["croniter"] 30 | web = [ 31 | "starlette", 32 | "uvicorn[standard]", 33 | "itsdangerous", 34 | ] 35 | cli = [ 36 | "click" 37 | ] 38 | sub = [ 39 | "interpreters-pep-734>=0.4.1; python_version >= '3.13'", 40 | ] 41 | django = [ 42 | "django>=4.0.0", 43 | ] 44 | 45 | [project.scripts] 46 | chancy = "chancy.cli.cli:main" 47 | 48 | [tool.uv] 49 | dev-dependencies = [ 50 | "pytest-asyncio>=0.24.0", 51 | "pytest>=8.2.0", 52 | "sphinx>=7.3.7", 53 | "sphinx-inline-tabs>=2023.4.21", 54 | "furo>=2024.5.6", 55 | "pytest-cov>=5.0.0", 56 | "sentry-sdk>=2.3.1", 57 | "ghp-import>=2.1.0", 58 | "pytest-benchmark>=5.1.0", 59 | "pytest-django>=4.8.0", 60 | ] 61 | 62 | [tool.hatch.build.targets.wheel] 63 | artifacts = [ 64 | "chancy/plugins/api/dist/**" 65 | ] 66 | 67 | [tool.hatch.build.targets.sdist] 68 | artifacts = [ 69 | "chancy/plugins/api/dist/**" 70 | ] 71 | 72 | 
[build-system] 73 | requires = ["hatchling"] 74 | build-backend = "hatchling.build" 75 | 76 | [tool.pytest.ini_options] 77 | asyncio_default_fixture_loop_scope = "function" 78 | DJANGO_SETTINGS_MODULE = "tests.contrib.django.settings" 79 | django_find_project = false 80 | 81 | [tool.ruff] 82 | line-length = 80 83 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from typing import AsyncIterator 3 | 4 | import pytest 5 | import pytest_asyncio 6 | import sys 7 | 8 | from chancy import Chancy, Worker 9 | 10 | 11 | @pytest.fixture(scope="session") 12 | def event_loop_policy(): 13 | # Since psycopg's asyncio implementation cannot use the default 14 | # proactor event loop on Windows, we need to use the selector event loop. 15 | if sys.platform == "win32": 16 | return asyncio.WindowsSelectorEventLoopPolicy() 17 | return asyncio.DefaultEventLoopPolicy() 18 | 19 | 20 | @pytest_asyncio.fixture() 21 | async def chancy(request): 22 | """ 23 | Provides a Chancy application instance with an open connection pool 24 | to the test database. 25 | """ 26 | async with Chancy( 27 | "postgresql://postgres:localtest@localhost:8190/postgres", 28 | **getattr(request, "param", {}), 29 | ) as chancy: 30 | await chancy.migrate() 31 | yield chancy 32 | await chancy.migrate(to_version=0) 33 | 34 | 35 | @pytest.fixture 36 | def chancy_just_app(): 37 | """ 38 | Provides just a configured chancy instance with no open connection pool 39 | or migrations. 40 | """ 41 | return Chancy( 42 | "postgresql://postgres:localtest@localhost:8190/postgres", 43 | ) 44 | 45 | 46 | @pytest_asyncio.fixture() 47 | async def worker(request, chancy) -> AsyncIterator[Worker]: 48 | """ 49 | Starts and returns a Worker and the task associated with it. 50 | 51 | If the worker is not stopped by the time the test completes, it will be 52 | cancelled. 
53 | """ 54 | async with Worker( 55 | chancy, shutdown_timeout=60, **getattr(request, "param", {}) 56 | ) as worker: 57 | yield worker 58 | 59 | 60 | @pytest_asyncio.fixture() 61 | async def worker_no_start(chancy) -> Worker: 62 | """ 63 | Returns a Worker instance that has not been started. 64 | """ 65 | return Worker(chancy) 66 | 67 | 68 | @pytest.fixture( 69 | params=( 70 | [Chancy.Executor.Process, Chancy.Executor.Threaded] 71 | + ( 72 | [Chancy.Executor.SubInterpreter] 73 | if sys.version_info >= (3, 13) 74 | else [] 75 | ) 76 | ) 77 | ) 78 | def sync_executor(request): 79 | """ 80 | Provides a parameterized fixture for all sync executors. 81 | """ 82 | return request.param 83 | 84 | 85 | @pytest.fixture(params=[Chancy.Executor.Async]) 86 | def async_executor(request): 87 | """ 88 | Provides a parameterized fixture for all async executors. 89 | """ 90 | return request.param 91 | -------------------------------------------------------------------------------- /tests/contrib/django/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from django.conf import settings 3 | 4 | 5 | @pytest.fixture(scope="session") 6 | def django_db_setup(): 7 | settings.DATABASES["default"] = { 8 | "ENGINE": "django.db.backends.postgresql", 9 | "NAME": "postgres", 10 | "USER": "postgres", 11 | "PASSWORD": "localtest", 12 | "HOST": "localhost", 13 | "PORT": "8190", 14 | } 15 | -------------------------------------------------------------------------------- /tests/contrib/django/settings.py: -------------------------------------------------------------------------------- 1 | """ 2 | Django test settings for chancy.contrib.django tests. 
3 | """ 4 | 5 | INSTALLED_APPS = [ 6 | "chancy.contrib.django.apps.ChancyConfig", 7 | ] 8 | -------------------------------------------------------------------------------- /tests/contrib/django/test_connection.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from chancy import Chancy 4 | 5 | 6 | @pytest.mark.django_db 7 | @pytest.mark.asyncio 8 | async def test_django_style_connection(settings): 9 | async with Chancy(settings.DATABASES["default"]) as app: 10 | await app.migrate() 11 | await app.migrate(to_version=0) 12 | -------------------------------------------------------------------------------- /tests/contrib/django/test_models.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for the Django models integration. 3 | """ 4 | 5 | import pytest 6 | 7 | from chancy import job 8 | 9 | 10 | @job() 11 | def test_job(): 12 | pass 13 | 14 | 15 | @pytest.mark.django_db 16 | @pytest.mark.asyncio 17 | async def test_chancy_job_django_query(chancy, worker): 18 | """ 19 | Test that a job created in Chancy can be queried using the Django ORM. 20 | """ 21 | from chancy.contrib.django.models import Job 22 | 23 | j = await chancy.push(test_job) 24 | 25 | orm_job = await Job.objects.aget(id=j.identifier) 26 | 27 | assert orm_job.id == j.identifier 28 | assert orm_job.queue == "default" 29 | assert orm_job.func.endswith("test_job") 30 | 31 | 32 | @pytest.mark.django_db 33 | @pytest.mark.asyncio 34 | async def test_chancy_worker_django_query(chancy, worker): 35 | """ 36 | Test that we can query the Worker table. 
37 | """ 38 | from chancy.contrib.django.models import Worker 39 | 40 | orm_worker = await Worker.objects.aget(worker_id=worker.worker_id) 41 | 42 | assert orm_worker.worker_id == worker.worker_id 43 | assert len(orm_worker.tags) > 1 and "*" in orm_worker.tags 44 | 45 | 46 | @pytest.mark.django_db 47 | @pytest.mark.asyncio 48 | async def test_chancy_queue_django_query(chancy, worker): 49 | """ 50 | Test that we can query the Queue table. 51 | """ 52 | from chancy.contrib.django.models import Queue 53 | 54 | orm_queue = await Queue.objects.aget(name="default") 55 | 56 | assert orm_queue.name == "default" 57 | assert orm_queue.state == "active" 58 | assert orm_queue.concurrency is None 59 | assert len(orm_queue.tags) == 1 60 | -------------------------------------------------------------------------------- /tests/plugins/test_leadership.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import pytest 4 | 5 | from chancy.app import Chancy 6 | from chancy.worker import Worker 7 | from chancy.plugins.leadership import ImmediateLeadership, Leadership 8 | 9 | 10 | @pytest.mark.parametrize( 11 | "chancy", 12 | [ 13 | { 14 | "plugins": [ 15 | ImmediateLeadership(), 16 | ], 17 | "no_default_plugins": True, 18 | } 19 | ], 20 | indirect=True, 21 | ) 22 | @pytest.mark.asyncio 23 | async def test_immediate_leadership(chancy: Chancy, worker: Worker): 24 | """ 25 | Ensures that the immediate leadership plugin works as expected. 26 | """ 27 | assert worker.is_leader.is_set() 28 | 29 | 30 | @pytest.mark.parametrize( 31 | "chancy", 32 | [ 33 | { 34 | "plugins": [ 35 | Leadership(poll_interval=5), 36 | ], 37 | "no_default_plugins": True, 38 | } 39 | ], 40 | indirect=True, 41 | ) 42 | @pytest.mark.asyncio 43 | async def test_leadership(chancy: Chancy, worker: Worker): 44 | """ 45 | Ensures that the leadership plugin works as expected. 
46 | """ 47 | await asyncio.sleep(10) 48 | assert worker.is_leader.is_set() 49 | 50 | 51 | @pytest.mark.parametrize( 52 | "chancy", 53 | [ 54 | { 55 | "plugins": [ 56 | Leadership(poll_interval=5, timeout=10), 57 | ], 58 | "no_default_plugins": True, 59 | } 60 | ], 61 | indirect=True, 62 | ) 63 | @pytest.mark.asyncio 64 | async def test_leadership_many_workers(chancy: Chancy): 65 | """ 66 | Ensures that the leadership plugin works as expected with several workers. 67 | """ 68 | workers = [Worker(chancy) for _ in range(10)] 69 | for worker in workers: 70 | await worker.start() 71 | 72 | await asyncio.sleep(15) 73 | leaders = [worker for worker in workers if worker.is_leader.is_set()] 74 | assert len(leaders) == 1 75 | 76 | for worker in workers: 77 | await worker.stop() 78 | 79 | 80 | @pytest.mark.parametrize( 81 | "chancy", 82 | [ 83 | { 84 | "plugins": [ 85 | Leadership(poll_interval=5, timeout=10), 86 | ], 87 | "no_default_plugins": True, 88 | } 89 | ], 90 | indirect=True, 91 | ) 92 | @pytest.mark.asyncio 93 | async def test_leadership_transition(chancy: Chancy): 94 | """ 95 | Ensures that leadership transitions work as expected. 96 | """ 97 | worker1 = Worker(chancy, worker_id="1") 98 | worker2 = Worker(chancy, worker_id="2") 99 | 100 | await worker1.start() 101 | await worker2.start() 102 | 103 | await asyncio.sleep(15) 104 | 105 | # Only one worker should have become the leader. 106 | assert worker1.is_leader.is_set() != worker2.is_leader.is_set() 107 | 108 | # Who got the leadership. 
109 | who_leads = worker1 if worker1.is_leader.is_set() else worker2 110 | who_follows = worker2 if worker1.is_leader.is_set() else worker1 111 | 112 | await who_leads.stop() 113 | 114 | await asyncio.sleep(15) 115 | 116 | assert who_follows.is_leader.is_set() 117 | await who_follows.stop() 118 | -------------------------------------------------------------------------------- /tests/plugins/test_pruner.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import pytest 4 | from psycopg.rows import dict_row 5 | 6 | from chancy import Chancy, Queue, Worker, job 7 | from chancy.plugins.pruner import Pruner 8 | from chancy.plugins.leadership import ImmediateLeadership 9 | 10 | 11 | @job() 12 | def job_to_run(): 13 | pass 14 | 15 | 16 | @pytest.mark.parametrize( 17 | "chancy", 18 | [ 19 | { 20 | "plugins": [ 21 | ImmediateLeadership(), 22 | ], 23 | "no_default_plugins": True, 24 | } 25 | ], 26 | indirect=True, 27 | ) 28 | @pytest.mark.asyncio 29 | async def test_pruner_functionality(chancy: Chancy, worker: Worker): 30 | """ 31 | This test manually calls the prune method to avoid timing issues. 
32 | """ 33 | p = Pruner(Pruner.Rules.Queue() == "test_queue") 34 | await chancy.declare(Queue("test_queue")) 35 | 36 | ref = await chancy.push(job_to_run.job.with_queue("test_queue")) 37 | initial_job = await chancy.wait_for_job(ref) 38 | assert initial_job is not None, "Job should exist before pruning" 39 | 40 | async with chancy.pool.connection() as conn: 41 | async with conn.cursor(row_factory=dict_row) as cursor: 42 | await p.prune(chancy, cursor) 43 | 44 | pruned_job = await chancy.get_job(ref) 45 | assert pruned_job is None, "Job should be pruned" 46 | 47 | p = Pruner( 48 | (Pruner.Rules.Queue() == "test_queue") & (Pruner.Rules.Age() > 10) 49 | ) 50 | ref = await chancy.push(job_to_run.job.with_queue("test_queue")) 51 | initial_job = await chancy.wait_for_job(ref) 52 | assert initial_job is not None, "Job should exist before pruning" 53 | 54 | async with chancy.pool.connection() as conn: 55 | async with conn.cursor(row_factory=dict_row) as cursor: 56 | await p.prune(chancy, cursor) 57 | 58 | not_pruned_job = await chancy.get_job(ref) 59 | assert not_pruned_job is not None, "Job should not be pruned yet" 60 | 61 | await asyncio.sleep(10) 62 | 63 | async with chancy.pool.connection() as conn: 64 | async with conn.cursor(row_factory=dict_row) as cursor: 65 | await p.prune(chancy, cursor) 66 | 67 | pruned_job = await chancy.get_job(ref) 68 | assert pruned_job is None, "Job should be pruned" 69 | -------------------------------------------------------------------------------- /tests/plugins/test_reprioritization.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import datetime 3 | 4 | import pytest 5 | 6 | from chancy import Job, Queue, Chancy, Worker 7 | from chancy.plugins.leadership import ImmediateLeadership 8 | from chancy.plugins.reprioritize import Reprioritize 9 | from chancy.rule import JobRules 10 | 11 | 12 | def simple_job(): 13 | pass 14 | 15 | 16 | @pytest.mark.parametrize( 17 | "chancy", 18 | 
[ 19 | { 20 | "plugins": [ 21 | ImmediateLeadership(), 22 | Reprioritize( 23 | JobRules.Age() > 0, 24 | check_interval=1, 25 | priority_increase=5, 26 | ), 27 | ], 28 | "no_default_plugins": True, 29 | } 30 | ], 31 | indirect=True, 32 | ) 33 | @pytest.mark.asyncio 34 | async def test_basic_reprioritization(chancy: Chancy, worker: Worker): 35 | """ 36 | Tests that basic reprioritization works as expected. 37 | """ 38 | await chancy.declare(Queue("default")) 39 | 40 | ref = await chancy.push( 41 | Job.from_func(simple_job).with_scheduled_at( 42 | datetime.datetime.now(tz=datetime.timezone.utc) 43 | + datetime.timedelta(minutes=10) 44 | ) 45 | ) 46 | 47 | initial_job = await chancy.get_job(ref) 48 | initial_priority = initial_job.priority 49 | 50 | await asyncio.sleep(2) 51 | 52 | updated_job = await chancy.get_job(ref) 53 | 54 | # Priority should have increased (making it more important) 55 | assert updated_job.priority > initial_priority 56 | -------------------------------------------------------------------------------- /tests/plugins/test_retry.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | import pytest 4 | 5 | from chancy import Job, Queue, QueuedJob, Worker 6 | from chancy.plugins.retry import RetryPlugin 7 | 8 | 9 | def job_that_fails(): 10 | raise ValueError("This job should fail") 11 | 12 | 13 | def successful_job(): 14 | pass 15 | 16 | 17 | @pytest.mark.parametrize( 18 | "chancy", 19 | [{"plugins": [RetryPlugin()], "no_default_plugins": True}], 20 | indirect=True, 21 | ) 22 | @pytest.mark.asyncio 23 | async def test_retry_with_default_settings(chancy, worker: Worker): 24 | """Test that jobs retry with default settings when no retry_settings in meta""" 25 | await chancy.declare(Queue("default")) 26 | 27 | ref = await chancy.push(Job.from_func(job_that_fails, max_attempts=3)) 28 | 29 | job = await chancy.wait_for_job(ref, timeout=30) 30 | assert job.state == QueuedJob.State.FAILED 31 | assert 
# NOTE(review): this chunk of the flattened dump begins mid-function. The
# fragment below is the tail of a retry test whose `def` lies before this
# chunk; it is preserved here as a comment because it cannot stand alone:
#
#     ...job.attempts == 3
#     assert len(job.errors) == 3
#
#     # Verify error structure
#     for i, error in enumerate(job.errors):
#         assert error["attempt"] == i
#         assert "ValueError: This job should fail" in error["traceback"]


@pytest.mark.parametrize(
    "chancy",
    [{"plugins": [RetryPlugin()], "no_default_plugins": True}],
    indirect=True,
)
@pytest.mark.asyncio
async def test_retry_with_custom_settings(chancy, worker: Worker):
    """Test that jobs retry with custom settings from meta"""
    await chancy.declare(Queue("default"))

    retry_settings = {
        "backoff": 2,
        "backoff_factor": 3,
        "backoff_limit": 10,
        "backoff_jitter": [0, 1],
    }

    starting_time = datetime.datetime.now(tz=datetime.timezone.utc)
    ref = await chancy.push(
        Job.from_func(
            job_that_fails,
            max_attempts=3,
            meta={"retry_settings": retry_settings},
        )
    )

    job = await chancy.wait_for_job(ref, timeout=30)
    assert job.state == QueuedJob.State.FAILED
    assert job.attempts == 3
    assert len(job.errors) == 3
    # The custom settings should survive the retry round-trips untouched.
    assert job.meta["retry_settings"] == retry_settings

    # Very flaky test, but we can at least check that the backoff is roughly
    # correct. Depends heavily on polling frequency of the worker.
    delta: datetime.timedelta = job.scheduled_at - starting_time
    assert 2 * 3 <= int(delta.total_seconds()) <= 20  # 10 + default poll freq


@pytest.mark.parametrize(
    "chancy",
    [{"plugins": [RetryPlugin()], "no_default_plugins": True}],
    indirect=True,
)
@pytest.mark.asyncio
async def test_no_retry_on_success(chancy, worker: Worker):
    """Test that successful jobs don't trigger retry logic"""
    await chancy.declare(Queue("default"))

    ref = await chancy.push(
        Job.from_func(
            successful_job,
            max_attempts=3,
            meta={"retry_settings": {"backoff": 1}},
        )
    )

    job = await chancy.wait_for_job(ref, timeout=30)
    assert job.state == QueuedJob.State.SUCCEEDED
    # A first-try success must never accrue extra attempts or errors.
    assert job.attempts == 1
    assert len(job.errors) == 0


@pytest.mark.parametrize(
    "chancy",
    [{"plugins": [RetryPlugin()], "no_default_plugins": True}],
    indirect=True,
)
@pytest.mark.asyncio
async def test_respect_max_attempts(chancy, worker: Worker):
    """Test that jobs don't retry beyond max_attempts"""
    await chancy.declare(Queue("default"))

    ref = await chancy.push(
        Job.from_func(
            job_that_fails,
            max_attempts=2,
            meta={
                "retry_settings": {
                    "backoff": 1,
                    "backoff_factor": 2,
                    "backoff_limit": 10,
                    "backoff_jitter": [0, 1],
                }
            },
        )
    )

    job = await chancy.wait_for_job(ref, timeout=30)
    assert job.state == QueuedJob.State.FAILED
    # max_attempts=2 is a hard ceiling: exactly 2 attempts, exactly 2 errors.
    assert job.attempts == 2
    assert len(job.errors) == 2


# ---------------------------------------------------------------------------
# /tests/test_explicit_pool.py
# ---------------------------------------------------------------------------
import pytest

from chancy import Chancy, QueuedJob, Job, Queue


def job_to_run():
    return


@pytest.mark.asyncio
async def test_basic_job(chancy_just_app: Chancy):
    """
    Simply test that we can push a job, and it runs successfully.
    """
    await chancy_just_app.migrate()
    await chancy_just_app.declare(Queue("default"))
    ref = await chancy_just_app.push(Job.from_func(job_to_run))
    job = await chancy_just_app.get_job(ref)
    # No worker is running in this test, so the pushed job stays PENDING.
    assert job.state == QueuedJob.State.PENDING
    # Roll the schema back down and close the explicitly-managed pool.
    await chancy_just_app.migrate(to_version=0)
    await chancy_just_app.pool.close()


# ---------------------------------------------------------------------------
# /tests/test_scale.py
# ---------------------------------------------------------------------------
from datetime import datetime, timezone

import pytest
from psycopg import sql
from psycopg.rows import dict_row

from chancy import Chancy, Job, Queue, Worker
from chancy.utils import chancy_uuid, timed_block


def dummy_job():
    pass


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "chancy",
    [
        {
            "notifications": False,
        }
    ],
    indirect=True,
)
async def test_busy_chancy(chancy: Chancy, worker_no_start: Worker):
    """
    Tests the performance of the worker when the Chancy instance has 10 million
    completed jobs and 40,000 pending jobs.

    This is NOT testing optimal cases - it's here to catch egregious performance
    regressions. For example our first test checks job fetches are under 100ms,
    but it's typically sub 1ms in reality.
    """
    queue = Queue("default", concurrency=10)
    await chancy.declare(queue)

    async with chancy.pool.connection() as conn:
        async with conn.cursor(row_factory=dict_row) as cursor:
            # Seed the jobs table via COPY ... FROM STDIN — orders of magnitude
            # faster than row-by-row INSERTs for the ~10M rows written below.
            async with cursor.copy(
                sql.SQL(
                    "COPY {jobs_table} ("
                    " id,"
                    " queue,"
                    " state,"
                    " func,"
                    " kwargs,"
                    " meta,"
                    " created_at,"
                    " scheduled_at"
                    ")"
                    " FROM STDIN"
                ).format(jobs_table=sql.Identifier(f"{chancy.prefix}jobs"))
            ) as copy:
                # 10M already-succeeded jobs: bulk that fetches must skip over.
                for i in range(10_000_000):
                    await copy.write_row(
                        (
                            chancy_uuid(),
                            "default",
                            "succeeded",
                            "dummy_job",
                            "{}",
                            "{}",
                            datetime.now(timezone.utc),
                            datetime.now(timezone.utc),
                        )
                    )

                j = Job.from_func(dummy_job)

                # 40k pending jobs: the live backlog the worker fetches from.
                for i in range(40_000):
                    await copy.write_row(
                        (
                            chancy_uuid(),
                            "default",
                            "pending",
                            j.func,
                            "{}",
                            "{}",
                            datetime.now(timezone.utc),
                            datetime.now(timezone.utc),
                        )
                    )

        await conn.commit()

        # Regression guard only — real fetches are typically sub-millisecond
        # (see docstring); 100ms is a deliberately generous ceiling.
        with timed_block() as timer:
            await worker_no_start.fetch_jobs(queue, conn, up_to=1)
        assert timer.elapsed < 0.1

        with timed_block() as timer:
            await worker_no_start.fetch_jobs(queue, conn, up_to=100)
        assert timer.elapsed < 0.1


# ---------------------------------------------------------------------------
# /tests/test_worker.py
# ---------------------------------------------------------------------------
import time
import asyncio

import pytest

from chancy import Worker, Chancy, Queue, QueuedJob, job
from chancy.errors import MigrationsNeededError


@job()
def job_to_run():
    return


@job()
def job_that_fails():
    raise ValueError("This job should fail.")


@job()
def job_that_sleeps():
    # Sleeps long enough to still be running while a test reconfigures its
    # queue underneath it.
    time.sleep(0.5)
    return


@pytest.mark.asyncio
async def test_queue_update(chancy: Chancy, worker: Worker):
    """
    Test that updating a queue's configuration successfully reconfigures the
    executor.
    """
    await chancy.declare(
        Queue(
            "test_update",
            concurrency=1,
            executor="chancy.executors.process.ProcessExecutor",
            polling_interval=5,
        ),
        upsert=True,
    )

    ref1 = await chancy.push(job_that_sleeps.job.with_queue("test_update"))

    # Let the sleeping job get picked up before the queue is redeclared.
    await asyncio.sleep(0.5)

    await chancy.declare(
        Queue(
            "test_update",
            concurrency=2,
            executor="chancy.executors.thread.ThreadedExecutor",
            polling_interval=2,
        ),
        upsert=True,
    )

    # The first job should still complete successfully
    job1 = await chancy.wait_for_job(ref1, timeout=10)
    assert job1.state == QueuedJob.State.SUCCEEDED

    # Push another job to the queue with the new configuration
    ref2 = await chancy.push(job_to_run.job.with_queue("test_update"))
    job2 = await chancy.wait_for_job(ref2, timeout=10)
    assert job2.state == QueuedJob.State.SUCCEEDED

    # Check the executor has been updated in the worker
    assert (
        worker.executors["test_update"].__class__.__name__ == "ThreadedExecutor"
    )


@pytest.mark.asyncio
async def test_queue_removal(chancy: Chancy, worker: Worker):
    """
    Test that removing a queue properly cleans up its executor.
    """
    await chancy.declare(
        Queue(
            "test_removal",
            concurrency=1,
            polling_interval=1,
        ),
        upsert=True,
    )

    # Push a job and let it complete
    ref = await chancy.push(job_to_run.job.with_queue("test_removal"))
    j = await chancy.wait_for_job(ref, timeout=30)
    assert j.state == QueuedJob.State.SUCCEEDED

    # Verify the executor exists
    assert "test_removal" in worker.executors

    await chancy.delete_queue("test_removal", purge_jobs=True)
    await worker.hub.wait_for("worker.queue.removed", timeout=30)

    # Give the executor time to clean up
    await asyncio.sleep(5)

    assert "test_removal" not in worker.executors


@pytest.mark.asyncio
async def test_error_on_needed_migrations(chancy_just_app: Chancy):
    """
    Test that an error is raised if there are migrations that need to be
    applied before starting the worker.
    """
    # chancy_just_app has not been migrated, so worker startup must refuse.
    with pytest.raises(MigrationsNeededError):
        async with chancy_just_app:
            async with Worker(chancy_just_app):
                pass


@pytest.mark.asyncio
async def test_immediate_processing(chancy: Chancy, worker: Worker):
    """
    Test that the worker processes jobs immediately when receiving queue.pushed
    notifications instead of waiting for the full polling interval.
    """
    # polling_interval=60 guarantees that any completion within the short
    # timeout below came from a push notification, not from polling.
    await chancy.declare(Queue("test_immediate", polling_interval=60))
    await worker.hub.wait_for("worker.queue.started")
    await asyncio.sleep(5)

    j = await chancy.push(job_to_run.job.with_queue("test_immediate"))

    result = await chancy.wait_for_job(
        j,
        interval=1,
        timeout=5,  # Short timeout since we expect immediate processing
    )

    assert result.state == QueuedJob.State.SUCCEEDED