├── .circleci └── config.yml ├── .gitignore ├── .pre-commit-config.yaml ├── LICENSE ├── README.md ├── assets └── beanhub.svg ├── bq ├── __init__.py ├── app.py ├── cmds │ ├── __init__.py │ ├── cli.py │ ├── create_tables.py │ ├── environment.py │ ├── main.py │ ├── process.py │ ├── submit.py │ └── utils.py ├── config.py ├── constants.py ├── db │ ├── __init__.py │ ├── base.py │ └── session.py ├── events.py ├── models │ ├── __init__.py │ ├── event.py │ ├── helpers.py │ ├── task.py │ └── worker.py ├── processors │ ├── __init__.py │ ├── processor.py │ ├── registry.py │ └── retry_policies.py ├── services │ ├── __init__.py │ ├── dispatch.py │ └── worker.py └── utils.py ├── docker-compose.yaml ├── pyproject.toml ├── tests ├── .create-test-db.sql ├── __init__.py ├── acceptance │ ├── __init__.py │ ├── fixtures │ │ ├── __init__.py │ │ ├── app.py │ │ └── processors.py │ └── test_process_cmd.py ├── conftest.py ├── factories.py └── unit │ ├── __init__.py │ ├── fixtures │ ├── __init__.py │ └── processors.py │ ├── processors │ ├── __init__.py │ ├── conftest.py │ ├── test_processor.py │ ├── test_registry.py │ └── test_retry_policies.py │ ├── services │ ├── __init__.py │ ├── test_dispatch_service.py │ └── test_worker_service.py │ └── test_config.py └── uv.lock /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | # Use the latest 2.1 version of CircleCI pipeline process engine. 2 | # See: https://circleci.com/docs/2.0/configuration-reference 3 | version: 2.1 4 | 5 | orbs: 6 | python: circleci/python@2.1.1 7 | 8 | jobs: 9 | test: 10 | docker: 11 | - image: cimg/python:3.11.12 12 | - image: cimg/postgres:16.2 13 | environment: 14 | POSTGRES_USER: bq 15 | POSTGRES_DB: bq_test 16 | steps: 17 | - checkout 18 | - run: 19 | name: Install uv 20 | command: pip install uv 21 | - run: 22 | name: Run test 23 | command: uv run python -m pytest ./tests -svvvv 24 | build-and-publish: 25 | docker: 26 | - image: cimg/python:3.11.12 27 | steps: 28 | - checkout 29 | - run: 30 | name: Install uv 31 | command: pip install uv 32 | - run: 33 | name: Build 34 | command: uv build 35 | - run: 36 | name: Publish 37 | command: uv publish 38 | 39 | workflows: 40 | test: 41 | jobs: 42 | - test: 43 | filters: 44 | # needed for deploy build 45 | # ref: https://discuss.circleci.com/t/builds-for-tags-not-triggering/17681/7 46 | tags: 47 | only: /^.*/ 48 | - build-and-publish: 49 | requires: 50 | - test 51 | filters: 52 | branches: 53 | ignore: /.*/ 54 | tags: 55 | only: /^.*/ 56 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Spyder project settings 132 | .spyderproject 133 | .spyproject 134 | 135 | # Rope project settings 136 | .ropeproject 137 | 138 | # mkdocs documentation 139 | /site 140 | 141 | # mypy 142 | .mypy_cache/ 143 | .dmypy.json 144 | dmypy.json 145 | 146 | # Pyre type checker 147 | .pyre/ 148 | 149 | # pytype static type analyzer 150 | .pytype/ 151 | 152 | # Cython debug symbols 153 | cython_debug/ 154 | 155 | # PyCharm 156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 158 | # and can be added to the global gitignore or merged into this file. For a more nuclear 159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
160 | #.idea/ 161 | .idea 162 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/astral-sh/ruff-pre-commit 3 | # Ruff version. 4 | rev: v0.11.6 5 | hooks: 6 | # Run the formatter. 7 | - id: ruff-format 8 | - repo: https://github.com/asottile/reorder_python_imports 9 | rev: v3.10.0 10 | hooks: 11 | - id: reorder-python-imports 12 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Launch Platform 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # BeanQueue [![CircleCI](https://dl.circleci.com/status-badge/img/gh/LaunchPlatform/bq/tree/master.svg?style=svg)](https://dl.circleci.com/status-badge/redirect/gh/LaunchPlatform/bq/tree/master) 2 | 3 | BeanQueue, a lightweight Python task queue framework based on [SQLAlchemy](https://www.sqlalchemy.org/), PostgreSQL [SKIP LOCKED queries](https://www.2ndquadrant.com/en/blog/what-is-select-skip-locked-for-in-postgresql-9-5/) and [NOTIFY](https://www.postgresql.org/docs/current/sql-notify.html) / [LISTEN](https://www.postgresql.org/docs/current/sql-listen.html) statements. 4 | 5 | **Notice**: Still in its early stage, we built this for [BeanHub](https://beanhub.io)'s internal usage. May change rapidly. Use at your own risk for now. 
6 | 
7 | ## Features
8 | 
9 | - **Super lightweight**: Under 1K lines
10 | - **Easy-to-deploy**: Only relies on PostgreSQL
11 | - **Transactional**: Commit your tasks along with other database entries, without worrying about data inconsistencies
12 | - **Easy-to-use**: Built-in command line tools for processing tasks and helpers for generating task models
13 | - **Auto-notify**: Automatic generation of NOTIFY statements for new or updated tasks, ensuring fast task processing
14 | - **Retry**: Built-in and customizable retry policies
15 | - **Schedule**: Schedule tasks to run later
16 | - **Worker heartbeat and auto-reschedule**: Each worker keeps updating its heartbeat; if one is found dead, the others will reschedule its tasks
17 | - **Customizable**: Custom Task, Worker and Event models. Use it as a library and build your own work queue
18 | 
19 | ## Install
20 | 
21 | ```bash
22 | pip install beanqueue
23 | ```
24 | 
25 | ## Usage
26 | 
27 | You can define a basic task processor like this:
28 | 
29 | ```python
30 | from sqlalchemy.orm import Session
31 | 
32 | import bq
33 | from .. import models
34 | from .. import image_utils
35 | 
36 | app = bq.BeanQueue()
37 | 
38 | @app.processor(channel="images")
39 | def resize_image(db: Session, task: bq.Task, width: int, height: int):
40 |     image = db.query(models.Image).filter(models.Image.task == task).one()
41 |     image_utils.resize(image, size=(width, height))
42 |     db.add(image)
43 |     # by default the `processor` decorator has the `auto_complete` flag turned on,
44 |     # so it will commit the db changes for us automatically
45 | ```
46 | 
47 | The `db` and `task` keyword arguments are optional.
48 | If you don't need to access the task object, you can simply define the function without these two parameters.
49 | We also provide an optional `savepoint` argument in case you want to roll back database changes you made.
50 | 
51 | To submit a task, you can either use the `bq.Task` model to construct the task object, insert it into the
52 | database session and commit.
53 | 
54 | ```python
55 | import bq
56 | from .db import Session
57 | from .. import models
58 | 
59 | db = Session()
60 | task = bq.Task(
61 |     channel="files",
62 |     module="my_pkgs.files.processors",
63 |     func_name="upload_to_s3_for_backup",
64 | )
65 | file = models.File(
66 |     task=task,
67 |     blob_name="...",
68 | )
69 | db.add(task)
70 | db.add(file)
71 | db.commit()
72 | ```
73 | 
74 | Or, you can use the `run` helper like this:
75 | 
76 | ```python
77 | from .processors import resize_image
78 | from .db import Session
79 | from .. import my_models
80 | 
81 | db = Session()
82 | # a Task model generated for invoking the resize_image function
83 | task = resize_image.run(width=200, height=300)
84 | # associate the task with your own models
85 | image = my_models.Image(task=task, blob_name="...")
86 | db.add(image)
87 | # we have a Task model SQLAlchemy event handler that sends the NOTIFY statement for you,
88 | # so that the workers will be woken up immediately
89 | db.add(task)
90 | # commit will make the task visible to workers immediately
91 | db.commit()
92 | ```
93 | 
94 | To run the worker, you can do this:
95 | 
96 | ```bash
97 | BQ_PROCESSOR_PACKAGES='["my_pkgs.processors"]' bq process images
98 | ```
99 | 
100 | `BQ_PROCESSOR_PACKAGES` is a JSON list containing the Python packages where you define your processors (the functions you decorated with `bq.processors.registry.processor`).
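
Since `BQ_PROCESSOR_PACKAGES` is a list and the `process` command accepts any number of channel arguments, a single worker can scan several packages and serve several channels at once. A quick sketch; the package and channel names below are made up for illustration:

```bash
# scan two processor packages and listen on both the "images" and "files" channels
BQ_PROCESSOR_PACKAGES='["my_pkgs.processors", "my_pkgs.files.processors"]' \
    bq process images files
```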
101 | To submit a task for testing purposes, you can run
102 | 
103 | ```bash
104 | bq submit images my_pkgs.processors resize_image -k '{"width": 200, "height": 300}'
105 | ```
106 | 
107 | To create the tables for BeanQueue, you can run
108 | 
109 | ```bash
110 | bq create_tables
111 | ```
112 | 
113 | ### Schedule
114 | 
115 | In most cases, a task will be executed as soon as possible after it is created.
116 | To run a task later, you can set the `scheduled_at` attribute of the task model to a datetime value.
117 | For example:
118 | 
119 | ```python
120 | import datetime
121 | from sqlalchemy import func
122 | db = Session()
123 | task = resize_image.run(width=200, height=300)
124 | task.scheduled_at = func.now() + datetime.timedelta(minutes=3)
125 | db.add(task)
126 | ```
127 | 
128 | Please note that currently, workers won't wake up at the exact moment when scheduled tasks become ready to run.
129 | A worker has to wait until its polling times out, and eventually it will see that the task's `scheduled_at` time has passed.
130 | Therefore, depending on your `POLL_TIMEOUT` setting and the number of workers when they started processing, the actual execution time may be imprecise.
131 | If you set `POLL_TIMEOUT` to 60 seconds, please expect less than 60 seconds of delay.
132 | 
133 | ### Retry
134 | 
135 | To automatically retry a task after failure, you can specify a retry policy for the processor.
136 | 
137 | ```python
138 | import datetime
139 | import bq
140 | from sqlalchemy.orm import Session
141 | 
142 | app = bq.BeanQueue()
143 | delay_retry = bq.DelayRetry(delay=datetime.timedelta(seconds=120))
144 | 
145 | @app.processor(channel="images", retry_policy=delay_retry)
146 | def resize_image(db: Session, task: bq.Task, width: int, height: int):
147 |     # resize image here ...
148 |     pass
149 | ```
150 | 
151 | Currently, we provide some simple common retry policies such as `DelayRetry` and `ExponentialBackoffRetry`.
152 | You can easily define your own retry policy by making a function that optionally returns the next scheduled time for the retry.
153 | 
154 | ```python
155 | def my_retry_policy(task: bq.Task) -> typing.Any:
156 |     # calculate the delay based on the task model ...
157 |     return func.now() + datetime.timedelta(seconds=delay)
158 | ```
159 | 
160 | To cap how many attempts are allowed, you can also use `LimitAttempt` like this:
161 | 
162 | ```python
163 | delay_retry = bq.DelayRetry(delay=datetime.timedelta(seconds=120))
164 | capped_delay_retry = bq.LimitAttempt(3, delay_retry)
165 | 
166 | @app.processor(channel="images", retry_policy=capped_delay_retry)
167 | def resize_image(db: Session, task: bq.Task, width: int, height: int):
168 |     # Resize image here ...
169 |     pass
170 | ```
171 | 
172 | You can also retry only for specific exception classes with the `retry_exceptions` argument.
173 | 
174 | ```python
175 | @app.processor(
176 |     channel="images",
177 |     retry_policy=delay_retry,
178 |     retry_exceptions=ValueError,
179 | )
180 | def resize_image(db: Session, task: bq.Task, width: int, height: int):
181 |     # resize image here ...
182 |     pass
183 | ```
184 | 
185 | ### Configurations
186 | 
187 | Configurations can be modified by setting environment variables with the `BQ_` prefix.
188 | For example, to set the Python packages to scan for processors, you can set `BQ_PROCESSOR_PACKAGES`.
189 | To change the PostgreSQL database to connect to, you can set `BQ_DATABASE_URL`.
190 | The complete definition of the configurations can be found in the [bq/config.py](bq/config.py) module.
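
For example, the worker from the Usage section could be configured entirely through environment variables. A minimal sketch; the connection string and values below are illustrative assumptions, not defaults:

```bash
# every setting uses the BQ_ prefix; list values are passed as JSON
export BQ_DATABASE_URL='postgresql://bq:secret@localhost/bq'
export BQ_PROCESSOR_PACKAGES='["my_pkgs.processors"]'
export BQ_BATCH_SIZE=10     # fetch up to 10 tasks per dispatch
export BQ_POLL_TIMEOUT=60   # seconds to wait for a NOTIFY before polling again
bq process images
```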
191 | 
192 | If you want to configure BeanQueue programmatically, you can pass a `Config` object to `bq.BeanQueue` when creating it.
193 | For example:
194 | 
195 | ```python
196 | import bq
197 | from .my_config import config
198 | 
199 | config = bq.Config(
200 |     PROCESSOR_PACKAGES=["my_pkgs.processors"],
201 |     DATABASE_URL=config.DATABASE_URL,
202 |     BATCH_SIZE=10,
203 | )
204 | app = bq.BeanQueue(config=config)
205 | ```
206 | 
207 | Then you can pass the `--app` argument (or `-a` for short), pointing to the app object, to the process command like this:
208 | 
209 | ```bash
210 | bq -a my_pkgs.bq.app process images
211 | ```
212 | 
213 | Or, if you prefer to define your own process command, you can also call `process_tasks` on the `BeanQueue` object directly like this:
214 | 
215 | ```python
216 | app.process_tasks(channels=("images",))
217 | ```
218 | 
219 | ### Define your own tables
220 | 
221 | BeanQueue is designed to be as customizable as possible.
222 | One of its key features is that you can define your own SQLAlchemy models instead of using the ones we provide.
223 | 
224 | To make defining your own `Task`, `Worker` or `Event` model much easier, use bq's mixin classes:
225 | 
226 | - `bq.TaskModelMixin`: provides task model columns
227 | - `bq.TaskModelRefWorkerMixin`: provides foreign key column and relationship to `bq.Worker`
228 | - `bq.TaskModelRefParentMixin`: provides foreign key column and relationship to children `bq.Task` created during processing
229 | - `bq.TaskModelRefEventMixin`: provides foreign key column and relationship to `bq.Event`
230 | - `bq.WorkerModelMixin`: provides worker model columns
231 | - `bq.WorkerRefMixin`: provides relationship to `bq.Task`
232 | - `bq.EventModelMixin`: provides event model columns
233 | - `bq.EventModelRefTaskMixin`: provides foreign key column and relationship to `bq.Task`
234 | 
235 | Here's an example of defining your own Task model:
236 | 
237 | ```python
238 | import uuid
239 | 
240 | from sqlalchemy import ForeignKey
241 | from sqlalchemy.dialects.postgresql import UUID
242 | from sqlalchemy.orm import Mapped
243 | from sqlalchemy.orm import mapped_column
244 | from sqlalchemy.orm import relationship
245 | import bq
246 | from bq.models.task import listen_events
247 | 
248 | from .base_class import Base
249 | 
250 | 
251 | class Task(bq.TaskModelMixin, Base):
252 |     __tablename__ = "task"
253 |     worker_id: Mapped[uuid.UUID] = mapped_column(
254 |         UUID(as_uuid=True),
255 |         ForeignKey("worker.id", onupdate="CASCADE"),
256 |         nullable=True,
257 |         index=True,
258 |     )
259 | 
260 |     worker: Mapped["Worker"] = relationship(
261 |         "Worker", back_populates="tasks", uselist=False
262 |     )
263 | 
264 | listen_events(Task)
265 | ```
266 | 
267 | For task insertions and updates to notify workers, we need to register any custom task types with `bq.models.task.listen_events`.
268 | In the example above, this is done right after the Task model definition.
269 | For more details and advanced usage, see the definition of `bq.models.task.listen_events`.
270 | 
271 | You've just seen how easy it is to define your own Task model.
Now, here's an example of defining your own Worker model:
272 | 
273 | ```python
274 | import bq
275 | from sqlalchemy.orm import Mapped
276 | from sqlalchemy.orm import relationship
277 | 
278 | from .base_class import Base
279 | 
280 | 
281 | class Worker(bq.WorkerModelMixin, Base):
282 |     __tablename__ = "worker"
283 | 
284 |     tasks: Mapped[list["Task"]] = relationship(
285 |         "Task",
286 |         back_populates="worker",
287 |         cascade="all,delete",
288 |         order_by="Task.created_at",
289 |     )
290 | ```
291 | 
292 | With the model classes ready, you only need to change the `TASK_MODEL`, `WORKER_MODEL` and `EVENT_MODEL` of `Config` to the full Python module name plus the class name, like this:
293 | 
294 | ```python
295 | import bq
296 | config = bq.Config(
297 |     TASK_MODEL="my_pkgs.models.Task",
298 |     WORKER_MODEL="my_pkgs.models.Worker",
299 |     EVENT_MODEL="my_pkgs.models.Event",
300 |     # ... other configs
301 | )
302 | app = bq.BeanQueue(config)
303 | ```
304 | 
305 | ## Why?
306 | 
307 | There are countless work queue projects. Why make yet another one?
308 | The primary issue with most work queue tools is their reliance on a standalone broker server.
309 | Our work queue tasks frequently interact with the database, and the atomic nature of database transactions is great for data integrity.
310 | However, integrating an external work queue into the system presents a risk.
311 | The work queue and the database don't share the same data view, potentially compromising data integrity and reliability.
312 | 
313 | For example, say you have an `images` table that keeps the user-uploaded images.
314 | And you have a background work queue for resizing the uploaded images into different thumbnail sizes.
315 | So, you first need to insert a row for the uploaded image into the database before you push the resizing task to the work queue.
316 | 
317 | Say you push the task to the work queue immediately after you insert into the `images` table and then commit, like this:
318 | 
319 | ```
320 | 1. Insert into the "images" table
321 | 2. Push resizing task to the work queue
322 | 3. Commit db changes
323 | ```
324 | 
325 | While this might seem like the right way to do it, there's a hidden bug.
326 | If the worker starts processing too quickly, before the transaction commits at step 3, it will not be able to see the new row in `images` as it has not been committed yet.
327 | One may need to make the task retry a few times so that, even if the first attempt fails, a following attempt can see the image row.
328 | But this adds complexity to the system and also increases the latency when the first attempt fails.
329 | Also, if the commit step fails, you will have a failed work queue job trying to fetch a row from the database that will never exist.
330 | 
331 | Another approach is to push the resize task after the database changes are committed. It works like this:
332 | 
333 | ```
334 | 1. Insert into the "images" table
335 | 2. Commit db changes
336 | 3. Push resizing task to the work queue
337 | ```
338 | 
339 | With this approach, we don't need to worry about workers picking up the task too early.
340 | However, there's another drawback.
341 | If step 3, pushing the new task to the work queue, fails, the newly inserted `images` row will never be processed.
342 | There are many workarounds for this problem, but all of these issues stem from the inconsistent data views between the database and the work queue storage.
343 | Things would be much easier if we had a work queue that shared the same consistent view as the database.
344 | 
345 | By using the database as the data storage, all of these problems go away.
346 | You can simply do the following:
347 | 
348 | ```
349 | 1. Insert into the "images" table
350 | 2. Insert the image resizing task into the `tasks` table
351 | 3. Commit db changes
352 | ```
353 | 
354 | It's all or nothing!
355 | By doing so, you don't need to maintain another work queue backend.
356 | You are probably using a database anyway, so this work queue comes for free.
357 | 
358 | Usually, a database is inefficient as a work queue's data storage because of the potential lock contention and the need for constant querying.
359 | However, things have changed since the [introduction of the SKIP LOCKED](https://www.2ndquadrant.com/en/blog/what-is-select-skip-locked-for-in-postgresql-9-5/) and [LISTEN](https://www.postgresql.org/docs/current/sql-listen.html) / [NOTIFY](https://www.postgresql.org/docs/current/sql-notify.html) features in PostgreSQL and other databases.
360 | 
361 | This project is inspired by many of the SKIP-LOCKED-based work queue projects that came before it.
362 | Why don't we just use those existing tools?
363 | Well, because while they work great as work queue solutions, they don't take advantage of writing tasks and their related data into the database in a single transaction.
364 | Many provide an abstraction function or gRPC method for pushing tasks into the database, rather than allowing users to directly insert rows and commit them together.
365 | 
366 | BeanQueue doesn't overly abstract the logic of publishing a new task into the queue.
367 | Instead, you insert rows directly, choosing when and what to commit as tasks.
368 | 
369 | ## Sponsor
370 | 
371 | 

372 | [![BeanHub logo](assets/beanhub.svg)](https://beanhub.io)
373 | 

374 | 375 | A modern accounting book service based on the most popular open source version control system [Git](https://git-scm.com/) and text-based double entry accounting book software [Beancount](https://beancount.github.io/docs/index.html). 376 | 377 | ## Alternatives 378 | 379 | - [solid_queue](https://github.com/rails/solid_queue) 380 | - [good_job](https://github.com/bensheldon/good_job) 381 | - [graphile-worker](https://github.com/graphile/worker) 382 | - [postgres-tq](https://github.com/flix-tech/postgres-tq) 383 | - [pq](https://github.com/malthe/pq/) 384 | - [PgQueuer](https://github.com/janbjorge/PgQueuer) 385 | - [hatchet](https://github.com/hatchet-dev/hatchet) 386 | - [procrastinate](https://github.com/procrastinate-org/procrastinate) 387 | -------------------------------------------------------------------------------- /assets/beanhub.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /bq/__init__.py: -------------------------------------------------------------------------------- 1 | from .app import BeanQueue 2 | from .config import Config # noqa 3 | from .models import Event 4 | from .models import EventModelMixin 5 | from .models import EventModelRefTaskMixin 6 | from .models import EventType 7 | from .models import Task # noqa 8 | from .models import TaskModelMixin 9 | from .models import TaskModelRefEventMixin 10 | from .models import TaskModelRefParentMixin 11 | from .models import TaskModelRefWorkerMixin 12 | from .models import TaskState # noqa 13 | from .models import Worker # noqa 14 | from .models import WorkerModelMixin # noqa 15 | from .models import WorkerRefMixin # noqa 16 | from .models import WorkerState # noqa 17 | from .processors.retry_policies import DelayRetry 18 | from .processors.retry_policies import ExponentialBackoffRetry 19 | from .processors.retry_policies import LimitAttempt 20 | -------------------------------------------------------------------------------- /bq/app.py: -------------------------------------------------------------------------------- 1 | import functools 2 | import importlib 3 | import json 4 | import logging 5 | import platform 6 | import sys 7 | import threading 8 | import typing 9 | from importlib.metadata import PackageNotFoundError 10 | from importlib.metadata import version 11 | from wsgiref.simple_server import make_server 12 | from wsgiref.simple_server import WSGIRequestHandler 13 | 14 | import venusian 15 | from sqlalchemy import func 16 | from sqlalchemy.engine import create_engine 17 | from sqlalchemy.engine import Engine 18 | from sqlalchemy.orm import Session as DBSession 19 | from sqlalchemy.pool import SingletonThreadPool 20 | 21 | from . import constants 22 | from . import events 23 | from . 
import models 24 | from .config import Config 25 | from .db.session import SessionMaker 26 | from .processors.processor import Processor 27 | from .processors.processor import ProcessorHelper 28 | from .processors.registry import collect 29 | from .services.dispatch import DispatchService 30 | from .services.worker import WorkerService 31 | from .utils import load_module_var 32 | 33 | logger = logging.getLogger(__name__) 34 | 35 | 36 | class WSGIRequestHandlerWithLogger(WSGIRequestHandler): 37 | logger = logging.getLogger("metrics_server") 38 | 39 | def log_message(self, format, *args): 40 | message = format % args 41 | self.logger.info( 42 | "%s - - [%s] %s\n" 43 | % ( 44 | self.address_string(), 45 | self.log_date_time_string(), 46 | message.translate(self._control_char_table), 47 | ) 48 | ) 49 | 50 | 51 | class BeanQueue: 52 | def __init__( 53 | self, 54 | config: Config | None = None, 55 | session_cls: DBSession = SessionMaker, 56 | worker_service_cls: typing.Type[WorkerService] = WorkerService, 57 | dispatch_service_cls: typing.Type[DispatchService] = DispatchService, 58 | engine: Engine | None = None, 59 | ): 60 | self.config = config if config is not None else Config() 61 | self.session_cls = session_cls 62 | self.worker_service_cls = worker_service_cls 63 | self.dispatch_service_cls = dispatch_service_cls 64 | self._engine = engine 65 | self._worker_update_shutdown_event: threading.Event = threading.Event() 66 | # noop if metrics thread is not started yet, shutdown if it is started 67 | self._metrics_server_shutdown: typing.Callable[[], None] = lambda: None 68 | 69 | def create_default_engine(self): 70 | return create_engine( 71 | str(self.config.DATABASE_URL), poolclass=SingletonThreadPool 72 | ) 73 | 74 | def make_session(self) -> DBSession: 75 | return self.session_cls(bind=self.engine) 76 | 77 | @property 78 | def engine(self) -> Engine: 79 | if self._engine is None: 80 | self._engine = self.create_default_engine() 81 | return self._engine 82 | 83 | @property 84 | def task_model(self) -> typing.Type[models.Task]: 85 | return load_module_var(self.config.TASK_MODEL) 86 | 87 | @property 88 | def worker_model(self) -> typing.Type[models.Worker]: 89 | return load_module_var(self.config.WORKER_MODEL) 90 | 91 | @property 92 | def event_model(self) -> typing.Type[models.Event] | None: 93 | if self.config.EVENT_MODEL is None: 94 | return 95 | return load_module_var(self.config.EVENT_MODEL) 96 | 97 | def _make_worker_service(self, session: DBSession): 98 | return self.worker_service_cls( 99 | session=session, task_model=self.task_model, worker_model=self.worker_model 100 | ) 101 | 102 | def _make_dispatch_service(self, session: DBSession): 103 | return self.dispatch_service_cls(session=session, task_model=self.task_model) 104 | 105 | def processor( 106 | self, 107 | channel: str = constants.DEFAULT_CHANNEL, 108 | auto_complete: bool = True, 109 | retry_policy: typing.Callable | None = None, 110 | retry_exceptions: typing.Type | typing.Tuple[typing.Type, ...] 
| None = None, 111 | task_model: typing.Type | None = None, 112 | ) -> typing.Callable: 113 | def decorator(wrapped: typing.Callable): 114 | processor = Processor( 115 | module=wrapped.__module__, 116 | name=wrapped.__name__, 117 | channel=channel, 118 | func=wrapped, 119 | auto_complete=auto_complete, 120 | retry_policy=retry_policy, 121 | retry_exceptions=retry_exceptions, 122 | ) 123 | helper_obj = ProcessorHelper( 124 | processor, 125 | task_cls=task_model if task_model is not None else self.task_model, 126 | ) 127 | 128 | def callback(scanner: venusian.Scanner, name: str, ob: typing.Callable): 129 | if processor.name != name: 130 | raise ValueError("Name is not the same") 131 | scanner.registry.add(processor) 132 | 133 | venusian.attach( 134 | helper_obj, callback, category=constants.BQ_PROCESSOR_CATEGORY 135 | ) 136 | return helper_obj 137 | 138 | return decorator 139 | 140 | def update_workers( 141 | self, 142 | worker_id: typing.Any, 143 | ): 144 | db = self.make_session() 145 | 146 | worker_service = self._make_worker_service(db) 147 | dispatch_service = self._make_dispatch_service(db) 148 | 149 | current_worker = worker_service.get_worker(worker_id) 150 | logger.info( 151 | "Updating worker %s with heartbeat_period=%s, heartbeat_timeout=%s", 152 | current_worker.id, 153 | self.config.WORKER_HEARTBEAT_PERIOD, 154 | self.config.WORKER_HEARTBEAT_TIMEOUT, 155 | ) 156 | while True: 157 | dead_workers = worker_service.fetch_dead_workers( 158 | timeout=self.config.WORKER_HEARTBEAT_TIMEOUT 159 | ) 160 | task_count = worker_service.reschedule_dead_tasks( 161 | # TODO: a better way to abstract this? 162 | dead_workers.with_entities(current_worker.__class__.id) 163 | ) 164 | found_dead_worker = False 165 | for dead_worker in dead_workers: 166 | found_dead_worker = True 167 | logger.info( 168 | "Found dead worker %s (name=%s), reschedule %s dead tasks in channels %s", 169 | dead_worker.id, 170 | dead_worker.name, 171 | task_count, 172 | dead_worker.channels, 173 | ) 174 | dispatch_service.notify(dead_worker.channels) 175 | if found_dead_worker: 176 | db.commit() 177 | 178 | if current_worker.state != models.WorkerState.RUNNING: 179 | # This probably means we are somehow very slow to update the heartbeat in time, or the timeout window 180 | # is set too short. It could also be the administrator update the worker state to something else than 181 | # RUNNING. Regardless the reason, let's stop processing. 
182 | logger.warning( 183 | "Current worker %s state is %s instead of running, quit processing", 184 | current_worker.id, 185 | current_worker.state, 186 | ) 187 | sys.exit(0) 188 | 189 | do_shutdown = self._worker_update_shutdown_event.wait( 190 | self.config.WORKER_HEARTBEAT_PERIOD 191 | ) 192 | if do_shutdown: 193 | return 194 | 195 | current_worker.last_heartbeat = func.now() 196 | db.add(current_worker) 197 | db.commit() 198 | 199 | def _serve_http_request( 200 | self, worker_id: typing.Any, environ: dict, start_response: typing.Callable 201 | ) -> list[bytes]: 202 | path = environ["PATH_INFO"] 203 | if path == "/healthz": 204 | db = self.make_session() 205 | worker_service = self._make_worker_service(db) 206 | worker = worker_service.get_worker(worker_id) 207 | if worker is not None and worker.state == models.WorkerState.RUNNING: 208 | start_response( 209 | "200 OK", 210 | [ 211 | ("Content-Type", "application/json"), 212 | ], 213 | ) 214 | return [ 215 | json.dumps(dict(status="ok", worker_id=str(worker_id))).encode( 216 | "utf8" 217 | ) 218 | ] 219 | else: 220 | logger.warning("Bad worker %s state %s", worker_id, worker.state) 221 | start_response( 222 | "500 Internal Server Error", 223 | [ 224 | ("Content-Type", "application/json"), 225 | ], 226 | ) 227 | return [ 228 | json.dumps( 229 | dict( 230 | status="internal error", 231 | worker_id=str(worker_id), 232 | state=str(worker.state), 233 | ) 234 | ).encode("utf8") 235 | ] 236 | # TODO: add other metrics endpoints 237 | start_response( 238 | "404 NOT FOUND", 239 | [ 240 | ("Content-Type", "application/json"), 241 | ], 242 | ) 243 | return [json.dumps(dict(status="not found")).encode("utf8")] 244 | 245 | def run_metrics_http_server(self, worker_id: typing.Any): 246 | host = self.config.METRICS_HTTP_SERVER_INTERFACE 247 | port = self.config.METRICS_HTTP_SERVER_PORT 248 | with make_server( 249 | host, 250 | port, 251 | functools.partial(self._serve_http_request, worker_id), 252 | handler_class=WSGIRequestHandlerWithLogger, 253 | ) as httpd: 254 | # expose graceful shutdown to the main thread 255 | self._metrics_server_shutdown = httpd.shutdown 256 | logger.info("Run metrics HTTP server on %s:%s", host, port) 257 | httpd.serve_forever() 258 | 259 | def process_tasks( 260 | self, 261 | channels: tuple[str, ...], 262 | ): 263 | try: 264 | bq_version = version("beanqueue") 265 | except PackageNotFoundError: 266 | bq_version = "unknown" 267 | 268 | logger.info( 269 | "Starting processing tasks, bq_version=%s", 270 | bq_version, 271 | ) 272 | db = self.make_session() 273 | if not channels: 274 | channels = [constants.DEFAULT_CHANNEL] 275 | 276 | if not self.config.PROCESSOR_PACKAGES: 277 | logger.error("No PROCESSOR_PACKAGES provided") 278 | raise ValueError("No PROCESSOR_PACKAGES provided") 279 | 280 | logger.info("Scanning packages %s", self.config.PROCESSOR_PACKAGES) 281 | pkgs = list(map(importlib.import_module, self.config.PROCESSOR_PACKAGES)) 282 | registry = collect(pkgs) 283 | for channel, module_processors in registry.processors.items(): 284 | logger.info("Collected processors with channel %r", channel) 285 | for module, func_processors in module_processors.items(): 286 | for processor in func_processors.values(): 287 | logger.info( 288 | " Processor module=%r, name=%r", module, processor.name 289 | ) 290 | 291 | dispatch_service = self.dispatch_service_cls( 292 | session=db, task_model=self.task_model 293 | ) 294 | work_service = self.worker_service_cls( 295 | session=db, task_model=self.task_model, 
worker_model=self.worker_model 296 | ) 297 | 298 | worker = work_service.make_worker(name=platform.node(), channels=channels) 299 | db.add(worker) 300 | dispatch_service.listen(channels) 301 | db.commit() 302 | 303 | metrics_server_thread = None 304 | if self.config.METRICS_HTTP_SERVER_ENABLED: 305 | WSGIRequestHandlerWithLogger.logger.setLevel( 306 | self.config.METRICS_HTTP_SERVER_LOG_LEVEL 307 | ) 308 | metrics_server_thread = threading.Thread( 309 | target=self.run_metrics_http_server, 310 | args=(worker.id,), 311 | ) 312 | metrics_server_thread.daemon = True 313 | metrics_server_thread.start() 314 | 315 | logger.info("Created worker %s, name=%s", worker.id, worker.name) 316 | events.worker_init.send(self, worker=worker) 317 | 318 | logger.info("Processing tasks in channels = %s ...", channels) 319 | # Graceful shutdown of worker update event on exit of the worker 320 | worker_update_thread = threading.Thread( 321 | target=functools.partial( 322 | self.update_workers, 323 | worker_id=worker.id, 324 | ), 325 | name="update_workers", 326 | ) 327 | worker_update_thread.daemon = True 328 | worker_update_thread.start() 329 | 330 | worker_id = worker.id 331 | 332 | try: 333 | while True: 334 | while True: 335 | tasks = dispatch_service.dispatch( 336 | channels, 337 | worker_id=worker_id, 338 | limit=self.config.BATCH_SIZE, 339 | ).all() 340 | for task in tasks: 341 | logger.info( 342 | "Processing task %s, channel=%s, module=%s, func=%s", 343 | task.id, 344 | task.channel, 345 | task.module, 346 | task.func_name, 347 | ) 348 | # TODO: support processor pool and other approaches to dispatch the workload 349 | registry.process(task, event_cls=self.event_model) 350 | if not tasks: 351 | # we should try to keep dispatching until we cannot find tasks 352 | break 353 | else: 354 | db.commit() 355 | # we will not see notifications in a transaction, need to close the transaction first before entering 356 | # polling 357 | db.close() 358 | try: 359 | for notification in dispatch_service.poll( 360 | timeout=self.config.POLL_TIMEOUT 361 | ): 362 | logger.debug("Receive notification %s", notification) 363 | except TimeoutError: 364 | logger.debug("Poll timeout, try again") 365 | continue 366 | except (SystemExit, KeyboardInterrupt): 367 | db.rollback() 368 | logger.info("Shutting down ...") 369 | self._worker_update_shutdown_event.set() 370 | worker_update_thread.join(5) 371 | if metrics_server_thread is not None: 372 | # set a threading event, waits until server is shutdown 373 | # serve the ongoing requests 374 | self._metrics_server_shutdown() 375 | metrics_server_thread.join(1) 376 | 377 | worker.state = models.WorkerState.SHUTDOWN 378 | db.add(worker) 379 | task_count = work_service.reschedule_dead_tasks([worker.id]) 380 | logger.info("Reschedule %s tasks", task_count) 381 | dispatch_service.notify(channels) 382 | db.commit() 383 | 384 | logger.info("Shutdown gracefully") 385 | -------------------------------------------------------------------------------- /bq/cmds/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LaunchPlatform/bq/1ce563cd90ce02d5945baf52c93595d010e0d682/bq/cmds/__init__.py -------------------------------------------------------------------------------- /bq/cmds/cli.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | 4 | import click 5 | from rich.logging import RichHandler 6 | 7 | from .environment import Environment 8 | from 
.environment import LOG_LEVEL_MAP 9 | from .environment import LogLevel 10 | from .environment import pass_env 11 | from .utils import load_app 12 | 13 | 14 | @click.group(help="Command line tools for BeanQueue") 15 | @click.option( 16 | "-l", 17 | "--log-level", 18 | type=click.Choice( 19 | list(map(lambda key: key.value, LOG_LEVEL_MAP.keys())), case_sensitive=False 20 | ), 21 | default=lambda: os.environ.get("LOG_LEVEL", "INFO"), 22 | ) 23 | @click.option( 24 | "--disable-rich-log", 25 | is_flag=True, 26 | help="disable rich log handler", 27 | ) 28 | @click.option( 29 | "-a", "--app", type=str, help='BeanQueue app object to use, e.g. "my_pkgs.bq.app"' 30 | ) 31 | @click.version_option(prog_name="bq", package_name="bq") 32 | @pass_env 33 | def cli(env: Environment, log_level: str, disable_rich_log: bool, app: str): 34 | env.log_level = LogLevel(log_level) 35 | env.app = load_app(app) 36 | 37 | if disable_rich_log: 38 | logging.basicConfig( 39 | level=LOG_LEVEL_MAP[env.log_level], 40 | force=True, 41 | ) 42 | else: 43 | FORMAT = "%(message)s" 44 | logging.basicConfig( 45 | level=LOG_LEVEL_MAP[env.log_level], 46 | format=FORMAT, 47 | datefmt="[%X]", 48 | handlers=[RichHandler()], 49 | force=True, 50 | ) 51 | -------------------------------------------------------------------------------- /bq/cmds/create_tables.py: -------------------------------------------------------------------------------- 1 | from .. import models # noqa 2 | from ..db.base import Base 3 | from .cli import cli 4 | from .environment import Environment 5 | from .environment import pass_env 6 | 7 | 8 | @cli.command(name="create_tables", help="Create BeanQueue tables") 9 | @pass_env 10 | def create_tables(env: Environment): 11 | Base.metadata.create_all(bind=env.app.engine) 12 | env.logger.info("Done, tables created") 13 | -------------------------------------------------------------------------------- /bq/cmds/environment.py: -------------------------------------------------------------------------------- 1 | import dataclasses 2 | import enum 3 | import logging 4 | 5 | import click 6 | 7 | from ..app import BeanQueue 8 | 9 | 10 | @enum.unique 11 | class LogLevel(enum.Enum): 12 | VERBOSE = "verbose" 13 | DEBUG = "debug" 14 | INFO = "info" 15 | WARNING = "warning" 16 | ERROR = "error" 17 | FATAL = "fatal" 18 | 19 | 20 | LOG_LEVEL_MAP = { 21 | LogLevel.DEBUG: logging.DEBUG, 22 | LogLevel.INFO: logging.INFO, 23 | LogLevel.WARNING: logging.WARNING, 24 | LogLevel.ERROR: logging.ERROR, 25 | LogLevel.FATAL: logging.FATAL, 26 | } 27 | 28 | 29 | @dataclasses.dataclass 30 | class Environment: 31 | log_level: LogLevel = LogLevel.INFO 32 | logger: logging.Logger = logging.getLogger("bq") 33 | app: BeanQueue = BeanQueue() 34 | 35 | 36 | pass_env = click.make_pass_decorator(Environment, ensure=True) 37 | -------------------------------------------------------------------------------- /bq/cmds/main.py: -------------------------------------------------------------------------------- 1 | from . import create_tables # noqa 2 | from . import process # noqa 3 | from . 
import submit # noqa 4 | from .cli import cli 5 | 6 | __ALL__ = [cli] 7 | 8 | if __name__ == "__main__": 9 | cli() 10 | -------------------------------------------------------------------------------- /bq/cmds/process.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from .cli import cli 4 | from .environment import Environment 5 | from .environment import pass_env 6 | 7 | 8 | @cli.command(name="process", help="Process BeanQueue tasks") 9 | @click.argument("channels", nargs=-1) 10 | @pass_env 11 | def process( 12 | env: Environment, 13 | channels: tuple[str, ...], 14 | ): 15 | env.app.process_tasks(channels) 16 | -------------------------------------------------------------------------------- /bq/cmds/submit.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | import click 4 | 5 | from .cli import cli 6 | from .environment import Environment 7 | from .environment import pass_env 8 | 9 | 10 | @cli.command(name="submit", help="Submit a new task, mostly for debugging purpose") 11 | @click.argument("channel", nargs=1) 12 | @click.argument("module", nargs=1) 13 | @click.argument("func", nargs=1) 14 | @click.option( 15 | "-k", "--kwargs", type=str, help="Keyword arguments as JSON", default=None 16 | ) 17 | @pass_env 18 | def submit( 19 | env: Environment, 20 | channel: str, 21 | module: str, 22 | func: str, 23 | kwargs: str | None, 24 | ): 25 | db = env.app.make_session() 26 | 27 | env.logger.info( 28 | "Submit task with channel=%s, module=%s, func=%s", channel, module, func 29 | ) 30 | kwargs_value = {} 31 | if kwargs: 32 | kwargs_value = json.loads(kwargs) 33 | 34 | task = env.app.task_model( 35 | channel=channel, 36 | module=module, 37 | func_name=func, 38 | kwargs=kwargs_value, 39 | ) 40 | db.add(task) 41 | db.commit() 42 | env.logger.info("Done, submit task %s", task.id) 43 | -------------------------------------------------------------------------------- /bq/cmds/utils.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from ..app import BeanQueue 4 | from ..utils import load_module_var 5 | 6 | logger = logging.getLogger(__name__) 7 | 8 | 9 | def load_app(app: str | None) -> BeanQueue: 10 | if app is None: 11 | logger.info("No BeanQueue app provided, create default app") 12 | return BeanQueue() 13 | logger.info("Load BeanQueue app from %s", app) 14 | return load_module_var(app) 15 | -------------------------------------------------------------------------------- /bq/config.py: -------------------------------------------------------------------------------- 1 | import typing 2 | 3 | from pydantic import Field 4 | from pydantic import field_validator 5 | from pydantic import PostgresDsn 6 | from pydantic import ValidationInfo 7 | from pydantic_core import MultiHostUrl 8 | from pydantic_settings import BaseSettings 9 | from pydantic_settings import SettingsConfigDict 10 | 11 | 12 | class Config(BaseSettings): 13 | # Packages to scan for processor functions 14 | PROCESSOR_PACKAGES: list[str] = Field(default_factory=list) 15 | 16 | # Size of tasks batch to fetch each time from the database 17 | BATCH_SIZE: int = 1 18 | 19 | # How long we should poll before timeout in seconds 20 | POLL_TIMEOUT: int = 60 21 | 22 | # Interval of worker heartbeat update cycle in seconds 23 | WORKER_HEARTBEAT_PERIOD: int = 30 24 | 25 | # Timeout of worker heartbeat in seconds 26 | WORKER_HEARTBEAT_TIMEOUT: int = 100 27 | 28 | # which task 
model to use 29 | TASK_MODEL: str = "bq.Task" 30 | 31 | # which worker model to use 32 | WORKER_MODEL: str = "bq.Worker" 33 | 34 | # which event model to use 35 | EVENT_MODEL: str | None = "bq.Event" 36 | 37 | # Enable metrics HTTP server 38 | METRICS_HTTP_SERVER_ENABLED: bool = True 39 | 40 | # the metrics http server interface to listen 41 | METRICS_HTTP_SERVER_INTERFACE: str = "" 42 | 43 | # the metrics http server port to listen 44 | METRICS_HTTP_SERVER_PORT: int = 8000 45 | 46 | # default log level for metrics http server 47 | METRICS_HTTP_SERVER_LOG_LEVEL: int = 30 48 | 49 | POSTGRES_SERVER: str = "localhost" 50 | POSTGRES_USER: str = "bq" 51 | POSTGRES_PASSWORD: str = "" 52 | POSTGRES_DB: str = "bq" 53 | # The URL of postgresql database to connect 54 | DATABASE_URL: typing.Optional[PostgresDsn] = None 55 | 56 | @field_validator("DATABASE_URL", mode="before") 57 | def assemble_db_connection( 58 | cls, v: typing.Optional[str], info: ValidationInfo 59 | ) -> typing.Any: 60 | if isinstance(v, str): 61 | return v 62 | # Notice: Older Pydantic version (2.7), PostgresDsn is an annotated MultiHostUrl object, 63 | # we cannot use isinstance with PostgresDsn directly. We need to check and see if PostgresDsn 64 | # is an annotated type or not before we decide how to check if the passed in object is an 65 | # PostgresDsn or not. 66 | if typing.get_origin(PostgresDsn) is typing.Annotated: 67 | if isinstance(v, MultiHostUrl): 68 | return v 69 | else: 70 | if isinstance(v, PostgresDsn): 71 | return v 72 | if v is not None: 73 | raise ValueError("Unexpected DATABASE_URL type") 74 | return PostgresDsn.build( 75 | scheme="postgresql", 76 | username=info.data.get("POSTGRES_USER"), 77 | password=info.data.get("POSTGRES_PASSWORD"), 78 | host=info.data.get("POSTGRES_SERVER"), 79 | path=f"{info.data.get('POSTGRES_DB') or ''}", 80 | ) 81 | 82 | model_config = SettingsConfigDict(case_sensitive=True, env_prefix="BQ_") 83 | -------------------------------------------------------------------------------- /bq/constants.py: -------------------------------------------------------------------------------- 1 | # the name of default channel to use if not provided 2 | DEFAULT_CHANNEL = "default" 3 | # category value for venusian to scan functions decorated with `processor` 4 | BQ_PROCESSOR_CATEGORY = "bq_processor" 5 | -------------------------------------------------------------------------------- /bq/db/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LaunchPlatform/bq/1ce563cd90ce02d5945baf52c93595d010e0d682/bq/db/__init__.py -------------------------------------------------------------------------------- /bq/db/base.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.orm import DeclarativeBase 2 | 3 | 4 | class Base(DeclarativeBase): 5 | pass 6 | -------------------------------------------------------------------------------- /bq/db/session.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.orm import scoped_session 2 | from sqlalchemy.orm import sessionmaker 3 | 4 | SessionMaker = sessionmaker() 5 | Session = scoped_session(SessionMaker) 6 | -------------------------------------------------------------------------------- /bq/events.py: -------------------------------------------------------------------------------- 1 | import blinker 2 | 3 | worker_init = blinker.signal("worker-init") 4 | 5 | task_failure = 
blinker.signal("task-failure") 6 | -------------------------------------------------------------------------------- /bq/models/__init__.py: -------------------------------------------------------------------------------- 1 | from .event import Event 2 | from .event import EventModelMixin 3 | from .event import EventModelRefTaskMixin 4 | from .event import EventType 5 | from .task import Task 6 | from .task import TaskModelMixin 7 | from .task import TaskModelRefEventMixin 8 | from .task import TaskModelRefParentMixin 9 | from .task import TaskModelRefWorkerMixin 10 | from .task import TaskState 11 | from .worker import Worker 12 | from .worker import WorkerModelMixin 13 | from .worker import WorkerRefMixin 14 | from .worker import WorkerState 15 | -------------------------------------------------------------------------------- /bq/models/event.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import enum 3 | import typing 4 | import uuid 5 | 6 | from sqlalchemy import DateTime 7 | from sqlalchemy import Enum 8 | from sqlalchemy import ForeignKey 9 | from sqlalchemy import func 10 | from sqlalchemy import String 11 | from sqlalchemy.dialects.postgresql import UUID 12 | from sqlalchemy.orm import declared_attr 13 | from sqlalchemy.orm import Mapped 14 | from sqlalchemy.orm import mapped_column 15 | from sqlalchemy.orm import relationship 16 | 17 | from ..db.base import Base 18 | from .helpers import make_repr_attrs 19 | 20 | 21 | class EventType(enum.Enum): 22 | # task failed 23 | FAILED = "FAILED" 24 | # task failed and retry scheduled 25 | FAILED_RETRY_SCHEDULED = "FAILED_RETRY_SCHEDULED" 26 | # task complete 27 | COMPLETE = "COMPLETE" 28 | 29 | 30 | class EventModelMixin: 31 | id: Mapped[uuid.UUID] = mapped_column( 32 | UUID(as_uuid=True), primary_key=True, server_default=func.gen_random_uuid() 33 | ) 34 | # type of the event 35 | type: Mapped[EventType] = mapped_column( 36 | Enum(EventType), 37 | nullable=False, 38 | index=True, 39 | ) 40 | # Error message 41 | error_message: Mapped[typing.Optional[str]] = mapped_column(String, nullable=True) 42 | # the scheduled at time for retry 43 | scheduled_at: Mapped[datetime.datetime] = mapped_column( 44 | DateTime(timezone=True), 45 | nullable=True, 46 | ) 47 | # created datetime of the event 48 | created_at: Mapped[datetime.datetime] = mapped_column( 49 | DateTime(timezone=True), nullable=False, server_default=func.now() 50 | ) 51 | 52 | 53 | class EventModelRefTaskMixin: 54 | # foreign key id of the task 55 | task_id: Mapped[uuid.UUID] = mapped_column( 56 | UUID(as_uuid=True), 57 | ForeignKey("bq_tasks.id", name="fk_event_task_id"), 58 | nullable=True, 59 | ) 60 | 61 | @declared_attr 62 | def task(cls) -> Mapped["Task"]: 63 | return relationship("Task", back_populates="events", uselist=False) 64 | 65 | 66 | class Event(EventModelMixin, EventModelRefTaskMixin, Base): 67 | __tablename__ = "bq_events" 68 | 69 | def __repr__(self) -> str: 70 | items = [ 71 | ("id", self.id), 72 | ("type", self.type), 73 | ("created_at", self.created_at), 74 | ("scheduled_at", self.scheduled_at), 75 | ] 76 | return f"<{self.__class__.__name__} {make_repr_attrs(items)}>" 77 | -------------------------------------------------------------------------------- /bq/models/helpers.py: -------------------------------------------------------------------------------- 1 | import typing 2 | 3 | 4 | def make_repr_attrs(items: typing.Sequence[typing.Tuple[str, typing.Any]]) -> str: 5 | return " ".join(map(lambda item: 
"=".join([item[0], str(item[1])]), items)) 6 | -------------------------------------------------------------------------------- /bq/models/task.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import enum 3 | import typing 4 | import uuid 5 | 6 | from sqlalchemy import Connection 7 | from sqlalchemy import DateTime 8 | from sqlalchemy import Enum 9 | from sqlalchemy import event 10 | from sqlalchemy import ForeignKey 11 | from sqlalchemy import func 12 | from sqlalchemy import inspect 13 | from sqlalchemy import String 14 | from sqlalchemy.dialects.postgresql import JSONB 15 | from sqlalchemy.dialects.postgresql import UUID 16 | from sqlalchemy.orm import declared_attr 17 | from sqlalchemy.orm import Mapped 18 | from sqlalchemy.orm import mapped_column 19 | from sqlalchemy.orm import Mapper 20 | from sqlalchemy.orm import relationship 21 | 22 | from ..db.base import Base 23 | from .helpers import make_repr_attrs 24 | 25 | 26 | class TaskState(enum.Enum): 27 | # task just created, not dispatched yet. or, the task failed and is waiting for a retry. 28 | PENDING = "PENDING" 29 | # a worker is processing the task right now 30 | PROCESSING = "PROCESSING" 31 | # the task is done 32 | DONE = "DONE" 33 | # the task is failed 34 | FAILED = "FAILED" 35 | 36 | 37 | class TaskModelMixin: 38 | id: Mapped[uuid.UUID] = mapped_column( 39 | UUID(as_uuid=True), primary_key=True, server_default=func.gen_random_uuid() 40 | ) 41 | # current state of the task 42 | state: Mapped[TaskState] = mapped_column( 43 | Enum(TaskState), 44 | nullable=False, 45 | default=TaskState.PENDING, 46 | server_default=TaskState.PENDING.value, 47 | index=True, 48 | ) 49 | # channel for workers and job creator to listen/notify 50 | channel: Mapped[str] = mapped_column(String, nullable=False, index=True) 51 | # module of the processor function 52 | module: Mapped[str] = mapped_column(String, nullable=False) 53 | # func name of the processor func 54 | func_name: Mapped[str] = mapped_column(String, nullable=False) 55 | # keyword arguments 56 | kwargs: Mapped[typing.Optional[typing.Any]] = mapped_column(JSONB, nullable=True) 57 | # Result of the task 58 | result: Mapped[typing.Optional[typing.Any]] = mapped_column(JSONB, nullable=True) 59 | # Error message 60 | error_message: Mapped[typing.Optional[str]] = mapped_column(String, nullable=True) 61 | # created datetime of the task 62 | created_at: Mapped[datetime.datetime] = mapped_column( 63 | DateTime(timezone=True), nullable=False, server_default=func.now() 64 | ) 65 | # scheduled to run at a specific time 66 | scheduled_at: Mapped[datetime.datetime] = mapped_column( 67 | DateTime(timezone=True), 68 | nullable=True, 69 | ) 70 | 71 | 72 | class TaskModelRefWorkerMixin: 73 | # foreign key id of assigned worker 74 | worker_id: Mapped[uuid.UUID] = mapped_column( 75 | UUID(as_uuid=True), 76 | ForeignKey("bq_workers.id", name="fk_workers_id"), 77 | nullable=True, 78 | ) 79 | 80 | @declared_attr 81 | def worker(cls) -> Mapped["Worker"]: 82 | return relationship("Worker", back_populates="tasks", uselist=False) 83 | 84 | 85 | class TaskModelRefParentMixin: 86 | # foreign key id of the source task which created the current task while we are processing it 87 | parent_id: Mapped[uuid.UUID] = mapped_column( 88 | UUID(as_uuid=True), 89 | ForeignKey("bq_tasks.id", name="fk_task_parent_task_id"), 90 | nullable=True, 91 | ) 92 | 93 | @declared_attr 94 | def parent(cls) -> Mapped[typing.Optional["Task"]]: 95 | return relationship( 96 | "Task", 
97 | back_populates="children", 98 | remote_side=[cls.id], 99 | foreign_keys=[cls.parent_id], 100 | uselist=False, 101 | ) 102 | 103 | @declared_attr 104 | def children(cls) -> Mapped[list["Task"]]: 105 | return relationship( 106 | "Task", foreign_keys=[cls.parent_id], back_populates="parent" 107 | ) 108 | 109 | 110 | class TaskModelRefEventMixin: 111 | @declared_attr 112 | def events(cls) -> Mapped[list["Event"]]: 113 | return relationship("Event", back_populates="task") 114 | 115 | 116 | class Task( 117 | TaskModelMixin, 118 | TaskModelRefWorkerMixin, 119 | TaskModelRefEventMixin, 120 | TaskModelRefParentMixin, 121 | Base, 122 | ): 123 | __tablename__ = "bq_tasks" 124 | 125 | def __repr__(self) -> str: 126 | items = [ 127 | ("id", self.id), 128 | ("state", self.state), 129 | ("channel", self.channel), 130 | ("module", self.module), 131 | ("func_name", self.func_name), 132 | ] 133 | return f"<{self.__class__.__name__} {make_repr_attrs(items)}>" 134 | 135 | 136 | def notify_if_needed(connection: Connection, task: Task): 137 | session = inspect(task).session 138 | transaction = session.get_transaction() 139 | if transaction is not None: 140 | key = "_notified_channels" 141 | if hasattr(transaction, key): 142 | notified_channels = getattr(transaction, key) 143 | else: 144 | notified_channels = set() 145 | setattr(transaction, key, notified_channels) 146 | 147 | if task.channel in notified_channels: 148 | # already notified, skip 149 | return 150 | notified_channels.add(task.channel) 151 | 152 | quoted_channel = connection.dialect.identifier_preparer.quote_identifier( 153 | task.channel 154 | ) 155 | connection.exec_driver_sql(f"NOTIFY {quoted_channel}") 156 | 157 | 158 | def task_insert_notify(mapper: Mapper, connection: Connection, target: Task): 159 | if target.state != TaskState.PENDING: 160 | return 161 | notify_if_needed(connection, target) 162 | 163 | 164 | def task_update_notify(mapper: Mapper, connection: Connection, target: Task): 165 | history = inspect(target).attrs.state.history 166 | if not history.has_changes(): 167 | return 168 | if target.state != TaskState.PENDING: 169 | return 170 | notify_if_needed(connection, target) 171 | 172 | 173 | def listen_events(model_cls: typing.Type): 174 | event.listens_for(model_cls, "after_insert")(task_insert_notify) 175 | event.listens_for(model_cls, "after_update")(task_update_notify) 176 | 177 | 178 | listen_events(Task) 179 | -------------------------------------------------------------------------------- /bq/models/worker.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import enum 3 | import uuid 4 | 5 | from sqlalchemy import DateTime 6 | from sqlalchemy import Enum 7 | from sqlalchemy import func 8 | from sqlalchemy import String 9 | from sqlalchemy.dialects.postgresql import ARRAY 10 | from sqlalchemy.dialects.postgresql import UUID 11 | from sqlalchemy.orm import declared_attr 12 | from sqlalchemy.orm import Mapped 13 | from sqlalchemy.orm import mapped_column 14 | from sqlalchemy.orm import relationship 15 | 16 | from ..db.base import Base 17 | from .helpers import make_repr_attrs 18 | 19 | 20 | class WorkerState(enum.Enum): 21 | # the worker is running 22 | RUNNING = "RUNNING" 23 | # the worker shuts down normally 24 | SHUTDOWN = "SHUTDOWN" 25 | # The worker has no heartbeat for a while 26 | NO_HEARTBEAT = "NO_HEARTBEAT" 27 | 28 | 29 | class WorkerModelMixin: 30 | id: Mapped[uuid.UUID] = mapped_column( 31 | UUID(as_uuid=True), primary_key=True, 
server_default=func.gen_random_uuid() 32 | ) 33 | # current state of the worker 34 | state: Mapped[WorkerState] = mapped_column( 35 | Enum(WorkerState), 36 | nullable=False, 37 | default=WorkerState.RUNNING, 38 | server_default=WorkerState.RUNNING.value, 39 | index=True, 40 | ) 41 | # name of the worker 42 | name: Mapped[str] = mapped_column(String, nullable=False) 43 | # the channels we are processing 44 | channels: Mapped[list[str]] = mapped_column(ARRAY(String), nullable=False) 45 | # last heartbeat of this worker 46 | last_heartbeat: Mapped[datetime.datetime] = mapped_column( 47 | DateTime(timezone=True), 48 | nullable=False, 49 | server_default=func.now(), 50 | index=True, 51 | ) 52 | # created datetime of the worker 53 | created_at: Mapped[datetime.datetime] = mapped_column( 54 | DateTime(timezone=True), nullable=False, server_default=func.now() 55 | ) 56 | 57 | 58 | class WorkerRefMixin: 59 | @declared_attr 60 | def tasks(cls) -> Mapped[list["Task"]]: 61 | return relationship( 62 | "Task", 63 | back_populates="worker", 64 | cascade="all,delete", 65 | order_by="Task.created_at", 66 | ) 67 | 68 | 69 | class Worker(WorkerModelMixin, WorkerRefMixin, Base): 70 | __tablename__ = "bq_workers" 71 | 72 | def __repr__(self) -> str: 73 | items = [ 74 | ("id", self.id), 75 | ("name", self.name), 76 | ("channels", self.channels), 77 | ("state", self.state), 78 | ] 79 | return f"<{self.__class__.__name__} {make_repr_attrs(items)}>" 80 | -------------------------------------------------------------------------------- /bq/processors/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LaunchPlatform/bq/1ce563cd90ce02d5945baf52c93595d010e0d682/bq/processors/__init__.py -------------------------------------------------------------------------------- /bq/processors/processor.py: -------------------------------------------------------------------------------- 1 | import contextvars 2 | import dataclasses 3 | import datetime 4 | import inspect 5 | import logging 6 | import typing 7 | 8 | from sqlalchemy import select 9 | from sqlalchemy.orm import object_session 10 | 11 | from .. import events 12 | from .. import models 13 | 14 | logger = logging.getLogger(__name__) 15 | current_task = contextvars.ContextVar("current_task") 16 | 17 | 18 | @dataclasses.dataclass(frozen=True) 19 | class Processor: 20 | channel: str 21 | module: str 22 | name: str 23 | func: typing.Callable 24 | # should we auto complete the task or not 25 | auto_complete: bool = True 26 | # The retry policy function for returning a new scheduled time for next attempt 27 | retry_policy: typing.Callable | None = None 28 | # The exceptions we suppose to retry when encountered 29 | retry_exceptions: typing.Type | typing.Tuple[typing.Type, ...] 
| None = None 30 | 31 | def process(self, task: models.Task, event_cls: typing.Type | None = None): 32 | ctx_token = current_task.set(task) 33 | try: 34 | db = object_session(task) 35 | func_signature = inspect.signature(self.func) 36 | base_kwargs = {} 37 | if "task" in func_signature.parameters: 38 | base_kwargs["task"] = task 39 | if "db" in func_signature.parameters: 40 | base_kwargs["db"] = db 41 | try: 42 | with db.begin_nested() as savepoint: 43 | if "savepoint" in func_signature.parameters: 44 | base_kwargs["savepoint"] = savepoint 45 | result = self.func(**base_kwargs, **task.kwargs) 46 | except Exception as exc: 47 | logger.error("Unhandled exception for task %s", task.id, exc_info=True) 48 | events.task_failure.send(self, task=task, exception=exc) 49 | task.state = models.TaskState.FAILED 50 | task.error_message = str(exc) 51 | retry_scheduled_at = None 52 | if ( 53 | self.retry_exceptions is None 54 | or isinstance(exc, self.retry_exceptions) 55 | ) and self.retry_policy is not None: 56 | retry_scheduled_at = self.retry_policy(task) 57 | if retry_scheduled_at is not None: 58 | task.state = models.TaskState.PENDING 59 | task.scheduled_at = retry_scheduled_at 60 | if isinstance(retry_scheduled_at, datetime.datetime): 61 | retry_scheduled_at_value = retry_scheduled_at 62 | else: 63 | retry_scheduled_at_value = db.scalar( 64 | select(retry_scheduled_at) 65 | ) 66 | logger.info( 67 | "Schedule task %s for retry at %s", 68 | task.id, 69 | retry_scheduled_at_value, 70 | ) 71 | if event_cls is not None: 72 | event = event_cls( 73 | task=task, 74 | type=models.EventType.FAILED 75 | if retry_scheduled_at is None 76 | else models.EventType.FAILED_RETRY_SCHEDULED, 77 | error_message=task.error_message, 78 | scheduled_at=retry_scheduled_at, 79 | ) 80 | db.add(event) 81 | db.add(task) 82 | return 83 | if self.auto_complete: 84 | logger.info("Task %s auto complete", task.id) 85 | task.state = models.TaskState.DONE 86 | task.result = result 87 | if event_cls is not None: 88 | event = event_cls( 89 | task=task, 90 | type=models.EventType.COMPLETE, 91 | ) 92 | db.add(event) 93 | db.add(task) 94 | return result 95 | finally: 96 | current_task.reset(ctx_token) 97 | 98 | 99 | class ProcessorHelper: 100 | """Helper function to replace the decorated processor function and make creating Task model much easier""" 101 | 102 | def __init__(self, processor: Processor, task_cls: typing.Type = models.Task): 103 | self._processor = processor 104 | self._task_cls = task_cls 105 | 106 | def __call__(self, *args, **kwargs): 107 | return self._processor.func(*args, **kwargs) 108 | 109 | def run(self, **kwargs) -> models.Task: 110 | try: 111 | parent = current_task.get() 112 | except LookupError: 113 | parent = None 114 | return self._task_cls( 115 | channel=self._processor.channel, 116 | module=self._processor.module, 117 | func_name=self._processor.name, 118 | kwargs=kwargs, 119 | parent=parent, 120 | ) 121 | -------------------------------------------------------------------------------- /bq/processors/registry.py: -------------------------------------------------------------------------------- 1 | import collections 2 | import logging 3 | import typing 4 | 5 | import venusian 6 | from sqlalchemy.orm import object_session 7 | 8 | from .. import constants 9 | from .. 
import models 10 | from .processor import Processor 11 | 12 | 13 | class Registry: 14 | def __init__(self): 15 | self.logger = logging.getLogger(__name__) 16 | self.processors = collections.defaultdict(lambda: collections.defaultdict(dict)) 17 | 18 | def add(self, processor: Processor): 19 | self.processors[processor.channel][processor.module][processor.name] = processor 20 | 21 | def process( 22 | self, 23 | task: models.Task, 24 | event_cls: typing.Type | None = None, 25 | ) -> typing.Any: 26 | modules = self.processors.get(task.channel, {}) 27 | functions = modules.get(task.module, {}) 28 | processor: Processor = functions.get(task.func_name) 29 | db = object_session(task) 30 | if processor is None: 31 | self.logger.error( 32 | "Cannot find processor for task %s with module=%s, func=%s", 33 | task.id, 34 | task.module, 35 | task.func_name, 36 | ) 37 | task.state = models.TaskState.FAILED 38 | task.error_message = f"Cannot find processor for task with module={task.module}, func={task.func_name}" 39 | if event_cls is not None: 40 | event = event_cls( 41 | task=task, 42 | type=models.EventType.FAILED, 43 | error_message=task.error_message, 44 | ) 45 | db.add(event) 46 | db.add(task) 47 | return 48 | return processor.process(task, event_cls=event_cls) 49 | 50 | 51 | def collect(packages: list[typing.Any], registry: Registry | None = None) -> Registry: 52 | if registry is None: 53 | registry = Registry() 54 | scanner = venusian.Scanner(registry=registry) 55 | for package in packages: 56 | scanner.scan(package, categories=(constants.BQ_PROCESSOR_CATEGORY,)) 57 | return registry 58 | -------------------------------------------------------------------------------- /bq/processors/retry_policies.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import typing 3 | 4 | from sqlalchemy import func 5 | from sqlalchemy import inspect 6 | from sqlalchemy.orm import object_session 7 | 8 | from .. 
import models 9 | 10 | 11 | def get_failure_times(task: models.Task) -> int: 12 | db = object_session(task) 13 | task_info = inspect(task.__class__) 14 | event_cls = task_info.attrs["events"].entity.class_ 15 | return ( 16 | db.query(event_cls) 17 | .filter(event_cls.task == task) 18 | .filter(event_cls.type == models.EventType.FAILED_RETRY_SCHEDULED) 19 | ).count() 20 | 21 | 22 | class DelayRetry: 23 | def __init__(self, delay: datetime.timedelta): 24 | self.delay = delay 25 | 26 | def __call__(self, task: models.Task) -> typing.Any: 27 | return func.now() + self.delay 28 | 29 | 30 | class ExponentialBackoffRetry: 31 | def __init__( 32 | self, base: float = 2, exponent_offset: float = 0, exponent_scalar: float = 1.0 33 | ): 34 | self.base = base 35 | self.exponent_offset = exponent_offset 36 | self.exponent_scalar = exponent_scalar 37 | 38 | def __call__(self, task: models.Task) -> typing.Any: 39 | failure_times = get_failure_times(task) 40 | delay_seconds = self.base ** ( 41 | self.exponent_offset + (self.exponent_scalar * (failure_times + 1)) 42 | ) 43 | return func.now() + datetime.timedelta(seconds=delay_seconds) 44 | 45 | 46 | class LimitAttempt: 47 | def __init__(self, maximum_attempt: int, retry_policy: typing.Callable): 48 | self.maximum_attempt = maximum_attempt 49 | self.retry_policy = retry_policy 50 | 51 | def __call__(self, task: models.Task) -> typing.Any: 52 | failure_times = get_failure_times(task) 53 | if (failure_times + 1) >= self.maximum_attempt: 54 | return None 55 | return self.retry_policy(task) 56 | -------------------------------------------------------------------------------- /bq/services/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LaunchPlatform/bq/1ce563cd90ce02d5945baf52c93595d010e0d682/bq/services/__init__.py -------------------------------------------------------------------------------- /bq/services/dispatch.py: -------------------------------------------------------------------------------- 1 | import dataclasses 2 | import select 3 | import typing 4 | import uuid 5 | 6 | from sqlalchemy import func 7 | from sqlalchemy import null 8 | from sqlalchemy import or_ 9 | from sqlalchemy.orm import Query 10 | 11 | from .. 
import models 12 | from ..db.session import Session 13 | 14 | 15 | @dataclasses.dataclass(frozen=True) 16 | class Notification: 17 | pid: int 18 | channel: str 19 | payload: typing.Optional[str] = None 20 | 21 | 22 | class DispatchService: 23 | def __init__(self, session: Session, task_model: typing.Type = models.Task): 24 | self.session = session 25 | self.task_model: typing.Type[models.Task] = task_model 26 | 27 | def make_task_query( 28 | self, 29 | channels: typing.Sequence[str], 30 | limit: int = 1, 31 | now: typing.Any = func.now(), 32 | ) -> Query: 33 | return ( 34 | self.session.query(self.task_model.id) 35 | .filter(self.task_model.channel.in_(channels)) 36 | .filter(self.task_model.state == models.TaskState.PENDING) 37 | .filter( 38 | or_( 39 | self.task_model.scheduled_at.is_(null()), 40 | now >= self.task_model.scheduled_at, 41 | ) 42 | ) 43 | .order_by(self.task_model.created_at) 44 | .limit(limit) 45 | .with_for_update(skip_locked=True) 46 | ) 47 | 48 | def make_update_query(self, task_query: typing.Any, worker_id: typing.Any): 49 | return ( 50 | self.task_model.__table__.update() 51 | .where(self.task_model.id.in_(task_query)) 52 | .values( 53 | state=models.TaskState.PROCESSING, 54 | worker_id=worker_id, 55 | ) 56 | .returning(self.task_model.id) 57 | ) 58 | 59 | def dispatch( 60 | self, 61 | channels: typing.Sequence[str], 62 | worker_id: uuid.UUID, 63 | limit: int = 1, 64 | now: typing.Any = func.now(), 65 | ) -> Query: 66 | task_query = self.make_task_query(channels, limit=limit, now=now) 67 | task_subquery = task_query.scalar_subquery() 68 | task_ids = [ 69 | item[0] 70 | for item in self.session.execute( 71 | self.make_update_query(task_subquery, worker_id=worker_id) 72 | ) 73 | ] 74 | # TODO: ideally returning with (self.task_model) should return the whole model, but SQLAlchemy is returning 75 | # it columns in rows. 
We can save a round trip if we can find out how to solve this 76 | return self.session.query(self.task_model).filter( 77 | self.task_model.id.in_(task_ids) 78 | ) 79 | 80 | def listen(self, channels: typing.Sequence[str]): 81 | conn = self.session.connection() 82 | for channel in channels: 83 | quoted_channel = conn.dialect.identifier_preparer.quote_identifier(channel) 84 | conn.exec_driver_sql(f"LISTEN {quoted_channel}") 85 | 86 | def poll(self, timeout: int = 5) -> typing.Generator[Notification, None, None]: 87 | conn = self.session.connection() 88 | driver_conn = conn.connection.driver_connection 89 | 90 | def pop_notifies(): 91 | while driver_conn.notifies: 92 | notify = driver_conn.notifies.pop(0) 93 | yield Notification( 94 | pid=notify.pid, 95 | channel=notify.channel, 96 | payload=notify.payload, 97 | ) 98 | 99 | # poll first to see if there's anything already 100 | driver_conn.poll() 101 | if driver_conn.notifies: 102 | yield from pop_notifies() 103 | else: 104 | # okay, nothing, let's select and wait for new stuff 105 | if select.select([driver_conn], [], [], timeout) == ([], [], []): 106 | # nope, nothing, times out 107 | raise TimeoutError("Timeout waiting for new notifications") 108 | else: 109 | # yep, we got something 110 | driver_conn.poll() 111 | yield from pop_notifies() 112 | 113 | def notify(self, channels: typing.Sequence[str]): 114 | conn = self.session.connection() 115 | for channel in channels: 116 | quoted_channel = conn.dialect.identifier_preparer.quote_identifier(channel) 117 | conn.exec_driver_sql(f"NOTIFY {quoted_channel}") 118 | -------------------------------------------------------------------------------- /bq/services/worker.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import typing 3 | 4 | from sqlalchemy import func 5 | from sqlalchemy.orm import Query 6 | from sqlalchemy.orm import Session 7 | 8 | from .. 
import models 9 | 10 | 11 | class WorkerService: 12 | def __init__( 13 | self, 14 | session: Session, 15 | task_model: typing.Type = models.Task, 16 | worker_model: typing.Type = models.Worker, 17 | ): 18 | self.session = session 19 | self.task_model: typing.Type[models.Task] = task_model 20 | self.worker_model: typing.Type[models.Worker] = worker_model 21 | 22 | def get_worker(self, id: typing.Any) -> typing.Any: 23 | return self.session.get(self.worker_model, id) 24 | 25 | def make_worker(self, name: str, channels: tuple[str, ...]): 26 | return self.worker_model(name=name, channels=channels) 27 | 28 | def update_heartbeat(self, worker: models.Worker): 29 | worker.last_heartbeat = func.now() 30 | self.session.add(worker) 31 | 32 | def make_dead_worker_query(self, timeout: int, limit: int = 5) -> Query: 33 | return ( 34 | self.session.query(self.worker_model.id) 35 | .filter( 36 | self.worker_model.last_heartbeat 37 | < (func.now() - datetime.timedelta(seconds=timeout)) 38 | ) 39 | .filter(self.worker_model.state == models.WorkerState.RUNNING) 40 | .limit(limit) 41 | .with_for_update(skip_locked=True) 42 | ) 43 | 44 | def make_update_dead_worker_query(self, worker_query: typing.Any): 45 | return ( 46 | self.worker_model.__table__.update() 47 | .where(self.worker_model.id.in_(worker_query)) 48 | .values( 49 | state=models.WorkerState.NO_HEARTBEAT, 50 | ) 51 | .returning(self.worker_model.id) 52 | ) 53 | 54 | def fetch_dead_workers(self, timeout: int, limit: int = 5) -> Query: 55 | dead_worker_query = self.make_dead_worker_query(timeout=timeout, limit=limit) 56 | dead_worker_subquery = dead_worker_query.scalar_subquery() 57 | worker_ids = [ 58 | item[0] 59 | for item in self.session.execute( 60 | self.make_update_dead_worker_query(dead_worker_subquery) 61 | ) 62 | ] 63 | # TODO: ideally returning with (models.Task) should return the whole model, but SQLAlchemy is returning 64 | # it columns in rows. 
We can save a round trip if we can find out how to solve this 65 | return self.session.query(self.worker_model).filter( 66 | self.worker_model.id.in_(worker_ids) 67 | ) 68 | 69 | def make_update_tasks_query(self, worker_query: typing.Any): 70 | return ( 71 | self.task_model.__table__.update() 72 | .where(self.task_model.worker_id.in_(worker_query)) 73 | .where(self.task_model.state == models.TaskState.PROCESSING) 74 | .values( 75 | state=models.TaskState.PENDING, 76 | worker_id=None, 77 | ) 78 | ) 79 | 80 | def reschedule_dead_tasks(self, worker_query: typing.Any) -> int: 81 | update_dead_task_query = self.make_update_tasks_query(worker_query=worker_query) 82 | res = self.session.execute(update_dead_task_query) 83 | return res.rowcount 84 | -------------------------------------------------------------------------------- /bq/utils.py: -------------------------------------------------------------------------------- 1 | import importlib 2 | import typing 3 | 4 | 5 | def load_module_var(name: str) -> typing.Type: 6 | module_name, model_name = name.rsplit(".", 1) 7 | module = importlib.import_module(module_name) 8 | return getattr(module, model_name) 9 | -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | services: 3 | psql: 4 | environment: 5 | POSTGRES_HOST_AUTH_METHOD: trust 6 | POSTGRES_PASSWORD: "" 7 | POSTGRES_USER: "bq" 8 | POSTGRES_DB: "bq" 9 | image: "postgres:16.3" 10 | ports: 11 | - "5432:5432" 12 | volumes: 13 | - psqldata:/var/lib/postgresql/data 14 | - ./tests/.create-test-db.sql:/docker-entrypoint-initdb.d/create-test-db.sql 15 | healthcheck: 16 | test: ["CMD", "pg_isready", "-d", "bq"] 17 | interval: 5s 18 | timeout: 5s 19 | retries: 10 20 | volumes: 21 | psqldata: 22 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "beanqueue" 3 | version = "1.1.7" 4 | description = "BeanQueue or BQ for short, PostgreSQL SKIP LOCK and SQLAlchemy based worker queue library" 5 | authors = [{ name = "Fang-Pen Lin", email = "fangpen@launchplatform.com" }] 6 | requires-python = "~=3.11" 7 | readme = "README.md" 8 | license = "MIT" 9 | dependencies = [ 10 | "sqlalchemy>=2.0.30,<3", 11 | "venusian>=3.1.0,<4", 12 | "click>=8.1.7,<9", 13 | "pydantic-settings>=2.2.1,<3", 14 | "blinker>=1.8.2,<2", 15 | "rich>=13.7.1,<14", 16 | ] 17 | 18 | [project.scripts] 19 | bq = "bq.cmds.main:cli" 20 | 21 | [dependency-groups] 22 | dev = [ 23 | "psycopg2-binary>=2.9.9,<3", 24 | "pytest-factoryboy>=2.7.0,<3", 25 | ] 26 | 27 | [tool.hatch.build.targets.sdist] 28 | include = ["bq"] 29 | 30 | [tool.hatch.build.targets.wheel] 31 | include = ["bq"] 32 | 33 | [build-system] 34 | requires = ["hatchling"] 35 | build-backend = "hatchling.build" 36 | -------------------------------------------------------------------------------- /tests/.create-test-db.sql: -------------------------------------------------------------------------------- 1 | CREATE DATABASE bq_test; 2 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LaunchPlatform/bq/1ce563cd90ce02d5945baf52c93595d010e0d682/tests/__init__.py 
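
Taken together, the pieces above form a small produce-and-consume flow: a function registered as a processor is bound to a channel, its .run() helper builds a PENDING Task row, committing that row fires a NOTIFY on the channel (see bq/models/task.py), and worker processes claim pending rows through DispatchService with SELECT ... FOR UPDATE SKIP LOCKED. The following is a minimal usage sketch modeled on the acceptance-test fixtures further below; the my_app.processors package path, the send_email function, and the way the SQLAlchemy session is obtained are assumptions for illustration, not part of this repository.

# usage_sketch.py - illustrative only; see the assumptions noted above
import bq
from bq.config import Config

app = bq.BeanQueue()
app.config = Config(
    DATABASE_URL="postgresql://bq@localhost/bq",
    PROCESSOR_PACKAGES=["my_app.processors"],  # assumed package containing this module
)


@app.processor(channel="emails")
def send_email(task: bq.Task, to: str, subject: str):
    # receives the Task row plus the keyword arguments stored in task.kwargs
    return {"delivered_to": to, "subject": subject}


def submit(db):
    # db is assumed to be a SQLAlchemy session bound to the same database.
    # run() only builds a PENDING Task model; the commit below fires NOTIFY "emails".
    task = send_email.run(to="user@example.com", subject="hello")
    db.add(task)
    db.commit()


if __name__ == "__main__":
    # block and process tasks on the "emails" channel, as the acceptance test
    # below does across multiple worker processes
    app.process_tasks(channels=("emails",))
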
-------------------------------------------------------------------------------- /tests/acceptance/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LaunchPlatform/bq/1ce563cd90ce02d5945baf52c93595d010e0d682/tests/acceptance/__init__.py -------------------------------------------------------------------------------- /tests/acceptance/fixtures/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LaunchPlatform/bq/1ce563cd90ce02d5945baf52c93595d010e0d682/tests/acceptance/fixtures/__init__.py -------------------------------------------------------------------------------- /tests/acceptance/fixtures/app.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LaunchPlatform/bq/1ce563cd90ce02d5945baf52c93595d010e0d682/tests/acceptance/fixtures/app.py -------------------------------------------------------------------------------- /tests/acceptance/fixtures/processors.py: -------------------------------------------------------------------------------- 1 | import bq 2 | 3 | app = bq.BeanQueue() 4 | 5 | 6 | @app.processor(channel="acceptance-tests") 7 | def sum(task: bq.Task, num_0: int, num_1: int): 8 | return num_0 + num_1 9 | -------------------------------------------------------------------------------- /tests/acceptance/test_process_cmd.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import time 3 | from multiprocessing import Process 4 | 5 | from sqlalchemy.orm import Session 6 | 7 | from .fixtures.processors import app 8 | from .fixtures.processors import sum 9 | from bq import models 10 | from bq.config import Config 11 | 12 | 13 | def run_process_cmd(db_url: str): 14 | app.config = Config( 15 | PROCESSOR_PACKAGES=["tests.acceptance.fixtures.processors"], 16 | DATABASE_URL=db_url, 17 | ) 18 | app.process_tasks(channels=("acceptance-tests",)) 19 | 20 | 21 | def test_process_cmd(db: Session, db_url: str): 22 | procs = [] 23 | for _ in range(10): 24 | proc = Process(target=run_process_cmd, args=(db_url,)) 25 | proc.start() 26 | procs.append(proc) 27 | 28 | task_count = 1000 29 | for i in range(task_count): 30 | task = sum.run(num_0=i, num_1=i * 3) 31 | db.add(task) 32 | db.commit() 33 | 34 | begin = datetime.datetime.now() 35 | while True: 36 | done_tasks = ( 37 | db.query(models.Task) 38 | .filter(models.Task.state == models.TaskState.DONE) 39 | .count() 40 | ) 41 | if done_tasks == task_count: 42 | break 43 | delta = datetime.datetime.now() - begin 44 | if delta.total_seconds() > 30: 45 | raise TimeoutError("Timeout waiting for all tasks to finish") 46 | time.sleep(1) 47 | 48 | for proc in procs: 49 | proc.kill() 50 | proc.join(3) 51 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import os 2 | import typing 3 | 4 | import pytest 5 | from pytest_factoryboy import register 6 | from sqlalchemy.engine import create_engine 7 | from sqlalchemy.engine import Engine 8 | 9 | from .factories import EventFactory 10 | from .factories import TaskFactory 11 | from .factories import WorkerFactory 12 | from bq.db.base import Base 13 | from bq.db.session import Session 14 | 15 | register(TaskFactory) 16 | register(WorkerFactory) 17 | register(EventFactory) 18 | 19 | 20 | @pytest.fixture 21 | def 
db_url() -> str: 22 | return os.environ.get("TEST_DB_URL", "postgresql://bq:@localhost/bq_test") 23 | 24 | 25 | @pytest.fixture 26 | def engine(db_url: str) -> Engine: 27 | return create_engine(db_url) 28 | 29 | 30 | @pytest.fixture 31 | def db(engine: Engine) -> typing.Generator[Session, None, None]: 32 | Session.configure(bind=engine) 33 | Base.metadata.create_all(bind=engine) 34 | try: 35 | yield Session 36 | finally: 37 | Session.remove() 38 | Base.metadata.drop_all(bind=engine) 39 | -------------------------------------------------------------------------------- /tests/factories.py: -------------------------------------------------------------------------------- 1 | from factory import Faker 2 | from factory import SubFactory 3 | from factory.alchemy import SQLAlchemyModelFactory 4 | from sqlalchemy import func 5 | 6 | from bq import models 7 | from bq.db.session import Session 8 | 9 | 10 | class BaseFactory(SQLAlchemyModelFactory): 11 | class Meta: 12 | abstract = True 13 | sqlalchemy_session = Session 14 | 15 | 16 | class WorkerFactory(BaseFactory): 17 | state = models.WorkerState.RUNNING 18 | name = Faker("slug") 19 | channels = ["default"] 20 | last_heartbeat = func.now() 21 | created_at = func.now() 22 | 23 | class Meta: 24 | model = models.Worker 25 | sqlalchemy_session_persistence = "commit" 26 | 27 | 28 | class TaskFactory(BaseFactory): 29 | state = models.TaskState.PENDING 30 | channel = Faker("slug") 31 | module = Faker("slug") 32 | func_name = Faker("slug") 33 | worker = None 34 | created_at = func.now() 35 | scheduled_at = None 36 | kwargs = {} 37 | 38 | class Meta: 39 | model = models.Task 40 | sqlalchemy_session_persistence = "commit" 41 | 42 | 43 | class EventFactory(BaseFactory): 44 | type = models.EventType.COMPLETE 45 | task = SubFactory(TaskFactory) 46 | created_at = func.now() 47 | error_message = None 48 | scheduled_at = None 49 | 50 | class Meta: 51 | model = models.Event 52 | sqlalchemy_session_persistence = "commit" 53 | -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LaunchPlatform/bq/1ce563cd90ce02d5945baf52c93595d010e0d682/tests/unit/__init__.py -------------------------------------------------------------------------------- /tests/unit/fixtures/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LaunchPlatform/bq/1ce563cd90ce02d5945baf52c93595d010e0d682/tests/unit/fixtures/__init__.py -------------------------------------------------------------------------------- /tests/unit/fixtures/processors.py: -------------------------------------------------------------------------------- 1 | from bq import models 2 | from bq.app import BeanQueue 3 | 4 | 5 | app = BeanQueue() 6 | 7 | 8 | @app.processor(channel="mock-channel") 9 | def processor0(task: models.Task): 10 | return "processed by processor0" 11 | 12 | 13 | @app.processor(channel="mock-channel2") 14 | def processor1(task: models.Task, kwarg0: str): 15 | return kwarg0 16 | -------------------------------------------------------------------------------- /tests/unit/processors/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LaunchPlatform/bq/1ce563cd90ce02d5945baf52c93595d010e0d682/tests/unit/processors/__init__.py 
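
Before the processor unit tests, a note on how the retry pieces compose: Processor.retry_policy is any callable that takes the failed Task and returns either a new scheduled_at value (a datetime or a SQL expression such as func.now() + delta) or None to stop retrying, while retry_exceptions limits which exceptions trigger a retry at all. The sketch below wires LimitAttempt around ExponentialBackoffRetry on a hand-built Processor; it is illustrative only, the unreliable_fetch function and the my_app.fetchers module string are assumptions, and it relies on process() being called with event_cls=models.Event so that get_failure_times() has FAILED_RETRY_SCHEDULED events to count.

# retry_sketch.py - illustrative only; see the assumptions noted above
from bq import models
from bq.processors.processor import Processor
from bq.processors.retry_policies import ExponentialBackoffRetry
from bq.processors.retry_policies import LimitAttempt


def unreliable_fetch(task: models.Task, url: str):
    # stand-in for a flaky network call
    raise TimeoutError(f"could not fetch {url}")


fetch_processor = Processor(
    channel="fetch",
    module="my_app.fetchers",  # assumed module path
    name="unreliable_fetch",
    func=unreliable_fetch,
    # only TimeoutError is retried; any other exception marks the task FAILED outright
    retry_exceptions=TimeoutError,
    # give up on the 6th failure; earlier failures are rescheduled after
    # 2 ** (3 + 2 * (prior_failures + 1)) seconds
    retry_policy=LimitAttempt(
        6, ExponentialBackoffRetry(base=2, exponent_offset=3, exponent_scalar=2)
    ),
)

# In a worker loop one would call, e.g.:
#     fetch_processor.process(task, event_cls=models.Event)
# which records a FAILED_RETRY_SCHEDULED event for each retried failure.
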
-------------------------------------------------------------------------------- /tests/unit/processors/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | @pytest.fixture 5 | def processor_module() -> str: 6 | return ".".join(__name__.split(".")[:-2]) + ".fixtures.processors" 7 | -------------------------------------------------------------------------------- /tests/unit/processors/test_processor.py: -------------------------------------------------------------------------------- 1 | import typing 2 | 3 | import pytest 4 | from sqlalchemy.orm import Session 5 | 6 | from bq import models 7 | from bq.processors.processor import current_task 8 | from bq.processors.processor import Processor 9 | 10 | 11 | @pytest.mark.parametrize( 12 | "func, expected", 13 | [ 14 | (lambda: [], []), 15 | (lambda task: ["task"], ["task"]), 16 | (lambda task, db: ["task", "db"], ["task", "db"]), 17 | ], 18 | ) 19 | def test_process_task_kwargs( 20 | db: Session, task: models.Task, func: typing.Callable, expected: list 21 | ): 22 | processor = Processor( 23 | channel="mock-channel", module="mock.module", name="my_func", func=func 24 | ) 25 | assert frozenset(processor.process(task=task)) == frozenset(expected) 26 | 27 | 28 | @pytest.mark.parametrize("task__state", [models.TaskState.PROCESSING]) 29 | @pytest.mark.parametrize( 30 | "auto_complete, expected_state", 31 | [ 32 | (True, models.TaskState.DONE), 33 | (False, models.TaskState.PROCESSING), 34 | ], 35 | ) 36 | def test_process_task_auto_complete( 37 | db: Session, 38 | task: models.Task, 39 | auto_complete: bool, 40 | expected_state: models.TaskState, 41 | ): 42 | called = False 43 | 44 | def func(): 45 | nonlocal called 46 | called = True 47 | return "result" 48 | 49 | processor = Processor( 50 | channel="mock-channel", 51 | module="mock.module", 52 | name="my_func", 53 | func=func, 54 | auto_complete=auto_complete, 55 | ) 56 | assert processor.process(task=task) == "result" 57 | db.commit() 58 | assert task.state == expected_state 59 | assert called 60 | 61 | 62 | def test_process_task_events( 63 | db: Session, 64 | task: models.Task, 65 | ): 66 | def func(): 67 | return "result" 68 | 69 | processor = Processor( 70 | channel="mock-channel", 71 | module="mock.module", 72 | name="my_func", 73 | func=func, 74 | auto_complete=True, 75 | ) 76 | assert processor.process(task=task, event_cls=models.Event) == "result" 77 | db.commit() 78 | db.expire_all() 79 | assert len(task.events) == 1 80 | event = task.events[0] 81 | assert event.type == models.EventType.COMPLETE 82 | assert event.error_message is None 83 | assert event.scheduled_at is None 84 | 85 | 86 | def test_process_task_unhandled_exception( 87 | db: Session, 88 | task: models.Task, 89 | ): 90 | def func(): 91 | raise ValueError("boom") 92 | 93 | processor = Processor( 94 | channel="mock-channel", 95 | module="mock.module", 96 | name="my_func", 97 | func=func, 98 | ) 99 | processor.process(task=task) 100 | db.commit() 101 | assert task.state == models.TaskState.FAILED 102 | 103 | 104 | @pytest.mark.parametrize("task__func_name", ["my_func"]) 105 | def test_process_savepoint_rollback( 106 | db: Session, 107 | task: models.Task, 108 | ): 109 | def func(): 110 | task.func_name = "changed" 111 | db.add(task) 112 | db.flush() 113 | raise ValueError("boom") 114 | 115 | processor = Processor( 116 | channel="mock-channel", 117 | module="mock.module", 118 | name="my_func", 119 | func=func, 120 | ) 121 | processor.process(task=task) 122 | 
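    # func ran inside db.begin_nested(), so the ValueError rolled back that
    # savepoint and reverted the flushed func_name="changed"; the FAILED state
    # and error_message were set outside the savepoint and survive the commit.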
db.commit() 123 | assert task.state == models.TaskState.FAILED 124 | assert task.func_name == "my_func" 125 | 126 | 127 | def test_processor_helper(processor_module: str): 128 | from ..fixtures.processors import processor0 129 | 130 | task = processor0.run(k0="v0") 131 | assert isinstance(task, models.Task) 132 | assert task.module == processor_module 133 | assert task.func_name == "processor0" 134 | assert task.channel == "mock-channel" 135 | assert task.kwargs == dict(k0="v0") 136 | assert task.parent is None 137 | assert not task.children 138 | 139 | 140 | def test_processor_helper_create_child_task( 141 | db: Session, processor_module: str, task: models.Task 142 | ): 143 | from ..fixtures.processors import processor0 144 | 145 | token = current_task.set(task) 146 | try: 147 | child_task = processor0.run(k0="v0") 148 | db.add(child_task) 149 | db.commit() 150 | finally: 151 | current_task.reset(token) 152 | 153 | db.expire_all() 154 | assert child_task.parent == task 155 | assert task.children == [child_task] 156 | -------------------------------------------------------------------------------- /tests/unit/processors/test_registry.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from sqlalchemy.orm import Session 3 | 4 | from .. import fixtures 5 | from .conftest import processor_module 6 | from bq import models 7 | from bq.processors.registry import collect 8 | from bq.processors.registry import Registry 9 | 10 | 11 | @pytest.fixture 12 | def registry() -> Registry: 13 | return collect([fixtures]) 14 | 15 | 16 | def test_collect(registry: Registry, processor_module: str): 17 | assert registry.processors.keys() == {"mock-channel", "mock-channel2"} 18 | 19 | modules0 = registry.processors["mock-channel"] 20 | assert modules0.keys() == {processor_module} 21 | funcs0 = modules0[processor_module] 22 | assert funcs0.keys() == {"processor0"} 23 | 24 | modules1 = registry.processors["mock-channel2"] 25 | assert modules1.keys() == {processor_module} 26 | funcs1 = modules1[processor_module] 27 | assert funcs1.keys() == {"processor1"} 28 | 29 | 30 | @pytest.mark.parametrize( 31 | "task__channel, task__module, task__func_name, task__kwargs, expected", 32 | [ 33 | ( 34 | "mock-channel", 35 | "tests.unit.fixtures.processors", 36 | "processor0", 37 | {}, 38 | "processed by processor0", 39 | ), 40 | ( 41 | "mock-channel2", 42 | "tests.unit.fixtures.processors", 43 | "processor1", 44 | dict(kwarg0="mock-val"), 45 | "mock-val", 46 | ), 47 | ], 48 | ) 49 | def test_registry_process( 50 | db: Session, registry: Registry, task: models.Task, expected: str 51 | ): 52 | assert registry.process(task) == expected 53 | -------------------------------------------------------------------------------- /tests/unit/processors/test_retry_policies.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | import pytest 4 | from sqlalchemy import func 5 | from sqlalchemy import select 6 | from sqlalchemy.orm import Session 7 | 8 | from ...factories import EventFactory 9 | from bq import models 10 | from bq.processors.retry_policies import DelayRetry 11 | from bq.processors.retry_policies import ExponentialBackoffRetry 12 | from bq.processors.retry_policies import LimitAttempt 13 | 14 | 15 | @pytest.mark.parametrize("failure_count", [0, 1, 5, 10]) 16 | def test_delay_policy( 17 | db: Session, event_factory: EventFactory, task: models.Task, failure_count: int 18 | ): 19 | for _ in range(failure_count): 20 | 
event_factory(task=task, type=models.EventType.FAILED_RETRY_SCHEDULED) 21 | delay = DelayRetry(delay=datetime.timedelta(seconds=5)) 22 | scheduled_at = delay(task) 23 | expected = db.scalar(select(func.now() + datetime.timedelta(seconds=5))) 24 | actual = db.scalar(select(scheduled_at)) 25 | assert actual == expected 26 | 27 | 28 | @pytest.mark.parametrize( 29 | "failure_count, expected_delay", 30 | [ 31 | (0, 32), 32 | (1, 128), 33 | (5, 32768), 34 | (10, 33554432), 35 | ], 36 | ) 37 | def test_exponential_backoff( 38 | db: Session, 39 | event_factory: EventFactory, 40 | task: models.Task, 41 | failure_count: int, 42 | expected_delay: int, 43 | ): 44 | for _ in range(failure_count): 45 | event_factory(task=task, type=models.EventType.FAILED_RETRY_SCHEDULED) 46 | backoff = ExponentialBackoffRetry(base=2, exponent_offset=3, exponent_scalar=2) 47 | scheduled_at = backoff(task) 48 | expected = db.scalar( 49 | select(func.now() + datetime.timedelta(seconds=expected_delay)) 50 | ) 51 | actual = db.scalar(select(scheduled_at)) 52 | assert actual == expected 53 | 54 | 55 | @pytest.mark.parametrize( 56 | "failure_count, expected_retry", 57 | [ 58 | (0, True), 59 | (1, True), 60 | (5, False), 61 | (6, False), 62 | (7, False), 63 | ], 64 | ) 65 | def test_limit_attempt( 66 | db: Session, 67 | event_factory: EventFactory, 68 | task: models.Task, 69 | failure_count: int, 70 | expected_retry: bool, 71 | ): 72 | for _ in range(failure_count): 73 | event_factory(task=task, type=models.EventType.FAILED_RETRY_SCHEDULED) 74 | policy = LimitAttempt(6, DelayRetry(delay=datetime.timedelta(seconds=5))) 75 | scheduled_at = policy(task) 76 | if expected_retry: 77 | assert scheduled_at is not None 78 | else: 79 | assert scheduled_at is None 80 | -------------------------------------------------------------------------------- /tests/unit/services/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LaunchPlatform/bq/1ce563cd90ce02d5945baf52c93595d010e0d682/tests/unit/services/__init__.py -------------------------------------------------------------------------------- /tests/unit/services/test_dispatch_service.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | import pytest 4 | from sqlalchemy import func 5 | from sqlalchemy.orm import Session 6 | 7 | from ...factories import TaskFactory 8 | from bq import models 9 | from bq.services.dispatch import DispatchService 10 | 11 | 12 | @pytest.fixture 13 | def dispatch_service(db: Session) -> DispatchService: 14 | return DispatchService(db) 15 | 16 | 17 | def test_dispatch_empty( 18 | db: Session, dispatch_service: DispatchService, worker: models.Worker 19 | ): 20 | assert not list(dispatch_service.dispatch(["test"], worker_id=worker.id)) 21 | 22 | 23 | def test_dispatch( 24 | db: Session, 25 | dispatch_service: DispatchService, 26 | worker: models.Worker, 27 | task: models.Task, 28 | ): 29 | assert task.state == models.TaskState.PENDING 30 | tasks = list(dispatch_service.dispatch([task.channel], worker_id=worker.id)) 31 | db.expire_all() 32 | assert len(tasks) == 1 33 | returned_task = tasks[0] 34 | assert returned_task.state == models.TaskState.PROCESSING 35 | assert returned_task.worker == worker 36 | assert not list(dispatch_service.dispatch([task.channel], worker_id=worker.id)) 37 | 38 | 39 | @pytest.mark.parametrize( 40 | "task__scheduled_at", [func.now() + datetime.timedelta(seconds=10)] 41 | ) 42 | def 
test_dispatch_with_scheduled_at( 43 | db: Session, 44 | dispatch_service: DispatchService, 45 | worker: models.Worker, 46 | task: models.Task, 47 | ): 48 | assert task.state == models.TaskState.PENDING 49 | assert task.scheduled_at is not None 50 | 51 | tasks = list(dispatch_service.dispatch([task.channel], worker_id=worker.id)) 52 | db.expire_all() 53 | assert len(tasks) == 0 54 | 55 | tasks = list( 56 | dispatch_service.dispatch( 57 | [task.channel], 58 | worker_id=worker.id, 59 | now=func.now() + datetime.timedelta(seconds=10), 60 | ) 61 | ) 62 | db.expire_all() 63 | assert len(tasks) == 1 64 | returned_task = tasks[0] 65 | assert returned_task.state == models.TaskState.PROCESSING 66 | assert returned_task.worker == worker 67 | 68 | 69 | def test_dispatch_many( 70 | db: Session, 71 | dispatch_service: DispatchService, 72 | worker: models.Worker, 73 | task_factory: TaskFactory, 74 | ): 75 | for _ in range(3): 76 | task_factory(channel="other_channel") 77 | 78 | channel = "my_channel" 79 | for _ in range(4): 80 | task_factory(channel=channel) 81 | 82 | task_factory(channel=channel, state=models.TaskState.DONE) 83 | 84 | tasks = list(dispatch_service.dispatch([channel], worker_id=worker.id, limit=3)) 85 | db.expire_all() 86 | assert len(tasks) == 3 87 | for task in tasks: 88 | assert task.state == models.TaskState.PROCESSING 89 | assert task.worker == worker 90 | 91 | for task in db.query(models.Task).filter(models.Task.channel != channel): 92 | assert task.state == models.TaskState.PENDING 93 | assert task.worker is None 94 | 95 | remain_ids = list( 96 | db.query(models.Task.id) 97 | .filter(models.Task.channel == channel) 98 | .filter(models.Task.state == models.TaskState.PENDING) 99 | ) 100 | assert len(remain_ids) == 1 101 | assert remain_ids[0] not in [task.id for task in tasks] 102 | 103 | tasks = list(dispatch_service.dispatch(["my_channel"], worker_id=worker.id)) 104 | assert len(tasks) == 1 105 | 106 | 107 | def test_listen_value_quote(db: Session, dispatch_service: DispatchService): 108 | dispatch_service.listen(["a", "中文", "!@#$%^&*(()-_"]) 109 | db.commit() 110 | 111 | 112 | def test_poll(db: Session, dispatch_service: DispatchService): 113 | dispatch_service.listen(["a", "b", "c"]) 114 | db.commit() 115 | with pytest.raises(TimeoutError): 116 | list(dispatch_service.poll(timeout=1)) 117 | dispatch_service.notify(["a", "c"]) 118 | db.commit() 119 | notifications = list(dispatch_service.poll(timeout=1)) 120 | assert frozenset([n.channel for n in notifications]) == frozenset(["a", "c"]) 121 | -------------------------------------------------------------------------------- /tests/unit/services/test_worker_service.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | import pytest 4 | from sqlalchemy import func 5 | from sqlalchemy.orm import Session 6 | 7 | from ...factories import TaskFactory 8 | from ...factories import WorkerFactory 9 | from bq import models 10 | from bq.services.worker import WorkerService 11 | 12 | 13 | @pytest.fixture 14 | def worker_service(db: Session) -> WorkerService: 15 | return WorkerService(db) 16 | 17 | 18 | def test_update_heartbeat( 19 | db: Session, worker_service: WorkerService, worker: models.Worker 20 | ): 21 | now = db.scalar(func.now()) 22 | assert worker.last_heartbeat != now 23 | worker_service.update_heartbeat(worker) 24 | db.commit() 25 | assert worker.last_heartbeat == now 26 | 27 | 28 | def test_fetch_dead_workers( 29 | db: Session, worker_service: WorkerService, worker_factory: 
WorkerFactory 30 | ): 31 | now = db.scalar(func.now()) 32 | dead_worker0 = worker_factory(last_heartbeat=now - datetime.timedelta(seconds=6)) 33 | dead_worker1 = worker_factory(last_heartbeat=now - datetime.timedelta(seconds=7)) 34 | alive_worker0 = worker_factory(last_heartbeat=now - datetime.timedelta(seconds=4)) 35 | alive_worker1 = worker_factory(last_heartbeat=now - datetime.timedelta(seconds=3)) 36 | alive_worker2 = worker_factory(last_heartbeat=now) 37 | dead_workers = worker_service.fetch_dead_workers(5).all() 38 | assert len(dead_workers) == 2 39 | assert frozenset(worker.id for worker in dead_workers) == frozenset( 40 | [dead_worker0.id, dead_worker1.id] 41 | ) 42 | assert dead_worker0.state == models.WorkerState.NO_HEARTBEAT 43 | assert dead_worker1.state == models.WorkerState.NO_HEARTBEAT 44 | assert alive_worker0.state == models.WorkerState.RUNNING 45 | assert alive_worker1.state == models.WorkerState.RUNNING 46 | assert alive_worker2.state == models.WorkerState.RUNNING 47 | db.commit() 48 | 49 | 50 | def test_reschedule_dead_tasks( 51 | db: Session, 52 | worker_service: WorkerService, 53 | worker_factory: WorkerFactory, 54 | task_factory: TaskFactory, 55 | ): 56 | now = db.scalar(func.now()) 57 | 58 | dead_worker0 = worker_factory(last_heartbeat=now - datetime.timedelta(seconds=6)) 59 | dead_task0 = task_factory(worker=dead_worker0, state=models.TaskState.PROCESSING) 60 | dead_task1 = task_factory(worker=dead_worker0, state=models.TaskState.PROCESSING) 61 | done_task0 = task_factory(worker=dead_worker0, state=models.TaskState.DONE) 62 | 63 | dead_worker1 = worker_factory(last_heartbeat=now - datetime.timedelta(seconds=7)) 64 | dead_task2 = task_factory(worker=dead_worker1, state=models.TaskState.PROCESSING) 65 | 66 | alive_worker0 = worker_factory(last_heartbeat=now - datetime.timedelta(seconds=4)) 67 | other_task0 = task_factory(worker=alive_worker0, state=models.TaskState.PROCESSING) 68 | alive_worker1 = worker_factory() 69 | other_task1 = task_factory(worker=alive_worker1, state=models.TaskState.PROCESSING) 70 | 71 | task_count = worker_service.reschedule_dead_tasks( 72 | db.query(models.Worker.id).filter( 73 | models.Worker.id.in_([dead_worker0.id, dead_worker1.id]) 74 | ) 75 | ) 76 | assert task_count == 3 77 | db.commit() 78 | assert dead_task0.state == models.TaskState.PENDING 79 | assert dead_task0.worker is None 80 | assert dead_task1.state == models.TaskState.PENDING 81 | assert dead_task1.worker is None 82 | assert dead_task2.state == models.TaskState.PENDING 83 | assert dead_task2.worker is None 84 | assert done_task0.state == models.TaskState.DONE 85 | assert other_task0.state == models.TaskState.PROCESSING 86 | assert other_task1.state == models.TaskState.PROCESSING 87 | -------------------------------------------------------------------------------- /tests/unit/test_config.py: -------------------------------------------------------------------------------- 1 | import typing 2 | 3 | import pytest 4 | from pydantic import PostgresDsn 5 | from pydantic import ValidationError 6 | 7 | from bq.config import Config 8 | 9 | 10 | @pytest.mark.parametrize( 11 | "value, expected", 12 | [ 13 | (None, PostgresDsn("postgresql://bq@localhost/bq")), 14 | ( 15 | "postgresql://myuser@example.com/mydb", 16 | PostgresDsn("postgresql://myuser@example.com/mydb"), 17 | ), 18 | ( 19 | PostgresDsn("postgresql://myuser@example.com/mydb"), 20 | PostgresDsn("postgresql://myuser@example.com/mydb"), 21 | ), 22 | ], 23 | ) 24 | def test_database_url(value: typing.Any, expected: PostgresDsn): 
25 | assert Config(DATABASE_URL=value).DATABASE_URL == expected 26 | 27 | 28 | @pytest.mark.parametrize( 29 | "value", 30 | [ 31 | 1234, 32 | 12.34, 33 | object(), 34 | list(), 35 | dict(), 36 | ], 37 | ) 38 | def test_bad_database_url_type(value: typing.Any): 39 | with pytest.raises(ValidationError): 40 | Config(DATABASE_URL=value) 41 | -------------------------------------------------------------------------------- /uv.lock: -------------------------------------------------------------------------------- 1 | version = 1 2 | requires-python = ">=3.11, <4" 3 | resolution-markers = [ 4 | "(platform_machine == 'AMD64' and platform_system == 'Windows' and sys_platform == 'win32') or (platform_machine == 'WIN32' and platform_system == 'Windows' and sys_platform == 'win32') or (platform_machine == 'aarch64' and platform_system == 'Windows' and sys_platform == 'win32') or (platform_machine == 'amd64' and platform_system == 'Windows' and sys_platform == 'win32') or (platform_machine == 'ppc64le' and platform_system == 'Windows' and sys_platform == 'win32') or (platform_machine == 'win32' and platform_system == 'Windows' and sys_platform == 'win32') or (platform_machine == 'x86_64' and platform_system == 'Windows' and sys_platform == 'win32')", 5 | "(platform_machine == 'AMD64' and platform_system == 'Windows' and sys_platform != 'win32') or (platform_machine == 'WIN32' and platform_system == 'Windows' and sys_platform != 'win32') or (platform_machine == 'aarch64' and platform_system == 'Windows' and sys_platform != 'win32') or (platform_machine == 'amd64' and platform_system == 'Windows' and sys_platform != 'win32') or (platform_machine == 'ppc64le' and platform_system == 'Windows' and sys_platform != 'win32') or (platform_machine == 'win32' and platform_system == 'Windows' and sys_platform != 'win32') or (platform_machine == 'x86_64' and platform_system == 'Windows' and sys_platform != 'win32')", 6 | "platform_machine != 'AMD64' and platform_machine != 'WIN32' and platform_machine != 'aarch64' and platform_machine != 'amd64' and platform_machine != 'ppc64le' and platform_machine != 'win32' and platform_machine != 'x86_64' and platform_system == 'Windows' and sys_platform == 'win32'", 7 | "platform_machine != 'AMD64' and platform_machine != 'WIN32' and platform_machine != 'aarch64' and platform_machine != 'amd64' and platform_machine != 'ppc64le' and platform_machine != 'win32' and platform_machine != 'x86_64' and platform_system == 'Windows' and sys_platform != 'win32'", 8 | "(platform_machine == 'AMD64' and platform_system != 'Windows' and sys_platform == 'win32') or (platform_machine == 'WIN32' and platform_system != 'Windows' and sys_platform == 'win32') or (platform_machine == 'aarch64' and platform_system != 'Windows' and sys_platform == 'win32') or (platform_machine == 'amd64' and platform_system != 'Windows' and sys_platform == 'win32') or (platform_machine == 'ppc64le' and platform_system != 'Windows' and sys_platform == 'win32') or (platform_machine == 'win32' and platform_system != 'Windows' and sys_platform == 'win32') or (platform_machine == 'x86_64' and platform_system != 'Windows' and sys_platform == 'win32')", 9 | "(platform_machine == 'AMD64' and platform_system != 'Windows' and sys_platform != 'win32') or (platform_machine == 'WIN32' and platform_system != 'Windows' and sys_platform != 'win32') or (platform_machine == 'aarch64' and platform_system != 'Windows' and sys_platform != 'win32') or (platform_machine == 'amd64' and platform_system != 'Windows' and sys_platform != 
'win32') or (platform_machine == 'ppc64le' and platform_system != 'Windows' and sys_platform != 'win32') or (platform_machine == 'win32' and platform_system != 'Windows' and sys_platform != 'win32') or (platform_machine == 'x86_64' and platform_system != 'Windows' and sys_platform != 'win32')", 10 | "platform_machine != 'AMD64' and platform_machine != 'WIN32' and platform_machine != 'aarch64' and platform_machine != 'amd64' and platform_machine != 'ppc64le' and platform_machine != 'win32' and platform_machine != 'x86_64' and platform_system != 'Windows' and sys_platform == 'win32'", 11 | "platform_machine != 'AMD64' and platform_machine != 'WIN32' and platform_machine != 'aarch64' and platform_machine != 'amd64' and platform_machine != 'ppc64le' and platform_machine != 'win32' and platform_machine != 'x86_64' and platform_system != 'Windows' and sys_platform != 'win32'", 12 | ] 13 | 14 | [[package]] 15 | name = "annotated-types" 16 | version = "0.6.0" 17 | source = { registry = "https://pypi.org/simple" } 18 | sdist = { url = "https://files.pythonhosted.org/packages/67/fe/8c7b275824c6d2cd17c93ee85d0ee81c090285b6d52f4876ccc47cf9c3c4/annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d", size = 14670 } 19 | wheels = [ 20 | { url = "https://files.pythonhosted.org/packages/28/78/d31230046e58c207284c6b2c4e8d96e6d3cb4e52354721b944d3e1ee4aa5/annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43", size = 12360 }, 21 | ] 22 | 23 | [[package]] 24 | name = "beanqueue" 25 | version = "1.1.7" 26 | source = { editable = "." } 27 | dependencies = [ 28 | { name = "blinker" }, 29 | { name = "click" }, 30 | { name = "pydantic-settings" }, 31 | { name = "rich" }, 32 | { name = "sqlalchemy" }, 33 | { name = "venusian" }, 34 | ] 35 | 36 | [package.dev-dependencies] 37 | dev = [ 38 | { name = "psycopg2-binary" }, 39 | { name = "pytest-factoryboy" }, 40 | ] 41 | 42 | [package.metadata] 43 | requires-dist = [ 44 | { name = "blinker", specifier = ">=1.8.2,<2" }, 45 | { name = "click", specifier = ">=8.1.7,<9" }, 46 | { name = "pydantic-settings", specifier = ">=2.2.1,<3" }, 47 | { name = "rich", specifier = ">=13.7.1,<14" }, 48 | { name = "sqlalchemy", specifier = ">=2.0.30,<3" }, 49 | { name = "venusian", specifier = ">=3.1.0,<4" }, 50 | ] 51 | 52 | [package.metadata.requires-dev] 53 | dev = [ 54 | { name = "psycopg2-binary", specifier = ">=2.9.9,<3" }, 55 | { name = "pytest-factoryboy", specifier = ">=2.7.0,<3" }, 56 | ] 57 | 58 | [[package]] 59 | name = "blinker" 60 | version = "1.8.2" 61 | source = { registry = "https://pypi.org/simple" } 62 | sdist = { url = "https://files.pythonhosted.org/packages/1e/57/a6a1721eff09598fb01f3c7cda070c1b6a0f12d63c83236edf79a440abcc/blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83", size = 23161 } 63 | wheels = [ 64 | { url = "https://files.pythonhosted.org/packages/bb/2a/10164ed1f31196a2f7f3799368a821765c62851ead0e630ab52b8e14b4d0/blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01", size = 9456 }, 65 | ] 66 | 67 | [[package]] 68 | name = "click" 69 | version = "8.1.7" 70 | source = { registry = "https://pypi.org/simple" } 71 | dependencies = [ 72 | { name = "colorama", marker = "platform_system == 'Windows'" }, 73 | ] 74 | sdist = { url = 
"https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de", size = 336121 } 75 | wheels = [ 76 | { url = "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", size = 97941 }, 77 | ] 78 | 79 | [[package]] 80 | name = "colorama" 81 | version = "0.4.6" 82 | source = { registry = "https://pypi.org/simple" } 83 | sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } 84 | wheels = [ 85 | { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, 86 | ] 87 | 88 | [[package]] 89 | name = "factory-boy" 90 | version = "3.3.0" 91 | source = { registry = "https://pypi.org/simple" } 92 | dependencies = [ 93 | { name = "faker" }, 94 | ] 95 | sdist = { url = "https://files.pythonhosted.org/packages/ec/02/30796763f5661ef0028e12f09b66757b2c8dd1ab783d6b7e6d834a404884/factory_boy-3.3.0.tar.gz", hash = "sha256:bc76d97d1a65bbd9842a6d722882098eb549ec8ee1081f9fb2e8ff29f0c300f1", size = 163604 } 96 | wheels = [ 97 | { url = "https://files.pythonhosted.org/packages/7d/37/69bc18ffa39ae7723b61ca0dde30130ea45f9127c129f084f5c6ca5d5dae/factory_boy-3.3.0-py2.py3-none-any.whl", hash = "sha256:a2cdbdb63228177aa4f1c52f4b6d83fab2b8623bf602c7dedd7eb83c0f69c04c", size = 36684 }, 98 | ] 99 | 100 | [[package]] 101 | name = "faker" 102 | version = "25.1.0" 103 | source = { registry = "https://pypi.org/simple" } 104 | dependencies = [ 105 | { name = "python-dateutil" }, 106 | ] 107 | sdist = { url = "https://files.pythonhosted.org/packages/c5/45/6b4875fd0b5f2ccb16091429d7f9180301ce4f596342db23ad5d0b80feea/Faker-25.1.0.tar.gz", hash = "sha256:2107618cf306bb188dcfea3e5cfd94aa92d65c7293a2437c1e96a99c83274755", size = 1760306 } 108 | wheels = [ 109 | { url = "https://files.pythonhosted.org/packages/87/92/71f0cdfc51c8b82cd86acd943192895fdd4523f88fc563ba16bcb73e3d78/Faker-25.1.0-py3-none-any.whl", hash = "sha256:24e28dce0b89683bb9e017e042b971c8c4909cff551b6d46f1e207674c7c2526", size = 1797185 }, 110 | ] 111 | 112 | [[package]] 113 | name = "greenlet" 114 | version = "3.0.3" 115 | source = { registry = "https://pypi.org/simple" } 116 | sdist = { url = "https://files.pythonhosted.org/packages/17/14/3bddb1298b9a6786539ac609ba4b7c9c0842e12aa73aaa4d8d73ec8f8185/greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491", size = 182013 } 117 | wheels = [ 118 | { url = "https://files.pythonhosted.org/packages/6e/20/68a278a6f93fa36e21cfc3d7599399a8a831225644eb3b6b18755cd3d6fc/greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61", size = 271666 }, 119 | { url = "https://files.pythonhosted.org/packages/21/b4/90e06e07c78513ab03855768200bdb35c8e764e805b3f14fb488e56f82dc/greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559", size = 
657689 }, 120 | { url = "https://files.pythonhosted.org/packages/f6/a2/0ed21078039072f9dc738bbf3af12b103a84106b1385ac4723841f846ce7/greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e", size = 673009 }, 121 | { url = "https://files.pythonhosted.org/packages/42/11/42ad6b1104c357826bbee7d7b9e4f24dbd9fde94899a03efb004aab62963/greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33", size = 667432 }, 122 | { url = "https://files.pythonhosted.org/packages/bb/6b/384dee7e0121cbd1757bdc1824a5ee28e43d8d4e3f99aa59521f629442fe/greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379", size = 667442 }, 123 | { url = "https://files.pythonhosted.org/packages/c6/1f/12d5a6cc26e8b483c2e7975f9c22e088ac735c0d8dcb8a8f72d31a4e5f04/greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22", size = 620032 }, 124 | { url = "https://files.pythonhosted.org/packages/c7/ec/85b647e59e0f137c7792a809156f413e38379cf7f3f2e1353c37f4be4026/greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3", size = 1154218 }, 125 | { url = "https://files.pythonhosted.org/packages/94/ed/1e5f4bca691a81700e5a88e86d6f0e538acb10188cd2cc17140e523255ef/greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d", size = 1180754 }, 126 | { url = "https://files.pythonhosted.org/packages/47/79/26d54d7d700ef65b689fc2665a40846d13e834da0486674a8d4f0f371a47/greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728", size = 292822 }, 127 | { url = "https://files.pythonhosted.org/packages/a2/2f/461615adc53ba81e99471303b15ac6b2a6daa8d2a0f7f77fd15605e16d5b/greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be", size = 273085 }, 128 | { url = "https://files.pythonhosted.org/packages/e9/55/2c3cfa3cdbb940cf7321fbcf544f0e9c74898eed43bf678abf416812d132/greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e", size = 660514 }, 129 | { url = "https://files.pythonhosted.org/packages/38/77/efb21ab402651896c74f24a172eb4d7479f9f53898bd5e56b9e20bb24ffd/greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676", size = 674295 }, 130 | { url = "https://files.pythonhosted.org/packages/74/3a/92f188ace0190f0066dca3636cf1b09481d0854c46e92ec5e29c7cefe5b1/greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc", size = 669395 }, 131 | { url = "https://files.pythonhosted.org/packages/63/0f/847ed02cdfce10f0e6e3425cd054296bddb11a17ef1b34681fa01a055187/greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230", size = 670455 }, 132 | { url = 
"https://files.pythonhosted.org/packages/bd/37/56b0da468a85e7704f3b2bc045015301bdf4be2184a44868c71f6dca6fe2/greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf", size = 625692 }, 133 | { url = "https://files.pythonhosted.org/packages/7c/68/b5f4084c0a252d7e9c0d95fc1cfc845d08622037adb74e05be3a49831186/greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305", size = 1152597 }, 134 | { url = "https://files.pythonhosted.org/packages/a4/fa/31e22345518adcd69d1d6ab5087a12c178aa7f3c51103f6d5d702199d243/greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6", size = 1181043 }, 135 | { url = "https://files.pythonhosted.org/packages/53/80/3d94d5999b4179d91bcc93745d1b0815b073d61be79dd546b840d17adb18/greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2", size = 293635 }, 136 | ] 137 | 138 | [[package]] 139 | name = "inflection" 140 | version = "0.5.1" 141 | source = { registry = "https://pypi.org/simple" } 142 | sdist = { url = "https://files.pythonhosted.org/packages/e1/7e/691d061b7329bc8d54edbf0ec22fbfb2afe61facb681f9aaa9bff7a27d04/inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417", size = 15091 } 143 | wheels = [ 144 | { url = "https://files.pythonhosted.org/packages/59/91/aa6bde563e0085a02a435aa99b49ef75b0a4b062635e606dab23ce18d720/inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2", size = 9454 }, 145 | ] 146 | 147 | [[package]] 148 | name = "iniconfig" 149 | version = "2.0.0" 150 | source = { registry = "https://pypi.org/simple" } 151 | sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 } 152 | wheels = [ 153 | { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }, 154 | ] 155 | 156 | [[package]] 157 | name = "markdown-it-py" 158 | version = "3.0.0" 159 | source = { registry = "https://pypi.org/simple" } 160 | dependencies = [ 161 | { name = "mdurl" }, 162 | ] 163 | sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 } 164 | wheels = [ 165 | { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 }, 166 | ] 167 | 168 | [[package]] 169 | name = "mdurl" 170 | version = "0.1.2" 171 | source = { registry = "https://pypi.org/simple" } 172 | sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } 173 | wheels = 
[ 174 | { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, 175 | ] 176 | 177 | [[package]] 178 | name = "packaging" 179 | version = "24.0" 180 | source = { registry = "https://pypi.org/simple" } 181 | sdist = { url = "https://files.pythonhosted.org/packages/ee/b5/b43a27ac7472e1818c4bafd44430e69605baefe1f34440593e0332ec8b4d/packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9", size = 147882 } 182 | wheels = [ 183 | { url = "https://files.pythonhosted.org/packages/49/df/1fceb2f8900f8639e278b056416d49134fb8d84c5942ffaa01ad34782422/packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5", size = 53488 }, 184 | ] 185 | 186 | [[package]] 187 | name = "pluggy" 188 | version = "1.5.0" 189 | source = { registry = "https://pypi.org/simple" } 190 | sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } 191 | wheels = [ 192 | { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, 193 | ] 194 | 195 | [[package]] 196 | name = "psycopg2-binary" 197 | version = "2.9.9" 198 | source = { registry = "https://pypi.org/simple" } 199 | sdist = { url = "https://files.pythonhosted.org/packages/fc/07/e720e53bfab016ebcc34241695ccc06a9e3d91ba19b40ca81317afbdc440/psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c", size = 384973 } 200 | wheels = [ 201 | { url = "https://files.pythonhosted.org/packages/a5/ac/702d300f3df169b9d0cbef0340d9f34a78bc18dc2dbafbcb39ff0f165cf8/psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26", size = 2822581 }, 202 | { url = "https://files.pythonhosted.org/packages/7a/1f/a6cf0cdf944253f7c45d90fbc876cc8bed5cc9942349306245715c0d88d6/psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f", size = 2552633 }, 203 | { url = "https://files.pythonhosted.org/packages/81/0b/3adf561107c865928455891156d1dde5325253f7f4316fe56cd2c3f73570/psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2", size = 2851075 }, 204 | { url = "https://files.pythonhosted.org/packages/f7/98/c2fedcbf0a9607519a010dcf88571138b2251062dbde3610cdba5ba1eee1/psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0", size = 3080509 }, 205 | { url = "https://files.pythonhosted.org/packages/c2/05/81e8bc7fca95574c9323e487d9ce1b58a4cfcc17f89b8fe843af46361211/psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53", size = 3264303 }, 206 | { url = 
"https://files.pythonhosted.org/packages/ce/85/62825cabc6aad53104b7b6d12eb2ad74737d268630032d07b74d4444cb72/psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be", size = 3019515 }, 207 | { url = "https://files.pythonhosted.org/packages/e9/b0/9ca2b8e01a0912c9a14234fd5df7a241a1e44778c5797bf4b8eaa8dc3d3a/psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27", size = 2355892 }, 208 | { url = "https://files.pythonhosted.org/packages/73/17/ba28bb0022db5e2015a82d2df1c4b0d419c37fa07a588b3aff3adc4939f6/psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359", size = 2534903 }, 209 | { url = "https://files.pythonhosted.org/packages/3b/92/b463556409cdc12791cd8b1dae0072bf8efe817ef68b7ea3d9cf7d0e5656/psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2", size = 2486597 }, 210 | { url = "https://files.pythonhosted.org/packages/92/57/96576e07132d7f7a1ac1df939575e6fdd8951aea337ee152b586bb51a971/psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc", size = 2454908 }, 211 | { url = "https://files.pythonhosted.org/packages/7c/ae/cedd56e1f4a2b0e37213283caf3733a875c4c76f3372241e19c0d2a87355/psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d", size = 1024240 }, 212 | { url = "https://files.pythonhosted.org/packages/25/1f/7ae31759142999a8d06b3e250c1346c4abcdcada8fa884376775dc1de686/psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417", size = 1163655 }, 213 | { url = "https://files.pythonhosted.org/packages/a7/d0/5f2db14e7b53552276ab613399a83f83f85b173a862d3f20580bc7231139/psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf", size = 2823784 }, 214 | { url = "https://files.pythonhosted.org/packages/18/ca/da384fd47233e300e3e485c90e7aab5d7def896d1281239f75901faf87d4/psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d", size = 2553308 }, 215 | { url = "https://files.pythonhosted.org/packages/50/66/fa53d2d3d92f6e1ef469d92afc6a4fe3f6e8a9a04b687aa28fb1f1d954ee/psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212", size = 2851283 }, 216 | { url = "https://files.pythonhosted.org/packages/04/37/2429360ac5547378202db14eec0dde76edbe1f6627df5a43c7e164922859/psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493", size = 3081839 }, 217 | { url = "https://files.pythonhosted.org/packages/62/2a/c0530b59d7e0d09824bc2102ecdcec0456b8ca4d47c0caa82e86fce3ed4c/psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996", size = 3264488 }, 218 | { url = 
"https://files.pythonhosted.org/packages/19/57/9f172b900795ea37246c78b5f52e00f4779984370855b3e161600156906d/psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119", size = 3020700 }, 219 | { url = "https://files.pythonhosted.org/packages/94/68/1176fc14ea76861b7b8360be5176e87fb20d5091b137c76570eb4e237324/psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba", size = 2355968 }, 220 | { url = "https://files.pythonhosted.org/packages/70/bb/aec2646a705a09079d008ce88073401cd61fc9b04f92af3eb282caa3a2ec/psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07", size = 2536101 }, 221 | { url = "https://files.pythonhosted.org/packages/14/33/12818c157e333cb9d9e6753d1b2463b6f60dbc1fade115f8e4dc5c52cac4/psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb", size = 2487064 }, 222 | { url = "https://files.pythonhosted.org/packages/56/a2/7851c68fe8768f3c9c246198b6356ee3e4a8a7f6820cc798443faada3400/psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe", size = 2456257 }, 223 | { url = "https://files.pythonhosted.org/packages/6f/ee/3ba07c6dc7c3294e717e94720da1597aedc82a10b1b180203ce183d4631a/psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93", size = 1024709 }, 224 | { url = "https://files.pythonhosted.org/packages/7b/08/9c66c269b0d417a0af9fb969535f0371b8c538633535a7a6a5ca3f9231e2/psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab", size = 1163864 }, 225 | ] 226 | 227 | [[package]] 228 | name = "pydantic" 229 | version = "2.7.1" 230 | source = { registry = "https://pypi.org/simple" } 231 | dependencies = [ 232 | { name = "annotated-types" }, 233 | { name = "pydantic-core" }, 234 | { name = "typing-extensions" }, 235 | ] 236 | sdist = { url = "https://files.pythonhosted.org/packages/1f/74/0d009e056c2bd309cdc053b932d819fcb5ad3301fc3e690c097e1de3e714/pydantic-2.7.1.tar.gz", hash = "sha256:e9dbb5eada8abe4d9ae5f46b9939aead650cd2b68f249bb3a8139dbe125803cc", size = 713923 } 237 | wheels = [ 238 | { url = "https://files.pythonhosted.org/packages/ed/76/9a17032880ed27f2dbd490c77a3431cbc80f47ba81534131de3c2846e736/pydantic-2.7.1-py3-none-any.whl", hash = "sha256:e029badca45266732a9a79898a15ae2e8b14840b1eabbb25844be28f0b33f3d5", size = 409311 }, 239 | ] 240 | 241 | [[package]] 242 | name = "pydantic-core" 243 | version = "2.18.2" 244 | source = { registry = "https://pypi.org/simple" } 245 | dependencies = [ 246 | { name = "typing-extensions" }, 247 | ] 248 | sdist = { url = "https://files.pythonhosted.org/packages/e9/23/a609c50e53959eb96393e42ae4891901f699aaad682998371348650a6651/pydantic_core-2.18.2.tar.gz", hash = "sha256:2e29d20810dfc3043ee13ac7d9e25105799817683348823f305ab3f349b9386e", size = 383446 } 249 | wheels = [ 250 | { url = "https://files.pythonhosted.org/packages/ce/9c/6ba3121fecd4c8a0ae48d87e02a87d97ec8831eb978c53bcbfa0b2e43600/pydantic_core-2.18.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:219da3f096d50a157f33645a1cf31c0ad1fe829a92181dd1311022f986e5fbe3", size = 1860520 }, 251 | { url = 
"https://files.pythonhosted.org/packages/5d/61/bfc32484eac102051ef85f5e648c9777f57398c83e5f87e3c0a420a6550b/pydantic_core-2.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc1cfd88a64e012b74e94cd00bbe0f9c6df57049c97f02bb07d39e9c852e19a4", size = 1773475 }, 252 | { url = "https://files.pythonhosted.org/packages/cf/f0/eb883cfa0de1ec85e4e388db092e6e0297c54bfe27b3015fe4a2d82f639d/pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b7133a6e6aeb8df37d6f413f7705a37ab4031597f64ab56384c94d98fa0e90", size = 1821936 }, 253 | { url = "https://files.pythonhosted.org/packages/0c/10/fc86b5cf407a0a2c0b01796f920b5568e13009d2c3a2c999188b1908718f/pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:224c421235f6102e8737032483f43c1a8cfb1d2f45740c44166219599358c2cd", size = 1808286 }, 254 | { url = "https://files.pythonhosted.org/packages/3a/a7/fd69b88ea7d2e31d4dce763182349bea5d9f00184d29cde2a8d0e0375704/pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b14d82cdb934e99dda6d9d60dc84a24379820176cc4a0d123f88df319ae9c150", size = 2011228 }, 255 | { url = "https://files.pythonhosted.org/packages/38/9d/0b2b5ddacf7641f6aacf04508c92afde7179c564a7aa1eeddb6dd8a16d82/pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2728b01246a3bba6de144f9e3115b532ee44bd6cf39795194fb75491824a1413", size = 2990524 }, 256 | { url = "https://files.pythonhosted.org/packages/80/b8/b93d756b36425f7ad378dcb9fdf5f6a03b88afaae0476f7bdb31dd8964be/pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:470b94480bb5ee929f5acba6995251ada5e059a5ef3e0dfc63cca287283ebfa6", size = 2056984 }, 257 | { url = "https://files.pythonhosted.org/packages/e1/37/046c7a966fc44b52a015be11b9ba99a0c5401770bbd0f69c84fa6d5957c1/pydantic_core-2.18.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:997abc4df705d1295a42f95b4eec4950a37ad8ae46d913caeee117b6b198811c", size = 1929611 }, 258 | { url = "https://files.pythonhosted.org/packages/85/df/0adda842d84e7ca290cb01df7e33dee6463b01be0fa3171a38ceb7b1a21e/pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75250dbc5290e3f1a0f4618db35e51a165186f9034eff158f3d490b3fed9f8a0", size = 1995902 }, 259 | { url = "https://files.pythonhosted.org/packages/1f/89/dbe3dd03e0d5c68f50f6aa98607221a304168196ee84e835747705cb7005/pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4456f2dca97c425231d7315737d45239b2b51a50dc2b6f0c2bb181fce6207664", size = 2124618 }, 260 | { url = "https://files.pythonhosted.org/packages/ee/53/b5c6c4a2b0defa1b7a4ad99e86f7e8410a7c0fbf247bb5f376cbb2f4ddcc/pydantic_core-2.18.2-cp311-none-win32.whl", hash = "sha256:269322dcc3d8bdb69f054681edff86276b2ff972447863cf34c8b860f5188e2e", size = 1728630 }, 261 | { url = "https://files.pythonhosted.org/packages/9d/b0/e8bebe8fd08ea6ec027b7304c84f4652f2933514caf9f6a418d259d2a950/pydantic_core-2.18.2-cp311-none-win_amd64.whl", hash = "sha256:800d60565aec896f25bc3cfa56d2277d52d5182af08162f7954f938c06dc4ee3", size = 1916632 }, 262 | { url = "https://files.pythonhosted.org/packages/94/74/2a26c45cac39408398adda1a9d96d567d71b6cd60f037687695ce89295b7/pydantic_core-2.18.2-cp311-none-win_arm64.whl", hash = "sha256:1404c69d6a676245199767ba4f633cce5f4ad4181f9d0ccb0577e1f66cf4c46d", size = 1801629 }, 263 | { url = 
"https://files.pythonhosted.org/packages/15/b1/e6edfe46402a5b415fc3de86aa64fb10009b323907f8d513175bfb839aa9/pydantic_core-2.18.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:fb2bd7be70c0fe4dfd32c951bc813d9fe6ebcbfdd15a07527796c8204bd36242", size = 1857389 }, 264 | { url = "https://files.pythonhosted.org/packages/30/49/397da3f6910d62f092684a50bcaba2566825c6eee27a743846583a01fadf/pydantic_core-2.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6132dd3bd52838acddca05a72aafb6eab6536aa145e923bb50f45e78b7251043", size = 1770630 }, 265 | { url = "https://files.pythonhosted.org/packages/6d/e0/1d65ae0cab571cf072b23a44bb3a4f0b4d45572e2157bce9f073e703e30b/pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d904828195733c183d20a54230c0df0eb46ec746ea1a666730787353e87182", size = 1816322 }, 266 | { url = "https://files.pythonhosted.org/packages/c6/79/a9bde518a69b983adab265a1a3fbe26392b50854b1cf3f8ad030b28972c4/pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9bd70772c720142be1020eac55f8143a34ec9f82d75a8e7a07852023e46617f", size = 1797646 }, 267 | { url = "https://files.pythonhosted.org/packages/5d/c0/28331aab3be69f407a95b629be253b57c3df492ea1dd1c53dce9796d10c1/pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8ed04b3582771764538f7ee7001b02e1170223cf9b75dff0bc698fadb00cf3", size = 2020413 }, 268 | { url = "https://files.pythonhosted.org/packages/11/4e/e06605ce50035dd9bf107dda3d514e4b1ba82a5551ef57a15e73e47d9053/pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6dac87ddb34aaec85f873d737e9d06a3555a1cc1a8e0c44b7f8d5daeb89d86f", size = 2884072 }, 269 | { url = "https://files.pythonhosted.org/packages/a1/c9/7d61469af6386e5846b5864bb93dc770979968c113863f923916c1a8bca2/pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca4ae5a27ad7a4ee5170aebce1574b375de390bc01284f87b18d43a3984df72", size = 2082895 }, 270 | { url = "https://files.pythonhosted.org/packages/af/b2/dff1a30e6c7eae5e12ed90fc790733cda91d5d9d8da86db59c41359049d5/pydantic_core-2.18.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:886eec03591b7cf058467a70a87733b35f44707bd86cf64a615584fd72488b7c", size = 1931326 }, 271 | { url = "https://files.pythonhosted.org/packages/0e/b9/28dc15be5a828708612cc429354609b456719f70180af9c66d7617bbac60/pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ca7b0c1f1c983e064caa85f3792dd2fe3526b3505378874afa84baf662e12241", size = 1993024 }, 272 | { url = "https://files.pythonhosted.org/packages/d4/8f/51b3cb36a4d2f1ed5d72f3ea329bab203db234fff056414b76950f353984/pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b4356d3538c3649337df4074e81b85f0616b79731fe22dd11b99499b2ebbdf3", size = 2129503 }, 273 | { url = "https://files.pythonhosted.org/packages/7e/47/2e8b7d24fa69e82e3b6cd74776c36f88f0fadf63e1c777fc4385db2bc63a/pydantic_core-2.18.2-cp312-none-win32.whl", hash = "sha256:8b172601454f2d7701121bbec3425dd71efcb787a027edf49724c9cefc14c038", size = 1748771 }, 274 | { url = "https://files.pythonhosted.org/packages/e4/49/f29028068b5cb364ad066a58490dd26fd1d4ba2943d829eb0f85dbc8ab06/pydantic_core-2.18.2-cp312-none-win_amd64.whl", hash = "sha256:b1bd7e47b1558ea872bd16c8502c414f9e90dcf12f1395129d7bb42a09a95438", size = 1916735 }, 275 | { url = 
"https://files.pythonhosted.org/packages/ac/32/d288f59ef7af445bac9a9281936f1d65999568d654b24a837d14ae01cefa/pydantic_core-2.18.2-cp312-none-win_arm64.whl", hash = "sha256:98758d627ff397e752bc339272c14c98199c613f922d4a384ddc07526c86a2ec", size = 1814428 }, 276 | ] 277 | 278 | [[package]] 279 | name = "pydantic-settings" 280 | version = "2.2.1" 281 | source = { registry = "https://pypi.org/simple" } 282 | dependencies = [ 283 | { name = "pydantic" }, 284 | { name = "python-dotenv" }, 285 | ] 286 | sdist = { url = "https://files.pythonhosted.org/packages/00/a4/89191c3cce6e6f79b734bfe81d3a8f176d21b57b034689cfbdc57d61c412/pydantic_settings-2.2.1.tar.gz", hash = "sha256:00b9f6a5e95553590434c0fa01ead0b216c3e10bc54ae02e37f359948643c5ed", size = 35495 } 287 | wheels = [ 288 | { url = "https://files.pythonhosted.org/packages/99/ee/24ec87e3a91426497c5a2b9880662d19cfd640342d477334ebc60fc2c276/pydantic_settings-2.2.1-py3-none-any.whl", hash = "sha256:0235391d26db4d2190cb9b31051c4b46882d28a51533f97440867f012d4da091", size = 13150 }, 289 | ] 290 | 291 | [[package]] 292 | name = "pygments" 293 | version = "2.18.0" 294 | source = { registry = "https://pypi.org/simple" } 295 | sdist = { url = "https://files.pythonhosted.org/packages/8e/62/8336eff65bcbc8e4cb5d05b55faf041285951b6e80f33e2bff2024788f31/pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199", size = 4891905 } 296 | wheels = [ 297 | { url = "https://files.pythonhosted.org/packages/f7/3f/01c8b82017c199075f8f788d0d906b9ffbbc5a47dc9918a945e13d5a2bda/pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a", size = 1205513 }, 298 | ] 299 | 300 | [[package]] 301 | name = "pytest" 302 | version = "8.2.0" 303 | source = { registry = "https://pypi.org/simple" } 304 | dependencies = [ 305 | { name = "colorama", marker = "sys_platform == 'win32'" }, 306 | { name = "iniconfig" }, 307 | { name = "packaging" }, 308 | { name = "pluggy" }, 309 | ] 310 | sdist = { url = "https://files.pythonhosted.org/packages/09/9d/78b3785134306efe9329f40815af45b9215068d6ae4747ec0bc91ff1f4aa/pytest-8.2.0.tar.gz", hash = "sha256:d507d4482197eac0ba2bae2e9babf0672eb333017bcedaa5fb1a3d42c1174b3f", size = 1422883 } 311 | wheels = [ 312 | { url = "https://files.pythonhosted.org/packages/c4/43/6b1debd95ecdf001bc46789a933f658da3f9738c65f32db3f4e8f2a4ca97/pytest-8.2.0-py3-none-any.whl", hash = "sha256:1733f0620f6cda4095bbf0d9ff8022486e91892245bb9e7d5542c018f612f233", size = 339229 }, 313 | ] 314 | 315 | [[package]] 316 | name = "pytest-factoryboy" 317 | version = "2.7.0" 318 | source = { registry = "https://pypi.org/simple" } 319 | dependencies = [ 320 | { name = "factory-boy" }, 321 | { name = "inflection" }, 322 | { name = "packaging" }, 323 | { name = "pytest" }, 324 | { name = "typing-extensions" }, 325 | ] 326 | sdist = { url = "https://files.pythonhosted.org/packages/a6/bc/179653e8cce651575ac95377e4fdf9afd3c4821ab4bba101aae913ebcc27/pytest_factoryboy-2.7.0.tar.gz", hash = "sha256:67fc54ec8669a3feb8ac60094dd57cd71eb0b20b2c319d2957873674c776a77b", size = 17398 } 327 | wheels = [ 328 | { url = "https://files.pythonhosted.org/packages/c7/56/d3ef25286dc8df9d1da0b325ee4b1b1ffd9736e44f9b30cfbe464e9f4f14/pytest_factoryboy-2.7.0-py3-none-any.whl", hash = "sha256:bf3222db22d954fbf46f4bff902a0a8d82f3fc3594a47c04bbdc0546ff4c59a6", size = 16268 }, 329 | ] 330 | 331 | [[package]] 332 | name = "python-dateutil" 333 | version = "2.9.0.post0" 334 | source = { registry = 
"https://pypi.org/simple" } 335 | dependencies = [ 336 | { name = "six" }, 337 | ] 338 | sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } 339 | wheels = [ 340 | { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, 341 | ] 342 | 343 | [[package]] 344 | name = "python-dotenv" 345 | version = "1.0.1" 346 | source = { registry = "https://pypi.org/simple" } 347 | sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115 } 348 | wheels = [ 349 | { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 }, 350 | ] 351 | 352 | [[package]] 353 | name = "rich" 354 | version = "13.7.1" 355 | source = { registry = "https://pypi.org/simple" } 356 | dependencies = [ 357 | { name = "markdown-it-py" }, 358 | { name = "pygments" }, 359 | ] 360 | sdist = { url = "https://files.pythonhosted.org/packages/b3/01/c954e134dc440ab5f96952fe52b4fdc64225530320a910473c1fe270d9aa/rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432", size = 221248 } 361 | wheels = [ 362 | { url = "https://files.pythonhosted.org/packages/87/67/a37f6214d0e9fe57f6ae54b2956d550ca8365857f42a1ce0392bb21d9410/rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222", size = 240681 }, 363 | ] 364 | 365 | [[package]] 366 | name = "six" 367 | version = "1.16.0" 368 | source = { registry = "https://pypi.org/simple" } 369 | sdist = { url = "https://files.pythonhosted.org/packages/71/39/171f1c67cd00715f190ba0b100d606d440a28c93c7714febeca8b79af85e/six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", size = 34041 } 370 | wheels = [ 371 | { url = "https://files.pythonhosted.org/packages/d9/5a/e7c31adbe875f2abbb91bd84cf2dc52d792b5a01506781dbcf25c91daf11/six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254", size = 11053 }, 372 | ] 373 | 374 | [[package]] 375 | name = "sqlalchemy" 376 | version = "2.0.30" 377 | source = { registry = "https://pypi.org/simple" } 378 | dependencies = [ 379 | { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, 380 | { name = "typing-extensions" }, 381 | ] 382 | sdist = { url = "https://files.pythonhosted.org/packages/36/d0/0137ebcf0dc230c2e82a621b3af755b8788a2a9dd6fd1b8cd6d5e7f6b00d/SQLAlchemy-2.0.30.tar.gz", hash = "sha256:2b1708916730f4830bc69d6f49d37f7698b5bd7530aca7f04f785f8849e95255", size = 9579500 } 383 | wheels = [ 384 | { url = 
"https://files.pythonhosted.org/packages/cd/ae/062f6ebd474aef81a199a16d2b1fb521d5fb0bc38a470181b0bcbfe3eb11/SQLAlchemy-2.0.30-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:955991a09f0992c68a499791a753523f50f71a6885531568404fa0f231832aa0", size = 2083475 }, 385 | { url = "https://files.pythonhosted.org/packages/29/51/3baab95d7eea9816c59c8e093201288ce27651704927e03ccfe156b30792/SQLAlchemy-2.0.30-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f69e4c756ee2686767eb80f94c0125c8b0a0b87ede03eacc5c8ae3b54b99dc46", size = 2073865 }, 386 | { url = "https://files.pythonhosted.org/packages/63/e1/9177748d4482d04ee67242b8cf441e18f7031b2d7e893b0894297f9e91f7/SQLAlchemy-2.0.30-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69c9db1ce00e59e8dd09d7bae852a9add716efdc070a3e2068377e6ff0d6fdaa", size = 3192301 }, 387 | { url = "https://files.pythonhosted.org/packages/4d/21/87bcad723070f7cd5f9d45fb05557596aa1d23d19eef078b13edc9e31813/SQLAlchemy-2.0.30-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1429a4b0f709f19ff3b0cf13675b2b9bfa8a7e79990003207a011c0db880a13", size = 3192184 }, 388 | { url = "https://files.pythonhosted.org/packages/93/3a/5328fd0c2bcd5572b23b14e3ca78d0abc8ad126e70b282b9e6f9fb00af6b/SQLAlchemy-2.0.30-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:efedba7e13aa9a6c8407c48facfdfa108a5a4128e35f4c68f20c3407e4376aa9", size = 3197671 }, 389 | { url = "https://files.pythonhosted.org/packages/3e/39/b8a8633fb6f64dc4d4eef08d5d8b303d349eb14517c7cb602e1f03dc71a8/SQLAlchemy-2.0.30-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:16863e2b132b761891d6c49f0a0f70030e0bcac4fd208117f6b7e053e68668d0", size = 3192278 }, 390 | { url = "https://files.pythonhosted.org/packages/01/d5/c9661baf0ad062375049ad0081b3343c4bd0e95e98e58ea6763f0dbbfc41/SQLAlchemy-2.0.30-cp311-cp311-win32.whl", hash = "sha256:2ecabd9ccaa6e914e3dbb2aa46b76dede7eadc8cbf1b8083c94d936bcd5ffb49", size = 2052074 }, 391 | { url = "https://files.pythonhosted.org/packages/74/9a/eec023807ae78e83342567303916b34a348d9d40703e7cef5dfb1e3635b6/SQLAlchemy-2.0.30-cp311-cp311-win_amd64.whl", hash = "sha256:0b3f4c438e37d22b83e640f825ef0f37b95db9aa2d68203f2c9549375d0b2260", size = 2078188 }, 392 | { url = "https://files.pythonhosted.org/packages/2d/7d/00282d131c31108ef6ae666888fbe7797f97f2b55d43c1d088cd3bab2e54/SQLAlchemy-2.0.30-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5a79d65395ac5e6b0c2890935bad892eabb911c4aa8e8015067ddb37eea3d56c", size = 2081993 }, 393 | { url = "https://files.pythonhosted.org/packages/f9/ab/d37a4483768accbc9cd433cc5d6c45fb428840164b9df0328131011ce27c/SQLAlchemy-2.0.30-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9a5baf9267b752390252889f0c802ea13b52dfee5e369527da229189b8bd592e", size = 2072478 }, 394 | { url = "https://files.pythonhosted.org/packages/5f/92/db44ea3953e1f3b81a9c2a2852aa7542839da3300e50ee5615a67c3932b0/SQLAlchemy-2.0.30-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cb5a646930c5123f8461f6468901573f334c2c63c795b9af350063a736d0134", size = 3226887 }, 395 | { url = "https://files.pythonhosted.org/packages/51/b8/3fd88455562518b6e8b97c4bc5784a819bd0a5c26be2a3409d3245626fac/SQLAlchemy-2.0.30-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:296230899df0b77dec4eb799bcea6fbe39a43707ce7bb166519c97b583cfcab3", size = 3237515 }, 396 | { url = 
"https://files.pythonhosted.org/packages/50/65/b85460a54d7e379ad92bb0fa816caf53d5cf45924b738c6b57791a03f639/SQLAlchemy-2.0.30-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c62d401223f468eb4da32627bffc0c78ed516b03bb8a34a58be54d618b74d472", size = 3230993 }, 397 | { url = "https://files.pythonhosted.org/packages/48/5e/620fa87990aa04308523e2bfaf61ce20ddc0a1082c9f3e548d0d26ab0033/SQLAlchemy-2.0.30-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3b69e934f0f2b677ec111b4d83f92dc1a3210a779f69bf905273192cf4ed433e", size = 3236918 }, 398 | { url = "https://files.pythonhosted.org/packages/ae/72/7c2166f6182bcf3b35228878ec323771df60774bf0b63019afe3a0fc97b4/SQLAlchemy-2.0.30-cp312-cp312-win32.whl", hash = "sha256:77d2edb1f54aff37e3318f611637171e8ec71472f1fdc7348b41dcb226f93d90", size = 2050758 }, 399 | { url = "https://files.pythonhosted.org/packages/dc/01/bff536f96ea323a7d80df128a7bc947e3c25a60383425bf491232112c30d/SQLAlchemy-2.0.30-cp312-cp312-win_amd64.whl", hash = "sha256:b6c7ec2b1f4969fc19b65b7059ed00497e25f54069407a8701091beb69e591a5", size = 2076248 }, 400 | { url = "https://files.pythonhosted.org/packages/de/80/13fc9c003dffc169e03244e0ce23495ff54bbd77ba1245ef01c9a5c04a4c/SQLAlchemy-2.0.30-py3-none-any.whl", hash = "sha256:7108d569d3990c71e26a42f60474b4c02c8586c4681af5fd67e51a044fdea86a", size = 1873477 }, 401 | ] 402 | 403 | [[package]] 404 | name = "typing-extensions" 405 | version = "4.11.0" 406 | source = { registry = "https://pypi.org/simple" } 407 | sdist = { url = "https://files.pythonhosted.org/packages/f6/f3/b827b3ab53b4e3d8513914586dcca61c355fa2ce8252dea4da56e67bf8f2/typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0", size = 78744 } 408 | wheels = [ 409 | { url = "https://files.pythonhosted.org/packages/01/f3/936e209267d6ef7510322191003885de524fc48d1b43269810cd589ceaf5/typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a", size = 34698 }, 410 | ] 411 | 412 | [[package]] 413 | name = "venusian" 414 | version = "3.1.0" 415 | source = { registry = "https://pypi.org/simple" } 416 | sdist = { url = "https://files.pythonhosted.org/packages/f8/39/7c0d9011ec465951aaf71c252effc7c031a04404887422c6f66ba26500e1/venusian-3.1.0.tar.gz", hash = "sha256:eb72cdca6f3139a15dc80f9c95d3c10f8a54a0ba881eeef8e2ec5b42d3ee3a95", size = 37960 } 417 | wheels = [ 418 | { url = "https://files.pythonhosted.org/packages/2c/d7/36860f68eb977ad685d0f0fda733eca913dbda1bb29bbc5f1c5ba460201a/venusian-3.1.0-py3-none-any.whl", hash = "sha256:d1fb1e49927f42573f6c9b7c4fcf61c892af8fdcaa2314daa01d9a560b23488d", size = 13987 }, 419 | ] 420 | --------------------------------------------------------------------------------