├── .github ├── dependabot.yml └── workflows │ ├── auto-merge.yml │ ├── e2e.yaml │ ├── lint.yaml │ └── publish-python-sdk.yaml ├── .gitignore ├── .pre-commit-config.yaml ├── .vscode └── settings.json ├── Caddyfile ├── README.md ├── compose.yml ├── conftest.py ├── examples ├── _deprecated │ ├── README.md │ ├── concurrency_limit_rr │ │ ├── event.py │ │ ├── test_dep_concurrency_limit_rr.py │ │ └── worker.py │ └── test_event_client.py ├── affinity-workers │ ├── event.py │ └── worker.py ├── api │ ├── api.py │ ├── async_api.py │ └── test_api.py ├── async │ ├── event.py │ ├── test_async.py │ └── worker.py ├── blocked_async │ ├── event.py │ └── worker.py ├── bulk_fanout │ ├── bulk_trigger.py │ ├── stream.py │ ├── test_bulk_fanout.py │ ├── trigger.py │ └── worker.py ├── cancellation │ ├── test_cancellation.py │ └── worker.py ├── concurrency_limit │ ├── event.py │ ├── test_concurrency_limit.py │ └── worker.py ├── concurrency_limit_rr │ ├── event.py │ ├── test_concurrency_limit_rr.py │ └── worker.py ├── cron │ ├── programatic-async.py │ ├── programatic-sync.py │ └── workflow-definition.py ├── dag │ ├── event.py │ ├── test_dag.py │ └── worker.py ├── dedupe │ └── worker.py ├── default_priority │ └── worker.py ├── delayed │ ├── event.py │ ├── test_delayed.py │ └── worker.py ├── durable_sticky_with_affinity │ └── worker.py ├── events │ ├── event.py │ └── test_event.py ├── fanout │ ├── stream.py │ ├── sync_stream.py │ ├── test_fanout.py │ ├── trigger.py │ └── worker.py ├── fanout_sync │ ├── test_fanout_sync.py │ ├── trigger.py │ └── worker.py ├── logger │ ├── client.py │ ├── event.py │ ├── test_logger.py │ ├── worker.py │ └── workflow.py ├── manual_trigger │ ├── image.jpeg │ ├── stream.py │ ├── test_manual_trigger.py │ └── worker.py ├── on_failure │ ├── test_on_failure.py │ └── worker.py ├── opentelemetry_instrumentation │ ├── client.py │ ├── test_otel_instrumentation.py │ ├── tracer.py │ ├── triggers.py │ └── worker.py ├── overrides │ ├── test_overrides.py │ └── worker.py ├── 
programatic_replay │ ├── script.py │ └── test_programatic_replay.py ├── pydantic │ ├── test_pydantic.py │ ├── trigger.py │ └── worker.py ├── rate_limit │ ├── dynamic.py │ ├── event.py │ ├── test_rate_limit.py │ └── worker.py ├── retries_with_backoff │ └── worker.py ├── scheduled │ ├── programatic-async.py │ └── programatic-sync.py ├── simple │ ├── event.py │ └── worker.py ├── sticky_workers │ ├── event.py │ └── worker.py ├── sync_to_async │ └── worker.py ├── timeout │ ├── event.py │ ├── test_timeout.py │ └── worker.py ├── v2 │ └── simple │ │ ├── test_v2_worker.py │ │ └── worker.py └── worker_existing_loop │ └── worker.py ├── generate.sh ├── hatchet_sdk ├── __init__.py ├── client.py ├── clients │ ├── admin.py │ ├── dispatcher │ │ ├── action_listener.py │ │ └── dispatcher.py │ ├── event_ts.py │ ├── events.py │ ├── rest │ │ ├── __init__.py │ │ ├── api │ │ │ ├── __init__.py │ │ │ ├── api_token_api.py │ │ │ ├── default_api.py │ │ │ ├── event_api.py │ │ │ ├── github_api.py │ │ │ ├── healthcheck_api.py │ │ │ ├── log_api.py │ │ │ ├── metadata_api.py │ │ │ ├── rate_limits_api.py │ │ │ ├── slack_api.py │ │ │ ├── sns_api.py │ │ │ ├── step_run_api.py │ │ │ ├── tenant_api.py │ │ │ ├── user_api.py │ │ │ ├── worker_api.py │ │ │ ├── workflow_api.py │ │ │ ├── workflow_run_api.py │ │ │ └── workflow_runs_api.py │ │ ├── api_client.py │ │ ├── api_response.py │ │ ├── configuration.py │ │ ├── exceptions.py │ │ ├── models │ │ │ ├── __init__.py │ │ │ ├── accept_invite_request.py │ │ │ ├── api_error.py │ │ │ ├── api_errors.py │ │ │ ├── api_meta.py │ │ │ ├── api_meta_auth.py │ │ │ ├── api_meta_integration.py │ │ │ ├── api_meta_posthog.py │ │ │ ├── api_resource_meta.py │ │ │ ├── api_token.py │ │ │ ├── bulk_create_event_request.py │ │ │ ├── bulk_create_event_response.py │ │ │ ├── cancel_event_request.py │ │ │ ├── cancel_step_run_request.py │ │ │ ├── concurrency_limit_strategy.py │ │ │ ├── create_api_token_request.py │ │ │ ├── create_api_token_response.py │ │ │ ├── 
create_cron_workflow_trigger_request.py │ │ │ ├── create_event_request.py │ │ │ ├── create_pull_request_from_step_run.py │ │ │ ├── create_sns_integration_request.py │ │ │ ├── create_tenant_alert_email_group_request.py │ │ │ ├── create_tenant_invite_request.py │ │ │ ├── create_tenant_request.py │ │ │ ├── cron_workflows.py │ │ │ ├── cron_workflows_list.py │ │ │ ├── cron_workflows_method.py │ │ │ ├── cron_workflows_order_by_field.py │ │ │ ├── event.py │ │ │ ├── event_data.py │ │ │ ├── event_key_list.py │ │ │ ├── event_list.py │ │ │ ├── event_order_by_direction.py │ │ │ ├── event_order_by_field.py │ │ │ ├── event_update_cancel200_response.py │ │ │ ├── event_workflow_run_summary.py │ │ │ ├── events.py │ │ │ ├── get_step_run_diff_response.py │ │ │ ├── github_app_installation.py │ │ │ ├── github_branch.py │ │ │ ├── github_repo.py │ │ │ ├── info_get_version200_response.py │ │ │ ├── job.py │ │ │ ├── job_run.py │ │ │ ├── job_run_status.py │ │ │ ├── link_github_repository_request.py │ │ │ ├── list_api_tokens_response.py │ │ │ ├── list_github_app_installations_response.py │ │ │ ├── list_pull_requests_response.py │ │ │ ├── list_slack_webhooks.py │ │ │ ├── list_sns_integrations.py │ │ │ ├── log_line.py │ │ │ ├── log_line_level.py │ │ │ ├── log_line_list.py │ │ │ ├── log_line_order_by_direction.py │ │ │ ├── log_line_order_by_field.py │ │ │ ├── pagination_response.py │ │ │ ├── pull_request.py │ │ │ ├── pull_request_state.py │ │ │ ├── queue_metrics.py │ │ │ ├── rate_limit.py │ │ │ ├── rate_limit_list.py │ │ │ ├── rate_limit_order_by_direction.py │ │ │ ├── rate_limit_order_by_field.py │ │ │ ├── recent_step_runs.py │ │ │ ├── reject_invite_request.py │ │ │ ├── replay_event_request.py │ │ │ ├── replay_workflow_runs_request.py │ │ │ ├── replay_workflow_runs_response.py │ │ │ ├── rerun_step_run_request.py │ │ │ ├── schedule_workflow_run_request.py │ │ │ ├── scheduled_run_status.py │ │ │ ├── scheduled_workflows.py │ │ │ ├── scheduled_workflows_list.py │ │ │ ├── 
scheduled_workflows_method.py │ │ │ ├── scheduled_workflows_order_by_field.py │ │ │ ├── semaphore_slots.py │ │ │ ├── slack_webhook.py │ │ │ ├── sns_integration.py │ │ │ ├── step.py │ │ │ ├── step_run.py │ │ │ ├── step_run_archive.py │ │ │ ├── step_run_archive_list.py │ │ │ ├── step_run_diff.py │ │ │ ├── step_run_event.py │ │ │ ├── step_run_event_list.py │ │ │ ├── step_run_event_reason.py │ │ │ ├── step_run_event_severity.py │ │ │ ├── step_run_status.py │ │ │ ├── tenant.py │ │ │ ├── tenant_alert_email_group.py │ │ │ ├── tenant_alert_email_group_list.py │ │ │ ├── tenant_alerting_settings.py │ │ │ ├── tenant_invite.py │ │ │ ├── tenant_invite_list.py │ │ │ ├── tenant_list.py │ │ │ ├── tenant_member.py │ │ │ ├── tenant_member_list.py │ │ │ ├── tenant_member_role.py │ │ │ ├── tenant_queue_metrics.py │ │ │ ├── tenant_resource.py │ │ │ ├── tenant_resource_limit.py │ │ │ ├── tenant_resource_policy.py │ │ │ ├── tenant_step_run_queue_metrics.py │ │ │ ├── trigger_workflow_run_request.py │ │ │ ├── update_tenant_alert_email_group_request.py │ │ │ ├── update_tenant_invite_request.py │ │ │ ├── update_tenant_request.py │ │ │ ├── update_worker_request.py │ │ │ ├── user.py │ │ │ ├── user_change_password_request.py │ │ │ ├── user_login_request.py │ │ │ ├── user_register_request.py │ │ │ ├── user_tenant_memberships_list.py │ │ │ ├── user_tenant_public.py │ │ │ ├── webhook_worker.py │ │ │ ├── webhook_worker_create_request.py │ │ │ ├── webhook_worker_create_response.py │ │ │ ├── webhook_worker_created.py │ │ │ ├── webhook_worker_list_response.py │ │ │ ├── webhook_worker_request.py │ │ │ ├── webhook_worker_request_list_response.py │ │ │ ├── webhook_worker_request_method.py │ │ │ ├── worker.py │ │ │ ├── worker_label.py │ │ │ ├── worker_list.py │ │ │ ├── worker_runtime_info.py │ │ │ ├── worker_runtime_sdks.py │ │ │ ├── worker_type.py │ │ │ ├── workflow.py │ │ │ ├── workflow_concurrency.py │ │ │ ├── workflow_deployment_config.py │ │ │ ├── workflow_kind.py │ │ │ ├── workflow_list.py │ │ │ ├── 
workflow_metrics.py │ │ │ ├── workflow_run.py │ │ │ ├── workflow_run_cancel200_response.py │ │ │ ├── workflow_run_list.py │ │ │ ├── workflow_run_order_by_direction.py │ │ │ ├── workflow_run_order_by_field.py │ │ │ ├── workflow_run_shape.py │ │ │ ├── workflow_run_status.py │ │ │ ├── workflow_run_triggered_by.py │ │ │ ├── workflow_runs_cancel_request.py │ │ │ ├── workflow_runs_metrics.py │ │ │ ├── workflow_runs_metrics_counts.py │ │ │ ├── workflow_tag.py │ │ │ ├── workflow_trigger_cron_ref.py │ │ │ ├── workflow_trigger_event_ref.py │ │ │ ├── workflow_triggers.py │ │ │ ├── workflow_update_request.py │ │ │ ├── workflow_version.py │ │ │ ├── workflow_version_concurrency.py │ │ │ ├── workflow_version_definition.py │ │ │ ├── workflow_version_meta.py │ │ │ └── workflow_workers_count.py │ │ ├── rest.py │ │ └── tenacity_utils.py │ ├── rest_client.py │ ├── run_event_listener.py │ └── workflow_listener.py ├── connection.py ├── context │ ├── __init__.py │ ├── context.py │ └── worker_context.py ├── contracts │ ├── dispatcher_pb2.py │ ├── dispatcher_pb2.pyi │ ├── dispatcher_pb2_grpc.py │ ├── events_pb2.py │ ├── events_pb2.pyi │ ├── events_pb2_grpc.py │ ├── workflows_pb2.py │ ├── workflows_pb2.pyi │ └── workflows_pb2_grpc.py ├── features │ ├── cron.py │ └── scheduled.py ├── hatchet.py ├── labels.py ├── loader.py ├── logger.py ├── metadata.py ├── opentelemetry │ └── instrumentor.py ├── py.typed ├── rate_limit.py ├── semver.py ├── token.py ├── utils │ ├── aio_utils.py │ ├── backoff.py │ ├── serialization.py │ ├── types.py │ └── typing.py ├── v2 │ ├── callable.py │ ├── concurrency.py │ └── hatchet.py ├── worker │ ├── __init__.py │ ├── action_listener_process.py │ ├── runner │ │ ├── run_loop_manager.py │ │ ├── runner.py │ │ └── utils │ │ │ ├── capture_logs.py │ │ │ └── error_with_traceback.py │ └── worker.py ├── workflow.py └── workflow_run.py ├── hot-reload.sh ├── lint.sh ├── openapi_patch.patch ├── poetry.lock ├── publish.sh └── pyproject.toml /.github/dependabot.yml: 
-------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "gitsubmodule" 4 | directory: "/" 5 | schedule: 6 | interval: "daily" 7 | 8 | - package-ecosystem: "github-actions" 9 | directory: "/" 10 | schedule: 11 | interval: "daily" 12 | -------------------------------------------------------------------------------- /.github/workflows/auto-merge.yml: -------------------------------------------------------------------------------- 1 | name: auto-merge 2 | 3 | on: 4 | pull_request_target: 5 | 6 | jobs: 7 | auto-merge: 8 | runs-on: ubuntu-latest 9 | if: ${{ github.actor == 'dependabot[bot]' }} 10 | permissions: 11 | pull-requests: write 12 | issues: write 13 | repository-projects: write 14 | contents: write 15 | steps: 16 | - name: "Metadata" 17 | id: metadata 18 | uses: dependabot/fetch-metadata@v2.1.0 19 | with: 20 | github-token: "${{ secrets.GITHUB_TOKEN }}" 21 | - name: "Enable auto-squash" 22 | if: steps.metadata.outputs.package-ecosystem == 'submodules' 23 | run: gh pr merge --auto --squash "$PR_URL" 24 | env: 25 | PR_URL: ${{ github.event.pull_request.html_url }} 26 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 27 | - name: "Approve (minor & patch)" 28 | id: approve 29 | if: steps.metadata.outputs.package-ecosystem == 'submodules' 30 | run: gh pr review --approve "$PR_URL" 31 | env: 32 | PR_URL: ${{ github.event.pull_request.html_url }} 33 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 34 | -------------------------------------------------------------------------------- /.github/workflows/e2e.yaml: -------------------------------------------------------------------------------- 1 | name: e2e 2 | 3 | on: [pull_request] 4 | 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | strategy: 9 | matrix: 10 | python-version: ["3.10", "3.11", "3.12", "3.13"] 11 | 12 | steps: 13 | - uses: actions/checkout@v4 14 | 15 | - name: Run Hatchet Engine 16 | run: docker compose up -d 17 | 18 | - name: Set up 
Python ${{ matrix.python-version }} 19 | uses: actions/setup-python@v5 20 | with: 21 | python-version: ${{ matrix.python-version }} 22 | - name: Display Python version 23 | run: python -c "import sys; print(sys.version)" 24 | - name: Install Poetry 25 | uses: snok/install-poetry@v1 26 | with: 27 | version: 1.5.1 28 | virtualenvs-create: true 29 | virtualenvs-in-project: true 30 | - name: Install dependencies 31 | run: poetry install --no-interaction --all-extras 32 | 33 | - name: Generate Env File 34 | run: | 35 | cat > .env <<EOF 36 | HATCHET_CLIENT_TOKEN="$(docker compose run --no-deps setup-config /hatchet/hatchet-admin token create --config /hatchet/config --tenant-id 707d0855-80ab-4e1f-a156-f1c4546cbf52 | xargs)" 37 | HATCHET_CLIENT_TLS_STRATEGY=none 38 | EOF 39 | 40 | - name: Set HATCHET_CLIENT_NAMESPACE 41 | run: | 42 | PYTHON_VERSION=$(python -c "import sys; print(f'py{sys.version_info.major}{sys.version_info.minor}')") 43 | SHORT_SHA=$(git rev-parse --short HEAD) 44 | echo "HATCHET_CLIENT_NAMESPACE=${PYTHON_VERSION}-${SHORT_SHA}" >> $GITHUB_ENV 45 | - name: Run pytest 46 | run: | 47 | echo "Using HATCHET_CLIENT_NAMESPACE: $HATCHET_CLIENT_NAMESPACE" 48 | poetry run pytest -s -vvv --maxfail=5 --timeout=120 --capture=no 49 | -------------------------------------------------------------------------------- /.github/workflows/lint.yaml: -------------------------------------------------------------------------------- 1 | name: Lint 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - main 7 | 8 | jobs: 9 | lint: 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - name: Checkout code 14 | uses: actions/checkout@v4 15 | 16 | - name: Set up Python 17 | uses: actions/setup-python@v5 18 | with: 19 | python-version: "3.11" 20 | 21 | - name: Install Poetry 22 | uses: snok/install-poetry@v1 23 | with: 24 | version: 1.5.1 25 | virtualenvs-create: true 26 | virtualenvs-in-project: true 27 | 28 | - name: Install linting tools 29 | run: poetry install --all-extras 30 | 31 | - name:
Run Black 32 | run: poetry run black . --check --verbose --diff --color 33 | 34 | - name: Run Isort 35 | run: poetry run isort . --check-only --diff 36 | 37 | - name: Run MyPy 38 | run: poetry run mypy --config-file=pyproject.toml 39 | -------------------------------------------------------------------------------- /.github/workflows/publish-python-sdk.yaml: -------------------------------------------------------------------------------- 1 | name: Publish Python SDK 2 | 3 | on: 4 | workflow_dispatch: 5 | 6 | jobs: 7 | publish: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - name: Checkout Repository 11 | uses: actions/checkout@v4 12 | with: 13 | submodules: recursive 14 | 15 | - name: Set up Python 16 | uses: actions/setup-python@v5 17 | with: 18 | python-version: "3.x" 19 | 20 | - name: Install Poetry 21 | run: | 22 | pipx install poetry==1.7.1 23 | 24 | - name: Run publish.sh script 25 | run: | 26 | sh publish.sh 27 | env: 28 | POETRY_PYPI_TOKEN_PYPI: ${{ secrets.POETRY_PYPI_TOKEN_PYPI }} 29 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v4.5.0 4 | hooks: 5 | - id: check-merge-conflict 6 | - id: mixed-line-ending 7 | args: ["--fix=lf"] 8 | - id: end-of-file-fixer 9 | exclude: \.sql 10 | - id: check-yaml 11 | - repo: https://github.com/PyCQA/isort 12 | rev: 5.13.2 13 | hooks: 14 | - id: isort 15 | additional_dependencies: ["isort[pyproject]"] 16 | exclude: _pb2(_grpc)?\.py 17 | types: 18 | - python 19 | - repo: https://github.com/psf/black 20 | rev: 24.3.0 21 | hooks: 22 | - id: black 23 | args: ["--config=pyproject.toml"] 24 | exclude: _pb2(_grpc)?\.py 25 | types: 26 | - python 27 | - repo: https://github.com/pre-commit/mirrors-mypy 28 | rev: v1.13.0 29 | hooks: 30 | - id: mypy 31 | args: [--config-file=pyproject.toml] 32 | 
-------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.testing.pytestArgs": [ 3 | "." 4 | ], 5 | "python.testing.unittestEnabled": false, 6 | "python.testing.pytestEnabled": true, 7 | "cSpell.words": [ 8 | "cygrpc", 9 | "cython", 10 | "dotenv", 11 | "reqs" 12 | ] 13 | } 14 | -------------------------------------------------------------------------------- /Caddyfile: -------------------------------------------------------------------------------- 1 | http://localhost:8080 { 2 | handle /api/* { 3 | reverse_proxy hatchet-api:8080 4 | } 5 | 6 | handle /* { 7 | reverse_proxy hatchet-frontend:80 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Moved to https://github.com/hatchet-dev/hatchet/tree/main/sdks/python 2 | -------------------------------------------------------------------------------- /conftest.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import subprocess 3 | import time 4 | from io import BytesIO 5 | from threading import Thread 6 | from typing import AsyncGenerator, Callable, cast 7 | 8 | import psutil 9 | import pytest 10 | import pytest_asyncio 11 | 12 | from hatchet_sdk import Hatchet 13 | 14 | 15 | @pytest_asyncio.fixture(scope="session") 16 | async def aiohatchet() -> AsyncGenerator[Hatchet, None]: 17 | yield Hatchet(debug=True) 18 | 19 | 20 | @pytest.fixture(scope="session") 21 | def hatchet() -> Hatchet: 22 | return Hatchet(debug=True) 23 | 24 | 25 | @pytest.fixture() 26 | def worker(request: pytest.FixtureRequest): 27 | example = cast(str, request.param) 28 | 29 | command = ["poetry", "run", example] 30 | 31 | logging.info(f"Starting background worker: {' '.join(command)}") 32 | proc = 
subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 33 | 34 | # Check if the process is still running 35 | if proc.poll() is not None: 36 | raise Exception(f"Worker failed to start with return code {proc.returncode}") 37 | 38 | time.sleep(5) 39 | 40 | def log_output(pipe: BytesIO, log_func: Callable[[str], None]) -> None: 41 | for line in iter(pipe.readline, b""): 42 | log_func(line.decode().strip()) 43 | 44 | Thread(target=log_output, args=(proc.stdout, logging.info), daemon=True).start() 45 | Thread(target=log_output, args=(proc.stderr, logging.error), daemon=True).start() 46 | 47 | yield proc 48 | 49 | logging.info("Cleaning up background worker") 50 | parent = psutil.Process(proc.pid) 51 | children = parent.children(recursive=True) 52 | for child in children: 53 | child.terminate() 54 | parent.terminate() 55 | 56 | _, alive = psutil.wait_procs([parent] + children, timeout=3) 57 | for p in alive: 58 | logging.warning(f"Force killing process {p.pid}") 59 | p.kill() 60 | -------------------------------------------------------------------------------- /examples/_deprecated/README.md: -------------------------------------------------------------------------------- 1 | The examples and tests in this directory are deprecated, but we're maintaining them to ensure backwards compatibility. 
2 | -------------------------------------------------------------------------------- /examples/_deprecated/concurrency_limit_rr/event.py: -------------------------------------------------------------------------------- 1 | from dotenv import load_dotenv 2 | 3 | from hatchet_sdk import new_client 4 | 5 | load_dotenv() 6 | 7 | client = new_client() 8 | 9 | for i in range(200): 10 | group = "0" 11 | 12 | if i % 2 == 0: 13 | group = "1" 14 | 15 | client.event.push("concurrency-test", {"group": group}) 16 | -------------------------------------------------------------------------------- /examples/_deprecated/concurrency_limit_rr/test_dep_concurrency_limit_rr.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import time 3 | 4 | import pytest 5 | 6 | from hatchet_sdk import Hatchet, Worker 7 | from hatchet_sdk.workflow_run import WorkflowRunRef 8 | 9 | 10 | # requires scope module or higher for shared event loop 11 | @pytest.mark.parametrize("worker", ["concurrency_limit_rr"], indirect=True) 12 | @pytest.mark.skip(reason="The timing for this test is not reliable") 13 | @pytest.mark.asyncio(scope="session") 14 | async def test_run(aiohatchet: Hatchet, worker: Worker) -> None: 15 | num_groups = 2 16 | runs: list[WorkflowRunRef] = [] 17 | 18 | # Start all runs 19 | for i in range(1, num_groups + 1): 20 | run = aiohatchet.admin.run_workflow("ConcurrencyDemoWorkflowRR", {"group": i}) 21 | runs.append(run) 22 | run = aiohatchet.admin.run_workflow("ConcurrencyDemoWorkflowRR", {"group": i}) 23 | runs.append(run) 24 | 25 | # Wait for all results 26 | successful_runs = [] 27 | cancelled_runs = [] 28 | 29 | start_time = time.time() 30 | 31 | # Process each run individually 32 | for i, run in enumerate(runs, start=1): 33 | try: 34 | result = await run.result() 35 | successful_runs.append((i, result)) 36 | except Exception as e: 37 | if "CANCELLED_BY_CONCURRENCY_LIMIT" in str(e): 38 | cancelled_runs.append((i, str(e))) 39 | 
else: 40 | raise # Re-raise if it's an unexpected error 41 | 42 | end_time = time.time() 43 | total_time = end_time - start_time 44 | 45 | # Check that we have the correct number of successful and cancelled runs 46 | assert ( 47 | len(successful_runs) == 4 48 | ), f"Expected 4 successful runs, got {len(successful_runs)}" 49 | assert ( 50 | len(cancelled_runs) == 0 51 | ), f"Expected 0 cancelled run, got {len(cancelled_runs)}" 52 | 53 | # Check that the total time is close to 2 seconds 54 | assert ( 55 | 3.8 <= total_time <= 7 56 | ), f"Expected runtime to be about 4 seconds, but it took {total_time:.2f} seconds" 57 | 58 | print(f"Total execution time: {total_time:.2f} seconds") 59 | -------------------------------------------------------------------------------- /examples/_deprecated/concurrency_limit_rr/worker.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | from dotenv import load_dotenv 4 | 5 | from hatchet_sdk import ConcurrencyLimitStrategy, Context, Hatchet 6 | 7 | load_dotenv() 8 | 9 | hatchet = Hatchet(debug=True) 10 | 11 | 12 | @hatchet.workflow(on_events=["concurrency-test"], schedule_timeout="10m") 13 | class ConcurrencyDemoWorkflowRR: 14 | 15 | # NOTE: We're replacing the concurrency key function with a CEL expression 16 | # to simplify architecture. 17 | # See ../../concurrency_limit_rr/worker.py for the new implementation. 
18 | @hatchet.concurrency( 19 | max_runs=1, limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN 20 | ) 21 | def concurrency(self, context: Context) -> str: 22 | input = context.workflow_input() 23 | print(input) 24 | return f'group-{input["group"]}' 25 | 26 | @hatchet.step() 27 | def step1(self, context: Context) -> None: 28 | print("starting step1") 29 | time.sleep(2) 30 | print("finished step1") 31 | pass 32 | 33 | 34 | workflow = ConcurrencyDemoWorkflowRR() 35 | worker = hatchet.worker("concurrency-demo-worker-rr", max_runs=10) 36 | worker.register_workflow(workflow) 37 | 38 | worker.start() 39 | -------------------------------------------------------------------------------- /examples/_deprecated/test_event_client.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dotenv import load_dotenv 3 | 4 | from hatchet_sdk import new_client 5 | from hatchet_sdk.hatchet import Hatchet 6 | 7 | load_dotenv() 8 | 9 | 10 | @pytest.mark.asyncio(scope="session") 11 | async def test_direct_client_event() -> None: 12 | client = new_client() 13 | e = client.event.push("user:create", {"test": "test"}) 14 | 15 | assert e.eventId is not None 16 | 17 | 18 | @pytest.mark.filterwarnings( 19 | "ignore:Direct access to client is deprecated:DeprecationWarning" 20 | ) 21 | @pytest.mark.asyncio(scope="session") 22 | async def test_hatchet_client_event() -> None: 23 | hatchet = Hatchet() 24 | e = hatchet.client.event.push("user:create", {"test": "test"}) 25 | 26 | assert e.eventId is not None 27 | -------------------------------------------------------------------------------- /examples/affinity-workers/event.py: -------------------------------------------------------------------------------- 1 | from dotenv import load_dotenv 2 | 3 | from hatchet_sdk.hatchet import Hatchet 4 | 5 | load_dotenv() 6 | 7 | hatchet = Hatchet(debug=True) 8 | 9 | hatchet.event.push( 10 | "affinity:run", 11 | {"test": "test"}, 12 | 
options={"additional_metadata": {"hello": "moon"}}, 13 | ) 14 | -------------------------------------------------------------------------------- /examples/affinity-workers/worker.py: -------------------------------------------------------------------------------- 1 | from dotenv import load_dotenv 2 | 3 | from hatchet_sdk import Context, Hatchet, WorkerLabelComparator 4 | 5 | load_dotenv() 6 | 7 | hatchet = Hatchet(debug=True) 8 | 9 | 10 | @hatchet.workflow(on_events=["affinity:run"]) 11 | class AffinityWorkflow: 12 | @hatchet.step( 13 | desired_worker_labels={ 14 | "model": {"value": "fancy-ai-model-v2", "weight": 10}, 15 | "memory": { 16 | "value": 256, 17 | "required": True, 18 | "comparator": WorkerLabelComparator.LESS_THAN, 19 | }, 20 | }, 21 | ) 22 | async def step(self, context: Context) -> dict[str, str | None]: 23 | if context.worker.labels().get("model") != "fancy-ai-model-v2": 24 | context.worker.upsert_labels({"model": "unset"}) 25 | # DO WORK TO EVICT OLD MODEL / LOAD NEW MODEL 26 | context.worker.upsert_labels({"model": "fancy-ai-model-v2"}) 27 | 28 | return {"worker": context.worker.id()} 29 | 30 | 31 | def main() -> None: 32 | worker = hatchet.worker( 33 | "affinity-worker", 34 | max_runs=10, 35 | labels={ 36 | "model": "fancy-ai-model-v2", 37 | "memory": 512, 38 | }, 39 | ) 40 | worker.register_workflow(AffinityWorkflow()) 41 | worker.start() 42 | 43 | 44 | if __name__ == "__main__": 45 | main() 46 | -------------------------------------------------------------------------------- /examples/api/api.py: -------------------------------------------------------------------------------- 1 | from dotenv import load_dotenv 2 | 3 | from hatchet_sdk import Hatchet, WorkflowList 4 | 5 | load_dotenv() 6 | 7 | hatchet = Hatchet(debug=True) 8 | 9 | 10 | def main() -> None: 11 | workflow_list = hatchet.rest.workflow_list() 12 | rows = workflow_list.rows or [] 13 | 14 | for workflow in rows: 15 | print(workflow.name) 16 | print(workflow.metadata.id) 17 | 
print(workflow.metadata.created_at) 18 | print(workflow.metadata.updated_at) 19 | 20 | 21 | if __name__ == "__main__": 22 | main() 23 | -------------------------------------------------------------------------------- /examples/api/async_api.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from typing import cast 3 | 4 | from dotenv import load_dotenv 5 | 6 | from hatchet_sdk import Hatchet, WorkflowList 7 | 8 | load_dotenv() 9 | 10 | hatchet = Hatchet(debug=True) 11 | 12 | 13 | async def main() -> None: 14 | workflow_list = await hatchet.rest.aio.workflow_list() 15 | rows = workflow_list.rows or [] 16 | 17 | for workflow in rows: 18 | print(workflow.name) 19 | print(workflow.metadata.id) 20 | print(workflow.metadata.created_at) 21 | print(workflow.metadata.updated_at) 22 | 23 | 24 | if __name__ == "__main__": 25 | asyncio.run(main()) 26 | -------------------------------------------------------------------------------- /examples/api/test_api.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from hatchet_sdk import Hatchet, Worker 4 | 5 | 6 | # requires scope module or higher for shared event loop 7 | ## IMPORTANT: Worker needs to be set here to ensure at least one workflow exists 8 | @pytest.mark.parametrize("worker", ["concurrency_limit_rr"], indirect=True) 9 | @pytest.mark.asyncio(scope="session") 10 | async def test_list_workflows(hatchet: Hatchet, worker: Worker) -> None: 11 | workflows = hatchet.rest.workflow_list() 12 | 13 | assert len(workflows.rows or []) != 0 14 | 15 | 16 | # requires scope module or higher for shared event loop 17 | @pytest.mark.parametrize("worker", ["concurrency_limit_rr"], indirect=True) 18 | @pytest.mark.asyncio(scope="session") 19 | async def test_async_list_workflows(aiohatchet: Hatchet, worker: Worker) -> None: 20 | workflows = await aiohatchet.rest.aio.workflow_list() 21 | 22 | assert len(workflows.rows or []) != 
0 23 | -------------------------------------------------------------------------------- /examples/async/event.py: -------------------------------------------------------------------------------- 1 | from dotenv import load_dotenv 2 | 3 | from hatchet_sdk import Hatchet 4 | 5 | load_dotenv() 6 | 7 | hatchet = Hatchet() 8 | hatchet.event.push("async:create", {"test": "test"}) 9 | -------------------------------------------------------------------------------- /examples/async/test_async.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from hatchet_sdk import Hatchet, Worker 4 | 5 | 6 | # requires scope module or higher for shared event loop 7 | @pytest.mark.asyncio(scope="session") 8 | @pytest.mark.parametrize("worker", ["async"], indirect=True) 9 | async def test_run(hatchet: Hatchet, worker: Worker) -> None: 10 | run = hatchet.admin.run_workflow("AsyncWorkflow", {}) 11 | result = await run.result() 12 | assert result["step1"]["test"] == "test" 13 | 14 | 15 | @pytest.mark.parametrize("worker", ["async"], indirect=True) 16 | @pytest.mark.skip(reason="Skipping this test until we can dedicate more time to debug") 17 | @pytest.mark.asyncio(scope="session") 18 | async def test_run_async(aiohatchet: Hatchet, worker: Worker) -> None: 19 | run = await aiohatchet.admin.aio.run_workflow("AsyncWorkflow", {}) 20 | result = await run.result() 21 | assert result["step1"]["test"] == "test" 22 | -------------------------------------------------------------------------------- /examples/async/worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from dotenv import load_dotenv 4 | 5 | from hatchet_sdk import Context, Hatchet 6 | 7 | load_dotenv() 8 | 9 | hatchet = Hatchet(debug=True) 10 | 11 | 12 | @hatchet.workflow(on_events=["async:create"]) 13 | class AsyncWorkflow: 14 | 15 | @hatchet.step(timeout="10s") 16 | async def step1(self, context: Context) -> 
dict[str, str]: 17 | print("started step1") 18 | return {"test": "test"} 19 | 20 | @hatchet.step(parents=["step1"], timeout="10s") 21 | async def step2(self, context: Context) -> None: 22 | print("finished step2") 23 | 24 | 25 | async def _main() -> None: 26 | workflow = AsyncWorkflow() 27 | worker = hatchet.worker("async-worker", max_runs=4) 28 | worker.register_workflow(workflow) 29 | await worker.async_start() 30 | 31 | 32 | def main() -> None: 33 | asyncio.run(_main()) 34 | 35 | 36 | if __name__ == "__main__": 37 | main() 38 | -------------------------------------------------------------------------------- /examples/blocked_async/event.py: -------------------------------------------------------------------------------- 1 | from dotenv import load_dotenv 2 | 3 | from hatchet_sdk import PushEventOptions, new_client 4 | 5 | load_dotenv() 6 | 7 | client = new_client() 8 | 9 | # client.event.push("user:create", {"test": "test"}) 10 | client.event.push( 11 | "user:create", {"test": "test"}, options={"additional_metadata": {"hello": "moon"}} 12 | ) 13 | -------------------------------------------------------------------------------- /examples/blocked_async/worker.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | import time 3 | 4 | from dotenv import load_dotenv 5 | 6 | from hatchet_sdk import Context, Hatchet 7 | 8 | load_dotenv() 9 | 10 | hatchet = Hatchet(debug=True) 11 | 12 | # WARNING: this is an example of what NOT to do 13 | # This workflow is intentionally blocking the main thread 14 | # and will block the worker from processing other workflows 15 | # 16 | # You do not want to run long sync functions in an async def function 17 | 18 | 19 | @hatchet.workflow(on_events=["user:create"]) 20 | class Blocked: 21 | @hatchet.step(timeout="11s", retries=3) 22 | async def step1(self, context: Context) -> dict[str, str | int | float]: 23 | print("Executing step1") 24 | 25 | # CPU-bound task: Calculate a large 
import asyncio
import base64
import json
import os
from typing import Any

from dotenv import load_dotenv

from hatchet_sdk import new_client
from hatchet_sdk.clients.admin import TriggerWorkflowOptions
from hatchet_sdk.clients.rest.models.workflow_run import WorkflowRun
from hatchet_sdk.clients.run_event_listener import StepRunEventType


async def main() -> None:
    """Bulk-trigger 20 BulkParent runs and print each run's result.

    Run *i* receives input ``{"n": i}``, so each BulkParent fans out to
    *i* child workflows.
    """
    load_dotenv()
    hatchet = new_client()

    # One trigger dict per run. BUG FIX: the metadata key/value pair was
    # written as plain strings containing a literal "{i}"; they are now
    # real f-strings so every run carries unique, searchable metadata.
    workflow_runs: list[dict[str, Any]] = [
        {
            "workflow_name": "BulkParent",
            "input": {"n": i},
            "options": {
                "additional_metadata": {
                    "bulk-trigger": i,
                    f"hello-{i}": f"earth-{i}",
                },
            },
        }
        for i in range(20)
    ]

    refs = hatchet.admin.run_workflows(
        workflow_runs,
    )

    # return_exceptions=True keeps one failed run from cancelling the rest.
    results = await asyncio.gather(
        *[ref.result() for ref in refs],
        return_exceptions=True,
    )

    for result in results:
        if isinstance(result, Exception):
            print(f"An error occurred: {result}")  # Handle the exception here
        else:
            print(result)


if __name__ == "__main__":
    asyncio.run(main())
import pytest

from hatchet_sdk import Hatchet, Worker


# requires scope module or higher for shared event loop
@pytest.mark.asyncio(scope="session")
@pytest.mark.parametrize("worker", ["bulk_fanout"], indirect=True)
async def test_run(hatchet: Hatchet, worker: Worker) -> None:
    """BulkParent with n=12 must produce exactly 12 child results."""
    ref = hatchet.admin.run_workflow("BulkParent", {"n": 12})
    outcome = await ref.result()
    assert len(outcome["spawn"]["results"]) == 12


# requires scope module or higher for shared event loop
@pytest.mark.asyncio(scope="session")
@pytest.mark.parametrize("worker", ["bulk_fanout"], indirect=True)
async def test_run2(hatchet: Hatchet, worker: Worker) -> None:
    """A second run with a different fan-out width (n=10)."""
    ref = hatchet.admin.run_workflow("BulkParent", {"n": 10})
    outcome = await ref.result()
    assert len(outcome["spawn"]["results"]) == 10
import asyncio
from typing import Any

from dotenv import load_dotenv

from hatchet_sdk import Context, Hatchet
from hatchet_sdk.clients.admin import ChildWorkflowRunDict

load_dotenv()

hatchet = Hatchet(debug=True)


@hatchet.workflow(on_events=["parent:create"])
class BulkParent:
    """Fans out to ``n`` BulkChild runs via a single bulk spawn call."""

    @hatchet.step(timeout="5m")
    async def spawn(self, context: Context) -> dict[str, list[Any]]:
        """Spawn ``n`` BulkChild workflows (default 100) and gather results.

        Fixes vs. original: removed a dead ``results = []`` store that was
        immediately shadowed by the gather result, and replaced the manual
        append loop / ``len(x) == 0`` check with idiomatic forms.
        """
        print("spawning child")

        context.put_stream("spawning...")

        n = context.workflow_input().get("n", 100)

        # One spawn request per child; the key de-duplicates repeat spawns.
        child_workflow_runs: list[ChildWorkflowRunDict] = [
            {
                "workflow_name": "BulkChild",
                "input": {"a": str(i)},
                "key": f"child{i}",
                "options": {"additional_metadata": {"hello": "earth"}},
            }
            for i in range(n)
        ]

        if not child_workflow_runs:
            return {}

        spawn_results = await context.aio.spawn_workflows(child_workflow_runs)

        # return_exceptions=True so one failed child doesn't cancel the rest.
        results = await asyncio.gather(
            *[ref.result() for ref in spawn_results],
            return_exceptions=True,
        )

        print("finished spawning children")

        for result in results:
            if isinstance(result, Exception):
                print(f"An error occurred: {result}")
            else:
                print(result)

        return {"results": results}


@hatchet.workflow(on_events=["child:create"])
class BulkChild:
    """Two-step child workflow spawned in bulk by BulkParent."""

    @hatchet.step()
    def process(self, context: Context) -> dict[str, str]:
        a = context.workflow_input()["a"]
        print(f"child process {a}")
        context.put_stream("child 1...")
        return {"status": "success " + a}

    @hatchet.step()
    def process2(self, context: Context) -> dict[str, str]:
        print("child process2")
        context.put_stream("child 2...")
        return {"status2": "success"}


def main() -> None:
    worker = hatchet.worker("fanout-worker", max_runs=40)
    worker.register_workflow(BulkParent())
    worker.register_workflow(BulkChild())
    worker.start()


if __name__ == "__main__":
    main()
import asyncio

from dotenv import load_dotenv

from hatchet_sdk import Context, Hatchet

load_dotenv()

hatchet = Hatchet(debug=True)


@hatchet.workflow(on_events=["user:create"])
class CancelWorkflow:
    """Loops until cancelled (or ~20s elapse) to demonstrate cancellation.

    The step's 10s timeout normally fires first, flipping ``exit_flag``.
    """

    @hatchet.step(timeout="10s", retries=1)
    async def step1(self, context: Context) -> None:
        # Poll the exit flag once per second, capped at 20 iterations.
        for attempt in range(20):
            if context.exit_flag:
                break
            print(f"Waiting for cancellation {attempt}")
            await asyncio.sleep(1)

        if context.exit_flag:
            print("Cancelled")


def main() -> None:
    worker = hatchet.worker("cancellation-worker", max_runs=4)
    worker.register_workflow(CancelWorkflow())
    worker.start()


if __name__ == "__main__":
    main()
["concurrency_limit"], indirect=True) 11 | async def test_run(hatchet: Hatchet, worker: Worker) -> None: 12 | num_runs = 6 13 | runs: list[WorkflowRunRef] = [] 14 | 15 | # Start all runs 16 | for i in range(1, num_runs + 1): 17 | run = hatchet.admin.run_workflow("ConcurrencyDemoWorkflow", {"run": i}) 18 | runs.append(run) 19 | 20 | # Wait for all results 21 | successful_runs = [] 22 | cancelled_runs = [] 23 | 24 | # Process each run individually 25 | for i, run in enumerate(runs, start=1): 26 | try: 27 | result = await run.result() 28 | successful_runs.append((i, result)) 29 | except Exception as e: 30 | if "CANCELLED_BY_CONCURRENCY_LIMIT" in str(e): 31 | cancelled_runs.append((i, str(e))) 32 | else: 33 | raise # Re-raise if it's an unexpected error 34 | 35 | # Check that we have the correct number of successful and cancelled runs 36 | assert ( 37 | len(successful_runs) == 5 38 | ), f"Expected 5 successful runs, got {len(successful_runs)}" 39 | assert ( 40 | len(cancelled_runs) == 1 41 | ), f"Expected 1 cancelled run, got {len(cancelled_runs)}" 42 | -------------------------------------------------------------------------------- /examples/concurrency_limit/worker.py: -------------------------------------------------------------------------------- 1 | import time 2 | from typing import Any 3 | 4 | from dotenv import load_dotenv 5 | 6 | from hatchet_sdk import Context, Hatchet 7 | from hatchet_sdk.contracts.workflows_pb2 import ConcurrencyLimitStrategy 8 | from hatchet_sdk.workflow import ConcurrencyExpression 9 | 10 | load_dotenv() 11 | 12 | hatchet = Hatchet(debug=True) 13 | 14 | 15 | @hatchet.workflow( 16 | on_events=["concurrency-test"], 17 | concurrency=ConcurrencyExpression( 18 | expression="input.group", 19 | max_runs=5, 20 | limit_strategy=ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS, 21 | ), 22 | ) 23 | class ConcurrencyDemoWorkflow: 24 | 25 | @hatchet.step() 26 | def step1(self, context: Context) -> dict[str, Any]: 27 | input = context.workflow_input() 28 | 
time.sleep(3) 29 | print("executed step1") 30 | return {"run": input["run"]} 31 | 32 | 33 | def main() -> None: 34 | workflow = ConcurrencyDemoWorkflow() 35 | worker = hatchet.worker("concurrency-demo-worker", max_runs=10) 36 | worker.register_workflow(workflow) 37 | 38 | worker.start() 39 | 40 | 41 | if __name__ == "__main__": 42 | main() 43 | -------------------------------------------------------------------------------- /examples/concurrency_limit_rr/event.py: -------------------------------------------------------------------------------- 1 | from dotenv import load_dotenv 2 | 3 | from hatchet_sdk import new_client 4 | 5 | load_dotenv() 6 | 7 | client = new_client() 8 | 9 | for i in range(200): 10 | group = "0" 11 | 12 | if i % 2 == 0: 13 | group = "1" 14 | 15 | client.event.push("concurrency-test", {"group": group}) 16 | -------------------------------------------------------------------------------- /examples/concurrency_limit_rr/test_concurrency_limit_rr.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | import pytest 4 | 5 | from hatchet_sdk import Hatchet, Worker 6 | from hatchet_sdk.workflow_run import WorkflowRunRef 7 | 8 | 9 | # requires scope module or higher for shared event loop 10 | @pytest.mark.skip(reason="The timing for this test is not reliable") 11 | @pytest.mark.asyncio(scope="session") 12 | @pytest.mark.parametrize("worker", ["concurrency_limit_rr"], indirect=True) 13 | async def test_run(hatchet: Hatchet, worker: Worker) -> None: 14 | num_groups = 2 15 | runs: list[WorkflowRunRef] = [] 16 | 17 | # Start all runs 18 | for i in range(1, num_groups + 1): 19 | run = hatchet.admin.run_workflow("ConcurrencyDemoWorkflowRR", {"group": i}) 20 | runs.append(run) 21 | run = hatchet.admin.run_workflow("ConcurrencyDemoWorkflowRR", {"group": i}) 22 | runs.append(run) 23 | 24 | # Wait for all results 25 | successful_runs = [] 26 | cancelled_runs = [] 27 | 28 | start_time = time.time() 29 | 30 | # 
import time

from dotenv import load_dotenv

from hatchet_sdk import (
    ConcurrencyExpression,
    ConcurrencyLimitStrategy,
    Context,
    Hatchet,
)

load_dotenv()

hatchet = Hatchet(debug=True)


@hatchet.workflow(
    on_events=["concurrency-test"],
    schedule_timeout="10m",
    concurrency=ConcurrencyExpression(
        expression="input.group",
        max_runs=1,
        limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,
    ),
)
class ConcurrencyDemoWorkflowRR:
    """Round-robin concurrency demo: one run at a time per ``input.group``,
    with groups serviced in round-robin order."""

    @hatchet.step()
    def step1(self, context: Context) -> None:
        # Simulate two seconds of work so concurrent runs visibly queue up.
        # (A redundant trailing `pass` after these statements was removed.)
        print("starting step1")
        time.sleep(2)
        print("finished step1")


def main() -> None:
    workflow = ConcurrencyDemoWorkflowRR()
    worker = hatchet.worker("concurrency-demo-worker-rr", max_runs=10)
    worker.register_workflow(workflow)

    worker.start()


if __name__ == "__main__":
    main()
24 | 25 | # ❓ List 26 | cron_triggers = hatchet.cron.list() 27 | # !! 28 | 29 | # ❓ Get 30 | cron_trigger = hatchet.cron.get(cron_trigger=cron_trigger.metadata.id) 31 | # !! 32 | 33 | # ❓ Delete 34 | hatchet.cron.delete(cron_trigger=cron_trigger.metadata.id) 35 | # !! 36 | -------------------------------------------------------------------------------- /examples/cron/workflow-definition.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | from dotenv import load_dotenv 4 | 5 | from hatchet_sdk import Context, Hatchet 6 | 7 | load_dotenv() 8 | 9 | hatchet = Hatchet(debug=True) 10 | 11 | 12 | # ❓ Workflow Definition Cron Trigger 13 | # Adding a cron trigger to a workflow is as simple 14 | # as adding a `cron expression` to the `on_cron` 15 | # prop of the workflow definition 16 | @hatchet.workflow(on_crons=["* * * * *"]) 17 | class CronWorkflow: 18 | @hatchet.step() 19 | def step1(self, context: Context) -> dict[str, str]: 20 | 21 | return { 22 | "time": "step1", 23 | } 24 | 25 | 26 | # !! 
import random
import time
from typing import Any, cast

from dotenv import load_dotenv

from hatchet_sdk import Context, Hatchet

load_dotenv()

hatchet = Hatchet(debug=True)


@hatchet.workflow(on_events=["dag:create"], schedule_timeout="10m")
class DagWorkflow:
    """Diamond-shaped DAG demo: step1 and step2 run in parallel, step3 sums
    their random outputs, and step4 depends on step1 and step3."""

    @hatchet.step(timeout="5s")
    def step1(self, context: Context) -> dict[str, int]:
        # Generate a random number between 1 and 100.
        # (Fixed: the original comment had a stray "return {" fused onto it.)
        rando = random.randint(1, 100)
        return {
            "rando": rando,
        }

    @hatchet.step(timeout="5s")
    def step2(self, context: Context) -> dict[str, int]:
        # Generate a random number between 1 and 100.
        rando = random.randint(1, 100)
        return {
            "rando": rando,
        }

    @hatchet.step(parents=["step1", "step2"])
    def step3(self, context: Context) -> dict[str, int]:
        # step_output is typed loosely, so narrow to a dict before indexing.
        one = cast(dict[str, Any], context.step_output("step1"))["rando"]
        two = cast(dict[str, Any], context.step_output("step2"))["rando"]

        return {
            "sum": one + two,
        }

    @hatchet.step(parents=["step1", "step3"])
    def step4(self, context: Context) -> dict[str, str]:
        print(
            "executed step4",
            time.strftime("%H:%M:%S", time.localtime()),
            context.workflow_input(),
            context.step_output("step1"),
            context.step_output("step3"),
        )
        return {
            "step4": "step4",
        }


def main() -> None:
    workflow = DagWorkflow()
    worker = hatchet.worker("dag-worker")
    worker.register_workflow(workflow)

    worker.start()


if __name__ == "__main__":
    main()
@hatchet.workflow(on_events=["child:create"])
class DedupeChild:
    """Child workflow spawned by DedupeParent; process sleeps so duplicate
    spawns overlap long enough for dedupe violations to occur."""

    @hatchet.step()
    async def process(self, context: Context) -> dict[str, str]:
        await asyncio.sleep(3)

        # Was `print(f"child process")` — an f-string with no placeholders
        # (lint F541); the emitted text is unchanged.
        print("child process")
        return {"status": "success"}

    @hatchet.step()
    async def process2(self, context: Context) -> dict[str, str]:
        print("child process2")
        return {"status2": "success"}


def main() -> None:
    worker = hatchet.worker("fanout-worker", max_runs=100)
    worker.register_workflow(DedupeParent())
    worker.register_workflow(DedupeChild())
    worker.start()


if __name__ == "__main__":
    main()
from datetime import datetime, timedelta

from dotenv import load_dotenv

from hatchet_sdk import Context, Hatchet

load_dotenv()

hatchet = Hatchet(debug=True)


@hatchet.workflow(on_events=["printer:schedule"])
class PrintSchedule:
    """On a schedule event, queue PrintPrinter to run 15 seconds from now."""

    @hatchet.step()
    def schedule(self, context: Context) -> None:
        now = datetime.now()
        print(f"the time is \t {now.strftime('%H:%M:%S')}")
        future_time = now + timedelta(seconds=15)
        print(f"scheduling for \t {future_time.strftime('%H:%M:%S')}")

        # Forward the original event payload to the scheduled run.
        hatchet.admin.schedule_workflow(
            "PrintPrinter", [future_time], context.workflow_input()
        )


@hatchet.workflow()
class PrintPrinter:
    """Prints its trigger time and the scheduled message."""

    @hatchet.step()
    def step1(self, context: Context) -> None:
        now = datetime.now()
        print(f"printed at \t {now.strftime('%H:%M:%S')}")
        print(f"message \t {context.workflow_input()['message']}")


def main() -> None:
    delayed_worker = hatchet.worker("delayed-worker", max_runs=4)
    for workflow in (PrintSchedule(), PrintPrinter()):
        delayed_worker.register_workflow(workflow)
    delayed_worker.start()


if __name__ == "__main__":
    main()
@hatchet.workflow(on_events=["sticky:child"], sticky=StickyStrategy.HARD)
class StickyChildWorkflow:
    """Child workflow pinned (HARD sticky) to a worker whose
    ``running_workflow`` label is not already "True"."""

    @hatchet.step(
        desired_worker_labels={
            "running_workflow": {
                "value": "True",
                "required": True,
                "comparator": WorkerLabelComparator.NOT_EQUAL,
            },
        },
    )
    async def child(self, context: Context) -> dict[str, str | None]:
        # Mark this worker busy so the scheduler avoids routing more
        # label-constrained work here while we sleep.
        await context.worker.async_upsert_labels({"running_workflow": "True"})

        print(f"Heavy work started on {context.worker.id()}")
        await asyncio.sleep(15)
        print(f"Finished Heavy work on {context.worker.id()}")

        return {"worker": context.worker.id()}


def main() -> None:
    sticky_worker = hatchet.worker(
        "sticky-worker",
        max_runs=10,
        labels={"running_workflow": "False"},
    )
    sticky_worker.register_workflow(StickyChildWorkflow())
    sticky_worker.start()


if __name__ == "__main__":
    main()
= hatchet.event.push("user:create", {"test": "test"}) 13 | 14 | assert e.eventId is not None 15 | 16 | 17 | @pytest.mark.asyncio(scope="session") 18 | async def test_async_event_push(aiohatchet: Hatchet) -> None: 19 | e = await aiohatchet.event.async_push("user:create", {"test": "test"}) 20 | 21 | assert e.eventId is not None 22 | 23 | 24 | @pytest.mark.asyncio(scope="session") 25 | async def test_async_event_bulk_push(aiohatchet: Hatchet) -> None: 26 | 27 | events: List[BulkPushEventWithMetadata] = [ 28 | { 29 | "key": "event1", 30 | "payload": {"message": "This is event 1"}, 31 | "additional_metadata": {"source": "test", "user_id": "user123"}, 32 | }, 33 | { 34 | "key": "event2", 35 | "payload": {"message": "This is event 2"}, 36 | "additional_metadata": {"source": "test", "user_id": "user456"}, 37 | }, 38 | { 39 | "key": "event3", 40 | "payload": {"message": "This is event 3"}, 41 | "additional_metadata": {"source": "test", "user_id": "user789"}, 42 | }, 43 | ] 44 | opts: BulkPushEventOptions = {"namespace": "bulk-test"} 45 | 46 | e = await aiohatchet.event.async_bulk_push(events, opts) 47 | 48 | assert len(e) == 3 49 | 50 | # Sort both lists of events by their key to ensure comparison order 51 | sorted_events = sorted(events, key=lambda x: x["key"]) 52 | sorted_returned_events = sorted(e, key=lambda x: x.key) 53 | namespace = "bulk-test" 54 | 55 | # Check that the returned events match the original events 56 | for original_event, returned_event in zip(sorted_events, sorted_returned_events): 57 | assert returned_event.key == namespace + original_event["key"] 58 | -------------------------------------------------------------------------------- /examples/fanout/stream.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import base64 3 | import json 4 | import os 5 | import random 6 | 7 | from dotenv import load_dotenv 8 | 9 | from hatchet_sdk import new_client 10 | from hatchet_sdk.clients.admin import 
async def main() -> None:
    """Trigger the Parent workflow and stream its events by metadata key."""
    load_dotenv()
    hatchet = Hatchet()

    # A random per-run value isolates this run's stream events; the key
    # name itself is arbitrary.
    meta_key = "streamKey"
    meta_value = f"sk-{random.randint(1, 100)}"

    # The additional metadata is propagated to all child workflows, so the
    # listener below observes events from the entire workflow tree.
    hatchet.admin.run_workflow(
        "Parent",
        {"n": 2},
        options={"additional_metadata": {meta_key: meta_value}},
    )

    # Stream every event carrying the metadata key/value pair.
    async for event in hatchet.listener.stream_by_additional_metadata(
        meta_key, meta_value
    ):
        print(event.type, event.payload)
import pytest

from hatchet_sdk import Hatchet, Worker


# requires scope module or higher for shared event loop
@pytest.mark.asyncio(scope="session")
@pytest.mark.parametrize("worker", ["fanout"], indirect=True)
async def test_run(hatchet: Hatchet, worker: Worker) -> None:
    # Trigger the fanout Parent workflow with n=2 children and verify the
    # "spawn" step reports exactly one result per spawned child.
    run = hatchet.admin.run_workflow("Parent", {"n": 2})
    result = await run.result()
    assert len(result["spawn"]["results"]) == 2


# requires scope module or higher for shared event loop
@pytest.mark.asyncio(scope="session")
@pytest.mark.parametrize("worker", ["fanout"], indirect=True)
async def test_run2(hatchet: Hatchet, worker: Worker) -> None:
    # NOTE(review): identical to test_run above — presumably kept to verify
    # the session-scoped event loop / worker fixture survives a second run.
    # Confirm the duplication is intentional.
    run = hatchet.admin.run_workflow("Parent", {"n": 2})
    result = await run.result()
    assert len(result["spawn"]["results"]) == 2
@hatchet.workflow(on_events=["parent:create"])
class Parent:
    """Fanout parent: spawns n Child workflows and gathers their results."""

    @hatchet.step(timeout="5m")
    async def spawn(self, context: Context) -> dict[str, Any]:
        """Spawn ``n`` Child workflows (default 100) and await all results.

        Reads ``n`` from the workflow input, spawns the children one at a
        time (each ``spawn_workflow`` call is awaited sequentially, in index
        order), then resolves all result futures concurrently.
        """
        print("spawning child")

        context.put_stream("spawning...")

        n = context.workflow_input().get("n", 100)

        # Idiomatic async comprehension instead of a manual append loop;
        # the spawn calls still happen sequentially, exactly as before.
        results = [
            (
                await context.aio.spawn_workflow(
                    "Child",
                    {"a": str(i)},
                    key=f"child{i}",
                    options={"additional_metadata": {"hello": "earth"}},
                )
            ).result()
            for i in range(n)
        ]

        # Await every child's result concurrently.
        result = await asyncio.gather(*results)
        print(f"results {result}")

        return {"results": result}
@hatchet.workflow(on_events=["parent:create"])
class SyncFanoutParent:
    """Synchronous fanout parent: bulk-spawns children and blocks on results."""

    @hatchet.step(timeout="5m")
    def spawn(self, context: Context) -> dict[str, Any]:
        """Bulk-spawn SyncFanoutChild workflows and wait for every result."""
        print("spawning child")

        child_count = context.workflow_input().get("n", 5)

        # Describe every child up front so they can be spawned in one bulk call.
        child_specs = []
        for index in range(child_count):
            child_specs.append(
                {
                    "workflow_name": "SyncFanoutChild",
                    "input": {"a": str(index)},
                    "key": f"child{index}",
                    "options": {"additional_metadata": {"hello": "earth"}},
                }
            )

        child_runs = context.spawn_workflows(child_specs)

        # Block on each child in turn, preserving spawn order.
        results = []
        for child_run in child_runs:
            results.append(child_run.sync_result())

        print(f"results {results}")

        return {"results": results}
@hatchet.workflow()
class LoggingWorkflow:
    """Workflow that emits log records so SDK log forwarding can be tested."""

    @hatchet.step()
    def step1(self, context: Context) -> dict[str, str]:
        """Log a dozen messages (strings and a dict) and report success."""
        for i in range(12):
            # Lazy %-style arguments defer formatting until the record is
            # actually emitted (standard logging best practice); the rendered
            # message is identical to the previous str.format() version.
            logger.info("executed step1 - %s", i)
            # Non-string objects are valid log messages; logging calls str()
            # on them when the record is rendered.
            logger.info({"step1": "step1"})
            time.sleep(0.1)
        return {"status": "success"}
async def main() -> None:
    """Run ManualTriggerWorkflow and save streamed image chunks to ./out."""
    load_dotenv()
    hatchet = new_client()

    workflow_run = hatchet.admin.run_workflow(
        "ManualTriggerWorkflow",
        {"test": "test"},
        options={"additional_metadata": {"hello": "moon"}},
    )

    # Prepare an "out" directory next to this script for decoded payloads.
    script_dir = os.path.dirname(os.path.abspath(__file__))
    out_dir = os.path.join(script_dir, "out")
    os.makedirs(out_dir, exist_ok=True)

    async for event in workflow_run.stream():
        print(event.type, event.payload)

        if event.type == StepRunEventType.STEP_RUN_EVENT_TYPE_STREAM:
            # Stream payloads arrive base64-encoded; decode and persist.
            with open(os.path.join(out_dir, "payload.jpg"), "wb") as f:
                f.write(base64.b64decode(event.payload))

        data = json.dumps(
            {"type": event.type, "messageId": workflow_run.workflow_run_id}
        )
        print("data: " + data + "\n\n")

    result = await workflow_run.result()

    print("result: " + json.dumps(result, indent=2))
{"step1": "data1 " + (res or "")} 41 | 42 | @hatchet.step(parents=["step1"], timeout="4s") 43 | def step2(self, context: Context) -> dict[str, str]: 44 | print("started step2") 45 | time.sleep(1) 46 | print("finished step2") 47 | return {"step2": "data2"} 48 | 49 | 50 | def main() -> None: 51 | workflow = ManualTriggerWorkflow() 52 | worker = hatchet.worker("manual-worker", max_runs=4) 53 | worker.register_workflow(workflow) 54 | 55 | worker.start() 56 | 57 | 58 | if __name__ == "__main__": 59 | main() 60 | -------------------------------------------------------------------------------- /examples/on_failure/test_on_failure.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import pytest 4 | 5 | from hatchet_sdk import Hatchet, Worker 6 | from hatchet_sdk.clients.rest.models.job_run_status import JobRunStatus 7 | 8 | 9 | # requires scope module or higher for shared event loop 10 | @pytest.mark.asyncio(scope="session") 11 | @pytest.mark.parametrize("worker", ["on_failure"], indirect=True) 12 | async def test_run_timeout(hatchet: Hatchet, worker: Worker) -> None: 13 | run = hatchet.admin.run_workflow("OnFailureWorkflow", {}) 14 | try: 15 | await run.result() 16 | 17 | assert False, "Expected workflow to timeout" 18 | except Exception as e: 19 | assert "step1 failed" in str(e) 20 | 21 | await asyncio.sleep(5) # Wait for the on_failure job to finish 22 | 23 | job_runs = hatchet.rest.workflow_run_get(run.workflow_run_id).job_runs 24 | assert job_runs 25 | assert len(job_runs) == 2 26 | 27 | successful_job_runs = [jr for jr in job_runs if jr.status == JobRunStatus.SUCCEEDED] 28 | failed_job_runs = [jr for jr in job_runs if jr.status == JobRunStatus.FAILED] 29 | 30 | assert len(successful_job_runs) == 1 31 | assert len(failed_job_runs) == 1 32 | -------------------------------------------------------------------------------- /examples/on_failure/worker.py: 
@hatchet.workflow(on_events=["user:create"])
class OnFailureWorkflowWithDetails:
    """Failing workflow whose on-failure step inspects the failure details."""

    @hatchet.step(timeout="1s")
    def step1(self, context: Context) -> None:
        # Always fails so the on-failure step below gets exercised.
        raise Exception("step1 failed")

    # Runs once the workflow has failed.
    @hatchet.on_failure_step()
    def on_failure(self, context: Context) -> dict[str, str]:
        """Succeed only if the single upstream failure is step1's."""
        failures = context.fetch_run_failures()
        print(json.dumps(failures, indent=2))

        saw_expected_failure = (
            len(failures) == 1 and "step1 failed" in failures[0]["error"]
        )
        if not saw_expected_failure:
            raise Exception("unexpected failure")

        return {"status": "success"}
"""Build the OpenTelemetry tracer provider used by the examples.

In CI a no-op provider is used; otherwise an OTLP-exporting provider is
configured from ``HATCHET_CLIENT_OTEL_*`` environment variables.
"""

import os

from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.resources import SERVICE_NAME, Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from opentelemetry.trace import NoOpTracerProvider

trace_provider: TracerProvider | NoOpTracerProvider

if os.getenv("CI", "false") == "true":
    # No collector is available in CI; drop all spans.
    trace_provider = NoOpTracerProvider()
else:
    resource = Resource(
        attributes={
            SERVICE_NAME: os.getenv("HATCHET_CLIENT_OTEL_SERVICE_NAME", "test-service")
        }
    )

    # Parse a single "name=value" header. Split on the FIRST '=' only:
    # OTLP auth header values (e.g. base64 tokens) frequently contain '='
    # themselves, which previously made the tuple() -> dict() conversion
    # raise ("dictionary update sequence element has length 3").
    header_name, _, header_value = os.getenv(
        "HATCHET_CLIENT_OTEL_EXPORTER_OTLP_HEADERS", "foo=bar"
    ).partition("=")
    headers = {header_name: header_value}

    processor = BatchSpanProcessor(
        OTLPSpanExporter(
            # NOTE(review): this is the OTLP/HTTP exporter, but 4317 is the
            # conventional OTLP/gRPC port (HTTP is usually 4318) — confirm
            # the default endpoint matches the deployed collector.
            endpoint=os.getenv(
                "HATCHET_CLIENT_OTEL_EXPORTER_OTLP_ENDPOINT", "http://localhost:4317"
            ),
            headers=headers,
        ),
    )

    trace_provider = TracerProvider(resource=resource)

    trace_provider.add_span_processor(processor)
@hatchet.workflow(on_events=["overrides:create"], schedule_timeout="10m")
class OverridesWorkflow:
    """Four-step DAG demonstrating playground overrides and step parents."""

    def __init__(self) -> None:
        self.my_value = "test"

    @hatchet.step(timeout="5s")
    def step1(self, context: Context) -> dict[str, str | None]:
        started_at = time.strftime("%H:%M:%S", time.localtime())
        print("starting step1", started_at, context.workflow_input())

        # The playground default can be overridden from the dashboard.
        override_value = context.playground("prompt", "You are an AI assistant...")
        time.sleep(3)

        finished_at = time.strftime("%H:%M:%S", time.localtime())
        print("executed step1", finished_at)
        return {"step1": override_value}

    @hatchet.step()
    def step2(self, context: Context) -> dict[str, str]:
        started_at = time.strftime("%H:%M:%S", time.localtime())
        print("starting step2", started_at, context.workflow_input())

        time.sleep(5)

        finished_at = time.strftime("%H:%M:%S", time.localtime())
        print("executed step2", finished_at)
        return {"step2": "step2"}

    @hatchet.step(parents=["step1", "step2"])
    def step3(self, context: Context) -> dict[str, str]:
        # Runs only after both step1 and step2 complete.
        print(
            "executed step3",
            time.strftime("%H:%M:%S", time.localtime()),
            context.workflow_input(),
            context.step_output("step1"),
            context.step_output("step2"),
        )
        return {"step3": "step3"}

    @hatchet.step(parents=["step1", "step3"])
    def step4(self, context: Context) -> dict[str, str]:
        # Runs only after both step1 and step3 complete.
        print(
            "executed step4",
            time.strftime("%H:%M:%S", time.localtime()),
            context.workflow_input(),
            context.step_output("step1"),
            context.step_output("step3"),
        )
        return {"step4": "step4"}
hatchet_sdk.hatchet import HatchetRest 4 | 5 | load_dotenv() 6 | 7 | hatchet = HatchetRest() 8 | 9 | 10 | def main() -> None: 11 | # Look up the failed workflow runs 12 | run = hatchet.rest.workflow_run_create("19528945-17df-48df-88f4-72d650ce7cae", {}) 13 | print(run) 14 | 15 | 16 | if __name__ == "__main__": 17 | main() 18 | -------------------------------------------------------------------------------- /examples/programatic_replay/test_programatic_replay.py: -------------------------------------------------------------------------------- 1 | # from hatchet_sdk import Hatchet 2 | # import pytest 3 | 4 | # from tests.utils import fixture_bg_worker 5 | 6 | # worker = fixture_bg_worker(["poetry", "run", "async"]) 7 | 8 | # # requires scope module or higher for shared event loop 9 | # @pytest.mark.asyncio(scope="session") 10 | # async def test_run(hatchet: Hatchet): 11 | # run = hatchet.admin.run_workflow("DagWorkflow", {}) 12 | # result = await run.result() 13 | # assert result["step1"]["test"] == "test" 14 | -------------------------------------------------------------------------------- /examples/pydantic/test_pydantic.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from hatchet_sdk import Hatchet, Worker 4 | 5 | 6 | # requires scope module or higher for shared event loop 7 | @pytest.mark.asyncio(scope="session") 8 | @pytest.mark.parametrize("worker", ["pydantic"], indirect=True) 9 | async def test_run_validation_error(hatchet: Hatchet, worker: Worker) -> None: 10 | run = hatchet.admin.run_workflow( 11 | "Parent", 12 | {}, 13 | ) 14 | 15 | with pytest.raises(Exception, match="1 validation error for ParentInput"): 16 | await run.result() 17 | 18 | 19 | # requires scope module or higher for shared event loop 20 | @pytest.mark.asyncio(scope="session") 21 | @pytest.mark.parametrize("worker", ["pydantic"], indirect=True) 22 | async def test_run(hatchet: Hatchet, worker: Worker) -> None: 23 | run = 
hatchet.admin.run_workflow( 24 | "Parent", 25 | {"x": "foobar"}, 26 | ) 27 | 28 | result = await run.result() 29 | 30 | assert len(result["spawn"]) == 3 31 | -------------------------------------------------------------------------------- /examples/pydantic/trigger.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from dotenv import load_dotenv 4 | 5 | from hatchet_sdk import new_client 6 | 7 | 8 | async def main() -> None: 9 | load_dotenv() 10 | hatchet = new_client() 11 | 12 | hatchet.admin.run_workflow( 13 | "Parent", 14 | {"x": "foo bar baz"}, 15 | ) 16 | 17 | 18 | if __name__ == "__main__": 19 | asyncio.run(main()) 20 | -------------------------------------------------------------------------------- /examples/pydantic/worker.py: -------------------------------------------------------------------------------- 1 | from typing import cast 2 | 3 | from dotenv import load_dotenv 4 | from pydantic import BaseModel 5 | 6 | from hatchet_sdk import Context, Hatchet 7 | 8 | load_dotenv() 9 | 10 | hatchet = Hatchet(debug=True) 11 | 12 | 13 | # ❓ Pydantic 14 | # This workflow shows example usage of Pydantic within Hatchet 15 | class ParentInput(BaseModel): 16 | x: str 17 | 18 | 19 | @hatchet.workflow(input_validator=ParentInput) 20 | class Parent: 21 | @hatchet.step(timeout="5m") 22 | async def spawn(self, context: Context) -> dict[str, str]: 23 | ## Use `typing.cast` to cast your `workflow_input` 24 | ## to the type of your `input_validator` 25 | input = cast(ParentInput, context.workflow_input()) ## This is a `ParentInput` 26 | 27 | child = await context.aio.spawn_workflow( 28 | "Child", 29 | {"a": 1, "b": "10"}, 30 | ) 31 | 32 | return cast(dict[str, str], await child.result()) 33 | 34 | 35 | class ChildInput(BaseModel): 36 | a: int 37 | b: int 38 | 39 | 40 | class StepResponse(BaseModel): 41 | status: str 42 | 43 | 44 | @hatchet.workflow(input_validator=ChildInput) 45 | class Child: 46 | @hatchet.step() 47 
| def process(self, context: Context) -> StepResponse: 48 | ## This is an instance `ChildInput` 49 | input = cast(ChildInput, context.workflow_input()) 50 | 51 | return StepResponse(status="success") 52 | 53 | @hatchet.step(parents=["process"]) 54 | def process2(self, context: Context) -> StepResponse: 55 | ## This is an instance of `StepResponse` 56 | process_output = cast(StepResponse, context.step_output("process")) 57 | 58 | return {"status": "step 2 - success"} # type: ignore[return-value] 59 | 60 | @hatchet.step(parents=["process2"]) 61 | def process3(self, context: Context) -> StepResponse: 62 | ## This is an instance of `StepResponse`, even though the 63 | ## response of `process2` was a dictionary. Note that 64 | ## Hatchet will attempt to parse that dictionary into 65 | ## an object of type `StepResponse` 66 | process_2_output = cast(StepResponse, context.step_output("process2")) 67 | 68 | return StepResponse(status="step 3 - success") 69 | 70 | 71 | # ‼️ 72 | 73 | 74 | def main() -> None: 75 | worker = hatchet.worker("pydantic-worker") 76 | worker.register_workflow(Parent()) 77 | worker.register_workflow(Child()) 78 | worker.start() 79 | 80 | 81 | if __name__ == "__main__": 82 | main() 83 | -------------------------------------------------------------------------------- /examples/rate_limit/dynamic.py: -------------------------------------------------------------------------------- 1 | from dotenv import load_dotenv 2 | 3 | from hatchet_sdk import Context, Hatchet 4 | from hatchet_sdk.rate_limit import RateLimit, RateLimitDuration 5 | 6 | load_dotenv() 7 | 8 | hatchet = Hatchet(debug=True) 9 | 10 | 11 | @hatchet.workflow(on_events=["rate_limit:create"]) 12 | class RateLimitWorkflow: 13 | 14 | @hatchet.step( 15 | rate_limits=[ 16 | RateLimit( 17 | dynamic_key=f'"LIMIT:"+input.group', 18 | units="input.units", 19 | limit="input.limit", 20 | ) 21 | ] 22 | ) 23 | def step1(self, context: Context) -> None: 24 | print("executed step1") 25 | 26 | 27 | def 
main() -> None: 28 | worker = hatchet.worker("rate-limit-worker", max_runs=10) 29 | worker.register_workflow(RateLimitWorkflow()) 30 | 31 | worker.start() 32 | -------------------------------------------------------------------------------- /examples/rate_limit/event.py: -------------------------------------------------------------------------------- 1 | from dotenv import load_dotenv 2 | 3 | from hatchet_sdk.hatchet import Hatchet 4 | 5 | load_dotenv() 6 | 7 | hatchet = Hatchet(debug=True) 8 | 9 | hatchet.event.push("rate_limit:create", {"test": "1"}) 10 | hatchet.event.push("rate_limit:create", {"test": "2"}) 11 | hatchet.event.push("rate_limit:create", {"test": "3"}) 12 | -------------------------------------------------------------------------------- /examples/rate_limit/test_rate_limit.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import time 3 | 4 | import pytest 5 | 6 | from hatchet_sdk import Hatchet, Worker 7 | 8 | 9 | # requires scope module or higher for shared event loop 10 | @pytest.mark.skip(reason="The timing for this test is not reliable") 11 | @pytest.mark.asyncio(scope="session") 12 | @pytest.mark.parametrize("worker", ["rate_limit"], indirect=True) 13 | async def test_run(hatchet: Hatchet, worker: Worker) -> None: 14 | 15 | run1 = hatchet.admin.run_workflow("RateLimitWorkflow", {}) 16 | run2 = hatchet.admin.run_workflow("RateLimitWorkflow", {}) 17 | run3 = hatchet.admin.run_workflow("RateLimitWorkflow", {}) 18 | 19 | start_time = time.time() 20 | 21 | await asyncio.gather(run1.result(), run2.result(), run3.result()) 22 | 23 | end_time = time.time() 24 | 25 | total_time = end_time - start_time 26 | 27 | assert ( 28 | 1 <= total_time <= 5 29 | ), f"Expected runtime to be a bit more than 1 seconds, but it took {total_time:.2f} seconds" 30 | -------------------------------------------------------------------------------- /examples/rate_limit/worker.py: 
-------------------------------------------------------------------------------- 1 | from dotenv import load_dotenv 2 | 3 | from hatchet_sdk import Context, Hatchet 4 | from hatchet_sdk.rate_limit import RateLimit, RateLimitDuration 5 | 6 | load_dotenv() 7 | 8 | hatchet = Hatchet(debug=True) 9 | 10 | 11 | @hatchet.workflow(on_events=["rate_limit:create"]) 12 | class RateLimitWorkflow: 13 | 14 | @hatchet.step(rate_limits=[RateLimit(key="test-limit", units=1)]) 15 | def step1(self, context: Context) -> None: 16 | print("executed step1") 17 | 18 | 19 | def main() -> None: 20 | hatchet.admin.put_rate_limit("test-limit", 2, RateLimitDuration.SECOND) 21 | 22 | worker = hatchet.worker("rate-limit-worker", max_runs=10) 23 | worker.register_workflow(RateLimitWorkflow()) 24 | 25 | worker.start() 26 | 27 | 28 | if __name__ == "__main__": 29 | main() 30 | -------------------------------------------------------------------------------- /examples/retries_with_backoff/worker.py: -------------------------------------------------------------------------------- 1 | from hatchet_sdk import Context, Hatchet 2 | 3 | hatchet = Hatchet(debug=True) 4 | 5 | 6 | # ❓ Backoff 7 | @hatchet.workflow() 8 | class BackoffWorkflow: 9 | # 👀 Backoff configuration 10 | @hatchet.step( 11 | retries=10, 12 | # 👀 Maximum number of seconds to wait between retries 13 | backoff_max_seconds=60, 14 | # 👀 Factor to increase the wait time between retries. 15 | # This sequence will be 2s, 4s, 8s, 16s, 32s, 60s... 
due to the maxSeconds limit 16 | backoff_factor=2.0, 17 | ) 18 | def step1(self, context: Context) -> dict[str, str]: 19 | if context.retry_count() < 3: 20 | raise Exception("step1 failed") 21 | 22 | return {"status": "success"} 23 | 24 | 25 | # ‼️ 26 | 27 | 28 | def main() -> None: 29 | workflow = BackoffWorkflow() 30 | worker = hatchet.worker("backoff-worker", max_runs=4) 31 | worker.register_workflow(workflow) 32 | 33 | worker.start() 34 | 35 | 36 | if __name__ == "__main__": 37 | main() 38 | -------------------------------------------------------------------------------- /examples/scheduled/programatic-async.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta 2 | 3 | from dotenv import load_dotenv 4 | 5 | from hatchet_sdk import Hatchet 6 | from hatchet_sdk.clients.rest.models.scheduled_workflows import ScheduledWorkflows 7 | 8 | load_dotenv() 9 | 10 | hatchet = Hatchet() 11 | 12 | 13 | async def create_scheduled() -> None: 14 | # ❓ Create 15 | scheduled_run = await hatchet.scheduled.aio.create( 16 | workflow_name="simple-workflow", 17 | trigger_at=datetime.now() + timedelta(seconds=10), 18 | input={ 19 | "data": "simple-workflow-data", 20 | }, 21 | additional_metadata={ 22 | "customer_id": "customer-a", 23 | }, 24 | ) 25 | 26 | id = scheduled_run.metadata.id # the id of the scheduled run trigger 27 | # !! 28 | 29 | # ❓ Delete 30 | await hatchet.scheduled.aio.delete(scheduled=scheduled_run.metadata.id) 31 | # !! 32 | 33 | # ❓ List 34 | scheduled_runs = await hatchet.scheduled.aio.list() 35 | # !! 36 | 37 | # ❓ Get 38 | scheduled_run = await hatchet.scheduled.aio.get(scheduled=scheduled_run.metadata.id) 39 | # !! 
40 | -------------------------------------------------------------------------------- /examples/scheduled/programatic-sync.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta 2 | 3 | from dotenv import load_dotenv 4 | 5 | from hatchet_sdk import Hatchet 6 | 7 | load_dotenv() 8 | 9 | hatchet = Hatchet() 10 | 11 | # ❓ Create 12 | scheduled_run = hatchet.scheduled.create( 13 | workflow_name="simple-workflow", 14 | trigger_at=datetime.now() + timedelta(seconds=10), 15 | input={ 16 | "data": "simple-workflow-data", 17 | }, 18 | additional_metadata={ 19 | "customer_id": "customer-a", 20 | }, 21 | ) 22 | 23 | id = scheduled_run.metadata.id # the id of the scheduled run trigger 24 | # !! 25 | 26 | # ❓ Delete 27 | hatchet.scheduled.delete(scheduled=scheduled_run.metadata.id) 28 | # !! 29 | 30 | # ❓ List 31 | scheduled_runs = hatchet.scheduled.list() 32 | # !! 33 | 34 | # ❓ Get 35 | scheduled_run = hatchet.scheduled.get(scheduled=scheduled_run.metadata.id) 36 | # !! 
37 | -------------------------------------------------------------------------------- /examples/simple/event.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | from dotenv import load_dotenv 4 | 5 | from hatchet_sdk import new_client 6 | from hatchet_sdk.clients.events import BulkPushEventWithMetadata 7 | 8 | load_dotenv() 9 | 10 | client = new_client() 11 | 12 | # client.event.push("user:create", {"test": "test"}) 13 | client.event.push( 14 | "user:create", {"test": "test"}, options={"additional_metadata": {"hello": "moon"}} 15 | ) 16 | 17 | events: List[BulkPushEventWithMetadata] = [ 18 | { 19 | "key": "event1", 20 | "payload": {"message": "This is event 1"}, 21 | "additional_metadata": {"source": "test", "user_id": "user123"}, 22 | }, 23 | { 24 | "key": "event2", 25 | "payload": {"message": "This is event 2"}, 26 | "additional_metadata": {"source": "test", "user_id": "user456"}, 27 | }, 28 | { 29 | "key": "event3", 30 | "payload": {"message": "This is event 3"}, 31 | "additional_metadata": {"source": "test", "user_id": "user789"}, 32 | }, 33 | ] 34 | 35 | 36 | result = client.event.bulk_push( 37 | events, 38 | options={"namespace": "bulk-test"}, 39 | ) 40 | 41 | print(result) 42 | -------------------------------------------------------------------------------- /examples/simple/worker.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | from dotenv import load_dotenv 4 | 5 | from hatchet_sdk import Context, Hatchet 6 | 7 | load_dotenv() 8 | 9 | hatchet = Hatchet(debug=True) 10 | 11 | 12 | @hatchet.workflow(on_events=["user:create"]) 13 | class MyWorkflow: 14 | @hatchet.step(timeout="11s", retries=3) 15 | def step1(self, context: Context) -> dict[str, str]: 16 | print("executed step1") 17 | time.sleep(10) 18 | # raise Exception("test") 19 | return { 20 | "step1": "step1", 21 | } 22 | 23 | 24 | def main() -> None: 25 | workflow = 
MyWorkflow() 26 | worker = hatchet.worker("test-worker", max_runs=1) 27 | worker.register_workflow(workflow) 28 | worker.start() 29 | 30 | 31 | if __name__ == "__main__": 32 | main() 33 | -------------------------------------------------------------------------------- /examples/sticky_workers/event.py: -------------------------------------------------------------------------------- 1 | from dotenv import load_dotenv 2 | 3 | from hatchet_sdk.hatchet import Hatchet 4 | 5 | load_dotenv() 6 | 7 | hatchet = Hatchet(debug=True) 8 | 9 | # client.event.push("user:create", {"test": "test"}) 10 | hatchet.event.push( 11 | "sticky:parent", 12 | {"test": "test"}, 13 | options={"additional_metadata": {"hello": "moon"}}, 14 | ) 15 | -------------------------------------------------------------------------------- /examples/sticky_workers/worker.py: -------------------------------------------------------------------------------- 1 | from dotenv import load_dotenv 2 | 3 | from hatchet_sdk import Context, Hatchet, StickyStrategy 4 | from hatchet_sdk.context.context import ContextAioImpl 5 | 6 | load_dotenv() 7 | 8 | hatchet = Hatchet(debug=True) 9 | 10 | 11 | @hatchet.workflow(on_events=["sticky:parent"], sticky=StickyStrategy.SOFT) 12 | class StickyWorkflow: 13 | @hatchet.step() 14 | def step1a(self, context: Context) -> dict[str, str | None]: 15 | return {"worker": context.worker.id()} 16 | 17 | @hatchet.step() 18 | def step1b(self, context: Context) -> dict[str, str | None]: 19 | return {"worker": context.worker.id()} 20 | 21 | @hatchet.step(parents=["step1a", "step1b"]) 22 | async def step2(self, context: ContextAioImpl) -> dict[str, str | None]: 23 | ref = await context.spawn_workflow( 24 | "StickyChildWorkflow", {}, options={"sticky": True} 25 | ) 26 | 27 | await ref.result() 28 | 29 | return {"worker": context.worker.id()} 30 | 31 | 32 | @hatchet.workflow(on_events=["sticky:child"], sticky=StickyStrategy.SOFT) 33 | class StickyChildWorkflow: 34 | @hatchet.step() 35 | def 
child(self, context: Context) -> dict[str, str | None]: 36 | return {"worker": context.worker.id()} 37 | 38 | 39 | def main() -> None: 40 | worker = hatchet.worker("sticky-worker", max_runs=10) 41 | worker.register_workflow(StickyWorkflow()) 42 | worker.register_workflow(StickyChildWorkflow()) 43 | worker.start() 44 | 45 | 46 | if __name__ == "__main__": 47 | main() 48 | -------------------------------------------------------------------------------- /examples/timeout/event.py: -------------------------------------------------------------------------------- 1 | from dotenv import load_dotenv 2 | 3 | from hatchet_sdk import new_client 4 | 5 | load_dotenv() 6 | 7 | client = new_client() 8 | 9 | client.event.push("user:create", {"test": "test"}) 10 | -------------------------------------------------------------------------------- /examples/timeout/test_timeout.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from hatchet_sdk import Hatchet, Worker 4 | 5 | 6 | # requires scope module or higher for shared event loop 7 | @pytest.mark.asyncio(scope="session") 8 | @pytest.mark.parametrize("worker", ["timeout"], indirect=True) 9 | async def test_run_timeout(hatchet: Hatchet, worker: Worker) -> None: 10 | run = hatchet.admin.run_workflow("TimeoutWorkflow", {}) 11 | try: 12 | await run.result() 13 | assert False, "Expected workflow to timeout" 14 | except Exception as e: 15 | assert str(e) == "Workflow Errors: ['TIMED_OUT']" 16 | 17 | 18 | @pytest.mark.asyncio(scope="session") 19 | @pytest.mark.parametrize("worker", ["timeout"], indirect=True) 20 | async def test_run_refresh_timeout(hatchet: Hatchet, worker: Worker) -> None: 21 | run = hatchet.admin.run_workflow("RefreshTimeoutWorkflow", {}) 22 | result = await run.result() 23 | assert result["step1"]["status"] == "success" 24 | -------------------------------------------------------------------------------- /examples/timeout/worker.py: 
-------------------------------------------------------------------------------- 1 | import time 2 | 3 | from dotenv import load_dotenv 4 | 5 | from hatchet_sdk import Context, Hatchet 6 | 7 | load_dotenv() 8 | 9 | hatchet = Hatchet(debug=True) 10 | 11 | 12 | @hatchet.workflow(on_events=["timeout:create"]) 13 | class TimeoutWorkflow: 14 | 15 | @hatchet.step(timeout="4s") 16 | def step1(self, context: Context) -> dict[str, str]: 17 | time.sleep(5) 18 | return {"status": "success"} 19 | 20 | 21 | @hatchet.workflow(on_events=["refresh:create"]) 22 | class RefreshTimeoutWorkflow: 23 | 24 | @hatchet.step(timeout="4s") 25 | def step1(self, context: Context) -> dict[str, str]: 26 | 27 | context.refresh_timeout("10s") 28 | time.sleep(5) 29 | 30 | return {"status": "success"} 31 | 32 | 33 | def main() -> None: 34 | worker = hatchet.worker("timeout-worker", max_runs=4) 35 | worker.register_workflow(TimeoutWorkflow()) 36 | worker.register_workflow(RefreshTimeoutWorkflow()) 37 | 38 | worker.start() 39 | 40 | 41 | if __name__ == "__main__": 42 | main() 43 | -------------------------------------------------------------------------------- /examples/v2/simple/test_v2_worker.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from examples.v2.simple.worker import MyResultType, my_durable_func, my_func 4 | from hatchet_sdk import Hatchet, Worker 5 | from hatchet_sdk.workflow_run import RunRef 6 | 7 | 8 | # requires scope module or higher for shared event loop 9 | @pytest.mark.asyncio(scope="session") 10 | @pytest.mark.parametrize("worker", ["v2_simple"], indirect=True) 11 | async def test_durable(hatchet: Hatchet, worker: Worker) -> None: 12 | durable_run: RunRef[dict[str, str]] = hatchet.admin.run( 13 | my_durable_func, {"test": "test"} 14 | ) 15 | result = await durable_run.result() 16 | 17 | assert result == {"my_durable_func": "testing123"} 18 | 19 | 20 | @pytest.mark.asyncio(scope="session") 21 | 
@pytest.mark.parametrize("worker", ["v2_simple"], indirect=True) 22 | async def test_func(hatchet: Hatchet, worker: Worker) -> None: 23 | durable_run: RunRef[MyResultType] = hatchet.admin.run(my_func, {"test": "test"}) 24 | result = await durable_run.result() 25 | 26 | assert result == {"my_func": "testing123"} 27 | -------------------------------------------------------------------------------- /examples/v2/simple/worker.py: -------------------------------------------------------------------------------- 1 | import json 2 | import time 3 | from typing import Any, TypedDict, cast 4 | 5 | from dotenv import load_dotenv 6 | 7 | from hatchet_sdk import Context 8 | from hatchet_sdk.v2.callable import DurableContext 9 | from hatchet_sdk.v2.hatchet import Hatchet 10 | from hatchet_sdk.workflow_run import RunRef 11 | 12 | load_dotenv() 13 | 14 | hatchet = Hatchet(debug=True) 15 | 16 | 17 | class MyResultType(TypedDict): 18 | my_func: str 19 | 20 | 21 | @hatchet.function() 22 | def my_func(context: Context) -> MyResultType: 23 | return MyResultType(my_func="testing123") 24 | 25 | 26 | @hatchet.durable() 27 | async def my_durable_func(context: DurableContext) -> dict[str, MyResultType | None]: 28 | result = cast(dict[str, Any], await context.run(my_func, {"test": "test"}).result()) 29 | 30 | context.log(result) 31 | 32 | return {"my_durable_func": result.get("my_func")} 33 | 34 | 35 | def main() -> None: 36 | worker = hatchet.worker("test-worker", max_runs=5) 37 | 38 | hatchet.admin.run(my_durable_func, {"test": "test"}) 39 | 40 | worker.start() 41 | 42 | 43 | if __name__ == "__main__": 44 | main() 45 | -------------------------------------------------------------------------------- /examples/worker_existing_loop/worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from contextlib import suppress 3 | 4 | from dotenv import load_dotenv 5 | 6 | from hatchet_sdk import Context, Hatchet 7 | 8 | load_dotenv() 9 | 10 | 
hatchet = Hatchet(debug=True) 11 | 12 | 13 | @hatchet.workflow(name="MyWorkflow") 14 | class MyWorkflow: 15 | @hatchet.step() 16 | async def step(self, context: Context) -> dict[str, str]: 17 | print("started") 18 | await asyncio.sleep(10) 19 | print("finished") 20 | return {"result": "returned result"} 21 | 22 | 23 | async def async_main() -> None: 24 | worker = None 25 | try: 26 | workflow = MyWorkflow() 27 | worker = hatchet.worker("test-worker", max_runs=1) 28 | worker.register_workflow(workflow) 29 | worker.start() 30 | 31 | ref = hatchet.admin.run_workflow("MyWorkflow", input={}) 32 | print(await ref.result()) 33 | while True: 34 | await asyncio.sleep(1) 35 | finally: 36 | if worker: 37 | await worker.exit_gracefully() 38 | 39 | 40 | def main() -> None: 41 | with suppress(KeyboardInterrupt): 42 | asyncio.run(async_main()) 43 | 44 | 45 | if __name__ == "__main__": 46 | main() 47 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/event_ts.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from typing import Any 3 | 4 | 5 | class Event_ts(asyncio.Event): 6 | """ 7 | Event_ts is a subclass of asyncio.Event that allows for thread-safe setting and clearing of the event. 
8 | """ 9 | 10 | def __init__(self, *args, **kwargs): 11 | super().__init__(*args, **kwargs) 12 | if self._loop is None: 13 | self._loop = asyncio.get_event_loop() 14 | 15 | def set(self): 16 | if not self._loop.is_closed(): 17 | self._loop.call_soon_threadsafe(super().set) 18 | 19 | def clear(self): 20 | self._loop.call_soon_threadsafe(super().clear) 21 | 22 | 23 | async def read_with_interrupt(listener: Any, interrupt: Event_ts): 24 | try: 25 | result = await listener.read() 26 | return result 27 | finally: 28 | interrupt.set() 29 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/api/__init__.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa 2 | 3 | # import apis into api package 4 | from hatchet_sdk.clients.rest.api.api_token_api import APITokenApi 5 | from hatchet_sdk.clients.rest.api.default_api import DefaultApi 6 | from hatchet_sdk.clients.rest.api.event_api import EventApi 7 | from hatchet_sdk.clients.rest.api.github_api import GithubApi 8 | from hatchet_sdk.clients.rest.api.healthcheck_api import HealthcheckApi 9 | from hatchet_sdk.clients.rest.api.log_api import LogApi 10 | from hatchet_sdk.clients.rest.api.metadata_api import MetadataApi 11 | from hatchet_sdk.clients.rest.api.rate_limits_api import RateLimitsApi 12 | from hatchet_sdk.clients.rest.api.slack_api import SlackApi 13 | from hatchet_sdk.clients.rest.api.sns_api import SNSApi 14 | from hatchet_sdk.clients.rest.api.step_run_api import StepRunApi 15 | from hatchet_sdk.clients.rest.api.tenant_api import TenantApi 16 | from hatchet_sdk.clients.rest.api.user_api import UserApi 17 | from hatchet_sdk.clients.rest.api.worker_api import WorkerApi 18 | from hatchet_sdk.clients.rest.api.workflow_api import WorkflowApi 19 | from hatchet_sdk.clients.rest.api.workflow_run_api import WorkflowRunApi 20 | -------------------------------------------------------------------------------- 
/hatchet_sdk/clients/rest/api_response.py: -------------------------------------------------------------------------------- 1 | """API response object.""" 2 | 3 | from __future__ import annotations 4 | 5 | from typing import Generic, Mapping, Optional, TypeVar 6 | 7 | from pydantic import BaseModel, Field, StrictBytes, StrictInt 8 | 9 | T = TypeVar("T") 10 | 11 | 12 | class ApiResponse(BaseModel, Generic[T]): 13 | """ 14 | API response object 15 | """ 16 | 17 | status_code: StrictInt = Field(description="HTTP status code") 18 | headers: Optional[Mapping[str, str]] = Field(None, description="HTTP headers") 19 | data: T = Field(description="Deserialized data given the data type") 20 | raw_data: StrictBytes = Field(description="Raw data (HTTP response body)") 21 | 22 | model_config = {"arbitrary_types_allowed": True} 23 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/accept_invite_request.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel, ConfigDict, Field 23 | from typing_extensions import Annotated, Self 24 | 25 | 26 | class AcceptInviteRequest(BaseModel): 27 | """ 28 | AcceptInviteRequest 29 | """ # noqa: E501 30 | 31 | invite: Annotated[str, Field(min_length=36, strict=True, max_length=36)] 32 | __properties: ClassVar[List[str]] = ["invite"] 33 | 34 | model_config = ConfigDict( 35 | populate_by_name=True, 36 | validate_assignment=True, 37 | protected_namespaces=(), 38 | ) 39 | 40 | def to_str(self) -> str: 41 | """Returns the string representation of the model using alias""" 42 | return pprint.pformat(self.model_dump(by_alias=True)) 43 | 44 | def to_json(self) -> str: 45 | """Returns the JSON representation of the model using alias""" 46 | # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead 47 | return json.dumps(self.to_dict()) 48 | 49 | @classmethod 50 | def from_json(cls, json_str: str) -> Optional[Self]: 51 | """Create an instance of AcceptInviteRequest from a JSON string""" 52 | return cls.from_dict(json.loads(json_str)) 53 | 54 | def to_dict(self) -> Dict[str, Any]: 55 | """Return the dictionary representation of the model using alias. 56 | 57 | This has the following differences from calling pydantic's 58 | `self.model_dump(by_alias=True)`: 59 | 60 | * `None` is only added to the output dict for nullable fields that 61 | were set at model initialization. Other fields with value `None` 62 | are ignored. 
63 | """ 64 | excluded_fields: Set[str] = set([]) 65 | 66 | _dict = self.model_dump( 67 | by_alias=True, 68 | exclude=excluded_fields, 69 | exclude_none=True, 70 | ) 71 | return _dict 72 | 73 | @classmethod 74 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 75 | """Create an instance of AcceptInviteRequest from a dict""" 76 | if obj is None: 77 | return None 78 | 79 | if not isinstance(obj, dict): 80 | return cls.model_validate(obj) 81 | 82 | _obj = cls.model_validate({"invite": obj.get("invite")}) 83 | return _obj 84 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/api_meta_auth.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel, ConfigDict, Field, StrictStr 23 | from typing_extensions import Self 24 | 25 | 26 | class APIMetaAuth(BaseModel): 27 | """ 28 | APIMetaAuth 29 | """ # noqa: E501 30 | 31 | schemes: Optional[List[StrictStr]] = Field( 32 | default=None, description="the supported types of authentication" 33 | ) 34 | __properties: ClassVar[List[str]] = ["schemes"] 35 | 36 | model_config = ConfigDict( 37 | populate_by_name=True, 38 | validate_assignment=True, 39 | protected_namespaces=(), 40 | ) 41 | 42 | def to_str(self) -> str: 43 | """Returns the string representation of the model using alias""" 44 | return pprint.pformat(self.model_dump(by_alias=True)) 45 | 46 | def to_json(self) -> str: 47 | """Returns the JSON representation of the model using alias""" 48 | # 
TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead 49 | return json.dumps(self.to_dict()) 50 | 51 | @classmethod 52 | def from_json(cls, json_str: str) -> Optional[Self]: 53 | """Create an instance of APIMetaAuth from a JSON string""" 54 | return cls.from_dict(json.loads(json_str)) 55 | 56 | def to_dict(self) -> Dict[str, Any]: 57 | """Return the dictionary representation of the model using alias. 58 | 59 | This has the following differences from calling pydantic's 60 | `self.model_dump(by_alias=True)`: 61 | 62 | * `None` is only added to the output dict for nullable fields that 63 | were set at model initialization. Other fields with value `None` 64 | are ignored. 65 | """ 66 | excluded_fields: Set[str] = set([]) 67 | 68 | _dict = self.model_dump( 69 | by_alias=True, 70 | exclude=excluded_fields, 71 | exclude_none=True, 72 | ) 73 | return _dict 74 | 75 | @classmethod 76 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 77 | """Create an instance of APIMetaAuth from a dict""" 78 | if obj is None: 79 | return None 80 | 81 | if not isinstance(obj, dict): 82 | return cls.model_validate(obj) 83 | 84 | _obj = cls.model_validate({"schemes": obj.get("schemes")}) 85 | return _obj 86 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/cancel_event_request.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel, ConfigDict, Field 23 | from typing_extensions import Annotated, Self 24 | 25 | 26 | class CancelEventRequest(BaseModel): 27 | """ 28 | CancelEventRequest 29 | """ # noqa: E501 30 | 31 | event_ids: List[ 32 | Annotated[str, Field(min_length=36, strict=True, max_length=36)] 33 | ] = Field(alias="eventIds") 34 | __properties: ClassVar[List[str]] = ["eventIds"] 35 | 36 | model_config = ConfigDict( 37 | populate_by_name=True, 38 | validate_assignment=True, 39 | protected_namespaces=(), 40 | ) 41 | 42 | def to_str(self) -> str: 43 | """Returns the string representation of the model using alias""" 44 | return pprint.pformat(self.model_dump(by_alias=True)) 45 | 46 | def to_json(self) -> str: 47 | """Returns the JSON representation of the model using alias""" 48 | # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead 49 | return json.dumps(self.to_dict()) 50 | 51 | @classmethod 52 | def from_json(cls, json_str: str) -> Optional[Self]: 53 | """Create an instance of CancelEventRequest from a JSON string""" 54 | return cls.from_dict(json.loads(json_str)) 55 | 56 | def to_dict(self) -> Dict[str, Any]: 57 | """Return the dictionary representation of the model using alias. 58 | 59 | This has the following differences from calling pydantic's 60 | `self.model_dump(by_alias=True)`: 61 | 62 | * `None` is only added to the output dict for nullable fields that 63 | were set at model initialization. Other fields with value `None` 64 | are ignored. 
65 | """ 66 | excluded_fields: Set[str] = set([]) 67 | 68 | _dict = self.model_dump( 69 | by_alias=True, 70 | exclude=excluded_fields, 71 | exclude_none=True, 72 | ) 73 | return _dict 74 | 75 | @classmethod 76 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 77 | """Create an instance of CancelEventRequest from a dict""" 78 | if obj is None: 79 | return None 80 | 81 | if not isinstance(obj, dict): 82 | return cls.model_validate(obj) 83 | 84 | _obj = cls.model_validate({"eventIds": obj.get("eventIds")}) 85 | return _obj 86 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/cancel_step_run_request.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel 23 | from typing_extensions import Self 24 | 25 | 26 | class CancelStepRunRequest(BaseModel): 27 | """ 28 | CancelStepRunRequest 29 | """ # noqa: E501 30 | 31 | input: Dict[str, Any] 32 | __properties: ClassVar[List[str]] = ["input"] 33 | 34 | model_config = { 35 | "populate_by_name": True, 36 | "validate_assignment": True, 37 | "protected_namespaces": (), 38 | } 39 | 40 | def to_str(self) -> str: 41 | """Returns the string representation of the model using alias""" 42 | return pprint.pformat(self.model_dump(by_alias=True)) 43 | 44 | def to_json(self) -> str: 45 | """Returns the JSON representation of the model using alias""" 46 | # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead 47 | return json.dumps(self.to_dict()) 48 | 49 | @classmethod 50 | def from_json(cls, json_str: str) -> Optional[Self]: 51 | """Create an instance of CancelStepRunRequest from a JSON string""" 52 | return cls.from_dict(json.loads(json_str)) 53 | 54 | def to_dict(self) -> Dict[str, Any]: 55 | """Return the dictionary representation of the model using alias. 56 | 57 | This has the following differences from calling pydantic's 58 | `self.model_dump(by_alias=True)`: 59 | 60 | * `None` is only added to the output dict for nullable fields that 61 | were set at model initialization. Other fields with value `None` 62 | are ignored. 
63 | """ 64 | excluded_fields: Set[str] = set([]) 65 | 66 | _dict = self.model_dump( 67 | by_alias=True, 68 | exclude=excluded_fields, 69 | exclude_none=True, 70 | ) 71 | return _dict 72 | 73 | @classmethod 74 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 75 | """Create an instance of CancelStepRunRequest from a dict""" 76 | if obj is None: 77 | return None 78 | 79 | if not isinstance(obj, dict): 80 | return cls.model_validate(obj) 81 | 82 | _obj = cls.model_validate({"input": obj.get("input")}) 83 | return _obj 84 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/concurrency_limit_strategy.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class ConcurrencyLimitStrategy(str, Enum): 24 | """ 25 | ConcurrencyLimitStrategy 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | CANCEL_IN_PROGRESS = "CANCEL_IN_PROGRESS" 32 | DROP_NEWEST = "DROP_NEWEST" 33 | QUEUE_NEWEST = "QUEUE_NEWEST" 34 | GROUP_ROUND_ROBIN = "GROUP_ROUND_ROBIN" 35 | 36 | @classmethod 37 | def from_json(cls, json_str: str) -> Self: 38 | """Create an instance of ConcurrencyLimitStrategy from a JSON string""" 39 | return cls(json.loads(json_str)) 40 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/create_api_token_response.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel, ConfigDict, Field, StrictStr 23 | from typing_extensions import Self 24 | 25 | 26 | class CreateAPITokenResponse(BaseModel): 27 | """ 28 | CreateAPITokenResponse 29 | """ # noqa: E501 30 | 31 | token: StrictStr = Field(description="The API token.") 32 | __properties: ClassVar[List[str]] = ["token"] 33 | 34 | model_config = ConfigDict( 35 | populate_by_name=True, 36 | validate_assignment=True, 37 | protected_namespaces=(), 38 | ) 39 | 40 | def to_str(self) -> str: 41 | """Returns the string representation of the model using alias""" 42 | return pprint.pformat(self.model_dump(by_alias=True)) 43 | 44 | def to_json(self) -> str: 45 | """Returns the JSON representation of the model using alias""" 46 | # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead 47 | return json.dumps(self.to_dict()) 48 | 49 | @classmethod 50 | def from_json(cls, json_str: str) -> Optional[Self]: 51 | """Create an instance of CreateAPITokenResponse from a JSON string""" 52 | return cls.from_dict(json.loads(json_str)) 53 | 54 | def to_dict(self) -> Dict[str, Any]: 55 | """Return the dictionary representation of the model using alias. 56 | 57 | This has the following differences from calling pydantic's 58 | `self.model_dump(by_alias=True)`: 59 | 60 | * `None` is only added to the output dict for nullable fields that 61 | were set at model initialization. Other fields with value `None` 62 | are ignored. 
63 | """ 64 | excluded_fields: Set[str] = set([]) 65 | 66 | _dict = self.model_dump( 67 | by_alias=True, 68 | exclude=excluded_fields, 69 | exclude_none=True, 70 | ) 71 | return _dict 72 | 73 | @classmethod 74 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 75 | """Create an instance of CreateAPITokenResponse from a dict""" 76 | if obj is None: 77 | return None 78 | 79 | if not isinstance(obj, dict): 80 | return cls.model_validate(obj) 81 | 82 | _obj = cls.model_validate({"token": obj.get("token")}) 83 | return _obj 84 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/create_pull_request_from_step_run.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel, ConfigDict, Field, StrictStr 23 | from typing_extensions import Self 24 | 25 | 26 | class CreatePullRequestFromStepRun(BaseModel): 27 | """ 28 | CreatePullRequestFromStepRun 29 | """ # noqa: E501 30 | 31 | branch_name: StrictStr = Field(alias="branchName") 32 | __properties: ClassVar[List[str]] = ["branchName"] 33 | 34 | model_config = ConfigDict( 35 | populate_by_name=True, 36 | validate_assignment=True, 37 | protected_namespaces=(), 38 | ) 39 | 40 | def to_str(self) -> str: 41 | """Returns the string representation of the model using alias""" 42 | return pprint.pformat(self.model_dump(by_alias=True)) 43 | 44 | def to_json(self) -> str: 45 | """Returns the JSON representation of the model using alias""" 46 | # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead 47 | return json.dumps(self.to_dict()) 48 | 49 | @classmethod 50 | def from_json(cls, json_str: str) -> Optional[Self]: 51 | """Create an instance of CreatePullRequestFromStepRun from a JSON string""" 52 | return cls.from_dict(json.loads(json_str)) 53 | 54 | def to_dict(self) -> Dict[str, Any]: 55 | """Return the dictionary representation of the model using alias. 56 | 57 | This has the following differences from calling pydantic's 58 | `self.model_dump(by_alias=True)`: 59 | 60 | * `None` is only added to the output dict for nullable fields that 61 | were set at model initialization. Other fields with value `None` 62 | are ignored. 
63 | """ 64 | excluded_fields: Set[str] = set([]) 65 | 66 | _dict = self.model_dump( 67 | by_alias=True, 68 | exclude=excluded_fields, 69 | exclude_none=True, 70 | ) 71 | return _dict 72 | 73 | @classmethod 74 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 75 | """Create an instance of CreatePullRequestFromStepRun from a dict""" 76 | if obj is None: 77 | return None 78 | 79 | if not isinstance(obj, dict): 80 | return cls.model_validate(obj) 81 | 82 | _obj = cls.model_validate({"branchName": obj.get("branchName")}) 83 | return _obj 84 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/create_sns_integration_request.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel, ConfigDict, Field, StrictStr 23 | from typing_extensions import Self 24 | 25 | 26 | class CreateSNSIntegrationRequest(BaseModel): 27 | """ 28 | CreateSNSIntegrationRequest 29 | """ # noqa: E501 30 | 31 | topic_arn: StrictStr = Field( 32 | description="The Amazon Resource Name (ARN) of the SNS topic.", alias="topicArn" 33 | ) 34 | __properties: ClassVar[List[str]] = ["topicArn"] 35 | 36 | model_config = ConfigDict( 37 | populate_by_name=True, 38 | validate_assignment=True, 39 | protected_namespaces=(), 40 | ) 41 | 42 | def to_str(self) -> str: 43 | """Returns the string representation of the model using alias""" 44 | return pprint.pformat(self.model_dump(by_alias=True)) 45 | 46 | def to_json(self) -> str: 47 | """Returns the JSON representation of the model using alias""" 48 | # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead 49 | return json.dumps(self.to_dict()) 50 | 51 | @classmethod 52 | def from_json(cls, json_str: str) -> Optional[Self]: 53 | """Create an instance of CreateSNSIntegrationRequest from a JSON string""" 54 | return cls.from_dict(json.loads(json_str)) 55 | 56 | def to_dict(self) -> Dict[str, Any]: 57 | """Return the dictionary representation of the model using alias. 58 | 59 | This has the following differences from calling pydantic's 60 | `self.model_dump(by_alias=True)`: 61 | 62 | * `None` is only added to the output dict for nullable fields that 63 | were set at model initialization. Other fields with value `None` 64 | are ignored. 
65 | """ 66 | excluded_fields: Set[str] = set([]) 67 | 68 | _dict = self.model_dump( 69 | by_alias=True, 70 | exclude=excluded_fields, 71 | exclude_none=True, 72 | ) 73 | return _dict 74 | 75 | @classmethod 76 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 77 | """Create an instance of CreateSNSIntegrationRequest from a dict""" 78 | if obj is None: 79 | return None 80 | 81 | if not isinstance(obj, dict): 82 | return cls.model_validate(obj) 83 | 84 | _obj = cls.model_validate({"topicArn": obj.get("topicArn")}) 85 | return _obj 86 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/create_tenant_alert_email_group_request.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel, ConfigDict, Field, StrictStr 23 | from typing_extensions import Self 24 | 25 | 26 | class CreateTenantAlertEmailGroupRequest(BaseModel): 27 | """ 28 | CreateTenantAlertEmailGroupRequest 29 | """ # noqa: E501 30 | 31 | emails: List[StrictStr] = Field(description="A list of emails for users") 32 | __properties: ClassVar[List[str]] = ["emails"] 33 | 34 | model_config = ConfigDict( 35 | populate_by_name=True, 36 | validate_assignment=True, 37 | protected_namespaces=(), 38 | ) 39 | 40 | def to_str(self) -> str: 41 | """Returns the string representation of the model using alias""" 42 | return pprint.pformat(self.model_dump(by_alias=True)) 43 | 44 | def to_json(self) -> str: 45 | """Returns the JSON representation of the model using alias""" 46 | # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead 47 | return json.dumps(self.to_dict()) 48 | 49 | @classmethod 50 | def from_json(cls, json_str: str) -> Optional[Self]: 51 | """Create an instance of CreateTenantAlertEmailGroupRequest from a JSON string""" 52 | return cls.from_dict(json.loads(json_str)) 53 | 54 | def to_dict(self) -> Dict[str, Any]: 55 | """Return the dictionary representation of the model using alias. 56 | 57 | This has the following differences from calling pydantic's 58 | `self.model_dump(by_alias=True)`: 59 | 60 | * `None` is only added to the output dict for nullable fields that 61 | were set at model initialization. Other fields with value `None` 62 | are ignored. 
63 | """ 64 | excluded_fields: Set[str] = set([]) 65 | 66 | _dict = self.model_dump( 67 | by_alias=True, 68 | exclude=excluded_fields, 69 | exclude_none=True, 70 | ) 71 | return _dict 72 | 73 | @classmethod 74 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 75 | """Create an instance of CreateTenantAlertEmailGroupRequest from a dict""" 76 | if obj is None: 77 | return None 78 | 79 | if not isinstance(obj, dict): 80 | return cls.model_validate(obj) 81 | 82 | _obj = cls.model_validate({"emails": obj.get("emails")}) 83 | return _obj 84 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/create_tenant_request.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel, ConfigDict, Field, StrictStr 23 | from typing_extensions import Self 24 | 25 | 26 | class CreateTenantRequest(BaseModel): 27 | """ 28 | CreateTenantRequest 29 | """ # noqa: E501 30 | 31 | name: StrictStr = Field(description="The name of the tenant.") 32 | slug: StrictStr = Field(description="The slug of the tenant.") 33 | __properties: ClassVar[List[str]] = ["name", "slug"] 34 | 35 | model_config = ConfigDict( 36 | populate_by_name=True, 37 | validate_assignment=True, 38 | protected_namespaces=(), 39 | ) 40 | 41 | def to_str(self) -> str: 42 | """Returns the string representation of the model using alias""" 43 | return pprint.pformat(self.model_dump(by_alias=True)) 44 | 45 | def to_json(self) -> str: 46 | """Returns the JSON representation of the model using alias""" 47 | # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead 48 | return json.dumps(self.to_dict()) 49 | 50 | @classmethod 51 | def from_json(cls, json_str: str) -> Optional[Self]: 52 | """Create an instance of CreateTenantRequest from a JSON string""" 53 | return cls.from_dict(json.loads(json_str)) 54 | 55 | def to_dict(self) -> Dict[str, Any]: 56 | """Return the dictionary representation of the model using alias. 57 | 58 | This has the following differences from calling pydantic's 59 | `self.model_dump(by_alias=True)`: 60 | 61 | * `None` is only added to the output dict for nullable fields that 62 | were set at model initialization. Other fields with value `None` 63 | are ignored. 
64 | """ 65 | excluded_fields: Set[str] = set([]) 66 | 67 | _dict = self.model_dump( 68 | by_alias=True, 69 | exclude=excluded_fields, 70 | exclude_none=True, 71 | ) 72 | return _dict 73 | 74 | @classmethod 75 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 76 | """Create an instance of CreateTenantRequest from a dict""" 77 | if obj is None: 78 | return None 79 | 80 | if not isinstance(obj, dict): 81 | return cls.model_validate(obj) 82 | 83 | _obj = cls.model_validate({"name": obj.get("name"), "slug": obj.get("slug")}) 84 | return _obj 85 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/cron_workflows_method.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class CronWorkflowsMethod(str, Enum): 24 | """ 25 | CronWorkflowsMethod 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | DEFAULT = "DEFAULT" 32 | API = "API" 33 | 34 | @classmethod 35 | def from_json(cls, json_str: str) -> Self: 36 | """Create an instance of CronWorkflowsMethod from a JSON string""" 37 | return cls(json.loads(json_str)) 38 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/cron_workflows_order_by_field.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class CronWorkflowsOrderByField(str, Enum): 24 | """ 25 | CronWorkflowsOrderByField 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | NAME = "name" 32 | CREATEDAT = "createdAt" 33 | 34 | @classmethod 35 | def from_json(cls, json_str: str) -> Self: 36 | """Create an instance of CronWorkflowsOrderByField from a JSON string""" 37 | return cls(json.loads(json_str)) 38 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/event_data.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class 
manually.
"""  # noqa: E501


from __future__ import annotations

import json
import pprint
import re  # noqa: F401
from typing import Any, ClassVar, Dict, List, Optional, Set

from pydantic import BaseModel, ConfigDict, Field, StrictStr
from typing_extensions import Self


class EventData(BaseModel):
    """
    EventData
    """  # noqa: E501

    data: StrictStr = Field(description="The data for the event (JSON bytes).")
    __properties: ClassVar[List[str]] = ["data"]

    # Pydantic v2 config: allow population by field name as well as alias,
    # re-validate on assignment, disable protected-namespace checks.
    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of EventData from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        # No fields are excluded for this model; kept for generator symmetry.
        excluded_fields: Set[str] = set([])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of EventData from a dict"""
        if obj is None:
            return None

        # Non-dict input (e.g. an existing model instance) is validated as-is.
        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        _obj = cls.model_validate({"data": obj.get("data")})
        return _obj
-------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/event_order_by_direction.py: --------------------------------------------------------------------------------
# coding: utf-8

"""
    Hatchet API

    The Hatchet API

    The version of the OpenAPI document: 1.0.0
    Generated by OpenAPI Generator (https://openapi-generator.tech)

    Do not edit the class manually.
"""  # noqa: E501


from __future__ import annotations

import json
from enum import Enum

from typing_extensions import Self


class EventOrderByDirection(str, Enum):
    """
    EventOrderByDirection
    """

    """
    allowed enum values
    """
    # Allowed values as defined by the Hatchet OpenAPI document.
    ASC = "asc"
    DESC = "desc"

    @classmethod
    def from_json(cls, json_str: str) -> Self:
        """Create an instance of EventOrderByDirection from a JSON string"""
        return cls(json.loads(json_str))
-------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/event_order_by_field.py: --------------------------------------------------------------------------------
# coding: utf-8

"""
    Hatchet API

    The Hatchet API

    The version of the OpenAPI document: 1.0.0
    Generated by OpenAPI Generator (https://openapi-generator.tech)

    Do not edit the class manually.
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class EventOrderByField(str, Enum): 24 | """ 25 | EventOrderByField 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | CREATEDAT = "createdAt" 32 | 33 | @classmethod 34 | def from_json(cls, json_str: str) -> Self: 35 | """Create an instance of EventOrderByField from a JSON string""" 36 | return cls(json.loads(json_str)) 37 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/github_branch.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel, ConfigDict, StrictBool, StrictStr 23 | from typing_extensions import Self 24 | 25 | 26 | class GithubBranch(BaseModel): 27 | """ 28 | GithubBranch 29 | """ # noqa: E501 30 | 31 | branch_name: StrictStr 32 | is_default: StrictBool 33 | __properties: ClassVar[List[str]] = ["branch_name", "is_default"] 34 | 35 | model_config = ConfigDict( 36 | populate_by_name=True, 37 | validate_assignment=True, 38 | protected_namespaces=(), 39 | ) 40 | 41 | def to_str(self) -> str: 42 | """Returns the string representation of the model using alias""" 43 | return pprint.pformat(self.model_dump(by_alias=True)) 44 | 45 | def to_json(self) -> str: 46 | """Returns the JSON representation of the model using alias""" 47 | # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) 
instead 48 | return json.dumps(self.to_dict()) 49 | 50 | @classmethod 51 | def from_json(cls, json_str: str) -> Optional[Self]: 52 | """Create an instance of GithubBranch from a JSON string""" 53 | return cls.from_dict(json.loads(json_str)) 54 | 55 | def to_dict(self) -> Dict[str, Any]: 56 | """Return the dictionary representation of the model using alias. 57 | 58 | This has the following differences from calling pydantic's 59 | `self.model_dump(by_alias=True)`: 60 | 61 | * `None` is only added to the output dict for nullable fields that 62 | were set at model initialization. Other fields with value `None` 63 | are ignored. 64 | """ 65 | excluded_fields: Set[str] = set([]) 66 | 67 | _dict = self.model_dump( 68 | by_alias=True, 69 | exclude=excluded_fields, 70 | exclude_none=True, 71 | ) 72 | return _dict 73 | 74 | @classmethod 75 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 76 | """Create an instance of GithubBranch from a dict""" 77 | if obj is None: 78 | return None 79 | 80 | if not isinstance(obj, dict): 81 | return cls.model_validate(obj) 82 | 83 | _obj = cls.model_validate( 84 | {"branch_name": obj.get("branch_name"), "is_default": obj.get("is_default")} 85 | ) 86 | return _obj 87 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/github_repo.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel, ConfigDict, StrictStr 23 | from typing_extensions import Self 24 | 25 | 26 | class GithubRepo(BaseModel): 27 | """ 28 | GithubRepo 29 | """ # noqa: E501 30 | 31 | repo_owner: StrictStr 32 | repo_name: StrictStr 33 | __properties: ClassVar[List[str]] = ["repo_owner", "repo_name"] 34 | 35 | model_config = ConfigDict( 36 | populate_by_name=True, 37 | validate_assignment=True, 38 | protected_namespaces=(), 39 | ) 40 | 41 | def to_str(self) -> str: 42 | """Returns the string representation of the model using alias""" 43 | return pprint.pformat(self.model_dump(by_alias=True)) 44 | 45 | def to_json(self) -> str: 46 | """Returns the JSON representation of the model using alias""" 47 | # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead 48 | return json.dumps(self.to_dict()) 49 | 50 | @classmethod 51 | def from_json(cls, json_str: str) -> Optional[Self]: 52 | """Create an instance of GithubRepo from a JSON string""" 53 | return cls.from_dict(json.loads(json_str)) 54 | 55 | def to_dict(self) -> Dict[str, Any]: 56 | """Return the dictionary representation of the model using alias. 57 | 58 | This has the following differences from calling pydantic's 59 | `self.model_dump(by_alias=True)`: 60 | 61 | * `None` is only added to the output dict for nullable fields that 62 | were set at model initialization. Other fields with value `None` 63 | are ignored. 
64 | """ 65 | excluded_fields: Set[str] = set([]) 66 | 67 | _dict = self.model_dump( 68 | by_alias=True, 69 | exclude=excluded_fields, 70 | exclude_none=True, 71 | ) 72 | return _dict 73 | 74 | @classmethod 75 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 76 | """Create an instance of GithubRepo from a dict""" 77 | if obj is None: 78 | return None 79 | 80 | if not isinstance(obj, dict): 81 | return cls.model_validate(obj) 82 | 83 | _obj = cls.model_validate( 84 | {"repo_owner": obj.get("repo_owner"), "repo_name": obj.get("repo_name")} 85 | ) 86 | return _obj 87 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/info_get_version200_response.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel, ConfigDict, StrictStr 23 | from typing_extensions import Self 24 | 25 | 26 | class InfoGetVersion200Response(BaseModel): 27 | """ 28 | InfoGetVersion200Response 29 | """ # noqa: E501 30 | 31 | version: StrictStr 32 | __properties: ClassVar[List[str]] = ["version"] 33 | 34 | model_config = ConfigDict( 35 | populate_by_name=True, 36 | validate_assignment=True, 37 | protected_namespaces=(), 38 | ) 39 | 40 | def to_str(self) -> str: 41 | """Returns the string representation of the model using alias""" 42 | return pprint.pformat(self.model_dump(by_alias=True)) 43 | 44 | def to_json(self) -> str: 45 | """Returns the JSON representation of the model using alias""" 46 | # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead 47 | return json.dumps(self.to_dict()) 48 | 49 | @classmethod 50 | def from_json(cls, json_str: str) -> Optional[Self]: 51 | """Create an instance of InfoGetVersion200Response from a JSON string""" 52 | return cls.from_dict(json.loads(json_str)) 53 | 54 | def to_dict(self) -> Dict[str, Any]: 55 | """Return the dictionary representation of the model using alias. 56 | 57 | This has the following differences from calling pydantic's 58 | `self.model_dump(by_alias=True)`: 59 | 60 | * `None` is only added to the output dict for nullable fields that 61 | were set at model initialization. Other fields with value `None` 62 | are ignored. 
63 | """ 64 | excluded_fields: Set[str] = set([]) 65 | 66 | _dict = self.model_dump( 67 | by_alias=True, 68 | exclude=excluded_fields, 69 | exclude_none=True, 70 | ) 71 | return _dict 72 | 73 | @classmethod 74 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 75 | """Create an instance of InfoGetVersion200Response from a dict""" 76 | if obj is None: 77 | return None 78 | 79 | if not isinstance(obj, dict): 80 | return cls.model_validate(obj) 81 | 82 | _obj = cls.model_validate({"version": obj.get("version")}) 83 | return _obj 84 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/job_run_status.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class JobRunStatus(str, Enum): 24 | """ 25 | JobRunStatus 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | PENDING = "PENDING" 32 | RUNNING = "RUNNING" 33 | SUCCEEDED = "SUCCEEDED" 34 | FAILED = "FAILED" 35 | CANCELLED = "CANCELLED" 36 | BACKOFF = "BACKOFF" 37 | 38 | @classmethod 39 | def from_json(cls, json_str: str) -> Self: 40 | """Create an instance of JobRunStatus from a JSON string""" 41 | return cls(json.loads(json_str)) 42 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/log_line_level.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class LogLineLevel(str, Enum): 24 | """ 25 | LogLineLevel 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | DEBUG = "DEBUG" 32 | INFO = "INFO" 33 | WARN = "WARN" 34 | ERROR = "ERROR" 35 | 36 | @classmethod 37 | def from_json(cls, json_str: str) -> Self: 38 | """Create an instance of LogLineLevel from a JSON string""" 39 | return cls(json.loads(json_str)) 40 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/log_line_order_by_direction.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class LogLineOrderByDirection(str, Enum): 24 | """ 25 | LogLineOrderByDirection 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | ASC = "asc" 32 | DESC = "desc" 33 | 34 | @classmethod 35 | def from_json(cls, json_str: str) -> Self: 36 | """Create an instance of LogLineOrderByDirection from a JSON string""" 37 | return cls(json.loads(json_str)) 38 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/log_line_order_by_field.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class LogLineOrderByField(str, Enum): 24 | """ 25 | LogLineOrderByField 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | CREATEDAT = "createdAt" 32 | 33 | @classmethod 34 | def from_json(cls, json_str: str) -> Self: 35 | """Create an instance of LogLineOrderByField from a JSON string""" 36 | return cls(json.loads(json_str)) 37 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/pull_request_state.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class PullRequestState(str, Enum): 24 | """ 25 | PullRequestState 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | OPEN = "open" 32 | CLOSED = "closed" 33 | 34 | @classmethod 35 | def from_json(cls, json_str: str) -> Self: 36 | """Create an instance of PullRequestState from a JSON string""" 37 | return cls(json.loads(json_str)) 38 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/rate_limit_order_by_direction.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class RateLimitOrderByDirection(str, Enum): 24 | """ 25 | RateLimitOrderByDirection 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | ASC = "asc" 32 | DESC = "desc" 33 | 34 | @classmethod 35 | def from_json(cls, json_str: str) -> Self: 36 | """Create an instance of RateLimitOrderByDirection from a JSON string""" 37 | return cls(json.loads(json_str)) 38 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/rate_limit_order_by_field.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class RateLimitOrderByField(str, Enum): 24 | """ 25 | RateLimitOrderByField 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | KEY = "key" 32 | VALUE = "value" 33 | LIMITVALUE = "limitValue" 34 | 35 | @classmethod 36 | def from_json(cls, json_str: str) -> Self: 37 | """Create an instance of RateLimitOrderByField from a JSON string""" 38 | return cls(json.loads(json_str)) 39 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/reject_invite_request.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel, ConfigDict, Field 23 | from typing_extensions import Annotated, Self 24 | 25 | 26 | class RejectInviteRequest(BaseModel): 27 | """ 28 | RejectInviteRequest 29 | """ # noqa: E501 30 | 31 | invite: Annotated[str, Field(min_length=36, strict=True, max_length=36)] 32 | __properties: ClassVar[List[str]] = ["invite"] 33 | 34 | model_config = ConfigDict( 35 | populate_by_name=True, 36 | validate_assignment=True, 37 | protected_namespaces=(), 38 | ) 39 | 40 | def to_str(self) -> str: 41 | """Returns the string representation of the model using alias""" 42 | return pprint.pformat(self.model_dump(by_alias=True)) 43 | 44 | def to_json(self) -> str: 45 | """Returns the JSON representation of the model using alias""" 46 | # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead 47 | return json.dumps(self.to_dict()) 48 | 49 | @classmethod 50 | def from_json(cls, json_str: str) -> Optional[Self]: 51 | """Create an instance of RejectInviteRequest from a JSON string""" 52 | return cls.from_dict(json.loads(json_str)) 53 | 54 | def to_dict(self) -> Dict[str, Any]: 55 | """Return the dictionary representation of the model using alias. 56 | 57 | This has the following differences from calling pydantic's 58 | `self.model_dump(by_alias=True)`: 59 | 60 | * `None` is only added to the output dict for nullable fields that 61 | were set at model initialization. Other fields with value `None` 62 | are ignored. 
63 | """ 64 | excluded_fields: Set[str] = set([]) 65 | 66 | _dict = self.model_dump( 67 | by_alias=True, 68 | exclude=excluded_fields, 69 | exclude_none=True, 70 | ) 71 | return _dict 72 | 73 | @classmethod 74 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 75 | """Create an instance of RejectInviteRequest from a dict""" 76 | if obj is None: 77 | return None 78 | 79 | if not isinstance(obj, dict): 80 | return cls.model_validate(obj) 81 | 82 | _obj = cls.model_validate({"invite": obj.get("invite")}) 83 | return _obj 84 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/replay_event_request.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel, ConfigDict, Field 23 | from typing_extensions import Annotated, Self 24 | 25 | 26 | class ReplayEventRequest(BaseModel): 27 | """ 28 | ReplayEventRequest 29 | """ # noqa: E501 30 | 31 | event_ids: List[ 32 | Annotated[str, Field(min_length=36, strict=True, max_length=36)] 33 | ] = Field(alias="eventIds") 34 | __properties: ClassVar[List[str]] = ["eventIds"] 35 | 36 | model_config = ConfigDict( 37 | populate_by_name=True, 38 | validate_assignment=True, 39 | protected_namespaces=(), 40 | ) 41 | 42 | def to_str(self) -> str: 43 | """Returns the string representation of the model using alias""" 44 | return pprint.pformat(self.model_dump(by_alias=True)) 45 | 46 | def to_json(self) -> str: 47 | """Returns the JSON representation of the model using alias""" 48 | # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead 49 | return json.dumps(self.to_dict()) 50 | 51 | @classmethod 52 | def from_json(cls, json_str: str) -> Optional[Self]: 53 | """Create an instance of ReplayEventRequest from a JSON string""" 54 | return cls.from_dict(json.loads(json_str)) 55 | 56 | def to_dict(self) -> Dict[str, Any]: 57 | """Return the dictionary representation of the model using alias. 58 | 59 | This has the following differences from calling pydantic's 60 | `self.model_dump(by_alias=True)`: 61 | 62 | * `None` is only added to the output dict for nullable fields that 63 | were set at model initialization. Other fields with value `None` 64 | are ignored. 
65 | """ 66 | excluded_fields: Set[str] = set([]) 67 | 68 | _dict = self.model_dump( 69 | by_alias=True, 70 | exclude=excluded_fields, 71 | exclude_none=True, 72 | ) 73 | return _dict 74 | 75 | @classmethod 76 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 77 | """Create an instance of ReplayEventRequest from a dict""" 78 | if obj is None: 79 | return None 80 | 81 | if not isinstance(obj, dict): 82 | return cls.model_validate(obj) 83 | 84 | _obj = cls.model_validate({"eventIds": obj.get("eventIds")}) 85 | return _obj 86 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/rerun_step_run_request.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel, ConfigDict 23 | from typing_extensions import Self 24 | 25 | 26 | class RerunStepRunRequest(BaseModel): 27 | """ 28 | RerunStepRunRequest 29 | """ # noqa: E501 30 | 31 | input: Dict[str, Any] 32 | __properties: ClassVar[List[str]] = ["input"] 33 | 34 | model_config = ConfigDict( 35 | populate_by_name=True, 36 | validate_assignment=True, 37 | protected_namespaces=(), 38 | ) 39 | 40 | def to_str(self) -> str: 41 | """Returns the string representation of the model using alias""" 42 | return pprint.pformat(self.model_dump(by_alias=True)) 43 | 44 | def to_json(self) -> str: 45 | """Returns the JSON representation of the model using alias""" 46 | # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead 47 | return json.dumps(self.to_dict()) 48 | 49 | @classmethod 50 | def from_json(cls, json_str: str) -> Optional[Self]: 51 | """Create an instance of RerunStepRunRequest from a JSON string""" 52 | return cls.from_dict(json.loads(json_str)) 53 | 54 | def to_dict(self) -> Dict[str, Any]: 55 | """Return the dictionary representation of the model using alias. 56 | 57 | This has the following differences from calling pydantic's 58 | `self.model_dump(by_alias=True)`: 59 | 60 | * `None` is only added to the output dict for nullable fields that 61 | were set at model initialization. Other fields with value `None` 62 | are ignored. 
63 | """ 64 | excluded_fields: Set[str] = set([]) 65 | 66 | _dict = self.model_dump( 67 | by_alias=True, 68 | exclude=excluded_fields, 69 | exclude_none=True, 70 | ) 71 | return _dict 72 | 73 | @classmethod 74 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 75 | """Create an instance of RerunStepRunRequest from a dict""" 76 | if obj is None: 77 | return None 78 | 79 | if not isinstance(obj, dict): 80 | return cls.model_validate(obj) 81 | 82 | _obj = cls.model_validate({"input": obj.get("input")}) 83 | return _obj 84 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/scheduled_run_status.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class ScheduledRunStatus(str, Enum): 24 | """ 25 | ScheduledRunStatus 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | PENDING = "PENDING" 32 | RUNNING = "RUNNING" 33 | SUCCEEDED = "SUCCEEDED" 34 | FAILED = "FAILED" 35 | CANCELLED = "CANCELLED" 36 | QUEUED = "QUEUED" 37 | SCHEDULED = "SCHEDULED" 38 | 39 | @classmethod 40 | def from_json(cls, json_str: str) -> Self: 41 | """Create an instance of ScheduledRunStatus from a JSON string""" 42 | return cls(json.loads(json_str)) 43 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/scheduled_workflows_method.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of 
the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class ScheduledWorkflowsMethod(str, Enum): 24 | """ 25 | ScheduledWorkflowsMethod 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | DEFAULT = "DEFAULT" 32 | API = "API" 33 | 34 | @classmethod 35 | def from_json(cls, json_str: str) -> Self: 36 | """Create an instance of ScheduledWorkflowsMethod from a JSON string""" 37 | return cls(json.loads(json_str)) 38 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/scheduled_workflows_order_by_field.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class ScheduledWorkflowsOrderByField(str, Enum): 24 | """ 25 | ScheduledWorkflowsOrderByField 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | TRIGGERAT = "triggerAt" 32 | CREATEDAT = "createdAt" 33 | 34 | @classmethod 35 | def from_json(cls, json_str: str) -> Self: 36 | """Create an instance of ScheduledWorkflowsOrderByField from a JSON string""" 37 | return cls(json.loads(json_str)) 38 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/step_run_event_reason.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class StepRunEventReason(str, Enum): 24 | """ 25 | StepRunEventReason 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | REQUEUED_NO_WORKER = "REQUEUED_NO_WORKER" 32 | REQUEUED_RATE_LIMIT = "REQUEUED_RATE_LIMIT" 33 | SCHEDULING_TIMED_OUT = "SCHEDULING_TIMED_OUT" 34 | ASSIGNED = "ASSIGNED" 35 | STARTED = "STARTED" 36 | ACKNOWLEDGED = "ACKNOWLEDGED" 37 | FINISHED = "FINISHED" 38 | FAILED = "FAILED" 39 | RETRYING = "RETRYING" 40 | CANCELLED = "CANCELLED" 41 | TIMEOUT_REFRESHED = "TIMEOUT_REFRESHED" 42 | REASSIGNED = "REASSIGNED" 43 | TIMED_OUT = "TIMED_OUT" 44 | SLOT_RELEASED = "SLOT_RELEASED" 45 | RETRIED_BY_USER = "RETRIED_BY_USER" 46 | WORKFLOW_RUN_GROUP_KEY_SUCCEEDED = "WORKFLOW_RUN_GROUP_KEY_SUCCEEDED" 47 | WORKFLOW_RUN_GROUP_KEY_FAILED = "WORKFLOW_RUN_GROUP_KEY_FAILED" 48 | 49 | @classmethod 50 | def from_json(cls, json_str: str) -> Self: 51 | """Create an instance of StepRunEventReason from a JSON string""" 52 | return cls(json.loads(json_str)) 53 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/step_run_event_severity.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class StepRunEventSeverity(str, Enum): 24 | """ 25 | StepRunEventSeverity 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | INFO = "INFO" 32 | WARNING = "WARNING" 33 | CRITICAL = "CRITICAL" 34 | 35 | @classmethod 36 | def from_json(cls, json_str: str) -> Self: 37 | """Create an instance of StepRunEventSeverity from a JSON string""" 38 | return cls(json.loads(json_str)) 39 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/step_run_status.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class StepRunStatus(str, Enum): 24 | """ 25 | StepRunStatus 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | PENDING = "PENDING" 32 | PENDING_ASSIGNMENT = "PENDING_ASSIGNMENT" 33 | ASSIGNED = "ASSIGNED" 34 | RUNNING = "RUNNING" 35 | SUCCEEDED = "SUCCEEDED" 36 | FAILED = "FAILED" 37 | CANCELLED = "CANCELLED" 38 | CANCELLING = "CANCELLING" 39 | BACKOFF = "BACKOFF" 40 | 41 | @classmethod 42 | def from_json(cls, json_str: str) -> Self: 43 | """Create an instance of StepRunStatus from a JSON string""" 44 | return cls(json.loads(json_str)) 45 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/tenant_member_role.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class TenantMemberRole(str, Enum): 24 | """ 25 | TenantMemberRole 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | OWNER = "OWNER" 32 | ADMIN = "ADMIN" 33 | MEMBER = "MEMBER" 34 | 35 | @classmethod 36 | def from_json(cls, json_str: str) -> Self: 37 | """Create an instance of TenantMemberRole from a JSON string""" 38 | return cls(json.loads(json_str)) 39 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/tenant_resource.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class TenantResource(str, Enum): 24 | """ 25 | TenantResource 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | WORKER = "WORKER" 32 | EVENT = "EVENT" 33 | WORKFLOW_RUN = "WORKFLOW_RUN" 34 | CRON = "CRON" 35 | SCHEDULE = "SCHEDULE" 36 | 37 | @classmethod 38 | def from_json(cls, json_str: str) -> Self: 39 | """Create an instance of TenantResource from a JSON string""" 40 | return cls(json.loads(json_str)) 41 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/tenant_step_run_queue_metrics.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator 
(https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel, ConfigDict, StrictInt 23 | from typing_extensions import Self 24 | 25 | 26 | class TenantStepRunQueueMetrics(BaseModel): 27 | """ 28 | TenantStepRunQueueMetrics 29 | """ # noqa: E501 30 | 31 | queues: Optional[Dict[str, StrictInt]] = None 32 | __properties: ClassVar[List[str]] = ["queues"] 33 | 34 | model_config = ConfigDict( 35 | populate_by_name=True, 36 | validate_assignment=True, 37 | protected_namespaces=(), 38 | ) 39 | 40 | def to_str(self) -> str: 41 | """Returns the string representation of the model using alias""" 42 | return pprint.pformat(self.model_dump(by_alias=True)) 43 | 44 | def to_json(self) -> str: 45 | """Returns the JSON representation of the model using alias""" 46 | # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead 47 | return json.dumps(self.to_dict()) 48 | 49 | @classmethod 50 | def from_json(cls, json_str: str) -> Optional[Self]: 51 | """Create an instance of TenantStepRunQueueMetrics from a JSON string""" 52 | return cls.from_dict(json.loads(json_str)) 53 | 54 | def to_dict(self) -> Dict[str, Any]: 55 | """Return the dictionary representation of the model using alias. 56 | 57 | This has the following differences from calling pydantic's 58 | `self.model_dump(by_alias=True)`: 59 | 60 | * `None` is only added to the output dict for nullable fields that 61 | were set at model initialization. Other fields with value `None` 62 | are ignored. 
63 | """ 64 | excluded_fields: Set[str] = set([]) 65 | 66 | _dict = self.model_dump( 67 | by_alias=True, 68 | exclude=excluded_fields, 69 | exclude_none=True, 70 | ) 71 | return _dict 72 | 73 | @classmethod 74 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 75 | """Create an instance of TenantStepRunQueueMetrics from a dict""" 76 | if obj is None: 77 | return None 78 | 79 | if not isinstance(obj, dict): 80 | return cls.model_validate(obj) 81 | 82 | _obj = cls.model_validate({"queues": obj.get("queues")}) 83 | return _obj 84 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/update_tenant_alert_email_group_request.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel, ConfigDict, Field, StrictStr 23 | from typing_extensions import Self 24 | 25 | 26 | class UpdateTenantAlertEmailGroupRequest(BaseModel): 27 | """ 28 | UpdateTenantAlertEmailGroupRequest 29 | """ # noqa: E501 30 | 31 | emails: List[StrictStr] = Field(description="A list of emails for users") 32 | __properties: ClassVar[List[str]] = ["emails"] 33 | 34 | model_config = ConfigDict( 35 | populate_by_name=True, 36 | validate_assignment=True, 37 | protected_namespaces=(), 38 | ) 39 | 40 | def to_str(self) -> str: 41 | """Returns the string representation of the model using alias""" 42 | return pprint.pformat(self.model_dump(by_alias=True)) 43 | 44 | def to_json(self) -> str: 45 | """Returns the JSON representation of the model using alias""" 46 | # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead 47 | return json.dumps(self.to_dict()) 48 | 49 | @classmethod 50 | def from_json(cls, json_str: str) -> Optional[Self]: 51 | """Create an instance of UpdateTenantAlertEmailGroupRequest from a JSON string""" 52 | return cls.from_dict(json.loads(json_str)) 53 | 54 | def to_dict(self) -> Dict[str, Any]: 55 | """Return the dictionary representation of the model using alias. 56 | 57 | This has the following differences from calling pydantic's 58 | `self.model_dump(by_alias=True)`: 59 | 60 | * `None` is only added to the output dict for nullable fields that 61 | were set at model initialization. Other fields with value `None` 62 | are ignored. 
63 | """ 64 | excluded_fields: Set[str] = set([]) 65 | 66 | _dict = self.model_dump( 67 | by_alias=True, 68 | exclude=excluded_fields, 69 | exclude_none=True, 70 | ) 71 | return _dict 72 | 73 | @classmethod 74 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 75 | """Create an instance of UpdateTenantAlertEmailGroupRequest from a dict""" 76 | if obj is None: 77 | return None 78 | 79 | if not isinstance(obj, dict): 80 | return cls.model_validate(obj) 81 | 82 | _obj = cls.model_validate({"emails": obj.get("emails")}) 83 | return _obj 84 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/update_tenant_invite_request.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel, ConfigDict, Field 23 | from typing_extensions import Self 24 | 25 | from hatchet_sdk.clients.rest.models.tenant_member_role import TenantMemberRole 26 | 27 | 28 | class UpdateTenantInviteRequest(BaseModel): 29 | """ 30 | UpdateTenantInviteRequest 31 | """ # noqa: E501 32 | 33 | role: TenantMemberRole = Field(description="The role of the user in the tenant.") 34 | __properties: ClassVar[List[str]] = ["role"] 35 | 36 | model_config = ConfigDict( 37 | populate_by_name=True, 38 | validate_assignment=True, 39 | protected_namespaces=(), 40 | ) 41 | 42 | def to_str(self) -> str: 43 | """Returns the string representation of the model using alias""" 44 | return pprint.pformat(self.model_dump(by_alias=True)) 45 | 46 | def to_json(self) -> str: 47 | """Returns the JSON representation of the model using alias""" 48 | # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead 49 | return json.dumps(self.to_dict()) 50 | 51 | @classmethod 52 | def from_json(cls, json_str: str) -> Optional[Self]: 53 | """Create an instance of UpdateTenantInviteRequest from a JSON string""" 54 | return cls.from_dict(json.loads(json_str)) 55 | 56 | def to_dict(self) -> Dict[str, Any]: 57 | """Return the dictionary representation of the model using alias. 58 | 59 | This has the following differences from calling pydantic's 60 | `self.model_dump(by_alias=True)`: 61 | 62 | * `None` is only added to the output dict for nullable fields that 63 | were set at model initialization. Other fields with value `None` 64 | are ignored. 
65 | """ 66 | excluded_fields: Set[str] = set([]) 67 | 68 | _dict = self.model_dump( 69 | by_alias=True, 70 | exclude=excluded_fields, 71 | exclude_none=True, 72 | ) 73 | return _dict 74 | 75 | @classmethod 76 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 77 | """Create an instance of UpdateTenantInviteRequest from a dict""" 78 | if obj is None: 79 | return None 80 | 81 | if not isinstance(obj, dict): 82 | return cls.model_validate(obj) 83 | 84 | _obj = cls.model_validate({"role": obj.get("role")}) 85 | return _obj 86 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/webhook_worker_request_method.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class WebhookWorkerRequestMethod(str, Enum): 24 | """ 25 | WebhookWorkerRequestMethod 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | GET = "GET" 32 | POST = "POST" 33 | PUT = "PUT" 34 | 35 | @classmethod 36 | def from_json(cls, json_str: str) -> Self: 37 | """Create an instance of WebhookWorkerRequestMethod from a JSON string""" 38 | return cls(json.loads(json_str)) 39 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/worker_runtime_sdks.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class WorkerRuntimeSDKs(str, Enum): 24 | """ 25 | WorkerRuntimeSDKs 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | GOLANG = "GOLANG" 32 | PYTHON = "PYTHON" 33 | TYPESCRIPT = "TYPESCRIPT" 34 | 35 | @classmethod 36 | def from_json(cls, json_str: str) -> Self: 37 | """Create an instance of WorkerRuntimeSDKs from a JSON string""" 38 | return cls(json.loads(json_str)) 39 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/worker_type.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class WorkerType(str, Enum): 24 | """ 25 | WorkerType 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | SELFHOSTED = "SELFHOSTED" 32 | MANAGED = "MANAGED" 33 | WEBHOOK = "WEBHOOK" 34 | 35 | @classmethod 36 | def from_json(cls, json_str: str) -> Self: 37 | """Create an instance of WorkerType from a JSON string""" 38 | return cls(json.loads(json_str)) 39 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/workflow_kind.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class WorkflowKind(str, Enum): 24 | """ 25 | WorkflowKind 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | FUNCTION = "FUNCTION" 32 | DURABLE = "DURABLE" 33 | DAG = "DAG" 34 | 35 | @classmethod 36 | def from_json(cls, json_str: str) -> Self: 37 | """Create an instance of WorkflowKind from a JSON string""" 38 | return cls(json.loads(json_str)) 39 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/workflow_run_order_by_direction.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class 
manually. 12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class WorkflowRunOrderByDirection(str, Enum): 24 | """ 25 | WorkflowRunOrderByDirection 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | ASC = "ASC" 32 | DESC = "DESC" 33 | 34 | @classmethod 35 | def from_json(cls, json_str: str) -> Self: 36 | """Create an instance of WorkflowRunOrderByDirection from a JSON string""" 37 | return cls(json.loads(json_str)) 38 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/workflow_run_order_by_field.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class WorkflowRunOrderByField(str, Enum): 24 | """ 25 | WorkflowRunOrderByField 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | CREATEDAT = "createdAt" 32 | STARTEDAT = "startedAt" 33 | FINISHEDAT = "finishedAt" 34 | DURATION = "duration" 35 | 36 | @classmethod 37 | def from_json(cls, json_str: str) -> Self: 38 | """Create an instance of WorkflowRunOrderByField from a JSON string""" 39 | return cls(json.loads(json_str)) 40 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/workflow_run_status.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | from enum import Enum 19 | 20 | from typing_extensions import Self 21 | 22 | 23 | class WorkflowRunStatus(str, Enum): 24 | """ 25 | WorkflowRunStatus 26 | """ 27 | 28 | """ 29 | allowed enum values 30 | """ 31 | PENDING = "PENDING" 32 | RUNNING = "RUNNING" 33 | SUCCEEDED = "SUCCEEDED" 34 | FAILED = "FAILED" 35 | CANCELLED = "CANCELLED" 36 | QUEUED = "QUEUED" 37 | BACKOFF = "BACKOFF" 38 | 39 | @classmethod 40 | def from_json(cls, json_str: str) -> Self: 41 | """Create an instance of WorkflowRunStatus from a JSON string""" 42 | return cls(json.loads(json_str)) 43 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/workflow_tag.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel, ConfigDict, Field, StrictStr 23 | from typing_extensions import Self 24 | 25 | 26 | class WorkflowTag(BaseModel): 27 | """ 28 | WorkflowTag 29 | """ # noqa: E501 30 | 31 | name: StrictStr = Field(description="The name of the workflow.") 32 | color: StrictStr = Field(description="The description of the workflow.") 33 | __properties: ClassVar[List[str]] = ["name", "color"] 34 | 35 | model_config = ConfigDict( 36 | populate_by_name=True, 37 | validate_assignment=True, 38 | protected_namespaces=(), 39 | ) 40 | 41 | def to_str(self) -> str: 42 | """Returns the string representation of the model using alias""" 43 | return pprint.pformat(self.model_dump(by_alias=True)) 44 | 45 | def to_json(self) -> str: 46 | """Returns the JSON representation of the model using alias""" 47 | # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead 48 | return json.dumps(self.to_dict()) 49 | 50 | @classmethod 51 | def from_json(cls, json_str: str) -> Optional[Self]: 52 | """Create an instance of WorkflowTag from a JSON string""" 53 | return cls.from_dict(json.loads(json_str)) 54 | 55 | def to_dict(self) -> Dict[str, Any]: 56 | """Return the dictionary representation of the model using alias. 57 | 58 | This has the following differences from calling pydantic's 59 | `self.model_dump(by_alias=True)`: 60 | 61 | * `None` is only added to the output dict for nullable fields that 62 | were set at model initialization. Other fields with value `None` 63 | are ignored. 
64 | """ 65 | excluded_fields: Set[str] = set([]) 66 | 67 | _dict = self.model_dump( 68 | by_alias=True, 69 | exclude=excluded_fields, 70 | exclude_none=True, 71 | ) 72 | return _dict 73 | 74 | @classmethod 75 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 76 | """Create an instance of WorkflowTag from a dict""" 77 | if obj is None: 78 | return None 79 | 80 | if not isinstance(obj, dict): 81 | return cls.model_validate(obj) 82 | 83 | _obj = cls.model_validate({"name": obj.get("name"), "color": obj.get("color")}) 84 | return _obj 85 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/workflow_trigger_cron_ref.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel, ConfigDict, StrictStr 23 | from typing_extensions import Self 24 | 25 | 26 | class WorkflowTriggerCronRef(BaseModel): 27 | """ 28 | WorkflowTriggerCronRef 29 | """ # noqa: E501 30 | 31 | parent_id: Optional[StrictStr] = None 32 | cron: Optional[StrictStr] = None 33 | __properties: ClassVar[List[str]] = ["parent_id", "cron"] 34 | 35 | model_config = ConfigDict( 36 | populate_by_name=True, 37 | validate_assignment=True, 38 | protected_namespaces=(), 39 | ) 40 | 41 | def to_str(self) -> str: 42 | """Returns the string representation of the model using alias""" 43 | return pprint.pformat(self.model_dump(by_alias=True)) 44 | 45 | def to_json(self) -> str: 46 | """Returns the JSON representation of the model using alias""" 47 | # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead 48 | return json.dumps(self.to_dict()) 49 | 50 | @classmethod 51 | def from_json(cls, json_str: str) -> Optional[Self]: 52 | """Create an instance of WorkflowTriggerCronRef from a JSON string""" 53 | return cls.from_dict(json.loads(json_str)) 54 | 55 | def to_dict(self) -> Dict[str, Any]: 56 | """Return the dictionary representation of the model using alias. 57 | 58 | This has the following differences from calling pydantic's 59 | `self.model_dump(by_alias=True)`: 60 | 61 | * `None` is only added to the output dict for nullable fields that 62 | were set at model initialization. Other fields with value `None` 63 | are ignored. 
64 | """ 65 | excluded_fields: Set[str] = set([]) 66 | 67 | _dict = self.model_dump( 68 | by_alias=True, 69 | exclude=excluded_fields, 70 | exclude_none=True, 71 | ) 72 | return _dict 73 | 74 | @classmethod 75 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 76 | """Create an instance of WorkflowTriggerCronRef from a dict""" 77 | if obj is None: 78 | return None 79 | 80 | if not isinstance(obj, dict): 81 | return cls.model_validate(obj) 82 | 83 | _obj = cls.model_validate( 84 | {"parent_id": obj.get("parent_id"), "cron": obj.get("cron")} 85 | ) 86 | return _obj 87 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/workflow_trigger_event_ref.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel, ConfigDict, StrictStr 23 | from typing_extensions import Self 24 | 25 | 26 | class WorkflowTriggerEventRef(BaseModel): 27 | """ 28 | WorkflowTriggerEventRef 29 | """ # noqa: E501 30 | 31 | parent_id: Optional[StrictStr] = None 32 | event_key: Optional[StrictStr] = None 33 | __properties: ClassVar[List[str]] = ["parent_id", "event_key"] 34 | 35 | model_config = ConfigDict( 36 | populate_by_name=True, 37 | validate_assignment=True, 38 | protected_namespaces=(), 39 | ) 40 | 41 | def to_str(self) -> str: 42 | """Returns the string representation of the model using alias""" 43 | return pprint.pformat(self.model_dump(by_alias=True)) 44 | 45 | def to_json(self) -> str: 46 | """Returns the JSON representation of the model using alias""" 47 | # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead 48 | return json.dumps(self.to_dict()) 49 | 50 | @classmethod 51 | def from_json(cls, json_str: str) -> Optional[Self]: 52 | """Create an instance of WorkflowTriggerEventRef from a JSON string""" 53 | return cls.from_dict(json.loads(json_str)) 54 | 55 | def to_dict(self) -> Dict[str, Any]: 56 | """Return the dictionary representation of the model using alias. 57 | 58 | This has the following differences from calling pydantic's 59 | `self.model_dump(by_alias=True)`: 60 | 61 | * `None` is only added to the output dict for nullable fields that 62 | were set at model initialization. Other fields with value `None` 63 | are ignored. 
64 | """ 65 | excluded_fields: Set[str] = set([]) 66 | 67 | _dict = self.model_dump( 68 | by_alias=True, 69 | exclude=excluded_fields, 70 | exclude_none=True, 71 | ) 72 | return _dict 73 | 74 | @classmethod 75 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 76 | """Create an instance of WorkflowTriggerEventRef from a dict""" 77 | if obj is None: 78 | return None 79 | 80 | if not isinstance(obj, dict): 81 | return cls.model_validate(obj) 82 | 83 | _obj = cls.model_validate( 84 | {"parent_id": obj.get("parent_id"), "event_key": obj.get("event_key")} 85 | ) 86 | return _obj 87 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/models/workflow_update_request.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | Hatchet API 5 | 6 | The Hatchet API 7 | 8 | The version of the OpenAPI document: 1.0.0 9 | Generated by OpenAPI Generator (https://openapi-generator.tech) 10 | 11 | Do not edit the class manually. 
12 | """ # noqa: E501 13 | 14 | 15 | from __future__ import annotations 16 | 17 | import json 18 | import pprint 19 | import re # noqa: F401 20 | from typing import Any, ClassVar, Dict, List, Optional, Set 21 | 22 | from pydantic import BaseModel, ConfigDict, Field, StrictBool 23 | from typing_extensions import Self 24 | 25 | 26 | class WorkflowUpdateRequest(BaseModel): 27 | """ 28 | WorkflowUpdateRequest 29 | """ # noqa: E501 30 | 31 | is_paused: Optional[StrictBool] = Field( 32 | default=None, description="Whether the workflow is paused.", alias="isPaused" 33 | ) 34 | __properties: ClassVar[List[str]] = ["isPaused"] 35 | 36 | model_config = ConfigDict( 37 | populate_by_name=True, 38 | validate_assignment=True, 39 | protected_namespaces=(), 40 | ) 41 | 42 | def to_str(self) -> str: 43 | """Returns the string representation of the model using alias""" 44 | return pprint.pformat(self.model_dump(by_alias=True)) 45 | 46 | def to_json(self) -> str: 47 | """Returns the JSON representation of the model using alias""" 48 | # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead 49 | return json.dumps(self.to_dict()) 50 | 51 | @classmethod 52 | def from_json(cls, json_str: str) -> Optional[Self]: 53 | """Create an instance of WorkflowUpdateRequest from a JSON string""" 54 | return cls.from_dict(json.loads(json_str)) 55 | 56 | def to_dict(self) -> Dict[str, Any]: 57 | """Return the dictionary representation of the model using alias. 58 | 59 | This has the following differences from calling pydantic's 60 | `self.model_dump(by_alias=True)`: 61 | 62 | * `None` is only added to the output dict for nullable fields that 63 | were set at model initialization. Other fields with value `None` 64 | are ignored. 
65 | """ 66 | excluded_fields: Set[str] = set([]) 67 | 68 | _dict = self.model_dump( 69 | by_alias=True, 70 | exclude=excluded_fields, 71 | exclude_none=True, 72 | ) 73 | return _dict 74 | 75 | @classmethod 76 | def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: 77 | """Create an instance of WorkflowUpdateRequest from a dict""" 78 | if obj is None: 79 | return None 80 | 81 | if not isinstance(obj, dict): 82 | return cls.model_validate(obj) 83 | 84 | _obj = cls.model_validate({"isPaused": obj.get("isPaused")}) 85 | return _obj 86 | -------------------------------------------------------------------------------- /hatchet_sdk/clients/rest/tenacity_utils.py: -------------------------------------------------------------------------------- 1 | from typing import Callable, ParamSpec, TypeVar 2 | 3 | import grpc 4 | import tenacity 5 | 6 | from hatchet_sdk.logger import logger 7 | 8 | P = ParamSpec("P") 9 | R = TypeVar("R") 10 | 11 | 12 | def tenacity_retry(func: Callable[P, R]) -> Callable[P, R]: 13 | return tenacity.retry( 14 | reraise=True, 15 | wait=tenacity.wait_exponential_jitter(), 16 | stop=tenacity.stop_after_attempt(5), 17 | before_sleep=tenacity_alert_retry, 18 | retry=tenacity.retry_if_exception(tenacity_should_retry), 19 | )(func) 20 | 21 | 22 | def tenacity_alert_retry(retry_state: tenacity.RetryCallState) -> None: 23 | """Called between tenacity retries.""" 24 | logger.debug( 25 | f"Retrying {retry_state.fn}: attempt " 26 | f"{retry_state.attempt_number} ended with: {retry_state.outcome}", 27 | ) 28 | 29 | 30 | def tenacity_should_retry(ex: Exception) -> bool: 31 | if isinstance(ex, (grpc.aio.AioRpcError, grpc.RpcError)): 32 | if ex.code() in [ 33 | grpc.StatusCode.UNIMPLEMENTED, 34 | grpc.StatusCode.NOT_FOUND, 35 | ]: 36 | return False 37 | return True 38 | else: 39 | return False 40 | -------------------------------------------------------------------------------- /hatchet_sdk/connection.py: 
# ------------------------------------------------------------------------------
import os
from typing import TYPE_CHECKING, Any

import grpc

if TYPE_CHECKING:
    from hatchet_sdk.loader import ClientConfig


def _read_file(path: str) -> bytes:
    """Read a credential file fully, closing the handle deterministically.

    BUGFIX: the original used ``open(path, "rb").read()`` which leaks the
    file handle until the GC happens to collect it.
    """
    with open(path, "rb") as f:
        return f.read()


def new_conn(config: "ClientConfig", aio: bool = False):
    """Create a gRPC channel (sync or asyncio) configured from *config*.

    :param config: client configuration carrying host/port and TLS settings
    :param aio: when True, build a ``grpc.aio`` channel instead of a sync one
    :return: an insecure or secure channel, depending on ``tls_strategy``
    """
    credentials: grpc.ChannelCredentials | None = None

    # load channel credentials
    if config.tls_config.tls_strategy == "tls":
        root: Any | None = None

        if config.tls_config.ca_file:
            root = _read_file(config.tls_config.ca_file)

        credentials = grpc.ssl_channel_credentials(root_certificates=root)
    elif config.tls_config.tls_strategy == "mtls":
        root = _read_file(config.tls_config.ca_file)
        private_key = _read_file(config.tls_config.key_file)
        certificate_chain = _read_file(config.tls_config.cert_file)

        credentials = grpc.ssl_channel_credentials(
            root_certificates=root,
            private_key=private_key,
            certificate_chain=certificate_chain,
        )

    start = grpc if not aio else grpc.aio

    channel_options = [
        ("grpc.max_send_message_length", config.grpc_max_send_message_length),
        ("grpc.max_receive_message_length", config.grpc_max_recv_message_length),
        ("grpc.keepalive_time_ms", 10 * 1000),
        ("grpc.keepalive_timeout_ms", 60 * 1000),
        ("grpc.client_idle_timeout_ms", 60 * 1000),
        ("grpc.http2.max_pings_without_data", 0),
        ("grpc.keepalive_permit_without_calls", 1),
    ]

    # Set environment variable to disable fork support. Reference: https://github.com/grpc/grpc/issues/28557
    # When steps execute via os.fork, we see `TSI_DATA_CORRUPTED` errors.
    os.environ["GRPC_ENABLE_FORK_SUPPORT"] = "False"

    if config.tls_config.tls_strategy == "none":
        conn = start.insecure_channel(
            target=config.host_port,
            options=channel_options,
        )
    else:
        channel_options.append(
            ("grpc.ssl_target_name_override", config.tls_config.server_name)
        )

        conn = start.secure_channel(
            target=config.host_port,
            credentials=credentials,
            options=channel_options,
        )
    return conn


# /hatchet_sdk/context/__init__.py
# ------------------------------------------------------------------------------
from .context import Context


# /hatchet_sdk/context/worker_context.py
# ------------------------------------------------------------------------------
from hatchet_sdk.clients.dispatcher.dispatcher import DispatcherClient


class WorkerContext:
    """Per-worker runtime context: the worker id and its affinity labels.

    BUGFIX: ``_worker_id``, ``_registered_workflow_names`` and ``_labels``
    were mutable class-level attributes, shared across all instances; they
    are now initialized per instance in ``__init__``.
    """

    def __init__(self, labels: dict[str, str | int], client: DispatcherClient):
        self._worker_id: str | None = None
        self._registered_workflow_names: list[str] = []
        self._labels: dict[str, str | int] = labels
        self.client = client

    def labels(self) -> dict[str, str | int]:
        """Return the worker's current labels."""
        return self._labels

    def upsert_labels(self, labels: dict[str, str | int]) -> None:
        """Synchronously push label updates to the server, then mirror locally."""
        self.client.upsert_worker_labels(self._worker_id, labels)
        self._labels.update(labels)

    async def async_upsert_labels(self, labels: dict[str, str | int]) -> None:
        """Async variant of :meth:`upsert_labels`."""
        await self.client.async_upsert_worker_labels(self._worker_id, labels)
        self._labels.update(labels)

    def id(self) -> str | None:
        """Return the worker id assigned at registration (None before then)."""
        return self._worker_id

    # def has_workflow(self, workflow_name: str):
    #     return workflow_name in self._registered_workflow_names
# ------------------------------------------------------------------------------
/hatchet_sdk/labels.py: -------------------------------------------------------------------------------- 1 | from typing import TypedDict 2 | 3 | 4 | class DesiredWorkerLabel(TypedDict, total=False): 5 | value: str | int 6 | required: bool | None = None 7 | weight: int | None = None 8 | comparator: int | None = ( 9 | None # _ClassVar[WorkerLabelComparator] TODO figure out type 10 | ) 11 | -------------------------------------------------------------------------------- /hatchet_sdk/logger.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import sys 3 | 4 | # Create a named logger 5 | logger = logging.getLogger("hatchet") 6 | logger.setLevel(logging.INFO) 7 | 8 | handler = logging.StreamHandler(sys.stdout) 9 | formatter = logging.Formatter("[%(levelname)s]\t🪓 -- %(asctime)s - %(message)s") 10 | handler.setFormatter(formatter) 11 | logger.addHandler(handler) 12 | 13 | logger.propagate = False 14 | -------------------------------------------------------------------------------- /hatchet_sdk/metadata.py: -------------------------------------------------------------------------------- 1 | def get_metadata(token: str): 2 | return [("authorization", "bearer " + token)] 3 | -------------------------------------------------------------------------------- /hatchet_sdk/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hatchet-dev/hatchet-python/5131c7505dadb0e44b798b27086808f59dda92fd/hatchet_sdk/py.typed -------------------------------------------------------------------------------- /hatchet_sdk/semver.py: -------------------------------------------------------------------------------- 1 | def bump_minor_version(version: str) -> str: 2 | """ 3 | Bumps the minor version of a semantic version string. NOTE this doesn't follow full semver, 4 | missing the build metadata and pre-release version. 
5 | 6 | :param version: A semantic version string in the format major.minor.patch 7 | :return: A string with the minor version bumped and patch version reset to 0 8 | :raises ValueError: If the input is not a valid semantic version string 9 | """ 10 | # if it starts with a v, remove it 11 | had_v = False 12 | if version.startswith("v"): 13 | version = version[1:] 14 | had_v = True 15 | 16 | parts = version.split(".") 17 | if len(parts) != 3: 18 | raise ValueError(f"Invalid semantic version: {version}") 19 | 20 | try: 21 | major, minor, _ = map(int, parts) 22 | except ValueError: 23 | raise ValueError(f"Invalid semantic version: {version}") 24 | 25 | new_minor = minor + 1 26 | new_version = f"{major}.{new_minor}.0" 27 | 28 | if had_v: 29 | new_version = "v" + new_version 30 | return new_version 31 | -------------------------------------------------------------------------------- /hatchet_sdk/token.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import json 3 | 4 | 5 | def get_tenant_id_from_jwt(token: str) -> str: 6 | claims = extract_claims_from_jwt(token) 7 | 8 | return claims.get("sub") 9 | 10 | 11 | def get_addresses_from_jwt(token: str) -> (str, str): 12 | claims = extract_claims_from_jwt(token) 13 | 14 | return claims.get("server_url"), claims.get("grpc_broadcast_address") 15 | 16 | 17 | def extract_claims_from_jwt(token: str): 18 | parts = token.split(".") 19 | if len(parts) != 3: 20 | raise ValueError("Invalid token format") 21 | 22 | claims_part = parts[1] 23 | claims_part += "=" * ((4 - len(claims_part) % 4) % 4) # Padding for base64 decoding 24 | claims_data = base64.urlsafe_b64decode(claims_part) 25 | claims = json.loads(claims_data) 26 | 27 | return claims 28 | -------------------------------------------------------------------------------- /hatchet_sdk/utils/backoff.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import random 3 | 4 | 
async def exp_backoff_sleep(attempt: int, max_sleep_time: float = 5) -> None:
    """Sleep for an exponentially increasing, jittered interval.

    :param attempt: zero-based retry attempt number
    :param max_sleep_time: upper bound on the sleep, in seconds
    """
    base_time = 0.1  # starting sleep time in seconds (100 milliseconds)
    jitter = random.uniform(0, base_time)  # add random jitter
    sleep_time = min(base_time * (2**attempt) + jitter, max_sleep_time)
    await asyncio.sleep(sleep_time)


# /hatchet_sdk/utils/serialization.py
# ------------------------------------------------------------------------------
from typing import Any


def flatten(xs: dict[str, Any], parent_key: str, separator: str) -> dict[str, Any]:
    """Recursively flatten nested dicts, joining key paths with *separator*.

    e.g. ``flatten({"a": {"b": 1}}, "", ".") == {"a.b": 1}``

    :param xs: the (possibly nested) mapping to flatten
    :param parent_key: prefix for all keys ("" at the top level)
    :param separator: string placed between nested key components
    """
    if not xs:
        return {}

    items: list[tuple[str, Any]] = []

    for k, v in xs.items():
        new_key = parent_key + separator + k if parent_key else k

        if isinstance(v, dict):
            items.extend(flatten(v, new_key, separator).items())
        else:
            items.append((new_key, v))

    return dict(items)


# /hatchet_sdk/utils/types.py
# ------------------------------------------------------------------------------
from typing import Type

from pydantic import BaseModel


class WorkflowValidator(BaseModel):
    """Optional pydantic models used to validate a workflow's input and step output."""

    workflow_input: Type[BaseModel] | None = None
    step_output: Type[BaseModel] | None = None


# /hatchet_sdk/utils/typing.py
# ------------------------------------------------------------------------------
from typing import Any, Type, TypeGuard, TypeVar

from pydantic import BaseModel

T = TypeVar("T", bound=BaseModel)


def is_basemodel_subclass(model: Any) -> TypeGuard[Type[BaseModel]]:
    """Return True when *model* is a class subclassing pydantic's ``BaseModel``.

    BUGFIX: annotated as ``TypeGuard[Type[BaseModel]]`` so callers get static
    narrowing — the ``TypeGuard`` import was previously unused and the
    function returned a bare ``bool``.
    """
    try:
        return issubclass(model, BaseModel)
    except TypeError:
        # issubclass raises TypeError when *model* is not a class at all
        return False


# ------------------------------------------------------------------------------
# /hatchet_sdk/v2/concurrency.py:
from typing import Any, Callable

from hatchet_sdk.context.context import Context
from hatchet_sdk.contracts.workflows_pb2 import (  # type: ignore[attr-defined]
    ConcurrencyLimitStrategy,
)


class ConcurrencyFunction:
    """Wraps a user-supplied function that computes the concurrency group key
    for a workflow run, together with its limit configuration."""

    def __init__(
        self,
        func: Callable[[Context], str],
        name: str = "concurrency",
        max_runs: int = 1,
        limit_strategy: ConcurrencyLimitStrategy = ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,
    ):
        self.func = func
        self.name = name
        self.max_runs = max_runs
        self.limit_strategy = limit_strategy
        # Adjusted later via set_namespace(); "default" until then.
        self.namespace = "default"

    def set_namespace(self, namespace: str) -> None:
        """Record the namespace used to qualify this function's action name."""
        self.namespace = namespace

    def get_action_name(self) -> str:
        """Return the fully-qualified action name (``namespace:name``)."""
        return f"{self.namespace}:{self.name}"

    def __call__(self, *args: Any, **kwargs: Any) -> str:
        # Delegate straight to the wrapped key function.
        return self.func(*args, **kwargs)

    def __str__(self) -> str:
        return f"{self.name}({self.max_runs})"

    # repr and str render identically for this wrapper.
    __repr__ = __str__


def concurrency(
    name: str = "",
    max_runs: int = 1,
    limit_strategy: ConcurrencyLimitStrategy = ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,
) -> Callable[[Callable[[Context], str]], ConcurrencyFunction]:
    """Decorator form: ``@concurrency(...)`` wraps the decorated key function
    in a :class:`ConcurrencyFunction` with the given configuration."""

    def inner(func: Callable[[Context], str]) -> ConcurrencyFunction:
        return ConcurrencyFunction(func, name, max_runs, limit_strategy)

    return inner


# --- hatchet_sdk/worker/__init__.py ---
from .worker import Worker, WorkerStartOptions, WorkerStatus
# --- hatchet_sdk/worker/runner/utils/capture_logs.py ---
import contextvars
import functools
import logging
from concurrent.futures import ThreadPoolExecutor
from io import StringIO
from typing import Any, Coroutine

from hatchet_sdk import logger
from hatchet_sdk.clients.events import EventClient

# Context variables carrying the ids of the workflow/step run executing in the
# current context, so log records can be tagged with them.
wr: contextvars.ContextVar[str | None] = contextvars.ContextVar(
    "workflow_run_id", default=None
)
sr: contextvars.ContextVar[str | None] = contextvars.ContextVar(
    "step_run_id", default=None
)


def copy_context_vars(ctx_vars, func, *args, **kwargs):
    """Restore captured context variables, then invoke ``func``.

    Useful when hopping to another thread/executor, where contextvars do not
    propagate automatically.
    """
    for var, value in ctx_vars:
        var.set(value)
    return func(*args, **kwargs)


class InjectingFilter(logging.Filter):
    """Stamps every record with the current workflow/step run ids."""

    # For some reason, only the InjectingFilter has access to the contextvars
    # method sr.get(), otherwise we would use emit within the CustomLogHandler
    def filter(self, record):
        record.workflow_run_id = wr.get()
        record.step_run_id = sr.get()
        return True


class CustomLogHandler(logging.StreamHandler):
    """StreamHandler that also forwards each record to the Hatchet event API."""

    def __init__(self, event_client: EventClient, stream=None):
        super().__init__(stream)
        # A single worker keeps forwarded log lines in emission order.
        self.logger_thread_pool = ThreadPoolExecutor(max_workers=1)
        self.event_client = event_client

    def _log(self, line: str, step_run_id: str | None):
        try:
            # Records emitted outside a step run carry no id; nothing to send.
            if not step_run_id:
                return

            self.event_client.log(message=line, step_run_id=step_run_id)
        except Exception as e:
            logger.error(f"Error logging: {e}")

    def emit(self, record):
        super().emit(record)

        log_entry = self.format(record)
        # Fix: records only carry step_run_id when InjectingFilter is attached;
        # fall back to None instead of raising AttributeError.
        self.logger_thread_pool.submit(
            self._log, log_entry, getattr(record, "step_run_id", None)
        )


def capture_logs(
    logger: logging.Logger,
    event_client: EventClient,
    func: Coroutine[Any, Any, Any],
):
    """Wrap async ``func`` so logs emitted while it runs are forwarded to Hatchet.

    Attaches a :class:`CustomLogHandler` for the duration of the call and
    always detaches it afterwards, even on failure.

    :raises Exception: If no logger is configured on the client.
    """

    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        if not logger:
            raise Exception("No logger configured on client")

        log_stream = StringIO()
        custom_handler = CustomLogHandler(event_client, log_stream)
        custom_handler.setLevel(logging.INFO)
        custom_handler.addFilter(InjectingFilter())
        logger.addHandler(custom_handler)

        try:
            result = await func(*args, **kwargs)
        finally:
            # Clean up regardless of whether func raised.
            custom_handler.flush()
            logger.removeHandler(custom_handler)
            log_stream.close()

        return result

    return wrapper


# --- hatchet_sdk/worker/runner/utils/error_with_traceback.py ---
import traceback


def errorWithTraceback(message: str, e: Exception):
    """Return ``message`` followed by the full formatted traceback of ``e``.

    (camelCase name kept for backward compatibility with existing callers.)
    """
    trace = "".join(traceback.format_exception(type(e), e, e.__traceback__))
    return f"{message}\n{trace}"


# --- hatchet_sdk/workflow_run.py ---
import asyncio
from typing import Any, Coroutine, Generic, Optional, TypedDict, TypeVar

from hatchet_sdk.clients.run_event_listener import (
    RunEventListener,
    RunEventListenerClient,
)
from hatchet_sdk.clients.workflow_listener import PooledWorkflowRunListener
from hatchet_sdk.utils.aio_utils import EventLoopThread, get_active_event_loop


class WorkflowRunRef:
    """Handle to a triggered workflow run, exposing streaming and result access."""

    # id of the run this handle refers to
    workflow_run_id: str

    def __init__(
        self,
        workflow_run_id: str,
        workflow_listener: PooledWorkflowRunListener,
        workflow_run_event_listener: RunEventListenerClient,
    ):
        self.workflow_run_id = workflow_run_id
        self.workflow_listener = workflow_listener
        self.workflow_run_event_listener = workflow_run_event_listener

    def __str__(self):
        return self.workflow_run_id
29 | return self.workflow_run_event_listener.stream(self.workflow_run_id) 30 | 31 | def result(self) -> Coroutine: 32 | return self.workflow_listener.result(self.workflow_run_id) 33 | 34 | def sync_result(self) -> dict: 35 | coro = self.workflow_listener.result(self.workflow_run_id) 36 | loop = get_active_event_loop() 37 | 38 | if loop is None: 39 | loop = asyncio.new_event_loop() 40 | asyncio.set_event_loop(loop) 41 | try: 42 | return loop.run_until_complete(coro) 43 | finally: 44 | asyncio.set_event_loop(None) 45 | else: 46 | return loop.run_until_complete(coro) 47 | 48 | 49 | T = TypeVar("T") 50 | 51 | 52 | class RunRef(WorkflowRunRef, Generic[T]): 53 | async def result(self) -> T: 54 | res = await self.workflow_listener.result(self.workflow_run_id) 55 | 56 | if len(res) == 1: 57 | return list(res.values())[0] 58 | 59 | return res 60 | -------------------------------------------------------------------------------- /hot-reload.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | if [ -z "$1" ]; then 4 | script="simple" 5 | else 6 | script="$1" 7 | fi 8 | 9 | watchmedo auto-restart --recursive --patterns="*.py" -- poetry run $script 10 | -------------------------------------------------------------------------------- /lint.sh: -------------------------------------------------------------------------------- 1 | poetry run black . --color 2 | poetry run isort . 3 | poetry run mypy --config-file=pyproject.toml 4 | -------------------------------------------------------------------------------- /publish.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # This scripts generates and publishes the python package. 
# The Poetry PyPI token must be present in the environment before publishing.
if [ -z "${POETRY_PYPI_TOKEN_PYPI}" ]; then
    echo "Please set POETRY_PYPI_TOKEN_PYPI variable"
    exit 1
fi

# Build the distribution artifacts, then upload them to PyPI.
poetry build
poetry publish