├── .eslintrc.json ├── .github ├── dependabot.yml └── workflows │ ├── auto-merge.yml │ ├── lint.yml │ ├── publish-alpha.yml │ ├── publish.yml │ └── tests.yml ├── .gitignore ├── .gitmodules ├── .prettierrc.json ├── README.md ├── branch-sync.sh ├── generate-protoc.sh ├── jest.config.ts ├── package.json ├── pnpm-lock.yaml ├── src ├── clients │ ├── admin │ │ ├── admin-client.test.ts │ │ ├── admin-client.ts │ │ └── index.ts │ ├── dispatcher │ │ ├── action-listener.test.ts │ │ ├── action-listener.ts │ │ ├── dispatcher-client.test.ts │ │ ├── dispatcher-client.ts │ │ └── heartbeat │ │ │ ├── heartbeat-controller.ts │ │ │ └── heartbeat-worker.ts │ ├── event │ │ ├── event-client.test.ts │ │ └── event-client.ts │ ├── hatchet-client │ │ ├── client-config.ts │ │ ├── features │ │ │ ├── cron-client.test.ts │ │ │ ├── cron-client.ts │ │ │ └── schedule-client.ts │ │ ├── fixtures │ │ │ ├── .hatchet-invalid.yaml │ │ │ └── .hatchet.yaml │ │ ├── hatchet-client.test.ts │ │ ├── hatchet-client.ts │ │ ├── hatchet-logger.ts │ │ └── index.ts │ ├── listener │ │ ├── child-listener-client.ts │ │ └── listener-client.ts │ ├── rest │ │ ├── api.ts │ │ ├── generated │ │ │ ├── Api.ts │ │ │ ├── data-contracts.ts │ │ │ └── http-client.ts │ │ └── index.ts │ └── worker │ │ ├── handler.ts │ │ ├── index.ts │ │ ├── worker.test.ts │ │ └── worker.ts ├── examples │ ├── affinity-workers.ts │ ├── api.ts │ ├── bulk-fanout-trigger.ts │ ├── bulk-fanout-worker.ts │ ├── bulk-trigger.ts │ ├── byo-logger.ts │ ├── concurrency │ │ ├── cancel-in-progress │ │ │ ├── concurrency-event.ts │ │ │ └── concurrency-worker.ts │ │ └── group-round-robin │ │ │ ├── concurrency-event.ts │ │ │ ├── concurrency-worker-expression.ts │ │ │ └── concurrency-worker-key-fn.ts │ ├── crons │ │ ├── cron-worker.ts │ │ ├── cron.e2e.ts │ │ └── programatic-crons.ts │ ├── dag-worker.ts │ ├── example-event-with-results.ts │ ├── example-event.ts │ ├── fanout-worker.e2e.ts │ ├── fanout-worker.ts │ ├── logger.ts │ ├── manual-trigger.ts │ ├── multi-workflow.ts 
│ ├── namespaced-worker.e2e.ts │ ├── namespaced-worker.ts │ ├── on-failure.ts │ ├── playground.ts │ ├── rate-limit │ │ ├── events.ts │ │ └── worker.ts │ ├── retries-with-backoff.ts │ ├── retries-worker.ts │ ├── scheduled-runs │ │ └── programatic-schedules.ts │ ├── simple-worker.e2e.ts │ ├── simple-worker.ts │ ├── sticky-trigger.ts │ ├── sticky-worker-with-check.ts │ ├── sticky-worker.ts │ ├── stream-by-additional-meta.ts │ └── webhooks.e2e.ts ├── index.ts ├── protoc │ ├── dispatcher │ │ ├── dispatcher.ts │ │ └── index.ts │ ├── events │ │ ├── events.ts │ │ └── index.ts │ ├── google │ │ └── protobuf │ │ │ ├── timestamp.ts │ │ │ └── wrappers.ts │ └── workflows │ │ ├── index.ts │ │ └── workflows.ts ├── sdk.ts ├── step.ts ├── util │ ├── config-loader │ │ ├── config-loader.test.ts │ │ ├── config-loader.ts │ │ ├── fixtures │ │ │ ├── .hatchet-invalid.yaml │ │ │ └── .hatchet.yaml │ │ ├── index.ts │ │ ├── token.test.ts │ │ └── token.ts │ ├── errors │ │ └── hatchet-error.ts │ ├── hatchet-promise │ │ ├── hatchet-promise.test.ts │ │ └── hatchet-promise.ts │ ├── logger │ │ ├── index.ts │ │ └── logger.ts │ ├── parse.ts │ ├── retrier.ts │ ├── sleep.ts │ ├── thread-helper.ts │ └── workflow-run-ref.ts ├── version.ts └── workflow.ts ├── tsconfig.json └── typedoc.json /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "es6": true, 4 | "node": true, 5 | "jest": true 6 | }, 7 | "extends": ["airbnb-base", "prettier", "plugin:prettier/recommended"], 8 | "globals": { 9 | "Atomics": "readonly", 10 | "SharedArrayBuffer": "readonly" 11 | }, 12 | "parser": "@typescript-eslint/parser", 13 | "parserOptions": { 14 | "ecmaVersion": 11, 15 | "sourceType": "module" 16 | }, 17 | "plugins": ["@typescript-eslint", "import", "unused-imports", "prettier", "eslint-plugin-jest"], 18 | "rules": { 19 | "no-void": "off", 20 | "@typescript-eslint/no-shadow": "off", 21 | "@typescript-eslint/no-throw-literal": "off", 22 | 
"no-use-before-define": "off", 23 | "@typescript-eslint/no-use-before-define": "off", 24 | "import/extensions": "off", 25 | "unused-imports/no-unused-imports": "error", 26 | "no-unused-vars": "off", 27 | "prettier/prettier": "error", 28 | "@typescript-eslint/no-unused-vars": "warn", 29 | "curly": "error", 30 | "import/prefer-default-export": "off", 31 | "import/no-unresolved": "off", 32 | "lines-between-class-members": "off", 33 | "class-methods-use-this": "off", 34 | "no-await-in-loop": "off", 35 | "no-restricted-syntax": "off" 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "gitsubmodule" 4 | directory: "/" 5 | schedule: 6 | interval: "daily" 7 | 8 | - package-ecosystem: "github-actions" 9 | directory: "/" 10 | schedule: 11 | interval: "daily" 12 | 13 | - package-ecosystem: "npm" 14 | directory: "/" 15 | schedule: 16 | interval: "daily" 17 | -------------------------------------------------------------------------------- /.github/workflows/auto-merge.yml: -------------------------------------------------------------------------------- 1 | name: auto-merge 2 | 3 | on: 4 | pull_request_target: 5 | 6 | jobs: 7 | auto-merge: 8 | runs-on: ubuntu-latest 9 | if: ${{ github.actor == 'dependabot[bot]' }} 10 | permissions: 11 | pull-requests: write 12 | issues: write 13 | repository-projects: write 14 | contents: write 15 | steps: 16 | - name: "Metadata" 17 | id: metadata 18 | uses: dependabot/fetch-metadata@v2.3.0 19 | with: 20 | github-token: "${{ secrets.GITHUB_TOKEN }}" 21 | - name: "Enable auto-squash" 22 | if: steps.metadata.outputs.package-ecosystem == 'submodules' || steps.metadata.outputs.update-type == 'version-update:semver-minor' || steps.metadata.outputs.update-type == 'version-update:semver-patch' 23 | run: gh pr merge --auto --squash "$PR_URL" 24 | 
env: 25 | PR_URL: ${{ github.event.pull_request.html_url }} 26 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 27 | - name: "Approve (minor & patch)" 28 | id: approve 29 | if: steps.metadata.outputs.package-ecosystem == 'submodules' || steps.metadata.outputs.update-type == 'version-update:semver-minor' || steps.metadata.outputs.update-type == 'version-update:semver-patch' 30 | run: gh pr review --approve "$PR_URL" 31 | env: 32 | PR_URL: ${{ github.event.pull_request.html_url }} 33 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 34 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: lint 2 | on: pull_request 3 | 4 | jobs: 5 | lint: 6 | runs-on: ubuntu-latest 7 | steps: 8 | - name: Checkout 9 | uses: actions/checkout@v4 10 | with: 11 | submodules: recursive 12 | 13 | - name: Install pnpm 14 | run: npm install -g pnpm@8 15 | 16 | - name: Get pnpm store directory 17 | shell: bash 18 | run: | 19 | echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV 20 | 21 | - name: Setup pnpm cache 22 | uses: actions/cache@v4 23 | with: 24 | path: ${{ env.STORE_PATH }} 25 | key: ${{ runner.os }}-pnpm-store-${{ hashFiles('pnpm-lock.yaml') }} 26 | restore-keys: | 27 | ${{ runner.os }}-pnpm-store- 28 | 29 | - name: Install dependencies 30 | run: pnpm install 31 | 32 | - name: Lint 33 | run: pnpm lint:check 34 | -------------------------------------------------------------------------------- /.github/workflows/publish-alpha.yml: -------------------------------------------------------------------------------- 1 | name: publish-alpha 2 | on: 3 | workflow_dispatch: 4 | 5 | jobs: 6 | run-publish: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - name: Checkout 10 | uses: actions/checkout@v4 11 | with: 12 | submodules: recursive 13 | 14 | - name: Install pnpm 15 | run: npm install -g pnpm@8 16 | 17 | - uses: actions/setup-node@v4 18 | with: 19 | 
node-version: "20.x" 20 | registry-url: "https://registry.npmjs.org" 21 | 22 | - name: Get pnpm store directory 23 | shell: bash 24 | run: | 25 | echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV 26 | 27 | - name: Setup pnpm cache 28 | uses: actions/cache@v4 29 | with: 30 | path: ${{ env.STORE_PATH }} 31 | key: ${{ runner.os }}-pnpm-store-${{ hashFiles('pnpm-lock.yaml') }} 32 | restore-keys: | 33 | ${{ runner.os }}-pnpm-store- 34 | 35 | - name: Install dependencies 36 | run: pnpm install 37 | 38 | - name: Build and Publish SDK 39 | run: pnpm publish:ci:alpha 40 | env: 41 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} 42 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: publish 2 | on: 3 | workflow_dispatch: 4 | 5 | jobs: 6 | run-publish: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - name: Checkout 10 | uses: actions/checkout@v4 11 | with: 12 | submodules: recursive 13 | 14 | - name: Install pnpm 15 | run: npm install -g pnpm@8 16 | 17 | - uses: actions/setup-node@v4 18 | with: 19 | node-version: "20.x" 20 | registry-url: "https://registry.npmjs.org" 21 | 22 | - name: Get pnpm store directory 23 | shell: bash 24 | run: | 25 | echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV 26 | 27 | - name: Setup pnpm cache 28 | uses: actions/cache@v4 29 | with: 30 | path: ${{ env.STORE_PATH }} 31 | key: ${{ runner.os }}-pnpm-store-${{ hashFiles('pnpm-lock.yaml') }} 32 | restore-keys: | 33 | ${{ runner.os }}-pnpm-store- 34 | 35 | - name: Install dependencies 36 | run: pnpm install 37 | 38 | - name: Build and Publish SDK 39 | run: pnpm publish:ci 40 | env: 41 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} 42 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: tests 
2 | on: pull_request 3 | 4 | jobs: 5 | unit: 6 | runs-on: ubuntu-latest 7 | steps: 8 | - name: Checkout 9 | uses: actions/checkout@v4 10 | with: 11 | submodules: recursive 12 | 13 | - name: Install pnpm 14 | run: npm install -g pnpm@8 15 | 16 | - name: Get pnpm store directory 17 | shell: bash 18 | run: | 19 | echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV 20 | 21 | - name: Setup pnpm cache 22 | uses: actions/cache@v4 23 | with: 24 | path: ${{ env.STORE_PATH }} 25 | key: ${{ runner.os }}-pnpm-store-${{ hashFiles('pnpm-lock.yaml') }} 26 | restore-keys: | 27 | ${{ runner.os }}-pnpm-store- 28 | 29 | - name: Install dependencies 30 | run: pnpm install 31 | 32 | - name: Unit tests 33 | run: pnpm test:unit 34 | 35 | e2e: 36 | runs-on: ubuntu-latest 37 | timeout-minutes: 10 38 | steps: 39 | - name: Checkout 40 | uses: actions/checkout@v4 41 | - name: Sync Submodule Branch 42 | shell: bash 43 | run: | 44 | ./branch-sync.sh 45 | - name: Install Protoc 46 | uses: arduino/setup-protoc@v3 47 | with: 48 | version: '25.1' 49 | 50 | - name: Install Task 51 | uses: arduino/setup-task@v2 52 | 53 | - name: Setup Go 54 | uses: actions/setup-go@v5 55 | with: 56 | go-version: '1.21' 57 | 58 | - name: Install pnpm 59 | run: npm install -g pnpm@8 60 | 61 | - name: Install Atlas 62 | run: | 63 | curl -sSf https://atlasgo.sh | sh 64 | 65 | - name: Get pnpm store directory 66 | shell: bash 67 | run: | 68 | echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV 69 | 70 | - name: Setup pnpm cache 71 | uses: actions/cache@v4 72 | with: 73 | path: ${{ env.STORE_PATH }} 74 | key: ${{ runner.os }}-pnpm-store-${{ hashFiles('pnpm-lock.yaml') }} 75 | restore-keys: | 76 | ${{ runner.os }}-pnpm-store- 77 | 78 | - name: Compose 79 | working-directory: hatchet 80 | run: docker compose up -d 81 | 82 | - name: Install dependencies 83 | run: pnpm install 84 | 85 | - name: Generate 86 | working-directory: hatchet 87 | run: | 88 | export 
DATABASE_URL="postgresql://hatchet:hatchet@127.0.0.1:5431/hatchet" 89 | /bin/bash ./hack/db/atlas-apply.sh 90 | task generate-go 91 | 92 | - name: Setup 93 | working-directory: hatchet 94 | run: | 95 | export SEED_DEVELOPMENT=true 96 | export SERVER_PORT=8080 97 | export SERVER_URL=http://localhost:8080 98 | export SERVER_AUTH_COOKIE_DOMAIN=localhost 99 | export SERVER_AUTH_COOKIE_INSECURE=true 100 | 101 | go run ./cmd/hatchet-admin quickstart 102 | 103 | go run ./cmd/hatchet-engine --config ./generated/ & 104 | go run ./cmd/hatchet-api --config ./generated/ & 105 | 106 | sleep 30 107 | - name: E2E tests 108 | run: | 109 | cd hatchet/ 110 | export HATCHET_CLIENT_TOKEN="$(go run ./cmd/hatchet-admin token create --config ./generated/ --tenant-id 707d0855-80ab-4e1f-a156-f1c4546cbf52)" 111 | cd .. 112 | export HATCHET_CLIENT_TLS_ROOT_CA_FILE=./hatchet/certs/ca.cert 113 | export NODE_TLS_REJECT_UNAUTHORIZED=0 114 | pnpm test:e2e 115 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /dist 2 | /certs 3 | 4 | # ignore docs for now, because we manually use the docs output to construct docs 5 | docs 6 | 7 | node_modules/ 8 | 9 | openapi.yaml 10 | 11 | **/.env 12 | 13 | .idea 14 | 15 | .vscode 16 | 17 | /src/version.ts 18 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "hatchet"] 2 | path = hatchet 3 | url = https://github.com/hatchet-dev/hatchet.git 4 | branch = main 5 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "printWidth": 100, 3 | "tabWidth": 2, 4 | "singleQuote": true, 5 | "trailingComma": "es5" 6 | } 7 | 
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Moved to https://github.com/hatchet-dev/hatchet/tree/main/sdks/typescript 2 | -------------------------------------------------------------------------------- /branch-sync.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # 1. Get the current branch name 4 | current_branch=$(echo $GITHUB_HEAD_REF | sed 's/refs\/heads\///') 5 | 6 | if [ -z "$current_branch" ]; then 7 | current_branch=$(git rev-parse --abbrev-ref HEAD) 8 | fi 9 | 10 | # 2. Check a different repo and determine if a branch with the same name exists 11 | git ls-remote --heads https://github.com/hatchet-dev/hatchet.git $current_branch | grep -q refs/heads/$current_branch 12 | branch_exists=$? 13 | 14 | # 3. If it does, update the .gitmodules to set `branch = {the branch name}` 15 | if [ $branch_exists -eq 0 ]; then 16 | git config -f .gitmodules submodule.hatchet.branch $current_branch 17 | git add .gitmodules 18 | echo "Updated .gitmodules with branch $current_branch" 19 | else 20 | echo "Branch $current_branch does not exist in the remote repository. Pulling main branch instead." 21 | git config -f .gitmodules submodule.hatchet.branch main 22 | git add .gitmodules 23 | echo "Updated .gitmodules with branch main" 24 | fi 25 | 26 | # 4. 
Initialize and update the submodule 27 | git submodule init 28 | git submodule update --remote --merge 29 | -------------------------------------------------------------------------------- /generate-protoc.sh: -------------------------------------------------------------------------------- 1 | # Directory to write generated code to (.js and .d.ts files) 2 | 3 | OUT_DIR="./src/protoc" 4 | 5 | if [ -d "./hatchet" ]; then 6 | IN_DIR="./hatchet/api-contracts" 7 | else 8 | IN_DIR="../oss/api-contracts" 9 | fi 10 | 11 | # Generate code 12 | ./node_modules/.bin/grpc_tools_node_protoc \ 13 | --plugin=protoc-gen-ts_proto=./node_modules/.bin/protoc-gen-ts_proto \ 14 | --ts_proto_out=$OUT_DIR \ 15 | --ts_proto_opt=outputServices=nice-grpc,outputServices=generic-definitions,useExactTypes=false \ 16 | --proto_path=$IN_DIR \ 17 | $IN_DIR/**/*.proto 18 | 19 | pnpm lint:fix -------------------------------------------------------------------------------- /jest.config.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * For a detailed explanation regarding each configuration property, visit: 3 | * https://jestjs.io/docs/configuration 4 | */ 5 | 6 | import type { Config } from 'jest'; 7 | 8 | const config: Config = { 9 | // All imported modules in your tests should be mocked automatically 10 | // automock: false, 11 | 12 | // Stop running tests after `n` failures 13 | // bail: 0, 14 | 15 | // The directory where Jest should store its cached dependency information 16 | // cacheDirectory: "/private/var/folders/tx/gj6zfqv915j4ww47qf9wsz7m0000gn/T/jest_dx", 17 | 18 | // Automatically clear mock calls, instances, contexts and results before every test 19 | clearMocks: true, 20 | 21 | // Indicates whether the coverage information should be collected while executing the test 22 | collectCoverage: false, 23 | 24 | // An array of glob patterns indicating a set of files for which coverage information should be collected 25 | // collectCoverageFrom: 
undefined, 26 | 27 | // The directory where Jest should output its coverage files 28 | coverageDirectory: 'coverage', 29 | 30 | // An array of regexp pattern strings used to skip coverage collection 31 | // coveragePathIgnorePatterns: [ 32 | // "/node_modules/" 33 | // ], 34 | 35 | // Indicates which provider should be used to instrument code for coverage 36 | coverageProvider: 'v8', 37 | 38 | // A list of reporter names that Jest uses when writing coverage reports 39 | // coverageReporters: [ 40 | // "json", 41 | // "text", 42 | // "lcov", 43 | // "clover" 44 | // ], 45 | 46 | // An object that configures minimum threshold enforcement for coverage results 47 | // coverageThreshold: undefined, 48 | 49 | // A path to a custom dependency extractor 50 | // dependencyExtractor: undefined, 51 | 52 | // Make calling deprecated APIs throw helpful error messages 53 | // errorOnDeprecated: false, 54 | 55 | // The default configuration for fake timers 56 | // fakeTimers: { 57 | // "enableGlobally": false 58 | // }, 59 | 60 | // Force coverage collection from ignored files using an array of glob patterns 61 | // forceCoverageMatch: [], 62 | 63 | // A path to a module which exports an async function that is triggered once before all test suites 64 | // globalSetup: undefined, 65 | 66 | // A path to a module which exports an async function that is triggered once after all test suites 67 | // globalTeardown: undefined, 68 | 69 | // A set of global variables that need to be available in all test environments 70 | // globals: {}, 71 | 72 | // The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers. 
73 | // maxWorkers: "50%", 74 | 75 | // An array of directory names to be searched recursively up from the requiring module's location 76 | // moduleDirectories: [ 77 | // "node_modules" 78 | // ], 79 | 80 | // An array of file extensions your modules use 81 | // moduleFileExtensions: [ 82 | // "js", 83 | // "mjs", 84 | // "cjs", 85 | // "jsx", 86 | // "ts", 87 | // "tsx", 88 | // "json", 89 | // "node" 90 | // ], 91 | 92 | // A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module 93 | // moduleNameMapper: {}, 94 | 95 | // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader 96 | // modulePathIgnorePatterns: [], 97 | 98 | // Activates notifications for test results 99 | // notify: false, 100 | 101 | // An enum that specifies notification mode. Requires { notify: true } 102 | // notifyMode: "failure-change", 103 | 104 | // A preset that is used as a base for Jest's configuration 105 | // preset: undefined, 106 | 107 | // Run tests from one or more projects 108 | // projects: undefined, 109 | 110 | // Use this configuration option to add custom reporters to Jest 111 | // reporters: undefined, 112 | 113 | // Automatically reset mock state before every test 114 | // resetMocks: false, 115 | 116 | // Reset the module registry before running each individual test 117 | // resetModules: false, 118 | 119 | // A path to a custom resolver 120 | // resolver: undefined, 121 | 122 | // Automatically restore mock state and implementation before every test 123 | // restoreMocks: false, 124 | 125 | // The root directory that Jest should scan for tests and modules within 126 | // rootDir: undefined, 127 | 128 | // A list of paths to directories that Jest should use to search for files in 129 | // roots: [ 130 | // "" 131 | // ], 132 | 133 | // Allows you to use a custom runner instead of Jest's default test runner 134 | // runner: "jest-runner", 
135 | 136 | // The paths to modules that run some code to configure or set up the testing environment before each test 137 | // setupFiles: [], 138 | 139 | // A list of paths to modules that run some code to configure or set up the testing framework before each test 140 | // setupFilesAfterEnv: [], 141 | 142 | // The number of seconds after which a test is considered as slow and reported as such in the results. 143 | // slowTestThreshold: 5, 144 | 145 | // A list of paths to snapshot serializer modules Jest should use for snapshot testing 146 | // snapshotSerializers: [], 147 | 148 | // The test environment that will be used for testing 149 | // testEnvironment: "jest-environment-node", 150 | 151 | // Options that will be passed to the testEnvironment 152 | // testEnvironmentOptions: {}, 153 | 154 | // Adds a location field to test results 155 | // testLocationInResults: false, 156 | 157 | // The glob patterns Jest uses to detect test files 158 | // testMatch: [ 159 | // "**/__tests__/**/*.[jt]s?(x)", 160 | // "**/?(*.)+(spec|test).[tj]s?(x)" 161 | // ], 162 | 163 | // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped 164 | testPathIgnorePatterns: ['/node_modules/', '/dist/'], 165 | 166 | // The regexp pattern or array of patterns that Jest uses to detect test files 167 | // testRegex: [], 168 | 169 | // This option allows the use of a custom results processor 170 | // testResultsProcessor: undefined, 171 | 172 | // This option allows use of a custom test runner 173 | // testRunner: "jest-circus/runner", 174 | 175 | // A map from regular expressions to paths to transformers 176 | transform: { 177 | '^.+\\.(js|ts)$': ['ts-jest', { tsconfig: './tsconfig.json' }], 178 | }, 179 | moduleNameMapper: { 180 | '^@hatchet/(.*)$': '/src/$1', 181 | '^@util/(.*)$': '/src/util/$1', 182 | '^@models/(.*)$': '/src/models/$1', 183 | '^@clients/(.*)$': '/src/clients/$1', 184 | }, 185 | // An array of regexp pattern strings that are 
matched against all source file paths, matched files will skip transformation 186 | // transformIgnorePatterns: [ 187 | // "/node_modules/", 188 | // "\\.pnp\\.[^\\/]+$" 189 | // ], 190 | 191 | // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them 192 | // unmockedModulePathPatterns: undefined, 193 | 194 | // Indicates whether each individual test should be reported during the run 195 | // verbose: undefined, 196 | 197 | // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode 198 | // watchPathIgnorePatterns: [], 199 | 200 | // Whether to use watchman for file crawling 201 | // watchman: true, 202 | }; 203 | 204 | export default config; 205 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@hatchet-dev/typescript-sdk", 3 | "version": "0.20.2", 4 | "description": "Background task orchestration & visibility for developers", 5 | "types": "dist/index.d.ts", 6 | "files": [ 7 | "*", 8 | "!**/*.test.js", 9 | "!**/*.test.d.ts", 10 | "!**/*.e2e.js", 11 | "!**/*.e2e.d.ts" 12 | ], 13 | "repository": { 14 | "type": "git", 15 | "url": "https://github.com/hatchet-dev/hatchet-typescript.git" 16 | }, 17 | "scripts": { 18 | "build": "echo 'build hatchet sdk with `npm run tsc:build` to ensure it is not built during the publish step' && exit 0", 19 | "prepare": "npm run build", 20 | "dump-version": "node -e \"console.log('export const HATCHET_VERSION = \\'' + require('./package.json').version + '\\';');\" > src/version.ts", 21 | "tsc:build": "npm run dump-version && tsc && resolve-tspaths", 22 | "test:unit": "jest --testMatch='**/*.test.ts'", 23 | "test:e2e": "jest --testMatch='**/*.e2e.ts'", 24 | "test:unit:watch": "jest --testMatch='**/*.test.ts' --watch", 25 | "generate": "pnpm run 
'/generate-.*/'", 26 | "generate-api": "npx --yes swagger-cli bundle ../oss/api-contracts/openapi/openapi.yaml --outfile openapi.yaml --type yaml && npx swagger-typescript-api -p openapi.yaml -o src/clients/rest/generated -n hatchet.ts --modular --axios", 27 | "generate-protoc": "./generate-protoc.sh", 28 | "lint:check": "npm run eslint:check && npm run prettier:check", 29 | "lint:fix": "npm run eslint:fix && npm run prettier:fix", 30 | "eslint:check": "eslint \"{src,tests}/**/*.{ts,tsx,js}\"", 31 | "eslint:fix": "eslint \"{src,tests}/**/*.{ts,tsx,js}\" --fix", 32 | "prettier:check": "prettier \"src/**/*.{ts,tsx}\" --list-different", 33 | "prettier:fix": "prettier \"src/**/*.{ts,tsx}\" --write", 34 | "exec": "npx dotenv -- ts-node -r tsconfig-paths/register --project tsconfig.json", 35 | "example:event": "npm run exec -- ./src/examples/example-event.ts", 36 | "example:event-listen": "npm run exec -- ./src/examples/example-event-with-results.ts", 37 | "worker:namespaced": "npm run exec -- ./src/examples/namespaced-worker.ts", 38 | "worker:rate": "npm run exec -- ./src/examples/rate-limit/worker.ts", 39 | "example:rate": "npm run exec -- ./src/examples/rate-limit/events.ts", 40 | "worker:fanout": "npm run exec -- ./src/examples/fanout-worker.ts", 41 | "worker:simple": "npm run exec -- ./src/examples/simple-worker.ts", 42 | "worker:affinity": "npm run exec -- ./src/examples/affinity-workers.ts", 43 | "worker:sticky": "npm run exec -- ./src/examples/sticky-worker.ts", 44 | "worker:sticky-with-check": "npm run exec -- ./src/examples/sticky-worker-with-check.ts", 45 | "trigger:sticky": "npm run exec -- ./src/examples/sticky-trigger.ts", 46 | "worker:on-failure": "npm run exec -- ./src/examples/on-failure.ts", 47 | "manual:trigger": "npm run exec -- ./src/examples/manual-trigger.ts", 48 | "manual:meta": "npm run exec -- ./src/examples/stream-by-additional-meta.ts", 49 | "bulk:trigger": "npm run exec -- ./src/examples/bulk-trigger.ts", 50 | "bulk:fanout:worker": "npm run 
exec -- ./src/examples/bulk-fanout-worker.ts", 51 | "bulk:fanout:trigger": "npm run exec -- ./src/examples/bulk-fanout-trigger.ts", 52 | "worker:dag": "npm run exec -- ./src/examples/dag-worker.ts", 53 | "worker:concurrency": "npm run exec -- ./src/examples/concurrency/cancel-in-progress/concurrency-worker.ts", 54 | "event:concurrency": "npm run exec -- ./src/examples/concurrency/cancel-in-progress/concurrency-event.ts", 55 | "worker:concurrency:rr": "npm run exec -- ./src/examples/concurrency/group-round-robin/concurrency-worker-expression.ts", 56 | "event:concurrency:rr": "npm run exec -- ./src/examples/concurrency/group-round-robin/concurrency-event.ts", 57 | "worker:playground": "npm run exec -- ./src/examples/playground.ts", 58 | "worker:retries": "npm run exec -- ./src/examples/retries-worker.ts", 59 | "worker:retries-with-backoff": "npm run exec -- ./src/examples/retries-with-backoff.ts", 60 | "worker:multi-workflow": "npm run exec -- ./src/examples/multi-workflow.ts", 61 | "worker:logger": "npm run exec -- ./src/examples/logger.ts", 62 | "worker:byo-logger": "npm run exec -- ./src/examples/byo-logger.ts", 63 | "api": "npm run exec -- ./src/examples/api.ts", 64 | "prepublish": "cp package.json dist/package.json;", 65 | "publish:ci": "rm -rf ./dist && npm run dump-version && npm run tsc:build && npm run prepublish && cd dist && npm publish --access public --no-git-checks", 66 | "publish:ci:alpha": "rm -rf ./dist && npm run dump-version && npm run tsc:build && npm run prepublish && cd dist && npm publish --access public --no-git-checks --tag alpha", 67 | "generate-docs": "typedoc" 68 | }, 69 | "keywords": [], 70 | "author": "", 71 | "license": "MIT", 72 | "devDependencies": { 73 | "@types/jest": "^29.5.11", 74 | "@types/node": "^22.0.0", 75 | "@typescript-eslint/eslint-plugin": "^6.4.0", 76 | "autoprefixer": "^10.4.16", 77 | "dotenv-cli": "^7.3.0", 78 | "eslint": "^8.56.0", 79 | "eslint-config-airbnb-typescript": "^18.0.0", 80 | "eslint-config-prettier": 
"^9.1.0", 81 | "eslint-config-standard-with-typescript": "^43.0.0", 82 | "eslint-import-resolver-typescript": "^3.6.1", 83 | "eslint-plugin-import": "^2.29.1", 84 | "eslint-plugin-jest": "^28.5.0", 85 | "eslint-plugin-n": "^15.0.0 || ^16.0.0 ", 86 | "eslint-plugin-prettier": "^5.0.1", 87 | "eslint-plugin-promise": "^6.0.0", 88 | "eslint-plugin-react": "^7.34.1", 89 | "eslint-plugin-react-hooks": "^4.6.0", 90 | "eslint-plugin-react-refresh": "^0.4.6", 91 | "eslint-plugin-unused-imports": "^4.1.3", 92 | "grpc-tools": "^1.12.4", 93 | "jest": "^29.7.0", 94 | "pino": "^9.6.0", 95 | "prettier": "^3.1.1", 96 | "resolve-tspaths": "^0.8.17", 97 | "ts-jest": "^29.1.1", 98 | "ts-node": "^10.9.2", 99 | "ts-proto": "^2.0.2", 100 | "typedoc": "^0.27.1", 101 | "typedoc-plugin-markdown": "^4.0.2", 102 | "typescript": "^5.3.3" 103 | }, 104 | "dependencies": { 105 | "@bufbuild/protobuf": "^2.1.0", 106 | "@types/qs": "^6.9.11", 107 | "abort-controller-x": "^0.4.3", 108 | "axios": "^1.6.7", 109 | "long": "^5.2.3", 110 | "nice-grpc": "^2.1.7", 111 | "nice-grpc-common": "^2.0.2", 112 | "protobufjs": "^7.2.6", 113 | "qs": "^6.11.2", 114 | "yaml": "^2.3.4", 115 | "zod": "^3.22.4" 116 | } 117 | } 118 | -------------------------------------------------------------------------------- /src/clients/admin/admin-client.test.ts: -------------------------------------------------------------------------------- 1 | import { CreateWorkflowVersionOpts, WorkflowVersion } from '@hatchet/protoc/workflows'; 2 | import { DEFAULT_LOGGER } from '@clients/hatchet-client/hatchet-logger'; 3 | import { ClientConfig } from '@clients/hatchet-client'; 4 | import { AdminClient } from './admin-client'; 5 | import { mockChannel, mockFactory } from '../hatchet-client/hatchet-client.test'; 6 | import { ListenerClient } from '../listener/listener-client'; 7 | 8 | describe('AdminClient', () => { 9 | let client: AdminClient; 10 | 11 | it('should create a client', () => { 12 | const config: ClientConfig = { 13 | token: 14 | 
'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJncnBjX2Jyb2FkY2FzdF9hZGRyZXNzIjoiMTI3LjAuMC4xOjgwODAiLCJzZXJ2ZXJfdXJsIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoiNzA3ZDA4NTUtODBhYi00ZTFmLWExNTYtZjFjNDU0NmNiZjUyIn0K.abcdef', 15 | host_port: 'HOST_PORT', 16 | tls_config: { 17 | cert_file: 'TLS_CERT_FILE', 18 | key_file: 'TLS_KEY_FILE', 19 | ca_file: 'TLS_ROOT_CA_FILE', 20 | server_name: 'TLS_SERVER_NAME', 21 | }, 22 | api_url: 'API_URL', 23 | tenant_id: 'tenantId', 24 | logger: DEFAULT_LOGGER, 25 | }; 26 | 27 | const x = new AdminClient( 28 | config, 29 | mockChannel, 30 | mockFactory, 31 | {} as any, 32 | 'tenantId', 33 | new ListenerClient(config, mockChannel, mockFactory, {} as any) 34 | ); 35 | 36 | expect(x).toBeDefined(); 37 | }); 38 | 39 | beforeEach(() => { 40 | const config: ClientConfig = { 41 | token: 42 | 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJncnBjX2Jyb2FkY2FzdF9hZGRyZXNzIjoiMTI3LjAuMC4xOjgwODAiLCJzZXJ2ZXJfdXJsIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoiNzA3ZDA4NTUtODBhYi00ZTFmLWExNTYtZjFjNDU0NmNiZjUyIn0K.abcdef', 43 | host_port: 'HOST_PORT', 44 | tls_config: { 45 | cert_file: 'TLS_CERT_FILE', 46 | key_file: 'TLS_KEY_FILE', 47 | ca_file: 'TLS_ROOT_CA_FILE', 48 | server_name: 'TLS_SERVER_NAME', 49 | }, 50 | api_url: 'API_URL', 51 | tenant_id: 'tenantId', 52 | logger: DEFAULT_LOGGER, 53 | }; 54 | 55 | client = new AdminClient( 56 | config, 57 | mockChannel, 58 | mockFactory, 59 | {} as any, 60 | 'tenantId', 61 | new ListenerClient(config, mockChannel, mockFactory, {} as any) 62 | ); 63 | }); 64 | 65 | describe('putWorkflow', () => { 66 | it('should throw an error if no version and not auto version', async () => { 67 | const workflow: CreateWorkflowVersionOpts = { 68 | name: 'workflow1', 69 | version: '', 70 | description: 'description1', 71 | eventTriggers: [], 72 | cronTriggers: [], 73 | scheduledTriggers: [], 74 | jobs: [], 75 | concurrency: undefined, 76 | }; 77 | 78 | expect(() => client.putWorkflow(workflow)).rejects.toThrow( 79 | 'PutWorkflow error: 
workflow version is required, or use autoVersion' 80 | ); 81 | }); 82 | 83 | it('should attempt to put the workflow', async () => { 84 | const workflow: CreateWorkflowVersionOpts = { 85 | name: 'workflow1', 86 | version: 'v0.0.1', 87 | description: 'description1', 88 | eventTriggers: [], 89 | cronTriggers: [], 90 | scheduledTriggers: [], 91 | jobs: [], 92 | concurrency: undefined, 93 | }; 94 | 95 | const putSpy = jest.spyOn(client.client, 'putWorkflow').mockResolvedValue({ 96 | id: 'workflow1', 97 | version: 'v0.1.0', 98 | order: 1, 99 | workflowId: 'workflow1', 100 | createdAt: undefined, 101 | updatedAt: undefined, 102 | scheduledWorkflows: [], 103 | }); 104 | 105 | await client.putWorkflow(workflow); 106 | 107 | expect(putSpy).toHaveBeenCalled(); 108 | }); 109 | }); 110 | 111 | describe('schedule_workflow', () => { 112 | it('should schedule a workflow', () => { 113 | const res: WorkflowVersion = { 114 | id: 'string', 115 | version: 'v0.0.1', 116 | order: 1, 117 | workflowId: 'string', 118 | scheduledWorkflows: [], 119 | createdAt: undefined, 120 | updatedAt: undefined, 121 | }; 122 | 123 | const spy = jest.spyOn(client.client, 'scheduleWorkflow').mockResolvedValue(res); 124 | 125 | const now = new Date(); 126 | 127 | client.scheduleWorkflow('workflowName', { 128 | schedules: [now], 129 | }); 130 | 131 | expect(spy).toHaveBeenCalledWith({ 132 | name: 'workflowName', 133 | schedules: [now], 134 | }); 135 | }); 136 | }); 137 | }); 138 | -------------------------------------------------------------------------------- /src/clients/admin/index.ts: -------------------------------------------------------------------------------- 1 | export * from './admin-client'; 2 | -------------------------------------------------------------------------------- /src/clients/dispatcher/action-listener.test.ts: -------------------------------------------------------------------------------- 1 | import { ActionType, AssignedAction } from '@hatchet/protoc/dispatcher'; 2 | import sleep 
from '@util/sleep'; 3 | // import { ServerError, Status } from 'nice-grpc-common'; 4 | import { DEFAULT_LOGGER } from '@clients/hatchet-client/hatchet-logger'; 5 | import { DispatcherClient } from './dispatcher-client'; 6 | import { ActionListener } from './action-listener'; 7 | import { mockChannel, mockFactory } from '../hatchet-client/hatchet-client.test'; 8 | 9 | let dispatcher: DispatcherClient; 10 | 11 | type AssignActionMock = AssignedAction | Error; 12 | 13 | // Mock data for AssignedAction 14 | const mockAssignedActions: AssignActionMock[] = [ 15 | { 16 | tenantId: 'tenant1', 17 | jobId: 'job1', 18 | jobName: 'Job One', 19 | jobRunId: 'run1', 20 | stepId: 'step1', 21 | stepRunId: 'runStep1', 22 | actionId: 'action1', 23 | actionType: ActionType.START_STEP_RUN, 24 | actionPayload: 'payload1', 25 | workflowRunId: 'workflowRun1', 26 | getGroupKeyRunId: 'groupKeyRun1', 27 | stepName: 'step1', 28 | retryCount: 0, 29 | }, 30 | // ... Add more mock AssignedAction objects as needed 31 | ]; 32 | 33 | // Mock implementation of the listener 34 | export const mockListener = (fixture: AssignActionMock[]) => 35 | (async function* gen() { 36 | for (const action of fixture) { 37 | // Simulate asynchronous behavior 38 | await sleep(100); 39 | 40 | if (action instanceof Error) { 41 | throw action; 42 | } 43 | 44 | yield action; 45 | } 46 | })(); 47 | 48 | describe('ActionListener', () => { 49 | beforeEach(() => { 50 | dispatcher = new DispatcherClient( 51 | { 52 | token: 53 | 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJncnBjX2Jyb2FkY2FzdF9hZGRyZXNzIjoiMTI3LjAuMC4xOjgwODAiLCJzZXJ2ZXJfdXJsIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoiNzA3ZDA4NTUtODBhYi00ZTFmLWExNTYtZjFjNDU0NmNiZjUyIn0K.abcdef', 54 | 55 | host_port: 'HOST_PORT', 56 | log_level: 'OFF', 57 | tls_config: { 58 | cert_file: 'TLS_CERT_FILE', 59 | key_file: 'TLS_KEY_FILE', 60 | ca_file: 'TLS_ROOT_CA_FILE', 61 | server_name: 'TLS_SERVER_NAME', 62 | }, 63 | api_url: 'API_URL', 64 | tenant_id: 'tenantId', 65 | logger: 
DEFAULT_LOGGER, 66 | }, 67 | mockChannel, 68 | mockFactory 69 | ); 70 | }); 71 | 72 | it('should create a client', async () => { 73 | const listener = new ActionListener(dispatcher, 'WORKER_ID'); 74 | expect(listener).toBeDefined(); 75 | expect(listener.workerId).toEqual('WORKER_ID'); 76 | }); 77 | 78 | describe('actions', () => { 79 | // it('it should "yield" actions', async () => { 80 | // const listener = new ActionListener( 81 | // dispatcher, 82 | // 'WORKER_ID', 83 | // 100, 84 | // 5, 85 | // ); 86 | // const retrySpy = jest.spyOn(listener, 'getListenClient'); 87 | // retrySpy.mockReturnValue( 88 | // mockListener([...mockAssignedActions, new ServerError(Status.CANCELLED, 'CANCELLED')]) 89 | // ); 90 | // const actions = listener.actions(); 91 | // const res = []; 92 | // for await (const action of actions) { 93 | // res.push(action); 94 | // } 95 | // expect(res[0]).toEqual({ 96 | // tenantId: 'tenant1', 97 | // jobId: 'job1', 98 | // jobName: 'Job One', 99 | // jobRunId: 'run1', 100 | // stepId: 'step1', 101 | // stepRunId: 'runStep1', 102 | // actionId: 'action1', 103 | // actionType: ActionType.START_STEP_RUN, 104 | // actionPayload: 'payload1', 105 | // workflowRunId: 'workflowRun1', 106 | // getGroupKeyRunId: 'groupKeyRun1', 107 | // }); 108 | // }); 109 | // it('it should break on grpc CANCELLED', async () => { 110 | // const listener = new ActionListener( 111 | // dispatcher, 112 | // mockListener([...mockAssignedActions, new ServerError(Status.CANCELLED, 'CANCELLED')]), 113 | // 'WORKER_ID' 114 | // ); 115 | // const actions = listener.actions(); 116 | // // throw an error from listen client 117 | // const retrySpy = jest.spyOn(listener, 'getListenClient'); 118 | // const res = []; 119 | // for await (const action of actions) { 120 | // res.push(action); 121 | // } 122 | // expect(res.length).toEqual(1); 123 | // expect(retrySpy).toHaveBeenCalledTimes(1); 124 | // }); 125 | // it('it should break on unknown error', async () => { 126 | // const 
listener = new ActionListener( 127 | // dispatcher, 128 | // mockListener([...mockAssignedActions, new Error('Simulated error')]), 129 | // 'WORKER_ID' 130 | // ); 131 | // const actions = listener.actions(); 132 | // const retrySpy = jest.spyOn(listener, 'getListenClient'); 133 | // const res = []; 134 | // for await (const action of actions) { 135 | // res.push(action); 136 | // } 137 | // expect(res.length).toEqual(1); 138 | // expect(retrySpy).toHaveBeenCalledTimes(6); 139 | // }); 140 | // it('it should attempt to re-establish connection on grpc UNAVAILABLE', async () => { 141 | // const listener = new ActionListener( 142 | // dispatcher, 143 | // mockListener([...mockAssignedActions, new ServerError(Status.UNAVAILABLE, 'UNAVAILABLE')]), 144 | // 'WORKER_ID' 145 | // ); 146 | // const retrySpy = jest.spyOn(listener, 'getListenClient'); 147 | // const actions = listener.actions(); 148 | // const res = []; 149 | // for await (const action of actions) { 150 | // res.push(action); 151 | // } 152 | // expect(res.length).toEqual(1); 153 | // expect(retrySpy).toHaveBeenCalledTimes(6); 154 | // }); 155 | // }); 156 | // describe('retry_subscribe', () => { 157 | // it('should exit after successful connection', async () => { 158 | // const listener = new ActionListener( 159 | // dispatcher, 160 | // mockListener(mockAssignedActions), 161 | // 'WORKER_ID' 162 | // ); 163 | // // Mock the listener to throw an error on the first call 164 | // const listenSpy = jest 165 | // .spyOn(listener.client, 'listen') 166 | // .mockReturnValue(mockListener(mockAssignedActions)); 167 | // await listener.getListenClient(); 168 | // expect(listenSpy).toHaveBeenCalledTimes(1); 169 | // }); 170 | // it('should retry until success', async () => { 171 | // const listener = new ActionListener( 172 | // dispatcher, 173 | // mockListener(mockAssignedActions), 174 | // 'WORKER_ID' 175 | // ); 176 | // // Mock the listener to throw an error on the first call 177 | // // const listenSpy = jest 
178 | // // .spyOn(listener.client, 'listen') 179 | // // .mockImplementationOnce(() => { 180 | // // throw new Error('Simulated error'); 181 | // // }) 182 | // // .mockImplementationOnce(() => mockListener(mockAssignedActions)); 183 | // await expect(async () => { 184 | // await listener.getListenClient(); 185 | // }).not.toThrow(); 186 | // }); 187 | // it('should not throw an error if successful', async () => { 188 | // const listener = new ActionListener( 189 | // dispatcher, 190 | // mockListener(mockAssignedActions), 191 | // 'WORKER_ID' 192 | // ); 193 | // // Mock the listener to throw an error on the first call 194 | // const listenSpy = jest 195 | // .spyOn(listener.client, 'listen') 196 | // .mockImplementationOnce(() => { 197 | // throw new Error('Simulated error'); 198 | // }) 199 | // .mockImplementationOnce(() => mockListener(mockAssignedActions)); 200 | // await listener.getListenClient(); 201 | // expect(listenSpy).toHaveBeenCalledTimes(2); 202 | // }); 203 | // it('should retry at most COUNT times and throw an error', async () => { 204 | // const listener = new ActionListener( 205 | // dispatcher, 206 | // mockListener(mockAssignedActions), 207 | // 'WORKER_ID' 208 | // ); 209 | // // Mock the listener to throw an error on the first call 210 | // const listenSpy = jest 211 | // .spyOn(listener.client, 'listen') 212 | // .mockImplementationOnce(() => { 213 | // throw new Error('Simulated error'); 214 | // }) 215 | // .mockImplementationOnce(() => { 216 | // throw new Error('Simulated error'); 217 | // }) 218 | // .mockImplementationOnce(() => { 219 | // throw new Error('Simulated error'); 220 | // }) 221 | // .mockImplementationOnce(() => { 222 | // throw new Error('Simulated error'); 223 | // }) 224 | // .mockImplementationOnce(() => { 225 | // throw new Error('Simulated error'); 226 | // }) 227 | // .mockImplementationOnce(() => { 228 | // throw new Error('Simulated error'); 229 | // }) 230 | // .mockImplementationOnce(() => 
mockListener(mockAssignedActions)); 231 | // try { 232 | // await listener.getListenClient(); 233 | // expect(listenSpy).toHaveBeenCalledTimes(5); 234 | // } catch (e: any) { 235 | // expect(e.message).toEqual(`Could not subscribe to the worker after 5 retries`); 236 | // } 237 | // }); 238 | }); 239 | 240 | describe('unregister', () => { 241 | // it('should unsubscribe itself', async () => { 242 | // const listener = new ActionListener( 243 | // dispatcher, 244 | // mockListener(mockAssignedActions), 245 | // 'WORKER_ID' 246 | // ); 247 | // const unsubscribeSpy = jest.spyOn(listener.client, 'unsubscribe').mockResolvedValue({ 248 | // tenantId: 'TENANT_ID', 249 | // workerId: 'WORKER_ID', 250 | // }); 251 | // const res = await listener.unregister(); 252 | // expect(unsubscribeSpy).toHaveBeenCalled(); 253 | // expect(res.workerId).toEqual('WORKER_ID'); 254 | // }); 255 | }); 256 | }); 257 | -------------------------------------------------------------------------------- /src/clients/dispatcher/action-listener.ts: -------------------------------------------------------------------------------- 1 | import { DispatcherClient as PbDispatcherClient, AssignedAction } from '@hatchet/protoc/dispatcher'; 2 | 3 | import { Status } from 'nice-grpc'; 4 | import { ClientConfig } from '@clients/hatchet-client/client-config'; 5 | import sleep from '@util/sleep'; 6 | import HatchetError from '@util/errors/hatchet-error'; 7 | import { Logger } from '@hatchet/util/logger'; 8 | 9 | import { DispatcherClient } from './dispatcher-client'; 10 | import { Heartbeat } from './heartbeat/heartbeat-controller'; 11 | 12 | const DEFAULT_ACTION_LISTENER_RETRY_INTERVAL = 5000; // milliseconds 13 | const DEFAULT_ACTION_LISTENER_RETRY_COUNT = 20; 14 | 15 | // eslint-disable-next-line no-shadow 16 | enum ListenStrategy { 17 | LISTEN_STRATEGY_V1 = 1, 18 | LISTEN_STRATEGY_V2 = 2, 19 | } 20 | 21 | export interface Action extends AssignedAction {} 22 | 23 | export class ActionListener { 24 | config: 
ClientConfig; 25 | client: PbDispatcherClient; 26 | workerId: string; 27 | logger: Logger; 28 | lastConnectionAttempt: number = 0; 29 | retries: number = 0; 30 | retryInterval: number = DEFAULT_ACTION_LISTENER_RETRY_INTERVAL; 31 | retryCount: number = DEFAULT_ACTION_LISTENER_RETRY_COUNT; 32 | done = false; 33 | listenStrategy = ListenStrategy.LISTEN_STRATEGY_V2; 34 | heartbeat: Heartbeat; 35 | 36 | constructor( 37 | client: DispatcherClient, 38 | workerId: string, 39 | retryInterval: number = DEFAULT_ACTION_LISTENER_RETRY_INTERVAL, 40 | retryCount: number = DEFAULT_ACTION_LISTENER_RETRY_COUNT 41 | ) { 42 | this.config = client.config; 43 | this.client = client.client; 44 | this.workerId = workerId; 45 | this.logger = client.config.logger(`ActionListener`, this.config.log_level); 46 | this.retryInterval = retryInterval; 47 | this.retryCount = retryCount; 48 | this.heartbeat = new Heartbeat(client, workerId); 49 | } 50 | 51 | actions = () => 52 | (async function* gen(client: ActionListener) { 53 | while (true) { 54 | if (client.done) { 55 | break; 56 | } 57 | 58 | try { 59 | const listenClient = await client.getListenClient(); 60 | 61 | for await (const assignedAction of listenClient) { 62 | const action: Action = { 63 | ...assignedAction, 64 | }; 65 | 66 | yield action; 67 | } 68 | } catch (e: any) { 69 | client.logger.info('Listener error'); 70 | 71 | // if this is a HatchetError, we should throw this error 72 | if (e instanceof HatchetError) { 73 | throw e; 74 | } 75 | 76 | if ( 77 | (await client.getListenStrategy()) === ListenStrategy.LISTEN_STRATEGY_V2 && 78 | e.code === Status.UNIMPLEMENTED 79 | ) { 80 | client.setListenStrategy(ListenStrategy.LISTEN_STRATEGY_V1); 81 | } 82 | 83 | client.incrementRetries(); 84 | client.logger.error(`Listener encountered an error: ${e.message}`); 85 | if (client.retries > 1) { 86 | client.logger.info(`Retrying in ${client.retryInterval}ms...`); 87 | await sleep(client.retryInterval); 88 | } else { 89 | 
client.logger.info(`Retrying`); 90 | } 91 | } 92 | } 93 | })(this); 94 | async setListenStrategy(strategy: ListenStrategy) { 95 | this.listenStrategy = strategy; 96 | } 97 | 98 | async getListenStrategy(): Promise { 99 | return this.listenStrategy; 100 | } 101 | 102 | async incrementRetries() { 103 | this.retries += 1; 104 | } 105 | 106 | async getListenClient(): Promise> { 107 | const currentTime = Math.floor(Date.now()); 108 | 109 | // attempt to account for the time it takes to establish the listener 110 | if (currentTime - this.lastConnectionAttempt > this.retryInterval * 4) { 111 | this.retries = 0; 112 | } 113 | 114 | this.lastConnectionAttempt = currentTime; 115 | 116 | if (this.retries > DEFAULT_ACTION_LISTENER_RETRY_COUNT) { 117 | throw new HatchetError( 118 | `Could not subscribe to the worker after ${DEFAULT_ACTION_LISTENER_RETRY_COUNT} retries` 119 | ); 120 | } 121 | 122 | this.logger.info( 123 | `Connecting to Hatchet to establish listener for actions... ${this.retries}/${DEFAULT_ACTION_LISTENER_RETRY_COUNT} (last attempt: ${this.lastConnectionAttempt})` 124 | ); 125 | 126 | if (this.retries >= 1) { 127 | await sleep(DEFAULT_ACTION_LISTENER_RETRY_INTERVAL); 128 | } 129 | 130 | try { 131 | if (this.listenStrategy === ListenStrategy.LISTEN_STRATEGY_V1) { 132 | const result = this.client.listen({ 133 | workerId: this.workerId, 134 | }); 135 | this.logger.green('Connection established using LISTEN_STRATEGY_V1'); 136 | return result; 137 | } 138 | 139 | const res = this.client.listenV2({ 140 | workerId: this.workerId, 141 | }); 142 | 143 | await this.heartbeat.start(); 144 | this.logger.green('Connection established using LISTEN_STRATEGY_V2'); 145 | return res; 146 | } catch (e: any) { 147 | this.retries += 1; 148 | this.logger.error(`Attempt ${this.retries}: Failed to connect, retrying...`); 149 | 150 | if (e.code === Status.UNAVAILABLE) { 151 | // Connection lost, reset heartbeat interval and retry connection 152 | this.heartbeat.stop(); 153 | return 
this.getListenClient(); 154 | } 155 | 156 | throw e; 157 | } 158 | } 159 | 160 | async unregister() { 161 | this.done = true; 162 | this.heartbeat.stop(); 163 | try { 164 | return await this.client.unsubscribe({ 165 | workerId: this.workerId, 166 | }); 167 | } catch (e: any) { 168 | throw new HatchetError(`Failed to unsubscribe: ${e.message}`); 169 | } 170 | } 171 | } 172 | -------------------------------------------------------------------------------- /src/clients/dispatcher/dispatcher-client.test.ts: -------------------------------------------------------------------------------- 1 | import { StepActionEventType } from '@hatchet/protoc/dispatcher'; 2 | import { DEFAULT_LOGGER } from '@clients/hatchet-client/hatchet-logger'; 3 | import { DispatcherClient } from './dispatcher-client'; 4 | import { mockChannel, mockFactory } from '../hatchet-client/hatchet-client.test'; 5 | 6 | let client: DispatcherClient; 7 | 8 | describe('DispatcherClient', () => { 9 | it('should create a client', () => { 10 | const x = new DispatcherClient( 11 | { 12 | token: 13 | 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJncnBjX2Jyb2FkY2FzdF9hZGRyZXNzIjoiMTI3LjAuMC4xOjgwODAiLCJzZXJ2ZXJfdXJsIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoiNzA3ZDA4NTUtODBhYi00ZTFmLWExNTYtZjFjNDU0NmNiZjUyIn0K.abcdef', 14 | 15 | host_port: 'HOST_PORT', 16 | log_level: 'OFF', 17 | tls_config: { 18 | cert_file: 'TLS_CERT_FILE', 19 | key_file: 'TLS_KEY_FILE', 20 | ca_file: 'TLS_ROOT_CA_FILE', 21 | server_name: 'TLS_SERVER_NAME', 22 | }, 23 | api_url: 'API_URL', 24 | tenant_id: 'tenantId', 25 | logger: DEFAULT_LOGGER, 26 | }, 27 | mockChannel, 28 | mockFactory 29 | ); 30 | 31 | expect(x).toBeDefined(); 32 | }); 33 | 34 | beforeEach(() => { 35 | client = new DispatcherClient( 36 | { 37 | token: 38 | 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJncnBjX2Jyb2FkY2FzdF9hZGRyZXNzIjoiMTI3LjAuMC4xOjgwODAiLCJzZXJ2ZXJfdXJsIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoiNzA3ZDA4NTUtODBhYi00ZTFmLWExNTYtZjFjNDU0NmNiZjUyIn0K.abcdef', 39 | 40 | 
host_port: 'HOST_PORT', 41 | log_level: 'OFF', 42 | tls_config: { 43 | cert_file: 'TLS_CERT_FILE', 44 | key_file: 'TLS_KEY_FILE', 45 | ca_file: 'TLS_ROOT_CA_FILE', 46 | server_name: 'TLS_SERVER_NAME', 47 | }, 48 | api_url: 'API_URL', 49 | tenant_id: 'tenantId', 50 | logger: DEFAULT_LOGGER, 51 | }, 52 | mockChannel, 53 | mockFactory 54 | ); 55 | }); 56 | 57 | describe('get_action_listener', () => { 58 | // it('should register the worker', async () => { 59 | // const clientSpy = jest.spyOn(client.client, 'register').mockResolvedValue({ 60 | // workerId: 'WORKER_ID', 61 | // tenantId: 'TENANT_ID', 62 | // workerName: 'WORKER_NAME', 63 | // }); 64 | // const listenerSpy = jest.spyOn(client.client, 'listen'); 65 | // const listener = await client.getActionListener({ 66 | // workerName: 'WORKER_NAME', 67 | // services: ['SERVICE'], 68 | // actions: ['ACTION'], 69 | // }); 70 | // expect(clientSpy).toHaveBeenCalledWith({ 71 | // workerName: 'WORKER_NAME', 72 | // services: ['SERVICE'], 73 | // actions: ['ACTION'], 74 | // }); 75 | // expect(listenerSpy).toHaveBeenCalledWith({ 76 | // workerId: 'WORKER_ID', 77 | // }); 78 | // expect(listener).toBeDefined(); 79 | // expect(listener.workerId).toEqual('WORKER_ID'); 80 | // }); 81 | }); 82 | 83 | describe('send_action_event', () => { 84 | it('should send action events', () => { 85 | const clientSpy = jest.spyOn(client.client, 'sendStepActionEvent').mockResolvedValue({ 86 | tenantId: 'TENANT_ID', 87 | workerId: 'WORKER_ID', 88 | }); 89 | 90 | client.sendStepActionEvent({ 91 | workerId: 'WORKER_ID', 92 | actionId: 'ACTION_ID', 93 | eventType: StepActionEventType.STEP_EVENT_TYPE_COMPLETED, 94 | eventPayload: '{"foo":"bar"}', 95 | eventTimestamp: new Date(), 96 | jobId: 'a', 97 | jobRunId: 'b', 98 | stepId: 'c', 99 | stepRunId: 'd', 100 | }); 101 | 102 | expect(clientSpy).toHaveBeenCalledWith({ 103 | workerId: 'WORKER_ID', 104 | actionId: 'ACTION_ID', 105 | eventType: StepActionEventType.STEP_EVENT_TYPE_COMPLETED, 106 | 
eventPayload: '{"foo":"bar"}', 107 | jobId: 'a', 108 | jobRunId: 'b', 109 | stepId: 'c', 110 | stepRunId: 'd', 111 | eventTimestamp: expect.any(Object), 112 | }); 113 | }); 114 | }); 115 | }); 116 | -------------------------------------------------------------------------------- /src/clients/dispatcher/dispatcher-client.ts: -------------------------------------------------------------------------------- 1 | import { Channel, ClientFactory } from 'nice-grpc'; 2 | import { 3 | DispatcherClient as PbDispatcherClient, 4 | DispatcherDefinition, 5 | StepActionEvent, 6 | GroupKeyActionEvent, 7 | OverridesData, 8 | DeepPartial, 9 | WorkerLabels as PbWorkerAffinityConfig, 10 | SDKS, 11 | RuntimeInfo, 12 | } from '@hatchet/protoc/dispatcher'; 13 | import { ClientConfig } from '@clients/hatchet-client/client-config'; 14 | import HatchetError from '@util/errors/hatchet-error'; 15 | import { Logger } from '@hatchet/util/logger'; 16 | 17 | import { retrier } from '@hatchet/util/retrier'; 18 | import { HATCHET_VERSION } from '@hatchet/version'; 19 | import { ActionListener } from './action-listener'; 20 | 21 | export type WorkerLabels = Record; 22 | 23 | interface GetActionListenerOptions { 24 | workerName: string; 25 | services: string[]; 26 | actions: string[]; 27 | maxRuns?: number; 28 | labels: Record; 29 | } 30 | 31 | export class DispatcherClient { 32 | config: ClientConfig; 33 | client: PbDispatcherClient; 34 | logger: Logger; 35 | 36 | constructor(config: ClientConfig, channel: Channel, factory: ClientFactory) { 37 | this.config = config; 38 | this.client = factory.create(DispatcherDefinition, channel); 39 | this.logger = config.logger(`Dispatcher`, config.log_level); 40 | } 41 | 42 | getRuntimeInfo(): RuntimeInfo { 43 | return { 44 | sdkVersion: HATCHET_VERSION, 45 | language: SDKS.TYPESCRIPT, 46 | languageVersion: process.version, 47 | os: process.platform, 48 | }; 49 | } 50 | 51 | async getActionListener(options: GetActionListenerOptions) { 52 | // Register the worker 
53 | const registration = await this.client.register({ 54 | ...options, 55 | labels: options.labels ? mapLabels(options.labels) : undefined, 56 | runtimeInfo: this.getRuntimeInfo(), 57 | }); 58 | 59 | return new ActionListener(this, registration.workerId); 60 | } 61 | 62 | async sendStepActionEvent(in_: StepActionEvent) { 63 | try { 64 | return await retrier(async () => this.client.sendStepActionEvent(in_), this.logger); 65 | } catch (e: any) { 66 | throw new HatchetError(e.message); 67 | } 68 | } 69 | 70 | async sendGroupKeyActionEvent(in_: GroupKeyActionEvent) { 71 | try { 72 | return await retrier(async () => this.client.sendGroupKeyActionEvent(in_), this.logger); 73 | } catch (e: any) { 74 | throw new HatchetError(e.message); 75 | } 76 | } 77 | 78 | async putOverridesData(in_: DeepPartial) { 79 | return retrier(async () => this.client.putOverridesData(in_), this.logger).catch((e) => { 80 | this.logger.warn(`Could not put overrides data: ${e.message}`); 81 | }); 82 | } 83 | 84 | async refreshTimeout(incrementTimeoutBy: string, stepRunId: string) { 85 | try { 86 | return this.client.refreshTimeout({ 87 | stepRunId, 88 | incrementTimeoutBy, 89 | }); 90 | } catch (e: any) { 91 | throw new HatchetError(e.message); 92 | } 93 | } 94 | 95 | async upsertWorkerLabels(workerId: string, labels: WorkerLabels) { 96 | try { 97 | return await this.client.upsertWorkerLabels({ 98 | workerId, 99 | labels: mapLabels(labels), 100 | }); 101 | } catch (e: any) { 102 | throw new HatchetError(e.message); 103 | } 104 | } 105 | } 106 | 107 | function mapLabels(in_: WorkerLabels): Record { 108 | return Object.entries(in_).reduce>( 109 | (acc, [key, value]) => ({ 110 | ...acc, 111 | [key]: { 112 | strValue: typeof value === 'string' ? value : undefined, 113 | intValue: typeof value === 'number' ? 
value : undefined, 114 | } as PbWorkerAffinityConfig, 115 | }), 116 | {} as Record 117 | ); 118 | } 119 | -------------------------------------------------------------------------------- /src/clients/dispatcher/heartbeat/heartbeat-controller.ts: -------------------------------------------------------------------------------- 1 | import { Logger } from '@hatchet/util/logger'; 2 | import { DispatcherClient as PbDispatcherClient } from '@hatchet/protoc/dispatcher'; 3 | import { Worker } from 'worker_threads'; 4 | import path from 'path'; 5 | import { runThreaded } from '@hatchet/util/thread-helper'; 6 | import { ClientConfig } from '../../hatchet-client'; 7 | import { DispatcherClient } from '../dispatcher-client'; 8 | 9 | export class Heartbeat { 10 | config: ClientConfig; 11 | client: PbDispatcherClient; 12 | workerId: string; 13 | logger: Logger; 14 | 15 | heartbeatWorker: Worker | undefined; 16 | 17 | constructor(client: DispatcherClient, workerId: string) { 18 | this.config = client.config; 19 | this.client = client.client; 20 | this.workerId = workerId; 21 | this.logger = client.config.logger(`HeartbeatController`, this.config.log_level); 22 | } 23 | 24 | async start() { 25 | if (!this.heartbeatWorker) { 26 | this.heartbeatWorker = runThreaded(path.join(__dirname, './heartbeat-worker'), { 27 | workerData: { 28 | config: { 29 | ...this.config, 30 | logger: undefined, 31 | }, 32 | workerId: this.workerId, 33 | }, 34 | }); 35 | } 36 | } 37 | 38 | async stop() { 39 | this.heartbeatWorker?.postMessage('stop'); 40 | this.heartbeatWorker?.terminate(); 41 | this.heartbeatWorker = undefined; 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /src/clients/dispatcher/heartbeat/heartbeat-worker.ts: -------------------------------------------------------------------------------- 1 | import { parentPort, workerData } from 'worker_threads'; 2 | import { Logger } from '@util/logger'; 3 | import { 4 | ClientConfig, 5 | 
HatchetLogger, 6 | addTokenMiddleware, 7 | channelFactory, 8 | } from '@hatchet/clients/hatchet-client'; 9 | import { DispatcherClient as PbDispatcherClient } from '@hatchet/protoc/dispatcher'; 10 | import { ConfigLoader } from '@hatchet/util/config-loader'; 11 | import { Status, createClientFactory } from 'nice-grpc'; 12 | import { DispatcherClient } from '../dispatcher-client'; 13 | 14 | const HEARTBEAT_INTERVAL = 4000; 15 | 16 | class HeartbeatWorker { 17 | heartbeatInterval: any; 18 | logger: Logger; 19 | client: PbDispatcherClient; 20 | workerId: string; 21 | timeLastHeartbeat = new Date().getTime(); 22 | 23 | constructor(config: ClientConfig, workerId: string) { 24 | this.workerId = workerId; 25 | 26 | this.logger = new HatchetLogger(`Heartbeat`, config.log_level); 27 | 28 | const credentials = ConfigLoader.createCredentials(config.tls_config); 29 | const clientFactory = createClientFactory().use(addTokenMiddleware(config.token)); 30 | 31 | const dispatcher = new DispatcherClient( 32 | { ...config, logger: (ctx, level) => new HatchetLogger(ctx, level) }, 33 | channelFactory(config, credentials), 34 | clientFactory 35 | ); 36 | 37 | this.client = dispatcher.client; 38 | } 39 | 40 | async start() { 41 | if (this.heartbeatInterval) { 42 | return; 43 | } 44 | 45 | const beat = async () => { 46 | try { 47 | this.logger.debug('Heartbeat sending...'); 48 | await this.client.heartbeat({ 49 | workerId: this.workerId, 50 | heartbeatAt: new Date(), 51 | }); 52 | const now = new Date().getTime(); 53 | 54 | const actualInterval = now - this.timeLastHeartbeat; 55 | 56 | if (actualInterval > HEARTBEAT_INTERVAL * 1.2) { 57 | this.logger.warn( 58 | `Heartbeat interval delay (${actualInterval}ms >> ${HEARTBEAT_INTERVAL}ms)` 59 | ); 60 | } 61 | 62 | this.logger.debug(`Heartbeat sent ${actualInterval}ms ago`); 63 | this.timeLastHeartbeat = now; 64 | } catch (e: any) { 65 | if (e.code === Status.UNIMPLEMENTED) { 66 | // break out of interval 67 | this.logger.error('Heartbeat not 
implemented, closing heartbeat'); 68 | this.stop(); 69 | return; 70 | } 71 | 72 | this.logger.error(`Failed to send heartbeat: ${e.message}`); 73 | } 74 | }; 75 | 76 | // start with a heartbeat 77 | await beat(); 78 | this.heartbeatInterval = setInterval(beat, HEARTBEAT_INTERVAL); 79 | } 80 | 81 | stop() { 82 | if (this.heartbeatInterval) { 83 | clearInterval(this.heartbeatInterval); 84 | this.heartbeatInterval = null; 85 | } 86 | } 87 | } 88 | 89 | const heartbeat = new HeartbeatWorker(workerData.config, workerData.workerId); 90 | heartbeat.start(); 91 | 92 | parentPort?.on('stop', () => { 93 | heartbeat.stop(); 94 | }); 95 | -------------------------------------------------------------------------------- /src/clients/event/event-client.test.ts: -------------------------------------------------------------------------------- 1 | import HatchetError from '@util/errors/hatchet-error'; 2 | import { DEFAULT_LOGGER } from '@clients/hatchet-client/hatchet-logger'; 3 | import { EventClient } from './event-client'; 4 | import { mockChannel, mockFactory } from '../hatchet-client/hatchet-client.test'; 5 | 6 | let client: EventClient; 7 | 8 | describe('EventClient', () => { 9 | it('should create a client', () => { 10 | const x = new EventClient( 11 | { 12 | token: 13 | 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJncnBjX2Jyb2FkY2FzdF9hZGRyZXNzIjoiMTI3LjAuMC4xOjgwODAiLCJzZXJ2ZXJfdXJsIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoiNzA3ZDA4NTUtODBhYi00ZTFmLWExNTYtZjFjNDU0NmNiZjUyIn0K.abcdef', 14 | host_port: 'HOST_PORT', 15 | tls_config: { 16 | cert_file: 'TLS_CERT_FILE', 17 | key_file: 'TLS_KEY_FILE', 18 | ca_file: 'TLS_ROOT_CA_FILE', 19 | server_name: 'TLS_SERVER_NAME', 20 | }, 21 | api_url: 'API_URL', 22 | tenant_id: 'tenantId', 23 | logger: DEFAULT_LOGGER, 24 | }, 25 | mockChannel, 26 | mockFactory 27 | ); 28 | 29 | expect(x).toBeDefined(); 30 | }); 31 | 32 | beforeEach(() => { 33 | client = new EventClient( 34 | { 35 | token: 36 | 
'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJncnBjX2Jyb2FkY2FzdF9hZGRyZXNzIjoiMTI3LjAuMC4xOjgwODAiLCJzZXJ2ZXJfdXJsIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoiNzA3ZDA4NTUtODBhYi00ZTFmLWExNTYtZjFjNDU0NmNiZjUyIn0K.abcdef', 37 | host_port: 'HOST_PORT', 38 | tls_config: { 39 | cert_file: 'TLS_CERT_FILE', 40 | key_file: 'TLS_KEY_FILE', 41 | ca_file: 'TLS_ROOT_CA_FILE', 42 | server_name: 'TLS_SERVER_NAME', 43 | }, 44 | api_url: 'API_URL', 45 | tenant_id: 'tenantId', 46 | logger: DEFAULT_LOGGER, 47 | }, 48 | mockChannel, 49 | mockFactory 50 | ); 51 | }); 52 | 53 | it('should push events', async () => { 54 | const clientSpy = jest.spyOn(client.client, 'push').mockResolvedValue({ 55 | tenantId: 'x', 56 | eventId: 'y', 57 | key: 'z', 58 | eventTimestamp: new Date(), 59 | payload: 'string', 60 | }); 61 | 62 | await client.push('type', { foo: 'bar' }); 63 | 64 | expect(clientSpy).toHaveBeenCalledWith({ 65 | key: 'type', 66 | payload: '{"foo":"bar"}', 67 | eventTimestamp: expect.any(Date), 68 | }); 69 | }); 70 | 71 | it('should throw an error when push fails', async () => { 72 | const clientSpy = jest.spyOn(client.client, 'push'); 73 | clientSpy.mockImplementation(() => { 74 | throw new Error('foo'); 75 | }); 76 | 77 | jest.spyOn(client, 'retrier').mockImplementation((fn, logger, retries, interval) => fn()); 78 | 79 | await expect(client.push('type', { foo: 'bar' })).rejects.toThrow(new HatchetError('foo')); 80 | }); 81 | 82 | it('should bulk push events', async () => { 83 | // Mock the bulkPush method 84 | const clientSpy = jest.spyOn(client.client, 'bulkPush').mockResolvedValue({ 85 | events: [ 86 | { 87 | tenantId: 'tenantId', 88 | eventId: 'y1', 89 | key: 'z1', 90 | eventTimestamp: new Date(), 91 | payload: 'string1', 92 | }, 93 | { 94 | tenantId: 'tenantId', 95 | eventId: 'y2', 96 | key: 'z2', 97 | eventTimestamp: new Date(), 98 | payload: 'string2', 99 | }, 100 | ], 101 | }); 102 | 103 | // Call bulkPush with an array of events 104 | const events = [ 105 | { payload: { foo: 
'bar1' }, additionalMetadata: { user_id: 'user1' } }, 106 | { payload: { foo: 'bar2' }, additionalMetadata: { user_id: 'user2' } }, 107 | ]; 108 | await client.bulkPush('type', events); 109 | 110 | // Verify the bulkPush method was called with the correct parameters 111 | expect(clientSpy).toHaveBeenCalledWith({ 112 | events: [ 113 | { 114 | key: 'type', 115 | payload: '{"foo":"bar1"}', 116 | eventTimestamp: expect.any(Date), 117 | additionalMetadata: '{"user_id":"user1"}', 118 | }, 119 | { 120 | key: 'type', 121 | payload: '{"foo":"bar2"}', 122 | eventTimestamp: expect.any(Date), 123 | additionalMetadata: '{"user_id":"user2"}', 124 | }, 125 | ], 126 | }); 127 | }); 128 | 129 | it('should throw an error when bulkPush fails', async () => { 130 | // Mock the bulkPush method to throw an error 131 | const clientSpy = jest.spyOn(client.client, 'bulkPush'); 132 | clientSpy.mockImplementation(() => { 133 | throw new Error('bulk error'); 134 | }); 135 | 136 | jest.spyOn(client, 'retrier').mockImplementation((fn, logger, retries, interval) => fn()); 137 | 138 | const events = [ 139 | { payload: { foo: 'bar1' }, additionalMetadata: { user_id: 'user1' } }, 140 | { payload: { foo: 'bar2' }, additionalMetadata: { user_id: 'user2' } }, 141 | ]; 142 | 143 | // Test that an error is thrown when bulkPush fails 144 | await expect(client.bulkPush('type', events)).rejects.toThrow(new HatchetError('bulk error')); 145 | }); 146 | }); 147 | -------------------------------------------------------------------------------- /src/clients/event/event-client.ts: -------------------------------------------------------------------------------- 1 | import { Channel, ClientFactory } from 'nice-grpc'; 2 | import { 3 | BulkPushEventRequest, 4 | EventsServiceClient, 5 | EventsServiceDefinition, 6 | PushEventRequest, 7 | } from '@hatchet/protoc/events/events'; 8 | import HatchetError from '@util/errors/hatchet-error'; 9 | import { ClientConfig } from '@clients/hatchet-client/client-config'; 10 | import 
{ Logger } from '@hatchet/util/logger';
import { retrier } from '@hatchet/util/retrier';

// eslint-disable-next-line no-shadow
export enum LogLevel {
  INFO = 'INFO',
  WARN = 'WARN',
  ERROR = 'ERROR',
  DEBUG = 'DEBUG',
}

/** Options shared by push/bulkPush. */
export interface PushEventOptions {
  // NOTE(review): type arguments were lost in extraction; Record<string, string>
  // matches the JSON-stringified metadata asserted in the tests — confirm.
  additionalMetadata?: Record<string, string>;
}

/** A single event payload plus optional per-event metadata for bulkPush. */
export interface EventWithMetadata<T> {
  payload: T;
  additionalMetadata?: Record<string, string>;
}

/**
 * gRPC client for pushing events, logs and stream chunks to the Hatchet
 * events service.
 */
export class EventClient {
  config: ClientConfig;
  client: EventsServiceClient;
  retrier: typeof retrier;

  logger: Logger;

  constructor(config: ClientConfig, channel: Channel, factory: ClientFactory) {
    this.config = config;
    this.client = factory.create(EventsServiceDefinition, channel);
    // NOTE(review): logger context reads `Dispatcher` — looks copy-pasted from
    // DispatcherClient; confirm whether it should be `Event`.
    this.logger = config.logger(`Dispatcher`, config.log_level);
    this.retrier = retrier;
  }

  /**
   * Pushes a single event.
   *
   * @param type - Event key; prefixed with the configured namespace, if any.
   * @param input - Payload; JSON-stringified onto the wire.
   * @param options - Optional metadata attached to the event.
   * @returns The push response from the server.
   * @throws HatchetError when the push (after retries) ultimately fails.
   */
  async push<T>(type: string, input: T, options: PushEventOptions = {}) {
    const namespacedType = `${this.config.namespace ?? ''}${type}`;

    const req: PushEventRequest = {
      key: namespacedType,
      payload: JSON.stringify(input),
      eventTimestamp: new Date(),
      additionalMetadata: options.additionalMetadata
        ? JSON.stringify(options.additionalMetadata)
        : undefined,
    };

    try {
      // Await the retried call: previously the unawaited promise escaped this
      // try/catch, so rejections were never converted to HatchetError and the
      // success log fired before the push actually completed.
      const e = await this.retrier(async () => this.client.push(req), this.logger);
      this.logger.info(`Event pushed: ${namespacedType}`);
      return e;
    } catch (e: any) {
      throw new HatchetError(e.message);
    }
  }

  /**
   * Pushes a batch of events under one key in a single request.
   * Per-event metadata takes precedence over the shared options.additionalMetadata.
   *
   * @param type - Event key; prefixed with the configured namespace, if any.
   * @param inputs - Payloads with optional per-event metadata.
   * @param options - Fallback metadata applied to events without their own.
   * @returns The bulk push response from the server.
   * @throws HatchetError when the bulk push (after retries) ultimately fails.
   */
  async bulkPush<T>(type: string, inputs: EventWithMetadata<T>[], options: PushEventOptions = {}) {
    const namespacedType = `${this.config.namespace ?? ''}${type}`;

    const events = inputs.map((input) => {
      // Per-event metadata wins over the shared default.
      const meta = input.additionalMetadata ?? options.additionalMetadata;
      return {
        key: namespacedType,
        payload: JSON.stringify(input.payload),
        eventTimestamp: new Date(),
        additionalMetadata: meta ? JSON.stringify(meta) : undefined,
      };
    });

    const req: BulkPushEventRequest = {
      events,
    };

    try {
      // Await so failures reject through the catch below (see push()).
      const res = await this.retrier(async () => this.client.bulkPush(req), this.logger);
      this.logger.info(`Bulk events pushed for type: ${namespacedType}`);
      return res;
    } catch (e: any) {
      throw new HatchetError(e.message);
    }
  }

  /**
   * Fire-and-forget: attaches a log line to a step run.
   * Messages over 1,000 characters are dropped with a warning.
   */
  putLog(stepRunId: string, log: string, level?: LogLevel) {
    const createdAt = new Date();

    if (log.length > 1_000) {
      this.logger.warn(`log is too long, skipping: ${log.length} characters`);
      return;
    }

    // fire and forget the log
    this.client
      .putLog({
        stepRunId,
        createdAt,
        message: log,
        level: level || LogLevel.INFO,
      })
      .catch((e: any) => {
        // log a warning, but this is not a fatal error
        this.logger.warn(`Could not put log: ${e.message}`);
      });
  }

  /**
   * Fire-and-forget: streams a chunk of output for a step run.
   * Strings are UTF-8 encoded; Uint8Array data is sent as-is.
   */
  putStream(stepRunId: string, data: string | Uint8Array) {
    const createdAt = new Date();

    let dataBytes: Uint8Array;
    if (typeof data === 'string') {
      dataBytes = new TextEncoder().encode(data);
    } else if (data instanceof Uint8Array) {
      dataBytes = data;
    } else {
      throw new Error('Invalid data type. Expected string or Uint8Array.');
    }

    retrier(
      async () =>
        this.client.putStreamEvent({
          stepRunId,
          createdAt,
          message: dataBytes,
        }),
      this.logger
    ).catch((e: any) => {
      // log a warning, but this is not a fatal error
      // (message previously said "put log" — copy-paste fixed)
      this.logger.warn(`Could not put stream event: ${e.message}`);
    });
  }
}
-------------------------------------------------------------------------------- /src/clients/hatchet-client/client-config.ts: --------------------------------------------------------------------------------
import { ChannelCredentials } from 'nice-grpc';
import { z } from 'zod';
import { Logger, LogLevel } from '@util/logger';

const ClientTLSConfigSchema = z.object({
  tls_strategy: z.enum(['tls', 'mtls', 'none']).optional(),
  cert_file: z.string().optional(),
  ca_file: z.string().optional(),
  key_file: z.string().optional(),
  server_name: z.string().optional(),
});

export const ClientConfigSchema = z.object({
  token: z.string(),
  tls_config: ClientTLSConfigSchema,
  host_port: z.string(),
  api_url: z.string(),
  log_level: z.enum(['OFF', 'DEBUG', 'INFO', 'WARN', 'ERROR']).optional(),
  tenant_id: z.string(),
  namespace: z.string().optional(),
});

export type LogConstructor = (context: string, logLevel?: LogLevel) => Logger;

export type ClientConfig = z.infer<typeof ClientConfigSchema> & {
  credentials?: ChannelCredentials;
} & { logger: LogConstructor };
export type ClientTLSConfig = z.infer<typeof ClientTLSConfigSchema>;
-------------------------------------------------------------------------------- /src/clients/hatchet-client/features/cron-client.test.ts: --------------------------------------------------------------------------------
import { CronRegex } from './cron-client';

describe('CronRegex', () => {
  it('should accept valid crons', () => {
    // Basic examples
    expect(CronRegex.test('35 20 * * 
1-5')).toBeTruthy(); // At 20:35 on every weekday (Monday through Friday) 7 | expect(CronRegex.test('0 0 * * *')).toBeTruthy(); // At 00:00 every day 8 | expect(CronRegex.test('*/15 * * * *')).toBeTruthy(); // Every 15 minutes 9 | expect(CronRegex.test('0 0 1,15 * *')).toBeTruthy(); // At 00:00 on the 1st and 15th of every month 10 | 11 | // Step values 12 | expect(CronRegex.test('0 */2 * * *')).toBeTruthy(); // Every 2 hours 13 | expect(CronRegex.test('0 0 */5 * *')).toBeTruthy(); // Every 5 days of the month 14 | 15 | // Day of week ranges and lists 16 | expect(CronRegex.test('0 0 * * 1,3,5')).toBeTruthy(); // At midnight on Monday, Wednesday, and Friday 17 | expect(CronRegex.test('30 18 * * 1-5')).toBeTruthy(); // At 18:30 on weekdays 18 | expect(CronRegex.test('45 17 * * 0,6')).toBeTruthy(); // At 17:45 on weekends (Sunday and Saturday) 19 | 20 | // Complex examples combining features 21 | expect(CronRegex.test('0 23 * * 5')).toBeTruthy(); // At 23:00 on Friday 22 | expect(CronRegex.test('45 17 1,15 * *')).toBeTruthy(); // At 17:45 on the 1st and 15th of every month 23 | }); 24 | }); 25 | -------------------------------------------------------------------------------- /src/clients/hatchet-client/features/cron-client.ts: -------------------------------------------------------------------------------- 1 | import { AdminClient } from '@hatchet/clients/admin'; 2 | import { Api } from '@hatchet/clients/rest'; 3 | import { CronWorkflows, CronWorkflowsList } from '@hatchet/clients/rest/generated/data-contracts'; 4 | import { z } from 'zod'; 5 | import { Workflow } from '@hatchet/workflow'; 6 | import { AxiosError } from 'axios'; 7 | import { ClientConfig } from '@hatchet/clients/hatchet-client/client-config'; 8 | import { Logger } from '@util/logger'; 9 | 10 | export const CronRegex = 11 | /^(\*|([0-9]|[1-5][0-9])(,([0-9]|[1-5][0-9]))*|([0-9]|[1-5][0-9])-([0-9]|[1-5][0-9])|\*\/([0-9]|[1-5][0-9])) 
(\*|([0-9]|1[0-9]|2[0-3])(,([0-9]|1[0-9]|2[0-3]))*|([0-9]|1[0-9]|2[0-3])-([0-9]|1[0-9]|2[0-3])|\*\/([0-9]|1[0-9]|2[0-3])) (\*|([1-9]|[12][0-9]|3[01])(,([1-9]|[12][0-9]|3[01]))*|([1-9]|[12][0-9]|3[01])-([1-9]|[12][0-9]|3[01])|\*\/([1-9]|[12][0-9]|3[01])) (\*|([1-9]|1[0-2])(,([1-9]|1[0-2]))*|([1-9]|1[0-2])-([1-9]|1[0-2])|\*\/([1-9]|1[0-2])) (\*|([0-7])(,([0-7]))*|([0-7])-([0-7])|\*\/([0-7]))$/; 12 | 13 | /** 14 | * Schema for creating a Cron Trigger. 15 | */ 16 | export const CreateCronTriggerSchema = z.object({ 17 | name: z.string(), 18 | expression: z.string().refine((val) => CronRegex.test(val), 'Invalid cron expression'), 19 | input: z.record(z.any()).optional(), 20 | additionalMetadata: z.record(z.string()).optional(), 21 | }); 22 | 23 | /** 24 | * Type representing the input for creating a Cron. 25 | */ 26 | export type CreateCronInput = z.infer; 27 | 28 | /** 29 | * Client for managing Cron Triggers. 30 | */ 31 | export class CronClient { 32 | private logger: Logger; 33 | 34 | /** 35 | * Initializes a new instance of CronClient. 36 | * @param tenantId - The tenant identifier. 37 | * @param config - Client configuration settings. 38 | * @param api - API instance for REST interactions. 39 | * @param adminClient - Admin client for administrative operations. 40 | */ 41 | constructor( 42 | private readonly tenantId: string, 43 | private readonly config: ClientConfig, 44 | private readonly api: Api, 45 | private readonly adminClient: AdminClient 46 | ) { 47 | this.logger = config.logger('Cron', this.config.log_level); 48 | } 49 | 50 | /** 51 | * Retrieves the Cron ID from a CronWorkflows object or a string. 52 | * @param cron - The CronWorkflows object or Cron ID as a string. 53 | * @returns The Cron ID as a string. 54 | */ 55 | private getCronId(cron: CronWorkflows | string): string { 56 | return typeof cron === 'string' ? cron : cron.metadata.id; 57 | } 58 | 59 | /** 60 | * Creates a new Cron workflow. 
61 | * @param workflow - The workflow identifier or Workflow object. 62 | * @param cron - The input data for creating the Cron Trigger. 63 | * @returns A promise that resolves to the created CronWorkflows object. 64 | * @throws Will throw an error if the input is invalid or the API call fails. 65 | */ 66 | async create(workflow: string | Workflow, cron: CreateCronInput): Promise { 67 | const workflowId = typeof workflow === 'string' ? workflow : workflow.id; 68 | 69 | // Validate cron input with zod schema 70 | try { 71 | const parsedCron = CreateCronTriggerSchema.parse(cron); 72 | const response = await this.api.cronWorkflowTriggerCreate(this.tenantId, workflowId, { 73 | cronName: parsedCron.name, 74 | cronExpression: parsedCron.expression, 75 | input: parsedCron.input ?? {}, 76 | additionalMetadata: parsedCron.additionalMetadata ?? {}, 77 | }); 78 | return response.data; 79 | } catch (err) { 80 | if (err instanceof z.ZodError) { 81 | throw new Error(`Invalid cron input: ${err.message}`); 82 | } 83 | 84 | if (err instanceof AxiosError) { 85 | throw new Error(JSON.stringify(err.response?.data.errors)); 86 | } 87 | 88 | throw err; 89 | } 90 | } 91 | 92 | /** 93 | * Deletes an existing Cron Trigger. 94 | * @param cron - The Cron Trigger ID as a string or CronWorkflows object. 95 | * @returns A promise that resolves when the Cron Trigger is deleted. 96 | */ 97 | async delete(cron: string | CronWorkflows): Promise { 98 | const cronId = this.getCronId(cron); 99 | await this.api.workflowCronDelete(this.tenantId, cronId); 100 | } 101 | 102 | /** 103 | * Lists all Cron Triggers based on the provided query parameters. 104 | * @param query - Query parameters for listing Cron Triggers. 105 | * @returns A promise that resolves to a CronWorkflowsList object. 
106 | */ 107 | async list(query: Parameters[1]): Promise { 108 | const response = await this.api.cronWorkflowList(this.tenantId, query); 109 | return response.data; 110 | } 111 | 112 | /** 113 | * Retrieves a specific Cron Trigger by its ID. 114 | * @param cron - The Cron Trigger ID as a string or CronWorkflows object. 115 | * @returns A promise that resolves to the CronWorkflows object. 116 | */ 117 | async get(cron: string | CronWorkflows): Promise { 118 | const cronId = this.getCronId(cron); 119 | const response = await this.api.workflowCronGet(this.tenantId, cronId); 120 | return response.data; 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /src/clients/hatchet-client/features/schedule-client.ts: -------------------------------------------------------------------------------- 1 | import { AdminClient } from '@hatchet/clients/admin'; 2 | import { Api } from '@hatchet/clients/rest'; 3 | import { 4 | ScheduledWorkflows, 5 | ScheduledWorkflowsList, 6 | } from '@hatchet/clients/rest/generated/data-contracts'; 7 | import { z } from 'zod'; 8 | import { Workflow } from '@hatchet/workflow'; 9 | import { AxiosError } from 'axios'; 10 | import { ClientConfig } from '@hatchet/clients/hatchet-client/client-config'; 11 | import { Logger } from '@util/logger'; 12 | 13 | /** 14 | * Schema for creating a Scheduled Run Trigger. 15 | */ 16 | export const CreateScheduledRunTriggerSchema = z.object({ 17 | triggerAt: z.coerce.date(), 18 | input: z.record(z.any()).optional(), 19 | additionalMetadata: z.record(z.string()).optional(), 20 | }); 21 | 22 | /** 23 | * Type representing the input for creating a Cron. 24 | */ 25 | export type CreateScheduledRunInput = z.infer; 26 | 27 | /** 28 | * Client for managing Scheduled Runs. 29 | */ 30 | export class ScheduleClient { 31 | private logger: Logger; 32 | 33 | /** 34 | * Initializes a new instance of ScheduleClient. 35 | * @param tenantId - The tenant identifier. 
36 | * @param config - Client configuration settings. 37 | * @param api - API instance for REST interactions. 38 | * @param adminClient - Admin client for administrative operations. 39 | */ 40 | constructor( 41 | private readonly tenantId: string, 42 | private readonly config: ClientConfig, 43 | private readonly api: Api, 44 | private readonly adminClient: AdminClient 45 | ) { 46 | this.logger = config.logger('Scheduled Run', this.config.log_level); 47 | } 48 | 49 | /** 50 | * Retrieves the Scheduled Run ID from a ScheduledRun object or a string. 51 | * @param scheduledRun - The ScheduledRun object or Scheduled Run ID as a string. 52 | * @returns The Scheduled Run ID as a string. 53 | */ 54 | private getScheduledRunId(scheduledRun: ScheduledWorkflows | string): string { 55 | return typeof scheduledRun === 'string' ? scheduledRun : scheduledRun.metadata.id; 56 | } 57 | 58 | /** 59 | * Creates a new Scheduled Run. 60 | * @param workflow - The workflow name or Workflow object. 61 | * @param scheduledRun - The input data for creating the Scheduled Run. 62 | * @returns A promise that resolves to the created ScheduledWorkflows object. 63 | * @throws Will throw an error if the input is invalid or the API call fails. 64 | */ 65 | async create( 66 | workflow: string | Workflow, 67 | cron: CreateScheduledRunInput 68 | ): Promise { 69 | const workflowId = typeof workflow === 'string' ? workflow : workflow.id; 70 | 71 | // Validate cron input with zod schema 72 | try { 73 | const parsedCron = CreateScheduledRunTriggerSchema.parse(cron); 74 | const response = await this.api.scheduledWorkflowRunCreate(this.tenantId, workflowId, { 75 | input: parsedCron.input ?? {}, 76 | additionalMetadata: parsedCron.additionalMetadata ?? 
{}, 77 | triggerAt: parsedCron.triggerAt.toISOString(), 78 | }); 79 | return response.data; 80 | } catch (err) { 81 | if (err instanceof z.ZodError) { 82 | throw new Error(`Invalid cron input: ${err.message}`); 83 | } 84 | 85 | if (err instanceof AxiosError) { 86 | throw new Error(JSON.stringify(err.response?.data.errors)); 87 | } 88 | 89 | throw err; 90 | } 91 | } 92 | 93 | /** 94 | * Deletes an existing Scheduled Run. 95 | * @param scheduledRun - The Scheduled Run ID as a string or ScheduledWorkflows object. 96 | * @returns A promise that resolves when the Scheduled Run is deleted. 97 | */ 98 | async delete(scheduledRun: string | ScheduledWorkflows): Promise { 99 | const scheduledRunId = this.getScheduledRunId(scheduledRun); 100 | await this.api.workflowScheduledDelete(this.tenantId, scheduledRunId); 101 | } 102 | 103 | /** 104 | * Lists all Cron Triggers based on the provided query parameters. 105 | * @param query - Query parameters for listing Scheduled Runs. 106 | * @returns A promise that resolves to a ScheduledWorkflowsList object. 107 | */ 108 | async list( 109 | query: Parameters[1] 110 | ): Promise { 111 | const response = await this.api.workflowScheduledList(this.tenantId, query); 112 | return response.data; 113 | } 114 | 115 | /** 116 | * Retrieves a specific Scheduled Run by its ID. 117 | * @param scheduledRun - The Scheduled Run ID as a string or ScheduledWorkflows object. 118 | * @returns A promise that resolves to the ScheduledWorkflows object. 
119 | */ 120 | async get(scheduledRun: string | ScheduledWorkflows): Promise { 121 | const scheduledRunId = this.getScheduledRunId(scheduledRun); 122 | const response = await this.api.workflowScheduledGet(this.tenantId, scheduledRunId); 123 | return response.data; 124 | } 125 | } 126 | -------------------------------------------------------------------------------- /src/clients/hatchet-client/fixtures/.hatchet-invalid.yaml: -------------------------------------------------------------------------------- 1 | tenant_id: 'TENANT_ID_YAML' 2 | host_port: 'HOST_PORT_YAML' 3 | tls_config: 4 | cert_file: 'TLS_CERT_FILE_YAML' 5 | key_file: 'TLS_KEY_FILE_YAML' 6 | ca_file: 'TLS_ROOT_CA_FILE_YAML' 7 | -------------------------------------------------------------------------------- /src/clients/hatchet-client/fixtures/.hatchet.yaml: -------------------------------------------------------------------------------- 1 | token: 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJncnBjX2Jyb2FkY2FzdF9hZGRyZXNzIjoiMTI3LjAuMC4xOjgwODAiLCJzZXJ2ZXJfdXJsIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoiNzA3ZDA4NTUtODBhYi00ZTFmLWExNTYtZjFjNDU0NmNiZjUyIn0K.abcdef' 2 | tenant_id: 'TENANT_ID_YAML' 3 | host_port: 'HOST_PORT_YAML' 4 | tls_config: 5 | cert_file: 'TLS_CERT_FILE_YAML' 6 | key_file: 'TLS_KEY_FILE_YAML' 7 | ca_file: 'TLS_ROOT_CA_FILE_YAML' 8 | server_name: 'TLS_SERVER_NAME_YAML' 9 | -------------------------------------------------------------------------------- /src/clients/hatchet-client/hatchet-client.test.ts: -------------------------------------------------------------------------------- 1 | import { ChannelCredentials, createChannel, createClientFactory } from 'nice-grpc'; 2 | import { HatchetClient } from './hatchet-client'; 3 | 4 | export const mockChannel = createChannel('localhost:50051'); 5 | export const mockFactory = createClientFactory(); 6 | 7 | describe('Client', () => { 8 | beforeEach(() => { 9 | process.env.HATCHET_CLIENT_TOKEN = 10 | 
      'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJncnBjX2Jyb2FkY2FzdF9hZGRyZXNzIjoiMTI3LjAuMC4xOjgwODAiLCJzZXJ2ZXJfdXJsIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoiNzA3ZDA4NTUtODBhYi00ZTFmLWExNTYtZjFjNDU0NmNiZjUyIn0K.abcdef';
  });

  it('should load from environment variables', () => {
    const hatchet = new HatchetClient(
      {
        token:
          'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJncnBjX2Jyb2FkY2FzdF9hZGRyZXNzIjoiMTI3LjAuMC4xOjgwODAiLCJzZXJ2ZXJfdXJsIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoiNzA3ZDA4NTUtODBhYi00ZTFmLWExNTYtZjFjNDU0NmNiZjUyIn0K.abcdef',
        host_port: '127.0.0.1:8080',
        log_level: 'OFF',
        namespace: '',
        tls_config: {
          cert_file: 'TLS_CERT_FILE',
          key_file: 'TLS_KEY_FILE',
          ca_file: 'TLS_ROOT_CA_FILE',
          server_name: 'TLS_SERVER_NAME',
          tls_strategy: 'tls',
        },
      },
      {
        credentials: ChannelCredentials.createInsecure(),
      }
    );

    // api_url and tenant_id are not passed above — they are derived from the
    // token payload (server_url / sub) during config resolution.
    expect(hatchet.config).toEqual(
      expect.objectContaining({
        token:
          'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJncnBjX2Jyb2FkY2FzdF9hZGRyZXNzIjoiMTI3LjAuMC4xOjgwODAiLCJzZXJ2ZXJfdXJsIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoiNzA3ZDA4NTUtODBhYi00ZTFmLWExNTYtZjFjNDU0NmNiZjUyIn0K.abcdef',
        host_port: '127.0.0.1:8080',
        log_level: 'OFF',
        namespace: '',
        api_url: 'http://localhost:8080',
        tenant_id: '707d0855-80ab-4e1f-a156-f1c4546cbf52',
        tls_config: {
          tls_strategy: 'tls',
          cert_file: 'TLS_CERT_FILE',
          key_file: 'TLS_KEY_FILE',
          ca_file: 'TLS_ROOT_CA_FILE',
          server_name: 'TLS_SERVER_NAME',
        },
      })
    );
  });

  it('should throw an error if the config param is invalid', () => {
    // No token is provided and HATCHET_CLIENT_TOKEN alone cannot satisfy the
    // schema with this partial tls_config, so construction must throw.
    expect(
      () =>
        new HatchetClient({
          host_port: 'HOST_PORT',
          tls_config: {
            tls_strategy: 'tls',
            cert_file: 'TLS_CERT_FILE',
            key_file: 'TLS_KEY_FILE',
            ca_file: 'TLS_ROOT_CA_FILE',
            // @ts-ignore
            server_name: undefined,
          },
        })
    ).toThrow();
  });

  it('should favor config param over yaml over env vars ', () => {
    // host_port comes from the yaml fixture, tls_config from the param,
    // token from yaml — exercising the documented precedence order.
    const hatchet = new HatchetClient(
      {
        tls_config: {
          cert_file: 'TLS_CERT_FILE',
          key_file: 'TLS_KEY_FILE',
          ca_file: 'TLS_ROOT_CA_FILE',
          server_name: 'TLS_SERVER_NAME',
          tls_strategy: 'tls',
        },
      },
      {
        config_path: './fixtures/.hatchet.yaml',
        credentials: ChannelCredentials.createInsecure(),
      }
    );

    expect(hatchet.config).toEqual(
      expect.objectContaining({
        token:
          'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJncnBjX2Jyb2FkY2FzdF9hZGRyZXNzIjoiMTI3LjAuMC4xOjgwODAiLCJzZXJ2ZXJfdXJsIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoiNzA3ZDA4NTUtODBhYi00ZTFmLWExNTYtZjFjNDU0NmNiZjUyIn0K.abcdef',
        host_port: 'HOST_PORT_YAML',
        log_level: 'INFO',
        namespace: '',
        api_url: 'http://localhost:8080',
        tenant_id: '707d0855-80ab-4e1f-a156-f1c4546cbf52',
        tls_config: {
          tls_strategy: 'tls',
          cert_file: 'TLS_CERT_FILE',
          key_file: 'TLS_KEY_FILE',
          ca_file: 'TLS_ROOT_CA_FILE',
          server_name: 'TLS_SERVER_NAME',
        },
      })
    );
  });

  describe('Worker', () => {
    let hatchet: HatchetClient;

    beforeEach(() => {
      hatchet = new HatchetClient(
        {
          token:
            'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJncnBjX2Jyb2FkY2FzdF9hZGRyZXNzIjoiMTI3LjAuMC4xOjgwODAiLCJzZXJ2ZXJfdXJsIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoiNzA3ZDA4NTUtODBhYi00ZTFmLWExNTYtZjFjNDU0NmNiZjUyIn0K.abcdef',
          host_port: 'HOST_PORT',
          log_level: 'OFF',
          tls_config: {
            cert_file: 'TLS_CERT_FILE',
            key_file: 'TLS_KEY_FILE',
            ca_file: 'TLS_ROOT_CA_FILE',
            server_name: 'TLS_SERVER_NAME',
          },
        },
        {
          credentials: ChannelCredentials.createInsecure(),
        }
      );
    });

    describe('run', () => {
      // Disabled (xit): run() is deprecated and starts a real worker.
      xit('should start a worker', () => {
        const worker = hatchet.run('workflow1');

        expect(worker).toBeDefined();
      });
    });

    describe('worker', () => {
      it('should start a worker', () => {
        const worker = hatchet.worker('workflow1');

        expect(worker).toBeDefined();
      });
    });
  });
});
-------------------------------------------------------------------------------- /src/clients/hatchet-client/hatchet-client.ts: --------------------------------------------------------------------------------
import { z } from 'zod';
import { ConfigLoader } from '@util/config-loader';
import { EventClient } from '@clients/event/event-client';
import { DispatcherClient } from '@clients/dispatcher/dispatcher-client';
import { AdminClient } from '@clients/admin/admin-client';
import {
  CallOptions,
  ChannelCredentials,
  ClientMiddlewareCall,
  createChannel,
  createClientFactory,
  Metadata,
} from 'nice-grpc';
import { Workflow } from '@hatchet/workflow';
import { Worker, WorkerOpts } from '@clients/worker';
import { AxiosRequestConfig } from 'axios';
import { Logger } from '@util/logger';
import { DEFAULT_LOGGER } from '@clients/hatchet-client/hatchet-logger';
import { ClientConfig, ClientConfigSchema } from './client-config';
import { ListenerClient } from '../listener/listener-client';
import { Api } from '../rest/generated/Api';
import api from '../rest';
import { CronClient } from './features/cron-client';
import { ScheduleClient } from './features/schedule-client';

export interface HatchetClientOptions {
  config_path?: string;
  credentials?: ChannelCredentials;
}

// Builds a gRPC channel with keepalive tuned for long-lived streams.
export const channelFactory = (config: ClientConfig, credentials: ChannelCredentials) =>
  createChannel(config.host_port, credentials, {
    'grpc.ssl_target_name_override': config.tls_config.server_name,
    'grpc.keepalive_timeout_ms': 60 * 1000,
    'grpc.client_idle_timeout_ms': 60 * 1000,
    // Send keepalive pings every 10 seconds, default is 2 hours.
    'grpc.keepalive_time_ms': 10 * 1000,
    // Allow keepalive pings when there are no gRPC calls.
    'grpc.keepalive_permit_without_calls': 1,
  });

/**
 * Client middleware that attaches `authorization: bearer <token>` metadata to
 * every unary and streaming gRPC call.
 * NOTE(review): the type parameters were lost in extraction and reconstructed
 * here — confirm against nice-grpc's ClientMiddleware signature.
 */
export const addTokenMiddleware = (token: string) =>
  async function* _<Request, Response>(
    call: ClientMiddlewareCall<Request, Response>,
    options: CallOptions
  ) {
    const optionsWithAuth: CallOptions = {
      ...options,
      metadata: new Metadata({ authorization: `bearer ${token}` }),
    };

    if (!call.responseStream) {
      // Unary / client-streaming: delegate and return the single response.
      const response = yield* call.next(call.request, optionsWithAuth);

      return response;
    }

    // Server-streaming: forward each response as it arrives.
    for await (const response of call.next(call.request, optionsWithAuth)) {
      yield response;
    }

    return undefined;
  };

/**
 * Top-level SDK entry point: resolves configuration, wires up the gRPC and
 * REST clients, and exposes the feature clients (cron, schedule) plus worker
 * factories.
 */
export class HatchetClient {
  config: ClientConfig;
  credentials: ChannelCredentials;
  event: EventClient;
  dispatcher: DispatcherClient;
  admin: AdminClient;
  api: Api;
  listener: ListenerClient;
  tenantId: string;

  logger: Logger;

  cron: CronClient;
  schedule: ScheduleClient;
  constructor(
    config?: Partial<ClientConfig>,
    options?: HatchetClientOptions,
    axiosOpts?: AxiosRequestConfig
  ) {
    // Initializes a new Client instance.
    // Loads config in the following order: config param > yaml file > env vars

    const loaded = ConfigLoader.loadClientConfig(config, {
      path: options?.config_path,
    });

    try {
      const valid = ClientConfigSchema.parse(loaded);

      // The logger constructor is not part of the zod schema; fall back to the
      // built-in HatchetLogger when the caller does not supply one.
      let logConstructor = config?.logger;

      if (logConstructor == null) {
        logConstructor = DEFAULT_LOGGER;
      }

      this.config = {
        ...valid,
        logger: logConstructor,
      };
    } catch (e) {
      if (e instanceof z.ZodError) {
        throw new Error(`Invalid client config: ${e.message}`);
      }
      throw e;
    }

    // Explicit credentials (mainly for tests) win over TLS-config-derived ones.
    this.credentials =
      options?.credentials ?? ConfigLoader.createCredentials(this.config.tls_config);

    const clientFactory = createClientFactory().use(addTokenMiddleware(this.config.token));

    this.tenantId = this.config.tenant_id;
    this.api = api(this.config.api_url, this.config.token, axiosOpts);
    // NOTE(review): each sub-client gets its own channel via channelFactory —
    // presumably intentional (stream isolation); confirm it shouldn't be shared.
    this.event = new EventClient(
      this.config,
      channelFactory(this.config, this.credentials),
      clientFactory
    );
    this.dispatcher = new DispatcherClient(
      this.config,
      channelFactory(this.config, this.credentials),
      clientFactory
    );
    this.listener = new ListenerClient(
      this.config,
      channelFactory(this.config, this.credentials),
      clientFactory,
      this.api
    );
    this.admin = new AdminClient(
      this.config,
      channelFactory(this.config, this.credentials),
      clientFactory,
      this.api,
      this.tenantId,
      this.listener
    );

    this.logger = this.config.logger('HatchetClient', this.config.log_level);
    this.logger.info(`Initialized HatchetClient`);

    // Feature Clients
    this.cron = new CronClient(this.tenantId, this.config, this.api, this.admin);
    this.schedule = new ScheduleClient(this.tenantId, this.config, this.api, this.admin);
  }

  /** Convenience factory equivalent to `new HatchetClient(...)`. */
  static init(
    config?: Partial<ClientConfig>,
    options?: HatchetClientOptions,
    axiosConfig?: AxiosRequestConfig
  ): HatchetClient {
    return new HatchetClient(config, options, axiosConfig);
  }

  // @deprecated
  async run(workflow: string | Workflow): Promise<Worker> {
    this.logger.warn(
      'HatchetClient.run is deprecated and will be removed in a future release. Use HatchetClient.worker and Worker.start instead.'
    );
    const worker = await this.worker(workflow);
    worker.start();
    return worker;
  }

  /**
   * Creates a Worker (registering the workflow when a Workflow object is
   * passed). Passing a number as `opts` is the deprecated maxRuns form.
   * NOTE(review): the Omit type arguments were lost in extraction and
   * reconstructed as Omit<WorkerOpts, 'name'> — confirm.
   */
  async worker(
    workflow: string | Workflow,
    opts?: Omit<WorkerOpts, 'name'> | number
  ): Promise<Worker> {
    const name = typeof workflow === 'string' ? workflow : workflow.id;

    let options: WorkerOpts = {
      name,
    };

    if (typeof opts === 'number') {
      this.logger.warn(
        '@deprecated maxRuns param is deprecated and will be removed in a future release in favor of WorkerOpts'
      );
      options = { ...options, maxRuns: opts };
    } else {
      options = { ...options, ...opts };
    }

    const worker = new Worker(this, options);

    if (typeof workflow !== 'string') {
      await worker.registerWorkflow(workflow);
      return worker;
    }

    return worker;
  }

  /** Returns an HTTP handler serving the given workflows as webhook workers. */
  webhooks(workflows: Workflow[]) {
    const worker = new Worker(this, {
      name: 'webhook-worker',
    });

    return worker.getHandler(workflows);
  }
}
-------------------------------------------------------------------------------- /src/clients/hatchet-client/hatchet-logger.ts: --------------------------------------------------------------------------------
import { Logger, LogLevel, LogLevelEnum } from '@util/logger';

export const DEFAULT_LOGGER = (context: string, logLevel?: LogLevel) =>
  new HatchetLogger(context, logLevel);

/**
 * Default console logger: filters by level and prints an ANSI-colored line
 * tagged with pid, timestamp, level and context.
 */
export class HatchetLogger implements Logger {
  private logLevel: LogLevel;
  private context: string;

  constructor(context: string, logLevel: LogLevel = 'INFO') {
    this.logLevel = logLevel;
    this.context = context;
  }

  private log(level: LogLevel, message: string, color?: string): void {
    if (LogLevelEnum[level] >= LogLevelEnum[this.logLevel]) {
      const time = new Date().toLocaleString('en-US', {
        month: '2-digit',
        day: '2-digit',
        year: '2-digit',
        hour: '2-digit',
        minute: '2-digit',
        second: '2-digit',
      });
      // NOTE(review): when `color` is undefined, `${color && ...}` interpolates
      // the literal string "undefined" into the line — confirm intended.
      // eslint-disable-next-line no-console
      console.log(
        `🪓 ${process.pid} | ${time} ${color && `\x1b[${color}m`} [${level}/${this.context}] ${message}\x1b[0m`
      );
    }
  }

  // DEBUG rendered magenta (ANSI 35).
  debug(message: string): void {
    this.log('DEBUG', message, '35');
  }

  info(message: string): void {
    this.log('INFO', message);
  }

  // INFO-level message rendered green (ANSI 32).
  green(message: string): void {
    this.log('INFO', message, '32');
  }

  // NOTE(review): when `error` is omitted the template appends the literal
  // "undefined" to the message — confirm intended.
  warn(message: string, error?: Error): void {
    this.log('WARN', `${message} ${error}`, '93');
  }

  error(message: string, error?: Error): void {
    this.log('ERROR', `${message} ${error}`, '91');
  }
}
-------------------------------------------------------------------------------- /src/clients/hatchet-client/index.ts: --------------------------------------------------------------------------------
export * from './hatchet-client';
export * from './client-config';
export { HatchetLogger } from '@clients/hatchet-client/hatchet-logger';
-------------------------------------------------------------------------------- /src/clients/listener/child-listener-client.ts: --------------------------------------------------------------------------------
// eslint-disable-next-line max-classes-per-file
import { EventEmitter, on } from 'events';
import {
  WorkflowRunEvent,
  SubscribeToWorkflowRunsRequest,
  WorkflowRunEventType,
} from '@hatchet/protoc/dispatcher';
import { isAbortError } from 'abort-controller-x';
import sleep from '@hatchet/util/sleep';
import { ListenerClient } from './listener-client';

/**
 * One subscription to a single workflow run. The pooled listener delivers
 * matching events onto `responseEmitter`; consumers read them via `stream()`.
 */
export class Streamable {
  listener: AsyncIterable<WorkflowRunEvent>;
  id: string;

  responseEmitter = new EventEmitter();

  constructor(listener: AsyncIterable<WorkflowRunEvent>, id: string) {
    this.listener = listener;
    this.id = id;
  }

  /**
   * Yields each event emitted for this run; never terminates on its own.
   * NOTE(review): events emitted between `yield` and re-arming `once` are
   * dropped — presumably acceptable for this event rate; confirm.
   */
  async *stream(): AsyncGenerator<WorkflowRunEvent> {
    while (true) {
      const req: WorkflowRunEvent = await new Promise((resolve) => {
        this.responseEmitter.once('response', resolve);
      });
      yield req;
    }
  }
}

/**
 * Multiplexes many workflow-run subscriptions over one gRPC
 * subscribeToWorkflowRuns stream, reconnecting with exponential backoff.
 */
export class GrpcPooledListener {
  listener: AsyncIterable<WorkflowRunEvent> | undefined;
  requestEmitter = new EventEmitter();
  signal: AbortController = new AbortController();
  client: ListenerClient;

  // Live subscriptions keyed by workflowRunId.
  subscribers: Record<string, Streamable> = {};
  onFinish: () => void = () => {};

  constructor(client: ListenerClient, onFinish: () => void) {
    this.client = client;
    this.init();
    this.onFinish = onFinish;
  }

  private async init(retries = 0) {
    let retryCount = retries;
    const MAX_RETRY_INTERVAL = 5000; // 5 seconds in milliseconds
    const BASE_RETRY_INTERVAL = 100; // 0.1 seconds in milliseconds

    if (retries > 0) {
      // Exponential backoff capped at MAX_RETRY_INTERVAL.
      const backoffTime = Math.min(BASE_RETRY_INTERVAL * 2 ** (retries - 1), MAX_RETRY_INTERVAL);
      this.client.logger.info(`Retrying in ... ${backoffTime / 1000} seconds`);
      await sleep(backoffTime);
    }

    try {
      this.client.logger.debug('Initializing child-listener');

      this.signal = new AbortController();
      this.listener = this.client.client.subscribeToWorkflowRuns(this.request(), {
        signal: this.signal.signal,
      });

      // After a reconnect, re-send subscribe requests for all live subscribers.
      if (retries > 0) setTimeout(() => this.replayRequests(), 100);

      for await (const event of this.listener) {
        retryCount = 0; // any event proves the stream is healthy again

        const emitter = this.subscribers[event.workflowRunId];
        if (emitter) {
          emitter.responseEmitter.emit('response', event);
          if (event.eventType === WorkflowRunEventType.WORKFLOW_RUN_EVENT_TYPE_FINISHED) {
            delete this.subscribers[event.workflowRunId];
          }
        }
      }

      this.client.logger.debug('Child listener finished');
    } catch (e: any) {
      if (isAbortError(e)) {
        this.client.logger.debug('Child Listener aborted');
        // NOTE(review): this `return` still executes the finally block below,
        // so the listener restarts even after a deliberate abort — confirm.
        return;
      }
      this.client.logger.error(`Error in child-listener: ${e.message}`);
    } finally {
      // it is possible the server hangs up early,
      // restart the listener if we still have subscribers
      // NOTE(review): the subscriber count is only logged — the restart below is
      // unconditional, and `onFinish` is never invoked from this loop; confirm.
      this.client.logger.debug(
        `Child listener loop exited with ${Object.keys(this.subscribers).length} subscribers`
      );
      this.client.logger.debug(`Restarting child listener retry ${retryCount + 1}`);
      this.init(retryCount + 1);
    }
  }

  /** Registers a run and emits a subscribe request onto the request stream. */
  subscribe(request: SubscribeToWorkflowRunsRequest) {
    if (!this.listener) throw new Error('listener not initialized');

    this.subscribers[request.workflowRunId] = new Streamable(this.listener, request.workflowRunId);
    this.requestEmitter.emit('subscribe', request);
    return this.subscribers[request.workflowRunId];
  }

  /** Re-emits a subscribe request for every current subscriber (used after reconnect). */
  replayRequests() {
    const subs = Object.values(this.subscribers);
    this.client.logger.debug(`Replaying ${subs.length} requests...`);

    for (const subscriber of subs) {
      this.requestEmitter.emit('subscribe', { workflowRunId: subscriber.id });
    }
  }

  /** Bridges 'subscribe' emitter events into the async-iterable gRPC request stream. */
  private async *request(): AsyncIterable<SubscribeToWorkflowRunsRequest> {
    for await (const e of on(this.requestEmitter, 'subscribe')) {
      yield e[0];
    }
  }
}
-------------------------------------------------------------------------------- /src/clients/listener/listener-client.ts: --------------------------------------------------------------------------------
// eslint-disable-next-line max-classes-per-file
import { Channel, ClientFactory, Status } from 'nice-grpc';
import { EventEmitter, on } from 'events';
import {
  DispatcherClient as PbDispatcherClient,
  DispatcherDefinition,
  ResourceEventType,
  ResourceType,
  DispatcherClient,
  WorkflowEvent,
} from '@hatchet/protoc/dispatcher';
import { ClientConfig } from '@clients/hatchet-client/client-config';
import HatchetError from '@util/errors/hatchet-error';
import { Logger } from '@hatchet/util/logger';
import sleep from '@hatchet/util/sleep';
import { Api } from '../rest';
import { WorkflowRunStatus } from '../rest/generated/data-contracts';
import { GrpcPooledListener } from
'./child-listener-client'; 19 | 20 | const DEFAULT_EVENT_LISTENER_RETRY_INTERVAL = 5; // seconds 21 | const DEFAULT_EVENT_LISTENER_RETRY_COUNT = 5; 22 | const DEFAULT_EVENT_LISTENER_POLL_INTERVAL = 5000; // milliseconds 23 | 24 | // eslint-disable-next-line no-shadow 25 | export enum RunEventType { 26 | STEP_RUN_EVENT_TYPE_STARTED = 'STEP_RUN_EVENT_TYPE_STARTED', 27 | STEP_RUN_EVENT_TYPE_COMPLETED = 'STEP_RUN_EVENT_TYPE_COMPLETED', 28 | STEP_RUN_EVENT_TYPE_FAILED = 'STEP_RUN_EVENT_TYPE_FAILED', 29 | STEP_RUN_EVENT_TYPE_CANCELLED = 'STEP_RUN_EVENT_TYPE_CANCELLED', 30 | STEP_RUN_EVENT_TYPE_TIMED_OUT = 'STEP_RUN_EVENT_TYPE_TIMED_OUT', 31 | STEP_RUN_EVENT_TYPE_STREAM = 'STEP_RUN_EVENT_TYPE_STREAM', 32 | WORKFLOW_RUN_EVENT_TYPE_STARTED = 'WORKFLOW_RUN_EVENT_TYPE_STARTED', 33 | WORKFLOW_RUN_EVENT_TYPE_COMPLETED = 'WORKFLOW_RUN_EVENT_TYPE_COMPLETED', 34 | WORKFLOW_RUN_EVENT_TYPE_FAILED = 'WORKFLOW_RUN_EVENT_TYPE_FAILED', 35 | WORKFLOW_RUN_EVENT_TYPE_CANCELLED = 'WORKFLOW_RUN_EVENT_TYPE_CANCELLED', 36 | WORKFLOW_RUN_EVENT_TYPE_TIMED_OUT = 'WORKFLOW_RUN_EVENT_TYPE_TIMED_OUT', 37 | } 38 | 39 | const stepEventTypeMap: Record = { 40 | [ResourceEventType.RESOURCE_EVENT_TYPE_STARTED]: RunEventType.STEP_RUN_EVENT_TYPE_STARTED, 41 | [ResourceEventType.RESOURCE_EVENT_TYPE_COMPLETED]: RunEventType.STEP_RUN_EVENT_TYPE_COMPLETED, 42 | [ResourceEventType.RESOURCE_EVENT_TYPE_FAILED]: RunEventType.STEP_RUN_EVENT_TYPE_FAILED, 43 | [ResourceEventType.RESOURCE_EVENT_TYPE_CANCELLED]: RunEventType.STEP_RUN_EVENT_TYPE_CANCELLED, 44 | [ResourceEventType.RESOURCE_EVENT_TYPE_TIMED_OUT]: RunEventType.STEP_RUN_EVENT_TYPE_TIMED_OUT, 45 | [ResourceEventType.RESOURCE_EVENT_TYPE_STREAM]: RunEventType.STEP_RUN_EVENT_TYPE_STREAM, 46 | [ResourceEventType.RESOURCE_EVENT_TYPE_UNKNOWN]: undefined, 47 | [ResourceEventType.UNRECOGNIZED]: undefined, 48 | }; 49 | 50 | const workflowEventTypeMap: Record = { 51 | [ResourceEventType.RESOURCE_EVENT_TYPE_STARTED]: RunEventType.WORKFLOW_RUN_EVENT_TYPE_STARTED, 52 | 
[ResourceEventType.RESOURCE_EVENT_TYPE_COMPLETED]: RunEventType.WORKFLOW_RUN_EVENT_TYPE_COMPLETED, 53 | [ResourceEventType.RESOURCE_EVENT_TYPE_FAILED]: RunEventType.WORKFLOW_RUN_EVENT_TYPE_FAILED, 54 | [ResourceEventType.RESOURCE_EVENT_TYPE_CANCELLED]: RunEventType.WORKFLOW_RUN_EVENT_TYPE_CANCELLED, 55 | [ResourceEventType.RESOURCE_EVENT_TYPE_TIMED_OUT]: RunEventType.WORKFLOW_RUN_EVENT_TYPE_TIMED_OUT, 56 | [ResourceEventType.RESOURCE_EVENT_TYPE_UNKNOWN]: undefined, 57 | [ResourceEventType.RESOURCE_EVENT_TYPE_STREAM]: undefined, 58 | [ResourceEventType.UNRECOGNIZED]: undefined, 59 | }; 60 | 61 | const resourceTypeMap: Record< 62 | ResourceType, 63 | Record | undefined 64 | > = { 65 | [ResourceType.RESOURCE_TYPE_STEP_RUN]: stepEventTypeMap, 66 | [ResourceType.RESOURCE_TYPE_WORKFLOW_RUN]: workflowEventTypeMap, 67 | [ResourceType.RESOURCE_TYPE_UNKNOWN]: undefined, 68 | [ResourceType.UNRECOGNIZED]: undefined, 69 | }; 70 | 71 | const workflowStatusMap: Record = { 72 | [WorkflowRunStatus.SUCCEEDED]: RunEventType.WORKFLOW_RUN_EVENT_TYPE_COMPLETED, 73 | [WorkflowRunStatus.FAILED]: RunEventType.WORKFLOW_RUN_EVENT_TYPE_FAILED, 74 | [WorkflowRunStatus.CANCELLED]: RunEventType.WORKFLOW_RUN_EVENT_TYPE_CANCELLED, 75 | [WorkflowRunStatus.PENDING]: undefined, 76 | [WorkflowRunStatus.RUNNING]: undefined, 77 | [WorkflowRunStatus.QUEUED]: undefined, 78 | }; 79 | 80 | export interface StepRunEvent { 81 | type: RunEventType; 82 | payload: string; 83 | resourceId: string; 84 | workflowRunId: string; 85 | } 86 | 87 | export class RunEventListener { 88 | client: DispatcherClient; 89 | 90 | q: Array = []; 91 | eventEmitter = new EventEmitter(); 92 | 93 | pollInterval: any; 94 | 95 | constructor(client: DispatcherClient) { 96 | this.client = client; 97 | } 98 | 99 | static forRunId(workflowRunId: string, client: DispatcherClient): RunEventListener { 100 | const listener = new RunEventListener(client); 101 | listener.listenForRunId(workflowRunId); 102 | return listener; 103 | } 104 | 105 | 
static forAdditionalMeta(key: string, value: string, client: DispatcherClient): RunEventListener { 106 | const listener = new RunEventListener(client); 107 | listener.listenForAdditionalMeta(key, value); 108 | return listener; 109 | } 110 | 111 | emit(event: StepRunEvent) { 112 | this.q.push(event); 113 | this.eventEmitter.emit('event'); 114 | } 115 | 116 | async listenForRunId(workflowRunId: string) { 117 | const listenerFactory = () => 118 | this.client.subscribeToWorkflowEvents({ 119 | workflowRunId, 120 | }); 121 | 122 | return this.listenLoop(listenerFactory); 123 | } 124 | 125 | async listenForAdditionalMeta(key: string, value: string) { 126 | const listenerFactory = () => 127 | this.client.subscribeToWorkflowEvents({ 128 | additionalMetaKey: key, 129 | additionalMetaValue: value, 130 | }); 131 | 132 | return this.listenLoop(listenerFactory); 133 | } 134 | 135 | async listenLoop(listenerFactory: () => AsyncIterable) { 136 | let listener = listenerFactory(); 137 | 138 | try { 139 | for await (const workflowEvent of listener) { 140 | const eventType = resourceTypeMap[workflowEvent.resourceType]?.[workflowEvent.eventType]; 141 | if (eventType) { 142 | this.emit({ 143 | type: eventType, 144 | payload: workflowEvent.eventPayload, 145 | resourceId: workflowEvent.resourceId, 146 | workflowRunId: workflowEvent.workflowRunId, 147 | }); 148 | } 149 | } 150 | } catch (e: any) { 151 | if (e.code === Status.CANCELLED) { 152 | return; 153 | } 154 | if (e.code === Status.UNAVAILABLE) { 155 | listener = await this.retrySubscribe(listenerFactory); 156 | } 157 | } 158 | } 159 | 160 | async retrySubscribe(listenerFactory: () => AsyncIterable) { 161 | let retries = 0; 162 | 163 | while (retries < DEFAULT_EVENT_LISTENER_RETRY_COUNT) { 164 | try { 165 | await sleep(DEFAULT_EVENT_LISTENER_RETRY_INTERVAL); 166 | return listenerFactory(); 167 | } catch (e: any) { 168 | retries += 1; 169 | } 170 | } 171 | 172 | throw new HatchetError( 173 | `Could not subscribe to the worker after 
${DEFAULT_EVENT_LISTENER_RETRY_COUNT} retries` 174 | ); 175 | } 176 | 177 | async *stream(): AsyncGenerator { 178 | for await (const _ of on(this.eventEmitter, 'event')) { 179 | while (this.q.length > 0) { 180 | const r = this.q.shift(); 181 | if (r) { 182 | yield r; 183 | } 184 | } 185 | } 186 | } 187 | } 188 | 189 | export class ListenerClient { 190 | config: ClientConfig; 191 | client: PbDispatcherClient; 192 | logger: Logger; 193 | api: Api; 194 | 195 | pooledListener: GrpcPooledListener | undefined; 196 | 197 | constructor(config: ClientConfig, channel: Channel, factory: ClientFactory, api: Api) { 198 | this.config = config; 199 | this.client = factory.create(DispatcherDefinition, channel); 200 | this.logger = config.logger(`Listener`, config.log_level); 201 | this.api = api; 202 | } 203 | 204 | get(workflowRunId: string) { 205 | if (!this.pooledListener) { 206 | this.pooledListener = new GrpcPooledListener(this, () => { 207 | this.pooledListener = undefined; 208 | }); 209 | } 210 | 211 | return this.pooledListener.subscribe({ 212 | workflowRunId, 213 | }); 214 | } 215 | 216 | async stream(workflowRunId: string): Promise> { 217 | const listener = RunEventListener.forRunId(workflowRunId, this.client); 218 | return listener.stream(); 219 | } 220 | 221 | async streamByRunId(workflowRunId: string): Promise> { 222 | return this.stream(workflowRunId); 223 | } 224 | 225 | async streamByAdditionalMeta( 226 | key: string, 227 | value: string 228 | ): Promise> { 229 | const listener = RunEventListener.forAdditionalMeta(key, value, this.client); 230 | return listener.stream(); 231 | } 232 | } 233 | -------------------------------------------------------------------------------- /src/clients/rest/api.ts: -------------------------------------------------------------------------------- 1 | import qs from 'qs'; 2 | import { AxiosRequestConfig } from 'axios'; 3 | import { Api } from './generated/Api'; 4 | 5 | const api = (serverUrl: string, token: string, axiosOpts?: 
AxiosRequestConfig) => { 6 | return new Api({ 7 | baseURL: serverUrl, 8 | headers: { 9 | Authorization: `Bearer ${token}`, 10 | }, 11 | paramsSerializer: (params) => qs.stringify(params, { arrayFormat: 'repeat' }), 12 | ...axiosOpts, 13 | }); 14 | }; 15 | 16 | export default api; 17 | -------------------------------------------------------------------------------- /src/clients/rest/generated/http-client.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /* tslint:disable */ 3 | /* 4 | * --------------------------------------------------------------- 5 | * ## THIS FILE WAS GENERATED VIA SWAGGER-TYPESCRIPT-API ## 6 | * ## ## 7 | * ## AUTHOR: acacode ## 8 | * ## SOURCE: https://github.com/acacode/swagger-typescript-api ## 9 | * --------------------------------------------------------------- 10 | */ 11 | 12 | import type { 13 | AxiosInstance, 14 | AxiosRequestConfig, 15 | AxiosResponse, 16 | HeadersDefaults, 17 | ResponseType, 18 | } from 'axios'; 19 | import axios from 'axios'; 20 | 21 | export type QueryParamsType = Record; 22 | 23 | export interface FullRequestParams 24 | extends Omit { 25 | /** set parameter to `true` for call `securityWorker` for this request */ 26 | secure?: boolean; 27 | /** request path */ 28 | path: string; 29 | /** content type of request body */ 30 | type?: ContentType; 31 | /** query params */ 32 | query?: QueryParamsType; 33 | /** format of response (i.e. 
response.json() -> format: "json") */ 34 | format?: ResponseType; 35 | /** request body */ 36 | body?: unknown; 37 | } 38 | 39 | export type RequestParams = Omit; 40 | 41 | export interface ApiConfig 42 | extends Omit { 43 | securityWorker?: ( 44 | securityData: SecurityDataType | null 45 | ) => Promise | AxiosRequestConfig | void; 46 | secure?: boolean; 47 | format?: ResponseType; 48 | } 49 | 50 | export enum ContentType { 51 | Json = 'application/json', 52 | FormData = 'multipart/form-data', 53 | UrlEncoded = 'application/x-www-form-urlencoded', 54 | Text = 'text/plain', 55 | } 56 | 57 | export class HttpClient { 58 | public instance: AxiosInstance; 59 | private securityData: SecurityDataType | null = null; 60 | private securityWorker?: ApiConfig['securityWorker']; 61 | private secure?: boolean; 62 | private format?: ResponseType; 63 | 64 | constructor({ 65 | securityWorker, 66 | secure, 67 | format, 68 | ...axiosConfig 69 | }: ApiConfig = {}) { 70 | this.instance = axios.create({ ...axiosConfig, baseURL: axiosConfig.baseURL || '' }); 71 | this.secure = secure; 72 | this.format = format; 73 | this.securityWorker = securityWorker; 74 | } 75 | 76 | public setSecurityData = (data: SecurityDataType | null) => { 77 | this.securityData = data; 78 | }; 79 | 80 | protected mergeRequestParams( 81 | params1: AxiosRequestConfig, 82 | params2?: AxiosRequestConfig 83 | ): AxiosRequestConfig { 84 | const method = params1.method || (params2 && params2.method); 85 | 86 | return { 87 | ...this.instance.defaults, 88 | ...params1, 89 | ...(params2 || {}), 90 | headers: { 91 | ...((method && 92 | this.instance.defaults.headers[method.toLowerCase() as keyof HeadersDefaults]) || 93 | {}), 94 | ...(params1.headers || {}), 95 | ...((params2 && params2.headers) || {}), 96 | }, 97 | }; 98 | } 99 | 100 | protected stringifyFormItem(formItem: unknown) { 101 | if (typeof formItem === 'object' && formItem !== null) { 102 | return JSON.stringify(formItem); 103 | } else { 104 | return 
`${formItem}`; 105 | } 106 | } 107 | 108 | protected createFormData(input: Record): FormData { 109 | if (input instanceof FormData) { 110 | return input; 111 | } 112 | return Object.keys(input || {}).reduce((formData, key) => { 113 | const property = input[key]; 114 | const propertyContent: any[] = property instanceof Array ? property : [property]; 115 | 116 | for (const formItem of propertyContent) { 117 | const isFileType = formItem instanceof Blob || formItem instanceof File; 118 | formData.append(key, isFileType ? formItem : this.stringifyFormItem(formItem)); 119 | } 120 | 121 | return formData; 122 | }, new FormData()); 123 | } 124 | 125 | public request = async ({ 126 | secure, 127 | path, 128 | type, 129 | query, 130 | format, 131 | body, 132 | ...params 133 | }: FullRequestParams): Promise> => { 134 | const secureParams = 135 | ((typeof secure === 'boolean' ? secure : this.secure) && 136 | this.securityWorker && 137 | (await this.securityWorker(this.securityData))) || 138 | {}; 139 | const requestParams = this.mergeRequestParams(params, secureParams); 140 | const responseFormat = format || this.format || undefined; 141 | 142 | if (type === ContentType.FormData && body && body !== null && typeof body === 'object') { 143 | body = this.createFormData(body as Record); 144 | } 145 | 146 | if (type === ContentType.Text && body && body !== null && typeof body !== 'string') { 147 | body = JSON.stringify(body); 148 | } 149 | 150 | return this.instance.request({ 151 | ...requestParams, 152 | headers: { 153 | ...(requestParams.headers || {}), 154 | ...(type ? 
{ 'Content-Type': type } : {}), 155 | }, 156 | params: query, 157 | responseType: responseFormat, 158 | data: body, 159 | url: path, 160 | }); 161 | }; 162 | } 163 | -------------------------------------------------------------------------------- /src/clients/rest/index.ts: -------------------------------------------------------------------------------- 1 | import api from './api'; 2 | import { Api } from './generated/Api'; 3 | 4 | import { Worker as _Worker, Workflow as _Workflow } from './generated/data-contracts'; 5 | import * as APIContracts from './generated/data-contracts'; 6 | 7 | // Then, re-export them as needed 8 | type ApiWorker = _Worker; 9 | type ApiWorkflow = _Workflow; 10 | 11 | // Export everything by default 12 | export { ApiWorker, ApiWorkflow, APIContracts, Api }; 13 | 14 | export default api; 15 | -------------------------------------------------------------------------------- /src/clients/worker/handler.ts: -------------------------------------------------------------------------------- 1 | import HatchetError from '@util/errors/hatchet-error'; 2 | import { createHmac } from 'crypto'; 3 | import { IncomingMessage, ServerResponse } from 'http'; 4 | import { Workflow } from '@hatchet/workflow'; 5 | import { Worker } from './worker'; 6 | 7 | export interface HandlerOpts { 8 | secret: string; 9 | } 10 | 11 | const okMessage = 'The Hatchet webhooks endpoint is up and running!'; 12 | 13 | export class WebhookHandler { 14 | // eslint-disable-next-line no-useless-constructor 15 | constructor( 16 | private worker: Worker, 17 | private workflows: Workflow[] 18 | // eslint-disable-next-line no-empty-function 19 | ) {} 20 | 21 | /** 22 | * Handles a request with a provided body, secret, and signature. 23 | * 24 | * @param {string | undefined} body - The body of the request. 25 | * @param {string | undefined} secret - The secret used for signature verification. 26 | * @param {string | string[] | undefined | null} signature - The signature of the request. 
27 | * 28 | * @throws {HatchetError} - If no signature is provided or the signature is not a string. 29 | * @throws {HatchetError} - If no secret is provided. 30 | * @throws {HatchetError} - If no body is provided. 31 | */ 32 | async handle( 33 | body: string | undefined, 34 | signature: string | string[] | undefined | null, 35 | secret: string | undefined 36 | ) { 37 | this.checkSignature(body, signature, secret); 38 | 39 | const action = JSON.parse(body!); 40 | 41 | await this.worker.handleAction(action); 42 | } 43 | 44 | private checkSignature( 45 | body: string | undefined, 46 | signature: string | string[] | undefined | null, 47 | secret: string | undefined 48 | ) { 49 | if (!signature || typeof signature !== 'string') { 50 | throw new HatchetError('No signature provided'); 51 | } 52 | if (!secret) { 53 | throw new HatchetError('No secret provided'); 54 | } 55 | if (!body) { 56 | throw new HatchetError('No body provided'); 57 | } 58 | 59 | // verify hmac signature 60 | const actualSignature = createHmac('sha256', secret).update(body).digest('hex'); 61 | if (actualSignature !== signature) { 62 | throw new HatchetError(`Invalid signature, expected ${actualSignature}, got ${signature}`); 63 | } 64 | } 65 | 66 | private async getHealthcheckResponse( 67 | body: string | undefined, 68 | signature: string | string[] | undefined | null, 69 | secret: string | undefined 70 | ) { 71 | this.checkSignature(body, signature, secret); 72 | 73 | for (const workflow of this.workflows) { 74 | await this.worker.registerWorkflow(workflow); 75 | } 76 | 77 | return { 78 | actions: Object.keys(this.worker.action_registry), 79 | }; 80 | } 81 | 82 | /** 83 | * Express Handler 84 | * 85 | * This method is an asynchronous function that returns an Express middleware handler. 86 | * The handler function is responsible for handling incoming requests and invoking the 87 | * corresponding logic based on the provided secret. 
88 | */ 89 | expressHandler({ secret }: HandlerOpts) { 90 | return (req: any, res: any) => { 91 | if (req.method === 'GET') { 92 | res.status(200).send(okMessage); 93 | return; 94 | } 95 | 96 | if (req.method === 'PUT') { 97 | let { body } = req; 98 | 99 | if (typeof body !== 'string') { 100 | body = JSON.stringify(body); 101 | } 102 | 103 | this.getHealthcheckResponse(body, req.headers['x-hatchet-signature'], secret) 104 | .then((resp) => { 105 | res.status(200).json(resp); 106 | }) 107 | .catch((err) => { 108 | res.status(500); 109 | this.worker.logger.error(`Error handling request: ${err.message}`); 110 | }); 111 | return; 112 | } 113 | 114 | if (req.method !== 'POST') { 115 | res.status(405).json({ error: 'Method not allowed' }); 116 | return; 117 | } 118 | 119 | let action = req.body; 120 | 121 | if (typeof action !== 'string') { 122 | action = JSON.stringify(action); 123 | } 124 | 125 | this.handle(action, req.headers['x-hatchet-signature'], secret) 126 | .then(() => { 127 | res.status(200); 128 | }) 129 | .catch((err) => { 130 | res.status(500); 131 | this.worker.logger.error(`Error handling request: ${err.message}`); 132 | }); 133 | }; 134 | } 135 | 136 | /** 137 | * A method that returns an HTTP request handler. 
138 | */ 139 | httpHandler({ secret }: HandlerOpts) { 140 | return (req: IncomingMessage, res: ServerResponse) => { 141 | const handle = async () => { 142 | if (req.method === 'GET') { 143 | res.writeHead(200, { 'Content-Type': 'application/json' }); 144 | res.write(okMessage); 145 | res.end(); 146 | return; 147 | } 148 | 149 | const body = await this.getBody(req); 150 | 151 | if (req.method === 'PUT') { 152 | const resp = await this.getHealthcheckResponse( 153 | body, 154 | req.headers['x-hatchet-signature'], 155 | secret 156 | ); 157 | res.writeHead(200, { 'Content-Type': 'application/json' }); 158 | res.write(JSON.stringify(resp)); 159 | res.end(); 160 | return; 161 | } 162 | 163 | if (req.method !== 'POST') { 164 | res.writeHead(405, { 'Content-Type': 'application/json' }); 165 | res.write(JSON.stringify({ error: 'Method not allowed' })); 166 | res.end(); 167 | return; 168 | } 169 | 170 | await this.handle(body, req.headers['x-hatchet-signature'], secret); 171 | 172 | res.writeHead(200, 'OK'); 173 | res.end(); 174 | }; 175 | 176 | handle().catch((e) => { 177 | this.worker.logger.error(`Error handling request: ${e.message}`); 178 | res.writeHead(500, 'Internal server error'); 179 | res.end(); 180 | }); 181 | }; 182 | } 183 | 184 | /** 185 | * A method that returns a Next.js pages router request handler. 
186 | */ 187 | nextJSPagesHandler({ secret }: HandlerOpts) { 188 | return async (req: any, res: any) => { 189 | if (req.method === 'GET') { 190 | return res.status(200).send(okMessage); 191 | } 192 | const sig = req.headers['x-hatchet-signature']; 193 | const body = JSON.stringify(req.body); 194 | if (req.method === 'PUT') { 195 | const resp = await this.getHealthcheckResponse(body, sig, secret); 196 | return res.status(200).send(JSON.stringify(resp)); 197 | } 198 | if (req.method !== 'POST') { 199 | return res.status(405).send('Method not allowed'); 200 | } 201 | await this.handle(body, sig, secret); 202 | return res.status(200).send('ok'); 203 | }; 204 | } 205 | 206 | /** 207 | * A method that returns a Next.js request handler. 208 | */ 209 | nextJSHandler({ secret }: HandlerOpts) { 210 | const ok = async () => { 211 | return new Response(okMessage, { status: 200 }); 212 | }; 213 | const f = async (req: Request) => { 214 | const sig = req.headers.get('x-hatchet-signature'); 215 | const body = await req.text(); 216 | if (req.method === 'PUT') { 217 | const resp = await this.getHealthcheckResponse(body, sig, secret); 218 | return new Response(JSON.stringify(resp), { status: 200 }); 219 | } 220 | if (req.method !== 'POST') { 221 | return new Response('Method not allowed', { status: 405 }); 222 | } 223 | await this.handle(body, sig, secret); 224 | return new Response('ok', { status: 200 }); 225 | }; 226 | return { 227 | GET: ok, 228 | POST: f, 229 | PUT: f, 230 | }; 231 | } 232 | 233 | private getBody(req: IncomingMessage): Promise { 234 | return new Promise((resolve) => { 235 | let body = ''; 236 | req.on('data', (chunk) => { 237 | body += chunk; 238 | }); 239 | req.on('end', () => { 240 | resolve(body); 241 | }); 242 | }); 243 | } 244 | } 245 | -------------------------------------------------------------------------------- /src/clients/worker/index.ts: -------------------------------------------------------------------------------- 1 | export * from './worker'; 2 
| -------------------------------------------------------------------------------- /src/examples/affinity-workers.ts: -------------------------------------------------------------------------------- 1 | import { WorkerLabelComparator } from '@hatchet/protoc/workflows'; 2 | import Hatchet from '../sdk'; 3 | import { Workflow } from '../workflow'; 4 | 5 | const hatchet = Hatchet.init(); 6 | 7 | const workflow: Workflow = { 8 | id: 'affinity-workflow', 9 | description: 'test', 10 | steps: [ 11 | { 12 | name: 'step1', 13 | run: async (ctx) => { 14 | const results: Promise[] = []; 15 | // eslint-disable-next-line no-plusplus 16 | for (let i = 0; i < 50; i++) { 17 | const result = await ctx.spawnWorkflow(childWorkflow.id, {}); 18 | results.push(result.result()); 19 | } 20 | console.log('Spawned 50 child workflows'); 21 | console.log('Results:', await Promise.all(results)); 22 | 23 | return { step1: 'step1 results!' }; 24 | }, 25 | }, 26 | ], 27 | }; 28 | 29 | const childWorkflow: Workflow = { 30 | id: 'child-affinity-workflow', 31 | description: 'test', 32 | steps: [ 33 | { 34 | name: 'child-step1', 35 | worker_labels: { 36 | model: { 37 | value: 'xyz', 38 | required: true, 39 | }, 40 | }, 41 | run: async (ctx) => { 42 | console.log('starting child-step1 with the following input', ctx.workflowInput()); 43 | return { childStep1: 'childStep1 results!' }; 44 | }, 45 | }, 46 | { 47 | name: 'child-step2', 48 | worker_labels: { 49 | memory: { 50 | value: 512, 51 | required: true, 52 | comparator: WorkerLabelComparator.LESS_THAN, 53 | }, 54 | }, 55 | run: async (ctx) => { 56 | console.log('starting child-step2 with the following input', ctx.workflowInput()); 57 | return { childStep2: 'childStep2 results!' 
}; 58 | }, 59 | }, 60 | ], 61 | }; 62 | 63 | async function main() { 64 | const worker1 = await hatchet.worker('affinity-worker-1', { 65 | labels: { 66 | model: 'abc', 67 | memory: 1024, 68 | }, 69 | }); 70 | await worker1.registerWorkflow(workflow); 71 | await worker1.registerWorkflow(childWorkflow); 72 | worker1.start(); 73 | 74 | const worker2 = await hatchet.worker('affinity-worker-2', { 75 | labels: { 76 | model: 'xyz', 77 | memory: 512, 78 | }, 79 | }); 80 | await worker2.registerWorkflow(workflow); 81 | await worker2.registerWorkflow(childWorkflow); 82 | worker2.start(); 83 | } 84 | 85 | main(); 86 | -------------------------------------------------------------------------------- /src/examples/api.ts: -------------------------------------------------------------------------------- 1 | import Hatchet, { Context } from '..'; 2 | import { CreateWorkflowVersionOpts } from '../protoc/workflows'; 3 | 4 | type CustomUserData = { 5 | example: string; 6 | }; 7 | 8 | const opts: CreateWorkflowVersionOpts = { 9 | name: 'api-workflow', 10 | description: 'My workflow', 11 | version: '', 12 | eventTriggers: [], 13 | cronTriggers: [], 14 | scheduledTriggers: [], 15 | concurrency: undefined, 16 | jobs: [ 17 | { 18 | name: 'my-job', 19 | description: 'Job description', 20 | steps: [ 21 | { 22 | retries: 0, 23 | readableId: 'custom-step', 24 | action: `slack:example`, 25 | timeout: '60s', 26 | inputs: '{}', 27 | parents: [], 28 | workerLabels: {}, 29 | userData: `{ 30 | "example": "value" 31 | }`, 32 | rateLimits: [], 33 | }, 34 | ], 35 | }, 36 | ], 37 | }; 38 | 39 | type StepOneInput = { 40 | key: string; 41 | }; 42 | 43 | async function main() { 44 | const hatchet = Hatchet.init(); 45 | 46 | const { admin } = hatchet; 47 | 48 | await admin.putWorkflow(opts); 49 | 50 | const worker = await hatchet.worker('example-worker'); 51 | 52 | worker.registerAction('slack:example', async (ctx: Context) => { 53 | const setData = ctx.userData(); 54 | console.log('executed step1!', 
setData); 55 | return { step1: 'step1' }; 56 | }); 57 | 58 | await hatchet.admin.runWorkflow('api-workflow', {}); 59 | 60 | worker.start(); 61 | } 62 | 63 | main(); 64 | -------------------------------------------------------------------------------- /src/examples/bulk-fanout-trigger.ts: -------------------------------------------------------------------------------- 1 | import { WorkflowRun } from '@hatchet/clients/admin'; 2 | import Hatchet from '../sdk'; 3 | 4 | const hatchet = Hatchet.init(); 5 | 6 | async function main() { 7 | const workflowRuns: WorkflowRun[] = []; 8 | 9 | workflowRuns[0] = { 10 | workflowName: 'bulk-parent-workflow', 11 | input: {}, 12 | options: { 13 | additionalMetadata: { 14 | key: 'value', 15 | }, 16 | }, 17 | }; 18 | 19 | workflowRuns[1] = { 20 | workflowName: 'bulk-parent-workflow', 21 | input: { second: 'second' }, 22 | options: { 23 | additionalMetadata: { 24 | key: 'value', 25 | }, 26 | }, 27 | }; 28 | 29 | try { 30 | const workflowRunResponse = hatchet.admin.runWorkflows(workflowRuns); 31 | 32 | const result = await workflowRunResponse; 33 | 34 | console.log('result', result); 35 | 36 | result.forEach(async (workflowRun) => { 37 | const stream = await workflowRun.stream(); 38 | 39 | for await (const event of stream) { 40 | console.log('event received', event); 41 | } 42 | }); 43 | } catch (error) { 44 | console.log('error', error); 45 | } 46 | } 47 | 48 | main(); 49 | -------------------------------------------------------------------------------- /src/examples/bulk-fanout-worker.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../sdk'; 2 | import { Workflow } from '../workflow'; 3 | 4 | const hatchet = Hatchet.init(); 5 | 6 | type Input = { 7 | input: string; 8 | }; 9 | 10 | type Output = { 11 | 'child-work': { 12 | 'child-output': string; 13 | }; 14 | }; 15 | 16 | const parentWorkflow: Workflow = { 17 | id: 'bulk-parent-workflow', 18 | description: 'simple example for 
spawning child workflows', 19 | on: { 20 | event: 'bulk:fanout:create', 21 | }, 22 | steps: [ 23 | { 24 | name: 'parent-spawn', 25 | timeout: '70s', 26 | run: async (ctx) => { 27 | // Prepare the workflows to spawn 28 | const workflowRequests = Array.from({ length: 300 }, (_, i) => ({ 29 | workflow: 'child-workflow', 30 | input: { input: `child-input-${i}` }, 31 | options: { additionalMetadata: { childKey: 'childValue' } }, 32 | })); 33 | 34 | const spawnedWorkflows = await ctx.spawnWorkflows(workflowRequests); 35 | 36 | const results = await Promise.all( 37 | spawnedWorkflows.map((workflowRef) => 38 | workflowRef.result().then((result) => { 39 | ctx.log('spawned workflow result:'); 40 | return result; 41 | }) 42 | ) 43 | ); 44 | 45 | console.log('spawned workflow results:', results); 46 | console.log('number of spawned workflows:', results.length); 47 | return { spawned: results.length }; 48 | }, 49 | }, 50 | ], 51 | }; 52 | 53 | const childWorkflow: Workflow = { 54 | id: 'child-workflow', 55 | description: 'simple example for spawning child workflows', 56 | on: { 57 | event: 'child:create', 58 | }, 59 | steps: [ 60 | { 61 | name: 'child-work', 62 | run: async (ctx) => { 63 | const { input } = ctx.workflowInput(); 64 | // throw new Error('child error'); 65 | return { 'child-output': 'sm' }; 66 | }, 67 | }, 68 | { 69 | name: 'child-work2', 70 | run: async (ctx) => { 71 | const { input } = ctx.workflowInput(); 72 | // Perform CPU-bound work 73 | // throw new Error('child error'); 74 | console.log('child workflow input:', input); 75 | // Generate a large amount of garbage data 76 | 77 | const garbageData = 'garbage'; // Print a snippet of the garbage data 78 | return { 'child-output': garbageData }; 79 | }, 80 | }, 81 | { 82 | name: 'child-work3', 83 | parents: ['child-work'], 84 | run: async (ctx) => { 85 | const { input } = ctx.workflowInput(); 86 | // throw new Error('child error'); 87 | const garbageData = 'child garbage'; 88 | return { 'child-output': 
garbageData }; 89 | }, 90 | }, 91 | ], 92 | }; 93 | 94 | async function main() { 95 | const worker = await hatchet.worker('fanout-worker', { maxRuns: 1000 }); 96 | await worker.registerWorkflow(parentWorkflow); 97 | await worker.registerWorkflow(childWorkflow); 98 | worker.start(); 99 | } 100 | 101 | main(); 102 | -------------------------------------------------------------------------------- /src/examples/bulk-trigger.ts: -------------------------------------------------------------------------------- 1 | import { WorkflowRun } from '@hatchet/clients/admin'; 2 | import Hatchet from '../sdk'; 3 | 4 | const hatchet = Hatchet.init(); 5 | 6 | async function main() { 7 | const workflowRuns: WorkflowRun[] = []; 8 | 9 | for (let i = 0; i < 100; i += 1) { 10 | workflowRuns.push({ 11 | workflowName: 'simple-workflow', 12 | input: {}, 13 | options: { 14 | additionalMetadata: { 15 | key: 'value', 16 | dedupe: 'key', 17 | }, 18 | }, 19 | }); 20 | } 21 | 22 | const workflowRunResponse = hatchet.admin.runWorkflows(workflowRuns); 23 | 24 | const result = await workflowRunResponse; 25 | 26 | console.log('result', result); 27 | 28 | result.forEach(async (workflowRun) => { 29 | const stream = await workflowRun.stream(); 30 | 31 | for await (const event of stream) { 32 | console.log('event received', event); 33 | } 34 | }); 35 | } 36 | 37 | main(); 38 | -------------------------------------------------------------------------------- /src/examples/byo-logger.ts: -------------------------------------------------------------------------------- 1 | import { Logger, LogLevel } from '@hatchet/util/logger'; 2 | // eslint-disable-next-line import/no-extraneous-dependencies 3 | import pino from 'pino'; 4 | import Hatchet from '../sdk'; 5 | import { Workflow } from '../workflow'; 6 | 7 | const logger = pino(); 8 | 9 | class PinoLogger implements Logger { 10 | logLevel: LogLevel; 11 | context: string; 12 | 13 | constructor(context: string, logLevel: LogLevel = 'DEBUG') { 14 | this.logLevel = 
logLevel; 15 | this.context = context; 16 | } 17 | 18 | debug(message: string): void { 19 | logger.debug(message); 20 | } 21 | 22 | info(message: string): void { 23 | logger.info(message); 24 | } 25 | 26 | green(message: string): void { 27 | logger.info(message); // fix: '%c' is a browser-console CSS directive; pino has no styling support and would log the '%c' literally 28 | } 29 | 30 | warn(message: string, error?: Error): void { 31 | logger.warn(`${message} ${error}`); 32 | } 33 | 34 | error(message: string, error?: Error): void { 35 | logger.error(`${message} ${error}`); 36 | } 37 | } 38 | 39 | const hatchet = Hatchet.init({ 40 | log_level: 'DEBUG', 41 | logger: (ctx, level) => new PinoLogger(ctx, level), 42 | }); 43 | 44 | const sleep = (ms: number) => 45 | new Promise((resolve) => { 46 | setTimeout(resolve, ms); 47 | }); 48 | 49 | const workflow: Workflow = { 50 | id: 'byo-logger-example', 51 | description: 'An example showing how to pass a custom logger to Hatchet', 52 | on: { 53 | event: 'byo-logger:spawn', 54 | }, 55 | steps: [ 56 | { 57 | name: 'logger-step1', 58 | run: async (ctx) => { 59 | // eslint-disable-next-line no-plusplus 60 | for (let i = 0; i < 5; i++) { 61 | logger.info(`log message ${i}`); 62 | await sleep(500); 63 | } 64 | 65 | return { step1: 'completed step run' }; 66 | }, 67 | }, 68 | ], 69 | }; 70 | 71 | async function main() { 72 | const worker = await hatchet.worker('byo-logger-worker', 1); 73 | await worker.registerWorkflow(workflow); 74 | worker.start(); 75 | } 76 | 77 | main(); 78 | -------------------------------------------------------------------------------- /src/examples/concurrency/cancel-in-progress/concurrency-event.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../../../sdk'; 2 | 3 | const hatchet = Hatchet.init(); 4 | 5 | const sleep = (ms: number) => 6 | new Promise((resolve) => { 7 | setTimeout(resolve, ms); 8 | }); 9 | 10 | async function main() { 11 | hatchet.event.push('concurrency:create', { 12 | data: 'event 1', 13 | userId: 'user1', 14 | }); 15 | 16 | //
step 1 will wait 5000 ms, 17 | // so sending a second event 18 | // before that will cancel 19 | // the first run and run the second event 20 | await sleep(1000); 21 | 22 | hatchet.event.push('concurrency:create', { 23 | data: 'event 2', 24 | userId: 'user1', 25 | }); 26 | } 27 | 28 | main(); 29 | -------------------------------------------------------------------------------- /src/examples/concurrency/cancel-in-progress/concurrency-worker.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../../../sdk'; 2 | import { Workflow } from '../../../workflow'; 3 | 4 | const hatchet = Hatchet.init(); 5 | 6 | const sleep = (ms: number) => 7 | new Promise((resolve) => { 8 | setTimeout(resolve, ms); 9 | }); 10 | 11 | const workflow: Workflow = { 12 | id: 'concurrency-example', 13 | description: 'test', 14 | on: { 15 | event: 'concurrency:create', 16 | }, 17 | concurrency: { 18 | name: 'user-concurrency', 19 | key: (ctx) => ctx.workflowInput().userId, 20 | }, 21 | steps: [ 22 | { 23 | name: 'step1', 24 | run: async (ctx) => { 25 | const { data } = ctx.workflowInput(); 26 | const { signal } = ctx.controller; 27 | 28 | if (signal.aborted) throw new Error('step1 was aborted'); 29 | 30 | console.log('starting step1 and waiting 5 seconds...', data); 31 | await sleep(5000); 32 | 33 | if (signal.aborted) throw new Error('step1 was aborted'); 34 | 35 | // NOTE: the AbortController signal can be passed to many http libraries to cancel active requests 36 | // fetch(url, { signal }) 37 | // axios.get(url, { signal }) 38 | 39 | console.log('executed step1!'); 40 | return { step1: `step1 results for ${data}!` }; 41 | }, 42 | }, 43 | { 44 | name: 'step2', 45 | parents: ['step1'], 46 | run: (ctx) => { 47 | console.log('executed step2 after step1 returned ', ctx.stepOutput('step1')); 48 | return { step2: 'step2 results!' 
}; 49 | }, 50 | }, 51 | ], 52 | }; 53 | 54 | async function main() { 55 | const worker = await hatchet.worker('example-worker'); 56 | await worker.registerWorkflow(workflow); 57 | worker.start(); 58 | } 59 | 60 | main(); 61 | -------------------------------------------------------------------------------- /src/examples/concurrency/group-round-robin/concurrency-event.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../../../sdk'; 2 | 3 | const hatchet = Hatchet.init(); 4 | 5 | const sleep = (ms: number) => 6 | new Promise((resolve) => { 7 | setTimeout(resolve, ms); 8 | }); 9 | 10 | async function main() { 11 | // eslint-disable-next-line no-plusplus 12 | for (let i = 0; i < 20; i++) { 13 | let group = 0; 14 | 15 | if (i > 10) { 16 | group = 1; 17 | } 18 | 19 | hatchet.event.push('concurrency:create', { 20 | data: `event ${i}`, 21 | group, 22 | }); 23 | } 24 | } 25 | 26 | main(); 27 | -------------------------------------------------------------------------------- /src/examples/concurrency/group-round-robin/concurrency-worker-expression.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../../../sdk'; 2 | import { ConcurrencyLimitStrategy, Workflow } from '../../../workflow'; 3 | 4 | const hatchet = Hatchet.init(); 5 | 6 | const sleep = (ms: number) => 7 | new Promise((resolve) => { 8 | setTimeout(resolve, ms); 9 | }); 10 | 11 | const workflow: Workflow = { 12 | id: 'concurrency-example-rr', 13 | description: 'test', 14 | on: { 15 | event: 'concurrency:create', 16 | }, 17 | concurrency: { 18 | name: 'user-concurrency', 19 | expression: 'input.group', 20 | maxRuns: 2, 21 | limitStrategy: ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN, 22 | }, 23 | steps: [ 24 | { 25 | name: 'step1', 26 | run: async (ctx) => { 27 | const { data } = ctx.workflowInput(); 28 | const { signal } = ctx.controller; 29 | 30 | if (signal.aborted) throw new Error('step1 was 
aborted'); 31 | 32 | console.log('starting step1 and waiting 5 seconds...', data); 33 | await sleep(2000); 34 | 35 | if (signal.aborted) throw new Error('step1 was aborted'); 36 | 37 | // NOTE: the AbortController signal can be passed to many http libraries to cancel active requests 38 | // fetch(url, { signal }) 39 | // axios.get(url, { signal }) 40 | 41 | console.log('executed step1!'); 42 | return { step1: `step1 results for ${data}!` }; 43 | }, 44 | }, 45 | ], 46 | }; 47 | 48 | async function main() { 49 | const worker = await hatchet.worker('example-worker'); 50 | await worker.registerWorkflow(workflow); 51 | worker.start(); 52 | } 53 | 54 | main(); 55 | -------------------------------------------------------------------------------- /src/examples/concurrency/group-round-robin/concurrency-worker-key-fn.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../../../sdk'; 2 | import { ConcurrencyLimitStrategy, Workflow } from '../../../workflow'; 3 | 4 | const hatchet = Hatchet.init(); 5 | 6 | const sleep = (ms: number) => 7 | new Promise((resolve) => { 8 | setTimeout(resolve, ms); 9 | }); 10 | 11 | const workflow: Workflow = { 12 | id: 'concurrency-example-rr', 13 | description: 'test', 14 | on: { 15 | event: 'concurrency:create', 16 | }, 17 | concurrency: { 18 | name: 'user-concurrency', 19 | // NOTE: it is recommended to use expression unless you specifically need to use a custom key function 20 | key: (ctx) => ctx.workflowInput().group, 21 | maxRuns: 2, 22 | limitStrategy: ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN, 23 | }, 24 | steps: [ 25 | { 26 | name: 'step1', 27 | run: async (ctx) => { 28 | const { data } = ctx.workflowInput(); 29 | const { signal } = ctx.controller; 30 | 31 | if (signal.aborted) throw new Error('step1 was aborted'); 32 | 33 | console.log('starting step1 and waiting 5 seconds...', data); 34 | await sleep(2000); 35 | 36 | if (signal.aborted) throw new Error('step1 was aborted'); 37 
| 38 | // NOTE: the AbortController signal can be passed to many http libraries to cancel active requests 39 | // fetch(url, { signal }) 40 | // axios.get(url, { signal }) 41 | 42 | console.log('executed step1!'); 43 | return { step1: `step1 results for ${data}!` }; 44 | }, 45 | }, 46 | ], 47 | }; 48 | 49 | async function main() { 50 | const worker = await hatchet.worker('example-worker'); 51 | await worker.registerWorkflow(workflow); 52 | worker.start(); 53 | } 54 | 55 | main(); 56 | -------------------------------------------------------------------------------- /src/examples/crons/cron-worker.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../../sdk'; 2 | import { Workflow } from '../../workflow'; 3 | 4 | const hatchet = Hatchet.init(); 5 | 6 | // ❓ Workflow Definition Cron Trigger 7 | // Adding a cron trigger to a workflow is as simple as adding a `cron expression` to the `on` prop of the workflow definition 8 | 9 | export const simpleCronWorkflow: Workflow = { 10 | id: 'simple-cron-workflow', 11 | on: { 12 | // 👀 define the cron expression to run every minute 13 | cron: '* * * * *', 14 | }, 15 | // ... 
normal workflow definition 16 | description: 'return the current time every minute', 17 | steps: [ 18 | { 19 | name: 'what-time-is-it', 20 | run: (ctx) => { 21 | return { time: new Date().toISOString() }; 22 | }, 23 | }, 24 | ], 25 | // , 26 | }; 27 | // ‼️ 28 | 29 | async function main() { 30 | const worker = await hatchet.worker('example-worker'); 31 | await worker.registerWorkflow(simpleCronWorkflow); 32 | worker.start(); 33 | } 34 | 35 | if (require.main === module) { 36 | main(); 37 | } 38 | -------------------------------------------------------------------------------- /src/examples/crons/cron.e2e.ts: -------------------------------------------------------------------------------- 1 | import sleep from '@hatchet/util/sleep'; 2 | import Hatchet, { Worker } from '../..'; 3 | import { simpleCronWorkflow } from './cron-worker'; 4 | 5 | xdescribe('cron-e2e', () => { 6 | fit( 7 | 'should invoke the workflow on the cron schedule', 8 | async () => { 9 | let worker: Worker | undefined; 10 | try { 11 | const hatchet = Hatchet.init(); 12 | worker = await hatchet.worker('example-worker'); 13 | 14 | const startTime = new Date(); 15 | 16 | await worker.registerWorkflow(simpleCronWorkflow); 17 | void worker.start(); 18 | await sleep(60 * 1000 * 2 + 1000); // fix: wait 2 minutes + 1s buffer; 60 * 2 + 1000 was only 1120 ms (minutes never converted to ms), so the 2-run assertion could never pass 19 | 20 | const workflowRuns = await hatchet.api.workflowRunList(hatchet.tenantId, { 21 | createdAfter: startTime.toISOString(), 22 | }); 23 | 24 | expect(workflowRuns.data.rows?.length).toEqual(2); 25 | } finally { 26 | if (worker) { 27 | await worker.stop(); 28 | } 29 | } 30 | }, 31 | 60 * 1000 * 2 + 2000 // fix: jest timeout (ms) must exceed the two-minute sleep above; was 1122 ms 32 | ); 33 | }); 34 | -------------------------------------------------------------------------------- /src/examples/crons/programatic-crons.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../../sdk'; 2 | import { simpleCronWorkflow } from './cron-worker'; 3 | 4 | const hatchet = Hatchet.init(); 5 | 6 | // This example assumes you have a worker already running 7 | // and
registered the cron workflow to it 8 | 9 | async function main() { 10 | // ? Create 11 | // You can create dynamic cron triggers programmatically via the API 12 | const createdCron = await hatchet.cron.create( 13 | simpleCronWorkflow, // workflow object or string workflow id 14 | { 15 | name: 'customer-a-daily-report', // friendly name for the cron trigger 16 | expression: '0 12 * * *', // every day at noon 17 | input: { 18 | name: 'John Doe', 19 | }, 20 | additionalMetadata: { 21 | customerId: '123', 22 | }, 23 | } 24 | ); 25 | const { id } = createdCron.metadata; // id which you can later use to reference the cron trigger 26 | // !! 27 | 28 | // ? Get 29 | // You can get a specific cron trigger by passing in the cron trigger id 30 | const cron = await hatchet.cron.get(id); 31 | // !! 32 | 33 | // ? Delete 34 | // You can delete a cron trigger by passing the cron object 35 | // or a cron Id to the delete method 36 | await hatchet.cron.delete(cron); 37 | // !! 38 | 39 | // ? List 40 | // You can list all cron triggers by passing in a query object 41 | const cronList = await hatchet.cron.list({ 42 | offset: 0, 43 | limit: 10, 44 | }); 45 | // !! 
46 | } 47 | 48 | main(); 49 | -------------------------------------------------------------------------------- /src/examples/dag-worker.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../sdk'; 2 | import { Workflow } from '../workflow'; 3 | 4 | const hatchet = Hatchet.init({ 5 | log_level: 'OFF', 6 | }); 7 | 8 | const sleep = (ms: number) => 9 | new Promise((resolve) => { 10 | setTimeout(resolve, ms); 11 | }); 12 | 13 | const workflow: Workflow = { 14 | id: 'dag-example', 15 | description: 'test', 16 | on: { 17 | event: 'user:create', 18 | }, 19 | steps: [ 20 | { 21 | name: 'dag-step1', 22 | run: async (ctx) => { 23 | console.log('executed step1!'); 24 | await sleep(5000); 25 | return { step1: 'step1' }; 26 | }, 27 | }, 28 | { 29 | name: 'dag-step2', 30 | parents: ['dag-step1'], 31 | run: async (ctx) => { 32 | console.log('executed step2!'); 33 | await sleep(5000); 34 | return { step2: 'step2' }; 35 | }, 36 | }, 37 | { 38 | name: 'dag-step3', 39 | parents: ['dag-step1', 'dag-step2'], 40 | run: (ctx) => { 41 | console.log('executed step3!'); 42 | return { step3: 'step3' }; 43 | }, 44 | }, 45 | { 46 | name: 'dag-step4', 47 | parents: ['dag-step1', 'dag-step3'], 48 | run: async (ctx) => { 49 | await sleep(5000); 50 | 51 | // simulate a really slow network call 52 | setTimeout(async () => { 53 | await sleep(1000); 54 | ctx.playground('slow', 'call'); 55 | }, 5000); 56 | 57 | return { step4: 'step4' }; 58 | }, 59 | }, 60 | ], 61 | }; 62 | 63 | async function main() { 64 | const worker = await hatchet.worker('example-worker', 1); 65 | await worker.registerWorkflow(workflow); 66 | worker.start(); 67 | } 68 | 69 | main(); 70 | -------------------------------------------------------------------------------- /src/examples/example-event-with-results.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../sdk'; 2 | 3 | async function main() { 4 | const hatchet 
= Hatchet.init(); 5 | 6 | const ref = await hatchet.admin.runWorkflow('simple-workflow', { 7 | test: 'test', 8 | }); 9 | 10 | const listener = await hatchet.listener.stream(await ref.getWorkflowRunId()); 11 | 12 | console.log('listening for events'); 13 | for await (const event of listener) { 14 | console.log('event received', event); 15 | } 16 | console.log('done listening for events'); 17 | } 18 | 19 | main(); 20 | -------------------------------------------------------------------------------- /src/examples/example-event.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../sdk'; 2 | 3 | const hatchet = Hatchet.init(); 4 | 5 | // Push a single event (example) 6 | hatchet.event.push('user:create', { 7 | test: 'test', 8 | }); 9 | 10 | // Example events to be pushed in bulk 11 | const events = [ 12 | { 13 | payload: { test: 'test1' }, 14 | additionalMetadata: { user_id: 'user1', source: 'test' }, 15 | }, 16 | { 17 | payload: { test: 'test2' }, 18 | additionalMetadata: { user_id: 'user2', source: 'test' }, 19 | }, 20 | { 21 | payload: { test: 'test3' }, 22 | additionalMetadata: { user_id: 'user3', source: 'test' }, 23 | }, 24 | ]; 25 | 26 | // Bulk push the events and compare the keys 27 | hatchet.event 28 | .bulkPush('user:create:bulk', events) 29 | .then((result) => { 30 | const returnedEvents = result.events; 31 | 32 | const keysMatch = returnedEvents.every((returnedEvent) => { 33 | const expectedKey = `user:create:bulk`; 34 | 35 | return returnedEvent.key === expectedKey; 36 | }); 37 | 38 | if (keysMatch) { 39 | // eslint-disable-next-line no-console 40 | console.log('All keys match the original events.'); 41 | } else { 42 | // eslint-disable-next-line no-console 43 | console.log('Mismatch found between original events and returned events.'); 44 | } 45 | }) 46 | .catch((error) => { 47 | // eslint-disable-next-line no-console 48 | console.error('Error during bulk push:', error); 49 | }); 50 | 
-------------------------------------------------------------------------------- /src/examples/fanout-worker.e2e.ts: -------------------------------------------------------------------------------- 1 | import { Workflow } from '..'; 2 | import sleep from '../util/sleep'; 3 | import Hatchet from '../sdk'; 4 | 5 | xdescribe('fanout-e2e', () => { 6 | it('should pass a fanout workflow', async () => { 7 | let invoked = 0; 8 | const start = new Date(); 9 | const parentWorkflow: Workflow = { 10 | id: 'parent-workflow', 11 | description: 'simple example for spawning child workflows', 12 | steps: [ 13 | { 14 | name: 'parent-spawn', 15 | timeout: '10s', 16 | run: async (ctx) => { 17 | const ref = ctx.spawnWorkflow('child-workflow', { input: 'child-input' }); 18 | 19 | const res = await ref.result(); 20 | console.log('spawned workflow result:', res); 21 | invoked += 1; 22 | return { spawned: [res] }; 23 | }, 24 | }, 25 | ], 26 | }; 27 | const childWorkflow: Workflow = { 28 | id: 'child-workflow', 29 | description: 'simple example for spawning child workflows', 30 | steps: [ 31 | { 32 | name: 'child-work', 33 | run: async (ctx) => { 34 | const { input } = ctx.workflowInput(); 35 | await sleep(1000); 36 | invoked += 1; 37 | console.log('child workflow input:', input); 38 | return { 'child-output': 'results' }; 39 | }, 40 | }, 41 | ], 42 | }; 43 | 44 | const hatchet = Hatchet.init(); 45 | const worker = await hatchet.worker('fanout-worker'); 46 | 47 | console.log('registering workflow...'); 48 | await worker.registerWorkflow(parentWorkflow); 49 | await worker.registerWorkflow(childWorkflow); 50 | 51 | void worker.start(); 52 | 53 | console.log('worker started.'); 54 | 55 | await sleep(5000); 56 | 57 | console.log('running workflow...'); 58 | 59 | await hatchet.admin.runWorkflow('parent-workflow', { input: 'parent-input' }); 60 | 61 | await sleep(10000); 62 | 63 | console.log('invoked', invoked); 64 | 65 | expect(invoked).toEqual(2); 66 | 67 | await worker.stop(); 68 | }, 
120000); 69 | }); 70 | -------------------------------------------------------------------------------- /src/examples/fanout-worker.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../sdk'; 2 | import { Workflow } from '../workflow'; 3 | 4 | const hatchet = Hatchet.init(); 5 | 6 | type Input = { 7 | input: string; 8 | }; 9 | 10 | type Output = { 11 | 'child-work': { 12 | 'child-output': string; 13 | }; 14 | }; 15 | 16 | const parentWorkflow: Workflow = { 17 | id: 'parent-workflow', 18 | description: 'simple example for spawning child workflows', 19 | on: { 20 | event: 'fanout:create', 21 | }, 22 | steps: [ 23 | { 24 | name: 'parent-spawn', 25 | timeout: '70s', 26 | run: async (ctx) => { 27 | const promises = Array.from({ length: 3 }, (_, i) => 28 | ctx 29 | .spawnWorkflow( 30 | 'child-workflow', 31 | { input: `child-input-${i}` }, 32 | { additionalMetadata: { childKey: 'childValue' } } 33 | ) 34 | .result() 35 | .then((result) => { 36 | ctx.log('spawned workflow result:'); 37 | return result; 38 | }) 39 | ); 40 | 41 | const results = await Promise.all(promises); 42 | console.log('spawned workflow results:', results); 43 | console.log('number of spawned workflows:', results.length); 44 | return { spawned: 'x' }; 45 | }, 46 | }, 47 | ], 48 | }; 49 | 50 | const childWorkflow: Workflow = { 51 | id: 'child-workflow', 52 | description: 'simple example for spawning child workflows', 53 | on: { 54 | event: 'child:create', 55 | }, 56 | steps: [ 57 | { 58 | name: 'child-work', 59 | run: async (ctx) => { 60 | const { input } = ctx.workflowInput(); 61 | // throw new Error('child error'); 62 | return { 'child-output': 'sm' }; 63 | }, 64 | }, 65 | { 66 | name: 'child-work2', 67 | run: async (ctx) => { 68 | const { input } = ctx.workflowInput(); 69 | // Perform CPU-bound work 70 | // throw new Error('child error'); 71 | console.log('child workflow input:', input); 72 | // Generate a large amount of garbage data 73 | 74 
| const garbageData = Array.from({ length: 1e6 / 3.5 }, (_, i) => `garbage-${i}`).join(','); 75 | console.log('Generated garbage data:', `${garbageData.slice(0, 100)}...`); // Print a snippet of the garbage data 76 | return { 'child-output': garbageData }; 77 | }, 78 | }, 79 | { 80 | name: 'child-work3', 81 | parents: ['child-work'], 82 | run: async (ctx) => { 83 | const { input } = ctx.workflowInput(); 84 | // throw new Error('child error'); 85 | const garbageData = Array.from({ length: 1e6 / 3.5 }, (_, i) => `garbage-${i}`).join(','); 86 | console.log('Generated garbage data:', `${garbageData.slice(0, 100)}...`); // Print a snippet of the garbage data 87 | return { 'child-output': garbageData }; 88 | }, 89 | }, 90 | ], 91 | }; 92 | 93 | async function main() { 94 | const worker = await hatchet.worker('fanout-worker', { maxRuns: 1000 }); 95 | await worker.registerWorkflow(parentWorkflow); 96 | await worker.registerWorkflow(childWorkflow); 97 | worker.start(); 98 | } 99 | 100 | main(); 101 | -------------------------------------------------------------------------------- /src/examples/logger.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../sdk'; 2 | import { Workflow } from '../workflow'; 3 | 4 | const hatchet = Hatchet.init({ 5 | log_level: 'OFF', 6 | }); 7 | 8 | const sleep = (ms: number) => 9 | new Promise((resolve) => { 10 | setTimeout(resolve, ms); 11 | }); 12 | 13 | const workflow: Workflow = { 14 | id: 'logger-example', 15 | description: 'test', 16 | on: { 17 | event: 'user:create', 18 | }, 19 | steps: [ 20 | { 21 | name: 'logger-step1', 22 | run: async (ctx) => { 23 | // log in a for loop 24 | // eslint-disable-next-line no-plusplus 25 | for (let i = 0; i < 10; i++) { 26 | ctx.log(`log message ${i}`); 27 | await sleep(200); 28 | } 29 | 30 | return { step1: 'completed step run' }; 31 | }, 32 | }, 33 | ], 34 | }; 35 | 36 | async function main() { 37 | const worker = await 
hatchet.worker('logger-worker', 1); 38 | await worker.registerWorkflow(workflow); 39 | worker.start(); 40 | } 41 | 42 | main(); 43 | -------------------------------------------------------------------------------- /src/examples/manual-trigger.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../sdk'; 2 | 3 | const hatchet = Hatchet.init(); 4 | 5 | async function main() { 6 | const workflowRun = hatchet.admin.runWorkflow('simple-workflow', {}); 7 | const stream = await workflowRun.stream(); 8 | 9 | for await (const event of stream) { 10 | console.log('event received', event); 11 | } 12 | } 13 | 14 | main(); 15 | -------------------------------------------------------------------------------- /src/examples/multi-workflow.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../sdk'; 2 | 3 | const hatchet: Hatchet = Hatchet.init(); 4 | 5 | async function main() { 6 | const worker = await hatchet.worker('test-worker2', 5); 7 | 8 | await worker.registerWorkflow({ 9 | id: 'test1', 10 | description: 'desc', 11 | on: { 12 | event: 'test1', 13 | }, 14 | steps: [ 15 | { 16 | name: 'test1-step1', 17 | run: (ctx) => { 18 | console.log('executed step1 of test1!'); 19 | return { step1: 'step1' }; 20 | }, 21 | }, 22 | ], 23 | }); 24 | 25 | await worker.registerWorkflow({ 26 | id: 'test2', 27 | description: 'desc', 28 | on: { 29 | event: 'test2', 30 | }, 31 | steps: [ 32 | { 33 | name: 'test2-step1', 34 | run: (ctx) => { 35 | console.log('executed step1 of test2!'); 36 | return { step1: 'step1' }; 37 | }, 38 | }, 39 | ], 40 | }); 41 | 42 | await worker.start(); 43 | } 44 | 45 | main(); 46 | -------------------------------------------------------------------------------- /src/examples/namespaced-worker.e2e.ts: -------------------------------------------------------------------------------- 1 | import { Workflow, Worker } from '..'; 2 | import sleep from 
'../util/sleep'; 3 | import Hatchet from '../sdk'; 4 | 5 | xdescribe('e2e', () => { 6 | let hatchet: Hatchet; 7 | let worker: Worker; 8 | 9 | beforeEach(async () => { 10 | hatchet = Hatchet.init({ 11 | namespace: 'dev', 12 | }); 13 | worker = await hatchet.worker('example-worker'); 14 | }); 15 | 16 | afterEach(async () => { 17 | await worker.stop(); 18 | await sleep(2000); 19 | }); 20 | 21 | it('should pass a simple workflow', async () => { 22 | let invoked = 0; 23 | const start = new Date(); 24 | 25 | const workflow: Workflow = { 26 | id: 'namespaced-e2e-workflow', 27 | description: 'test', 28 | on: { 29 | event: 'user:create-namespaced', 30 | }, 31 | steps: [ 32 | { 33 | name: 'step1', 34 | run: async (ctx) => { 35 | console.log('starting step1 with the following input', ctx.workflowInput()); 36 | console.log(`took ${new Date().getTime() - start.getTime()}ms`); 37 | invoked += 1; 38 | return { step1: 'step1 results!' }; 39 | }, 40 | }, 41 | { 42 | name: 'step2', 43 | parents: ['step1'], 44 | run: (ctx) => { 45 | console.log(`step 1 -> 2 took ${new Date().getTime() - start.getTime()}ms`); 46 | console.log('executed step2 after step1 returned ', ctx.stepOutput('step1')); 47 | invoked += 1; 48 | return { step2: 'step2 results!' 
}; 49 | }, 50 | }, 51 | ], 52 | }; 53 | 54 | console.log('registering workflow...'); 55 | await worker.registerWorkflow(workflow); 56 | 57 | void worker.start(); 58 | 59 | console.log('worker started.'); 60 | 61 | await sleep(5000); 62 | 63 | console.log('pushing event...'); 64 | 65 | await hatchet.event.push('user:create-namespaced', { 66 | test: 'test', 67 | }); 68 | 69 | await sleep(10000); 70 | 71 | console.log('invoked', invoked); 72 | 73 | expect(invoked).toEqual(2); 74 | }, 60000); 75 | }); 76 | -------------------------------------------------------------------------------- /src/examples/namespaced-worker.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../sdk'; 2 | import { Workflow } from '../workflow'; 3 | 4 | const hatchet = Hatchet.init({ 5 | namespace: 'example-namespace', 6 | }); 7 | 8 | const sleep = (ms: number) => 9 | new Promise((resolve) => { 10 | setTimeout(resolve, ms); 11 | }); 12 | 13 | const workflow: Workflow = { 14 | id: 'simple-workflow', 15 | description: 'test', 16 | on: { 17 | event: 'user:create', 18 | }, 19 | steps: [ 20 | { 21 | name: 'step1', 22 | run: async (ctx) => { 23 | console.log('starting step1 with the following input', ctx.workflowInput()); 24 | console.log('waiting 5 seconds...'); 25 | await sleep(5000); 26 | console.log('executed step1!'); 27 | return { step1: 'step1 results!' }; 28 | }, 29 | }, 30 | { 31 | name: 'step2', 32 | parents: ['step1'], 33 | run: (ctx) => { 34 | console.log('executed step2 after step1 returned ', ctx.stepOutput('step1')); 35 | return { step2: 'step2 results!' 
}; 36 | }, 37 | }, 38 | ], 39 | }; 40 | 41 | async function main() { 42 | const worker = await hatchet.worker('example-worker'); 43 | await worker.registerWorkflow(workflow); 44 | worker.start(); 45 | } 46 | 47 | main(); 48 | -------------------------------------------------------------------------------- /src/examples/on-failure.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../sdk'; 2 | import { Workflow } from '../workflow'; 3 | 4 | const hatchet = Hatchet.init(); 5 | 6 | // ❓ OnFailure Step 7 | // This workflow will fail because the step will throw an error 8 | // we define an onFailure step to handle this case 9 | 10 | const workflow: Workflow = { 11 | // ... normal workflow definition 12 | id: 'on-failure-example', 13 | description: 'test', 14 | on: { 15 | event: 'user:create', 16 | }, 17 | // , 18 | steps: [ 19 | { 20 | name: 'step1', 21 | run: async (ctx) => { 22 | // 👀 this step will always throw an error 23 | throw new Error('Step 1 failed'); 24 | }, 25 | }, 26 | ], 27 | // 👀 After the workflow fails, this special step will run 28 | onFailure: { 29 | name: 'on-failure-step', 30 | run: async (ctx) => { 31 | // 👀 we can do things like perform cleanup logic 32 | // or notify a user here 33 | 34 | // 👀 you can access the error from the failed step(s) like this 35 | console.log(ctx.stepRunErrors()); 36 | 37 | return { onFailure: 'step' }; 38 | }, 39 | }, 40 | }; 41 | // ‼️ 42 | 43 | // ❓ OnFailure With Details 44 | // Coming soon to TypeScript! 
https://github.com/hatchet-dev/hatchet-typescript/issues/447 45 | // ‼️ 46 | 47 | async function main() { 48 | const worker = await hatchet.worker('example-worker', 1); 49 | await worker.registerWorkflow(workflow); 50 | worker.start(); 51 | } 52 | 53 | main(); 54 | -------------------------------------------------------------------------------- /src/examples/playground.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../sdk'; 2 | import { Context } from '../step'; 3 | 4 | const hatchet: Hatchet = Hatchet.init(); 5 | 6 | async function main() { 7 | const worker = await hatchet.worker('test-playground'); 8 | 9 | await worker.registerWorkflow({ 10 | id: 'playground-ts', 11 | description: 'desc', 12 | on: { 13 | event: 'test1', 14 | }, 15 | steps: [ 16 | { 17 | name: 'test1-step1', 18 | run: (ctx: Context) => { 19 | const playground = ctx.playground('test1', 'default'); 20 | 21 | return { step1: playground, name: ctx.stepName(), workflowRunId: ctx.workflowRunId() }; 22 | }, 23 | }, 24 | ], 25 | }); 26 | 27 | await worker.start(); 28 | } 29 | 30 | main(); 31 | -------------------------------------------------------------------------------- /src/examples/rate-limit/events.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../../sdk'; 2 | 3 | const hatchet = Hatchet.init(); 4 | 5 | hatchet.event.push('rate-limit:create', { 6 | test: '1', 7 | }); 8 | hatchet.event.push('rate-limit:create', { 9 | test: '2', 10 | }); 11 | hatchet.event.push('rate-limit:create', { 12 | test: '3', 13 | }); 14 | -------------------------------------------------------------------------------- /src/examples/rate-limit/worker.ts: -------------------------------------------------------------------------------- 1 | import { RateLimitDuration } from '../../protoc/workflows'; 2 | import Hatchet from '../../sdk'; 3 | import { Workflow } from '../../workflow'; 4 | 5 | const hatchet = 
Hatchet.init(); 6 | 7 | const workflow: Workflow = { 8 | id: 'rate-limit-workflow', 9 | description: 'test', 10 | on: { 11 | event: 'rate-limit:create', 12 | }, 13 | steps: [ 14 | { 15 | name: 'dynamic', 16 | rate_limits: [ 17 | { 18 | dynamicKey: 'input.group', 19 | units: 1, 20 | limit: 10, 21 | duration: RateLimitDuration.DAY, 22 | }, 23 | ], 24 | run: async (ctx) => { 25 | console.log( 26 | 'starting step1 with the following input and a dynamic rate limit', 27 | ctx.workflowInput() 28 | ); 29 | return { step1: 'step1 results!' }; 30 | }, 31 | }, 32 | { 33 | name: 'static', 34 | rate_limits: [{ key: 'test-limit', units: 1 }], 35 | run: async (ctx) => { 36 | console.log( 37 | 'starting step1 with the following input and a static rate limit', 38 | ctx.workflowInput() 39 | ); 40 | return { step1: 'step1 results!' }; 41 | }, 42 | }, 43 | ], 44 | }; 45 | 46 | async function main() { 47 | await hatchet.admin.putRateLimit('test-limit', 1, RateLimitDuration.MINUTE); 48 | const worker = await hatchet.worker('example-worker'); 49 | await worker.registerWorkflow(workflow); 50 | worker.start(); 51 | } 52 | 53 | main(); 54 | -------------------------------------------------------------------------------- /src/examples/retries-with-backoff.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../sdk'; 2 | import { Workflow } from '../workflow'; 3 | 4 | const hatchet = Hatchet.init(); 5 | 6 | let numRetries = 0; 7 | 8 | // ❓ Backoff 9 | const workflow: Workflow = { 10 | // ... normal workflow definition 11 | id: 'retries-with-backoff', 12 | description: 'Backoff', 13 | // , 14 | steps: [ 15 | { 16 | name: 'backoff-step', 17 | // ... 
step definition 18 | run: async (ctx) => { 19 | if (numRetries < 5) { 20 | numRetries += 1; 21 | throw new Error('failed'); 22 | } 23 | 24 | return { backoff: 'completed' }; 25 | }, 26 | // , 27 | retries: 10, 28 | // 👀 Backoff configuration 29 | backoff: { 30 | // 👀 Maximum number of seconds to wait between retries 31 | maxSeconds: 60, 32 | // 👀 Factor to increase the wait time between retries. 33 | // This sequence will be 2s, 4s, 8s, 16s, 32s, 60s... due to the maxSeconds limit 34 | factor: 2, 35 | }, 36 | }, 37 | ], 38 | }; 39 | // ‼️ 40 | 41 | async function main() { 42 | const worker = await hatchet.worker('backoff-worker'); 43 | await worker.registerWorkflow(workflow); 44 | worker.start(); 45 | } 46 | 47 | main(); 48 | -------------------------------------------------------------------------------- /src/examples/retries-worker.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../sdk'; 2 | import { Workflow } from '../workflow'; 3 | 4 | const hatchet = Hatchet.init(); 5 | 6 | const sleep = (ms: number) => 7 | new Promise((resolve) => { 8 | setTimeout(resolve, ms); 9 | }); 10 | 11 | let numRetries = 0; 12 | 13 | const workflow: Workflow = { 14 | id: 'retries-workflow', 15 | description: 'test', 16 | on: { 17 | event: 'user:create', 18 | }, 19 | steps: [ 20 | { 21 | name: 'step1', 22 | run: async (ctx) => { 23 | if (numRetries < 3) { 24 | numRetries += 1; 25 | throw new Error('step1 failed'); 26 | } 27 | 28 | console.log('starting step1 with the following input', ctx.workflowInput()); 29 | console.log('waiting 5 seconds...'); 30 | await sleep(5000); 31 | console.log('executed step1!'); 32 | return { step1: 'step1 results!' }; 33 | }, 34 | retries: 3, 35 | }, 36 | { 37 | name: 'step2', 38 | parents: ['step1'], 39 | run: (ctx) => { 40 | console.log('executed step2 after step1 returned ', ctx.stepOutput('step1')); 41 | return { step2: 'step2 results!' 
}; 42 | }, 43 | }, 44 | ], 45 | }; 46 | 47 | async function main() { 48 | const worker = await hatchet.worker('example-worker'); 49 | await worker.registerWorkflow(workflow); 50 | worker.start(); 51 | } 52 | 53 | main(); 54 | -------------------------------------------------------------------------------- /src/examples/scheduled-runs/programatic-schedules.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../../sdk'; 2 | import { simpleWorkflow } from '../simple-worker'; 3 | 4 | const hatchet = Hatchet.init(); 5 | 6 | // This example assumes you have a worker already running 7 | // and registered simpleWorkflow to it 8 | 9 | async function main() { 10 | // ? Create 11 | // You can create dynamic scheduled runs programmatically via the API 12 | const createdScheduledRun = await hatchet.schedule.create( 13 | simpleWorkflow, // workflow object or string workflow id 14 | { 15 | triggerAt: new Date(Date.now() + 1000 * 60 * 60 * 24), // 24 hours from now 16 | input: { 17 | name: 'John Doe', 18 | }, 19 | additionalMetadata: { 20 | customerId: '123', 21 | }, 22 | } 23 | ); 24 | const { id } = createdScheduledRun.metadata; // id which you can later use to reference the scheduled run 25 | // !! 26 | 27 | // ? Get 28 | // You can get a specific scheduled run by passing in the scheduled run id 29 | const scheduledRun = await hatchet.schedule.get(id); 30 | // !! 31 | 32 | // ? Delete 33 | // You can delete a scheduled run by passing the scheduled run object 34 | // or a scheduled run Id to the delete method 35 | await hatchet.schedule.delete(scheduledRun); 36 | // !! 37 | 38 | // ? List 39 | // You can list all scheduled runs by passing in a query object 40 | const scheduledRunList = await hatchet.schedule.list({ 41 | offset: 0, 42 | limit: 10, 43 | }); 44 | // !! 
45 | } 46 | 47 | main(); 48 | -------------------------------------------------------------------------------- /src/examples/simple-worker.e2e.ts: -------------------------------------------------------------------------------- 1 | import { Workflow, Worker } from '..'; 2 | import sleep from '../util/sleep'; 3 | import Hatchet from '../sdk'; 4 | 5 | describe('e2e', () => { 6 | let hatchet: Hatchet; 7 | let worker: Worker; 8 | 9 | beforeEach(async () => { 10 | hatchet = Hatchet.init(); 11 | worker = await hatchet.worker('example-worker'); 12 | }); 13 | 14 | afterEach(async () => { 15 | await worker.stop(); 16 | await sleep(2000); 17 | }); 18 | 19 | it('should pass a simple workflow', async () => { 20 | let invoked = 0; 21 | const start = new Date(); 22 | 23 | const workflow: Workflow = { 24 | id: 'simple-e2e-workflow', 25 | description: 'test', 26 | on: { 27 | event: 'user:create-simple', 28 | }, 29 | steps: [ 30 | { 31 | name: 'step1', 32 | run: async (ctx) => { 33 | console.log('starting step1 with the following input', ctx.workflowInput()); 34 | console.log(`took ${new Date().getTime() - start.getTime()}ms`); 35 | invoked += 1; 36 | return { step1: 'step1 results!' }; 37 | }, 38 | }, 39 | { 40 | name: 'step2', 41 | parents: ['step1'], 42 | run: (ctx) => { 43 | console.log(`step 1 -> 2 took ${new Date().getTime() - start.getTime()}ms`); 44 | console.log('executed step2 after step1 returned ', ctx.stepOutput('step1')); 45 | invoked += 1; 46 | return { step2: 'step2 results!' 
}; 47 | }, 48 | }, 49 | ], 50 | }; 51 | 52 | console.log('registering workflow...'); 53 | await worker.registerWorkflow(workflow); 54 | 55 | void worker.start(); 56 | 57 | console.log('worker started.'); 58 | 59 | await sleep(5000); 60 | 61 | console.log('pushing event...'); 62 | 63 | await hatchet.event.push('user:create-simple', { 64 | test: 'test', 65 | }); 66 | 67 | await sleep(10000); 68 | 69 | console.log('invoked', invoked); 70 | 71 | expect(invoked).toEqual(2); 72 | 73 | await worker.stop(); 74 | }, 60000); 75 | }); 76 | -------------------------------------------------------------------------------- /src/examples/simple-worker.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../sdk'; 2 | import { Workflow } from '../workflow'; 3 | 4 | const hatchet = Hatchet.init(); 5 | 6 | const sleep = (ms: number) => 7 | new Promise((resolve) => { 8 | setTimeout(resolve, ms); 9 | }); 10 | 11 | export const simpleWorkflow: Workflow = { 12 | id: 'simple-workflow', 13 | description: 'test', 14 | on: { 15 | event: 'user:create', 16 | }, 17 | steps: [ 18 | { 19 | name: 'step1', 20 | run: async (ctx) => { 21 | console.log('starting step1 with the following input', ctx.workflowInput()); 22 | console.log('waiting 5 seconds...'); 23 | await sleep(5000); 24 | ctx.putStream('step1 stream'); 25 | console.log('executed step1!'); 26 | return { step1: 'step1 results!' }; 27 | }, 28 | }, 29 | { 30 | name: 'step2', 31 | parents: ['step1'], 32 | run: (ctx) => { 33 | console.log('executed step2 after step1 returned ', ctx.stepOutput('step1')); 34 | return { step2: 'step2 results!' 
}; 35 | }, 36 | }, 37 | ], 38 | }; 39 | 40 | async function main() { 41 | const worker = await hatchet.worker('example-worker'); 42 | await worker.registerWorkflow(simpleWorkflow); 43 | worker.start(); 44 | } 45 | 46 | if (require.main === module) { 47 | main(); 48 | } 49 | -------------------------------------------------------------------------------- /src/examples/sticky-trigger.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../sdk'; 2 | 3 | const hatchet = Hatchet.init(); 4 | 5 | async function main() { 6 | const workflowRunResponse = hatchet.admin.runWorkflow( 7 | 'sticky-workflow', 8 | {}, 9 | { 10 | additionalMetadata: { 11 | key: 'value', 12 | }, 13 | } 14 | ); 15 | 16 | const result = await workflowRunResponse; 17 | 18 | for await (const event of await result.stream()) { 19 | console.log('event received', event); 20 | } 21 | } 22 | 23 | main(); 24 | -------------------------------------------------------------------------------- /src/examples/sticky-worker-with-check.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../sdk'; 2 | import { StickyStrategy, Workflow } from '../workflow'; 3 | 4 | const hatchet = Hatchet.init(); 5 | 6 | const workflow: Workflow = { 7 | id: 'sticky-workflow', 8 | description: 'test', 9 | 10 | steps: [ 11 | { 12 | name: 'step1', 13 | run: async (ctx) => { 14 | const results: Promise[] = []; 15 | const count = 57; 16 | hardChildWorkerId = undefined; // we reset this - if we run this multiple times at the same time it will break 17 | // eslint-disable-next-line no-plusplus 18 | for (let i = 0; i < count; i++) { 19 | const result = await ctx.spawnWorkflow(childWorkflow, {}, { sticky: true }); 20 | results.push(result.result()); 21 | const result2 = await ctx.spawnWorkflow(softChildWorkflow, {}, { sticky: true }); 22 | results.push(result2.result()); 23 | } 24 | console.log('Spawned ', count, ' child workflows of 
each type'); 25 | console.log('Results:', await Promise.all(results)); 26 | 27 | return { step1: 'step1 results!' }; 28 | }, 29 | }, 30 | ], 31 | }; 32 | let hardChildWorkerId: string | undefined; 33 | const childWorkflow: Workflow = { 34 | id: 'child-sticky-workflow', 35 | description: 'test', 36 | sticky: StickyStrategy.HARD, 37 | steps: [ 38 | { 39 | name: 'child-step1', 40 | run: async (ctx) => { 41 | const workerId = ctx.worker.id(); 42 | 43 | console.log(`1: Worker ID: ${workerId}`); 44 | 45 | if (!hardChildWorkerId) { 46 | hardChildWorkerId = workerId; 47 | } else if (hardChildWorkerId !== workerId) { 48 | throw new Error(`Expected worker ID ${hardChildWorkerId} but got ${workerId}`); 49 | } 50 | return { childStep1: `${workerId}` }; 51 | }, 52 | }, 53 | { 54 | name: 'child-step2', 55 | run: async (ctx) => { 56 | const workerId = ctx.worker.id(); 57 | console.log(`2: Worker ID: ${workerId}`); 58 | return { childStep2: `${workerId}` }; 59 | }, 60 | }, 61 | ], 62 | }; 63 | 64 | const softChildWorkflow: Workflow = { 65 | id: 'child-sticky-workflow-soft', 66 | description: 'test', 67 | sticky: StickyStrategy.SOFT, 68 | steps: [ 69 | { 70 | name: 'child-step1', 71 | run: async (ctx) => { 72 | const workerId = ctx.worker.id(); 73 | 74 | console.log(`1: Worker ID: ${workerId}`); 75 | return { childStep1: `SOFT ${workerId}` }; 76 | }, 77 | }, 78 | { 79 | name: 'child-step2', 80 | run: async (ctx) => { 81 | const workerId = ctx.worker.id(); 82 | console.log(`2: Worker ID: ${workerId}`); 83 | return { childStep2: `SOFT ${workerId}` }; 84 | }, 85 | }, 86 | ], 87 | }; 88 | 89 | async function main() { 90 | const worker1 = await hatchet.worker('sticky-worker-1'); 91 | await worker1.registerWorkflow(workflow); 92 | await worker1.registerWorkflow(childWorkflow); 93 | await worker1.registerWorkflow(softChildWorkflow); 94 | worker1.start(); 95 | 96 | const worker2 = await hatchet.worker('sticky-worker-2'); 97 | await worker2.registerWorkflow(workflow); 98 | await 
worker2.registerWorkflow(childWorkflow); 99 | await worker2.registerWorkflow(softChildWorkflow); 100 | 101 | worker2.start(); 102 | } 103 | 104 | main(); 105 | -------------------------------------------------------------------------------- /src/examples/sticky-worker.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../sdk'; 2 | import { StickyStrategy, Workflow } from '../workflow'; 3 | 4 | const hatchet = Hatchet.init(); 5 | 6 | const workflow: Workflow = { 7 | id: 'sticky-workflow', 8 | description: 'test', 9 | steps: [ 10 | { 11 | name: 'step1', 12 | run: async (ctx) => { 13 | const results: Promise[] = []; 14 | 15 | // eslint-disable-next-line no-plusplus 16 | for (let i = 0; i < 50; i++) { 17 | const result = await ctx.spawnWorkflow(childWorkflow, {}, { sticky: true }); 18 | results.push(result.result()); 19 | } 20 | console.log('Spawned 50 child workflows'); 21 | console.log('Results:', await Promise.all(results)); 22 | 23 | return { step1: 'step1 results!' 
}; 24 | }, 25 | }, 26 | ], 27 | }; 28 | 29 | const childWorkflow: Workflow = { 30 | id: 'child-sticky-workflow', 31 | description: 'test', 32 | sticky: StickyStrategy.HARD, 33 | steps: [ 34 | { 35 | name: 'child-step1', 36 | run: async (ctx) => { 37 | const workerId = ctx.worker.id(); 38 | 39 | console.log(`1: Worker ID: ${workerId}`); 40 | return { childStep1: `${workerId}` }; 41 | }, 42 | }, 43 | { 44 | name: 'child-step2', 45 | run: async (ctx) => { 46 | const workerId = ctx.worker.id(); 47 | console.log(`2: Worker ID: ${workerId}`); 48 | return { childStep2: `${workerId}` }; 49 | }, 50 | }, 51 | ], 52 | }; 53 | 54 | async function main() { 55 | const worker1 = await hatchet.worker('sticky-worker-1'); 56 | await worker1.registerWorkflow(workflow); 57 | await worker1.registerWorkflow(childWorkflow); 58 | worker1.start(); 59 | 60 | const worker2 = await hatchet.worker('sticky-worker-2'); 61 | await worker2.registerWorkflow(workflow); 62 | await worker2.registerWorkflow(childWorkflow); 63 | worker2.start(); 64 | } 65 | 66 | main(); 67 | -------------------------------------------------------------------------------- /src/examples/stream-by-additional-meta.ts: -------------------------------------------------------------------------------- 1 | import Hatchet from '../sdk'; 2 | 3 | const hatchet = Hatchet.init(); 4 | 5 | async function main() { 6 | // Generate a random stream key to use to track all 7 | // stream events for this workflow run. 8 | const streamKey = 'streamKey'; 9 | const streamVal = `sk-${Math.random().toString(36).substring(7)}`; 10 | 11 | // Specify the stream key as additional metadata 12 | // when running the workflow. 13 | 14 | // This key gets propagated to all child workflows 15 | // and can have an arbitrary property name. 
16 | await hatchet.admin.runWorkflow( 17 | 'parent-workflow', 18 | {}, 19 | { additionalMetadata: { [streamKey]: streamVal } } 20 | ); 21 | 22 | // Stream all events for the additional meta key value 23 | const stream = await hatchet.listener.streamByAdditionalMeta(streamKey, streamVal); 24 | 25 | for await (const event of stream) { 26 | console.log('event received', event); 27 | } 28 | } 29 | 30 | main(); 31 | -------------------------------------------------------------------------------- /src/examples/webhooks.e2e.ts: -------------------------------------------------------------------------------- 1 | import { createServer } from 'node:http'; 2 | import { AxiosError } from 'axios'; 3 | import { Workflow, Worker } from '..'; 4 | import sleep from '../util/sleep'; 5 | import Hatchet from '../sdk'; 6 | 7 | const port = 8369; 8 | 9 | describe('webhooks', () => { 10 | let hatchet: Hatchet; 11 | let worker: Worker; 12 | 13 | beforeEach(async () => { 14 | hatchet = Hatchet.init(); 15 | worker = await hatchet.worker('webhook-workflow'); 16 | }); 17 | 18 | afterEach(async () => { 19 | await worker.stop(); 20 | await sleep(2000); 21 | }); 22 | 23 | it('should pass a webhook workflow', async () => { 24 | let invoked = 0; 25 | 26 | const workflow: Workflow = { 27 | id: 'webhook-workflow', 28 | description: 'test', 29 | on: { 30 | event: 'user:create-webhook', 31 | }, 32 | steps: [ 33 | { 34 | name: 'step1', 35 | run: async (ctx) => { 36 | console.log('step1', ctx.workflowInput()); 37 | invoked += 1; 38 | return { message: `${ctx.workflowName()} results!` }; 39 | }, 40 | }, 41 | { 42 | name: 'step2', 43 | parents: ['step1'], 44 | run: (ctx) => { 45 | console.log('step2', ctx.workflowInput()); 46 | invoked += 1; 47 | return { message: `${ctx.workflowName()} results!` }; 48 | }, 49 | }, 50 | ], 51 | }; 52 | 53 | // registering workflows is not needed because it will be done automatically 54 | 55 | const secret = 'secret'; 56 | 57 | console.log('registering webhook...'); 58 | 
try { 59 | await worker.registerWebhook({ 60 | name: 'webhook-example', 61 | secret, 62 | url: `http://localhost:${port}/webhook`, 63 | }); 64 | } catch (e) { 65 | const axiosError = e as AxiosError; 66 | console.error(axiosError.response?.data, axiosError.request, axiosError.request.method); 67 | throw e; 68 | } 69 | 70 | console.log('starting worker...'); 71 | 72 | const handler = hatchet.webhooks([workflow]); 73 | 74 | const server = createServer(handler.httpHandler({ secret })); 75 | 76 | await new Promise((resolve) => { 77 | server.listen(port, () => { 78 | resolve(''); 79 | }); 80 | }); 81 | 82 | console.log('server started.'); 83 | console.log('waiting for worker to be registered...'); 84 | 85 | // wait for engine to pick up the webhook worker 86 | await sleep(30_000 + 10_000); 87 | 88 | console.log('webhook wait time complete.'); 89 | 90 | console.log('pushing event...'); 91 | 92 | await hatchet.event.push('user:create-webhook', { 93 | test: 'test', 94 | }); 95 | 96 | await sleep(10000); 97 | 98 | console.log('invoked', invoked); 99 | 100 | // FIXME: add this back 101 | // expect(invoked).toEqual(2); 102 | 103 | await worker.stop(); 104 | }, 60000); 105 | }); 106 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | import { HatchetClient as Hatchet } from '@clients/hatchet-client'; 2 | 3 | export * from './workflow'; 4 | export * from './step'; 5 | export * from './clients/worker'; 6 | export * from './clients/rest'; 7 | export * from './clients/admin'; 8 | export * from './util/workflow-run-ref'; 9 | 10 | export default Hatchet; 11 | export { Hatchet }; 12 | -------------------------------------------------------------------------------- /src/protoc/dispatcher/index.ts: -------------------------------------------------------------------------------- 1 | export * from './dispatcher'; 2 | 
-------------------------------------------------------------------------------- /src/protoc/events/index.ts: -------------------------------------------------------------------------------- 1 | export * from './events'; 2 | -------------------------------------------------------------------------------- /src/protoc/google/protobuf/timestamp.ts: -------------------------------------------------------------------------------- 1 | // Code generated by protoc-gen-ts_proto. DO NOT EDIT. 2 | // versions: 3 | // protoc-gen-ts_proto v2.6.0 4 | // protoc v3.19.1 5 | // source: google/protobuf/timestamp.proto 6 | 7 | /* eslint-disable */ 8 | import { BinaryReader, BinaryWriter } from '@bufbuild/protobuf/wire'; 9 | 10 | export const protobufPackage = 'google.protobuf'; 11 | 12 | /** 13 | * A Timestamp represents a point in time independent of any time zone or local 14 | * calendar, encoded as a count of seconds and fractions of seconds at 15 | * nanosecond resolution. The count is relative to an epoch at UTC midnight on 16 | * January 1, 1970, in the proleptic Gregorian calendar which extends the 17 | * Gregorian calendar backwards to year one. 18 | * 19 | * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap 20 | * second table is needed for interpretation, using a [24-hour linear 21 | * smear](https://developers.google.com/time/smear). 22 | * 23 | * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By 24 | * restricting to that range, we ensure that we can convert to and from [RFC 25 | * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. 26 | * 27 | * # Examples 28 | * 29 | * Example 1: Compute Timestamp from POSIX `time()`. 30 | * 31 | * Timestamp timestamp; 32 | * timestamp.set_seconds(time(NULL)); 33 | * timestamp.set_nanos(0); 34 | * 35 | * Example 2: Compute Timestamp from POSIX `gettimeofday()`. 
36 | * 37 | * struct timeval tv; 38 | * gettimeofday(&tv, NULL); 39 | * 40 | * Timestamp timestamp; 41 | * timestamp.set_seconds(tv.tv_sec); 42 | * timestamp.set_nanos(tv.tv_usec * 1000); 43 | * 44 | * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. 45 | * 46 | * FILETIME ft; 47 | * GetSystemTimeAsFileTime(&ft); 48 | * UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; 49 | * 50 | * // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z 51 | * // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. 52 | * Timestamp timestamp; 53 | * timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); 54 | * timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); 55 | * 56 | * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. 57 | * 58 | * long millis = System.currentTimeMillis(); 59 | * 60 | * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) 61 | * .setNanos((int) ((millis % 1000) * 1000000)).build(); 62 | * 63 | * Example 5: Compute Timestamp from Java `Instant.now()`. 64 | * 65 | * Instant now = Instant.now(); 66 | * 67 | * Timestamp timestamp = 68 | * Timestamp.newBuilder().setSeconds(now.getEpochSecond()) 69 | * .setNanos(now.getNano()).build(); 70 | * 71 | * Example 6: Compute Timestamp from current time in Python. 72 | * 73 | * timestamp = Timestamp() 74 | * timestamp.GetCurrentTime() 75 | * 76 | * # JSON Mapping 77 | * 78 | * In JSON format, the Timestamp type is encoded as a string in the 79 | * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the 80 | * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" 81 | * where {year} is always expressed using four digits while {month}, {day}, 82 | * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional 83 | * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), 84 | * are optional. 
The "Z" suffix indicates the timezone ("UTC"); the timezone 85 | * is required. A proto3 JSON serializer should always use UTC (as indicated by 86 | * "Z") when printing the Timestamp type and a proto3 JSON parser should be 87 | * able to accept both UTC and other timezones (as indicated by an offset). 88 | * 89 | * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past 90 | * 01:30 UTC on January 15, 2017. 91 | * 92 | * In JavaScript, one can convert a Date object to this format using the 93 | * standard 94 | * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) 95 | * method. In Python, a standard `datetime.datetime` object can be converted 96 | * to this format using 97 | * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with 98 | * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use 99 | * the Joda Time's [`ISODateTimeFormat.dateTime()`]( 100 | * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D 101 | * ) to obtain a formatter capable of generating timestamps in this format. 102 | */ 103 | export interface Timestamp { 104 | /** 105 | * Represents seconds of UTC time since Unix epoch 106 | * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 107 | * 9999-12-31T23:59:59Z inclusive. 108 | */ 109 | seconds: number; 110 | /** 111 | * Non-negative fractions of a second at nanosecond resolution. Negative 112 | * second values with fractions must still have non-negative nanos values 113 | * that count forward in time. Must be from 0 to 999,999,999 114 | * inclusive. 
115 | */ 116 | nanos: number; 117 | } 118 | 119 | function createBaseTimestamp(): Timestamp { 120 | return { seconds: 0, nanos: 0 }; 121 | } 122 | 123 | export const Timestamp: MessageFns = { 124 | encode(message: Timestamp, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { 125 | if (message.seconds !== 0) { 126 | writer.uint32(8).int64(message.seconds); 127 | } 128 | if (message.nanos !== 0) { 129 | writer.uint32(16).int32(message.nanos); 130 | } 131 | return writer; 132 | }, 133 | 134 | decode(input: BinaryReader | Uint8Array, length?: number): Timestamp { 135 | const reader = input instanceof BinaryReader ? input : new BinaryReader(input); 136 | let end = length === undefined ? reader.len : reader.pos + length; 137 | const message = createBaseTimestamp(); 138 | while (reader.pos < end) { 139 | const tag = reader.uint32(); 140 | switch (tag >>> 3) { 141 | case 1: { 142 | if (tag !== 8) { 143 | break; 144 | } 145 | 146 | message.seconds = longToNumber(reader.int64()); 147 | continue; 148 | } 149 | case 2: { 150 | if (tag !== 16) { 151 | break; 152 | } 153 | 154 | message.nanos = reader.int32(); 155 | continue; 156 | } 157 | } 158 | if ((tag & 7) === 4 || tag === 0) { 159 | break; 160 | } 161 | reader.skip(tag & 7); 162 | } 163 | return message; 164 | }, 165 | 166 | fromJSON(object: any): Timestamp { 167 | return { 168 | seconds: isSet(object.seconds) ? globalThis.Number(object.seconds) : 0, 169 | nanos: isSet(object.nanos) ? globalThis.Number(object.nanos) : 0, 170 | }; 171 | }, 172 | 173 | toJSON(message: Timestamp): unknown { 174 | const obj: any = {}; 175 | if (message.seconds !== 0) { 176 | obj.seconds = Math.round(message.seconds); 177 | } 178 | if (message.nanos !== 0) { 179 | obj.nanos = Math.round(message.nanos); 180 | } 181 | return obj; 182 | }, 183 | 184 | create(base?: DeepPartial): Timestamp { 185 | return Timestamp.fromPartial(base ?? 
{}); 186 | }, 187 | fromPartial(object: DeepPartial): Timestamp { 188 | const message = createBaseTimestamp(); 189 | message.seconds = object.seconds ?? 0; 190 | message.nanos = object.nanos ?? 0; 191 | return message; 192 | }, 193 | }; 194 | 195 | type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; 196 | 197 | export type DeepPartial = T extends Builtin 198 | ? T 199 | : T extends globalThis.Array 200 | ? globalThis.Array> 201 | : T extends ReadonlyArray 202 | ? ReadonlyArray> 203 | : T extends {} 204 | ? { [K in keyof T]?: DeepPartial } 205 | : Partial; 206 | 207 | function longToNumber(int64: { toString(): string }): number { 208 | const num = globalThis.Number(int64.toString()); 209 | if (num > globalThis.Number.MAX_SAFE_INTEGER) { 210 | throw new globalThis.Error('Value is larger than Number.MAX_SAFE_INTEGER'); 211 | } 212 | if (num < globalThis.Number.MIN_SAFE_INTEGER) { 213 | throw new globalThis.Error('Value is smaller than Number.MIN_SAFE_INTEGER'); 214 | } 215 | return num; 216 | } 217 | 218 | function isSet(value: any): boolean { 219 | return value !== null && value !== undefined; 220 | } 221 | 222 | export interface MessageFns { 223 | encode(message: T, writer?: BinaryWriter): BinaryWriter; 224 | decode(input: BinaryReader | Uint8Array, length?: number): T; 225 | fromJSON(object: any): T; 226 | toJSON(message: T): unknown; 227 | create(base?: DeepPartial): T; 228 | fromPartial(object: DeepPartial): T; 229 | } 230 | -------------------------------------------------------------------------------- /src/protoc/workflows/index.ts: -------------------------------------------------------------------------------- 1 | export * from './workflows'; 2 | -------------------------------------------------------------------------------- /src/sdk.ts: -------------------------------------------------------------------------------- 1 | import { HatchetClient as Hatchet } from '@clients/hatchet-client'; 2 | 3 | export default Hatchet; 4 | 
-------------------------------------------------------------------------------- /src/util/config-loader/config-loader.test.ts: -------------------------------------------------------------------------------- 1 | import { ConfigLoader } from './config-loader'; 2 | 3 | fdescribe('ConfigLoader', () => { 4 | beforeEach(() => { 5 | process.env.HATCHET_CLIENT_TOKEN = 6 | 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJncnBjX2Jyb2FkY2FzdF9hZGRyZXNzIjoiMTI3LjAuMC4xOjgwODAiLCJzZXJ2ZXJfdXJsIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoiNzA3ZDA4NTUtODBhYi00ZTFmLWExNTYtZjFjNDU0NmNiZjUyIn0K.abcdef'; 7 | process.env.HATCHET_CLIENT_TLS_CERT_FILE = 'TLS_CERT_FILE'; 8 | process.env.HATCHET_CLIENT_TLS_KEY_FILE = 'TLS_KEY_FILE'; 9 | process.env.HATCHET_CLIENT_TLS_ROOT_CA_FILE = 'TLS_ROOT_CA_FILE'; 10 | process.env.HATCHET_CLIENT_TLS_SERVER_NAME = 'TLS_SERVER_NAME'; 11 | }); 12 | 13 | it('should load from environment variables', () => { 14 | const config = ConfigLoader.loadClientConfig(); 15 | expect(config).toEqual({ 16 | host_port: '127.0.0.1:8080', 17 | log_level: 'INFO', 18 | namespace: '', 19 | api_url: 'http://localhost:8080', 20 | token: 21 | 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJncnBjX2Jyb2FkY2FzdF9hZGRyZXNzIjoiMTI3LjAuMC4xOjgwODAiLCJzZXJ2ZXJfdXJsIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoiNzA3ZDA4NTUtODBhYi00ZTFmLWExNTYtZjFjNDU0NmNiZjUyIn0K.abcdef', 22 | tenant_id: '707d0855-80ab-4e1f-a156-f1c4546cbf52', 23 | tls_config: { 24 | tls_strategy: 'tls', 25 | cert_file: 'TLS_CERT_FILE', 26 | key_file: 'TLS_KEY_FILE', 27 | ca_file: 'TLS_ROOT_CA_FILE', 28 | server_name: 'TLS_SERVER_NAME', 29 | }, 30 | }); 31 | }); 32 | 33 | it('should throw an error if the file is not found', () => { 34 | expect(() => 35 | ConfigLoader.loadClientConfig( 36 | {}, 37 | { 38 | path: './fixtures/not-found.yaml', 39 | } 40 | ) 41 | ).toThrow(); 42 | }); 43 | 44 | xit('should throw an error if the yaml file fails validation', () => { 45 | expect(() => 46 | // This test is failing because there is no invalid 
state of the yaml file, need to update with tls and mtls settings 47 | ConfigLoader.loadClientConfig( 48 | {}, 49 | { 50 | path: './fixtures/.hatchet-invalid.yaml', 51 | } 52 | ) 53 | ).toThrow(); 54 | }); 55 | 56 | it('should favor yaml config over env vars', () => { 57 | const config = ConfigLoader.loadClientConfig( 58 | {}, 59 | { 60 | path: './fixtures/.hatchet.yaml', 61 | } 62 | ); 63 | expect(config).toEqual({ 64 | token: 65 | 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJncnBjX2Jyb2FkY2FzdF9hZGRyZXNzIjoiMTI3LjAuMC4xOjgwODAiLCJzZXJ2ZXJfdXJsIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoiNzA3ZDA4NTUtODBhYi00ZTFmLWExNTYtZjFjNDU0NmNiZjUyIn0K.abcdef', 66 | host_port: 'HOST_PORT_YAML', 67 | log_level: 'INFO', 68 | namespace: '', 69 | api_url: 'http://localhost:8080', 70 | tenant_id: '707d0855-80ab-4e1f-a156-f1c4546cbf52', 71 | tls_config: { 72 | tls_strategy: 'tls', 73 | cert_file: 'TLS_CERT_FILE_YAML', 74 | key_file: 'TLS_KEY_FILE_YAML', 75 | ca_file: 'TLS_ROOT_CA_FILE_YAML', 76 | server_name: 'TLS_SERVER_NAME_YAML', 77 | }, 78 | }); 79 | }); 80 | 81 | xit('should attempt to load the root .hatchet.yaml config', () => { 82 | // i'm not sure the best way to test this, maybe spy on readFileSync called with 83 | const config = ConfigLoader.loadClientConfig( 84 | {}, 85 | { 86 | path: './fixtures/.hatchet.yaml', 87 | } 88 | ); 89 | expect(config).toEqual({ 90 | token: 91 | 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJncnBjX2Jyb2FkY2FzdF9hZGRyZXNzIjoiMTI3LjAuMC4xOjgwODAiLCJzZXJ2ZXJfdXJsIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoiNzA3ZDA4NTUtODBhYi00ZTFmLWExNTYtZjFjNDU0NmNiZjUyIn0K.abcdef', 92 | host_port: 'HOST_PORT_YAML', 93 | tls_config: { 94 | tls_strategy: 'tls', 95 | cert_file: 'TLS_CERT_FILE_YAML', 96 | key_file: 'TLS_KEY_FILE_YAML', 97 | ca_file: 'TLS_ROOT_CA_FILE_YAML', 98 | server_name: 'TLS_SERVER_NAME_YAML', 99 | }, 100 | }); 101 | }); 102 | }); 103 | -------------------------------------------------------------------------------- 
/src/util/config-loader/config-loader.ts: -------------------------------------------------------------------------------- 1 | import { parse } from 'yaml'; 2 | import { readFileSync } from 'fs'; 3 | import * as p from 'path'; 4 | import { z } from 'zod'; 5 | import { ClientConfig, ClientConfigSchema } from '@clients/hatchet-client'; 6 | import { ChannelCredentials } from 'nice-grpc'; 7 | import { LogLevel } from '@util/logger'; 8 | import { getAddressesFromJWT, getTenantIdFromJWT } from './token'; 9 | 10 | type EnvVars = 11 | | 'HATCHET_CLIENT_TOKEN' 12 | | 'HATCHET_CLIENT_TLS_STRATEGY' 13 | | 'HATCHET_CLIENT_HOST_PORT' 14 | | 'HATCHET_CLIENT_API_URL' 15 | | 'HATCHET_CLIENT_TLS_CERT_FILE' 16 | | 'HATCHET_CLIENT_TLS_KEY_FILE' 17 | | 'HATCHET_CLIENT_TLS_ROOT_CA_FILE' 18 | | 'HATCHET_CLIENT_TLS_SERVER_NAME' 19 | | 'HATCHET_CLIENT_LOG_LEVEL' 20 | | 'HATCHET_CLIENT_NAMESPACE'; 21 | 22 | type TLSStrategy = 'tls' | 'mtls'; 23 | 24 | interface LoadClientConfigOptions { 25 | path?: string; 26 | } 27 | 28 | const DEFAULT_CONFIG_FILE = '.hatchet.yaml'; 29 | 30 | export class ConfigLoader { 31 | static loadClientConfig( 32 | override?: Partial, 33 | config?: LoadClientConfigOptions 34 | ): Partial { 35 | const yaml = this.loadYamlConfig(config?.path); 36 | const tlsConfig = override?.tls_config ?? { 37 | tls_strategy: 38 | yaml?.tls_config?.tls_strategy ?? 39 | (this.env('HATCHET_CLIENT_TLS_STRATEGY') as TLSStrategy | undefined) ?? 40 | 'tls', 41 | cert_file: yaml?.tls_config?.cert_file ?? this.env('HATCHET_CLIENT_TLS_CERT_FILE')!, 42 | key_file: yaml?.tls_config?.key_file ?? this.env('HATCHET_CLIENT_TLS_KEY_FILE')!, 43 | ca_file: yaml?.tls_config?.ca_file ?? this.env('HATCHET_CLIENT_TLS_ROOT_CA_FILE')!, 44 | server_name: yaml?.tls_config?.server_name ?? this.env('HATCHET_CLIENT_TLS_SERVER_NAME')!, 45 | }; 46 | 47 | const token = override?.token ?? yaml?.token ?? this.env('HATCHET_CLIENT_TOKEN'); 48 | 49 | if (!token) { 50 | throw new Error( 51 | 'No token provided. 
Provide it by setting the HATCHET_CLIENT_TOKEN environment variable.' 52 | ); 53 | } 54 | 55 | let grpcBroadcastAddress: string | undefined; 56 | let apiUrl: string | undefined; 57 | const tenantId = getTenantIdFromJWT(token!); 58 | 59 | if (!tenantId) { 60 | throw new Error('Tenant ID not found in subject claim of token'); 61 | } 62 | 63 | try { 64 | const addresses = getAddressesFromJWT(token!); 65 | 66 | grpcBroadcastAddress = 67 | override?.host_port ?? 68 | yaml?.host_port ?? 69 | this.env('HATCHET_CLIENT_HOST_PORT') ?? 70 | addresses.grpcBroadcastAddress; 71 | 72 | apiUrl = 73 | override?.api_url ?? 74 | yaml?.api_url ?? 75 | this.env('HATCHET_CLIENT_API_URL') ?? 76 | addresses.serverUrl; 77 | } catch (e) { 78 | grpcBroadcastAddress = 79 | override?.host_port ?? yaml?.host_port ?? this.env('HATCHET_CLIENT_HOST_PORT'); 80 | apiUrl = override?.api_url ?? yaml?.api_url ?? this.env('HATCHET_CLIENT_API_URL'); 81 | } 82 | 83 | const namespace = 84 | override?.namespace ?? yaml?.namespace ?? this.env('HATCHET_CLIENT_NAMESPACE'); 85 | 86 | return { 87 | token: override?.token ?? yaml?.token ?? this.env('HATCHET_CLIENT_TOKEN'), 88 | host_port: grpcBroadcastAddress, 89 | api_url: apiUrl, 90 | tls_config: tlsConfig, 91 | log_level: 92 | override?.log_level ?? 93 | yaml?.log_level ?? 94 | (this.env('HATCHET_CLIENT_LOG_LEVEL') as LogLevel) ?? 95 | 'INFO', 96 | tenant_id: tenantId, 97 | namespace: namespace ? `${namespace}_`.toLowerCase() : '', 98 | }; 99 | } 100 | 101 | static get default_yaml_config_path() { 102 | return p.join(process.cwd(), DEFAULT_CONFIG_FILE); 103 | } 104 | 105 | static createCredentials(config: ClientConfig['tls_config']): ChannelCredentials { 106 | // if none, create insecure credentials 107 | if (config.tls_strategy === 'none') { 108 | return ChannelCredentials.createInsecure(); 109 | } 110 | 111 | if (config.tls_strategy === 'tls') { 112 | const rootCerts = config.ca_file ? 
readFileSync(config.ca_file) : undefined; 113 | return ChannelCredentials.createSsl(rootCerts); 114 | } 115 | 116 | const rootCerts = config.ca_file ? readFileSync(config.ca_file) : null; 117 | const privateKey = config.key_file ? readFileSync(config.key_file) : null; 118 | const certChain = config.cert_file ? readFileSync(config.cert_file) : null; 119 | return ChannelCredentials.createSsl(rootCerts, privateKey, certChain); 120 | } 121 | 122 | static loadYamlConfig(path?: string): ClientConfig | undefined { 123 | try { 124 | const configFile = readFileSync( 125 | p.join(__dirname, path ?? this.default_yaml_config_path), 126 | 'utf8' 127 | ); 128 | 129 | const config = parse(configFile); 130 | 131 | ClientConfigSchema.partial().parse(config); 132 | 133 | return config as ClientConfig; 134 | } catch (e) { 135 | if (!path) return undefined; 136 | 137 | if (e instanceof z.ZodError) { 138 | throw new Error(`Invalid yaml config: ${e.message}`); 139 | } 140 | 141 | throw e; 142 | } 143 | } 144 | 145 | private static env(name: EnvVars): string | undefined { 146 | return process.env[name]; 147 | } 148 | } 149 | -------------------------------------------------------------------------------- /src/util/config-loader/fixtures/.hatchet-invalid.yaml: -------------------------------------------------------------------------------- 1 | tenant_id: 'TENANT_ID_YAML' 2 | host_port: 'HOST_PORT_YAML' 3 | tls_config: 4 | cert_file: 'TLS_CERT_FILE_YAML' 5 | key_file: 'TLS_KEY_FILE_YAML' 6 | ca_file: 'TLS_ROOT_CA_FILE_YAML' 7 | -------------------------------------------------------------------------------- /src/util/config-loader/fixtures/.hatchet.yaml: -------------------------------------------------------------------------------- 1 | token: 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJncnBjX2Jyb2FkY2FzdF9hZGRyZXNzIjoiMTI3LjAuMC4xOjgwODAiLCJzZXJ2ZXJfdXJsIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoiNzA3ZDA4NTUtODBhYi00ZTFmLWExNTYtZjFjNDU0NmNiZjUyIn0K.abcdef' 2 | tenant_id: 'TENANT_ID_YAML' 3 
| host_port: 'HOST_PORT_YAML' 4 | tls_config: 5 | cert_file: 'TLS_CERT_FILE_YAML' 6 | key_file: 'TLS_KEY_FILE_YAML' 7 | ca_file: 'TLS_ROOT_CA_FILE_YAML' 8 | server_name: 'TLS_SERVER_NAME_YAML' 9 | -------------------------------------------------------------------------------- /src/util/config-loader/index.ts: -------------------------------------------------------------------------------- 1 | export * from './config-loader'; 2 | -------------------------------------------------------------------------------- /src/util/config-loader/token.test.ts: -------------------------------------------------------------------------------- 1 | import { getAddressesFromJWT } from './token'; 2 | 3 | describe('extractClaimsFromJWT', () => { 4 | it('should correctly extract custom claims from a valid JWT token', () => { 5 | // Example token, not a real one 6 | const token = 7 | 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJncnBjX2Jyb2FkY2FzdF9hZGRyZXNzIjoiMTI3LjAuMC4xOjgwODAiLCJzZXJ2ZXJfdXJsIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoiNzA3ZDA4NTUtODBhYi00ZTFmLWExNTYtZjFjNDU0NmNiZjUyIn0K.abcdef'; 8 | const addresses = getAddressesFromJWT(token); 9 | expect(addresses).toHaveProperty('grpcBroadcastAddress', '127.0.0.1:8080'); 10 | expect(addresses).toHaveProperty('serverUrl', 'http://localhost:8080'); 11 | }); 12 | 13 | it('should throw an error for invalid token format', () => { 14 | const token = 'invalid.token'; 15 | expect(() => getAddressesFromJWT(token)).toThrow('Invalid token format'); 16 | }); 17 | }); 18 | -------------------------------------------------------------------------------- /src/util/config-loader/token.ts: -------------------------------------------------------------------------------- 1 | export function getTenantIdFromJWT(token: string): string { 2 | const claims = extractClaimsFromJWT(token); 3 | return claims.sub; 4 | } 5 | 6 | export function getAddressesFromJWT(token: string): { 7 | serverUrl: string; 8 | grpcBroadcastAddress: string; 9 | } { 10 | const claims = 
extractClaimsFromJWT(token); 11 | return { 12 | serverUrl: claims.server_url, 13 | grpcBroadcastAddress: claims.grpc_broadcast_address, 14 | }; 15 | } 16 | 17 | function extractClaimsFromJWT(token: string): any { 18 | const parts = token.split('.'); 19 | if (parts.length !== 3) { 20 | throw new Error('Invalid token format'); 21 | } 22 | 23 | const claimsPart = parts[1]; 24 | const claimsData = atob(claimsPart.replace(/-/g, '+').replace(/_/g, '/')); 25 | const claims = JSON.parse(claimsData); 26 | 27 | return claims; 28 | } 29 | -------------------------------------------------------------------------------- /src/util/errors/hatchet-error.ts: -------------------------------------------------------------------------------- 1 | class HatchetError extends Error { 2 | constructor(message: string) { 3 | super(message); 4 | this.name = 'HatchetError'; 5 | } 6 | } 7 | 8 | export default HatchetError; 9 | -------------------------------------------------------------------------------- /src/util/hatchet-promise/hatchet-promise.test.ts: -------------------------------------------------------------------------------- 1 | import HatchetPromise from './hatchet-promise'; 2 | 3 | describe('HatchetPromise', () => { 4 | it('should resolve the original promise if not canceled', async () => { 5 | const hatchetPromise = new HatchetPromise( 6 | new Promise((resolve) => { 7 | setTimeout(() => resolve('RESOLVED'), 500); 8 | }) 9 | ); 10 | const result = await hatchetPromise.promise; 11 | expect(result).toEqual('RESOLVED'); 12 | }); 13 | it('should resolve the cancel promise if canceled', async () => { 14 | const hatchetPromise = new HatchetPromise( 15 | new Promise((resolve) => { 16 | setTimeout(() => resolve('RESOLVED'), 500); 17 | }) 18 | ); 19 | 20 | const result = hatchetPromise.promise; 21 | setTimeout(() => { 22 | hatchetPromise.cancel(); 23 | }, 100); 24 | 25 | try { 26 | await result; 27 | expect(true).toEqual(false); // this should not be reached 28 | } catch (e) { 29 | 
expect(e).toEqual(undefined); 30 | } 31 | }); 32 | }); 33 | -------------------------------------------------------------------------------- /src/util/hatchet-promise/hatchet-promise.ts: -------------------------------------------------------------------------------- 1 | class HatchetPromise { 2 | cancel: Function = (reason: any) => {}; 3 | promise: Promise; 4 | 5 | constructor(promise: Promise) { 6 | this.promise = new Promise((resolve, reject) => { 7 | this.cancel = reject; 8 | Promise.resolve(promise).then(resolve).catch(reject); 9 | }); 10 | } 11 | } 12 | 13 | export default HatchetPromise; 14 | -------------------------------------------------------------------------------- /src/util/logger/index.ts: -------------------------------------------------------------------------------- 1 | export * from './logger'; 2 | -------------------------------------------------------------------------------- /src/util/logger/logger.ts: -------------------------------------------------------------------------------- 1 | export abstract class Logger { 2 | abstract debug(message: string): void; 3 | abstract info(message: string): void; 4 | abstract green(message: string): void; 5 | abstract warn(message: string, error?: Error): void; 6 | abstract error(message: string, error?: Error): void; 7 | } 8 | 9 | export type LogLevel = 'OFF' | 'DEBUG' | 'INFO' | 'WARN' | 'ERROR'; 10 | 11 | // eslint-disable-next-line no-shadow 12 | export enum LogLevelEnum { 13 | OFF = -1, 14 | DEBUG = 0, 15 | INFO = 1, 16 | WARN = 2, 17 | ERROR = 3, 18 | } 19 | -------------------------------------------------------------------------------- /src/util/parse.ts: -------------------------------------------------------------------------------- 1 | export function parseJSON(json: string): any { 2 | try { 3 | const firstParse = JSON.parse(json); 4 | 5 | // Hatchet engine versions <=0.14.0 return JSON as a quoted string which needs to be parsed again. 
6 | // This is a workaround for that issue, but will not be needed in future versions. 7 | try { 8 | return JSON.parse(firstParse); 9 | } catch (e: any) { 10 | return firstParse; 11 | } 12 | } catch (e: any) { 13 | throw new Error(`Could not parse JSON: ${e.message}`); 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /src/util/retrier.ts: -------------------------------------------------------------------------------- 1 | import { Logger } from './logger'; 2 | import sleep from './sleep'; 3 | 4 | const DEFAULT_RETRY_INTERVAL = 0.1; // seconds 5 | const DEFAULT_RETRY_COUNT = 8; 6 | const MAX_JITTER = 100; // milliseconds 7 | 8 | export async function retrier( 9 | fn: () => Promise, 10 | logger: Logger, 11 | retries: number = DEFAULT_RETRY_COUNT, 12 | interval: number = DEFAULT_RETRY_INTERVAL 13 | ) { 14 | let lastError: Error | undefined; 15 | 16 | // eslint-disable-next-line no-plusplus 17 | for (let i = 0; i < retries; i++) { 18 | try { 19 | return await fn(); 20 | } catch (e: any) { 21 | lastError = e; 22 | logger.error(`Error: ${e.message}`); 23 | 24 | // Calculate exponential backoff with random jitter 25 | const exponentialDelay = interval * 2 ** i * 1000; 26 | const jitter = Math.random() * MAX_JITTER; 27 | const totalDelay = exponentialDelay + jitter; 28 | 29 | await sleep(totalDelay); 30 | } 31 | } 32 | 33 | throw lastError; 34 | } 35 | -------------------------------------------------------------------------------- /src/util/sleep.ts: -------------------------------------------------------------------------------- 1 | const sleep = (ms: number) => 2 | new Promise((resolve) => { 3 | setTimeout(resolve, ms); 4 | }); 5 | 6 | export default sleep; 7 | -------------------------------------------------------------------------------- /src/util/thread-helper.ts: -------------------------------------------------------------------------------- 1 | import { Worker, WorkerOptions } from 'worker_threads'; 2 | import 
path from 'path'; 3 | 4 | export function runThreaded(scriptPath: string, options: WorkerOptions) { 5 | const resolvedPath = require.resolve(scriptPath); 6 | 7 | const isTs = /\.ts$/.test(resolvedPath); 8 | 9 | // NOTE: if the file is typescript, we are in the SDK dev environment and need to so some funky work. 10 | // otherwise, we pass the file directly to the worker. 11 | const ex = isTs 12 | ? ` 13 | const wk = require('worker_threads'); 14 | require('tsconfig-paths/register'); 15 | require('ts-node').register({ 16 | "include": ["src/**/*.ts"], 17 | "exclude": ["./dist"], 18 | "compilerOptions": { 19 | "types": ["node"], 20 | "target": "es2016", 21 | "esModuleInterop": true, 22 | "module": "commonjs", 23 | "rootDir": "${path.join(__dirname, '../../../')}", 24 | } 25 | }); 26 | let file = '${resolvedPath}'; 27 | require(file); 28 | ` 29 | : resolvedPath; 30 | 31 | return new Worker(ex, { 32 | ...options, 33 | eval: isTs ? true : undefined, 34 | }); 35 | } 36 | 37 | // execArgv: ? ['--require', 'ts-node/register'] : undefined, 38 | -------------------------------------------------------------------------------- /src/util/workflow-run-ref.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable max-classes-per-file */ 2 | import { ListenerClient, StepRunEvent } from '@hatchet/clients/listener/listener-client'; 3 | import { Status } from 'nice-grpc'; 4 | import { WorkflowRunEventType } from '../protoc/dispatcher'; 5 | 6 | type EventualWorkflowRunId = 7 | | string 8 | | Promise 9 | | Promise<{ 10 | workflowRunId: string; 11 | }>; 12 | 13 | export class DedupeViolationErr extends Error { 14 | constructor(message: string) { 15 | super(message); 16 | this.name = 'DedupeViolationErr'; 17 | } 18 | } 19 | 20 | async function getWorkflowRunId(workflowRunId: EventualWorkflowRunId): Promise { 21 | if (typeof workflowRunId === 'string') { 22 | return workflowRunId; 23 | } 24 | 25 | if (workflowRunId instanceof Promise) { 26 | 
try { 27 | const resolved = await workflowRunId; 28 | if (typeof resolved === 'string') { 29 | return resolved; 30 | } 31 | 32 | return resolved.workflowRunId; 33 | } catch (e: any) { 34 | if (e.code && e.code === Status.ALREADY_EXISTS) { 35 | throw new DedupeViolationErr(e.details); 36 | } 37 | 38 | throw e; 39 | } 40 | } 41 | 42 | throw new Error('Invalid workflowRunId: must be a string or a promise'); 43 | } 44 | 45 | export default class WorkflowRunRef { 46 | workflowRunId: EventualWorkflowRunId; 47 | parentWorkflowRunId?: string; 48 | private client: ListenerClient; 49 | 50 | constructor( 51 | workflowRunId: 52 | | string 53 | | Promise 54 | | Promise<{ 55 | workflowRunId: string; 56 | }>, 57 | client: ListenerClient, 58 | parentWorkflowRunId?: string 59 | ) { 60 | this.workflowRunId = workflowRunId; 61 | this.parentWorkflowRunId = parentWorkflowRunId; 62 | this.client = client; 63 | } 64 | 65 | async getWorkflowRunId(): Promise { 66 | return getWorkflowRunId(this.workflowRunId); 67 | } 68 | 69 | async stream(): Promise> { 70 | const workflowRunId = await getWorkflowRunId(this.workflowRunId); 71 | return this.client.stream(workflowRunId); 72 | } 73 | 74 | async result(): Promise { 75 | const workflowRunId = await getWorkflowRunId(this.workflowRunId); 76 | 77 | const streamable = await this.client.get(workflowRunId); 78 | 79 | return new Promise((resolve, reject) => { 80 | (async () => { 81 | for await (const event of streamable.stream()) { 82 | if (event.eventType === WorkflowRunEventType.WORKFLOW_RUN_EVENT_TYPE_FINISHED) { 83 | if (event.results.some((r) => !!r.error)) { 84 | reject(event.results); 85 | return; 86 | } 87 | 88 | if (event.results.length === 0) { 89 | const data = await this.client.api.workflowRunGetShape( 90 | this.client.config.tenant_id, 91 | event.workflowRunId 92 | ); 93 | 94 | const mostRecentJobRun = data.data.jobRuns?.[0]; 95 | 96 | if (!mostRecentJobRun) { 97 | reject(new Error('No job runs found')); 98 | return; 99 | } 100 | 101 | 
const outputs: { [readableStepName: string]: any } = {}; 102 | 103 | mostRecentJobRun.stepRuns?.forEach((stepRun) => { 104 | const readable = mostRecentJobRun.job?.steps?.find( 105 | (step) => step.metadata.id === stepRun.stepId 106 | ); 107 | const readableStepName = `${readable?.readableId}`; 108 | try { 109 | outputs[readableStepName] = JSON.parse(stepRun.output || '{}'); 110 | } catch (error) { 111 | outputs[readableStepName] = stepRun.output; 112 | } 113 | }); 114 | 115 | resolve(outputs as T); 116 | return; 117 | } 118 | 119 | const result = event.results.reduce( 120 | (acc, r) => ({ 121 | ...acc, 122 | [r.stepReadableId]: JSON.parse(r.output || '{}'), 123 | }), 124 | {} as T 125 | ); 126 | 127 | resolve(result); 128 | return; 129 | } 130 | } 131 | })(); 132 | }); 133 | } 134 | 135 | async toJSON(): Promise { 136 | return JSON.stringify({ 137 | workflowRunId: await this.workflowRunId, 138 | }); 139 | } 140 | } 141 | -------------------------------------------------------------------------------- /src/version.ts: -------------------------------------------------------------------------------- 1 | export const HATCHET_VERSION = 'development'; 2 | -------------------------------------------------------------------------------- /src/workflow.ts: -------------------------------------------------------------------------------- 1 | import * as z from 'zod'; 2 | 3 | import { CreateStep, CreateStepSchema } from './step'; 4 | import { 5 | ConcurrencyLimitStrategy as PbConcurrencyLimitStrategy, 6 | StickyStrategy as PbStickyStrategy, 7 | } from './protoc/workflows'; 8 | 9 | const CronConfigSchema = z.object({ 10 | cron: z.string(), 11 | event: z.undefined(), 12 | }); 13 | 14 | const EventConfigSchema = z.object({ 15 | cron: z.undefined(), 16 | event: z.string(), 17 | }); 18 | 19 | const OnConfigSchema = z.union([CronConfigSchema, EventConfigSchema]).optional(); 20 | 21 | const StepsSchema = z.array(CreateStepSchema); 22 | 23 | export type Steps = z.infer; 24 | 25 | 
export const ConcurrencyLimitStrategy = PbConcurrencyLimitStrategy; 26 | 27 | export const WorkflowConcurrency = z.object({ 28 | name: z.string(), 29 | maxRuns: z.number().optional(), 30 | limitStrategy: z.nativeEnum(ConcurrencyLimitStrategy).optional(), 31 | expression: z.string().optional(), 32 | }); 33 | 34 | export const HatchetTimeoutSchema = z.string(); 35 | 36 | export const StickyStrategy = PbStickyStrategy; 37 | 38 | export const CreateWorkflowSchema = z.object({ 39 | id: z.string(), 40 | description: z.string(), 41 | version: z.string().optional(), 42 | /** 43 | * sticky will attempt to run all steps for workflow on the same worker 44 | */ 45 | sticky: z.nativeEnum(StickyStrategy).optional(), 46 | scheduleTimeout: z.string().optional(), 47 | /** 48 | * @deprecated Workflow timeout is deprecated. Use step timeouts instead. 49 | */ 50 | timeout: HatchetTimeoutSchema.optional(), 51 | on: OnConfigSchema, 52 | steps: StepsSchema, 53 | onFailure: CreateStepSchema?.optional(), 54 | }); 55 | 56 | export interface Workflow extends z.infer { 57 | concurrency?: z.infer & { 58 | key?: (ctx: any) => string; 59 | }; 60 | steps: CreateStep[]; 61 | onFailure?: CreateStep; 62 | } 63 | -------------------------------------------------------------------------------- /typedoc.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://typedoc.org/schema.json", 3 | "entryPoints": ["./src/index.ts"], 4 | "out": "docs", 5 | "plugin": ["typedoc-plugin-markdown"] 6 | } 7 | --------------------------------------------------------------------------------