├── .github └── workflows │ └── node.js.yml ├── .gitignore ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── commonjs.json ├── eslint.config.js ├── esm.json ├── example ├── .gitignore ├── convex │ ├── _generated │ │ ├── api.d.ts │ │ ├── api.js │ │ ├── dataModel.d.ts │ │ ├── server.d.ts │ │ └── server.js │ ├── admin.ts │ ├── convex.config.ts │ ├── example.test.ts │ ├── example.ts │ ├── schema.ts │ ├── setup.test.ts │ ├── transcription.ts │ └── tsconfig.json ├── package-lock.json └── package.json ├── package-lock.json ├── package.json ├── src ├── client │ ├── environment.ts │ ├── index.ts │ ├── step.ts │ ├── stepContext.ts │ ├── validator.ts │ └── workflowMutation.ts ├── component │ ├── _generated │ │ ├── api.d.ts │ │ ├── api.js │ │ ├── dataModel.d.ts │ │ ├── server.d.ts │ │ └── server.js │ ├── convex.config.ts │ ├── journal.ts │ ├── logging.ts │ ├── model.ts │ ├── pool.ts │ ├── schema.ts │ ├── setup.test.ts │ ├── utils.ts │ └── workflow.ts └── types.ts └── tsconfig.json /.github/workflows/node.js.yml: -------------------------------------------------------------------------------- 1 | name: Run tests 2 | on: 3 | push: 4 | branches: ["main"] 5 | pull_request: 6 | branches: ["main"] 7 | jobs: 8 | build: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v4 12 | - name: Use Node.js 13 | uses: actions/setup-node@v4 14 | - run: npm i 15 | - run: npm ci 16 | - run: cd example && npm i && cd .. 17 | - run: npm run typecheck 18 | - run: cd example && npm run lint && cd .. 19 | - run: npm test 20 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | .idea 3 | *.local 4 | *.log 5 | /.vscode/ 6 | /docs/.vitepress/cache 7 | dist 8 | dist-ssr 9 | explorations 10 | node_modules 11 | .eslintcache 12 | # components are libraries! 13 | .package-lock.json 14 | 15 | # this is a package-json-redirect stub dir, see https://github.com/andrewbranch/example-subpath-exports-ts-compat?tab=readme-ov-file 16 | frontend/package.json 17 | # npm pack output 18 | *.tgz 19 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Developing guide 2 | 3 | ## Running locally 4 | 5 | ```sh 6 | npm i 7 | cd example 8 | npm i 9 | npx convex dev 10 | ``` 11 | 12 | ## Testing 13 | 14 | ```sh 15 | rm -rf dist/ && npm run build 16 | npm run typecheck 17 | npm run test 18 | cd example 19 | npm run lint 20 | cd .. 
21 | ``` 22 | 23 | ## Deploying 24 | 25 | ### Building a one-off package 26 | 27 | ```sh 28 | rm -rf dist/ && npm run build 29 | npm pack 30 | ``` 31 | 32 | ### Deploying a new version 33 | 34 | ```sh 35 | # this will change the version and commit it (if you run it in the root directory) 36 | npm version patch 37 | npm publish --dry-run 38 | # sanity check files being included 39 | npm publish 40 | git push --tags 41 | ``` 42 | 43 | #### Alpha release 44 | 45 | The same as above, but it requires extra flags so the release is only installed with `@alpha`: 46 | 47 | ```sh 48 | npm version prerelease --preid alpha 49 | npm publish --tag alpha 50 | ``` 51 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. 
For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Convex Workflow 2 | 3 | [![npm version](https://badge.fury.io/js/@convex-dev%2Fworkflow.svg?)](https://badge.fury.io/js/@convex-dev%2Fworkflow) 4 | 5 | 6 | 7 | Have you ever wanted to run a series of functions reliably and durably, where 8 | each can have its own retry behavior, the overall workflow will survive server 9 | restarts, and you can have long-running workflows spanning months that can be 10 | canceled? Do you want to observe the status of a workflow reactively, as well as 11 | the results written from each step? 12 | 13 | And do you want to do this with code, instead of a DSL? 14 | 15 | Welcome to the world of Convex workflows. 16 | 17 | - Run workflows asynchronously, and observe their status reactively via 18 | subscriptions, from one or many users simultaneously, even on page refreshes. 19 | - Workflows can run for months, and survive server restarts. You can specify 20 | delays or custom times to run each step. 21 | - Run steps in parallel, or in sequence. 22 | - Output from previous steps is available to pass to subsequent steps. 23 | - Run queries, mutations, and actions. 24 | - Specify retry behavior on a per-step basis, along with a default policy. 25 | - Specify how many workflows can run in parallel to manage load. 26 | - Cancel long-running workflows. 27 | - Clean up workflows after they're done. 28 | 29 | ```ts 30 | import { WorkflowManager } from "@convex-dev/workflow"; 31 | import { components } from "./_generated/api"; 32 | 33 | export const workflow = new WorkflowManager(components.workflow); 34 | 35 | export const exampleWorkflow = workflow.define({ 36 | args: { 37 | storageId: v.id("_storage"), 38 | }, 39 | handler: async (step, args): Promise => { 40 | const transcription = await step.runAction( 41 | internal.index.computeTranscription, 42 | { storageId: args.storageId }, 43 | ); 44 | 45 | const embedding = await step.runAction( 46 | internal.index.computeEmbedding, 47 | { transcription }, 48 | // Run this a month after the transcription is computed. 49 | { runAfter: 30 * 24 * 60 * 60 * 1000 }, 50 | ); 51 | return embedding; 52 | }, 53 | }); 54 | ``` 55 | 56 | This component adds durably executed _workflows_ to Convex. Combine Convex queries, mutations, 57 | and actions into long-lived workflows, and the system will always fully execute a workflow 58 | to completion. 59 | 60 | Open a [GitHub issue](https://github.com/get-convex/workflow/issues) with any feedback or bugs you find. 
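
For context, the `computeTranscription` and `computeEmbedding` steps called in the example above are ordinary Convex actions that you define yourself. Below is a minimal sketch of what they might look like (the `convex/index.ts` location is implied by `internal.index.*`, and the `transcribe`/`embed` helpers are placeholders for whatever providers you actually use):

```ts
// convex/index.ts: illustrative sketch only; swap in your real providers.
import { v } from "convex/values";
import { internalAction } from "./_generated/server";

export const computeTranscription = internalAction({
  args: { storageId: v.id("_storage") },
  handler: async (ctx, args): Promise<string> => {
    const audio = await ctx.storage.get(args.storageId);
    if (!audio) {
      throw new Error(`File not found: ${args.storageId}`);
    }
    // Placeholder: call your transcription provider here.
    return await transcribe(audio);
  },
});

export const computeEmbedding = internalAction({
  args: { transcription: v.string() },
  handler: async (_ctx, args): Promise<number[]> => {
    // Placeholder: call your embedding provider here.
    return await embed(args.transcription);
  },
});

// Stub helpers so the sketch is self-contained; replace with real implementations.
async function transcribe(_audio: Blob): Promise<string> {
  return "transcribed text";
}
async function embed(_text: string): Promise<number[]> {
  return new Array(1536).fill(0);
}
```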
61 | 62 | ## Installation 63 | 64 | First, add `@convex-dev/workflow` to your Convex project: 65 | 66 | ```sh 67 | npm install @convex-dev/workflow 68 | ``` 69 | 70 | Then, install the component within your `convex/convex.config.ts` file: 71 | 72 | ```ts 73 | // convex/convex.config.ts 74 | import workflow from "@convex-dev/workflow/convex.config"; 75 | import { defineApp } from "convex/server"; 76 | 77 | const app = defineApp(); 78 | app.use(workflow); 79 | export default app; 80 | ``` 81 | 82 | Finally, create a workflow manager within your `convex/` folder, and point it 83 | to the installed component: 84 | 85 | ```ts 86 | // convex/index.ts 87 | import { WorkflowManager } from "@convex-dev/workflow"; 88 | import { components } from "./_generated/api"; 89 | 90 | export const workflow = new WorkflowManager(components.workflow); 91 | ``` 92 | 93 | ## Usage 94 | 95 | The first step is to define a workflow using `workflow.define()`. This function 96 | is designed to feel like a Convex action but with a few restrictions: 97 | 98 | 1. The workflow runs in the background, so it can't return a value. 99 | 2. The workflow must be _deterministic_, so it should implement most of its logic 100 | by calling out to other Convex functions. We will be lifting some of these 101 | restrictions over time by implementing `Math.random()`, `Date.now()`, and 102 | `fetch` within our workflow environment. 103 | 104 | Note: To help avoid type cycles, always annotate the return type of the `handler` 105 | with the return type of the workflow. 106 | 107 | ```ts 108 | export const exampleWorkflow = workflow.define({ 109 | args: { name: v.string() }, 110 | handler: async (step, args): Promise => { 111 | const queryResult = await step.runQuery( 112 | internal.example.exampleQuery, 113 | args, 114 | ); 115 | const actionResult = await step.runAction( 116 | internal.example.exampleAction, 117 | { queryResult }, // pass in results from previous steps! 118 | ); 119 | return actionResult; 120 | }, 121 | }); 122 | 123 | export const exampleQuery = internalQuery({ 124 | args: { name: v.string() }, 125 | handler: async (ctx, args) => { 126 | return `The query says... Hi ${args.name}!`; 127 | }, 128 | }); 129 | 130 | export const exampleAction = internalAction({ 131 | args: { queryResult: v.string() }, 132 | handler: async (ctx, args) => { 133 | return args.queryResult + " The action says... Hi back!"; 134 | }, 135 | }); 136 | ``` 137 | 138 | ### Starting a workflow 139 | 140 | Once you've defined a workflow, you can start it from a mutation or action 141 | using `workflow.start()`. 142 | 143 | ```ts 144 | export const kickoffWorkflow = mutation({ 145 | handler: async (ctx) => { 146 | const workflowId = await workflow.start( 147 | ctx, 148 | internal.example.exampleWorkflow, 149 | { name: "James" }, 150 | ); 151 | }, 152 | }); 153 | ``` 154 | 155 | ### Handling the workflow's result with onComplete 156 | 157 | You can handle the workflow's result with `onComplete`. This is useful for 158 | cleaning up any resources used by the workflow. 159 | 160 | Note: when you return things from a workflow, you'll need to specify the return 161 | type of your `handler` to break type cycles due to using `internal.*` functions 162 | in the body, which then inform the type of the workflow, which is included in 163 | the `internal.*` type. 164 | 165 | You can also specify a `returns` validator to do runtime validation on the 166 | return value. If it fails, your `onComplete` handler will be called with an 167 | error instead of success. 
You can also do validation in the `onComplete` handler 168 | to have more control over handling that situation. 169 | 170 | ```ts 171 | import { vWorkflowId } from "@convex-dev/workflow"; 172 | import { vResultValidator } from "@convex-dev/workpool"; 173 | 174 | export const foo = mutation({ 175 | handler: async (ctx) => { 176 | const name = "James"; 177 | const workflowId = await workflow.start( 178 | ctx, 179 | internal.example.exampleWorkflow, 180 | { name }, 181 | { 182 | onComplete: internal.example.handleOnComplete, 183 | context: name, // can be anything 184 | }, 185 | ); 186 | }, 187 | }); 188 | 189 | export const handleOnComplete = mutation({ 190 | args: { 191 | workflowId: vWorkflowId, 192 | result: vResultValidator, 193 | context: v.any(), // used to pass through data from the start site. 194 | } 195 | handler: async (ctx, args) => { 196 | const name = (args.context as { name: string }).name; 197 | if (args.result.kind === "success") { 198 | const text = args.result.returnValue; 199 | console.log(`${name} result: ${text}`); 200 | } else if (args.result.kind === "error") { 201 | console.error("Workflow failed", args.result.error); 202 | } else if (args.result.kind === "canceled") { 203 | console.log("Workflow canceled", args.context); 204 | } 205 | }, 206 | }); 207 | ``` 208 | 209 | ### Running steps in parallel 210 | 211 | You can run steps in parallel by calling `step.runAction()` multiple times in 212 | a `Promise.all()` call. 213 | 214 | ```ts 215 | export const exampleWorkflow = workflow.define({ 216 | args: { name: v.string() }, 217 | handler: async (step, args): Promise => { 218 | const [result1, result2] = await Promise.all([ 219 | step.runAction(internal.example.myAction, args), 220 | step.runAction(internal.example.myAction, args), 221 | ]); 222 | }, 223 | }); 224 | ``` 225 | 226 | Note: The workflow will not proceed until all steps fired off at once have completed. 227 | 228 | ### Specifying retry behavior 229 | 230 | Sometimes actions fail due to transient errors, whether it was an unreliable 231 | third-party API or a server restart. You can have the workflow automatically 232 | retry actions using best practices (exponential backoff & jitter). 233 | By default there are no retries, and the workflow will fail. 234 | 235 | You can specify default retry behavior for all workflows on the WorkflowManager, 236 | or override it on a per-workflow basis. 237 | 238 | You can also specify a custom retry behavior per-step, to opt-out of retries 239 | for actions that may want at-most-once semantics. 240 | 241 | Workpool options: 242 | 243 | If you specify any of these, it will override the 244 | [`DEFAULT_RETRY_BEHAVIOR`](./src/component/pool.ts). 245 | 246 | - `defaultRetryBehavior`: The default retry behavior for all workflows. 247 | - `maxAttempts`: The maximum number of attempts to retry an action. 248 | - `initialBackoffMs`: The initial backoff time in milliseconds. 249 | - `base`: The base multiplier for the backoff. Default is 2. 250 | - `retryActionsByDefault`: Whether to retry actions, by default is false. 251 | - If you specify a retry behavior at the step level, it will always retry. 252 | 253 | At the step level, you can also specify `true` or `false` to disable or use 254 | the default policy. 255 | 256 | ```ts 257 | const workflow = new WorkflowManager(components.workflow, { 258 | defaultRetryBehavior: { 259 | maxAttempts: 3, 260 | initialBackoffMs: 100, 261 | base: 2, 262 | }, 263 | // If specified, this sets the defaults, overridden per-workflow or per-step. 
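  //   For example (values here are illustrative; see the option list above):
  //   workpoolOptions: { maxParallelism: 10, retryActionsByDefault: false },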
264 | workpoolOptions: { ... } 265 | }); 266 | 267 | export const exampleWorkflow = workflow.define({ 268 | args: { name: v.string() }, 269 | handler: async (step, args): Promise => { 270 | // Uses default retry behavior & retryActionsByDefault 271 | await step.runAction(internal.example.myAction, args); 272 | // Retries will be attempted with the default behavior 273 | await step.runAction(internal.example.myAction, args, { retry: true }); 274 | // No retries will be attempted 275 | await step.runAction(internal.example.myAction, args, { retry: false }); 276 | // Custom retry behavior will be used 277 | await step.runAction(internal.example.myAction, args, { 278 | retry: { maxAttempts: 2, initialBackoffMs: 100, base: 2 }, 279 | }); 280 | }, 281 | // If specified, this will override the workflow manager's default 282 | workpoolOptions: { ... }, 283 | }); 284 | ``` 285 | 286 | ### Specifying how many workflows can run in parallel 287 | 288 | You can specify how many workflows can run in parallel by setting the `maxParallelism` 289 | workpool option. It has a reasonable default. You should not exceed 50 across 290 | all your workflows. If you want to do a lot of work in parallel, you should 291 | employ batching, where each workflow operates on a batch of work, e.g. scraping 292 | a list of links instead of one link per workflow. 293 | 294 | ```ts 295 | const workflow = new WorkflowManager(components.workflow, { 296 | workpoolOptions: { 297 | // You must only set this to one value per components.xyz! 298 | // You can set different values if you "use" multiple different components 299 | // in convex.config.ts. 300 | maxParallelism: 10, 301 | }, 302 | }); 303 | ``` 304 | 305 | ### Checking a workflow's status 306 | 307 | The `workflow.start()` method returns a `WorkflowId`, which can then be used for querying 308 | a workflow's status. 309 | 310 | ```ts 311 | export const kickoffWorkflow = action({ 312 | handler: async (ctx) => { 313 | const workflowId = await workflow.start( 314 | ctx, 315 | internal.example.exampleWorkflow, 316 | { name: "James" }, 317 | ); 318 | await new Promise((resolve) => setTimeout(resolve, 1000)); 319 | 320 | const status = await workflow.status(ctx, workflowId); 321 | console.log("Workflow status after 1s", status); 322 | }, 323 | }); 324 | ``` 325 | 326 | ### Canceling a workflow 327 | 328 | You can cancel a workflow with `workflow.cancel()`, halting the workflow's execution immmediately. 329 | In-progress calls to `step.runAction()`, however, will finish executing. 330 | 331 | ```ts 332 | export const kickoffWorkflow = action({ 333 | handler: async (ctx) => { 334 | const workflowId = await workflow.start( 335 | ctx, 336 | internal.example.exampleWorkflow, 337 | { name: "James" }, 338 | ); 339 | await new Promise((resolve) => setTimeout(resolve, 1000)); 340 | 341 | // Cancel the workflow after 1 second. 342 | await workflow.cancel(ctx, workflowId); 343 | }, 344 | }); 345 | ``` 346 | 347 | ### Cleaning up a workflow 348 | 349 | After a workflow has completed, you can clean up its storage with `workflow.cleanup()`. 350 | Completed workflows are not automatically cleaned up by the system. 
351 | 352 | ```ts 353 | export const kickoffWorkflow = action({ 354 | handler: async (ctx) => { 355 | const workflowId = await workflow.start( 356 | ctx, 357 | internal.example.exampleWorkflow, 358 | { name: "James" }, 359 | ); 360 | try { 361 | while (true) { 362 | const status = await workflow.status(ctx, workflowId); 363 | if (status.type === "inProgress") { 364 | await new Promise((resolve) => setTimeout(resolve, 1000)); 365 | continue; 366 | } 367 | console.log("Workflow completed with status:", status); 368 | break; 369 | } 370 | } finally { 371 | await workflow.cleanup(ctx, workflowId); 372 | } 373 | }, 374 | }); 375 | ``` 376 | 377 | ### Specifying a custom name for a step 378 | 379 | You can specify a custom name for a step by passing a `name` option to the step. 380 | 381 | This allows the events emitted to your logs to be more descriptive. 382 | By default it uses the `file/folder:function` name. 383 | 384 | ```ts 385 | export const exampleWorkflow = workflow.define({ 386 | args: { name: v.string() }, 387 | handler: async (step, args): Promise => { 388 | await step.runAction(internal.example.myAction, args, { name: "FOO" }); 389 | }, 390 | }); 391 | ``` 392 | 393 | ## Tips and troubleshooting 394 | 395 | ### Circular dependencies 396 | 397 | Having the return value of workflows depend on other Convex functions can lead to circular dependencies due to the 398 | `internal.foo.bar` way of specifying functions. The way to fix this is to explicitly type the return value of the 399 | workflow. When in doubt, add return types to more `handler` functions, like this: 400 | 401 | ```diff 402 | export const supportAgentWorkflow = workflow.define({ 403 | args: { prompt: v.string(), userId: v.string(), threadId: v.string() }, 404 | + handler: async (step, { prompt, userId, threadId }): Promise => { 405 | // ... 406 | }, 407 | }); 408 | 409 | // And regular functions too: 410 | export const myFunction = action({ 411 | args: { prompt: v.string() }, 412 | + handler: async (ctx, { prompt }): Promise => { 413 | // ... 414 | }, 415 | }); 416 | ``` 417 | 418 | ### More concise workflows 419 | 420 | To avoid the noise of `internal.foo.*` syntax, you can use a variable. 421 | For instance, if you define all your steps in `convex/steps.ts`, you can do this: 422 | 423 | ```diff 424 | const s = internal.steps; 425 | 426 | export const myWorkflow = workflow.define({ 427 | args: { prompt: v.string() }, 428 | handler: async (step, args): Promise => { 429 | + const result = await step.runAction(s.myAction, args); 430 | return result; 431 | }, 432 | }); 433 | ``` 434 | 435 | ## Limitations 436 | 437 | Here are a few limitations to keep in mind: 438 | 439 | - Steps can only take in and return a total of _1 MiB_ of data within a single 440 | workflow execution. If you run into journal size limits, you can work around 441 | this by storing results in the DB from your step functions and passing IDs 442 | around within the the workflow. 443 | - `console.log()` isn't currently captured, so you may see duplicate log lines 444 | within your Convex dashboard if you log within the workflow definition. 445 | - We currently do not collect backtraces from within function calls from workflows. 446 | - If you need to use side effects like `fetch`, `Math.random()`, or `Date.now()`, 447 | you'll need to do that in a step, not in the workflow definition. 448 | - If the implementation of the workflow meaningfully changes (steps added, 449 | removed, or reordered) then it will fail with a determinism violation. 
450 | The implementation should stay stable for the lifetime of active workflows. 451 | See [this issue](https://github.com/get-convex/workflow/issues/35) for ideas 452 | on how to make this better. 453 | 454 | 455 | -------------------------------------------------------------------------------- /commonjs.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.json", 3 | "include": ["src/**/*"], 4 | "exclude": ["src/**/*.test.*", "../src/package.json"], 5 | "compilerOptions": { 6 | "outDir": "./dist/commonjs" 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /eslint.config.js: -------------------------------------------------------------------------------- 1 | import globals from "globals"; 2 | import pluginJs from "@eslint/js"; 3 | import typescriptEslint from "@typescript-eslint/eslint-plugin"; 4 | import typescriptParser from "@typescript-eslint/parser"; 5 | 6 | export default [ 7 | { 8 | files: ["src/**/*.{js,mjs,cjs,ts,tsx}", "example/**/*.{js,mjs,cjs,ts,tsx}"], 9 | }, 10 | { 11 | ignores: [ 12 | "dist/**", 13 | "eslint.config.js", 14 | "**/_generated/", 15 | "node10stubs.mjs", 16 | ], 17 | }, 18 | { 19 | languageOptions: { 20 | globals: { 21 | ...globals.worker, 22 | ...globals.node, 23 | }, 24 | parser: typescriptParser, 25 | parserOptions: { 26 | project: true, 27 | tsconfigRootDir: ".", 28 | }, 29 | }, 30 | plugins: { 31 | "@typescript-eslint": typescriptEslint, 32 | }, 33 | rules: { 34 | ...typescriptEslint.configs["recommended"].rules, 35 | ...pluginJs.configs.recommended.rules, 36 | "@typescript-eslint/no-floating-promises": "error", 37 | "@typescript-eslint/no-explicit-any": "warn", 38 | // allow (_arg: number) => {} and const _foo = 1; 39 | "no-unused-vars": "off", 40 | "@typescript-eslint/no-unused-vars": [ 41 | "warn", 42 | { 43 | argsIgnorePattern: "^_", 44 | varsIgnorePattern: "^_", 45 | }, 46 | ], 47 | }, 48 | }, 49 | ]; 50 | -------------------------------------------------------------------------------- /esm.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.json", 3 | "include": ["src/**/*"], 4 | "exclude": ["src/**/*.test.*", "../src/package.json"], 5 | "compilerOptions": { 6 | "outDir": "./dist/esm" 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /example/.gitignore: -------------------------------------------------------------------------------- 1 | !**/glob-import/dir/node_modules 2 | .DS_Store 3 | .idea 4 | *.cpuprofile 5 | *.local 6 | *.log 7 | /.vscode/ 8 | /docs/.vitepress/cache 9 | dist 10 | dist-ssr 11 | explorations 12 | node_modules 13 | playground-temp 14 | temp 15 | TODOs.md 16 | .eslintcache 17 | -------------------------------------------------------------------------------- /example/convex/_generated/api.d.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /** 3 | * Generated `api` utility. 4 | * 5 | * THIS CODE IS AUTOMATICALLY GENERATED. 6 | * 7 | * To regenerate, run `npx convex dev`. 
8 | * @module 9 | */ 10 | 11 | import type * as admin from "../admin.js"; 12 | import type * as example from "../example.js"; 13 | import type * as transcription from "../transcription.js"; 14 | 15 | import type { 16 | ApiFromModules, 17 | FilterApi, 18 | FunctionReference, 19 | } from "convex/server"; 20 | 21 | /** 22 | * A utility for referencing Convex functions in your app's API. 23 | * 24 | * Usage: 25 | * ```js 26 | * const myFunctionReference = api.myModule.myFunction; 27 | * ``` 28 | */ 29 | declare const fullApi: ApiFromModules<{ 30 | admin: typeof admin; 31 | example: typeof example; 32 | transcription: typeof transcription; 33 | }>; 34 | declare const fullApiWithMounts: typeof fullApi; 35 | 36 | export declare const api: FilterApi< 37 | typeof fullApiWithMounts, 38 | FunctionReference 39 | >; 40 | export declare const internal: FilterApi< 41 | typeof fullApiWithMounts, 42 | FunctionReference 43 | >; 44 | 45 | export declare const components: { 46 | workflow: { 47 | journal: { 48 | load: FunctionReference< 49 | "query", 50 | "internal", 51 | { workflowId: string }, 52 | { 53 | inProgress: Array<{ 54 | _creationTime: number; 55 | _id: string; 56 | step: { 57 | args: any; 58 | argsSize: number; 59 | completedAt?: number; 60 | functionType: "query" | "mutation" | "action"; 61 | handle: string; 62 | inProgress: boolean; 63 | name: string; 64 | runResult?: 65 | | { kind: "success"; returnValue: any } 66 | | { error: string; kind: "failed" } 67 | | { kind: "canceled" }; 68 | startedAt: number; 69 | workId?: string; 70 | }; 71 | stepNumber: number; 72 | workflowId: string; 73 | }>; 74 | journalEntries: Array<{ 75 | _creationTime: number; 76 | _id: string; 77 | step: { 78 | args: any; 79 | argsSize: number; 80 | completedAt?: number; 81 | functionType: "query" | "mutation" | "action"; 82 | handle: string; 83 | inProgress: boolean; 84 | name: string; 85 | runResult?: 86 | | { kind: "success"; returnValue: any } 87 | | { error: string; kind: "failed" } 88 | | { kind: "canceled" }; 89 | startedAt: number; 90 | workId?: string; 91 | }; 92 | stepNumber: number; 93 | workflowId: string; 94 | }>; 95 | logLevel: "DEBUG" | "TRACE" | "INFO" | "REPORT" | "WARN" | "ERROR"; 96 | ok: boolean; 97 | workflow: { 98 | _creationTime: number; 99 | _id: string; 100 | args: any; 101 | generationNumber: number; 102 | logLevel?: any; 103 | name?: string; 104 | onComplete?: { context?: any; fnHandle: string }; 105 | runResult?: 106 | | { kind: "success"; returnValue: any } 107 | | { error: string; kind: "failed" } 108 | | { kind: "canceled" }; 109 | startedAt?: any; 110 | state?: any; 111 | workflowHandle: string; 112 | }; 113 | } 114 | >; 115 | startStep: FunctionReference< 116 | "mutation", 117 | "internal", 118 | { 119 | generationNumber: number; 120 | name: string; 121 | retry?: 122 | | boolean 123 | | { base: number; initialBackoffMs: number; maxAttempts: number }; 124 | schedulerOptions?: { runAt?: number } | { runAfter?: number }; 125 | step: { 126 | args: any; 127 | argsSize: number; 128 | completedAt?: number; 129 | functionType: "query" | "mutation" | "action"; 130 | handle: string; 131 | inProgress: boolean; 132 | name: string; 133 | runResult?: 134 | | { kind: "success"; returnValue: any } 135 | | { error: string; kind: "failed" } 136 | | { kind: "canceled" }; 137 | startedAt: number; 138 | workId?: string; 139 | }; 140 | workflowId: string; 141 | workpoolOptions?: { 142 | defaultRetryBehavior?: { 143 | base: number; 144 | initialBackoffMs: number; 145 | maxAttempts: number; 146 | }; 147 | logLevel?: 
"DEBUG" | "TRACE" | "INFO" | "REPORT" | "WARN" | "ERROR"; 148 | maxParallelism?: number; 149 | retryActionsByDefault?: boolean; 150 | }; 151 | }, 152 | { 153 | _creationTime: number; 154 | _id: string; 155 | step: { 156 | args: any; 157 | argsSize: number; 158 | completedAt?: number; 159 | functionType: "query" | "mutation" | "action"; 160 | handle: string; 161 | inProgress: boolean; 162 | name: string; 163 | runResult?: 164 | | { kind: "success"; returnValue: any } 165 | | { error: string; kind: "failed" } 166 | | { kind: "canceled" }; 167 | startedAt: number; 168 | workId?: string; 169 | }; 170 | stepNumber: number; 171 | workflowId: string; 172 | } 173 | >; 174 | }; 175 | workflow: { 176 | cancel: FunctionReference< 177 | "mutation", 178 | "internal", 179 | { workflowId: string }, 180 | null 181 | >; 182 | cleanup: FunctionReference< 183 | "mutation", 184 | "internal", 185 | { workflowId: string }, 186 | boolean 187 | >; 188 | complete: FunctionReference< 189 | "mutation", 190 | "internal", 191 | { 192 | generationNumber: number; 193 | now: number; 194 | runResult: 195 | | { kind: "success"; returnValue: any } 196 | | { error: string; kind: "failed" } 197 | | { kind: "canceled" }; 198 | workflowId: string; 199 | }, 200 | null 201 | >; 202 | create: FunctionReference< 203 | "mutation", 204 | "internal", 205 | { 206 | maxParallelism?: number; 207 | onComplete?: { context?: any; fnHandle: string }; 208 | validateAsync?: boolean; 209 | workflowArgs: any; 210 | workflowHandle: string; 211 | workflowName: string; 212 | }, 213 | string 214 | >; 215 | getStatus: FunctionReference< 216 | "query", 217 | "internal", 218 | { workflowId: string }, 219 | { 220 | inProgress: Array<{ 221 | _creationTime: number; 222 | _id: string; 223 | step: { 224 | args: any; 225 | argsSize: number; 226 | completedAt?: number; 227 | functionType: "query" | "mutation" | "action"; 228 | handle: string; 229 | inProgress: boolean; 230 | name: string; 231 | runResult?: 232 | | { kind: "success"; returnValue: any } 233 | | { error: string; kind: "failed" } 234 | | { kind: "canceled" }; 235 | startedAt: number; 236 | workId?: string; 237 | }; 238 | stepNumber: number; 239 | workflowId: string; 240 | }>; 241 | logLevel: "DEBUG" | "TRACE" | "INFO" | "REPORT" | "WARN" | "ERROR"; 242 | workflow: { 243 | _creationTime: number; 244 | _id: string; 245 | args: any; 246 | generationNumber: number; 247 | logLevel?: any; 248 | name?: string; 249 | onComplete?: { context?: any; fnHandle: string }; 250 | runResult?: 251 | | { kind: "success"; returnValue: any } 252 | | { error: string; kind: "failed" } 253 | | { kind: "canceled" }; 254 | startedAt?: any; 255 | state?: any; 256 | workflowHandle: string; 257 | }; 258 | } 259 | >; 260 | }; 261 | }; 262 | }; 263 | -------------------------------------------------------------------------------- /example/convex/_generated/api.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /** 3 | * Generated `api` utility. 4 | * 5 | * THIS CODE IS AUTOMATICALLY GENERATED. 6 | * 7 | * To regenerate, run `npx convex dev`. 8 | * @module 9 | */ 10 | 11 | import { anyApi, componentsGeneric } from "convex/server"; 12 | 13 | /** 14 | * A utility for referencing Convex functions in your app's API. 
15 | * 16 | * Usage: 17 | * ```js 18 | * const myFunctionReference = api.myModule.myFunction; 19 | * ``` 20 | */ 21 | export const api = anyApi; 22 | export const internal = anyApi; 23 | export const components = componentsGeneric(); 24 | -------------------------------------------------------------------------------- /example/convex/_generated/dataModel.d.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /** 3 | * Generated data model types. 4 | * 5 | * THIS CODE IS AUTOMATICALLY GENERATED. 6 | * 7 | * To regenerate, run `npx convex dev`. 8 | * @module 9 | */ 10 | 11 | import type { 12 | DataModelFromSchemaDefinition, 13 | DocumentByName, 14 | TableNamesInDataModel, 15 | SystemTableNames, 16 | } from "convex/server"; 17 | import type { GenericId } from "convex/values"; 18 | import schema from "../schema.js"; 19 | 20 | /** 21 | * The names of all of your Convex tables. 22 | */ 23 | export type TableNames = TableNamesInDataModel; 24 | 25 | /** 26 | * The type of a document stored in Convex. 27 | * 28 | * @typeParam TableName - A string literal type of the table name (like "users"). 29 | */ 30 | export type Doc = DocumentByName< 31 | DataModel, 32 | TableName 33 | >; 34 | 35 | /** 36 | * An identifier for a document in Convex. 37 | * 38 | * Convex documents are uniquely identified by their `Id`, which is accessible 39 | * on the `_id` field. To learn more, see [Document IDs](https://docs.convex.dev/using/document-ids). 40 | * 41 | * Documents can be loaded using `db.get(id)` in query and mutation functions. 42 | * 43 | * IDs are just strings at runtime, but this type can be used to distinguish them from other 44 | * strings when type checking. 45 | * 46 | * @typeParam TableName - A string literal type of the table name (like "users"). 47 | */ 48 | export type Id = 49 | GenericId; 50 | 51 | /** 52 | * A type describing your Convex data model. 53 | * 54 | * This type includes information about what tables you have, the type of 55 | * documents stored in those tables, and the indexes defined on them. 56 | * 57 | * This type is used to parameterize methods like `queryGeneric` and 58 | * `mutationGeneric` to make them type-safe. 59 | */ 60 | export type DataModel = DataModelFromSchemaDefinition; 61 | -------------------------------------------------------------------------------- /example/convex/_generated/server.d.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /** 3 | * Generated utilities for implementing server-side Convex query and mutation functions. 4 | * 5 | * THIS CODE IS AUTOMATICALLY GENERATED. 6 | * 7 | * To regenerate, run `npx convex dev`. 8 | * @module 9 | */ 10 | 11 | import { 12 | ActionBuilder, 13 | AnyComponents, 14 | HttpActionBuilder, 15 | MutationBuilder, 16 | QueryBuilder, 17 | GenericActionCtx, 18 | GenericMutationCtx, 19 | GenericQueryCtx, 20 | GenericDatabaseReader, 21 | GenericDatabaseWriter, 22 | FunctionReference, 23 | } from "convex/server"; 24 | import type { DataModel } from "./dataModel.js"; 25 | 26 | type GenericCtx = 27 | | GenericActionCtx 28 | | GenericMutationCtx 29 | | GenericQueryCtx; 30 | 31 | /** 32 | * Define a query in this Convex app's public API. 33 | * 34 | * This function will be allowed to read your Convex database and will be accessible from the client. 35 | * 36 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument. 37 | * @returns The wrapped query. 
Include this as an `export` to name it and make it accessible. 38 | */ 39 | export declare const query: QueryBuilder; 40 | 41 | /** 42 | * Define a query that is only accessible from other Convex functions (but not from the client). 43 | * 44 | * This function will be allowed to read from your Convex database. It will not be accessible from the client. 45 | * 46 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument. 47 | * @returns The wrapped query. Include this as an `export` to name it and make it accessible. 48 | */ 49 | export declare const internalQuery: QueryBuilder; 50 | 51 | /** 52 | * Define a mutation in this Convex app's public API. 53 | * 54 | * This function will be allowed to modify your Convex database and will be accessible from the client. 55 | * 56 | * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument. 57 | * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible. 58 | */ 59 | export declare const mutation: MutationBuilder; 60 | 61 | /** 62 | * Define a mutation that is only accessible from other Convex functions (but not from the client). 63 | * 64 | * This function will be allowed to modify your Convex database. It will not be accessible from the client. 65 | * 66 | * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument. 67 | * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible. 68 | */ 69 | export declare const internalMutation: MutationBuilder; 70 | 71 | /** 72 | * Define an action in this Convex app's public API. 73 | * 74 | * An action is a function which can execute any JavaScript code, including non-deterministic 75 | * code and code with side-effects, like calling third-party services. 76 | * They can be run in Convex's JavaScript environment or in Node.js using the "use node" directive. 77 | * They can interact with the database indirectly by calling queries and mutations using the {@link ActionCtx}. 78 | * 79 | * @param func - The action. It receives an {@link ActionCtx} as its first argument. 80 | * @returns The wrapped action. Include this as an `export` to name it and make it accessible. 81 | */ 82 | export declare const action: ActionBuilder; 83 | 84 | /** 85 | * Define an action that is only accessible from other Convex functions (but not from the client). 86 | * 87 | * @param func - The function. It receives an {@link ActionCtx} as its first argument. 88 | * @returns The wrapped function. Include this as an `export` to name it and make it accessible. 89 | */ 90 | export declare const internalAction: ActionBuilder; 91 | 92 | /** 93 | * Define an HTTP action. 94 | * 95 | * This function will be used to respond to HTTP requests received by a Convex 96 | * deployment if the requests matches the path and method where this action 97 | * is routed. Be sure to route your action in `convex/http.js`. 98 | * 99 | * @param func - The function. It receives an {@link ActionCtx} as its first argument. 100 | * @returns The wrapped function. Import this function from `convex/http.js` and route it to hook it up. 101 | */ 102 | export declare const httpAction: HttpActionBuilder; 103 | 104 | /** 105 | * A set of services for use within Convex query functions. 106 | * 107 | * The query context is passed as the first argument to any Convex query 108 | * function run on the server. 
109 | * 110 | * This differs from the {@link MutationCtx} because all of the services are 111 | * read-only. 112 | */ 113 | export type QueryCtx = GenericQueryCtx; 114 | 115 | /** 116 | * A set of services for use within Convex mutation functions. 117 | * 118 | * The mutation context is passed as the first argument to any Convex mutation 119 | * function run on the server. 120 | */ 121 | export type MutationCtx = GenericMutationCtx; 122 | 123 | /** 124 | * A set of services for use within Convex action functions. 125 | * 126 | * The action context is passed as the first argument to any Convex action 127 | * function run on the server. 128 | */ 129 | export type ActionCtx = GenericActionCtx; 130 | 131 | /** 132 | * An interface to read from the database within Convex query functions. 133 | * 134 | * The two entry points are {@link DatabaseReader.get}, which fetches a single 135 | * document by its {@link Id}, or {@link DatabaseReader.query}, which starts 136 | * building a query. 137 | */ 138 | export type DatabaseReader = GenericDatabaseReader; 139 | 140 | /** 141 | * An interface to read from and write to the database within Convex mutation 142 | * functions. 143 | * 144 | * Convex guarantees that all writes within a single mutation are 145 | * executed atomically, so you never have to worry about partial writes leaving 146 | * your data in an inconsistent state. See [the Convex Guide](https://docs.convex.dev/understanding/convex-fundamentals/functions#atomicity-and-optimistic-concurrency-control) 147 | * for the guarantees Convex provides your functions. 148 | */ 149 | export type DatabaseWriter = GenericDatabaseWriter; 150 | -------------------------------------------------------------------------------- /example/convex/_generated/server.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /** 3 | * Generated utilities for implementing server-side Convex query and mutation functions. 4 | * 5 | * THIS CODE IS AUTOMATICALLY GENERATED. 6 | * 7 | * To regenerate, run `npx convex dev`. 8 | * @module 9 | */ 10 | 11 | import { 12 | actionGeneric, 13 | httpActionGeneric, 14 | queryGeneric, 15 | mutationGeneric, 16 | internalActionGeneric, 17 | internalMutationGeneric, 18 | internalQueryGeneric, 19 | componentsGeneric, 20 | } from "convex/server"; 21 | 22 | /** 23 | * Define a query in this Convex app's public API. 24 | * 25 | * This function will be allowed to read your Convex database and will be accessible from the client. 26 | * 27 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument. 28 | * @returns The wrapped query. Include this as an `export` to name it and make it accessible. 29 | */ 30 | export const query = queryGeneric; 31 | 32 | /** 33 | * Define a query that is only accessible from other Convex functions (but not from the client). 34 | * 35 | * This function will be allowed to read from your Convex database. It will not be accessible from the client. 36 | * 37 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument. 38 | * @returns The wrapped query. Include this as an `export` to name it and make it accessible. 39 | */ 40 | export const internalQuery = internalQueryGeneric; 41 | 42 | /** 43 | * Define a mutation in this Convex app's public API. 44 | * 45 | * This function will be allowed to modify your Convex database and will be accessible from the client. 46 | * 47 | * @param func - The mutation function. 
It receives a {@link MutationCtx} as its first argument. 48 | * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible. 49 | */ 50 | export const mutation = mutationGeneric; 51 | 52 | /** 53 | * Define a mutation that is only accessible from other Convex functions (but not from the client). 54 | * 55 | * This function will be allowed to modify your Convex database. It will not be accessible from the client. 56 | * 57 | * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument. 58 | * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible. 59 | */ 60 | export const internalMutation = internalMutationGeneric; 61 | 62 | /** 63 | * Define an action in this Convex app's public API. 64 | * 65 | * An action is a function which can execute any JavaScript code, including non-deterministic 66 | * code and code with side-effects, like calling third-party services. 67 | * They can be run in Convex's JavaScript environment or in Node.js using the "use node" directive. 68 | * They can interact with the database indirectly by calling queries and mutations using the {@link ActionCtx}. 69 | * 70 | * @param func - The action. It receives an {@link ActionCtx} as its first argument. 71 | * @returns The wrapped action. Include this as an `export` to name it and make it accessible. 72 | */ 73 | export const action = actionGeneric; 74 | 75 | /** 76 | * Define an action that is only accessible from other Convex functions (but not from the client). 77 | * 78 | * @param func - The function. It receives an {@link ActionCtx} as its first argument. 79 | * @returns The wrapped function. Include this as an `export` to name it and make it accessible. 80 | */ 81 | export const internalAction = internalActionGeneric; 82 | 83 | /** 84 | * Define a Convex HTTP action. 85 | * 86 | * @param func - The function. It receives an {@link ActionCtx} as its first argument, and a `Request` object 87 | * as its second. 88 | * @returns The wrapped endpoint function. Route a URL path to this function in `convex/http.js`. 89 | */ 90 | export const httpAction = httpActionGeneric; 91 | -------------------------------------------------------------------------------- /example/convex/admin.ts: -------------------------------------------------------------------------------- 1 | import { v } from "convex/values"; 2 | import { WorkflowId, vWorkflowId } from "@convex-dev/workflow"; 3 | import { mutation, query } from "./_generated/server"; 4 | import { workflow } from "./example"; 5 | 6 | export const getWorkflowStatus = query({ 7 | args: { 8 | workflowId: v.string(), 9 | }, 10 | handler: async (ctx, args) => { 11 | return await workflow.status(ctx, args.workflowId as WorkflowId); 12 | }, 13 | }); 14 | 15 | export const getWorkflowResult = query({ 16 | args: { 17 | workflowId: v.optional(vWorkflowId), 18 | }, 19 | handler: async (ctx, args) => { 20 | const workflowId = args.workflowId; 21 | const flow = await (workflowId 22 | ? 
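        // A workflowId was passed: look that flow up by index; otherwise fall back to the most recent flow.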
ctx.db 23 | .query("flows") 24 | .withIndex("workflowId", (q) => q.eq("workflowId", workflowId)) 25 | .first() 26 | : ctx.db.query("flows").order("desc").first()); 27 | if (!flow) { 28 | throw new Error(`Flow not found: ${workflowId}`); 29 | } 30 | return flow.out; 31 | }, 32 | }); 33 | export const cancelWorkflow = mutation({ 34 | args: { 35 | workflowId: v.string(), 36 | }, 37 | returns: v.null(), 38 | handler: async (ctx, args) => { 39 | return await workflow.cancel(ctx, args.workflowId as WorkflowId); 40 | }, 41 | }); 42 | -------------------------------------------------------------------------------- /example/convex/convex.config.ts: -------------------------------------------------------------------------------- 1 | import { defineApp } from "convex/server"; 2 | import workflow from "@convex-dev/workflow/convex.config"; 3 | 4 | const app = defineApp(); 5 | app.use(workflow); 6 | export default app; 7 | -------------------------------------------------------------------------------- /example/convex/example.test.ts: -------------------------------------------------------------------------------- 1 | /// 2 | 3 | import { afterEach, beforeEach, describe, test, vi } from "vitest"; 4 | import { initConvexTest } from "./setup.test"; 5 | 6 | describe("workpool", () => { 7 | async function setupTest() { 8 | const t = initConvexTest(); 9 | return t; 10 | } 11 | 12 | let t: Awaited>; 13 | 14 | beforeEach(async () => { 15 | vi.useFakeTimers(); 16 | console.log("beforeEach"); 17 | t = await setupTest(); 18 | }); 19 | 20 | afterEach(async () => { 21 | console.log("afterEach"); 22 | await t.finishAllScheduledFunctions(vi.runAllTimers); 23 | vi.useRealTimers(); 24 | }); 25 | 26 | test("TODO", async () => { 27 | console.log("TODO"); 28 | }); 29 | }); 30 | -------------------------------------------------------------------------------- /example/convex/example.ts: -------------------------------------------------------------------------------- 1 | import { v } from "convex/values"; 2 | import { WorkflowId, WorkflowManager } from "@convex-dev/workflow"; 3 | import { internal } from "./_generated/api.js"; 4 | import { internalAction, internalMutation } from "./_generated/server.js"; 5 | import { components } from "./_generated/api.js"; 6 | import { vWorkflowId } from "@convex-dev/workflow"; 7 | import { vResultValidator } from "@convex-dev/workpool"; 8 | 9 | export const workflow = new WorkflowManager(components.workflow, { 10 | workpoolOptions: { 11 | maxParallelism: 2, 12 | }, 13 | }); 14 | 15 | export const exampleWorkflow = workflow.define({ 16 | args: { 17 | location: v.string(), 18 | }, 19 | handler: async ( 20 | step, 21 | args, 22 | // When returning things from other functions, you need to break the type 23 | // inference cycle by specifying the return type explicitly. 24 | ): Promise<{ 25 | name: string; 26 | celsius: number; 27 | farenheit: number; 28 | windSpeed: number; 29 | windGust: number; 30 | }> => { 31 | // Run in parallel! 
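    // Both geocoding steps start at once; the workflow resumes only after both settle.
    // The first is scheduled ~100ms out via `runAfter`, the second opts into the default retry policy.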
32 | const [{ latitude, longitude, name }, weather2] = await Promise.all([ 33 | step.runAction(internal.example.getGeocoding, args, { runAfter: 100 }), 34 | step.runAction(internal.example.getGeocoding, args, { retry: true }), 35 | ]); 36 | console.log("Is geocoding consistent?", latitude === weather2.latitude); 37 | 38 | const weather = await step.runAction(internal.example.getWeather, { 39 | latitude, 40 | longitude, 41 | }); 42 | const celsius = weather.temperature; 43 | const farenheit = (celsius * 9) / 5 + 32; 44 | const { temperature, windSpeed, windGust } = weather; 45 | console.log( 46 | `Weather in ${name}: ${farenheit.toFixed(1)}°F (${temperature}°C), ${windSpeed} km/h, ${windGust} km/h`, 47 | ); 48 | return { name, celsius, farenheit, windSpeed, windGust }; 49 | }, 50 | workpoolOptions: { 51 | retryActionsByDefault: true, 52 | }, 53 | // If you also want to run runtime validation on the return value. 54 | returns: v.object({ 55 | name: v.string(), 56 | celsius: v.number(), 57 | farenheit: v.number(), 58 | windSpeed: v.number(), 59 | windGust: v.number(), 60 | }), 61 | }); 62 | 63 | export const startWorkflow = internalMutation({ 64 | args: { 65 | location: v.optional(v.string()), 66 | }, 67 | returns: v.string(), 68 | handler: async (ctx, args) => { 69 | const location = args.location ?? "San Francisco"; 70 | const id: WorkflowId = await workflow.start( 71 | ctx, 72 | internal.example.exampleWorkflow, 73 | { location }, 74 | { 75 | onComplete: internal.example.flowCompleted, 76 | context: { location }, 77 | }, 78 | ); 79 | await ctx.db.insert("flows", { workflowId: id, in: location, out: null }); 80 | return id; 81 | }, 82 | }); 83 | 84 | export const flowCompleted = internalMutation({ 85 | args: { 86 | workflowId: vWorkflowId, 87 | result: vResultValidator, 88 | context: v.any(), 89 | }, 90 | handler: async (ctx, args) => { 91 | const flow = await ctx.db 92 | .query("flows") 93 | .withIndex("workflowId", (q) => q.eq("workflowId", args.workflowId)) 94 | .first(); 95 | if (!flow) { 96 | throw new Error(`Flow not found: ${args.workflowId}`); 97 | } 98 | await ctx.db.patch(flow._id, { 99 | out: args.result, 100 | }); 101 | }, 102 | }); 103 | 104 | export const getGeocoding = internalAction({ 105 | args: { 106 | location: v.string(), 107 | }, 108 | returns: v.object({ 109 | latitude: v.number(), 110 | longitude: v.number(), 111 | name: v.string(), 112 | }), 113 | handler: async (_ctx, { location }) => { 114 | const geocodingUrl = `https://geocoding-api.open-meteo.com/v1/search?name=${encodeURIComponent(location)}&count=1`; 115 | const geocodingResponse = await fetch(geocodingUrl); 116 | const geocodingData = (await geocodingResponse.json()) as { 117 | results: { 118 | latitude: number; 119 | longitude: number; 120 | name: string; 121 | }[]; 122 | }; 123 | 124 | if (!geocodingData.results?.[0]) { 125 | throw new Error(`Location '${location}' not found`); 126 | } 127 | 128 | const { latitude, longitude, name } = geocodingData.results[0]; 129 | return { latitude, longitude, name }; 130 | }, 131 | }); 132 | 133 | export const getWeather = internalAction({ 134 | args: { 135 | latitude: v.number(), 136 | longitude: v.number(), 137 | }, 138 | returns: v.object({ 139 | temperature: v.number(), 140 | windSpeed: v.number(), 141 | windGust: v.number(), 142 | }), 143 | handler: async (_ctx, { latitude, longitude }) => { 144 | const weatherUrl = 
`https://api.open-meteo.com/v1/forecast?latitude=${latitude}&longitude=${longitude}&current=temperature_2m,apparent_temperature,relative_humidity_2m,wind_speed_10m,wind_gusts_10m,weather_code`; 145 | 146 | const response = await fetch(weatherUrl); 147 | const data = (await response.json()) as { 148 | current: { 149 | time: string; 150 | temperature_2m: number; 151 | wind_speed_10m: number; 152 | wind_gusts_10m: number; 153 | }; 154 | }; 155 | return { 156 | temperature: data.current.temperature_2m, 157 | windSpeed: data.current.wind_speed_10m, 158 | windGust: data.current.wind_gusts_10m, 159 | }; 160 | }, 161 | }); 162 | 163 | export const updateFlow = internalMutation({ 164 | args: { 165 | workflowId: vWorkflowId, 166 | out: v.any(), 167 | }, 168 | returns: v.null(), 169 | handler: async (ctx, args) => { 170 | const flow = await ctx.db 171 | .query("flows") 172 | .withIndex("workflowId", (q) => q.eq("workflowId", args.workflowId)) 173 | .first(); 174 | if (!flow) { 175 | throw new Error(`Flow not found: ${args.workflowId}`); 176 | } 177 | await ctx.db.patch(flow._id, { 178 | out: args.out, 179 | }); 180 | }, 181 | }); 182 | -------------------------------------------------------------------------------- /example/convex/schema.ts: -------------------------------------------------------------------------------- 1 | import { defineTable, defineSchema } from "convex/server"; 2 | import { v } from "convex/values"; 3 | import { vWorkflowId } from "@convex-dev/workflow"; 4 | 5 | export default defineSchema({ 6 | flows: defineTable({ 7 | in: v.string(), 8 | workflowId: vWorkflowId, 9 | out: v.any(), 10 | }).index("workflowId", ["workflowId"]), 11 | }); 12 | -------------------------------------------------------------------------------- /example/convex/setup.test.ts: -------------------------------------------------------------------------------- 1 | /// 2 | import { test } from "vitest"; 3 | import { convexTest } from "convex-test"; 4 | import schema from "./schema"; 5 | export const modules = import.meta.glob("./**/*.*s"); 6 | 7 | // Sorry about everything 8 | import componentSchema from "../node_modules/@convex-dev/workflow/src/component/schema"; 9 | export { componentSchema }; 10 | export const componentModules = import.meta.glob( 11 | "../node_modules/@convex-dev/workflow/src/component/**/*.ts", 12 | ); 13 | 14 | export function initConvexTest() { 15 | const t = convexTest(schema, modules); 16 | t.registerComponent("bigPool", componentSchema, componentModules); 17 | t.registerComponent("smallPool", componentSchema, componentModules); 18 | return t; 19 | } 20 | 21 | test("setup", () => {}); 22 | -------------------------------------------------------------------------------- /example/convex/transcription.ts: -------------------------------------------------------------------------------- 1 | import { v } from "convex/values"; 2 | import { WorkflowManager } from "@convex-dev/workflow"; 3 | import { internal } from "./_generated/api.js"; 4 | import { internalAction, internalMutation } from "./_generated/server.js"; 5 | import { components } from "./_generated/api.js"; 6 | import { OpenAI } from "openai"; 7 | import { workflow } from "./example.js"; 8 | 9 | function getOpenAI() { 10 | if (!process.env.OPENAI_API_KEY) { 11 | throw new Error( 12 | "OPENAI_API_KEY is not configured.\n" + 13 | "npx convex env set OPENAI_API_KEY sk-****", 14 | ); 15 | } 16 | return new OpenAI({ apiKey: process.env.OPENAI_API_KEY }); 17 | } 18 | 19 | export const startTranscription = internalMutation({ 20 | args: { 21 | 
storageId: v.id("_storage"), 22 | }, 23 | handler: async (ctx, args) => { 24 | const workflow = new WorkflowManager(components.workflow, { 25 | workpoolOptions: { 26 | maxParallelism: 1, 27 | }, 28 | }); 29 | const id: string = await workflow.start( 30 | ctx, 31 | internal.transcription.transcriptionWorkflow, 32 | { storageId: args.storageId }, 33 | ); 34 | return id; 35 | }, 36 | }); 37 | 38 | export const transcriptionWorkflow = workflow.define({ 39 | args: { 40 | storageId: v.id("_storage"), 41 | }, 42 | handler: async (step, args) => { 43 | const transcription = await step.runAction( 44 | internal.transcription.computeTranscription, 45 | { 46 | storageId: args.storageId, 47 | }, 48 | ); 49 | console.log(transcription); 50 | const embedding = await step.runAction( 51 | internal.transcription.computeEmbedding, 52 | { transcription }, 53 | { retry: false }, 54 | ); 55 | console.log(embedding.slice(0, 20)); 56 | }, 57 | workpoolOptions: { 58 | retryActionsByDefault: false, 59 | defaultRetryBehavior: { 60 | maxAttempts: 5, 61 | initialBackoffMs: 10, 62 | base: 2, 63 | }, 64 | }, 65 | }); 66 | 67 | export const computeTranscription = internalAction({ 68 | args: { 69 | storageId: v.id("_storage"), 70 | }, 71 | returns: v.string(), 72 | handler: async (ctx, args) => { 73 | const blob = await ctx.storage.get(args.storageId); 74 | if (!blob) { 75 | throw new Error(`Invalid storage ID: ${args.storageId}`); 76 | } 77 | const file = new File([blob], `${args.storageId}`, { 78 | type: blob.type, 79 | }); 80 | const transcription = await getOpenAI().audio.transcriptions.create({ 81 | file, 82 | model: "whisper-1", 83 | }); 84 | return transcription.text; 85 | }, 86 | }); 87 | 88 | export const computeEmbedding = internalAction({ 89 | args: { 90 | transcription: v.string(), 91 | }, 92 | returns: v.array(v.number()), 93 | handler: async (ctx, args) => { 94 | const embeddingResponse = await getOpenAI().embeddings.create({ 95 | input: [args.transcription], 96 | model: "text-embedding-3-small", 97 | }); 98 | const embedding = embeddingResponse.data[0].embedding; 99 | return embedding; 100 | }, 101 | }); 102 | -------------------------------------------------------------------------------- /example/convex/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | /* This TypeScript project config describes the environment that 3 | * Convex functions run in and is used to typecheck them. 4 | * You can modify it, but some settings required to use Convex. 5 | */ 6 | "compilerOptions": { 7 | /* These settings are not required by Convex and can be modified. */ 8 | "allowJs": true, 9 | "strict": true, 10 | "skipLibCheck": true, 11 | 12 | /* These compiler options are required by Convex */ 13 | "target": "ESNext", 14 | "lib": ["ES2021", "dom", "ESNext.Array"], 15 | "forceConsistentCasingInFileNames": true, 16 | "allowSyntheticDefaultImports": true, 17 | "module": "ESNext", 18 | "moduleResolution": "Bundler", 19 | "isolatedModules": true, 20 | "noEmit": true, 21 | 22 | /* This should only be used in this example. Real apps should not attempt 23 | * to compile TypeScript because differences between tsconfig.json files can 24 | * cause the code to be compiled differently. 
25 | */ 26 | "customConditions": ["@convex-dev/component-source"] 27 | }, 28 | "include": ["./**/*"], 29 | "exclude": ["./_generated"] 30 | } 31 | -------------------------------------------------------------------------------- /example/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "uses-component", 3 | "version": "0.0.0", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "uses-component", 9 | "version": "0.0.0", 10 | "dependencies": { 11 | "@convex-dev/workflow": "file:..", 12 | "convex": "^1.16.5" 13 | }, 14 | "devDependencies": { 15 | "@types/node": "^22.14.0", 16 | "convex-test": "^0.0.36", 17 | "eslint": "^8.55.0", 18 | "typescript": "^5.2.2" 19 | } 20 | }, 21 | "..": { 22 | "name": "@convex-dev/workflow", 23 | "version": "0.2.3", 24 | "license": "Apache-2.0", 25 | "dependencies": { 26 | "async-channel": "^0.2.0" 27 | }, 28 | "devDependencies": { 29 | "@eslint/eslintrc": "^3.1.0", 30 | "@eslint/js": "^9.9.1", 31 | "@types/node": "^18.19.86", 32 | "@typescript-eslint/eslint-plugin": "^7.0.0", 33 | "@typescript-eslint/parser": "^7.0.0", 34 | "eslint": "^8.57.0", 35 | "globals": "^15.9.0", 36 | "openai": "^4.54.0", 37 | "prettier": "3.2.5", 38 | "typescript": "~5.5.0", 39 | "typescript-eslint": "^7.0.0", 40 | "vitest": "^3.1.1" 41 | }, 42 | "peerDependencies": { 43 | "@convex-dev/workpool": "^0.2.9", 44 | "convex": ">=1.21.0 <1.35.0", 45 | "convex-helpers": "^0.1.77" 46 | } 47 | }, 48 | "node_modules/@convex-dev/workflow": { 49 | "resolved": "..", 50 | "link": true 51 | }, 52 | "node_modules/@esbuild/aix-ppc64": { 53 | "version": "0.25.1", 54 | "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.1.tgz", 55 | "integrity": "sha512-kfYGy8IdzTGy+z0vFGvExZtxkFlA4zAxgKEahG9KE1ScBjpQnFsNOX8KTU5ojNru5ed5CVoJYXFtoxaq5nFbjQ==", 56 | "cpu": [ 57 | "ppc64" 58 | ], 59 | "optional": true, 60 | "os": [ 61 | "aix" 62 | ], 63 | "engines": { 64 | "node": ">=18" 65 | } 66 | }, 67 | "node_modules/@esbuild/android-arm": { 68 | "version": "0.25.1", 69 | "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.1.tgz", 70 | "integrity": "sha512-dp+MshLYux6j/JjdqVLnMglQlFu+MuVeNrmT5nk6q07wNhCdSnB7QZj+7G8VMUGh1q+vj2Bq8kRsuyA00I/k+Q==", 71 | "cpu": [ 72 | "arm" 73 | ], 74 | "optional": true, 75 | "os": [ 76 | "android" 77 | ], 78 | "engines": { 79 | "node": ">=18" 80 | } 81 | }, 82 | "node_modules/@esbuild/android-arm64": { 83 | "version": "0.25.1", 84 | "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.1.tgz", 85 | "integrity": "sha512-50tM0zCJW5kGqgG7fQ7IHvQOcAn9TKiVRuQ/lN0xR+T2lzEFvAi1ZcS8DiksFcEpf1t/GYOeOfCAgDHFpkiSmA==", 86 | "cpu": [ 87 | "arm64" 88 | ], 89 | "optional": true, 90 | "os": [ 91 | "android" 92 | ], 93 | "engines": { 94 | "node": ">=18" 95 | } 96 | }, 97 | "node_modules/@esbuild/android-x64": { 98 | "version": "0.25.1", 99 | "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.1.tgz", 100 | "integrity": "sha512-GCj6WfUtNldqUzYkN/ITtlhwQqGWu9S45vUXs7EIYf+7rCiiqH9bCloatO9VhxsL0Pji+PF4Lz2XXCES+Q8hDw==", 101 | "cpu": [ 102 | "x64" 103 | ], 104 | "optional": true, 105 | "os": [ 106 | "android" 107 | ], 108 | "engines": { 109 | "node": ">=18" 110 | } 111 | }, 112 | "node_modules/@esbuild/darwin-arm64": { 113 | "version": "0.25.1", 114 | "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.1.tgz", 115 | "integrity": 
"sha512-5hEZKPf+nQjYoSr/elb62U19/l1mZDdqidGfmFutVUjjUZrOazAtwK+Kr+3y0C/oeJfLlxo9fXb1w7L+P7E4FQ==", 116 | "cpu": [ 117 | "arm64" 118 | ], 119 | "optional": true, 120 | "os": [ 121 | "darwin" 122 | ], 123 | "engines": { 124 | "node": ">=18" 125 | } 126 | }, 127 | "node_modules/@esbuild/darwin-x64": { 128 | "version": "0.25.1", 129 | "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.1.tgz", 130 | "integrity": "sha512-hxVnwL2Dqs3fM1IWq8Iezh0cX7ZGdVhbTfnOy5uURtao5OIVCEyj9xIzemDi7sRvKsuSdtCAhMKarxqtlyVyfA==", 131 | "cpu": [ 132 | "x64" 133 | ], 134 | "optional": true, 135 | "os": [ 136 | "darwin" 137 | ], 138 | "engines": { 139 | "node": ">=18" 140 | } 141 | }, 142 | "node_modules/@esbuild/freebsd-arm64": { 143 | "version": "0.25.1", 144 | "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.1.tgz", 145 | "integrity": "sha512-1MrCZs0fZa2g8E+FUo2ipw6jw5qqQiH+tERoS5fAfKnRx6NXH31tXBKI3VpmLijLH6yriMZsxJtaXUyFt/8Y4A==", 146 | "cpu": [ 147 | "arm64" 148 | ], 149 | "optional": true, 150 | "os": [ 151 | "freebsd" 152 | ], 153 | "engines": { 154 | "node": ">=18" 155 | } 156 | }, 157 | "node_modules/@esbuild/freebsd-x64": { 158 | "version": "0.25.1", 159 | "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.1.tgz", 160 | "integrity": "sha512-0IZWLiTyz7nm0xuIs0q1Y3QWJC52R8aSXxe40VUxm6BB1RNmkODtW6LHvWRrGiICulcX7ZvyH6h5fqdLu4gkww==", 161 | "cpu": [ 162 | "x64" 163 | ], 164 | "optional": true, 165 | "os": [ 166 | "freebsd" 167 | ], 168 | "engines": { 169 | "node": ">=18" 170 | } 171 | }, 172 | "node_modules/@esbuild/linux-arm": { 173 | "version": "0.25.1", 174 | "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.1.tgz", 175 | "integrity": "sha512-NdKOhS4u7JhDKw9G3cY6sWqFcnLITn6SqivVArbzIaf3cemShqfLGHYMx8Xlm/lBit3/5d7kXvriTUGa5YViuQ==", 176 | "cpu": [ 177 | "arm" 178 | ], 179 | "optional": true, 180 | "os": [ 181 | "linux" 182 | ], 183 | "engines": { 184 | "node": ">=18" 185 | } 186 | }, 187 | "node_modules/@esbuild/linux-arm64": { 188 | "version": "0.25.1", 189 | "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.1.tgz", 190 | "integrity": "sha512-jaN3dHi0/DDPelk0nLcXRm1q7DNJpjXy7yWaWvbfkPvI+7XNSc/lDOnCLN7gzsyzgu6qSAmgSvP9oXAhP973uQ==", 191 | "cpu": [ 192 | "arm64" 193 | ], 194 | "optional": true, 195 | "os": [ 196 | "linux" 197 | ], 198 | "engines": { 199 | "node": ">=18" 200 | } 201 | }, 202 | "node_modules/@esbuild/linux-ia32": { 203 | "version": "0.25.1", 204 | "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.1.tgz", 205 | "integrity": "sha512-OJykPaF4v8JidKNGz8c/q1lBO44sQNUQtq1KktJXdBLn1hPod5rE/Hko5ugKKZd+D2+o1a9MFGUEIUwO2YfgkQ==", 206 | "cpu": [ 207 | "ia32" 208 | ], 209 | "optional": true, 210 | "os": [ 211 | "linux" 212 | ], 213 | "engines": { 214 | "node": ">=18" 215 | } 216 | }, 217 | "node_modules/@esbuild/linux-loong64": { 218 | "version": "0.25.1", 219 | "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.1.tgz", 220 | "integrity": "sha512-nGfornQj4dzcq5Vp835oM/o21UMlXzn79KobKlcs3Wz9smwiifknLy4xDCLUU0BWp7b/houtdrgUz7nOGnfIYg==", 221 | "cpu": [ 222 | "loong64" 223 | ], 224 | "optional": true, 225 | "os": [ 226 | "linux" 227 | ], 228 | "engines": { 229 | "node": ">=18" 230 | } 231 | }, 232 | "node_modules/@esbuild/linux-mips64el": { 233 | "version": "0.25.1", 234 | "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.1.tgz", 235 | "integrity": 
"sha512-1osBbPEFYwIE5IVB/0g2X6i1qInZa1aIoj1TdL4AaAb55xIIgbg8Doq6a5BzYWgr+tEcDzYH67XVnTmUzL+nXg==", 236 | "cpu": [ 237 | "mips64el" 238 | ], 239 | "optional": true, 240 | "os": [ 241 | "linux" 242 | ], 243 | "engines": { 244 | "node": ">=18" 245 | } 246 | }, 247 | "node_modules/@esbuild/linux-ppc64": { 248 | "version": "0.25.1", 249 | "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.1.tgz", 250 | "integrity": "sha512-/6VBJOwUf3TdTvJZ82qF3tbLuWsscd7/1w+D9LH0W/SqUgM5/JJD0lrJ1fVIfZsqB6RFmLCe0Xz3fmZc3WtyVg==", 251 | "cpu": [ 252 | "ppc64" 253 | ], 254 | "optional": true, 255 | "os": [ 256 | "linux" 257 | ], 258 | "engines": { 259 | "node": ">=18" 260 | } 261 | }, 262 | "node_modules/@esbuild/linux-riscv64": { 263 | "version": "0.25.1", 264 | "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.1.tgz", 265 | "integrity": "sha512-nSut/Mx5gnilhcq2yIMLMe3Wl4FK5wx/o0QuuCLMtmJn+WeWYoEGDN1ipcN72g1WHsnIbxGXd4i/MF0gTcuAjQ==", 266 | "cpu": [ 267 | "riscv64" 268 | ], 269 | "optional": true, 270 | "os": [ 271 | "linux" 272 | ], 273 | "engines": { 274 | "node": ">=18" 275 | } 276 | }, 277 | "node_modules/@esbuild/linux-s390x": { 278 | "version": "0.25.1", 279 | "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.1.tgz", 280 | "integrity": "sha512-cEECeLlJNfT8kZHqLarDBQso9a27o2Zd2AQ8USAEoGtejOrCYHNtKP8XQhMDJMtthdF4GBmjR2au3x1udADQQQ==", 281 | "cpu": [ 282 | "s390x" 283 | ], 284 | "optional": true, 285 | "os": [ 286 | "linux" 287 | ], 288 | "engines": { 289 | "node": ">=18" 290 | } 291 | }, 292 | "node_modules/@esbuild/linux-x64": { 293 | "version": "0.25.1", 294 | "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.1.tgz", 295 | "integrity": "sha512-xbfUhu/gnvSEg+EGovRc+kjBAkrvtk38RlerAzQxvMzlB4fXpCFCeUAYzJvrnhFtdeyVCDANSjJvOvGYoeKzFA==", 296 | "cpu": [ 297 | "x64" 298 | ], 299 | "optional": true, 300 | "os": [ 301 | "linux" 302 | ], 303 | "engines": { 304 | "node": ">=18" 305 | } 306 | }, 307 | "node_modules/@esbuild/netbsd-arm64": { 308 | "version": "0.25.1", 309 | "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.1.tgz", 310 | "integrity": "sha512-O96poM2XGhLtpTh+s4+nP7YCCAfb4tJNRVZHfIE7dgmax+yMP2WgMd2OecBuaATHKTHsLWHQeuaxMRnCsH8+5g==", 311 | "cpu": [ 312 | "arm64" 313 | ], 314 | "optional": true, 315 | "os": [ 316 | "netbsd" 317 | ], 318 | "engines": { 319 | "node": ">=18" 320 | } 321 | }, 322 | "node_modules/@esbuild/netbsd-x64": { 323 | "version": "0.25.1", 324 | "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.1.tgz", 325 | "integrity": "sha512-X53z6uXip6KFXBQ+Krbx25XHV/NCbzryM6ehOAeAil7X7oa4XIq+394PWGnwaSQ2WRA0KI6PUO6hTO5zeF5ijA==", 326 | "cpu": [ 327 | "x64" 328 | ], 329 | "optional": true, 330 | "os": [ 331 | "netbsd" 332 | ], 333 | "engines": { 334 | "node": ">=18" 335 | } 336 | }, 337 | "node_modules/@esbuild/openbsd-arm64": { 338 | "version": "0.25.1", 339 | "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.1.tgz", 340 | "integrity": "sha512-Na9T3szbXezdzM/Kfs3GcRQNjHzM6GzFBeU1/6IV/npKP5ORtp9zbQjvkDJ47s6BCgaAZnnnu/cY1x342+MvZg==", 341 | "cpu": [ 342 | "arm64" 343 | ], 344 | "optional": true, 345 | "os": [ 346 | "openbsd" 347 | ], 348 | "engines": { 349 | "node": ">=18" 350 | } 351 | }, 352 | "node_modules/@esbuild/openbsd-x64": { 353 | "version": "0.25.1", 354 | "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.1.tgz", 355 | "integrity": 
"sha512-T3H78X2h1tszfRSf+txbt5aOp/e7TAz3ptVKu9Oyir3IAOFPGV6O9c2naym5TOriy1l0nNf6a4X5UXRZSGX/dw==", 356 | "cpu": [ 357 | "x64" 358 | ], 359 | "optional": true, 360 | "os": [ 361 | "openbsd" 362 | ], 363 | "engines": { 364 | "node": ">=18" 365 | } 366 | }, 367 | "node_modules/@esbuild/sunos-x64": { 368 | "version": "0.25.1", 369 | "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.1.tgz", 370 | "integrity": "sha512-2H3RUvcmULO7dIE5EWJH8eubZAI4xw54H1ilJnRNZdeo8dTADEZ21w6J22XBkXqGJbe0+wnNJtw3UXRoLJnFEg==", 371 | "cpu": [ 372 | "x64" 373 | ], 374 | "optional": true, 375 | "os": [ 376 | "sunos" 377 | ], 378 | "engines": { 379 | "node": ">=18" 380 | } 381 | }, 382 | "node_modules/@esbuild/win32-arm64": { 383 | "version": "0.25.1", 384 | "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.1.tgz", 385 | "integrity": "sha512-GE7XvrdOzrb+yVKB9KsRMq+7a2U/K5Cf/8grVFRAGJmfADr/e/ODQ134RK2/eeHqYV5eQRFxb1hY7Nr15fv1NQ==", 386 | "cpu": [ 387 | "arm64" 388 | ], 389 | "optional": true, 390 | "os": [ 391 | "win32" 392 | ], 393 | "engines": { 394 | "node": ">=18" 395 | } 396 | }, 397 | "node_modules/@esbuild/win32-ia32": { 398 | "version": "0.25.1", 399 | "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.1.tgz", 400 | "integrity": "sha512-uOxSJCIcavSiT6UnBhBzE8wy3n0hOkJsBOzy7HDAuTDE++1DJMRRVCPGisULScHL+a/ZwdXPpXD3IyFKjA7K8A==", 401 | "cpu": [ 402 | "ia32" 403 | ], 404 | "optional": true, 405 | "os": [ 406 | "win32" 407 | ], 408 | "engines": { 409 | "node": ">=18" 410 | } 411 | }, 412 | "node_modules/@esbuild/win32-x64": { 413 | "version": "0.25.1", 414 | "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.1.tgz", 415 | "integrity": "sha512-Y1EQdcfwMSeQN/ujR5VayLOJ1BHaK+ssyk0AEzPjC+t1lITgsnccPqFjb6V+LsTp/9Iov4ysfjxLaGJ9RPtkVg==", 416 | "cpu": [ 417 | "x64" 418 | ], 419 | "optional": true, 420 | "os": [ 421 | "win32" 422 | ], 423 | "engines": { 424 | "node": ">=18" 425 | } 426 | }, 427 | "node_modules/@eslint-community/eslint-utils": { 428 | "version": "4.4.0", 429 | "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", 430 | "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", 431 | "dev": true, 432 | "dependencies": { 433 | "eslint-visitor-keys": "^3.3.0" 434 | }, 435 | "engines": { 436 | "node": "^12.22.0 || ^14.17.0 || >=16.0.0" 437 | }, 438 | "peerDependencies": { 439 | "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" 440 | } 441 | }, 442 | "node_modules/@eslint-community/regexpp": { 443 | "version": "4.11.1", 444 | "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.11.1.tgz", 445 | "integrity": "sha512-m4DVN9ZqskZoLU5GlWZadwDnYo3vAEydiUayB9widCl9ffWx2IvPnp6n3on5rJmziJSw9Bv+Z3ChDVdMwXCY8Q==", 446 | "dev": true, 447 | "engines": { 448 | "node": "^12.0.0 || ^14.0.0 || >=16.0.0" 449 | } 450 | }, 451 | "node_modules/@eslint/eslintrc": { 452 | "version": "2.1.4", 453 | "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", 454 | "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", 455 | "dev": true, 456 | "dependencies": { 457 | "ajv": "^6.12.4", 458 | "debug": "^4.3.2", 459 | "espree": "^9.6.0", 460 | "globals": "^13.19.0", 461 | "ignore": "^5.2.0", 462 | "import-fresh": "^3.2.1", 463 | "js-yaml": "^4.1.0", 464 | "minimatch": "^3.1.2", 465 | "strip-json-comments": "^3.1.1" 466 | }, 467 | "engines": { 468 | 
"node": "^12.22.0 || ^14.17.0 || >=16.0.0" 469 | }, 470 | "funding": { 471 | "url": "https://opencollective.com/eslint" 472 | } 473 | }, 474 | "node_modules/@eslint/eslintrc/node_modules/globals": { 475 | "version": "13.24.0", 476 | "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", 477 | "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", 478 | "dev": true, 479 | "dependencies": { 480 | "type-fest": "^0.20.2" 481 | }, 482 | "engines": { 483 | "node": ">=8" 484 | }, 485 | "funding": { 486 | "url": "https://github.com/sponsors/sindresorhus" 487 | } 488 | }, 489 | "node_modules/@eslint/js": { 490 | "version": "8.57.1", 491 | "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz", 492 | "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==", 493 | "dev": true, 494 | "engines": { 495 | "node": "^12.22.0 || ^14.17.0 || >=16.0.0" 496 | } 497 | }, 498 | "node_modules/@humanwhocodes/config-array": { 499 | "version": "0.13.0", 500 | "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz", 501 | "integrity": "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==", 502 | "deprecated": "Use @eslint/config-array instead", 503 | "dev": true, 504 | "dependencies": { 505 | "@humanwhocodes/object-schema": "^2.0.3", 506 | "debug": "^4.3.1", 507 | "minimatch": "^3.0.5" 508 | }, 509 | "engines": { 510 | "node": ">=10.10.0" 511 | } 512 | }, 513 | "node_modules/@humanwhocodes/module-importer": { 514 | "version": "1.0.1", 515 | "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", 516 | "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", 517 | "dev": true, 518 | "engines": { 519 | "node": ">=12.22" 520 | }, 521 | "funding": { 522 | "type": "github", 523 | "url": "https://github.com/sponsors/nzakas" 524 | } 525 | }, 526 | "node_modules/@humanwhocodes/object-schema": { 527 | "version": "2.0.3", 528 | "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", 529 | "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", 530 | "deprecated": "Use @eslint/object-schema instead", 531 | "dev": true 532 | }, 533 | "node_modules/@nodelib/fs.scandir": { 534 | "version": "2.1.5", 535 | "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", 536 | "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", 537 | "dev": true, 538 | "dependencies": { 539 | "@nodelib/fs.stat": "2.0.5", 540 | "run-parallel": "^1.1.9" 541 | }, 542 | "engines": { 543 | "node": ">= 8" 544 | } 545 | }, 546 | "node_modules/@nodelib/fs.stat": { 547 | "version": "2.0.5", 548 | "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", 549 | "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", 550 | "dev": true, 551 | "engines": { 552 | "node": ">= 8" 553 | } 554 | }, 555 | "node_modules/@nodelib/fs.walk": { 556 | "version": "1.2.8", 557 | "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", 558 | "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", 559 | "dev": true, 560 | "dependencies": 
{ 561 | "@nodelib/fs.scandir": "2.1.5", 562 | "fastq": "^1.6.0" 563 | }, 564 | "engines": { 565 | "node": ">= 8" 566 | } 567 | }, 568 | "node_modules/@types/node": { 569 | "version": "22.14.0", 570 | "resolved": "https://registry.npmjs.org/@types/node/-/node-22.14.0.tgz", 571 | "integrity": "sha512-Kmpl+z84ILoG+3T/zQFyAJsU6EPTmOCj8/2+83fSN6djd6I4o7uOuGIH6vq3PrjY5BGitSbFuMN18j3iknubbA==", 572 | "dev": true, 573 | "dependencies": { 574 | "undici-types": "~6.21.0" 575 | } 576 | }, 577 | "node_modules/@ungap/structured-clone": { 578 | "version": "1.2.0", 579 | "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", 580 | "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", 581 | "dev": true 582 | }, 583 | "node_modules/acorn": { 584 | "version": "8.12.1", 585 | "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", 586 | "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", 587 | "dev": true, 588 | "bin": { 589 | "acorn": "bin/acorn" 590 | }, 591 | "engines": { 592 | "node": ">=0.4.0" 593 | } 594 | }, 595 | "node_modules/acorn-jsx": { 596 | "version": "5.3.2", 597 | "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", 598 | "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", 599 | "dev": true, 600 | "peerDependencies": { 601 | "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" 602 | } 603 | }, 604 | "node_modules/ajv": { 605 | "version": "6.12.6", 606 | "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", 607 | "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", 608 | "dev": true, 609 | "dependencies": { 610 | "fast-deep-equal": "^3.1.1", 611 | "fast-json-stable-stringify": "^2.0.0", 612 | "json-schema-traverse": "^0.4.1", 613 | "uri-js": "^4.2.2" 614 | }, 615 | "funding": { 616 | "type": "github", 617 | "url": "https://github.com/sponsors/epoberezkin" 618 | } 619 | }, 620 | "node_modules/ansi-regex": { 621 | "version": "5.0.1", 622 | "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", 623 | "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", 624 | "dev": true, 625 | "engines": { 626 | "node": ">=8" 627 | } 628 | }, 629 | "node_modules/ansi-styles": { 630 | "version": "4.3.0", 631 | "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", 632 | "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", 633 | "dev": true, 634 | "dependencies": { 635 | "color-convert": "^2.0.1" 636 | }, 637 | "engines": { 638 | "node": ">=8" 639 | }, 640 | "funding": { 641 | "url": "https://github.com/chalk/ansi-styles?sponsor=1" 642 | } 643 | }, 644 | "node_modules/argparse": { 645 | "version": "2.0.1", 646 | "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", 647 | "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", 648 | "dev": true 649 | }, 650 | "node_modules/balanced-match": { 651 | "version": "1.0.2", 652 | "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", 653 | "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", 654 | "dev": true 655 | }, 656 | "node_modules/brace-expansion": { 
657 | "version": "1.1.11", 658 | "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", 659 | "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", 660 | "dev": true, 661 | "dependencies": { 662 | "balanced-match": "^1.0.0", 663 | "concat-map": "0.0.1" 664 | } 665 | }, 666 | "node_modules/callsites": { 667 | "version": "3.1.0", 668 | "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", 669 | "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", 670 | "dev": true, 671 | "engines": { 672 | "node": ">=6" 673 | } 674 | }, 675 | "node_modules/chalk": { 676 | "version": "4.1.2", 677 | "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", 678 | "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", 679 | "dev": true, 680 | "dependencies": { 681 | "ansi-styles": "^4.1.0", 682 | "supports-color": "^7.1.0" 683 | }, 684 | "engines": { 685 | "node": ">=10" 686 | }, 687 | "funding": { 688 | "url": "https://github.com/chalk/chalk?sponsor=1" 689 | } 690 | }, 691 | "node_modules/color-convert": { 692 | "version": "2.0.1", 693 | "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", 694 | "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", 695 | "dev": true, 696 | "dependencies": { 697 | "color-name": "~1.1.4" 698 | }, 699 | "engines": { 700 | "node": ">=7.0.0" 701 | } 702 | }, 703 | "node_modules/color-name": { 704 | "version": "1.1.4", 705 | "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", 706 | "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", 707 | "dev": true 708 | }, 709 | "node_modules/concat-map": { 710 | "version": "0.0.1", 711 | "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", 712 | "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", 713 | "dev": true 714 | }, 715 | "node_modules/convex": { 716 | "version": "1.22.0", 717 | "resolved": "https://registry.npmjs.org/convex/-/convex-1.22.0.tgz", 718 | "integrity": "sha512-zHDagTUO8SftALBX7MsE90aZ+CpPEV3xkRgmZReN4k3KnM1R6q4qPReY5yUFpKPUwlmSeIXLKtivz4XEaBvj+g==", 719 | "dependencies": { 720 | "esbuild": "0.25.1", 721 | "jwt-decode": "^4.0.0", 722 | "prettier": "3.5.1" 723 | }, 724 | "bin": { 725 | "convex": "bin/main.js" 726 | }, 727 | "engines": { 728 | "node": ">=18.0.0", 729 | "npm": ">=7.0.0" 730 | }, 731 | "peerDependencies": { 732 | "@auth0/auth0-react": "^2.0.1", 733 | "@clerk/clerk-react": "^4.12.8 || ^5.0.0", 734 | "react": "^17.0.2 || ^18.0.0 || ^19.0.0-0 || ^19.0.0", 735 | "react-dom": "^17.0.2 || ^18.0.0 || ^19.0.0-0 || ^19.0.0" 736 | }, 737 | "peerDependenciesMeta": { 738 | "@auth0/auth0-react": { 739 | "optional": true 740 | }, 741 | "@clerk/clerk-react": { 742 | "optional": true 743 | }, 744 | "react": { 745 | "optional": true 746 | }, 747 | "react-dom": { 748 | "optional": true 749 | } 750 | } 751 | }, 752 | "node_modules/convex-test": { 753 | "version": "0.0.36", 754 | "resolved": "https://registry.npmjs.org/convex-test/-/convex-test-0.0.36.tgz", 755 | "integrity": "sha512-xcmjiYodRNypQLIVTSq/23BSH1sbJ8GKKKSX9A/JmZovrm1SEV0ATYriOlvRyoU6+3BNWt0AvP2Wql2HOSMHOg==", 756 | "dev": true, 757 | "peerDependencies": { 758 | "convex": "^1.16.4" 759 | } 760 
| }, 761 | "node_modules/cross-spawn": { 762 | "version": "7.0.6", 763 | "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", 764 | "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", 765 | "dev": true, 766 | "dependencies": { 767 | "path-key": "^3.1.0", 768 | "shebang-command": "^2.0.0", 769 | "which": "^2.0.1" 770 | }, 771 | "engines": { 772 | "node": ">= 8" 773 | } 774 | }, 775 | "node_modules/debug": { 776 | "version": "4.3.7", 777 | "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", 778 | "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", 779 | "dev": true, 780 | "dependencies": { 781 | "ms": "^2.1.3" 782 | }, 783 | "engines": { 784 | "node": ">=6.0" 785 | }, 786 | "peerDependenciesMeta": { 787 | "supports-color": { 788 | "optional": true 789 | } 790 | } 791 | }, 792 | "node_modules/deep-is": { 793 | "version": "0.1.4", 794 | "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", 795 | "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", 796 | "dev": true 797 | }, 798 | "node_modules/doctrine": { 799 | "version": "3.0.0", 800 | "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", 801 | "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", 802 | "dev": true, 803 | "dependencies": { 804 | "esutils": "^2.0.2" 805 | }, 806 | "engines": { 807 | "node": ">=6.0.0" 808 | } 809 | }, 810 | "node_modules/esbuild": { 811 | "version": "0.25.1", 812 | "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.1.tgz", 813 | "integrity": "sha512-BGO5LtrGC7vxnqucAe/rmvKdJllfGaYWdyABvyMoXQlfYMb2bbRuReWR5tEGE//4LcNJj9XrkovTqNYRFZHAMQ==", 814 | "hasInstallScript": true, 815 | "bin": { 816 | "esbuild": "bin/esbuild" 817 | }, 818 | "engines": { 819 | "node": ">=18" 820 | }, 821 | "optionalDependencies": { 822 | "@esbuild/aix-ppc64": "0.25.1", 823 | "@esbuild/android-arm": "0.25.1", 824 | "@esbuild/android-arm64": "0.25.1", 825 | "@esbuild/android-x64": "0.25.1", 826 | "@esbuild/darwin-arm64": "0.25.1", 827 | "@esbuild/darwin-x64": "0.25.1", 828 | "@esbuild/freebsd-arm64": "0.25.1", 829 | "@esbuild/freebsd-x64": "0.25.1", 830 | "@esbuild/linux-arm": "0.25.1", 831 | "@esbuild/linux-arm64": "0.25.1", 832 | "@esbuild/linux-ia32": "0.25.1", 833 | "@esbuild/linux-loong64": "0.25.1", 834 | "@esbuild/linux-mips64el": "0.25.1", 835 | "@esbuild/linux-ppc64": "0.25.1", 836 | "@esbuild/linux-riscv64": "0.25.1", 837 | "@esbuild/linux-s390x": "0.25.1", 838 | "@esbuild/linux-x64": "0.25.1", 839 | "@esbuild/netbsd-arm64": "0.25.1", 840 | "@esbuild/netbsd-x64": "0.25.1", 841 | "@esbuild/openbsd-arm64": "0.25.1", 842 | "@esbuild/openbsd-x64": "0.25.1", 843 | "@esbuild/sunos-x64": "0.25.1", 844 | "@esbuild/win32-arm64": "0.25.1", 845 | "@esbuild/win32-ia32": "0.25.1", 846 | "@esbuild/win32-x64": "0.25.1" 847 | } 848 | }, 849 | "node_modules/escape-string-regexp": { 850 | "version": "4.0.0", 851 | "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", 852 | "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", 853 | "dev": true, 854 | "engines": { 855 | "node": ">=10" 856 | }, 857 | "funding": { 858 | "url": "https://github.com/sponsors/sindresorhus" 859 | } 860 | }, 861 | "node_modules/eslint": { 862 | "version": 
"8.57.1", 863 | "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz", 864 | "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==", 865 | "dev": true, 866 | "dependencies": { 867 | "@eslint-community/eslint-utils": "^4.2.0", 868 | "@eslint-community/regexpp": "^4.6.1", 869 | "@eslint/eslintrc": "^2.1.4", 870 | "@eslint/js": "8.57.1", 871 | "@humanwhocodes/config-array": "^0.13.0", 872 | "@humanwhocodes/module-importer": "^1.0.1", 873 | "@nodelib/fs.walk": "^1.2.8", 874 | "@ungap/structured-clone": "^1.2.0", 875 | "ajv": "^6.12.4", 876 | "chalk": "^4.0.0", 877 | "cross-spawn": "^7.0.2", 878 | "debug": "^4.3.2", 879 | "doctrine": "^3.0.0", 880 | "escape-string-regexp": "^4.0.0", 881 | "eslint-scope": "^7.2.2", 882 | "eslint-visitor-keys": "^3.4.3", 883 | "espree": "^9.6.1", 884 | "esquery": "^1.4.2", 885 | "esutils": "^2.0.2", 886 | "fast-deep-equal": "^3.1.3", 887 | "file-entry-cache": "^6.0.1", 888 | "find-up": "^5.0.0", 889 | "glob-parent": "^6.0.2", 890 | "globals": "^13.19.0", 891 | "graphemer": "^1.4.0", 892 | "ignore": "^5.2.0", 893 | "imurmurhash": "^0.1.4", 894 | "is-glob": "^4.0.0", 895 | "is-path-inside": "^3.0.3", 896 | "js-yaml": "^4.1.0", 897 | "json-stable-stringify-without-jsonify": "^1.0.1", 898 | "levn": "^0.4.1", 899 | "lodash.merge": "^4.6.2", 900 | "minimatch": "^3.1.2", 901 | "natural-compare": "^1.4.0", 902 | "optionator": "^0.9.3", 903 | "strip-ansi": "^6.0.1", 904 | "text-table": "^0.2.0" 905 | }, 906 | "bin": { 907 | "eslint": "bin/eslint.js" 908 | }, 909 | "engines": { 910 | "node": "^12.22.0 || ^14.17.0 || >=16.0.0" 911 | }, 912 | "funding": { 913 | "url": "https://opencollective.com/eslint" 914 | } 915 | }, 916 | "node_modules/eslint-scope": { 917 | "version": "7.2.2", 918 | "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", 919 | "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", 920 | "dev": true, 921 | "dependencies": { 922 | "esrecurse": "^4.3.0", 923 | "estraverse": "^5.2.0" 924 | }, 925 | "engines": { 926 | "node": "^12.22.0 || ^14.17.0 || >=16.0.0" 927 | }, 928 | "funding": { 929 | "url": "https://opencollective.com/eslint" 930 | } 931 | }, 932 | "node_modules/eslint-visitor-keys": { 933 | "version": "3.4.3", 934 | "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", 935 | "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", 936 | "dev": true, 937 | "engines": { 938 | "node": "^12.22.0 || ^14.17.0 || >=16.0.0" 939 | }, 940 | "funding": { 941 | "url": "https://opencollective.com/eslint" 942 | } 943 | }, 944 | "node_modules/eslint/node_modules/globals": { 945 | "version": "13.24.0", 946 | "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", 947 | "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", 948 | "dev": true, 949 | "dependencies": { 950 | "type-fest": "^0.20.2" 951 | }, 952 | "engines": { 953 | "node": ">=8" 954 | }, 955 | "funding": { 956 | "url": "https://github.com/sponsors/sindresorhus" 957 | } 958 | }, 959 | "node_modules/espree": { 960 | "version": "9.6.1", 961 | "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", 962 | "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", 963 | "dev": true, 964 | "dependencies": { 965 | "acorn": "^8.9.0", 
966 | "acorn-jsx": "^5.3.2", 967 | "eslint-visitor-keys": "^3.4.1" 968 | }, 969 | "engines": { 970 | "node": "^12.22.0 || ^14.17.0 || >=16.0.0" 971 | }, 972 | "funding": { 973 | "url": "https://opencollective.com/eslint" 974 | } 975 | }, 976 | "node_modules/esquery": { 977 | "version": "1.6.0", 978 | "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", 979 | "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", 980 | "dev": true, 981 | "dependencies": { 982 | "estraverse": "^5.1.0" 983 | }, 984 | "engines": { 985 | "node": ">=0.10" 986 | } 987 | }, 988 | "node_modules/esrecurse": { 989 | "version": "4.3.0", 990 | "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", 991 | "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", 992 | "dev": true, 993 | "dependencies": { 994 | "estraverse": "^5.2.0" 995 | }, 996 | "engines": { 997 | "node": ">=4.0" 998 | } 999 | }, 1000 | "node_modules/estraverse": { 1001 | "version": "5.3.0", 1002 | "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", 1003 | "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", 1004 | "dev": true, 1005 | "engines": { 1006 | "node": ">=4.0" 1007 | } 1008 | }, 1009 | "node_modules/esutils": { 1010 | "version": "2.0.3", 1011 | "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", 1012 | "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", 1013 | "dev": true, 1014 | "engines": { 1015 | "node": ">=0.10.0" 1016 | } 1017 | }, 1018 | "node_modules/fast-deep-equal": { 1019 | "version": "3.1.3", 1020 | "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", 1021 | "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", 1022 | "dev": true 1023 | }, 1024 | "node_modules/fast-json-stable-stringify": { 1025 | "version": "2.1.0", 1026 | "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", 1027 | "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", 1028 | "dev": true 1029 | }, 1030 | "node_modules/fast-levenshtein": { 1031 | "version": "2.0.6", 1032 | "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", 1033 | "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", 1034 | "dev": true 1035 | }, 1036 | "node_modules/fastq": { 1037 | "version": "1.17.1", 1038 | "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", 1039 | "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", 1040 | "dev": true, 1041 | "dependencies": { 1042 | "reusify": "^1.0.4" 1043 | } 1044 | }, 1045 | "node_modules/file-entry-cache": { 1046 | "version": "6.0.1", 1047 | "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", 1048 | "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", 1049 | "dev": true, 1050 | "dependencies": { 1051 | "flat-cache": "^3.0.4" 1052 | }, 1053 | "engines": { 1054 | "node": "^10.12.0 || >=12.0.0" 1055 | } 1056 | }, 1057 | "node_modules/find-up": { 1058 | "version": "5.0.0", 1059 | "resolved": 
"https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", 1060 | "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", 1061 | "dev": true, 1062 | "dependencies": { 1063 | "locate-path": "^6.0.0", 1064 | "path-exists": "^4.0.0" 1065 | }, 1066 | "engines": { 1067 | "node": ">=10" 1068 | }, 1069 | "funding": { 1070 | "url": "https://github.com/sponsors/sindresorhus" 1071 | } 1072 | }, 1073 | "node_modules/flat-cache": { 1074 | "version": "3.2.0", 1075 | "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", 1076 | "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==", 1077 | "dev": true, 1078 | "dependencies": { 1079 | "flatted": "^3.2.9", 1080 | "keyv": "^4.5.3", 1081 | "rimraf": "^3.0.2" 1082 | }, 1083 | "engines": { 1084 | "node": "^10.12.0 || >=12.0.0" 1085 | } 1086 | }, 1087 | "node_modules/flatted": { 1088 | "version": "3.3.1", 1089 | "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz", 1090 | "integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==", 1091 | "dev": true 1092 | }, 1093 | "node_modules/fs.realpath": { 1094 | "version": "1.0.0", 1095 | "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", 1096 | "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", 1097 | "dev": true 1098 | }, 1099 | "node_modules/glob": { 1100 | "version": "7.2.3", 1101 | "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", 1102 | "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", 1103 | "deprecated": "Glob versions prior to v9 are no longer supported", 1104 | "dev": true, 1105 | "dependencies": { 1106 | "fs.realpath": "^1.0.0", 1107 | "inflight": "^1.0.4", 1108 | "inherits": "2", 1109 | "minimatch": "^3.1.1", 1110 | "once": "^1.3.0", 1111 | "path-is-absolute": "^1.0.0" 1112 | }, 1113 | "engines": { 1114 | "node": "*" 1115 | }, 1116 | "funding": { 1117 | "url": "https://github.com/sponsors/isaacs" 1118 | } 1119 | }, 1120 | "node_modules/glob-parent": { 1121 | "version": "6.0.2", 1122 | "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", 1123 | "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", 1124 | "dev": true, 1125 | "dependencies": { 1126 | "is-glob": "^4.0.3" 1127 | }, 1128 | "engines": { 1129 | "node": ">=10.13.0" 1130 | } 1131 | }, 1132 | "node_modules/graphemer": { 1133 | "version": "1.4.0", 1134 | "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", 1135 | "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", 1136 | "dev": true 1137 | }, 1138 | "node_modules/has-flag": { 1139 | "version": "4.0.0", 1140 | "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", 1141 | "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", 1142 | "dev": true, 1143 | "engines": { 1144 | "node": ">=8" 1145 | } 1146 | }, 1147 | "node_modules/ignore": { 1148 | "version": "5.3.2", 1149 | "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", 1150 | "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", 1151 | "dev": true, 1152 | "engines": { 1153 | "node": ">= 4" 
1154 | } 1155 | }, 1156 | "node_modules/import-fresh": { 1157 | "version": "3.3.0", 1158 | "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", 1159 | "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", 1160 | "dev": true, 1161 | "dependencies": { 1162 | "parent-module": "^1.0.0", 1163 | "resolve-from": "^4.0.0" 1164 | }, 1165 | "engines": { 1166 | "node": ">=6" 1167 | }, 1168 | "funding": { 1169 | "url": "https://github.com/sponsors/sindresorhus" 1170 | } 1171 | }, 1172 | "node_modules/imurmurhash": { 1173 | "version": "0.1.4", 1174 | "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", 1175 | "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", 1176 | "dev": true, 1177 | "engines": { 1178 | "node": ">=0.8.19" 1179 | } 1180 | }, 1181 | "node_modules/inflight": { 1182 | "version": "1.0.6", 1183 | "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", 1184 | "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", 1185 | "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", 1186 | "dev": true, 1187 | "dependencies": { 1188 | "once": "^1.3.0", 1189 | "wrappy": "1" 1190 | } 1191 | }, 1192 | "node_modules/inherits": { 1193 | "version": "2.0.4", 1194 | "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", 1195 | "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", 1196 | "dev": true 1197 | }, 1198 | "node_modules/is-extglob": { 1199 | "version": "2.1.1", 1200 | "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", 1201 | "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", 1202 | "dev": true, 1203 | "engines": { 1204 | "node": ">=0.10.0" 1205 | } 1206 | }, 1207 | "node_modules/is-glob": { 1208 | "version": "4.0.3", 1209 | "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", 1210 | "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", 1211 | "dev": true, 1212 | "dependencies": { 1213 | "is-extglob": "^2.1.1" 1214 | }, 1215 | "engines": { 1216 | "node": ">=0.10.0" 1217 | } 1218 | }, 1219 | "node_modules/is-path-inside": { 1220 | "version": "3.0.3", 1221 | "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", 1222 | "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", 1223 | "dev": true, 1224 | "engines": { 1225 | "node": ">=8" 1226 | } 1227 | }, 1228 | "node_modules/isexe": { 1229 | "version": "2.0.0", 1230 | "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", 1231 | "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", 1232 | "dev": true 1233 | }, 1234 | "node_modules/js-yaml": { 1235 | "version": "4.1.0", 1236 | "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", 1237 | "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", 1238 | "dev": true, 1239 | "dependencies": { 1240 | "argparse": "^2.0.1" 1241 | }, 1242 | "bin": { 1243 | 
"js-yaml": "bin/js-yaml.js" 1244 | } 1245 | }, 1246 | "node_modules/json-buffer": { 1247 | "version": "3.0.1", 1248 | "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", 1249 | "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", 1250 | "dev": true 1251 | }, 1252 | "node_modules/json-schema-traverse": { 1253 | "version": "0.4.1", 1254 | "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", 1255 | "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", 1256 | "dev": true 1257 | }, 1258 | "node_modules/json-stable-stringify-without-jsonify": { 1259 | "version": "1.0.1", 1260 | "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", 1261 | "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", 1262 | "dev": true 1263 | }, 1264 | "node_modules/jwt-decode": { 1265 | "version": "4.0.0", 1266 | "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-4.0.0.tgz", 1267 | "integrity": "sha512-+KJGIyHgkGuIq3IEBNftfhW/LfWhXUIY6OmyVWjliu5KH1y0fw7VQ8YndE2O4qZdMSd9SqbnC8GOcZEy0Om7sA==", 1268 | "engines": { 1269 | "node": ">=18" 1270 | } 1271 | }, 1272 | "node_modules/keyv": { 1273 | "version": "4.5.4", 1274 | "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", 1275 | "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", 1276 | "dev": true, 1277 | "dependencies": { 1278 | "json-buffer": "3.0.1" 1279 | } 1280 | }, 1281 | "node_modules/levn": { 1282 | "version": "0.4.1", 1283 | "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", 1284 | "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", 1285 | "dev": true, 1286 | "dependencies": { 1287 | "prelude-ls": "^1.2.1", 1288 | "type-check": "~0.4.0" 1289 | }, 1290 | "engines": { 1291 | "node": ">= 0.8.0" 1292 | } 1293 | }, 1294 | "node_modules/locate-path": { 1295 | "version": "6.0.0", 1296 | "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", 1297 | "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", 1298 | "dev": true, 1299 | "dependencies": { 1300 | "p-locate": "^5.0.0" 1301 | }, 1302 | "engines": { 1303 | "node": ">=10" 1304 | }, 1305 | "funding": { 1306 | "url": "https://github.com/sponsors/sindresorhus" 1307 | } 1308 | }, 1309 | "node_modules/lodash.merge": { 1310 | "version": "4.6.2", 1311 | "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", 1312 | "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", 1313 | "dev": true 1314 | }, 1315 | "node_modules/minimatch": { 1316 | "version": "3.1.2", 1317 | "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", 1318 | "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", 1319 | "dev": true, 1320 | "dependencies": { 1321 | "brace-expansion": "^1.1.7" 1322 | }, 1323 | "engines": { 1324 | "node": "*" 1325 | } 1326 | }, 1327 | "node_modules/ms": { 1328 | "version": "2.1.3", 1329 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", 1330 | "integrity": 
"sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", 1331 | "dev": true 1332 | }, 1333 | "node_modules/natural-compare": { 1334 | "version": "1.4.0", 1335 | "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", 1336 | "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", 1337 | "dev": true 1338 | }, 1339 | "node_modules/once": { 1340 | "version": "1.4.0", 1341 | "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", 1342 | "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", 1343 | "dev": true, 1344 | "dependencies": { 1345 | "wrappy": "1" 1346 | } 1347 | }, 1348 | "node_modules/optionator": { 1349 | "version": "0.9.4", 1350 | "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", 1351 | "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", 1352 | "dev": true, 1353 | "dependencies": { 1354 | "deep-is": "^0.1.3", 1355 | "fast-levenshtein": "^2.0.6", 1356 | "levn": "^0.4.1", 1357 | "prelude-ls": "^1.2.1", 1358 | "type-check": "^0.4.0", 1359 | "word-wrap": "^1.2.5" 1360 | }, 1361 | "engines": { 1362 | "node": ">= 0.8.0" 1363 | } 1364 | }, 1365 | "node_modules/p-limit": { 1366 | "version": "3.1.0", 1367 | "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", 1368 | "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", 1369 | "dev": true, 1370 | "dependencies": { 1371 | "yocto-queue": "^0.1.0" 1372 | }, 1373 | "engines": { 1374 | "node": ">=10" 1375 | }, 1376 | "funding": { 1377 | "url": "https://github.com/sponsors/sindresorhus" 1378 | } 1379 | }, 1380 | "node_modules/p-locate": { 1381 | "version": "5.0.0", 1382 | "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", 1383 | "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", 1384 | "dev": true, 1385 | "dependencies": { 1386 | "p-limit": "^3.0.2" 1387 | }, 1388 | "engines": { 1389 | "node": ">=10" 1390 | }, 1391 | "funding": { 1392 | "url": "https://github.com/sponsors/sindresorhus" 1393 | } 1394 | }, 1395 | "node_modules/parent-module": { 1396 | "version": "1.0.1", 1397 | "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", 1398 | "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", 1399 | "dev": true, 1400 | "dependencies": { 1401 | "callsites": "^3.0.0" 1402 | }, 1403 | "engines": { 1404 | "node": ">=6" 1405 | } 1406 | }, 1407 | "node_modules/path-exists": { 1408 | "version": "4.0.0", 1409 | "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", 1410 | "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", 1411 | "dev": true, 1412 | "engines": { 1413 | "node": ">=8" 1414 | } 1415 | }, 1416 | "node_modules/path-is-absolute": { 1417 | "version": "1.0.1", 1418 | "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", 1419 | "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", 1420 | "dev": true, 1421 | "engines": { 1422 | "node": ">=0.10.0" 1423 | } 1424 | }, 1425 | "node_modules/path-key": { 1426 | "version": "3.1.1", 1427 | "resolved": 
"https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", 1428 | "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", 1429 | "dev": true, 1430 | "engines": { 1431 | "node": ">=8" 1432 | } 1433 | }, 1434 | "node_modules/prelude-ls": { 1435 | "version": "1.2.1", 1436 | "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", 1437 | "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", 1438 | "dev": true, 1439 | "engines": { 1440 | "node": ">= 0.8.0" 1441 | } 1442 | }, 1443 | "node_modules/prettier": { 1444 | "version": "3.5.1", 1445 | "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.1.tgz", 1446 | "integrity": "sha512-hPpFQvHwL3Qv5AdRvBFMhnKo4tYxp0ReXiPn2bxkiohEX6mBeBwEpBSQTkD458RaaDKQMYSp4hX4UtfUTA5wDw==", 1447 | "bin": { 1448 | "prettier": "bin/prettier.cjs" 1449 | }, 1450 | "engines": { 1451 | "node": ">=14" 1452 | }, 1453 | "funding": { 1454 | "url": "https://github.com/prettier/prettier?sponsor=1" 1455 | } 1456 | }, 1457 | "node_modules/punycode": { 1458 | "version": "2.3.1", 1459 | "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", 1460 | "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", 1461 | "dev": true, 1462 | "engines": { 1463 | "node": ">=6" 1464 | } 1465 | }, 1466 | "node_modules/queue-microtask": { 1467 | "version": "1.2.3", 1468 | "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", 1469 | "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", 1470 | "dev": true, 1471 | "funding": [ 1472 | { 1473 | "type": "github", 1474 | "url": "https://github.com/sponsors/feross" 1475 | }, 1476 | { 1477 | "type": "patreon", 1478 | "url": "https://www.patreon.com/feross" 1479 | }, 1480 | { 1481 | "type": "consulting", 1482 | "url": "https://feross.org/support" 1483 | } 1484 | ] 1485 | }, 1486 | "node_modules/resolve-from": { 1487 | "version": "4.0.0", 1488 | "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", 1489 | "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", 1490 | "dev": true, 1491 | "engines": { 1492 | "node": ">=4" 1493 | } 1494 | }, 1495 | "node_modules/reusify": { 1496 | "version": "1.0.4", 1497 | "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", 1498 | "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", 1499 | "dev": true, 1500 | "engines": { 1501 | "iojs": ">=1.0.0", 1502 | "node": ">=0.10.0" 1503 | } 1504 | }, 1505 | "node_modules/rimraf": { 1506 | "version": "3.0.2", 1507 | "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", 1508 | "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", 1509 | "deprecated": "Rimraf versions prior to v4 are no longer supported", 1510 | "dev": true, 1511 | "dependencies": { 1512 | "glob": "^7.1.3" 1513 | }, 1514 | "bin": { 1515 | "rimraf": "bin.js" 1516 | }, 1517 | "funding": { 1518 | "url": "https://github.com/sponsors/isaacs" 1519 | } 1520 | }, 1521 | "node_modules/run-parallel": { 1522 | "version": "1.2.0", 1523 | "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", 1524 | "integrity": 
"sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", 1525 | "dev": true, 1526 | "funding": [ 1527 | { 1528 | "type": "github", 1529 | "url": "https://github.com/sponsors/feross" 1530 | }, 1531 | { 1532 | "type": "patreon", 1533 | "url": "https://www.patreon.com/feross" 1534 | }, 1535 | { 1536 | "type": "consulting", 1537 | "url": "https://feross.org/support" 1538 | } 1539 | ], 1540 | "dependencies": { 1541 | "queue-microtask": "^1.2.2" 1542 | } 1543 | }, 1544 | "node_modules/shebang-command": { 1545 | "version": "2.0.0", 1546 | "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", 1547 | "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", 1548 | "dev": true, 1549 | "dependencies": { 1550 | "shebang-regex": "^3.0.0" 1551 | }, 1552 | "engines": { 1553 | "node": ">=8" 1554 | } 1555 | }, 1556 | "node_modules/shebang-regex": { 1557 | "version": "3.0.0", 1558 | "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", 1559 | "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", 1560 | "dev": true, 1561 | "engines": { 1562 | "node": ">=8" 1563 | } 1564 | }, 1565 | "node_modules/strip-ansi": { 1566 | "version": "6.0.1", 1567 | "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", 1568 | "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", 1569 | "dev": true, 1570 | "dependencies": { 1571 | "ansi-regex": "^5.0.1" 1572 | }, 1573 | "engines": { 1574 | "node": ">=8" 1575 | } 1576 | }, 1577 | "node_modules/strip-json-comments": { 1578 | "version": "3.1.1", 1579 | "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", 1580 | "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", 1581 | "dev": true, 1582 | "engines": { 1583 | "node": ">=8" 1584 | }, 1585 | "funding": { 1586 | "url": "https://github.com/sponsors/sindresorhus" 1587 | } 1588 | }, 1589 | "node_modules/supports-color": { 1590 | "version": "7.2.0", 1591 | "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", 1592 | "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", 1593 | "dev": true, 1594 | "dependencies": { 1595 | "has-flag": "^4.0.0" 1596 | }, 1597 | "engines": { 1598 | "node": ">=8" 1599 | } 1600 | }, 1601 | "node_modules/text-table": { 1602 | "version": "0.2.0", 1603 | "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", 1604 | "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", 1605 | "dev": true 1606 | }, 1607 | "node_modules/type-check": { 1608 | "version": "0.4.0", 1609 | "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", 1610 | "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", 1611 | "dev": true, 1612 | "dependencies": { 1613 | "prelude-ls": "^1.2.1" 1614 | }, 1615 | "engines": { 1616 | "node": ">= 0.8.0" 1617 | } 1618 | }, 1619 | "node_modules/type-fest": { 1620 | "version": "0.20.2", 1621 | "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", 1622 | "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", 1623 
| "dev": true, 1624 | "engines": { 1625 | "node": ">=10" 1626 | }, 1627 | "funding": { 1628 | "url": "https://github.com/sponsors/sindresorhus" 1629 | } 1630 | }, 1631 | "node_modules/typescript": { 1632 | "version": "5.6.2", 1633 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.2.tgz", 1634 | "integrity": "sha512-NW8ByodCSNCwZeghjN3o+JX5OFH0Ojg6sadjEKY4huZ52TqbJTJnDo5+Tw98lSy63NZvi4n+ez5m2u5d4PkZyw==", 1635 | "dev": true, 1636 | "bin": { 1637 | "tsc": "bin/tsc", 1638 | "tsserver": "bin/tsserver" 1639 | }, 1640 | "engines": { 1641 | "node": ">=14.17" 1642 | } 1643 | }, 1644 | "node_modules/undici-types": { 1645 | "version": "6.21.0", 1646 | "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", 1647 | "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", 1648 | "dev": true 1649 | }, 1650 | "node_modules/uri-js": { 1651 | "version": "4.4.1", 1652 | "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", 1653 | "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", 1654 | "dev": true, 1655 | "dependencies": { 1656 | "punycode": "^2.1.0" 1657 | } 1658 | }, 1659 | "node_modules/which": { 1660 | "version": "2.0.2", 1661 | "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", 1662 | "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", 1663 | "dev": true, 1664 | "dependencies": { 1665 | "isexe": "^2.0.0" 1666 | }, 1667 | "bin": { 1668 | "node-which": "bin/node-which" 1669 | }, 1670 | "engines": { 1671 | "node": ">= 8" 1672 | } 1673 | }, 1674 | "node_modules/word-wrap": { 1675 | "version": "1.2.5", 1676 | "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", 1677 | "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", 1678 | "dev": true, 1679 | "engines": { 1680 | "node": ">=0.10.0" 1681 | } 1682 | }, 1683 | "node_modules/wrappy": { 1684 | "version": "1.0.2", 1685 | "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", 1686 | "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", 1687 | "dev": true 1688 | }, 1689 | "node_modules/yocto-queue": { 1690 | "version": "0.1.0", 1691 | "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", 1692 | "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", 1693 | "dev": true, 1694 | "engines": { 1695 | "node": ">=10" 1696 | }, 1697 | "funding": { 1698 | "url": "https://github.com/sponsors/sindresorhus" 1699 | } 1700 | } 1701 | } 1702 | } 1703 | -------------------------------------------------------------------------------- /example/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "uses-component", 3 | "private": true, 4 | "version": "0.0.0", 5 | "scripts": { 6 | "dev": "convex dev --live-component-sources --typecheck-components", 7 | "logs": "convex logs", 8 | "lint": "tsc -p convex && eslint convex" 9 | }, 10 | "dependencies": { 11 | "@convex-dev/workflow": "file:..", 12 | "convex": "^1.16.5" 13 | }, 14 | "devDependencies": { 15 | "@types/node": "^22.14.0", 16 | "convex-test": "^0.0.36", 17 | "eslint": "^8.55.0", 18 | "typescript": "^5.2.2" 19 | } 20 | } 21 | 
-------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@convex-dev/workflow", 3 | "version": "0.2.4", 4 | "description": "Convex component for durably executing workflows.", 5 | "keywords": [ 6 | "convex", 7 | "workflow", 8 | "durable-execution" 9 | ], 10 | "homepage": "https://github.com/get-convex/workflow", 11 | "repository": "github:get-convex/workflow", 12 | "bugs": "https://github.com/get-convex/workflow/issues", 13 | "license": "Apache-2.0", 14 | "type": "module", 15 | "scripts": { 16 | "build": "npm run build:esm && npm run build:cjs", 17 | "build:esm": "tsc --project ./esm.json && echo '{\\n \"type\": \"module\"\\n}' > dist/esm/package.json", 18 | "build:cjs": "tsc --project ./commonjs.json && echo '{\\n \"type\": \"commonjs\"\\n}' > dist/commonjs/package.json", 19 | "typecheck": "tsc --noEmit", 20 | "test": "vitest run", 21 | "alpha": "rm -rf dist && npm run build && npm run test && npm version prerelease --preid alpha && npm publish --tag alpha && git push --tags", 22 | "release": "rm -rf dist && npm run build && npm run test && npm version patch && npm publish && git push --tags", 23 | "prepare": "npm run build" 24 | }, 25 | "files": [ 26 | "dist", 27 | "src" 28 | ], 29 | "exports": { 30 | "./package.json": "./package.json", 31 | ".": { 32 | "import": { 33 | "@convex-dev/component-source": "./src/client/index.ts", 34 | "types": "./dist/esm/client/index.d.ts", 35 | "default": "./dist/esm/client/index.js" 36 | }, 37 | "require": { 38 | "@convex-dev/component-source": "./src/client/index.ts", 39 | "types": "./dist/commonjs/client/index.d.ts", 40 | "default": "./dist/commonjs/client/index.js" 41 | } 42 | }, 43 | "./convex.config": { 44 | "import": { 45 | "@convex-dev/component-source": "./src/component/convex.config.ts", 46 | "types": "./dist/esm/component/convex.config.d.ts", 47 | "default": "./dist/esm/component/convex.config.js" 48 | } 49 | } 50 | }, 51 | "peerDependencies": { 52 | "@convex-dev/workpool": "^0.2.9", 53 | "convex": ">=1.21.0 <1.35.0", 54 | "convex-helpers": "^0.1.77" 55 | }, 56 | "dependencies": { 57 | "async-channel": "^0.2.0" 58 | }, 59 | "devDependencies": { 60 | "@eslint/eslintrc": "^3.1.0", 61 | "@eslint/js": "^9.9.1", 62 | "@types/node": "^18.19.86", 63 | "@typescript-eslint/eslint-plugin": "^7.0.0", 64 | "@typescript-eslint/parser": "^7.0.0", 65 | "eslint": "^8.57.0", 66 | "globals": "^15.9.0", 67 | "openai": "^4.54.0", 68 | "prettier": "3.2.5", 69 | "typescript": "~5.5.0", 70 | "typescript-eslint": "^7.0.0", 71 | "vitest": "^3.1.1" 72 | }, 73 | "main": "./dist/commonjs/client/index.js", 74 | "types": "./dist/commonjs/client/index.d.ts", 75 | "module": "./dist/esm/client/index.js" 76 | } 77 | -------------------------------------------------------------------------------- /src/client/environment.ts: -------------------------------------------------------------------------------- 1 | import { OriginalEnv } from "./step.js"; 2 | import { StepContext } from "./stepContext.js"; 3 | 4 | export function setupEnvironment(_ctx: StepContext): OriginalEnv { 5 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 6 | const global = globalThis as any; 7 | 8 | global.Math.random = () => { 9 | throw new Error("Math.random() isn't currently supported within workflows"); 10 | }; 11 | 12 | const originalDate = global.Date; 13 | delete global.Date; 14 | 15 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 16 | 
function Date(this: any, ...args: any[]) { 17 | // `Date()` was called directly, not as a constructor. 18 | if (!(this instanceof Date)) { 19 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 20 | const date = new (Date as any)(); 21 | return date.toString(); 22 | } 23 | if (args.length === 0) { 24 | const unixTsMs = Date.now(); 25 | return new originalDate(unixTsMs); 26 | } 27 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 28 | return new (originalDate as any)(...args); 29 | } 30 | Date.now = function () { 31 | throw new Error("Date.now() isn't currently supported within workflows."); 32 | }; 33 | Date.parse = originalDate.parse; 34 | Date.UTC = originalDate.UTC; 35 | Date.prototype = originalDate.prototype; 36 | Date.prototype.constructor = Date; 37 | 38 | global.Date = Date; 39 | 40 | delete global.process; 41 | 42 | delete global.Crypto; 43 | delete global.crypto; 44 | delete global.CryptoKey; 45 | delete global.SubtleCrypto; 46 | 47 | global.fetch = (_input: RequestInfo | URL, _init?: RequestInit) => { 48 | throw new Error( 49 | `Fetch isn't currently supported within workflows. Perform the fetch within an action and call it with step.runAction().`, 50 | ); 51 | }; 52 | return { Date: originalDate }; 53 | } 54 | -------------------------------------------------------------------------------- /src/client/index.ts: -------------------------------------------------------------------------------- 1 | import { 2 | createFunctionHandle, 3 | FunctionArgs, 4 | FunctionReference, 5 | FunctionReturnType, 6 | FunctionVisibility, 7 | GenericDataModel, 8 | GenericMutationCtx, 9 | GenericQueryCtx, 10 | getFunctionName, 11 | RegisteredMutation, 12 | ReturnValueForOptionalValidator, 13 | } from "convex/server"; 14 | import { ObjectType, PropertyValidators, Validator } from "convex/values"; 15 | import { api } from "../component/_generated/api.js"; 16 | import { OnCompleteArgs, OpaqueIds, UseApi, WorkflowId } from "../types.js"; 17 | import { workflowMutation } from "./workflowMutation.js"; 18 | import { 19 | NameOption, 20 | RetryOption, 21 | SchedulerOptions, 22 | WorkpoolOptions, 23 | WorkpoolRetryOptions, 24 | } from "@convex-dev/workpool"; 25 | export { vWorkflowId } from "../types.js"; 26 | import { Step } from "../component/schema.js"; 27 | 28 | export type { WorkflowId }; 29 | 30 | export type CallbackOptions = { 31 | /** 32 | * A mutation to run after the function succeeds, fails, or is canceled. 33 | * The context type is for your use, feel free to provide a validator for it. 34 | * e.g. 35 | * ```ts 36 | * export const completion = internalMutation({ 37 | * args: { 38 | * workId: workIdValidator, 39 | * context: v.any(), 40 | * result: resultValidator, 41 | * }, 42 | * handler: async (ctx, args) => { 43 | * console.log(args.result, "Got Context back -> ", args.context, Date.now() - args.context); 44 | * }, 45 | * }); 46 | * ``` 47 | */ 48 | onComplete?: FunctionReference< 49 | "mutation", 50 | FunctionVisibility, 51 | OnCompleteArgs 52 | > | null; 53 | 54 | /** 55 | * A context object to pass to the `onComplete` mutation. 56 | * Useful for passing data from the enqueue site to the onComplete site. 57 | */ 58 | context?: unknown; 59 | }; 60 | 61 | export type WorkflowStep = { 62 | /** 63 | * The ID of the workflow currently running. 64 | */ 65 | workflowId: string; 66 | /** 67 | * Run a query with the given name and arguments. 68 | * 69 | * @param query - The query to run, like `internal.index.exampleQuery`. 
70 | * @param args - The arguments to the query function. 71 | * @param opts - Options for scheduling and naming the query. 72 | */ 73 | runQuery>( 74 | query: Query, 75 | args: FunctionArgs, 76 | opts?: NameOption & SchedulerOptions, 77 | ): Promise>; 78 | 79 | /** 80 | * Run a mutation with the given name and arguments. 81 | * 82 | * @param mutation - The mutation to run, like `internal.index.exampleMutation`. 83 | * @param args - The arguments to the mutation function. 84 | * @param opts - Options for scheduling and naming the mutation. 85 | */ 86 | runMutation>( 87 | mutation: Mutation, 88 | args: FunctionArgs, 89 | opts?: NameOption & SchedulerOptions, 90 | ): Promise>; 91 | 92 | /** 93 | * Run an action with the given name and arguments. 94 | * 95 | * @param action - The action to run, like `internal.index.exampleAction`. 96 | * @param args - The arguments to the action function. 97 | * @param opts - Options for retrying, scheduling and naming the action. 98 | */ 99 | runAction>( 100 | action: Action, 101 | args: FunctionArgs, 102 | opts?: NameOption & SchedulerOptions & RetryOption, 103 | ): Promise>; 104 | }; 105 | 106 | export type WorkflowDefinition< 107 | ArgsValidator extends PropertyValidators, 108 | ReturnsValidator extends Validator | void, 109 | ReturnValue extends ReturnValueForOptionalValidator = any, 110 | > = { 111 | args?: ArgsValidator; 112 | handler: ( 113 | step: WorkflowStep, 114 | args: ObjectType, 115 | ) => Promise; 116 | returns?: ReturnsValidator; 117 | workpoolOptions?: WorkpoolRetryOptions; 118 | }; 119 | 120 | export type WorkflowStatus = 121 | | { type: "inProgress"; running: OpaqueIds[] } 122 | | { type: "completed" } 123 | | { type: "canceled" } 124 | | { type: "failed"; error: string }; 125 | 126 | export class WorkflowManager { 127 | constructor( 128 | private component: UseApi, 129 | public options?: { 130 | workpoolOptions: WorkpoolOptions; 131 | }, 132 | ) {} 133 | 134 | /** 135 | * Define a new workflow. 136 | * 137 | * @param workflow - The workflow definition. 138 | * @returns The workflow mutation. 139 | */ 140 | define< 141 | ArgsValidator extends PropertyValidators, 142 | ReturnsValidator extends Validator | void, 143 | ReturnValue extends ReturnValueForOptionalValidator = any, 144 | >( 145 | workflow: WorkflowDefinition, 146 | ): RegisteredMutation<"internal", ObjectType, void> { 147 | return workflowMutation( 148 | this.component, 149 | workflow, 150 | this.options?.workpoolOptions, 151 | ); 152 | } 153 | 154 | /** 155 | * Kick off a defined workflow. 156 | * 157 | * @param ctx - The Convex context. 158 | * @param workflow - The workflow to start (e.g. `internal.index.exampleWorkflow`). 159 | * @param args - The workflow arguments. 160 | * @returns The workflow ID. 161 | */ 162 | async start>( 163 | ctx: RunMutationCtx, 164 | workflow: F, 165 | args: FunctionArgs, 166 | options?: CallbackOptions & { 167 | /** 168 | * By default, during creation the workflow will be initiated immediately. 169 | * The benefit is that you catch errors earlier (e.g. passing a bad 170 | * workflow reference or catch arg validation). 171 | * 172 | * If you set this to true, the workflow will be created but the run 173 | * will be scheduled to run asynchronously. 174 | * You can use this to make `start` faster (you still get a workflowId). 175 | */ 176 | validateAsync?: boolean; 177 | }, 178 | ): Promise { 179 | const handle = await createFunctionHandle(workflow); 180 | const onComplete = options?.onComplete 181 | ? 
{ 182 | fnHandle: await createFunctionHandle(options.onComplete), 183 | context: options.context, 184 | } 185 | : undefined; 186 | const workflowId = await ctx.runMutation(this.component.workflow.create, { 187 | workflowName: getFunctionName(workflow), 188 | workflowHandle: handle, 189 | workflowArgs: args, 190 | maxParallelism: this.options?.workpoolOptions?.maxParallelism, 191 | onComplete, 192 | validateAsync: options?.validateAsync, 193 | }); 194 | return workflowId as unknown as WorkflowId; 195 | } 196 | 197 | /** 198 | * Get a workflow's status. 199 | * 200 | * @param ctx - The Convex context. 201 | * @param workflowId - The workflow ID. 202 | * @returns The workflow status. 203 | */ 204 | async status( 205 | ctx: RunQueryCtx, 206 | workflowId: WorkflowId, 207 | ): Promise { 208 | const { workflow, inProgress } = await ctx.runQuery( 209 | this.component.workflow.getStatus, 210 | { workflowId }, 211 | ); 212 | const running = inProgress.map((entry) => entry.step); 213 | switch (workflow.runResult?.kind) { 214 | case undefined: 215 | return { type: "inProgress", running }; 216 | case "canceled": 217 | return { type: "canceled" }; 218 | case "failed": 219 | return { type: "failed", error: workflow.runResult.error }; 220 | case "success": 221 | return { type: "completed" }; 222 | } 223 | } 224 | 225 | /** 226 | * Cancel a running workflow. 227 | * 228 | * @param ctx - The Convex context. 229 | * @param workflowId - The workflow ID. 230 | */ 231 | async cancel(ctx: RunMutationCtx, workflowId: WorkflowId) { 232 | await ctx.runMutation(this.component.workflow.cancel, { 233 | workflowId, 234 | }); 235 | } 236 | 237 | /** 238 | * Clean up a completed workflow's storage. 239 | * 240 | * @param ctx - The Convex context. 241 | * @param workflowId - The workflow ID. 242 | * @returns - Whether the workflow's state was cleaned up. 
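   * e.g. (a sketch, assuming `workflow` is your `WorkflowManager` instance and
   * the workflow has already finished; the surrounding mutation is illustrative):
   * ```ts
   * const status = await workflow.status(ctx, workflowId);
   * if (status.type !== "inProgress") {
   *   await workflow.cleanup(ctx, workflowId);
   * }
   * ```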
243 | */ 244 | async cleanup(ctx: RunMutationCtx, workflowId: WorkflowId): Promise { 245 | return await ctx.runMutation(this.component.workflow.cleanup, { 246 | workflowId, 247 | }); 248 | } 249 | } 250 | 251 | type RunQueryCtx = { 252 | runQuery: GenericQueryCtx["runQuery"]; 253 | }; 254 | type RunMutationCtx = { 255 | runMutation: GenericMutationCtx["runMutation"]; 256 | }; 257 | -------------------------------------------------------------------------------- /src/client/step.ts: -------------------------------------------------------------------------------- 1 | import { BaseChannel } from "async-channel"; 2 | import { 3 | GenericMutationCtx, 4 | GenericDataModel, 5 | FunctionType, 6 | FunctionReference, 7 | createFunctionHandle, 8 | } from "convex/server"; 9 | import { convexToJson } from "convex/values"; 10 | import { 11 | JournalEntry, 12 | journalEntrySize, 13 | Step, 14 | valueSize, 15 | } from "../component/schema.js"; 16 | import { api } from "../component/_generated/api.js"; 17 | import { UseApi } from "../types.js"; 18 | import { 19 | RetryBehavior, 20 | WorkpoolOptions, 21 | RunResult, 22 | SchedulerOptions, 23 | } from "@convex-dev/workpool"; 24 | 25 | export type OriginalEnv = { 26 | Date: { 27 | now: () => number; 28 | }; 29 | }; 30 | 31 | export type WorkerResult = 32 | | { type: "handlerDone"; runResult: RunResult } 33 | | { type: "executorBlocked" }; 34 | 35 | export type StepRequest = { 36 | name: string; 37 | functionType: FunctionType; 38 | function: FunctionReference; 39 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 40 | args: any; 41 | retry: RetryBehavior | boolean | undefined; 42 | schedulerOptions: SchedulerOptions; 43 | 44 | resolve: (result: unknown) => void; 45 | reject: (error: unknown) => void; 46 | }; 47 | 48 | const MAX_JOURNAL_SIZE = 1 << 20; 49 | 50 | export class StepExecutor { 51 | private journalEntrySize: number; 52 | 53 | constructor( 54 | private workflowId: string, 55 | private generationNumber: number, 56 | private ctx: GenericMutationCtx, 57 | private component: UseApi, 58 | private journalEntries: Array, 59 | private receiver: BaseChannel, 60 | private originalEnv: OriginalEnv, 61 | private workpoolOptions: WorkpoolOptions | undefined, 62 | ) { 63 | this.journalEntrySize = journalEntries.reduce( 64 | (size, entry) => size + journalEntrySize(entry), 65 | 0, 66 | ); 67 | } 68 | async run(): Promise { 69 | // eslint-disable-next-line no-constant-condition 70 | while (true) { 71 | const message = await this.receiver.get(); 72 | // In the future we can correlate the calls to entries by handle, args, 73 | // etc. instead of just ordering. As is, the fn order can't change. 74 | const entry = this.journalEntries.shift(); 75 | // why not to run queries inline: they fetch too much data internally 76 | if (entry) { 77 | this.completeMessage(message, entry); 78 | continue; 79 | } 80 | // TODO: is this too late? 
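      // No journal entry left for this request: replay has caught up with the
      // live execution. Enforce the journal size cap, then drain any step
      // requests already buffered on the channel so they all start in this
      // poll, and report that the executor is now blocked on outstanding steps.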
81 | if (this.journalEntrySize > MAX_JOURNAL_SIZE) { 82 | message.reject(journalSizeError(this.journalEntrySize)); 83 | continue; 84 | } 85 | const messages = [message]; 86 | const size = this.receiver.bufferSize; 87 | for (let i = 0; i < size; i++) { 88 | const message = await this.receiver.get(); 89 | messages.push(message); 90 | } 91 | for (const message of messages) { 92 | await this.startStep(message); 93 | } 94 | return { 95 | type: "executorBlocked", 96 | }; 97 | } 98 | } 99 | 100 | completeMessage(message: StepRequest, entry: JournalEntry) { 101 | if (entry.step.inProgress) { 102 | throw new Error( 103 | `Assertion failed: not blocked but have in-progress journal entry`, 104 | ); 105 | } 106 | const stepArgsJson = JSON.stringify(convexToJson(entry.step.args)); 107 | const messageArgsJson = JSON.stringify(convexToJson(message.args)); 108 | if (stepArgsJson !== messageArgsJson) { 109 | throw new Error( 110 | `Journal entry mismatch: ${entry.step.args} !== ${message.args}`, 111 | ); 112 | } 113 | if (entry.step.runResult === undefined) { 114 | throw new Error( 115 | `Assertion failed: no outcome for completed function call`, 116 | ); 117 | } 118 | switch (entry.step.runResult.kind) { 119 | case "success": 120 | message.resolve(entry.step.runResult.returnValue); 121 | break; 122 | case "failed": 123 | message.reject(new Error(entry.step.runResult.error)); 124 | break; 125 | case "canceled": 126 | message.reject(new Error("Canceled")); 127 | break; 128 | } 129 | } 130 | 131 | async startStep(message: StepRequest): Promise { 132 | const step = { 133 | inProgress: true, 134 | name: message.name, 135 | functionType: message.functionType, 136 | handle: await createFunctionHandle(message.function), 137 | args: message.args, 138 | argsSize: valueSize(message.args), 139 | outcome: undefined, 140 | startedAt: this.originalEnv.Date.now(), 141 | completedAt: undefined, 142 | }; 143 | const entry = (await this.ctx.runMutation( 144 | this.component.journal.startStep, 145 | { 146 | workflowId: this.workflowId, 147 | generationNumber: this.generationNumber, 148 | step, 149 | name: message.name, 150 | retry: message.retry, 151 | workpoolOptions: this.workpoolOptions, 152 | schedulerOptions: message.schedulerOptions, 153 | }, 154 | )) as JournalEntry; 155 | this.journalEntrySize += journalEntrySize(entry); 156 | return entry; 157 | } 158 | } 159 | 160 | function journalSizeError(size: number): Error { 161 | const lines = [ 162 | `Workflow journal size limit exceeded (${size} bytes > ${MAX_JOURNAL_SIZE} bytes).`, 163 | "Consider breaking up the workflow into multiple runs, using smaller step \ 164 | arguments or return values, or using fewer steps.", 165 | ]; 166 | return new Error(lines.join("\n")); 167 | } 168 | -------------------------------------------------------------------------------- /src/client/stepContext.ts: -------------------------------------------------------------------------------- 1 | import { BaseChannel } from "async-channel"; 2 | import { 3 | FunctionReference, 4 | FunctionArgs, 5 | FunctionReturnType, 6 | createFunctionHandle, 7 | getFunctionName, 8 | FunctionType, 9 | } from "convex/server"; 10 | import { WorkflowStep } from "./index.js"; 11 | import { StepRequest } from "./step.js"; 12 | import { 13 | NameOption, 14 | RetryOption, 15 | SchedulerOptions, 16 | } from "@convex-dev/workpool"; 17 | 18 | export class StepContext implements WorkflowStep { 19 | constructor( 20 | public workflowId: string, 21 | private sender: BaseChannel, 22 | ) {} 23 | 24 | async runQuery>( 25 | 
query: Query, 26 | args: FunctionArgs, 27 | opts?: NameOption & SchedulerOptions, 28 | ): Promise> { 29 | return this.runFunction("query", query, args, opts); 30 | } 31 | 32 | async runMutation>( 33 | mutation: Mutation, 34 | args: FunctionArgs, 35 | opts?: NameOption & SchedulerOptions, 36 | ): Promise> { 37 | return this.runFunction("mutation", mutation, args, opts); 38 | } 39 | 40 | async runAction>( 41 | action: Action, 42 | args: FunctionArgs, 43 | opts?: NameOption & SchedulerOptions & RetryOption, 44 | ): Promise> { 45 | return this.runFunction("action", action, args, opts); 46 | } 47 | 48 | private async runFunction>( 49 | functionType: FunctionType, 50 | f: F, 51 | args: any, 52 | opts?: NameOption & SchedulerOptions & RetryOption, 53 | ): Promise { 54 | let send: any; 55 | const { name, ...rest } = opts ?? {}; 56 | const { retry, ...schedulerOptions } = rest; 57 | const p = new Promise((resolve, reject) => { 58 | send = this.sender.push({ 59 | name: name ?? getFunctionName(f), 60 | functionType, 61 | function: f, 62 | args, 63 | retry, 64 | schedulerOptions, 65 | resolve, 66 | reject, 67 | }); 68 | }); 69 | await send; 70 | return p; 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /src/client/validator.ts: -------------------------------------------------------------------------------- 1 | import { GenericValidator, PropertyValidators, v, Value } from "convex/values"; 2 | 3 | export function checkArgs( 4 | args: Value, 5 | validator: PropertyValidators | undefined, 6 | ) { 7 | if (!validator) { 8 | return; 9 | } 10 | const result = check(args, v.object(validator)); 11 | if (!result.ok) { 12 | throw new Error(result.message); 13 | } 14 | } 15 | 16 | function check( 17 | value: Value, 18 | validator: GenericValidator, 19 | ): { ok: true } | { ok: false; message: string } { 20 | switch (validator.kind) { 21 | case "id": { 22 | if (typeof value !== "string") { 23 | return { 24 | ok: false, 25 | message: `v.id() failed: Expected an ID, received: ${value}`, 26 | }; 27 | } 28 | break; 29 | } 30 | case "string": { 31 | if (typeof value !== "string") { 32 | return { 33 | ok: false, 34 | message: `v.string() failed: Expected a string, received: ${value}`, 35 | }; 36 | } 37 | break; 38 | } 39 | case "float64": { 40 | if (typeof value !== "number") { 41 | return { 42 | ok: false, 43 | message: `v.float64() failed: Expected a number, received: ${value}`, 44 | }; 45 | } 46 | break; 47 | } 48 | case "int64": { 49 | if (typeof value !== "bigint") { 50 | return { 51 | ok: false, 52 | message: `v.int64() failed: Expected a number, received: ${value}`, 53 | }; 54 | } 55 | break; 56 | } 57 | case "boolean": { 58 | if (typeof value !== "boolean") { 59 | return { 60 | ok: false, 61 | message: `v.boolean() failed: Expected a boolean, received: ${value}`, 62 | }; 63 | } 64 | break; 65 | } 66 | case "null": { 67 | if (value !== null) { 68 | return { 69 | ok: false, 70 | message: `v.null() failed: Expected null, received: ${value}`, 71 | }; 72 | } 73 | break; 74 | } 75 | case "any": { 76 | break; 77 | } 78 | case "literal": { 79 | if (value !== validator.value) { 80 | return { 81 | ok: false, 82 | message: `v.literal(${validator.value}) failed: Expected ${validator.value}, received: ${value}`, 83 | }; 84 | } 85 | break; 86 | } 87 | case "bytes": { 88 | if (!(value instanceof ArrayBuffer)) { 89 | return { 90 | ok: false, 91 | message: `v.bytes() failed: Expected an ArrayBuffer, received: ${value}`, 92 | }; 93 | } 94 | break; 95 | } 96 | case "object": { 
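      // The value must be a plain ("simple") object; each declared field is then
      // checked recursively, and a missing field is only an error when its
      // validator is required (isOptional === "required").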
97 | if (!isSimpleObject(value)) { 98 | return { 99 | ok: false, 100 | message: `v.object() failed: Expected a simple object, received: ${value}`, 101 | }; 102 | } 103 | for (const [key, fieldValidator] of Object.entries(validator.fields)) { 104 | const fieldValue = (value as any)[key]; 105 | if (fieldValue === undefined) { 106 | if (fieldValidator.isOptional === "required") { 107 | return { 108 | ok: false, 109 | message: `v.object() failed: Expected field "${key}", received: ${value}`, 110 | }; 111 | } 112 | } else { 113 | const result = check(fieldValue, fieldValidator); 114 | if (!result.ok) { 115 | return { 116 | ok: false, 117 | message: `v.object() failed: ${result.message}`, 118 | }; 119 | } 120 | } 121 | } 122 | break; 123 | } 124 | case "array": { 125 | if (!Array.isArray(value)) { 126 | return { 127 | ok: false, 128 | message: `v.array() failed: Expected an array, received: ${value}`, 129 | }; 130 | } 131 | for (const element of value) { 132 | const result = check(element, validator.element); 133 | if (!result.ok) { 134 | return { ok: false, message: `v.array() failed: ${result.message}` }; 135 | } 136 | } 137 | break; 138 | } 139 | case "record": { 140 | if (!isSimpleObject(value)) { 141 | return { 142 | ok: false, 143 | message: `v.record() failed: Expected a simple object, received: ${value}`, 144 | }; 145 | } 146 | for (const [field, fieldValue] of Object.entries(value as any)) { 147 | const keyResult = check(field, validator.key); 148 | if (!keyResult.ok) { 149 | return { 150 | ok: false, 151 | message: `v.record() failed: ${keyResult.message}`, 152 | }; 153 | } 154 | const valueResult = check(fieldValue as any, validator.value); 155 | if (!valueResult.ok) { 156 | return { 157 | ok: false, 158 | message: `v.record() failed: ${valueResult.message}`, 159 | }; 160 | } 161 | } 162 | break; 163 | } 164 | case "union": { 165 | let anyOk = false; 166 | for (const member of validator.members) { 167 | const result = check(value, member); 168 | if (result.ok) { 169 | anyOk = true; 170 | break; 171 | } 172 | } 173 | if (!anyOk) { 174 | return { 175 | ok: false, 176 | message: `v.union() failed: Expected one of: ${validator.members.map((m) => m.kind).join(", ")}, received: ${value}`, 177 | }; 178 | } 179 | break; 180 | } 181 | default: { 182 | throw new Error(`Unknown validator kind`); 183 | } 184 | } 185 | return { ok: true }; 186 | } 187 | 188 | function isSimpleObject(value: unknown) { 189 | const isObject = typeof value === "object"; 190 | const prototype = Object.getPrototypeOf(value); 191 | const isSimple = 192 | prototype === null || 193 | prototype === Object.prototype || 194 | // Objects generated from other contexts (e.g. across Node.js `vm` modules) will not satisfy the previous 195 | // conditions but are still simple objects. 
196 | prototype?.constructor?.name === "Object"; 197 | return isObject && isSimple; 198 | } 199 | -------------------------------------------------------------------------------- /src/client/workflowMutation.ts: -------------------------------------------------------------------------------- 1 | import { BaseChannel } from "async-channel"; 2 | import { assert } from "convex-helpers"; 3 | import { validate } from "convex-helpers/validators"; 4 | import { internalMutationGeneric, RegisteredMutation } from "convex/server"; 5 | import { 6 | asObjectValidator, 7 | ObjectType, 8 | PropertyValidators, 9 | v, 10 | } from "convex/values"; 11 | import { api } from "../component/_generated/api.js"; 12 | import { createLogger } from "../component/logging.js"; 13 | import { JournalEntry } from "../component/schema.js"; 14 | import { UseApi } from "../types.js"; 15 | import { setupEnvironment } from "./environment.js"; 16 | import { WorkflowDefinition } from "./index.js"; 17 | import { StepExecutor, StepRequest, WorkerResult } from "./step.js"; 18 | import { StepContext } from "./stepContext.js"; 19 | import { checkArgs } from "./validator.js"; 20 | import { RunResult, WorkpoolOptions } from "@convex-dev/workpool"; 21 | 22 | const workflowArgs = v.object({ 23 | workflowId: v.id("workflows"), 24 | generationNumber: v.number(), 25 | }); 26 | const INVALID_WORKFLOW_MESSAGE = `Invalid arguments for workflow: Did you invoke the workflow with ctx.runMutation() instead of workflow.start()?`; 27 | 28 | // This function is defined in the calling component but then gets passed by 29 | // function handle to the workflow component for execution. This function runs 30 | // one "poll" of the workflow, replaying its execution from the journal until 31 | // it blocks next. 32 | export function workflowMutation( 33 | component: UseApi, 34 | registered: WorkflowDefinition, 35 | defaultWorkpoolOptions?: WorkpoolOptions, 36 | ): RegisteredMutation<"internal", ObjectType, void> { 37 | const workpoolOptions = { 38 | ...defaultWorkpoolOptions, 39 | ...registered.workpoolOptions, 40 | }; 41 | return internalMutationGeneric({ 42 | handler: async (ctx, args) => { 43 | if (!validate(workflowArgs, args)) { 44 | throw new Error(INVALID_WORKFLOW_MESSAGE); 45 | } 46 | const { workflowId, generationNumber } = args; 47 | const { workflow, inProgress, logLevel, journalEntries, ok } = 48 | await ctx.runQuery(component.journal.load, { workflowId }); 49 | const console = createLogger(logLevel); 50 | if (!ok) { 51 | console.error(`Failed to load journal for ${workflowId}`); 52 | await ctx.runMutation(component.workflow.complete, { 53 | workflowId, 54 | generationNumber, 55 | runResult: { kind: "failed", error: "Failed to load journal" }, 56 | now: Date.now(), 57 | }); 58 | return; 59 | } 60 | if (workflow.generationNumber !== generationNumber) { 61 | console.error(`Invalid generation number: ${generationNumber}`); 62 | return; 63 | } 64 | if (workflow.runResult?.kind === "success") { 65 | console.log(`Workflow ${workflowId} completed, returning.`); 66 | return; 67 | } 68 | if (inProgress.length > 0) { 69 | console.log( 70 | `Workflow ${workflowId} blocked by ` + 71 | inProgress 72 | .map((entry) => `${entry.step.name} (${entry._id})`) 73 | .join(", "), 74 | ); 75 | return; 76 | } 77 | for (const journalEntry of journalEntries) { 78 | assert( 79 | !journalEntry.step.inProgress, 80 | `Assertion failed: not blocked but have in-progress journal entry`, 81 | ); 82 | } 83 | const channel = new BaseChannel( 84 | workpoolOptions.maxParallelism ?? 
10, 85 | ); 86 | const step = new StepContext(workflowId, channel); 87 | const originalEnv = setupEnvironment(step); 88 | const executor = new StepExecutor( 89 | workflowId, 90 | generationNumber, 91 | ctx, 92 | component, 93 | journalEntries as JournalEntry[], 94 | channel, 95 | originalEnv, 96 | workpoolOptions, 97 | ); 98 | 99 | const handlerWorker = async (): Promise => { 100 | let runResult: RunResult; 101 | try { 102 | checkArgs(workflow.args, registered.args); 103 | const returnValue = 104 | (await registered.handler(step, workflow.args)) ?? null; 105 | runResult = { kind: "success", returnValue }; 106 | if (registered.returns) { 107 | try { 108 | validate(asObjectValidator(registered.returns), returnValue, { 109 | throw: true, 110 | }); 111 | } catch (error) { 112 | const message = 113 | error instanceof Error ? error.message : `${error}`; 114 | runResult = { 115 | kind: "failed", 116 | error: "Invalid return value: " + message, 117 | }; 118 | } 119 | } 120 | } catch (error) { 121 | runResult = { kind: "failed", error: (error as Error).message }; 122 | } 123 | return { type: "handlerDone", runResult }; 124 | }; 125 | const executorWorker = async (): Promise => { 126 | return await executor.run(); 127 | }; 128 | const result = await Promise.race([handlerWorker(), executorWorker()]); 129 | switch (result.type) { 130 | case "handlerDone": { 131 | await ctx.runMutation(component.workflow.complete, { 132 | workflowId, 133 | generationNumber, 134 | runResult: result.runResult, 135 | now: originalEnv.Date.now(), 136 | }); 137 | break; 138 | } 139 | case "executorBlocked": { 140 | // Nothing to do, we already started steps in the StepExecutor. 141 | break; 142 | } 143 | } 144 | }, 145 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 146 | }) as any; 147 | } 148 | 149 | // eslint-disable-next-line @typescript-eslint/no-unused-vars 150 | const console = "THIS IS A REMINDER TO USE getDefaultLogger"; 151 | -------------------------------------------------------------------------------- /src/component/_generated/api.d.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /** 3 | * Generated `api` utility. 4 | * 5 | * THIS CODE IS AUTOMATICALLY GENERATED. 6 | * 7 | * To regenerate, run `npx convex dev`. 8 | * @module 9 | */ 10 | 11 | import type * as journal from "../journal.js"; 12 | import type * as logging from "../logging.js"; 13 | import type * as model from "../model.js"; 14 | import type * as pool from "../pool.js"; 15 | import type * as utils from "../utils.js"; 16 | import type * as workflow from "../workflow.js"; 17 | 18 | import type { 19 | ApiFromModules, 20 | FilterApi, 21 | FunctionReference, 22 | } from "convex/server"; 23 | 24 | /** 25 | * A utility for referencing Convex functions in your app's API. 
26 | * 27 | * Usage: 28 | * ```js 29 | * const myFunctionReference = api.myModule.myFunction; 30 | * ``` 31 | */ 32 | declare const fullApi: ApiFromModules<{ 33 | journal: typeof journal; 34 | logging: typeof logging; 35 | model: typeof model; 36 | pool: typeof pool; 37 | utils: typeof utils; 38 | workflow: typeof workflow; 39 | }>; 40 | export type Mounts = { 41 | journal: { 42 | load: FunctionReference< 43 | "query", 44 | "public", 45 | { workflowId: string }, 46 | { 47 | inProgress: Array<{ 48 | _creationTime: number; 49 | _id: string; 50 | step: { 51 | args: any; 52 | argsSize: number; 53 | completedAt?: number; 54 | functionType: "query" | "mutation" | "action"; 55 | handle: string; 56 | inProgress: boolean; 57 | name: string; 58 | runResult?: 59 | | { kind: "success"; returnValue: any } 60 | | { error: string; kind: "failed" } 61 | | { kind: "canceled" }; 62 | startedAt: number; 63 | workId?: string; 64 | }; 65 | stepNumber: number; 66 | workflowId: string; 67 | }>; 68 | journalEntries: Array<{ 69 | _creationTime: number; 70 | _id: string; 71 | step: { 72 | args: any; 73 | argsSize: number; 74 | completedAt?: number; 75 | functionType: "query" | "mutation" | "action"; 76 | handle: string; 77 | inProgress: boolean; 78 | name: string; 79 | runResult?: 80 | | { kind: "success"; returnValue: any } 81 | | { error: string; kind: "failed" } 82 | | { kind: "canceled" }; 83 | startedAt: number; 84 | workId?: string; 85 | }; 86 | stepNumber: number; 87 | workflowId: string; 88 | }>; 89 | logLevel: "DEBUG" | "TRACE" | "INFO" | "REPORT" | "WARN" | "ERROR"; 90 | ok: boolean; 91 | workflow: { 92 | _creationTime: number; 93 | _id: string; 94 | args: any; 95 | generationNumber: number; 96 | logLevel?: any; 97 | name?: string; 98 | onComplete?: { context?: any; fnHandle: string }; 99 | runResult?: 100 | | { kind: "success"; returnValue: any } 101 | | { error: string; kind: "failed" } 102 | | { kind: "canceled" }; 103 | startedAt?: any; 104 | state?: any; 105 | workflowHandle: string; 106 | }; 107 | } 108 | >; 109 | startStep: FunctionReference< 110 | "mutation", 111 | "public", 112 | { 113 | generationNumber: number; 114 | name: string; 115 | retry?: 116 | | boolean 117 | | { base: number; initialBackoffMs: number; maxAttempts: number }; 118 | schedulerOptions?: { runAt?: number } | { runAfter?: number }; 119 | step: { 120 | args: any; 121 | argsSize: number; 122 | completedAt?: number; 123 | functionType: "query" | "mutation" | "action"; 124 | handle: string; 125 | inProgress: boolean; 126 | name: string; 127 | runResult?: 128 | | { kind: "success"; returnValue: any } 129 | | { error: string; kind: "failed" } 130 | | { kind: "canceled" }; 131 | startedAt: number; 132 | workId?: string; 133 | }; 134 | workflowId: string; 135 | workpoolOptions?: { 136 | defaultRetryBehavior?: { 137 | base: number; 138 | initialBackoffMs: number; 139 | maxAttempts: number; 140 | }; 141 | logLevel?: "DEBUG" | "TRACE" | "INFO" | "REPORT" | "WARN" | "ERROR"; 142 | maxParallelism?: number; 143 | retryActionsByDefault?: boolean; 144 | }; 145 | }, 146 | { 147 | _creationTime: number; 148 | _id: string; 149 | step: { 150 | args: any; 151 | argsSize: number; 152 | completedAt?: number; 153 | functionType: "query" | "mutation" | "action"; 154 | handle: string; 155 | inProgress: boolean; 156 | name: string; 157 | runResult?: 158 | | { kind: "success"; returnValue: any } 159 | | { error: string; kind: "failed" } 160 | | { kind: "canceled" }; 161 | startedAt: number; 162 | workId?: string; 163 | }; 164 | stepNumber: number; 165 | 
workflowId: string; 166 | } 167 | >; 168 | }; 169 | workflow: { 170 | cancel: FunctionReference< 171 | "mutation", 172 | "public", 173 | { workflowId: string }, 174 | null 175 | >; 176 | cleanup: FunctionReference< 177 | "mutation", 178 | "public", 179 | { workflowId: string }, 180 | boolean 181 | >; 182 | complete: FunctionReference< 183 | "mutation", 184 | "public", 185 | { 186 | generationNumber: number; 187 | now: number; 188 | runResult: 189 | | { kind: "success"; returnValue: any } 190 | | { error: string; kind: "failed" } 191 | | { kind: "canceled" }; 192 | workflowId: string; 193 | }, 194 | null 195 | >; 196 | create: FunctionReference< 197 | "mutation", 198 | "public", 199 | { 200 | maxParallelism?: number; 201 | onComplete?: { context?: any; fnHandle: string }; 202 | validateAsync?: boolean; 203 | workflowArgs: any; 204 | workflowHandle: string; 205 | workflowName: string; 206 | }, 207 | string 208 | >; 209 | getStatus: FunctionReference< 210 | "query", 211 | "public", 212 | { workflowId: string }, 213 | { 214 | inProgress: Array<{ 215 | _creationTime: number; 216 | _id: string; 217 | step: { 218 | args: any; 219 | argsSize: number; 220 | completedAt?: number; 221 | functionType: "query" | "mutation" | "action"; 222 | handle: string; 223 | inProgress: boolean; 224 | name: string; 225 | runResult?: 226 | | { kind: "success"; returnValue: any } 227 | | { error: string; kind: "failed" } 228 | | { kind: "canceled" }; 229 | startedAt: number; 230 | workId?: string; 231 | }; 232 | stepNumber: number; 233 | workflowId: string; 234 | }>; 235 | logLevel: "DEBUG" | "TRACE" | "INFO" | "REPORT" | "WARN" | "ERROR"; 236 | workflow: { 237 | _creationTime: number; 238 | _id: string; 239 | args: any; 240 | generationNumber: number; 241 | logLevel?: any; 242 | name?: string; 243 | onComplete?: { context?: any; fnHandle: string }; 244 | runResult?: 245 | | { kind: "success"; returnValue: any } 246 | | { error: string; kind: "failed" } 247 | | { kind: "canceled" }; 248 | startedAt?: any; 249 | state?: any; 250 | workflowHandle: string; 251 | }; 252 | } 253 | >; 254 | }; 255 | }; 256 | // For now fullApiWithMounts is only fullApi which provides 257 | // jump-to-definition in component client code. 258 | // Use Mounts for the same type without the inference. 
259 | declare const fullApiWithMounts: typeof fullApi; 260 | 261 | export declare const api: FilterApi< 262 | typeof fullApiWithMounts, 263 | FunctionReference 264 | >; 265 | export declare const internal: FilterApi< 266 | typeof fullApiWithMounts, 267 | FunctionReference 268 | >; 269 | 270 | export declare const components: { 271 | workpool: { 272 | lib: { 273 | cancel: FunctionReference< 274 | "mutation", 275 | "internal", 276 | { 277 | id: string; 278 | logLevel: "DEBUG" | "TRACE" | "INFO" | "REPORT" | "WARN" | "ERROR"; 279 | }, 280 | any 281 | >; 282 | cancelAll: FunctionReference< 283 | "mutation", 284 | "internal", 285 | { 286 | before?: number; 287 | logLevel: "DEBUG" | "TRACE" | "INFO" | "REPORT" | "WARN" | "ERROR"; 288 | }, 289 | any 290 | >; 291 | enqueue: FunctionReference< 292 | "mutation", 293 | "internal", 294 | { 295 | config: { 296 | logLevel: "DEBUG" | "TRACE" | "INFO" | "REPORT" | "WARN" | "ERROR"; 297 | maxParallelism: number; 298 | }; 299 | fnArgs: any; 300 | fnHandle: string; 301 | fnName: string; 302 | fnType: "action" | "mutation" | "query"; 303 | onComplete?: { context?: any; fnHandle: string }; 304 | retryBehavior?: { 305 | base: number; 306 | initialBackoffMs: number; 307 | maxAttempts: number; 308 | }; 309 | runAt: number; 310 | }, 311 | string 312 | >; 313 | status: FunctionReference< 314 | "query", 315 | "internal", 316 | { id: string }, 317 | | { previousAttempts: number; state: "pending" } 318 | | { previousAttempts: number; state: "running" } 319 | | { state: "finished" } 320 | >; 321 | }; 322 | }; 323 | }; 324 | -------------------------------------------------------------------------------- /src/component/_generated/api.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /** 3 | * Generated `api` utility. 4 | * 5 | * THIS CODE IS AUTOMATICALLY GENERATED. 6 | * 7 | * To regenerate, run `npx convex dev`. 8 | * @module 9 | */ 10 | 11 | import { anyApi, componentsGeneric } from "convex/server"; 12 | 13 | /** 14 | * A utility for referencing Convex functions in your app's API. 15 | * 16 | * Usage: 17 | * ```js 18 | * const myFunctionReference = api.myModule.myFunction; 19 | * ``` 20 | */ 21 | export const api = anyApi; 22 | export const internal = anyApi; 23 | export const components = componentsGeneric(); 24 | -------------------------------------------------------------------------------- /src/component/_generated/dataModel.d.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /** 3 | * Generated data model types. 4 | * 5 | * THIS CODE IS AUTOMATICALLY GENERATED. 6 | * 7 | * To regenerate, run `npx convex dev`. 8 | * @module 9 | */ 10 | 11 | import type { 12 | DataModelFromSchemaDefinition, 13 | DocumentByName, 14 | TableNamesInDataModel, 15 | SystemTableNames, 16 | } from "convex/server"; 17 | import type { GenericId } from "convex/values"; 18 | import schema from "../schema.js"; 19 | 20 | /** 21 | * The names of all of your Convex tables. 22 | */ 23 | export type TableNames = TableNamesInDataModel; 24 | 25 | /** 26 | * The type of a document stored in Convex. 27 | * 28 | * @typeParam TableName - A string literal type of the table name (like "users"). 29 | */ 30 | export type Doc = DocumentByName< 31 | DataModel, 32 | TableName 33 | >; 34 | 35 | /** 36 | * An identifier for a document in Convex. 37 | * 38 | * Convex documents are uniquely identified by their `Id`, which is accessible 39 | * on the `_id` field. 
To learn more, see [Document IDs](https://docs.convex.dev/using/document-ids). 40 | * 41 | * Documents can be loaded using `db.get(id)` in query and mutation functions. 42 | * 43 | * IDs are just strings at runtime, but this type can be used to distinguish them from other 44 | * strings when type checking. 45 | * 46 | * @typeParam TableName - A string literal type of the table name (like "users"). 47 | */ 48 | export type Id = 49 | GenericId; 50 | 51 | /** 52 | * A type describing your Convex data model. 53 | * 54 | * This type includes information about what tables you have, the type of 55 | * documents stored in those tables, and the indexes defined on them. 56 | * 57 | * This type is used to parameterize methods like `queryGeneric` and 58 | * `mutationGeneric` to make them type-safe. 59 | */ 60 | export type DataModel = DataModelFromSchemaDefinition; 61 | -------------------------------------------------------------------------------- /src/component/_generated/server.d.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /** 3 | * Generated utilities for implementing server-side Convex query and mutation functions. 4 | * 5 | * THIS CODE IS AUTOMATICALLY GENERATED. 6 | * 7 | * To regenerate, run `npx convex dev`. 8 | * @module 9 | */ 10 | 11 | import { 12 | ActionBuilder, 13 | AnyComponents, 14 | HttpActionBuilder, 15 | MutationBuilder, 16 | QueryBuilder, 17 | GenericActionCtx, 18 | GenericMutationCtx, 19 | GenericQueryCtx, 20 | GenericDatabaseReader, 21 | GenericDatabaseWriter, 22 | FunctionReference, 23 | } from "convex/server"; 24 | import type { DataModel } from "./dataModel.js"; 25 | 26 | type GenericCtx = 27 | | GenericActionCtx 28 | | GenericMutationCtx 29 | | GenericQueryCtx; 30 | 31 | /** 32 | * Define a query in this Convex app's public API. 33 | * 34 | * This function will be allowed to read your Convex database and will be accessible from the client. 35 | * 36 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument. 37 | * @returns The wrapped query. Include this as an `export` to name it and make it accessible. 38 | */ 39 | export declare const query: QueryBuilder; 40 | 41 | /** 42 | * Define a query that is only accessible from other Convex functions (but not from the client). 43 | * 44 | * This function will be allowed to read from your Convex database. It will not be accessible from the client. 45 | * 46 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument. 47 | * @returns The wrapped query. Include this as an `export` to name it and make it accessible. 48 | */ 49 | export declare const internalQuery: QueryBuilder; 50 | 51 | /** 52 | * Define a mutation in this Convex app's public API. 53 | * 54 | * This function will be allowed to modify your Convex database and will be accessible from the client. 55 | * 56 | * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument. 57 | * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible. 58 | */ 59 | export declare const mutation: MutationBuilder; 60 | 61 | /** 62 | * Define a mutation that is only accessible from other Convex functions (but not from the client). 63 | * 64 | * This function will be allowed to modify your Convex database. It will not be accessible from the client. 65 | * 66 | * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument. 67 | * @returns The wrapped mutation. 
Include this as an `export` to name it and make it accessible. 68 | */ 69 | export declare const internalMutation: MutationBuilder; 70 | 71 | /** 72 | * Define an action in this Convex app's public API. 73 | * 74 | * An action is a function which can execute any JavaScript code, including non-deterministic 75 | * code and code with side-effects, like calling third-party services. 76 | * They can be run in Convex's JavaScript environment or in Node.js using the "use node" directive. 77 | * They can interact with the database indirectly by calling queries and mutations using the {@link ActionCtx}. 78 | * 79 | * @param func - The action. It receives an {@link ActionCtx} as its first argument. 80 | * @returns The wrapped action. Include this as an `export` to name it and make it accessible. 81 | */ 82 | export declare const action: ActionBuilder; 83 | 84 | /** 85 | * Define an action that is only accessible from other Convex functions (but not from the client). 86 | * 87 | * @param func - The function. It receives an {@link ActionCtx} as its first argument. 88 | * @returns The wrapped function. Include this as an `export` to name it and make it accessible. 89 | */ 90 | export declare const internalAction: ActionBuilder; 91 | 92 | /** 93 | * Define an HTTP action. 94 | * 95 | * This function will be used to respond to HTTP requests received by a Convex 96 | * deployment if the requests matches the path and method where this action 97 | * is routed. Be sure to route your action in `convex/http.js`. 98 | * 99 | * @param func - The function. It receives an {@link ActionCtx} as its first argument. 100 | * @returns The wrapped function. Import this function from `convex/http.js` and route it to hook it up. 101 | */ 102 | export declare const httpAction: HttpActionBuilder; 103 | 104 | /** 105 | * A set of services for use within Convex query functions. 106 | * 107 | * The query context is passed as the first argument to any Convex query 108 | * function run on the server. 109 | * 110 | * This differs from the {@link MutationCtx} because all of the services are 111 | * read-only. 112 | */ 113 | export type QueryCtx = GenericQueryCtx; 114 | 115 | /** 116 | * A set of services for use within Convex mutation functions. 117 | * 118 | * The mutation context is passed as the first argument to any Convex mutation 119 | * function run on the server. 120 | */ 121 | export type MutationCtx = GenericMutationCtx; 122 | 123 | /** 124 | * A set of services for use within Convex action functions. 125 | * 126 | * The action context is passed as the first argument to any Convex action 127 | * function run on the server. 128 | */ 129 | export type ActionCtx = GenericActionCtx; 130 | 131 | /** 132 | * An interface to read from the database within Convex query functions. 133 | * 134 | * The two entry points are {@link DatabaseReader.get}, which fetches a single 135 | * document by its {@link Id}, or {@link DatabaseReader.query}, which starts 136 | * building a query. 137 | */ 138 | export type DatabaseReader = GenericDatabaseReader; 139 | 140 | /** 141 | * An interface to read from and write to the database within Convex mutation 142 | * functions. 143 | * 144 | * Convex guarantees that all writes within a single mutation are 145 | * executed atomically, so you never have to worry about partial writes leaving 146 | * your data in an inconsistent state. 
See [the Convex Guide](https://docs.convex.dev/understanding/convex-fundamentals/functions#atomicity-and-optimistic-concurrency-control) 147 | * for the guarantees Convex provides your functions. 148 | */ 149 | export type DatabaseWriter = GenericDatabaseWriter; 150 | -------------------------------------------------------------------------------- /src/component/_generated/server.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /** 3 | * Generated utilities for implementing server-side Convex query and mutation functions. 4 | * 5 | * THIS CODE IS AUTOMATICALLY GENERATED. 6 | * 7 | * To regenerate, run `npx convex dev`. 8 | * @module 9 | */ 10 | 11 | import { 12 | actionGeneric, 13 | httpActionGeneric, 14 | queryGeneric, 15 | mutationGeneric, 16 | internalActionGeneric, 17 | internalMutationGeneric, 18 | internalQueryGeneric, 19 | componentsGeneric, 20 | } from "convex/server"; 21 | 22 | /** 23 | * Define a query in this Convex app's public API. 24 | * 25 | * This function will be allowed to read your Convex database and will be accessible from the client. 26 | * 27 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument. 28 | * @returns The wrapped query. Include this as an `export` to name it and make it accessible. 29 | */ 30 | export const query = queryGeneric; 31 | 32 | /** 33 | * Define a query that is only accessible from other Convex functions (but not from the client). 34 | * 35 | * This function will be allowed to read from your Convex database. It will not be accessible from the client. 36 | * 37 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument. 38 | * @returns The wrapped query. Include this as an `export` to name it and make it accessible. 39 | */ 40 | export const internalQuery = internalQueryGeneric; 41 | 42 | /** 43 | * Define a mutation in this Convex app's public API. 44 | * 45 | * This function will be allowed to modify your Convex database and will be accessible from the client. 46 | * 47 | * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument. 48 | * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible. 49 | */ 50 | export const mutation = mutationGeneric; 51 | 52 | /** 53 | * Define a mutation that is only accessible from other Convex functions (but not from the client). 54 | * 55 | * This function will be allowed to modify your Convex database. It will not be accessible from the client. 56 | * 57 | * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument. 58 | * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible. 59 | */ 60 | export const internalMutation = internalMutationGeneric; 61 | 62 | /** 63 | * Define an action in this Convex app's public API. 64 | * 65 | * An action is a function which can execute any JavaScript code, including non-deterministic 66 | * code and code with side-effects, like calling third-party services. 67 | * They can be run in Convex's JavaScript environment or in Node.js using the "use node" directive. 68 | * They can interact with the database indirectly by calling queries and mutations using the {@link ActionCtx}. 69 | * 70 | * @param func - The action. It receives an {@link ActionCtx} as its first argument. 71 | * @returns The wrapped action. Include this as an `export` to name it and make it accessible. 
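 * A minimal sketch of such an action, assuming a hypothetical `api.example.getData`
 * query exported from `./_generated/api`:
 *
 *   export const syncToWebhook = action(async (ctx) => {
 *     const data = await ctx.runQuery(api.example.getData, {});
 *     // Side effects like `fetch` are fine here, unlike in queries and mutations.
 *     await fetch("https://example.com/webhook", {
 *       method: "POST",
 *       body: JSON.stringify(data),
 *     });
 *   });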
72 | */ 73 | export const action = actionGeneric; 74 | 75 | /** 76 | * Define an action that is only accessible from other Convex functions (but not from the client). 77 | * 78 | * @param func - The function. It receives an {@link ActionCtx} as its first argument. 79 | * @returns The wrapped function. Include this as an `export` to name it and make it accessible. 80 | */ 81 | export const internalAction = internalActionGeneric; 82 | 83 | /** 84 | * Define a Convex HTTP action. 85 | * 86 | * @param func - The function. It receives an {@link ActionCtx} as its first argument, and a `Request` object 87 | * as its second. 88 | * @returns The wrapped endpoint function. Route a URL path to this function in `convex/http.js`. 89 | */ 90 | export const httpAction = httpActionGeneric; 91 | -------------------------------------------------------------------------------- /src/component/convex.config.ts: -------------------------------------------------------------------------------- 1 | import { defineComponent } from "convex/server"; 2 | import workpool from "@convex-dev/workpool/convex.config"; 3 | 4 | const component = defineComponent("workflow"); 5 | 6 | component.use(workpool); 7 | 8 | export default component; 9 | -------------------------------------------------------------------------------- /src/component/journal.ts: -------------------------------------------------------------------------------- 1 | import { v } from "convex/values"; 2 | import { mutation, query } from "./_generated/server.js"; 3 | import { 4 | journalDocument, 5 | JournalEntry, 6 | journalEntrySize, 7 | step, 8 | workflowDocument, 9 | } from "./schema.js"; 10 | import { getWorkflow } from "./model.js"; 11 | import { createLogger, logLevel } from "./logging.js"; 12 | import { vRetryBehavior, vWorkIdValidator, WorkId } from "@convex-dev/workpool"; 13 | import { assert } from "convex-helpers"; 14 | import { getStatusHandler } from "./workflow.js"; 15 | import { getWorkpool, OnCompleteContext, workpoolOptions } from "./pool.js"; 16 | import { internal } from "./_generated/api.js"; 17 | import { FunctionHandle } from "convex/server"; 18 | import { getDefaultLogger } from "./utils.js"; 19 | 20 | export const load = query({ 21 | args: { 22 | workflowId: v.id("workflows"), 23 | }, 24 | returns: v.object({ 25 | workflow: workflowDocument, 26 | inProgress: v.array(journalDocument), 27 | journalEntries: v.array(journalDocument), 28 | ok: v.boolean(), 29 | logLevel, 30 | }), 31 | handler: async (ctx, { workflowId }) => { 32 | const { workflow, inProgress, logLevel } = await getStatusHandler(ctx, { 33 | workflowId, 34 | }); 35 | const journalEntries: JournalEntry[] = []; 36 | let sizeSoFar = 0; 37 | for await (const entry of ctx.db 38 | .query("steps") 39 | .withIndex("workflow", (q) => q.eq("workflowId", workflowId))) { 40 | journalEntries.push(entry); 41 | sizeSoFar += journalEntrySize(entry); 42 | if (sizeSoFar > 4 * 1024 * 1024) { 43 | return { journalEntries, ok: false, workflow, inProgress, logLevel }; 44 | } 45 | } 46 | return { journalEntries, ok: true, workflow, inProgress, logLevel }; 47 | }, 48 | }); 49 | 50 | // TODO: have it also start the step 51 | export const startStep = mutation({ 52 | args: { 53 | workflowId: v.string(), 54 | generationNumber: v.number(), 55 | name: v.string(), 56 | step, 57 | workpoolOptions: v.optional(workpoolOptions), 58 | retry: v.optional(v.union(v.boolean(), vRetryBehavior)), 59 | schedulerOptions: v.optional( 60 | v.union( 61 | v.object({ runAt: v.optional(v.number()) }), 62 | v.object({ 
runAfter: v.optional(v.number()) }), 63 | ), 64 | ), 65 | }, 66 | returns: journalDocument, 67 | handler: async (ctx, args): Promise => { 68 | if (!args.step.inProgress) { 69 | throw new Error(`Assertion failed: not in progress`); 70 | } 71 | const workflow = await getWorkflow( 72 | ctx, 73 | args.workflowId, 74 | args.generationNumber, 75 | ); 76 | const console = await getDefaultLogger(ctx); 77 | 78 | if (workflow.runResult !== undefined) { 79 | throw new Error(`Workflow not running: ${args.workflowId}`); 80 | } 81 | const maxEntry = await ctx.db 82 | .query("steps") 83 | .withIndex("workflow", (q) => q.eq("workflowId", workflow._id)) 84 | .order("desc") 85 | .first(); 86 | const stepNumber = maxEntry ? maxEntry.stepNumber + 1 : 0; 87 | const { name, step, generationNumber, retry } = args; 88 | const stepId = await ctx.db.insert("steps", { 89 | workflowId: workflow._id, 90 | stepNumber, 91 | step, 92 | }); 93 | const entry = await ctx.db.get(stepId); 94 | const workpool = await getWorkpool(ctx, args.workpoolOptions); 95 | const onComplete = internal.pool.onComplete; 96 | const context: OnCompleteContext = { 97 | generationNumber, 98 | stepId, 99 | }; 100 | let workId: WorkId; 101 | switch (step.functionType) { 102 | case "query": { 103 | workId = await workpool.enqueueQuery( 104 | ctx, 105 | step.handle as FunctionHandle<"query">, 106 | step.args, 107 | { context, onComplete, name, ...args.schedulerOptions }, 108 | ); 109 | break; 110 | } 111 | case "mutation": { 112 | workId = await workpool.enqueueMutation( 113 | ctx, 114 | step.handle as FunctionHandle<"mutation">, 115 | step.args, 116 | { context, onComplete, name, ...args.schedulerOptions }, 117 | ); 118 | break; 119 | } 120 | case "action": { 121 | workId = await workpool.enqueueAction( 122 | ctx, 123 | step.handle as FunctionHandle<"action">, 124 | step.args, 125 | { context, onComplete, name, retry, ...args.schedulerOptions }, 126 | ); 127 | break; 128 | } 129 | } 130 | 131 | console.event("started", { 132 | workflowId: workflow._id, 133 | workflowName: workflow.name, 134 | stepName: step.name, 135 | stepNumber, 136 | }); 137 | return entry! as JournalEntry; 138 | }, 139 | }); 140 | -------------------------------------------------------------------------------- /src/component/logging.ts: -------------------------------------------------------------------------------- 1 | import { v, Infer } from "convex/values"; 2 | 3 | export const DEFAULT_LOG_LEVEL: LogLevel = "WARN"; 4 | 5 | // NOTE: the ordering here is important! A config level of "INFO" will log 6 | // "INFO", "REPORT", "WARN",and "ERROR" events. 
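// For instance, with `shouldLog` below, a configured level of "INFO" gives:
//   shouldLog("INFO", "DEBUG"); // false: DEBUG is more verbose than the configured level
//   shouldLog("INFO", "ERROR"); // true: ERROR is at or above the configured level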
7 | export const logLevel = v.union( 8 | v.literal("DEBUG"), 9 | v.literal("TRACE"), 10 | v.literal("INFO"), 11 | v.literal("REPORT"), 12 | v.literal("WARN"), 13 | v.literal("ERROR"), 14 | ); 15 | export type LogLevel = Infer; 16 | 17 | export type Logger = { 18 | debug: (...args: unknown[]) => void; 19 | info: (...args: unknown[]) => void; 20 | log: (...args: unknown[]) => void; 21 | warn: (...args: unknown[]) => void; 22 | error: (...args: unknown[]) => void; 23 | time: (label: string) => void; 24 | timeEnd: (label: string) => void; 25 | event: (event: string, payload: Record) => void; 26 | logLevel: LogLevel; 27 | }; 28 | 29 | const logLevelOrder = logLevel.members.map((l) => l.value); 30 | const logLevelByName = logLevelOrder.reduce( 31 | (acc, l, i) => { 32 | acc[l] = i; 33 | return acc; 34 | }, 35 | {} as Record, 36 | ); 37 | export function shouldLog(config: LogLevel, level: LogLevel) { 38 | return logLevelByName[config] <= logLevelByName[level]; 39 | } 40 | const DEBUG = logLevelByName["DEBUG"]; 41 | const TRACE = logLevelByName["TRACE"]; 42 | const INFO = logLevelByName["INFO"]; 43 | const REPORT = logLevelByName["REPORT"]; 44 | const WARN = logLevelByName["WARN"]; 45 | const ERROR = logLevelByName["ERROR"]; 46 | 47 | export function createLogger(level?: LogLevel): Logger { 48 | const logLevel = level ?? DEFAULT_LOG_LEVEL; 49 | const levelIndex = logLevelByName[logLevel]; 50 | if (levelIndex === undefined) { 51 | throw new Error(`Invalid log level: ${logLevel}`); 52 | } 53 | return { 54 | logLevel, 55 | debug: (...args: unknown[]) => { 56 | if (levelIndex <= DEBUG) { 57 | console.debug(...args); 58 | } 59 | }, 60 | log: (...args: unknown[]) => { 61 | if (levelIndex <= INFO) { 62 | console.log(...args); 63 | } 64 | }, 65 | info: (...args: unknown[]) => { 66 | if (levelIndex <= INFO) { 67 | console.info(...args); 68 | } 69 | }, 70 | warn: (...args: unknown[]) => { 71 | if (levelIndex <= WARN) { 72 | console.warn(...args); 73 | } 74 | }, 75 | error: (...args: unknown[]) => { 76 | if (levelIndex <= ERROR) { 77 | console.error(...args); 78 | } 79 | }, 80 | time: (label: string) => { 81 | if (levelIndex <= TRACE) { 82 | console.time(label); 83 | } 84 | }, 85 | timeEnd: (label: string) => { 86 | if (levelIndex <= TRACE) { 87 | console.timeEnd(label); 88 | } 89 | }, 90 | event: (event: string, payload: Record) => { 91 | const fullPayload = { 92 | component: "workflow", 93 | event, 94 | ...payload, 95 | }; 96 | if (levelIndex === REPORT && event === "report") { 97 | console.info(JSON.stringify(fullPayload)); 98 | } else if (levelIndex <= INFO) { 99 | console.info(JSON.stringify(fullPayload)); 100 | } 101 | }, 102 | }; 103 | } 104 | -------------------------------------------------------------------------------- /src/component/model.ts: -------------------------------------------------------------------------------- 1 | import { QueryCtx } from "./_generated/server.js"; 2 | 3 | export async function getWorkflow( 4 | ctx: QueryCtx, 5 | workflowIdStr: string, 6 | expectedGenerationNumber: number, 7 | ) { 8 | const workflowId = ctx.db.normalizeId("workflows", workflowIdStr); 9 | if (!workflowId) { 10 | throw new Error(`Invalid workflow ID: ${workflowIdStr}`); 11 | } 12 | const workflow = await ctx.db.get(workflowId); 13 | if (!workflow) { 14 | throw new Error(`Workflow not found: ${workflowId}`); 15 | } 16 | if (workflow.generationNumber !== expectedGenerationNumber) { 17 | throw new Error(`Invalid generation number: ${expectedGenerationNumber}`); 18 | } 19 | return workflow; 20 | } 21 | 22 | 
export async function getJournalEntry(ctx: QueryCtx, journalIdStr: string) { 23 | const journalId = ctx.db.normalizeId("steps", journalIdStr); 24 | if (!journalId) { 25 | throw new Error(`Invalid journal ID: ${journalIdStr}`); 26 | } 27 | const journalEntry = await ctx.db.get(journalId); 28 | if (!journalEntry) { 29 | throw new Error(`Journal entry not found: ${journalId}`); 30 | } 31 | return journalEntry; 32 | } 33 | -------------------------------------------------------------------------------- /src/component/pool.ts: -------------------------------------------------------------------------------- 1 | import { 2 | resultValidator, 3 | vRetryBehavior, 4 | workIdValidator, 5 | Workpool, 6 | WorkpoolOptions, 7 | } from "@convex-dev/workpool"; 8 | import { assert } from "convex-helpers"; 9 | import { validate } from "convex-helpers/validators"; 10 | import { 11 | FunctionHandle, 12 | FunctionReference, 13 | RegisteredAction, 14 | } from "convex/server"; 15 | import { Infer, v } from "convex/values"; 16 | import { api, components, internal } from "./_generated/api.js"; 17 | import { internalMutation, MutationCtx } from "./_generated/server.js"; 18 | import { logLevel } from "./logging.js"; 19 | import { getWorkflow } from "./model.js"; 20 | import { getDefaultLogger } from "./utils.js"; 21 | 22 | export const workpoolOptions = v.object({ 23 | logLevel: v.optional(logLevel), 24 | maxParallelism: v.optional(v.number()), 25 | defaultRetryBehavior: v.optional(vRetryBehavior), 26 | retryActionsByDefault: v.optional(v.boolean()), 27 | }); 28 | // type check 29 | const _: WorkpoolOptions = {} as Infer; 30 | 31 | export const DEFAULT_MAX_PARALLELISM = 25; 32 | export const DEFAULT_RETRY_BEHAVIOR = { 33 | maxAttempts: 5, 34 | initialBackoffMs: 500, 35 | base: 2, 36 | }; 37 | 38 | export async function getWorkpool( 39 | ctx: MutationCtx, 40 | opts: WorkpoolOptions | undefined, 41 | ) { 42 | // nit: can fetch config only if necessary 43 | const config = await ctx.db.query("config").first(); 44 | const logLevel = opts?.logLevel ?? config?.logLevel; 45 | const maxParallelism = 46 | opts?.maxParallelism ?? config?.maxParallelism ?? DEFAULT_MAX_PARALLELISM; 47 | return new Workpool(components.workpool, { 48 | logLevel, 49 | maxParallelism, 50 | defaultRetryBehavior: opts?.defaultRetryBehavior ?? DEFAULT_RETRY_BEHAVIOR, 51 | retryActionsByDefault: opts?.retryActionsByDefault ?? false, 52 | }); 53 | } 54 | 55 | export const onCompleteContext = v.object({ 56 | generationNumber: v.number(), 57 | stepId: v.id("steps"), 58 | workpoolOptions: v.optional(workpoolOptions), 59 | }); 60 | 61 | export type OnCompleteContext = Infer; 62 | 63 | export const onComplete = internalMutation({ 64 | args: { 65 | workId: workIdValidator, 66 | result: resultValidator, 67 | context: v.any(), // Ensure we can catch invalid context to fail workflow. 
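    // (Concretely: if `stepId` in the context fails validation, the handler records the
    // raw args in the `onCompleteFailures` table instead of throwing, so the bad payload
    // stays around for inspection.)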
68 | }, 69 | returns: v.null(), 70 | handler: async (ctx, args) => { 71 | const console = await getDefaultLogger(ctx); 72 | const stepId = args.context.stepId; 73 | if (!validate(v.id("steps"), stepId, { db: ctx.db })) { 74 | // Write to failures table and return 75 | // So someone can investigate if this ever happens 76 | console.error("Invalid onComplete context", args.context); 77 | await ctx.db.insert("onCompleteFailures", args); 78 | return; 79 | } 80 | const journalEntry = await ctx.db.get(stepId); 81 | assert(journalEntry, `Journal entry not found: ${stepId}`); 82 | const workflowId = journalEntry.workflowId; 83 | 84 | const error = !validate(onCompleteContext, args.context) 85 | ? `Invalid onComplete context for workId ${args.workId}` + 86 | JSON.stringify(args.context) 87 | : !journalEntry.step.inProgress 88 | ? `Journal entry not in progress: ${stepId}` 89 | : undefined; 90 | if (error) { 91 | await ctx.db.patch(workflowId, { 92 | runResult: { 93 | kind: "failed", 94 | error, 95 | }, 96 | }); 97 | return; 98 | } 99 | const { generationNumber } = args.context; 100 | const workflow = await getWorkflow(ctx, workflowId, generationNumber); 101 | journalEntry.step.inProgress = false; 102 | journalEntry.step.completedAt = Date.now(); 103 | console.event("stepCompleted", { 104 | workflowId, 105 | workflowName: workflow.name, 106 | status: args.result.kind, 107 | stepName: journalEntry.step.name, 108 | stepNumber: journalEntry.stepNumber, 109 | durationMs: journalEntry.step.completedAt - journalEntry.step.startedAt, 110 | }); 111 | switch (args.result.kind) { 112 | case "success": 113 | journalEntry.step.runResult = { 114 | kind: "success", 115 | returnValue: args.result.returnValue, 116 | }; 117 | break; 118 | case "failed": 119 | journalEntry.step.runResult = { 120 | kind: "failed", 121 | error: args.result.error, 122 | }; 123 | break; 124 | case "canceled": 125 | journalEntry.step.runResult = { 126 | kind: "canceled", 127 | }; 128 | break; 129 | } 130 | await ctx.db.replace(journalEntry._id, journalEntry); 131 | console.debug(`Completed execution of ${stepId}`, journalEntry); 132 | if (workflow.runResult === undefined) { 133 | // TODO: Technically this doesn't obey the workpool, but... 134 | // it's better than calling it directly, and enqueuing can now happen 135 | // in the root component. 136 | const workpool = await getWorkpool(ctx, args.context.workpoolOptions); 137 | await workpool.enqueueMutation( 138 | ctx, 139 | workflow.workflowHandle as FunctionHandle<"mutation">, 140 | { workflowId: workflow._id, generationNumber }, 141 | { 142 | onComplete: internal.pool.handlerOnComplete, 143 | context: { workflowId, generationNumber }, 144 | }, 145 | ); 146 | } else { 147 | console.error( 148 | `Workflow not running: ${workflowId} when completing ${stepId}`, 149 | ); 150 | } 151 | }, 152 | }); 153 | 154 | export type OnComplete = 155 | typeof onComplete extends RegisteredAction< 156 | "public", 157 | infer Args, 158 | infer ReturnValue 159 | > 160 | ? 
FunctionReference<"action", "internal", Args, ReturnValue> 161 | : never; 162 | 163 | const handlerOnCompleteContext = v.object({ 164 | workflowId: v.id("workflows"), 165 | generationNumber: v.number(), 166 | }); 167 | 168 | export const handlerOnComplete = internalMutation({ 169 | args: { 170 | workId: workIdValidator, 171 | result: resultValidator, 172 | context: v.any(), 173 | }, 174 | returns: v.null(), 175 | handler: async (ctx, args) => { 176 | if (args.result.kind !== "success") { 177 | const console = await getDefaultLogger(ctx); 178 | if (!validate(handlerOnCompleteContext, args.context)) { 179 | console.error("Invalid handlerOnComplete context", args.context); 180 | if ( 181 | validate(v.id("workflows"), args.context.workflowId, { db: ctx.db }) 182 | ) { 183 | await ctx.db.patch(args.context.workflowId, { 184 | runResult: { 185 | kind: "failed", 186 | error: 187 | "Invalid handlerOnComplete context: " + 188 | JSON.stringify(args.context), 189 | }, 190 | }); 191 | } 192 | return; 193 | } 194 | const { workflowId, generationNumber } = args.context; 195 | await ctx.runMutation(api.workflow.complete, { 196 | workflowId, 197 | generationNumber, 198 | runResult: args.result, 199 | now: Date.now(), 200 | }); 201 | } 202 | }, 203 | }); 204 | // eslint-disable-next-line @typescript-eslint/no-unused-vars 205 | const console = "THIS IS A REMINDER TO USE getDefaultLogger"; 206 | -------------------------------------------------------------------------------- /src/component/schema.ts: -------------------------------------------------------------------------------- 1 | import { 2 | resultValidator, 3 | vResultValidator, 4 | RunResult, 5 | vRetryBehavior, 6 | vWorkIdValidator, 7 | workIdValidator, 8 | vOnComplete, 9 | } from "@convex-dev/workpool"; 10 | import { defineSchema, defineTable } from "convex/server"; 11 | import { convexToJson, Infer, v, Value } from "convex/values"; 12 | import { logLevel } from "./logging.js"; 13 | import { deprecated, literals } from "convex-helpers/validators"; 14 | import { workpoolOptions } from "./pool.js"; 15 | 16 | export function valueSize(value: Value): number { 17 | return JSON.stringify(convexToJson(value)).length; 18 | } 19 | 20 | export function resultSize(result: RunResult): number { 21 | let size = 0; 22 | size += result.kind.length; 23 | switch (result.kind) { 24 | case "success": { 25 | size += 8 + valueSize(result.returnValue); 26 | break; 27 | } 28 | case "failed": { 29 | size += result.error.length; 30 | break; 31 | } 32 | case "canceled": { 33 | break; 34 | } 35 | } 36 | return size; 37 | } 38 | 39 | const workflowObject = { 40 | name: v.optional(v.string()), 41 | workflowHandle: v.string(), 42 | args: v.any(), 43 | onComplete: v.optional(vOnComplete), 44 | logLevel: deprecated, 45 | startedAt: deprecated, 46 | state: deprecated, 47 | // undefined 48 | runResult: v.optional(vResultValidator), 49 | 50 | // Internal execution status, used to totally order mutations. 
51 | generationNumber: v.number(), 52 | }; 53 | 54 | export const workflowDocument = v.object({ 55 | _id: v.string(), 56 | _creationTime: v.number(), 57 | ...workflowObject, 58 | }); 59 | export type Workflow = Infer; 60 | 61 | export const step = v.object({ 62 | name: v.string(), 63 | inProgress: v.boolean(), 64 | workId: v.optional(vWorkIdValidator), 65 | functionType: literals("query", "mutation", "action"), 66 | handle: v.string(), 67 | argsSize: v.number(), 68 | args: v.any(), 69 | runResult: v.optional(vResultValidator), 70 | 71 | startedAt: v.number(), 72 | completedAt: v.optional(v.number()), 73 | }); 74 | export type Step = Infer; 75 | 76 | function stepSize(step: Step): number { 77 | let size = 0; 78 | size += step.name.length; 79 | size += 1; // inProgress 80 | if (step.workId) { 81 | size += step.workId.length; 82 | } 83 | size += step.functionType.length; 84 | size += step.handle.length; 85 | // TODO: start time, for scheduled steps 86 | size += 8 + step.argsSize; 87 | if (step.runResult) { 88 | size += resultSize(step.runResult); 89 | } 90 | size += 8; // startedAt 91 | size += 8; // completedAt 92 | return size; 93 | } 94 | 95 | const journalObject = { 96 | workflowId: v.id("workflows"), 97 | stepNumber: v.number(), 98 | step, 99 | }; 100 | 101 | export const journalDocument = v.object({ 102 | _id: v.string(), 103 | _creationTime: v.number(), 104 | ...journalObject, 105 | }); 106 | export type JournalEntry = Infer; 107 | 108 | export function journalEntrySize(entry: JournalEntry): number { 109 | let size = 0; 110 | size += entry._id.length; 111 | size += 8; // _creationTime 112 | size += entry.workflowId.length; 113 | size += 8; // stepNumber 114 | size += stepSize(entry.step); 115 | return size; 116 | } 117 | 118 | export default defineSchema({ 119 | config: defineTable({ 120 | logLevel: v.optional(logLevel), 121 | maxParallelism: v.optional(v.number()), 122 | }), 123 | workflows: defineTable(workflowObject), 124 | steps: defineTable(journalObject) 125 | .index("workflow", ["workflowId", "stepNumber"]) 126 | .index("inProgress", ["step.inProgress", "workflowId"]), 127 | onCompleteFailures: defineTable({ 128 | workId: workIdValidator, 129 | result: resultValidator, 130 | context: v.any(), 131 | }), 132 | }); 133 | -------------------------------------------------------------------------------- /src/component/setup.test.ts: -------------------------------------------------------------------------------- 1 | /// 2 | import { test } from "vitest"; 3 | export const modules = import.meta.glob("./**/*.*s"); 4 | 5 | test("setup", () => {}); 6 | -------------------------------------------------------------------------------- /src/component/utils.ts: -------------------------------------------------------------------------------- 1 | import { v } from "convex/values"; 2 | import { internalMutation, QueryCtx } from "./_generated/server.js"; 3 | import { createLogger, DEFAULT_LOG_LEVEL, logLevel } from "./logging.js"; 4 | 5 | export async function getDefaultLogger(ctx: QueryCtx) { 6 | const config = await ctx.db.query("config").first(); 7 | return createLogger(config?.logLevel ?? DEFAULT_LOG_LEVEL); 8 | } 9 | 10 | // For now, only configure by calling from the dashboard or CLI. 
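// A sketch of the expected args shape when running `utils:updateConfig` on this
// component (both fields are optional and can be updated independently):
//   { "logLevel": "DEBUG", "maxParallelism": 10 }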
11 | export const updateConfig = internalMutation({ 12 | args: { 13 | logLevel: v.optional(logLevel), 14 | maxParallelism: v.optional(v.number()), 15 | }, 16 | handler: async (ctx, args) => { 17 | const config = await ctx.db.query("config").first(); 18 | if (!config) { 19 | await ctx.db.insert("config", { 20 | logLevel: args.logLevel ?? DEFAULT_LOG_LEVEL, 21 | maxParallelism: args.maxParallelism, 22 | }); 23 | } else { 24 | if (args.logLevel) { 25 | await ctx.db.patch(config._id, { 26 | logLevel: args.logLevel, 27 | }); 28 | } 29 | if (args.maxParallelism) { 30 | await ctx.db.patch(config._id, { 31 | maxParallelism: args.maxParallelism, 32 | }); 33 | } 34 | } 35 | }, 36 | }); 37 | -------------------------------------------------------------------------------- /src/component/workflow.ts: -------------------------------------------------------------------------------- 1 | import { vOnComplete, vResultValidator } from "@convex-dev/workpool"; 2 | import { assert } from "convex-helpers"; 3 | import { FunctionHandle } from "convex/server"; 4 | import { v } from "convex/values"; 5 | import { Id } from "./_generated/dataModel.js"; 6 | import { mutation, MutationCtx, query, QueryCtx } from "./_generated/server.js"; 7 | import { createLogger, Logger, logLevel } from "./logging.js"; 8 | import { getWorkflow } from "./model.js"; 9 | import { getWorkpool } from "./pool.js"; 10 | import { journalDocument, JournalEntry, workflowDocument } from "./schema.js"; 11 | import { getDefaultLogger } from "./utils.js"; 12 | import { WorkflowId, OnCompleteArgs } from "../types.js"; 13 | 14 | export const create = mutation({ 15 | args: { 16 | workflowName: v.string(), 17 | workflowHandle: v.string(), 18 | workflowArgs: v.any(), 19 | maxParallelism: v.optional(v.number()), 20 | onComplete: v.optional(vOnComplete), 21 | validateAsync: v.optional(v.boolean()), 22 | // TODO: ttl 23 | }, 24 | returns: v.string(), 25 | handler: async (ctx, args) => { 26 | const now = Date.now(); 27 | const console = await getDefaultLogger(ctx); 28 | await updateMaxParallelism(ctx, console, args.maxParallelism); 29 | const workflowId = await ctx.db.insert("workflows", { 30 | name: args.workflowName, 31 | workflowHandle: args.workflowHandle, 32 | args: args.workflowArgs, 33 | generationNumber: 0, 34 | onComplete: args.onComplete, 35 | }); 36 | console.debug( 37 | `Created workflow ${workflowId}:`, 38 | args.workflowArgs, 39 | args.workflowHandle, 40 | ); 41 | // If we can't start it, may as well not create it, eh? Fail fast... 
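    // Without `validateAsync`, the handler runs inline via `ctx.runMutation`, so a bad
    // handle throws here and the insert above rolls back with this mutation. With
    // `validateAsync`, the first run is only scheduled, and errors surface later.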
42 | if (args.validateAsync) { 43 | await ctx.scheduler.runAfter( 44 | 0, 45 | args.workflowHandle as FunctionHandle<"mutation">, 46 | { workflowId, generationNumber: 0 }, 47 | ); 48 | } else { 49 | await ctx.runMutation(args.workflowHandle as FunctionHandle<"mutation">, { 50 | workflowId, 51 | generationNumber: 0, 52 | }); 53 | } 54 | return workflowId as string; 55 | }, 56 | }); 57 | 58 | export const getStatus = query({ 59 | args: { 60 | workflowId: v.id("workflows"), 61 | }, 62 | returns: v.object({ 63 | workflow: workflowDocument, 64 | inProgress: v.array(journalDocument), 65 | logLevel: logLevel, 66 | }), 67 | handler: getStatusHandler, 68 | }); 69 | 70 | export async function getStatusHandler( 71 | ctx: QueryCtx, 72 | args: { workflowId: Id<"workflows"> }, 73 | ) { 74 | const workflow = await ctx.db.get(args.workflowId); 75 | assert(workflow, `Workflow not found: ${args.workflowId}`); 76 | const console = await getDefaultLogger(ctx); 77 | 78 | const result: JournalEntry[] = []; 79 | const inProgressEntries = await ctx.db 80 | .query("steps") 81 | .withIndex("inProgress", (q) => 82 | q.eq("step.inProgress", true).eq("workflowId", args.workflowId), 83 | ) 84 | .collect(); 85 | result.push(...inProgressEntries); 86 | console.debug(`${args.workflowId} blocked by`, result); 87 | return { workflow, inProgress: result, logLevel: console.logLevel }; 88 | } 89 | 90 | export const cancel = mutation({ 91 | args: { 92 | workflowId: v.id("workflows"), 93 | }, 94 | returns: v.null(), 95 | handler: async (ctx, { workflowId }) => { 96 | const { workflow, inProgress, logLevel } = await getStatusHandler(ctx, { 97 | workflowId, 98 | }); 99 | const console = createLogger(logLevel); 100 | if (inProgress.length > 0) { 101 | const workpool = await getWorkpool(ctx, {}); 102 | for (const step of inProgress) { 103 | if (step.step.workId) { 104 | await workpool.cancel(ctx, step.step.workId); 105 | } 106 | } 107 | } 108 | assert(workflow.runResult === undefined, `Not running: ${workflowId}`); 109 | workflow.runResult = { kind: "canceled" }; 110 | workflow.generationNumber += 1; 111 | console.debug(`Canceled workflow ${workflowId}:`, workflow); 112 | // TODO: Call onComplete hook 113 | // TODO: delete everything unless ttl is set 114 | await ctx.db.replace(workflow._id, workflow); 115 | }, 116 | }); 117 | 118 | export const complete = mutation({ 119 | args: { 120 | workflowId: v.id("workflows"), 121 | generationNumber: v.number(), 122 | runResult: vResultValidator, 123 | now: v.number(), 124 | }, 125 | returns: v.null(), 126 | handler: async (ctx, args) => { 127 | const workflow = await getWorkflow( 128 | ctx, 129 | args.workflowId, 130 | args.generationNumber, 131 | ); 132 | const console = await getDefaultLogger(ctx); 133 | if (workflow.runResult) { 134 | throw new Error(`Workflow not running: ${workflow}`); 135 | } 136 | workflow.runResult = args.runResult; 137 | console.event("completed", { 138 | workflowId: workflow._id, 139 | name: workflow.name, 140 | status: workflow.runResult.kind, 141 | overallDurationMs: Date.now() - workflow._creationTime, 142 | }); 143 | if (workflow.onComplete) { 144 | await ctx.runMutation( 145 | workflow.onComplete.fnHandle as FunctionHandle< 146 | "mutation", 147 | OnCompleteArgs 148 | >, 149 | { 150 | workflowId: workflow._id as unknown as WorkflowId, 151 | result: workflow.runResult, 152 | context: workflow.onComplete.context, 153 | }, 154 | ); 155 | } 156 | // TODO: delete everything unless ttl is set 157 | console.debug(`Completed workflow ${workflow._id}:`, workflow); 158 
| await ctx.db.replace(workflow._id, workflow); 159 | }, 160 | }); 161 | 162 | export const cleanup = mutation({ 163 | args: { 164 | workflowId: v.string(), 165 | }, 166 | returns: v.boolean(), 167 | handler: async (ctx, args) => { 168 | const workflowId = ctx.db.normalizeId("workflows", args.workflowId); 169 | if (!workflowId) { 170 | throw new Error(`Invalid workflow ID: ${args.workflowId}`); 171 | } 172 | const workflow = await ctx.db.get(workflowId); 173 | if (!workflow) { 174 | return false; 175 | } 176 | const logger = await getDefaultLogger(ctx); 177 | if (workflow.runResult?.kind !== "success") { 178 | logger.debug( 179 | `Can't clean up workflow ${workflowId} since it hasn't completed.`, 180 | ); 181 | return false; 182 | } 183 | logger.debug(`Cleaning up workflow ${workflowId}`, workflow); 184 | await ctx.db.delete(workflowId); 185 | const journalEntries = await ctx.db 186 | .query("steps") 187 | .withIndex("workflow", (q) => q.eq("workflowId", workflowId)) 188 | .collect(); 189 | for (const journalEntry of journalEntries) { 190 | logger.debug("Deleting journal entry", journalEntry); 191 | await ctx.db.delete(journalEntry._id); 192 | } 193 | return true; 194 | }, 195 | }); 196 | 197 | async function updateMaxParallelism( 198 | ctx: MutationCtx, 199 | console: Logger, 200 | maxParallelism: number | undefined, 201 | ) { 202 | const config = await ctx.db.query("config").first(); 203 | if (config) { 204 | if (maxParallelism && maxParallelism !== config.maxParallelism) { 205 | console.warn("Updating max parallelism to", maxParallelism); 206 | await ctx.db.patch(config._id, { maxParallelism }); 207 | } 208 | } else { 209 | await ctx.db.insert("config", { maxParallelism }); 210 | } 211 | } 212 | 213 | // eslint-disable-next-line @typescript-eslint/no-unused-vars 214 | const console = "THIS IS A REMINDER TO USE getDefaultLogger"; 215 | -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | import { RunResult, WorkId } from "@convex-dev/workpool"; 2 | import { Expand, FunctionReference } from "convex/server"; 3 | import { GenericId, v, VString } from "convex/values"; 4 | 5 | export type WorkflowId = string & { __isWorkflowId: true }; 6 | export const vWorkflowId = v.string() as VString; 7 | 8 | export type UseApi = Expand<{ 9 | [mod in keyof API]: API[mod] extends FunctionReference< 10 | infer FType, 11 | "public", 12 | infer FArgs, 13 | infer FReturnType, 14 | infer FComponentPath 15 | > 16 | ? FunctionReference< 17 | FType, 18 | "internal", 19 | OpaqueIds, 20 | OpaqueIds, 21 | FComponentPath 22 | > 23 | : UseApi; 24 | }>; 25 | 26 | export type OpaqueIds = 27 | T extends GenericId 28 | ? string 29 | : T extends WorkId 30 | ? string 31 | : T extends (infer U)[] 32 | ? OpaqueIds[] 33 | : T extends object 34 | ? { [K in keyof T]: OpaqueIds } 35 | : T; 36 | export type OnCompleteArgs = { 37 | /** 38 | * The ID of the work that completed. 39 | */ 40 | workflowId: WorkflowId; 41 | /** 42 | * The context object passed when enqueuing the work. 43 | * Useful for passing data from the enqueue site to the onComplete site. 44 | */ 45 | context: unknown; 46 | /** 47 | * The result of the run that completed. 
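   * For example, an onComplete handler would typically switch on `result.kind`
   * ("success" | "failed" | "canceled") and read `result.returnValue` or
   * `result.error` in the first two cases.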
48 | */ 49 | result: RunResult; 50 | }; 51 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "allowJs": true, 4 | "checkJs": true, 5 | "strict": true, 6 | 7 | "target": "ESNext", 8 | "lib": ["ES2021", "dom"], 9 | "forceConsistentCasingInFileNames": true, 10 | "allowSyntheticDefaultImports": true, 11 | "module": "NodeNext", 12 | "moduleResolution": "NodeNext", 13 | 14 | "isolatedModules": true, 15 | "declaration": true, 16 | "declarationMap": true, 17 | "sourceMap": true, 18 | "outDir": "./dist", 19 | "skipLibCheck": true 20 | }, 21 | "include": ["./src/**/*"] 22 | } 23 | --------------------------------------------------------------------------------