├── .cursor └── rules │ └── convex_rules.mdc ├── .github └── workflows │ └── node.js.yml ├── .gitignore ├── .prettierrc.json ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── commonjs.json ├── eslint.config.js ├── esm.json ├── example ├── .cursor │ └── mcp.json ├── .gitignore ├── README.md ├── convex.json ├── convex │ ├── README.md │ ├── _generated │ │ ├── api.d.ts │ │ ├── api.js │ │ ├── dataModel.d.ts │ │ ├── server.d.ts │ │ └── server.js │ ├── _libsql_workaround.ts │ ├── convex.config.ts │ ├── example.ts │ ├── schema.ts │ ├── tsconfig.json │ └── v8Runtime.ts ├── eslint.config.js ├── index.html ├── package.json ├── src │ ├── App.css │ ├── App.tsx │ ├── index.css │ ├── main.tsx │ ├── mastra │ │ ├── agents │ │ │ └── index.ts │ │ ├── index.ts │ │ ├── tools │ │ │ └── index.ts │ │ └── workflows │ │ │ └── index.ts │ └── vite-env.d.ts ├── tsconfig.json └── vite.config.ts ├── node10stubs.mjs ├── package.json ├── src ├── ai │ ├── types.test.ts │ └── types.ts ├── client │ ├── client.ts │ ├── in-memory.ts │ ├── index.ts │ ├── storage.ts │ ├── types.ts │ └── vector.ts ├── component │ ├── _generated │ │ ├── api.d.ts │ │ ├── api.js │ │ ├── dataModel.d.ts │ │ ├── server.d.ts │ │ └── server.js │ ├── convex.config.ts │ ├── debug.ts │ ├── logger.ts │ ├── schema.ts │ ├── setup.test.ts │ ├── storage │ │ ├── messages.ts │ │ ├── storage.ts │ │ └── tables.ts │ └── vector │ │ ├── tables.ts │ │ └── vector.ts ├── mapping │ ├── index.test.ts │ └── index.ts ├── react │ └── index.ts └── utils.ts └── tsconfig.json /.github/workflows/node.js.yml: -------------------------------------------------------------------------------- 1 | name: Run tests 2 | on: 3 | push: 4 | branches: ["main"] 5 | pull_request: 6 | branches: ["main"] 7 | jobs: 8 | build: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v4 12 | - name: Use Node.js 13 | uses: actions/setup-node@v4 14 | with: 15 | cache-dependency-path: | 16 | example/package.json 17 | package.json 18 | node-version: "18.x" 19 | cache: "npm" 20 | - run: npm i 21 | - run: npm ci 22 | - run: cd example && npm i && cd .. 23 | - run: npm test 24 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | .idea 3 | *.local 4 | *.log 5 | /.vscode/ 6 | /docs/.vitepress/cache 7 | dist 8 | dist-ssr 9 | explorations 10 | node_modules 11 | .eslintcache 12 | # components are libraries! 13 | package-lock.json 14 | 15 | # this is a package-json-redirect stub dir, see https://github.com/andrewbranch/example-subpath-exports-ts-compat?tab=readme-ov-file 16 | react/package.json 17 | # npm pack output 18 | *.tgz 19 | .env.development 20 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "trailingComma": "es5" 3 | } 4 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Developing guide 2 | 3 | ## Running locally 4 | 5 | ```sh 6 | npm i 7 | cd example 8 | npm i 9 | npx convex dev 10 | ``` 11 | 12 | ## Testing 13 | 14 | ```sh 15 | rm -rf dist/ && npm run build 16 | npm run typecheck 17 | npm run test 18 | cd example 19 | npm run lint 20 | cd .. 
21 | ``` 22 | 23 | ## Deploying 24 | 25 | ### Building a one-off package 26 | 27 | ```sh 28 | rm -rf dist/ && npm run build 29 | npm pack 30 | ``` 31 | 32 | ### Deploying a new version 33 | 34 | ```sh 35 | # this will change the version and commit it (if you run it in the root directory) 36 | npm version patch 37 | npm publish --dry-run 38 | # sanity check files being included 39 | npm publish 40 | git push --tags 41 | ``` 42 | 43 | #### Alpha release 44 | 45 | The same as above, but it requires extra flags so the release is only installed with `@alpha`: 46 | 47 | ```sh 48 | npm version prerelease --preid alpha 49 | npm publish --tag alpha 50 | ``` 51 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Convex Mastra Component 2 | 3 | [![npm version](https://badge.fury.io/js/@convex-dev%2Fmastra.svg)](https://badge.fury.io/js/@convex-dev%2Fmastra) 4 | 5 | 6 | 7 | Use [Mastra](https://mastra.ai) to build workflows and define agents, 8 | then use this component to run and save them on [Convex](https://convex.dev/). 9 | 10 | 1. Run workflows asynchronously. 
Fire and forget from a serverless function (mutation or action). 11 | 1. Track the status of the workflow. Reactive queries and run-to-completion utilities. 12 | Or just write to the database from your steps and use normal Convex reactivity. 13 | 1. Resume a workflow from where it left off, after suspending it for user input. 14 | 1. Full support for Mastra's step forking, joining, triggering, and more. 15 | 16 | ```ts 17 | const storage = new ConvexStorage(components.mastra); 18 | const vector = new ConvexVector(components.mastra); 19 | 20 | // Uses storage to save and load messages and threads. 21 | // Uses vector to save and query embeddings for RAG on messages. 22 | const agent = new Agent({ memory: new Memory({ storage, vector}), ... }) 23 | // Uses storage to save and load workflow state. 24 | const mastra = new Mastra({ storage, ...}) 25 | 26 | export const myAction = action({ 27 | args: { workflowName: v.string()}, 28 | handler: async (ctx, args) => { 29 | // IMPORTANT: 30 | // <- must be called before using storage or vector 31 | storage.setCtx(ctx); 32 | vector.setCtx(ctx); 33 | 34 | const workflow = mastra.getWorkflow(args.workflowName); 35 | const { runId, start } = await workflow.create(ctx); 36 | await start({...}); 37 | } 38 | }) 39 | ``` 40 | 41 | ### Use cases 42 | 43 | - Agentic workflows, such as taking user input, calling multiple LLMs, calling third parties, etc. 44 | - ... Everything else you want to do with Mastra. 45 | 46 | Found a bug? Feature request? [File it here](https://github.com/get-convex/mastra/issues). 47 | 48 | ### Future work 49 | 50 | - Provide Storage and Vector integrations for using Convex **from** Mastra servers. 51 | - Enables running from both `mastra dev` and `convex dev` for fast iterations. 52 | - Enables using Convex for Agent Memory. 53 | - Provide helpers to export functions so browsers can call them safely. 54 | - Add a custom mutation step, for a transactional step that will always terminate 55 | without needing a retry configuration (built-in for Convex). 56 | 57 | ## Pre-requisite: Convex 58 | 59 | You'll need an existing Convex project to use the component. 60 | Convex is a hosted backend platform, including a database, serverless functions, 61 | and a ton more you can learn about [here](https://docs.convex.dev/get-started). 62 | 63 | Run `npm create convex` or follow any of the [quickstarts](https://docs.convex.dev/home) to set one up. 64 | 65 | ## Installation 66 | 67 | Install the component package: 68 | 69 | ```ts 70 | npm install @convex-dev/mastra 71 | ``` 72 | 73 | **NOTE**: You also need to: 74 | 75 | - Directly install `@libsql/client` 76 | - Mark it as an external package 77 | - Export it from a file in your /convex folder due to current bundling issues. 
78 | 79 | You can do all of this by running the following commands from the project root: 80 | 81 | ```sh 82 | npm install -D @libsql/client 83 | echo '{"node":{"externalPackages":["@libsql/client"]}}' > convex.json 84 | printf '"use node";\nexport * as _ from "@libsql/client";' > convex/_workaround.ts 85 | ``` 86 | 87 | Create a `convex.config.ts` file in your app's `convex/` folder and install the component by calling `use`: 88 | 89 | ```ts 90 | // convex/convex.config.ts 91 | import { defineApp } from "convex/server"; 92 | import mastra from "@convex-dev/mastra/convex.config"; 93 | 94 | const app = defineApp(); 95 | app.use(mastra); 96 | 97 | export default app; 98 | ``` 99 | 100 | ## Usage 101 | 102 | - It's important to call `storage.setCtx(ctx)` and `vector.setCtx(ctx)` before 103 | using the storage or vector in an action. 104 | 105 | ```ts 106 | "use node"; 107 | const storage = new ConvexStorage(components.mastra); 108 | const vector = new ConvexVector(components.mastra); 109 | 110 | // Uses storage to save and load messages and threads. 111 | // Uses vector to save and query embeddings for RAG on messages. 112 | const agent = new Agent({ memory: new Memory({ storage, vector}), ... }) 113 | // Uses storage to save and load workflow state. 114 | const mastra = new Mastra({ storage, ...}) 115 | 116 | export const myAction = action({ 117 | args: { workflowName: v.string()}, 118 | handler: async (ctx, args) => { 119 | // IMPORTANT: 120 | // <- must be called before using storage or vector 121 | storage.setCtx(ctx); 122 | vector.setCtx(ctx); 123 | 124 | const workflow = mastra.getWorkflow(args.workflowName); 125 | const { runId, start } = await workflow.create(ctx); 126 | await start({...}); 127 | } 128 | }) 129 | ``` 130 | 131 | Querying the status reactively from a non-node file: 132 | 133 | ```ts 134 | import { query } from "./_generated/server"; 135 | import { components } from "./_generated/api"; 136 | import { v } from "convex/values"; 137 | import { 138 | mapSerializedToMastra, 139 | TABLE_WORKFLOW_SNAPSHOT, 140 | } from "@convex-dev/mastra/mapping"; 141 | 142 | export const getStatus = query({ 143 | args: { runId: v.string() }, 144 | handler: async (ctx, args) => { 145 | const doc = await ctx.runQuery( 146 | components.mastra.storage.storage.loadSnapshot, 147 | { 148 | workflowName: "weatherToOutfitWorkflow", 149 | runId: args.runId, 150 | } 151 | ); 152 | if (!doc) { 153 | return null; 154 | } 155 | const snapshot = mapSerializedToMastra(TABLE_WORKFLOW_SNAPSHOT, doc); 156 | const { childStates, activePaths, suspendedSteps } = snapshot.snapshot; 157 | return { childStates, activePaths, suspendedSteps }; 158 | }, 159 | }); 160 | ``` 161 | 162 | See more example usage in [example.ts](./example/convex/example.ts). 163 | 164 | ## Limitations 165 | 166 | 1. For local development, you need to run `mastra dev` in Node 20, but 167 | `convex dev` in Node 18. 168 | If you see issues about syscalls at import time, try using the cloud dev 169 | environment instead. 170 | 1. Currently you can only interact with Mastra classes from Node actions, so 171 | you can't start them from a mutation without doing it indirectly via the 172 | Scheduler or Workpool by enqueuing the node action to run. 173 | 1. To reactively query for the status of a workflow, you need to call the 174 | component API directly. There's an example above and in 175 | [v8Runtime.ts](./example/convex/v8Runtime.ts). 
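
To illustrate the second limitation above, here is a minimal sketch of starting a workflow indirectly from a mutation by scheduling the Node action. The file and function names are illustrative; `internal.example.startWorkflow` refers to the action defined in [example.ts](./example/convex/example.ts):

```ts
// convex/kickoff.ts (hypothetical file name)
import { mutation } from "./_generated/server";
import { internal } from "./_generated/api";
import { v } from "convex/values";

export const kickOffWorkflow = mutation({
  args: { initialData: v.optional(v.any()) },
  handler: async (ctx, args) => {
    // Mutations can't use the Mastra classes directly (they need the Node runtime),
    // so schedule the "use node" action that creates and starts the run.
    await ctx.scheduler.runAfter(0, internal.example.startWorkflow, {
      name: "weatherToOutfitWorkflow",
      initialData: args.initialData,
    });
  },
});
```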
176 | 177 | ### TODO before it's out of alpha 178 | 179 | - [ ] Validate the Storage and Vector implementations (from Convex). 180 | - [ ] Ensure @mastra/memory can be bundled in Convex. 181 | 182 | ### TODO before it's out of beta 183 | 184 | - [ ] Support using Storage and Vector from `mastra dev`. 185 | - [ ] Configurable vacuuming of workflow state. 186 | - [ ] Support queries on workflow state without hitting the component directly. 187 | 188 | ### Backlog: 189 | 190 | 1. Support exposing the same `hono` HTTP API as Mastra servers. 191 | 1. Better logging and tracing. 192 | 1. Provide a Mutation Step to avoid the v8 action and is executed exactly once. 193 | 1. Workflows currently only run in Node Actions. You can create/start/resume 194 | them from anywhere, but each step will be executed in the node runtime. 195 | This is a bit slower and more expensive than running in the default runtime. 196 | 1. Using the `ConvexStorage` from Mastra doesn't share state with workflows 197 | made via the Component. They're currently stored in separate tables with 198 | different schemas. 199 | 200 | ## Troubleshooting 201 | 202 | ### Libsql errors 203 | 204 | If you see an error like this: 205 | 206 | ``` 207 | Uncaught Failed to analyze _deps/node/4QMS5IZK.js: Cannot find module '@libsql/linux-arm64-gnu' 208 | ``` 209 | 210 | You need to add `@libsql/client` to the `externalPackages` in a `convex.json` 211 | file in the root of your project: 212 | 213 | ```json 214 | { 215 | "node": { 216 | "externalPackages": ["@libsql/client"] 217 | } 218 | } 219 | ``` 220 | 221 | If that still doesn't solve it, add a `convex/_workaround.ts` file: 222 | 223 | ```ts 224 | "use node"; 225 | export * as _ from "@libsql/client"; 226 | ``` 227 | 228 | ### Errors about 'no loader is configured for ".node" files' 229 | 230 | If you see an error like this: 231 | 232 | ``` 233 | ✘ [ERROR] No loader is configured for ".node" files: node_modules/onnxruntime-node/bin/napi-v3/win32/arm64/onnxruntime_binding.nodel 234 | ``` 235 | 236 | You're likely importing some node package through a dependency that isn't 237 | supported. One workaround is to add it as an explicit dependency, then add it 238 | to the `externalPackages` in a `convex.json` file in the root of your project, 239 | then export something from it, similar to `@libsql/client` above 240 | 241 | You can also try deleting your `node_modules` and `package-lock.json` and 242 | re-installing using node 18. 243 | 244 | ### Errors about node packages not being available 245 | 246 | ``` 247 | ✘ [ERROR] Could not resolve "assert" 248 | 249 | node_modules/sonic-boom/index.js:8:23: 250 | 8 │ const assert = require('assert') 251 | ╵ ~~~~~~~~ 252 | 253 | The package "assert" wasn't found on the file system but is built into node. Are you trying to 254 | bundle for node? You can use "platform: 'node'" to do that, which will remove this error. 255 | ✖ It looks like you are using Node APIs from a file without the "use node" directive. 256 | ``` 257 | 258 | This is because you're using a Node API in a file that doesn't have 259 | `"use node";` as the first line in the file 260 | Or you're importing a file in your convex/ directory that imports from a 261 | node dependency that doesn't have the `"use node"` directive. 262 | 263 | To fix this, add the `"use node"` directive to the file. Note: these files can 264 | only have actions, since mutations and queries only run in the default runtime. 
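
For reference, here is a rough sketch of what a Node-runtime file might look like (the file and function names are placeholders); queries and mutations must live in separate files that omit the directive:

```ts
"use node";
// convex/nodeActions.ts (hypothetical file name); a "use node" file can only export actions.
import { action } from "./_generated/server";
import { createHash } from "node:crypto"; // Node built-in, available because of "use node"

export const nodeOnlyAction = action({
  handler: async () => {
    // Any Node-only work (node packages, built-ins, etc.) belongs in files like this one.
    return createHash("sha256").update("hello").digest("hex");
  },
});
```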
265 | 266 | 267 | -------------------------------------------------------------------------------- /commonjs.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.json", 3 | "include": ["src/**/*"], 4 | "exclude": ["src/**/*.test.*", "../src/package.json"], 5 | "compilerOptions": { 6 | "outDir": "./dist/commonjs" 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /eslint.config.js: -------------------------------------------------------------------------------- 1 | import globals from "globals"; 2 | import pluginJs from "@eslint/js"; 3 | import tseslint from "typescript-eslint"; 4 | 5 | export default [ 6 | { files: ["src/**/*.{js,mjs,cjs,ts,tsx}"] }, 7 | { 8 | ignores: [ 9 | "dist/**", 10 | "eslint.config.js", 11 | "**/_generated/", 12 | "node10stubs.mjs", 13 | ], 14 | }, 15 | { 16 | languageOptions: { 17 | globals: globals.worker, 18 | parser: tseslint.parser, 19 | 20 | parserOptions: { 21 | project: true, 22 | tsconfigRootDir: ".", 23 | }, 24 | }, 25 | }, 26 | pluginJs.configs.recommended, 27 | ...tseslint.configs.recommended, 28 | { 29 | rules: { 30 | "@typescript-eslint/no-floating-promises": "error", 31 | "eslint-comments/no-unused-disable": "off", 32 | 33 | // allow (_arg: number) => {} and const _foo = 1; 34 | "no-unused-vars": "off", 35 | "@typescript-eslint/no-unused-vars": [ 36 | "warn", 37 | { 38 | argsIgnorePattern: "^_", 39 | varsIgnorePattern: "^_", 40 | }, 41 | ], 42 | }, 43 | }, 44 | ]; 45 | -------------------------------------------------------------------------------- /esm.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.json", 3 | "include": ["src/**/*"], 4 | "exclude": ["src/**/*.test.*", "../src/package.json"], 5 | "compilerOptions": { 6 | "outDir": "./dist/esm" 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /example/.cursor/mcp.json: -------------------------------------------------------------------------------- 1 | { 2 | "mcpServers": { 3 | "mastra": { 4 | "command": "npx", 5 | "args": [ 6 | "-y", 7 | "@mastra/mcp-docs-server@latest" 8 | ] 9 | } 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /example/.gitignore: -------------------------------------------------------------------------------- 1 | !**/glob-import/dir/node_modules 2 | .DS_Store 3 | .idea 4 | *.cpuprofile 5 | *.local 6 | *.log 7 | /.vscode/ 8 | /docs/.vitepress/cache 9 | dist 10 | dist-ssr 11 | explorations 12 | node_modules 13 | playground-temp 14 | temp 15 | TODOs.md 16 | .eslintcache 17 | **/.mastra 18 | -------------------------------------------------------------------------------- /example/README.md: -------------------------------------------------------------------------------- 1 | # Example app 2 | 3 | Components need an app that uses them in order to run codegen. An example app is also useful 4 | for testing and documentation. 
5 | 6 | -------------------------------------------------------------------------------- /example/convex.json: -------------------------------------------------------------------------------- 1 | { 2 | "node": { 3 | "externalPackages": [ 4 | "@libsql/client" 5 | ] 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /example/convex/README.md: -------------------------------------------------------------------------------- 1 | # Welcome to your Convex functions directory! 2 | 3 | Write your Convex functions here. 4 | See https://docs.convex.dev/functions for more. 5 | 6 | A query function that takes two arguments looks like: 7 | 8 | ```ts 9 | // functions.js 10 | import { query } from "./_generated/server"; 11 | import { v } from "convex/values"; 12 | 13 | export const myQueryFunction = query({ 14 | // Validators for arguments. 15 | args: { 16 | first: v.number(), 17 | second: v.string(), 18 | }, 19 | 20 | // Function implementation. 21 | handler: async (ctx, args) => { 22 | // Read the database as many times as you need here. 23 | // See https://docs.convex.dev/database/reading-data. 24 | const documents = await ctx.db.query("tablename").collect(); 25 | 26 | // Arguments passed from the client are properties of the args object. 27 | console.log(args.first, args.second); 28 | 29 | // Write arbitrary JavaScript here: filter, aggregate, build derived data, 30 | // remove non-public properties, or create new objects. 31 | return documents; 32 | }, 33 | }); 34 | ``` 35 | 36 | Using this query function in a React component looks like: 37 | 38 | ```ts 39 | const data = useQuery(api.functions.myQueryFunction, { 40 | first: 10, 41 | second: "hello", 42 | }); 43 | ``` 44 | 45 | A mutation function looks like: 46 | 47 | ```ts 48 | // functions.js 49 | import { mutation } from "./_generated/server"; 50 | import { v } from "convex/values"; 51 | 52 | export const myMutationFunction = mutation({ 53 | // Validators for arguments. 54 | args: { 55 | first: v.string(), 56 | second: v.string(), 57 | }, 58 | 59 | // Function implementation. 60 | handler: async (ctx, args) => { 61 | // Insert or modify documents in the database here. 62 | // Mutations can also read from the database like queries. 63 | // See https://docs.convex.dev/database/writing-data. 64 | const message = { body: args.first, author: args.second }; 65 | const id = await ctx.db.insert("messages", message); 66 | 67 | // Optionally, return a value from your mutation. 68 | return await ctx.db.get(id); 69 | }, 70 | }); 71 | ``` 72 | 73 | Using this mutation function in a React component looks like: 74 | 75 | ```ts 76 | const mutation = useMutation(api.functions.myMutationFunction); 77 | function handleButtonPress() { 78 | // fire and forget, the most common way to use mutations 79 | mutation({ first: "Hello!", second: "me" }); 80 | // OR 81 | // use the result once the mutation has completed 82 | mutation({ first: "Hello!", second: "me" }).then((result) => 83 | console.log(result), 84 | ); 85 | } 86 | ``` 87 | 88 | Use the Convex CLI to push your functions to a deployment. See everything 89 | the Convex CLI can do by running `npx convex -h` in your project root 90 | directory. To learn more, launch the docs with `npx convex docs`. 91 | -------------------------------------------------------------------------------- /example/convex/_generated/api.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /** 3 | * Generated `api` utility. 
4 | * 5 | * THIS CODE IS AUTOMATICALLY GENERATED. 6 | * 7 | * To regenerate, run `npx convex dev`. 8 | * @module 9 | */ 10 | 11 | import { anyApi, componentsGeneric } from "convex/server"; 12 | 13 | /** 14 | * A utility for referencing Convex functions in your app's API. 15 | * 16 | * Usage: 17 | * ```js 18 | * const myFunctionReference = api.myModule.myFunction; 19 | * ``` 20 | */ 21 | export const api = anyApi; 22 | export const internal = anyApi; 23 | export const components = componentsGeneric(); 24 | -------------------------------------------------------------------------------- /example/convex/_generated/dataModel.d.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /** 3 | * Generated data model types. 4 | * 5 | * THIS CODE IS AUTOMATICALLY GENERATED. 6 | * 7 | * To regenerate, run `npx convex dev`. 8 | * @module 9 | */ 10 | 11 | import type { 12 | DataModelFromSchemaDefinition, 13 | DocumentByName, 14 | TableNamesInDataModel, 15 | SystemTableNames, 16 | } from "convex/server"; 17 | import type { GenericId } from "convex/values"; 18 | import schema from "../schema.js"; 19 | 20 | /** 21 | * The names of all of your Convex tables. 22 | */ 23 | export type TableNames = TableNamesInDataModel; 24 | 25 | /** 26 | * The type of a document stored in Convex. 27 | * 28 | * @typeParam TableName - A string literal type of the table name (like "users"). 29 | */ 30 | export type Doc = DocumentByName< 31 | DataModel, 32 | TableName 33 | >; 34 | 35 | /** 36 | * An identifier for a document in Convex. 37 | * 38 | * Convex documents are uniquely identified by their `Id`, which is accessible 39 | * on the `_id` field. To learn more, see [Document IDs](https://docs.convex.dev/using/document-ids). 40 | * 41 | * Documents can be loaded using `db.get(id)` in query and mutation functions. 42 | * 43 | * IDs are just strings at runtime, but this type can be used to distinguish them from other 44 | * strings when type checking. 45 | * 46 | * @typeParam TableName - A string literal type of the table name (like "users"). 47 | */ 48 | export type Id = 49 | GenericId; 50 | 51 | /** 52 | * A type describing your Convex data model. 53 | * 54 | * This type includes information about what tables you have, the type of 55 | * documents stored in those tables, and the indexes defined on them. 56 | * 57 | * This type is used to parameterize methods like `queryGeneric` and 58 | * `mutationGeneric` to make them type-safe. 59 | */ 60 | export type DataModel = DataModelFromSchemaDefinition; 61 | -------------------------------------------------------------------------------- /example/convex/_generated/server.d.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /** 3 | * Generated utilities for implementing server-side Convex query and mutation functions. 4 | * 5 | * THIS CODE IS AUTOMATICALLY GENERATED. 6 | * 7 | * To regenerate, run `npx convex dev`. 8 | * @module 9 | */ 10 | 11 | import { 12 | ActionBuilder, 13 | AnyComponents, 14 | HttpActionBuilder, 15 | MutationBuilder, 16 | QueryBuilder, 17 | GenericActionCtx, 18 | GenericMutationCtx, 19 | GenericQueryCtx, 20 | GenericDatabaseReader, 21 | GenericDatabaseWriter, 22 | FunctionReference, 23 | } from "convex/server"; 24 | import type { DataModel } from "./dataModel.js"; 25 | 26 | type GenericCtx = 27 | | GenericActionCtx 28 | | GenericMutationCtx 29 | | GenericQueryCtx; 30 | 31 | /** 32 | * Define a query in this Convex app's public API. 
33 | * 34 | * This function will be allowed to read your Convex database and will be accessible from the client. 35 | * 36 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument. 37 | * @returns The wrapped query. Include this as an `export` to name it and make it accessible. 38 | */ 39 | export declare const query: QueryBuilder; 40 | 41 | /** 42 | * Define a query that is only accessible from other Convex functions (but not from the client). 43 | * 44 | * This function will be allowed to read from your Convex database. It will not be accessible from the client. 45 | * 46 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument. 47 | * @returns The wrapped query. Include this as an `export` to name it and make it accessible. 48 | */ 49 | export declare const internalQuery: QueryBuilder; 50 | 51 | /** 52 | * Define a mutation in this Convex app's public API. 53 | * 54 | * This function will be allowed to modify your Convex database and will be accessible from the client. 55 | * 56 | * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument. 57 | * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible. 58 | */ 59 | export declare const mutation: MutationBuilder; 60 | 61 | /** 62 | * Define a mutation that is only accessible from other Convex functions (but not from the client). 63 | * 64 | * This function will be allowed to modify your Convex database. It will not be accessible from the client. 65 | * 66 | * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument. 67 | * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible. 68 | */ 69 | export declare const internalMutation: MutationBuilder; 70 | 71 | /** 72 | * Define an action in this Convex app's public API. 73 | * 74 | * An action is a function which can execute any JavaScript code, including non-deterministic 75 | * code and code with side-effects, like calling third-party services. 76 | * They can be run in Convex's JavaScript environment or in Node.js using the "use node" directive. 77 | * They can interact with the database indirectly by calling queries and mutations using the {@link ActionCtx}. 78 | * 79 | * @param func - The action. It receives an {@link ActionCtx} as its first argument. 80 | * @returns The wrapped action. Include this as an `export` to name it and make it accessible. 81 | */ 82 | export declare const action: ActionBuilder; 83 | 84 | /** 85 | * Define an action that is only accessible from other Convex functions (but not from the client). 86 | * 87 | * @param func - The function. It receives an {@link ActionCtx} as its first argument. 88 | * @returns The wrapped function. Include this as an `export` to name it and make it accessible. 89 | */ 90 | export declare const internalAction: ActionBuilder; 91 | 92 | /** 93 | * Define an HTTP action. 94 | * 95 | * This function will be used to respond to HTTP requests received by a Convex 96 | * deployment if the requests matches the path and method where this action 97 | * is routed. Be sure to route your action in `convex/http.js`. 98 | * 99 | * @param func - The function. It receives an {@link ActionCtx} as its first argument. 100 | * @returns The wrapped function. Import this function from `convex/http.js` and route it to hook it up. 
101 | */ 102 | export declare const httpAction: HttpActionBuilder; 103 | 104 | /** 105 | * A set of services for use within Convex query functions. 106 | * 107 | * The query context is passed as the first argument to any Convex query 108 | * function run on the server. 109 | * 110 | * This differs from the {@link MutationCtx} because all of the services are 111 | * read-only. 112 | */ 113 | export type QueryCtx = GenericQueryCtx; 114 | 115 | /** 116 | * A set of services for use within Convex mutation functions. 117 | * 118 | * The mutation context is passed as the first argument to any Convex mutation 119 | * function run on the server. 120 | */ 121 | export type MutationCtx = GenericMutationCtx; 122 | 123 | /** 124 | * A set of services for use within Convex action functions. 125 | * 126 | * The action context is passed as the first argument to any Convex action 127 | * function run on the server. 128 | */ 129 | export type ActionCtx = GenericActionCtx; 130 | 131 | /** 132 | * An interface to read from the database within Convex query functions. 133 | * 134 | * The two entry points are {@link DatabaseReader.get}, which fetches a single 135 | * document by its {@link Id}, or {@link DatabaseReader.query}, which starts 136 | * building a query. 137 | */ 138 | export type DatabaseReader = GenericDatabaseReader; 139 | 140 | /** 141 | * An interface to read from and write to the database within Convex mutation 142 | * functions. 143 | * 144 | * Convex guarantees that all writes within a single mutation are 145 | * executed atomically, so you never have to worry about partial writes leaving 146 | * your data in an inconsistent state. See [the Convex Guide](https://docs.convex.dev/understanding/convex-fundamentals/functions#atomicity-and-optimistic-concurrency-control) 147 | * for the guarantees Convex provides your functions. 148 | */ 149 | export type DatabaseWriter = GenericDatabaseWriter; 150 | -------------------------------------------------------------------------------- /example/convex/_generated/server.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /** 3 | * Generated utilities for implementing server-side Convex query and mutation functions. 4 | * 5 | * THIS CODE IS AUTOMATICALLY GENERATED. 6 | * 7 | * To regenerate, run `npx convex dev`. 8 | * @module 9 | */ 10 | 11 | import { 12 | actionGeneric, 13 | httpActionGeneric, 14 | queryGeneric, 15 | mutationGeneric, 16 | internalActionGeneric, 17 | internalMutationGeneric, 18 | internalQueryGeneric, 19 | componentsGeneric, 20 | } from "convex/server"; 21 | 22 | /** 23 | * Define a query in this Convex app's public API. 24 | * 25 | * This function will be allowed to read your Convex database and will be accessible from the client. 26 | * 27 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument. 28 | * @returns The wrapped query. Include this as an `export` to name it and make it accessible. 29 | */ 30 | export const query = queryGeneric; 31 | 32 | /** 33 | * Define a query that is only accessible from other Convex functions (but not from the client). 34 | * 35 | * This function will be allowed to read from your Convex database. It will not be accessible from the client. 36 | * 37 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument. 38 | * @returns The wrapped query. Include this as an `export` to name it and make it accessible. 
39 | */ 40 | export const internalQuery = internalQueryGeneric; 41 | 42 | /** 43 | * Define a mutation in this Convex app's public API. 44 | * 45 | * This function will be allowed to modify your Convex database and will be accessible from the client. 46 | * 47 | * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument. 48 | * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible. 49 | */ 50 | export const mutation = mutationGeneric; 51 | 52 | /** 53 | * Define a mutation that is only accessible from other Convex functions (but not from the client). 54 | * 55 | * This function will be allowed to modify your Convex database. It will not be accessible from the client. 56 | * 57 | * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument. 58 | * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible. 59 | */ 60 | export const internalMutation = internalMutationGeneric; 61 | 62 | /** 63 | * Define an action in this Convex app's public API. 64 | * 65 | * An action is a function which can execute any JavaScript code, including non-deterministic 66 | * code and code with side-effects, like calling third-party services. 67 | * They can be run in Convex's JavaScript environment or in Node.js using the "use node" directive. 68 | * They can interact with the database indirectly by calling queries and mutations using the {@link ActionCtx}. 69 | * 70 | * @param func - The action. It receives an {@link ActionCtx} as its first argument. 71 | * @returns The wrapped action. Include this as an `export` to name it and make it accessible. 72 | */ 73 | export const action = actionGeneric; 74 | 75 | /** 76 | * Define an action that is only accessible from other Convex functions (but not from the client). 77 | * 78 | * @param func - The function. It receives an {@link ActionCtx} as its first argument. 79 | * @returns The wrapped function. Include this as an `export` to name it and make it accessible. 80 | */ 81 | export const internalAction = internalActionGeneric; 82 | 83 | /** 84 | * Define a Convex HTTP action. 85 | * 86 | * @param func - The function. It receives an {@link ActionCtx} as its first argument, and a `Request` object 87 | * as its second. 88 | * @returns The wrapped endpoint function. Route a URL path to this function in `convex/http.js`. 
89 | */ 90 | export const httpAction = httpActionGeneric; 91 | -------------------------------------------------------------------------------- /example/convex/_libsql_workaround.ts: -------------------------------------------------------------------------------- 1 | "use node"; 2 | export * as _ from "@libsql/client"; 3 | -------------------------------------------------------------------------------- /example/convex/convex.config.ts: -------------------------------------------------------------------------------- 1 | import { defineApp } from "convex/server"; 2 | import mastra from "@convex-dev/mastra/convex.config"; 3 | 4 | const app = defineApp(); 5 | app.use(mastra); 6 | 7 | export default app; 8 | -------------------------------------------------------------------------------- /example/convex/example.ts: -------------------------------------------------------------------------------- 1 | "use node"; 2 | import { action, internalAction } from "./_generated/server"; 3 | import { components } from "./_generated/api"; 4 | import { Agent, createStep, Mastra, Workflow } from "@mastra/core"; 5 | // import { Memory } from "@mastra/memory"; 6 | import { openai } from "@ai-sdk/openai"; 7 | import { z } from "zod"; 8 | import { weatherAgent, outfitAgent } from "../src/mastra/agents"; 9 | import { weatherToOutfitWorkflow } from "../src/mastra/workflows"; 10 | import { ConvexStorage, ConvexVector } from "@convex-dev/mastra"; 11 | 12 | // TODO: is this still necessary? 13 | import crypto from "crypto"; 14 | import { v } from "convex/values"; 15 | // ts-ignore 16 | globalThis.crypto = crypto as any; 17 | 18 | const storage = new ConvexStorage(components.mastra); 19 | const vector = new ConvexVector(components.mastra); 20 | 21 | const agent = new Agent({ 22 | // memory: new Memory({ storage, vector }), 23 | name: "summarizer", 24 | instructions: "You are a helpful assistant that summarizes text.", 25 | model: openai("gpt-4o"), 26 | }); 27 | 28 | const summarize = createStep({ 29 | id: "summarize", 30 | inputSchema: z.object({ 31 | text: z.string(), 32 | guidance: z.string().optional(), 33 | }), 34 | async execute({ context, suspend, resourceId, threadId }) { 35 | // const console = createLogger(context.logLevel); 36 | console.debug({ threadId, resourceId, context }); 37 | const guidance = context.inputData.text; 38 | const result = await agent.generate( 39 | context.inputData.text + guidance 40 | ? 
`\n\nHere is some guidance: ${guidance}` 41 | : "" 42 | ); 43 | if (!guidance) { 44 | await suspend({ 45 | ask: "Does this look good?", 46 | result: result.response.messages, 47 | }); 48 | } 49 | return result.text; 50 | }, 51 | outputSchema: z.string(), 52 | }); 53 | const A = createStep({ 54 | id: "A", 55 | execute: async ({ context, suspend }) => { 56 | console.info("A"); 57 | return "A"; 58 | }, 59 | }); 60 | const B = createStep({ 61 | id: "B", 62 | execute: async ({ context }) => { 63 | console.info("B"); 64 | return "B"; 65 | }, 66 | }); 67 | const C = createStep({ 68 | id: "C", 69 | execute: async ({ context }) => { 70 | console.info("C"); 71 | return "C"; 72 | }, 73 | }); 74 | const D = createStep({ 75 | id: "D", 76 | execute: async ({ context }) => { 77 | console.info("D"); 78 | return "D"; 79 | }, 80 | }); 81 | const E = createStep({ 82 | id: "E", 83 | execute: async ({ context }) => { 84 | console.info("E"); 85 | return "E"; 86 | }, 87 | }); 88 | const Counter = createStep({ 89 | id: "Counter", 90 | execute: async ({ context }) => { 91 | const previous = context.getStepResult("Counter"); 92 | return { count: (previous?.count ?? 0) + 1 }; 93 | }, 94 | outputSchema: z.object({ 95 | count: z.number(), 96 | }), 97 | }); 98 | const SuspendsUntilHumanInput = createStep({ 99 | id: "SuspendsUntilHumanInput", 100 | inputSchema: z.object({ 101 | human: z.string().optional(), 102 | }), 103 | execute: async ({ context, suspend }) => { 104 | console.info("SuspendsUntilHumanInput"); 105 | if (context.inputData.human) { 106 | console.info("Human message", context.inputData.human); 107 | } else { 108 | console.info("Suspending until human input"); 109 | await suspend({ ask: "Can you help?" }); 110 | } 111 | return "SuspendsUntilHumanInput"; 112 | }, 113 | }); 114 | const RetryOnce = createStep({ 115 | id: "RetryOnce", 116 | execute: async ({ context }) => { 117 | const previous = context.getStepResult("RetryOnce"); 118 | if (previous) { 119 | return { status: "success" }; 120 | } 121 | return { status: "retry" }; 122 | }, 123 | }); 124 | const FailsOnSecondRun = createStep({ 125 | id: "FailsOnSecondRun", 126 | execute: async ({ context }) => { 127 | const previous = context.getStepResult("FailsOnSecondRun"); 128 | console.info("FailsOnSecondRun", previous); 129 | if (previous) throw new Error("FailsOnSecondRun already ran"); 130 | return (previous ?? 
0) + 1; 131 | }, 132 | }); 133 | const Fail = createStep({ 134 | id: "Fail", 135 | execute: async ({ context }) => { 136 | console.info("Fail"); 137 | throw new Error("Fail"); 138 | }, 139 | }); 140 | const workflow = new Workflow({ 141 | name: "workflow", 142 | triggerSchema: z.object({ 143 | text: z.string(), 144 | nested: z.object({ 145 | text: z.string(), 146 | }), 147 | }), 148 | }) 149 | .step(A) 150 | .then(Counter, { 151 | when: { 152 | ref: { 153 | step: A, 154 | path: ".", 155 | }, 156 | query: { 157 | $eq: "A", 158 | }, 159 | }, 160 | }) 161 | // .if(async ({ context }) => context.getStepResult("A") === "A") 162 | // .then(B) 163 | // .step(Fail) 164 | // .after([A, Fail]) 165 | // .step(C) 166 | // .after(A) 167 | .step(B) 168 | .then(C, { 169 | when: { 170 | ref: { 171 | step: { id: "B" }, 172 | path: "status", 173 | }, 174 | query: { 175 | $eq: "success", 176 | }, 177 | }, 178 | }) 179 | .after([A, C]) 180 | .step(D, { 181 | when: { 182 | "B.status": "success", 183 | }, 184 | }) 185 | .then(Counter) 186 | .after(B) 187 | // skip 188 | .step(Fail, { 189 | when: { "RetryOnce.status": "retry" }, 190 | }) 191 | .step(RetryOnce) 192 | // .until(async ({ context }) => context.getStepResult("Counter").count >= 5, Counter) 193 | .step(E, { 194 | when: { 195 | ref: { 196 | step: { id: "Counter" }, 197 | path: "count", 198 | }, 199 | query: { $lt: 5 }, 200 | }, 201 | }) 202 | .step(RetryOnce, { 203 | when: { 204 | and: [ 205 | { 206 | ref: { 207 | step: { id: "Counter" }, 208 | path: "status", 209 | }, 210 | query: { 211 | $eq: "success", 212 | }, 213 | }, 214 | { 215 | ref: { 216 | step: { id: "Counter" }, 217 | path: "count", 218 | }, 219 | query: { 220 | $eq: 5, 221 | }, 222 | }, 223 | ], 224 | }, 225 | }) 226 | // .step(D); 227 | // .then(D); 228 | // .after(B) 229 | // .step(A, { 230 | // // when: { "B.status": "retry" }, 231 | // when: async ({ context }) => context.getStepResult("B") === "foo", 232 | // }); 233 | // .after([A, B]) 234 | // .step(A) 235 | // .then(B) 236 | // .while(async ({ context }) => context.inputData.text === "B", A) 237 | // .then(C) 238 | // .until(async () => true, D) 239 | // .after(B) 240 | // .step(D) 241 | // .then(E); 242 | // .then(C); 243 | // when: ({ context }) => context.inputData.text === "B", 244 | // }).step(C, { 245 | // when: ({ context }) => context.inputData.text === "C", 246 | // }).step(D, { 247 | // when: ({ context }) => context.inputData.text === "D", 248 | // }) 249 | 250 | .step(summarize, { 251 | variables: { 252 | text: { step: "trigger", path: "nested.text" }, 253 | }, 254 | }) 255 | .commit(); 256 | 257 | // Can run this not in node: 258 | const mastra = new Mastra({ 259 | agents: { 260 | weatherAgent, 261 | outfitAgent, 262 | }, 263 | storage, 264 | workflows: { 265 | workflow, 266 | weatherToOutfitWorkflow, 267 | }, 268 | }); 269 | type M = ReturnType>; 270 | 271 | export const startWorkflow = internalAction({ 272 | args: { 273 | runId: v.optional(v.string()), 274 | name: v.optional( 275 | v.union(v.literal("workflow"), v.literal("weatherToOutfitWorkflow")) 276 | ), 277 | initialData: v.optional(v.any()), 278 | }, 279 | handler: async (ctx, args) => { 280 | storage.ctx = ctx; 281 | const w = mastra.getWorkflow(args.name ?? 
"workflow"); 282 | const { start } = w.createRun({ runId: args.runId }); 283 | const result = await start(args.initialData); 284 | // Save the result somewhere 285 | return result.results; 286 | }, 287 | }); 288 | 289 | export const t = action({ 290 | async handler(ctx) { 291 | // console.debug({ 292 | // stepGraph: workflow.stepGraph, 293 | // stepSubscriberGraph: workflow.stepSubscriberGraph, 294 | // serializedStepGraph: JSON.stringify( 295 | // workflow.serializedStepGraph, 296 | // null, 297 | // 2 298 | // ), 299 | // serializedStepSubscriberGraph: JSON.stringify( 300 | // workflow.serializedStepSubscriberGraph, 301 | // null, 302 | // 2 303 | // ), 304 | // }); 305 | // return; 306 | // const { runId, start, resume } = workflow.createRun(); 307 | const w = mastra.getWorkflow("workflow"); 308 | const { runId, start, resume } = w.createRun(); 309 | return runId; 310 | // const afterResume = await resume({ 311 | // stepId: "A", 312 | // context: { 313 | // human: "Here is a human message", 314 | // }, 315 | // }); 316 | // console.debug("After resume", afterResume); 317 | // return JSON.stringify(result, null, 2); 318 | }, 319 | }); 320 | -------------------------------------------------------------------------------- /example/convex/schema.ts: -------------------------------------------------------------------------------- 1 | import { defineSchema } from "convex/server"; 2 | 3 | export default defineSchema( 4 | { 5 | // Any tables used by the example app go here. 6 | }, 7 | ); 8 | -------------------------------------------------------------------------------- /example/convex/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | /* This TypeScript project config describes the environment that 3 | * Convex functions run in and is used to typecheck them. 4 | * You can modify it, but some settings required to use Convex. 5 | */ 6 | "compilerOptions": { 7 | /* These settings are not required by Convex and can be modified. */ 8 | "allowJs": true, 9 | "strict": true, 10 | "skipLibCheck": true, 11 | 12 | /* These compiler options are required by Convex */ 13 | "target": "ESNext", 14 | "lib": ["ES2021", "dom", "ESNext.Array"], 15 | "forceConsistentCasingInFileNames": true, 16 | "allowSyntheticDefaultImports": true, 17 | "module": "ESNext", 18 | "moduleResolution": "Bundler", 19 | "isolatedModules": true, 20 | "noEmit": true, 21 | 22 | /* This should only be used in this example. Real apps should not attempt 23 | * to compile TypeScript because differences between tsconfig.json files can 24 | * cause the code to be compiled differently. 
25 | */ 26 | "customConditions": ["@convex-dev/component-source"] 27 | }, 28 | "include": ["./**/*"], 29 | "exclude": ["./_generated"] 30 | } 31 | -------------------------------------------------------------------------------- /example/convex/v8Runtime.ts: -------------------------------------------------------------------------------- 1 | import { query } from "./_generated/server"; 2 | import { components } from "./_generated/api"; 3 | import { v } from "convex/values"; 4 | import { 5 | mapSerializedToMastra, 6 | TABLE_WORKFLOW_SNAPSHOT, 7 | } from "@convex-dev/mastra/mapping"; 8 | 9 | export const getStatus = query({ 10 | args: { runId: v.string() }, 11 | handler: async (ctx, args) => { 12 | const doc = await ctx.runQuery( 13 | components.mastra.storage.storage.loadSnapshot, 14 | { 15 | workflowName: "weatherToOutfitWorkflow", 16 | runId: args.runId, 17 | } 18 | ); 19 | if (!doc) { 20 | return null; 21 | } 22 | const snapshot = mapSerializedToMastra(TABLE_WORKFLOW_SNAPSHOT, doc); 23 | const { childStates, activePaths, suspendedSteps } = snapshot.snapshot; 24 | return { childStates, activePaths, suspendedSteps }; 25 | }, 26 | }); 27 | -------------------------------------------------------------------------------- /example/eslint.config.js: -------------------------------------------------------------------------------- 1 | import js from "@eslint/js"; 2 | import globals from "globals"; 3 | import reactHooks from "eslint-plugin-react-hooks"; 4 | import reactRefresh from "eslint-plugin-react-refresh"; 5 | import tseslint from "typescript-eslint"; 6 | 7 | export default tseslint.config( 8 | { ignores: ["dist"] }, 9 | { 10 | extends: [js.configs.recommended, ...tseslint.configs.recommended], 11 | files: ["**/*.{ts,tsx}"], 12 | ignores: ["convex"], 13 | languageOptions: { 14 | ecmaVersion: 2020, 15 | globals: globals.browser, 16 | }, 17 | plugins: { 18 | "react-hooks": reactHooks, 19 | "react-refresh": reactRefresh, 20 | }, 21 | rules: { 22 | ...reactHooks.configs.recommended.rules, 23 | // Allow explicit `any`s 24 | "@typescript-eslint/no-explicit-any": "off", 25 | "react-refresh/only-export-components": [ 26 | "warn", 27 | { allowConstantExport: true }, 28 | ], 29 | 30 | "no-unused-vars": "off", 31 | "@typescript-eslint/no-unused-vars": [ 32 | "warn", 33 | { 34 | argsIgnorePattern: "^_", 35 | varsIgnorePattern: "^_", 36 | }, 37 | ], 38 | }, 39 | } 40 | ); 41 | -------------------------------------------------------------------------------- /example/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Mastra Component Example 7 | 8 | 9 |
10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /example/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "uses-component", 3 | "private": true, 4 | "type": "module", 5 | "version": "0.0.0", 6 | "scripts": { 7 | "dev": "convex dev --live-component-sources --typecheck-components", 8 | "dev:yolo": "convex dev --local --live-component-sources --typecheck disable", 9 | "dev:mastra": "mastra dev", 10 | "dev:frontend": "vite", 11 | "logs": "convex logs", 12 | "lint": "tsc -p convex && eslint convex" 13 | }, 14 | "dependencies": { 15 | "@ai-sdk/openai": "^1.3.1", 16 | "@convex-dev/mastra": "file:..", 17 | "@mastra/core": "file:../node_modules/@mastra/core", 18 | "@mastra/memory": "^0.2.6", 19 | "@types/node": "^22.13.13", 20 | "convex": "file:../node_modules/convex", 21 | "react": "^18.3.1", 22 | "react-dom": "^18.3.1", 23 | "zod": "^3.24.2" 24 | }, 25 | "devDependencies": { 26 | "@eslint/eslintrc": "^3.1.0", 27 | "@eslint/js": "^9.9.0", 28 | "@libsql/client": "^0.15.0", 29 | "@types/react": "^18.3.3", 30 | "@types/react-dom": "^18.3.0", 31 | "@vitejs/plugin-react": "^4.3.1", 32 | "eslint": "^9.9.0", 33 | "eslint-plugin-react-hooks": "^5.1.0-rc.0", 34 | "eslint-plugin-react-refresh": "^0.4.9", 35 | "globals": "^15.9.0", 36 | "mastra": "^0.4.3", 37 | "typescript": "^5.5.0", 38 | "typescript-eslint": "^8.0.1", 39 | "vite": "^6.2.3" 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /example/src/App.css: -------------------------------------------------------------------------------- 1 | #root { 2 | max-width: 1280px; 3 | margin: 0 auto; 4 | padding: 2rem; 5 | text-align: center; 6 | } 7 | 8 | .logo { 9 | height: 6em; 10 | padding: 1.5em; 11 | will-change: filter; 12 | transition: filter 300ms; 13 | } 14 | .logo:hover { 15 | filter: drop-shadow(0 0 2em #646cffaa); 16 | } 17 | .logo.react:hover { 18 | filter: drop-shadow(0 0 2em #61dafbaa); 19 | } 20 | 21 | @keyframes logo-spin { 22 | from { 23 | transform: rotate(0deg); 24 | } 25 | to { 26 | transform: rotate(360deg); 27 | } 28 | } 29 | 30 | @media (prefers-reduced-motion: no-preference) { 31 | a:nth-of-type(2) .logo { 32 | animation: logo-spin infinite 20s linear; 33 | } 34 | } 35 | 36 | .card { 37 | padding: 2em; 38 | } 39 | 40 | .read-the-docs { 41 | color: #888; 42 | } 43 | -------------------------------------------------------------------------------- /example/src/App.tsx: -------------------------------------------------------------------------------- 1 | import "./App.css"; 2 | import { useMutation, useQuery } from "convex/react"; 3 | import { api } from "../convex/_generated/api"; 4 | 5 | function App() { 6 | const count = useQuery(api.app.getCount); 7 | const addOne = useMutation(api.app.addOne); 8 | 9 | return ( 10 | <> 11 |

<h1>Convex Mastra Component Example</h1>

12 |
13 | 14 |

15 | <p>See example/convex/example.ts for all the ways to use 16 | this component</p>

18 |
19 | 20 | ); 21 | } 22 | 23 | export default App; 24 | -------------------------------------------------------------------------------- /example/src/index.css: -------------------------------------------------------------------------------- 1 | :root { 2 | font-family: Inter, system-ui, Avenir, Helvetica, Arial, sans-serif; 3 | line-height: 1.5; 4 | font-weight: 400; 5 | 6 | color-scheme: light dark; 7 | color: rgba(255, 255, 255, 0.87); 8 | background-color: #242424; 9 | 10 | font-synthesis: none; 11 | text-rendering: optimizeLegibility; 12 | -webkit-font-smoothing: antialiased; 13 | -moz-osx-font-smoothing: grayscale; 14 | } 15 | 16 | a { 17 | font-weight: 500; 18 | color: #646cff; 19 | text-decoration: inherit; 20 | } 21 | a:hover { 22 | color: #535bf2; 23 | } 24 | 25 | body { 26 | margin: 0; 27 | display: flex; 28 | place-items: center; 29 | min-width: 320px; 30 | min-height: 100vh; 31 | } 32 | 33 | h1 { 34 | font-size: 3.2em; 35 | line-height: 1.1; 36 | } 37 | 38 | button { 39 | border-radius: 8px; 40 | border: 1px solid transparent; 41 | padding: 0.6em 1.2em; 42 | font-size: 1em; 43 | font-weight: 500; 44 | font-family: inherit; 45 | background-color: #1a1a1a; 46 | cursor: pointer; 47 | transition: border-color 0.25s; 48 | } 49 | button:hover { 50 | border-color: #646cff; 51 | } 52 | button:focus, 53 | button:focus-visible { 54 | outline: 4px auto -webkit-focus-ring-color; 55 | } 56 | 57 | @media (prefers-color-scheme: light) { 58 | :root { 59 | color: #213547; 60 | background-color: #ffffff; 61 | } 62 | a:hover { 63 | color: #747bff; 64 | } 65 | button { 66 | background-color: #f9f9f9; 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /example/src/main.tsx: -------------------------------------------------------------------------------- 1 | import { StrictMode } from "react"; 2 | import { createRoot } from "react-dom/client"; 3 | import { ConvexProvider, ConvexReactClient } from "convex/react"; 4 | import App from "./App.tsx"; 5 | import "./index.css"; 6 | 7 | const address = import.meta.env.VITE_CONVEX_URL; 8 | 9 | const convex = new ConvexReactClient(address); 10 | 11 | createRoot(document.getElementById("root")!).render( 12 | 13 | 14 | 15 | 16 | 17 | ); 18 | -------------------------------------------------------------------------------- /example/src/mastra/agents/index.ts: -------------------------------------------------------------------------------- 1 | import { openai } from "@ai-sdk/openai"; 2 | import { Agent } from "@mastra/core/agent"; 3 | import { weatherTool } from "../tools"; 4 | 5 | export const weatherAgent = new Agent({ 6 | name: "Weather Agent", 7 | instructions: ` 8 | You are a helpful weather assistant that provides accurate weather information. 9 | 10 | Your primary function is to help users get weather details for specific locations. When responding: 11 | - Always ask for a location if none is provided 12 | - If giving a location with multiple parts (e.g. "New York, NY"), use the most relevant part (e.g. "New York") 13 | - Include relevant details like humidity, wind conditions, and precipitation 14 | - Keep responses concise but informative 15 | 16 | Use the weatherTool to fetch current weather data. 17 | `, 18 | model: openai("gpt-4o"), 19 | tools: { weatherTool }, 20 | }); 21 | 22 | export const outfitAgent = new Agent({ 23 | name: "Outfit Agent", 24 | instructions: ` 25 | You are a helpful outfit assistant that provides outfit recommendations 26 | based on the weather and local style. 
27 | `, 28 | model: openai("gpt-4o"), 29 | }); 30 | -------------------------------------------------------------------------------- /example/src/mastra/index.ts: -------------------------------------------------------------------------------- 1 | import { Mastra } from "@mastra/core"; 2 | import { createLogger } from "@mastra/core/logger"; 3 | 4 | import { weatherAgent } from "./agents"; 5 | import { weatherToOutfitWorkflow, whenTest } from "./workflows"; 6 | // import { ConvexStorage } from "@convex-dev/mastra/registry"; 7 | 8 | export const mastra = new Mastra({ 9 | agents: { weatherAgent }, 10 | workflows: { weatherToOutfitWorkflow, whenTest }, 11 | logger: createLogger({ 12 | name: "Mastra", 13 | level: "debug", 14 | }), 15 | // storage: new ConvexStorage(), 16 | }); 17 | -------------------------------------------------------------------------------- /example/src/mastra/tools/index.ts: -------------------------------------------------------------------------------- 1 | import { createTool } from "@mastra/core/tools"; 2 | import { z } from "zod"; 3 | 4 | interface GeocodingResponse { 5 | results: { 6 | latitude: number; 7 | longitude: number; 8 | name: string; 9 | }[]; 10 | } 11 | interface WeatherResponse { 12 | current: { 13 | time: string; 14 | temperature_2m: number; 15 | apparent_temperature: number; 16 | relative_humidity_2m: number; 17 | wind_speed_10m: number; 18 | wind_gusts_10m: number; 19 | weather_code: number; 20 | }; 21 | } 22 | 23 | export const weatherTool = createTool({ 24 | id: "get-weather", 25 | description: "Get current weather for a location", 26 | inputSchema: z.object({ 27 | location: z.string().describe("City name"), 28 | }), 29 | outputSchema: z.object({ 30 | temperature: z.number(), 31 | feelsLike: z.number(), 32 | humidity: z.number(), 33 | windSpeed: z.number(), 34 | windGust: z.number(), 35 | conditions: z.string(), 36 | location: z.string(), 37 | }), 38 | execute: async ({ context }) => { 39 | return await getWeather(context.location); 40 | }, 41 | }); 42 | 43 | const getWeather = async (location: string) => { 44 | const geocodingUrl = `https://geocoding-api.open-meteo.com/v1/search?name=${encodeURIComponent(location)}&count=1`; 45 | const geocodingResponse = await fetch(geocodingUrl); 46 | const geocodingData = (await geocodingResponse.json()) as GeocodingResponse; 47 | 48 | if (!geocodingData.results?.[0]) { 49 | throw new Error(`Location '${location}' not found`); 50 | } 51 | 52 | const { latitude, longitude, name } = geocodingData.results[0]; 53 | 54 | const weatherUrl = `https://api.open-meteo.com/v1/forecast?latitude=${latitude}&longitude=${longitude}¤t=temperature_2m,apparent_temperature,relative_humidity_2m,wind_speed_10m,wind_gusts_10m,weather_code`; 55 | 56 | const response = await fetch(weatherUrl); 57 | const data = (await response.json()) as WeatherResponse; 58 | 59 | return { 60 | temperature: data.current.temperature_2m, 61 | feelsLike: data.current.apparent_temperature, 62 | humidity: data.current.relative_humidity_2m, 63 | windSpeed: data.current.wind_speed_10m, 64 | windGust: data.current.wind_gusts_10m, 65 | conditions: getWeatherCondition(data.current.weather_code), 66 | location: name, 67 | }; 68 | }; 69 | 70 | function getWeatherCondition(code: number): string { 71 | const conditions: Record = { 72 | 0: "Clear sky", 73 | 1: "Mainly clear", 74 | 2: "Partly cloudy", 75 | 3: "Overcast", 76 | 45: "Foggy", 77 | 48: "Depositing rime fog", 78 | 51: "Light drizzle", 79 | 53: "Moderate drizzle", 80 | 55: "Dense drizzle", 81 | 56: "Light 
freezing drizzle", 82 | 57: "Dense freezing drizzle", 83 | 61: "Slight rain", 84 | 63: "Moderate rain", 85 | 65: "Heavy rain", 86 | 66: "Light freezing rain", 87 | 67: "Heavy freezing rain", 88 | 71: "Slight snow fall", 89 | 73: "Moderate snow fall", 90 | 75: "Heavy snow fall", 91 | 77: "Snow grains", 92 | 80: "Slight rain showers", 93 | 81: "Moderate rain showers", 94 | 82: "Violent rain showers", 95 | 85: "Slight snow showers", 96 | 86: "Heavy snow showers", 97 | 95: "Thunderstorm", 98 | 96: "Thunderstorm with slight hail", 99 | 99: "Thunderstorm with heavy hail", 100 | }; 101 | return conditions[code] || "Unknown"; 102 | } 103 | -------------------------------------------------------------------------------- /example/src/mastra/workflows/index.ts: -------------------------------------------------------------------------------- 1 | import { Agent, createStep, Mastra, Workflow } from "@mastra/core"; 2 | import { z } from "zod"; 3 | import { outfitAgent, weatherAgent } from "../agents"; 4 | 5 | export const getWeather = createStep({ 6 | id: "getWeather", 7 | description: "Gets the weather for a location", 8 | inputSchema: z.object({ 9 | location: z.string(), 10 | }), 11 | outputSchema: z.object({ 12 | weather: z.string(), 13 | }), 14 | execute: async ({ context, suspend }) => { 15 | const weather = await weatherAgent.generate( 16 | `What's the weather in ${context.inputData.location}?` 17 | ); 18 | return { weather: weather.text }; 19 | }, 20 | }); 21 | 22 | export const getOutfit = createStep({ 23 | id: "getOutfit", 24 | description: "Gets the outfit for a location", 25 | inputSchema: z.object({ 26 | location: z.string(), 27 | weather: z.string(), 28 | }), 29 | outputSchema: z.object({ 30 | outfit: z.string(), 31 | }), 32 | execute: async ({ context, suspend, resourceId, threadId, runId }) => { 33 | const outfit = await outfitAgent.generate([ 34 | { 35 | role: "user", 36 | content: `What's the outfit for ${context.inputData.weather} in ${context.inputData.location}?`, 37 | }, 38 | ]); 39 | return { outfit: outfit.text }; 40 | }, 41 | }); 42 | 43 | export const refineOutfit = createStep({ 44 | id: "refineOutfit", 45 | description: "Refines the outfit", 46 | inputSchema: z.object({ 47 | outfit: z.string(), 48 | refinement: z.union([z.string(), z.literal(null)]).optional(), 49 | }), 50 | async execute({ context, suspend, resourceId, threadId, runId }) { 51 | const previous = context.getStepResult("refineOutfit"); 52 | if (!previous) { 53 | console.log("suspending", context.inputData.outfit); 54 | await suspend({ 55 | ask: `Do you want to change anything?`, 56 | outfit: context.inputData.outfit, 57 | }); 58 | return { outfit: context.inputData.outfit }; 59 | } 60 | if ( 61 | !context.inputData.refinement || 62 | context.inputData.refinement.toLowerCase().startsWith("no ") 63 | ) { 64 | return { outfit: previous.outfit }; 65 | } 66 | console.log("refining outfit", previous.outfit, context); 67 | // TODO: use memory for full history 68 | const refinement = await outfitAgent.generate([ 69 | { 70 | role: "user", 71 | content: `What's the outfit for ${context.inputData.weather} in ${context.inputData.location}?`, 72 | }, 73 | { 74 | role: "assistant", 75 | content: previous.outfit, 76 | }, 77 | { 78 | role: "user", 79 | content: context.inputData.refinement, 80 | }, 81 | ]); 82 | await suspend({ ask: `How about this?`, outfit: refinement.text }); 83 | return { outfit: refinement.text }; 84 | }, 85 | outputSchema: z.object({ 86 | outfit: z.string(), 87 | }), 88 | }); 89 | 90 | export const 
weatherToOutfitWorkflow = new Workflow({ 91 | name: "weatherToOutfit", 92 | triggerSchema: z.object({ 93 | location: z.string(), 94 | }), 95 | }) 96 | .step(getWeather, { 97 | variables: { 98 | location: { 99 | step: "trigger", 100 | path: "location", 101 | }, 102 | }, 103 | }) 104 | .then(getOutfit, { 105 | variables: { 106 | location: { 107 | step: "trigger", 108 | path: "location", 109 | }, 110 | weather: { 111 | step: getWeather as any, 112 | path: "weather", 113 | }, 114 | }, 115 | }) 116 | .then(refineOutfit, { 117 | variables: { 118 | outfit: { 119 | step: getOutfit as any, 120 | path: "outfit", 121 | }, 122 | refinement: { 123 | step: refineOutfit as any, 124 | path: "refinement", 125 | }, 126 | }, 127 | }); 128 | const A = createStep({ 129 | id: "A", 130 | execute: async ({ context, suspend }) => { 131 | console.info("A"); 132 | return "A"; 133 | }, 134 | }); 135 | const B = createStep({ 136 | id: "B", 137 | execute: async ({ context }) => { 138 | console.info("B"); 139 | return "B"; 140 | }, 141 | }); 142 | const C = createStep({ 143 | id: "C", 144 | execute: async ({ context }) => { 145 | console.info("C"); 146 | return "C"; 147 | }, 148 | }); 149 | const D = createStep({ 150 | id: "D", 151 | execute: async ({ context }) => { 152 | console.info("D"); 153 | return "D"; 154 | }, 155 | }); 156 | const E = createStep({ 157 | id: "E", 158 | execute: async ({ context }) => { 159 | console.info("E"); 160 | return "E"; 161 | }, 162 | }); 163 | const Counter = createStep({ 164 | id: "Counter", 165 | execute: async ({ context }) => { 166 | const previous = context.getStepResult("Counter"); 167 | return { count: (previous?.count ?? 0) + 1 }; 168 | }, 169 | outputSchema: z.object({ 170 | count: z.number(), 171 | }), 172 | }); 173 | const SuspendsUntilHumanInput = createStep({ 174 | id: "SuspendsUntilHumanInput", 175 | inputSchema: z.object({ 176 | human: z.string().optional(), 177 | }), 178 | execute: async ({ context, suspend }) => { 179 | console.info("SuspendsUntilHumanInput"); 180 | if (context.inputData.human) { 181 | console.info("Human message", context.inputData.human); 182 | } else { 183 | console.info("Suspending until human input"); 184 | await suspend({ ask: "Can you help?" }); 185 | } 186 | return "SuspendsUntilHumanInput"; 187 | }, 188 | }); 189 | const RetryOnce = createStep({ 190 | id: "RetryOnce", 191 | execute: async ({ context }) => { 192 | const previous = context.getStepResult("RetryOnce"); 193 | if (previous) { 194 | return { status: "success" }; 195 | } 196 | return { status: "retry" }; 197 | }, 198 | }); 199 | const FailsOnSecondRun = createStep({ 200 | id: "FailsOnSecondRun", 201 | execute: async ({ context }) => { 202 | const previous = context.getStepResult("FailsOnSecondRun"); 203 | console.info("FailsOnSecondRun", previous); 204 | if (previous) throw new Error("FailsOnSecondRun already ran"); 205 | return (previous ?? 
0) + 1; 206 | }, 207 | }); 208 | const Fail = createStep({ 209 | id: "Fail", 210 | execute: async ({ context }) => { 211 | console.info("Fail"); 212 | throw new Error("Fail"); 213 | }, 214 | }); 215 | 216 | export const whenTest = new Workflow({ 217 | name: "whenTest", 218 | triggerSchema: z.object({ 219 | text: z.string(), 220 | nested: z.object({ 221 | text: z.string(), 222 | }), 223 | }), 224 | }) 225 | .step(A) 226 | .then(Counter) 227 | // .if(async ({ context }) => context.getStepResult("A") === "A") 228 | // .then(B) 229 | // .step(Fail) 230 | // .after([A, Fail]) 231 | // .step(C) 232 | .after(A) 233 | .step(B, { 234 | when: { 235 | "A.status": "success", 236 | // ref: { 237 | // step: A, 238 | // path: ".", 239 | // }, 240 | // query: { 241 | // $eq: "A", 242 | // }, 243 | }, 244 | }) 245 | // .then(C, { 246 | // when: { 247 | // ref: { 248 | // step: { id: "B" }, 249 | // path: "status", 250 | // }, 251 | // query: { 252 | // $eq: "success", 253 | // }, 254 | // }, 255 | // }) 256 | // .after([A, C]) 257 | // .step(D, { 258 | // when: { 259 | // "B.status": "success", 260 | // }, 261 | // }) 262 | // .then(Counter) 263 | // .after(B) 264 | // // skip 265 | // .step(Fail, { 266 | // when: { "RetryOnce.status": "retry" }, 267 | // }) 268 | // .until(async ({ context }) => context.getStepResult("Counter") === 5, Counter) 269 | // .step(E, { 270 | // when: { 271 | // ref: { 272 | // step: { id: "Counter" }, 273 | // path: "count", 274 | // }, 275 | // query: { $lt: 5 }, 276 | // }, 277 | // }) 278 | // .step(RetryOnce, { 279 | // when: { 280 | // and: [ 281 | // { 282 | // ref: { 283 | // step: { id: "Counter" }, 284 | // path: "status", 285 | // }, 286 | // query: { 287 | // $eq: "success", 288 | // }, 289 | // }, 290 | // { 291 | // ref: { 292 | // step: { id: "Counter" }, 293 | // path: "count", 294 | // }, 295 | // query: { 296 | // $eq: 5, 297 | // }, 298 | // }, 299 | // ], 300 | // }, 301 | // }) 302 | .commit(); 303 | -------------------------------------------------------------------------------- /example/src/vite-env.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | -------------------------------------------------------------------------------- /example/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ESNext", 4 | "lib": ["DOM", "DOM.Iterable", "ESNext"], 5 | "skipLibCheck": false, 6 | "allowSyntheticDefaultImports": true, 7 | "strict": true, 8 | "forceConsistentCasingInFileNames": true, 9 | "module": "ESNext", 10 | "moduleResolution": "Bundler", 11 | "resolveJsonModule": true, 12 | "isolatedModules": true, 13 | "allowImportingTsExtensions": true, 14 | "noEmit": true, 15 | "jsx": "react-jsx", 16 | 17 | /* This should only be used in this example. Real apps should not attempt 18 | * to compile TypeScript because differences between tsconfig.json files can 19 | * cause the code to be compiled differently. 
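 * (The dev server applies the matching runtime setting in vite.config.ts,
 * shown below, via `resolve: { conditions: ["@convex-dev/component-source"] }`;
 * both point tooling at the component's TypeScript source instead of dist/.)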
20 | */ 21 | "customConditions": ["@convex-dev/component-source"] 22 | }, 23 | "include": ["./src", "vite.config.ts"] 24 | } 25 | -------------------------------------------------------------------------------- /example/vite.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from "vite"; 2 | import react from "@vitejs/plugin-react"; 3 | 4 | // https://vitejs.dev/config/ 5 | export default defineConfig({ 6 | plugins: [react()], 7 | resolve: { 8 | conditions: ["@convex-dev/component-source"], 9 | }, 10 | }); 11 | -------------------------------------------------------------------------------- /node10stubs.mjs: -------------------------------------------------------------------------------- 1 | import fs from "fs/promises"; 2 | import path from "path"; 3 | 4 | async function findPackageJson(directory) { 5 | const packagePath = path.join(directory, "package.json"); 6 | try { 7 | await fs.access(packagePath); 8 | return packagePath; 9 | } catch (error) { 10 | const parentDir = path.dirname(directory); 11 | if (parentDir === directory) { 12 | throw new Error("package.json not found"); 13 | } 14 | return findPackageJson(parentDir); 15 | } 16 | } 17 | 18 | async function processSubPackages(packageJsonPath, exports, cleanup = false) { 19 | const baseDir = path.dirname(packageJsonPath); 20 | 21 | for (const [subDir, _] of Object.entries(exports)) { 22 | // package.json is already right where Node10 resolution would expect it. 23 | if (subDir.endsWith("package.json")) continue; 24 | // No need for Node10 resolution for component.config.ts 25 | if (subDir.endsWith("convex.config.js")) continue; 26 | // . just works with Node10 resolution 27 | if (subDir === ".") continue; 28 | console.log(subDir); 29 | 30 | const newDir = path.join(baseDir, subDir); 31 | const newPackageJsonPath = path.join(newDir, "package.json"); 32 | 33 | if (cleanup) { 34 | try { 35 | await fs.rm(newDir, { recursive: true, force: true }); 36 | } catch (error) { 37 | console.error(`Failed to remove ${newDir}:`, error.message); 38 | } 39 | } else { 40 | const newPackageJson = { 41 | main: `../dist/commonjs/${subDir}/index.js`, 42 | module: `../dist/esm/${subDir}/index.js`, 43 | types: `../dist/commonjs/${subDir}/index.d.ts`, 44 | }; 45 | 46 | await fs.mkdir(newDir, { recursive: true }); 47 | await fs.writeFile( 48 | newPackageJsonPath, 49 | JSON.stringify(newPackageJson, null, 2), 50 | ); 51 | } 52 | } 53 | } 54 | 55 | async function main() { 56 | try { 57 | const isCleanup = process.argv.includes("--cleanup"); 58 | const isAddFiles = process.argv.includes("--addFiles"); 59 | const packageJsonPath = await findPackageJson(process.cwd()); 60 | const packageJson = JSON.parse(await fs.readFile(packageJsonPath, "utf-8")); 61 | 62 | if (!packageJson.exports) { 63 | throw new Error("exports not found in package.json"); 64 | } 65 | 66 | if (isAddFiles) { 67 | return; 68 | } 69 | 70 | await processSubPackages(packageJsonPath, packageJson.exports, isCleanup); 71 | 72 | if (isCleanup) { 73 | console.log( 74 | "Node10 module resolution compatibility stub directories removed.", 75 | ); 76 | } else { 77 | console.log( 78 | "Node10 module resolution compatibility stub directories created", 79 | ); 80 | } 81 | } catch (error) { 82 | console.error("Error:", error.message); 83 | } 84 | } 85 | 86 | main(); 87 | -------------------------------------------------------------------------------- /package.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "name": "@convex-dev/mastra", 3 | "description": "A mastra component for Convex.", 4 | "repository": "github:get-convex/mastra", 5 | "homepage": "https://github.com/get-convex/mastra#readme", 6 | "bugs": { 7 | "email": "support@convex.dev", 8 | "url": "https://github.com/get-convex/mastra/issues" 9 | }, 10 | "version": "0.0.1-alpha.8", 11 | "license": "Apache-2.0", 12 | "keywords": [ 13 | "convex", 14 | "component" 15 | ], 16 | "type": "module", 17 | "scripts": { 18 | "build": "npm run build:esm && npm run build:cjs", 19 | "build:esm": "tsc --project ./esm.json && echo '{\\n \"type\": \"module\"\\n}' > dist/esm/package.json", 20 | "build:cjs": "tsc --project ./commonjs.json && echo '{\\n \"type\": \"commonjs\"\\n}' > dist/commonjs/package.json", 21 | "dev": "cd example; npm run dev", 22 | "typecheck": "tsc --noEmit", 23 | "prepare": "npm run build", 24 | "prepack": "node node10stubs.mjs", 25 | "postpack": "node node10stubs.mjs --cleanup", 26 | "test": "vitest run", 27 | "test:debug": "vitest --inspect-brk --no-file-parallelism", 28 | "test:coverage": "vitest run --coverage --coverage.reporter=text" 29 | }, 30 | "files": [ 31 | "dist", 32 | "src", 33 | "react" 34 | ], 35 | "exports": { 36 | "./package.json": "./package.json", 37 | ".": { 38 | "import": { 39 | "@convex-dev/component-source": "./src/client/index.ts", 40 | "types": "./dist/esm/client/index.d.ts", 41 | "default": "./dist/esm/client/index.js" 42 | }, 43 | "require": { 44 | "@convex-dev/component-source": "./src/client/index.ts", 45 | "types": "./dist/commonjs/client/index.d.ts", 46 | "default": "./dist/commonjs/client/index.js" 47 | } 48 | }, 49 | "./client": { 50 | "import": { 51 | "@convex-dev/component-source": "./src/client/client.ts", 52 | "types": "./dist/esm/client/client.d.ts", 53 | "default": "./dist/esm/client/client.js" 54 | }, 55 | "require": { 56 | "@convex-dev/component-source": "./src/client/client.ts", 57 | "types": "./dist/commonjs/client/client.d.ts", 58 | "default": "./dist/commonjs/client/client.js" 59 | } 60 | }, 61 | "./mapping": { 62 | "import": { 63 | "@convex-dev/component-source": "./src/mapping/index.ts", 64 | "types": "./dist/esm/mapping/index.d.ts", 65 | "default": "./dist/esm/mapping/index.js" 66 | }, 67 | "require": { 68 | "@convex-dev/component-source": "./src/mapping/index.ts", 69 | "types": "./dist/commonjs/mapping/index.d.ts", 70 | "default": "./dist/commonjs/mapping/index.js" 71 | } 72 | }, 73 | "./react": { 74 | "import": { 75 | "@convex-dev/component-source": "./src/react/index.ts", 76 | "types": "./dist/esm/react/index.d.ts", 77 | "default": "./dist/esm/react/index.js" 78 | }, 79 | "require": { 80 | "@convex-dev/component-source": "./src/react/index.ts", 81 | "types": "./dist/commonjs/react/index.d.ts", 82 | "default": "./dist/commonjs/react/index.js" 83 | } 84 | }, 85 | "./convex.config": { 86 | "import": { 87 | "@convex-dev/component-source": "./src/component/convex.config.ts", 88 | "types": "./dist/esm/component/convex.config.d.ts", 89 | "default": "./dist/esm/component/convex.config.js" 90 | } 91 | } 92 | }, 93 | "peerDependencies": { 94 | "@mastra/core": "^0.7.0 || ^0.8.0", 95 | "ai": "^4.2.0", 96 | "convex": ">=1.21.0 <1.25.0", 97 | "convex-helpers": "^0.1.74" 98 | }, 99 | "devDependencies": { 100 | "@ai-sdk/openai": "^1.3.6", 101 | "@eslint/js": "^9.9.1", 102 | "@libsql/client": "^0.15.0", 103 | "@types/node": "^18.17.0", 104 | "convex-test": "^0.0.33", 105 | "eslint": "^9.9.1", 106 | 
"globals": "^15.9.0", 107 | "prettier": "3.2.5", 108 | "typescript": "^5.8.2", 109 | "typescript-eslint": "^8.4.0", 110 | "vitest": "^3.0.9" 111 | }, 112 | "main": "./dist/commonjs/client/index.js", 113 | "types": "./dist/commonjs/client/index.d.ts", 114 | "module": "./dist/esm/client/index.js" 115 | } 116 | -------------------------------------------------------------------------------- /src/ai/types.test.ts: -------------------------------------------------------------------------------- 1 | import { Infer } from "convex/values"; 2 | import { test } from "vitest"; 3 | import { 4 | SerializeUrlsAndUint8Arrays, 5 | vAssistantContent, 6 | vFilePart, 7 | vImagePart, 8 | vReasoningPart, 9 | vRedactedReasoningPart, 10 | vTextPart, 11 | vToolCallPart, 12 | vToolContent, 13 | } from "./types"; 14 | import { vUserContent } from "./types"; 15 | import { 16 | AssistantContent, 17 | FilePart, 18 | ImagePart, 19 | TextPart, 20 | ToolCallPart, 21 | ToolContent, 22 | UserContent, 23 | } from "ai"; 24 | 25 | // type assertion 26 | type OurUserContent = SerializeUrlsAndUint8Arrays; 27 | const _userContent: Infer = [] as OurUserContent; 28 | const _userContent2: OurUserContent = [] as Infer; 29 | 30 | type OurAssistantContent = SerializeUrlsAndUint8Arrays; 31 | const _assistantContent: Infer = 32 | [] as OurAssistantContent; 33 | const _assistantContent2: OurAssistantContent = [] as Infer< 34 | typeof vAssistantContent 35 | >; 36 | 37 | type OurToolContent = SerializeUrlsAndUint8Arrays; 38 | const _toolContent: Infer = [] as OurToolContent; 39 | const _toolContent2: OurToolContent = [] as Infer; 40 | 41 | // type assertion 42 | const _toolCallPart: Infer = {} as ToolCallPart; 43 | const _toolCallPart2: ToolCallPart = {} as Infer; 44 | 45 | // type assertion 46 | type OurTextPart = SerializeUrlsAndUint8Arrays; 47 | const _textPart: Infer = {} as OurTextPart; 48 | const _textPart2: OurTextPart = {} as Infer; 49 | 50 | // type assertion 51 | type OurImagePart = SerializeUrlsAndUint8Arrays; 52 | const _imagePart: Infer = {} as OurImagePart; 53 | const _imagePart2: OurImagePart = {} as Infer; 54 | 55 | // type assertion 56 | type OurFilePart = SerializeUrlsAndUint8Arrays; 57 | const _filePart: Infer = {} as OurFilePart; 58 | const _filePart2: OurFilePart = {} as Infer; 59 | 60 | // narrow to the type 61 | type ReasoningPart = AssistantContent[number] & { type: "reasoning" } & object; 62 | type OurReasoningPart = SerializeUrlsAndUint8Arrays; 63 | const _reasoningPart: Infer = {} as OurReasoningPart; 64 | const _reasoningPart2: OurReasoningPart = {} as Infer; 65 | 66 | // narrow to the type 67 | type RedactedReasoningPart = AssistantContent[number] & { 68 | type: "redacted-reasoning"; 69 | } & object; 70 | type OurRedactedReasoningPart = 71 | SerializeUrlsAndUint8Arrays; 72 | const _redactedReasoningPart: Infer = 73 | {} as OurRedactedReasoningPart; 74 | const _redactedReasoningPart2: OurRedactedReasoningPart = {} as Infer< 75 | typeof vRedactedReasoningPart 76 | >; 77 | 78 | test("noop", () => {}); 79 | -------------------------------------------------------------------------------- /src/ai/types.ts: -------------------------------------------------------------------------------- 1 | import type { DataContent, ImagePart } from "ai"; 2 | import { Infer, v } from "convex/values"; 3 | 4 | // const deprecated = v.optional(v.any()) as unknown as VNull; 5 | 6 | const ProviderOptions = v.record(v.string(), v.record(v.string(), v.any())); 7 | 8 | export function dataContentToConvex(data: DataContent): string | 
ArrayBuffer { 9 | if (data instanceof Uint8Array) { 10 | return Buffer.from(data).toString("base64"); 11 | } 12 | return data; 13 | } 14 | 15 | export function imagePartFromConvex(part: Infer): ImagePart { 16 | if (typeof part.image === "string" && part.image.includes("://")) { 17 | return { 18 | ...part, 19 | image: new URL(part.image), 20 | }; 21 | } 22 | return part; 23 | } 24 | 25 | export function imagePartToConvex(part: ImagePart): Infer { 26 | const image = 27 | part.image instanceof URL 28 | ? part.image.toString() 29 | : dataContentToConvex(part.image); 30 | return { 31 | ...part, 32 | image, 33 | }; 34 | } 35 | 36 | export type SerializeUrlsAndUint8Arrays = T extends URL 37 | ? string 38 | : T extends Uint8Array | ArrayBufferLike 39 | ? ArrayBuffer 40 | : T extends Array 41 | ? Array> 42 | : // eslint-disable-next-line @typescript-eslint/no-explicit-any 43 | T extends Record 44 | ? { [K in keyof T]: SerializeUrlsAndUint8Arrays } 45 | : T; 46 | 47 | export const vTextPart = v.object({ 48 | type: v.literal("text"), 49 | text: v.string(), 50 | providerOptions: v.optional(ProviderOptions), 51 | experimental_providerMetadata: v.optional(ProviderOptions), 52 | }); 53 | 54 | export const vImagePart = v.object({ 55 | type: v.literal("image"), 56 | image: v.union(v.string(), v.bytes()), 57 | mimeType: v.optional(v.string()), 58 | providerOptions: v.optional(ProviderOptions), 59 | experimental_providerMetadata: v.optional(ProviderOptions), 60 | }); 61 | 62 | export const vFilePart = v.object({ 63 | type: v.literal("file"), 64 | data: v.union(v.string(), v.bytes()), 65 | mimeType: v.string(), 66 | providerOptions: v.optional(ProviderOptions), 67 | experimental_providerMetadata: v.optional(ProviderOptions), 68 | }); 69 | 70 | export const vUserContent = v.union( 71 | v.string(), 72 | v.array(v.union(vTextPart, vImagePart, vFilePart)) 73 | ); 74 | 75 | export const vReasoningPart = v.object({ 76 | type: v.literal("reasoning"), 77 | text: v.string(), 78 | providerOptions: v.optional(ProviderOptions), 79 | experimental_providerMetadata: v.optional(ProviderOptions), 80 | }); 81 | 82 | export const vRedactedReasoningPart = v.object({ 83 | type: v.literal("redacted-reasoning"), 84 | data: v.string(), 85 | providerOptions: v.optional(ProviderOptions), 86 | experimental_providerMetadata: v.optional(ProviderOptions), 87 | }); 88 | 89 | export const vToolCallPart = v.object({ 90 | type: v.literal("tool-call"), 91 | toolCallId: v.string(), 92 | toolName: v.string(), 93 | args: v.any(), // TODO: need to be optional? 
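// Illustrative example of a value this validator accepts (the id and args
// below are made up): { type: "tool-call", toolCallId: "call_123",
//   toolName: "get-weather", args: { location: "Paris" } }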
94 | providerOptions: v.optional(ProviderOptions), 95 | experimental_providerMetadata: v.optional(ProviderOptions), 96 | }); 97 | 98 | export const vAssistantContent = v.union( 99 | v.string(), 100 | v.array( 101 | v.union( 102 | vTextPart, 103 | vFilePart, 104 | vReasoningPart, 105 | vRedactedReasoningPart, 106 | vToolCallPart 107 | ) 108 | ) 109 | ); 110 | 111 | const vToolResultContent = v.array( 112 | v.union( 113 | v.object({ 114 | type: v.literal("text"), 115 | text: v.string(), 116 | }), 117 | v.object({ 118 | type: v.literal("image"), 119 | data: v.string(), 120 | mimeType: v.optional(v.string()), 121 | }) 122 | ) 123 | ); 124 | 125 | const vToolResultPart = v.object({ 126 | type: v.literal("tool-result"), 127 | toolCallId: v.string(), 128 | toolName: v.string(), 129 | result: v.any(), 130 | experimental_content: v.optional(vToolResultContent), 131 | isError: v.optional(v.boolean()), 132 | providerOptions: v.optional(ProviderOptions), 133 | experimental_providerMetadata: v.optional(ProviderOptions), 134 | }); 135 | export const vToolContent = v.array(vToolResultPart); 136 | 137 | export const vContent = v.union(vUserContent, vAssistantContent, vToolContent); 138 | export type Content = Infer; 139 | -------------------------------------------------------------------------------- /src/client/client.ts: -------------------------------------------------------------------------------- 1 | import type { MessageType, StorageThreadType } from "@mastra/core"; 2 | import type { 3 | EvalRow, 4 | StorageColumn, 5 | StorageGetMessagesArg, 6 | } from "@mastra/core/storage"; 7 | import { MastraStorage, TABLE_NAMES } from "@mastra/core/storage"; 8 | import { anyApi, FunctionReference } from "convex/server"; 9 | import { mastraToConvexTableNames } from "../mapping/index.js"; 10 | import { ConvexHttpClient } from "convex/browser"; 11 | 12 | import type { Mounts } from "../component/_generated/api.js"; 13 | import { UseApi } from "./types.js"; 14 | import { MastraVector } from "@mastra/core"; 15 | import { SupportedTableName } from "../component/vector/tables.js"; 16 | import { 17 | GenericDataModel, 18 | GenericMutationCtx, 19 | GenericQueryCtx, 20 | } from "convex/server"; 21 | import { GenericActionCtx } from "convex/server"; 22 | export { InMemoryVector } from "./in-memory.js"; 23 | 24 | export type VectorApi = { 25 | vectorAction: FunctionReference<"action">; 26 | vectorMutation: FunctionReference<"mutation">; 27 | vectorQuery: FunctionReference<"query">; 28 | }; 29 | export class ConvexVector extends MastraVector { 30 | api: VectorApi; 31 | 32 | constructor( 33 | public client: ConvexHttpClient, 34 | public options?: { name?: string; api?: VectorApi } 35 | ) { 36 | super(); 37 | this.api = options?.api ?? (anyApi.mastra.api as unknown as VectorApi); 38 | } 39 | 40 | async query(...args: Parameters) { 41 | const { indexName, queryVector, topK, filter, includeVector } = 42 | this.normalizeArgs("query", args); 43 | return await this.client.action(this.api.vectorAction, { 44 | op: "search", 45 | args: { 46 | indexName, 47 | queryVector, 48 | topK: topK ?? 10, 49 | filter: filter ?? 
undefined, 50 | includeVector, 51 | }, 52 | }); 53 | } 54 | 55 | async upsert(...args: Parameters): Promise { 56 | const { indexName, vectors, metadata, ids } = this.normalizeArgs( 57 | "upsert", 58 | args 59 | ); 60 | return await this.client.action(this.api.vectorAction, { 61 | op: "upsert", 62 | args: { 63 | indexName, 64 | vectors, 65 | metadata, 66 | ids, 67 | }, 68 | }); 69 | } 70 | 71 | async createIndex(...args: Parameters) { 72 | const { indexName, dimension } = this.normalizeArgs("createIndex", args); 73 | if (dimension !== 1536) { 74 | throw new Error("Only 1536 dimensions supported"); 75 | } 76 | await this.client.action(this.api.vectorAction, { 77 | op: "createIndex", 78 | args: { 79 | indexName, 80 | dimensions: dimension, 81 | }, 82 | }); 83 | } 84 | 85 | async listIndexes() { 86 | return await this.client.query(this.api.vectorQuery, { 87 | op: "listIndexes", 88 | args: {}, 89 | }); 90 | } 91 | 92 | async describeIndex(indexName: string) { 93 | return await this.client.query(this.api.vectorQuery, { 94 | op: "describeIndex", 95 | args: { indexName }, 96 | }); 97 | } 98 | 99 | async deleteIndex(indexName: SupportedTableName) { 100 | await this.client.action(this.api.vectorAction, { 101 | op: "deleteIndex", 102 | args: { indexName }, 103 | }); 104 | } 105 | } 106 | 107 | export type StorageApi = { 108 | storageAction: FunctionReference<"action">; 109 | storageMutation: FunctionReference<"mutation">; 110 | storageQuery: FunctionReference<"query">; 111 | }; 112 | 113 | export class ConvexStorage extends MastraStorage { 114 | client: ConvexHttpClient; 115 | api: StorageApi; 116 | constructor( 117 | client: ConvexHttpClient, 118 | options?: { name?: string; api?: StorageApi } 119 | ) { 120 | super({ name: options?.name ?? "ConvexStorage" }); 121 | this.client = client; 122 | this.api = options?.api ?? 
(anyApi.mastra.api as unknown as StorageApi); 123 | this.shouldCacheInit = true; 124 | } 125 | 126 | async createTable(args: { 127 | tableName: TABLE_NAMES; 128 | schema: Record; 129 | }): Promise { 130 | const convexTableName = mastraToConvexTableNames[args.tableName]; 131 | if (!convexTableName) { 132 | throw new Error(`Unsupported table name: ${args.tableName}`); 133 | } 134 | // TODO: we could do more serious validation against the defined schema 135 | // await this.client.mutation(this.api.storageMutation, { 136 | // op: "createTable", 137 | // args, 138 | // }); 139 | } 140 | 141 | async clearTable(args: { tableName: TABLE_NAMES }): Promise { 142 | await this.client.action(this.api.storageAction, { 143 | op: "clearTable", 144 | args, 145 | }); 146 | } 147 | 148 | async insert(args: { 149 | tableName: TABLE_NAMES; 150 | record: Record; 151 | }): Promise { 152 | await this.client.mutation(this.api.storageMutation, { 153 | op: "insert", 154 | args, 155 | }); 156 | return; 157 | } 158 | 159 | async batchInsert(args: { 160 | tableName: TABLE_NAMES; 161 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 162 | records: Record[]; 163 | }): Promise { 164 | await this.client.mutation(this.api.storageMutation, { 165 | op: "batchInsert", 166 | args, 167 | }); 168 | } 169 | 170 | async load(args: { 171 | tableName: TABLE_NAMES; 172 | keys: Record; 173 | }): Promise { 174 | return await this.client.query(this.api.storageQuery, { 175 | op: "load", 176 | args, 177 | }); 178 | } 179 | 180 | async getThreadById({ 181 | threadId, 182 | }: { 183 | threadId: string; 184 | }): Promise { 185 | return await this.client.query(this.api.storageQuery, { 186 | op: "getThreadById", 187 | args: { threadId }, 188 | }); 189 | } 190 | 191 | async getThreadsByResourceId({ 192 | resourceId, 193 | }: { 194 | resourceId: string; 195 | }): Promise { 196 | return await this.client.query(this.api.storageQuery, { 197 | op: "getThreadsByResourceId", 198 | args: { resourceId }, 199 | }); 200 | } 201 | 202 | async saveThread({ 203 | thread, 204 | }: { 205 | thread: StorageThreadType; 206 | }): Promise { 207 | return await this.client.mutation(this.api.storageMutation, { 208 | op: "saveThread", 209 | args: { thread }, 210 | }); 211 | } 212 | 213 | async updateThread({ 214 | id, 215 | title, 216 | metadata, 217 | }: { 218 | id: string; 219 | title: string; 220 | metadata: Record; 221 | }): Promise { 222 | return await this.client.mutation(this.api.storageMutation, { 223 | op: "updateThread", 224 | args: { id, title, metadata }, 225 | }); 226 | } 227 | 228 | async deleteThread({ threadId }: { threadId: string }): Promise { 229 | await this.client.mutation(this.api.storageMutation, { 230 | op: "deleteThread", 231 | args: { threadId }, 232 | }); 233 | } 234 | 235 | async getMessages({ 236 | threadId, 237 | selectBy, 238 | }: StorageGetMessagesArg): Promise { 239 | return await this.client.query(this.api.storageQuery, { 240 | op: "getMessages", 241 | args: { threadId, selectBy }, 242 | }); 243 | } 244 | 245 | async saveMessages({ 246 | messages, 247 | }: { 248 | messages: MessageType[]; 249 | }): Promise { 250 | return await this.client.mutation(this.api.storageMutation, { 251 | op: "saveMessages", 252 | args: { messages }, 253 | }); 254 | } 255 | 256 | async getEvalsByAgentName( 257 | agentName: string, 258 | type?: "test" | "live" 259 | ): Promise { 260 | return await this.client.query(this.api.storageQuery, { 261 | op: "getEvalsByAgentName", 262 | args: { agentName, type }, 263 | }); 264 | } 265 | 266 | async 
getTraces(options?: { 267 | name?: string; 268 | scope?: string; 269 | page: number; 270 | perPage: number; 271 | attributes?: Record; 272 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 273 | }): Promise { 274 | return await this.client.action(this.api.storageAction, { 275 | op: "getTraces", 276 | args: options, 277 | }); 278 | } 279 | } 280 | -------------------------------------------------------------------------------- /src/client/in-memory.ts: -------------------------------------------------------------------------------- 1 | import type { MessageType, StorageThreadType } from "@mastra/core"; 2 | import type { 3 | EvalRow, 4 | StorageColumn, 5 | StorageGetMessagesArg, 6 | } from "@mastra/core/storage"; 7 | import { 8 | MastraStorage, 9 | TABLE_EVALS, 10 | TABLE_MESSAGES, 11 | TABLE_NAMES, 12 | TABLE_THREADS, 13 | TABLE_TRACES, 14 | TABLE_WORKFLOW_SNAPSHOT, 15 | } from "@mastra/core/storage"; 16 | 17 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 18 | type Row = Record; 19 | 20 | /** 21 | * InMemoryStorage is a simple in-memory storage implementation for Mastra. 22 | * It is used for testing and development purposes. 23 | */ 24 | export class InMemoryStorage extends MastraStorage { 25 | private tables: Record = { 26 | [TABLE_WORKFLOW_SNAPSHOT]: [], 27 | [TABLE_EVALS]: [], 28 | [TABLE_MESSAGES]: [], 29 | [TABLE_THREADS]: [], 30 | [TABLE_TRACES]: [], 31 | }; 32 | private primaryKeys: Record = { 33 | [TABLE_WORKFLOW_SNAPSHOT]: null, 34 | [TABLE_EVALS]: null, 35 | [TABLE_MESSAGES]: null, 36 | [TABLE_THREADS]: null, 37 | [TABLE_TRACES]: null, 38 | }; 39 | constructor() { 40 | super({ name: "InMemoryStorage" }); 41 | } 42 | 43 | async createTable({ 44 | tableName, 45 | schema, 46 | }: { 47 | tableName: TABLE_NAMES; 48 | schema: Record; 49 | }) { 50 | for (const [key, value] of Object.entries(schema)) { 51 | if (value.primaryKey) { 52 | this.primaryKeys[tableName] = key; 53 | } 54 | break; 55 | } 56 | return; 57 | } 58 | 59 | async clearTable({ tableName }: { tableName: TABLE_NAMES }) { 60 | this.tables[tableName] = []; 61 | } 62 | 63 | // We make this a non-async function so all inserts can happen transactionally 64 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 65 | _insert(tableName: TABLE_NAMES, record: Record) { 66 | if (this.primaryKeys[tableName]) { 67 | const primaryKey = record[this.primaryKeys[tableName]!]; 68 | const index = this.tables[tableName].findIndex( 69 | (record) => record[this.primaryKeys[tableName]!] 
=== primaryKey 70 | ); 71 | if (index !== -1) { 72 | this.tables[tableName][index] = record; 73 | } else { 74 | this.tables[tableName].push(record); 75 | } 76 | } else { 77 | this.tables[tableName].push(record); 78 | } 79 | } 80 | 81 | async insert({ 82 | tableName, 83 | record, 84 | }: { 85 | tableName: TABLE_NAMES; 86 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 87 | record: Record; 88 | }) { 89 | this._insert(tableName, record); 90 | } 91 | 92 | async batchInsert({ 93 | tableName, 94 | records, 95 | }: { 96 | tableName: TABLE_NAMES; 97 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 98 | records: Record[]; 99 | }) { 100 | records.forEach((record) => this._insert(tableName, record)); 101 | } 102 | 103 | async load({ 104 | tableName, 105 | keys, 106 | }: { 107 | tableName: TABLE_NAMES; 108 | keys: Record; 109 | }): Promise { 110 | return this.tables[tableName].find((record) => 111 | Object.entries(keys).every(([key, value]) => record[key] === value) 112 | ) as R | null; 113 | } 114 | 115 | async getThreadById({ 116 | threadId, 117 | }: { 118 | threadId: string; 119 | }): Promise { 120 | return this.tables[TABLE_THREADS].find( 121 | (record) => record.id === threadId 122 | ) as StorageThreadType | null; 123 | } 124 | 125 | async getThreadsByResourceId({ 126 | resourceId, 127 | }: { 128 | resourceId: string; 129 | }): Promise { 130 | return this.tables[TABLE_THREADS].filter( 131 | (record) => record.resourceId === resourceId 132 | ) as StorageThreadType[]; 133 | } 134 | 135 | async saveThread({ 136 | thread, 137 | }: { 138 | thread: StorageThreadType; 139 | }): Promise { 140 | this._insert(TABLE_THREADS, thread); 141 | return thread; 142 | } 143 | 144 | async updateThread({ 145 | id, 146 | title, 147 | metadata, 148 | }: { 149 | id: string; 150 | title: string; 151 | metadata: Record; 152 | }): Promise { 153 | const index = this.tables[TABLE_THREADS].findIndex( 154 | (record) => record.id === id 155 | ); 156 | if (index === -1) { 157 | throw new Error(`Thread with id ${id} not found`); 158 | } 159 | this.tables[TABLE_THREADS][index] = { 160 | ...this.tables[TABLE_THREADS][index], 161 | title, 162 | metadata, 163 | }; 164 | return this.tables[TABLE_THREADS][index] as StorageThreadType; 165 | } 166 | 167 | async deleteThread({ threadId }: { threadId: string }): Promise { 168 | const index = this.tables[TABLE_THREADS].findIndex( 169 | (record) => record.id === threadId 170 | ); 171 | if (index !== -1) { 172 | this.tables[TABLE_THREADS].splice(index, 1); 173 | } 174 | } 175 | 176 | async getMessages({ 177 | threadId, 178 | selectBy, 179 | }: StorageGetMessagesArg): Promise { 180 | const allMessages = this.tables[TABLE_MESSAGES].filter( 181 | (record) => record.threadId === threadId 182 | ) as MessageType[]; 183 | const limit = typeof selectBy?.last === `number` ? selectBy.last : 40; 184 | const ranges = [ 185 | { start: allMessages.length - limit, end: allMessages.length }, 186 | ]; 187 | if (selectBy?.include?.length) { 188 | ranges.push( 189 | ...selectBy.include 190 | .map((i) => { 191 | const index = allMessages.findIndex((record) => record.id === i.id); 192 | return index !== -1 193 | ? { 194 | start: index - (i.withPreviousMessages || 0), 195 | end: index + (i.withNextMessages || 0), 196 | } 197 | : null; 198 | }) 199 | .flatMap((r) => (r ? 
[r] : [])) 200 | ); 201 | } 202 | const indexes = ranges 203 | .flatMap((r) => 204 | Array.from({ length: r.end - r.start + 1 }, (_, i) => r.start + i) 205 | ) 206 | .sort() 207 | .reduce( 208 | (acc, index) => (acc.at(-1) === index ? acc : [...acc, index]), 209 | [] as number[] 210 | ); 211 | return indexes 212 | .map((index) => allMessages[index]!) 213 | .map((m) => ({ 214 | ...m, 215 | content: tryJSONParse(m.content), 216 | createdAt: new Date(m.createdAt), 217 | })) as T; 218 | } 219 | 220 | async saveMessages({ 221 | messages, 222 | }: { 223 | messages: MessageType[]; 224 | }): Promise { 225 | messages.forEach((message) => 226 | this._insert(TABLE_MESSAGES, { 227 | id: message.id, 228 | threadId: message.threadId, 229 | content: 230 | typeof message.content === "object" 231 | ? JSON.stringify(message.content) 232 | : message.content, 233 | role: message.role, 234 | type: message.type, 235 | createdAt: 236 | message.createdAt instanceof Date 237 | ? message.createdAt.toISOString() 238 | : message.createdAt || new Date().toISOString(), 239 | }) 240 | ); 241 | return messages; 242 | } 243 | 244 | async getTraces(args: { 245 | name?: string; 246 | scope?: string; 247 | page: number; 248 | perPage: number; 249 | attributes?: Record; 250 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 251 | }): Promise { 252 | const { name, scope, page, perPage, attributes } = args; 253 | const limit = perPage; 254 | const offset = page * perPage; 255 | const traces = this.tables[TABLE_TRACES].filter((record) => { 256 | if (name && !record.name.startsWith(name)) { 257 | return false; 258 | } 259 | if (scope && record.scope !== scope) { 260 | return false; 261 | } 262 | if (attributes) { 263 | return Object.keys(attributes).every( 264 | (key) => record.attributes[key] === attributes[key] 265 | ); 266 | } 267 | return true; 268 | }); 269 | return traces.slice(offset, offset + limit); 270 | } 271 | 272 | async getEvalsByAgentName( 273 | agentName: string, 274 | type?: "test" | "live" 275 | ): Promise { 276 | return this.tables[TABLE_EVALS].filter( 277 | (record) => 278 | record.agentName === agentName && 279 | (type === "test" 280 | ? record.testInfo && record.testInfo.testPath 281 | : type === "live" 282 | ? !record.testInfo || !record.testInfo.testPath 283 | : true) 284 | ) as EvalRow[]; 285 | } 286 | } 287 | 288 | function tryJSONParse(content: unknown) { 289 | try { 290 | return JSON.parse(content as string); 291 | } catch { 292 | return content; 293 | } 294 | } 295 | 296 | /** 297 | * InMemoryVector is a simple in-memory vector implementation for Mastra. 298 | * It is used for testing and development purposes. 
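 *
 * A minimal usage sketch; the index name, vectors, and metadata below are
 * illustrative, and the object-style arguments rely on MastraVector's
 * normalizeArgs handling:
 *
 * @example
 * const vector = new InMemoryVector();
 * await vector.createIndex({ indexName: "docs", dimension: 3 });
 * const ids = await vector.upsert({
 *   indexName: "docs",
 *   vectors: [[0.1, 0.2, 0.3]],
 *   metadata: [{ title: "hello" }],
 * });
 * const hits = await vector.query({
 *   indexName: "docs",
 *   queryVector: [0.1, 0.2, 0.3],
 *   topK: 5,
 * });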
299 | */ 300 | import type { 301 | CreateIndexParams, 302 | UpsertVectorParams, 303 | QueryVectorParams, 304 | IndexStats, 305 | ParamsToArgs, 306 | QueryResult, 307 | CreateIndexArgs, 308 | UpsertVectorArgs, 309 | QueryVectorArgs, 310 | } from "@mastra/core/vector"; 311 | import { MastraVector } from "@mastra/core/vector"; 312 | type VectorDoc = { 313 | id: string; 314 | vector: number[]; 315 | metadata: Record; 316 | }; 317 | 318 | export class InMemoryVector extends MastraVector { 319 | private tables: Record = {}; 320 | private dimensions: Record = {}; 321 | constructor() { 322 | super(); 323 | } 324 | async query( 325 | ...args: ParamsToArgs | E 326 | ): Promise { 327 | const params = this.normalizeArgs( 328 | "query", 329 | args 330 | ); 331 | const index = this.tables[params.indexName]; 332 | if (!index) return []; 333 | const scored = index 334 | .filter( 335 | (doc) => 336 | !params.filter || 337 | Object.entries(params.filter).every( 338 | ([field, value]) => doc.metadata[field] === value 339 | ) 340 | ) 341 | .map((doc) => { 342 | const score = dotProduct(doc.vector, params.queryVector); 343 | return { score, doc }; 344 | }); 345 | return scored 346 | .sort((a, b) => b.score - a.score) 347 | .slice(0, params.topK) 348 | .map((scored) => ({ 349 | id: scored.doc.id, 350 | score: scored.score, 351 | ...scored.doc.metadata, 352 | ...(params.includeVector ? { vector: scored.doc.vector } : {}), 353 | })); 354 | } 355 | // Adds type checks for positional arguments if used 356 | async upsert( 357 | ...args: ParamsToArgs | E 358 | ): Promise { 359 | const params = this.normalizeArgs( 360 | "upsert", 361 | args 362 | ); 363 | const table = this.tables[params.indexName]; 364 | if (!table) throw new Error(`Index ${params.indexName} not found`); 365 | const normalized = params.vectors.map((vector, index) => { 366 | if (vector.length !== this.dimensions[params.indexName]) { 367 | throw new Error( 368 | `Vector ${index} has wrong dimension: ${vector.length} !== ${this.dimensions[params.indexName]}` 369 | ); 370 | } 371 | // Normalize the vector to unit length 372 | return vector.map( 373 | (value) => value / Math.sqrt(dotProduct(vector, vector)) 374 | ); 375 | }); 376 | 377 | const ids = params.ids || params.vectors.map(() => crypto.randomUUID()); 378 | normalized.forEach((vector, index) => { 379 | const existing = table.find((doc) => doc.id === ids[index]); 380 | if (existing) { 381 | existing.vector = vector; 382 | existing.metadata = params.metadata?.[index] ?? {}; 383 | } else { 384 | table.push({ 385 | id: ids[index]!, 386 | vector, 387 | metadata: params.metadata?.[index] ?? 
{}, 388 | }); 389 | } 390 | }); 391 | return ids; 392 | } 393 | // Adds type checks for positional arguments if used 394 | async createIndex( 395 | ...args: ParamsToArgs | E 396 | ): Promise { 397 | const params = this.normalizeArgs( 398 | "createIndex", 399 | args 400 | ); 401 | this.tables[params.indexName] = []; 402 | this.dimensions[params.indexName] = params.dimension; 403 | } 404 | 405 | async listIndexes(): Promise { 406 | return Object.keys(this.tables); 407 | } 408 | 409 | async describeIndex(indexName: string): Promise { 410 | const table = this.tables[indexName]; 411 | const dimension = this.dimensions[indexName]; 412 | if (!table) throw new Error(`Index ${indexName} not found`); 413 | if (!dimension) throw new Error(`Index ${indexName} has no dimension`); 414 | return { 415 | dimension, 416 | metric: "cosine", 417 | count: table.length, 418 | }; 419 | } 420 | 421 | async deleteIndex(indexName: string): Promise { 422 | delete this.tables[indexName]; 423 | delete this.dimensions[indexName]; 424 | } 425 | } 426 | 427 | function dotProduct(a: number[], b: number[]): number { 428 | return sum(a.map((value, index) => (b[index] ? value * b[index] : 0))); 429 | } 430 | 431 | function sum(a: number[]): number { 432 | return a.reduce((acc, curr) => acc + curr, 0); 433 | } 434 | -------------------------------------------------------------------------------- /src/client/index.ts: -------------------------------------------------------------------------------- 1 | export { ConvexStorage, InMemoryStorage } from "./storage.js"; 2 | export { ConvexVector, InMemoryVector } from "./vector.js"; 3 | -------------------------------------------------------------------------------- /src/client/storage.ts: -------------------------------------------------------------------------------- 1 | // Workaround to aid in bundling, to be combined with adding @libsql/client to 2 | // the externalPackages in a convex.json file in the root of your project. 3 | export * as libsql from "@libsql/client"; 4 | export { InMemoryStorage } from "./in-memory.js"; 5 | 6 | import type { MessageType, StorageThreadType } from "@mastra/core"; 7 | import type { 8 | EvalRow, 9 | StorageColumn, 10 | StorageGetMessagesArg, 11 | } from "@mastra/core/storage"; 12 | import { 13 | MastraStorage, 14 | TABLE_EVALS, 15 | TABLE_MESSAGES, 16 | TABLE_NAMES, 17 | TABLE_THREADS, 18 | TABLE_TRACES, 19 | TABLE_WORKFLOW_SNAPSHOT, 20 | } from "@mastra/core/storage"; 21 | import { 22 | GenericActionCtx, 23 | GenericDataModel, 24 | GenericMutationCtx, 25 | GenericQueryCtx, 26 | } from "convex/server"; 27 | import type { Mounts } from "../component/_generated/api.js"; 28 | import { 29 | mapMastraToSerialized, 30 | mapSerializedToMastra, 31 | mastraToConvexTableNames, 32 | SerializedMessage, 33 | SerializedThread, 34 | SerializedTrace, 35 | } from "../mapping/index.js"; 36 | import { UseApi } from "./types.js"; 37 | 38 | export class ConvexStorage extends MastraStorage { 39 | ctx: Ctx<"action" | "mutation" | "query"> | undefined; 40 | api: UseApi["storage"]; 41 | constructor(component: UseApi, options?: { name?: string }) { 42 | super({ name: options?.name ?? "ConvexStorage" }); 43 | this.api = component.storage; 44 | this.shouldCacheInit = true; 45 | } 46 | 47 | /** 48 | * Set the context for the storage. Must be called before using the storage 49 | * in a Convex function. If you are using the storage via the API, you do not 50 | * need to call this. 51 | * 52 | * @param ctx - The context to use for the storage. 
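 *
 * @example
 * // Sketch only: `components.mastra` and the surrounding action are
 * // illustrative, and imports from "./_generated/server" are omitted.
 * // Construct the storage once, then hand it the ctx of whichever Convex
 * // function is currently executing before Mastra touches storage.
 * const storage = new ConvexStorage(components.mastra);
 * export const ask = action({
 *   handler: async (ctx) => {
 *     storage.setCtx(ctx);
 *     const thread = await storage.getThreadById({ threadId: "..." });
 *     // ... run agents / workflows that read and write via this storage
 *   },
 * });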
53 | */ 54 | async setCtx(ctx: Ctx<"action" | "mutation" | "query"> | undefined) { 55 | this.ctx = ctx; 56 | } 57 | 58 | getApi(kind: T): Ctx { 59 | // TODO: get http client if that's specified 60 | if (!this.ctx) { 61 | throw new Error( 62 | "Context not set: ensure you're calling storage.setCtx" + 63 | " before using the storage." 64 | ); 65 | } 66 | switch (kind) { 67 | case "action": 68 | if (!(this.ctx as GenericActionCtx).runAction) { 69 | throw new Error("Context must be an action context to do this"); 70 | } 71 | // fallthrough 72 | case "mutation": 73 | if (!(this.ctx as GenericMutationCtx).runMutation) { 74 | throw new Error("Context doesn't have a way to run mutations"); 75 | } 76 | // fallthrough 77 | case "query": 78 | if (!(this.ctx as GenericQueryCtx).runQuery) { 79 | throw new Error("Context is not a query context"); 80 | } 81 | } 82 | return this.ctx as Ctx; 83 | } 84 | 85 | async createTable(args: { 86 | tableName: TABLE_NAMES; 87 | schema: Record; 88 | }): Promise { 89 | const convexTableName = mastraToConvexTableNames[args.tableName]; 90 | if (!convexTableName) { 91 | throw new Error(`Unsupported table name: ${args.tableName}`); 92 | } 93 | // TODO: we could do more serious validation against the defined schema 94 | // validateTableSchema(convexTableName, tableSchema); 95 | return; 96 | } 97 | 98 | async clearTable(args: { tableName: TABLE_NAMES }): Promise { 99 | const ctx = this.getApi("action"); 100 | const tableName = mastraToConvexTableNames[args.tableName]; 101 | await ctx.runAction(this.api.storage.clearTable, { tableName }); 102 | return; 103 | } 104 | 105 | async insert(args: { 106 | tableName: TABLE_NAMES; 107 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 108 | record: Record; 109 | }): Promise { 110 | const convexRecord = mapMastraToSerialized(args.tableName, args.record); 111 | const tableName = mastraToConvexTableNames[args.tableName]; 112 | const ctx = this.getApi("mutation"); 113 | await ctx.runMutation(this.api.storage.insert, { 114 | tableName, 115 | document: convexRecord, 116 | }); 117 | return; 118 | } 119 | 120 | async batchInsert(args: { 121 | tableName: TABLE_NAMES; 122 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 123 | records: Record[]; 124 | }): Promise { 125 | const ctx = this.getApi("mutation"); 126 | const tableName = mastraToConvexTableNames[args.tableName]; 127 | await ctx.runMutation(this.api.storage.batchInsert, { 128 | tableName, 129 | records: args.records.map((record) => 130 | mapMastraToSerialized(args.tableName, record) 131 | ), 132 | }); 133 | return; 134 | } 135 | 136 | async load(args: { 137 | tableName: TABLE_NAMES; 138 | keys: Record; 139 | }): Promise { 140 | const ctx = this.getApi("query"); 141 | const tableName = mastraToConvexTableNames[args.tableName]; 142 | if (args.tableName === TABLE_WORKFLOW_SNAPSHOT) { 143 | const { run_id, workflow_name } = args.keys; 144 | if (!run_id || !workflow_name) { 145 | throw new Error("Expected run_id and workflow_name to load a snapshot"); 146 | } 147 | const snapshot = await ctx.runQuery(this.api.storage.loadSnapshot, { 148 | runId: run_id, 149 | workflowName: workflow_name, 150 | }); 151 | if (!snapshot) { 152 | return null; 153 | } 154 | return mapSerializedToMastra(args.tableName, snapshot) as R; 155 | } 156 | return await ctx.runQuery(this.api.storage.load, { 157 | tableName, 158 | keys: args.keys, 159 | }); 160 | } 161 | 162 | async getThreadById({ 163 | threadId, 164 | }: { 165 | threadId: string; 166 | }): Promise { 167 | const ctx = 
this.getApi("query"); 168 | const thread = await ctx.runQuery(this.api.messages.getThreadById, { 169 | threadId, 170 | }); 171 | if (!thread) { 172 | return null; 173 | } 174 | return mapSerializedToMastra(TABLE_THREADS, thread); 175 | } 176 | 177 | async getThreadsByResourceId({ 178 | resourceId, 179 | }: { 180 | resourceId: string; 181 | }): Promise { 182 | const ctx = this.getApi("query"); 183 | const threads: SerializedThread[] = []; 184 | let cursor: string | null = null; 185 | while (true) { 186 | const page: { 187 | threads: SerializedThread[]; 188 | continueCursor: string; 189 | isDone: boolean; 190 | } = await ctx.runQuery(this.api.messages.getThreadsByResourceId, { 191 | resourceId, 192 | cursor, 193 | }); 194 | threads.push(...page.threads); 195 | if (page.isDone) { 196 | break; 197 | } 198 | cursor = page.continueCursor; 199 | } 200 | return threads.map((thread) => 201 | mapSerializedToMastra(TABLE_THREADS, thread) 202 | ); 203 | } 204 | 205 | async saveThread({ 206 | thread, 207 | }: { 208 | thread: StorageThreadType; 209 | }): Promise { 210 | const ctx = this.getApi("mutation"); 211 | await ctx.runMutation(this.api.messages.saveThread, { 212 | thread: mapMastraToSerialized(TABLE_THREADS, thread), 213 | }); 214 | return thread; 215 | } 216 | 217 | async updateThread({ 218 | id, 219 | title, 220 | metadata, 221 | }: { 222 | id: string; 223 | title: string; 224 | metadata: Record; 225 | }): Promise { 226 | const ctx = this.getApi("mutation"); 227 | const thread = await ctx.runMutation(this.api.messages.updateThread, { 228 | threadId: id, 229 | title, 230 | metadata, 231 | }); 232 | return mapSerializedToMastra(TABLE_THREADS, thread); 233 | } 234 | 235 | async deleteThread({ threadId }: { threadId: string }): Promise { 236 | const ctx = this.getApi("mutation"); 237 | await ctx.runMutation(this.api.messages.deleteThread, { threadId }); 238 | return; 239 | } 240 | 241 | async getMessages({ 242 | threadId, 243 | selectBy, 244 | }: StorageGetMessagesArg): Promise { 245 | const ctx = this.getApi("query"); 246 | const messages: SerializedMessage[] = await ctx.runQuery( 247 | this.api.messages.getMessagesPage, 248 | { 249 | threadId, 250 | selectBy, 251 | // memoryConfig: threadConfig, 252 | } 253 | ); 254 | return messages.map((message) => 255 | mapSerializedToMastra(TABLE_MESSAGES, message) 256 | ) as T[]; 257 | } 258 | 259 | async saveMessages({ 260 | messages, 261 | }: { 262 | messages: MessageType[]; 263 | }): Promise { 264 | const ctx = this.getApi("mutation"); 265 | await ctx.runMutation(this.api.messages.saveMessages, { 266 | messages: messages.map((message) => 267 | mapMastraToSerialized(TABLE_MESSAGES, message) 268 | ), 269 | }); 270 | return messages; 271 | } 272 | 273 | async getEvalsByAgentName( 274 | agentName: string, 275 | type?: "test" | "live" 276 | ): Promise { 277 | const ctx = this.getApi("query"); 278 | const evals = await ctx.runQuery(this.api.storage.getEvalsByAgentName, { 279 | agentName, 280 | type, 281 | }); 282 | return evals.map((e) => mapSerializedToMastra(TABLE_EVALS, e)); 283 | } 284 | 285 | async getTraces(options?: { 286 | name?: string; 287 | scope?: string; 288 | page: number; 289 | perPage: number; 290 | attributes?: Record; 291 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 292 | }): Promise { 293 | const { name, scope, page, perPage, attributes } = options ?? {}; 294 | const traces: SerializedTrace[] = []; 295 | let cursor: string | null = null; 296 | const numItems = perPage ?? 100; 297 | const pageNum = page ?? 
0; 298 | while (true) { 299 | const ctx = this.getApi("query"); 300 | const results: { 301 | isDone: boolean; 302 | continuCursor: string; 303 | page: SerializedTrace[]; 304 | } = await ctx.runQuery(this.api.storage.getTracesPage, { 305 | name, 306 | scope, 307 | cursor, 308 | numItems, 309 | attributes, 310 | }); 311 | traces.push(...results.page); 312 | // Note: we'll refetch from the beginning on every page. 313 | if (results.isDone || traces.length >= numItems * pageNum) { 314 | break; 315 | } 316 | cursor = results.continuCursor; 317 | } 318 | return traces 319 | .slice(pageNum * numItems, (pageNum + 1) * numItems) 320 | .map((trace) => mapSerializedToMastra(TABLE_TRACES, trace)); 321 | } 322 | } 323 | 324 | type Ctx = T extends "action" 325 | ? GenericActionCtx 326 | : T extends "mutation" 327 | ? GenericMutationCtx 328 | : T extends "query" 329 | ? GenericQueryCtx 330 | : never; 331 | -------------------------------------------------------------------------------- /src/client/types.ts: -------------------------------------------------------------------------------- 1 | /* Type utils follow */ 2 | 3 | import { Expand, FunctionReference, GenericActionCtx } from "convex/server"; 4 | 5 | import { GenericMutationCtx } from "convex/server"; 6 | 7 | import { GenericQueryCtx } from "convex/server"; 8 | 9 | import { GenericDataModel } from "convex/server"; 10 | import { GenericId } from "convex/values"; 11 | 12 | export type RunQueryCtx = { 13 | runQuery: GenericQueryCtx["runQuery"]; 14 | }; 15 | export type RunMutationCtx = { 16 | runQuery: GenericMutationCtx["runQuery"]; 17 | runMutation: GenericMutationCtx["runMutation"]; 18 | }; 19 | export type RunActionCtx = { 20 | runQuery: GenericActionCtx["runQuery"]; 21 | runMutation: GenericActionCtx["runMutation"]; 22 | runAction: GenericActionCtx["runAction"]; 23 | }; 24 | 25 | export type OpaqueIds = 26 | T extends GenericId 27 | ? string 28 | : T extends (infer U)[] 29 | ? OpaqueIds[] 30 | : T extends ArrayBuffer 31 | ? ArrayBuffer 32 | : T extends object 33 | ? { [K in keyof T]: OpaqueIds } 34 | : T; 35 | 36 | export type UseApi = Expand<{ 37 | [mod in keyof API]: API[mod] extends FunctionReference< 38 | infer FType, 39 | "public", 40 | infer FArgs, 41 | infer FReturnType, 42 | infer FComponentPath 43 | > 44 | ? FunctionReference< 45 | FType, 46 | "internal", 47 | OpaqueIds, 48 | OpaqueIds, 49 | FComponentPath 50 | > 51 | : UseApi; 52 | }>; 53 | -------------------------------------------------------------------------------- /src/client/vector.ts: -------------------------------------------------------------------------------- 1 | import type { Mounts } from "../component/_generated/api.js"; 2 | import { UseApi } from "./types.js"; 3 | import { MastraVector } from "@mastra/core"; 4 | import { SupportedTableName } from "../component/vector/tables.js"; 5 | import { 6 | GenericDataModel, 7 | GenericMutationCtx, 8 | GenericQueryCtx, 9 | } from "convex/server"; 10 | import { GenericActionCtx } from "convex/server"; 11 | export { InMemoryVector } from "./in-memory.js"; 12 | 13 | export class ConvexVector extends MastraVector { 14 | ctx: Ctx<"action" | "mutation" | "query"> | undefined; 15 | api: UseApi["vector"]; 16 | 17 | constructor( 18 | component: UseApi, 19 | public options?: { name?: string } 20 | ) { 21 | super(); 22 | this.api = component.vector; 23 | } 24 | 25 | /** 26 | * Set the context for the storage. Must be called before using the storage 27 | * in a Convex function. 
If you are using the storage via the API, you do not 28 | * need to call this. 29 | * 30 | * @param ctx - The context to use for the storage. 31 | */ 32 | async setCtx(ctx: Ctx<"action" | "mutation" | "query"> | undefined) { 33 | this.ctx = ctx; 34 | } 35 | 36 | getApi(kind: T): Ctx { 37 | // TODO: get http client if that's specified 38 | if (!this.ctx) { 39 | throw new Error( 40 | "Context not set: ensure you're calling storage.setCtx" + 41 | " before using the storage." 42 | ); 43 | } 44 | switch (kind) { 45 | case "action": 46 | if (!(this.ctx as GenericActionCtx).runAction) { 47 | throw new Error("Context must be an action context to do this"); 48 | } 49 | // fallthrough 50 | case "mutation": 51 | if (!(this.ctx as GenericMutationCtx).runMutation) { 52 | throw new Error("Context doesn't have a way to run mutations"); 53 | } 54 | // fallthrough 55 | case "query": 56 | if (!(this.ctx as GenericQueryCtx).runQuery) { 57 | throw new Error("Context is not a query context"); 58 | } 59 | } 60 | return this.ctx as Ctx; 61 | } 62 | 63 | async query(...args: Parameters) { 64 | const { indexName, queryVector, topK, filter, includeVector } = 65 | this.normalizeArgs("query", args); 66 | const ctx = this.getApi("action"); 67 | return await ctx.runAction(this.api.vector.search, { 68 | indexName, 69 | queryVector, 70 | topK: topK ?? 10, 71 | filter: filter ?? undefined, 72 | includeVector, 73 | }); 74 | } 75 | 76 | async upsert(...args: Parameters): Promise { 77 | const { indexName, vectors, metadata, ids } = this.normalizeArgs( 78 | "upsert", 79 | args 80 | ); 81 | const ctx = this.getApi("mutation"); 82 | return await ctx.runMutation(this.api.vector.upsert, { 83 | indexName, 84 | vectors, 85 | metadata, 86 | ids, 87 | }); 88 | } 89 | 90 | async createIndex(...args: Parameters) { 91 | const { indexName, dimension } = this.normalizeArgs("createIndex", args); 92 | if (dimension !== 1536) { 93 | throw new Error("Only 1536 dimensions supported"); 94 | } 95 | const ctx = this.getApi("mutation"); 96 | await ctx.runMutation(this.api.vector.createIndex, { 97 | indexName, 98 | dimensions: dimension, 99 | }); 100 | } 101 | 102 | async listIndexes() { 103 | const ctx = this.getApi("query"); 104 | return await ctx.runQuery(this.api.vector.listIndexes, {}); 105 | } 106 | 107 | async describeIndex(indexName: string) { 108 | const ctx = this.getApi("query"); 109 | return await ctx.runQuery(this.api.vector.describeIndex, { indexName }); 110 | } 111 | 112 | async deleteIndex(indexName: SupportedTableName) { 113 | const ctx = this.getApi("action"); 114 | await ctx.runAction(this.api.vector.deleteIndex, { indexName }); 115 | } 116 | } 117 | 118 | type Ctx = T extends "action" 119 | ? GenericActionCtx 120 | : T extends "mutation" 121 | ? GenericMutationCtx 122 | : T extends "query" 123 | ? GenericQueryCtx 124 | : never; 125 | -------------------------------------------------------------------------------- /src/component/_generated/api.d.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /** 3 | * Generated `api` utility. 4 | * 5 | * THIS CODE IS AUTOMATICALLY GENERATED. 6 | * 7 | * To regenerate, run `npx convex dev`. 
8 | * @module 9 | */ 10 | 11 | import type * as debug from "../debug.js"; 12 | import type * as logger from "../logger.js"; 13 | import type * as storage_messages from "../storage/messages.js"; 14 | import type * as storage_storage from "../storage/storage.js"; 15 | import type * as storage_tables from "../storage/tables.js"; 16 | import type * as vector_tables from "../vector/tables.js"; 17 | import type * as vector_vector from "../vector/vector.js"; 18 | 19 | import type { 20 | ApiFromModules, 21 | FilterApi, 22 | FunctionReference, 23 | } from "convex/server"; 24 | 25 | /** 26 | * A utility for referencing Convex functions in your app's API. 27 | * 28 | * Usage: 29 | * ```js 30 | * const myFunctionReference = api.myModule.myFunction; 31 | * ``` 32 | */ 33 | declare const fullApi: ApiFromModules<{ 34 | debug: typeof debug; 35 | logger: typeof logger; 36 | "storage/messages": typeof storage_messages; 37 | "storage/storage": typeof storage_storage; 38 | "storage/tables": typeof storage_tables; 39 | "vector/tables": typeof vector_tables; 40 | "vector/vector": typeof vector_vector; 41 | }>; 42 | export type Mounts = { 43 | storage: { 44 | messages: { 45 | deleteThread: FunctionReference< 46 | "mutation", 47 | "public", 48 | { threadId: string }, 49 | null 50 | >; 51 | getMessagesPage: FunctionReference< 52 | "query", 53 | "public", 54 | { 55 | selectBy?: { 56 | include?: Array<{ 57 | id: string; 58 | withNextMessages?: number; 59 | withPreviousMessages?: number; 60 | }>; 61 | last?: number | false; 62 | vectorSearchString?: string; 63 | }; 64 | threadId: string; 65 | }, 66 | Array<{ 67 | content: 68 | | string 69 | | Array< 70 | | { 71 | experimental_providerMetadata?: Record< 72 | string, 73 | Record 74 | >; 75 | providerOptions?: Record>; 76 | text: string; 77 | type: "text"; 78 | } 79 | | { 80 | experimental_providerMetadata?: Record< 81 | string, 82 | Record 83 | >; 84 | image: string | ArrayBuffer; 85 | mimeType?: string; 86 | providerOptions?: Record>; 87 | type: "image"; 88 | } 89 | | { 90 | data: string | ArrayBuffer; 91 | experimental_providerMetadata?: Record< 92 | string, 93 | Record 94 | >; 95 | mimeType: string; 96 | providerOptions?: Record>; 97 | type: "file"; 98 | } 99 | > 100 | | string 101 | | Array< 102 | | { 103 | experimental_providerMetadata?: Record< 104 | string, 105 | Record 106 | >; 107 | providerOptions?: Record>; 108 | text: string; 109 | type: "text"; 110 | } 111 | | { 112 | data: string | ArrayBuffer; 113 | experimental_providerMetadata?: Record< 114 | string, 115 | Record 116 | >; 117 | mimeType: string; 118 | providerOptions?: Record>; 119 | type: "file"; 120 | } 121 | | { 122 | experimental_providerMetadata?: Record< 123 | string, 124 | Record 125 | >; 126 | providerOptions?: Record>; 127 | text: string; 128 | type: "reasoning"; 129 | } 130 | | { 131 | data: string; 132 | experimental_providerMetadata?: Record< 133 | string, 134 | Record 135 | >; 136 | providerOptions?: Record>; 137 | type: "redacted-reasoning"; 138 | } 139 | | { 140 | args: any; 141 | experimental_providerMetadata?: Record< 142 | string, 143 | Record 144 | >; 145 | providerOptions?: Record>; 146 | toolCallId: string; 147 | toolName: string; 148 | type: "tool-call"; 149 | } 150 | > 151 | | Array<{ 152 | experimental_content?: Array< 153 | | { text: string; type: "text" } 154 | | { data: string; mimeType?: string; type: "image" } 155 | >; 156 | experimental_providerMetadata?: Record< 157 | string, 158 | Record 159 | >; 160 | isError?: boolean; 161 | providerOptions?: Record>; 162 | result: 
any; 163 | toolCallId: string; 164 | toolName: string; 165 | type: "tool-result"; 166 | }>; 167 | createdAt: number; 168 | id: string; 169 | role: "system" | "user" | "assistant" | "tool"; 170 | threadId: string; 171 | type: "text" | "tool-call" | "tool-result"; 172 | }> 173 | >; 174 | getThreadById: FunctionReference< 175 | "query", 176 | "public", 177 | { threadId: string }, 178 | { 179 | createdAt: number; 180 | id: string; 181 | metadata?: Record; 182 | resourceId: string; 183 | title?: string; 184 | updatedAt: number; 185 | } | null 186 | >; 187 | getThreadsByResourceId: FunctionReference< 188 | "query", 189 | "public", 190 | { cursor?: string | null; resourceId: string }, 191 | { 192 | continueCursor: string; 193 | isDone: boolean; 194 | threads: Array<{ 195 | createdAt: number; 196 | id: string; 197 | metadata?: Record; 198 | resourceId: string; 199 | title?: string; 200 | updatedAt: number; 201 | }>; 202 | } 203 | >; 204 | saveMessages: FunctionReference< 205 | "mutation", 206 | "public", 207 | { 208 | messages: Array<{ 209 | content: 210 | | string 211 | | Array< 212 | | { 213 | experimental_providerMetadata?: Record< 214 | string, 215 | Record 216 | >; 217 | providerOptions?: Record>; 218 | text: string; 219 | type: "text"; 220 | } 221 | | { 222 | experimental_providerMetadata?: Record< 223 | string, 224 | Record 225 | >; 226 | image: string | ArrayBuffer; 227 | mimeType?: string; 228 | providerOptions?: Record>; 229 | type: "image"; 230 | } 231 | | { 232 | data: string | ArrayBuffer; 233 | experimental_providerMetadata?: Record< 234 | string, 235 | Record 236 | >; 237 | mimeType: string; 238 | providerOptions?: Record>; 239 | type: "file"; 240 | } 241 | > 242 | | string 243 | | Array< 244 | | { 245 | experimental_providerMetadata?: Record< 246 | string, 247 | Record 248 | >; 249 | providerOptions?: Record>; 250 | text: string; 251 | type: "text"; 252 | } 253 | | { 254 | data: string | ArrayBuffer; 255 | experimental_providerMetadata?: Record< 256 | string, 257 | Record 258 | >; 259 | mimeType: string; 260 | providerOptions?: Record>; 261 | type: "file"; 262 | } 263 | | { 264 | experimental_providerMetadata?: Record< 265 | string, 266 | Record 267 | >; 268 | providerOptions?: Record>; 269 | text: string; 270 | type: "reasoning"; 271 | } 272 | | { 273 | data: string; 274 | experimental_providerMetadata?: Record< 275 | string, 276 | Record 277 | >; 278 | providerOptions?: Record>; 279 | type: "redacted-reasoning"; 280 | } 281 | | { 282 | args: any; 283 | experimental_providerMetadata?: Record< 284 | string, 285 | Record 286 | >; 287 | providerOptions?: Record>; 288 | toolCallId: string; 289 | toolName: string; 290 | type: "tool-call"; 291 | } 292 | > 293 | | Array<{ 294 | experimental_content?: Array< 295 | | { text: string; type: "text" } 296 | | { data: string; mimeType?: string; type: "image" } 297 | >; 298 | experimental_providerMetadata?: Record< 299 | string, 300 | Record 301 | >; 302 | isError?: boolean; 303 | providerOptions?: Record>; 304 | result: any; 305 | toolCallId: string; 306 | toolName: string; 307 | type: "tool-result"; 308 | }>; 309 | createdAt: number; 310 | id: string; 311 | role: "system" | "user" | "assistant" | "tool"; 312 | threadId: string; 313 | type: "text" | "tool-call" | "tool-result"; 314 | }>; 315 | }, 316 | null 317 | >; 318 | saveThread: FunctionReference< 319 | "mutation", 320 | "public", 321 | { 322 | thread: { 323 | createdAt: number; 324 | id: string; 325 | metadata?: Record; 326 | resourceId: string; 327 | title?: string; 328 | updatedAt: 
number; 329 | }; 330 | }, 331 | null 332 | >; 333 | updateThread: FunctionReference< 334 | "mutation", 335 | "public", 336 | { metadata?: Record; threadId: string; title?: string }, 337 | { 338 | createdAt: number; 339 | id: string; 340 | metadata?: Record; 341 | resourceId: string; 342 | title?: string; 343 | updatedAt: number; 344 | } 345 | >; 346 | }; 347 | storage: { 348 | batchInsert: FunctionReference< 349 | "mutation", 350 | "public", 351 | { records: Array; tableName: string }, 352 | null 353 | >; 354 | clearTable: FunctionReference< 355 | "action", 356 | "public", 357 | { tableName: string }, 358 | null 359 | >; 360 | getEvalsByAgentName: FunctionReference< 361 | "query", 362 | "public", 363 | { agentName: string; type?: "test" | "live" }, 364 | Array<{ 365 | agentName: string; 366 | createdAt: number; 367 | globalRunId: string; 368 | input: string; 369 | instructions: string; 370 | metricName: string; 371 | output: string; 372 | result: any; 373 | runId: string; 374 | testInfo?: any; 375 | }> 376 | >; 377 | getTracesPage: FunctionReference< 378 | "query", 379 | "public", 380 | { 381 | attributes?: Record; 382 | cursor: string | null; 383 | name?: string; 384 | numItems: number; 385 | scope?: string; 386 | }, 387 | { 388 | continuCursor: string; 389 | isDone: boolean; 390 | page: Array<{ 391 | attributes?: any; 392 | createdAt: number; 393 | endTime: bigint; 394 | events?: any; 395 | id: string; 396 | kind: number | bigint; 397 | links?: any; 398 | name: string; 399 | other?: string; 400 | parentSpanId?: string | null; 401 | scope: string; 402 | startTime: bigint; 403 | status?: any; 404 | traceId: string; 405 | }>; 406 | } 407 | >; 408 | insert: FunctionReference< 409 | "mutation", 410 | "public", 411 | { document: any; tableName: string }, 412 | null 413 | >; 414 | load: FunctionReference< 415 | "query", 416 | "public", 417 | { keys: any; tableName: string }, 418 | any | null 419 | >; 420 | loadSnapshot: FunctionReference< 421 | "query", 422 | "public", 423 | { runId: string; workflowName: string }, 424 | { 425 | createdAt: number; 426 | runId: string; 427 | snapshot: string; 428 | updatedAt: number; 429 | workflowName: string; 430 | } | null 431 | >; 432 | }; 433 | }; 434 | vector: { 435 | vector: { 436 | createIndex: FunctionReference< 437 | "mutation", 438 | "public", 439 | { 440 | dimensions: 128 | 256 | 512 | 768 | 1024 | 1536 | 2048 | 3072 | 4096; 441 | indexName: string; 442 | }, 443 | null 444 | >; 445 | deleteIndex: FunctionReference< 446 | "action", 447 | "public", 448 | { indexName: string }, 449 | null 450 | >; 451 | describeIndex: FunctionReference< 452 | "query", 453 | "public", 454 | { indexName: string }, 455 | { 456 | count: number; 457 | dimension: 128 | 256 | 512 | 768 | 1024 | 1536 | 2048 | 3072 | 4096; 458 | metric: "cosine"; 459 | } 460 | >; 461 | listIndexes: FunctionReference<"query", "public", {}, Array>; 462 | search: FunctionReference< 463 | "action", 464 | "public", 465 | { 466 | filter?: Record; 467 | includeVector?: boolean; 468 | indexName: string; 469 | queryVector: Array; 470 | topK: number; 471 | }, 472 | Array<{ 473 | id: string; 474 | metadata?: Record; 475 | score: number; 476 | vector?: Array; 477 | }> 478 | >; 479 | upsert: FunctionReference< 480 | "mutation", 481 | "public", 482 | { 483 | ids?: Array; 484 | indexName: string; 485 | metadata?: Array>; 486 | vectors: Array>; 487 | }, 488 | Array 489 | >; 490 | }; 491 | }; 492 | }; 493 | // For now fullApiWithMounts is only fullApi which provides 494 | // jump-to-definition in component 
client code. 495 | // Use Mounts for the same type without the inference. 496 | declare const fullApiWithMounts: typeof fullApi; 497 | 498 | export declare const api: FilterApi< 499 | typeof fullApiWithMounts, 500 | FunctionReference 501 | >; 502 | export declare const internal: FilterApi< 503 | typeof fullApiWithMounts, 504 | FunctionReference 505 | >; 506 | 507 | export declare const components: {}; 508 | -------------------------------------------------------------------------------- /src/component/_generated/api.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /** 3 | * Generated `api` utility. 4 | * 5 | * THIS CODE IS AUTOMATICALLY GENERATED. 6 | * 7 | * To regenerate, run `npx convex dev`. 8 | * @module 9 | */ 10 | 11 | import { anyApi, componentsGeneric } from "convex/server"; 12 | 13 | /** 14 | * A utility for referencing Convex functions in your app's API. 15 | * 16 | * Usage: 17 | * ```js 18 | * const myFunctionReference = api.myModule.myFunction; 19 | * ``` 20 | */ 21 | export const api = anyApi; 22 | export const internal = anyApi; 23 | export const components = componentsGeneric(); 24 | -------------------------------------------------------------------------------- /src/component/_generated/dataModel.d.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /** 3 | * Generated data model types. 4 | * 5 | * THIS CODE IS AUTOMATICALLY GENERATED. 6 | * 7 | * To regenerate, run `npx convex dev`. 8 | * @module 9 | */ 10 | 11 | import type { 12 | DataModelFromSchemaDefinition, 13 | DocumentByName, 14 | TableNamesInDataModel, 15 | SystemTableNames, 16 | } from "convex/server"; 17 | import type { GenericId } from "convex/values"; 18 | import schema from "../schema.js"; 19 | 20 | /** 21 | * The names of all of your Convex tables. 22 | */ 23 | export type TableNames = TableNamesInDataModel; 24 | 25 | /** 26 | * The type of a document stored in Convex. 27 | * 28 | * @typeParam TableName - A string literal type of the table name (like "users"). 29 | */ 30 | export type Doc = DocumentByName< 31 | DataModel, 32 | TableName 33 | >; 34 | 35 | /** 36 | * An identifier for a document in Convex. 37 | * 38 | * Convex documents are uniquely identified by their `Id`, which is accessible 39 | * on the `_id` field. To learn more, see [Document IDs](https://docs.convex.dev/using/document-ids). 40 | * 41 | * Documents can be loaded using `db.get(id)` in query and mutation functions. 42 | * 43 | * IDs are just strings at runtime, but this type can be used to distinguish them from other 44 | * strings when type checking. 45 | * 46 | * @typeParam TableName - A string literal type of the table name (like "users"). 47 | */ 48 | export type Id = 49 | GenericId; 50 | 51 | /** 52 | * A type describing your Convex data model. 53 | * 54 | * This type includes information about what tables you have, the type of 55 | * documents stored in those tables, and the indexes defined on them. 56 | * 57 | * This type is used to parameterize methods like `queryGeneric` and 58 | * `mutationGeneric` to make them type-safe. 
59 | */ 60 | export type DataModel = DataModelFromSchemaDefinition; 61 | -------------------------------------------------------------------------------- /src/component/_generated/server.d.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /** 3 | * Generated utilities for implementing server-side Convex query and mutation functions. 4 | * 5 | * THIS CODE IS AUTOMATICALLY GENERATED. 6 | * 7 | * To regenerate, run `npx convex dev`. 8 | * @module 9 | */ 10 | 11 | import { 12 | ActionBuilder, 13 | AnyComponents, 14 | HttpActionBuilder, 15 | MutationBuilder, 16 | QueryBuilder, 17 | GenericActionCtx, 18 | GenericMutationCtx, 19 | GenericQueryCtx, 20 | GenericDatabaseReader, 21 | GenericDatabaseWriter, 22 | FunctionReference, 23 | } from "convex/server"; 24 | import type { DataModel } from "./dataModel.js"; 25 | 26 | type GenericCtx = 27 | | GenericActionCtx 28 | | GenericMutationCtx 29 | | GenericQueryCtx; 30 | 31 | /** 32 | * Define a query in this Convex app's public API. 33 | * 34 | * This function will be allowed to read your Convex database and will be accessible from the client. 35 | * 36 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument. 37 | * @returns The wrapped query. Include this as an `export` to name it and make it accessible. 38 | */ 39 | export declare const query: QueryBuilder; 40 | 41 | /** 42 | * Define a query that is only accessible from other Convex functions (but not from the client). 43 | * 44 | * This function will be allowed to read from your Convex database. It will not be accessible from the client. 45 | * 46 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument. 47 | * @returns The wrapped query. Include this as an `export` to name it and make it accessible. 48 | */ 49 | export declare const internalQuery: QueryBuilder; 50 | 51 | /** 52 | * Define a mutation in this Convex app's public API. 53 | * 54 | * This function will be allowed to modify your Convex database and will be accessible from the client. 55 | * 56 | * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument. 57 | * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible. 58 | */ 59 | export declare const mutation: MutationBuilder; 60 | 61 | /** 62 | * Define a mutation that is only accessible from other Convex functions (but not from the client). 63 | * 64 | * This function will be allowed to modify your Convex database. It will not be accessible from the client. 65 | * 66 | * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument. 67 | * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible. 68 | */ 69 | export declare const internalMutation: MutationBuilder; 70 | 71 | /** 72 | * Define an action in this Convex app's public API. 73 | * 74 | * An action is a function which can execute any JavaScript code, including non-deterministic 75 | * code and code with side-effects, like calling third-party services. 76 | * They can be run in Convex's JavaScript environment or in Node.js using the "use node" directive. 77 | * They can interact with the database indirectly by calling queries and mutations using the {@link ActionCtx}. 78 | * 79 | * @param func - The action. It receives an {@link ActionCtx} as its first argument. 80 | * @returns The wrapped action. Include this as an `export` to name it and make it accessible. 
81 | */ 82 | export declare const action: ActionBuilder; 83 | 84 | /** 85 | * Define an action that is only accessible from other Convex functions (but not from the client). 86 | * 87 | * @param func - The function. It receives an {@link ActionCtx} as its first argument. 88 | * @returns The wrapped function. Include this as an `export` to name it and make it accessible. 89 | */ 90 | export declare const internalAction: ActionBuilder; 91 | 92 | /** 93 | * Define an HTTP action. 94 | * 95 | * This function will be used to respond to HTTP requests received by a Convex 96 | * deployment if the requests matches the path and method where this action 97 | * is routed. Be sure to route your action in `convex/http.js`. 98 | * 99 | * @param func - The function. It receives an {@link ActionCtx} as its first argument. 100 | * @returns The wrapped function. Import this function from `convex/http.js` and route it to hook it up. 101 | */ 102 | export declare const httpAction: HttpActionBuilder; 103 | 104 | /** 105 | * A set of services for use within Convex query functions. 106 | * 107 | * The query context is passed as the first argument to any Convex query 108 | * function run on the server. 109 | * 110 | * This differs from the {@link MutationCtx} because all of the services are 111 | * read-only. 112 | */ 113 | export type QueryCtx = GenericQueryCtx; 114 | 115 | /** 116 | * A set of services for use within Convex mutation functions. 117 | * 118 | * The mutation context is passed as the first argument to any Convex mutation 119 | * function run on the server. 120 | */ 121 | export type MutationCtx = GenericMutationCtx; 122 | 123 | /** 124 | * A set of services for use within Convex action functions. 125 | * 126 | * The action context is passed as the first argument to any Convex action 127 | * function run on the server. 128 | */ 129 | export type ActionCtx = GenericActionCtx; 130 | 131 | /** 132 | * An interface to read from the database within Convex query functions. 133 | * 134 | * The two entry points are {@link DatabaseReader.get}, which fetches a single 135 | * document by its {@link Id}, or {@link DatabaseReader.query}, which starts 136 | * building a query. 137 | */ 138 | export type DatabaseReader = GenericDatabaseReader; 139 | 140 | /** 141 | * An interface to read from and write to the database within Convex mutation 142 | * functions. 143 | * 144 | * Convex guarantees that all writes within a single mutation are 145 | * executed atomically, so you never have to worry about partial writes leaving 146 | * your data in an inconsistent state. See [the Convex Guide](https://docs.convex.dev/understanding/convex-fundamentals/functions#atomicity-and-optimistic-concurrency-control) 147 | * for the guarantees Convex provides your functions. 148 | */ 149 | export type DatabaseWriter = GenericDatabaseWriter; 150 | -------------------------------------------------------------------------------- /src/component/_generated/server.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /** 3 | * Generated utilities for implementing server-side Convex query and mutation functions. 4 | * 5 | * THIS CODE IS AUTOMATICALLY GENERATED. 6 | * 7 | * To regenerate, run `npx convex dev`. 
8 | * @module 9 | */ 10 | 11 | import { 12 | actionGeneric, 13 | httpActionGeneric, 14 | queryGeneric, 15 | mutationGeneric, 16 | internalActionGeneric, 17 | internalMutationGeneric, 18 | internalQueryGeneric, 19 | componentsGeneric, 20 | } from "convex/server"; 21 | 22 | /** 23 | * Define a query in this Convex app's public API. 24 | * 25 | * This function will be allowed to read your Convex database and will be accessible from the client. 26 | * 27 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument. 28 | * @returns The wrapped query. Include this as an `export` to name it and make it accessible. 29 | */ 30 | export const query = queryGeneric; 31 | 32 | /** 33 | * Define a query that is only accessible from other Convex functions (but not from the client). 34 | * 35 | * This function will be allowed to read from your Convex database. It will not be accessible from the client. 36 | * 37 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument. 38 | * @returns The wrapped query. Include this as an `export` to name it and make it accessible. 39 | */ 40 | export const internalQuery = internalQueryGeneric; 41 | 42 | /** 43 | * Define a mutation in this Convex app's public API. 44 | * 45 | * This function will be allowed to modify your Convex database and will be accessible from the client. 46 | * 47 | * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument. 48 | * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible. 49 | */ 50 | export const mutation = mutationGeneric; 51 | 52 | /** 53 | * Define a mutation that is only accessible from other Convex functions (but not from the client). 54 | * 55 | * This function will be allowed to modify your Convex database. It will not be accessible from the client. 56 | * 57 | * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument. 58 | * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible. 59 | */ 60 | export const internalMutation = internalMutationGeneric; 61 | 62 | /** 63 | * Define an action in this Convex app's public API. 64 | * 65 | * An action is a function which can execute any JavaScript code, including non-deterministic 66 | * code and code with side-effects, like calling third-party services. 67 | * They can be run in Convex's JavaScript environment or in Node.js using the "use node" directive. 68 | * They can interact with the database indirectly by calling queries and mutations using the {@link ActionCtx}. 69 | * 70 | * @param func - The action. It receives an {@link ActionCtx} as its first argument. 71 | * @returns The wrapped action. Include this as an `export` to name it and make it accessible. 72 | */ 73 | export const action = actionGeneric; 74 | 75 | /** 76 | * Define an action that is only accessible from other Convex functions (but not from the client). 77 | * 78 | * @param func - The function. It receives an {@link ActionCtx} as its first argument. 79 | * @returns The wrapped function. Include this as an `export` to name it and make it accessible. 80 | */ 81 | export const internalAction = internalActionGeneric; 82 | 83 | /** 84 | * Define a Convex HTTP action. 85 | * 86 | * @param func - The function. It receives an {@link ActionCtx} as its first argument, and a `Request` object 87 | * as its second. 88 | * @returns The wrapped endpoint function. Route a URL path to this function in `convex/http.js`. 
89 | */ 90 | export const httpAction = httpActionGeneric; 91 | -------------------------------------------------------------------------------- /src/component/convex.config.ts: -------------------------------------------------------------------------------- 1 | import { defineComponent } from "convex/server"; 2 | 3 | const component = defineComponent("mastra"); 4 | 5 | export default component; 6 | -------------------------------------------------------------------------------- /src/component/debug.ts: -------------------------------------------------------------------------------- 1 | import { v, VString } from "convex/values"; 2 | import { 3 | ActionCtx, 4 | internalAction, 5 | internalMutation, 6 | internalQuery, 7 | } from "./_generated/server"; 8 | 9 | import { logLevel } from "./logger.js"; 10 | import { internal } from "./_generated/api"; 11 | import { TableNames } from "./_generated/dataModel"; 12 | import { mapSerializedToMastra, TABLE_WORKFLOW_SNAPSHOT } from "../mapping"; 13 | 14 | export const debugOverrideLogLevel = internalMutation({ 15 | args: { 16 | logLevel, 17 | }, 18 | handler: async (ctx, args) => { 19 | const frozen = await ctx.db.query("config").first(); 20 | if (frozen) { 21 | await ctx.db.patch(frozen._id, { 22 | config: { 23 | ...frozen.config, 24 | logLevel: args.logLevel, 25 | }, 26 | }); 27 | } else { 28 | await ctx.db.insert("config", { 29 | config: { 30 | logLevel: args.logLevel, 31 | }, 32 | }); 33 | } 34 | }, 35 | returns: v.null(), 36 | }); 37 | 38 | export const deleteAll = internalAction({ 39 | args: {}, 40 | handler: async (ctx) => { 41 | await Promise.all([deleteTable(ctx, "config")]); 42 | }, 43 | returns: v.null(), 44 | }); 45 | 46 | async function deleteTable(ctx: ActionCtx, table: TableNames) { 47 | let cursor: string | null = null; 48 | let isDone = false; 49 | while (!isDone) { 50 | ({ isDone, cursor } = await ctx.runMutation(internal.debug.deletePage, { 51 | table, 52 | cursor, 53 | })); 54 | } 55 | } 56 | 57 | export const deletePage = internalMutation({ 58 | args: { 59 | table: v.string() as VString, 60 | cursor: v.union(v.string(), v.null()), 61 | }, 62 | handler: async (ctx, args) => { 63 | const results = await ctx.db.query(args.table).paginate({ 64 | cursor: args.cursor ?? 
null, 65 | numItems: 1000, 66 | }); 67 | await Promise.all(results.page.map((result) => ctx.db.delete(result._id))); 68 | return { 69 | isDone: results.isDone, 70 | cursor: results.continueCursor, 71 | }; 72 | }, 73 | returns: v.object({ 74 | isDone: v.boolean(), 75 | cursor: v.string(), 76 | }), 77 | }); 78 | 79 | export const getLatestWorkflowStatus = internalQuery({ 80 | args: {}, 81 | handler: async (ctx): Promise => { 82 | const latest = await ctx.db.query("snapshots").order("desc").first(); 83 | if (!latest) { 84 | return; 85 | } 86 | const workflow = mapSerializedToMastra(TABLE_WORKFLOW_SNAPSHOT, latest); 87 | return workflow.snapshot; 88 | }, 89 | returns: v.any(), 90 | }); 91 | -------------------------------------------------------------------------------- /src/component/logger.ts: -------------------------------------------------------------------------------- 1 | import { Infer, v } from "convex/values"; 2 | import { internalQuery, QueryCtx } from "./_generated/server"; 3 | 4 | export const DEFAULT_LOG_LEVEL: LogLevel = "INFO"; 5 | 6 | export const logLevel = v.union( 7 | v.literal("DEBUG"), 8 | v.literal("TRACE"), 9 | v.literal("INFO"), 10 | v.literal("REPORT"), 11 | v.literal("WARN"), 12 | v.literal("ERROR") 13 | ); 14 | export type LogLevel = Infer; 15 | 16 | /* eslint-disable @typescript-eslint/no-explicit-any */ 17 | export type Logger = { 18 | debug: (...args: unknown[]) => void; 19 | info: (...args: unknown[]) => void; 20 | warn: (...args: unknown[]) => void; 21 | error: (...args: unknown[]) => void; 22 | time: (label: string) => void; 23 | timeEnd: (label: string) => void; 24 | event: (event: string, payload: Record) => void; 25 | logLevel: LogLevel; 26 | }; 27 | const logLevelOrder = logLevel.members.map((l) => l.value); 28 | const logLevelByName = logLevelOrder.reduce( 29 | (acc, l, i) => { 30 | acc[l] = i; 31 | return acc; 32 | }, 33 | {} as Record 34 | ); 35 | export function shouldLog(config: LogLevel, level: LogLevel) { 36 | return logLevelByName[config] <= logLevelByName[level]; 37 | } 38 | 39 | const DEBUG = logLevelByName["DEBUG"]; 40 | const TRACE = logLevelByName["TRACE"]; 41 | const INFO = logLevelByName["INFO"]; 42 | const REPORT = logLevelByName["REPORT"]; 43 | const WARN = logLevelByName["WARN"]; 44 | const ERROR = logLevelByName["ERROR"]; 45 | 46 | export function createLogger(level: LogLevel | undefined): Logger { 47 | const logLevel = level ?? 
DEFAULT_LOG_LEVEL; 48 | const levelIndex = logLevelByName[logLevel]; 49 | if (levelIndex === undefined) { 50 | throw new Error(`Invalid log level: ${level}`); 51 | } 52 | return { 53 | debug: (...args: unknown[]) => { 54 | if (levelIndex <= DEBUG) { 55 | console.debug(...args); 56 | } 57 | }, 58 | info: (...args: unknown[]) => { 59 | if (levelIndex <= INFO) { 60 | console.info(...args); 61 | } 62 | }, 63 | warn: (...args: unknown[]) => { 64 | if (levelIndex <= WARN) { 65 | console.warn(...args); 66 | } 67 | }, 68 | error: (...args: unknown[]) => { 69 | if (levelIndex <= ERROR) { 70 | console.error(...args); 71 | } 72 | }, 73 | time: (label: string) => { 74 | if (levelIndex <= TRACE) { 75 | console.time(label); 76 | } 77 | }, 78 | timeEnd: (label: string) => { 79 | if (levelIndex <= TRACE) { 80 | console.timeEnd(label); 81 | } 82 | }, 83 | event: (event: string, payload: Record) => { 84 | const fullPayload = { 85 | component: "mastra", 86 | event, 87 | ...payload, 88 | }; 89 | if (levelIndex === REPORT && event === "report") { 90 | console.info(JSON.stringify(fullPayload)); 91 | } else if (levelIndex <= INFO) { 92 | console.info(JSON.stringify(fullPayload)); 93 | } 94 | }, 95 | logLevel, 96 | }; 97 | } 98 | 99 | export async function makeConsole(ctx: QueryCtx) { 100 | const config = await ctx.db.query("config").first(); 101 | const console = createLogger(config?.config.logLevel); 102 | return console; 103 | } 104 | 105 | export const getLogLevel = internalQuery({ 106 | args: {}, 107 | handler: async (ctx) => { 108 | const config = await ctx.db.query("config").first(); 109 | return config?.config.logLevel ?? DEFAULT_LOG_LEVEL; 110 | }, 111 | returns: logLevel, 112 | }); 113 | -------------------------------------------------------------------------------- /src/component/schema.ts: -------------------------------------------------------------------------------- 1 | import { defineSchema, defineTable } from "convex/server"; 2 | import storageTables from "./storage/tables.js"; 3 | import { v } from "convex/values"; 4 | import { logLevel } from "./logger.js"; 5 | import vectorTables from "./vector/tables.js"; 6 | 7 | export default defineSchema({ 8 | config: defineTable({ 9 | config: v.object({ logLevel: logLevel }), 10 | }), 11 | ...storageTables, 12 | ...vectorTables, 13 | }); 14 | -------------------------------------------------------------------------------- /src/component/setup.test.ts: -------------------------------------------------------------------------------- 1 | /// 2 | import { test } from "vitest"; 3 | export const modules = import.meta.glob("./**/*.*s"); 4 | 5 | test("setup", () => {}); 6 | -------------------------------------------------------------------------------- /src/component/storage/messages.ts: -------------------------------------------------------------------------------- 1 | import { v } from "convex/values"; 2 | import { Doc } from "../_generated/dataModel.js"; 3 | import { mutation, query } from "../_generated/server.js"; 4 | import { 5 | type SerializedMessage, 6 | type SerializedThread, 7 | vSerializedMessage, 8 | vSerializedThread, 9 | } from "../../mapping/index.js"; 10 | import { paginator } from "convex-helpers/server/pagination"; 11 | import schema from "../schema.js"; 12 | import { makeConsole } from "../logger.js"; 13 | 14 | function threadToSerializedMastra(thread: Doc<"threads">): SerializedThread { 15 | const { id, title, metadata, resourceId, createdAt, updatedAt } = thread; 16 | return { id, title, metadata, resourceId, createdAt, updatedAt }; 17 | 
} 18 | 19 | export const getThreadById = query({ 20 | args: { threadId: v.string() }, 21 | handler: async (ctx, args) => { 22 | const console = await makeConsole(ctx); 23 | console.debug(`Getting thread by id ${args.threadId}`); 24 | const thread = await ctx.db 25 | .query("threads") 26 | .withIndex("id", (q) => q.eq("id", args.threadId)) 27 | .unique(); 28 | if (!thread) { 29 | console.debug(`Thread ${args.threadId} not found`); 30 | return null; 31 | } 32 | return threadToSerializedMastra(thread); 33 | }, 34 | returns: v.union(vSerializedThread, v.null()), 35 | }); 36 | 37 | export const getThreadsByResourceId = query({ 38 | args: { 39 | resourceId: v.string(), 40 | cursor: v.optional(v.union(v.string(), v.null())), 41 | }, 42 | handler: async ( 43 | ctx, 44 | args 45 | ): Promise<{ 46 | threads: SerializedThread[]; 47 | continueCursor: string; 48 | isDone: boolean; 49 | }> => { 50 | const console = await makeConsole(ctx); 51 | console.debug(`Getting threads by resource id ${args.resourceId}`); 52 | const threads = await paginator(ctx.db, schema) 53 | .query("threads") 54 | .withIndex("resourceId", (q) => q.eq("resourceId", args.resourceId)) 55 | .paginate({ 56 | numItems: 100, 57 | cursor: args.cursor ?? null, 58 | }); 59 | console.debug(`Got ${threads.page.length} threads`); 60 | return { 61 | threads: threads.page.map(threadToSerializedMastra), 62 | continueCursor: threads.continueCursor, 63 | isDone: threads.isDone, 64 | }; 65 | }, 66 | returns: v.object({ 67 | threads: v.array(vSerializedThread), 68 | continueCursor: v.string(), 69 | isDone: v.boolean(), 70 | }), 71 | }); 72 | 73 | export const saveThread = mutation({ 74 | args: { thread: vSerializedThread }, 75 | handler: async (ctx, args) => { 76 | const console = await makeConsole(ctx); 77 | console.debug(`Saving thread ${args.thread.id}`); 78 | await ctx.db.insert("threads", args.thread); 79 | }, 80 | returns: v.null(), 81 | }); 82 | 83 | export const updateThread = mutation({ 84 | args: { 85 | threadId: v.string(), 86 | title: v.optional(v.string()), 87 | metadata: v.optional(v.record(v.string(), v.any())), 88 | }, 89 | handler: async (ctx, args) => { 90 | const console = await makeConsole(ctx); 91 | console.debug(`Updating thread ${args.threadId}`); 92 | const thread = await ctx.db 93 | .query("threads") 94 | .withIndex("id", (q) => q.eq("id", args.threadId)) 95 | .unique(); 96 | if (!thread) { 97 | throw new Error(`Thread ${args.threadId} not found`); 98 | } 99 | if (args.title) { 100 | console.debug(`Updating title for thread ${args.threadId}`); 101 | await ctx.db.patch(thread._id, { 102 | title: args.title, 103 | updatedAt: Date.now(), 104 | }); 105 | } 106 | if (args.metadata) { 107 | console.debug(`Updating metadata for thread ${args.threadId}`); 108 | await ctx.db.patch(thread._id, { 109 | metadata: args.metadata, 110 | updatedAt: Date.now(), 111 | }); 112 | } 113 | return threadToSerializedMastra(thread); 114 | }, 115 | returns: vSerializedThread, 116 | }); 117 | 118 | export const deleteThread = mutation({ 119 | args: { threadId: v.string() }, 120 | handler: async (ctx, args) => { 121 | const console = await makeConsole(ctx); 122 | console.debug(`Deleting thread ${args.threadId}`); 123 | const thread = await ctx.db 124 | .query("threads") 125 | .withIndex("id", (q) => q.eq("id", args.threadId)) 126 | .unique(); 127 | if (!thread) { 128 | throw new Error(`Thread ${args.threadId} not found`); 129 | } 130 | await ctx.db.delete(thread._id); 131 | }, 132 | returns: v.null(), 133 | }); 134 | 135 | // const vMemoryConfig = 
v.object({ 136 | // lastMessages: v.optional(v.union(v.number(), v.literal(false))), 137 | // semanticRecall: v.optional( 138 | // v.union( 139 | // v.boolean(), 140 | // v.object({ 141 | // topK: v.number(), 142 | // messageRange: v.union( 143 | // v.number(), 144 | // v.object({ before: v.number(), after: v.number() }), 145 | // ), 146 | // }), 147 | // ), 148 | // ), 149 | // workingMemory: v.optional( 150 | // v.object({ 151 | // enabled: v.boolean(), 152 | // template: v.optional(v.string()), 153 | // use: v.optional( 154 | // v.union(v.literal("text-stream"), v.literal("tool-call")), 155 | // ), 156 | // }), 157 | // ), 158 | // threads: v.optional( 159 | // v.object({ 160 | // generateTitle: v.optional(v.boolean()), 161 | // }), 162 | // ), 163 | // }); 164 | const vSelectBy = v.object({ 165 | vectorSearchString: v.optional(v.string()), 166 | last: v.optional(v.union(v.number(), v.literal(false))), 167 | include: v.optional( 168 | v.array( 169 | v.object({ 170 | id: v.string(), 171 | withPreviousMessages: v.optional(v.number()), 172 | withNextMessages: v.optional(v.number()), 173 | }) 174 | ) 175 | ), 176 | }); 177 | 178 | function messageToSerializedMastra( 179 | message: Doc<"messages"> 180 | ): SerializedMessage { 181 | const { threadOrder: _, _id, _creationTime, ...serialized } = message; 182 | return serialized; 183 | } 184 | 185 | const DEFAULT_MESSAGES_LIMIT = 40; // What pg & upstash do too. 186 | 187 | export const getMessagesPage = query({ 188 | args: { 189 | threadId: v.string(), 190 | selectBy: v.optional(vSelectBy), 191 | // Unimplemented and as far I can tell no storage provider has either. 192 | // memoryConfig: v.optional(vMemoryConfig), 193 | }, 194 | handler: async (ctx, args): Promise => { 195 | const console = await makeConsole(ctx); 196 | console.debug(`Getting messages page for thread ${args.threadId}`); 197 | const messages = await ctx.db 198 | .query("messages") 199 | .withIndex("threadId", (q) => q.eq("threadId", args.threadId)) 200 | .order("desc") 201 | .take(args.selectBy?.last ? args.selectBy.last : DEFAULT_MESSAGES_LIMIT); 202 | 203 | const handled: boolean[] = []; 204 | const toFetch: number[] = []; 205 | for (const m of messages) { 206 | handled[m.threadOrder] = true; 207 | } 208 | await Promise.all( 209 | args.selectBy?.include?.map(async (range) => { 210 | const includeDoc = await ctx.db 211 | .query("messages") 212 | .withIndex("id", (q) => q.eq("id", range.id)) 213 | .unique(); 214 | if (!includeDoc) { 215 | console.warn(`Message ${range.id} not found`); 216 | return; 217 | } 218 | if (!range.withPreviousMessages && !range.withNextMessages) { 219 | messages.push(includeDoc); 220 | return; 221 | } 222 | const order = includeDoc.threadOrder; 223 | for ( 224 | let i = order - (range.withPreviousMessages ?? 0); 225 | i < order + (range.withNextMessages ?? 0); 226 | i++ 227 | ) { 228 | if (!handled[i]) { 229 | toFetch.push(i); 230 | handled[i] = true; 231 | } 232 | } 233 | }) ?? 
[] 234 | ); 235 | console.debug(`Need to fetch ${toFetch.length} messages`); 236 | // sort and find unique numbers in toFetch 237 | const uniqueToFetch = [...new Set(toFetch)].sort(); 238 | console.debug(`Unique to fetch ${uniqueToFetch}`); 239 | // find contiguous ranges in uniqueToFetch 240 | const ranges: { start: number; end: number }[] = []; 241 | for (let i = 0; i < uniqueToFetch.length; i++) { 242 | const start = uniqueToFetch[i]; 243 | let end = start; 244 | while (i + 1 < uniqueToFetch.length && uniqueToFetch[i + 1] === end + 1) { 245 | end++; 246 | i++; 247 | } 248 | ranges.push({ start, end }); 249 | } 250 | console.debug(`Ranges to fetch ${ranges}`); 251 | const fetched = ( 252 | await Promise.all( 253 | ranges.map(async (range) => { 254 | return await ctx.db 255 | .query("messages") 256 | .withIndex("threadId", (q) => 257 | q 258 | .eq("threadId", args.threadId) 259 | .gte("threadOrder", range.start) 260 | .lte("threadOrder", range.end) 261 | ) 262 | .collect(); 263 | }) 264 | ) 265 | ).flat(); 266 | console.debug(`Fetched ${fetched.length} messages`); 267 | messages.push(...fetched); 268 | console.debug(`Total messages ${messages.length}`); 269 | return messages.map(messageToSerializedMastra); 270 | }, 271 | returns: v.array(vSerializedMessage), 272 | }); 273 | 274 | export const saveMessages = mutation({ 275 | args: { messages: v.array(vSerializedMessage) }, 276 | handler: async (ctx, args) => { 277 | const console = await makeConsole(ctx); 278 | console.debug(`Saving messages ${args.messages.length}`); 279 | const messagesByThreadId: Record = {}; 280 | for (const message of args.messages) { 281 | messagesByThreadId[message.threadId] = [ 282 | ...(messagesByThreadId[message.threadId] ?? []), 283 | message, 284 | ]; 285 | } 286 | for (const threadId in messagesByThreadId) { 287 | const lastMessage = await ctx.db 288 | .query("messages") 289 | .withIndex("threadId", (q) => q.eq("threadId", threadId)) 290 | .order("desc") 291 | .first(); 292 | let threadOrder = lastMessage?.threadOrder ?? 
0; 293 | for (const message of messagesByThreadId[threadId]) { 294 | threadOrder++; 295 | await ctx.db.insert("messages", { 296 | ...message, 297 | threadOrder, 298 | }); 299 | } 300 | } 301 | }, 302 | returns: v.null(), 303 | }); 304 | 305 | // eslint-disable-next-line @typescript-eslint/no-unused-vars 306 | const console = "THIS IS A REMINDER TO USE makeConsole"; 307 | -------------------------------------------------------------------------------- /src/component/storage/storage.ts: -------------------------------------------------------------------------------- 1 | import { v, Validator } from "convex/values"; 2 | import tables from "./tables.js"; 3 | import { internal } from "../_generated/api.js"; 4 | import { TableNames } from "./tables.js"; 5 | import { 6 | action, 7 | internalMutation, 8 | mutation, 9 | query, 10 | } from "../_generated/server.js"; 11 | import { paginator } from "convex-helpers/server/pagination"; 12 | import schema from "../schema.js"; 13 | import { createLogger, makeConsole } from "../logger.js"; 14 | 15 | interface StorageColumn { 16 | type: "text" | "timestamp" | "uuid" | "jsonb" | "integer" | "bigint"; 17 | primaryKey?: boolean; 18 | nullable?: boolean; 19 | references?: { 20 | table: string; 21 | column: string; 22 | }; 23 | } 24 | 25 | export function validateTableSchema( 26 | tableName: TableNames, 27 | tableSchema: Record 28 | ) { 29 | if (!tables[tableName]) { 30 | throw new Error(`Table ${tableName} not found in schema`); 31 | } 32 | const table = tables[tableName]; 33 | const fields = table.validator.fields; 34 | for (const [name, field] of Object.entries(tableSchema)) { 35 | if (!(name in fields)) { 36 | throw new Error(`Field ${name} not found in schema for ${tableName}`); 37 | } 38 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 39 | let convexValue: Validator["kind"]; 40 | switch (field.type) { 41 | case "text": 42 | convexValue = "string"; 43 | break; 44 | case "integer": 45 | convexValue = "int64"; 46 | break; 47 | case "bigint": 48 | convexValue = "int64"; 49 | break; 50 | case "timestamp": 51 | convexValue = "int64"; 52 | break; 53 | case "jsonb": 54 | convexValue = "any"; 55 | break; 56 | case "uuid": 57 | convexValue = "string"; 58 | break; 59 | } 60 | if (!convexValue) { 61 | throw new Error( 62 | `Unexpected field type ${field.type} for ${name} in ${tableName}` 63 | ); 64 | } 65 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 66 | const expected = fields[name as keyof typeof fields] as Validator; 67 | if (expected.type !== convexValue) { 68 | throw new Error( 69 | `Field ${name} in table ${tableName} was expected to be a ${convexValue} but got ${expected.type}` 70 | ); 71 | } 72 | if (expected.isOptional === "required" && field.nullable) { 73 | throw new Error( 74 | `Field ${name} in table ${tableName} was expected to be required but the schema specified nullable` 75 | ); 76 | } 77 | } 78 | } 79 | 80 | export const insert = mutation({ 81 | args: { 82 | tableName: v.string(), 83 | document: v.any(), 84 | }, 85 | handler: async (ctx, args) => { 86 | const console = await makeConsole(ctx); 87 | console.debug(`Inserting ${args.tableName}`, args.document); 88 | // TODO: split out into inserts per usecase and enforce unique constraints 89 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 90 | await ctx.db.insert(args.tableName as any, args.document); 91 | }, 92 | returns: v.null(), 93 | }); 94 | 95 | export const batchInsert = mutation({ 96 | args: { 97 | tableName: v.string(), 98 | records: 
v.array(v.any()), 99 | }, 100 | handler: async (ctx, args) => { 101 | const console = await makeConsole(ctx); 102 | console.debug(`Batch inserting ${args.tableName}`, args.records); 103 | await Promise.all( 104 | args.records.map(async (record) => { 105 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 106 | await ctx.db.insert(args.tableName as any, record); 107 | }) 108 | ); 109 | }, 110 | returns: v.null(), 111 | }); 112 | 113 | export const loadSnapshot = query({ 114 | args: { 115 | runId: v.string(), 116 | workflowName: v.string(), 117 | }, 118 | handler: async (ctx, args) => { 119 | const console = await makeConsole(ctx); 120 | console.debug( 121 | `Loading snapshot for ${args.runId} and ${args.workflowName}` 122 | ); 123 | const snapshot = await ctx.db 124 | .query("snapshots") 125 | .withIndex("runId", (q) => 126 | q.eq("runId", args.runId).eq("workflowName", args.workflowName) 127 | ) 128 | .order("desc") 129 | .first(); 130 | if (!snapshot) { 131 | return null; 132 | } 133 | const { _id, _creationTime, ...rest } = snapshot; 134 | return rest; 135 | }, 136 | returns: v.union(tables.snapshots.validator, v.null()), 137 | }); 138 | 139 | export const load = query({ 140 | args: { 141 | tableName: v.string(), 142 | keys: v.any(), 143 | }, 144 | handler: async (ctx, args) => { 145 | const console = await makeConsole(ctx); 146 | console.debug(`Loading ${args.tableName}`, args.keys); 147 | if (args) 148 | throw new Error( 149 | `Not implemented: load for ${args.tableName}: ${JSON.stringify(args.keys)}` 150 | ); 151 | }, 152 | returns: v.union(v.any(), v.null()), 153 | }); 154 | 155 | export const clearTable = action({ 156 | args: { tableName: v.string() }, 157 | handler: async (ctx, args) => { 158 | const logLevel = await ctx.runQuery(internal.logger.getLogLevel); 159 | const console = createLogger(logLevel); 160 | console.debug(`Clearing ${args.tableName}`); 161 | let cursor: string | null = null; 162 | while (true) { 163 | cursor = await ctx.scheduler.runAfter( 164 | 0, 165 | internal.storage.storage.clearPage, 166 | { 167 | tableName: args.tableName, 168 | cursor, 169 | } 170 | ); 171 | if (!cursor) { 172 | break; 173 | } 174 | } 175 | console.debug(`Cleared ${args.tableName}`); 176 | }, 177 | returns: v.null(), 178 | }); 179 | 180 | export const clearPage = internalMutation({ 181 | args: { tableName: v.string(), cursor: v.union(v.string(), v.null()) }, 182 | handler: async (ctx, args): Promise => { 183 | const console = await makeConsole(ctx); 184 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 185 | const page = await ctx.db.query(args.tableName as any).paginate({ 186 | numItems: 1000, 187 | cursor: args.cursor ?? 
null, 188 | }); 189 | await Promise.all( 190 | page.page.map(async (item) => { 191 | await ctx.db.delete(item._id); 192 | }) 193 | ); 194 | console.debug(`Deleted ${page.page.length} items from ${args.tableName}`); 195 | if (!page.isDone) { 196 | return page.continueCursor; 197 | } 198 | return null; 199 | }, 200 | returns: v.union(v.string(), v.null()), 201 | }); 202 | 203 | export const getEvalsByAgentName = query({ 204 | args: { 205 | agentName: v.string(), 206 | type: v.optional(v.union(v.literal("test"), v.literal("live"))), 207 | }, 208 | handler: async (ctx, args) => { 209 | const console = await makeConsole(ctx); 210 | console.debug(`Getting evals by name ${args.agentName}, type ${args.type}`); 211 | const evals = await ctx.db 212 | .query("evals") 213 | .withIndex("agentName", (q) => { 214 | const byAgent = q.eq("agentName", args.agentName); 215 | if (args.type === "test") { 216 | return byAgent.gt("testInfo.testPath", null); 217 | } else if (args.type === "live") { 218 | return byAgent.lte("testInfo.testPath", null); 219 | } 220 | return byAgent; 221 | }) 222 | .collect(); 223 | return evals.map((e) => { 224 | const { _id, _creationTime, ...serialized } = e; 225 | return serialized; 226 | }); 227 | }, 228 | returns: v.array(tables.evals.validator), 229 | }); 230 | 231 | const MAX_TRACES_SCANNED = 4096; 232 | export const getTracesPage = query({ 233 | args: { 234 | name: v.optional(v.string()), 235 | scope: v.optional(v.string()), 236 | cursor: v.union(v.string(), v.null()), 237 | numItems: v.number(), 238 | attributes: v.optional(v.record(v.string(), v.string())), 239 | }, 240 | handler: async (ctx, args) => { 241 | const console = await makeConsole(ctx); 242 | console.debug( 243 | `Getting traces page with name ${args.name}, scope ${args.scope}, cursor ${args.cursor}, numItems ${args.numItems}, attributes ${args.attributes}` 244 | ); 245 | const { scope, name, cursor, numItems, attributes } = args; 246 | const overfetch = (scope ? 1 : 8) * (name ? 1 : 8); 247 | const traces = paginator(ctx.db, schema).query("traces"); 248 | const results = await ( 249 | scope 250 | ? traces.withIndex("scope", (q) => q.eq("scope", scope)) 251 | : name 252 | ? 
traces.withIndex("name", (q) => 253 | q.gte("name", name).lt("name", name + "~") 254 | ) 255 | : traces 256 | ).paginate({ 257 | numItems: Math.min(numItems * overfetch, MAX_TRACES_SCANNED), 258 | cursor: cursor, 259 | }); 260 | console.debug(`Got ${results.page.length} traces`); 261 | return { 262 | isDone: results.isDone, 263 | continuCursor: results.continueCursor, 264 | page: results.page 265 | .filter( 266 | (trace) => 267 | (!name || trace.name.startsWith(name)) && 268 | (!scope || trace.scope === scope) && 269 | (!attributes || 270 | Object.entries(attributes).every( 271 | ([key, value]) => trace[key as keyof typeof trace] === value 272 | )) 273 | ) 274 | .map((t) => { 275 | const { _id, _creationTime, ...serialized } = t; 276 | return serialized; 277 | }), 278 | }; 279 | }, 280 | returns: v.object({ 281 | isDone: v.boolean(), 282 | continuCursor: v.string(), 283 | page: v.array(tables.traces.validator), 284 | }), 285 | }); 286 | 287 | // eslint-disable-next-line @typescript-eslint/no-unused-vars 288 | const console = "THIS IS A REMINDER TO USE makeConsole"; 289 | -------------------------------------------------------------------------------- /src/component/storage/tables.ts: -------------------------------------------------------------------------------- 1 | import { defineTable } from "convex/server"; 2 | import { v } from "convex/values"; 3 | import { vAssistantContent, vToolContent, vUserContent } from "../../ai/types"; 4 | 5 | const storageSchema = { 6 | // messages.ts 7 | messages: defineTable({ 8 | id: v.string(), // TODO: can we juse the _id? 9 | threadId: v.string(), // TODO: can we use v.id("threads")? 10 | threadOrder: v.number(), 11 | content: v.union(vUserContent, vAssistantContent, vToolContent), 12 | role: v.union( 13 | v.literal("system"), 14 | v.literal("user"), 15 | v.literal("assistant"), 16 | v.literal("tool") 17 | ), 18 | type: v.union( 19 | v.literal("text"), 20 | v.literal("tool-call"), 21 | v.literal("tool-result") 22 | ), 23 | createdAt: v.number(), 24 | }) 25 | .index("id", ["id"]) 26 | .index("threadId", ["threadId", "threadOrder"]), 27 | threads: defineTable({ 28 | id: v.string(), // TODO: can we juse the _id? 29 | resourceId: v.string(), 30 | title: v.optional(v.string()), 31 | metadata: v.optional(v.record(v.string(), v.any())), 32 | createdAt: v.number(), 33 | updatedAt: v.number(), 34 | }) 35 | .index("id", ["id"]) 36 | .index("resourceId", ["resourceId"]), 37 | 38 | // index.ts 39 | snapshots: defineTable({ 40 | workflowName: v.string(), 41 | runId: v.string(), 42 | snapshot: v.string(), // JSON for now, later: 43 | createdAt: v.number(), 44 | updatedAt: v.number(), 45 | }).index("runId", ["runId", "workflowName"]), 46 | evals: defineTable({ 47 | input: v.string(), 48 | output: v.string(), 49 | result: v.any(), 50 | agentName: v.string(), 51 | metricName: v.string(), 52 | instructions: v.string(), 53 | testInfo: v.optional(v.any()), 54 | globalRunId: v.string(), 55 | runId: v.string(), 56 | createdAt: v.number(), 57 | }).index("agentName", ["agentName", "testInfo.testPath"]), 58 | traces: defineTable({ 59 | id: v.string(), // TODO: can we juse the _id? 
60 |     parentSpanId: v.optional(v.union(v.string(), v.null())),
61 |     name: v.string(),
62 |     traceId: v.string(),
63 |     scope: v.string(),
64 |     kind: v.union(v.number(), v.int64()),
65 |     attributes: v.optional(v.any()),
66 |     status: v.optional(v.any()),
67 |     events: v.optional(v.any()),
68 |     links: v.optional(v.any()),
69 |     other: v.optional(v.string()),
70 |     startTime: v.int64(),
71 |     endTime: v.int64(),
72 |     createdAt: v.number(),
73 |   })
74 |     .index("scope", ["scope"])
75 |     .index("name", ["name"]),
76 | };
77 | 
78 | export type TableNames = keyof typeof storageSchema;
79 | 
80 | export default storageSchema;
81 | 
--------------------------------------------------------------------------------
/src/component/vector/tables.ts:
--------------------------------------------------------------------------------
1 | import { literals } from "convex-helpers/validators";
2 | import {
3 |   defineTable,
4 |   GenericTableSearchIndexes,
5 |   TableDefinition,
6 | } from "convex/server";
7 | import { GenericId, ObjectType, v, VId, VObject, VUnion } from "convex/values";
8 | 
9 | const embeddings = {
10 |   id: v.optional(v.string()),
11 |   indexName: v.string(),
12 |   vector: v.array(v.number()),
13 |   metadata: v.optional(v.record(v.string(), v.any())),
14 | };
15 | 
16 | function table<D extends number>(dimensions: D): Table<D> {
17 |   return defineTable(embeddings)
18 |     .vectorIndex("vector", {
19 |       vectorField: "vector",
20 |       dimensions,
21 |       filterFields: ["indexName"], // TODO: More fields
22 |     })
23 |     .index("id", ["id"]);
24 | }
25 | 
26 | export const SUPPORTED_DIMENSIONS = [
27 |   128, 256, 512, 768, 1024, 1536, 2048, 3072, 4096,
28 | ] as const;
29 | export type SupportedDimension = (typeof SUPPORTED_DIMENSIONS)[number];
30 | export const SUPPORTED_TABLE_NAMES = SUPPORTED_DIMENSIONS.map(
31 |   (d) => `embeddings_${d}`
32 | ) as `embeddings_${(typeof SUPPORTED_DIMENSIONS)[number]}`[];
33 | export type SupportedTableName = (typeof SUPPORTED_TABLE_NAMES)[number];
34 | export const SUPPORTED_TABLE_ID = v.union(
35 |   ...SUPPORTED_TABLE_NAMES.map((name) => v.id(name))
36 | ) as VUnion<
37 |   GenericId<(typeof SUPPORTED_TABLE_NAMES)[number]>,
38 |   VId<(typeof SUPPORTED_TABLE_NAMES)[number]>[]
39 | >;
40 | 
41 | export const vSupportedDimension = literals(...SUPPORTED_DIMENSIONS);
42 | export const vSupportedTableName = literals(...SUPPORTED_TABLE_NAMES);
43 | export const vSupportedId = SUPPORTED_TABLE_ID;
44 | 
45 | type Table<D extends number> = TableDefinition<
46 |   VObject<ObjectType<typeof embeddings>, typeof embeddings>,
47 |   { id: ["id"] },
48 |   GenericTableSearchIndexes,
49 |   VectorIndex<D>
50 | >;
51 | 
52 | type VectorIndex<D extends number> = {
53 |   vector: {
54 |     vectorField: "vector";
55 |     dimensions: D;
56 |     filterFields: string;
57 |   };
58 | };
59 | 
60 | const tables: {
61 |   [K in keyof typeof SUPPORTED_DIMENSIONS &
62 |     number as `embeddings_${(typeof SUPPORTED_DIMENSIONS)[K]}`]: Table<
63 |     (typeof SUPPORTED_DIMENSIONS)[K]
64 |   >;
65 | } = Object.fromEntries(
66 |   SUPPORTED_DIMENSIONS.map((dimensions) => [
67 |     `embeddings_${dimensions}`,
68 |     table(dimensions),
69 |   ])
70 | ) as Record<
71 |   `embeddings_${(typeof SUPPORTED_DIMENSIONS)[number]}`,
72 |   Table<(typeof SUPPORTED_DIMENSIONS)[number]>
73 | >;
74 | 
75 | // Hack to get vector indexes of arbitrary* dimensions
76 | export default {
77 |   ...tables,
78 |   indexTableMap: defineTable({
79 |     indexName: v.string(),
80 |     tableName: vSupportedTableName,
81 |     dimensions: vSupportedDimension,
82 |   }).index("indexName", ["indexName"]),
83 |   // documents: defineTable({
84 |   //   id: v.string(),
85 |   //   content: v.string(),
86 |   // }).index("id",
["id"]), 87 | }; 88 | -------------------------------------------------------------------------------- /src/component/vector/vector.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Implement the API based on ../storage/storage.ts, using and editing the tables in ./tables.ts 3 | * and providing an API that matches this abstract class, but not importing any Mastra* classes or from @mastra/core or @mastra/core/mastra or @mastra/core/vector 4 | */ 5 | 6 | import { v } from "convex/values"; 7 | import { 8 | action, 9 | query, 10 | mutation, 11 | internalQuery, 12 | internalMutation, 13 | QueryCtx, 14 | } from "../_generated/server"; 15 | import { 16 | SUPPORTED_DIMENSIONS, 17 | SupportedDimension, 18 | vSupportedDimension, 19 | vSupportedId, 20 | vSupportedTableName, 21 | } from "./tables"; 22 | import { internal } from "../_generated/api"; 23 | import { paginator } from "convex-helpers/server/pagination"; 24 | import schema from "../schema"; 25 | 26 | export const createIndex = mutation({ 27 | args: { indexName: v.string(), dimensions: vSupportedDimension }, 28 | handler: async (ctx, { indexName, dimensions }) => { 29 | // For now only validate that it maches one of the supported sizes 30 | if (!SUPPORTED_DIMENSIONS.includes(dimensions)) { 31 | throw new Error(`Unsupported index size: ${indexName}`); 32 | } 33 | const existing = await ctx.db 34 | .query("indexTableMap") 35 | .withIndex("indexName", (q) => q.eq("indexName", indexName)) 36 | .first(); 37 | if (existing) { 38 | if (existing.dimensions !== dimensions) { 39 | throw new Error("Index already exists with different dimensions"); 40 | } 41 | console.warn(`Index ${indexName} already exists, not creating...`); 42 | return; 43 | } 44 | console.log(`Creating index ${indexName} with dimensions ${dimensions}`); 45 | await ctx.db.insert("indexTableMap", { 46 | indexName, 47 | tableName: `embeddings_${dimensions}`, 48 | dimensions: dimensions, 49 | }); 50 | }, 51 | returns: v.null(), 52 | }); 53 | 54 | function getIndexMetadata(ctx: QueryCtx, name: string) { 55 | return ctx.db 56 | .query("indexTableMap") 57 | .withIndex("indexName", (q) => q.eq("indexName", name)) 58 | .order("desc") 59 | .first(); 60 | } 61 | 62 | export const getIndexMetadataQuery = internalQuery({ 63 | args: { indexName: v.string() }, 64 | handler: async (ctx, args) => { 65 | return await getIndexMetadata(ctx, args.indexName); 66 | }, 67 | }); 68 | 69 | export const upsert = mutation({ 70 | args: { 71 | indexName: v.string(), 72 | vectors: v.array(v.array(v.number())), 73 | metadata: v.optional(v.array(v.record(v.string(), v.any()))), 74 | ids: v.optional(v.array(v.string())), 75 | }, 76 | returns: v.array(v.string()), 77 | handler: async ( 78 | ctx, 79 | { indexName, vectors, metadata, ids } 80 | ): Promise => { 81 | const index = await ctx.runQuery( 82 | internal.vector.vector.getIndexMetadataQuery, 83 | { 84 | indexName, 85 | } 86 | ); 87 | if (!index) { 88 | throw new Error("Index not found"); 89 | } 90 | const dimensions = index.dimensions; 91 | if (!vectors.every((v) => v.length === dimensions)) { 92 | throw new Error(`All vectors must have ${dimensions} dimensions`); 93 | } 94 | if (metadata && vectors.length !== metadata.length) { 95 | throw new Error("vectors and metadata must have same length"); 96 | } 97 | if (ids && vectors.length !== ids.length) { 98 | throw new Error("vectors and ids must have same length"); 99 | } 100 | 101 | // Batch insert all vectors 102 | return await Promise.all( 103 | vectors.map(async 
(vector, i) => { 104 | const id = ids?.[i]; 105 | if (id) { 106 | const convexId = ctx.db.normalizeId(index.tableName, id); 107 | const existing = convexId 108 | ? await ctx.db.get(convexId) 109 | : await ctx.db 110 | .query(index.tableName) 111 | .withIndex("id", (q) => q.eq("id", id)) 112 | .first(); 113 | if (existing) { 114 | await ctx.db.patch(existing._id, { 115 | vector, 116 | metadata: metadata?.[i], 117 | }); 118 | return existing.id ?? existing._id; 119 | } 120 | } 121 | const newId = await ctx.db.insert(index.tableName, { 122 | id, 123 | vector, 124 | metadata: metadata?.[i], 125 | indexName, 126 | }); 127 | if (!id) { 128 | await ctx.db.patch(newId, { 129 | id: newId, 130 | }); 131 | } 132 | return id ?? newId; 133 | }) 134 | ); 135 | }, 136 | }); 137 | 138 | const vSearchResult = v.object({ 139 | id: v.string(), 140 | score: v.number(), 141 | metadata: v.optional(v.record(v.string(), v.any())), 142 | vector: v.optional(v.array(v.number())), 143 | }); 144 | 145 | export const search = action({ 146 | args: { 147 | indexName: v.string(), 148 | queryVector: v.array(v.number()), 149 | topK: v.number(), 150 | filter: v.optional(v.record(v.string(), v.any())), 151 | includeVector: v.optional(v.boolean()), 152 | }, 153 | handler: async ( 154 | ctx, 155 | { indexName, queryVector, topK, filter, includeVector } 156 | ): Promise => { 157 | const index = await ctx.runQuery( 158 | internal.vector.vector.getIndexMetadataQuery, 159 | { 160 | indexName, 161 | } 162 | ); 163 | if (!index) { 164 | throw new Error("Index not found"); 165 | } 166 | const dimensions = index.dimensions; 167 | if (queryVector.length !== dimensions) { 168 | throw new Error(`Query vector must have ${dimensions} dimensions`); 169 | } 170 | 171 | const results = await ctx.vectorSearch(index.tableName, "vector", { 172 | vector: queryVector, 173 | limit: Math.max(topK * 2 * (1 + Object.keys(filter ?? {}).length), 256), 174 | filter: filter 175 | ? (q) => { 176 | return q.eq("indexName", index.indexName); 177 | } 178 | : undefined, 179 | }); 180 | 181 | const entries = await ctx.runQuery(internal.vector.vector.lookupResults, { 182 | ids: results.map((r) => r._id), 183 | scores: results.map((r) => r._score), 184 | includeVector: includeVector ?? false, 185 | }); 186 | 187 | const filtered = entries.filter((r) => { 188 | if (filter) { 189 | return Object.entries(filter).every(([key, value]) => { 190 | return r.metadata?.[key] === value; 191 | }); 192 | } 193 | return true; 194 | }); 195 | 196 | return filtered; 197 | }, 198 | returns: v.array(vSearchResult), 199 | }); 200 | 201 | type SearchResult = { 202 | id: string; 203 | score: number; 204 | metadata?: Record; 205 | vector?: number[]; 206 | }; 207 | 208 | export const lookupResults = internalQuery({ 209 | args: { 210 | ids: v.array(vSupportedId), 211 | scores: v.array(v.number()), 212 | includeVector: v.boolean(), 213 | }, 214 | handler: async (ctx, args): Promise => { 215 | if (args.ids.length !== args.scores.length) { 216 | throw new Error("ids and scores must have same length"); 217 | } 218 | const results = await Promise.all(args.ids.map((id) => ctx.db.get(id))); 219 | return results.flatMap((r, i) => 220 | r 221 | ? [ 222 | { 223 | id: r._id, 224 | score: args.scores[i], 225 | metadata: r.metadata, 226 | vector: args.includeVector ? 
r.vector : undefined, 227 | }, 228 | ] 229 | : [] 230 | ); 231 | }, 232 | returns: v.array(vSearchResult), 233 | }); 234 | 235 | export const listIndexes = query({ 236 | args: {}, 237 | handler: async (ctx): Promise => { 238 | return (await ctx.db.query("indexTableMap").collect()).map( 239 | (i) => i.indexName 240 | ); 241 | }, 242 | returns: v.array(v.string()), 243 | }); 244 | 245 | export const describeIndex = query({ 246 | args: { indexName: v.string() }, 247 | handler: async (ctx, { indexName }) => { 248 | const index = await getIndexMetadata(ctx, indexName); 249 | if (!index) { 250 | throw new Error("Index not found"); 251 | } 252 | const dimensions = index.dimensions; 253 | if (!SUPPORTED_DIMENSIONS.includes(dimensions)) { 254 | throw new Error("Invalid index name"); 255 | } 256 | return { 257 | dimension: dimensions, 258 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 259 | count: await (ctx.db.query(index.tableName) as any).count(), 260 | metric: "cosine" as const, 261 | }; 262 | }, 263 | returns: v.object({ 264 | dimension: vSupportedDimension, 265 | count: v.number(), 266 | metric: v.literal("cosine"), 267 | }), 268 | }); 269 | 270 | export const deleteIndex = action({ 271 | args: { indexName: v.string() }, 272 | handler: async (ctx, { indexName }) => { 273 | const index = await ctx.runQuery( 274 | internal.vector.vector.getIndexMetadataQuery, 275 | { 276 | indexName, 277 | } 278 | ); 279 | if (!index) { 280 | console.warn(`Index ${indexName} not found, not deleting...`); 281 | return; 282 | } 283 | let cursor: string | null = null; 284 | while (true) { 285 | const results: PageResult = await ctx.runMutation( 286 | internal.vector.vector.deletePage, 287 | { 288 | indexName: index.tableName, 289 | cursor, 290 | } 291 | ); 292 | if (results.isDone) break; 293 | cursor = results.continueCursor; 294 | } 295 | }, 296 | returns: v.null(), 297 | }); 298 | 299 | type PageResult = { 300 | isDone: boolean; 301 | continueCursor: string; 302 | }; 303 | 304 | export const deletePage = internalMutation({ 305 | args: { 306 | indexName: vSupportedTableName, 307 | cursor: v.union(v.string(), v.null()), 308 | }, 309 | handler: async (ctx, { indexName, cursor }): Promise => { 310 | const dimensions = parseInt(indexName.split("_")[1]) as SupportedDimension; 311 | if (!SUPPORTED_DIMENSIONS.includes(dimensions)) { 312 | throw new Error("Invalid index name"); 313 | } 314 | const docs = await paginator(ctx.db, schema).query(indexName).paginate({ 315 | cursor, 316 | numItems: 1000, 317 | }); 318 | await Promise.all(docs.page.map((doc) => ctx.db.delete(doc._id))); 319 | return { 320 | isDone: docs.isDone, 321 | continueCursor: docs.continueCursor, 322 | }; 323 | }, 324 | returns: v.object({ 325 | isDone: v.boolean(), 326 | continueCursor: v.string(), 327 | }), 328 | }); 329 | -------------------------------------------------------------------------------- /src/mapping/index.test.ts: -------------------------------------------------------------------------------- 1 | import type { 2 | EvalRow, 3 | TABLE_NAMES as ORIGINAL_TABLE_NAMES, 4 | } from "@mastra/core/storage"; 5 | import { expect, test } from "vitest"; 6 | import { Content } from "../ai/types"; 7 | import { 8 | TABLE_NAMES as NEW_TABLE_NAMES, 9 | SerializedContent, 10 | TABLE_WORKFLOW_SNAPSHOT, 11 | TABLE_EVALS, 12 | TABLE_MESSAGES, 13 | TABLE_THREADS, 14 | TABLE_TRACES, 15 | mapMastraToSerialized, 16 | mapSerializedToMastra, 17 | serializeContent, 18 | deserializeContent, 19 | } from "./index.js"; 20 | import assert from 
"assert"; 21 | 22 | // Type compatibility tests 23 | const _tableNames: ORIGINAL_TABLE_NAMES = "" as NEW_TABLE_NAMES; 24 | const _tableNames2: NEW_TABLE_NAMES = "" as ORIGINAL_TABLE_NAMES; 25 | const _content: SerializedContent = [] as Content; 26 | const _content2: Content = [] as SerializedContent; 27 | 28 | test("table name mappings are bijective", () => { 29 | expect(TABLE_WORKFLOW_SNAPSHOT).toBe("mastra_workflow_snapshot"); 30 | expect(TABLE_EVALS).toBe("mastra_evals"); 31 | expect(TABLE_MESSAGES).toBe("mastra_messages"); 32 | expect(TABLE_THREADS).toBe("mastra_threads"); 33 | expect(TABLE_TRACES).toBe("mastra_traces"); 34 | }); 35 | 36 | test("workflow snapshot mapping", () => { 37 | const now = new Date(); 38 | const mastraRow = { 39 | workflow_name: "test_workflow", 40 | run_id: "run123", 41 | snapshot: { 42 | state: "RUNNING", 43 | value: { test: "test" }, 44 | context: { 45 | steps: {}, 46 | triggerData: {}, 47 | attempts: {}, 48 | }, 49 | activePaths: [], 50 | runId: "run123", 51 | timestamp: now.getTime(), 52 | }, 53 | created_at: now, 54 | updated_at: now, 55 | }; 56 | 57 | const serialized = mapMastraToSerialized(TABLE_WORKFLOW_SNAPSHOT, mastraRow); 58 | expect(serialized.workflowName).toBe(mastraRow.workflow_name); 59 | expect(serialized.runId).toBe(mastraRow.run_id); 60 | expect(serialized.snapshot).toBe(JSON.stringify(mastraRow.snapshot)); 61 | expect(serialized.createdAt).toBe(Number(now)); 62 | expect(serialized.updatedAt).toBe(Number(now)); 63 | 64 | const roundTripped = mapSerializedToMastra( 65 | TABLE_WORKFLOW_SNAPSHOT, 66 | serialized 67 | ); 68 | expect(roundTripped.workflow_name).toBe(mastraRow.workflow_name); 69 | expect(roundTripped.run_id).toBe(mastraRow.run_id); 70 | expect(roundTripped.snapshot).toEqual(mastraRow.snapshot); 71 | expect(roundTripped.created_at.getTime()).toBe(now.getTime()); 72 | expect(roundTripped.updated_at.getTime()).toBe(now.getTime()); 73 | }); 74 | 75 | test("eval row mapping", () => { 76 | const now = new Date(); 77 | const mastraRow: EvalRow = { 78 | input: "test input", 79 | output: "test output", 80 | result: { score: 1 }, 81 | agentName: "test_agent", 82 | metricName: "accuracy", 83 | instructions: "test instructions", 84 | testInfo: {}, 85 | globalRunId: "global123", 86 | runId: "run123", 87 | createdAt: now.toISOString(), 88 | }; 89 | 90 | const serialized = mapMastraToSerialized(TABLE_EVALS, mastraRow); 91 | expect(serialized.input).toBe(mastraRow.input); 92 | expect(serialized.output).toBe(mastraRow.output); 93 | expect(serialized.result).toBe(mastraRow.result); 94 | expect(serialized.createdAt).toBe(Number(now)); 95 | 96 | const roundTripped = mapSerializedToMastra(TABLE_EVALS, serialized); 97 | expect(roundTripped.input).toBe(mastraRow.input); 98 | expect(roundTripped.output).toBe(mastraRow.output); 99 | expect(roundTripped.result).toBe(mastraRow.result); 100 | expect(roundTripped.createdAt).toBe(mastraRow.createdAt); 101 | }); 102 | 103 | test("message mapping", () => { 104 | const now = new Date(); 105 | const mastraRow = { 106 | id: "msg123", 107 | threadId: "thread123", 108 | content: "test message", 109 | role: "user" as const, 110 | type: "text" as const, 111 | createdAt: now, 112 | }; 113 | 114 | const serialized = mapMastraToSerialized(TABLE_MESSAGES, mastraRow); 115 | expect(serialized.id).toBe(mastraRow.id); 116 | expect(serialized.threadId).toBe(mastraRow.threadId); 117 | expect(serialized.content).toBe(mastraRow.content); 118 | expect(serialized.role).toBe(mastraRow.role); 119 | 
expect(serialized.type).toBe(mastraRow.type); 120 | expect(serialized.createdAt).toBe(Number(now)); 121 | 122 | const roundTripped = mapSerializedToMastra(TABLE_MESSAGES, serialized); 123 | expect(roundTripped.id).toBe(mastraRow.id); 124 | expect(roundTripped.threadId).toBe(mastraRow.threadId); 125 | expect(roundTripped.content).toBe(mastraRow.content); 126 | expect(roundTripped.role).toBe(mastraRow.role); 127 | expect(roundTripped.type).toBe(mastraRow.type); 128 | expect(roundTripped.createdAt.getTime()).toBe(now.getTime()); 129 | }); 130 | 131 | test("thread mapping", () => { 132 | const now = new Date(); 133 | const mastraRow = { 134 | id: "thread123", 135 | title: "Test Thread", 136 | metadata: { key: "value" }, 137 | resourceId: "resource123", 138 | createdAt: now, 139 | updatedAt: now, 140 | }; 141 | 142 | const serialized = mapMastraToSerialized(TABLE_THREADS, mastraRow); 143 | expect(serialized.id).toBe(mastraRow.id); 144 | expect(serialized.title).toBe(mastraRow.title); 145 | expect(serialized.metadata).toEqual(mastraRow.metadata); 146 | expect(serialized.resourceId).toBe(mastraRow.resourceId); 147 | expect(serialized.createdAt).toBe(Number(now)); 148 | expect(serialized.updatedAt).toBe(Number(now)); 149 | 150 | const roundTripped = mapSerializedToMastra(TABLE_THREADS, serialized); 151 | expect(roundTripped.id).toBe(mastraRow.id); 152 | expect(roundTripped.title).toBe(mastraRow.title); 153 | expect(roundTripped.metadata).toEqual(mastraRow.metadata); 154 | expect(roundTripped.resourceId).toBe(mastraRow.resourceId); 155 | expect(roundTripped.createdAt.getTime()).toBe(now.getTime()); 156 | expect(roundTripped.updatedAt.getTime()).toBe(now.getTime()); 157 | }); 158 | 159 | test("trace mapping", () => { 160 | const now = new Date(); 161 | const mastraRow = { 162 | id: "trace123", 163 | parentSpanId: "parent123", 164 | traceId: "trace123", 165 | name: "test_trace", 166 | scope: "test", 167 | kind: 1n, 168 | startTime: 1000n, 169 | endTime: 2000n, 170 | createdAt: now, 171 | }; 172 | 173 | const serialized = mapMastraToSerialized(TABLE_TRACES, mastraRow); 174 | expect(serialized.id).toBe(mastraRow.id); 175 | expect(serialized.parentSpanId).toBe(mastraRow.parentSpanId); 176 | expect(serialized.traceId).toBe(mastraRow.traceId); 177 | expect(serialized.name).toBe(mastraRow.name); 178 | expect(serialized.scope).toBe(mastraRow.scope); 179 | expect(serialized.kind).toBe(mastraRow.kind); 180 | expect(serialized.startTime).toBe(mastraRow.startTime); 181 | expect(serialized.endTime).toBe(mastraRow.endTime); 182 | expect(serialized.createdAt).toBe(Number(now)); 183 | 184 | const roundTripped = mapSerializedToMastra(TABLE_TRACES, serialized); 185 | expect(roundTripped.id).toBe(mastraRow.id); 186 | expect(roundTripped.parentSpanId).toBe(mastraRow.parentSpanId); 187 | expect(roundTripped.traceId).toBe(mastraRow.traceId); 188 | expect(roundTripped.name).toBe(mastraRow.name); 189 | expect(roundTripped.scope).toBe(mastraRow.scope); 190 | expect(roundTripped.kind).toBe(mastraRow.kind); 191 | expect(roundTripped.startTime).toBe(mastraRow.startTime); 192 | expect(roundTripped.endTime).toBe(mastraRow.endTime); 193 | }); 194 | 195 | test("content serialization with URLs", () => { 196 | const url = new URL("https://example.com/image.jpg"); 197 | const content = [ 198 | { type: "image" as const, image: url }, 199 | { type: "text" as const, text: "test" }, 200 | ]; 201 | 202 | const serialized = serializeContent(content); 203 | assert(serialized[0] instanceof Object); 204 | assert(serialized[0].type === 
"image"); 205 | expect(serialized[0].image).toBe(url.toString()); 206 | expect(serialized[1]).toEqual(content[1]); 207 | 208 | const deserialized = deserializeContent(serialized); 209 | assert(deserialized[0] instanceof Object); 210 | assert(deserialized[0].type === "image"); 211 | expect(deserialized[0].image).toBeInstanceOf(URL); 212 | expect((deserialized[0].image as URL).toString()).toBe(url.toString()); 213 | expect(deserialized[1]).toEqual(content[1]); 214 | }); 215 | 216 | test("content serialization with ArrayBuffer", () => { 217 | const buffer = new ArrayBuffer(8); 218 | const content = [ 219 | { 220 | type: "file" as const, 221 | data: buffer, 222 | mimeType: "application/octet-stream", 223 | }, 224 | { type: "text" as const, text: "test" }, 225 | ]; 226 | 227 | const serialized = serializeContent(content); 228 | assert(serialized[0] instanceof Object); 229 | assert(serialized[0].type === "file"); 230 | expect(serialized[0].data).toBeInstanceOf(ArrayBuffer); 231 | expect(serialized[1]).toEqual(content[1]); 232 | 233 | const deserialized = deserializeContent(serialized); 234 | assert(deserialized[0] instanceof Object); 235 | assert(deserialized[0].type === "file"); 236 | expect(deserialized[0].data).toBeInstanceOf(ArrayBuffer); 237 | expect(deserialized[1]).toEqual(content[1]); 238 | }); 239 | 240 | test("invalid table name throws error", () => { 241 | expect(() => 242 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 243 | mapMastraToSerialized("invalid_table" as any, {}) 244 | ).toThrow("Unsupported table name: invalid_table"); 245 | 246 | expect(() => 247 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 248 | mapSerializedToMastra("invalid_table" as any, {}) 249 | ).toThrow("Unsupported table name: invalid_table"); 250 | }); 251 | -------------------------------------------------------------------------------- /src/mapping/index.ts: -------------------------------------------------------------------------------- 1 | import type { 2 | EvalRow, 3 | MessageType, 4 | StorageThreadType, 5 | WorkflowRow, 6 | } from "@mastra/core"; 7 | import type { 8 | AssistantContent, 9 | DataContent, 10 | ToolContent, 11 | UserContent, 12 | } from "ai"; 13 | import { v } from "convex/values"; 14 | import { SerializeUrlsAndUint8Arrays, vContent } from "../ai/types"; 15 | 16 | export const TABLE_WORKFLOW_SNAPSHOT = "mastra_workflow_snapshot"; 17 | export const TABLE_EVALS = "mastra_evals"; 18 | export const TABLE_MESSAGES = "mastra_messages"; 19 | export const TABLE_THREADS = "mastra_threads"; 20 | export const TABLE_TRACES = "mastra_traces"; 21 | export type TABLE_NAMES = 22 | | typeof TABLE_WORKFLOW_SNAPSHOT 23 | | typeof TABLE_EVALS 24 | | typeof TABLE_MESSAGES 25 | | typeof TABLE_THREADS 26 | | typeof TABLE_TRACES; 27 | 28 | // Define the runtime constants first 29 | export const mastraToConvexTableNames = { 30 | [TABLE_WORKFLOW_SNAPSHOT]: "snapshots", 31 | [TABLE_EVALS]: "evals", 32 | [TABLE_MESSAGES]: "messages", 33 | [TABLE_THREADS]: "threads", 34 | [TABLE_TRACES]: "traces", 35 | } as const; 36 | 37 | export const convexToMastraTableNames = { 38 | snapshots: TABLE_WORKFLOW_SNAPSHOT, 39 | evals: TABLE_EVALS, 40 | messages: TABLE_MESSAGES, 41 | threads: TABLE_THREADS, 42 | traces: TABLE_TRACES, 43 | } as const; 44 | 45 | // Then derive the types from the constants 46 | export type MastraToConvexTableMap = typeof mastraToConvexTableNames; 47 | export type ConvexToMastraTableMap = typeof convexToMastraTableNames; 48 | 49 | // Helper types to get table names 50 | 
export type ConvexTableName<T extends TABLE_NAMES> = MastraToConvexTableMap[T];
51 | export type MastraTableName<T extends keyof ConvexToMastraTableMap> =
52 |   ConvexToMastraTableMap[T];
53 | 
54 | // Type that maps Mastra table names to their row types
55 | export type MastraRowTypeMap = {
56 |   [TABLE_WORKFLOW_SNAPSHOT]: WorkflowRow;
57 |   [TABLE_EVALS]: EvalRow;
58 |   [TABLE_MESSAGES]: MessageType;
59 |   [TABLE_THREADS]: StorageThreadType;
60 |   // eslint-disable-next-line @typescript-eslint/no-explicit-any
61 |   [TABLE_TRACES]: any; // Replace with proper type when available
62 | };
63 | 
64 | export type SerializedTimestamp = number;
65 | const vSerializedTimestamp = v.number();
66 | 
67 | export type SerializedSnapshot = Omit<
68 |   WorkflowRow,
69 |   "created_at" | "updated_at" | "snapshot" | "workflow_name" | "run_id"
70 | > & {
71 |   createdAt: SerializedTimestamp;
72 |   updatedAt: SerializedTimestamp;
73 |   snapshot: string;
74 |   workflowName: string;
75 |   runId: string;
76 | };
77 | 
78 | export type SerializedEval = Omit<EvalRow, "createdAt"> & {
79 |   createdAt: SerializedTimestamp;
80 | };
81 | 
82 | export type SerializedContent = SerializeUrlsAndUint8Arrays<
83 |   MessageType["content"]
84 | >;
85 | 
86 | export type SerializedMessage = Omit<MessageType, "createdAt" | "content"> & {
87 |   createdAt: SerializedTimestamp;
88 |   content: SerializedContent;
89 | };
90 | 
91 | export const vSerializedMessage = v.object({
92 |   id: v.string(),
93 |   threadId: v.string(),
94 |   content: vContent,
95 |   role: v.union(
96 |     v.literal("system"),
97 |     v.literal("user"),
98 |     v.literal("assistant"),
99 |     v.literal("tool")
100 |   ),
101 |   type: v.union(
102 |     v.literal("text"),
103 |     v.literal("tool-call"),
104 |     v.literal("tool-result")
105 |   ),
106 |   createdAt: v.number(),
107 | });
108 | 
109 | export type SerializedThread = Omit<
110 |   StorageThreadType,
111 |   "createdAt" | "updatedAt"
112 | > & {
113 |   createdAt: SerializedTimestamp;
114 |   updatedAt: SerializedTimestamp;
115 | };
116 | export const vSerializedThread = v.object({
117 |   id: v.string(),
118 |   title: v.optional(v.string()),
119 |   metadata: v.optional(v.record(v.string(), v.any())),
120 |   resourceId: v.string(),
121 |   createdAt: vSerializedTimestamp,
122 |   updatedAt: vSerializedTimestamp,
123 | });
124 | 
125 | // Inferring from the table schema created in
126 | // @mastra/core:src/storage/base.ts
127 | /* eslint-disable @typescript-eslint/no-explicit-any */
128 | export type SerializedTrace = {
129 |   id: string;
130 |   parentSpanId?: string | null;
131 |   traceId: string;
132 |   name: string;
133 |   scope: string;
134 |   kind: number | bigint;
135 |   events?: any[];
136 |   links?: any[];
137 |   status?: any;
138 |   attributes?: Record<string, any>;
139 |   startTime: bigint;
140 |   endTime: bigint;
141 |   other?: any;
142 |   createdAt: SerializedTimestamp;
143 | };
144 | /* eslint-enable @typescript-eslint/no-explicit-any */
145 | 
146 | // Type that maps Convex table names to their document types
147 | export type SerializedTypeMap = {
148 |   [TABLE_WORKFLOW_SNAPSHOT]: SerializedSnapshot;
149 |   [TABLE_EVALS]: SerializedEval;
150 |   [TABLE_MESSAGES]: SerializedMessage;
151 |   [TABLE_THREADS]: SerializedThread;
152 |   [TABLE_TRACES]: SerializedTrace;
153 | };
154 | 
155 | function serializeDateOrNow(date: string | Date | number): number {
156 |   if (!date) {
157 |     return Date.now();
158 |   }
159 |   if (typeof date === "number") {
160 |     return date;
161 |   }
162 |   if (date instanceof Date) {
163 |     return Number(date);
164 |   }
165 |   return Number(new Date(date));
166 | }
167 | 
168 | /**
169 |  * Maps a Mastra row to a Convex document
170 |  * @param tableName Mastra table name
171 |  * @param mastraRow Row
data from Mastra
172 |  * @returns Properly typed Convex document
173 |  */
174 | export function mapMastraToSerialized<T extends TABLE_NAMES>(
175 |   tableName: T,
176 |   mastraRow: MastraRowTypeMap[T]
177 | ): SerializedTypeMap[T] {
178 |   switch (tableName) {
179 |     case TABLE_WORKFLOW_SNAPSHOT: {
180 |       const row = mastraRow as MastraRowTypeMap[typeof TABLE_WORKFLOW_SNAPSHOT];
181 |       const serialized: SerializedSnapshot = {
182 |         workflowName: row.workflow_name,
183 |         runId: row.run_id,
184 |         snapshot: JSON.stringify(row.snapshot),
185 |         updatedAt: serializeDateOrNow(row.updated_at),
186 |         createdAt: serializeDateOrNow(row.created_at),
187 |       };
188 |       return serialized as SerializedTypeMap[T];
189 |     }
190 |     case TABLE_EVALS: {
191 |       const row = mastraRow as MastraRowTypeMap[typeof TABLE_EVALS];
192 |       const serialized: SerializedEval = {
193 |         input: row.input,
194 |         output: row.output,
195 |         result: row.result,
196 |         agentName: row.agentName,
197 |         metricName: row.metricName,
198 |         instructions: row.instructions,
199 |         testInfo: row.testInfo,
200 |         globalRunId: row.globalRunId,
201 |         runId: row.runId,
202 |         createdAt: serializeDateOrNow(row.createdAt),
203 |       };
204 |       return serialized as SerializedTypeMap[T];
205 |     }
206 |     case TABLE_MESSAGES: {
207 |       const row = mastraRow as MastraRowTypeMap[typeof TABLE_MESSAGES];
208 |       const serialized: SerializedMessage = {
209 |         id: row.id,
210 |         threadId: row.threadId,
211 |         content: serializeContent(row.content),
212 |         role: row.role,
213 |         type: row.type,
214 |         createdAt: serializeDateOrNow(row.createdAt),
215 |       };
216 |       return serialized as SerializedTypeMap[T];
217 |     }
218 |     case TABLE_THREADS: {
219 |       const row = mastraRow as MastraRowTypeMap[typeof TABLE_THREADS];
220 |       const serialized: SerializedThread = {
221 |         id: row.id,
222 |         title: row.title,
223 |         metadata: row.metadata,
224 |         resourceId: row.resourceId,
225 |         createdAt: serializeDateOrNow(row.createdAt),
226 |         updatedAt: serializeDateOrNow(row.updatedAt),
227 |       };
228 |       return serialized as SerializedTypeMap[T];
229 |     }
230 |     case TABLE_TRACES: {
231 |       const row = mastraRow as MastraRowTypeMap[typeof TABLE_TRACES];
232 |       const serialized: SerializedTrace = {
233 |         id: row.id,
234 |         parentSpanId: row.parentSpanId,
235 |         name: row.name,
236 |         traceId: row.traceId,
237 |         scope: row.scope,
238 |         kind: row.kind,
239 |         attributes: row.attributes,
240 |         status: row.status,
241 |         events: row.events,
242 |         links: row.links,
243 |         other: row.other,
244 |         startTime: row.startTime,
245 |         endTime: row.endTime,
246 |         createdAt: serializeDateOrNow(row.createdAt),
247 |       };
248 |       return serialized as SerializedTypeMap[T];
249 |     }
250 |     default:
251 |       throw new Error(`Unsupported table name: ${tableName}`);
252 |   }
253 | }
254 | 
255 | export function serializeContent(
256 |   content: UserContent | AssistantContent | ToolContent
257 | ): SerializedContent {
258 |   if (typeof content === "string") {
259 |     return content;
260 |   }
261 |   const serialized = content.map((part) => {
262 |     switch (part.type) {
263 |       case "image":
264 |         return { ...part, image: serializeDataOrUrl(part.image) };
265 |       case "file":
266 |         return { ...part, data: serializeDataOrUrl(part.data) };
267 |       default:
268 |         return part;
269 |     }
270 |   });
271 |   return serialized as SerializedContent;
272 | }
273 | 
274 | export function deserializeContent(
275 |   content: SerializedContent
276 | ): UserContent | AssistantContent | ToolContent {
277 |   if (typeof content === "string") {
278 |     return content;
279 |   }
280 |   return content.map((part) => {
281 |     switch
(part.type) {
282 |       case "image":
283 |         return { ...part, image: deserializeUrl(part.image) };
284 |       case "file":
285 |         return { ...part, data: deserializeUrl(part.data) };
286 |       default:
287 |         return part;
288 |     }
289 |   }) as UserContent | AssistantContent | ToolContent;
290 | }
291 | function serializeDataOrUrl(
292 |   dataOrUrl: DataContent | URL
293 | ): ArrayBuffer | string {
294 |   if (typeof dataOrUrl === "string") {
295 |     return dataOrUrl;
296 |   }
297 |   if (dataOrUrl instanceof ArrayBuffer) {
298 |     return dataOrUrl; // Already an ArrayBuffer
299 |   }
300 |   if (dataOrUrl instanceof URL) {
301 |     return dataOrUrl.toString();
302 |   }
303 |   return dataOrUrl.buffer.slice(
304 |     dataOrUrl.byteOffset,
305 |     dataOrUrl.byteOffset + dataOrUrl.byteLength
306 |   ) as ArrayBuffer;
307 | }
308 | 
309 | function deserializeUrl(urlOrString: string | ArrayBuffer): URL | DataContent {
310 |   if (typeof urlOrString === "string") {
311 |     if (
312 |       urlOrString.startsWith("http://") ||
313 |       urlOrString.startsWith("https://")
314 |     ) {
315 |       return new URL(urlOrString);
316 |     }
317 |     return urlOrString;
318 |   }
319 |   return urlOrString;
320 | }
321 | 
322 | /**
323 |  * Maps a Convex document to a Mastra row
324 |  * @param tableName Mastra table name
325 |  * @param row Data with transfer-safe values
326 |  * @returns Properly typed Mastra row
327 |  */
328 | export function mapSerializedToMastra<T extends TABLE_NAMES>(
329 |   tableName: T,
330 |   row: SerializedTypeMap[T]
331 | ): MastraRowTypeMap[T] {
332 |   switch (tableName) {
333 |     case TABLE_WORKFLOW_SNAPSHOT: {
334 |       const serialized =
335 |         row as SerializedTypeMap[typeof TABLE_WORKFLOW_SNAPSHOT];
336 |       const workflow: WorkflowRow = {
337 |         workflow_name: serialized.workflowName,
338 |         run_id: serialized.runId,
339 |         snapshot: JSON.parse(serialized.snapshot),
340 |         created_at: new Date(serialized.createdAt),
341 |         updated_at: new Date(serialized.updatedAt),
342 |       };
343 |       return workflow as MastraRowTypeMap[T];
344 |     }
345 |     case TABLE_EVALS: {
346 |       const serialized = row as SerializedTypeMap[typeof TABLE_EVALS];
347 |       const evalRow: EvalRow = {
348 |         input: serialized.input,
349 |         output: serialized.output,
350 |         result: serialized.result,
351 |         agentName: serialized.agentName,
352 |         metricName: serialized.metricName,
353 |         instructions: serialized.instructions,
354 |         testInfo: serialized.testInfo,
355 |         globalRunId: serialized.globalRunId,
356 |         runId: serialized.runId,
357 |         createdAt: new Date(serialized.createdAt).toISOString(),
358 |       };
359 |       return evalRow as MastraRowTypeMap[T];
360 |     }
361 |     case TABLE_MESSAGES: {
362 |       const serialized = row as SerializedTypeMap[typeof TABLE_MESSAGES];
363 |       const messageRow: MessageType = {
364 |         id: serialized.id,
365 |         threadId: serialized.threadId,
366 |         content: serialized.content,
367 |         role: serialized.role,
368 |         type: serialized.type,
369 |         createdAt: new Date(serialized.createdAt),
370 |       };
371 |       return messageRow as MastraRowTypeMap[T];
372 |     }
373 |     case TABLE_THREADS: {
374 |       const serialized = row as SerializedTypeMap[typeof TABLE_THREADS];
375 |       const threadRow: StorageThreadType = {
376 |         id: serialized.id,
377 |         title: serialized.title,
378 |         metadata: serialized.metadata,
379 |         resourceId: serialized.resourceId,
380 |         createdAt: new Date(serialized.createdAt),
381 |         updatedAt: new Date(serialized.updatedAt),
382 |       };
383 |       return threadRow as MastraRowTypeMap[T];
384 |     }
385 |     case TABLE_TRACES: {
386 |       const traceDoc = row as SerializedTypeMap[typeof TABLE_TRACES];
387 |       return {
388 |         id: traceDoc.id,
389 |         parentSpanId:
traceDoc.parentSpanId, 390 | name: traceDoc.name, 391 | traceId: traceDoc.traceId, 392 | scope: traceDoc.scope, 393 | kind: traceDoc.kind, 394 | attributes: traceDoc.attributes, 395 | status: traceDoc.status, 396 | events: traceDoc.events, 397 | links: traceDoc.links, 398 | other: traceDoc.other, 399 | startTime: traceDoc.startTime, 400 | endTime: traceDoc.endTime, 401 | } as MastraRowTypeMap[T]; 402 | } 403 | default: 404 | throw new Error(`Unsupported table name: ${tableName}`); 405 | } 406 | } 407 | -------------------------------------------------------------------------------- /src/react/index.ts: -------------------------------------------------------------------------------- 1 | // This is where React components go. 2 | if (typeof window === "undefined") { 3 | throw new Error("this is frontend code, but it's running somewhere else!"); 4 | } 5 | 6 | export function subtract(a: number, b: number): number { 7 | return a - b; 8 | } 9 | -------------------------------------------------------------------------------- /src/utils.ts: -------------------------------------------------------------------------------- 1 | export function assert(value: unknown, message?: string): asserts value { 2 | if (!value) { 3 | throw new Error(message); 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "allowJs": true, 4 | "checkJs": true, 5 | "strict": true, 6 | 7 | "target": "ESNext", 8 | "lib": ["ES2021", "dom"], 9 | "forceConsistentCasingInFileNames": true, 10 | "allowSyntheticDefaultImports": true, 11 | "module": "ESNext", 12 | "moduleResolution": "Bundler", 13 | 14 | "isolatedModules": true, 15 | "composite": false, 16 | "declaration": true, 17 | "declarationMap": true, 18 | "sourceMap": true, 19 | "outDir": "./dist", 20 | "skipLibCheck": true 21 | }, 22 | "include": ["./src/**/*"] 23 | } 24 | --------------------------------------------------------------------------------
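
A minimal usage sketch, for orientation only: it shows how an app that mounts this component might call the vector API from its own Convex functions. The mount name `mastra`, the file name `convex/vectorExample.ts`, and the index name `"docs"` are assumptions for illustration; the actual client wrapper lives in `src/client` and is not shown in this section.

```ts
// convex/vectorExample.ts (hypothetical app-side file, not part of this repo)
import { v } from "convex/values";
import { action, mutation } from "./_generated/server";
import { components } from "./_generated/api";

// Create (or reuse) a named index; it is backed by one of the fixed-dimension
// embeddings_* tables via the component's indexTableMap table.
export const setupDocsIndex = mutation({
  args: {},
  handler: async (ctx) => {
    await ctx.runMutation(components.mastra.vector.vector.createIndex, {
      indexName: "docs", // assumed index name
      dimensions: 1536, // must be one of SUPPORTED_DIMENSIONS
    });
  },
});

// Store one embedding, then run a top-10 search against the same index.
export const searchDocs = action({
  args: { embedding: v.array(v.number()) },
  handler: async (ctx, args) => {
    await ctx.runMutation(components.mastra.vector.vector.upsert, {
      indexName: "docs",
      vectors: [args.embedding],
      metadata: [{ source: "example" }],
    });
    return await ctx.runAction(components.mastra.vector.vector.search, {
      indexName: "docs",
      queryVector: args.embedding,
      topK: 10,
      includeVector: false,
    });
  },
});
```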