├── example
├── src
│ ├── vite-env.d.ts
│ ├── App.tsx
│ ├── main.tsx
│ ├── mastra
│ │ ├── index.ts
│ │ ├── agents
│ │ │ └── index.ts
│ │ ├── tools
│ │ │ └── index.ts
│ │ └── workflows
│ │ │ └── index.ts
│ ├── App.css
│ └── index.css
├── convex
│ ├── _libsql_workaround.ts
│ ├── tsconfig.json
│ ├── schema.ts
│ ├── convex.config.ts
│ ├── _generated
│ │ ├── api.js
│ │ ├── api.d.ts
│ │ ├── dataModel.d.ts
│ │ ├── server.js
│ │ └── server.d.ts
│ ├── v8Runtime.ts
│ ├── README.md
│ └── example.ts
├── README.md
├── .cursor
│ └── mcp.json
├── vite.config.ts
├── index.html
└── tsconfig.json
├── .prettierrc.json
├── src
├── client
│ ├── index.ts
│ ├── types.ts
│ ├── vector.ts
│ ├── client.ts
│ ├── storage.ts
│ └── in-memory.ts
├── utils.ts
├── component
│ ├── convex.config.ts
│ ├── setup.test.ts
│ ├── schema.ts
│ ├── _generated
│ │ ├── api.ts
│ │ ├── dataModel.ts
│ │ ├── server.ts
│ │ └── component.ts
│ ├── debug.ts
│ ├── storage
│ │ ├── tables.ts
│ │ ├── storage.ts
│ │ └── messages.ts
│ ├── vector
│ │ ├── tables.ts
│ │ └── vector.ts
│ └── logger.ts
├── react
│ └── index.ts
├── test.ts
├── ai
│ ├── types.test.ts
│ └── types.ts
└── mapping
│ ├── index.test.ts
│ └── index.ts
├── CHANGELOG.md
├── vitest.config.js
├── tsconfig.build.json
├── convex.json
├── tsconfig.test.json
├── .gitignore
├── CONTRIBUTING.md
├── renovate.json
├── tsconfig.json
├── .github
└── workflows
│ └── test.yml
├── eslint.config.js
├── package.json
├── README.md
└── LICENSE
/example/src/vite-env.d.ts:
--------------------------------------------------------------------------------
1 | /// <reference types="vite/client" />
2 |
--------------------------------------------------------------------------------
/.prettierrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "trailingComma": "all",
3 | "proseWrap": "always"
4 | }
5 |
--------------------------------------------------------------------------------
/example/convex/_libsql_workaround.ts:
--------------------------------------------------------------------------------
1 | "use node";
2 | export * as _ from "@libsql/client";
3 |
--------------------------------------------------------------------------------
/example/convex/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "../tsconfig.json",
3 | "include": ["."],
4 | "exclude": ["_generated"]
5 | }
6 |
--------------------------------------------------------------------------------
/src/client/index.ts:
--------------------------------------------------------------------------------
1 | export { ConvexStorage, InMemoryStorage } from "./storage.js";
2 | export { ConvexVector, InMemoryVector } from "./vector.js";
3 |
--------------------------------------------------------------------------------
/src/utils.ts:
--------------------------------------------------------------------------------
1 | export function assert(value: unknown, message?: string): asserts value {
2 | if (!value) {
3 | throw new Error(message);
4 | }
5 | }
6 |
--------------------------------------------------------------------------------
/src/component/convex.config.ts:
--------------------------------------------------------------------------------
1 | import { defineComponent } from "convex/server";
2 |
3 | const component = defineComponent("mastra");
4 |
5 | export default component;
6 |
--------------------------------------------------------------------------------
/example/README.md:
--------------------------------------------------------------------------------
1 | # Example app
2 |
3 | Components need an app that uses them in order to run codegen. An example app is also useful
4 | for testing and documentation.
5 |
6 |
--------------------------------------------------------------------------------
/example/convex/schema.ts:
--------------------------------------------------------------------------------
1 | import { defineSchema } from "convex/server";
2 |
3 | export default defineSchema({
4 | // Any tables used by the example app go here.
5 | });
6 |
--------------------------------------------------------------------------------
/example/.cursor/mcp.json:
--------------------------------------------------------------------------------
1 | {
2 | "mcpServers": {
3 | "mastra": {
4 | "command": "npx",
5 | "args": ["-y", "@mastra/mcp-docs-server@latest"]
6 | }
7 | }
8 | }
9 |
--------------------------------------------------------------------------------
/src/component/setup.test.ts:
--------------------------------------------------------------------------------
1 | /// <reference types="vite/client" />
2 | import { test } from "vitest";
3 | export const modules = import.meta.glob("./**/*.*s");
4 |
5 | test("setup", () => {});
6 |
--------------------------------------------------------------------------------
/example/convex/convex.config.ts:
--------------------------------------------------------------------------------
1 | import { defineApp } from "convex/server";
2 | import mastra from "@convex-dev/mastra/convex.config.js";
3 |
4 | const app = defineApp();
5 | app.use(mastra);
6 |
7 | export default app;
8 |
--------------------------------------------------------------------------------
/example/vite.config.ts:
--------------------------------------------------------------------------------
1 | import { defineConfig } from "vite";
2 | import react from "@vitejs/plugin-react";
3 |
4 | // https://vitejs.dev/config/
5 | export default defineConfig({
6 | envDir: "../",
7 | plugins: [react()],
8 | });
9 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | ## 0.1.0 (unreleased)
4 |
5 | - Adds /test and /\_generated/component.js entrypoints
6 | - Drops commonjs support
7 | - Improves source mapping for generated files
8 | - Changes to a statically generated component API
9 |
--------------------------------------------------------------------------------
/vitest.config.js:
--------------------------------------------------------------------------------
1 | import { defineConfig } from "vitest/config";
2 |
3 | export default defineConfig({
4 | test: {
5 | environment: "edge-runtime",
6 | typecheck: {
7 | tsconfig: "./tsconfig.test.json",
8 | },
9 | },
10 | });
11 |
--------------------------------------------------------------------------------
/tsconfig.build.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "./tsconfig.json",
3 | "include": ["src/**/*"],
4 | "exclude": ["src/**/*.test.*", "./src/test.ts"],
5 | "compilerOptions": {
6 | "module": "ESNext",
7 | "moduleResolution": "Bundler"
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/src/react/index.ts:
--------------------------------------------------------------------------------
1 | // This is where React components go.
2 | if (typeof window === "undefined") {
3 | throw new Error("this is frontend code, but it's running somewhere else!");
4 | }
5 |
6 | export function subtract(a: number, b: number): number {
7 | return a - b;
8 | }
9 |
--------------------------------------------------------------------------------
/convex.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "./node_modules/convex/schemas/convex.schema.json",
3 | "functions": "example/convex",
4 | "node": {
5 | "externalPackages": [
6 | "@libsql/client"
7 | ]
8 | },
9 | "codegen": {
10 | "legacyComponentApi": false
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/tsconfig.test.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "./tsconfig.json",
3 | "include": [
4 | "src/**/*.ts",
5 | "src/**/*.tsx",
6 | "example/src/**/*.ts",
7 | "example/src/**/*.tsx",
8 | "example/convex/**/*.ts"
9 | ],
10 | "exclude": [
11 | "node_modules",
12 | "dist",
13 | "**/_generated"
14 | ]
15 | }
16 |
--------------------------------------------------------------------------------
/example/index.html:
--------------------------------------------------------------------------------
1 | <!doctype html>
2 | <html lang="en">
3 |   <head>
4 |     <meta charset="UTF-8" />
5 |     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
6 |     <title>Mastra Component Example</title>
7 |   </head>
8 |   <body>
9 |     <div id="root"></div>
10 |     <script type="module" src="/src/main.tsx"></script>
11 |   </body>
12 | </html>
13 |
--------------------------------------------------------------------------------
/example/src/App.tsx:
--------------------------------------------------------------------------------
1 | import "./App.css";
2 |
3 | function App() {
4 |   return (
5 |     <>
6 |       <h1>Convex Mastra Component Example</h1>
7 |       <div className="card">
8 |         <p>
9 |           See <code>example/convex/example.ts</code> for all the ways to use
10 |           this component
11 |         </p>
12 |       </div>
13 |     </>
14 | );
15 | }
16 |
17 | export default App;
18 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | .idea
3 | *.local
4 | *.log
5 | /.vscode/
6 | /docs/.vitepress/cache
7 | dist
8 | dist-ssr
9 | explorations
10 | node_modules
11 | .eslintcache
12 | */.mastra
13 |
14 | # this is a package-json-redirect stub dir, see https://github.com/andrewbranch/example-subpath-exports-ts-compat?tab=readme-ov-file
15 | react/package.json
16 | # npm pack output
17 | *.tgz
18 | *.tsbuildinfo
19 | .env.development
20 |
--------------------------------------------------------------------------------
/src/component/schema.ts:
--------------------------------------------------------------------------------
1 | import { defineSchema, defineTable } from "convex/server";
2 | import storageTables from "./storage/tables.js";
3 | import { v } from "convex/values";
4 | import { logLevel } from "./logger.js";
5 | import vectorTables from "./vector/tables.js";
6 |
7 | export default defineSchema({
8 | config: defineTable({
9 | config: v.object({ logLevel: logLevel }),
10 | }),
11 | ...storageTables,
12 | ...vectorTables,
13 | });
14 |
--------------------------------------------------------------------------------
/example/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "ESNext",
4 | "lib": ["DOM", "DOM.Iterable", "ESNext"],
5 | "skipLibCheck": true,
6 | "allowSyntheticDefaultImports": true,
7 | "strict": true,
8 | "forceConsistentCasingInFileNames": true,
9 | "module": "ESNext",
10 | "moduleResolution": "Bundler",
11 | "resolveJsonModule": true,
12 | "jsx": "react-jsx",
13 | "noEmit": true
14 | },
15 | "include": ["./src", "vite.config.ts"]
16 | }
17 |
--------------------------------------------------------------------------------
/example/src/main.tsx:
--------------------------------------------------------------------------------
1 | import { StrictMode } from "react";
2 | import { createRoot } from "react-dom/client";
3 | import { ConvexProvider, ConvexReactClient } from "convex/react";
4 | import App from "./App.jsx";
5 | import "./index.css";
6 |
7 | const address = import.meta.env.VITE_CONVEX_URL;
8 |
9 | const convex = new ConvexReactClient(address);
10 |
11 | createRoot(document.getElementById("root")!).render(
12 |   <StrictMode>
13 |     <ConvexProvider client={convex}>
14 |       <App />
15 |     </ConvexProvider>
16 |   </StrictMode>,
17 | );
18 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Developing guide
2 |
3 | ## Running locally
4 |
5 | ```sh
6 | npm i
7 | npm run dev
8 | ```
9 |
10 | ## Testing
11 |
12 | ```sh
13 | npm run clean
14 | npm run build
15 | npm run typecheck
16 | npm run lint
17 | npm run test
18 | ```
19 |
20 | ## Deploying
21 |
22 | ### Building a one-off package
23 |
24 | ```sh
25 | npm run clean
26 | npm ci
27 | npm pack
28 | ```
29 |
30 | ### Deploying a new version
31 |
32 | ```sh
33 | npm run release
34 | ```
35 |
36 | or for alpha release:
37 |
38 | ```sh
39 | npm run alpha
40 | ```
41 |
--------------------------------------------------------------------------------
/example/src/mastra/index.ts:
--------------------------------------------------------------------------------
1 | import { Mastra } from "@mastra/core";
2 | import { createLogger } from "@mastra/core/logger";
3 |
4 | import { weatherAgent } from "./agents";
5 | import { weatherToOutfitWorkflow, whenTest } from "./workflows";
6 | // import { ConvexStorage } from "@convex-dev/mastra/registry";
7 |
8 | export const mastra = new Mastra({
9 | agents: { weatherAgent },
10 | workflows: { weatherToOutfitWorkflow, whenTest },
11 | logger: createLogger({
12 | name: "Mastra",
13 | level: "debug",
14 | }),
15 | // storage: new ConvexStorage(),
16 | });
17 |
--------------------------------------------------------------------------------
/example/convex/_generated/api.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable */
2 | /**
3 | * Generated `api` utility.
4 | *
5 | * THIS CODE IS AUTOMATICALLY GENERATED.
6 | *
7 | * To regenerate, run `npx convex dev`.
8 | * @module
9 | */
10 |
11 | import { anyApi, componentsGeneric } from "convex/server";
12 |
13 | /**
14 | * A utility for referencing Convex functions in your app's API.
15 | *
16 | * Usage:
17 | * ```js
18 | * const myFunctionReference = api.myModule.myFunction;
19 | * ```
20 | */
21 | export const api = anyApi;
22 | export const internal = anyApi;
23 | export const components = componentsGeneric();
24 |
--------------------------------------------------------------------------------
/renovate.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json",
3 | "extends": ["config:best-practices"],
4 | "schedule": ["* 0-4 * * 1"],
5 | "timezone": "America/Los_Angeles",
6 | "prConcurrentLimit": 1,
7 | "packageRules": [
8 | {
9 | "groupName": "Routine updates",
10 | "matchUpdateTypes": ["minor", "patch", "pin", "digest"],
11 | "automerge": true
12 | },
13 | {
14 | "groupName": "Major updates",
15 | "matchUpdateTypes": ["major"],
16 | "automerge": false
17 | },
18 | {
19 | "matchDepTypes": ["devDependencies"],
20 | "automerge": true
21 | }
22 | ]
23 | }
24 |
--------------------------------------------------------------------------------
/src/test.ts:
--------------------------------------------------------------------------------
1 | /// <reference types="vite/client" />
2 | import type { TestConvex } from "convex-test";
3 | import type { GenericSchema, SchemaDefinition } from "convex/server";
4 | import schema from "./component/schema.js";
5 | const modules = import.meta.glob("./component/**/*.ts");
6 |
7 | /**
8 | * Register the component with the test convex instance.
9 | * @param t - The test convex instance, e.g. from calling `convexTest`.
10 | * @param name - The name of the component, as registered in convex.config.ts.
11 | */
12 | export function register(
13 |   t: TestConvex<SchemaDefinition<GenericSchema, boolean>>,
14 | name: string = "mastra"
15 | ) {
16 | t.registerComponent(name, schema, modules);
17 | }
18 | export default { register, schema, modules };
19 |
--------------------------------------------------------------------------------
/example/src/App.css:
--------------------------------------------------------------------------------
1 | #root {
2 | max-width: 1280px;
3 | margin: 0 auto;
4 | padding: 2rem;
5 | text-align: center;
6 | }
7 |
8 | .logo {
9 | height: 6em;
10 | padding: 1.5em;
11 | will-change: filter;
12 | transition: filter 300ms;
13 | }
14 | .logo:hover {
15 | filter: drop-shadow(0 0 2em #646cffaa);
16 | }
17 | .logo.react:hover {
18 | filter: drop-shadow(0 0 2em #61dafbaa);
19 | }
20 |
21 | @keyframes logo-spin {
22 | from {
23 | transform: rotate(0deg);
24 | }
25 | to {
26 | transform: rotate(360deg);
27 | }
28 | }
29 |
30 | @media (prefers-reduced-motion: no-preference) {
31 | a:nth-of-type(2) .logo {
32 | animation: logo-spin infinite 20s linear;
33 | }
34 | }
35 |
36 | .card {
37 | padding: 2em;
38 | }
39 |
40 | .read-the-docs {
41 | color: #888;
42 | }
43 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "allowJs": true,
4 | "checkJs": true,
5 | "strict": true,
6 |
7 | "target": "ESNext",
8 | "lib": ["ES2021", "dom", "DOM.Iterable"],
9 | "jsx": "react-jsx",
10 | "forceConsistentCasingInFileNames": true,
11 | "allowSyntheticDefaultImports": true,
12 | "noErrorTruncation": true,
13 | // We enforce stricter module resolution for Node16 compatibility
14 | // But when building we use Bundler & ESNext for ESM
15 | "module": "NodeNext",
16 | "moduleResolution": "NodeNext",
17 |
18 | "composite": true,
19 | "isolatedModules": true,
20 | "declaration": true,
21 | "declarationMap": true,
22 | "sourceMap": true,
23 | "rootDir": "./src",
24 | "outDir": "./dist",
25 | "verbatimModuleSyntax": true,
26 | "skipLibCheck": true
27 | },
28 | "include": ["./src/**/*"]
29 | }
30 |
--------------------------------------------------------------------------------
/example/convex/v8Runtime.ts:
--------------------------------------------------------------------------------
1 | import { query } from "./_generated/server";
2 | import { components } from "./_generated/api";
3 | import { v } from "convex/values";
4 | import {
5 | mapSerializedToMastra,
6 | TABLE_WORKFLOW_SNAPSHOT,
7 | } from "@convex-dev/mastra/mapping";
8 |
9 | export const getStatus = query({
10 | args: { runId: v.string() },
11 | handler: async (ctx, args) => {
12 | const doc = await ctx.runQuery(
13 | components.mastra.storage.storage.loadSnapshot,
14 | {
15 | workflowName: "weatherToOutfitWorkflow",
16 | runId: args.runId,
17 | }
18 | );
19 | if (!doc) {
20 | return null;
21 | }
22 | const snapshot = mapSerializedToMastra(TABLE_WORKFLOW_SNAPSHOT, doc);
23 | const { childStates, activePaths, suspendedSteps } = snapshot.snapshot;
24 | return { childStates, activePaths, suspendedSteps };
25 | },
26 | });
27 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: Test and lint
2 | concurrency:
3 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
4 | cancel-in-progress: true
5 |
6 | on:
7 | push:
8 | branches: [main]
9 | pull_request:
10 | branches: ["**"]
11 |
12 | jobs:
13 | check:
14 | name: Test and lint
15 | runs-on: ubuntu-latest
16 | timeout-minutes: 30
17 |
18 | steps:
19 | - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
20 |
21 | - name: Node setup
22 | uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5
23 | with:
24 | cache-dependency-path: package.json
25 | node-version: "20.x"
26 | cache: "npm"
27 |
28 | - name: Install and build
29 | run: |
30 | npm i
31 | npm run build
32 | - name: Publish package for testing branch
33 | run: npx pkg-pr-new publish || echo "Have you set up pkg-pr-new for this repo?"
34 | - name: Test
35 | run: |
36 | npm run test
37 | npm run typecheck
38 | npm run lint
39 |
--------------------------------------------------------------------------------
/example/src/mastra/agents/index.ts:
--------------------------------------------------------------------------------
1 | import { openai } from "@ai-sdk/openai";
2 | import { Agent } from "@mastra/core/agent";
3 | import { weatherTool } from "../tools";
4 |
5 | export const weatherAgent = new Agent({
6 | name: "Weather Agent",
7 | instructions: `
8 | You are a helpful weather assistant that provides accurate weather information.
9 |
10 | Your primary function is to help users get weather details for specific locations. When responding:
11 | - Always ask for a location if none is provided
12 | - If giving a location with multiple parts (e.g. "New York, NY"), use the most relevant part (e.g. "New York")
13 | - Include relevant details like humidity, wind conditions, and precipitation
14 | - Keep responses concise but informative
15 |
16 | Use the weatherTool to fetch current weather data.
17 | `,
18 | model: openai("gpt-4o"),
19 | tools: { weatherTool },
20 | });
21 |
22 | export const outfitAgent = new Agent({
23 | name: "Outfit Agent",
24 | instructions: `
25 | You are a helpful outfit assistant that provides outfit recommendations
26 | based on the weather and local style.
27 | `,
28 | model: openai("gpt-4o"),
29 | });
30 |
--------------------------------------------------------------------------------
/example/src/index.css:
--------------------------------------------------------------------------------
1 | :root {
2 | font-family: Inter, system-ui, Avenir, Helvetica, Arial, sans-serif;
3 | line-height: 1.5;
4 | font-weight: 400;
5 |
6 | color-scheme: light dark;
7 | color: rgba(255, 255, 255, 0.87);
8 | background-color: #242424;
9 |
10 | font-synthesis: none;
11 | text-rendering: optimizeLegibility;
12 | -webkit-font-smoothing: antialiased;
13 | -moz-osx-font-smoothing: grayscale;
14 | }
15 |
16 | a {
17 | font-weight: 500;
18 | color: #646cff;
19 | text-decoration: inherit;
20 | }
21 | a:hover {
22 | color: #535bf2;
23 | }
24 |
25 | body {
26 | margin: 0;
27 | display: flex;
28 | place-items: center;
29 | min-width: 320px;
30 | min-height: 100vh;
31 | }
32 |
33 | h1 {
34 | font-size: 3.2em;
35 | line-height: 1.1;
36 | }
37 |
38 | button {
39 | border-radius: 8px;
40 | border: 1px solid transparent;
41 | padding: 0.6em 1.2em;
42 | font-size: 1em;
43 | font-weight: 500;
44 | font-family: inherit;
45 | background-color: #1a1a1a;
46 | cursor: pointer;
47 | transition: border-color 0.25s;
48 | }
49 | button:hover {
50 | border-color: #646cff;
51 | }
52 | button:focus,
53 | button:focus-visible {
54 | outline: 4px auto -webkit-focus-ring-color;
55 | }
56 |
57 | @media (prefers-color-scheme: light) {
58 | :root {
59 | color: #213547;
60 | background-color: #ffffff;
61 | }
62 | a:hover {
63 | color: #747bff;
64 | }
65 | button {
66 | background-color: #f9f9f9;
67 | }
68 | }
69 |
--------------------------------------------------------------------------------
/example/convex/_generated/api.d.ts:
--------------------------------------------------------------------------------
1 | /* eslint-disable */
2 | /**
3 | * Generated `api` utility.
4 | *
5 | * THIS CODE IS AUTOMATICALLY GENERATED.
6 | *
7 | * To regenerate, run `npx convex dev`.
8 | * @module
9 | */
10 |
11 | import type * as _libsql_workaround from "../_libsql_workaround.js";
12 | import type * as example from "../example.js";
13 | import type * as v8Runtime from "../v8Runtime.js";
14 |
15 | import type {
16 | ApiFromModules,
17 | FilterApi,
18 | FunctionReference,
19 | } from "convex/server";
20 |
21 | declare const fullApi: ApiFromModules<{
22 | _libsql_workaround: typeof _libsql_workaround;
23 | example: typeof example;
24 | v8Runtime: typeof v8Runtime;
25 | }>;
26 |
27 | /**
28 | * A utility for referencing Convex functions in your app's public API.
29 | *
30 | * Usage:
31 | * ```js
32 | * const myFunctionReference = api.myModule.myFunction;
33 | * ```
34 | */
35 | export declare const api: FilterApi<
36 |   typeof fullApi,
37 |   FunctionReference<any, "public">
38 | >;
39 |
40 | /**
41 | * A utility for referencing Convex functions in your app's internal API.
42 | *
43 | * Usage:
44 | * ```js
45 | * const myFunctionReference = internal.myModule.myFunction;
46 | * ```
47 | */
48 | export declare const internal: FilterApi<
49 |   typeof fullApi,
50 |   FunctionReference<any, "internal">
51 | >;
52 |
53 | export declare const components: {
54 | mastra: import("@convex-dev/mastra/_generated/component.js").ComponentApi<"mastra">;
55 | };
56 |
--------------------------------------------------------------------------------
/src/client/types.ts:
--------------------------------------------------------------------------------
1 | /* Type utils follow */
2 |
3 | import type {
4 | FunctionArgs,
5 | FunctionReference,
6 | FunctionReturnType,
7 | GenericActionCtx,
8 | } from "convex/server";
9 |
10 | import type { GenericMutationCtx } from "convex/server";
11 |
12 | import type { GenericQueryCtx } from "convex/server";
13 |
14 | import type { GenericDataModel } from "convex/server";
15 |
16 | export type RunQueryCtx = {
17 |   runQuery: GenericQueryCtx<GenericDataModel>["runQuery"];
18 | };
19 | export type RunMutationCtx = {
20 |   runQuery: GenericMutationCtx<GenericDataModel>["runQuery"];
21 |   runMutation: GenericMutationCtx<GenericDataModel>["runMutation"];
22 | };
23 | export type RunActionCtx = {
24 |   runQuery: GenericActionCtx<GenericDataModel>["runQuery"];
25 |   runMutation: GenericActionCtx<GenericDataModel>["runMutation"];
26 |   runAction: GenericActionCtx<GenericDataModel>["runAction"];
27 | };
28 |
29 | type CtxWith<T extends "runQuery" | "runMutation" | "runAction"> = Pick<
30 |   {
31 |     runQuery: <Query extends FunctionReference<"query">>(
32 |       query: Query,
33 |       args: FunctionArgs<Query>,
34 |     ) => Promise<FunctionReturnType<Query>>;
35 |     runMutation: <Mutation extends FunctionReference<"mutation">>(
36 |       mutation: Mutation,
37 |       args: FunctionArgs<Mutation>,
38 |     ) => Promise<FunctionReturnType<Mutation>>;
39 |     runAction: <Action extends FunctionReference<"action">>(
40 |       action: Action,
41 |       args: FunctionArgs<Action>,
42 |     ) => Promise<FunctionReturnType<Action>>;
43 |   },
44 |   T
45 | >;
46 |
47 | type QueryCtx = CtxWith<"runQuery">;
48 | const queryCtx = {} as QueryCtx;
49 |
--------------------------------------------------------------------------------
/src/component/_generated/api.ts:
--------------------------------------------------------------------------------
1 | /* eslint-disable */
2 | /**
3 | * Generated `api` utility.
4 | *
5 | * THIS CODE IS AUTOMATICALLY GENERATED.
6 | *
7 | * To regenerate, run `npx convex dev`.
8 | * @module
9 | */
10 |
11 | import type * as debug from "../debug.js";
12 | import type * as logger from "../logger.js";
13 | import type * as storage_messages from "../storage/messages.js";
14 | import type * as storage_storage from "../storage/storage.js";
15 | import type * as storage_tables from "../storage/tables.js";
16 | import type * as vector_tables from "../vector/tables.js";
17 | import type * as vector_vector from "../vector/vector.js";
18 |
19 | import type {
20 | ApiFromModules,
21 | FilterApi,
22 | FunctionReference,
23 | } from "convex/server";
24 | import { anyApi, componentsGeneric } from "convex/server";
25 |
26 | const fullApi: ApiFromModules<{
27 | debug: typeof debug;
28 | logger: typeof logger;
29 | "storage/messages": typeof storage_messages;
30 | "storage/storage": typeof storage_storage;
31 | "storage/tables": typeof storage_tables;
32 | "vector/tables": typeof vector_tables;
33 | "vector/vector": typeof vector_vector;
34 | }> = anyApi as any;
35 |
36 | /**
37 | * A utility for referencing Convex functions in your app's public API.
38 | *
39 | * Usage:
40 | * ```js
41 | * const myFunctionReference = api.myModule.myFunction;
42 | * ```
43 | */
44 | export const api: FilterApi<
45 |   typeof fullApi,
46 |   FunctionReference<any, "public">
47 | > = anyApi as any;
48 |
49 | /**
50 | * A utility for referencing Convex functions in your app's internal API.
51 | *
52 | * Usage:
53 | * ```js
54 | * const myFunctionReference = internal.myModule.myFunction;
55 | * ```
56 | */
57 | export const internal: FilterApi<
58 |   typeof fullApi,
59 |   FunctionReference<any, "internal">
60 | > = anyApi as any;
61 |
62 | export const components = componentsGeneric() as unknown as {};
63 |
--------------------------------------------------------------------------------
/src/component/_generated/dataModel.ts:
--------------------------------------------------------------------------------
1 | /* eslint-disable */
2 | /**
3 | * Generated data model types.
4 | *
5 | * THIS CODE IS AUTOMATICALLY GENERATED.
6 | *
7 | * To regenerate, run `npx convex dev`.
8 | * @module
9 | */
10 |
11 | import type {
12 | DataModelFromSchemaDefinition,
13 | DocumentByName,
14 | TableNamesInDataModel,
15 | SystemTableNames,
16 | } from "convex/server";
17 | import type { GenericId } from "convex/values";
18 | import schema from "../schema.js";
19 |
20 | /**
21 | * The names of all of your Convex tables.
22 | */
23 | export type TableNames = TableNamesInDataModel<DataModel>;
24 |
25 | /**
26 | * The type of a document stored in Convex.
27 | *
28 | * @typeParam TableName - A string literal type of the table name (like "users").
29 | */
30 | export type Doc<TableName extends TableNames> = DocumentByName<
31 |   DataModel,
32 |   TableName
33 | >;
34 |
35 | /**
36 | * An identifier for a document in Convex.
37 | *
38 | * Convex documents are uniquely identified by their `Id`, which is accessible
39 | * on the `_id` field. To learn more, see [Document IDs](https://docs.convex.dev/using/document-ids).
40 | *
41 | * Documents can be loaded using `db.get(id)` in query and mutation functions.
42 | *
43 | * IDs are just strings at runtime, but this type can be used to distinguish them from other
44 | * strings when type checking.
45 | *
46 | * @typeParam TableName - A string literal type of the table name (like "users").
47 | */
48 | export type Id<TableName extends TableNames | SystemTableNames> =
49 |   GenericId<TableName>;
50 |
51 | /**
52 | * A type describing your Convex data model.
53 | *
54 | * This type includes information about what tables you have, the type of
55 | * documents stored in those tables, and the indexes defined on them.
56 | *
57 | * This type is used to parameterize methods like `queryGeneric` and
58 | * `mutationGeneric` to make them type-safe.
59 | */
60 | export type DataModel = DataModelFromSchemaDefinition<typeof schema>;
61 |
--------------------------------------------------------------------------------
/example/convex/_generated/dataModel.d.ts:
--------------------------------------------------------------------------------
1 | /* eslint-disable */
2 | /**
3 | * Generated data model types.
4 | *
5 | * THIS CODE IS AUTOMATICALLY GENERATED.
6 | *
7 | * To regenerate, run `npx convex dev`.
8 | * @module
9 | */
10 |
11 | import type {
12 | DataModelFromSchemaDefinition,
13 | DocumentByName,
14 | TableNamesInDataModel,
15 | SystemTableNames,
16 | } from "convex/server";
17 | import type { GenericId } from "convex/values";
18 | import schema from "../schema.js";
19 |
20 | /**
21 | * The names of all of your Convex tables.
22 | */
23 | export type TableNames = TableNamesInDataModel<DataModel>;
24 |
25 | /**
26 | * The type of a document stored in Convex.
27 | *
28 | * @typeParam TableName - A string literal type of the table name (like "users").
29 | */
30 | export type Doc<TableName extends TableNames> = DocumentByName<
31 |   DataModel,
32 |   TableName
33 | >;
34 |
35 | /**
36 | * An identifier for a document in Convex.
37 | *
38 | * Convex documents are uniquely identified by their `Id`, which is accessible
39 | * on the `_id` field. To learn more, see [Document IDs](https://docs.convex.dev/using/document-ids).
40 | *
41 | * Documents can be loaded using `db.get(id)` in query and mutation functions.
42 | *
43 | * IDs are just strings at runtime, but this type can be used to distinguish them from other
44 | * strings when type checking.
45 | *
46 | * @typeParam TableName - A string literal type of the table name (like "users").
47 | */
48 | export type Id<TableName extends TableNames | SystemTableNames> =
49 |   GenericId<TableName>;
50 |
51 | /**
52 | * A type describing your Convex data model.
53 | *
54 | * This type includes information about what tables you have, the type of
55 | * documents stored in those tables, and the indexes defined on them.
56 | *
57 | * This type is used to parameterize methods like `queryGeneric` and
58 | * `mutationGeneric` to make them type-safe.
59 | */
60 | export type DataModel = DataModelFromSchemaDefinition<typeof schema>;
61 |
--------------------------------------------------------------------------------
/src/component/debug.ts:
--------------------------------------------------------------------------------
1 | import { v, type VString } from "convex/values";
2 | import {
3 | type ActionCtx,
4 | internalAction,
5 | internalMutation,
6 | internalQuery,
7 | } from "./_generated/server.js";
8 |
9 | import { logLevel } from "./logger.js";
10 | import { internal } from "./_generated/api.js";
11 | import type { TableNames } from "./_generated/dataModel.js";
12 | import {
13 | mapSerializedToMastra,
14 | TABLE_WORKFLOW_SNAPSHOT,
15 | } from "../mapping/index.js";
16 |
17 | export const debugOverrideLogLevel = internalMutation({
18 | args: {
19 | logLevel,
20 | },
21 | handler: async (ctx, args) => {
22 | const frozen = await ctx.db.query("config").first();
23 | if (frozen) {
24 | await ctx.db.patch(frozen._id, {
25 | config: {
26 | ...frozen.config,
27 | logLevel: args.logLevel,
28 | },
29 | });
30 | } else {
31 | await ctx.db.insert("config", {
32 | config: {
33 | logLevel: args.logLevel,
34 | },
35 | });
36 | }
37 | },
38 | returns: v.null(),
39 | });
40 |
41 | export const deleteAll = internalAction({
42 | args: {},
43 | handler: async (ctx) => {
44 | await Promise.all([deleteTable(ctx, "config")]);
45 | },
46 | returns: v.null(),
47 | });
48 |
49 | async function deleteTable(ctx: ActionCtx, table: TableNames) {
50 | let cursor: string | null = null;
51 | let isDone = false;
52 | while (!isDone) {
53 | ({ isDone, cursor } = await ctx.runMutation(internal.debug.deletePage, {
54 | table,
55 | cursor,
56 | }));
57 | }
58 | }
59 |
60 | export const deletePage = internalMutation({
61 | args: {
62 | table: v.string() as VString,
63 | cursor: v.union(v.string(), v.null()),
64 | },
65 | handler: async (ctx, args) => {
66 | const results = await ctx.db.query(args.table).paginate({
67 | cursor: args.cursor ?? null,
68 | numItems: 1000,
69 | });
70 | await Promise.all(results.page.map((result) => ctx.db.delete(result._id)));
71 | return {
72 | isDone: results.isDone,
73 | cursor: results.continueCursor,
74 | };
75 | },
76 | returns: v.object({
77 | isDone: v.boolean(),
78 | cursor: v.string(),
79 | }),
80 | });
81 |
82 | export const getLatestWorkflowStatus = internalQuery({
83 | args: {},
84 | handler: async (ctx): Promise => {
85 | const latest = await ctx.db.query("snapshots").order("desc").first();
86 | if (!latest) {
87 | return;
88 | }
89 | const workflow = mapSerializedToMastra(TABLE_WORKFLOW_SNAPSHOT, latest);
90 | return workflow.snapshot;
91 | },
92 | returns: v.any(),
93 | });
94 |
--------------------------------------------------------------------------------
/src/component/storage/tables.ts:
--------------------------------------------------------------------------------
1 | import { defineTable } from "convex/server";
2 | import { v } from "convex/values";
3 | import {
4 | vAssistantContent,
5 | vToolContent,
6 | vUserContent,
7 | } from "../../ai/types.js";
8 |
// Table definitions backing the Mastra storage component.
// Messages/threads mirror Mastra's memory model; snapshots/evals/traces
// back workflows, evaluation runs, and tracing respectively.
const storageSchema = {
  // messages.ts
  messages: defineTable({
    id: v.string(), // TODO: can we just use the _id?
    threadId: v.string(), // TODO: can we use v.id("threads")?
    // Position of the message within its thread; second key of "threadId" index.
    threadOrder: v.number(),
    resourceId: v.optional(v.string()),
    content: v.union(vUserContent, vAssistantContent, vToolContent),
    role: v.union(
      v.literal("system"),
      v.literal("user"),
      v.literal("assistant"),
      v.literal("tool"),
    ),
    type: v.union(
      v.literal("text"),
      v.literal("tool-call"),
      v.literal("tool-result"),
    ),
    createdAt: v.number(),
  })
    .index("id", ["id"])
    .index("threadId", ["threadId", "threadOrder"]),
  threads: defineTable({
    id: v.string(), // TODO: can we just use the _id?
    resourceId: v.string(),
    title: v.optional(v.string()),
    metadata: v.optional(v.record(v.string(), v.any())),
    createdAt: v.number(),
    updatedAt: v.number(),
  })
    .index("id", ["id"])
    .index("resourceId", ["resourceId"]),

  // index.ts
  snapshots: defineTable({
    workflowName: v.string(),
    runId: v.string(),
    snapshot: v.string(), // JSON for now, later:
    createdAt: v.number(),
    updatedAt: v.number(),
  }).index("runId", ["runId", "workflowName"]),
  evals: defineTable({
    input: v.string(),
    output: v.string(),
    result: v.any(),
    agentName: v.string(),
    metricName: v.string(),
    instructions: v.string(),
    testInfo: v.optional(v.any()),
    globalRunId: v.string(),
    runId: v.string(),
    createdAt: v.number(),
  }).index("agentName", ["agentName", "testInfo.testPath"]),
  traces: defineTable({
    id: v.string(), // TODO: can we just use the _id?
    parentSpanId: v.optional(v.union(v.string(), v.null())),
    name: v.string(),
    traceId: v.string(),
    scope: v.string(),
    // NOTE(review): presumably the OTel span kind — confirm why both
    // number and int64 are accepted here.
    kind: v.union(v.number(), v.int64()),
    attributes: v.optional(v.any()),
    status: v.optional(v.any()),
    events: v.optional(v.any()),
    links: v.optional(v.any()),
    other: v.optional(v.string()),
    startTime: v.int64(),
    endTime: v.int64(),
    createdAt: v.number(),
  })
    .index("scope", ["scope"])
    .index("name", ["name"]),
};

// Names of the storage tables defined above.
export type TableNames = keyof typeof storageSchema;

export default storageSchema;
86 |
--------------------------------------------------------------------------------
/example/convex/README.md:
--------------------------------------------------------------------------------
1 | # Welcome to your Convex functions directory!
2 |
3 | Write your Convex functions here.
4 | See https://docs.convex.dev/functions for more.
5 |
6 | A query function that takes two arguments looks like:
7 |
8 | ```ts
9 | // functions.js
10 | import { query } from "./_generated/server";
11 | import { v } from "convex/values";
12 |
13 | export const myQueryFunction = query({
14 | // Validators for arguments.
15 | args: {
16 | first: v.number(),
17 | second: v.string(),
18 | },
19 |
20 | // Function implementation.
21 | handler: async (ctx, args) => {
22 | // Read the database as many times as you need here.
23 | // See https://docs.convex.dev/database/reading-data.
24 | const documents = await ctx.db.query("tablename").collect();
25 |
26 | // Arguments passed from the client are properties of the args object.
27 | console.log(args.first, args.second);
28 |
29 | // Write arbitrary JavaScript here: filter, aggregate, build derived data,
30 | // remove non-public properties, or create new objects.
31 | return documents;
32 | },
33 | });
34 | ```
35 |
36 | Using this query function in a React component looks like:
37 |
38 | ```ts
39 | const data = useQuery(api.functions.myQueryFunction, {
40 | first: 10,
41 | second: "hello",
42 | });
43 | ```
44 |
45 | A mutation function looks like:
46 |
47 | ```ts
48 | // functions.js
49 | import { mutation } from "./_generated/server";
50 | import { v } from "convex/values";
51 |
52 | export const myMutationFunction = mutation({
53 | // Validators for arguments.
54 | args: {
55 | first: v.string(),
56 | second: v.string(),
57 | },
58 |
59 | // Function implementation.
60 | handler: async (ctx, args) => {
61 | // Insert or modify documents in the database here.
62 | // Mutations can also read from the database like queries.
63 | // See https://docs.convex.dev/database/writing-data.
64 | const message = { body: args.first, author: args.second };
65 | const id = await ctx.db.insert("messages", message);
66 |
67 | // Optionally, return a value from your mutation.
68 | return await ctx.db.get(id);
69 | },
70 | });
71 | ```
72 |
73 | Using this mutation function in a React component looks like:
74 |
75 | ```ts
76 | const mutation = useMutation(api.functions.myMutationFunction);
77 | function handleButtonPress() {
78 | // fire and forget, the most common way to use mutations
79 | mutation({ first: "Hello!", second: "me" });
80 | // OR
81 | // use the result once the mutation has completed
82 | mutation({ first: "Hello!", second: "me" }).then((result) =>
83 | console.log(result),
84 | );
85 | }
86 | ```
87 |
88 | Use the Convex CLI to push your functions to a deployment. See everything
89 | the Convex CLI can do by running `npx convex -h` in your project root
90 | directory. To learn more, launch the docs with `npx convex docs`.
91 |
--------------------------------------------------------------------------------
/eslint.config.js:
--------------------------------------------------------------------------------
1 | import globals from "globals";
2 | import pluginJs from "@eslint/js";
3 | import tseslint from "typescript-eslint";
4 | import reactHooks from "eslint-plugin-react-hooks";
5 | import reactRefresh from "eslint-plugin-react-refresh";
6 |
export default [
  // Global ignores (apply to every config object below).
  {
    ignores: [
      "dist/**",
      "eslint.config.js",
      // NOTE(review): the repo contains vitest.config.js — confirm this
      // ".ts" pattern is the intended one.
      "vitest.config.ts",
      "**/_generated/",
    ],
  },
  // Type-aware parsing for all first-party TS/JS sources.
  {
    files: ["src/**/*.{js,mjs,cjs,ts,tsx}", "example/**/*.{js,mjs,cjs,ts,tsx}"],
    languageOptions: {
      parser: tseslint.parser,
      parserOptions: {
        project: [
          "./tsconfig.json",
          "./example/tsconfig.json",
          "./example/convex/tsconfig.json",
        ],
        tsconfigRootDir: import.meta.dirname,
      },
    },
  },
  pluginJs.configs.recommended,
  ...tseslint.configs.recommended,
  // Convex code - Worker environment
  {
    files: ["src/**/*.{ts,tsx}", "example/convex/**/*.{ts,tsx}"],
    ignores: ["src/react/**"],
    languageOptions: {
      globals: globals.worker,
    },
    rules: {
      // Unawaited promises are almost always bugs in Convex functions.
      "@typescript-eslint/no-floating-promises": "error",
      "@typescript-eslint/no-explicit-any": "off",
      // Disabled in favor of the TS-aware variant configured below.
      "no-unused-vars": "off",
      "@typescript-eslint/no-unused-vars": [
        "warn",
        {
          argsIgnorePattern: "^_",
          varsIgnorePattern: "^_",
        },
      ],
      "@typescript-eslint/no-unused-expressions": [
        "error",
        {
          allowShortCircuit: true,
          allowTernary: true,
          allowTaggedTemplates: true,
        },
      ],
    },
  },
  // React app code - Browser environment
  {
    files: ["src/react/**/*.{ts,tsx}", "example/src/**/*.{ts,tsx}"],
    languageOptions: {
      ecmaVersion: 2020,
      globals: globals.browser,
    },
    plugins: {
      "react-hooks": reactHooks,
      "react-refresh": reactRefresh,
    },
    rules: {
      ...reactHooks.configs.recommended.rules,
      "react-refresh/only-export-components": [
        "warn",
        { allowConstantExport: true },
      ],
      "@typescript-eslint/no-explicit-any": "off",
      // Disabled in favor of the TS-aware variant configured below.
      "no-unused-vars": "off",
      "@typescript-eslint/no-unused-vars": [
        "warn",
        {
          argsIgnorePattern: "^_",
          varsIgnorePattern: "^_",
        },
      ],
    },
  },
  // Example config files (vite.config.ts, etc.) - Node environment
  {
    files: ["example/vite.config.ts", "example/**/*.config.{js,ts}"],
    languageOptions: {
      globals: {
        ...globals.node,
        ...globals.browser,
      },
    },
  },
];
99 |
--------------------------------------------------------------------------------
/src/ai/types.test.ts:
--------------------------------------------------------------------------------
1 | import type { Infer } from "convex/values";
2 | import { test } from "vitest";
3 | import {
4 | type SerializeUrlsAndUint8Arrays,
5 | vAssistantContent,
6 | vFilePart,
7 | vImagePart,
8 | vReasoningPart,
9 | vRedactedReasoningPart,
10 | vTextPart,
11 | vToolCallPart,
12 | vToolContent,
13 | } from "./types.js";
14 | import { vUserContent } from "./types.js";
15 | import type {
16 | AssistantContent,
17 | FilePart,
18 | ImagePart,
19 | TextPart,
20 | ToolCallPart,
21 | ToolContent,
22 | UserContent,
23 | } from "ai";
24 |
25 | // type assertion
26 | type OurUserContent = SerializeUrlsAndUint8Arrays;
27 | const _userContent: Infer = [] as OurUserContent;
28 | const _userContent2: OurUserContent = [] as Infer;
29 |
30 | type OurAssistantContent = SerializeUrlsAndUint8Arrays;
31 | const _assistantContent: Infer =
32 | [] as OurAssistantContent;
33 | const _assistantContent2: OurAssistantContent = [] as Infer<
34 | typeof vAssistantContent
35 | >;
36 |
37 | type OurToolContent = SerializeUrlsAndUint8Arrays;
38 | const _toolContent: Infer = [] as OurToolContent;
39 | const _toolContent2: OurToolContent = [] as Infer;
40 |
41 | // type assertion
42 | const _toolCallPart: Infer = {} as ToolCallPart;
43 | const _toolCallPart2: ToolCallPart = {} as Infer;
44 |
45 | // type assertion
46 | type OurTextPart = SerializeUrlsAndUint8Arrays;
47 | const _textPart: Infer = {} as OurTextPart;
48 | const _textPart2: OurTextPart = {} as Infer;
49 |
50 | // type assertion
51 | type OurImagePart = SerializeUrlsAndUint8Arrays;
52 | const _imagePart: Infer = {} as OurImagePart;
53 | const _imagePart2: OurImagePart = {} as Infer;
54 |
55 | // type assertion
56 | type OurFilePart = SerializeUrlsAndUint8Arrays;
57 | const _filePart: Infer = {} as OurFilePart;
58 | const _filePart2: OurFilePart = {} as Infer;
59 |
60 | // narrow to the type
61 | type ReasoningPart = AssistantContent[number] & { type: "reasoning" } & object;
62 | type OurReasoningPart = SerializeUrlsAndUint8Arrays;
63 | const _reasoningPart: Infer = {} as OurReasoningPart;
64 | const _reasoningPart2: OurReasoningPart = {} as Infer;
65 |
66 | // narrow to the type
67 | type RedactedReasoningPart = AssistantContent[number] & {
68 | type: "redacted-reasoning";
69 | } & object;
70 | type OurRedactedReasoningPart =
71 | SerializeUrlsAndUint8Arrays;
72 | const _redactedReasoningPart: Infer =
73 | {} as OurRedactedReasoningPart;
74 | const _redactedReasoningPart2: OurRedactedReasoningPart = {} as Infer<
75 | typeof vRedactedReasoningPart
76 | >;
77 |
78 | test("noop", () => {});
79 |
--------------------------------------------------------------------------------
/src/component/vector/tables.ts:
--------------------------------------------------------------------------------
1 | import { literals } from "convex-helpers/validators";
2 | import {
3 | defineTable,
4 | type GenericTableSearchIndexes,
5 | type TableDefinition,
6 | } from "convex/server";
7 | import {
8 | type GenericId,
9 | type ObjectType,
10 | v,
11 | type VId,
12 | type VObject,
13 | type VUnion,
14 | } from "convex/values";
15 |
// Fields shared by every embeddings_<dimension> table defined below.
const embeddings = {
  id: v.optional(v.string()),
  indexName: v.string(),
  vector: v.array(v.number()),
  metadata: v.optional(v.record(v.string(), v.any())),
};
22 |
23 | function table(dimensions: D): Table {
24 | return defineTable(embeddings)
25 | .vectorIndex("vector", {
26 | vectorField: "vector",
27 | dimensions,
28 | filterFields: ["indexName"], // TODO: More fields
29 | })
30 | .index("id", ["id"]);
31 | }
32 |
// Vector dimensions we pre-create tables for; Convex vector indexes require
// a fixed dimension per table, hence one table per supported size.
export const SUPPORTED_DIMENSIONS = [
  128, 256, 512, 768, 1024, 1536, 2048, 3072, 4096,
] as const;
export type SupportedDimension = (typeof SUPPORTED_DIMENSIONS)[number];
export const SUPPORTED_TABLE_NAMES = SUPPORTED_DIMENSIONS.map(
  (d) => `embeddings_${d}`,
) as `embeddings_${(typeof SUPPORTED_DIMENSIONS)[number]}`[];
export type SupportedTableName = (typeof SUPPORTED_TABLE_NAMES)[number];
// Union of document-id validators across all supported embeddings tables.
export const SUPPORTED_TABLE_ID = v.union(
  ...SUPPORTED_TABLE_NAMES.map((name) => v.id(name)),
) as VUnion<
  GenericId<(typeof SUPPORTED_TABLE_NAMES)[number]>,
  VId<(typeof SUPPORTED_TABLE_NAMES)[number]>[]
>;

export const vSupportedDimension = literals(...SUPPORTED_DIMENSIONS);
export const vSupportedTableName = literals(...SUPPORTED_TABLE_NAMES);
export const vSupportedId = SUPPORTED_TABLE_ID;
51 |
52 | type Table = TableDefinition<
53 | VObject, typeof embeddings>,
54 | { id: ["id"] },
55 | GenericTableSearchIndexes,
56 | VectorIndex
57 | >;
58 |
59 | type VectorIndex = {
60 | vector: {
61 | vectorField: "vector";
62 | dimensions: D;
63 | filterFields: string;
64 | };
65 | };
66 |
// One table per supported dimension, keyed "embeddings_<d>". The mapped type
// preserves the exact dimension for each key; the cast is needed because
// Object.fromEntries loses that per-entry precision.
const tables: {
  [K in keyof typeof SUPPORTED_DIMENSIONS &
    number as `embeddings_${(typeof SUPPORTED_DIMENSIONS)[K]}`]: Table<
    (typeof SUPPORTED_DIMENSIONS)[K]
  >;
} = Object.fromEntries(
  SUPPORTED_DIMENSIONS.map((dimensions) => [
    `embeddings_${dimensions}`,
    table(dimensions),
  ]),
) as Record<
  `embeddings_${(typeof SUPPORTED_DIMENSIONS)[number]}`,
  Table<(typeof SUPPORTED_DIMENSIONS)[number]>
>;
81 |
// Hack to get vector indexes of arbitrary* dimensions: one embeddings table
// per supported size, plus a map from logical index name to its table.
export default {
  ...tables,
  indexTableMap: defineTable({
    indexName: v.string(),
    tableName: vSupportedTableName,
    dimensions: vSupportedDimension,
  }).index("indexName", ["indexName"]),
  // documents: defineTable({
  //   id: v.string(),
  //   content: v.string(),
  // }).index("id", ["id"]),
};
95 |
--------------------------------------------------------------------------------
/example/src/mastra/tools/index.ts:
--------------------------------------------------------------------------------
1 | import { createTool } from "@mastra/core/tools";
2 | import { z } from "zod";
3 |
// Subset of the Open-Meteo geocoding API response that we consume.
interface GeocodingResponse {
  results: {
    latitude: number;
    longitude: number;
    name: string;
  }[];
}
// Subset of the Open-Meteo forecast API "current" block that we consume.
interface WeatherResponse {
  current: {
    time: string;
    temperature_2m: number;
    apparent_temperature: number;
    relative_humidity_2m: number;
    wind_speed_10m: number;
    wind_gusts_10m: number;
    weather_code: number;
  };
}
22 |
// Mastra tool: look up current weather for a city (geocode, then fetch
// conditions from Open-Meteo via getWeather below).
export const weatherTool = createTool({
  id: "get-weather",
  description: "Get current weather for a location",
  inputSchema: z.object({
    location: z.string().describe("City name"),
  }),
  outputSchema: z.object({
    temperature: z.number(),
    feelsLike: z.number(),
    humidity: z.number(),
    windSpeed: z.number(),
    windGust: z.number(),
    conditions: z.string(),
    location: z.string(),
  }),
  execute: async ({ context }) => {
    return await getWeather(context.location);
  },
});
42 |
43 | const getWeather = async (location: string) => {
44 | const geocodingUrl = `https://geocoding-api.open-meteo.com/v1/search?name=${encodeURIComponent(location)}&count=1`;
45 | const geocodingResponse = await fetch(geocodingUrl);
46 | const geocodingData = (await geocodingResponse.json()) as GeocodingResponse;
47 |
48 | if (!geocodingData.results?.[0]) {
49 | throw new Error(`Location '${location}' not found`);
50 | }
51 |
52 | const { latitude, longitude, name } = geocodingData.results[0];
53 |
54 | const weatherUrl = `https://api.open-meteo.com/v1/forecast?latitude=${latitude}&longitude=${longitude}¤t=temperature_2m,apparent_temperature,relative_humidity_2m,wind_speed_10m,wind_gusts_10m,weather_code`;
55 |
56 | const response = await fetch(weatherUrl);
57 | const data = (await response.json()) as WeatherResponse;
58 |
59 | return {
60 | temperature: data.current.temperature_2m,
61 | feelsLike: data.current.apparent_temperature,
62 | humidity: data.current.relative_humidity_2m,
63 | windSpeed: data.current.wind_speed_10m,
64 | windGust: data.current.wind_gusts_10m,
65 | conditions: getWeatherCondition(data.current.weather_code),
66 | location: name,
67 | };
68 | };
69 |
70 | function getWeatherCondition(code: number): string {
71 | const conditions: Record = {
72 | 0: "Clear sky",
73 | 1: "Mainly clear",
74 | 2: "Partly cloudy",
75 | 3: "Overcast",
76 | 45: "Foggy",
77 | 48: "Depositing rime fog",
78 | 51: "Light drizzle",
79 | 53: "Moderate drizzle",
80 | 55: "Dense drizzle",
81 | 56: "Light freezing drizzle",
82 | 57: "Dense freezing drizzle",
83 | 61: "Slight rain",
84 | 63: "Moderate rain",
85 | 65: "Heavy rain",
86 | 66: "Light freezing rain",
87 | 67: "Heavy freezing rain",
88 | 71: "Slight snow fall",
89 | 73: "Moderate snow fall",
90 | 75: "Heavy snow fall",
91 | 77: "Snow grains",
92 | 80: "Slight rain showers",
93 | 81: "Moderate rain showers",
94 | 82: "Violent rain showers",
95 | 85: "Slight snow showers",
96 | 86: "Heavy snow showers",
97 | 95: "Thunderstorm",
98 | 96: "Thunderstorm with slight hail",
99 | 99: "Thunderstorm with heavy hail",
100 | };
101 | return conditions[code] || "Unknown";
102 | }
103 |
--------------------------------------------------------------------------------
/src/component/logger.ts:
--------------------------------------------------------------------------------
1 | import { type Infer, v } from "convex/values";
2 | import { internalQuery, type QueryCtx } from "./_generated/server.js";
3 |
4 | export const DEFAULT_LOG_LEVEL: LogLevel = "INFO";
5 |
6 | export const logLevel = v.union(
7 | v.literal("DEBUG"),
8 | v.literal("TRACE"),
9 | v.literal("INFO"),
10 | v.literal("REPORT"),
11 | v.literal("WARN"),
12 | v.literal("ERROR"),
13 | );
14 | export type LogLevel = Infer;
15 |
16 | export type Logger = {
17 | debug: (...args: unknown[]) => void;
18 | info: (...args: unknown[]) => void;
19 | warn: (...args: unknown[]) => void;
20 | error: (...args: unknown[]) => void;
21 | time: (label: string) => void;
22 | timeEnd: (label: string) => void;
23 | event: (event: string, payload: Record) => void;
24 | logLevel: LogLevel;
25 | };
26 | const logLevelOrder = logLevel.members.map((l) => l.value);
27 | const logLevelByName = logLevelOrder.reduce(
28 | (acc, l, i) => {
29 | acc[l] = i;
30 | return acc;
31 | },
32 | {} as Record,
33 | );
34 | export function shouldLog(config: LogLevel, level: LogLevel) {
35 | return logLevelByName[config] <= logLevelByName[level];
36 | }
37 |
// Cached numeric ranks for each level, used for threshold checks below.
const DEBUG = logLevelByName["DEBUG"];
const TRACE = logLevelByName["TRACE"];
const INFO = logLevelByName["INFO"];
const REPORT = logLevelByName["REPORT"];
const WARN = logLevelByName["WARN"];
const ERROR = logLevelByName["ERROR"];
44 |
45 | export function createLogger(level: LogLevel | undefined): Logger {
46 | const logLevel = level ?? DEFAULT_LOG_LEVEL;
47 | const levelIndex = logLevelByName[logLevel];
48 | if (levelIndex === undefined) {
49 | throw new Error(`Invalid log level: ${level}`);
50 | }
51 | return {
52 | debug: (...args: unknown[]) => {
53 | if (levelIndex <= DEBUG) {
54 | console.debug(...args);
55 | }
56 | },
57 | info: (...args: unknown[]) => {
58 | if (levelIndex <= INFO) {
59 | console.info(...args);
60 | }
61 | },
62 | warn: (...args: unknown[]) => {
63 | if (levelIndex <= WARN) {
64 | console.warn(...args);
65 | }
66 | },
67 | error: (...args: unknown[]) => {
68 | if (levelIndex <= ERROR) {
69 | console.error(...args);
70 | }
71 | },
72 | time: (label: string) => {
73 | if (levelIndex <= TRACE) {
74 | console.time(label);
75 | }
76 | },
77 | timeEnd: (label: string) => {
78 | if (levelIndex <= TRACE) {
79 | console.timeEnd(label);
80 | }
81 | },
82 | event: (event: string, payload: Record) => {
83 | const fullPayload = {
84 | component: "mastra",
85 | event,
86 | ...payload,
87 | };
88 | if (levelIndex === REPORT && event === "report") {
89 | console.info(JSON.stringify(fullPayload));
90 | } else if (levelIndex <= INFO) {
91 | console.info(JSON.stringify(fullPayload));
92 | }
93 | },
94 | logLevel,
95 | };
96 | }
97 |
98 | export async function makeConsole(ctx: QueryCtx) {
99 | const config = await ctx.db.query("config").first();
100 | const console = createLogger(config?.config.logLevel);
101 | return console;
102 | }
103 |
104 | export const getLogLevel = internalQuery({
105 | args: {},
106 | handler: async (ctx) => {
107 | const config = await ctx.db.query("config").first();
108 | return config?.config.logLevel ?? DEFAULT_LOG_LEVEL;
109 | },
110 | returns: logLevel,
111 | });
112 |
--------------------------------------------------------------------------------
/example/convex/_generated/server.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable */
2 | /**
3 | * Generated utilities for implementing server-side Convex query and mutation functions.
4 | *
5 | * THIS CODE IS AUTOMATICALLY GENERATED.
6 | *
7 | * To regenerate, run `npx convex dev`.
8 | * @module
9 | */
10 |
11 | import {
12 | actionGeneric,
13 | httpActionGeneric,
14 | queryGeneric,
15 | mutationGeneric,
16 | internalActionGeneric,
17 | internalMutationGeneric,
18 | internalQueryGeneric,
19 | } from "convex/server";
20 |
21 | /**
22 | * Define a query in this Convex app's public API.
23 | *
24 | * This function will be allowed to read your Convex database and will be accessible from the client.
25 | *
26 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument.
27 | * @returns The wrapped query. Include this as an `export` to name it and make it accessible.
28 | */
29 | export const query = queryGeneric;
30 |
31 | /**
32 | * Define a query that is only accessible from other Convex functions (but not from the client).
33 | *
34 | * This function will be allowed to read from your Convex database. It will not be accessible from the client.
35 | *
36 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument.
37 | * @returns The wrapped query. Include this as an `export` to name it and make it accessible.
38 | */
39 | export const internalQuery = internalQueryGeneric;
40 |
41 | /**
42 | * Define a mutation in this Convex app's public API.
43 | *
44 | * This function will be allowed to modify your Convex database and will be accessible from the client.
45 | *
46 | * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument.
47 | * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible.
48 | */
49 | export const mutation = mutationGeneric;
50 |
51 | /**
52 | * Define a mutation that is only accessible from other Convex functions (but not from the client).
53 | *
54 | * This function will be allowed to modify your Convex database. It will not be accessible from the client.
55 | *
56 | * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument.
57 | * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible.
58 | */
59 | export const internalMutation = internalMutationGeneric;
60 |
61 | /**
62 | * Define an action in this Convex app's public API.
63 | *
64 | * An action is a function which can execute any JavaScript code, including non-deterministic
65 | * code and code with side-effects, like calling third-party services.
66 | * They can be run in Convex's JavaScript environment or in Node.js using the "use node" directive.
67 | * They can interact with the database indirectly by calling queries and mutations using the {@link ActionCtx}.
68 | *
69 | * @param func - The action. It receives an {@link ActionCtx} as its first argument.
70 | * @returns The wrapped action. Include this as an `export` to name it and make it accessible.
71 | */
72 | export const action = actionGeneric;
73 |
74 | /**
75 | * Define an action that is only accessible from other Convex functions (but not from the client).
76 | *
77 | * @param func - The function. It receives an {@link ActionCtx} as its first argument.
78 | * @returns The wrapped function. Include this as an `export` to name it and make it accessible.
79 | */
80 | export const internalAction = internalActionGeneric;
81 |
82 | /**
83 | * Define an HTTP action.
84 | *
85 | * The wrapped function will be used to respond to HTTP requests received
86 | * by a Convex deployment if the requests matches the path and method where
87 | * this action is routed. Be sure to route your httpAction in `convex/http.js`.
88 | *
89 | * @param func - The function. It receives an {@link ActionCtx} as its first argument
90 | * and a Fetch API `Request` object as its second.
91 | * @returns The wrapped function. Import this function from `convex/http.js` and route it to hook it up.
92 | */
93 | export const httpAction = httpActionGeneric;
94 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@convex-dev/mastra",
3 | "description": "A mastra component for Convex.",
4 | "repository": "github:get-convex/mastra",
5 | "homepage": "https://github.com/get-convex/mastra#readme",
6 | "bugs": {
7 | "email": "support@convex.dev",
8 | "url": "https://github.com/get-convex/mastra/issues"
9 | },
10 | "version": "0.0.1-alpha.8",
11 | "license": "Apache-2.0",
12 | "keywords": [
13 | "convex",
14 | "component"
15 | ],
16 | "type": "module",
17 | "scripts": {
18 | "dev": "run-p -r 'dev:*'",
19 | "dev:backend": "convex dev --typecheck-components",
20 | "dev:frontend": "cd example && vite --clearScreen false",
21 | "dev:build": "chokidar 'tsconfig*.json' 'src/**/*.ts' -i '**/*.test.ts' -c 'convex codegen --component-dir ./src/component && npm run build' --initial",
22 | "predev": "path-exists .env.local || npm run dev:backend -- --until-success",
23 | "clean": "rm -rf dist *.tsbuildinfo",
24 | "build": "tsc --project ./tsconfig.build.json",
25 | "typecheck": "tsc --noEmit && tsc -p example && tsc -p example/convex",
26 | "lint": "eslint .",
27 | "all": "run-p -r 'dev:*' 'test:watch'",
28 | "test": "vitest run --typecheck",
29 | "test:watch": "vitest --typecheck --clearScreen false",
30 | "test:debug": "vitest --inspect-brk --no-file-parallelism",
31 | "test:coverage": "vitest run --coverage --coverage.reporter=text",
32 | "prepublishOnly": "npm run clean && npm run build",
33 | "preversion": "npm run clean && npm ci && run-p test lint typecheck",
34 | "alpha": "npm version prerelease --preid alpha && npm publish --tag alpha && git push --tags",
35 | "release": "npm version patch && npm publish && git push --tags",
36 | "version": "vim -c 'normal o' -c 'normal o## '$npm_package_version CHANGELOG.md && prettier -w CHANGELOG.md && git add CHANGELOG.md"
37 | },
38 | "files": [
39 | "dist",
40 | "src"
41 | ],
42 | "exports": {
43 | "./package.json": "./package.json",
44 | ".": {
45 | "types": "./dist/client/index.d.ts",
46 | "default": "./dist/client/index.js"
47 | },
48 | "./react": {
49 | "types": "./dist/react/index.d.ts",
50 | "default": "./dist/react/index.js"
51 | },
52 | "./mapping": {
53 | "types": "./dist/mapping/index.d.ts",
54 | "default": "./dist/mapping/index.js"
55 | },
56 | "./test": "./src/test.ts",
57 | "./_generated/component.js": {
58 | "types": "./dist/component/_generated/component.d.ts"
59 | },
60 | "./convex.config": {
61 | "types": "./dist/component/convex.config.d.ts",
62 | "default": "./dist/component/convex.config.js"
63 | },
64 | "./convex.config.js": {
65 | "types": "./dist/component/convex.config.d.ts",
66 | "default": "./dist/component/convex.config.js"
67 | }
68 | },
69 | "peerDependencies": {
70 | "@mastra/core": "^0.8.0",
71 | "ai": "^4.2.0",
72 | "convex": "^1.24.8",
73 | "convex-helpers": "^0.1.100"
74 | },
75 | "devDependencies": {
76 | "@ai-sdk/openai": "^1.3.24",
77 | "@edge-runtime/vm": "5.0.0",
78 | "@eslint/eslintrc": "^3.3.1",
79 | "@eslint/js": "^9.38.0",
80 | "@libsql/client": "^0.15.15",
81 | "@mastra/core": "^0.8.0",
82 | "@mastra/memory": "^0.2.6",
83 | "@types/node": "20.19.24",
84 | "@types/react": "^19.2.2",
85 | "@types/react-dom": "^19.2.2",
86 | "@vitejs/plugin-react": "^5.1.0",
87 | "ai": "4.3.19",
88 | "chokidar-cli": "3.0.0",
89 | "convex": "1.29.0",
90 | "convex-test": "^0.0.38",
91 | "eslint": "^9.38.0",
92 | "eslint-plugin-react-hooks": "^7.0.1",
93 | "eslint-plugin-react-refresh": "^0.4.24",
94 | "globals": "^16.4.0",
95 | "npm-run-all2": "8.0.4",
96 | "path-exists-cli": "2.0.0",
97 | "prettier": "^3.6.2",
98 | "react": "^19.2.0",
99 | "react-dom": "^19.2.0",
100 | "typescript": "^5.9.3",
101 | "typescript-eslint": "^8.46.2",
102 | "vite": "^7.1.12",
103 | "vitest": "3.2.4",
104 | "zod": "^3.25.76"
105 | },
106 | "types": "./dist/client/index.d.ts",
107 | "module": "./dist/client/index.js"
108 | }
109 |
--------------------------------------------------------------------------------
/src/client/vector.ts:
--------------------------------------------------------------------------------
1 | import { MastraVector } from "@mastra/core";
2 | import type { SupportedTableName } from "../component/vector/tables.js";
3 | import type {
4 | GenericDataModel,
5 | GenericMutationCtx,
6 | GenericQueryCtx,
7 | } from "convex/server";
8 | import type { GenericActionCtx } from "convex/server";
9 | import type { ComponentApi } from "../component/_generated/component.js";
10 | export { InMemoryVector } from "./in-memory.js";
11 |
12 | export class ConvexVector extends MastraVector {
13 | ctx: Ctx<"action" | "mutation" | "query"> | undefined;
14 | api: ComponentApi["vector"];
15 |
  /**
   * @param component - The mounted mastra component's API.
   * @param options - Optional settings (e.g. a name for this vector store).
   */
  constructor(
    component: ComponentApi,
    public options?: { name?: string },
  ) {
    super();
    // Only the vector namespace of the component API is used by this class.
    this.api = component.vector;
  }
23 |
  /**
   * Set the context for the storage. Must be called before using the storage
   * in a Convex function. If you are using the storage via the API, you do not
   * need to call this.
   *
   * Pass `undefined` to clear a previously-set context.
   *
   * @param ctx - The context to use for the storage.
   */
  async setCtx(ctx: Ctx<"action" | "mutation" | "query"> | undefined) {
    this.ctx = ctx;
  }
34 |
35 | getApi(kind: T): Ctx {
36 | // TODO: get http client if that's specified
37 | if (!this.ctx) {
38 | throw new Error(
39 | "Context not set: ensure you're calling storage.setCtx" +
40 | " before using the storage.",
41 | );
42 | }
43 | switch (kind) {
44 | case "action":
45 | if (!(this.ctx as GenericActionCtx).runAction) {
46 | throw new Error("Context must be an action context to do this");
47 | }
48 | // fallthrough
49 | case "mutation":
50 | if (!(this.ctx as GenericMutationCtx).runMutation) {
51 | throw new Error("Context doesn't have a way to run mutations");
52 | }
53 | // fallthrough
54 | case "query":
55 | if (!(this.ctx as GenericQueryCtx).runQuery) {
56 | throw new Error("Context is not a query context");
57 | }
58 | }
59 | return this.ctx as Ctx;
60 | }
61 |
62 | async query(...args: Parameters) {
63 | const { indexName, queryVector, topK, filter, includeVector } =
64 | this.normalizeArgs("query", args);
65 | const ctx = this.getApi("action");
66 | return await ctx.runAction(this.api.vector.search, {
67 | indexName,
68 | queryVector,
69 | topK: topK ?? 10,
70 | filter: filter ?? undefined,
71 | includeVector,
72 | });
73 | }
74 |
75 | async upsert(...args: Parameters): Promise {
76 | const { indexName, vectors, metadata, ids } = this.normalizeArgs(
77 | "upsert",
78 | args,
79 | );
80 | const ctx = this.getApi("mutation");
81 | return await ctx.runMutation(this.api.vector.upsert, {
82 | indexName,
83 | vectors,
84 | metadata,
85 | ids,
86 | });
87 | }
88 |
89 | async createIndex(...args: Parameters) {
90 | const { indexName, dimension } = this.normalizeArgs("createIndex", args);
91 | if (dimension !== 1536) {
92 | throw new Error("Only 1536 dimensions supported");
93 | }
94 | const ctx = this.getApi("mutation");
95 | await ctx.runMutation(this.api.vector.createIndex, {
96 | indexName,
97 | dimensions: dimension,
98 | });
99 | }
100 |
101 | async listIndexes() {
102 | const ctx = this.getApi("query");
103 | return await ctx.runQuery(this.api.vector.listIndexes, {});
104 | }
105 |
106 | async describeIndex(indexName: string) {
107 | const ctx = this.getApi("query");
108 | return await ctx.runQuery(this.api.vector.describeIndex, { indexName });
109 | }
110 |
111 | async deleteIndex(indexName: SupportedTableName) {
112 | const ctx = this.getApi("action");
113 | await ctx.runAction(this.api.vector.deleteIndex, { indexName });
114 | }
115 | }
116 |
117 | type Ctx = T extends "action"
118 | ? GenericActionCtx
119 | : T extends "mutation"
120 | ? GenericMutationCtx
121 | : T extends "query"
122 | ? GenericQueryCtx
123 | : never;
124 |
--------------------------------------------------------------------------------
/src/ai/types.ts:
--------------------------------------------------------------------------------
1 | import type { DataContent, ImagePart } from "ai";
2 | import { type Infer, v } from "convex/values";
3 |
4 | // const deprecated = v.optional(v.any()) as unknown as VNull;
5 |
// Provider-specific options: provider name -> option name -> arbitrary value.
const ProviderOptions = v.record(v.string(), v.record(v.string(), v.any()));
7 |
8 | export function dataContentToConvex(data: DataContent): string | ArrayBuffer {
9 | if (data instanceof Uint8Array) {
10 | return Buffer.from(data).toString("base64");
11 | }
12 | return data;
13 | }
14 |
15 | export function imagePartFromConvex(part: Infer): ImagePart {
16 | if (typeof part.image === "string" && part.image.includes("://")) {
17 | return {
18 | ...part,
19 | image: new URL(part.image),
20 | };
21 | }
22 | return part;
23 | }
24 |
25 | export function imagePartToConvex(part: ImagePart): Infer {
26 | const image =
27 | part.image instanceof URL
28 | ? part.image.toString()
29 | : dataContentToConvex(part.image);
30 | return {
31 | ...part,
32 | image,
33 | };
34 | }
35 |
36 | export type SerializeUrlsAndUint8Arrays = T extends URL
37 | ? string
38 | : T extends Uint8Array | ArrayBufferLike
39 | ? ArrayBuffer
40 | : T extends Array
41 | ? Array>
42 | : T extends Record
43 | ? { [K in keyof T]: SerializeUrlsAndUint8Arrays }
44 | : T;
45 |
// A plain-text part of a message.
export const vTextPart = v.object({
  type: v.literal("text"),
  text: v.string(),
  providerOptions: v.optional(ProviderOptions),
  // Legacy AI SDK field name for provider options; accepted for compatibility.
  experimental_providerMetadata: v.optional(ProviderOptions),
});

// An image part: a string (URL or base64 — see imagePartFromConvex) or raw bytes.
export const vImagePart = v.object({
  type: v.literal("image"),
  image: v.union(v.string(), v.bytes()),
  mimeType: v.optional(v.string()),
  providerOptions: v.optional(ProviderOptions),
  experimental_providerMetadata: v.optional(ProviderOptions),
});

// A file attachment part; unlike images, mimeType is required here.
export const vFilePart = v.object({
  type: v.literal("file"),
  data: v.union(v.string(), v.bytes()),
  mimeType: v.string(),
  providerOptions: v.optional(ProviderOptions),
  experimental_providerMetadata: v.optional(ProviderOptions),
});

// User message content: plain text, or a list of text/image/file parts.
export const vUserContent = v.union(
  v.string(),
  v.array(v.union(vTextPart, vImagePart, vFilePart)),
);
73 |
// A model reasoning-trace part.
export const vReasoningPart = v.object({
  type: v.literal("reasoning"),
  text: v.string(),
  providerOptions: v.optional(ProviderOptions),
  experimental_providerMetadata: v.optional(ProviderOptions),
});

// Reasoning the provider redacted; only an opaque data string is available.
export const vRedactedReasoningPart = v.object({
  type: v.literal("redacted-reasoning"),
  data: v.string(),
  providerOptions: v.optional(ProviderOptions),
  experimental_providerMetadata: v.optional(ProviderOptions),
});

// A tool invocation requested by the assistant, correlated by toolCallId.
export const vToolCallPart = v.object({
  type: v.literal("tool-call"),
  toolCallId: v.string(),
  toolName: v.string(),
  args: v.any(), // TODO: need to be optional?
  providerOptions: v.optional(ProviderOptions),
  experimental_providerMetadata: v.optional(ProviderOptions),
});

// Assistant message content: plain text, or a list of the parts above.
export const vAssistantContent = v.union(
  v.string(),
  v.array(
    v.union(
      vTextPart,
      vFilePart,
      vReasoningPart,
      vRedactedReasoningPart,
      vToolCallPart,
    ),
  ),
);
109 |
// Structured (multi-part) tool output: text parts and/or image data strings.
const vToolResultContent = v.array(
  v.union(
    v.object({
      type: v.literal("text"),
      text: v.string(),
    }),
    v.object({
      type: v.literal("image"),
      data: v.string(),
      mimeType: v.optional(v.string()),
    }),
  ),
);

// The result of a single tool call, matched to its request via toolCallId.
const vToolResultPart = v.object({
  type: v.literal("tool-result"),
  toolCallId: v.string(),
  toolName: v.string(),
  result: v.any(),
  experimental_content: v.optional(vToolResultContent),
  isError: v.optional(v.boolean()),
  providerOptions: v.optional(ProviderOptions),
  experimental_providerMetadata: v.optional(ProviderOptions),
});
// Tool message content is a list of tool results.
export const vToolContent = v.array(vToolResultPart);
135 |
136 | export const vContent = v.union(vUserContent, vAssistantContent, vToolContent);
137 | export type Content = Infer;
138 |
--------------------------------------------------------------------------------
/example/convex/_generated/server.d.ts:
--------------------------------------------------------------------------------
1 | /* eslint-disable */
2 | /**
3 | * Generated utilities for implementing server-side Convex query and mutation functions.
4 | *
5 | * THIS CODE IS AUTOMATICALLY GENERATED.
6 | *
7 | * To regenerate, run `npx convex dev`.
8 | * @module
9 | */
10 |
11 | import {
12 | ActionBuilder,
13 | HttpActionBuilder,
14 | MutationBuilder,
15 | QueryBuilder,
16 | GenericActionCtx,
17 | GenericMutationCtx,
18 | GenericQueryCtx,
19 | GenericDatabaseReader,
20 | GenericDatabaseWriter,
21 | } from "convex/server";
22 | import type { DataModel } from "./dataModel.js";
23 |
24 | /**
25 | * Define a query in this Convex app's public API.
26 | *
27 | * This function will be allowed to read your Convex database and will be accessible from the client.
28 | *
29 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument.
30 | * @returns The wrapped query. Include this as an `export` to name it and make it accessible.
31 | */
32 | export declare const query: QueryBuilder;
33 |
34 | /**
35 | * Define a query that is only accessible from other Convex functions (but not from the client).
36 | *
37 | * This function will be allowed to read from your Convex database. It will not be accessible from the client.
38 | *
39 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument.
40 | * @returns The wrapped query. Include this as an `export` to name it and make it accessible.
41 | */
42 | export declare const internalQuery: QueryBuilder;
43 |
44 | /**
45 | * Define a mutation in this Convex app's public API.
46 | *
47 | * This function will be allowed to modify your Convex database and will be accessible from the client.
48 | *
49 | * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument.
50 | * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible.
51 | */
52 | export declare const mutation: MutationBuilder;
53 |
54 | /**
55 | * Define a mutation that is only accessible from other Convex functions (but not from the client).
56 | *
57 | * This function will be allowed to modify your Convex database. It will not be accessible from the client.
58 | *
59 | * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument.
60 | * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible.
61 | */
62 | export declare const internalMutation: MutationBuilder;
63 |
64 | /**
65 | * Define an action in this Convex app's public API.
66 | *
67 | * An action is a function which can execute any JavaScript code, including non-deterministic
68 | * code and code with side-effects, like calling third-party services.
69 | * They can be run in Convex's JavaScript environment or in Node.js using the "use node" directive.
70 | * They can interact with the database indirectly by calling queries and mutations using the {@link ActionCtx}.
71 | *
72 | * @param func - The action. It receives an {@link ActionCtx} as its first argument.
73 | * @returns The wrapped action. Include this as an `export` to name it and make it accessible.
74 | */
75 | export declare const action: ActionBuilder;
76 |
77 | /**
78 | * Define an action that is only accessible from other Convex functions (but not from the client).
79 | *
80 | * @param func - The function. It receives an {@link ActionCtx} as its first argument.
81 | * @returns The wrapped function. Include this as an `export` to name it and make it accessible.
82 | */
83 | export declare const internalAction: ActionBuilder;
84 |
85 | /**
86 | * Define an HTTP action.
87 | *
88 | * The wrapped function will be used to respond to HTTP requests received
89 | * by a Convex deployment if the requests matches the path and method where
90 | * this action is routed. Be sure to route your httpAction in `convex/http.js`.
91 | *
92 | * @param func - The function. It receives an {@link ActionCtx} as its first argument
93 | * and a Fetch API `Request` object as its second.
94 | * @returns The wrapped function. Import this function from `convex/http.js` and route it to hook it up.
95 | */
96 | export declare const httpAction: HttpActionBuilder;
97 |
98 | /**
99 | * A set of services for use within Convex query functions.
100 | *
101 | * The query context is passed as the first argument to any Convex query
102 | * function run on the server.
103 | *
104 | * This differs from the {@link MutationCtx} because all of the services are
105 | * read-only.
106 | */
107 | export type QueryCtx = GenericQueryCtx;
108 |
109 | /**
110 | * A set of services for use within Convex mutation functions.
111 | *
112 | * The mutation context is passed as the first argument to any Convex mutation
113 | * function run on the server.
114 | */
115 | export type MutationCtx = GenericMutationCtx;
116 |
117 | /**
118 | * A set of services for use within Convex action functions.
119 | *
120 | * The action context is passed as the first argument to any Convex action
121 | * function run on the server.
122 | */
123 | export type ActionCtx = GenericActionCtx;
124 |
125 | /**
126 | * An interface to read from the database within Convex query functions.
127 | *
128 | * The two entry points are {@link DatabaseReader.get}, which fetches a single
129 | * document by its {@link Id}, or {@link DatabaseReader.query}, which starts
130 | * building a query.
131 | */
132 | export type DatabaseReader = GenericDatabaseReader;
133 |
134 | /**
135 | * An interface to read from and write to the database within Convex mutation
136 | * functions.
137 | *
138 | * Convex guarantees that all writes within a single mutation are
139 | * executed atomically, so you never have to worry about partial writes leaving
140 | * your data in an inconsistent state. See [the Convex Guide](https://docs.convex.dev/understanding/convex-fundamentals/functions#atomicity-and-optimistic-concurrency-control)
141 | * for the guarantees Convex provides your functions.
142 | */
143 | export type DatabaseWriter = GenericDatabaseWriter;
144 |
--------------------------------------------------------------------------------
/src/component/_generated/server.ts:
--------------------------------------------------------------------------------
1 | /* eslint-disable */
2 | /**
3 | * Generated utilities for implementing server-side Convex query and mutation functions.
4 | *
5 | * THIS CODE IS AUTOMATICALLY GENERATED.
6 | *
7 | * To regenerate, run `npx convex dev`.
8 | * @module
9 | */
10 |
11 | import type {
12 | ActionBuilder,
13 | HttpActionBuilder,
14 | MutationBuilder,
15 | QueryBuilder,
16 | GenericActionCtx,
17 | GenericMutationCtx,
18 | GenericQueryCtx,
19 | GenericDatabaseReader,
20 | GenericDatabaseWriter,
21 | } from "convex/server";
22 | import {
23 | actionGeneric,
24 | httpActionGeneric,
25 | queryGeneric,
26 | mutationGeneric,
27 | internalActionGeneric,
28 | internalMutationGeneric,
29 | internalQueryGeneric,
30 | } from "convex/server";
31 | import type { DataModel } from "./dataModel.js";
32 |
33 | /**
34 | * Define a query in this Convex app's public API.
35 | *
36 | * This function will be allowed to read your Convex database and will be accessible from the client.
37 | *
38 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument.
39 | * @returns The wrapped query. Include this as an `export` to name it and make it accessible.
40 | */
41 | export const query: QueryBuilder = queryGeneric;
42 |
43 | /**
44 | * Define a query that is only accessible from other Convex functions (but not from the client).
45 | *
46 | * This function will be allowed to read from your Convex database. It will not be accessible from the client.
47 | *
48 | * @param func - The query function. It receives a {@link QueryCtx} as its first argument.
49 | * @returns The wrapped query. Include this as an `export` to name it and make it accessible.
50 | */
51 | export const internalQuery: QueryBuilder =
52 | internalQueryGeneric;
53 |
54 | /**
55 | * Define a mutation in this Convex app's public API.
56 | *
57 | * This function will be allowed to modify your Convex database and will be accessible from the client.
58 | *
59 | * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument.
60 | * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible.
61 | */
62 | export const mutation: MutationBuilder = mutationGeneric;
63 |
64 | /**
65 | * Define a mutation that is only accessible from other Convex functions (but not from the client).
66 | *
67 | * This function will be allowed to modify your Convex database. It will not be accessible from the client.
68 | *
69 | * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument.
70 | * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible.
71 | */
72 | export const internalMutation: MutationBuilder =
73 | internalMutationGeneric;
74 |
75 | /**
76 | * Define an action in this Convex app's public API.
77 | *
78 | * An action is a function which can execute any JavaScript code, including non-deterministic
79 | * code and code with side-effects, like calling third-party services.
80 | * They can be run in Convex's JavaScript environment or in Node.js using the "use node" directive.
81 | * They can interact with the database indirectly by calling queries and mutations using the {@link ActionCtx}.
82 | *
83 | * @param func - The action. It receives an {@link ActionCtx} as its first argument.
84 | * @returns The wrapped action. Include this as an `export` to name it and make it accessible.
85 | */
86 | export const action: ActionBuilder = actionGeneric;
87 |
88 | /**
89 | * Define an action that is only accessible from other Convex functions (but not from the client).
90 | *
91 | * @param func - The function. It receives an {@link ActionCtx} as its first argument.
92 | * @returns The wrapped function. Include this as an `export` to name it and make it accessible.
93 | */
94 | export const internalAction: ActionBuilder =
95 | internalActionGeneric;
96 |
97 | /**
98 | * Define an HTTP action.
99 | *
100 | * The wrapped function will be used to respond to HTTP requests received
101 | * by a Convex deployment if the requests matches the path and method where
102 | * this action is routed. Be sure to route your httpAction in `convex/http.js`.
103 | *
104 | * @param func - The function. It receives an {@link ActionCtx} as its first argument
105 | * and a Fetch API `Request` object as its second.
106 | * @returns The wrapped function. Import this function from `convex/http.js` and route it to hook it up.
107 | */
108 | export const httpAction: HttpActionBuilder = httpActionGeneric;
109 |
110 | type GenericCtx =
111 | | GenericActionCtx
112 | | GenericMutationCtx
113 | | GenericQueryCtx;
114 |
115 | /**
116 | * A set of services for use within Convex query functions.
117 | *
118 | * The query context is passed as the first argument to any Convex query
119 | * function run on the server.
120 | *
121 | * If you're using code generation, use the `QueryCtx` type in `convex/_generated/server.d.ts` instead.
122 | */
123 | export type QueryCtx = GenericQueryCtx;
124 |
125 | /**
126 | * A set of services for use within Convex mutation functions.
127 | *
128 | * The mutation context is passed as the first argument to any Convex mutation
129 | * function run on the server.
130 | *
131 | * If you're using code generation, use the `MutationCtx` type in `convex/_generated/server.d.ts` instead.
132 | */
133 | export type MutationCtx = GenericMutationCtx;
134 |
135 | /**
136 | * A set of services for use within Convex action functions.
137 | *
138 | * The action context is passed as the first argument to any Convex action
139 | * function run on the server.
140 | */
141 | export type ActionCtx = GenericActionCtx;
142 |
143 | /**
144 | * An interface to read from the database within Convex query functions.
145 | *
146 | * The two entry points are {@link DatabaseReader.get}, which fetches a single
147 | * document by its {@link Id}, or {@link DatabaseReader.query}, which starts
148 | * building a query.
149 | */
150 | export type DatabaseReader = GenericDatabaseReader;
151 |
152 | /**
153 | * An interface to read from and write to the database within Convex mutation
154 | * functions.
155 | *
156 | * Convex guarantees that all writes within a single mutation are
157 | * executed atomically, so you never have to worry about partial writes leaving
158 | * your data in an inconsistent state. See [the Convex Guide](https://docs.convex.dev/understanding/convex-fundamentals/functions#atomicity-and-optimistic-concurrency-control)
159 | * for the guarantees Convex provides your functions.
160 | */
161 | export type DatabaseWriter = GenericDatabaseWriter;
162 |
--------------------------------------------------------------------------------
/example/src/mastra/workflows/index.ts:
--------------------------------------------------------------------------------
1 | import { Agent, createStep, Mastra, Workflow } from "@mastra/core";
2 | import { z } from "zod";
3 | import { outfitAgent, weatherAgent } from "../agents";
4 |
5 | export const getWeather = createStep({
6 | id: "getWeather",
7 | description: "Gets the weather for a location",
8 | inputSchema: z.object({
9 | location: z.string(),
10 | }),
11 | outputSchema: z.object({
12 | weather: z.string(),
13 | }),
14 | execute: async ({ context, suspend }) => {
15 | const weather = await weatherAgent.generate(
16 | `What's the weather in ${context.inputData.location}?`
17 | );
18 | return { weather: weather.text };
19 | },
20 | });
21 |
22 | export const getOutfit = createStep({
23 | id: "getOutfit",
24 | description: "Gets the outfit for a location",
25 | inputSchema: z.object({
26 | location: z.string(),
27 | weather: z.string(),
28 | }),
29 | outputSchema: z.object({
30 | outfit: z.string(),
31 | }),
32 | execute: async ({ context, suspend, resourceId, threadId, runId }) => {
33 | const outfit = await outfitAgent.generate([
34 | {
35 | role: "user",
36 | content: `What's the outfit for ${context.inputData.weather} in ${context.inputData.location}?`,
37 | },
38 | ]);
39 | return { outfit: outfit.text };
40 | },
41 | });
42 |
/**
 * Human-in-the-loop step: suspends with the proposed outfit, then on resume
 * either accepts it or asks the agent to refine it based on the human's
 * feedback, suspending again with the new proposal.
 */
export const refineOutfit = createStep({
  id: "refineOutfit",
  description: "Refines the outfit",
  inputSchema: z.object({
    outfit: z.string(),
    // null/absent refinement means the human had no changes.
    refinement: z.union([z.string(), z.literal(null)]).optional(),
  }),
  async execute({ context, suspend, resourceId, threadId, runId }) {
    // Our own prior result, if this step already ran and was resumed.
    const previous = context.getStepResult("refineOutfit");
    if (!previous) {
      // First pass: pause the workflow and surface the proposed outfit.
      console.log("suspending", context.inputData.outfit);
      await suspend({
        ask: `Do you want to change anything?`,
        outfit: context.inputData.outfit,
      });
      return { outfit: context.inputData.outfit };
    }
    // Resumed: treat no refinement (or one starting with "no ") as approval.
    if (
      !context.inputData.refinement ||
      context.inputData.refinement.toLowerCase().startsWith("no ")
    ) {
      return { outfit: previous.outfit };
    }
    console.log("refining outfit", previous.outfit, context);
    // TODO: use memory for full history
    // NOTE(review): inputData.weather and inputData.location are read below
    // but are not part of this step's inputSchema — confirm they are wired in.
    const refinement = await outfitAgent.generate([
      {
        role: "user",
        content: `What's the outfit for ${context.inputData.weather} in ${context.inputData.location}?`,
      },
      {
        role: "assistant",
        content: previous.outfit,
      },
      {
        role: "user",
        content: context.inputData.refinement,
      },
    ]);
    // Suspend again so the human can accept or further refine the new outfit.
    await suspend({ ask: `How about this?`, outfit: refinement.text });
    return { outfit: refinement.text };
  },
  outputSchema: z.object({
    outfit: z.string(),
  }),
});
89 |
/**
 * Workflow: location -> weather -> outfit suggestion -> human refinement loop.
 * Each step's inputs are wired from the trigger or from prior step outputs.
 */
export const weatherToOutfitWorkflow = new Workflow({
  name: "weatherToOutfit",
  triggerSchema: z.object({
    location: z.string(),
  }),
})
  .step(getWeather, {
    variables: {
      location: {
        step: "trigger",
        path: "location",
      },
    },
  })
  .then(getOutfit, {
    variables: {
      location: {
        step: "trigger",
        path: "location",
      },
      weather: {
        // NOTE(review): `as any` works around step-typing friction — revisit.
        step: getWeather as any,
        path: "weather",
      },
    },
  })
  .then(refineOutfit, {
    variables: {
      outfit: {
        step: getOutfit as any,
        path: "outfit",
      },
      refinement: {
        // Reads refineOutfit's own prior run — used by the resume flow.
        step: refineOutfit as any,
        path: "refinement",
      },
    },
  });
// --- Minimal demo steps used by the `whenTest` workflow below. ---

// Trivial steps that log and return their own id.
const A = createStep({
  id: "A",
  execute: async ({ context, suspend }) => {
    console.info("A");
    return "A";
  },
});
const B = createStep({
  id: "B",
  execute: async ({ context }) => {
    console.info("B");
    return "B";
  },
});
const C = createStep({
  id: "C",
  execute: async ({ context }) => {
    console.info("C");
    return "C";
  },
});
const D = createStep({
  id: "D",
  execute: async ({ context }) => {
    console.info("D");
    return "D";
  },
});
const E = createStep({
  id: "E",
  execute: async ({ context }) => {
    console.info("E");
    return "E";
  },
});
// Increments a counter across re-runs by reading its own prior result.
const Counter = createStep({
  id: "Counter",
  execute: async ({ context }) => {
    const previous = context.getStepResult("Counter");
    return { count: (previous?.count ?? 0) + 1 };
  },
  outputSchema: z.object({
    count: z.number(),
  }),
});
// Suspends on first run; completes once resumed with a `human` message.
const SuspendsUntilHumanInput = createStep({
  id: "SuspendsUntilHumanInput",
  inputSchema: z.object({
    human: z.string().optional(),
  }),
  execute: async ({ context, suspend }) => {
    console.info("SuspendsUntilHumanInput");
    if (context.inputData.human) {
      console.info("Human message", context.inputData.human);
    } else {
      console.info("Suspending until human input");
      await suspend({ ask: "Can you help?" });
    }
    return "SuspendsUntilHumanInput";
  },
});
// Reports "retry" the first time it runs and "success" on any re-run.
const RetryOnce = createStep({
  id: "RetryOnce",
  execute: async ({ context }) => {
    const previous = context.getStepResult("RetryOnce");
    if (previous) {
      return { status: "success" };
    }
    return { status: "retry" };
  },
});
// Succeeds on the first run (returns 1), throws on any subsequent run.
const FailsOnSecondRun = createStep({
  id: "FailsOnSecondRun",
  execute: async ({ context }) => {
    const previous = context.getStepResult("FailsOnSecondRun");
    console.info("FailsOnSecondRun", previous);
    if (previous) throw new Error("FailsOnSecondRun already ran");
    return (previous ?? 0) + 1;
  },
});
// Always throws, for exercising failure branches.
const Fail = createStep({
  id: "Fail",
  execute: async ({ context }) => {
    console.info("Fail");
    throw new Error("Fail");
  },
});
215 |
/**
 * Scratch workflow for exercising conditional (`when`) step execution.
 * The commented-out sections are kept as alternative experiments.
 */
export const whenTest = new Workflow({
  name: "whenTest",
  triggerSchema: z.object({
    text: z.string(),
    nested: z.object({
      text: z.string(),
    }),
  }),
})
  .step(A)
  .then(Counter)
  // .if(async ({ context }) => context.getStepResult("A") === "A")
  // .then(B)
  // .step(Fail)
  // .after([A, Fail])
  // .step(C)
  .after(A)
  // B runs only once step A has completed successfully.
  .step(B, {
    when: {
      "A.status": "success",
      // ref: {
      //   step: A,
      //   path: ".",
      // },
      // query: {
      //   $eq: "A",
      // },
    },
  })
  // .then(C, {
  //   when: {
  //     ref: {
  //       step: { id: "B" },
  //       path: "status",
  //     },
  //     query: {
  //       $eq: "success",
  //     },
  //   },
  // })
  // .after([A, C])
  // .step(D, {
  //   when: {
  //     "B.status": "success",
  //   },
  // })
  // .then(Counter)
  // .after(B)
  // // skip
  // .step(Fail, {
  //   when: { "RetryOnce.status": "retry" },
  // })
  // .until(async ({ context }) => context.getStepResult("Counter") === 5, Counter)
  // .step(E, {
  //   when: {
  //     ref: {
  //       step: { id: "Counter" },
  //       path: "count",
  //     },
  //     query: { $lt: 5 },
  //   },
  // })
  // .step(RetryOnce, {
  //   when: {
  //     and: [
  //       {
  //         ref: {
  //           step: { id: "Counter" },
  //           path: "status",
  //         },
  //         query: {
  //           $eq: "success",
  //         },
  //       },
  //       {
  //         ref: {
  //           step: { id: "Counter" },
  //           path: "count",
  //         },
  //         query: {
  //           $eq: 5,
  //         },
  //       },
  //     ],
  //   },
  // })
  .commit();
303 |
--------------------------------------------------------------------------------
/src/client/client.ts:
--------------------------------------------------------------------------------
1 | import type {
2 | MessageType,
3 | StorageThreadType,
4 | WorkflowRuns,
5 | } from "@mastra/core";
6 | import type {
7 | EvalRow,
8 | StorageColumn,
9 | StorageGetMessagesArg,
10 | } from "@mastra/core/storage";
11 | import { MastraStorage, type TABLE_NAMES } from "@mastra/core/storage";
12 | import { anyApi, type FunctionReference } from "convex/server";
13 | import { mastraToConvexTableNames } from "../mapping/index.js";
14 | import { ConvexHttpClient } from "convex/browser";
15 |
16 | import { MastraVector } from "@mastra/core";
17 | import type { SupportedTableName } from "../component/vector/tables.js";
18 | export { InMemoryVector } from "./in-memory.js";
19 |
/**
 * Function references for the Convex endpoints exposing vector operations.
 * Each endpoint multiplexes several operations via an `op` argument.
 */
export type VectorApi = {
  vectorAction: FunctionReference<"action">;
  vectorMutation: FunctionReference<"mutation">;
  vectorQuery: FunctionReference<"query">;
};
25 | export class ConvexVector extends MastraVector {
26 | api: VectorApi;
27 |
28 | constructor(
29 | public client: ConvexHttpClient,
30 | public options?: { name?: string; api?: VectorApi },
31 | ) {
32 | super();
33 | this.api = options?.api ?? (anyApi.mastra.api as unknown as VectorApi);
34 | }
35 |
36 | async query(...args: Parameters) {
37 | const { indexName, queryVector, topK, filter, includeVector } =
38 | this.normalizeArgs("query", args);
39 | return await this.client.action(this.api.vectorAction, {
40 | op: "search",
41 | args: {
42 | indexName,
43 | queryVector,
44 | topK: topK ?? 10,
45 | filter: filter ?? undefined,
46 | includeVector,
47 | },
48 | });
49 | }
50 |
51 | async upsert(...args: Parameters): Promise {
52 | const { indexName, vectors, metadata, ids } = this.normalizeArgs(
53 | "upsert",
54 | args,
55 | );
56 | return await this.client.action(this.api.vectorAction, {
57 | op: "upsert",
58 | args: {
59 | indexName,
60 | vectors,
61 | metadata,
62 | ids,
63 | },
64 | });
65 | }
66 |
67 | async createIndex(...args: Parameters) {
68 | const { indexName, dimension } = this.normalizeArgs("createIndex", args);
69 | if (dimension !== 1536) {
70 | throw new Error("Only 1536 dimensions supported");
71 | }
72 | await this.client.action(this.api.vectorAction, {
73 | op: "createIndex",
74 | args: {
75 | indexName,
76 | dimensions: dimension,
77 | },
78 | });
79 | }
80 |
81 | async listIndexes() {
82 | return await this.client.query(this.api.vectorQuery, {
83 | op: "listIndexes",
84 | args: {},
85 | });
86 | }
87 |
88 | async describeIndex(indexName: string) {
89 | return await this.client.query(this.api.vectorQuery, {
90 | op: "describeIndex",
91 | args: { indexName },
92 | });
93 | }
94 |
95 | async deleteIndex(indexName: SupportedTableName) {
96 | await this.client.action(this.api.vectorAction, {
97 | op: "deleteIndex",
98 | args: { indexName },
99 | });
100 | }
101 | }
102 |
/**
 * Function references for the Convex endpoints exposing storage operations.
 * Each endpoint multiplexes several operations via an `op` argument.
 */
export type StorageApi = {
  storageAction: FunctionReference<"action">;
  storageMutation: FunctionReference<"mutation">;
  storageQuery: FunctionReference<"query">;
};
108 |
/**
 * A MastraStorage implementation that persists to a Convex deployment over
 * HTTP, via the op-dispatching endpoints described by {@link StorageApi}.
 */
export class ConvexStorage extends MastraStorage {
  client: ConvexHttpClient;
  api: StorageApi;
  constructor(
    client: ConvexHttpClient,
    options?: { name?: string; api?: StorageApi },
  ) {
    super({ name: options?.name ?? "ConvexStorage" });
    this.client = client;
    // Default to the conventional `api.mastra.api` mount point.
    this.api = options?.api ?? (anyApi.mastra.api as unknown as StorageApi);
    // Tables are predefined in the component schema, so init can be cached.
    this.shouldCacheInit = true;
  }
121 |
122 | async getWorkflowRuns(args?: {
123 | namespace?: string;
124 | workflowName?: string;
125 | fromDate?: Date;
126 | toDate?: Date;
127 | limit?: number;
128 | offset?: number;
129 | }): Promise {
130 | return await this.client.query(this.api.storageQuery, {
131 | op: "getWorkflowRuns",
132 | args,
133 | });
134 | }
135 | async createTable(args: {
136 | tableName: TABLE_NAMES;
137 | schema: Record;
138 | }): Promise {
139 | const convexTableName = mastraToConvexTableNames[args.tableName];
140 | if (!convexTableName) {
141 | throw new Error(`Unsupported table name: ${args.tableName}`);
142 | }
143 | // TODO: we could do more serious validation against the defined schema
144 | // await this.client.mutation(this.api.storageMutation, {
145 | // op: "createTable",
146 | // args,
147 | // });
148 | }
149 |
150 | async clearTable(args: { tableName: TABLE_NAMES }): Promise {
151 | await this.client.action(this.api.storageAction, {
152 | op: "clearTable",
153 | args,
154 | });
155 | }
156 |
157 | async insert(args: {
158 | tableName: TABLE_NAMES;
159 | record: Record;
160 | }): Promise {
161 | await this.client.mutation(this.api.storageMutation, {
162 | op: "insert",
163 | args,
164 | });
165 | return;
166 | }
167 |
168 | async batchInsert(args: {
169 | tableName: TABLE_NAMES;
170 |
171 | records: Record[];
172 | }): Promise {
173 | await this.client.mutation(this.api.storageMutation, {
174 | op: "batchInsert",
175 | args,
176 | });
177 | }
178 |
179 | async load(args: {
180 | tableName: TABLE_NAMES;
181 | keys: Record;
182 | }): Promise {
183 | return await this.client.query(this.api.storageQuery, {
184 | op: "load",
185 | args,
186 | });
187 | }
188 |
189 | async getThreadById({
190 | threadId,
191 | }: {
192 | threadId: string;
193 | }): Promise {
194 | return await this.client.query(this.api.storageQuery, {
195 | op: "getThreadById",
196 | args: { threadId },
197 | });
198 | }
199 |
200 | async getThreadsByResourceId({
201 | resourceId,
202 | }: {
203 | resourceId: string;
204 | }): Promise {
205 | return await this.client.query(this.api.storageQuery, {
206 | op: "getThreadsByResourceId",
207 | args: { resourceId },
208 | });
209 | }
210 |
211 | async saveThread({
212 | thread,
213 | }: {
214 | thread: StorageThreadType;
215 | }): Promise {
216 | return await this.client.mutation(this.api.storageMutation, {
217 | op: "saveThread",
218 | args: { thread },
219 | });
220 | }
221 |
222 | async updateThread({
223 | id,
224 | title,
225 | metadata,
226 | }: {
227 | id: string;
228 | title: string;
229 | metadata: Record;
230 | }): Promise {
231 | return await this.client.mutation(this.api.storageMutation, {
232 | op: "updateThread",
233 | args: { id, title, metadata },
234 | });
235 | }
236 |
237 | async deleteThread({ threadId }: { threadId: string }): Promise {
238 | await this.client.mutation(this.api.storageMutation, {
239 | op: "deleteThread",
240 | args: { threadId },
241 | });
242 | }
243 |
244 | async getMessages({
245 | threadId,
246 | selectBy,
247 | }: StorageGetMessagesArg): Promise {
248 | return await this.client.query(this.api.storageQuery, {
249 | op: "getMessages",
250 | args: { threadId, selectBy },
251 | });
252 | }
253 |
254 | async saveMessages({
255 | messages,
256 | }: {
257 | messages: MessageType[];
258 | }): Promise {
259 | return await this.client.mutation(this.api.storageMutation, {
260 | op: "saveMessages",
261 | args: { messages },
262 | });
263 | }
264 |
265 | async getEvalsByAgentName(
266 | agentName: string,
267 | type?: "test" | "live",
268 | ): Promise {
269 | return await this.client.query(this.api.storageQuery, {
270 | op: "getEvalsByAgentName",
271 | args: { agentName, type },
272 | });
273 | }
274 |
275 | async getTraces(options?: {
276 | name?: string;
277 | scope?: string;
278 | page: number;
279 | perPage: number;
280 | attributes?: Record;
281 | }): Promise {
282 | return await this.client.action(this.api.storageAction, {
283 | op: "getTraces",
284 | args: options,
285 | });
286 | }
287 | }
288 |
--------------------------------------------------------------------------------
/example/convex/example.ts:
--------------------------------------------------------------------------------
1 | "use node";
2 | import { action, internalAction } from "./_generated/server";
3 | import { components } from "./_generated/api";
4 | import { Agent, createStep, Mastra, Workflow } from "@mastra/core";
5 | // import { Memory } from "@mastra/memory";
6 | import { openai } from "@ai-sdk/openai";
7 | import { z } from "zod";
8 | import { weatherAgent, outfitAgent } from "../src/mastra/agents";
9 | import { weatherToOutfitWorkflow } from "../src/mastra/workflows";
10 | import { ConvexStorage, ConvexVector } from "@convex-dev/mastra";
11 |
12 | import { v } from "convex/values";
13 |
// Storage and vector backends provided by the installed Convex component.
const storage = new ConvexStorage(components.mastra);
const vector = new ConvexVector(components.mastra);

// Simple summarization agent; Memory wiring is left commented out for now.
const agent = new Agent({
  // memory: new Memory({ storage, vector }),
  name: "summarizer",
  instructions: "You are a helpful assistant that summarizes text.",
  model: openai("gpt-4o"),
});
23 |
24 | const summarize = createStep({
25 | id: "summarize",
26 | inputSchema: z.object({
27 | text: z.string(),
28 | guidance: z.string().optional(),
29 | }),
30 | async execute({ context, suspend, resourceId, threadId }) {
31 | // const console = createLogger(context.logLevel);
32 | console.debug({ threadId, resourceId, context });
33 | const guidance = context.inputData.text;
34 | const result = await agent.generate(
35 | context.inputData.text + guidance
36 | ? `\n\nHere is some guidance: ${guidance}`
37 | : "",
38 | );
39 | if (!guidance) {
40 | await suspend({
41 | ask: "Does this look good?",
42 | result: result.response.messages,
43 | });
44 | }
45 | return result.text;
46 | },
47 | outputSchema: z.string(),
48 | });
// Minimal demo steps used to exercise the workflow-graph features below.
// A–E simply log their id and return it.
const A = createStep({
  id: "A",
  execute: async ({ context, suspend }) => {
    console.info("A");
    return "A";
  },
});
const B = createStep({
  id: "B",
  execute: async ({ context }) => {
    console.info("B");
    return "B";
  },
});
const C = createStep({
  id: "C",
  execute: async ({ context }) => {
    console.info("C");
    return "C";
  },
});
const D = createStep({
  id: "D",
  execute: async ({ context }) => {
    console.info("D");
    return "D";
  },
});
const E = createStep({
  id: "E",
  execute: async ({ context }) => {
    console.info("E");
    return "E";
  },
});
// Increments each run by reading its own previous step result.
const Counter = createStep({
  id: "Counter",
  execute: async ({ context }) => {
    const previous = context.getStepResult("Counter");
    return { count: (previous?.count ?? 0) + 1 };
  },
  outputSchema: z.object({
    count: z.number(),
  }),
});
// Suspends on first run; completes once resumed with a `human` message.
const SuspendsUntilHumanInput = createStep({
  id: "SuspendsUntilHumanInput",
  inputSchema: z.object({
    human: z.string().optional(),
  }),
  execute: async ({ context, suspend }) => {
    console.info("SuspendsUntilHumanInput");
    if (context.inputData.human) {
      console.info("Human message", context.inputData.human);
    } else {
      console.info("Suspending until human input");
      await suspend({ ask: "Can you help?" });
    }
    return "SuspendsUntilHumanInput";
  },
});
// Reports "retry" on the first run, "success" once a previous result exists.
const RetryOnce = createStep({
  id: "RetryOnce",
  execute: async ({ context }) => {
    const previous = context.getStepResult("RetryOnce");
    if (previous) {
      return { status: "success" };
    }
    return { status: "retry" };
  },
});
// Succeeds the first time (returns 1), throws on any subsequent run.
const FailsOnSecondRun = createStep({
  id: "FailsOnSecondRun",
  execute: async ({ context }) => {
    const previous = context.getStepResult("FailsOnSecondRun");
    console.info("FailsOnSecondRun", previous);
    if (previous) throw new Error("FailsOnSecondRun already ran");
    return (previous ?? 0) + 1;
  },
});
// Always throws; used to exercise failure handling in the graph.
const Fail = createStep({
  id: "Fail",
  execute: async ({ context }) => {
    console.info("Fail");
    throw new Error("Fail");
  },
});
// Demo workflow exercising conditional `when` clauses (both the ref/query
// form and the shorthand "Step.path" form), forks and joins via `after`,
// repeated steps, and variable mapping from trigger data. The large
// commented-out sections are alternative graph shapes kept for experimenting.
const workflow = new Workflow({
  name: "workflow",
  triggerSchema: z.object({
    text: z.string(),
    nested: z.object({
      text: z.string(),
    }),
  }),
})
  .step(A)
  .then(Counter, {
    when: {
      ref: {
        step: A,
        path: ".",
      },
      query: {
        $eq: "A",
      },
    },
  })
  // .if(async ({ context }) => context.getStepResult("A") === "A")
  // .then(B)
  // .step(Fail)
  // .after([A, Fail])
  // .step(C)
  // .after(A)
  .step(B)
  .then(C, {
    when: {
      ref: {
        step: { id: "B" },
        path: "status",
      },
      query: {
        $eq: "success",
      },
    },
  })
  // Join: D only runs once both A and C have completed.
  .after([A, C])
  .step(D, {
    when: {
      "B.status": "success",
    },
  })
  .then(Counter)
  .after(B)
  // skip
  .step(Fail, {
    when: { "RetryOnce.status": "retry" },
  })
  .step(RetryOnce)
  // .until(async ({ context }) => context.getStepResult("Counter").count >= 5, Counter)
  .step(E, {
    when: {
      ref: {
        step: { id: "Counter" },
        path: "count",
      },
      query: { $lt: 5 },
    },
  })
  .step(RetryOnce, {
    when: {
      and: [
        {
          ref: {
            step: { id: "Counter" },
            path: "status",
          },
          query: {
            $eq: "success",
          },
        },
        {
          ref: {
            step: { id: "Counter" },
            path: "count",
          },
          query: {
            $eq: 5,
          },
        },
      ],
    },
  })
  // .step(D);
  // .then(D);
  // .after(B)
  // .step(A, {
  //   // when: { "B.status": "retry" },
  //   when: async ({ context }) => context.getStepResult("B") === "foo",
  // });
  // .after([A, B])
  // .step(A)
  // .then(B)
  // .while(async ({ context }) => context.inputData.text === "B", A)
  // .then(C)
  // .until(async () => true, D)
  // .after(B)
  // .step(D)
  // .then(E);
  // .then(C);
  // when: ({ context }) => context.inputData.text === "B",
  // }).step(C, {
  //   when: ({ context }) => context.inputData.text === "C",
  // }).step(D, {
  //   when: ({ context }) => context.inputData.text === "D",
  // })

  // Pull the summarizer's `text` input out of nested trigger data.
  .step(summarize, {
    variables: {
      text: { step: "trigger", path: "nested.text" },
    },
  })
  .commit();
252 |
// This instance can be constructed outside the Node runtime as well.
const mastra = new Mastra({
  agents: {
    weatherAgent,
    outfitAgent,
  },
  storage,
  workflows: {
    workflow,
    weatherToOutfitWorkflow,
  },
});
// NOTE(review): the generic argument of ReturnType<...> appears to have been
// lost in extraction; restore (e.g. ReturnType<typeof ...>) before compiling.
type M = ReturnType>;
266 |
/**
 * Starts (or re-runs) a Mastra workflow from a Convex internal action and
 * returns the per-step results once the run settles.
 *
 * NOTE(review): storage.ctx is assigned directly here, while the README shows
 * `storage.setCtx(ctx)` — confirm which is the supported API.
 */
export const startWorkflow = internalAction({
  args: {
    runId: v.optional(v.string()),
    name: v.optional(
      v.union(v.literal("workflow"), v.literal("weatherToOutfitWorkflow")),
    ),
    initialData: v.optional(v.any()),
  },
  handler: async (ctx, args) => {
    // Give the storage adapter access to this invocation's function context.
    storage.ctx = ctx;
    const w = mastra.getWorkflow(args.name ?? "workflow");
    const { start } = w.createRun({ runId: args.runId });
    const result = await start(args.initialData);
    // Save the result somewhere
    return result.results;
  },
});
284 |
// Scratch/debug action: creates a workflow run and returns its runId without
// starting it. The commented-out code dumps the step graphs and demonstrates
// resuming a suspended step.
export const t = action({
  async handler(ctx) {
    // console.debug({
    //   stepGraph: workflow.stepGraph,
    //   stepSubscriberGraph: workflow.stepSubscriberGraph,
    //   serializedStepGraph: JSON.stringify(
    //     workflow.serializedStepGraph,
    //     null,
    //     2
    //   ),
    //   serializedStepSubscriberGraph: JSON.stringify(
    //     workflow.serializedStepSubscriberGraph,
    //     null,
    //     2
    //   ),
    // });
    // return;
    // const { runId, start, resume } = workflow.createRun();
    const w = mastra.getWorkflow("workflow");
    const { runId, start, resume } = w.createRun();
    return runId;
    // const afterResume = await resume({
    //   stepId: "A",
    //   context: {
    //     human: "Here is a human message",
    //   },
    // });
    // console.debug("After resume", afterResume);
    // return JSON.stringify(result, null, 2);
  },
});
316 |
--------------------------------------------------------------------------------
/src/mapping/index.test.ts:
--------------------------------------------------------------------------------
1 | import type {
2 | EvalRow,
3 | TABLE_NAMES as ORIGINAL_TABLE_NAMES,
4 | } from "@mastra/core/storage";
5 | import { expect, test } from "vitest";
6 | import type { Content } from "../ai/types.js";
7 | import {
8 | type TABLE_NAMES as NEW_TABLE_NAMES,
9 | type SerializedContent,
10 | TABLE_WORKFLOW_SNAPSHOT,
11 | TABLE_EVALS,
12 | TABLE_MESSAGES,
13 | TABLE_THREADS,
14 | TABLE_TRACES,
15 | mapMastraToSerialized,
16 | mapSerializedToMastra,
17 | serializeContent,
18 | deserializeContent,
19 | } from "./index.js";
20 | import assert from "assert";
21 |
// Compile-time compatibility checks: these assignments fail to type-check if
// our table-name union or serialized content type drifts from @mastra/core's.
// Both directions are asserted so the types stay mutually assignable.
const _tableNames: ORIGINAL_TABLE_NAMES = "" as NEW_TABLE_NAMES;
const _tableNames2: NEW_TABLE_NAMES = "" as ORIGINAL_TABLE_NAMES;
const _content: SerializedContent = [] as Content;
const _content2: Content = [] as SerializedContent;

// Pins the exact string values of the exported table-name constants.
test("table name mappings are bijective", () => {
  expect(TABLE_WORKFLOW_SNAPSHOT).toBe("mastra_workflow_snapshot");
  expect(TABLE_EVALS).toBe("mastra_evals");
  expect(TABLE_MESSAGES).toBe("mastra_messages");
  expect(TABLE_THREADS).toBe("mastra_threads");
  expect(TABLE_TRACES).toBe("mastra_traces");
});
35 |
// Round-trip test: snake_case Mastra row -> camelCase serialized row
// (snapshot JSON-stringified, Dates collapsed to epoch millis) and back.
test("workflow snapshot mapping", () => {
  const now = new Date();
  const mastraRow = {
    workflow_name: "test_workflow",
    run_id: "run123",
    snapshot: {
      state: "RUNNING",
      value: { test: "test" },
      context: {
        steps: {},
        triggerData: {},
        attempts: {},
      },
      activePaths: [],
      runId: "run123",
      timestamp: now.getTime(),
    },
    created_at: now,
    updated_at: now,
  };

  const serialized = mapMastraToSerialized(TABLE_WORKFLOW_SNAPSHOT, mastraRow);
  expect(serialized.workflowName).toBe(mastraRow.workflow_name);
  expect(serialized.runId).toBe(mastraRow.run_id);
  expect(serialized.snapshot).toBe(JSON.stringify(mastraRow.snapshot));
  expect(serialized.createdAt).toBe(Number(now));
  expect(serialized.updatedAt).toBe(Number(now));

  const roundTripped = mapSerializedToMastra(
    TABLE_WORKFLOW_SNAPSHOT,
    serialized,
  );
  expect(roundTripped.workflow_name).toBe(mastraRow.workflow_name);
  expect(roundTripped.run_id).toBe(mastraRow.run_id);
  expect(roundTripped.snapshot).toEqual(mastraRow.snapshot);
  expect(roundTripped.created_at.getTime()).toBe(now.getTime());
  expect(roundTripped.updated_at.getTime()).toBe(now.getTime());
});

// Round-trip test for eval rows. Note createdAt starts as an ISO string,
// serializes to epoch millis, and round-trips back to the ISO string.
test("eval row mapping", () => {
  const now = new Date();
  const mastraRow: EvalRow = {
    input: "test input",
    output: "test output",
    result: { score: 1 },
    agentName: "test_agent",
    metricName: "accuracy",
    instructions: "test instructions",
    testInfo: {},
    globalRunId: "global123",
    runId: "run123",
    createdAt: now.toISOString(),
  };

  const serialized = mapMastraToSerialized(TABLE_EVALS, mastraRow);
  expect(serialized.input).toBe(mastraRow.input);
  expect(serialized.output).toBe(mastraRow.output);
  expect(serialized.result).toBe(mastraRow.result);
  expect(serialized.createdAt).toBe(Number(now));

  const roundTripped = mapSerializedToMastra(TABLE_EVALS, serialized);
  expect(roundTripped.input).toBe(mastraRow.input);
  expect(roundTripped.output).toBe(mastraRow.output);
  expect(roundTripped.result).toBe(mastraRow.result);
  expect(roundTripped.createdAt).toBe(mastraRow.createdAt);
});
102 |
// Round-trip test for messages: fields pass through unchanged while
// createdAt converts Date -> epoch millis -> Date.
test("message mapping", () => {
  const now = new Date();
  const mastraRow = {
    id: "msg123",
    threadId: "thread123",
    resourceId: "resource123",
    content: "test message",
    role: "user" as const,
    type: "text" as const,
    createdAt: now,
  };

  const serialized = mapMastraToSerialized(TABLE_MESSAGES, mastraRow);
  expect(serialized.id).toBe(mastraRow.id);
  expect(serialized.threadId).toBe(mastraRow.threadId);
  expect(serialized.content).toBe(mastraRow.content);
  expect(serialized.role).toBe(mastraRow.role);
  expect(serialized.type).toBe(mastraRow.type);
  expect(serialized.createdAt).toBe(Number(now));

  const roundTripped = mapSerializedToMastra(TABLE_MESSAGES, serialized);
  expect(roundTripped.id).toBe(mastraRow.id);
  expect(roundTripped.threadId).toBe(mastraRow.threadId);
  expect(roundTripped.content).toBe(mastraRow.content);
  expect(roundTripped.role).toBe(mastraRow.role);
  expect(roundTripped.type).toBe(mastraRow.type);
  expect(roundTripped.createdAt.getTime()).toBe(now.getTime());
});

// Round-trip test for threads, including metadata and both timestamps.
test("thread mapping", () => {
  const now = new Date();
  const mastraRow = {
    id: "thread123",
    title: "Test Thread",
    metadata: { key: "value" },
    resourceId: "resource123",
    createdAt: now,
    updatedAt: now,
  };

  const serialized = mapMastraToSerialized(TABLE_THREADS, mastraRow);
  expect(serialized.id).toBe(mastraRow.id);
  expect(serialized.title).toBe(mastraRow.title);
  expect(serialized.metadata).toEqual(mastraRow.metadata);
  expect(serialized.resourceId).toBe(mastraRow.resourceId);
  expect(serialized.createdAt).toBe(Number(now));
  expect(serialized.updatedAt).toBe(Number(now));

  const roundTripped = mapSerializedToMastra(TABLE_THREADS, serialized);
  expect(roundTripped.id).toBe(mastraRow.id);
  expect(roundTripped.title).toBe(mastraRow.title);
  expect(roundTripped.metadata).toEqual(mastraRow.metadata);
  expect(roundTripped.resourceId).toBe(mastraRow.resourceId);
  expect(roundTripped.createdAt.getTime()).toBe(now.getTime());
  expect(roundTripped.updatedAt.getTime()).toBe(now.getTime());
});
159 |
// Round-trip test for traces; kind/startTime/endTime are bigints and must
// survive the mapping untouched.
test("trace mapping", () => {
  const now = new Date();
  const mastraRow = {
    id: "trace123",
    parentSpanId: "parent123",
    traceId: "trace123",
    name: "test_trace",
    scope: "test",
    kind: 1n,
    startTime: 1000n,
    endTime: 2000n,
    createdAt: now,
  };

  const serialized = mapMastraToSerialized(TABLE_TRACES, mastraRow);
  expect(serialized.id).toBe(mastraRow.id);
  expect(serialized.parentSpanId).toBe(mastraRow.parentSpanId);
  expect(serialized.traceId).toBe(mastraRow.traceId);
  expect(serialized.name).toBe(mastraRow.name);
  expect(serialized.scope).toBe(mastraRow.scope);
  expect(serialized.kind).toBe(mastraRow.kind);
  expect(serialized.startTime).toBe(mastraRow.startTime);
  expect(serialized.endTime).toBe(mastraRow.endTime);
  expect(serialized.createdAt).toBe(Number(now));

  const roundTripped = mapSerializedToMastra(TABLE_TRACES, serialized);
  expect(roundTripped.id).toBe(mastraRow.id);
  expect(roundTripped.parentSpanId).toBe(mastraRow.parentSpanId);
  expect(roundTripped.traceId).toBe(mastraRow.traceId);
  expect(roundTripped.name).toBe(mastraRow.name);
  expect(roundTripped.scope).toBe(mastraRow.scope);
  expect(roundTripped.kind).toBe(mastraRow.kind);
  expect(roundTripped.startTime).toBe(mastraRow.startTime);
  expect(roundTripped.endTime).toBe(mastraRow.endTime);
  // NOTE(review): unlike the other mapping tests, roundTripped.createdAt is
  // never asserted here — consider adding that check.
});
195 |
// URLs inside content parts serialize to strings and deserialize back to
// URL instances; plain text parts pass through untouched.
test("content serialization with URLs", () => {
  const url = new URL("https://example.com/image.jpg");
  const content = [
    { type: "image" as const, image: url },
    { type: "text" as const, text: "test" },
  ];

  const serialized = serializeContent(content);
  assert(serialized[0] instanceof Object);
  assert(serialized[0].type === "image");
  expect(serialized[0].image).toBe(url.toString());
  expect(serialized[1]).toEqual(content[1]);

  const deserialized = deserializeContent(serialized);
  assert(deserialized[0] instanceof Object);
  assert(deserialized[0].type === "image");
  expect(deserialized[0].image).toBeInstanceOf(URL);
  expect((deserialized[0].image as URL).toString()).toBe(url.toString());
  expect(deserialized[1]).toEqual(content[1]);
});

// ArrayBuffer payloads survive a serialize/deserialize round trip as
// ArrayBuffers (no base64 or string coercion observable to the caller).
test("content serialization with ArrayBuffer", () => {
  const buffer = new ArrayBuffer(8);
  const content = [
    {
      type: "file" as const,
      data: buffer,
      mimeType: "application/octet-stream",
    },
    { type: "text" as const, text: "test" },
  ];

  const serialized = serializeContent(content);
  assert(serialized[0] instanceof Object);
  assert(serialized[0].type === "file");
  expect(serialized[0].data).toBeInstanceOf(ArrayBuffer);
  expect(serialized[1]).toEqual(content[1]);

  const deserialized = deserializeContent(serialized);
  assert(deserialized[0] instanceof Object);
  assert(deserialized[0].type === "file");
  expect(deserialized[0].data).toBeInstanceOf(ArrayBuffer);
  expect(deserialized[1]).toEqual(content[1]);
});

// Both mapping directions reject unknown table names with the same message.
test("invalid table name throws error", () => {
  expect(() => mapMastraToSerialized("invalid_table" as any, {})).toThrow(
    "Unsupported table name: invalid_table",
  );

  expect(() => mapSerializedToMastra("invalid_table" as any, {})).toThrow(
    "Unsupported table name: invalid_table",
  );
});
250 |
--------------------------------------------------------------------------------
/src/component/storage/storage.ts:
--------------------------------------------------------------------------------
1 | import { v, type Validator } from "convex/values";
2 | import tables from "./tables.js";
3 | import { internal } from "../_generated/api.js";
4 | import type { TableNames } from "./tables.js";
5 | import {
6 | action,
7 | internalMutation,
8 | mutation,
9 | query,
10 | } from "../_generated/server.js";
11 | import { paginator } from "convex-helpers/server/pagination";
12 | import schema from "../schema.js";
13 | import { createLogger, makeConsole } from "../logger.js";
14 |
/**
 * Column description in Mastra's SQL-flavored storage schema, validated
 * against the component's Convex table validators in validateTableSchema.
 */
interface StorageColumn {
  type: "text" | "timestamp" | "uuid" | "jsonb" | "integer" | "bigint";
  primaryKey?: boolean;
  nullable?: boolean;
  references?: {
    table: string;
    column: string;
  };
}
24 |
25 | export function validateTableSchema(
26 | tableName: TableNames,
27 | tableSchema: Record,
28 | ) {
29 | if (!tables[tableName]) {
30 | throw new Error(`Table ${tableName} not found in schema`);
31 | }
32 | const table = tables[tableName];
33 | const fields = table.validator.fields;
34 | for (const [name, field] of Object.entries(tableSchema)) {
35 | if (!(name in fields)) {
36 | throw new Error(`Field ${name} not found in schema for ${tableName}`);
37 | }
38 | let convexValue: Validator["kind"];
39 | switch (field.type) {
40 | case "text":
41 | convexValue = "string";
42 | break;
43 | case "integer":
44 | convexValue = "int64";
45 | break;
46 | case "bigint":
47 | convexValue = "int64";
48 | break;
49 | case "timestamp":
50 | convexValue = "int64";
51 | break;
52 | case "jsonb":
53 | convexValue = "any";
54 | break;
55 | case "uuid":
56 | convexValue = "string";
57 | break;
58 | }
59 | if (!convexValue) {
60 | throw new Error(
61 | `Unexpected field type ${field.type} for ${name} in ${tableName}`,
62 | );
63 | }
64 | const expected = fields[name as keyof typeof fields] as Validator;
65 | if (expected.type !== convexValue) {
66 | throw new Error(
67 | `Field ${name} in table ${tableName} was expected to be a ${convexValue} but got ${expected.type}`,
68 | );
69 | }
70 | if (expected.isOptional === "required" && field.nullable) {
71 | throw new Error(
72 | `Field ${name} in table ${tableName} was expected to be required but the schema specified nullable`,
73 | );
74 | }
75 | }
76 | }
77 |
78 | export const insert = mutation({
79 | args: {
80 | tableName: v.string(),
81 | document: v.any(),
82 | },
83 | handler: async (ctx, args) => {
84 | const console = await makeConsole(ctx);
85 | console.debug(`Inserting ${args.tableName}`, args.document);
86 | // TODO: split out into inserts per usecase and enforce unique constraints
87 | await ctx.db.insert(args.tableName as any, args.document);
88 | },
89 | returns: v.null(),
90 | });
91 |
92 | export const batchInsert = mutation({
93 | args: {
94 | tableName: v.string(),
95 | records: v.array(v.any()),
96 | },
97 | handler: async (ctx, args) => {
98 | const console = await makeConsole(ctx);
99 | console.debug(`Batch inserting ${args.tableName}`, args.records);
100 | await Promise.all(
101 | args.records.map(async (record) => {
102 | await ctx.db.insert(args.tableName as any, record);
103 | }),
104 | );
105 | },
106 | returns: v.null(),
107 | });
108 |
109 | export const loadSnapshot = query({
110 | args: {
111 | runId: v.string(),
112 | workflowName: v.string(),
113 | },
114 | handler: async (ctx, args) => {
115 | const console = await makeConsole(ctx);
116 | console.debug(
117 | `Loading snapshot for ${args.runId} and ${args.workflowName}`,
118 | );
119 | const snapshot = await ctx.db
120 | .query("snapshots")
121 | .withIndex("runId", (q) =>
122 | q.eq("runId", args.runId).eq("workflowName", args.workflowName),
123 | )
124 | .order("desc")
125 | .first();
126 | if (!snapshot) {
127 | return null;
128 | }
129 | const { _id, _creationTime, ...rest } = snapshot;
130 | return rest;
131 | },
132 | returns: v.union(tables.snapshots.validator, v.null()),
133 | });
134 |
135 | export const load = query({
136 | args: {
137 | tableName: v.string(),
138 | keys: v.any(),
139 | },
140 | handler: async (ctx, args) => {
141 | const console = await makeConsole(ctx);
142 | console.debug(`Loading ${args.tableName}`, args.keys);
143 | if (args)
144 | throw new Error(
145 | `Not implemented: load for ${args.tableName}: ${JSON.stringify(args.keys)}`,
146 | );
147 | },
148 | returns: v.union(v.any(), v.null()),
149 | });
150 |
151 | export const clearTable = action({
152 | args: { tableName: v.string() },
153 | handler: async (ctx, args) => {
154 | const logLevel = await ctx.runQuery(internal.logger.getLogLevel);
155 | const console = createLogger(logLevel);
156 | console.debug(`Clearing ${args.tableName}`);
157 | let cursor: string | null = null;
158 | while (true) {
159 | cursor = await ctx.scheduler.runAfter(
160 | 0,
161 | internal.storage.storage.clearPage,
162 | {
163 | tableName: args.tableName,
164 | cursor,
165 | },
166 | );
167 | if (!cursor) {
168 | break;
169 | }
170 | }
171 | console.debug(`Cleared ${args.tableName}`);
172 | },
173 | returns: v.null(),
174 | });
175 |
176 | export const clearPage = internalMutation({
177 | args: { tableName: v.string(), cursor: v.union(v.string(), v.null()) },
178 | handler: async (ctx, args): Promise => {
179 | const console = await makeConsole(ctx);
180 | const page = await ctx.db.query(args.tableName as any).paginate({
181 | numItems: 1000,
182 | cursor: args.cursor ?? null,
183 | });
184 | await Promise.all(
185 | page.page.map(async (item) => {
186 | await ctx.db.delete(item._id);
187 | }),
188 | );
189 | console.debug(`Deleted ${page.page.length} items from ${args.tableName}`);
190 | if (!page.isDone) {
191 | return page.continueCursor;
192 | }
193 | return null;
194 | },
195 | returns: v.union(v.string(), v.null()),
196 | });
197 |
/**
 * Fetches eval rows for an agent, optionally restricted to "test" evals
 * (those with testInfo.testPath set) or "live" evals (those without).
 */
export const getEvalsByAgentName = query({
  args: {
    agentName: v.string(),
    type: v.optional(v.union(v.literal("test"), v.literal("live"))),
  },
  handler: async (ctx, args) => {
    const console = await makeConsole(ctx);
    console.debug(`Getting evals by name ${args.agentName}, type ${args.type}`);
    const evals = await ctx.db
      .query("evals")
      .withIndex("agentName", (q) => {
        const byAgent = q.eq("agentName", args.agentName);
        // Comparing testInfo.testPath against null splits the index range:
        // gt(null) keeps rows with a path set, lte(null) keeps the rest.
        // NOTE(review): relies on Convex's cross-type index ordering placing
        // null below other values — confirm against the Convex docs.
        if (args.type === "test") {
          return byAgent.gt("testInfo.testPath", null);
        } else if (args.type === "live") {
          return byAgent.lte("testInfo.testPath", null);
        }
        return byAgent;
      })
      .collect();
    // Strip Convex system fields before returning across the boundary.
    return evals.map((e) => {
      const { _id, _creationTime, ...serialized } = e;
      return serialized;
    });
  },
  returns: v.array(tables.evals.validator),
});
225 |
226 | const MAX_TRACES_SCANNED = 4096;
227 | export const getTracesPage = query({
228 | args: {
229 | name: v.optional(v.string()),
230 | scope: v.optional(v.string()),
231 | cursor: v.union(v.string(), v.null()),
232 | numItems: v.number(),
233 | attributes: v.optional(v.record(v.string(), v.string())),
234 | },
235 | handler: async (ctx, args) => {
236 | const console = await makeConsole(ctx);
237 | console.debug(
238 | `Getting traces page with name ${args.name}, scope ${args.scope}, cursor ${args.cursor}, numItems ${args.numItems}, attributes ${args.attributes}`,
239 | );
240 | const { scope, name, cursor, numItems, attributes } = args;
241 | const overfetch = (scope ? 1 : 8) * (name ? 1 : 8);
242 | const traces = paginator(ctx.db, schema).query("traces");
243 | const results = await (
244 | scope
245 | ? traces.withIndex("scope", (q) => q.eq("scope", scope))
246 | : name
247 | ? traces.withIndex("name", (q) =>
248 | q.gte("name", name).lt("name", name + "~"),
249 | )
250 | : traces
251 | ).paginate({
252 | numItems: Math.min(numItems * overfetch, MAX_TRACES_SCANNED),
253 | cursor: cursor,
254 | });
255 | console.debug(`Got ${results.page.length} traces`);
256 | return {
257 | isDone: results.isDone,
258 | continuCursor: results.continueCursor,
259 | page: results.page
260 | .filter(
261 | (trace) =>
262 | (!name || trace.name.startsWith(name)) &&
263 | (!scope || trace.scope === scope) &&
264 | (!attributes ||
265 | Object.entries(attributes).every(
266 | ([key, value]) => trace[key as keyof typeof trace] === value,
267 | )),
268 | )
269 | .map((t) => {
270 | const { _id, _creationTime, ...serialized } = t;
271 | return serialized;
272 | }),
273 | };
274 | },
275 | returns: v.object({
276 | isDone: v.boolean(),
277 | continuCursor: v.string(),
278 | page: v.array(tables.traces.validator),
279 | }),
280 | });
281 |
// Shadows the global `console` at module scope with a string so any stray
// `console.*` call in this file fails to type-check, forcing handlers to use
// makeConsole/createLogger instead.
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const console = "THIS IS A REMINDER TO USE makeConsole";
284 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Convex Mastra Component
2 |
3 | [](https://badge.fury.io/js/@convex-dev%2Fmastra)
4 |
5 |
6 |
7 | Use [Mastra](https://mastra.ai) to build workflows and define agents, then use
8 | this component to run and save them on [Convex](https://convex.dev/).
9 |
10 | 1. Run workflows asynchronously. Fire and forget from a serverless function
11 | (mutation or action).
12 | 1. Track the status of the workflow. Reactive queries and run-to-completion
13 | utilities. Or just write to the database from your steps and use normal
14 | Convex reactivity.
15 | 1. Resume a workflow from where it left off, after suspending it for user input.
16 | 1. Full support for Mastra's step forking, joining, triggering, and more.
17 |
18 | ```ts
19 | const storage = new ConvexStorage(components.mastra);
20 | const vector = new ConvexVector(components.mastra);
21 |
22 | // Uses storage to save and load messages and threads.
23 | // Uses vector to save and query embeddings for RAG on messages.
24 | const agent = new Agent({ memory: new Memory({ storage, vector}), ... })
25 | // Uses storage to save and load workflow state.
26 | const mastra = new Mastra({ storage, ...})
27 |
28 | export const myAction = action({
29 | args: { workflowName: v.string()},
30 | handler: async (ctx, args) => {
31 | // IMPORTANT:
32 |     // setCtx must be called before using storage or vector
33 | storage.setCtx(ctx);
34 | vector.setCtx(ctx);
35 |
36 | const workflow = mastra.getWorkflow(args.workflowName);
37 |     const { runId, start } = workflow.createRun();
38 | await start({...});
39 | }
40 | })
41 | ```
42 |
43 | ### Use cases
44 |
45 | - Agentic workflows, such as taking user input, calling multiple LLMs, calling
46 | third parties, etc.
47 | - ... Everything else you want to do with Mastra.
48 |
49 | Found a bug? Feature request?
50 | [File it here](https://github.com/get-convex/mastra/issues).
51 |
52 | ### Future work
53 |
54 | - Provide Storage and Vector integrations for using Convex **from** Mastra
55 | servers.
56 | - Enables running from both `mastra dev` and `convex dev` for fast iterations.
57 | - Enables using Convex for Agent Memory.
58 | - Provide helpers to export functions so browsers can call them safely.
59 | - Add a custom mutation step, for a transactional step that will always
60 | terminate without needing a retry configuration (built-in for Convex).
61 |
62 | ## Pre-requisite: Convex
63 |
64 | You'll need an existing Convex project to use the component. Convex is a hosted
65 | backend platform, including a database, serverless functions, and a ton more you
66 | can learn about [here](https://docs.convex.dev/get-started).
67 |
68 | Run `npm create convex` or follow any of the
69 | [quickstarts](https://docs.convex.dev/home) to set one up.
70 |
71 | ## Installation
72 |
73 | Install the component package:
74 |
75 | ```ts
76 | npm install @convex-dev/mastra
77 | ```
78 |
79 | **NOTE**: You also need to:
80 |
81 | - Directly install `@libsql/client`
82 | - Mark it as an external package
83 | - Export it from a file in your /convex folder due to current bundling issues.
84 |
85 | You can do all of this by running the following commands from the project root:
86 |
87 | ```sh
88 | npm install -D @libsql/client
89 | echo '{"node":{"externalPackages":["@libsql/client"]}}' > convex.json
90 | printf '"use node";\nexport * as _ from "@libsql/client";' > convex/_workaround.ts
91 | ```
92 |
93 | Create a `convex.config.ts` file in your app's `convex/` folder and install the
94 | component by calling `use`:
95 |
96 | ```ts
97 | // convex/convex.config.ts
98 | import { defineApp } from "convex/server";
99 | import mastra from "@convex-dev/mastra/convex.config.js";
100 |
101 | const app = defineApp();
102 | app.use(mastra);
103 |
104 | export default app;
105 | ```
106 |
107 | ## Usage
108 |
109 | - It's important to call `storage.setCtx(ctx)` and `vector.setCtx(ctx)` before
110 | using the storage or vector in an action.
111 |
112 | ```ts
113 | "use node";
114 | const storage = new ConvexStorage(components.mastra);
115 | const vector = new ConvexVector(components.mastra);
116 |
117 | // Uses storage to save and load messages and threads.
118 | // Uses vector to save and query embeddings for RAG on messages.
119 | const agent = new Agent({ memory: new Memory({ storage, vector}), ... })
120 | // Uses storage to save and load workflow state.
121 | const mastra = new Mastra({ storage, ...})
122 |
123 | export const myAction = action({
124 | args: { workflowName: v.string()},
125 | handler: async (ctx, args) => {
126 | // IMPORTANT:
127 | // <- must be called before using storage or vector
128 | storage.setCtx(ctx);
129 | vector.setCtx(ctx);
130 |
131 | const workflow = mastra.getWorkflow(args.workflowName);
132 | const { runId, start } = await workflow.create(ctx);
133 | await start({...});
134 | }
135 | })
136 | ```
137 |
138 | Querying the status reactively from a non-node file:
139 |
140 | ```ts
141 | import { query } from "./_generated/server";
142 | import { components } from "./_generated/api";
143 | import { v } from "convex/values";
144 | import {
145 | mapSerializedToMastra,
146 | TABLE_WORKFLOW_SNAPSHOT,
147 | } from "@convex-dev/mastra/mapping";
148 |
149 | export const getStatus = query({
150 | args: { runId: v.string() },
151 | handler: async (ctx, args) => {
152 | const doc = await ctx.runQuery(
153 | components.mastra.storage.storage.loadSnapshot,
154 | {
155 | workflowName: "weatherToOutfitWorkflow",
156 | runId: args.runId,
157 | },
158 | );
159 | if (!doc) {
160 | return null;
161 | }
162 | const snapshot = mapSerializedToMastra(TABLE_WORKFLOW_SNAPSHOT, doc);
163 | const { childStates, activePaths, suspendedSteps } = snapshot.snapshot;
164 | return { childStates, activePaths, suspendedSteps };
165 | },
166 | });
167 | ```
168 |
169 | See more example usage in [example.ts](./example/convex/example.ts).
170 |
171 | ## Limitations
172 |
173 | 1. For local development, you need to run `mastra dev` in Node 20, but
174 | `convex dev` in Node 18. If you see issues about syscalls at import time, try
175 | using the cloud dev environment instead.
176 | 1. Currently you can only interact with Mastra classes from Node actions, so you
177 | can't start them from a mutation without doing it indirectly via the
178 | Scheduler or Workpool by enqueuing the node action to run.
179 | 1. To reactively query for the status of a workflow, you need to call the
180 | component API directly. There's an example above and in
181 | [v8Runtime.ts](./example/convex/v8Runtime.ts).
182 |
183 | ### TODO before it's out of alpha
184 |
185 | - [ ] Validate the Storage and Vector implementations (from Convex).
186 | - [ ] Ensure @mastra/memory can be bundled in Convex.
187 |
188 | ### TODO before it's out of beta
189 |
190 | - [ ] Support using Storage and Vector from `mastra dev`.
191 | - [ ] Configurable vacuuming of workflow state.
192 | - [ ] Support queries on workflow state without hitting the component directly.
193 |
194 | ### Backlog:
195 |
196 | 1. Support exposing the same `hono` HTTP API as Mastra servers.
197 | 1. Better logging and tracing.
198 | 1. Provide a Mutation Step that avoids the v8 action and is executed exactly once.
199 | 1. Workflows currently only run in Node Actions. You can create/start/resume
200 | them from anywhere, but each step will be executed in the node runtime. This
201 | is a bit slower and more expensive than running in the default runtime.
202 | 1. Using the `ConvexStorage` from Mastra doesn't share state with workflows made
203 | via the Component. They're currently stored in separate tables with different
204 | schemas.
205 |
206 | ## Troubleshooting
207 |
208 | ### Libsql errors
209 |
210 | If you see an error like this:
211 |
212 | ```
213 | Uncaught Failed to analyze _deps/node/4QMS5IZK.js: Cannot find module '@libsql/linux-arm64-gnu'
214 | ```
215 |
216 | You need to add `@libsql/client` to the `externalPackages` in a `convex.json`
217 | file in the root of your project:
218 |
219 | ```json
220 | {
221 | "node": {
222 | "externalPackages": ["@libsql/client"]
223 | }
224 | }
225 | ```
226 |
227 | If that still doesn't solve it, add a `convex/_workaround.ts` file:
228 |
229 | ```ts
230 | "use node";
231 | export * as _ from "@libsql/client";
232 | ```
233 |
234 | ### Errors about 'no loader is configured for ".node" files'
235 |
236 | If you see an error like this:
237 |
238 | ```
239 | ✘ [ERROR] No loader is configured for ".node" files: node_modules/onnxruntime-node/bin/napi-v3/win32/arm64/onnxruntime_binding.nodel
240 | ```
241 |
242 | You're likely importing some node package through a dependency that isn't
243 | supported. One workaround is to add it as an explicit dependency, then add it to
244 | the `externalPackages` in a `convex.json` file in the root of your project, then
245 | export something from it, similar to `@libsql/client` above.
246 |
247 | You can also try deleting your `node_modules` and `package-lock.json` and
248 | re-installing using node 18.
249 |
250 | ### Errors about node packages not being available
251 |
252 | ```
253 | ✘ [ERROR] Could not resolve "assert"
254 |
255 | node_modules/sonic-boom/index.js:8:23:
256 | 8 │ const assert = require('assert')
257 | ╵ ~~~~~~~~
258 |
259 | The package "assert" wasn't found on the file system but is built into node. Are you trying to
260 | bundle for node? You can use "platform: 'node'" to do that, which will remove this error.
261 | ✖ It looks like you are using Node APIs from a file without the "use node" directive.
262 | ```
263 |
264 | This is because you're using a Node API in a file that doesn't have
265 | `"use node";` as the first line of the file. Or you're importing a file in your
266 | convex/ directory that imports from a node dependency that doesn't have the
267 | `"use node"` directive.
268 |
269 | To fix this, add the `"use node"` directive to the file. Note: these files can
270 | only have actions, since mutations and queries only run in the default runtime.
271 |
272 |
273 |
--------------------------------------------------------------------------------
/src/component/vector/vector.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * Implement the API based on ../storage/storage.ts, using and editing the tables in ./tables.ts
3 | * and providing an API that matches this abstract class, but not importing any Mastra* classes or from @mastra/core or @mastra/core/mastra or @mastra/core/vector
4 | */
5 |
6 | import { v } from "convex/values";
7 | import {
8 | action,
9 | query,
10 | mutation,
11 | internalQuery,
12 | internalMutation,
13 | type QueryCtx,
14 | } from "../_generated/server.js";
15 | import {
16 | SUPPORTED_DIMENSIONS,
17 | type SupportedDimension,
18 | vSupportedDimension,
19 | vSupportedId,
20 | vSupportedTableName,
21 | } from "./tables.js";
22 | import { internal } from "../_generated/api.js";
23 | import { paginator } from "convex-helpers/server/pagination";
24 | import schema from "../schema.js";
25 |
26 | export const createIndex = mutation({
27 | args: { indexName: v.string(), dimensions: vSupportedDimension },
28 | handler: async (ctx, { indexName, dimensions }) => {
29 | // For now only validate that it maches one of the supported sizes
30 | if (!SUPPORTED_DIMENSIONS.includes(dimensions)) {
31 | throw new Error(`Unsupported index size: ${indexName}`);
32 | }
33 | const existing = await ctx.db
34 | .query("indexTableMap")
35 | .withIndex("indexName", (q) => q.eq("indexName", indexName))
36 | .first();
37 | if (existing) {
38 | if (existing.dimensions !== dimensions) {
39 | throw new Error("Index already exists with different dimensions");
40 | }
41 | console.warn(`Index ${indexName} already exists, not creating...`);
42 | return;
43 | }
44 | console.log(`Creating index ${indexName} with dimensions ${dimensions}`);
45 | await ctx.db.insert("indexTableMap", {
46 | indexName,
47 | tableName: `embeddings_${dimensions}`,
48 | dimensions: dimensions,
49 | });
50 | },
51 | returns: v.null(),
52 | });
53 |
54 | function getIndexMetadata(ctx: QueryCtx, name: string) {
55 | return ctx.db
56 | .query("indexTableMap")
57 | .withIndex("indexName", (q) => q.eq("indexName", name))
58 | .order("desc")
59 | .first();
60 | }
61 |
62 | export const getIndexMetadataQuery = internalQuery({
63 | args: { indexName: v.string() },
64 | handler: async (ctx, args) => {
65 | return await getIndexMetadata(ctx, args.indexName);
66 | },
67 | });
68 |
69 | export const upsert = mutation({
70 | args: {
71 | indexName: v.string(),
72 | vectors: v.array(v.array(v.number())),
73 | metadata: v.optional(v.array(v.record(v.string(), v.any()))),
74 | ids: v.optional(v.array(v.string())),
75 | },
76 | returns: v.array(v.string()),
77 | handler: async (
78 | ctx,
79 | { indexName, vectors, metadata, ids },
80 | ): Promise => {
81 | const index = await ctx.runQuery(
82 | internal.vector.vector.getIndexMetadataQuery,
83 | {
84 | indexName,
85 | },
86 | );
87 | if (!index) {
88 | throw new Error("Index not found");
89 | }
90 | const dimensions = index.dimensions;
91 | if (!vectors.every((v) => v.length === dimensions)) {
92 | throw new Error(`All vectors must have ${dimensions} dimensions`);
93 | }
94 | if (metadata && vectors.length !== metadata.length) {
95 | throw new Error("vectors and metadata must have same length");
96 | }
97 | if (ids && vectors.length !== ids.length) {
98 | throw new Error("vectors and ids must have same length");
99 | }
100 |
101 | // Batch insert all vectors
102 | return await Promise.all(
103 | vectors.map(async (vector, i) => {
104 | const id = ids?.[i];
105 | if (id) {
106 | const convexId = ctx.db.normalizeId(index.tableName, id);
107 | const existing = convexId
108 | ? await ctx.db.get(convexId)
109 | : await ctx.db
110 | .query(index.tableName)
111 | .withIndex("id", (q) => q.eq("id", id))
112 | .first();
113 | if (existing) {
114 | await ctx.db.patch(existing._id, {
115 | vector,
116 | metadata: metadata?.[i],
117 | });
118 | return existing.id ?? existing._id;
119 | }
120 | }
121 | const newId = await ctx.db.insert(index.tableName, {
122 | id,
123 | vector,
124 | metadata: metadata?.[i],
125 | indexName,
126 | });
127 | if (!id) {
128 | await ctx.db.patch(newId, {
129 | id: newId,
130 | });
131 | }
132 | return id ?? newId;
133 | }),
134 | );
135 | },
136 | });
137 |
// Validator for a single vector-search hit returned by search/lookupResults.
const vSearchResult = v.object({
  id: v.string(),
  score: v.number(),
  metadata: v.optional(v.record(v.string(), v.any())),
  vector: v.optional(v.array(v.number())),
});
144 |
145 | export const search = action({
146 | args: {
147 | indexName: v.string(),
148 | queryVector: v.array(v.number()),
149 | topK: v.number(),
150 | filter: v.optional(v.record(v.string(), v.any())),
151 | includeVector: v.optional(v.boolean()),
152 | },
153 | handler: async (
154 | ctx,
155 | { indexName, queryVector, topK, filter, includeVector },
156 | ): Promise => {
157 | const index = await ctx.runQuery(
158 | internal.vector.vector.getIndexMetadataQuery,
159 | {
160 | indexName,
161 | },
162 | );
163 | if (!index) {
164 | throw new Error("Index not found");
165 | }
166 | const dimensions = index.dimensions;
167 | if (queryVector.length !== dimensions) {
168 | throw new Error(`Query vector must have ${dimensions} dimensions`);
169 | }
170 |
171 | const results = await ctx.vectorSearch(index.tableName, "vector", {
172 | vector: queryVector,
173 | limit: Math.max(topK * 2 * (1 + Object.keys(filter ?? {}).length), 256),
174 | filter: filter
175 | ? (q) => {
176 | return q.eq("indexName", index.indexName);
177 | }
178 | : undefined,
179 | });
180 |
181 | const entries = await ctx.runQuery(internal.vector.vector.lookupResults, {
182 | ids: results.map((r) => r._id),
183 | scores: results.map((r) => r._score),
184 | includeVector: includeVector ?? false,
185 | });
186 |
187 | const filtered = entries.filter((r) => {
188 | if (filter) {
189 | return Object.entries(filter).every(([key, value]) => {
190 | return r.metadata?.[key] === value;
191 | });
192 | }
193 | return true;
194 | });
195 |
196 | return filtered;
197 | },
198 | returns: v.array(vSearchResult),
199 | });
200 |
// Client-facing shape of one search hit; mirrors vSearchResult above.
type SearchResult = {
  id: string;
  score: number;
  metadata?: Record;
  vector?: number[];
};
207 |
208 | export const lookupResults = internalQuery({
209 | args: {
210 | ids: v.array(vSupportedId),
211 | scores: v.array(v.number()),
212 | includeVector: v.boolean(),
213 | },
214 | handler: async (ctx, args): Promise => {
215 | if (args.ids.length !== args.scores.length) {
216 | throw new Error("ids and scores must have same length");
217 | }
218 | const results = await Promise.all(args.ids.map((id) => ctx.db.get(id)));
219 | return results.flatMap((r, i) =>
220 | r
221 | ? [
222 | {
223 | id: r._id,
224 | score: args.scores[i],
225 | metadata: r.metadata,
226 | vector: args.includeVector ? r.vector : undefined,
227 | },
228 | ]
229 | : [],
230 | );
231 | },
232 | returns: v.array(vSearchResult),
233 | });
234 |
235 | export const listIndexes = query({
236 | args: {},
237 | handler: async (ctx): Promise => {
238 | return (await ctx.db.query("indexTableMap").collect()).map(
239 | (i) => i.indexName,
240 | );
241 | },
242 | returns: v.array(v.string()),
243 | });
244 |
// Describe a logical index: its dimension, document count, and metric.
// Throws if the index is unknown or its recorded dimensions are unsupported.
export const describeIndex = query({
  args: { indexName: v.string() },
  handler: async (ctx, { indexName }) => {
    const index = await getIndexMetadata(ctx, indexName);
    if (!index) {
      throw new Error("Index not found");
    }
    const dimensions = index.dimensions;
    if (!SUPPORTED_DIMENSIONS.includes(dimensions)) {
      throw new Error("Invalid index name");
    }
    return {
      dimension: dimensions,
      // NOTE(review): `.count()` is not part of the standard Convex query
      // API; the `as any` cast suggests this relies on a helper or may throw
      // at runtime — verify against the deployed Convex version.
      count: await (ctx.db.query(index.tableName) as any).count(),
      // Only cosine similarity is exposed here.
      metric: "cosine" as const,
    };
  },
  returns: v.object({
    dimension: vSupportedDimension,
    count: v.number(),
    metric: v.literal("cosine"),
  }),
});
268 |
// Delete an index's vectors by repeatedly paging through the backing table
// and deleting each page until done. Missing index is a warning, not an error.
export const deleteIndex = action({
  args: { indexName: v.string() },
  handler: async (ctx, { indexName }) => {
    const index = await ctx.runQuery(
      internal.vector.vector.getIndexMetadataQuery,
      {
        indexName,
      },
    );
    if (!index) {
      console.warn(`Index ${indexName} not found, not deleting...`);
      return;
    }
    // NOTE(review): deletePage paginates the whole embeddings table without
    // filtering on indexName, so this appears to delete documents belonging
    // to every logical index that shares this dimension's table — confirm
    // whether that is intended.
    // NOTE(review): the indexTableMap entry itself is never removed, so the
    // "deleted" index will still appear in listIndexes/describeIndex.
    let cursor: string | null = null;
    while (true) {
      const results: PageResult = await ctx.runMutation(
        internal.vector.vector.deletePage,
        {
          // Despite the parameter name, deletePage expects the backing
          // *table* name here (e.g. "embeddings_1536").
          indexName: index.tableName,
          cursor,
        },
      );
      if (results.isDone) break;
      cursor = results.continueCursor;
    }
  },
  returns: v.null(),
});
297 |
// One step of paged deletion: whether the table is exhausted, and the
// cursor to continue from if not.
type PageResult = {
  isDone: boolean;
  continueCursor: string;
};
302 |
// Delete one page (up to 1000 documents) of the given embeddings table.
// Called in a loop by deleteIndex until isDone.
export const deletePage = internalMutation({
  args: {
    // NOTE: despite the name, this is the backing *table* name
    // (e.g. "embeddings_1536"), not the logical index name.
    indexName: vSupportedTableName,
    cursor: v.union(v.string(), v.null()),
  },
  handler: async (ctx, { indexName, cursor }): Promise => {
    // Recover the dimension from the "embeddings_<dims>" naming convention.
    const dimensions = parseInt(indexName.split("_")[1]) as SupportedDimension;
    if (!SUPPORTED_DIMENSIONS.includes(dimensions)) {
      throw new Error("Invalid index name");
    }
    // NOTE(review): this deletes every row in the table regardless of which
    // logical index it belongs to — verify callers intend table-wide deletion.
    const docs = await paginator(ctx.db, schema).query(indexName).paginate({
      cursor,
      numItems: 1000,
    });
    await Promise.all(docs.page.map((doc) => ctx.db.delete(doc._id)));
    return {
      isDone: docs.isDone,
      continueCursor: docs.continueCursor,
    };
  },
  returns: v.object({
    isDone: v.boolean(),
    continueCursor: v.string(),
  }),
});
328 |
--------------------------------------------------------------------------------
/src/component/storage/messages.ts:
--------------------------------------------------------------------------------
1 | import { v } from "convex/values";
2 | import type { Doc } from "../_generated/dataModel.js";
3 | import { mutation, query } from "../_generated/server.js";
4 | import {
5 | type SerializedMessage,
6 | type SerializedThread,
7 | vSerializedMessage,
8 | vSerializedThread,
9 | } from "../../mapping/index.js";
10 | import { paginator } from "convex-helpers/server/pagination";
11 | import schema from "../schema.js";
12 | import { makeConsole } from "../logger.js";
13 |
14 | function threadToSerializedMastra(thread: Doc<"threads">): SerializedThread {
15 | const { id, title, metadata, resourceId, createdAt, updatedAt } = thread;
16 | return { id, title, metadata, resourceId, createdAt, updatedAt };
17 | }
18 |
19 | export const getThreadById = query({
20 | args: { threadId: v.string() },
21 | handler: async (ctx, args) => {
22 | const console = await makeConsole(ctx);
23 | console.debug(`Getting thread by id ${args.threadId}`);
24 | const thread = await ctx.db
25 | .query("threads")
26 | .withIndex("id", (q) => q.eq("id", args.threadId))
27 | .unique();
28 | if (!thread) {
29 | console.debug(`Thread ${args.threadId} not found`);
30 | return null;
31 | }
32 | return threadToSerializedMastra(thread);
33 | },
34 | returns: v.union(vSerializedThread, v.null()),
35 | });
36 |
37 | export const getThreadsByResourceId = query({
38 | args: {
39 | resourceId: v.string(),
40 | cursor: v.optional(v.union(v.string(), v.null())),
41 | },
42 | handler: async (
43 | ctx,
44 | args,
45 | ): Promise<{
46 | threads: SerializedThread[];
47 | continueCursor: string;
48 | isDone: boolean;
49 | }> => {
50 | const console = await makeConsole(ctx);
51 | console.debug(`Getting threads by resource id ${args.resourceId}`);
52 | const threads = await paginator(ctx.db, schema)
53 | .query("threads")
54 | .withIndex("resourceId", (q) => q.eq("resourceId", args.resourceId))
55 | .paginate({
56 | numItems: 100,
57 | cursor: args.cursor ?? null,
58 | });
59 | console.debug(`Got ${threads.page.length} threads`);
60 | return {
61 | threads: threads.page.map(threadToSerializedMastra),
62 | continueCursor: threads.continueCursor,
63 | isDone: threads.isDone,
64 | };
65 | },
66 | returns: v.object({
67 | threads: v.array(vSerializedThread),
68 | continueCursor: v.string(),
69 | isDone: v.boolean(),
70 | }),
71 | });
72 |
73 | export const saveThread = mutation({
74 | args: { thread: vSerializedThread },
75 | handler: async (ctx, args) => {
76 | const console = await makeConsole(ctx);
77 | console.debug(`Saving thread ${args.thread.id}`);
78 | await ctx.db.insert("threads", args.thread);
79 | },
80 | returns: v.null(),
81 | });
82 |
83 | export const updateThread = mutation({
84 | args: {
85 | threadId: v.string(),
86 | title: v.optional(v.string()),
87 | metadata: v.optional(v.record(v.string(), v.any())),
88 | },
89 | handler: async (ctx, args) => {
90 | const console = await makeConsole(ctx);
91 | console.debug(`Updating thread ${args.threadId}`);
92 | const thread = await ctx.db
93 | .query("threads")
94 | .withIndex("id", (q) => q.eq("id", args.threadId))
95 | .unique();
96 | if (!thread) {
97 | throw new Error(`Thread ${args.threadId} not found`);
98 | }
99 | if (args.title) {
100 | console.debug(`Updating title for thread ${args.threadId}`);
101 | await ctx.db.patch(thread._id, {
102 | title: args.title,
103 | updatedAt: Date.now(),
104 | });
105 | }
106 | if (args.metadata) {
107 | console.debug(`Updating metadata for thread ${args.threadId}`);
108 | await ctx.db.patch(thread._id, {
109 | metadata: args.metadata,
110 | updatedAt: Date.now(),
111 | });
112 | }
113 | return threadToSerializedMastra(thread);
114 | },
115 | returns: vSerializedThread,
116 | });
117 |
118 | export const deleteThread = mutation({
119 | args: { threadId: v.string() },
120 | handler: async (ctx, args) => {
121 | const console = await makeConsole(ctx);
122 | console.debug(`Deleting thread ${args.threadId}`);
123 | const thread = await ctx.db
124 | .query("threads")
125 | .withIndex("id", (q) => q.eq("id", args.threadId))
126 | .unique();
127 | if (!thread) {
128 | throw new Error(`Thread ${args.threadId} not found`);
129 | }
130 | await ctx.db.delete(thread._id);
131 | },
132 | returns: v.null(),
133 | });
134 |
135 | // const vMemoryConfig = v.object({
136 | // lastMessages: v.optional(v.union(v.number(), v.literal(false))),
137 | // semanticRecall: v.optional(
138 | // v.union(
139 | // v.boolean(),
140 | // v.object({
141 | // topK: v.number(),
142 | // messageRange: v.union(
143 | // v.number(),
144 | // v.object({ before: v.number(), after: v.number() }),
145 | // ),
146 | // }),
147 | // ),
148 | // ),
149 | // workingMemory: v.optional(
150 | // v.object({
151 | // enabled: v.boolean(),
152 | // template: v.optional(v.string()),
153 | // use: v.optional(
154 | // v.union(v.literal("text-stream"), v.literal("tool-call")),
155 | // ),
156 | // }),
157 | // ),
158 | // threads: v.optional(
159 | // v.object({
160 | // generateTitle: v.optional(v.boolean()),
161 | // }),
162 | // ),
163 | // });
// Validator for Mastra's message-selection options:
// - vectorSearchString: semantic-recall query string (not used in this file)
// - last: how many most-recent messages to take (falsy values fall back to
//   DEFAULT_MESSAGES_LIMIT below)
// - include: specific message ids, each optionally with a window of
//   surrounding messages before/after it
const vSelectBy = v.object({
  vectorSearchString: v.optional(v.string()),
  last: v.optional(v.union(v.number(), v.literal(false))),
  include: v.optional(
    v.array(
      v.object({
        id: v.string(),
        withPreviousMessages: v.optional(v.number()),
        withNextMessages: v.optional(v.number()),
      }),
    ),
  ),
});
177 |
178 | function messageToSerializedMastra(
179 | message: Doc<"messages">,
180 | ): SerializedMessage {
181 | const { threadOrder: _, _id, _creationTime, ...serialized } = message;
182 | return {
183 | ...serialized,
184 | resourceId: serialized.resourceId ?? "",
185 | };
186 | }
187 |
188 | const DEFAULT_MESSAGES_LIMIT = 40; // What pg & upstash do too.
189 |
190 | export const getMessagesPage = query({
191 | args: {
192 | threadId: v.string(),
193 | selectBy: v.optional(vSelectBy),
194 | // Unimplemented and as far I can tell no storage provider has either.
195 | // memoryConfig: v.optional(vMemoryConfig),
196 | },
197 | handler: async (ctx, args): Promise => {
198 | const console = await makeConsole(ctx);
199 | console.debug(`Getting messages page for thread ${args.threadId}`);
200 | const messages = await ctx.db
201 | .query("messages")
202 | .withIndex("threadId", (q) => q.eq("threadId", args.threadId))
203 | .order("desc")
204 | .take(args.selectBy?.last ? args.selectBy.last : DEFAULT_MESSAGES_LIMIT);
205 |
206 | const handled: boolean[] = [];
207 | const toFetch: number[] = [];
208 | for (const m of messages) {
209 | handled[m.threadOrder] = true;
210 | }
211 | await Promise.all(
212 | args.selectBy?.include?.map(async (range) => {
213 | const includeDoc = await ctx.db
214 | .query("messages")
215 | .withIndex("id", (q) => q.eq("id", range.id))
216 | .unique();
217 | if (!includeDoc) {
218 | console.warn(`Message ${range.id} not found`);
219 | return;
220 | }
221 | if (!range.withPreviousMessages && !range.withNextMessages) {
222 | messages.push(includeDoc);
223 | return;
224 | }
225 | const order = includeDoc.threadOrder;
226 | for (
227 | let i = order - (range.withPreviousMessages ?? 0);
228 | i < order + (range.withNextMessages ?? 0);
229 | i++
230 | ) {
231 | if (!handled[i]) {
232 | toFetch.push(i);
233 | handled[i] = true;
234 | }
235 | }
236 | }) ?? [],
237 | );
238 | console.debug(`Need to fetch ${toFetch.length} messages`);
239 | // sort and find unique numbers in toFetch
240 | const uniqueToFetch = [...new Set(toFetch)].sort();
241 | console.debug(`Unique to fetch ${uniqueToFetch}`);
242 | // find contiguous ranges in uniqueToFetch
243 | const ranges: { start: number; end: number }[] = [];
244 | for (let i = 0; i < uniqueToFetch.length; i++) {
245 | const start = uniqueToFetch[i];
246 | let end = start;
247 | while (i + 1 < uniqueToFetch.length && uniqueToFetch[i + 1] === end + 1) {
248 | end++;
249 | i++;
250 | }
251 | ranges.push({ start, end });
252 | }
253 | console.debug(`Ranges to fetch ${ranges}`);
254 | const fetched = (
255 | await Promise.all(
256 | ranges.map(async (range) => {
257 | return await ctx.db
258 | .query("messages")
259 | .withIndex("threadId", (q) =>
260 | q
261 | .eq("threadId", args.threadId)
262 | .gte("threadOrder", range.start)
263 | .lte("threadOrder", range.end),
264 | )
265 | .collect();
266 | }),
267 | )
268 | ).flat();
269 | console.debug(`Fetched ${fetched.length} messages`);
270 | messages.push(...fetched);
271 | console.debug(`Total messages ${messages.length}`);
272 | return messages.map(messageToSerializedMastra);
273 | },
274 | returns: v.array(vSerializedMessage),
275 | });
276 |
277 | export const saveMessages = mutation({
278 | args: { messages: v.array(vSerializedMessage) },
279 | handler: async (ctx, args) => {
280 | const console = await makeConsole(ctx);
281 | console.debug(`Saving messages ${args.messages.length}`);
282 | const messagesByThreadId: Record = {};
283 | for (const message of args.messages) {
284 | messagesByThreadId[message.threadId] = [
285 | ...(messagesByThreadId[message.threadId] ?? []),
286 | message,
287 | ];
288 | }
289 | for (const threadId in messagesByThreadId) {
290 | const lastMessage = await ctx.db
291 | .query("messages")
292 | .withIndex("threadId", (q) => q.eq("threadId", threadId))
293 | .order("desc")
294 | .first();
295 | let threadOrder = lastMessage?.threadOrder ?? 0;
296 | for (const message of messagesByThreadId[threadId]) {
297 | threadOrder++;
298 | await ctx.db.insert("messages", {
299 | ...message,
300 | threadOrder,
301 | });
302 | }
303 | }
304 | },
305 | returns: v.null(),
306 | });
307 |
308 | // eslint-disable-next-line @typescript-eslint/no-unused-vars
309 | const console = "THIS IS A REMINDER TO USE makeConsole";
310 |
--------------------------------------------------------------------------------
/src/client/storage.ts:
--------------------------------------------------------------------------------
1 | // Workaround to aid in bundling, to be combined with adding @libsql/client to
2 | // the externalPackages in a convex.json file in the root of your project.
3 | export * as libsql from "@libsql/client";
4 | export { InMemoryStorage } from "./in-memory.js";
5 |
6 | import type {
7 | MessageType,
8 | StorageThreadType,
9 | WorkflowRuns,
10 | } from "@mastra/core";
11 | import type {
12 | EvalRow,
13 | StorageColumn,
14 | StorageGetMessagesArg,
15 | } from "@mastra/core/storage";
16 | import {
17 | MastraStorage,
18 | TABLE_EVALS,
19 | TABLE_MESSAGES,
20 | type TABLE_NAMES,
21 | TABLE_THREADS,
22 | TABLE_TRACES,
23 | TABLE_WORKFLOW_SNAPSHOT,
24 | } from "@mastra/core/storage";
25 | import type {
26 | GenericActionCtx,
27 | GenericDataModel,
28 | GenericMutationCtx,
29 | GenericQueryCtx,
30 | } from "convex/server";
31 | import {
32 | mapMastraToSerialized,
33 | mapSerializedToMastra,
34 | mastraToConvexTableNames,
35 | type SerializedMessage,
36 | type SerializedThread,
37 | type SerializedTrace,
38 | } from "../mapping/index.js";
39 | import type { ComponentApi } from "../component/_generated/component.js";
40 |
41 | export class ConvexStorage extends MastraStorage {
42 | ctx: Ctx<"action" | "mutation" | "query"> | undefined;
43 | api: ComponentApi["storage"];
44 | constructor(component: ComponentApi, options?: { name?: string }) {
45 | super({ name: options?.name ?? "ConvexStorage" });
46 | this.api = component.storage;
47 | this.shouldCacheInit = true;
48 | }
49 |
  /**
   * Set the context for the storage. Must be called before using the storage
   * in a Convex function. If you are using the storage via the API, you do not
   * need to call this.
   *
   * Pass `undefined` to clear a previously-set context.
   *
   * @param ctx - The context to use for the storage.
   */
  async setCtx(ctx: Ctx<"action" | "mutation" | "query"> | undefined) {
    this.ctx = ctx;
  }
60 |
  // Return the stored ctx after verifying it can perform the requested kind
  // of call. The switch intentionally falls through: an "action" context is
  // also checked for runMutation and runQuery, and a "mutation" context for
  // runQuery.
  // NOTE(review): the generic parameter appears lost in this copy (`kind: T`
  // with no declaration of T) — presumably
  // `getApi<T extends "action" | "mutation" | "query">(kind: T): Ctx<T>`;
  // confirm against the original source.
  getApi(kind: T): Ctx {
    // TODO: get http client if that's specified
    if (!this.ctx) {
      throw new Error(
        "Context not set: ensure you're calling storage.setCtx" +
          " before using the storage.",
      );
    }
    switch (kind) {
      case "action":
        if (!(this.ctx as GenericActionCtx).runAction) {
          throw new Error("Context must be an action context to do this");
        }
        // fallthrough
      case "mutation":
        if (!(this.ctx as GenericMutationCtx).runMutation) {
          throw new Error("Context doesn't have a way to run mutations");
        }
        // fallthrough
      case "query":
        if (!(this.ctx as GenericQueryCtx).runQuery) {
          throw new Error("Context is not a query context");
        }
    }
    return this.ctx as Ctx;
  }
87 |
  // Stub: always reports zero workflow runs regardless of the filters.
  async getWorkflowRuns(args?: {
    namespace?: string;
    workflowName?: string;
    fromDate?: Date;
    toDate?: Date;
    limit?: number;
    offset?: number;
  }): Promise {
    // TODO: implement
    return { runs: [], total: 0 };
  }
99 |
  // Tables are pre-defined in the component's Convex schema, so "creating"
  // one only verifies that the Mastra table name maps to a known table.
  async createTable(args: {
    tableName: TABLE_NAMES;
    schema: Record;
  }): Promise {
    const convexTableName = mastraToConvexTableNames[args.tableName];
    if (!convexTableName) {
      throw new Error(`Unsupported table name: ${args.tableName}`);
    }
    // TODO: we could do more serious validation against the defined schema
    // validateTableSchema(convexTableName, tableSchema);
    return;
  }
112 |
113 | async clearTable(args: { tableName: TABLE_NAMES }): Promise {
114 | const ctx = this.getApi("action");
115 | const tableName = mastraToConvexTableNames[args.tableName];
116 | await ctx.runAction(this.api.storage.clearTable, { tableName });
117 | return;
118 | }
119 |
  /**
   * Inserts one row: serializes the Mastra row to its transfer-safe Convex
   * shape, then runs the component's insert mutation.
   */
  async insert(args: {
    tableName: TABLE_NAMES;
    record: Record;
  }): Promise {
    const convexRecord = mapMastraToSerialized(args.tableName, args.record);
    const tableName = mastraToConvexTableNames[args.tableName];
    const ctx = this.getApi("mutation");
    await ctx.runMutation(this.api.storage.insert, {
      tableName,
      document: convexRecord,
    });
    return;
  }
133 |
  /**
   * Inserts many rows in a single mutation (so they commit atomically on the
   * Convex side). Each record is serialized like in insert().
   */
  async batchInsert(args: {
    tableName: TABLE_NAMES;
    records: Record[];
  }): Promise {
    const ctx = this.getApi("mutation");
    const tableName = mastraToConvexTableNames[args.tableName];
    await ctx.runMutation(this.api.storage.batchInsert, {
      tableName,
      records: args.records.map((record) =>
        mapMastraToSerialized(args.tableName, record),
      ),
    });
    return;
  }
148 |
  /**
   * Loads a single row by key values.
   *
   * Workflow snapshots are special-cased: they are fetched through the
   * dedicated loadSnapshot query (keyed on run_id + workflow_name) and
   * deserialized back to a Mastra row. All other tables go through the
   * generic load query, whose result is returned as-is.
   *
   * NOTE(review): the generic parameter (presumably load<R>(...): Promise<R | null>)
   * appears to have been lost in extraction — confirm against upstream.
   */
  async load(args: {
    tableName: TABLE_NAMES;
    keys: Record;
  }): Promise {
    const ctx = this.getApi("query");
    const tableName = mastraToConvexTableNames[args.tableName];
    if (args.tableName === TABLE_WORKFLOW_SNAPSHOT) {
      const { run_id, workflow_name } = args.keys;
      if (!run_id || !workflow_name) {
        throw new Error("Expected run_id and workflow_name to load a snapshot");
      }
      const snapshot = await ctx.runQuery(this.api.storage.loadSnapshot, {
        runId: run_id,
        workflowName: workflow_name,
      });
      if (!snapshot) {
        return null;
      }
      return mapSerializedToMastra(args.tableName, snapshot) as R;
    }
    // Generic path: the server query matches on the given keys directly.
    return await ctx.runQuery(this.api.storage.load, {
      tableName,
      keys: args.keys,
    });
  }
174 |
  /**
   * Fetches one thread by id, mapped back to a Mastra row.
   * Returns null when no such thread exists.
   */
  async getThreadById({
    threadId,
  }: {
    threadId: string;
  }): Promise {
    const ctx = this.getApi("query");
    const thread = await ctx.runQuery(this.api.messages.getThreadById, {
      threadId,
    });
    if (!thread) {
      return null;
    }
    return mapSerializedToMastra(TABLE_THREADS, thread);
  }
189 |
  /**
   * Returns every thread owned by a resource, draining the server-side
   * pagination (continueCursor / isDone) before mapping to Mastra rows.
   * Beware: unbounded — fetches all pages in one call.
   */
  async getThreadsByResourceId({
    resourceId,
  }: {
    resourceId: string;
  }): Promise {
    const ctx = this.getApi("query");
    const threads: SerializedThread[] = [];
    let cursor: string | null = null;
    while (true) {
      const page: {
        threads: SerializedThread[];
        continueCursor: string;
        isDone: boolean;
      } = await ctx.runQuery(this.api.messages.getThreadsByResourceId, {
        resourceId,
        cursor,
      });
      threads.push(...page.threads);
      if (page.isDone) {
        break;
      }
      cursor = page.continueCursor;
    }
    return threads.map((thread) =>
      mapSerializedToMastra(TABLE_THREADS, thread),
    );
  }
217 |
  /**
   * Persists a thread (the component mutation decides insert-vs-update
   * semantics) and echoes the input back, as the Mastra contract expects.
   */
  async saveThread({
    thread,
  }: {
    thread: StorageThreadType;
  }): Promise {
    const ctx = this.getApi("mutation");
    await ctx.runMutation(this.api.messages.saveThread, {
      thread: mapMastraToSerialized(TABLE_THREADS, thread),
    });
    return thread;
  }
229 |
  /**
   * Updates a thread's title and metadata and returns the updated thread
   * as stored on the server (not the inputs), mapped back to a Mastra row.
   */
  async updateThread({
    id,
    title,
    metadata,
  }: {
    id: string;
    title: string;
    metadata: Record;
  }): Promise {
    const ctx = this.getApi("mutation");
    const thread = await ctx.runMutation(this.api.messages.updateThread, {
      threadId: id,
      title,
      metadata,
    });
    return mapSerializedToMastra(TABLE_THREADS, thread);
  }
247 |
  /** Deletes a thread by id via the component mutation. */
  async deleteThread({ threadId }: { threadId: string }): Promise {
    const ctx = this.getApi("mutation");
    await ctx.runMutation(this.api.messages.deleteThread, { threadId });
    return;
  }
253 |
  /**
   * Fetches messages for a thread. Selection rules (selectBy) are applied
   * server-side by getMessagesPage; results are mapped back to Mastra rows.
   *
   * NOTE(review): generic parameter and return type arguments (e.g.
   * getMessages<T ...>(...): Promise<T[]>) appear lost in extraction.
   */
  async getMessages({
    threadId,
    selectBy,
  }: StorageGetMessagesArg): Promise {
    const ctx = this.getApi("query");
    const messages: SerializedMessage[] = await ctx.runQuery(
      this.api.messages.getMessagesPage,
      {
        threadId,
        selectBy,
        // memoryConfig: threadConfig,
      },
    );
    return messages.map((message) =>
      mapSerializedToMastra(TABLE_MESSAGES, message),
    ) as T[];
  }
271 |
  /**
   * Persists a batch of messages in one mutation and echoes the inputs back,
   * as the Mastra contract expects.
   */
  async saveMessages({
    messages,
  }: {
    messages: MessageType[];
  }): Promise {
    const ctx = this.getApi("mutation");
    await ctx.runMutation(this.api.messages.saveMessages, {
      messages: messages.map((message) =>
        mapMastraToSerialized(TABLE_MESSAGES, message),
      ),
    });
    return messages;
  }
285 |
  /**
   * Fetches eval rows for an agent, optionally filtered server-side by
   * "test" vs "live", mapped back to Mastra rows.
   */
  async getEvalsByAgentName(
    agentName: string,
    type?: "test" | "live",
  ): Promise {
    const ctx = this.getApi("query");
    const evals = await ctx.runQuery(this.api.storage.getEvalsByAgentName, {
      agentName,
      type,
    });
    return evals.map((e) => mapSerializedToMastra(TABLE_EVALS, e));
  }
297 |
298 | async getTraces(options?: {
299 | name?: string;
300 | scope?: string;
301 | page: number;
302 | perPage: number;
303 | attributes?: Record;
304 | }): Promise {
305 | const { name, scope, page, perPage, attributes } = options ?? {};
306 | const traces: SerializedTrace[] = [];
307 | let cursor: string | null = null;
308 | const numItems = perPage ?? 100;
309 | const pageNum = page ?? 0;
310 | while (true) {
311 | const ctx = this.getApi("query");
312 | const results: {
313 | isDone: boolean;
314 | continuCursor: string;
315 | page: SerializedTrace[];
316 | } = await ctx.runQuery(this.api.storage.getTracesPage, {
317 | name,
318 | scope,
319 | cursor,
320 | numItems,
321 | attributes,
322 | });
323 | traces.push(...results.page);
324 | // Note: we'll refetch from the beginning on every page.
325 | if (results.isDone || traces.length >= numItems * pageNum) {
326 | break;
327 | }
328 | cursor = results.continuCursor;
329 | }
330 | return traces
331 | .slice(pageNum * numItems, (pageNum + 1) * numItems)
332 | .map((trace) => mapSerializedToMastra(TABLE_TRACES, trace));
333 | }
334 | }
335 |
/**
 * Maps a capability kind ("action" | "mutation" | "query") to the
 * corresponding Convex context type; used by getApi's return type.
 * NOTE(review): the type parameters (presumably Ctx<T extends "action" | ...>
 * and DataModel arguments on the Generic*Ctx types) appear to have been lost
 * in extraction — confirm against upstream.
 */
type Ctx = T extends "action"
  ? GenericActionCtx
  : T extends "mutation"
    ? GenericMutationCtx
    : T extends "query"
      ? GenericQueryCtx
      : never;
343 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/src/mapping/index.ts:
--------------------------------------------------------------------------------
1 | import type {
2 | EvalRow,
3 | MessageType,
4 | StorageThreadType,
5 | WorkflowRow,
6 | } from "@mastra/core";
7 | import type {
8 | AssistantContent,
9 | DataContent,
10 | ToolContent,
11 | UserContent,
12 | } from "ai";
13 | import { type Infer, v } from "convex/values";
14 | import { type SerializeUrlsAndUint8Arrays, vContent } from "../ai/types.js";
15 |
// Mastra's logical table names, and their union type.
export const TABLE_WORKFLOW_SNAPSHOT = "mastra_workflow_snapshot";
export const TABLE_EVALS = "mastra_evals";
export const TABLE_MESSAGES = "mastra_messages";
export const TABLE_THREADS = "mastra_threads";
export const TABLE_TRACES = "mastra_traces";
export type TABLE_NAMES =
  | typeof TABLE_WORKFLOW_SNAPSHOT
  | typeof TABLE_EVALS
  | typeof TABLE_MESSAGES
  | typeof TABLE_THREADS
  | typeof TABLE_TRACES;

// Define the runtime constants first
// Mastra table name -> Convex table name (the component schema's tables).
export const mastraToConvexTableNames = {
  [TABLE_WORKFLOW_SNAPSHOT]: "snapshots",
  [TABLE_EVALS]: "evals",
  [TABLE_MESSAGES]: "messages",
  [TABLE_THREADS]: "threads",
  [TABLE_TRACES]: "traces",
} as const;

// Inverse mapping: Convex table name -> Mastra table name.
export const convexToMastraTableNames = {
  snapshots: TABLE_WORKFLOW_SNAPSHOT,
  evals: TABLE_EVALS,
  messages: TABLE_MESSAGES,
  threads: TABLE_THREADS,
  traces: TABLE_TRACES,
} as const;

// Then derive the types from the constants
export type MastraToConvexTableMap = typeof mastraToConvexTableNames;
export type ConvexToMastraTableMap = typeof convexToMastraTableNames;

// Helper types to get table names
// NOTE(review): the type parameters (presumably
// ConvexTableName<T extends TABLE_NAMES> and MastraTableName<T extends ...>)
// appear to have been lost in extraction — confirm against upstream.
export type ConvexTableName = MastraToConvexTableMap[T];
export type MastraTableName =
  ConvexToMastraTableMap[T];

// Type that maps Mastra table names to their row types
export type MastraRowTypeMap = {
  [TABLE_WORKFLOW_SNAPSHOT]: WorkflowRow;
  [TABLE_EVALS]: EvalRow;
  [TABLE_MESSAGES]: MessageType;
  [TABLE_THREADS]: StorageThreadType;

  [TABLE_TRACES]: any; // Replace with proper type when available
};
63 |
// Dates are stored in Convex as epoch milliseconds.
export type SerializedTimestamp = number;
const vSerializedTimestamp = v.number();

// Snapshot row with Date fields flattened to timestamps, the snapshot
// JSON-encoded as a string, and snake_case columns renamed to camelCase.
export type SerializedSnapshot = Omit<
  WorkflowRow,
  "created_at" | "updated_at" | "snapshot" | "workflow_name" | "run_id"
> & {
  createdAt: SerializedTimestamp;
  updatedAt: SerializedTimestamp;
  snapshot: string;
  workflowName: string;
  runId: string;
};

// Eval row with its createdAt flattened to a timestamp.
// NOTE(review): Omit's type arguments (presumably <EvalRow, "createdAt">)
// appear to have been lost in extraction — confirm against upstream.
export type SerializedEval = Omit & {
  createdAt: SerializedTimestamp;
};

// Message content with URL / Uint8Array parts replaced by transfer-safe
// values (strings / ArrayBuffers).
export type SerializedContent = SerializeUrlsAndUint8Arrays<
  MessageType["content"]
>;

// Convex validator for a stored message document.
export const vSerializedMessage = v.object({
  id: v.string(),
  threadId: v.string(),
  resourceId: v.string(),
  content: vContent,
  role: v.union(
    v.literal("system"),
    v.literal("user"),
    v.literal("assistant"),
    v.literal("tool"),
  ),
  type: v.union(
    v.literal("text"),
    v.literal("tool-call"),
    v.literal("tool-result"),
  ),
  createdAt: v.number(),
});

// NOTE(review): Infer's type argument (presumably <typeof vSerializedMessage>)
// appears to have been lost in extraction — confirm against upstream.
export type SerializedMessage = Infer;
// Omit & {
//   createdAt: SerializedTimestamp;
//   content: SerializedContent;
// };

// Thread row with Date fields flattened to timestamps, plus its validator.
export type SerializedThread = Omit<
  StorageThreadType,
  "createdAt" | "updatedAt"
> & {
  createdAt: SerializedTimestamp;
  updatedAt: SerializedTimestamp;
};
export const vSerializedThread = v.object({
  id: v.string(),
  title: v.optional(v.string()),
  metadata: v.optional(v.record(v.string(), v.any())),
  resourceId: v.string(),
  createdAt: vSerializedTimestamp,
  updatedAt: vSerializedTimestamp,
});

// Inferring from the table schema created in
// @mastra/core:src/storage/base.ts
export type SerializedTrace = {
  id: string;
  parentSpanId?: string | null;
  traceId: string;
  name: string;
  scope: string;
  kind: number | bigint;
  events?: any[];
  links?: any[];
  status?: any;
  attributes?: Record;
  startTime: bigint;
  endTime: bigint;
  other?: any;
  createdAt: SerializedTimestamp;
};

// Type that maps Convex table names to their document types
export type SerializedTypeMap = {
  [TABLE_WORKFLOW_SNAPSHOT]: SerializedSnapshot;
  [TABLE_EVALS]: SerializedEval;
  [TABLE_MESSAGES]: SerializedMessage;
  [TABLE_THREADS]: SerializedThread;
  [TABLE_TRACES]: SerializedTrace;
};
154 |
155 | function serializeDateOrNow(date: string | Date | number): number {
156 | if (!date) {
157 | return Date.now();
158 | }
159 | if (typeof date === "number") {
160 | return date;
161 | }
162 | if (date instanceof Date) {
163 | return Number(date);
164 | }
165 | return Number(new Date(date));
166 | }
167 |
/**
 * Maps a Mastra row to a Convex document
 * @param tableName Mastra table name
 * @param mastraRow Row data from Mastra
 * @returns Properly typed Convex document
 * @throws if tableName is not one of the known Mastra tables
 *
 * NOTE(review): the generic parameter list (presumably
 * <T extends TABLE_NAMES>) appears lost in extraction — confirm upstream.
 */
export function mapMastraToSerialized(
  tableName: T,
  mastraRow: MastraRowTypeMap[T],
): SerializedTypeMap[T] {
  switch (tableName) {
    case TABLE_WORKFLOW_SNAPSHOT: {
      const row = mastraRow as MastraRowTypeMap[typeof TABLE_WORKFLOW_SNAPSHOT];
      const serialized: SerializedSnapshot = {
        // snake_case Mastra columns become camelCase Convex fields.
        workflowName: row.workflow_name,
        runId: row.run_id,
        // Snapshots are stored as a JSON string, not a nested document.
        snapshot: JSON.stringify(row.snapshot),
        createdAt: serializeDateOrNow(row.created_at),
        updatedAt: serializeDateOrNow(row.updated_at),
      };
      return serialized as SerializedTypeMap[T];
    }
    case TABLE_EVALS: {
      const row = mastraRow as MastraRowTypeMap[typeof TABLE_EVALS];
      const serialized: SerializedEval = {
        input: row.input,
        output: row.output,
        result: row.result,
        agentName: row.agentName,
        metricName: row.metricName,
        instructions: row.instructions,
        testInfo: row.testInfo,
        globalRunId: row.globalRunId,
        runId: row.runId,
        createdAt: serializeDateOrNow(row.createdAt),
      };
      return serialized as SerializedTypeMap[T];
    }
    case TABLE_MESSAGES: {
      const row = mastraRow as MastraRowTypeMap[typeof TABLE_MESSAGES];
      const serialized: SerializedMessage = {
        id: row.id,
        threadId: row.threadId,
        resourceId: row.resourceId,
        // URLs / typed arrays in content parts become transfer-safe values.
        content: serializeContent(row.content),
        role: row.role,
        type: row.type,
        createdAt: serializeDateOrNow(row.createdAt),
      };
      return serialized as SerializedTypeMap[T];
    }
    case TABLE_THREADS: {
      const row = mastraRow as MastraRowTypeMap[typeof TABLE_THREADS];
      const serialized: SerializedThread = {
        id: row.id,
        title: row.title,
        metadata: row.metadata,
        resourceId: row.resourceId,
        createdAt: serializeDateOrNow(row.createdAt),
        updatedAt: serializeDateOrNow(row.updatedAt),
      };
      return serialized as SerializedTypeMap[T];
    }
    case TABLE_TRACES: {
      const row = mastraRow as MastraRowTypeMap[typeof TABLE_TRACES];
      const serialized: SerializedTrace = {
        id: row.id,
        parentSpanId: row.parentSpanId,
        name: row.name,
        traceId: row.traceId,
        scope: row.scope,
        kind: row.kind,
        attributes: row.attributes,
        status: row.status,
        events: row.events,
        links: row.links,
        other: row.other,
        startTime: row.startTime,
        endTime: row.endTime,
        createdAt: serializeDateOrNow(row.createdAt),
      };
      return serialized as SerializedTypeMap[T];
    }
    default:
      throw new Error(`Unsupported table name: ${tableName}`);
  }
}
255 |
256 | export function serializeContent(
257 | content: UserContent | AssistantContent | ToolContent,
258 | ): SerializedContent {
259 | if (typeof content === "string") {
260 | return content;
261 | }
262 | const serialized = content.map((part) => {
263 | switch (part.type) {
264 | case "image":
265 | return { ...part, image: serializeDataOrUrl(part.image) };
266 | case "file":
267 | return { ...part, file: serializeDataOrUrl(part.data) };
268 | default:
269 | return part;
270 | }
271 | });
272 | return serialized as SerializedContent;
273 | }
274 |
275 | export function deserializeContent(
276 | content: SerializedContent,
277 | ): UserContent | AssistantContent | ToolContent {
278 | if (typeof content === "string") {
279 | return content;
280 | }
281 | return content.map((part) => {
282 | switch (part.type) {
283 | case "image":
284 | return { ...part, image: deserializeUrl(part.image) };
285 | case "file":
286 | return { ...part, file: deserializeUrl(part.data) };
287 | default:
288 | return part;
289 | }
290 | }) as UserContent | AssistantContent | ToolContent;
291 | }
292 | function serializeDataOrUrl(
293 | dataOrUrl: DataContent | URL,
294 | ): ArrayBuffer | string {
295 | if (typeof dataOrUrl === "string") {
296 | return dataOrUrl;
297 | }
298 | if (dataOrUrl instanceof ArrayBuffer) {
299 | return dataOrUrl; // Already an ArrayBuffer
300 | }
301 | if (dataOrUrl instanceof URL) {
302 | return dataOrUrl.toString();
303 | }
304 | return dataOrUrl.buffer.slice(
305 | dataOrUrl.byteOffset,
306 | dataOrUrl.byteOffset + dataOrUrl.byteLength,
307 | ) as ArrayBuffer;
308 | }
309 |
310 | function deserializeUrl(urlOrString: string | ArrayBuffer): URL | DataContent {
311 | if (typeof urlOrString === "string") {
312 | if (
313 | urlOrString.startsWith("http://") ||
314 | urlOrString.startsWith("https://")
315 | ) {
316 | return new URL(urlOrString);
317 | }
318 | return urlOrString;
319 | }
320 | return urlOrString;
321 | }
322 |
/**
 * Maps a Convex document to a Mastra row
 * @param tableName Mastra table name
 * @param row Data with transfer-safe values
 * @returns Properly typed Mastra row
 * @throws if tableName is not one of the known Mastra tables
 *
 * NOTE(review): the generic parameter list (presumably
 * <T extends TABLE_NAMES>) appears lost in extraction — confirm upstream.
 */
export function mapSerializedToMastra(
  tableName: T,
  row: SerializedTypeMap[T],
): MastraRowTypeMap[T] {
  switch (tableName) {
    case TABLE_WORKFLOW_SNAPSHOT: {
      const serialized =
        row as SerializedTypeMap[typeof TABLE_WORKFLOW_SNAPSHOT];
      const workflow: WorkflowRow = {
        // camelCase Convex fields become snake_case Mastra columns.
        workflow_name: serialized.workflowName,
        run_id: serialized.runId,
        // Snapshots were stored as a JSON string; decode back to an object.
        snapshot: JSON.parse(serialized.snapshot),
        created_at: new Date(serialized.createdAt),
        updated_at: new Date(serialized.updatedAt),
      };
      return workflow;
    }
    case TABLE_EVALS: {
      const serialized = row as SerializedTypeMap[typeof TABLE_EVALS];
      const evalRow: EvalRow = {
        input: serialized.input,
        output: serialized.output,
        result: serialized.result,
        agentName: serialized.agentName,
        metricName: serialized.metricName,
        instructions: serialized.instructions,
        testInfo: serialized.testInfo,
        globalRunId: serialized.globalRunId,
        runId: serialized.runId,
        // EvalRow carries createdAt as an ISO string, unlike the Date fields
        // on messages/threads.
        createdAt: new Date(serialized.createdAt).toISOString(),
      };
      return evalRow as MastraRowTypeMap[T];
    }
    case TABLE_MESSAGES: {
      const serialized = row as SerializedTypeMap[typeof TABLE_MESSAGES];
      const messageRow: MessageType = {
        id: serialized.id,
        threadId: serialized.threadId,
        resourceId: serialized.resourceId,
        content: serialized.content,
        role: serialized.role,
        type: serialized.type,
        createdAt: new Date(serialized.createdAt),
      };
      return messageRow as MastraRowTypeMap[T];
    }
    case TABLE_THREADS: {
      const serialized = row as SerializedTypeMap[typeof TABLE_THREADS];
      const threadRow: StorageThreadType = {
        id: serialized.id,
        title: serialized.title,
        metadata: serialized.metadata,
        resourceId: serialized.resourceId,
        createdAt: new Date(serialized.createdAt),
        updatedAt: new Date(serialized.updatedAt),
      };
      return threadRow as MastraRowTypeMap[T];
    }
    case TABLE_TRACES: {
      const traceDoc = row as SerializedTypeMap[typeof TABLE_TRACES];
      // Note: createdAt is intentionally dropped here; the trace row type is
      // still `any` upstream.
      return {
        id: traceDoc.id,
        parentSpanId: traceDoc.parentSpanId,
        name: traceDoc.name,
        traceId: traceDoc.traceId,
        scope: traceDoc.scope,
        kind: traceDoc.kind,
        attributes: traceDoc.attributes,
        status: traceDoc.status,
        events: traceDoc.events,
        links: traceDoc.links,
        other: traceDoc.other,
        startTime: traceDoc.startTime,
        endTime: traceDoc.endTime,
      } as MastraRowTypeMap[T];
    }
    default:
      throw new Error(`Unsupported table name: ${tableName}`);
  }
}
409 |
--------------------------------------------------------------------------------
/src/client/in-memory.ts:
--------------------------------------------------------------------------------
1 | import type {
2 | MessageType,
3 | StorageThreadType,
4 | WorkflowRuns,
5 | } from "@mastra/core";
6 | import type {
7 | EvalRow,
8 | StorageColumn,
9 | StorageGetMessagesArg,
10 | } from "@mastra/core/storage";
11 | import {
12 | MastraStorage,
13 | TABLE_EVALS,
14 | TABLE_MESSAGES,
15 | type TABLE_NAMES,
16 | TABLE_THREADS,
17 | TABLE_TRACES,
18 | TABLE_WORKFLOW_SNAPSHOT,
19 | } from "@mastra/core/storage";
20 |
// A stored row: free-form column-name -> value map.
// NOTE(review): Record's type arguments (presumably <string, any>) appear
// to have been lost in extraction — confirm against upstream.
type Row = Record;
22 |
23 | /**
24 | * InMemoryStorage is a simple in-memory storage implementation for Mastra.
25 | * It is used for testing and development purposes.
26 | */
27 | export class InMemoryStorage extends MastraStorage {
  // One array of rows per Mastra table, seeded empty.
  private tables: Record = {
    [TABLE_WORKFLOW_SNAPSHOT]: [],
    [TABLE_EVALS]: [],
    [TABLE_MESSAGES]: [],
    [TABLE_THREADS]: [],
    [TABLE_TRACES]: [],
  };
  // Name of each table's primary-key column (null until createTable declares
  // one); _insert uses it to upsert instead of append.
  private primaryKeys: Record = {
    [TABLE_WORKFLOW_SNAPSHOT]: null,
    [TABLE_EVALS]: null,
    [TABLE_MESSAGES]: null,
    [TABLE_THREADS]: null,
    [TABLE_TRACES]: null,
  };
  constructor() {
    super({ name: "InMemoryStorage" });
  }
45 |
46 | async createTable({
47 | tableName,
48 | schema,
49 | }: {
50 | tableName: TABLE_NAMES;
51 | schema: Record;
52 | }) {
53 | for (const [key, value] of Object.entries(schema)) {
54 | if (value.primaryKey) {
55 | this.primaryKeys[tableName] = key;
56 | }
57 | break;
58 | }
59 | return;
60 | }
61 |
  /** Drops all rows of the table by replacing its backing array. */
  async clearTable({ tableName }: { tableName: TABLE_NAMES }) {
    this.tables[tableName] = [];
  }
65 |
66 | // We make this a non-async function so all inserts can happen transactionally
67 | _insert(tableName: TABLE_NAMES, record: Record) {
68 | if (this.primaryKeys[tableName]) {
69 | const primaryKey = record[this.primaryKeys[tableName]!];
70 | const index = this.tables[tableName].findIndex(
71 | (record) => record[this.primaryKeys[tableName]!] === primaryKey
72 | );
73 | if (index !== -1) {
74 | this.tables[tableName][index] = record;
75 | } else {
76 | this.tables[tableName].push(record);
77 | }
78 | } else {
79 | this.tables[tableName].push(record);
80 | }
81 | }
82 |
83 | async getWorkflowRuns(args?: {
84 | namespace?: string;
85 | workflowName?: string;
86 | fromDate?: Date;
87 | toDate?: Date;
88 | limit?: number;
89 | offset?: number;
90 | }): Promise {
91 | // TODO: implement
92 | return { runs: [], total: 0 };
93 | }
94 |
95 | async insert({
96 | tableName,
97 | record,
98 | }: {
99 | tableName: TABLE_NAMES;
100 | record: Record;
101 | }) {
102 | this._insert(tableName, record);
103 | }
104 |
105 | async batchInsert({
106 | tableName,
107 | records,
108 | }: {
109 | tableName: TABLE_NAMES;
110 | records: Record[];
111 | }) {
112 | records.forEach((record) => this._insert(tableName, record));
113 | }
114 |
115 | async load({
116 | tableName,
117 | keys,
118 | }: {
119 | tableName: TABLE_NAMES;
120 | keys: Record;
121 | }): Promise {
122 | return this.tables[tableName].find((record) =>
123 | Object.entries(keys).every(([key, value]) => record[key] === value)
124 | ) as R | null;
125 | }
126 |
127 | async getThreadById({
128 | threadId,
129 | }: {
130 | threadId: string;
131 | }): Promise {
132 | return this.tables[TABLE_THREADS].find(
133 | (record) => record.id === threadId
134 | ) as StorageThreadType | null;
135 | }
136 |
137 | async getThreadsByResourceId({
138 | resourceId,
139 | }: {
140 | resourceId: string;
141 | }): Promise {
142 | return this.tables[TABLE_THREADS].filter(
143 | (record) => record.resourceId === resourceId
144 | ) as StorageThreadType[];
145 | }
146 |
147 | async saveThread({
148 | thread,
149 | }: {
150 | thread: StorageThreadType;
151 | }): Promise {
152 | this._insert(TABLE_THREADS, thread);
153 | return thread;
154 | }
155 |
156 | async updateThread({
157 | id,
158 | title,
159 | metadata,
160 | }: {
161 | id: string;
162 | title: string;
163 | metadata: Record;
164 | }): Promise {
165 | const index = this.tables[TABLE_THREADS].findIndex(
166 | (record) => record.id === id
167 | );
168 | if (index === -1) {
169 | throw new Error(`Thread with id ${id} not found`);
170 | }
171 | this.tables[TABLE_THREADS][index] = {
172 | ...this.tables[TABLE_THREADS][index],
173 | title,
174 | metadata,
175 | };
176 | return this.tables[TABLE_THREADS][index] as StorageThreadType;
177 | }
178 |
179 | async deleteThread({ threadId }: { threadId: string }): Promise {
180 | const index = this.tables[TABLE_THREADS].findIndex(
181 | (record) => record.id === threadId
182 | );
183 | if (index !== -1) {
184 | this.tables[TABLE_THREADS].splice(index, 1);
185 | }
186 | }
187 |
188 | async getMessages({
189 | threadId,
190 | selectBy,
191 | }: StorageGetMessagesArg): Promise {
192 | const allMessages = this.tables[TABLE_MESSAGES].filter(
193 | (record) => record.threadId === threadId
194 | ) as MessageType[];
195 | const limit = typeof selectBy?.last === `number` ? selectBy.last : 40;
196 | const ranges = [
197 | { start: allMessages.length - limit, end: allMessages.length },
198 | ];
199 | if (selectBy?.include?.length) {
200 | ranges.push(
201 | ...selectBy.include
202 | .map((i) => {
203 | const index = allMessages.findIndex((record) => record.id === i.id);
204 | return index !== -1
205 | ? {
206 | start: index - (i.withPreviousMessages || 0),
207 | end: index + (i.withNextMessages || 0),
208 | }
209 | : null;
210 | })
211 | .flatMap((r) => (r ? [r] : []))
212 | );
213 | }
214 | const indexes = ranges
215 | .flatMap((r) =>
216 | Array.from({ length: r.end - r.start + 1 }, (_, i) => r.start + i)
217 | )
218 | .sort()
219 | .reduce(
220 | (acc, index) => (acc.at(-1) === index ? acc : [...acc, index]),
221 | [] as number[]
222 | );
223 | return indexes
224 | .map((index) => allMessages[index]!)
225 | .map((m) => ({
226 | ...m,
227 | content: tryJSONParse(m.content),
228 | createdAt: new Date(m.createdAt),
229 | })) as T;
230 | }
231 |
232 | async saveMessages({
233 | messages,
234 | }: {
235 | messages: MessageType[];
236 | }): Promise {
237 | messages.forEach((message) =>
238 | this._insert(TABLE_MESSAGES, {
239 | id: message.id,
240 | threadId: message.threadId,
241 | content:
242 | typeof message.content === "object"
243 | ? JSON.stringify(message.content)
244 | : message.content,
245 | role: message.role,
246 | type: message.type,
247 | createdAt:
248 | message.createdAt instanceof Date
249 | ? message.createdAt.toISOString()
250 | : message.createdAt || new Date().toISOString(),
251 | })
252 | );
253 | return messages;
254 | }
255 |
256 | async getTraces(args: {
257 | name?: string;
258 | scope?: string;
259 | page: number;
260 | perPage: number;
261 | attributes?: Record;
262 | }): Promise {
263 | const { name, scope, page, perPage, attributes } = args;
264 | const limit = perPage;
265 | const offset = page * perPage;
266 | const traces = this.tables[TABLE_TRACES].filter((record) => {
267 | if (name && !record.name.startsWith(name)) {
268 | return false;
269 | }
270 | if (scope && record.scope !== scope) {
271 | return false;
272 | }
273 | if (attributes) {
274 | return Object.keys(attributes).every(
275 | (key) => record.attributes[key] === attributes[key]
276 | );
277 | }
278 | return true;
279 | });
280 | return traces.slice(offset, offset + limit);
281 | }
282 |
283 | async getEvalsByAgentName(
284 | agentName: string,
285 | type?: "test" | "live"
286 | ): Promise {
287 | return this.tables[TABLE_EVALS].filter(
288 | (record) =>
289 | record.agentName === agentName &&
290 | (type === "test"
291 | ? record.testInfo && record.testInfo.testPath
292 | : type === "live"
293 | ? !record.testInfo || !record.testInfo.testPath
294 | : true)
295 | ) as EvalRow[];
296 | }
297 | }
298 |
299 | function tryJSONParse(content: unknown) {
300 | try {
301 | return JSON.parse(content as string);
302 | } catch {
303 | return content;
304 | }
305 | }
306 |
307 | /**
308 | * InMemoryVector is a simple in-memory vector implementation for Mastra.
309 | * It is used for testing and development purposes.
310 | */
311 | import type {
312 | CreateIndexParams,
313 | UpsertVectorParams,
314 | QueryVectorParams,
315 | IndexStats,
316 | ParamsToArgs,
317 | QueryResult,
318 | CreateIndexArgs,
319 | UpsertVectorArgs,
320 | QueryVectorArgs,
321 | } from "@mastra/core/vector";
322 | import { MastraVector } from "@mastra/core/vector";
323 | type VectorDoc = {
324 | id: string;
325 | vector: number[];
326 | metadata: Record;
327 | };
328 |
329 | export class InMemoryVector extends MastraVector {
330 | private tables: Record = {};
331 | private dimensions: Record = {};
332 | constructor() {
333 | super();
334 | }
335 | async query(
336 | ...args: ParamsToArgs | E
337 | ): Promise {
338 | const params = this.normalizeArgs(
339 | "query",
340 | args
341 | );
342 | const index = this.tables[params.indexName];
343 | if (!index) return [];
344 | const scored = index
345 | .filter(
346 | (doc) =>
347 | !params.filter ||
348 | Object.entries(params.filter).every(
349 | ([field, value]) => doc.metadata[field] === value
350 | )
351 | )
352 | .map((doc) => {
353 | const score = dotProduct(doc.vector, params.queryVector);
354 | return { score, doc };
355 | });
356 | return scored
357 | .sort((a, b) => b.score - a.score)
358 | .slice(0, params.topK)
359 | .map((scored) => ({
360 | id: scored.doc.id,
361 | score: scored.score,
362 | ...scored.doc.metadata,
363 | ...(params.includeVector ? { vector: scored.doc.vector } : {}),
364 | }));
365 | }
366 | // Adds type checks for positional arguments if used
367 | async upsert(
368 | ...args: ParamsToArgs | E
369 | ): Promise {
370 | const params = this.normalizeArgs(
371 | "upsert",
372 | args
373 | );
374 | const table = this.tables[params.indexName];
375 | if (!table) throw new Error(`Index ${params.indexName} not found`);
376 | const normalized = params.vectors.map((vector, index) => {
377 | if (vector.length !== this.dimensions[params.indexName]) {
378 | throw new Error(
379 | `Vector ${index} has wrong dimension: ${vector.length} !== ${this.dimensions[params.indexName]}`
380 | );
381 | }
382 | // Normalize the vector to unit length
383 | return vector.map(
384 | (value) => value / Math.sqrt(dotProduct(vector, vector))
385 | );
386 | });
387 |
388 | const ids = params.ids || params.vectors.map(() => crypto.randomUUID());
389 | normalized.forEach((vector, index) => {
390 | const existing = table.find((doc) => doc.id === ids[index]);
391 | if (existing) {
392 | existing.vector = vector;
393 | existing.metadata = params.metadata?.[index] ?? {};
394 | } else {
395 | table.push({
396 | id: ids[index]!,
397 | vector,
398 | metadata: params.metadata?.[index] ?? {},
399 | });
400 | }
401 | });
402 | return ids;
403 | }
404 | // Adds type checks for positional arguments if used
405 | async createIndex(
406 | ...args: ParamsToArgs | E
407 | ): Promise {
408 | const params = this.normalizeArgs(
409 | "createIndex",
410 | args
411 | );
412 | this.tables[params.indexName] = [];
413 | this.dimensions[params.indexName] = params.dimension;
414 | }
415 |
416 | async listIndexes(): Promise {
417 | return Object.keys(this.tables);
418 | }
419 |
420 | async describeIndex(indexName: string): Promise {
421 | const table = this.tables[indexName];
422 | const dimension = this.dimensions[indexName];
423 | if (!table) throw new Error(`Index ${indexName} not found`);
424 | if (!dimension) throw new Error(`Index ${indexName} has no dimension`);
425 | return {
426 | dimension,
427 | metric: "cosine",
428 | count: table.length,
429 | };
430 | }
431 |
432 | async deleteIndex(indexName: string): Promise {
433 | delete this.tables[indexName];
434 | delete this.dimensions[indexName];
435 | }
436 | }
437 |
438 | function dotProduct(a: number[], b: number[]): number {
439 | return sum(a.map((value, index) => (b[index] ? value * b[index] : 0)));
440 | }
441 |
442 | function sum(a: number[]): number {
443 | return a.reduce((acc, curr) => acc + curr, 0);
444 | }
445 |
--------------------------------------------------------------------------------
/src/component/_generated/component.ts:
--------------------------------------------------------------------------------
1 | /* eslint-disable */
2 | /**
3 | * Generated `ComponentApi` utility.
4 | *
5 | * THIS CODE IS AUTOMATICALLY GENERATED.
6 | *
7 | * To regenerate, run `npx convex dev`.
8 | * @module
9 | */
10 |
11 | import type { FunctionReference } from "convex/server";
12 |
13 | /**
14 | * A utility for referencing a Convex component's exposed API.
15 | *
16 | * Useful when expecting a parameter like `components.myComponent`.
17 | * Usage:
18 | * ```ts
19 | * async function myFunction(ctx: QueryCtx, component: ComponentApi) {
20 | * return ctx.runQuery(component.someFile.someQuery, { ...args });
21 | * }
22 | * ```
23 | */
24 | export type ComponentApi =
25 | {
26 | storage: {
27 | messages: {
28 | deleteThread: FunctionReference<
29 | "mutation",
30 | "internal",
31 | { threadId: string },
32 | null,
33 | Name
34 | >;
35 | getMessagesPage: FunctionReference<
36 | "query",
37 | "internal",
38 | {
39 | selectBy?: {
40 | include?: Array<{
41 | id: string;
42 | withNextMessages?: number;
43 | withPreviousMessages?: number;
44 | }>;
45 | last?: number | false;
46 | vectorSearchString?: string;
47 | };
48 | threadId: string;
49 | },
50 | Array<{
51 | content:
52 | | string
53 | | Array<
54 | | {
55 | experimental_providerMetadata?: Record<
56 | string,
57 | Record
58 | >;
59 | providerOptions?: Record>;
60 | text: string;
61 | type: "text";
62 | }
63 | | {
64 | experimental_providerMetadata?: Record<
65 | string,
66 | Record
67 | >;
68 | image: string | ArrayBuffer;
69 | mimeType?: string;
70 | providerOptions?: Record>;
71 | type: "image";
72 | }
73 | | {
74 | data: string | ArrayBuffer;
75 | experimental_providerMetadata?: Record<
76 | string,
77 | Record
78 | >;
79 | mimeType: string;
80 | providerOptions?: Record>;
81 | type: "file";
82 | }
83 | >
84 | | string
85 | | Array<
86 | | {
87 | experimental_providerMetadata?: Record<
88 | string,
89 | Record
90 | >;
91 | providerOptions?: Record>;
92 | text: string;
93 | type: "text";
94 | }
95 | | {
96 | data: string | ArrayBuffer;
97 | experimental_providerMetadata?: Record<
98 | string,
99 | Record
100 | >;
101 | mimeType: string;
102 | providerOptions?: Record>;
103 | type: "file";
104 | }
105 | | {
106 | experimental_providerMetadata?: Record<
107 | string,
108 | Record
109 | >;
110 | providerOptions?: Record>;
111 | text: string;
112 | type: "reasoning";
113 | }
114 | | {
115 | data: string;
116 | experimental_providerMetadata?: Record<
117 | string,
118 | Record
119 | >;
120 | providerOptions?: Record>;
121 | type: "redacted-reasoning";
122 | }
123 | | {
124 | args: any;
125 | experimental_providerMetadata?: Record<
126 | string,
127 | Record