├── .gitignore ├── LICENSE ├── README.md ├── examples └── prisma-nextjs │ ├── .eslintrc.json │ ├── .gitignore │ ├── README.md │ ├── next.config.mjs │ ├── package-lock.json │ ├── package.json │ ├── prisma │ ├── migrations │ │ ├── 20240527123208_init │ │ │ └── migration.sql │ │ ├── 20240630160212_add_truncation_strategy │ │ │ └── migration.sql │ │ ├── 20240630180029_add_response_format │ │ │ └── migration.sql │ │ └── migration_lock.toml │ └── schema.prisma │ ├── public │ ├── next.svg │ └── vercel.svg │ ├── src │ ├── app │ │ ├── api │ │ │ ├── anthropic │ │ │ │ ├── models │ │ │ │ │ └── route.ts │ │ │ │ ├── poll │ │ │ │ │ └── route.ts │ │ │ │ └── stream │ │ │ │ │ └── route.ts │ │ │ ├── azure-openai │ │ │ │ ├── models │ │ │ │ │ └── route.ts │ │ │ │ └── stream │ │ │ │ │ ├── assistants-api-storage │ │ │ │ │ └── route.ts │ │ │ │ │ └── route.ts │ │ │ ├── google │ │ │ │ ├── models │ │ │ │ │ └── route.ts │ │ │ │ ├── poll │ │ │ │ │ └── route.ts │ │ │ │ └── stream │ │ │ │ │ └── route.ts │ │ │ ├── groq │ │ │ │ ├── models │ │ │ │ │ └── route.ts │ │ │ │ ├── poll │ │ │ │ │ └── route.ts │ │ │ │ └── stream │ │ │ │ │ └── route.ts │ │ │ ├── humiris │ │ │ │ ├── models │ │ │ │ │ └── route.ts │ │ │ │ ├── poll │ │ │ │ │ └── route.ts │ │ │ │ └── stream │ │ │ │ │ └── route.ts │ │ │ ├── mistral │ │ │ │ ├── models │ │ │ │ │ └── route.ts │ │ │ │ ├── poll │ │ │ │ │ └── route.ts │ │ │ │ └── stream │ │ │ │ │ └── route.ts │ │ │ ├── ollama │ │ │ │ └── models │ │ │ │ │ └── route.ts │ │ │ ├── openai │ │ │ │ ├── list │ │ │ │ │ └── route.ts │ │ │ │ ├── models │ │ │ │ │ └── route.ts │ │ │ │ ├── poll │ │ │ │ │ ├── assistants-api-storage │ │ │ │ │ │ └── route.ts │ │ │ │ │ └── route.ts │ │ │ │ ├── steps │ │ │ │ │ └── list │ │ │ │ │ │ └── route.ts │ │ │ │ ├── stream │ │ │ │ │ ├── assistants-api-storage │ │ │ │ │ │ ├── o1 │ │ │ │ │ │ │ └── route.ts │ │ │ │ │ │ └── route.ts │ │ │ │ │ ├── o1 │ │ │ │ │ │ └── route.ts │ │ │ │ │ └── route.ts │ │ │ │ └── superinterface │ │ │ │ │ └── route.ts │ │ │ ├── perplexity │ │ 
│ │ ├── models │ │ │ │ │ └── route.ts │ │ │ │ ├── poll │ │ │ │ │ └── route.ts │ │ │ │ └── stream │ │ │ │ │ └── route.ts │ │ │ └── together │ │ │ │ └── models │ │ │ │ └── route.ts │ │ ├── favicon.ico │ │ ├── globals.css │ │ ├── layout.tsx │ │ ├── page.module.css │ │ └── page.tsx │ └── lib │ │ └── prisma.ts │ └── tsconfig.json ├── package-lock.json ├── package.json ├── packages └── supercompat │ ├── package.json │ ├── src │ ├── adapters │ │ ├── client │ │ │ ├── anthropicClientAdapter │ │ │ │ ├── completions │ │ │ │ │ ├── index.ts │ │ │ │ │ ├── post.ts │ │ │ │ │ ├── serializeMessages │ │ │ │ │ │ ├── index.ts │ │ │ │ │ │ └── serializeMessage.ts │ │ │ │ │ └── serializeTools.ts │ │ │ │ ├── index.ts │ │ │ │ └── models │ │ │ │ │ ├── get.ts │ │ │ │ │ └── index.ts │ │ │ ├── azureOpenaiClientAdapter │ │ │ │ ├── completions │ │ │ │ │ ├── index.ts │ │ │ │ │ └── post.ts │ │ │ │ └── index.ts │ │ │ ├── googleClientAdapter │ │ │ │ ├── completions │ │ │ │ │ ├── index.ts │ │ │ │ │ └── post.ts │ │ │ │ ├── index.ts │ │ │ │ └── models │ │ │ │ │ ├── get.ts │ │ │ │ │ └── index.ts │ │ │ ├── groqClientAdapter │ │ │ │ ├── completions │ │ │ │ │ ├── index.ts │ │ │ │ │ └── post.ts │ │ │ │ ├── index.ts │ │ │ │ └── models │ │ │ │ │ ├── get.ts │ │ │ │ │ └── index.ts │ │ │ ├── humirisClientAdapter │ │ │ │ ├── completions │ │ │ │ │ ├── index.ts │ │ │ │ │ └── post.ts │ │ │ │ ├── index.ts │ │ │ │ └── models │ │ │ │ │ ├── get.ts │ │ │ │ │ └── index.ts │ │ │ ├── mistralClientAdapter │ │ │ │ ├── completions │ │ │ │ │ ├── index.ts │ │ │ │ │ ├── post.ts │ │ │ │ │ ├── serializeBody.ts │ │ │ │ │ └── serializeChunk.ts │ │ │ │ ├── index.ts │ │ │ │ └── models │ │ │ │ │ ├── get.ts │ │ │ │ │ └── index.ts │ │ │ ├── ollamaClientAdapter │ │ │ │ ├── completions │ │ │ │ │ ├── index.ts │ │ │ │ │ └── post.ts │ │ │ │ ├── index.ts │ │ │ │ └── models │ │ │ │ │ ├── get.ts │ │ │ │ │ └── index.ts │ │ │ ├── openaiClientAdapter │ │ │ │ ├── completions │ │ │ │ │ ├── index.ts │ │ │ │ │ └── post.ts │ │ │ │ ├── index.ts │ │ │ │ └── 
models │ │ │ │ │ ├── get.ts │ │ │ │ │ └── index.ts │ │ │ ├── perplexityClientAdapter │ │ │ │ ├── completions │ │ │ │ │ ├── index.ts │ │ │ │ │ └── post.ts │ │ │ │ ├── index.ts │ │ │ │ └── models │ │ │ │ │ ├── get.ts │ │ │ │ │ └── index.ts │ │ │ └── togetherClientAdapter │ │ │ │ ├── completions │ │ │ │ ├── index.ts │ │ │ │ └── post.ts │ │ │ │ ├── index.ts │ │ │ │ └── models │ │ │ │ ├── get.ts │ │ │ │ └── index.ts │ │ ├── run │ │ │ └── completionsRunAdapter │ │ │ │ ├── index.ts │ │ │ │ └── messages │ │ │ │ ├── index.ts │ │ │ │ └── serializeMessage.ts │ │ └── storage │ │ │ └── prismaStorageAdapter │ │ │ ├── index.ts │ │ │ └── threads │ │ │ ├── index.ts │ │ │ ├── messages │ │ │ ├── get.ts │ │ │ ├── index.ts │ │ │ ├── post.ts │ │ │ └── serializeMessage.ts │ │ │ ├── post.ts │ │ │ ├── run │ │ │ ├── get.ts │ │ │ └── index.ts │ │ │ ├── runs │ │ │ ├── get.ts │ │ │ ├── getMessages.ts │ │ │ ├── index.ts │ │ │ ├── onEvent │ │ │ │ ├── handlers │ │ │ │ │ ├── index.ts │ │ │ │ │ ├── threadMessageCompleted.ts │ │ │ │ │ ├── threadMessageCreated.ts │ │ │ │ │ ├── threadMessageDelta.ts │ │ │ │ │ ├── threadRunCompleted.ts │ │ │ │ │ ├── threadRunFailed.ts │ │ │ │ │ ├── threadRunInProgress.ts │ │ │ │ │ ├── threadRunRequiresAction.ts │ │ │ │ │ ├── threadRunStepCreated.ts │ │ │ │ │ └── threadRunStepDelta.ts │ │ │ │ └── index.ts │ │ │ ├── post.ts │ │ │ ├── serializeRun.ts │ │ │ ├── steps │ │ │ │ ├── get.ts │ │ │ │ ├── index.ts │ │ │ │ └── serializeRunStep.ts │ │ │ └── submitToolOutputs │ │ │ │ ├── index.ts │ │ │ │ └── post │ │ │ │ ├── index.ts │ │ │ │ └── updateRun.ts │ │ │ └── serializeThread.ts │ ├── index.ts │ ├── lib │ │ ├── azureOpenai │ │ │ └── endpointFromBaseUrl.ts │ │ ├── messages │ │ │ ├── alternatingMessages.ts │ │ │ ├── firstUserMessages.ts │ │ │ ├── messagesRegexp.ts │ │ │ ├── nonEmptyMessages.ts │ │ │ └── systemDeveloperMessages.ts │ │ ├── models │ │ │ └── isOModel.ts │ │ ├── runs │ │ │ ├── runRegexp.ts │ │ │ ├── runsRegexp.ts │ │ │ └── submitToolOutputsRegexp.ts │ │ └── steps │ 
│ │ └── stepsRegexp.ts │ ├── supercompat.ts │ ├── supercompatFetch │ │ ├── findRequestHandler.ts │ │ ├── index.ts │ │ ├── originalFetch.ts │ │ └── requestHandlers.ts │ └── types │ │ ├── index.ts │ │ └── prisma.ts │ ├── tsconfig.json │ └── tsup.config.ts ├── tsconfig.json └── turbo.json /.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # Dependencies 4 | node_modules 5 | .pnp 6 | .pnp.js 7 | 8 | # Local env files 9 | .env 10 | .env.local 11 | .env.development.local 12 | .env.test.local 13 | .env.production.local 14 | 15 | # Testing 16 | coverage 17 | 18 | # Turbo 19 | .turbo 20 | 21 | # Vercel 22 | .vercel 23 | 24 | # Build Outputs 25 | .next/ 26 | out/ 27 | build 28 | dist 29 | 30 | 31 | # Debug 32 | npm-debug.log* 33 | yarn-debug.log* 34 | yarn-error.log* 35 | 36 | # Misc 37 | .DS_Store 38 | *.pem 39 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2025 Supercorp 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Supercompat 2 | 3 | Supercompat allows you to use any AI provider like Anthropic, Groq or Mistral with OpenAI-compatible Assistants API. 4 | 5 | # Install 6 | 7 | ``` 8 | npm i supercompat 9 | ``` 10 | 11 | ## Usage 12 | 13 | ```ts 14 | import { 15 | supercompat, 16 | groqClientAdapter, 17 | prismaStorageAdapter, 18 | completionsRunAdapter, 19 | } from 'supercompat' 20 | import Groq from 'groq-sdk' 21 | 22 | const client = supercompat({ 23 | client: groqClientAdapter({ 24 | groq: new Groq(), 25 | }), 26 | storage: prismaStorageAdapter({ 27 | prisma, 28 | }), 29 | runAdapter: completionsRunAdapter(), 30 | }) 31 | 32 | const message = await client.beta.threads.messages.create(thread.id, { 33 | role: 'user', 34 | content: 'Who won the world series in 2020?' 35 | }) 36 | ``` 37 | 38 | ## Setup 39 | 40 | ```prisma 41 | // prisma.schema 42 | model Thread { 43 | id String @id @default(dbgenerated("gen_random_uuid()")) @db.Uuid 44 | assistantId String @db.Uuid 45 | assistant Assistant @relation(fields: [assistantId], references: [id], onDelete: Cascade) 46 | metadata Json? 
47 | messages Message[] 48 | runs Run[] 49 | runSteps RunStep[] 50 | createdAt DateTime @default(now()) @db.Timestamptz(6) 51 | updatedAt DateTime @updatedAt @db.Timestamptz(6) 52 | 53 | @@index([assistantId]) 54 | @@index([createdAt(sort: Desc)]) 55 | } 56 | 57 | enum MessageRole { 58 | USER 59 | ASSISTANT 60 | } 61 | 62 | enum MessageStatus { 63 | IN_PROGRESS 64 | INCOMPLETE 65 | COMPLETED 66 | } 67 | 68 | model Message { 69 | id String @id @default(dbgenerated("gen_random_uuid()")) @db.Uuid 70 | threadId String @db.Uuid 71 | thread Thread @relation(fields: [threadId], references: [id], onDelete: Cascade) 72 | role MessageRole 73 | content Json 74 | status MessageStatus @default(COMPLETED) 75 | assistantId String? @db.Uuid 76 | assistant Assistant? @relation(fields: [assistantId], references: [id], onDelete: Cascade) 77 | runId String? @db.Uuid 78 | run Run? @relation(fields: [runId], references: [id], onDelete: Cascade) 79 | completedAt DateTime? @db.Timestamptz(6) 80 | incompleteAt DateTime? @db.Timestamptz(6) 81 | incompleteDetails Json? 82 | attachments Json[] @default([]) 83 | metadata Json? 84 | toolCalls Json? 85 | createdAt DateTime @default(now()) @db.Timestamptz(6) 86 | updatedAt DateTime @updatedAt @db.Timestamptz(6) 87 | 88 | @@index([threadId]) 89 | @@index([createdAt(sort: Desc)]) 90 | } 91 | 92 | enum RunStatus { 93 | QUEUED 94 | IN_PROGRESS 95 | REQUIRES_ACTION 96 | CANCELLING 97 | CANCELLED 98 | FAILED 99 | COMPLETED 100 | EXPIRED 101 | } 102 | 103 | model Run { 104 | id String @id @default(dbgenerated("gen_random_uuid()")) @db.Uuid 105 | threadId String @db.Uuid 106 | thread Thread @relation(fields: [threadId], references: [id], onDelete: Cascade) 107 | assistantId String @db.Uuid 108 | assistant Assistant @relation(fields: [assistantId], references: [id], onDelete: Cascade) 109 | status RunStatus 110 | requiredAction Json? 111 | lastError Json? 112 | expiresAt Int 113 | startedAt Int? 114 | cancelledAt Int? 115 | failedAt Int? 
116 | completedAt Int? 117 | model String 118 | instructions String 119 | tools Json[] @default([]) 120 | metadata Json? 121 | usage Json? 122 | truncationStrategy Json @default("{ \"type\": \"auto\" }") 123 | responseFormat Json @default("{ \"type\": \"text\" }") 124 | runSteps RunStep[] 125 | messages Message[] 126 | createdAt DateTime @default(now()) @db.Timestamptz(6) 127 | updatedAt DateTime @updatedAt @db.Timestamptz(6) 128 | } 129 | 130 | enum RunStepType { 131 | MESSAGE_CREATION 132 | TOOL_CALLS 133 | } 134 | 135 | enum RunStepStatus { 136 | IN_PROGRESS 137 | CANCELLED 138 | FAILED 139 | COMPLETED 140 | EXPIRED 141 | } 142 | 143 | model RunStep { 144 | id String @id @default(dbgenerated("gen_random_uuid()")) @db.Uuid 145 | threadId String @db.Uuid 146 | thread Thread @relation(fields: [threadId], references: [id], onDelete: Cascade) 147 | assistantId String @db.Uuid 148 | assistant Assistant @relation(fields: [assistantId], references: [id], onDelete: Cascade) 149 | runId String @db.Uuid 150 | run Run @relation(fields: [runId], references: [id], onDelete: Cascade) 151 | type RunStepType 152 | status RunStepStatus 153 | stepDetails Json 154 | lastError Json? 155 | expiredAt Int? 156 | cancelledAt Int? 157 | failedAt Int? 158 | completedAt Int? 159 | metadata Json? 160 | usage Json? 
161 | createdAt DateTime @default(now()) @db.Timestamptz(6) 162 | updatedAt DateTime @updatedAt @db.Timestamptz(6) 163 | 164 | @@index([threadId, runId, type, status]) 165 | @@index([createdAt(sort: Asc)]) 166 | } 167 | 168 | model Assistant { 169 | id String @id @default(dbgenerated("gen_random_uuid()")) @db.Uuid 170 | threads Thread[] 171 | runs Run[] 172 | runSteps RunStep[] 173 | messages Message[] 174 | createdAt DateTime @default(now()) @db.Timestamptz(6) 175 | updatedAt DateTime @updatedAt @db.Timestamptz(6) 176 | } 177 | ``` 178 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "next/core-web-vitals" 3 | } 4 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | .yarn/install-state.gz 8 | 9 | # testing 10 | /coverage 11 | 12 | # next.js 13 | /.next/ 14 | /out/ 15 | 16 | # production 17 | /build 18 | 19 | # misc 20 | .DS_Store 21 | *.pem 22 | 23 | # debug 24 | npm-debug.log* 25 | yarn-debug.log* 26 | yarn-error.log* 27 | 28 | # local env files 29 | .env*.local 30 | 31 | # vercel 32 | .vercel 33 | 34 | # typescript 35 | *.tsbuildinfo 36 | next-env.d.ts 37 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/README.md: -------------------------------------------------------------------------------- 1 | This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app). 
2 | 3 | ## Getting Started 4 | 5 | First, run the development server: 6 | 7 | ```bash 8 | npm run dev 9 | # or 10 | yarn dev 11 | # or 12 | pnpm dev 13 | # or 14 | bun dev 15 | ``` 16 | 17 | Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. 18 | 19 | You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file. 20 | 21 | This project uses [`next/font`](https://nextjs.org/docs/basic-features/font-optimization) to automatically optimize and load Inter, a custom Google Font. 22 | 23 | ## Learn More 24 | 25 | To learn more about Next.js, take a look at the following resources: 26 | 27 | - [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API. 28 | - [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial. 29 | 30 | You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js/) - your feedback and contributions are welcome! 31 | 32 | ## Deploy on Vercel 33 | 34 | The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js. 35 | 36 | Check out our [Next.js deployment documentation](https://nextjs.org/docs/deployment) for more details. 
37 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/next.config.mjs: -------------------------------------------------------------------------------- 1 | import { join } from 'path' 2 | 3 | /** @type {import('next').NextConfig} */ 4 | const nextConfig = { 5 | outputFileTracingRoot: join(import.meta.dirname, '../../') 6 | }; 7 | 8 | export default nextConfig; 9 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "prisma-nextjs", 3 | "version": "0.1.0", 4 | "private": true, 5 | "scripts": { 6 | "dev": "next dev --turbopack", 7 | "build": "next build", 8 | "start": "next start", 9 | "postinstall": "prisma generate", 10 | "lint": "next lint" 11 | }, 12 | "dependencies": { 13 | "@anthropic-ai/sdk": "^0.39.0", 14 | "@mistralai/mistralai": "^1.5.2", 15 | "@neondatabase/serverless": "^1.0.0", 16 | "@prisma/adapter-neon": "^6.5.0", 17 | "@prisma/client": "^6.5.0", 18 | "@superinterface/react": "^3.23.0", 19 | "groq-sdk": "^0.19.0", 20 | "next": "15.2.4", 21 | "openai": "^4.97.0", 22 | "react": "19.1.0", 23 | "react-dom": "19.1.0", 24 | "supercompat": "file:../../packages/supercompat" 25 | }, 26 | "devDependencies": { 27 | "@types/node": "^22", 28 | "@types/react": "npm:types-react@19.0.0-alpha.3", 29 | "@types/react-dom": "npm:types-react-dom@19.0.0-alpha.3", 30 | "eslint": "^9", 31 | "eslint-config-next": "15.2.4", 32 | "prisma": "^6.5.0", 33 | "typescript": "^5" 34 | }, 35 | "overrides": { 36 | "@types/react": "npm:types-react@19.0.0-alpha.3", 37 | "@types/react-dom": "npm:types-react-dom@19.0.0-alpha.3" 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/prisma/migrations/20240527123208_init/migration.sql: 
-------------------------------------------------------------------------------- 1 | -- CreateEnum 2 | CREATE TYPE "MessageRole" AS ENUM ('USER', 'ASSISTANT'); 3 | 4 | -- CreateEnum 5 | CREATE TYPE "MessageStatus" AS ENUM ('IN_PROGRESS', 'INCOMPLETE', 'COMPLETED'); 6 | 7 | -- CreateEnum 8 | CREATE TYPE "RunStatus" AS ENUM ('QUEUED', 'IN_PROGRESS', 'REQUIRES_ACTION', 'CANCELLING', 'CANCELLED', 'FAILED', 'COMPLETED', 'EXPIRED'); 9 | 10 | -- CreateEnum 11 | CREATE TYPE "RunStepType" AS ENUM ('MESSAGE_CREATION', 'TOOL_CALLS'); 12 | 13 | -- CreateEnum 14 | CREATE TYPE "RunStepStatus" AS ENUM ('IN_PROGRESS', 'CANCELLED', 'FAILED', 'COMPLETED', 'EXPIRED'); 15 | 16 | -- CreateTable 17 | CREATE TABLE "Thread" ( 18 | "id" TEXT NOT NULL DEFAULT gen_random_uuid(), 19 | "assistantId" TEXT NOT NULL, 20 | "metadata" JSONB, 21 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 22 | "updatedAt" TIMESTAMP(3) NOT NULL, 23 | 24 | CONSTRAINT "Thread_pkey" PRIMARY KEY ("id") 25 | ); 26 | 27 | -- CreateTable 28 | CREATE TABLE "Message" ( 29 | "id" TEXT NOT NULL DEFAULT gen_random_uuid(), 30 | "threadId" TEXT NOT NULL, 31 | "role" "MessageRole" NOT NULL, 32 | "content" JSONB NOT NULL, 33 | "status" "MessageStatus" NOT NULL DEFAULT 'COMPLETED', 34 | "assistantId" TEXT, 35 | "runId" TEXT, 36 | "completedAt" TIMESTAMP(3), 37 | "incompleteAt" TIMESTAMP(3), 38 | "incompleteDetails" JSONB, 39 | "attachments" JSONB[] DEFAULT ARRAY[]::JSONB[], 40 | "metadata" JSONB, 41 | "toolCalls" JSONB, 42 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 43 | "updatedAt" TIMESTAMP(3) NOT NULL, 44 | 45 | CONSTRAINT "Message_pkey" PRIMARY KEY ("id") 46 | ); 47 | 48 | -- CreateTable 49 | CREATE TABLE "Run" ( 50 | "id" TEXT NOT NULL DEFAULT gen_random_uuid(), 51 | "threadId" TEXT NOT NULL, 52 | "assistantId" TEXT NOT NULL, 53 | "status" "RunStatus" NOT NULL, 54 | "requiredAction" JSONB, 55 | "lastError" JSONB, 56 | "expiresAt" INTEGER NOT NULL, 57 | "startedAt" INTEGER, 58 | "cancelledAt" 
INTEGER, 59 | "failedAt" INTEGER, 60 | "completedAt" INTEGER, 61 | "model" TEXT NOT NULL, 62 | "instructions" TEXT NOT NULL, 63 | "tools" JSONB[] DEFAULT ARRAY[]::JSONB[], 64 | "metadata" JSONB, 65 | "usage" JSONB, 66 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 67 | "updatedAt" TIMESTAMP(3) NOT NULL, 68 | 69 | CONSTRAINT "Run_pkey" PRIMARY KEY ("id") 70 | ); 71 | 72 | -- CreateTable 73 | CREATE TABLE "RunStep" ( 74 | "id" TEXT NOT NULL DEFAULT gen_random_uuid(), 75 | "threadId" TEXT NOT NULL, 76 | "assistantId" TEXT NOT NULL, 77 | "runId" TEXT NOT NULL, 78 | "type" "RunStepType" NOT NULL, 79 | "status" "RunStepStatus" NOT NULL, 80 | "stepDetails" JSONB NOT NULL, 81 | "lastError" JSONB, 82 | "expiredAt" INTEGER, 83 | "cancelledAt" INTEGER, 84 | "failedAt" INTEGER, 85 | "completedAt" INTEGER, 86 | "metadata" JSONB, 87 | "usage" JSONB, 88 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 89 | "updatedAt" TIMESTAMP(3) NOT NULL, 90 | 91 | CONSTRAINT "RunStep_pkey" PRIMARY KEY ("id") 92 | ); 93 | 94 | -- CreateTable 95 | CREATE TABLE "Assistant" ( 96 | "id" TEXT NOT NULL DEFAULT gen_random_uuid(), 97 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 98 | "updatedAt" TIMESTAMP(3) NOT NULL, 99 | 100 | CONSTRAINT "Assistant_pkey" PRIMARY KEY ("id") 101 | ); 102 | 103 | -- CreateIndex 104 | CREATE INDEX "Thread_assistantId_idx" ON "Thread"("assistantId"); 105 | 106 | -- CreateIndex 107 | CREATE INDEX "Thread_createdAt_idx" ON "Thread"("createdAt" DESC); 108 | 109 | -- CreateIndex 110 | CREATE INDEX "Message_threadId_idx" ON "Message"("threadId"); 111 | 112 | -- CreateIndex 113 | CREATE INDEX "Message_createdAt_idx" ON "Message"("createdAt" DESC); 114 | 115 | -- CreateIndex 116 | CREATE INDEX "RunStep_threadId_runId_type_status_idx" ON "RunStep"("threadId", "runId", "type", "status"); 117 | 118 | -- CreateIndex 119 | CREATE INDEX "RunStep_createdAt_idx" ON "RunStep"("createdAt" ASC); 120 | 121 | -- AddForeignKey 122 | ALTER TABLE 
"Thread" ADD CONSTRAINT "Thread_assistantId_fkey" FOREIGN KEY ("assistantId") REFERENCES "Assistant"("id") ON DELETE CASCADE ON UPDATE CASCADE; 123 | 124 | -- AddForeignKey 125 | ALTER TABLE "Message" ADD CONSTRAINT "Message_threadId_fkey" FOREIGN KEY ("threadId") REFERENCES "Thread"("id") ON DELETE CASCADE ON UPDATE CASCADE; 126 | 127 | -- AddForeignKey 128 | ALTER TABLE "Message" ADD CONSTRAINT "Message_assistantId_fkey" FOREIGN KEY ("assistantId") REFERENCES "Assistant"("id") ON DELETE CASCADE ON UPDATE CASCADE; 129 | 130 | -- AddForeignKey 131 | ALTER TABLE "Message" ADD CONSTRAINT "Message_runId_fkey" FOREIGN KEY ("runId") REFERENCES "Run"("id") ON DELETE CASCADE ON UPDATE CASCADE; 132 | 133 | -- AddForeignKey 134 | ALTER TABLE "Run" ADD CONSTRAINT "Run_threadId_fkey" FOREIGN KEY ("threadId") REFERENCES "Thread"("id") ON DELETE CASCADE ON UPDATE CASCADE; 135 | 136 | -- AddForeignKey 137 | ALTER TABLE "Run" ADD CONSTRAINT "Run_assistantId_fkey" FOREIGN KEY ("assistantId") REFERENCES "Assistant"("id") ON DELETE CASCADE ON UPDATE CASCADE; 138 | 139 | -- AddForeignKey 140 | ALTER TABLE "RunStep" ADD CONSTRAINT "RunStep_threadId_fkey" FOREIGN KEY ("threadId") REFERENCES "Thread"("id") ON DELETE CASCADE ON UPDATE CASCADE; 141 | 142 | -- AddForeignKey 143 | ALTER TABLE "RunStep" ADD CONSTRAINT "RunStep_assistantId_fkey" FOREIGN KEY ("assistantId") REFERENCES "Assistant"("id") ON DELETE CASCADE ON UPDATE CASCADE; 144 | 145 | -- AddForeignKey 146 | ALTER TABLE "RunStep" ADD CONSTRAINT "RunStep_runId_fkey" FOREIGN KEY ("runId") REFERENCES "Run"("id") ON DELETE CASCADE ON UPDATE CASCADE; 147 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/prisma/migrations/20240630160212_add_truncation_strategy/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Run" ADD COLUMN "truncationStrategy" JSONB NOT NULL DEFAULT '{ "type": "auto" }'; 3 | 
-------------------------------------------------------------------------------- /examples/prisma-nextjs/prisma/migrations/20240630180029_add_response_format/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Run" ADD COLUMN "responseFormat" JSONB NOT NULL DEFAULT '{ "type": "text" }'; 3 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/prisma/migrations/migration_lock.toml: -------------------------------------------------------------------------------- 1 | # Please do not edit this file manually 2 | # It should be added in your version-control system (i.e. Git) 3 | provider = "postgresql" -------------------------------------------------------------------------------- /examples/prisma-nextjs/prisma/schema.prisma: -------------------------------------------------------------------------------- 1 | // This is your Prisma schema file, 2 | // learn more about it in the docs: https://pris.ly/d/prisma-schema 3 | 4 | // Looking for ways to speed up your queries, or scale easily with your serverless or edge functions? 5 | // Try Prisma Accelerate: https://pris.ly/cli/accelerate-init 6 | 7 | generator client { 8 | provider = "prisma-client-js" 9 | previewFeatures = ["driverAdapters"] 10 | } 11 | 12 | datasource db { 13 | provider = "postgresql" 14 | url = env("DATABASE_URL") 15 | } 16 | 17 | model Thread { 18 | id String @id @default(dbgenerated("gen_random_uuid()")) 19 | assistantId String 20 | assistant Assistant @relation(fields: [assistantId], references: [id], onDelete: Cascade) 21 | metadata Json? 
22 | messages Message[] 23 | runs Run[] 24 | runSteps RunStep[] 25 | createdAt DateTime @default(now()) 26 | updatedAt DateTime @updatedAt 27 | 28 | @@index([assistantId]) 29 | @@index([createdAt(sort: Desc)]) 30 | } 31 | 32 | enum MessageRole { 33 | USER 34 | ASSISTANT 35 | } 36 | 37 | enum MessageStatus { 38 | IN_PROGRESS 39 | INCOMPLETE 40 | COMPLETED 41 | } 42 | 43 | model Message { 44 | id String @id @default(dbgenerated("gen_random_uuid()")) 45 | threadId String 46 | thread Thread @relation(fields: [threadId], references: [id], onDelete: Cascade) 47 | role MessageRole 48 | content Json 49 | status MessageStatus @default(COMPLETED) 50 | assistantId String? 51 | assistant Assistant? @relation(fields: [assistantId], references: [id], onDelete: Cascade) 52 | runId String? 53 | run Run? @relation(fields: [runId], references: [id], onDelete: Cascade) 54 | completedAt DateTime? 55 | incompleteAt DateTime? 56 | incompleteDetails Json? 57 | attachments Json[] @default([]) 58 | metadata Json? 59 | toolCalls Json? 60 | createdAt DateTime @default(now()) 61 | updatedAt DateTime @updatedAt 62 | 63 | @@index([threadId]) 64 | @@index([createdAt(sort: Desc)]) 65 | } 66 | 67 | enum RunStatus { 68 | QUEUED 69 | IN_PROGRESS 70 | REQUIRES_ACTION 71 | CANCELLING 72 | CANCELLED 73 | FAILED 74 | COMPLETED 75 | EXPIRED 76 | } 77 | 78 | model Run { 79 | id String @id @default(dbgenerated("gen_random_uuid()")) 80 | threadId String 81 | thread Thread @relation(fields: [threadId], references: [id], onDelete: Cascade) 82 | assistantId String 83 | assistant Assistant @relation(fields: [assistantId], references: [id], onDelete: Cascade) 84 | status RunStatus 85 | requiredAction Json? 86 | lastError Json? 87 | expiresAt Int 88 | startedAt Int? 89 | cancelledAt Int? 90 | failedAt Int? 91 | completedAt Int? 92 | model String 93 | instructions String 94 | tools Json[] @default([]) 95 | metadata Json? 96 | usage Json? 
97 | truncationStrategy Json @default("{ \"type\": \"auto\" }") 98 | responseFormat Json @default("{ \"type\": \"text\" }") 99 | runSteps RunStep[] 100 | messages Message[] 101 | createdAt DateTime @default(now()) 102 | updatedAt DateTime @updatedAt 103 | } 104 | 105 | enum RunStepType { 106 | MESSAGE_CREATION 107 | TOOL_CALLS 108 | } 109 | 110 | enum RunStepStatus { 111 | IN_PROGRESS 112 | CANCELLED 113 | FAILED 114 | COMPLETED 115 | EXPIRED 116 | } 117 | 118 | model RunStep { 119 | id String @id @default(dbgenerated("gen_random_uuid()")) 120 | threadId String 121 | thread Thread @relation(fields: [threadId], references: [id], onDelete: Cascade) 122 | assistantId String 123 | assistant Assistant @relation(fields: [assistantId], references: [id], onDelete: Cascade) 124 | runId String 125 | run Run @relation(fields: [runId], references: [id], onDelete: Cascade) 126 | type RunStepType 127 | status RunStepStatus 128 | stepDetails Json 129 | lastError Json? 130 | expiredAt Int? 131 | cancelledAt Int? 132 | failedAt Int? 133 | completedAt Int? 134 | metadata Json? 135 | usage Json? 
136 | createdAt DateTime @default(now()) 137 | updatedAt DateTime @updatedAt 138 | 139 | @@index([threadId, runId, type, status]) 140 | @@index([createdAt(sort: Asc)]) 141 | } 142 | 143 | model Assistant { 144 | id String @id @default(dbgenerated("gen_random_uuid()")) 145 | threads Thread[] 146 | runs Run[] 147 | runSteps RunStep[] 148 | messages Message[] 149 | createdAt DateTime @default(now()) 150 | updatedAt DateTime @updatedAt 151 | } 152 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/public/next.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/public/vercel.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/anthropic/models/route.ts: -------------------------------------------------------------------------------- 1 | import Anthropic from '@anthropic-ai/sdk' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | anthropicClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { prisma } from '@/lib/prisma' 10 | 11 | export const GET = async () => { 12 | const client = supercompat({ 13 | client: anthropicClientAdapter({ 14 | anthropic: new Anthropic(), 15 | }), 16 | storage: prismaStorageAdapter({ 17 | prisma, 18 | }), 19 | runAdapter: completionsRunAdapter(), 20 | }) 21 | 22 | const response = await client.models.list() 23 | 24 | const models = [] 25 | 26 | for await (const model of response) { 27 | models.push(model) 28 | } 29 | 30 | return NextResponse.json({ 31 | models, 32 | }) 33 | } 34 | -------------------------------------------------------------------------------- 
/examples/prisma-nextjs/src/app/api/anthropic/poll/route.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | anthropicClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import Anthropic from '@anthropic-ai/sdk' 10 | import { prisma } from '@/lib/prisma' 11 | 12 | const tools = [ 13 | { 14 | "type": "function", 15 | "function": { 16 | "name": "get_current_weather", 17 | "description": "Get the current weather in a given location", 18 | "parameters": { 19 | "type": "object", 20 | "properties": { 21 | "location": { 22 | "type": "string", 23 | "description": "The city and state, e.g. San Francisco, CA", 24 | }, 25 | "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, 26 | }, 27 | "required": ["location"], 28 | }, 29 | } 30 | } 31 | ] as OpenAI.Beta.AssistantTool[] 32 | 33 | export const GET = async () => { 34 | const client = supercompat({ 35 | client: anthropicClientAdapter({ 36 | anthropic: new Anthropic(), 37 | }), 38 | storage: prismaStorageAdapter({ 39 | prisma, 40 | }), 41 | runAdapter: completionsRunAdapter(), 42 | }) 43 | 44 | const assistantId = 'b7fd7a65-3504-4ad3-95a0-b83a8eaff0f3' 45 | 46 | const thread = await client.beta.threads.create({ 47 | messages: [], 48 | metadata: { 49 | assistantId, 50 | }, 51 | }) 52 | 53 | await client.beta.threads.messages.create(thread.id, { 54 | role: 'user', 55 | content: 'What is the weather in San Francisco, CA? In celsius.' 
56 | }) 57 | 58 | const run = await client.beta.threads.runs.createAndPoll( 59 | thread.id, 60 | { 61 | assistant_id: assistantId, 62 | instructions: 'Use the get_current_weather and then answer the message.', 63 | model: 'claude-3-5-sonnet-20240620', 64 | tools, 65 | truncation_strategy: { 66 | type: 'last_messages', 67 | last_messages: 10, 68 | }, 69 | }, 70 | ) 71 | 72 | if (!run.required_action) { 73 | throw new Error('No requires action event') 74 | } 75 | 76 | const toolCallId = run.required_action.submit_tool_outputs.tool_calls[0].id 77 | 78 | await client.beta.threads.runs.submitToolOutputs( 79 | thread.id, 80 | run.id, 81 | { 82 | tool_outputs: [ 83 | { 84 | tool_call_id: toolCallId, 85 | output: '70 degrees and sunny.', 86 | }, 87 | ], 88 | } 89 | ) 90 | 91 | await new Promise(r => setTimeout(r, 5000)) 92 | 93 | const threadMessages = await client.beta.threads.messages.list(thread.id, { limit: 10 }) 94 | 95 | return NextResponse.json({ 96 | threadMessages, 97 | }) 98 | } 99 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/anthropic/stream/route.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | anthropicClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import Anthropic from '@anthropic-ai/sdk' 10 | import { prisma } from '@/lib/prisma' 11 | 12 | const tools = [ 13 | { 14 | "type": "function", 15 | "function": { 16 | "name": "get_current_weather", 17 | "description": "Get the current weather in a given location", 18 | "parameters": { 19 | "type": "object", 20 | "properties": { 21 | "location": { 22 | "type": "string", 23 | "description": "The city and state, e.g. 
San Francisco, CA", 24 | }, 25 | "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, 26 | }, 27 | "required": ["location"], 28 | }, 29 | } 30 | } 31 | ] as OpenAI.Beta.AssistantTool[] 32 | 33 | export const GET = async () => { 34 | const client = supercompat({ 35 | client: anthropicClientAdapter({ 36 | anthropic: new Anthropic(), 37 | }), 38 | storage: prismaStorageAdapter({ 39 | prisma, 40 | }), 41 | runAdapter: completionsRunAdapter(), 42 | }) 43 | 44 | const assistantId = 'b7fd7a65-3504-4ad3-95a0-b83a8eaff0f3' 45 | 46 | const thread = await client.beta.threads.create({ 47 | messages: [ 48 | { 49 | role: 'assistant', 50 | content: '\n\n' 51 | }, 52 | { 53 | role: 'assistant', 54 | content: '' 55 | }, 56 | { 57 | role: 'assistant', 58 | content: 'What is the weather in San Francisco, CA? In celsius.' 59 | }, 60 | { 61 | role: 'assistant', 62 | content: 'What is the weather in San Francisco, CA? In celsius.' 63 | }, 64 | ], 65 | metadata: { 66 | assistantId, 67 | }, 68 | }) 69 | 70 | await client.beta.threads.messages.create(thread.id, { 71 | role: 'user', 72 | content: [{ 73 | type: 'text', 74 | text: 'What is the weather in SF? 
Use get_current_weather', 75 | }], 76 | }) 77 | 78 | const run = await client.beta.threads.runs.create( 79 | thread.id, 80 | { 81 | assistant_id: assistantId, 82 | instructions: 'Use the get_current_weather and then answer the message.', 83 | model: 'claude-3-5-sonnet-20240620', 84 | stream: true, 85 | tools, 86 | truncation_strategy: { 87 | type: 'last_messages', 88 | last_messages: 10, 89 | }, 90 | }, 91 | ) 92 | 93 | let requiresActionEvent 94 | 95 | for await (const event of run) { 96 | if (event.event === 'thread.run.requires_action') { 97 | requiresActionEvent = event 98 | } 99 | } 100 | 101 | if (!requiresActionEvent) { 102 | throw new Error('No requires action event') 103 | } 104 | 105 | const toolCallId = requiresActionEvent.data.required_action?.submit_tool_outputs.tool_calls[0].id 106 | 107 | const submitToolOutputsRun = await client.beta.threads.runs.submitToolOutputs( 108 | thread.id, 109 | requiresActionEvent.data.id, 110 | { 111 | stream: true, 112 | tool_outputs: [ 113 | { 114 | tool_call_id: toolCallId, 115 | output: '70 degrees and sunny.', 116 | }, 117 | ], 118 | } 119 | ) 120 | 121 | for await (const _event of submitToolOutputsRun) { 122 | } 123 | 124 | const threadMessages = await client.beta.threads.messages.list(thread.id, { limit: 10 }) 125 | 126 | return NextResponse.json({ 127 | threadMessages, 128 | }) 129 | } 130 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/azure-openai/models/route.ts: -------------------------------------------------------------------------------- 1 | import { AzureOpenAI } from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | azureOpenaiClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { prisma } from '@/lib/prisma' 10 | 11 | export const GET = async () => { 12 | const client = supercompat({ 13 | client: azureOpenaiClientAdapter({ 14 | azureOpenai: new 
AzureOpenAI({ 15 | endpoint: process.env.EXAMPLE_AZURE_OPENAI_ENDPOINT, 16 | apiVersion: '2024-09-01-preview', 17 | }), 18 | }), 19 | storage: prismaStorageAdapter({ 20 | prisma, 21 | }), 22 | runAdapter: completionsRunAdapter(), 23 | }) 24 | 25 | const response = await client.models.list() 26 | 27 | const models = [] 28 | 29 | for await (const model of response) { 30 | models.push(model) 31 | } 32 | 33 | return NextResponse.json({ 34 | models, 35 | }) 36 | } 37 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/azure-openai/stream/assistants-api-storage/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI, { AzureOpenAI } from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | azureOpenaiClientAdapter, 6 | } from 'supercompat' 7 | 8 | const tools = [ 9 | { 10 | "type": "function", 11 | "function": { 12 | "name": "get_current_weather", 13 | "description": "Get the current weather in a given location", 14 | "parameters": { 15 | "type": "object", 16 | "properties": { 17 | "location": { 18 | "type": "string", 19 | "description": "The city and state, e.g. 
San Francisco, CA", 20 | }, 21 | "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, 22 | }, 23 | "required": ["location"], 24 | }, 25 | } 26 | } 27 | ] as OpenAI.Beta.AssistantTool[] 28 | 29 | export const GET = async () => { 30 | const client = supercompat({ 31 | client: azureOpenaiClientAdapter({ 32 | azureOpenai: new AzureOpenAI({ 33 | endpoint: process.env.EXAMPLE_AZURE_OPENAI_ENDPOINT, 34 | apiVersion: '2024-09-01-preview', 35 | fetch: (url: RequestInfo, init?: RequestInit): Promise => ( 36 | fetch(url, { 37 | ...(init || {}), 38 | cache: 'no-store', 39 | // @ts-ignore-next-line 40 | duplex: 'half', 41 | }) 42 | ), 43 | }), 44 | }), 45 | }) 46 | 47 | const assistantId = 'asst_ZrKBc3znUGrm6L0cKzSpfqXG' 48 | 49 | const thread = await client.beta.threads.create({ 50 | messages: [], 51 | metadata: { 52 | assistantId, 53 | }, 54 | }) 55 | 56 | await client.beta.threads.messages.create(thread.id, { 57 | role: 'user', 58 | content: 'What is the weather in SF?' 59 | }) 60 | 61 | const run = await client.beta.threads.runs.create( 62 | thread.id, 63 | { 64 | assistant_id: assistantId, 65 | instructions: 'Use the get_current_weather and then answer the message.', 66 | model: 'gpt-4o-mini', 67 | stream: true, 68 | tools, 69 | truncation_strategy: { 70 | type: 'last_messages', 71 | last_messages: 10, 72 | }, 73 | }, 74 | ) 75 | 76 | let requiresActionEvent 77 | 78 | let lastEvent 79 | 80 | for await (const event of run) { 81 | if (event.event === 'thread.run.requires_action') { 82 | requiresActionEvent = event 83 | } 84 | lastEvent = event 85 | } 86 | 87 | if (!requiresActionEvent) { 88 | console.dir({ lastEvent }, { depth: null }) 89 | throw new Error('No requires action event') 90 | } 91 | 92 | const toolCallId = requiresActionEvent.data.required_action?.submit_tool_outputs.tool_calls[0].id 93 | 94 | const submitToolOutputsRun = await client.beta.threads.runs.submitToolOutputs( 95 | thread.id, 96 | requiresActionEvent.data.id, 97 | { 98 | stream: true, 99 | 
tool_outputs: [ 100 | { 101 | tool_call_id: toolCallId, 102 | output: '70 degrees and sunny.', 103 | }, 104 | ], 105 | } 106 | ) 107 | 108 | for await (const _event of submitToolOutputsRun) { 109 | } 110 | 111 | const threadMessages = await client.beta.threads.messages.list(thread.id, { limit: 10 }) 112 | 113 | return NextResponse.json({ 114 | threadMessages, 115 | }) 116 | } 117 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/azure-openai/stream/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI, { AzureOpenAI } from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | azureOpenaiClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { prisma } from '@/lib/prisma' 10 | 11 | const tools = [ 12 | { 13 | "type": "function", 14 | "function": { 15 | "name": "get_current_weather", 16 | "description": "Get the current weather in a given location", 17 | "parameters": { 18 | "type": "object", 19 | "properties": { 20 | "location": { 21 | "type": "string", 22 | "description": "The city and state, e.g. 
San Francisco, CA", 23 | }, 24 | "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, 25 | }, 26 | "required": ["location"], 27 | }, 28 | } 29 | } 30 | ] as OpenAI.Beta.AssistantTool[] 31 | 32 | export const GET = async () => { 33 | const client = supercompat({ 34 | client: azureOpenaiClientAdapter({ 35 | azureOpenai: new AzureOpenAI({ 36 | endpoint: process.env.EXAMPLE_AZURE_OPENAI_ENDPOINT, 37 | apiVersion: '2024-09-01-preview', 38 | }), 39 | }), 40 | storage: prismaStorageAdapter({ 41 | prisma, 42 | }), 43 | runAdapter: completionsRunAdapter(), 44 | }) 45 | 46 | const assistantId = 'b7fd7a65-3504-4ad3-95a0-b83a8eaff0f3' 47 | 48 | const thread = await client.beta.threads.create({ 49 | messages: [], 50 | metadata: { 51 | assistantId, 52 | }, 53 | }) 54 | 55 | await client.beta.threads.messages.create(thread.id, { 56 | role: 'user', 57 | content: 'What is the weather in SF?' 58 | }) 59 | 60 | const run = await client.beta.threads.runs.create( 61 | thread.id, 62 | { 63 | assistant_id: assistantId, 64 | instructions: 'Use the get_current_weather and then answer the message.', 65 | model: 'gpt-4o-mini', 66 | stream: true, 67 | tools, 68 | truncation_strategy: { 69 | type: 'last_messages', 70 | last_messages: 10, 71 | }, 72 | }, 73 | ) 74 | 75 | let requiresActionEvent 76 | 77 | for await (const event of run) { 78 | if (event.event === 'thread.run.requires_action') { 79 | requiresActionEvent = event 80 | } 81 | } 82 | 83 | if (!requiresActionEvent) { 84 | throw new Error('No requires action event') 85 | } 86 | 87 | const toolCallId = requiresActionEvent.data.required_action?.submit_tool_outputs.tool_calls[0].id 88 | 89 | const submitToolOutputsRun = await client.beta.threads.runs.submitToolOutputs( 90 | thread.id, 91 | requiresActionEvent.data.id, 92 | { 93 | stream: true, 94 | tool_outputs: [ 95 | { 96 | tool_call_id: toolCallId, 97 | output: '70 degrees and sunny.', 98 | }, 99 | ], 100 | } 101 | ) 102 | 103 | for await (const _event of submitToolOutputsRun) 
{ 104 | } 105 | 106 | const threadMessages = await client.beta.threads.messages.list(thread.id, { limit: 10 }) 107 | 108 | return NextResponse.json({ 109 | threadMessages, 110 | }) 111 | } 112 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/google/models/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | googleClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { prisma } from '@/lib/prisma' 10 | 11 | export const GET = async () => { 12 | const client = supercompat({ 13 | client: googleClientAdapter({ 14 | google: new OpenAI({ 15 | apiKey: process.env.GOOGLE_API_KEY, 16 | baseURL: 'https://generativelanguage.googleapis.com/v1beta/openai/', 17 | }), 18 | }), 19 | storage: prismaStorageAdapter({ 20 | prisma, 21 | }), 22 | runAdapter: completionsRunAdapter(), 23 | }) 24 | 25 | const response = await client.models.list() 26 | // console.dir({ response }, { depth: null }) 27 | 28 | const models = [] 29 | 30 | for await (const model of response) { 31 | models.push(model) 32 | } 33 | 34 | return NextResponse.json({ 35 | models, 36 | }) 37 | } 38 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/google/poll/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | googleClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { prisma } from '@/lib/prisma' 10 | 11 | const tools = [ 12 | { 13 | "type": "function", 14 | "function": { 15 | "name": "get_current_weather", 16 | "description": "Get the current weather in a given location", 17 | 
"parameters": { 18 | "type": "object", 19 | "properties": { 20 | "location": { 21 | "type": "string", 22 | "description": "The city and state, e.g. San Francisco, CA", 23 | }, 24 | "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, 25 | }, 26 | "required": ["location"], 27 | }, 28 | } 29 | } 30 | ] as OpenAI.Beta.AssistantTool[] 31 | 32 | export const GET = async () => { 33 | const client = supercompat({ 34 | client: googleClientAdapter({ 35 | google: new OpenAI({ 36 | apiKey: process.env.GOOGLE_API_KEY, 37 | baseURL: 'https://generativelanguage.googleapis.com/v1beta/openai/', 38 | }), 39 | }), 40 | storage: prismaStorageAdapter({ 41 | prisma, 42 | }), 43 | runAdapter: completionsRunAdapter(), 44 | }) 45 | 46 | const assistantId = 'b7fd7a65-3504-4ad3-95a0-b83a8eaff0f3' 47 | 48 | const thread = await client.beta.threads.create({ 49 | messages: [], 50 | metadata: { 51 | assistantId, 52 | }, 53 | }) 54 | 55 | await client.beta.threads.messages.create(thread.id, { 56 | role: 'user', 57 | content: 'What is the weather in SF?' 
58 | }) 59 | 60 | const run = await client.beta.threads.runs.createAndPoll( 61 | thread.id, 62 | { 63 | assistant_id: assistantId, 64 | instructions: 'Use the get_current_weather and then answer the message.', 65 | model: 'gemini-2.0-flash-exp', 66 | tools, 67 | truncation_strategy: { 68 | type: 'last_messages', 69 | last_messages: 10, 70 | }, 71 | }, 72 | ) 73 | 74 | if (!run.required_action) { 75 | throw new Error('No requires action event') 76 | } 77 | 78 | const toolCallId = run.required_action.submit_tool_outputs.tool_calls[0].id 79 | 80 | await client.beta.threads.runs.submitToolOutputs( 81 | thread.id, 82 | run.id, 83 | { 84 | tool_outputs: [ 85 | { 86 | tool_call_id: toolCallId, 87 | output: '70 degrees and sunny.', 88 | }, 89 | ], 90 | } 91 | ) 92 | 93 | await new Promise(r => setTimeout(r, 5000)) 94 | 95 | const threadMessages = await client.beta.threads.messages.list(thread.id, { limit: 10 }) 96 | 97 | return NextResponse.json({ 98 | threadMessages, 99 | }) 100 | } 101 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/google/stream/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | googleClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { prisma } from '@/lib/prisma' 10 | 11 | const tools = [ 12 | { 13 | "type": "function", 14 | "function": { 15 | "name": "get_current_weather", 16 | "description": "Get the current weather in a given location", 17 | "parameters": { 18 | "type": "object", 19 | "properties": { 20 | "location": { 21 | "type": "string", 22 | "description": "The city and state, e.g. 
San Francisco, CA", 23 | }, 24 | "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, 25 | }, 26 | "required": ["location"], 27 | }, 28 | } 29 | } 30 | ] as OpenAI.Beta.AssistantTool[] 31 | 32 | export const GET = async () => { 33 | const client = supercompat({ 34 | client: googleClientAdapter({ 35 | google: new OpenAI({ 36 | apiKey: process.env.GOOGLE_API_KEY, 37 | baseURL: 'https://generativelanguage.googleapis.com/v1beta/openai/', 38 | }), 39 | }), 40 | storage: prismaStorageAdapter({ 41 | prisma, 42 | }), 43 | runAdapter: completionsRunAdapter(), 44 | }) 45 | 46 | const assistantId = 'b7fd7a65-3504-4ad3-95a0-b83a8eaff0f3' 47 | 48 | const thread = await client.beta.threads.create({ 49 | messages: [], 50 | metadata: { 51 | assistantId, 52 | }, 53 | }) 54 | 55 | await client.beta.threads.messages.create(thread.id, { 56 | role: 'user', 57 | content: 'What is the weather in SF? Use get_current_weather' 58 | }) 59 | 60 | const run = await client.beta.threads.runs.create( 61 | thread.id, 62 | { 63 | assistant_id: assistantId, 64 | instructions: 'Use the get_current_weather and then answer the message.', 65 | model: 'gemini-2.0-flash-exp', 66 | stream: true, 67 | tools, 68 | truncation_strategy: { 69 | type: 'last_messages', 70 | last_messages: 10, 71 | }, 72 | }, 73 | ) 74 | 75 | let requiresActionEvent 76 | 77 | for await (const event of run) { 78 | if (event.event === 'thread.run.requires_action') { 79 | requiresActionEvent = event 80 | } 81 | } 82 | 83 | if (!requiresActionEvent) { 84 | throw new Error('No requires action event') 85 | } 86 | 87 | const toolCallId = requiresActionEvent.data.required_action?.submit_tool_outputs.tool_calls[0].id 88 | 89 | const submitToolOutputsRun = await client.beta.threads.runs.submitToolOutputs( 90 | thread.id, 91 | requiresActionEvent.data.id, 92 | { 93 | stream: true, 94 | tool_outputs: [ 95 | { 96 | tool_call_id: toolCallId, 97 | output: '70 degrees and sunny.', 98 | }, 99 | ], 100 | } 101 | ) 102 | 103 | for await 
(const _event of submitToolOutputsRun) { 104 | } 105 | 106 | const threadMessages = await client.beta.threads.messages.list(thread.id, { limit: 10 }) 107 | 108 | return NextResponse.json({ 109 | threadMessages, 110 | }) 111 | } 112 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/groq/models/route.ts: -------------------------------------------------------------------------------- 1 | import { NextResponse } from 'next/server' 2 | import { 3 | supercompat, 4 | groqClientAdapter, 5 | prismaStorageAdapter, 6 | completionsRunAdapter, 7 | } from 'supercompat' 8 | import Groq from 'groq-sdk' 9 | import { prisma } from '@/lib/prisma' 10 | 11 | export const GET = async () => { 12 | const client = supercompat({ 13 | client: groqClientAdapter({ 14 | groq: new Groq(), 15 | }), 16 | storage: prismaStorageAdapter({ 17 | prisma, 18 | }), 19 | runAdapter: completionsRunAdapter(), 20 | }) 21 | 22 | const response = await client.models.list() 23 | 24 | const models = [] 25 | 26 | for await (const model of response) { 27 | models.push(model) 28 | } 29 | 30 | return NextResponse.json({ 31 | models, 32 | }) 33 | } 34 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/groq/poll/route.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | groqClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import Groq from 'groq-sdk' 10 | import { prisma } from '@/lib/prisma' 11 | 12 | const tools = [ 13 | { 14 | "type": "function", 15 | "function": { 16 | "name": "get_current_weather", 17 | "description": "Get the current weather in a given location", 18 | "parameters": { 19 | "type": "object", 20 | "properties": { 21 | "location": { 22 | "type": "string", 23 | 
"description": "The city and state, e.g. San Francisco, CA", 24 | }, 25 | "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, 26 | }, 27 | "required": ["location"], 28 | }, 29 | } 30 | } 31 | ] as OpenAI.Beta.AssistantTool[] 32 | 33 | export const GET = async () => { 34 | const client = supercompat({ 35 | client: groqClientAdapter({ 36 | groq: new Groq(), 37 | }), 38 | storage: prismaStorageAdapter({ 39 | prisma, 40 | }), 41 | runAdapter: completionsRunAdapter(), 42 | }) 43 | 44 | const assistantId = 'b7fd7a65-3504-4ad3-95a0-b83a8eaff0f3' 45 | 46 | const thread = await client.beta.threads.create({ 47 | messages: [], 48 | metadata: { 49 | assistantId, 50 | }, 51 | }) 52 | 53 | await client.beta.threads.messages.create(thread.id, { 54 | role: 'user', 55 | content: 'What is the weather in SF?' 56 | }) 57 | 58 | const run = await client.beta.threads.runs.createAndPoll( 59 | thread.id, 60 | { 61 | assistant_id: assistantId, 62 | instructions: 'Use the get_current_weather and then answer the message.', 63 | model: 'llama3-8b-8192', 64 | tools, 65 | truncation_strategy: { 66 | type: 'last_messages', 67 | last_messages: 10, 68 | }, 69 | }, 70 | ) 71 | 72 | if (!run.required_action) { 73 | throw new Error('No requires action event') 74 | } 75 | 76 | const toolCallId = run.required_action.submit_tool_outputs.tool_calls[0].id 77 | 78 | await client.beta.threads.runs.submitToolOutputs( 79 | thread.id, 80 | run.id, 81 | { 82 | tool_outputs: [ 83 | { 84 | tool_call_id: toolCallId, 85 | output: '70 degrees and sunny.', 86 | }, 87 | ], 88 | } 89 | ) 90 | 91 | await new Promise(r => setTimeout(r, 5000)) 92 | 93 | const threadMessages = await client.beta.threads.messages.list(thread.id, { limit: 10 }) 94 | 95 | return NextResponse.json({ 96 | threadMessages, 97 | }) 98 | } 99 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/groq/stream/route.ts: 
-------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | groqClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import Groq from 'groq-sdk' 10 | import { prisma } from '@/lib/prisma' 11 | 12 | const tools = [ 13 | { 14 | "type": "function", 15 | "function": { 16 | "name": "get_current_weather", 17 | "description": "Get the current weather in a given location", 18 | "parameters": { 19 | "type": "object", 20 | "properties": { 21 | "location": { 22 | "type": "string", 23 | "description": "The city and state, e.g. San Francisco, CA", 24 | }, 25 | "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, 26 | }, 27 | "required": ["location"], 28 | }, 29 | } 30 | } 31 | ] as OpenAI.Beta.AssistantTool[] 32 | 33 | export const GET = async () => { 34 | const client = supercompat({ 35 | client: groqClientAdapter({ 36 | groq: new Groq(), 37 | }), 38 | storage: prismaStorageAdapter({ 39 | prisma, 40 | }), 41 | runAdapter: completionsRunAdapter(), 42 | }) 43 | 44 | const assistantId = 'b7fd7a65-3504-4ad3-95a0-b83a8eaff0f3' 45 | 46 | const thread = await client.beta.threads.create({ 47 | messages: [], 48 | metadata: { 49 | assistantId, 50 | }, 51 | }) 52 | 53 | await client.beta.threads.messages.create(thread.id, { 54 | role: 'user', 55 | content: 'What is the weather in SF?' 
56 | }) 57 | 58 | const run = await client.beta.threads.runs.create( 59 | thread.id, 60 | { 61 | assistant_id: assistantId, 62 | instructions: 'Use the get_current_weather and then answer the message.', 63 | model: 'llama3-8b-8192', 64 | stream: true, 65 | tools, 66 | truncation_strategy: { 67 | type: 'last_messages', 68 | last_messages: 10, 69 | }, 70 | }, 71 | ) 72 | 73 | let requiresActionEvent 74 | 75 | for await (const event of run) { 76 | if (event.event === 'thread.run.requires_action') { 77 | requiresActionEvent = event 78 | } 79 | } 80 | 81 | if (!requiresActionEvent) { 82 | throw new Error('No requires action event') 83 | } 84 | 85 | const toolCallId = requiresActionEvent.data.required_action?.submit_tool_outputs.tool_calls[0].id 86 | 87 | const submitToolOutputsRun = await client.beta.threads.runs.submitToolOutputs( 88 | thread.id, 89 | requiresActionEvent.data.id, 90 | { 91 | stream: true, 92 | tool_outputs: [ 93 | { 94 | tool_call_id: toolCallId, 95 | output: '70 degrees and sunny.', 96 | }, 97 | ], 98 | } 99 | ) 100 | 101 | for await (const _event of submitToolOutputsRun) { 102 | } 103 | 104 | const threadMessages = await client.beta.threads.messages.list(thread.id, { limit: 10 }) 105 | 106 | return NextResponse.json({ 107 | threadMessages, 108 | }) 109 | } 110 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/humiris/models/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | humirisClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { prisma } from '@/lib/prisma' 10 | 11 | export const GET = async () => { 12 | const client = supercompat({ 13 | client: humirisClientAdapter({ 14 | humiris: new OpenAI({ 15 | apiKey: process.env.HUMIRIS_API_KEY, 16 | baseURL: 
'https://moai-service-app.humiris.ai/api/openai/v1/', 17 | defaultHeaders: { 18 | 'moai-api-key': process.env.HUMIRIS_API_KEY, 19 | }, 20 | }), 21 | }), 22 | storage: prismaStorageAdapter({ 23 | prisma, 24 | }), 25 | runAdapter: completionsRunAdapter(), 26 | }) 27 | 28 | const response = await client.models.list() 29 | // console.dir({ response }, { depth: null }) 30 | 31 | const models = [] 32 | 33 | for await (const model of response) { 34 | models.push(model) 35 | } 36 | 37 | return NextResponse.json({ 38 | models, 39 | }) 40 | } 41 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/humiris/poll/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | humirisClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { prisma } from '@/lib/prisma' 10 | 11 | const tools = [] as OpenAI.Beta.AssistantTool[] 12 | 13 | export const GET = async () => { 14 | const client = supercompat({ 15 | client: humirisClientAdapter({ 16 | humiris: new OpenAI({ 17 | apiKey: process.env.HUMIRIS_API_KEY, 18 | baseURL: 'https://moai-service-app.humiris.ai/api/openai/v1/', 19 | defaultHeaders: { 20 | 'moai-api-key': process.env.HUMIRIS_API_KEY, 21 | }, 22 | }), 23 | }), 24 | storage: prismaStorageAdapter({ 25 | prisma, 26 | }), 27 | runAdapter: completionsRunAdapter(), 28 | }) 29 | 30 | const assistantId = 'b7fd7a65-3504-4ad3-95a0-b83a8eaff0f3' 31 | 32 | const thread = await client.beta.threads.create({ 33 | messages: [], 34 | metadata: { 35 | assistantId, 36 | }, 37 | }) 38 | 39 | await client.beta.threads.messages.create(thread.id, { 40 | role: 'user', 41 | content: 'What is the weather in SF?' 
42 | }) 43 | 44 | const run = await client.beta.threads.runs.createAndPoll( 45 | thread.id, 46 | { 47 | assistant_id: assistantId, 48 | instructions: 'Answer the message.', 49 | model: 'Humiris/humiris-moai', 50 | tools, 51 | truncation_strategy: { 52 | type: 'last_messages', 53 | last_messages: 10, 54 | }, 55 | }, 56 | ) 57 | 58 | await new Promise(r => setTimeout(r, 5000)) 59 | 60 | const threadMessages = await client.beta.threads.messages.list(thread.id, { limit: 10 }) 61 | 62 | return NextResponse.json({ 63 | threadMessages, 64 | }) 65 | } 66 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/humiris/stream/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | humirisClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { prisma } from '@/lib/prisma' 10 | 11 | const tools = [] as OpenAI.Beta.AssistantTool[] 12 | 13 | export const GET = async () => { 14 | const client = supercompat({ 15 | client: humirisClientAdapter({ 16 | humiris: new OpenAI({ 17 | apiKey: process.env.HUMIRIS_API_KEY, 18 | baseURL: 'https://moai-service-app.humiris.ai/api/openai/v1/', 19 | defaultHeaders: { 20 | 'moai-api-key': process.env.HUMIRIS_API_KEY, 21 | }, 22 | }), 23 | }), 24 | storage: prismaStorageAdapter({ 25 | prisma, 26 | }), 27 | runAdapter: completionsRunAdapter(), 28 | }) 29 | 30 | const assistantId = 'b7fd7a65-3504-4ad3-95a0-b83a8eaff0f3' 31 | 32 | const thread = await client.beta.threads.create({ 33 | messages: [], 34 | metadata: { 35 | assistantId, 36 | }, 37 | }) 38 | 39 | await client.beta.threads.messages.create(thread.id, { 40 | role: 'user', 41 | content: 'What is the weather in SF?' 
42 | }) 43 | 44 | const run = await client.beta.threads.runs.create( 45 | thread.id, 46 | { 47 | assistant_id: assistantId, 48 | instructions: 'Answer the message.', 49 | model: 'Humiris/humiris-moai', 50 | stream: true, 51 | tools, 52 | truncation_strategy: { 53 | type: 'last_messages', 54 | last_messages: 10, 55 | }, 56 | }, 57 | ) 58 | 59 | for await (const _event of run) { 60 | } 61 | 62 | const threadMessages = await client.beta.threads.messages.list(thread.id, { limit: 10 }) 63 | 64 | return NextResponse.json({ 65 | threadMessages, 66 | }) 67 | } 68 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/mistral/models/route.ts: -------------------------------------------------------------------------------- 1 | import { NextResponse } from 'next/server' 2 | import { 3 | supercompat, 4 | mistralClientAdapter, 5 | prismaStorageAdapter, 6 | completionsRunAdapter, 7 | } from 'supercompat' 8 | import { Mistral } from '@mistralai/mistralai' 9 | import { prisma } from '@/lib/prisma' 10 | 11 | export const GET = async () => { 12 | const client = supercompat({ 13 | client: mistralClientAdapter({ 14 | mistral: new Mistral({ 15 | apiKey: process.env.MISTRAL_API_KEY, 16 | }), 17 | }), 18 | storage: prismaStorageAdapter({ 19 | prisma, 20 | }), 21 | runAdapter: completionsRunAdapter(), 22 | }) 23 | 24 | const response = await client.models.list() 25 | 26 | const models = [] 27 | 28 | for await (const model of response) { 29 | models.push(model) 30 | } 31 | 32 | return NextResponse.json({ 33 | models, 34 | }) 35 | } 36 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/mistral/poll/route.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | mistralClientAdapter, 6 | prismaStorageAdapter, 7 | 
completionsRunAdapter, 8 | } from 'supercompat' 9 | import { Mistral } from '@mistralai/mistralai' 10 | import { prisma } from '@/lib/prisma' 11 | 12 | const tools = [ 13 | { 14 | "type": "function", 15 | "function": { 16 | "name": "get_current_weather", 17 | "description": "Get the current weather in a given location", 18 | "parameters": { 19 | "type": "object", 20 | "properties": { 21 | "location": { 22 | "type": "string", 23 | "description": "The city and state, e.g. San Francisco, CA", 24 | }, 25 | "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, 26 | }, 27 | "required": ["location"], 28 | }, 29 | } 30 | } 31 | ] as OpenAI.Beta.AssistantTool[] 32 | 33 | export const GET = async () => { 34 | const client = supercompat({ 35 | client: mistralClientAdapter({ 36 | mistral: new Mistral({ 37 | apiKey: process.env.MISTRAL_API_KEY, 38 | }), 39 | }), 40 | storage: prismaStorageAdapter({ 41 | prisma, 42 | }), 43 | runAdapter: completionsRunAdapter(), 44 | }) 45 | 46 | const assistantId = 'b7fd7a65-3504-4ad3-95a0-b83a8eaff0f3' 47 | 48 | const thread = await client.beta.threads.create({ 49 | messages: [], 50 | metadata: { 51 | assistantId, 52 | }, 53 | }) 54 | 55 | await client.beta.threads.messages.create(thread.id, { 56 | role: 'user', 57 | content: 'What is the weather in SF?' 
58 | }) 59 | 60 | const run = await client.beta.threads.runs.createAndPoll( 61 | thread.id, 62 | { 63 | assistant_id: assistantId, 64 | instructions: 'Use the get_current_weather and then answer the message.', 65 | model: 'mistral-large-latest', 66 | tools, 67 | truncation_strategy: { 68 | type: 'last_messages', 69 | last_messages: 10, 70 | }, 71 | }, 72 | ) 73 | 74 | if (!run.required_action) { 75 | throw new Error('No requires action event') 76 | } 77 | 78 | const toolCallId = run.required_action.submit_tool_outputs.tool_calls[0].id 79 | 80 | await client.beta.threads.runs.submitToolOutputs( 81 | thread.id, 82 | run.id, 83 | { 84 | tool_outputs: [ 85 | { 86 | tool_call_id: toolCallId, 87 | output: '70 degrees and sunny.', 88 | }, 89 | ], 90 | } 91 | ) 92 | 93 | await new Promise(r => setTimeout(r, 5000)) 94 | 95 | const threadMessages = await client.beta.threads.messages.list(thread.id, { limit: 10 }) 96 | 97 | return NextResponse.json({ 98 | threadMessages, 99 | }) 100 | } 101 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/mistral/stream/route.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | mistralClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { Mistral } from '@mistralai/mistralai' 10 | import { prisma } from '@/lib/prisma' 11 | 12 | const tools = [ 13 | { 14 | "type": "function", 15 | "function": { 16 | "name": "get_current_weather", 17 | "description": "Get the current weather in a given location", 18 | "parameters": { 19 | "type": "object", 20 | "properties": { 21 | "location": { 22 | "type": "string", 23 | "description": "The city and state, e.g. 
San Francisco, CA", 24 | }, 25 | "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, 26 | }, 27 | "required": ["location"], 28 | }, 29 | } 30 | } 31 | ] as OpenAI.Beta.AssistantTool[] 32 | 33 | export const GET = async () => { 34 | const client = supercompat({ 35 | client: mistralClientAdapter({ 36 | mistral: new Mistral({ 37 | apiKey: process.env.MISTRAL_API_KEY, 38 | }), 39 | }), 40 | storage: prismaStorageAdapter({ 41 | prisma, 42 | }), 43 | runAdapter: completionsRunAdapter(), 44 | }) 45 | 46 | const assistantId = 'b7fd7a65-3504-4ad3-95a0-b83a8eaff0f3' 47 | 48 | const thread = await client.beta.threads.create({ 49 | messages: [], 50 | metadata: { 51 | assistantId, 52 | }, 53 | }) 54 | 55 | await client.beta.threads.messages.create(thread.id, { 56 | role: 'user', 57 | content: 'What is the weather in San Francisco, CA? In celsius. Use the get_current_weather function.', 58 | }) 59 | 60 | const run = await client.beta.threads.runs.create( 61 | thread.id, 62 | { 63 | assistant_id: assistantId, 64 | instructions: 'Use the get_current_weather and then answer the message.', 65 | model: 'pixtral-large-latest', 66 | stream: true, 67 | tools, 68 | truncation_strategy: { 69 | type: 'last_messages', 70 | last_messages: 10, 71 | }, 72 | }, 73 | ) 74 | 75 | let requiresActionEvent 76 | 77 | for await (const event of run) { 78 | if (event.event === 'thread.run.requires_action') { 79 | requiresActionEvent = event 80 | } 81 | } 82 | 83 | if (!requiresActionEvent) { 84 | throw new Error('No requires action event') 85 | } 86 | 87 | const toolCallId = requiresActionEvent.data.required_action?.submit_tool_outputs.tool_calls[0].id 88 | 89 | const submitToolOutputsRun = await client.beta.threads.runs.submitToolOutputs( 90 | thread.id, 91 | requiresActionEvent.data.id, 92 | { 93 | stream: true, 94 | tool_outputs: [ 95 | { 96 | tool_call_id: toolCallId, 97 | output: '70 degrees and sunny.', 98 | }, 99 | ], 100 | } 101 | ) 102 | 103 | for await (const _event of 
submitToolOutputsRun) { 104 | } 105 | 106 | const threadMessages = await client.beta.threads.messages.list(thread.id, { limit: 10 }) 107 | 108 | return NextResponse.json({ 109 | threadMessages, 110 | }) 111 | } 112 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/ollama/models/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | ollamaClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { prisma } from '@/lib/prisma' 10 | 11 | export const GET = async () => { 12 | const client = supercompat({ 13 | client: ollamaClientAdapter({ 14 | ollama: new OpenAI({ 15 | apiKey: 'ollama', 16 | baseURL: 'https://7274-209-36-2-102.ngrok-free.app/v1/', 17 | }), 18 | }), 19 | storage: prismaStorageAdapter({ 20 | prisma, 21 | }), 22 | runAdapter: completionsRunAdapter(), 23 | }) 24 | 25 | const response = await client.models.list() 26 | // console.dir({ response }, { depth: null }) 27 | 28 | const models = [] 29 | 30 | for await (const model of response) { 31 | models.push(model) 32 | } 33 | 34 | return NextResponse.json({ 35 | models, 36 | }) 37 | } 38 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/openai/list/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | openaiClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { prisma } from '@/lib/prisma' 10 | 11 | export const GET = async () => { 12 | const client = supercompat({ 13 | client: openaiClientAdapter({ 14 | openai: new OpenAI({ 15 | apiKey: process.env.RENAMED_OPENAI_API_KEY!, 16 | }), 17 
| }), 18 | storage: prismaStorageAdapter({ 19 | prisma, 20 | }), 21 | runAdapter: completionsRunAdapter(), 22 | }) 23 | 24 | const threadId = '1ea8b616-fcec-4f0c-a3cb-9df81b67a241' 25 | const threadMessages = await client.beta.threads.messages.list(threadId, { limit: 10 }) 26 | 27 | const hasNextPage = threadMessages.hasNextPage() 28 | 29 | return NextResponse.json({ 30 | threadMessages, 31 | hasNextPage, 32 | }) 33 | } 34 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/openai/models/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | openaiClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { prisma } from '@/lib/prisma' 10 | 11 | export const GET = async () => { 12 | const client = supercompat({ 13 | client: openaiClientAdapter({ 14 | openai: new OpenAI({ 15 | apiKey: process.env.RENAMED_OPENAI_API_KEY!, 16 | }), 17 | }), 18 | storage: prismaStorageAdapter({ 19 | prisma, 20 | }), 21 | runAdapter: completionsRunAdapter(), 22 | }) 23 | 24 | const response = await client.models.list() 25 | 26 | const models = [] 27 | 28 | for await (const model of response) { 29 | models.push(model) 30 | } 31 | 32 | return NextResponse.json({ 33 | models, 34 | }) 35 | } 36 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/openai/poll/assistants-api-storage/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | openaiClientAdapter, 6 | } from 'supercompat' 7 | 8 | const tools = [ 9 | { 10 | "type": "function", 11 | "function": { 12 | "name": "get_current_weather", 13 | "description": "Get the current 
weather in a given location", 14 | "parameters": { 15 | "type": "object", 16 | "properties": { 17 | "location": { 18 | "type": "string", 19 | "description": "The city and state, e.g. San Francisco, CA", 20 | }, 21 | "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, 22 | }, 23 | "required": ["location"], 24 | }, 25 | } 26 | } 27 | ] as OpenAI.Beta.AssistantTool[] 28 | 29 | export const GET = async () => { 30 | const client = supercompat({ 31 | client: openaiClientAdapter({ 32 | openai: new OpenAI({ 33 | apiKey: process.env.RENAMED_OPENAI_API_KEY!, 34 | fetch: (url: RequestInfo, init?: RequestInit): Promise => ( 35 | fetch(url, { 36 | ...(init || {}), 37 | cache: 'no-store', 38 | // @ts-ignore-next-line 39 | duplex: 'half', 40 | }) 41 | ), 42 | }), 43 | }), 44 | }) 45 | 46 | const assistantId = 'asst_nnbyhkbrhNpRUtVXKLtCY41j' 47 | 48 | const thread = await client.beta.threads.create({ 49 | messages: [], 50 | metadata: { 51 | assistantId, 52 | }, 53 | }) 54 | 55 | await client.beta.threads.messages.create(thread.id, { 56 | role: 'user', 57 | content: 'What is the weather in SF?' 
58 | }) 59 | 60 | const run = await client.beta.threads.runs.createAndPoll( 61 | thread.id, 62 | { 63 | assistant_id: assistantId, 64 | instructions: 'Use the get_current_weather and then answer the message.', 65 | model: 'gpt-3.5-turbo', 66 | tools, 67 | truncation_strategy: { 68 | type: 'last_messages', 69 | last_messages: 10, 70 | }, 71 | }, 72 | ) 73 | 74 | if (!run.required_action) { 75 | throw new Error('No requires action event') 76 | } 77 | 78 | const toolCallId = run.required_action.submit_tool_outputs.tool_calls[0].id 79 | 80 | await client.beta.threads.runs.submitToolOutputs( 81 | thread.id, 82 | run.id, 83 | { 84 | tool_outputs: [ 85 | { 86 | tool_call_id: toolCallId, 87 | output: '70 degrees and sunny.', 88 | }, 89 | ], 90 | } 91 | ) 92 | 93 | await new Promise(r => setTimeout(r, 5000)) 94 | 95 | const threadMessages = await client.beta.threads.messages.list(thread.id, { limit: 10 }) 96 | 97 | return NextResponse.json({ 98 | threadMessages, 99 | }) 100 | } 101 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/openai/poll/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | openaiClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { prisma } from '@/lib/prisma' 10 | 11 | const tools = [ 12 | { 13 | "type": "function", 14 | "function": { 15 | "name": "get_current_weather", 16 | "description": "Get the current weather in a given location", 17 | "parameters": { 18 | "type": "object", 19 | "properties": { 20 | "location": { 21 | "type": "string", 22 | "description": "The city and state, e.g. 
San Francisco, CA", 23 | }, 24 | "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, 25 | }, 26 | "required": ["location"], 27 | }, 28 | } 29 | } 30 | ] as OpenAI.Beta.AssistantTool[] 31 | 32 | export const GET = async () => { 33 | const client = supercompat({ 34 | client: openaiClientAdapter({ 35 | openai: new OpenAI({ 36 | apiKey: process.env.RENAMED_OPENAI_API_KEY!, 37 | }), 38 | }), 39 | storage: prismaStorageAdapter({ 40 | prisma, 41 | }), 42 | runAdapter: completionsRunAdapter(), 43 | }) 44 | 45 | const assistantId = 'b7fd7a65-3504-4ad3-95a0-b83a8eaff0f3' 46 | 47 | const thread = await client.beta.threads.create({ 48 | messages: [], 49 | metadata: { 50 | assistantId, 51 | }, 52 | }) 53 | 54 | await client.beta.threads.messages.create(thread.id, { 55 | role: 'user', 56 | content: 'What is the weather in SF?' 57 | }) 58 | 59 | const run = await client.beta.threads.runs.createAndPoll( 60 | thread.id, 61 | { 62 | assistant_id: assistantId, 63 | instructions: 'Use the get_current_weather and then answer the message.', 64 | model: 'gpt-3.5-turbo', 65 | tools, 66 | truncation_strategy: { 67 | type: 'last_messages', 68 | last_messages: 10, 69 | }, 70 | }, 71 | ) 72 | 73 | if (!run.required_action) { 74 | throw new Error('No requires action event') 75 | } 76 | 77 | const toolCallId = run.required_action.submit_tool_outputs.tool_calls[0].id 78 | 79 | await client.beta.threads.runs.submitToolOutputs( 80 | thread.id, 81 | run.id, 82 | { 83 | tool_outputs: [ 84 | { 85 | tool_call_id: toolCallId, 86 | output: '70 degrees and sunny.', 87 | }, 88 | ], 89 | } 90 | ) 91 | 92 | await new Promise(r => setTimeout(r, 5000)) 93 | 94 | const threadMessages = await client.beta.threads.messages.list(thread.id, { limit: 10 }) 95 | 96 | return NextResponse.json({ 97 | threadMessages, 98 | }) 99 | } 100 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/openai/steps/list/route.ts: 
-------------------------------------------------------------------------------- 1 | import OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | openaiClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { prisma } from '@/lib/prisma' 10 | 11 | export const GET = async () => { 12 | const client = supercompat({ 13 | client: openaiClientAdapter({ 14 | openai: new OpenAI({ 15 | apiKey: process.env.RENAMED_OPENAI_API_KEY!, 16 | }), 17 | }), 18 | storage: prismaStorageAdapter({ 19 | prisma, 20 | }), 21 | runAdapter: completionsRunAdapter(), 22 | }) 23 | 24 | const threadId = '1ea8b616-fcec-4f0c-a3cb-9df81b67a241' 25 | const runId = '776fb850-d517-472a-85cc-49faa06b01c7' 26 | // const threadMessages = await client.beta.threads.messages.list(threadId, { limit: 10 }) 27 | const runStepsResponse = await client.beta.threads.runs.steps.list( 28 | threadId, 29 | runId, 30 | ) 31 | 32 | 33 | return NextResponse.json({ 34 | runStepsResponse, 35 | }) 36 | } 37 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/openai/stream/assistants-api-storage/o1/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | openaiClientAdapter, 6 | } from 'supercompat' 7 | 8 | const tools = [ 9 | { 10 | "type": "function", 11 | "function": { 12 | "name": "get_current_weather", 13 | "description": "Get the current weather in a given location", 14 | "parameters": { 15 | "type": "object", 16 | "properties": { 17 | "location": { 18 | "type": "string", 19 | "description": "The city and state, e.g. 
San Francisco, CA", 20 | }, 21 | "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, 22 | }, 23 | "required": ["location"], 24 | }, 25 | } 26 | } 27 | ] as OpenAI.Beta.AssistantTool[] 28 | 29 | export const GET = async () => { 30 | const client = supercompat({ 31 | client: openaiClientAdapter({ 32 | openai: new OpenAI({ 33 | apiKey: process.env.RENAMED_OPENAI_API_KEY!, 34 | fetch: (url: RequestInfo, init?: RequestInit): Promise => ( 35 | fetch(url, { 36 | ...(init || {}), 37 | cache: 'no-store', 38 | // @ts-ignore-next-line 39 | duplex: 'half', 40 | }) 41 | ), 42 | }), 43 | }), 44 | }) 45 | 46 | const assistantId = 'asst_nnbyhkbrhNpRUtVXKLtCY41j' 47 | 48 | const thread = await client.beta.threads.create({ 49 | messages: [], 50 | metadata: { 51 | assistantId, 52 | }, 53 | }) 54 | 55 | await client.beta.threads.messages.create(thread.id, { 56 | role: 'user', 57 | content: 'What is the weather in SF?' 58 | }) 59 | 60 | const run = await client.beta.threads.runs.create( 61 | thread.id, 62 | { 63 | assistant_id: assistantId, 64 | instructions: 'Use the get_current_weather and then answer the message.', 65 | model: 'o1-preview', 66 | stream: true, 67 | tools, 68 | truncation_strategy: { 69 | type: 'last_messages', 70 | last_messages: 10, 71 | }, 72 | }, 73 | ) 74 | 75 | let requiresActionEvent 76 | 77 | for await (const event of run) { 78 | if (event.event === 'thread.run.requires_action') { 79 | requiresActionEvent = event 80 | } 81 | } 82 | 83 | if (!requiresActionEvent) { 84 | throw new Error('No requires action event') 85 | } 86 | 87 | const toolCallId = requiresActionEvent.data.required_action?.submit_tool_outputs.tool_calls[0].id 88 | 89 | const submitToolOutputsRun = await client.beta.threads.runs.submitToolOutputs( 90 | thread.id, 91 | requiresActionEvent.data.id, 92 | { 93 | stream: true, 94 | tool_outputs: [ 95 | { 96 | tool_call_id: toolCallId, 97 | output: '70 degrees and sunny.', 98 | }, 99 | ], 100 | } 101 | ) 102 | 103 | for await (const _event 
of submitToolOutputsRun) { 104 | } 105 | 106 | const threadMessages = await client.beta.threads.messages.list(thread.id, { limit: 10 }) 107 | 108 | return NextResponse.json({ 109 | threadMessages, 110 | }) 111 | } 112 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/openai/stream/assistants-api-storage/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | openaiClientAdapter, 6 | } from 'supercompat' 7 | 8 | const tools = [ 9 | { 10 | "type": "function", 11 | "function": { 12 | "name": "get_current_weather", 13 | "description": "Get the current weather in a given location", 14 | "parameters": { 15 | "type": "object", 16 | "properties": { 17 | "location": { 18 | "type": "string", 19 | "description": "The city and state, e.g. San Francisco, CA", 20 | }, 21 | "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, 22 | }, 23 | "required": ["location"], 24 | }, 25 | } 26 | } 27 | ] as OpenAI.Beta.AssistantTool[] 28 | 29 | export const GET = async () => { 30 | const client = supercompat({ 31 | client: openaiClientAdapter({ 32 | openai: new OpenAI({ 33 | apiKey: process.env.RENAMED_OPENAI_API_KEY!, 34 | fetch: (url: RequestInfo, init?: RequestInit): Promise => ( 35 | fetch(url, { 36 | ...(init || {}), 37 | cache: 'no-store', 38 | // @ts-ignore-next-line 39 | duplex: 'half', 40 | }) 41 | ), 42 | }), 43 | }), 44 | }) 45 | 46 | const assistantId = 'asst_nnbyhkbrhNpRUtVXKLtCY41j' 47 | 48 | const thread = await client.beta.threads.create({ 49 | messages: [], 50 | metadata: { 51 | assistantId, 52 | }, 53 | }) 54 | 55 | await client.beta.threads.messages.create(thread.id, { 56 | role: 'user', 57 | content: 'What is the weather in SF?' 
58 | }) 59 | 60 | const run = await client.beta.threads.runs.create( 61 | thread.id, 62 | { 63 | assistant_id: assistantId, 64 | instructions: 'Use the get_current_weather and then answer the message.', 65 | model: 'gpt-3.5-turbo', 66 | stream: true, 67 | tools, 68 | truncation_strategy: { 69 | type: 'last_messages', 70 | last_messages: 10, 71 | }, 72 | }, 73 | ) 74 | 75 | let requiresActionEvent 76 | 77 | for await (const event of run) { 78 | if (event.event === 'thread.run.requires_action') { 79 | requiresActionEvent = event 80 | } 81 | } 82 | 83 | if (!requiresActionEvent) { 84 | throw new Error('No requires action event') 85 | } 86 | 87 | const toolCallId = requiresActionEvent.data.required_action?.submit_tool_outputs.tool_calls[0].id 88 | 89 | const submitToolOutputsRun = await client.beta.threads.runs.submitToolOutputs( 90 | thread.id, 91 | requiresActionEvent.data.id, 92 | { 93 | stream: true, 94 | tool_outputs: [ 95 | { 96 | tool_call_id: toolCallId, 97 | output: '70 degrees and sunny.', 98 | }, 99 | ], 100 | } 101 | ) 102 | 103 | for await (const _event of submitToolOutputsRun) { 104 | } 105 | 106 | const threadMessages = await client.beta.threads.messages.list(thread.id, { limit: 10 }) 107 | 108 | return NextResponse.json({ 109 | threadMessages, 110 | }) 111 | } 112 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/openai/stream/o1/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | openaiClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { prisma } from '@/lib/prisma' 10 | 11 | const tools = [ 12 | { 13 | "type": "function", 14 | "function": { 15 | "name": "get_current_weather", 16 | "description": "Get the current weather in a given location", 17 | "parameters": { 18 | "type": 
"object", 19 | "properties": { 20 | "location": { 21 | "type": "string", 22 | "description": "The city and state, e.g. San Francisco, CA", 23 | }, 24 | "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, 25 | }, 26 | "required": ["location"], 27 | }, 28 | } 29 | } 30 | ] as OpenAI.Beta.AssistantTool[] 31 | 32 | export const GET = async () => { 33 | const client = supercompat({ 34 | client: openaiClientAdapter({ 35 | openai: new OpenAI({ 36 | apiKey: process.env.RENAMED_OPENAI_API_KEY!, 37 | }), 38 | }), 39 | storage: prismaStorageAdapter({ 40 | prisma, 41 | }), 42 | runAdapter: completionsRunAdapter(), 43 | }) 44 | 45 | const assistantId = 'b7fd7a65-3504-4ad3-95a0-b83a8eaff0f3' 46 | 47 | const thread = await client.beta.threads.create({ 48 | messages: [], 49 | metadata: { 50 | assistantId, 51 | }, 52 | }) 53 | 54 | await client.beta.threads.messages.create(thread.id, { 55 | role: 'user', 56 | content: 'What is the weather in SF?' 57 | }) 58 | 59 | const run = await client.beta.threads.runs.create( 60 | thread.id, 61 | { 62 | assistant_id: assistantId, 63 | instructions: 'Use the get_current_weather and then answer the message.', 64 | model: 'o1-preview', 65 | stream: true, 66 | tools, 67 | truncation_strategy: { 68 | type: 'last_messages', 69 | last_messages: 10, 70 | }, 71 | }, 72 | ) 73 | 74 | for await (const _event of run) { 75 | } 76 | 77 | const threadMessages = await client.beta.threads.messages.list(thread.id, { limit: 10 }) 78 | 79 | return NextResponse.json({ 80 | threadMessages, 81 | }) 82 | } 83 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/openai/stream/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | openaiClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { prisma 
} from '@/lib/prisma' 10 | 11 | const tools = [ 12 | { 13 | "type": "function", 14 | "function": { 15 | "name": "get_current_weather", 16 | "description": "Get the current weather in a given location", 17 | "parameters": { 18 | "type": "object", 19 | "properties": { 20 | "location": { 21 | "type": "string", 22 | "description": "The city and state, e.g. San Francisco, CA", 23 | }, 24 | "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, 25 | }, 26 | "required": ["location"], 27 | }, 28 | } 29 | } 30 | ] as OpenAI.Beta.AssistantTool[] 31 | 32 | export const GET = async () => { 33 | const client = supercompat({ 34 | client: openaiClientAdapter({ 35 | openai: new OpenAI({ 36 | apiKey: process.env.RENAMED_OPENAI_API_KEY!, 37 | }), 38 | }), 39 | storage: prismaStorageAdapter({ 40 | prisma, 41 | }), 42 | runAdapter: completionsRunAdapter(), 43 | }) 44 | 45 | const assistantId = 'b7fd7a65-3504-4ad3-95a0-b83a8eaff0f3' 46 | 47 | const thread = await client.beta.threads.create({ 48 | messages: [], 49 | metadata: { 50 | assistantId, 51 | }, 52 | }) 53 | 54 | await client.beta.threads.messages.create(thread.id, { 55 | role: 'user', 56 | content: [{ 57 | type: 'text', 58 | text: 'What is the weather in SF?', 59 | }], 60 | }) 61 | 62 | const run = await client.beta.threads.runs.create( 63 | thread.id, 64 | { 65 | assistant_id: assistantId, 66 | instructions: 'Use the get_current_weather and then answer the message.', 67 | model: 'gpt-3.5-turbo', 68 | stream: true, 69 | tools, 70 | truncation_strategy: { 71 | type: 'last_messages', 72 | last_messages: 10, 73 | }, 74 | }, 75 | ) 76 | 77 | let requiresActionEvent 78 | 79 | for await (const event of run) { 80 | if (event.event === 'thread.run.requires_action') { 81 | requiresActionEvent = event 82 | } 83 | } 84 | 85 | if (!requiresActionEvent) { 86 | throw new Error('No requires action event') 87 | } 88 | 89 | const toolCallId = requiresActionEvent.data.required_action?.submit_tool_outputs.tool_calls[0].id 90 | 91 | const 
submitToolOutputsRun = await client.beta.threads.runs.submitToolOutputs( 92 | thread.id, 93 | requiresActionEvent.data.id, 94 | { 95 | stream: true, 96 | tool_outputs: [ 97 | { 98 | tool_call_id: toolCallId, 99 | output: '70 degrees and sunny.', 100 | }, 101 | ], 102 | } 103 | ) 104 | 105 | for await (const _event of submitToolOutputsRun) { 106 | } 107 | 108 | const threadMessages = await client.beta.threads.messages.list(thread.id, { limit: 10 }) 109 | 110 | return NextResponse.json({ 111 | threadMessages, 112 | }) 113 | } 114 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/openai/superinterface/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | openaiClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { prisma } from '@/lib/prisma' 10 | import { 11 | messagesResponse, 12 | } from '@superinterface/react/server' 13 | 14 | export const GET = async () => { 15 | const client = supercompat({ 16 | client: openaiClientAdapter({ 17 | openai: new OpenAI({ 18 | apiKey: process.env.RENAMED_OPENAI_API_KEY!, 19 | }), 20 | }), 21 | storage: prismaStorageAdapter({ 22 | prisma, 23 | }), 24 | runAdapter: completionsRunAdapter(), 25 | }) 26 | 27 | const threadId = '1ea8b616-fcec-4f0c-a3cb-9df81b67a241' 28 | // const threadMessages = await client.beta.threads.messages.list(threadId, { limit: 10 }) 29 | 30 | const response = await messagesResponse({ 31 | threadId, 32 | client, 33 | }) 34 | 35 | return NextResponse.json({ 36 | response, 37 | }) 38 | } 39 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/perplexity/models/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI from 'openai' 2 | 
import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | perplexityClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { prisma } from '@/lib/prisma' 10 | 11 | export const GET = async () => { 12 | const client = supercompat({ 13 | client: perplexityClientAdapter({ 14 | perplexity: new OpenAI({ 15 | apiKey: process.env.PERPLEXITY_API_KEY, 16 | baseURL: 'https://api.perplexity.ai', 17 | }), 18 | }), 19 | storage: prismaStorageAdapter({ 20 | prisma, 21 | }), 22 | runAdapter: completionsRunAdapter(), 23 | }) 24 | 25 | const response = await client.models.list() 26 | 27 | const models = [] 28 | 29 | for await (const model of response) { 30 | models.push(model) 31 | } 32 | 33 | return NextResponse.json({ 34 | models, 35 | }) 36 | } 37 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/perplexity/poll/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | perplexityClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { prisma } from '@/lib/prisma' 10 | 11 | const tools = [ 12 | { 13 | "type": "function", 14 | "function": { 15 | "name": "get_current_weather", 16 | "description": "Get the current weather in a given location", 17 | "parameters": { 18 | "type": "object", 19 | "properties": { 20 | "location": { 21 | "type": "string", 22 | "description": "The city and state, e.g. 
San Francisco, CA", 23 | }, 24 | "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, 25 | }, 26 | "required": ["location"], 27 | }, 28 | } 29 | } 30 | ] as OpenAI.Beta.AssistantTool[] 31 | 32 | export const GET = async () => { 33 | const client = supercompat({ 34 | client: perplexityClientAdapter({ 35 | perplexity: new OpenAI({ 36 | apiKey: process.env.PERPLEXITY_API_KEY, 37 | baseURL: 'https://api.perplexity.ai', 38 | }), 39 | }), 40 | storage: prismaStorageAdapter({ 41 | prisma, 42 | }), 43 | runAdapter: completionsRunAdapter(), 44 | }) 45 | 46 | const assistantId = 'b7fd7a65-3504-4ad3-95a0-b83a8eaff0f3' 47 | 48 | const thread = await client.beta.threads.create({ 49 | messages: [], 50 | metadata: { 51 | assistantId, 52 | }, 53 | }) 54 | 55 | await client.beta.threads.messages.create(thread.id, { 56 | role: 'user', 57 | content: 'What is the weather in San Francisco, CA? In celsius.' 58 | }) 59 | 60 | const run = await client.beta.threads.runs.createAndPoll( 61 | thread.id, 62 | { 63 | assistant_id: assistantId, 64 | instructions: 'Use the get_current_weather and then answer the message.', 65 | model: 'llama-3.1-sonar-huge-128k-online', 66 | tools, 67 | truncation_strategy: { 68 | type: 'last_messages', 69 | last_messages: 10, 70 | }, 71 | }, 72 | ) 73 | 74 | if (!run.required_action) { 75 | throw new Error('No requires action event') 76 | } 77 | 78 | const toolCallId = run.required_action.submit_tool_outputs.tool_calls[0].id 79 | 80 | await client.beta.threads.runs.submitToolOutputs( 81 | thread.id, 82 | run.id, 83 | { 84 | tool_outputs: [ 85 | { 86 | tool_call_id: toolCallId, 87 | output: '70 degrees and sunny.', 88 | }, 89 | ], 90 | } 91 | ) 92 | 93 | await new Promise(r => setTimeout(r, 5000)) 94 | 95 | const threadMessages = await client.beta.threads.messages.list(thread.id, { limit: 10 }) 96 | 97 | return NextResponse.json({ 98 | threadMessages, 99 | }) 100 | } 101 | -------------------------------------------------------------------------------- 
/examples/prisma-nextjs/src/app/api/perplexity/stream/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | perplexityClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { prisma } from '@/lib/prisma' 10 | 11 | const tools = [ 12 | { 13 | "type": "function", 14 | "function": { 15 | "name": "get_current_weather", 16 | "description": "Get the current weather in a given location", 17 | "parameters": { 18 | "type": "object", 19 | "properties": { 20 | "location": { 21 | "type": "string", 22 | "description": "The city and state, e.g. San Francisco, CA", 23 | }, 24 | "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, 25 | }, 26 | "required": ["location"], 27 | }, 28 | } 29 | } 30 | ] as OpenAI.Beta.AssistantTool[] 31 | 32 | export const GET = async () => { 33 | const client = supercompat({ 34 | client: perplexityClientAdapter({ 35 | perplexity: new OpenAI({ 36 | apiKey: process.env.PERPLEXITY_API_KEY, 37 | baseURL: 'https://api.perplexity.ai', 38 | }), 39 | }), 40 | storage: prismaStorageAdapter({ 41 | prisma, 42 | }), 43 | runAdapter: completionsRunAdapter(), 44 | }) 45 | 46 | const assistantId = 'b7fd7a65-3504-4ad3-95a0-b83a8eaff0f3' 47 | 48 | const thread = await client.beta.threads.create({ 49 | messages: [], 50 | metadata: { 51 | assistantId, 52 | }, 53 | }) 54 | 55 | await client.beta.threads.messages.create(thread.id, { 56 | role: 'user', 57 | content: 'What is the weather in SF? 
Use get_current_weather' 58 | }) 59 | 60 | const run = await client.beta.threads.runs.create( 61 | thread.id, 62 | { 63 | assistant_id: assistantId, 64 | instructions: 'Use the get_current_weather and then answer the message.', 65 | model: 'llama-3.1-sonar-huge-128k-online', 66 | stream: true, 67 | tools, 68 | truncation_strategy: { 69 | type: 'last_messages', 70 | last_messages: 10, 71 | }, 72 | }, 73 | ) 74 | 75 | let requiresActionEvent 76 | 77 | for await (const event of run) { 78 | if (event.event === 'thread.run.requires_action') { 79 | requiresActionEvent = event 80 | } 81 | } 82 | 83 | if (!requiresActionEvent) { 84 | throw new Error('No requires action event') 85 | } 86 | 87 | const toolCallId = requiresActionEvent.data.required_action?.submit_tool_outputs.tool_calls[0].id 88 | 89 | const submitToolOutputsRun = await client.beta.threads.runs.submitToolOutputs( 90 | thread.id, 91 | requiresActionEvent.data.id, 92 | { 93 | stream: true, 94 | tool_outputs: [ 95 | { 96 | tool_call_id: toolCallId, 97 | output: '70 degrees and sunny.', 98 | }, 99 | ], 100 | } 101 | ) 102 | 103 | for await (const _event of submitToolOutputsRun) { 104 | } 105 | 106 | const threadMessages = await client.beta.threads.messages.list(thread.id, { limit: 10 }) 107 | 108 | return NextResponse.json({ 109 | threadMessages, 110 | }) 111 | } 112 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/api/together/models/route.ts: -------------------------------------------------------------------------------- 1 | import OpenAI from 'openai' 2 | import { NextResponse } from 'next/server' 3 | import { 4 | supercompat, 5 | togetherClientAdapter, 6 | prismaStorageAdapter, 7 | completionsRunAdapter, 8 | } from 'supercompat' 9 | import { prisma } from '@/lib/prisma' 10 | 11 | export const GET = async () => { 12 | const client = supercompat({ 13 | client: togetherClientAdapter({ 14 | together: new OpenAI({ 15 | apiKey: 
process.env.TOGETHER_API_KEY, 16 | baseURL: 'https://api.together.xyz/v1', 17 | }), 18 | }), 19 | storage: prismaStorageAdapter({ 20 | prisma, 21 | }), 22 | runAdapter: completionsRunAdapter(), 23 | }) 24 | 25 | const response = await client.models.list() 26 | // console.dir({ response }, { depth: null }) 27 | 28 | const models = [] 29 | 30 | for await (const model of response) { 31 | models.push(model) 32 | } 33 | 34 | return NextResponse.json({ 35 | models, 36 | }) 37 | } 38 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/supercorp-ai/supercompat/695b2998d2d886331ee1dd83cfc98a08d1fe99c4/examples/prisma-nextjs/src/app/favicon.ico -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/globals.css: -------------------------------------------------------------------------------- 1 | :root { 2 | --max-width: 1100px; 3 | --border-radius: 12px; 4 | --font-mono: ui-monospace, Menlo, Monaco, "Cascadia Mono", "Segoe UI Mono", 5 | "Roboto Mono", "Oxygen Mono", "Ubuntu Monospace", "Source Code Pro", 6 | "Fira Mono", "Droid Sans Mono", "Courier New", monospace; 7 | 8 | --foreground-rgb: 0, 0, 0; 9 | --background-start-rgb: 214, 219, 220; 10 | --background-end-rgb: 255, 255, 255; 11 | 12 | --primary-glow: conic-gradient( 13 | from 180deg at 50% 50%, 14 | #16abff33 0deg, 15 | #0885ff33 55deg, 16 | #54d6ff33 120deg, 17 | #0071ff33 160deg, 18 | transparent 360deg 19 | ); 20 | --secondary-glow: radial-gradient( 21 | rgba(255, 255, 255, 1), 22 | rgba(255, 255, 255, 0) 23 | ); 24 | 25 | --tile-start-rgb: 239, 245, 249; 26 | --tile-end-rgb: 228, 232, 233; 27 | --tile-border: conic-gradient( 28 | #00000080, 29 | #00000040, 30 | #00000030, 31 | #00000020, 32 | #00000010, 33 | #00000010, 34 | #00000080 35 | ); 36 | 37 | --callout-rgb: 
238, 240, 241; 38 | --callout-border-rgb: 172, 175, 176; 39 | --card-rgb: 180, 185, 188; 40 | --card-border-rgb: 131, 134, 135; 41 | } 42 | 43 | @media (prefers-color-scheme: dark) { 44 | :root { 45 | --foreground-rgb: 255, 255, 255; 46 | --background-start-rgb: 0, 0, 0; 47 | --background-end-rgb: 0, 0, 0; 48 | 49 | --primary-glow: radial-gradient(rgba(1, 65, 255, 0.4), rgba(1, 65, 255, 0)); 50 | --secondary-glow: linear-gradient( 51 | to bottom right, 52 | rgba(1, 65, 255, 0), 53 | rgba(1, 65, 255, 0), 54 | rgba(1, 65, 255, 0.3) 55 | ); 56 | 57 | --tile-start-rgb: 2, 13, 46; 58 | --tile-end-rgb: 2, 5, 19; 59 | --tile-border: conic-gradient( 60 | #ffffff80, 61 | #ffffff40, 62 | #ffffff30, 63 | #ffffff20, 64 | #ffffff10, 65 | #ffffff10, 66 | #ffffff80 67 | ); 68 | 69 | --callout-rgb: 20, 20, 20; 70 | --callout-border-rgb: 108, 108, 108; 71 | --card-rgb: 100, 100, 100; 72 | --card-border-rgb: 200, 200, 200; 73 | } 74 | } 75 | 76 | * { 77 | box-sizing: border-box; 78 | padding: 0; 79 | margin: 0; 80 | } 81 | 82 | html, 83 | body { 84 | max-width: 100vw; 85 | overflow-x: hidden; 86 | } 87 | 88 | body { 89 | color: rgb(var(--foreground-rgb)); 90 | background: linear-gradient( 91 | to bottom, 92 | transparent, 93 | rgb(var(--background-end-rgb)) 94 | ) 95 | rgb(var(--background-start-rgb)); 96 | } 97 | 98 | a { 99 | color: inherit; 100 | text-decoration: none; 101 | } 102 | 103 | @media (prefers-color-scheme: dark) { 104 | html { 105 | color-scheme: dark; 106 | } 107 | } 108 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/layout.tsx: -------------------------------------------------------------------------------- 1 | import type { Metadata } from "next"; 2 | import { Inter } from "next/font/google"; 3 | import "./globals.css"; 4 | 5 | const inter = Inter({ subsets: ["latin"] }); 6 | 7 | export const metadata: Metadata = { 8 | title: "Create Next App", 9 | description: "Generated by create next app", 10 | 
}; 11 | 12 | export default function RootLayout({ 13 | children, 14 | }: Readonly<{ 15 | children: React.ReactNode; 16 | }>) { 17 | return ( 18 | 19 | {children} 20 | 21 | ); 22 | } 23 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/page.module.css: -------------------------------------------------------------------------------- 1 | .main { 2 | display: flex; 3 | flex-direction: column; 4 | justify-content: space-between; 5 | align-items: center; 6 | padding: 6rem; 7 | min-height: 100vh; 8 | } 9 | 10 | .description { 11 | display: inherit; 12 | justify-content: inherit; 13 | align-items: inherit; 14 | font-size: 0.85rem; 15 | max-width: var(--max-width); 16 | width: 100%; 17 | z-index: 2; 18 | font-family: var(--font-mono); 19 | } 20 | 21 | .description a { 22 | display: flex; 23 | justify-content: center; 24 | align-items: center; 25 | gap: 0.5rem; 26 | } 27 | 28 | .description p { 29 | position: relative; 30 | margin: 0; 31 | padding: 1rem; 32 | background-color: rgba(var(--callout-rgb), 0.5); 33 | border: 1px solid rgba(var(--callout-border-rgb), 0.3); 34 | border-radius: var(--border-radius); 35 | } 36 | 37 | .code { 38 | font-weight: 700; 39 | font-family: var(--font-mono); 40 | } 41 | 42 | .grid { 43 | display: grid; 44 | grid-template-columns: repeat(4, minmax(25%, auto)); 45 | max-width: 100%; 46 | width: var(--max-width); 47 | } 48 | 49 | .card { 50 | padding: 1rem 1.2rem; 51 | border-radius: var(--border-radius); 52 | background: rgba(var(--card-rgb), 0); 53 | border: 1px solid rgba(var(--card-border-rgb), 0); 54 | transition: background 200ms, border 200ms; 55 | } 56 | 57 | .card span { 58 | display: inline-block; 59 | transition: transform 200ms; 60 | } 61 | 62 | .card h2 { 63 | font-weight: 600; 64 | margin-bottom: 0.7rem; 65 | } 66 | 67 | .card p { 68 | margin: 0; 69 | opacity: 0.6; 70 | font-size: 0.9rem; 71 | line-height: 1.5; 72 | max-width: 30ch; 73 | text-wrap: balance; 74 | } 75 
| 76 | .center { 77 | display: flex; 78 | justify-content: center; 79 | align-items: center; 80 | position: relative; 81 | padding: 4rem 0; 82 | } 83 | 84 | .center::before { 85 | background: var(--secondary-glow); 86 | border-radius: 50%; 87 | width: 480px; 88 | height: 360px; 89 | margin-left: -400px; 90 | } 91 | 92 | .center::after { 93 | background: var(--primary-glow); 94 | width: 240px; 95 | height: 180px; 96 | z-index: -1; 97 | } 98 | 99 | .center::before, 100 | .center::after { 101 | content: ""; 102 | left: 50%; 103 | position: absolute; 104 | filter: blur(45px); 105 | transform: translateZ(0); 106 | } 107 | 108 | .logo { 109 | position: relative; 110 | } 111 | /* Enable hover only on non-touch devices */ 112 | @media (hover: hover) and (pointer: fine) { 113 | .card:hover { 114 | background: rgba(var(--card-rgb), 0.1); 115 | border: 1px solid rgba(var(--card-border-rgb), 0.15); 116 | } 117 | 118 | .card:hover span { 119 | transform: translateX(4px); 120 | } 121 | } 122 | 123 | @media (prefers-reduced-motion) { 124 | .card:hover span { 125 | transform: none; 126 | } 127 | } 128 | 129 | /* Mobile */ 130 | @media (max-width: 700px) { 131 | .content { 132 | padding: 4rem; 133 | } 134 | 135 | .grid { 136 | grid-template-columns: 1fr; 137 | margin-bottom: 120px; 138 | max-width: 320px; 139 | text-align: center; 140 | } 141 | 142 | .card { 143 | padding: 1rem 2.5rem; 144 | } 145 | 146 | .card h2 { 147 | margin-bottom: 0.5rem; 148 | } 149 | 150 | .center { 151 | padding: 8rem 0 6rem; 152 | } 153 | 154 | .center::before { 155 | transform: none; 156 | height: 300px; 157 | } 158 | 159 | .description { 160 | font-size: 0.8rem; 161 | } 162 | 163 | .description a { 164 | padding: 1rem; 165 | } 166 | 167 | .description p, 168 | .description div { 169 | display: flex; 170 | justify-content: center; 171 | position: fixed; 172 | width: 100%; 173 | } 174 | 175 | .description p { 176 | align-items: center; 177 | inset: 0 0 auto; 178 | padding: 2rem 1rem 1.4rem; 179 | 
border-radius: 0; 180 | border: none; 181 | border-bottom: 1px solid rgba(var(--callout-border-rgb), 0.25); 182 | background: linear-gradient( 183 | to bottom, 184 | rgba(var(--background-start-rgb), 1), 185 | rgba(var(--callout-rgb), 0.5) 186 | ); 187 | background-clip: padding-box; 188 | backdrop-filter: blur(24px); 189 | } 190 | 191 | .description div { 192 | align-items: flex-end; 193 | pointer-events: none; 194 | inset: auto 0 0; 195 | padding: 2rem; 196 | height: 200px; 197 | background: linear-gradient( 198 | to bottom, 199 | transparent 0%, 200 | rgb(var(--background-end-rgb)) 40% 201 | ); 202 | z-index: 1; 203 | } 204 | } 205 | 206 | /* Tablet and Smaller Desktop */ 207 | @media (min-width: 701px) and (max-width: 1120px) { 208 | .grid { 209 | grid-template-columns: repeat(2, 50%); 210 | } 211 | } 212 | 213 | @media (prefers-color-scheme: dark) { 214 | .vercelLogo { 215 | filter: invert(1); 216 | } 217 | 218 | .logo { 219 | filter: invert(1) drop-shadow(0 0 0.3rem #ffffff70); 220 | } 221 | } 222 | 223 | @keyframes rotate { 224 | from { 225 | transform: rotate(360deg); 226 | } 227 | to { 228 | transform: rotate(0deg); 229 | } 230 | } 231 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/app/page.tsx: -------------------------------------------------------------------------------- 1 | import Image from "next/image"; 2 | import styles from "./page.module.css"; 3 | 4 | export default function Home() { 5 | return ( 6 |
7 |
8 |

9 | Get started by editing  10 | src/app/page.tsx 11 |

12 | 29 |
30 | 31 |
32 | Next.js Logo 40 |
41 | 42 | 93 |
94 | ); 95 | } 96 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/src/lib/prisma.ts: -------------------------------------------------------------------------------- 1 | import { Pool } from '@neondatabase/serverless' 2 | import { PrismaNeon } from '@prisma/adapter-neon' 3 | import { PrismaClient } from '@prisma/client' 4 | 5 | const prismaClientSingleton = () => { 6 | const connectionString = `${process.env.DATABASE_URL}` 7 | const pool = new Pool({ connectionString }) 8 | const adapter = new PrismaNeon(pool) 9 | 10 | return new PrismaClient({ adapter }) 11 | } 12 | 13 | declare global { 14 | var prisma: undefined | ReturnType 15 | } 16 | 17 | export const prisma = globalThis.prisma ?? prismaClientSingleton() 18 | 19 | if (process.env.NODE_ENV !== 'production') globalThis.prisma = prisma 20 | -------------------------------------------------------------------------------- /examples/prisma-nextjs/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "lib": [ 4 | "dom", 5 | "dom.iterable", 6 | "esnext" 7 | ], 8 | "allowJs": true, 9 | "skipLibCheck": true, 10 | "strict": true, 11 | "noEmit": true, 12 | "esModuleInterop": true, 13 | "module": "esnext", 14 | "moduleResolution": "bundler", 15 | "resolveJsonModule": true, 16 | "isolatedModules": true, 17 | "jsx": "preserve", 18 | "incremental": true, 19 | "plugins": [ 20 | { 21 | "name": "next" 22 | } 23 | ], 24 | "paths": { 25 | "@/*": [ 26 | "./src/*" 27 | ] 28 | }, 29 | "target": "ES2017" 30 | }, 31 | "include": [ 32 | "next-env.d.ts", 33 | "**/*.ts", 34 | "**/*.tsx", 35 | ".next/types/**/*.ts" 36 | ], 37 | "exclude": [ 38 | "node_modules" 39 | ] 40 | } 41 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@supercorp/supercompat", 3 | 
"private": true, 4 | "scripts": { 5 | "build": "turbo build", 6 | "dev": "turbo run build --force -- --watch", 7 | "lint": "turbo lint", 8 | "format": "prettier --write \"**/*.{ts,tsx,md}\"" 9 | }, 10 | "devDependencies": { 11 | "@swc/core": "^1.10.1", 12 | "prettier": "^3.4.2", 13 | "turbo": "^2.3.3" 14 | }, 15 | "engines": { 16 | "node": ">=18" 17 | }, 18 | "packageManager": "npm@10.2.0", 19 | "workspaces": [ 20 | "apps/*", 21 | "packages/*" 22 | ] 23 | } 24 | -------------------------------------------------------------------------------- /packages/supercompat/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "supercompat", 3 | "version": "2.30.1", 4 | "type": "module", 5 | "main": "./dist/index.js", 6 | "module": "./dist/index.js", 7 | "types": "./dist/index.d.cts", 8 | "exports": { 9 | ".": "./dist/index.js", 10 | "./*": "./dist/*.js", 11 | "./types": "./dist/types/index.js" 12 | }, 13 | "files": [ 14 | "dist" 15 | ], 16 | "keywords": [], 17 | "license": "ISC", 18 | "scripts": { 19 | "build": "tsup" 20 | }, 21 | "dependencies": { 22 | "@paralleldrive/cuid2": "^2.2.2", 23 | "dayjs": "^1.11.13", 24 | "deepmerge-ts": "^7.1.5", 25 | "lodash": "^4.17.21", 26 | "openai": "^4.97.0", 27 | "radash": "^12.1.0", 28 | "ts-deepmerge": "^7.0.3" 29 | }, 30 | "devDependencies": { 31 | "@anthropic-ai/sdk": "^0.40.1", 32 | "@mistralai/mistralai": "^1.6.0", 33 | "@prisma/client": "^6.7.0", 34 | "@types/lodash": "^4.17.16", 35 | "groq-sdk": "^0.21.0", 36 | "humiris-ai": "^1.11.0", 37 | "tsup": "^8.4.0" 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/anthropicClientAdapter/completions/index.ts: -------------------------------------------------------------------------------- 1 | import type Anthropic from '@anthropic-ai/sdk' 2 | import { post } from './post' 3 | 4 | export const completions = ({ 5 | anthropic, 6 | }: { 7 | 
anthropic: Anthropic 8 | }) => ({ 9 | post: post({ anthropic }), 10 | }) 11 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/anthropicClientAdapter/completions/post.ts: -------------------------------------------------------------------------------- 1 | import type Anthropic from '@anthropic-ai/sdk' 2 | import type OpenAI from 'openai' 3 | import { uid, fork, omit, isEmpty } from 'radash' 4 | import { nonEmptyMessages } from '@/lib/messages/nonEmptyMessages' 5 | import { alternatingMessages } from '@/lib/messages/alternatingMessages' 6 | import { firstUserMessages } from '@/lib/messages/firstUserMessages' 7 | import { serializeTools } from './serializeTools' 8 | import { serializeMessages } from './serializeMessages' 9 | 10 | export const post = ({ 11 | anthropic, 12 | }: { 13 | anthropic: Anthropic 14 | }) => async (_url: string, options: any) => { 15 | const body = JSON.parse(options.body) 16 | 17 | const messages = body.messages as OpenAI.ChatCompletionMessageParam[] 18 | const [systemMessages, otherMessages] = fork(messages, (message) => message.role === 'system') 19 | const system = systemMessages.map((message) => message.content).join('\n') 20 | 21 | const chatMessages = nonEmptyMessages({ 22 | messages: firstUserMessages({ 23 | messages: alternatingMessages({ 24 | messages: otherMessages, 25 | }), 26 | }), 27 | }) 28 | 29 | const resultOptions = { 30 | ...omit(body, ['response_format']), 31 | stream: body.stream ? 
isEmpty(body.tools) : false, 32 | system, 33 | messages: serializeMessages({ 34 | messages: chatMessages, 35 | }), 36 | max_tokens: 4096, 37 | tools: serializeTools({ 38 | tools: body.tools, 39 | }), 40 | } 41 | 42 | if (body.stream) { 43 | // @ts-ignore-next-line 44 | const response = await anthropic.messages.stream(resultOptions) 45 | 46 | const stream = new ReadableStream({ 47 | async start(controller) { 48 | for await (const chunk of response) { 49 | if (chunk.type === 'content_block_delta') { 50 | const delta = chunk.delta.type === 'input_json_delta' ? { 51 | tool_calls: [ 52 | { 53 | index: 0, 54 | function: { 55 | arguments: chunk.delta.partial_json, 56 | }, 57 | }, 58 | ] 59 | } : { 60 | content: chunk.delta.text, 61 | } 62 | 63 | const messageDelta = { 64 | id: `chatcmpl-${uid(29)}`, 65 | object: 'chat.completion.chunk', 66 | choices: [ 67 | { 68 | index: chunk.index, 69 | delta, 70 | }, 71 | ], 72 | } 73 | 74 | controller.enqueue(`data: ${JSON.stringify(messageDelta)}\n\n`) 75 | } else if (chunk.type === 'content_block_start') { 76 | const delta = chunk.content_block.type === 'tool_use' ? 
{ 77 | content: null, 78 | tool_calls: [ 79 | { 80 | index: 0, 81 | id: chunk.content_block.id, 82 | type: 'function', 83 | function: { 84 | name: chunk.content_block.name, 85 | arguments: '', 86 | } 87 | } 88 | ], 89 | } : { 90 | content: chunk.content_block.text, 91 | } 92 | 93 | const messageDelta = { 94 | id: `chatcmpl-${uid(29)}`, 95 | object: 'chat.completion.chunk', 96 | choices: [ 97 | { 98 | index: chunk.index, 99 | delta, 100 | }, 101 | ], 102 | } 103 | 104 | controller.enqueue(`data: ${JSON.stringify(messageDelta)}\n\n`) 105 | } else if (chunk.type === 'message_start') { 106 | const messageDelta = { 107 | id: `chatcmpl-${uid(29)}`, 108 | object: 'chat.completion.chunk', 109 | choices: [ 110 | { 111 | index: 0, 112 | delta: { 113 | content: '', 114 | }, 115 | }, 116 | ], 117 | } 118 | 119 | controller.enqueue(`data: ${JSON.stringify(messageDelta)}\n\n`) 120 | } 121 | } 122 | 123 | controller.close() 124 | }, 125 | }) 126 | 127 | return new Response(stream, { 128 | headers: { 129 | 'Content-Type': 'text/event-stream', 130 | }, 131 | }) 132 | } else { 133 | try { 134 | // @ts-ignore-next-line 135 | const data = await anthropic.messages.create(resultOptions) 136 | 137 | return new Response(JSON.stringify({ 138 | data, 139 | }), { 140 | status: 200, 141 | headers: { 142 | 'Content-Type': 'application/json', 143 | }, 144 | }) 145 | } catch (error) { 146 | return new Response(JSON.stringify({ 147 | error, 148 | }), { 149 | status: 500, 150 | headers: { 151 | 'Content-Type': 'application/json', 152 | }, 153 | }) 154 | } 155 | } 156 | } 157 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/anthropicClientAdapter/completions/serializeMessages/index.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { serializeMessage } from './serializeMessage' 3 | 4 | export const serializeMessages = ({ 5 | messages, 6 | }: { 
7 | messages: OpenAI.ChatCompletionMessageParam[] 8 | }) => ( 9 | messages.map(message => ( 10 | serializeMessage({ 11 | message, 12 | }) 13 | )) 14 | ) 15 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/anthropicClientAdapter/completions/serializeMessages/serializeMessage.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | 3 | export const serializeMessage = ({ 4 | message, 5 | }: { 6 | message: OpenAI.ChatCompletionMessageParam 7 | }) => { 8 | if (message.role === 'user') { 9 | return { 10 | role: 'user', 11 | content: message.content, 12 | } 13 | } else if (message.role === 'assistant') { 14 | return { 15 | role: 'assistant', 16 | content: [ 17 | { 18 | type: 'text', 19 | text: message.content, 20 | }, 21 | ...(message.tool_calls ?? []).map((toolCall) => ({ 22 | type: 'tool_use', 23 | id: toolCall.id, 24 | name: toolCall.function.name, 25 | input: toolCall.function.arguments ? JSON.parse(toolCall.function.arguments) : {}, 26 | })), 27 | ], 28 | } 29 | } else if (message.role === 'tool') { 30 | return { 31 | role: 'user', 32 | content: [ 33 | { 34 | type: 'tool_result', 35 | tool_use_id: message.tool_call_id, 36 | content: message.content, 37 | }, 38 | ], 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/anthropicClientAdapter/completions/serializeTools.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | 3 | export const serializeTools = ({ 4 | tools, 5 | }: { 6 | tools: OpenAI.Beta.AssistantTool[] | undefined 7 | }) => ( 8 | (tools ?? 
[]).map((tool: OpenAI.Beta.AssistantTool) => { 9 | if (tool.type === 'function') { 10 | return { 11 | name: tool.function.name, 12 | description: tool.function.description, 13 | input_schema: tool.function.parameters ?? { 14 | type: 'object', 15 | }, 16 | } 17 | } 18 | 19 | return tool 20 | }) 21 | ) 22 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/anthropicClientAdapter/index.ts: -------------------------------------------------------------------------------- 1 | import type Anthropic from '@anthropic-ai/sdk' 2 | import { models } from './models' 3 | import { completions } from './completions' 4 | 5 | export const anthropicClientAdapter = ({ 6 | anthropic, 7 | }: { 8 | anthropic: Anthropic 9 | }) => ({ 10 | client: anthropic, 11 | requestHandlers: { 12 | '^/v1/models$': models({ anthropic }), 13 | '^/v1/chat/completions$': completions({ anthropic }), 14 | }, 15 | }) 16 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/anthropicClientAdapter/models/get.ts: -------------------------------------------------------------------------------- 1 | import type Anthropic from '@anthropic-ai/sdk' 2 | 3 | const models = [ 4 | 'claude-3-7-sonnet-latest', 5 | 'claude-3-7-sonnet-20250219', 6 | 'claude-3-5-haiku-latest', 7 | 'claude-3-5-sonnet-20241022', 8 | 'claude-3-5-sonnet-20240620', 9 | 'claude-3-opus-20240229', 10 | 'claude-3-sonnet-20240229', 11 | 'claude-3-5-haiku-20241022', 12 | 'claude-3-haiku-20240307', 13 | ] 14 | 15 | export const get = ({ 16 | anthropic, 17 | }: { 18 | anthropic: Anthropic 19 | }) => async (_url: string, _options: any) => { 20 | try { 21 | return new Response(JSON.stringify({ 22 | type: 'list', 23 | data: models.map((model) => ({ 24 | id: model, 25 | object: 'model', 26 | })), 27 | }), { 28 | status: 200, 29 | headers: { 30 | 'Content-Type': 'application/json', 31 | }, 32 | }) 33 | } catch (error) { 
34 | return new Response(JSON.stringify({ 35 | error, 36 | }), { 37 | status: 500, 38 | headers: { 39 | 'Content-Type': 'application/json', 40 | }, 41 | }) 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/anthropicClientAdapter/models/index.ts: -------------------------------------------------------------------------------- 1 | import type Anthropic from '@anthropic-ai/sdk' 2 | import { get } from './get' 3 | 4 | export const models = ({ 5 | anthropic, 6 | }: { 7 | anthropic: Anthropic 8 | }) => ({ 9 | get: get({ anthropic }), 10 | }) 11 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/azureOpenaiClientAdapter/completions/index.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { post } from './post' 3 | 4 | export const completions = ({ 5 | openai, 6 | }: { 7 | openai: OpenAI 8 | }) => ({ 9 | post: post({ openai }), 10 | }) 11 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/azureOpenaiClientAdapter/completions/post.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | 3 | export const post = ({ 4 | openai, 5 | }: { 6 | openai: OpenAI 7 | }) => async (_url: string, options: any) => { 8 | const body = JSON.parse(options.body) 9 | 10 | if (body.stream) { 11 | const response = await openai.chat.completions.create(body) 12 | 13 | const stream = new ReadableStream({ 14 | async start(controller) { 15 | // @ts-ignore-next-line 16 | for await (const chunk of response) { 17 | controller.enqueue(`data: ${JSON.stringify(chunk)}\n\n`) 18 | } 19 | 20 | controller.close() 21 | }, 22 | }) 23 | 24 | return new Response(stream, { 25 | headers: { 26 | 'Content-Type': 'text/event-stream', 27 
| }, 28 | }) 29 | } else { 30 | try { 31 | const data = await openai.chat.completions.create(body) 32 | 33 | return new Response(JSON.stringify({ 34 | data, 35 | }), { 36 | status: 200, 37 | headers: { 38 | 'Content-Type': 'application/json', 39 | }, 40 | }) 41 | } catch (error) { 42 | return new Response(JSON.stringify({ 43 | error, 44 | }), { 45 | status: 500, 46 | headers: { 47 | 'Content-Type': 'application/json', 48 | }, 49 | }) 50 | } 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/azureOpenaiClientAdapter/index.ts: -------------------------------------------------------------------------------- 1 | import type { AzureOpenAI } from 'openai' 2 | import { completions } from '@/adapters/client/openaiClientAdapter/completions' 3 | 4 | export const azureOpenaiClientAdapter = ({ 5 | azureOpenai, 6 | }: { 7 | azureOpenai: AzureOpenAI 8 | }) => ({ 9 | type: 'AZURE_OPENAI', 10 | client: azureOpenai, 11 | requestHandlers: { 12 | '^/(?:v1|/?openai)/chat/completions$': completions({ 13 | openai: azureOpenai, 14 | }), 15 | }, 16 | }) 17 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/googleClientAdapter/completions/index.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { post } from './post' 3 | 4 | export const completions = ({ 5 | google, 6 | }: { 7 | google: OpenAI 8 | }) => ({ 9 | post: post({ google }), 10 | }) 11 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/googleClientAdapter/completions/post.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { createId } from '@paralleldrive/cuid2' 3 | import { nonEmptyMessages } from '@/lib/messages/nonEmptyMessages' 4 | 
5 | export const post = ({ 6 | google, 7 | }: { 8 | google: OpenAI 9 | }) => async (_url: string, options: any) => { 10 | const body = JSON.parse(options.body) 11 | 12 | const resultOptions = { 13 | ...body, 14 | messages: nonEmptyMessages({ 15 | messages: body.messages, 16 | }), 17 | } 18 | 19 | if (body.stream) { 20 | const response = await google.chat.completions.create(resultOptions) 21 | 22 | const stream = new ReadableStream({ 23 | async start(controller) { 24 | // @ts-ignore-next-line 25 | for await (const chunk of response) { 26 | let resultChunk 27 | 28 | if (chunk.choices) { 29 | const newChoices = chunk.choices.map((choice: any) => { 30 | if (choice.delta?.tool_calls) { 31 | return { 32 | ...choice, 33 | delta: { 34 | ...choice.delta, 35 | tool_calls: choice.delta.tool_calls.map((toolCall: any) => { 36 | if (toolCall.id === '') { 37 | return { 38 | ...toolCall, 39 | id: `call_${createId()}`, 40 | } 41 | } 42 | 43 | return toolCall 44 | }), 45 | }, 46 | } 47 | } else { 48 | return choice 49 | } 50 | }) 51 | 52 | resultChunk = { 53 | ...chunk, 54 | choices: newChoices, 55 | } 56 | } else { 57 | resultChunk = chunk 58 | } 59 | 60 | controller.enqueue(`data: ${JSON.stringify(resultChunk)}\n\n`) 61 | } 62 | 63 | controller.close() 64 | }, 65 | }) 66 | 67 | return new Response(stream, { 68 | headers: { 69 | 'Content-Type': 'text/event-stream', 70 | }, 71 | }) 72 | } else { 73 | try { 74 | const data = await google.chat.completions.create(resultOptions) 75 | 76 | return new Response(JSON.stringify({ 77 | data, 78 | }), { 79 | status: 200, 80 | headers: { 81 | 'Content-Type': 'application/json', 82 | }, 83 | }) 84 | } catch (error) { 85 | return new Response(JSON.stringify({ 86 | error, 87 | }), { 88 | status: 500, 89 | headers: { 90 | 'Content-Type': 'application/json', 91 | }, 92 | }) 93 | } 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/googleClientAdapter/index.ts: 
-------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { models } from './models' 3 | import { completions } from './completions' 4 | 5 | export const googleClientAdapter = ({ 6 | google, 7 | }: { 8 | google: OpenAI 9 | }) => ({ 10 | client: google, 11 | requestHandlers: { 12 | '^/v1/models$': models({ google }), 13 | '^/(?:v1|/?openai)/chat/completions$': completions({ google }), 14 | }, 15 | }) 16 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/googleClientAdapter/models/get.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | 3 | const models = [ 4 | 'gemini-2.5-flash-preview-04-17', 5 | 'gemini-2.5-pro-preview-03-25', 6 | 'gemini-2.0-flash', 7 | 'gemini-2.0-flash-lite', 8 | 'gemini-1.5-flash', 9 | 'gemini-1.5-flash-8b', 10 | 'gemini-1.5-pro', 11 | ] 12 | 13 | export const get = ({ 14 | google, 15 | }: { 16 | google: OpenAI 17 | }) => async (_url: string, _options: any) => { 18 | try { 19 | return new Response(JSON.stringify({ 20 | type: 'list', 21 | data: models.map((model) => ({ 22 | id: model, 23 | object: 'model', 24 | })), 25 | }), { 26 | status: 200, 27 | headers: { 28 | 'Content-Type': 'application/json', 29 | }, 30 | }) 31 | } catch (error) { 32 | return new Response(JSON.stringify({ 33 | error, 34 | }), { 35 | status: 500, 36 | headers: { 37 | 'Content-Type': 'application/json', 38 | }, 39 | }) 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/googleClientAdapter/models/index.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { get } from './get' 3 | 4 | export const models = ({ 5 | google, 6 | }: { 7 | google: OpenAI 8 | }) => ({ 9 | get: get({ google }), 10 | 
}) 11 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/groqClientAdapter/completions/index.ts: -------------------------------------------------------------------------------- 1 | import type Groq from 'groq-sdk' 2 | import { post } from './post' 3 | 4 | export const completions = ({ 5 | groq, 6 | }: { 7 | groq: Groq 8 | }) => ({ 9 | post: post({ groq }), 10 | }) 11 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/groqClientAdapter/completions/post.ts: -------------------------------------------------------------------------------- 1 | import type Groq from 'groq-sdk' 2 | 3 | export const post = ({ 4 | groq, 5 | }: { 6 | groq: Groq 7 | }) => async (_url: string, options: any) => { 8 | const body = JSON.parse(options.body) 9 | 10 | if (body.stream) { 11 | const response = await groq.chat.completions.create(body) 12 | 13 | const stream = new ReadableStream({ 14 | async start(controller) { 15 | // @ts-ignore-next-line 16 | for await (const chunk of response) { 17 | controller.enqueue(`data: ${JSON.stringify(chunk)}\n\n`) 18 | } 19 | 20 | controller.close() 21 | }, 22 | }) 23 | 24 | return new Response(stream, { 25 | headers: { 26 | 'Content-Type': 'text/event-stream', 27 | }, 28 | }) 29 | } else { 30 | try { 31 | const data = await groq.chat.completions.create(body) 32 | 33 | return new Response(JSON.stringify({ 34 | data, 35 | }), { 36 | status: 200, 37 | headers: { 38 | 'Content-Type': 'application/json', 39 | }, 40 | }) 41 | } catch (error) { 42 | return new Response(JSON.stringify({ 43 | error, 44 | }), { 45 | status: 500, 46 | headers: { 47 | 'Content-Type': 'application/json', 48 | }, 49 | }) 50 | } 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/groqClientAdapter/index.ts: 
-------------------------------------------------------------------------------- 1 | // import type Groq from 'groq-sdk' 2 | import { models } from './models' 3 | import { completions } from './completions' 4 | 5 | export const groqClientAdapter = ({ 6 | groq, 7 | }: { 8 | // TODO 9 | groq: any 10 | }) => ({ 11 | client: groq, 12 | requestHandlers: { 13 | '^/v1/models$': models({ groq }), 14 | '^/v1/chat/completions$': completions({ groq }), 15 | }, 16 | }) 17 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/groqClientAdapter/models/get.ts: -------------------------------------------------------------------------------- 1 | import type Groq from 'groq-sdk' 2 | 3 | export const get = ({ 4 | groq, 5 | }: { 6 | groq: Groq 7 | }) => async (_url: string, _options: any) => { 8 | try { 9 | const data = await groq.models.list() 10 | 11 | return new Response(JSON.stringify(data), { 12 | status: 200, 13 | headers: { 14 | 'Content-Type': 'application/json', 15 | }, 16 | }) 17 | } catch (error) { 18 | return new Response(JSON.stringify({ 19 | error, 20 | }), { 21 | status: 500, 22 | headers: { 23 | 'Content-Type': 'application/json', 24 | }, 25 | }) 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/groqClientAdapter/models/index.ts: -------------------------------------------------------------------------------- 1 | import type Groq from 'groq-sdk' 2 | import { get } from './get' 3 | 4 | export const models = ({ 5 | groq, 6 | }: { 7 | groq: Groq 8 | }) => ({ 9 | get: get({ groq }), 10 | }) 11 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/humirisClientAdapter/completions/index.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { post } from './post' 3 | 4 
| export const completions = ({ 5 | humiris, 6 | }: { 7 | humiris: OpenAI 8 | }) => ({ 9 | post: post({ humiris }), 10 | }) 11 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/humirisClientAdapter/completions/post.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | 3 | export const post = ({ 4 | humiris, 5 | }: { 6 | humiris: OpenAI 7 | }) => async (_url: string, options: any) => { 8 | const body = JSON.parse(options.body) 9 | 10 | if (body.stream) { 11 | const data = await humiris.chat.completions.create({ 12 | ...body, 13 | stream: false, 14 | }) 15 | 16 | const stream = new ReadableStream({ 17 | async start(controller) { 18 | const chunk = { 19 | id: data.id, 20 | object: 'chat.completion.chunk', 21 | created: data.created, 22 | model: data.created, 23 | choices: [ 24 | { 25 | index: 0, 26 | delta: { 27 | role: data.choices[0].message.role, 28 | content: data.choices[0].message.content, 29 | }, 30 | logprobs: null, 31 | finish_reason: data.choices[0].finish_reason, 32 | } 33 | ] 34 | } 35 | 36 | controller.enqueue(`data: ${JSON.stringify(chunk)}\n\n`) 37 | controller.close() 38 | }, 39 | }) 40 | 41 | return new Response(stream, { 42 | headers: { 43 | 'Content-Type': 'text/event-stream', 44 | }, 45 | }) 46 | } else { 47 | try { 48 | const data = await humiris.chat.completions.create(body) 49 | 50 | return new Response(JSON.stringify({ 51 | data, 52 | }), { 53 | status: 200, 54 | headers: { 55 | 'Content-Type': 'application/json', 56 | }, 57 | }) 58 | } catch (error) { 59 | return new Response(JSON.stringify({ 60 | error, 61 | }), { 62 | status: 500, 63 | headers: { 64 | 'Content-Type': 'application/json', 65 | }, 66 | }) 67 | } 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/humirisClientAdapter/index.ts: 
-------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { models } from './models' 3 | import { completions } from './completions' 4 | 5 | export const humirisClientAdapter = ({ 6 | humiris, 7 | }: { 8 | humiris: OpenAI 9 | }) => ({ 10 | client: humiris, 11 | requestHandlers: { 12 | '^/v1/models$': models({ humiris }), 13 | '^/(?:v1|/?openai)/chat/completions$': completions({ humiris }), 14 | }, 15 | }) 16 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/humirisClientAdapter/models/get.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | 3 | const models = [ 4 | 'Humiris/humiris-moai', 5 | ] 6 | 7 | export const get = ({ 8 | humiris, 9 | }: { 10 | humiris: OpenAI 11 | }) => async (_url: string, _options: any) => { 12 | try { 13 | return new Response(JSON.stringify({ 14 | type: 'list', 15 | data: models.map((model) => ({ 16 | id: model, 17 | object: 'model', 18 | })), 19 | }), { 20 | status: 200, 21 | headers: { 22 | 'Content-Type': 'application/json', 23 | }, 24 | }) 25 | } catch (error) { 26 | return new Response(JSON.stringify({ 27 | error, 28 | }), { 29 | status: 500, 30 | headers: { 31 | 'Content-Type': 'application/json', 32 | }, 33 | }) 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/humirisClientAdapter/models/index.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { get } from './get' 3 | 4 | export const models = ({ 5 | humiris, 6 | }: { 7 | humiris: OpenAI 8 | }) => ({ 9 | get: get({ humiris }), 10 | }) 11 | -------------------------------------------------------------------------------- 
/packages/supercompat/src/adapters/client/mistralClientAdapter/completions/index.ts: -------------------------------------------------------------------------------- 1 | import type { Mistral } from '@mistralai/mistralai' 2 | import { post } from './post' 3 | 4 | export const completions = ({ 5 | mistral, 6 | }: { 7 | mistral: Mistral 8 | }) => ({ 9 | post: post({ mistral }), 10 | }) 11 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/mistralClientAdapter/completions/post.ts: -------------------------------------------------------------------------------- 1 | import type { Mistral } from '@mistralai/mistralai' 2 | import { serializeChunk } from './serializeChunk' 3 | import { serializeBody } from './serializeBody' 4 | 5 | export const post = ({ 6 | mistral, 7 | }: { 8 | mistral: Mistral 9 | }) => async (_url: string, options: any) => { 10 | const body = JSON.parse(options.body) 11 | const serializedBody = serializeBody({ 12 | body, 13 | }) 14 | 15 | if (body.stream) { 16 | const response = await mistral.chat.stream(serializedBody) 17 | 18 | const stream = new ReadableStream({ 19 | async start(controller) { 20 | for await (const chunk of response) { 21 | const serializedChunk = serializeChunk({ 22 | chunk, 23 | }) 24 | 25 | controller.enqueue(`data: ${JSON.stringify(serializedChunk)}\n\n`) 26 | } 27 | 28 | controller.close() 29 | }, 30 | }) 31 | 32 | return new Response(stream, { 33 | headers: { 34 | 'Content-Type': 'text/event-stream', 35 | }, 36 | }) 37 | } else { 38 | try { 39 | const data = await mistral.chat.complete(serializedBody) 40 | 41 | return new Response(JSON.stringify({ 42 | data, 43 | }), { 44 | status: 200, 45 | headers: { 46 | 'Content-Type': 'application/json', 47 | }, 48 | }) 49 | } catch (error) { 50 | return new Response(JSON.stringify({ 51 | error, 52 | }), { 53 | status: 500, 54 | headers: { 55 | 'Content-Type': 'application/json', 56 | }, 57 | }) 58 | } 59 | } 60 | } 
61 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/mistralClientAdapter/completions/serializeBody.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | 3 | const serializeMessage = ({ 4 | message: { 5 | // @ts-ignore-next-line 6 | tool_calls, 7 | // @ts-ignore-next-line 8 | tool_call_id, 9 | ...rest 10 | }, 11 | }: { 12 | message: OpenAI.ChatCompletionMessageParam 13 | }) => ({ 14 | ...rest, 15 | ...(tool_call_id ? { 16 | toolCallId: tool_call_id, 17 | } : {}), 18 | ...(tool_calls ? { 19 | toolCalls: tool_calls, 20 | } : {}), 21 | }) 22 | 23 | export const serializeBody = ({ 24 | body, 25 | }: { 26 | body: any 27 | }) => ({ 28 | ...body, 29 | messages: body.messages.map((message: OpenAI.ChatCompletionMessageParam) => ( 30 | serializeMessage({ 31 | message, 32 | }) 33 | )), 34 | }) 35 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/mistralClientAdapter/completions/serializeChunk.ts: -------------------------------------------------------------------------------- 1 | const serializeDelta = ({ 2 | delta: { 3 | toolCalls, 4 | ...rest 5 | }, 6 | }: { 7 | delta: any 8 | }) => ({ 9 | ...rest, 10 | ...(toolCalls ? { 11 | tool_calls: toolCalls, 12 | } : {}), 13 | }) 14 | 15 | const serializeChoice = ({ 16 | choice: { 17 | finishReason, 18 | delta, 19 | ...rest 20 | }, 21 | }: { 22 | choice: any 23 | }) => ({ 24 | ...rest, 25 | finish_reason: finishReason ?? null, 26 | delta: serializeDelta({ delta }), 27 | }) 28 | 29 | export const serializeChunk = ({ 30 | chunk, 31 | }: { 32 | chunk: any 33 | }) => ({ 34 | ...chunk.data, 35 | ...(chunk.data.choices ? 
{ 36 | choices: chunk.data.choices.map((choice: any) => ( 37 | serializeChoice({ choice }) 38 | )), 39 | }: {}), 40 | }) 41 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/mistralClientAdapter/index.ts: -------------------------------------------------------------------------------- 1 | import type { Mistral } from '@mistralai/mistralai' 2 | import { models } from './models' 3 | import { completions } from './completions' 4 | 5 | export const mistralClientAdapter = ({ 6 | mistral, 7 | }: { 8 | mistral: Mistral 9 | }) => ({ 10 | client: mistral, 11 | requestHandlers: { 12 | '^/v1/models$': models({ mistral }), 13 | '^/v1/chat/completions$': completions({ mistral }), 14 | }, 15 | }) 16 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/mistralClientAdapter/models/get.ts: -------------------------------------------------------------------------------- 1 | import type { Mistral } from '@mistralai/mistralai' 2 | 3 | export const get = ({ 4 | mistral, 5 | }: { 6 | mistral: Mistral 7 | }) => async (_url: string, _options: any) => { 8 | try { 9 | const data = await mistral.models.list() 10 | 11 | return new Response(JSON.stringify(data), { 12 | status: 200, 13 | headers: { 14 | 'Content-Type': 'application/json', 15 | }, 16 | }) 17 | } catch (error) { 18 | return new Response(JSON.stringify({ 19 | error, 20 | }), { 21 | status: 500, 22 | headers: { 23 | 'Content-Type': 'application/json', 24 | }, 25 | }) 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/mistralClientAdapter/models/index.ts: -------------------------------------------------------------------------------- 1 | import type { Mistral } from '@mistralai/mistralai' 2 | import { get } from './get' 3 | 4 | export const models = ({ 5 | mistral, 6 | }: { 7 | mistral: Mistral 8 | 
}) => ({ 9 | get: get({ mistral }), 10 | }) 11 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/ollamaClientAdapter/completions/index.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { post } from './post' 3 | 4 | export const completions = ({ 5 | ollama, 6 | }: { 7 | ollama: OpenAI 8 | }) => ({ 9 | post: post({ ollama }), 10 | }) 11 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/ollamaClientAdapter/completions/post.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | 3 | export const post = ({ 4 | ollama, 5 | }: { 6 | ollama: OpenAI 7 | }) => async (_url: string, options: any) => { 8 | const body = JSON.parse(options.body) 9 | 10 | if (body.stream) { 11 | const response = await ollama.chat.completions.create(body) 12 | 13 | const stream = new ReadableStream({ 14 | async start(controller) { 15 | // @ts-ignore-next-line 16 | for await (const chunk of response) { 17 | controller.enqueue(`data: ${JSON.stringify(chunk)}\n\n`) 18 | } 19 | 20 | controller.close() 21 | }, 22 | }) 23 | 24 | return new Response(stream, { 25 | headers: { 26 | 'Content-Type': 'text/event-stream', 27 | }, 28 | }) 29 | } else { 30 | try { 31 | const data = await ollama.chat.completions.create(body) 32 | 33 | return new Response(JSON.stringify({ 34 | data, 35 | }), { 36 | status: 200, 37 | headers: { 38 | 'Content-Type': 'application/json', 39 | }, 40 | }) 41 | } catch (error) { 42 | return new Response(JSON.stringify({ 43 | error, 44 | }), { 45 | status: 500, 46 | headers: { 47 | 'Content-Type': 'application/json', 48 | }, 49 | }) 50 | } 51 | } 52 | } 53 | -------------------------------------------------------------------------------- 
/packages/supercompat/src/adapters/client/ollamaClientAdapter/index.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { models } from './models' 3 | import { completions } from './completions' 4 | 5 | export const ollamaClientAdapter = ({ 6 | ollama, 7 | }: { 8 | ollama: OpenAI 9 | }) => ({ 10 | client: ollama, 11 | requestHandlers: { 12 | '^/v1/models$': models({ ollama }), 13 | '^/(?:v1|/?openai)/chat/completions$': completions({ ollama }), 14 | }, 15 | }) 16 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/ollamaClientAdapter/models/get.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | 3 | export const get = ({ 4 | ollama, 5 | }: { 6 | ollama: OpenAI 7 | }) => async (_url: string, _options: any) => { 8 | try { 9 | const data = await ollama.models.list() 10 | 11 | return new Response(JSON.stringify(data), { 12 | status: 200, 13 | headers: { 14 | 'Content-Type': 'application/json', 15 | }, 16 | }) 17 | } catch (error) { 18 | return new Response(JSON.stringify({ 19 | error, 20 | }), { 21 | status: 500, 22 | headers: { 23 | 'Content-Type': 'application/json', 24 | }, 25 | }) 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/ollamaClientAdapter/models/index.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { get } from './get' 3 | 4 | export const models = ({ 5 | ollama, 6 | }: { 7 | ollama: OpenAI 8 | }) => ({ 9 | get: get({ ollama }), 10 | }) 11 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/openaiClientAdapter/completions/index.ts: 
-------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { post } from './post' 3 | 4 | export const completions = ({ 5 | openai, 6 | }: { 7 | openai: OpenAI 8 | }) => ({ 9 | post: post({ openai }), 10 | }) 11 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/openaiClientAdapter/completions/post.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { omit } from 'radash' 3 | import { systemDeveloperMessages } from '@/lib/messages/systemDeveloperMessages' 4 | import { isOModel } from '@/lib/models/isOModel' 5 | 6 | const omitKeys = ({ 7 | model, 8 | }: { 9 | model: string 10 | }) => { 11 | if (isOModel({ model })) { 12 | return ['tools'] 13 | } 14 | 15 | return [] 16 | } 17 | 18 | export const post = ({ 19 | openai, 20 | }: { 21 | openai: OpenAI 22 | }) => async (_url: string, options: any) => { 23 | const body = JSON.parse(options.body) 24 | const messages = body.messages as OpenAI.ChatCompletionMessageParam[] 25 | 26 | const resultOptions = { 27 | ...omit(body, omitKeys({ model: body.model })), 28 | messages: systemDeveloperMessages({ 29 | messages, 30 | model: body.model, 31 | }), 32 | } as OpenAI.Chat.ChatCompletionCreateParams 33 | 34 | if (body.stream) { 35 | const response = await openai.chat.completions.create(resultOptions) 36 | 37 | const stream = new ReadableStream({ 38 | async start(controller) { 39 | // @ts-ignore-next-line 40 | for await (const chunk of response) { 41 | controller.enqueue(`data: ${JSON.stringify(chunk)}\n\n`) 42 | } 43 | 44 | controller.close() 45 | }, 46 | }) 47 | 48 | return new Response(stream, { 49 | headers: { 50 | 'Content-Type': 'text/event-stream', 51 | }, 52 | }) 53 | } else { 54 | try { 55 | const data = await openai.chat.completions.create(resultOptions) 56 | 57 | return new Response(JSON.stringify({ 58 
| data, 59 | }), { 60 | status: 200, 61 | headers: { 62 | 'Content-Type': 'application/json', 63 | }, 64 | }) 65 | } catch (error) { 66 | return new Response(JSON.stringify({ 67 | error, 68 | }), { 69 | status: 500, 70 | headers: { 71 | 'Content-Type': 'application/json', 72 | }, 73 | }) 74 | } 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/openaiClientAdapter/index.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { models } from './models' 3 | import { completions } from './completions' 4 | 5 | export const openaiClientAdapter = ({ 6 | openai, 7 | }: { 8 | openai: OpenAI 9 | }) => ({ 10 | client: openai, 11 | requestHandlers: { 12 | '^/v1/models$': models({ openai }), 13 | '^/(?:v1|/?openai)/chat/completions$': completions({ openai }), 14 | }, 15 | }) 16 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/openaiClientAdapter/models/get.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | 3 | export const get = ({ 4 | openai, 5 | }: { 6 | openai: OpenAI 7 | }) => async (_url: string, _options: any) => { 8 | try { 9 | const data = await openai.models.list() 10 | 11 | return new Response(JSON.stringify(data), { 12 | status: 200, 13 | headers: { 14 | 'Content-Type': 'application/json', 15 | }, 16 | }) 17 | } catch (error) { 18 | return new Response(JSON.stringify({ 19 | error, 20 | }), { 21 | status: 500, 22 | headers: { 23 | 'Content-Type': 'application/json', 24 | }, 25 | }) 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/openaiClientAdapter/models/index.ts: -------------------------------------------------------------------------------- 1 | import type 
OpenAI from 'openai' 2 | import { get } from './get' 3 | 4 | export const models = ({ 5 | openai, 6 | }: { 7 | openai: OpenAI 8 | }) => ({ 9 | get: get({ openai }), 10 | }) 11 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/perplexityClientAdapter/completions/index.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { post } from './post' 3 | 4 | export const completions = ({ 5 | perplexity, 6 | }: { 7 | perplexity: OpenAI 8 | }) => ({ 9 | post: post({ perplexity }), 10 | }) 11 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/perplexityClientAdapter/completions/post.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { alternatingMessages } from '@/lib/messages/alternatingMessages' 3 | 4 | export const post = ({ 5 | perplexity, 6 | }: { 7 | perplexity: OpenAI 8 | }) => async (_url: string, options: any) => { 9 | const body = JSON.parse(options.body) 10 | 11 | const messages = alternatingMessages({ 12 | messages: body.messages, 13 | }) 14 | 15 | if (body.stream) { 16 | const response = await perplexity.chat.completions.create({ 17 | ...body, 18 | messages, 19 | }) 20 | 21 | const stream = new ReadableStream({ 22 | async start(controller) { 23 | // @ts-ignore-next-line 24 | for await (const chunk of response) { 25 | controller.enqueue(`data: ${JSON.stringify(chunk)}\n\n`) 26 | } 27 | 28 | controller.close() 29 | }, 30 | }) 31 | 32 | return new Response(stream, { 33 | headers: { 34 | 'Content-Type': 'text/event-stream', 35 | }, 36 | }) 37 | } else { 38 | try { 39 | const data = await perplexity.chat.completions.create(body) 40 | 41 | return new Response(JSON.stringify({ 42 | data, 43 | }), { 44 | status: 200, 45 | headers: { 46 | 'Content-Type': 
'application/json', 47 | }, 48 | }) 49 | } catch (error) { 50 | return new Response(JSON.stringify({ 51 | error, 52 | }), { 53 | status: 500, 54 | headers: { 55 | 'Content-Type': 'application/json', 56 | }, 57 | }) 58 | } 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/perplexityClientAdapter/index.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { models } from './models' 3 | import { completions } from './completions' 4 | 5 | export const perplexityClientAdapter = ({ 6 | perplexity, 7 | }: { 8 | perplexity: OpenAI 9 | }) => ({ 10 | client: perplexity, 11 | requestHandlers: { 12 | '^/v1/models$': models({ perplexity }), 13 | '^/v1/chat/completions$': completions({ perplexity }), 14 | }, 15 | }) 16 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/perplexityClientAdapter/models/get.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | 3 | const models = [ 4 | 'sonar-reasoning-pro', 5 | 'sonar-reasoning', 6 | 'sonar-pro', 7 | 'sonar', 8 | 'r1-1776', 9 | 'llama-3.1-sonar-small-128k-online', 10 | 'llama-3.1-sonar-large-128k-online', 11 | 'llama-3.1-sonar-huge-128k-online', 12 | ] 13 | 14 | export const get = ({ 15 | perplexity, 16 | }: { 17 | perplexity: OpenAI 18 | }) => async (_url: string, _options: any) => { 19 | try { 20 | return new Response(JSON.stringify({ 21 | type: 'list', 22 | data: models.map((model) => ({ 23 | id: model, 24 | object: 'model', 25 | })), 26 | }), { 27 | status: 200, 28 | headers: { 29 | 'Content-Type': 'application/json', 30 | }, 31 | }) 32 | } catch (error) { 33 | return new Response(JSON.stringify({ 34 | error, 35 | }), { 36 | status: 500, 37 | headers: { 38 | 'Content-Type': 'application/json', 39 | }, 40 | }) 41 | } 
42 | } 43 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/perplexityClientAdapter/models/index.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { get } from './get' 3 | 4 | export const models = ({ 5 | perplexity, 6 | }: { 7 | perplexity: OpenAI 8 | }) => ({ 9 | get: get({ perplexity }), 10 | }) 11 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/togetherClientAdapter/completions/index.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { post } from './post' 3 | 4 | export const completions = ({ 5 | together, 6 | }: { 7 | together: OpenAI 8 | }) => ({ 9 | post: post({ together }), 10 | }) 11 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/togetherClientAdapter/completions/post.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | 3 | export const post = ({ 4 | together, 5 | }: { 6 | together: OpenAI 7 | }) => async (_url: string, options: any) => { 8 | const body = JSON.parse(options.body) 9 | 10 | if (body.stream) { 11 | const response = await together.chat.completions.create(body) 12 | 13 | const stream = new ReadableStream({ 14 | async start(controller) { 15 | // @ts-ignore-next-line 16 | for await (const chunk of response) { 17 | controller.enqueue(`data: ${JSON.stringify(chunk)}\n\n`) 18 | } 19 | 20 | controller.close() 21 | }, 22 | }) 23 | 24 | return new Response(stream, { 25 | headers: { 26 | 'Content-Type': 'text/event-stream', 27 | }, 28 | }) 29 | } else { 30 | try { 31 | const data = await together.chat.completions.create(body) 32 | 33 | return new Response(JSON.stringify({ 34 | data, 35 | }), { 
36 | status: 200, 37 | headers: { 38 | 'Content-Type': 'application/json', 39 | }, 40 | }) 41 | } catch (error) { 42 | return new Response(JSON.stringify({ 43 | error, 44 | }), { 45 | status: 500, 46 | headers: { 47 | 'Content-Type': 'application/json', 48 | }, 49 | }) 50 | } 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/togetherClientAdapter/index.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { models } from './models' 3 | import { completions } from './completions' 4 | 5 | export const togetherClientAdapter = ({ 6 | together, 7 | }: { 8 | together: OpenAI 9 | }) => ({ 10 | client: together, 11 | requestHandlers: { 12 | '^/v1/models$': models({ together }), 13 | '^/(?:v1|/?openai)/chat/completions$': completions({ together }), 14 | }, 15 | }) 16 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/togetherClientAdapter/models/get.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | 3 | export const get = ({ 4 | together, 5 | }: { 6 | together: OpenAI 7 | }) => async (_url: string, _options: any) => { 8 | try { 9 | const data = await together.models.list() 10 | 11 | return new Response(JSON.stringify({ 12 | type: 'list', 13 | // @ts-ignore-next-line 14 | data: data.body, 15 | }), { 16 | status: 200, 17 | headers: { 18 | 'Content-Type': 'application/json', 19 | }, 20 | }) 21 | } catch (error) { 22 | return new Response(JSON.stringify({ 23 | error, 24 | }), { 25 | status: 500, 26 | headers: { 27 | 'Content-Type': 'application/json', 28 | }, 29 | }) 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/client/togetherClientAdapter/models/index.ts: 
-------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { get } from './get' 3 | 4 | export const models = ({ 5 | together, 6 | }: { 7 | together: OpenAI 8 | }) => ({ 9 | get: get({ together }), 10 | }) 11 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/run/completionsRunAdapter/messages/index.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { flat } from 'radash' 3 | import { MessageWithRun } from '@/types' 4 | import { serializeMessage } from './serializeMessage' 5 | 6 | export const messages = async ({ 7 | run, 8 | getMessages, 9 | }: { 10 | run: OpenAI.Beta.Threads.Run 11 | getMessages: () => Promise 12 | }) => ( 13 | [ 14 | ...(run.instructions ? [{ 15 | role: 'system', 16 | content: run.instructions, 17 | }] : []), 18 | ...flat((await getMessages()).map((message: MessageWithRun) => serializeMessage({ message }))), 19 | ] 20 | ) 21 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/run/completionsRunAdapter/messages/serializeMessage.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { MessageWithRun } from '@/types' 3 | 4 | const serializeToolCall = ({ 5 | toolCall, 6 | }: { 7 | toolCall: OpenAI.Beta.Threads.Runs.Steps.FunctionToolCall 8 | }) => ({ 9 | tool_call_id: toolCall.id, 10 | role: 'tool' as 'tool', 11 | name: toolCall.function.name, 12 | content: toolCall.function.output ?? 
'', 13 | }) 14 | 15 | const serializeMessageWithContent = ({ 16 | message, 17 | }: { 18 | message: MessageWithRun 19 | }) => ({ 20 | role: message.role, 21 | content: serializeContent({ 22 | content: message.content as unknown as OpenAI.Beta.Threads.Messages.TextContentBlock[], 23 | }), 24 | ...(message?.metadata?.toolCalls ? { tool_calls: message.metadata.toolCalls } : {}), 25 | }) 26 | 27 | const serializeContent = ({ 28 | content, 29 | }: { 30 | content: OpenAI.Beta.Threads.Messages.TextContentBlock[] 31 | }) => content.map((content) => content.text.value).join('\n') 32 | 33 | export const serializeMessage = ({ 34 | message 35 | }: { 36 | message: MessageWithRun 37 | }) => { 38 | const result = [serializeMessageWithContent({ message })] 39 | 40 | const run = message.run 41 | 42 | if (!run) return result 43 | 44 | const messageToolCalls = message.metadata?.toolCalls || [] 45 | 46 | messageToolCalls.forEach((tc: OpenAI.Beta.Threads.Runs.Steps.ToolCall) => { 47 | const runStep = run.runSteps.find((rs) => { 48 | if (rs.type !== 'tool_calls') return false 49 | 50 | return rs.step_details.tool_calls.some((rsTc: OpenAI.Beta.Threads.Runs.Steps.ToolCall) => { 51 | if (rsTc.type !== 'function') return false 52 | 53 | return rsTc.id === tc.id 54 | }) 55 | }) 56 | 57 | if (!runStep) return 58 | 59 | const toolCall = runStep.step_details.tool_calls.find((rsTc: OpenAI.Beta.Threads.Runs.Steps.ToolCall) => { 60 | if (rsTc.type !== 'function') return false 61 | 62 | return rsTc.id === tc.id 63 | }) 64 | 65 | result.push(serializeToolCall({ toolCall })) 66 | }) 67 | 68 | return result 69 | } 70 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/index.ts: -------------------------------------------------------------------------------- 1 | import type { PrismaClient } from '@prisma/client' 2 | import { StorageAdapterArgs } from '@/types' 3 | import { messagesRegexp } from 
'@/lib/messages/messagesRegexp' 4 | import { runsRegexp } from '@/lib/runs/runsRegexp' 5 | import { runRegexp } from '@/lib/runs/runRegexp' 6 | import { submitToolOutputsRegexp } from '@/lib/runs/submitToolOutputsRegexp' 7 | import { stepsRegexp } from '@/lib/steps/stepsRegexp' 8 | import { threads } from './threads' 9 | import { messages } from './threads/messages' 10 | import { runs } from './threads/runs' 11 | import { run } from './threads/run' 12 | import { steps } from './threads/runs/steps' 13 | import { submitToolOutputs } from './threads/runs/submitToolOutputs' 14 | 15 | export const prismaStorageAdapter = ({ 16 | prisma, 17 | }: { 18 | prisma: PrismaClient 19 | }) => ({ 20 | runAdapter, 21 | }: StorageAdapterArgs) => ({ 22 | requestHandlers: { 23 | '^/(?:v1|/?openai)/threads$': threads({ prisma }), 24 | [messagesRegexp]: messages({ prisma }), 25 | [runsRegexp]: runs({ prisma, runAdapter }), 26 | [runRegexp]: run({ prisma, runAdapter }), 27 | [stepsRegexp]: steps({ prisma }), 28 | [submitToolOutputsRegexp]: submitToolOutputs({ prisma, runAdapter }), 29 | }, 30 | }) 31 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/index.ts: -------------------------------------------------------------------------------- 1 | import type { PrismaClient } from '@prisma/client' 2 | import { post } from './post' 3 | 4 | export const threads = ({ 5 | prisma, 6 | }: { 7 | prisma: PrismaClient 8 | }) => ({ 9 | post: post({ prisma }), 10 | }) 11 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/messages/get.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | // @ts-ignore-next-line 3 | import type { PrismaClient, Message } from '@prisma/client' 4 | import { assign, last } from 'radash' 5 | import { 
messagesRegexp } from '@/lib/messages/messagesRegexp' 6 | import { serializeMessage } from './serializeMessage' 7 | 8 | type MessageCreateResponse = Response & { 9 | json: () => Promise> 10 | } 11 | 12 | export const get = ({ 13 | prisma, 14 | }: { 15 | prisma: PrismaClient 16 | }) => async (urlString: string): Promise => { 17 | const url = new URL(urlString) 18 | 19 | const [, threadId] = url.pathname.match(new RegExp(messagesRegexp))! 20 | 21 | const { 22 | limit, 23 | order, 24 | after, 25 | } = assign({ 26 | limit: '20', 27 | order: 'desc', 28 | // after: null, 29 | }, Object.fromEntries(url.searchParams)) 30 | 31 | const pageSize = parseInt(limit) 32 | 33 | const messagesPlusOne = await prisma.message.findMany({ 34 | where: { threadId }, 35 | take: pageSize + 1, 36 | orderBy: { createdAt: order }, 37 | ...(after && { 38 | skip: 1, 39 | cursor: { id: after }, 40 | }), 41 | }) as Message[] 42 | 43 | const messages = messagesPlusOne.slice(0, pageSize) 44 | 45 | return new Response(JSON.stringify({ 46 | data: messages.map((message: Message) => ( 47 | serializeMessage({ message }) 48 | )), 49 | has_more: messagesPlusOne.length > pageSize, 50 | last_id: last(messages)?.id ?? 
null, 51 | }), { 52 | status: 200, 53 | headers: { 54 | 'Content-Type': 'application/json', 55 | }, 56 | }) 57 | } 58 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/messages/index.ts: -------------------------------------------------------------------------------- 1 | import type { PrismaClient } from '@prisma/client' 2 | import { post } from './post' 3 | import { get } from './get' 4 | 5 | export const messages = ({ 6 | prisma, 7 | }: { 8 | prisma: PrismaClient 9 | }) => ({ 10 | post: post({ prisma }), 11 | get: get({ prisma }), 12 | }) 13 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/messages/post.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { isArray } from 'radash' 3 | import type { PrismaClient } from '@prisma/client' 4 | import { serializeMessage } from './serializeMessage' 5 | import { messagesRegexp } from '@/lib/messages/messagesRegexp' 6 | 7 | type MessageCreateResponse = Response & { 8 | json: () => Promise> 9 | } 10 | 11 | const messageContentBlocks = ({ 12 | content, 13 | }: { 14 | content: string | OpenAI.Beta.Threads.Messages.MessageContentPartParam[] 15 | }) => { 16 | if (isArray(content)) { 17 | return content.map((item) => { 18 | if (item.type === 'text') { 19 | return { 20 | type: 'text', 21 | text: { 22 | value: item.text ?? '', 23 | annotations: [], 24 | }, 25 | } 26 | } 27 | 28 | return item 29 | }) 30 | } 31 | 32 | return [ 33 | { 34 | type: 'text', 35 | text: { 36 | value: content ?? 
'', 37 | annotations: [], 38 | }, 39 | }, 40 | ] 41 | } 42 | 43 | export const post = ({ 44 | prisma, 45 | }: { 46 | prisma: PrismaClient 47 | }) => async (urlString: string, options: any): Promise => { 48 | const url = new URL(urlString) 49 | 50 | const [, threadId] = url.pathname.match(new RegExp(messagesRegexp))! 51 | 52 | const body = JSON.parse(options.body) 53 | const { role, content } = body 54 | 55 | const message = await prisma.message.create({ 56 | data: { 57 | threadId, 58 | content: messageContentBlocks({ content }), 59 | role: role === 'user' ? 'USER' : 'ASSISTANT', 60 | }, 61 | }) 62 | 63 | return new Response(JSON.stringify( 64 | serializeMessage({ message }), 65 | ), { 66 | status: 200, 67 | headers: { 68 | 'Content-Type': 'application/json', 69 | }, 70 | }) 71 | } 72 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/messages/serializeMessage.ts: -------------------------------------------------------------------------------- 1 | // @ts-ignore-next-line 2 | import type { Message } from '@prisma/client' 3 | import dayjs from 'dayjs' 4 | import type OpenAI from 'openai' 5 | import { assign } from 'radash' 6 | 7 | export const serializeMessage = ({ 8 | message, 9 | }: { 10 | message: Message 11 | }) => ({ 12 | id: message.id, 13 | object: 'thread.message' as 'thread.message', 14 | created_at: dayjs(message.createdAt).unix(), 15 | thread_id: message.threadId, 16 | completed_at: message.completedAt ? dayjs(message.completedAt).unix() : null, 17 | incomplete_at: message.incompleteAt ? 
dayjs(message.incompleteAt).unix() : null, 18 | incomplete_details: message.incompleteDetails as unknown as OpenAI.Beta.Threads.Messages.Message.IncompleteDetails, 19 | role: message.role.toLowerCase() as 'user' | 'assistant', 20 | content: message.content as unknown as OpenAI.Beta.Threads.Messages.TextContentBlock[], 21 | assistant_id: message.assistantId, 22 | run_id: message.runId, 23 | attachments: message.attachments, 24 | status: message.status.toLowerCase() as OpenAI.Beta.Threads.Messages.Message['status'], 25 | metadata: assign(message.metadata as Record ?? {}, message.toolCalls ? { toolCalls: message.toolCalls } : {}), 26 | }) 27 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/post.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import type { PrismaClient } from '@prisma/client' 3 | import dayjs from 'dayjs' 4 | import { serializeThread } from './serializeThread' 5 | 6 | type ThreadCreateResponse = Response & { 7 | json: () => Promise 8 | } 9 | 10 | export const post = ({ 11 | prisma, 12 | }: { 13 | prisma: PrismaClient 14 | }) => async (...args: Parameters): Promise => { 15 | // @ts-ignore-next-line 16 | const body = JSON.parse(args[1].body) 17 | 18 | const messages = body.messages || [] 19 | const metadata = body.metadata || {} 20 | 21 | const initialCreatedAt = dayjs().subtract(messages.length, 'seconds').format() 22 | 23 | const thread = await prisma.thread.create({ 24 | data: { 25 | metadata, 26 | ...(metadata.assistantId ? ({ 27 | assistant: { 28 | connect: { 29 | id: metadata.assistantId, 30 | }, 31 | }, 32 | }) : {}), 33 | messages: { 34 | create: messages.map((message: OpenAI.Beta.ThreadCreateParams.Message, index: number) => ({ 35 | role: message.role === 'user' ? 
'USER' : 'ASSISTANT', 36 | content: [{ 37 | type: 'text', 38 | text: { 39 | annotations: [], 40 | value: message.content, 41 | }, 42 | }, 43 | ], 44 | attachments: message.attachments, 45 | metadata: message.metadata, 46 | createdAt: dayjs(initialCreatedAt).add(index, 'seconds').toDate(), 47 | })), 48 | }, 49 | }, 50 | }) 51 | 52 | return new Response(JSON.stringify( 53 | serializeThread({ thread }), 54 | ), { 55 | status: 200, 56 | headers: { 57 | 'Content-Type': 'application/json', 58 | }, 59 | }) 60 | } 61 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/run/get.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import type { PrismaClient } from '@prisma/client' 3 | import { runRegexp } from '@/lib/runs/runRegexp' 4 | import { serializeRun } from '../runs/serializeRun' 5 | 6 | type GetResponse = Response & { 7 | json: () => Promise> 8 | } 9 | 10 | export const get = ({ 11 | prisma, 12 | }: { 13 | prisma: PrismaClient 14 | }) => async (urlString: string): Promise => { 15 | const url = new URL(urlString) 16 | 17 | const [, threadId, runId] = url.pathname.match(new RegExp(runRegexp))! 
18 | 19 | const run = await prisma.run.findUnique({ 20 | where: { 21 | id: runId, 22 | threadId, 23 | }, 24 | }) 25 | 26 | return new Response(JSON.stringify( 27 | serializeRun({ run }) 28 | ), { 29 | status: 200, 30 | headers: { 31 | 'Content-Type': 'application/json', 32 | 'openai-poll-after-ms': '5000', 33 | }, 34 | }) 35 | } 36 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/run/index.ts: -------------------------------------------------------------------------------- 1 | import type { PrismaClient } from '@prisma/client' 2 | import type { RunAdapter } from '@/types' 3 | import { get } from './get' 4 | // import { post } from './post' 5 | 6 | export const run = ({ 7 | prisma, 8 | runAdapter, 9 | }: { 10 | prisma: PrismaClient 11 | runAdapter: RunAdapter 12 | }) => ({ 13 | get: get({ prisma }), 14 | // post: post({ prisma, runAdapter }), 15 | }) 16 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/runs/get.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | // @ts-ignore-next-line 3 | import type { PrismaClient, Run } from '@prisma/client' 4 | import { assign, last } from 'radash' 5 | import { runsRegexp } from '@/lib/runs/runsRegexp' 6 | import { serializeRun } from './serializeRun' 7 | 8 | type MessageCreateResponse = Response & { 9 | json: () => Promise> 10 | } 11 | 12 | export const get = ({ 13 | prisma, 14 | }: { 15 | prisma: PrismaClient 16 | }) => async (urlString: string): Promise => { 17 | const url = new URL(urlString) 18 | 19 | const [, threadId] = url.pathname.match(new RegExp(runsRegexp))! 
20 | 21 | const { 22 | limit, 23 | order, 24 | after, 25 | } = assign({ 26 | limit: '20', 27 | order: 'desc', 28 | // after: null, 29 | }, Object.fromEntries(url.searchParams)) 30 | 31 | const pageSize = parseInt(limit, 10) 32 | 33 | const runsPlusOne = await prisma.run.findMany({ 34 | where: { threadId }, 35 | take: pageSize + 1, 36 | orderBy: { createdAt: order }, 37 | ...(after && { 38 | skip: 1, 39 | cursor: { id: after }, 40 | }), 41 | }) as Run[] 42 | 43 | const runs = runsPlusOne.slice(0, pageSize) 44 | 45 | return new Response(JSON.stringify({ 46 | data: runs.map((run: Run) => serializeRun({ run })), 47 | has_more: runsPlusOne.length > pageSize, 48 | last_id: runs.at(-1)?.id ?? null, 49 | }), { 50 | status: 200, 51 | headers: { 'Content-Type': 'application/json' }, 52 | }) 53 | } 54 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/runs/getMessages.ts: -------------------------------------------------------------------------------- 1 | import type { PrismaClient } from '@prisma/client' 2 | import { serializeMessage } from '../messages/serializeMessage' 3 | import { serializeRunStep } from './steps/serializeRunStep' 4 | import { serializeRun } from './serializeRun' 5 | import type { Run, MessageWithRun, RunStep } from '@/types/prisma' 6 | 7 | const getTake = ({ 8 | run, 9 | }: { 10 | run: Run 11 | }) => { 12 | // @ts-ignore-next-line 13 | if (run.truncationStrategy.type === 'auto') { 14 | return null 15 | } 16 | 17 | // @ts-ignore-next-line 18 | if (run.truncationStrategy.type === 'last_messages') { 19 | // @ts-ignore-next-line 20 | if (!run.truncationStrategy.last_messages) { 21 | throw new Error('Truncation strategy last_messages is required') 22 | } 23 | 24 | // @ts-ignore-next-line 25 | return -run.truncationStrategy.last_messages 26 | } 27 | 28 | // @ts-ignore-next-line 29 | throw new Error(`Unsupported truncation strategy type: 
${run.truncationStrategy.type}`) 30 | } 31 | 32 | export const getMessages = ({ 33 | prisma, 34 | run, 35 | }: { 36 | prisma: PrismaClient 37 | run: Run 38 | }) => async () => { 39 | const take = getTake({ 40 | run, 41 | }) 42 | 43 | const messages = await prisma.message.findMany({ 44 | where: { 45 | threadId: run.threadId, 46 | }, 47 | include: { 48 | run: { 49 | include: { 50 | runSteps: true, 51 | }, 52 | }, 53 | }, 54 | orderBy: { 55 | createdAt: 'asc', 56 | }, 57 | ...(take ? { take } : {}), 58 | }) 59 | 60 | return messages.map((message: MessageWithRun) => ({ 61 | ...serializeMessage({ message }), 62 | run: message.run ? ({ 63 | ...serializeRun({ run: message.run }), 64 | runSteps: message.run.runSteps.map((runStep: RunStep) => ( 65 | serializeRunStep({ runStep }) 66 | )), 67 | }) : null, 68 | })) 69 | } 70 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/runs/index.ts: -------------------------------------------------------------------------------- 1 | import type { PrismaClient } from '@prisma/client' 2 | import type { RunAdapter } from '@/types' 3 | import { get } from './get' 4 | import { post } from './post' 5 | 6 | export const runs = ({ 7 | prisma, 8 | runAdapter, 9 | }: { 10 | prisma: PrismaClient 11 | runAdapter: RunAdapter 12 | }) => ({ 13 | get: get({ prisma }), 14 | post: post({ prisma, runAdapter }), 15 | }) 16 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/runs/onEvent/handlers/index.ts: -------------------------------------------------------------------------------- 1 | import { threadRunInProgress } from './threadRunInProgress' 2 | import { threadRunFailed } from './threadRunFailed' 3 | import { threadRunCompleted } from './threadRunCompleted' 4 | import { threadRunRequiresAction } from './threadRunRequiresAction' 5 | import { 
threadRunStepCreated } from './threadRunStepCreated' 6 | import { threadRunStepDelta } from './threadRunStepDelta' 7 | import { threadMessageCreated } from './threadMessageCreated' 8 | import { threadMessageDelta } from './threadMessageDelta' 9 | import { threadMessageCompleted } from './threadMessageCompleted' 10 | 11 | export const handlers = { 12 | 'thread.run.in_progress': threadRunInProgress, 13 | 'thread.run.failed': threadRunFailed, 14 | 'thread.run.completed': threadRunCompleted, 15 | 'thread.run.requires_action': threadRunRequiresAction, 16 | 'thread.run.step.created': threadRunStepCreated, 17 | 'thread.run.step.delta': threadRunStepDelta, 18 | 'thread.message.created': threadMessageCreated, 19 | 'thread.message.delta': threadMessageDelta, 20 | 'thread.message.completed': threadMessageCompleted, 21 | } 22 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/runs/onEvent/handlers/threadMessageCompleted.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { MessageStatus, RunStepType } from '@/types/prisma' 3 | import type { PrismaClient } from '@prisma/client' 4 | 5 | export const threadMessageCompleted = async ({ 6 | prisma, 7 | event, 8 | controller, 9 | }: { 10 | prisma: PrismaClient 11 | event: OpenAI.Beta.AssistantStreamEvent.ThreadMessageCompleted 12 | controller: ReadableStreamDefaultController 13 | }) => { 14 | controller.enqueue(event) 15 | 16 | if (event.data.tool_calls) { 17 | const latestRunStep = await prisma.runStep.findFirst({ 18 | where: { 19 | threadId: event.data.thread_id, 20 | type: RunStepType.TOOL_CALLS, 21 | }, 22 | orderBy: { 23 | createdAt: 'desc', 24 | }, 25 | }) 26 | 27 | if (!latestRunStep) { 28 | throw new Error('No run step found') 29 | } 30 | 31 | await prisma.runStep.update({ 32 | where: { 33 | id: latestRunStep.id, 34 | }, 35 | data: { 36 | 
stepDetails: { 37 | type: 'tool_calls', 38 | tool_calls: event.data.tool_calls, 39 | }, 40 | }, 41 | }) 42 | } 43 | 44 | return prisma.message.update({ 45 | where: { 46 | id: event.data.id, 47 | }, 48 | data: { 49 | status: MessageStatus.COMPLETED, 50 | ...(event.data.content ? { content: event.data.content } : {}), 51 | ...(event.data.tool_calls ? { toolCalls: event.data.tool_calls } : {}), 52 | }, 53 | }) 54 | } 55 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/runs/onEvent/handlers/threadMessageCreated.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { MessageStatus } from '@/types/prisma' 3 | import type { PrismaClient } from '@prisma/client' 4 | import { serializeMessage } from '../../../messages/serializeMessage' 5 | 6 | const status = (event: OpenAI.Beta.AssistantStreamEvent.ThreadMessageCreated) => { 7 | if (event.data.status === 'completed') return MessageStatus.COMPLETED 8 | if (event.data.status === 'in_progress') return MessageStatus.IN_PROGRESS 9 | if (event.data.status === 'incomplete') return MessageStatus.INCOMPLETE 10 | 11 | throw new Error(`Unknown status: ${event.data.status}`) 12 | } 13 | 14 | export const threadMessageCreated = async ({ 15 | prisma, 16 | event, 17 | controller, 18 | }: { 19 | prisma: PrismaClient 20 | event: OpenAI.Beta.AssistantStreamEvent.ThreadMessageCreated 21 | controller: ReadableStreamDefaultController 22 | }) => { 23 | const message = await prisma.message.create({ 24 | data: { 25 | threadId: event.data.thread_id, 26 | content: event.data.content as unknown as OpenAI.Beta.Threads.Messages.TextContentBlock[], 27 | role: event.data.role === 'assistant' ? 
'ASSISTANT' : 'USER', 28 | assistantId: event.data.assistant_id, 29 | runId: event.data.run_id, 30 | status: status(event), 31 | }, 32 | }) 33 | 34 | const serializedMessage = serializeMessage({ message }) 35 | 36 | controller.enqueue({ 37 | ...event, 38 | data: serializedMessage, 39 | }) 40 | 41 | return serializedMessage 42 | } 43 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/runs/onEvent/handlers/threadMessageDelta.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | 3 | export const threadMessageDelta = ({ 4 | event, 5 | controller, 6 | }: { 7 | event: OpenAI.Beta.AssistantStreamEvent.ThreadMessageDelta 8 | controller: ReadableStreamDefaultController 9 | }) => ( 10 | controller.enqueue(event) 11 | ) 12 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/runs/onEvent/handlers/threadRunCompleted.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { RunStatus } from '@/types/prisma' 3 | import type { PrismaClient } from '@prisma/client' 4 | 5 | export const threadRunCompleted = ({ 6 | prisma, 7 | event, 8 | controller, 9 | }: { 10 | prisma: PrismaClient 11 | event: OpenAI.Beta.AssistantStreamEvent.ThreadRunCompleted 12 | controller: ReadableStreamDefaultController 13 | }) => { 14 | controller.enqueue(event) 15 | 16 | return prisma.run.update({ 17 | where: { 18 | id: event.data.id, 19 | }, 20 | data: { 21 | status: RunStatus.COMPLETED, 22 | requiredAction: undefined, 23 | }, 24 | }) 25 | } 26 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/runs/onEvent/handlers/threadRunFailed.ts: 
-------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { RunStatus } from '@/types/prisma' 3 | import type { PrismaClient } from '@prisma/client' 4 | 5 | export const threadRunFailed = ({ 6 | prisma, 7 | event, 8 | controller, 9 | }: { 10 | prisma: PrismaClient 11 | event: OpenAI.Beta.AssistantStreamEvent.ThreadRunFailed 12 | controller: ReadableStreamDefaultController 13 | }) => { 14 | controller.enqueue(event) 15 | 16 | return prisma.run.update({ 17 | where: { 18 | id: event.data.id, 19 | }, 20 | data: { 21 | status: RunStatus.FAILED, 22 | failedAt: event.data.failed_at, 23 | lastError: event.data.last_error, 24 | }, 25 | }) 26 | } 27 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/runs/onEvent/handlers/threadRunInProgress.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { RunStatus } from '@/types/prisma' 3 | import type { PrismaClient } from '@prisma/client' 4 | 5 | export const threadRunInProgress = ({ 6 | prisma, 7 | event, 8 | controller, 9 | }: { 10 | prisma: PrismaClient 11 | event: OpenAI.Beta.AssistantStreamEvent.ThreadRunInProgress 12 | controller: ReadableStreamDefaultController 13 | }) => { 14 | controller.enqueue(event) 15 | 16 | return prisma.run.update({ 17 | where: { 18 | id: event.data.id, 19 | }, 20 | data: { 21 | status: RunStatus.IN_PROGRESS, 22 | }, 23 | }) 24 | } 25 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/runs/onEvent/handlers/threadRunRequiresAction.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import type { PrismaClient } from '@prisma/client' 3 | import { RunStatus } from 
'@/types/prisma' 4 | 5 | export const threadRunRequiresAction = ({ 6 | prisma, 7 | event, 8 | controller, 9 | }: { 10 | prisma: PrismaClient 11 | event: OpenAI.Beta.AssistantStreamEvent.ThreadRunRequiresAction 12 | controller: ReadableStreamDefaultController 13 | }) => { 14 | controller.enqueue(event) 15 | 16 | return prisma.run.update({ 17 | where: { 18 | id: event.data.id, 19 | }, 20 | data: { 21 | status: RunStatus.REQUIRES_ACTION, 22 | requiredAction: event.data.required_action, 23 | }, 24 | }) 25 | } 26 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/runs/onEvent/handlers/threadRunStepCreated.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { RunStepType, RunStepStatus } from '@/types/prisma' 3 | import type { PrismaClient } from '@prisma/client' 4 | import { serializeRunStep } from '../../steps/serializeRunStep' 5 | 6 | const type = (event: OpenAI.Beta.AssistantStreamEvent.ThreadRunStepCreated) => { 7 | if (event.data.type === 'message_creation') return RunStepType.MESSAGE_CREATION 8 | if (event.data.type === 'tool_calls') return RunStepType.TOOL_CALLS 9 | 10 | throw new Error(`Unknown type: ${event.data.type}`) 11 | } 12 | 13 | const status = (event: OpenAI.Beta.AssistantStreamEvent.ThreadRunStepCreated) => { 14 | if (event.data.status === 'in_progress') return RunStepStatus.IN_PROGRESS 15 | if (event.data.status === 'cancelled') return RunStepStatus.CANCELLED 16 | if (event.data.status === 'completed') return RunStepStatus.COMPLETED 17 | if (event.data.status === 'failed') return RunStepStatus.FAILED 18 | if (event.data.status === 'expired') return RunStepStatus.EXPIRED 19 | 20 | throw new Error(`Unknown status: ${event.data.status}`) 21 | } 22 | 23 | export const threadRunStepCreated = async ({ 24 | prisma, 25 | event, 26 | controller, 27 | }: { 28 | prisma: 
PrismaClient 29 | event: OpenAI.Beta.AssistantStreamEvent.ThreadRunStepCreated 30 | controller: ReadableStreamDefaultController 31 | }) => { 32 | const runStep = await prisma.runStep.create({ 33 | data: { 34 | runId: event.data.run_id, 35 | assistantId: event.data.assistant_id, 36 | threadId: event.data.thread_id, 37 | type: type(event), 38 | status: status(event), 39 | stepDetails: event.data.step_details, 40 | completedAt: event.data.completed_at, 41 | }, 42 | }) 43 | 44 | const serializedRunStep = serializeRunStep({ runStep }) 45 | 46 | controller.enqueue({ 47 | ...event, 48 | data: serializedRunStep, 49 | }) 50 | 51 | return serializedRunStep 52 | } 53 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/runs/onEvent/handlers/threadRunStepDelta.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | 3 | export const threadRunStepDelta = ({ 4 | event, 5 | controller, 6 | }: { 7 | event: OpenAI.Beta.AssistantStreamEvent.ThreadRunStepDelta 8 | controller: ReadableStreamDefaultController 9 | }) => ( 10 | controller.enqueue(event) 11 | ) 12 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/runs/onEvent/index.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import type { PrismaClient } from '@prisma/client' 3 | import { handlers } from './handlers' 4 | 5 | export const onEvent = ({ 6 | prisma, 7 | controller, 8 | }: { 9 | prisma: PrismaClient 10 | controller: ReadableStreamDefaultController 11 | }) => (event: OpenAI.Beta.AssistantStreamEvent) => { 12 | // @ts-ignore-next-line 13 | const handler = handlers[event.event] 14 | 15 | if (!handler) { 16 | console.log('No handler for event', event) 17 | return 18 | } 19 | 20 | 
return handler({ prisma, controller, event }) 21 | } 22 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/runs/post.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import type { PrismaClient } from '@prisma/client' 3 | import dayjs from 'dayjs' 4 | import { assign } from 'radash' 5 | import { runsRegexp } from '@/lib/runs/runsRegexp' 6 | import { serializeRun } from './serializeRun' 7 | import { RunAdapterPartobClient } from '@/types' 8 | import { onEvent } from './onEvent' 9 | import { getMessages } from './getMessages' 10 | 11 | type RunCreateResponse = Response & { 12 | json: () => Promise> 13 | } 14 | 15 | export const post = ({ 16 | prisma, 17 | runAdapter, 18 | }: { 19 | prisma: PrismaClient 20 | runAdapter: RunAdapterPartobClient 21 | }) => async (urlString: string, options: any): Promise => { 22 | const url = new URL(urlString) 23 | const [, threadId] = url.pathname.match(new RegExp(runsRegexp))! 
24 | 25 | const body = JSON.parse(options.body) 26 | const { assistant_id, stream } = body 27 | 28 | const assistant = await prisma.assistant.findUnique({ 29 | where: { 30 | id: assistant_id, 31 | }, 32 | }) 33 | 34 | if (!assistant) { 35 | throw new Error('Assistant not found') 36 | } 37 | 38 | const { 39 | model, 40 | instructions, 41 | // additional_instructions, 42 | tools, 43 | metadata, 44 | response_format, 45 | truncation_strategy, 46 | } = assign({ 47 | model: assistant.modelSlug, 48 | instructions: '', 49 | additional_instructions: null, 50 | truncation_strategy: { 51 | type: 'auto', 52 | }, 53 | response_format: { 54 | type: 'text', 55 | }, 56 | // tools: [], 57 | // metadata: {}, 58 | }, body) 59 | 60 | const run = await prisma.run.create({ 61 | data: { 62 | status: 'QUEUED', 63 | expiresAt: dayjs().add(1, 'hour').unix(), 64 | model, 65 | instructions, 66 | tools, 67 | metadata, 68 | thread: { 69 | connect: { 70 | id: threadId, 71 | }, 72 | }, 73 | assistant: { 74 | connect: { 75 | id: assistant_id, 76 | }, 77 | }, 78 | truncationStrategy: truncation_strategy, 79 | responseFormat: response_format, 80 | }, 81 | }) 82 | 83 | const data = serializeRun({ run }) 84 | 85 | const readableStream = new ReadableStream({ 86 | async start(controller) { 87 | try { 88 | await runAdapter({ 89 | run: data, 90 | onEvent: onEvent({ 91 | controller: { 92 | ...controller, 93 | enqueue: (data) => { 94 | controller.enqueue(`data: ${JSON.stringify(data)}\n\n`) 95 | }, 96 | }, 97 | prisma, 98 | }), 99 | getMessages: getMessages({ 100 | prisma, 101 | run, 102 | }), 103 | }) 104 | } catch (error: any) { 105 | console.error(error) 106 | 107 | onEvent({ 108 | controller: { 109 | ...controller, 110 | enqueue: (data) => { 111 | controller.enqueue(`data: ${JSON.stringify(data)}\n\n`) 112 | }, 113 | }, 114 | prisma, 115 | })({ 116 | event: 'thread.run.failed', 117 | data: { 118 | id: run.id, 119 | failed_at: dayjs().unix(), 120 | last_error: { 121 | code: 'server_error', 122 | 
message: `${error?.message ?? ''} ${error?.cause?.message ?? ''}`, 123 | }, 124 | }, 125 | } as OpenAI.Beta.AssistantStreamEvent.ThreadRunFailed) 126 | } 127 | 128 | controller.close() 129 | }, 130 | }) 131 | 132 | if (stream) { 133 | return new Response(readableStream, { 134 | headers: { 135 | 'Content-Type': 'text/event-stream', 136 | }, 137 | }) 138 | } else { 139 | return new Response(JSON.stringify( 140 | data 141 | ), { 142 | status: 200, 143 | headers: { 144 | 'Content-Type': 'application/json', 145 | }, 146 | }) 147 | } 148 | } 149 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/runs/serializeRun.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | // @ts-ignore-next-line 3 | import type { Run } from '@prisma/client' 4 | import dayjs from 'dayjs' 5 | 6 | export const serializeRun = ({ 7 | run, 8 | }: { 9 | run: Run 10 | }): OpenAI.Beta.Threads.Run => ({ 11 | id: run.id, 12 | object: 'thread.run' as 'thread.run', 13 | created_at: dayjs(run.createdAt).unix(), 14 | thread_id: run.threadId, 15 | assistant_id: run.assistantId, 16 | status: run.status.toLowerCase() as OpenAI.Beta.Threads.Run['status'], 17 | required_action: run.requiredAction as OpenAI.Beta.Threads.Run['required_action'], 18 | last_error: run.lastError as OpenAI.Beta.Threads.Run['last_error'], 19 | expires_at: dayjs(run.expiresAt).unix(), 20 | started_at: run.startedAt ? dayjs(run.startedAt).unix() : null, 21 | cancelled_at: run.cancelledAt ? dayjs(run.cancelledAt).unix() : null, 22 | failed_at: run.failedAt ? dayjs(run.failedAt).unix() : null, 23 | completed_at: run.completedAt ? 
dayjs(run.completedAt).unix() : null, 24 | model: run.model, 25 | instructions: run.instructions, 26 | tools: run.tools as OpenAI.Beta.Threads.Run['tools'], 27 | metadata: run.metadata, 28 | usage: run.usage as OpenAI.Beta.Threads.Run['usage'], 29 | truncation_strategy: { 30 | type: 'auto', 31 | }, 32 | response_format: { 33 | type: 'text', 34 | }, 35 | // TODO 36 | incomplete_details: null, 37 | max_completion_tokens: null, 38 | max_prompt_tokens: null, 39 | tool_choice: 'auto', 40 | parallel_tool_calls: true, 41 | }) 42 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/runs/steps/get.ts: -------------------------------------------------------------------------------- 1 | // @ts-ignore-next-line 2 | import type { PrismaClient, RunStep } from '@prisma/client' 3 | import { assign, last } from 'radash' 4 | import { stepsRegexp } from '@/lib/steps/stepsRegexp' 5 | import { serializeRunStep } from './serializeRunStep' 6 | 7 | export const get = ({ 8 | prisma, 9 | }: { 10 | prisma: PrismaClient 11 | }) => async (urlString: string) => { 12 | const url = new URL(urlString) 13 | 14 | const [, threadId, runId] = url.pathname.match(new RegExp(stepsRegexp))! 
15 | 16 | const { 17 | limit, 18 | order, 19 | after, 20 | } = assign({ 21 | limit: '20', 22 | order: 'desc', 23 | // after: null, 24 | }, Object.fromEntries(url.searchParams)) 25 | 26 | const pageSize = parseInt(limit, 10) 27 | 28 | const runStepsPlusOne = await prisma.runStep.findMany({ 29 | where: { threadId, runId }, 30 | take: pageSize + 1, 31 | orderBy: { createdAt: order }, 32 | ...(after && { 33 | skip: 1, 34 | cursor: { id: after }, 35 | }), 36 | }) as RunStep[] 37 | 38 | const runSteps = runStepsPlusOne.slice(0, pageSize) 39 | 40 | return new Response(JSON.stringify({ 41 | data: runSteps.map((runStep: RunStep) => serializeRunStep({ runStep })), 42 | has_more: runStepsPlusOne.length > pageSize, 43 | last_id: runSteps.at(-1)?.id ?? null, 44 | }), { 45 | status: 200, 46 | headers: { 'Content-Type': 'application/json' }, 47 | }) 48 | } 49 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/runs/steps/index.ts: -------------------------------------------------------------------------------- 1 | import type { PrismaClient } from '@prisma/client' 2 | import { get } from './get' 3 | 4 | export const steps = ({ 5 | prisma, 6 | }: { 7 | prisma: PrismaClient 8 | }) => ({ 9 | get: get({ prisma }), 10 | }) 11 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/runs/steps/serializeRunStep.ts: -------------------------------------------------------------------------------- 1 | import dayjs from 'dayjs' 2 | // @ts-ignore-next-line 3 | import type { RunStep } from '@prisma/client' 4 | import type OpenAI from 'openai' 5 | 6 | export const serializeRunStep = ({ 7 | runStep, 8 | }: { 9 | runStep: RunStep 10 | }) => ({ 11 | id: runStep.id, 12 | object: 'thread.run.step' as 'thread.run.step', 13 | created_at: dayjs(runStep.createdAt).unix(), 14 | assistant_id: 
runStep.assistantId, 15 | thread_id: runStep.threadId, 16 | run_id: runStep.runId, 17 | type: runStep.type.toLowerCase() as OpenAI.Beta.Threads.Runs.RunStep['type'], 18 | status: runStep.status.toLowerCase() as OpenAI.Beta.Threads.Runs.RunStep['status'], 19 | // @ts-ignore-next-line 20 | step_details: runStep.stepDetails as OpenAI.Beta.Threads.Runs.RunStep['step_details'], 21 | last_error: runStep.lastError as OpenAI.Beta.Threads.Runs.RunStep['last_error'], 22 | expired_at: runStep.expiredAt ? dayjs(runStep.expiredAt).unix() : null, 23 | cancelled_at: runStep.cancelledAt ? dayjs(runStep.cancelledAt).unix() : null, 24 | failed_at: runStep.failedAt ? dayjs(runStep.failedAt).unix() : null, 25 | completed_at: runStep.completedAt ? dayjs(runStep.completedAt).unix() : null, 26 | metadata: runStep.metadata, 27 | usage: runStep.usage as OpenAI.Beta.Threads.Runs.RunStep['usage'], 28 | }) 29 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/runs/submitToolOutputs/index.ts: -------------------------------------------------------------------------------- 1 | import type { PrismaClient } from '@prisma/client' 2 | import type { RunAdapter } from '@/types' 3 | import { post } from './post' 4 | 5 | export const submitToolOutputs = ({ 6 | prisma, 7 | runAdapter, 8 | }: { 9 | prisma: PrismaClient 10 | runAdapter: RunAdapter 11 | }) => ({ 12 | post: post({ 13 | prisma, 14 | // @ts-ignore-next-line 15 | runAdapter, 16 | }), 17 | }) 18 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/runs/submitToolOutputs/post/index.ts: -------------------------------------------------------------------------------- 1 | import type { PrismaClient } from '@prisma/client' 2 | import { submitToolOutputsRegexp } from '@/lib/runs/submitToolOutputsRegexp' 3 | import { RunAdapterPartobClient } from 
/**
 * POST handler for `/threads/{threadId}/runs/{runId}/submit_tool_outputs`.
 *
 * Marks the run's pending tool-call steps as completed with the supplied
 * `tool_outputs`, re-queues the run, and re-invokes the run adapter.
 * When `stream: true`, events are forwarded to the caller as an SSE
 * (`text/event-stream`) response; otherwise the adapter is awaited to
 * completion and the updated run row is returned as JSON.
 */
export const post = ({
  prisma,
  runAdapter,
}: {
  prisma: PrismaClient
  runAdapter: RunAdapterPartobClient
}) => async (urlString: string, options: any) => {
  const url = new URL(urlString)
  // Regexp captures: [full match, threadId, runId].
  const [, threadId, runId] = url.pathname.match(new RegExp(submitToolOutputsRegexp))!

  const body = JSON.parse(options.body)

  const {
    tool_outputs,
    stream,
  } = body

  if (stream) {
    const readableStream = new ReadableStream({
      async start(controller) {
        // Complete the pending tool-call steps and set the run back to
        // QUEUED; each completed step is emitted as an SSE event as it
        // is updated, before the adapter resumes the run.
        const run = await updateRun({
          prisma,
          runId,
          threadId,
          tool_outputs,
          onThreadRunStepCompleted: async ({ runStep }) => {
            controller.enqueue(`data: ${JSON.stringify({
              event: 'thread.run.step.completed',
              data: serializeRunStep({ runStep }),
            })}\n\n`)
          }
        })

        // Resume the run; the wrapped controller serializes every adapter
        // event into SSE `data:` frames.
        await runAdapter({
          run: serializeRun({ run }),
          onEvent: onEvent({
            controller: {
              ...controller,
              enqueue: (data) => {
                controller.enqueue(`data: ${JSON.stringify(data)}\n\n`)
              },
            },
            prisma,
          }),
          getMessages: getMessages({ prisma, run }),
        })

        controller.close()
      },
    })

    return new Response(readableStream, {
      headers: {
        'Content-Type': 'text/event-stream',
      },
    })
  } else {
    // Non-streaming: complete the pending steps without emitting events.
    const run = await updateRun({
      prisma,
      runId,
      threadId,
      tool_outputs,
    })

    // A ReadableStream's `start` callback runs immediately on construction;
    // it is used here only so `onEvent` gets a controller-shaped sink.
    // The Promise resolves once the adapter has fully processed the run.
    await new Promise((resolve) => (
      new ReadableStream({
        async start(controller) {
          await runAdapter({
            run: serializeRun({ run }),
            onEvent: onEvent({
              controller: {
                ...controller,
                enqueue: (data) => {
                  controller.enqueue(`data: ${JSON.stringify(data)}\n\n`)
                },
              },
              prisma,
            }),
            getMessages: getMessages({ prisma, run }),
          })

          controller.close()
          resolve(void 0)
        },
      })
    ))

    // NOTE(review): this returns the raw Prisma row (pre-adapter state),
    // not `serializeRun({ run })` — presumably intentional; confirm.
    return new Response(JSON.stringify(
      run
    ), {
      status: 200,
      headers: {
        'Content-Type': 'application/json',
      },
    })
  }
}
| 50 | return { 51 | id: toolCall.id, 52 | type: toolCall.type, 53 | function: { 54 | ...toolCall.function, 55 | output: toolOutput.output, 56 | }, 57 | } 58 | }), 59 | }, 60 | }, 61 | }) 62 | 63 | onThreadRunStepCompleted({ 64 | runStep: completedRunStep, 65 | }) 66 | } 67 | 68 | return prisma.run.update({ 69 | where: { 70 | id: runId, 71 | }, 72 | data: { 73 | status: 'QUEUED', 74 | }, 75 | }) 76 | }) 77 | ) 78 | -------------------------------------------------------------------------------- /packages/supercompat/src/adapters/storage/prismaStorageAdapter/threads/serializeThread.ts: -------------------------------------------------------------------------------- 1 | // @ts-ignore-next-line 2 | import type { Thread } from '@prisma/client' 3 | import dayjs from 'dayjs' 4 | 5 | export const serializeThread = ({ 6 | thread, 7 | }: { 8 | thread: Thread 9 | }) => ({ 10 | id: thread.id, 11 | object: 'thread' as 'thread', 12 | created_at: dayjs(thread.createdAt).unix(), 13 | metadata: thread.metadata, 14 | // TODO 15 | tool_resources: null, 16 | }) 17 | -------------------------------------------------------------------------------- /packages/supercompat/src/index.ts: -------------------------------------------------------------------------------- 1 | export { supercompat } from './supercompat' 2 | export { groqClientAdapter } from './adapters/client/groqClientAdapter' 3 | export { openaiClientAdapter } from './adapters/client/openaiClientAdapter' 4 | export { azureOpenaiClientAdapter } from './adapters/client/azureOpenaiClientAdapter' 5 | export { mistralClientAdapter } from './adapters/client/mistralClientAdapter' 6 | export { perplexityClientAdapter } from './adapters/client/perplexityClientAdapter' 7 | export { anthropicClientAdapter } from './adapters/client/anthropicClientAdapter' 8 | export { togetherClientAdapter } from './adapters/client/togetherClientAdapter' 9 | export { googleClientAdapter } from './adapters/client/googleClientAdapter' 10 | export { 
humirisClientAdapter } from './adapters/client/humirisClientAdapter' 11 | export { ollamaClientAdapter } from './adapters/client/ollamaClientAdapter' 12 | export { completionsRunAdapter } from './adapters/run/completionsRunAdapter' 13 | export { prismaStorageAdapter } from './adapters/storage/prismaStorageAdapter' 14 | -------------------------------------------------------------------------------- /packages/supercompat/src/lib/azureOpenai/endpointFromBaseUrl.ts: -------------------------------------------------------------------------------- 1 | export const endpointFromBaseUrl = ({ 2 | baseURL 3 | }: { 4 | baseURL: string 5 | }) => ( 6 | baseURL.replace(/\/+openai$/, '') 7 | ) 8 | -------------------------------------------------------------------------------- /packages/supercompat/src/lib/messages/alternatingMessages.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | 3 | const agentSideRoles = ['assistant', 'system'] 4 | 5 | export const alternatingMessages = ({ 6 | messages, 7 | }: { 8 | messages: OpenAI.Chat.ChatCompletionMessageParam[] 9 | }) => { 10 | const result = [] as OpenAI.Chat.ChatCompletionMessageParam[] 11 | 12 | messages.forEach((message: OpenAI.Chat.ChatCompletionMessageParam, index: number) => { 13 | result.push(message) 14 | 15 | const nextMessage = messages[index + 1] 16 | if (!nextMessage) return 17 | 18 | if (message.role === 'user' && nextMessage.role === 'user') { 19 | result.push({ 20 | role: 'assistant', 21 | content: '-', 22 | }) 23 | } else if (agentSideRoles.includes(message.role) && agentSideRoles.includes(nextMessage.role)) { 24 | result.push({ 25 | role: 'user', 26 | content: '-', 27 | }) 28 | } 29 | }) 30 | 31 | return result 32 | } 33 | -------------------------------------------------------------------------------- /packages/supercompat/src/lib/messages/firstUserMessages.ts: -------------------------------------------------------------------------------- 1 
| import type OpenAI from 'openai' 2 | 3 | export const firstUserMessages = ({ 4 | messages, 5 | }: { 6 | messages: OpenAI.Chat.ChatCompletionMessageParam[] 7 | }): OpenAI.Chat.ChatCompletionMessageParam[] => { 8 | const firstMessage = messages[0] 9 | if (!firstMessage) return messages 10 | 11 | if (firstMessage.role !== 'user') { 12 | return [ 13 | { 14 | role: 'user', 15 | content: '-', 16 | }, 17 | ...messages, 18 | ] 19 | } 20 | 21 | return messages 22 | } 23 | -------------------------------------------------------------------------------- /packages/supercompat/src/lib/messages/messagesRegexp.ts: -------------------------------------------------------------------------------- 1 | export const messagesRegexp = '^/(?:v1|/?openai)/threads/([^/]+)/messages$' 2 | -------------------------------------------------------------------------------- /packages/supercompat/src/lib/messages/nonEmptyMessages.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | 3 | const nonEmptyContent = ({ 4 | message, 5 | }: { 6 | message: OpenAI.Chat.ChatCompletionMessageParam 7 | }): OpenAI.Chat.ChatCompletionMessageParam['content'] => { 8 | if (typeof message.content === 'string') { 9 | if (!/\S/.test(message.content)) { 10 | return '-' 11 | } 12 | } 13 | 14 | return message.content as OpenAI.Chat.ChatCompletionMessageParam["content"] 15 | } 16 | 17 | type ExtendedRole = OpenAI.Chat.ChatCompletionMessageParam['role'] | 'developer' 18 | 19 | type ExtendedMessageParam = Omit & { 20 | role: ExtendedRole 21 | } 22 | 23 | export const nonEmptyMessages = ({ 24 | messages, 25 | }: { 26 | messages: OpenAI.Chat.ChatCompletionMessageParam[] 27 | }) => { 28 | const result = [] as ExtendedMessageParam[] 29 | 30 | messages.forEach((message: OpenAI.Chat.ChatCompletionMessageParam) => ( 31 | result.push({ 32 | ...message, 33 | content: nonEmptyContent({ message }), 34 | }) 35 | )) 36 | 37 | return result 38 | } 39 | 
-------------------------------------------------------------------------------- /packages/supercompat/src/lib/messages/systemDeveloperMessages.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | import { isOModel } from '@/lib/models/isOModel' 3 | 4 | export const systemDeveloperMessages = ({ 5 | messages, 6 | model, 7 | }: { 8 | messages: OpenAI.Chat.ChatCompletionMessageParam[] 9 | model: string 10 | }): OpenAI.Chat.ChatCompletionMessageParam[] => { 11 | if (isOModel({ model })) { 12 | return messages.map((message) => { 13 | if (message.role === 'system') { 14 | return { 15 | ...message, 16 | // TODO: This should be 'developer' but we're using 'user' for now 17 | // role: 'developer', 18 | role: 'user', 19 | } 20 | } 21 | 22 | return message 23 | }) 24 | } 25 | 26 | return messages 27 | } 28 | -------------------------------------------------------------------------------- /packages/supercompat/src/lib/models/isOModel.ts: -------------------------------------------------------------------------------- 1 | export const isOModel = ({ model }: { model: string }) => ( 2 | model.startsWith('o1') || model.startsWith('o3') 3 | ) 4 | -------------------------------------------------------------------------------- /packages/supercompat/src/lib/runs/runRegexp.ts: -------------------------------------------------------------------------------- 1 | export const runRegexp = '^/(?:v1|/?openai)/threads/([^/]+)/runs/([^/]+)$' 2 | -------------------------------------------------------------------------------- /packages/supercompat/src/lib/runs/runsRegexp.ts: -------------------------------------------------------------------------------- 1 | export const runsRegexp = '^/(?:v1|/?openai)/threads/([^/]+)/runs$' 2 | -------------------------------------------------------------------------------- /packages/supercompat/src/lib/runs/submitToolOutputsRegexp.ts: 
-------------------------------------------------------------------------------- 1 | export const submitToolOutputsRegexp = '^/(?:v1|/?openai)/threads/([^/]+)/runs/([^/]+)/submit_tool_outputs$' 2 | -------------------------------------------------------------------------------- /packages/supercompat/src/lib/steps/stepsRegexp.ts: -------------------------------------------------------------------------------- 1 | export const stepsRegexp = '^/(?:v1|/?openai)/threads/([^/]+)/runs/([^/]+)/steps$' 2 | -------------------------------------------------------------------------------- /packages/supercompat/src/supercompat.ts: -------------------------------------------------------------------------------- 1 | import OpenAI, { AzureOpenAI } from 'openai' 2 | import { supercompatFetch, type Args } from './supercompatFetch' 3 | import { endpointFromBaseUrl } from '@/lib/azureOpenai/endpointFromBaseUrl' 4 | 5 | export const supercompat = ({ 6 | client, 7 | storage, 8 | runAdapter, 9 | }: Args) => { 10 | if (client.type === 'AZURE_OPENAI') { 11 | return new AzureOpenAI({ 12 | apiKey: client.client.apiKey, 13 | apiVersion: client.client.apiVersion, 14 | endpoint: endpointFromBaseUrl({ baseURL: client.client.baseURL }), 15 | fetch: supercompatFetch({ 16 | client, 17 | storage, 18 | runAdapter, 19 | }), 20 | }) 21 | } 22 | 23 | return new OpenAI({ 24 | apiKey: 'SUPERCOMPAT_PLACEHOLDER_OPENAI_KEY', 25 | fetch: supercompatFetch({ 26 | client, 27 | storage, 28 | runAdapter, 29 | }), 30 | }) 31 | } 32 | -------------------------------------------------------------------------------- /packages/supercompat/src/supercompatFetch/findRequestHandler.ts: -------------------------------------------------------------------------------- 1 | export const findRequestHandler = ({ 2 | url, 3 | requestHandlers, 4 | }: { 5 | url: string 6 | requestHandlers: any 7 | }) => { 8 | const pathname = new URL(url).pathname 9 | 10 | for (const key in requestHandlers) { 11 | const regex = new RegExp(key) 12 | 
13 | if (regex.test(pathname)) { 14 | return requestHandlers[key] 15 | } 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /packages/supercompat/src/supercompatFetch/index.ts: -------------------------------------------------------------------------------- 1 | import { RunAdapter, StorageAdapterArgs } from '@/types' 2 | import { requestHandlers as getRequestHandlers } from './requestHandlers' 3 | import { findRequestHandler } from './findRequestHandler' 4 | import { originalFetch } from './originalFetch' 5 | 6 | export type Args = { 7 | client: any 8 | storage?: (arg0: StorageAdapterArgs) => any 9 | runAdapter?: RunAdapter 10 | } 11 | 12 | export const supercompatFetch = ({ 13 | client, 14 | storage, 15 | runAdapter, 16 | }: Args) => { 17 | const requestHandlers = getRequestHandlers({ 18 | client, 19 | storage, 20 | runAdapter, 21 | }) 22 | 23 | return async (...args: any[]) => { 24 | const [url, options] = args 25 | 26 | const pathHandler = findRequestHandler({ 27 | url, 28 | requestHandlers, 29 | }) 30 | 31 | if (!pathHandler) { 32 | return originalFetch({ 33 | client, 34 | args, 35 | }) 36 | } 37 | 38 | const method = options?.method ?? 
'' 39 | 40 | const requestHandler = pathHandler[method.toLowerCase()] 41 | 42 | if (!requestHandler) { 43 | return originalFetch({ 44 | client, 45 | args, 46 | }) 47 | } 48 | 49 | return requestHandler(...args) 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /packages/supercompat/src/supercompatFetch/originalFetch.ts: -------------------------------------------------------------------------------- 1 | export const originalFetch = ({ 2 | args, 3 | client, 4 | }: { 5 | args: any[] 6 | client: any 7 | }) => { 8 | if (client.client?.fetch) { 9 | const [url, options] = args 10 | 11 | const headers = { 12 | ...options.headers, 13 | authorization: client.client.defaultHeaders().Authorization, 14 | } 15 | 16 | return client.client.fetch(url, { 17 | ...options, 18 | headers, 19 | }) 20 | } else { 21 | // @ts-ignore-next-line 22 | return fetch(...args) 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /packages/supercompat/src/supercompatFetch/requestHandlers.ts: -------------------------------------------------------------------------------- 1 | import { assign, partob } from 'radash' 2 | import { RunAdapter, StorageAdapterArgs } from '@/types' 3 | 4 | const storageRequestHandlers = ({ 5 | storage, 6 | runAdapter, 7 | client, 8 | }: { 9 | storage?: (arg0: StorageAdapterArgs) => any 10 | runAdapter?: RunAdapter 11 | client: any 12 | }) => { 13 | if (!storage) return {} 14 | if (!runAdapter) return {} 15 | 16 | const result = storage({ runAdapter: partob(runAdapter, { client }) }) 17 | return result.requestHandlers 18 | } 19 | 20 | export const requestHandlers = ({ 21 | client, 22 | storage, 23 | runAdapter, 24 | }: { 25 | client: any 26 | storage?: (arg0: StorageAdapterArgs) => any 27 | runAdapter?: RunAdapter 28 | }) => ( 29 | assign( 30 | client.requestHandlers, 31 | storageRequestHandlers({ 32 | storage, 33 | runAdapter, 34 | client, 35 | }) 36 | ) 37 | ) 38 | 
-------------------------------------------------------------------------------- /packages/supercompat/src/types/index.ts: -------------------------------------------------------------------------------- 1 | import type OpenAI from 'openai' 2 | 3 | export type MessageWithRun = OpenAI.Beta.Threads.Message & { 4 | run: (OpenAI.Beta.Threads.Run & { 5 | runSteps: OpenAI.Beta.Threads.Runs.RunStep[] 6 | }) | null 7 | } 8 | 9 | export type RunAdapter = ({ 10 | client, 11 | run, 12 | onEvent, 13 | getMessages, 14 | }: { 15 | client: OpenAI 16 | run: OpenAI.Beta.Threads.Run 17 | onEvent: (event: OpenAI.Beta.AssistantStreamEvent) => Promise 18 | getMessages: () => Promise 19 | }) => Promise 20 | 21 | export type RunAdapterPartobClient = (args: Omit[0], 'client'>) => ReturnType 22 | 23 | export type StorageAdapterArgs = { 24 | runAdapter: RunAdapter 25 | } 26 | -------------------------------------------------------------------------------- /packages/supercompat/src/types/prisma.ts: -------------------------------------------------------------------------------- 1 | export enum RunStatus { 2 | QUEUED = "QUEUED", 3 | IN_PROGRESS = "IN_PROGRESS", 4 | REQUIRES_ACTION = "REQUIRES_ACTION", 5 | CANCELLING = "CANCELLING", 6 | CANCELLED = "CANCELLED", 7 | FAILED = "FAILED", 8 | COMPLETED = "COMPLETED", 9 | EXPIRED = "EXPIRED", 10 | } 11 | 12 | export type Run = { 13 | id: string 14 | threadId: string 15 | assistantId: string 16 | status: RunStatus 17 | requiredAction?: JSON 18 | lastError?: JSON 19 | expiresAt: number 20 | startedAt?: number 21 | cancelledAt?: number 22 | failedAt?: number 23 | completedAt?: number 24 | model: string 25 | instructions: string 26 | tools: JSON[] 27 | fileIds: string[] 28 | metadata?: JSON 29 | usage?: JSON 30 | truncationStrategy: JSON 31 | responseFormat: JSON 32 | createdAt: string 33 | updatedAt: string 34 | } 35 | 36 | export enum MessageRole { 37 | USER = "USER", 38 | ASSISTANT = "ASSISTANT", 39 | } 40 | 41 | export enum MessageStatus { 42 | 
IN_PROGRESS = "IN_PROGRESS", 43 | INCOMPLETE = "INCOMPLETE", 44 | COMPLETED = "COMPLETED", 45 | } 46 | 47 | export type Message = { 48 | id: string 49 | threadId: string 50 | role: MessageRole 51 | content: JSON[] 52 | status: MessageStatus 53 | assistantId?: string 54 | runId?: string 55 | completedAt?: string 56 | incompleteAt?: string 57 | incompleteDetails?: JSON 58 | fileIds: string[] 59 | metadata?: JSON 60 | toolCalls?: JSON 61 | createdAt: string 62 | updatedAt: string 63 | } 64 | 65 | export enum RunStepType { 66 | MESSAGE_CREATION = "MESSAGE_CREATION", 67 | TOOL_CALLS = "TOOL_CALLS", 68 | } 69 | 70 | export enum RunStepStatus { 71 | IN_PROGRESS = "IN_PROGRESS", 72 | CANCELLED = "CANCELLED", 73 | FAILED = "FAILED", 74 | COMPLETED = "COMPLETED", 75 | EXPIRED = "EXPIRED", 76 | } 77 | 78 | export type RunStep = { 79 | id: string 80 | threadId: string 81 | assistantId: string 82 | runId: string 83 | type: RunStepType 84 | status: RunStepStatus 85 | stepDetails: JSON 86 | lastError?: JSON 87 | expiredAt?: number 88 | cancelledAt?: number 89 | failedAt?: number 90 | completedAt?: number 91 | metadata?: JSON 92 | usage?: JSON 93 | createdAt: string 94 | updatedAt: string 95 | } 96 | 97 | export type MessageWithRun = Message & { 98 | run: (Run & { 99 | runSteps: RunStep[] 100 | }) | null 101 | } 102 | -------------------------------------------------------------------------------- /packages/supercompat/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "declaration": true, 6 | "noEmit": false, 7 | "paths": { 8 | "@/*": ["./src/*"] 9 | } 10 | }, 11 | "include": ["src"], 12 | "exclude": ["node_modules", "dist"] 13 | } 14 | -------------------------------------------------------------------------------- /packages/supercompat/tsup.config.ts: -------------------------------------------------------------------------------- 1 | 
import { defineConfig } from 'tsup' 2 | 3 | export default defineConfig({ 4 | entry: [ 5 | 'src/*.ts', 6 | 'src/types/*.ts', 7 | ], 8 | splitting: false, 9 | sourcemap: true, 10 | clean: true, 11 | format: [ 12 | 'esm', 13 | 'cjs', 14 | ], 15 | dts: true, 16 | }) 17 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es5", 4 | "lib": ["dom", "dom.iterable", "esnext"], 5 | "allowJs": true, 6 | "skipLibCheck": true, 7 | "esModuleInterop": true, 8 | "allowSyntheticDefaultImports": true, 9 | "strict": true, 10 | "forceConsistentCasingInFileNames": true, 11 | "module": "ESNext", 12 | "moduleResolution": "bundler", 13 | "resolveJsonModule": true, 14 | "isolatedModules": true, 15 | "noEmit": true, 16 | "jsx": "react-jsx" 17 | }, 18 | "include": ["packages/*/src"], 19 | "exclude": ["node_modules"] 20 | } 21 | -------------------------------------------------------------------------------- /turbo.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://turbo.build/schema.json", 3 | "globalDependencies": [ 4 | "**/.env.*local" 5 | ], 6 | "tasks": { 7 | "build": { 8 | "dependsOn": [ 9 | "^build" 10 | ], 11 | "outputs": [ 12 | "dist/**", 13 | ".next/**", 14 | "!.next/cache/**" 15 | ] 16 | }, 17 | "lint": { 18 | "dependsOn": [ 19 | "^lint" 20 | ] 21 | }, 22 | "dev": { 23 | "cache": false, 24 | "persistent": true 25 | } 26 | } 27 | } 28 | --------------------------------------------------------------------------------