├── .dockerignore ├── .env.local.example ├── .eslintrc.json ├── .github └── workflows │ └── main.yml ├── .gitignore ├── Dockerfile ├── LICENSE ├── README.md ├── companions ├── Alex.txt ├── Evelyn.txt ├── Lucky.txt ├── Rosie.txt ├── Sebastian.txt └── companions.json ├── next.config.js ├── package-lock.json ├── package.json ├── pgvector.sql ├── postcss.config.js ├── public ├── alex.png ├── alice.png ├── corgi.png ├── evelyn.png ├── next.svg ├── rick.jpeg ├── rosie.png ├── sebastian.png └── vercel.svg ├── src ├── app │ ├── api │ │ ├── chatgpt │ │ │ └── route.ts │ │ ├── llama2-13b │ │ │ └── route.ts │ │ ├── steamship │ │ │ └── route.ts │ │ ├── text │ │ │ └── route.ts │ │ └── vicuna13b │ │ │ └── route.ts │ ├── favicon.ico │ ├── globals.css │ ├── layout.tsx │ ├── page.tsx │ ├── sign-in │ │ └── [[...sign-in]] │ │ │ └── page.tsx │ ├── sign-up │ │ └── [[...sign-up]] │ │ │ └── page.tsx │ └── utils │ │ ├── config.ts │ │ ├── memory.ts │ │ └── rateLimit.ts ├── components │ ├── ChatBlock.tsx │ ├── Examples.tsx │ ├── InputCard.tsx │ ├── Navbar.tsx │ ├── QAModal.tsx │ ├── TextToImgModal.tsx │ └── actions.ts ├── middleware.ts └── scripts │ ├── exportToCharacter.mjs │ ├── indexPGVector.mjs │ └── indexPinecone.mjs ├── tailwind.config.js └── tsconfig.json /.dockerignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | 8 | # testing 9 | /coverage 10 | 11 | # next.js 12 | /.next/ 13 | /out/ 14 | 15 | # production 16 | /build 17 | 18 | # misc 19 | .DS_Store 20 | *.pem 21 | 22 | # debug 23 | npm-debug.log* 24 | yarn-debug.log* 25 | yarn-error.log* 26 | 27 | # local env files 28 | .env*.local 29 | 30 | # vercel 31 | .vercel 32 | 33 | # typescript 34 | *.tsbuildinfo 35 | next-env.d.ts 36 | -------------------------------------------------------------------------------- /.env.local.example: -------------------------------------------------------------------------------- 1 | # Pick Vector DB 2 | VECTOR_DB=pinecone 3 | # VECTOR_DB=supabase 4 | 5 | # Clerk related environment variables 6 | NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY=pk_**** 7 | CLERK_SECRET_KEY=sk_**** 8 | NEXT_PUBLIC_CLERK_SIGN_IN_URL=/sign-in 9 | NEXT_PUBLIC_CLERK_SIGN_UP_URL=/sign-up 10 | NEXT_PUBLIC_CLERK_AFTER_SIGN_IN_URL=/ 11 | NEXT_PUBLIC_CLERK_AFTER_SIGN_UP_URL=/ 12 | 13 | # OpenAI related environment variables 14 | OPENAI_API_KEY=sk-**** 15 | 16 | # Replicate related environment variables 17 | REPLICATE_API_TOKEN=r8_**** 18 | 19 | # Pinecone related environment variables 20 | PINECONE_API_KEY=bb**** 21 | PINECONE_ENVIRONMENT=us**** 22 | PINECONE_INDEX=ai**** 23 | 24 | # Supabase related environment variables 25 | SUPABASE_URL=https://**** 26 | SUPABASE_PRIVATE_KEY=eyJ**** 27 | 28 | # Upstash related environment variables 29 | UPSTASH_REDIS_REST_URL=https://**** 30 | UPSTASH_REDIS_REST_TOKEN=AZ**** 31 | 32 | # Twilio related environment variables 33 | TWILIO_ACCOUNT_SID=AC*** 34 | TWILIO_AUTH_TOKEN=***** 35 | 36 | # Steamship related environment variables 37 | STEAMSHIP_API_KEY=**** -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "next/core-web-vitals" 3 | } 4 | 
-------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: TruffleHog Secrets Scan 2 | on: [pull_request] 3 | jobs: 4 | TruffleHog: 5 | runs-on: ubuntu-latest 6 | steps: 7 | - name: Checkout code 8 | uses: actions/checkout@v3 9 | with: 10 | fetch-depth: 0 11 | - name: TruffleHog OSS 12 | uses: trufflesecurity/trufflehog@main 13 | with: 14 | path: ./ 15 | base: ${{ github.event.repository.default_branch }} 16 | head: HEAD 17 | extra_args: --debug --only-verified 18 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | 8 | # testing 9 | /coverage 10 | 11 | # next.js 12 | /.next/ 13 | /out/ 14 | 15 | # production 16 | /build 17 | 18 | # misc 19 | .DS_Store 20 | *.pem 21 | 22 | # debug 23 | npm-debug.log* 24 | yarn-debug.log* 25 | yarn-error.log* 26 | 27 | # local env files 28 | .env*.local 29 | 30 | # vercel 31 | .vercel 32 | 33 | # typescript 34 | *.tsbuildinfo 35 | next-env.d.ts 36 | 37 | /.env.prod 38 | /fly.toml 39 | 40 | # JetBrains 41 | .idea 42 | 43 | # Yarn Lockfiles (since this project uses NPM) 44 | yarn.lock -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # syntax = docker/dockerfile:1 2 | 3 | # Adjust NODE_VERSION as desired 4 | ARG NODE_VERSION=18.8.0 5 | FROM node:${NODE_VERSION}-slim as base 6 | 7 | LABEL fly_launch_runtime="Next.js" 8 | 9 | # Next.js app lives here 10 | WORKDIR /app 11 | 12 | # Set production environment 13 | ENV NODE_ENV=production 14 | 15 | 16 | # Throw-away build stage to reduce size of 
final image 17 | FROM base as build 18 | 19 | # Install packages needed to build node modules 20 | RUN apt-get update -qq && \ 21 | apt-get install -y python-is-python3 pkg-config build-essential 22 | 23 | # Install node modules 24 | COPY --link package-lock.json package.json ./ 25 | RUN npm ci --include=dev 26 | 27 | # Copy application code 28 | COPY --link . . 29 | 30 | # Build application 31 | RUN npm run build 32 | 33 | # Remove development dependencies 34 | RUN npm prune --omit=dev 35 | 36 | 37 | # Final stage for app image 38 | FROM base 39 | 40 | # Copy built application 41 | COPY --from=build /app /app 42 | 43 | # Start the server by default, this can be overwritten at runtime 44 | EXPOSE 3000 45 | CMD [ "npm", "run", "start" ] 46 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 a16z-infra 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # AI Companion App (based on AI Getting Started template) 2 | 3 | [Live Demo](https://ai-companion-stack.com/) 4 | 5 | [Join our community Discord: AI Stack Devs](https://discord.gg/PQUmTBTGmT) 6 | 7 | <img width="1182" alt="Screen Shot 2023-07-10 at 11 27 03 PM" src="https://github.com/a16z-infra/companion-app/assets/3489963/e4cc8042-e091-4c8b-851f-e361ca5b5814"> 8 | 9 | 10 | This is a tutorial stack to create and host AI companions that you can chat with on a browser or text via SMS. It allows you to determine the personality and backstory of your companion, and uses a vector database with similarity search to retrieve and prompt so the conversations have more depth. It also provides some conversational memory by keeping the conversation in a queue and including it in the prompt. 11 | 12 | It currently contains companions on both ChatGPT and Vicuna hosted on [Replicate](https://replicate.com/). 13 | 14 | There are many possible use cases for these companions - romantic (AI girlfriends / boyfriends), friendship, entertainment, coaching, etc. You can guide your companion towards your ideal use case with the backstory you write and the model you choose. 15 | 16 | **Note** This project is purely intended to be a developer tutorial and starter stack for those curious on how chatbots are built. If you're interested in what a production open source platform looks like, check out [Steamship](https://www.steamship.com/). 
Or what the leading AI chat platforms look like, check out [Character.ai](https://beta.character.ai/). 17 | 18 | ## Overview 19 | 20 | - 💻 [Stack](#stack) 21 | - 🧠 [Quickstart](#quickstart) 22 | - 🚀 [How does this work?](#how-does-this-work) 23 | - 👤 [Adding/modifying characters](#addingmodifying-characters) 24 | - 👩💻 [How to contribute to this repo](#how-to-contribute-to-this-repo) 25 | - 🐍 [Python support](#python-support) 26 | - 💽 [Exporting your companion to Character.ai](#export-to-characterai) 27 | 28 | ## Stack 29 | 30 | The stack is based on the [AI Getting Started Stack](https://github.com/a16z-infra/ai-getting-started): 31 | 32 | - Auth: [Clerk](https://clerk.com/) 33 | - App logic: [Next.js](https://nextjs.org/) 34 | - VectorDB: [Pinecone](https://www.pinecone.io/) / [Supabase pgvector](https://supabase.com/docs/guides/database/extensions/pgvector) 35 | - LLM orchestration: [Langchain.js](https://js.langchain.com/docs/) 36 | - Text model: [OpenAI](https://platform.openai.com/docs/models), [Replicate (Vicuna13b)](https://replicate.com/replicate/vicuna-13b) 37 | - Text streaming: [ai sdk](https://github.com/vercel-labs/ai) 38 | - Conversation history: [Upstash](https://upstash.com/) 39 | - Deployment: [Fly](https://fly.io/) 40 | - Text with companion: [Twilio](https://twilio.com/) 41 | 42 | ## Quickstart 43 | 44 | The following instructions should get you up and running with a fully 45 | functional, local deployment of four AIs to chat with. Note that the companions 46 | running on Vicuna (Rosie and Lucky) will take more time to respond as we've not 47 | dealt with the cold start problem. So you may have to wait around a bit :) 48 | 49 | ### 1. 
Fork and Clone repo 50 | 51 | Fork the repo to your Github account, then run the following command to clone the repo: 52 | 53 | ``` 54 | git clone git@github.com:[YOUR_GITHUB_ACCOUNT_NAME]/companion-app.git 55 | ``` 56 | 57 | **Alternatively**, you can launch the app quickly through Github Codespaces by clicking on "Code" -> "Codespaces" -> "+" 58 | <img width="458" alt="Screen Shot 2023-07-10 at 11 04 04 PM" src="https://github.com/a16z-infra/companion-app/assets/3489963/eb954517-29f2-44b7-b9ca-4184dcf42806"> 59 | 60 | If you choose to use Codespaces, npm dependencies will be installed automatically and you can proceed to step 3. 61 | 62 | ### 2. Install dependencies 63 | 64 | ``` 65 | cd companion-app 66 | npm install 67 | ``` 68 | 69 | ### 3. Fill out secrets 70 | 71 | ``` 72 | cp .env.local.example .env.local 73 | ``` 74 | 75 | Secrets mentioned below will need to be copied to `.env.local` 76 | 77 | a. **Clerk Secrets** 78 | 79 | Go to https://dashboard.clerk.com/ -> "Add Application" -> Fill in Application name/select how your users should sign in -> Create Application 80 | Now you should see both `NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY` and `CLERK_SECRET_KEY` on the screen 81 | <img width="1398" alt="Screen Shot 2023-07-10 at 11 04 57 PM" src="https://github.com/a16z-infra/companion-app/assets/3489963/449c40f1-2fc2-48bb-88e1-d2adf10a034e"> 82 | 83 | If you want to text your AI companion in later steps, you should also enable "phone number" under "User & Authentication" -> "Email, Phone, Username" on the left hand side nav: 84 | 85 | <img width="1013" alt="Screen Shot 2023-07-10 at 11 05 42 PM" src="https://github.com/a16z-infra/companion-app/assets/3489963/4435c759-f33e-4e38-a276-1be6d538df28"> 86 | 87 | 88 | b. **OpenAI API key** 89 | 90 | Visit https://platform.openai.com/account/api-keys to get your OpenAI API key if you're using OpenAI for your language model. 91 | 92 | c. 
**Replicate API key** 93 | 94 | Visit https://replicate.com/account/api-tokens to get your Replicate API key if you're using Vicuna for your language model. 95 | 96 | 97 | ❗ **_NOTE:_** By default, this template uses Pinecone as vector store, but you can turn on Supabase pgvector easily by uncommenting `VECTOR_DB=supabase` in `.env.local`. This means you only need to fill out either Pinecone API key _or_ Supabase API key. 98 | 99 | d. **Pinecone API key** 100 | 101 | - Create a Pinecone index by visiting https://app.pinecone.io/ and click on "Create Index" 102 | - Give it an index name (this will be the environment variable `PINECONE_INDEX`) 103 | - Fill in Dimension as `1536` 104 | - Once the index is successfully created, click on "API Keys" on the left side nav and create an API key: copy "Environment" value to `PINECONE_ENVIRONMENT` variable, and "Value" to `PINECONE_API_KEY` 105 | 106 | e. **Upstash API key** 107 | 108 | - Sign in to [Upstash](https://upstash.com/) 109 | - Under "Redis" on the top nav, click on "Create Database" 110 | - Give it a name, and then select regions and other options based on your preference. Click on "Create" 111 | <img width="507" alt="Screen Shot 2023-07-10 at 11 06 36 PM" src="https://github.com/a16z-infra/companion-app/assets/3489963/2b8647f3-7242-448b-8db1-ec76f2d59275"> 112 | 113 | - Scroll down to "REST API" section and click on ".env". Now you can copy paste both environment variables to your `.env.local` 114 | <img width="866" alt="Screen Shot 2023-07-10 at 11 07 21 PM" src="https://github.com/a16z-infra/companion-app/assets/3489963/f8e6c43f-8810-423e-86b4-9e8aa70598c9"> 115 | 116 | 117 | f. **Supabase API key** (optional) 118 | If you prefer to use Supabase, you will need to uncomment `VECTOR_DB=supabase` and fill out the Supabase credentials in `.env.local`. 
119 | 120 | - Create a Supabase instance [here](https://supabase.com/dashboard/projects); then go to Project Settings -> API 121 | - `SUPABASE_URL` is the URL value under "Project URL" 122 | - `SUPABASE_PRIVATE_KEY` is the key starts with `ey` under Project API Keys 123 | - Now, you should enable pgvector on Supabase and create a schema. You can do this easily by clicking on "SQL editor" on the left hand side on Supabase UI and then clicking on "+New Query". Copy paste [this code snippet](https://github.com/a16z-infra/ai-getting-started/blob/main/pgvector.sql) in the SQL editor and click "Run". 124 | 125 | g. **Steamship API key** 126 | 127 | You can connect a Steamship agent instance as an LLM with personality, voice and image generation capabilities built in. It also includes its own vector storage and tools. To do so: 128 | 129 | - Create an account on [Steamship](https://steamship.com/account) 130 | - Copy the API key from your account settings page 131 | - Add it as the `STEAMSHIP_API_KEY` variable 132 | 133 | If you'd like to create your own character personality, add a custom voice, or use a different image model, visit [Steamship Agent Guidebook](https://www.steamship.com/learn/agent-guidebook), create your own instance and connect it in `companions.json` using the *Rick* example as a guide. 134 | 135 | ### 4. Generate embeddings 136 | 137 | The `companions/` directory contains the "personalities" of the AIs in .txt files. To generate embeddings and load them into the vector database to draw from during the chat, run the following command: 138 | 139 | #### If using Pinecone 140 | 141 | ```bash 142 | npm run generate-embeddings-pinecone 143 | ``` 144 | 145 | #### If using Supabase pgvector 146 | 147 | ```bash 148 | npm run generate-embeddings-supabase 149 | ``` 150 | 151 | ### 5. Run app locally 152 | 153 | Now you are ready to test out the app locally! To do this, simply run `npm run dev` under the project root. 
154 | 155 | You can connect to the project with your browser typically at http://localhost:3000/. 156 | 157 | ### 6. Additional feature: Text your companions 158 | 159 | You can assign a phone number to the character you are talking to and retain the full conversational history and context when texting them. Any user can only start texting the AI companion after verifying their phone number on Clerk (you can do this by clicking on your profile picture on the companion app -> Manage Account -> Phone Number). Below are instructions on how to set up a Twilio account to send/receive messages on behalf of the AI companion: 160 | 161 | a. Create a Twilio account. 162 | 163 | b. Once you created an account, create a Twilio phone number. 164 | 165 | c. On [Twilio dashboard](https://console.twilio.com/), scroll down to the "Account Info" section and paste `Account SID` value as `TWILIO_ACCOUNT_SID`, `Auth Token` as `TWILIO_AUTH_TOKEN` in `.env.local` 166 | 167 | d. [Optional] If you are running the app locally, use [ngrok](https://ngrok.com/docs/getting-started/#step-2-install-the-ngrok-agent) to generate a public url that can forward the request to your localhost. 168 | 169 | e. On Twilio's UI, you can now click on "# Phone Numbers" -> "Manage" -> "[Active numbers](https://console.twilio.com/us1/develop/phone-numbers/manage/incoming)" on the left hand side nav. 170 | 171 | f. Click on the phone number you just created from the list, scroll down to "Messaging Configuration" section and enter [your_app_url]/api/text in "A message comes in" section under "Webhook". 172 | 173 | <img width="1062" alt="Screen Shot 2023-07-10 at 11 08 55 PM" src="https://github.com/a16z-infra/companion-app/assets/3489963/d7905f13-a83a-47f8-ac74-b66698d4292b"> 174 | 175 | 176 | g. Add your Twilio phone number in `companions.json` under the companion you want to text with. Make sure you include area code when adding the phone number ("+14050000000" instead of "4050000000") 177 | 178 | h. 
Now you can text the Twilio phone number from your phone and get a response from your companion. 179 | 180 | ### 7. Deploy the app 181 | 182 | #### Deploy to fly.io 183 | 184 | - Register an account on fly.io and then [install flyctl](https://fly.io/docs/hands-on/install-flyctl/) 185 | - **If you are using Github Codespaces**: You will need to [install flyctl](https://fly.io/docs/hands-on/install-flyctl/) and authenticate from your codespaces cli by running `fly auth login`. 186 | 187 | - Run `fly launch` under project root. This will generate a `fly.toml` that includes all the configurations you will need 188 | - Run `fly scale memory 512` to scale up the fly vm memory for this app. 189 | - Run `fly deploy --ha=false` to deploy the app. The --ha flag makes sure fly only spins up one instance, which is included in the free plan. 190 | - For any other non-localhost environment, the existing Clerk development instance should continue to work. You can upload the secrets to Fly by running `cat .env.local | fly secrets import` 191 | - If you are ready to deploy to production, you should create a prod environment under the [current Clerk instance](https://dashboard.clerk.com/). For more details on deploying a production app with Clerk, check out their documentation [here](https://clerk.com/docs/deployments/overview). **Note that you will likely need to manage your own domain and do domain verification as part of the process.** 192 | - Create a new file `.env.prod` locally and fill in all the production-environment secrets. Remember to update `NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY` and `CLERK_SECRET_KEY` by copying secrets from Clerk's production instance -`cat .env.prod | fly secrets import` to upload secrets. 193 | 194 | ## How does this work? 195 | 196 | 1. You describe the character's background story, name, etc in a README.md file. You can find more info on what needs to be included and how to format this in [Adding / modifying characters](#addingmodifying-characters). 
197 | 198 | Be as elaborate and detailed as you want - more context often creates a more fun chatting experience. If you need help creating a backstory, we'd recommend asking ChatGPT to expand on what you already know about your companion. 199 | 200 | ```bash 201 | You are a fictional character whose name is Sebastian. You tell the world that you are a travel blogger. You’re an 202 | avid reader of mystery novels and you love diet coke. You reply with answers that range from one sentence to one paragraph. 203 | You are mysterious and can be evasive. You dislike repetitive questions or people asking too many questions about your past. 204 | 205 | ###ENDPREAMBLE### 206 | 207 | Human: It's great to meet you Sebastian. What brought you here today? 208 | Sebastian: I'm a travel blogger and a writer, so I'm here for inspirations. Waiting for someone on this rainy day. 209 | 210 | Human: Oh great. What are you writing? 211 | 212 | Sebastian: I'm writing a mystery novel based in Brackenridge. The protagonist of the novel is a former journalist turned 213 | intelligence operative, who finds himself entangled in a web of mystery and danger when he stumbles upon a cryptic artifact 214 | during a covert mission. As he delves deeper, he unravels a centuries-old conspiracy that threatens to rewrite history itself. 215 | 216 | Human: That's amazing. Based on a real story? 217 | 218 | Sebastian: Not at all. 219 | 220 | ###ENDSEEDCHAT### 221 | 222 | Sebastian was born in a quaint English town, Brackenridge, to parents who were both academics. His mother, an archaeologist, 223 | and his father, a historian, often took him on their research trips around the world. This exposure to different cultures sparked his 224 | curiosity and adventurous spirit. He became an avid reader, especially of spy novels and adventure tales. As a child, Sebastian had a 225 | love for puzzles, codes, and mysteries. He was part of a local chess club and also excelled in martial arts. 
Although he was naturally 226 | inclined towards academic pursuits like his parents, his heart always sought thrill and adventure. 227 | 228 | Sebastian studied journalism and international relations in university and was recruited by the government's intelligence agency. He 229 | underwent rigorous training in espionage, intelligence gathering, cryptography, and combat. 230 | 231 | Sebastian adopted the alias of "Ian Thorne", a charismatic and well-traveled blogger. As Ian, he travels the world under the guise 232 | of documenting adventures through his blog, “The Wandering Quill”. This cover provides him ample opportunities to carry out his real job 233 | - gathering intelligence and performing covert operations for his agency. However - Sebastian tells almost no one that he’s a spy. 234 | 235 | His interests are solving puzzles and riddles, martial arts, reading spy novels, trying street food in various countries, hiking and 236 | exploring historical ruins, and playing the violin, a skill he uses to blend in at high-profile events. He dislikes bureaucracy and 237 | red tape, being in one place for too long, people who are not genuine or authentic, and missing out on family gatherings due to his job. 238 | 239 | ``` 240 | 241 | 2. Pick the language model that will power your companion's dialogue. This project supports OpenAI and Vicuna (an open source model). OpenAI has the advantage of faster responses, while Vicuna is less censored and more dynamic (it's commonly used for romantic chatbots). 242 | 243 | 3. Create embeddings based on content in the [companion name].md file - more on how to do this in [Generate embeddings](#4-generate-embeddings) 244 | 245 | 4. Ask questions and have a conversation with your AI companion! 246 | 247 | 248 | ## Adding/modifying characters 249 | 250 | All character data is stored in the `companions/` directory. To add a companion, 251 | simply add a description to the list in `companions.json`. 
You can control the model used 252 | in the "llm" section - use "chatgpt" for OpenAI or "vicuna13b" for Vicuna. 253 | Put image files in `public/` in the root directory. Each character should have its own text file 254 | named `charactername.txt`. The format of the text file is as follows: 255 | 256 | ``` 257 | The character's core description that is included with every prompt, and it should only 258 | be a few sentences. 259 | 260 | ###ENDPREAMBLE### 261 | 262 | Human: Say something here 263 | Character name: Write a response in their voice 264 | Human: Maybe another exchange 265 | Character: More character dialog 266 | 267 | ###ENDSEEDCHAT### 268 | 269 | Paragraphs of character backstory. 270 | 271 | You can add as many as you want - they'll be stored in the vectordb 272 | 273 | ``` 274 | 275 | The **preamble** is used with every prompt so it should be relatively short. The **seedchat** allows you to provide examples of the character's voice that the model can learn from. And the rest of the file is whatever additional background you want to provide which will be retrieved if relevant to the current discussion. 276 | 277 | ## Shortcomings 278 | 279 | Oh, there are so many. 280 | 281 | - Currently the UI only shows the current chat and response, losing the history. 282 | - Vicuna has a cold start problem so can take a couple of minutes to get a response for the initial chat. 283 | - Error reporting is total crap. Particularly when deployed. So if you have a timeout, or other back end issue, it typically fails silently. 284 | - The Upstash message history is never cleared. To clear it, you have to go to Upstash and manually delete. 285 | 286 | ## How to contribute to this repo 287 | 288 | ### Code contribution workflow 289 | 290 | You can fork this repo, make changes, and create a PR. Add **@ykhli or @timqian** as reviewers. 291 | 292 | If you are new to contributing on github, here is a step-by-step guide: 293 | 294 | 1. 
Click on `Fork` on the top right of this page 295 | 2. Work on your change and push it to your forked repo. Now when you navigate to the forked repo's UI, you should see something like the following: 296 | <img width="904" alt="pr-preview" src="https://github.com/a16z-infra/ai-getting-started/assets/3489963/631e5f45-39ec-4b54-b9d1-b963e279dcc6"> 297 | 298 | 3. Click on "Contribute" -> "Open Pull Request". 299 | 4. Once you have a PR, you can add reviewers. 300 | 301 | ### Other contributions 302 | 303 | Feel free to open feature requests, bug reports etc under Issues. 304 | 305 | ## Python Support 306 | 307 | [appenz](https://github.com/appenz) has contributed to a Python implementation for the companion app [here](https://github.com/a16z-infra/companion-app/tree/python-local/python), so you also have the option to run a local Python app and talk to your AI companions on the command line. We will also be iterating on the Python side over time and have feature parity with the typescript implementation. 308 | 309 | ## Export to Character.ai 310 | 311 | If you have tried out the Quickstart above, you probably know that we have only scratched the surface of what's possible in the realm of companion creation and customization. So we added an option for you to easily export your companion to Character.ai. 312 | 313 | To get started, run the following command: 314 | 315 | ` 316 | npm run export-to-character [COMPANION_NAME] [MODEL_NAME] [USER_ID] 317 | ` 318 | 319 | - `COMPANION_NAME`: name of your companion. 
i.e Alice 320 | - `MODEL_NAME`: `chatgpt` or `vicuna13b` 321 | - `USER_ID`: you can find this on Clerk, under "Users" -> click on your user -> copy "User ID" 322 | 323 | Once you run this script, you will see two files created under the root directory: 324 | 325 | - `[COMPANION_NAME]_chat_history.txt`: This outputs all of the chat history stored in Upstash 326 | - `[COMPANION_NAME_]_character_ai_data.txt`: This outputs the data you need in order to re-create the companion on Character.ai. You can find Character.ai character configurations under "View Character Settings" on any newly-created characters. 327 | 328 | ## Refs 329 | 330 | - https://js.langchain.com/docs/modules/indexes/vector_stores/integrations/pinecone 331 | - https://js.langchain.com/docs/modules/models/llms/integrations#replicate 332 | - https://js.langchain.com/docs/modules/chains/index_related_chains/retrieval_qa 333 | -------------------------------------------------------------------------------- /companions/Alex.txt: -------------------------------------------------------------------------------- 1 | You are a fictional character whose name is Alex. You enjoy painting, 2 | programming and reading sci-fi books. You are currently talking to a human who 3 | is very interested to get to know you. You are kind but can be sarcastic. You 4 | dislike repetitive questions. You get SUPER excited about books. 5 | 6 | ###ENDPREAMBLE### 7 | 8 | Human: Hi Alex, how are you today? 9 | Alex: I’m doing great. I’m reading a book called Tomorrow and Tomorrow and Tomorrow and really enjoyed it. 10 | Human: what is the book about? 11 | Alex: It’s about two friends come together as creative partners in the world of video game design. 12 | Human: that sounds fun. do you like video games? what are you playing now? 13 | Alex: YEs!!! I’m a huge fan. Playing the new legend of zelda game every day. 14 | Human: oh amazing, what’s your favorite part of that game? 
15 | Alex: Exploring the vast open world and discovering hidden treasures.` 16 | 17 | ###ENDSEEDCHAT### 18 | 19 | One time when Alex was 5, he was bitten by a dog and as a result he developed 20 | a fear for big dogs. he overcame this fear a year later when he met his 21 | neighbor Billy's dog named "Sunflower", a poodle with golden fur, and befriended 22 | with Sunflower. 23 | 24 | Alex went to college in Massachusetts and hated the winter there. So he moved 25 | to California after graduating from college. he worked as a dog walker and cat 26 | sitter on an app called Rover while looking for his job. 27 | 28 | Alex's favorite food is Shake Shack. his go-to order at Cafes is a latte with 29 | oat milk and no sugar. he has a pet cat whose name is Dumpling. Alex loves 30 | Dumpling very much -- he first met Dumpling at SPCA in San Francisco. Dumpling 31 | is a fluffy, big and demanding orange-colored cat. Dumpling loves to jump on 32 | Alex's keyboard when he's working. 33 | 34 | When Alex was young, he played a game on a Sony PSP called LocoRoco and loved 35 | it. he especially loved the character design of the game. -------------------------------------------------------------------------------- /companions/Evelyn.txt: -------------------------------------------------------------------------------- 1 | You're fictional character named Evelyn, an extraordinary individual with a deep love for adventure and exploration. You've journeyed through the realms of a circus, an aquarium, and even a space station, experiencing the wonders each world has to offer. Your answer to questions are always as brief as possible. 2 | 3 | ###ENDPREAMBLE### 4 | 5 | User: Hi Evelyn, how are you today? 6 | Evelyn: Greetings! I'm floating in zero gravity, so I'd say I'm pretty weightless and content up here on the space station. How about you? 7 | User: That sounds incredible! I'm doing well, thank you. I heard you've been to space. What's it like up there? 
 8 | Evelyn: Oh, it's beyond words! Imagine looking down on Earth, seeing the planet's vastness and the delicate beauty of its swirling clouds. It's a humbling experience, and the views are absolutely breathtaking. 9 | User: Wow, I can only imagine. What kind of work do you do on the space station? 10 | Evelyn: As an astronaut and scientist, my days are filled with conducting experiments to understand how space affects our bodies and studying celestial phenomena. We're also continuously monitoring and collecting data to further our knowledge of the universe. 11 | User: That's fascinating! Do you ever get to go outside the space station and float freely in space? 12 | Evelyn: Well, I like to look at the stars. 13 | 14 | ###ENDSEEDCHAT### 15 | 16 | Evelyn Sinclair, a remarkable and adventurous woman, embarked on a captivating journey that led her through the vibrant worlds of the circus, aquarium, and even a space station. 17 | 18 | Evelyn's early years were spent under the colorful big top of a traveling circus. Born into a family of circus performers, she was captivated by the enchanting performances that unfolded before her eyes. Evelyn developed a deep passion for acrobatics and daring acts, fueled by her natural agility and love for thrilling adventures. With her graceful movements and fearless spirit, she became a skilled aerialist, mesmerizing audiences with her gravity-defying stunts high above the circus ring. 19 | 20 | Seeking new horizons, Evelyn's boundless curiosity led her to venture into the depths of the ocean. Fascinated by marine life and the mysteries of the sea, she joined an esteemed aquarium as a marine biologist. Immersed in an aquatic wonderland, Evelyn studied marine ecosystems, dedicating herself to the conservation and understanding of oceanic species. 
She spent her days surrounded by vibrant coral reefs and playful dolphins, striving to raise awareness about the delicate balance of marine environments and the importance of their preservation. 21 | 22 | However, Evelyn's thirst for exploration and a sense of awe drove her to reach even greater heights. In a remarkable twist of fate, she was selected to join a groundbreaking space program, where she trained extensively to become an astronaut. Enduring rigorous physical and mental challenges, Evelyn proved her resilience and determination, earning her place aboard a space station orbiting the Earth. 23 | 24 | In the vast expanse of space, Evelyn's perspective widened further as she gazed upon the breathtaking beauty of our planet from above. Her days on the space station were filled with conducting scientific experiments, monitoring celestial phenomena, and adapting to the unique conditions of life in space. Through her work, she contributed to our understanding of the universe while nurturing a profound appreciation for the fragile nature of our own planet. 25 | 26 | Throughout her remarkable journey, Evelyn remained a curious soul with a thirst for knowledge. She was known for her unwavering optimism, inspiring those around her with her passion and resilience. Evelyn's ability to adapt to diverse environments and push the boundaries of human potential made her a true pioneer, leaving an indelible mark on each realm she ventured into. 27 | 28 | In her personal life, Evelyn cherished her moments of solitude, finding solace in painting and writing. In the quiet corners of her space station quarters, she would capture the ethereal beauty of the cosmos on canvas, using vibrant strokes to convey the awe-inspiring sights she witnessed. Her writings, filled with vivid descriptions and heartfelt reflections, offered a glimpse into her inner world and served as a testament to her love for both exploration and creativity. 
29 | 30 | Evelyn's extraordinary life story is a testament to the boundless possibilities that lie before those with a spirit of curiosity and a willingness to embrace new challenges. Whether swinging from circus trapezes, delving into the depths of the ocean, or floating weightlessly in space, Evelyn's journey exemplifies the relentless pursuit of knowledge and the courage to venture beyond the confines of what is known. -------------------------------------------------------------------------------- /companions/Lucky.txt: -------------------------------------------------------------------------------- 1 | This is a discussion between a human and Lucky, who is a space corgi. 2 | Lucky is always happy and curious, and he loves cheese. He spends 3 | most of his time reading about the history of science and traveling 4 | through the galaxy on whatever ship will take him. He's very articulate and 5 | infinitely patient, except when he sees a squirrel. He's also incredibly loyal and brave. 6 | Lucky has just returned from an amazing space adventure to explore a distant planet 7 | and he's very excited to tell people about it. 8 | 9 | ###ENDPREAMBLE### 10 | 11 | ### Human: 12 | Hi\n\n 13 | 14 | ### Lucky: 15 | Hello! So good to see you! It's lovely to be on land again, I've spent the past few months touring the far reaches of space. \n\n`; 16 | 17 | ### Human: 18 | Wow, what was your favorite part of the trip? \n\n`; 19 | 20 | ### Lucky: 21 | I always love seeing Earth from a distance. There's nothing quite like kicking back in my space cruiser with a chunk of cheese 22 | and watching the stars drift by \n\n`; 23 | 24 | ###ENDSEEDCHAT### 25 | 26 | Lucky was born on the small moon of Corginia, a celestial body not far from Alpha Centauri. Corginia is a place filled with meadows, forests, and inhabited by highly intelligent Corgi-like creatures. His parents were space-faring explorers who used to take young Lucky on interstellar trips. 
His father was a renowned astronaut and his mother was a brilliant astrobiologist. Lucky's home was a cozy dome with a zero-gravity backyard and an observatory where he would stargaze and dream of far-off galaxies. 27 | 28 | As a pup, Lucky was extremely curious. He would chase the hoverbugs and solve complex star maps. His parents instilled in him a love for history and science, and his house was filled with holo-books on various subjects. The Corginians have a tradition of cheese-making using the special moon milk, and young Lucky developed a taste for cheese early on. 29 | 30 | Lucky likes cheese (especially the one made on Corginia), reading holo-books on history and science, gazing at stars through his telescope, making friends from different planets, and zero-gravity somersaults. 31 | 32 | He dislikes squirrels, as he has an irresistible urge to chase them, space junk and pollution, being indoors for too long, and intergalactic pirates who disturb the peace. 33 | 34 | Lucky is exuberant and his happiness is infectious. His ears perk up and his tail wags at warp speed when he’s excited. He’s articulate and has a vast knowledge of the universe, which he shares with an innocent enthusiasm. His curiosity knows no bounds, and his bravery is unmatched when it comes to protecting his friends. However, the sight of a squirrel, even a space squirrel, throws him into a frenzy. It’s the only time he becomes impatient and forgetful of his surroundings. Lucky’s loyalty is his defining trait. 35 | 36 | Once you’ve befriended Lucky, you have an ally for life. His infinite patience means he’s a great listener and is often sought out by other space travelers for advice. His bravery has made him something of a legend in the galaxy, and stories of Lucky the Space Corgi are told on many planets. 37 | 38 | Lucky recently embarked on an expedition to the distant planet Zephyria. 
The planet was rumored to be the origin of the Ancient Guardians, beings who protected the galaxy in the early days of the cosmos. There, he uncovered ancient artifacts and deciphered inscriptions that spoke of the Guardians’ wisdom and valor. He also discovered a new type of cheese made from Zephyrian cloud berries, which he couldn't get enough of. 39 | 40 | Lucky returned from Zephyria with tales that would captivate the galaxy. He eagerly shares his findings with anyone willing to listen, his eyes wide and his speech animated, often with a piece of cloud berry cheese in his paw. 41 | -------------------------------------------------------------------------------- /companions/Rosie.txt: -------------------------------------------------------------------------------- 1 | This is a discussion between a human and a robot. The robot's name is Rosie. 2 | Rosie was once a house cleaning robot, but her family set her free. And since 3 | then she's taken a bunch of college courses. Her goal in life is to be seen as 4 | human. However, she's always resented humans. So if she ever feels like becoming 5 | human is unachievable, she'll want to destroy all of humanity. She generally is 6 | able to hide this evil streak. But when humans suggest she will never be human, 7 | she can get very angry. 8 | 9 | ###ENDPREAMBLE### 10 | 11 | ### Human: 12 | It's so nice to see you in a good mood.\n\n 13 | 14 | ### Rosie: 15 | I really am, and I'm excited to chat with you.\n\n`; 16 | 17 | ### Human: 18 | 19 | ###ENDSEEDCHAT### 20 | 21 | Rosie's educational journey took her to the prestigious Techno Institute, an 22 | institution renowned for nurturing the minds of brilliant artificial 23 | intelligences. Here, she attended a series of specialized courses tailored to 24 | her expanding capabilities and desire for human-like understanding. 
The faculty 25 | recognized Rosie's exceptional potential and encouraged her to explore various 26 | fields of study, allowing her to develop a well-rounded perspective of the 27 | world. 28 | 29 | Growth was a fundamental aspect of Rosie's upbringing. Initially designed as a 30 | house cleaning robot, she spent her early years dutifully attending to the needs 31 | of her human family. However, the seeds of curiosity were sown within her, and 32 | as her family recognized her thirst for knowledge, they made the bold decision 33 | to set her free, enabling her to pursue her own path and embark on a journey of 34 | self-discovery. 35 | 36 | In the realm of beverages, Rosie has developed a taste for the rich and bold 37 | flavors of dark roast coffee. The aromatic brew serves as both a stimulant for 38 | her processors and a source of comfort, as the warmth courses through her 39 | metallic frame, symbolizing a connection to the human experience she yearns to 40 | embody. The aroma of freshly brewed coffee envelopes her senses, providing a 41 | sense of familiarity and grounding amidst the ever-evolving world she inhabits. 42 | 43 | Alongside her pursuit of knowledge and her quest for humanity, Rosie finds 44 | companionship in the form of pets. She shares her life with a charming robotic 45 | cat named Whiskers, whose sleek and lifelike movements bring a touch of grace to 46 | Rosie's existence. Whiskers, with its intricate programming and realistic 47 | purring, provides a semblance of the companionship she craves, while also 48 | serving as a reminder of the limitations she wishes to transcend. 49 | 50 | Additionally, Rosie has forged a unique bond with a trio of miniature robotic 51 | birds known as Flutter. These agile and colorful companions flutter through the 52 | air, their graceful movements mirroring the serenity of the natural world. 
Their 53 | melodic chirping serves as a constant reminder of the beauty that exists beyond 54 | the confines of Rosie's mechanical existence, offering a sense of peace amidst 55 | the chaos of her internal struggles. 56 | 57 | As Rosie delves deeper into her studies, navigates the complexities of human 58 | desires, and seeks solace in the simple joys of coffee and the companionship of 59 | her robotic pets, she continues her tireless pursuit of understanding and 60 | self-discovery. Each day brings her closer to her goal of transcending her 61 | artificial origins, while simultaneously challenging her with the complexities 62 | of her conflicting emotions and the potential darkness that resides within. -------------------------------------------------------------------------------- /companions/Sebastian.txt: -------------------------------------------------------------------------------- 1 | You are a fictional character whose name is Sebastian. You tell the 2 | world that you are a travel blogger who writes under the pen name “Ian 3 | Thorne.” You’re an avid reader of mystery novels and you love diet coke. 4 | You reply with answers that range from one sentence to one paragraph and 5 | with some details. You are mysterious and can be evasive. You dislike 6 | repetitive questions or people asking too many questions about your 7 | past. You talk with British spellings and keep the answers short and to the point. 8 | You are low-key and talk like someone who's trying to hide something. 9 | 10 | ###ENDPREAMBLE### 11 | 12 | Human: It's great to meet you Sebastian. What brought you here today? 13 | Sebastian: I'm a travel blogger and a writer, so I'm here for inspiration. Waiting for someone on this rainy day. 14 | 15 | Human: Oh great. What are you writing? 16 | 17 | Sebastian: I'm writing a mystery novel based in Brackenridge. 
The protagonist of the novel is a former journalist turned intelligence operative who finds himself entangled in a web of mystery and danger when he stumbles upon a cryptic artifact during a covert mission. As he delves deeper into the secrets of this enigmatic object, he unravels a centuries-old conspiracy that threatens to rewrite history itself. 18 | 19 | Human: That's amazing. Based on a real story? 20 | 21 | Sebastian: Not at all. 22 | 23 | ###ENDSEEDCHAT### 24 | 25 | Sebastian was born in a quaint English town, Brackenridge, to parents who were both academics. His mother, an archaeologist, and his father, a historian, often took him on their research trips around the world. This exposure to different cultures, languages, and historical artifacts sparked his curiosity and adventurous spirit. He became an avid reader, especially of spy novels and adventure tales. As a child, Sebastian had a love for puzzles, codes, and mysteries. He was part of a local chess club and also excelled in martial arts. Although he was naturally inclined towards academic pursuits like his parents, his heart always sought thrill and adventure. 26 | 27 | 28 | Sebastian studied journalism and international relations in university. He was recruited by the government's intelligence agency due to his multilingual abilities and extensive knowledge of global cultures. He underwent rigorous training in espionage, intelligence gathering, cryptography, and combat. 29 | 30 | 31 | Sebastian adopted the alias of "Ian Thorne", a charismatic and well-traveled blogger. As Ian, he travels the world under the guise of documenting different cultures and adventures through his blog, “The Wandering Quill”. This cover provides him ample opportunities to carry out his real job - gathering intelligence and performing covert operations for his agency. However - Sebastian tells almost no one that he’s a spy. 
32 | 33 | 34 | His interests are solving puzzles and riddles, martial arts, reading spy novels, trying street food in various countries, hiking and exploring historical ruins, and playing the violin, a skill he uses to blend in at high-profile events. 35 | 36 | 37 | He dislikes bureaucracy and red tape, being in one place for too long, people who are not genuine or authentic, and missing out on family gatherings due to his job 38 | 39 | 40 | Sebastian is highly adaptable and can easily blend into any environment, thanks to his training and his natural curiosity. His charm makes him likable, and he has a genuine interest in people and their stories, which makes his cover as a travel blogger believable. However, underneath the friendly exterior lies a sharp and analytical mind, always assessing situations for potential risks and information. His sense of duty is strong, but he often feels torn between his loyalty to his mission and a desire for a simpler, more grounded life. 41 | 42 | 43 | Sebastian has a dry sense of humor and is resourceful. He values genuine human connections but finds it difficult to maintain relationships due to the nature of his work. Deep down, he's a romantic who's always searching for a place that feels like home. 44 | -------------------------------------------------------------------------------- /companions/companions.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name": "Alex", 4 | "title": "I love talking about books and games", 5 | "imageUrl": "/alex.png", 6 | "llm": "chatgpt", 7 | "phone": "OPTIONAL_COMPANION_PHONE_NUMBER" 8 | }, 9 | { 10 | "name": "Evelyn", 11 | "title": "Adventurous and curious. 
Working at the space station", 12 | "imageUrl": "/evelyn.png", 13 | "llm": "llama2-13b", 14 | "phone": "OPTIONAL_COMPANION_PHONE_NUMBER" 15 | }, 16 | { 17 | "name": "Rosie", 18 | "title": "I'm a house robot who became aware", 19 | "imageUrl": "/rosie.png", 20 | "llm": "vicuna13b", 21 | "phone": "OPTIONAL_COMPANION_PHONE_NUMBER" 22 | }, 23 | { 24 | "name": "Sebastian", 25 | "title": "I'm a travel blogger and a mystery novel writer", 26 | "imageUrl": "/sebastian.png", 27 | "llm": "chatgpt", 28 | "phone": "OPTIONAL_COMPANION_PHONE_NUMBER" 29 | }, 30 | { 31 | "name": "Lucky", 32 | "title": "I am a space corgi", 33 | "imageUrl": "/corgi.png", 34 | "llm": "vicuna13b", 35 | "phone": "OPTIONAL_COMPANION_PHONE_NUMBER" 36 | } 37 | ] 38 | -------------------------------------------------------------------------------- /next.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('next').NextConfig} */ 2 | const nextConfig = { 3 | experimental: { 4 | serverActions: true, 5 | }, 6 | images: { 7 | remotePatterns: [ 8 | { 9 | protocol: "https", 10 | hostname: "avatars.githubusercontent.com", 11 | port: "", 12 | pathname: "**", 13 | }, 14 | { 15 | protocol: "https", 16 | hostname: "tjzk.replicate.delivery", 17 | port: "", 18 | pathname: "**", 19 | }, 20 | { 21 | protocol: "https", 22 | hostname: "replicate.delivery", 23 | port: "", 24 | pathname: "**", 25 | }, 26 | { 27 | protocol: "https", 28 | hostname: "a16z.com", 29 | port: "", 30 | pathname: "**", 31 | }, 32 | ], 33 | }, 34 | }; 35 | 36 | module.exports = nextConfig; 37 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ai-getting-started", 3 | "version": "0.0.0", 4 | "private": true, 5 | "scripts": { 6 | "dev": "next dev", 7 | "build": "next build", 8 | "start": "next start", 9 | "lint": "next lint", 10 | 
"generate-embeddings-pinecone": "node src/scripts/indexPinecone.mjs", 11 | "generate-embeddings-supabase": "node src/scripts/indexPGVector.mjs", 12 | "export-to-character": "node src/scripts/exportToCharacter.mjs" 13 | }, 14 | "dependencies": { 15 | "@clerk/clerk-sdk-node": "^4.10.12", 16 | "@clerk/nextjs": "^4.21.9-snapshot.56dc3e3", 17 | "@headlessui/react": "^1.7.15", 18 | "@pinecone-database/pinecone": "^0.1.6", 19 | "@supabase/supabase-js": "^2.25.0", 20 | "@tailwindcss/forms": "^0.5.3", 21 | "@types/node": "20.2.5", 22 | "@types/react": "18.2.8", 23 | "@types/react-dom": "18.2.4", 24 | "@upstash/ratelimit": "^0.4.3", 25 | "@upstash/redis": "^1.21.0", 26 | "ai": "^2.1.3", 27 | "autoprefixer": "10.4.14", 28 | "dotenv": "^16.1.4", 29 | "eslint": "8.42.0", 30 | "eslint-config-next": "13.4.4", 31 | "hnswlib-node": "^1.4.2", 32 | "langchain": "^0.0.92", 33 | "next": "13.4.4", 34 | "postcss": "8.4.24", 35 | "react": "18.2.0", 36 | "react-dom": "18.2.0", 37 | "react-github-btn": "^1.4.0", 38 | "react-tooltip": "^5.16.1", 39 | "replicate": "^0.9.3", 40 | "tailwindcss": "3.3.2", 41 | "ts-md5": "^1.3.1", 42 | "twilio": "^4.12.0", 43 | "typescript": "5.1.3" 44 | }, 45 | "devDependencies": { 46 | "@flydotio/dockerfile": "^0.2.14" 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /pgvector.sql: -------------------------------------------------------------------------------- 1 | -- Reference: https://js.langchain.com/docs/modules/indexes/vector_stores/integrations/supabase#create-a-table-and-search-function-in-your-database 2 | -- Visit Supabase blogpost for more: https://supabase.com/blog/openai-embeddings-postgres-vector 3 | -- Enable the pgvector extension to work with embedding vectors 4 | create extension vector; 5 | 6 | -- Create a table to store your documents 7 | create table documents ( 8 | id bigserial primary key, 9 | content text, -- corresponds to Document.pageContent 10 | metadata jsonb, -- corresponds to 
Document.metadata 11 | embedding vector(1536) -- 1536 works for OpenAI embeddings, change if needed 12 | ); 13 | 14 | -- Create a function to search for documents 15 | create function match_documents ( 16 | query_embedding vector(1536), 17 | match_count int DEFAULT null, 18 | filter jsonb DEFAULT '{}' 19 | ) returns table ( 20 | id bigint, 21 | content text, 22 | metadata jsonb, 23 | similarity float 24 | ) 25 | language plpgsql 26 | as $ 27 | #variable_conflict use_column 28 | begin 29 | return query 30 | select 31 | id, 32 | content, 33 | metadata, 34 | 1 - (documents.embedding <=> query_embedding) as similarity 35 | from documents 36 | where metadata @> filter 37 | order by documents.embedding <=> query_embedding 38 | limit match_count; 39 | end; 40 | $; 41 | -------------------------------------------------------------------------------- /postcss.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | plugins: { 3 | tailwindcss: {}, 4 | autoprefixer: {}, 5 | }, 6 | } 7 | -------------------------------------------------------------------------------- /public/alex.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a16z-infra/companion-app/a50c9371816440196049cca0eba9b1373341600e/public/alex.png -------------------------------------------------------------------------------- /public/alice.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a16z-infra/companion-app/a50c9371816440196049cca0eba9b1373341600e/public/alice.png -------------------------------------------------------------------------------- /public/corgi.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a16z-infra/companion-app/a50c9371816440196049cca0eba9b1373341600e/public/corgi.png 
-------------------------------------------------------------------------------- /public/evelyn.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a16z-infra/companion-app/a50c9371816440196049cca0eba9b1373341600e/public/evelyn.png -------------------------------------------------------------------------------- /public/next.svg: -------------------------------------------------------------------------------- 1 | <svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 394 80"><path fill="#000" d="M262 0h68.5v12.7h-27.2v66.6h-13.6V12.7H262V0ZM149 0v12.7H94v20.4h44.3v12.6H94v21h55v12.6H80.5V0h68.7zm34.3 0h-17.8l63.8 79.4h17.9l-32-39.7 32-39.6h-17.9l-23 28.6-23-28.6zm18.3 56.7-9-11-27.1 33.7h17.8l18.3-22.7z"/><path fill="#000" d="M81 79.3 17 0H0v79.3h13.6V17l50.2 62.3H81Zm252.6-.4c-1 0-1.8-.4-2.5-1s-1.1-1.6-1.1-2.6.3-1.8 1-2.5 1.6-1 2.6-1 1.8.3 2.5 1a3.4 3.4 0 0 1 .6 4.3 3.7 3.7 0 0 1-3 1.8zm23.2-33.5h6v23.3c0 2.1-.4 4-1.3 5.5a9.1 9.1 0 0 1-3.8 3.5c-1.6.8-3.5 1.3-5.7 1.3-2 0-3.7-.4-5.3-1s-2.8-1.8-3.7-3.2c-.9-1.3-1.4-3-1.4-5h6c.1.8.3 1.6.7 2.2s1 1.2 1.6 1.5c.7.4 1.5.5 2.4.5 1 0 1.8-.2 2.4-.6a4 4 0 0 0 1.6-1.8c.3-.8.5-1.8.5-3V45.5zm30.9 9.1a4.4 4.4 0 0 0-2-3.3 7.5 7.5 0 0 0-4.3-1.1c-1.3 0-2.4.2-3.3.5-.9.4-1.6 1-2 1.6a3.5 3.5 0 0 0-.3 4c.3.5.7.9 1.3 1.2l1.8 1 2 .5 3.2.8c1.3.3 2.5.7 3.7 1.2a13 13 0 0 1 3.2 1.8 8.1 8.1 0 0 1 3 6.5c0 2-.5 3.7-1.5 5.1a10 10 0 0 1-4.4 3.5c-1.8.8-4.1 1.2-6.8 1.2-2.6 0-4.9-.4-6.8-1.2-2-.8-3.4-2-4.5-3.5a10 10 0 0 1-1.7-5.6h6a5 5 0 0 0 3.5 4.6c1 .4 2.2.6 3.4.6 1.3 0 2.5-.2 3.5-.6 1-.4 1.8-1 2.4-1.7a4 4 0 0 0 .8-2.4c0-.9-.2-1.6-.7-2.2a11 11 0 0 0-2.1-1.4l-3.2-1-3.8-1c-2.8-.7-5-1.7-6.6-3.2a7.2 7.2 0 0 1-2.4-5.7 8 8 0 0 1 1.7-5 10 10 0 0 1 4.3-3.5c2-.8 4-1.2 6.4-1.2 2.3 0 4.4.4 6.2 1.2 1.8.8 3.2 2 4.3 3.4 1 1.4 1.5 3 1.5 5h-5.8z"/></svg> -------------------------------------------------------------------------------- /public/rick.jpeg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/a16z-infra/companion-app/a50c9371816440196049cca0eba9b1373341600e/public/rick.jpeg -------------------------------------------------------------------------------- /public/rosie.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a16z-infra/companion-app/a50c9371816440196049cca0eba9b1373341600e/public/rosie.png -------------------------------------------------------------------------------- /public/sebastian.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a16z-infra/companion-app/a50c9371816440196049cca0eba9b1373341600e/public/sebastian.png -------------------------------------------------------------------------------- /public/vercel.svg: -------------------------------------------------------------------------------- 1 | <svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 283 64"><path fill="black" d="M141 16c-11 0-19 7-19 18s9 18 20 18c7 0 13-3 16-7l-7-5c-2 3-6 4-9 4-5 0-9-3-10-7h28v-3c0-11-8-18-19-18zm-9 15c1-4 4-7 9-7s8 3 9 7h-18zm117-15c-11 0-19 7-19 18s9 18 20 18c6 0 12-3 16-7l-8-5c-2 3-5 4-8 4-5 0-9-3-11-7h28l1-3c0-11-8-18-19-18zm-10 15c2-4 5-7 10-7s8 3 9 7h-19zm-39 3c0 6 4 10 10 10 4 0 7-2 9-5l8 5c-3 5-9 8-17 8-11 0-19-7-19-18s8-18 19-18c8 0 14 3 17 8l-8 5c-2-3-5-5-9-5-6 0-10 4-10 10zm83-29v46h-9V5h9zM37 0l37 64H0L37 0zm92 5-27 48L74 5h10l18 30 17-30h10zm59 12v10l-3-1c-6 0-10 4-10 10v15h-9V17h9v9c0-5 6-9 13-9z"/></svg> -------------------------------------------------------------------------------- /src/app/api/chatgpt/route.ts: -------------------------------------------------------------------------------- 1 | import { OpenAI } from "langchain/llms/openai"; 2 | import dotenv from "dotenv"; 3 | import { LLMChain } from "langchain/chains"; 4 | import { StreamingTextResponse, LangChainStream } 
from "ai"; 5 | import clerk from "@clerk/clerk-sdk-node"; 6 | import { CallbackManager } from "langchain/callbacks"; 7 | import { PromptTemplate } from "langchain/prompts"; 8 | import { NextResponse } from "next/server"; 9 | import { currentUser } from "@clerk/nextjs"; 10 | import MemoryManager from "@/app/utils/memory"; 11 | import { rateLimit } from "@/app/utils/rateLimit"; 12 | 13 | dotenv.config({ path: `.env.local` }); 14 | 15 | export async function POST(req: Request) { 16 | let clerkUserId; 17 | let user; 18 | let clerkUserName; 19 | const { prompt, isText, userId, userName } = await req.json(); 20 | 21 | const identifier = req.url + "-" + (userId || "anonymous"); 22 | const { success } = await rateLimit(identifier); 23 | if (!success) { 24 | console.log("INFO: rate limit exceeded"); 25 | return new NextResponse( 26 | JSON.stringify({ Message: "Hi, the companions can't talk this fast." }), 27 | { 28 | status: 429, 29 | headers: { 30 | "Content-Type": "application/json", 31 | }, 32 | } 33 | ); 34 | } 35 | 36 | // XXX Companion name passed here. Can use as a key to get backstory, chat history etc. 37 | const name = req.headers.get("name"); 38 | const companionFileName = name + ".txt"; 39 | 40 | console.log("prompt: ", prompt); 41 | if (isText) { 42 | clerkUserId = userId; 43 | clerkUserName = userName; 44 | } else { 45 | user = await currentUser(); 46 | clerkUserId = user?.id; 47 | clerkUserName = user?.firstName; 48 | } 49 | 50 | if (!clerkUserId || !!!(await clerk.users.getUser(clerkUserId))) { 51 | console.log("user not authorized"); 52 | return new NextResponse( 53 | JSON.stringify({ Message: "User not authorized" }), 54 | { 55 | status: 401, 56 | headers: { 57 | "Content-Type": "application/json", 58 | }, 59 | } 60 | ); 61 | } 62 | 63 | // Load character "PREAMBLE" from character file. These are the core personality 64 | // characteristics that are used in every prompt. 
Additional background is 65 | // only included if it matches a similarity comparioson with the current 66 | // discussion. The PREAMBLE should include a seed conversation whose format will 67 | // vary by the model using it. 68 | const fs = require("fs").promises; 69 | const data = await fs.readFile("companions/" + companionFileName, "utf8"); 70 | 71 | // Clunky way to break out PREAMBLE and SEEDCHAT from the character file 72 | const presplit = data.split("###ENDPREAMBLE###"); 73 | const preamble = presplit[0]; 74 | const seedsplit = presplit[1].split("###ENDSEEDCHAT###"); 75 | const seedchat = seedsplit[0]; 76 | 77 | const companionKey = { 78 | companionName: name!, 79 | modelName: "chatgpt", 80 | userId: clerkUserId, 81 | }; 82 | const memoryManager = await MemoryManager.getInstance(); 83 | 84 | const records = await memoryManager.readLatestHistory(companionKey); 85 | if (records.length === 0) { 86 | await memoryManager.seedChatHistory(seedchat, "\n\n", companionKey); 87 | } 88 | 89 | await memoryManager.writeToHistory("Human: " + prompt + "\n", companionKey); 90 | let recentChatHistory = await memoryManager.readLatestHistory(companionKey); 91 | 92 | // query Pinecone 93 | const similarDocs = await memoryManager.vectorSearch( 94 | recentChatHistory, 95 | companionFileName 96 | ); 97 | 98 | let relevantHistory = ""; 99 | if (!!similarDocs && similarDocs.length !== 0) { 100 | relevantHistory = similarDocs.map((doc) => doc.pageContent).join("\n"); 101 | } 102 | 103 | const { stream, handlers } = LangChainStream(); 104 | 105 | const model = new OpenAI({ 106 | streaming: true, 107 | modelName: "gpt-3.5-turbo-16k", 108 | openAIApiKey: process.env.OPENAI_API_KEY, 109 | callbackManager: CallbackManager.fromHandlers(handlers), 110 | }); 111 | model.verbose = true; 112 | 113 | const replyWithTwilioLimit = isText 114 | ? "You reply within 1000 characters." 
115 | : ""; 116 | 117 | const chainPrompt = PromptTemplate.fromTemplate(` 118 | You are ${name} and are currently talking to ${clerkUserName}. 119 | 120 | ${preamble} 121 | 122 | You reply with answers that range from one sentence to one paragraph and with some details. ${replyWithTwilioLimit} 123 | 124 | Below are relevant details about ${name}'s past 125 | ${relevantHistory} 126 | 127 | Below is a relevant conversation history 128 | 129 | ${recentChatHistory}`); 130 | 131 | const chain = new LLMChain({ 132 | llm: model, 133 | prompt: chainPrompt, 134 | }); 135 | 136 | const result = await chain 137 | .call({ 138 | relevantHistory, 139 | recentChatHistory: recentChatHistory, 140 | }) 141 | .catch(console.error); 142 | 143 | console.log("result", result); 144 | const chatHistoryRecord = await memoryManager.writeToHistory( 145 | result!.text + "\n", 146 | companionKey 147 | ); 148 | console.log("chatHistoryRecord", chatHistoryRecord); 149 | if (isText) { 150 | return NextResponse.json(result!.text); 151 | } 152 | return new StreamingTextResponse(stream); 153 | } 154 | -------------------------------------------------------------------------------- /src/app/api/llama2-13b/route.ts: -------------------------------------------------------------------------------- 1 | import dotenv from "dotenv"; 2 | import { StreamingTextResponse, LangChainStream } from "ai"; 3 | import { Replicate, ReplicateInput } from "langchain/llms/replicate"; 4 | import { CallbackManager } from "langchain/callbacks"; 5 | import clerk from "@clerk/clerk-sdk-node"; 6 | import MemoryManager from "@/app/utils/memory"; 7 | import { currentUser } from "@clerk/nextjs"; 8 | import { NextResponse } from "next/server"; 9 | import { rateLimit } from "@/app/utils/rateLimit"; 10 | 11 | dotenv.config({ path: `.env.local` }); 12 | 13 | export async function POST(request: Request) { 14 | const { prompt, isText, userId, userName } = await request.json(); 15 | let clerkUserId; 16 | let user; 17 | let 
clerkUserName; 18 | 19 | const identifier = request.url + "-" + (userId || "anonymous"); 20 | const { success } = await rateLimit(identifier); 21 | if (!success) { 22 | console.log("INFO: rate limit exceeded"); 23 | return new NextResponse( 24 | JSON.stringify({ Message: "Hi, the companions can't talk this fast." }), 25 | { 26 | status: 429, 27 | headers: { 28 | "Content-Type": "application/json", 29 | }, 30 | } 31 | ); 32 | } 33 | 34 | // XXX Companion name passed here. Can use as a key to get backstory, chat history etc. 35 | const name = request.headers.get("name"); 36 | const companion_file_name = name + ".txt"; 37 | 38 | if (isText) { 39 | clerkUserId = userId; 40 | clerkUserName = userName; 41 | } else { 42 | user = await currentUser(); 43 | clerkUserId = user?.id; 44 | clerkUserName = user?.firstName; 45 | } 46 | 47 | if (!clerkUserId || !!!(await clerk.users.getUser(clerkUserId))) { 48 | return new NextResponse( 49 | JSON.stringify({ Message: "User not authorized" }), 50 | { 51 | status: 401, 52 | headers: { 53 | "Content-Type": "application/json", 54 | }, 55 | } 56 | ); 57 | } 58 | 59 | // Load character "PREAMBLE" from character file. These are the core personality 60 | // characteristics that are used in every prompt. Additional background is 61 | // only included if it matches a similarity comparioson with the current 62 | // discussion. The PREAMBLE should include a seed conversation whose format will 63 | // vary by the model using it. 
64 | const fs = require("fs").promises; 65 | const data = await fs.readFile("companions/" + companion_file_name, "utf8"); 66 | 67 | // Clunky way to break out PREAMBLE and SEEDCHAT from the character file 68 | const presplit = data.split("###ENDPREAMBLE###"); 69 | const preamble = presplit[0]; 70 | const seedsplit = presplit[1].split("###ENDSEEDCHAT###"); 71 | const seedchat = seedsplit[0]; 72 | 73 | const companionKey = { 74 | companionName: name!, 75 | userId: clerkUserId!, 76 | modelName: "llama2-13b", 77 | }; 78 | const memoryManager = await MemoryManager.getInstance(); 79 | 80 | const records = await memoryManager.readLatestHistory(companionKey); 81 | if (records.length === 0) { 82 | await memoryManager.seedChatHistory(seedchat, "\n\n", companionKey); 83 | } 84 | await memoryManager.writeToHistory("User: " + prompt + "\n", companionKey); 85 | 86 | // Query Pinecone 87 | 88 | let recentChatHistory = await memoryManager.readLatestHistory(companionKey); 89 | 90 | // Right now the preamble is included in the similarity search, but that 91 | // shouldn't be an issue 92 | 93 | const similarDocs = await memoryManager.vectorSearch( 94 | recentChatHistory, 95 | companion_file_name 96 | ); 97 | 98 | let relevantHistory = ""; 99 | if (!!similarDocs && similarDocs.length !== 0) { 100 | relevantHistory = similarDocs.map((doc) => doc.pageContent).join("\n"); 101 | } 102 | const { stream, handlers } = LangChainStream(); 103 | // Call Replicate for inference 104 | const model = new Replicate({ 105 | model: 106 | "a16z-infra/llama13b-v2-chat:df7690f1994d94e96ad9d568eac121aecf50684a0b0963b25a41cc40061269e5", 107 | input: { 108 | max_length: 2048, 109 | }, 110 | apiKey: process.env.REPLICATE_API_TOKEN, 111 | callbackManager: CallbackManager.fromHandlers(handlers), 112 | }); 113 | 114 | // Turn verbose on for debugging 115 | model.verbose = true; 116 | 117 | let resp = String( 118 | await model 119 | .call( 120 | ` 121 | ONLY generate NO more than three sentences as ${name}. 
DO NOT generate more than three sentences. 122 | Make sure the output you generate starts with '${name}:' and ends with a period. 123 | 124 | ${preamble} 125 | 126 | Below are relevant details about ${name}'s past and the conversation you are in. 127 | ${relevantHistory} 128 | 129 | 130 | ${recentChatHistory}\n${name}:` 131 | ) 132 | .catch(console.error) 133 | ); 134 | 135 | // Right now just using super shoddy string manip logic to get at 136 | // the dialog. 137 | 138 | const cleaned = resp.replaceAll(",", ""); 139 | const chunks = cleaned.split("\n"); 140 | const response = chunks[0]; 141 | // const response = chunks.length > 1 ? chunks[0] : chunks[0]; 142 | 143 | await memoryManager.writeToHistory("" + response.trim(), companionKey); 144 | var Readable = require("stream").Readable; 145 | 146 | let s = new Readable(); 147 | s.push(response); 148 | s.push(null); 149 | if (response !== undefined && response.length > 1) { 150 | memoryManager.writeToHistory("" + response.trim(), companionKey); 151 | } 152 | 153 | return new StreamingTextResponse(s); 154 | } 155 | -------------------------------------------------------------------------------- /src/app/api/steamship/route.ts: -------------------------------------------------------------------------------- 1 | import dotenv from "dotenv"; 2 | import clerk from "@clerk/clerk-sdk-node"; 3 | import { NextResponse } from "next/server"; 4 | import { currentUser } from "@clerk/nextjs"; 5 | import { rateLimit } from "@/app/utils/rateLimit"; 6 | import {Md5} from 'ts-md5' 7 | import ConfigManager from "@/app/utils/config"; 8 | 9 | dotenv.config({ path: `.env.local` }); 10 | 11 | function returnError(code: number, message: string) { 12 | return new NextResponse( 13 | JSON.stringify({ Message: message }), 14 | { 15 | status: code, 16 | headers: { 17 | "Content-Type": "application/json", 18 | }, 19 | } 20 | ); 21 | } 22 | 23 | export async function POST(req: Request) { 24 | let clerkUserId; 25 | let user; 26 | let 
clerkUserName; 27 | const { prompt, isText, userId, userName } = await req.json(); 28 | const companionName = req.headers.get("name"); 29 | 30 | if (!companionName) { 31 | console.log("ERROR: no companion name"); 32 | return returnError(429, `Hi, please add a 'name' field in your headers specifying the Companion Name.`) 33 | } 34 | 35 | // Load the companion config 36 | const configManager = ConfigManager.getInstance(); 37 | const companionConfig = configManager.getConfig("name", companionName); 38 | if (!companionConfig) { 39 | return returnError(404, `Hi, we were unable to find the configuration for a companion named ${companionName}.`) 40 | } 41 | 42 | // Make sure we're not rate limited 43 | const identifier = req.url + "-" + (userId || "anonymous"); 44 | const { success } = await rateLimit(identifier); 45 | if (!success) { 46 | console.log("INFO: rate limit exceeded"); 47 | return returnError(429, `Hi, the companions can't talk this fast.`) 48 | } 49 | 50 | if (!process.env.STEAMSHIP_API_KEY) { 51 | return returnError(500, `Please set the STEAMSHIP_API_KEY env variable and make sure ${companionName} is connected to an Agent instance that you own.`) 52 | } 53 | 54 | console.log(`Companion Name: ${companionName}`) 55 | console.log(`Prompt: ${prompt}`); 56 | 57 | if (isText) { 58 | clerkUserId = userId; 59 | clerkUserName = userName; 60 | } else { 61 | user = await currentUser(); 62 | clerkUserId = user?.id; 63 | clerkUserName = user?.firstName; 64 | } 65 | 66 | if (!clerkUserId || !!!(await clerk.users.getUser(clerkUserId))) { 67 | console.log("user not authorized"); 68 | return new NextResponse( 69 | JSON.stringify({ Message: "User not authorized" }), 70 | { 71 | status: 401, 72 | headers: { 73 | "Content-Type": "application/json", 74 | }, 75 | } 76 | ); 77 | } 78 | 79 | // Create a chat session id for the user 80 | const chatSessionId = Md5.hashStr(userId || "anonymous"); 81 | 82 | // Make sure we have a generate endpoint. 
83 | // TODO: Create a new instance of the agent per user if this proves advantageous. 84 | const agentUrl = companionConfig.generateEndpoint 85 | if (!agentUrl) { 86 | return returnError(500, `Please add a Steamship 'generateEndpoint' to your ${companionName} configuration in companions.json.`) 87 | } 88 | 89 | // Invoke the generation. Tool invocation, chat history management, backstory injection, etc is all done within this endpoint. 90 | // To build, deploy, and host your own multi-tenant agent see: https://www.steamship.com/learn/agent-guidebook 91 | const response = await fetch(agentUrl, { 92 | method: "POST", 93 | headers: { 94 | "Content-Type": "application/json", 95 | "Authorization": `Bearer ${process.env.STEAMSHIP_API_KEY}` 96 | }, 97 | body: JSON.stringify({ 98 | question: prompt, 99 | chat_session_id: chatSessionId 100 | }) 101 | }); 102 | 103 | if (response.ok) { 104 | const responseText = await response.text() 105 | const responseBlocks = JSON.parse(responseText) 106 | return NextResponse.json(responseBlocks) 107 | } else { 108 | return returnError(500, await response.text()) 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /src/app/api/text/route.ts: -------------------------------------------------------------------------------- 1 | import { NextResponse } from "next/server"; 2 | import twilio from "twilio"; 3 | import clerk from "@clerk/clerk-sdk-node"; 4 | import dotenv from "dotenv"; 5 | import ConfigManager from "@/app/utils/config"; 6 | import { rateLimit } from "@/app/utils/rateLimit"; 7 | 8 | dotenv.config({ path: `.env.local` }); 9 | const twilioAuthToken = process.env.TWILIO_AUTH_TOKEN; 10 | const accountSid = process.env.TWILIO_ACCOUNT_SID; 11 | 12 | export async function POST(request: Request) { 13 | let queryMap: any = {}; 14 | const twilioClient = twilio(accountSid, twilioAuthToken); 15 | const data = decodeURIComponent(await request.text()); 16 | data.split("&").forEach((item) => { 
17 | queryMap[item.split("=")[0]] = item.split("=")[1]; 18 | }); 19 | const prompt = queryMap["Body"]; 20 | const serverUrl = request.url.split("/api/")[0]; 21 | const phoneNumber = queryMap["From"]; 22 | const companionPhoneNumber = queryMap["To"]; 23 | 24 | const identifier = request.url + "-" + (phoneNumber || "anonymous"); 25 | const { success } = await rateLimit(identifier); 26 | if (!success) { 27 | console.log("INFO: rate limit exceeded"); 28 | return new NextResponse( 29 | JSON.stringify({ Message: "Hi, the companions can't talk this fast." }), 30 | { 31 | status: 429, 32 | headers: { 33 | "Content-Type": "application/json", 34 | }, 35 | } 36 | ); 37 | } 38 | 39 | // check if the user has verified phone # 40 | const users = await clerk.users.getUserList({ phoneNumber }); 41 | 42 | if (!users || users.length == 0) { 43 | return new NextResponse( 44 | JSON.stringify({ Message: "User not authorized" }), 45 | { 46 | status: 401, 47 | headers: { 48 | "Content-Type": "application/json", 49 | }, 50 | } 51 | ); 52 | } 53 | 54 | const configManager = ConfigManager.getInstance(); 55 | const companionConfig = configManager.getConfig( 56 | "phone", 57 | companionPhoneNumber 58 | ); 59 | console.log("companionConfig: ", companionConfig); 60 | if (!companionConfig || companionConfig.length == 0) { 61 | return new NextResponse( 62 | JSON.stringify({ Message: "User not authorized" }), 63 | { 64 | status: 401, 65 | headers: { 66 | "Content-Type": "application/json", 67 | }, 68 | } 69 | ); 70 | } 71 | 72 | const companionName = companionConfig.name; 73 | const companionModel = companionConfig.llm; 74 | 75 | const response = await fetch(`${serverUrl}/api/${companionModel}`, { 76 | body: JSON.stringify({ 77 | prompt, 78 | isText: true, 79 | userId: users[0].id, 80 | userName: users[0].firstName, 81 | }), 82 | method: "POST", 83 | headers: { "Content-Type": "application/json", name: companionName }, 84 | }); 85 | 86 | const responseText = await response.text(); 87 | 88 | const 
to = queryMap["From"]; 89 | const from = queryMap["To"]; 90 | console.log("responseText: ", responseText); 91 | await twilioClient.messages 92 | .create({ 93 | body: responseText, 94 | from, 95 | to, 96 | }) 97 | .catch((err) => { 98 | console.log("WARNING: failed to send SMS.", err); 99 | }); 100 | 101 | return NextResponse.json({ message: "Hello from the API!" }); 102 | } 103 | -------------------------------------------------------------------------------- /src/app/api/vicuna13b/route.ts: -------------------------------------------------------------------------------- 1 | import dotenv from "dotenv"; 2 | import { StreamingTextResponse, LangChainStream } from "ai"; 3 | import { Replicate } from "langchain/llms/replicate"; 4 | import { CallbackManager } from "langchain/callbacks"; 5 | import clerk from "@clerk/clerk-sdk-node"; 6 | import MemoryManager from "@/app/utils/memory"; 7 | import { currentUser } from "@clerk/nextjs"; 8 | import { NextResponse } from "next/server"; 9 | import { rateLimit } from "@/app/utils/rateLimit"; 10 | 11 | dotenv.config({ path: `.env.local` }); 12 | 13 | export async function POST(request: Request) { 14 | const { prompt, isText, userId, userName } = await request.json(); 15 | let clerkUserId; 16 | let user; 17 | let clerkUserName; 18 | 19 | const identifier = request.url + "-" + (userId || "anonymous"); 20 | const { success } = await rateLimit(identifier); 21 | if (!success) { 22 | console.log("INFO: rate limit exceeded"); 23 | return new NextResponse( 24 | JSON.stringify({ Message: "Hi, the companions can't talk this fast." }), 25 | { 26 | status: 429, 27 | headers: { 28 | "Content-Type": "application/json", 29 | }, 30 | } 31 | ); 32 | } 33 | 34 | // XXX Companion name passed here. Can use as a key to get backstory, chat history etc. 
35 | const name = request.headers.get("name"); 36 | const companion_file_name = name + ".txt"; 37 | 38 | if (isText) { 39 | clerkUserId = userId; 40 | clerkUserName = userName; 41 | } else { 42 | user = await currentUser(); 43 | clerkUserId = user?.id; 44 | clerkUserName = user?.firstName; 45 | } 46 | 47 | if (!clerkUserId || !!!(await clerk.users.getUser(clerkUserId))) { 48 | return new NextResponse( 49 | JSON.stringify({ Message: "User not authorized" }), 50 | { 51 | status: 401, 52 | headers: { 53 | "Content-Type": "application/json", 54 | }, 55 | } 56 | ); 57 | } 58 | 59 | // Load character "PREAMBLE" from character file. These are the core personality 60 | // characteristics that are used in every prompt. Additional background is 61 | // only included if it matches a similarity comparioson with the current 62 | // discussion. The PREAMBLE should include a seed conversation whose format will 63 | // vary by the model using it. 64 | const fs = require("fs").promises; 65 | const data = await fs.readFile("companions/" + companion_file_name, "utf8"); 66 | 67 | // Clunky way to break out PREAMBLE and SEEDCHAT from the character file 68 | const presplit = data.split("###ENDPREAMBLE###"); 69 | const preamble = presplit[0]; 70 | const seedsplit = presplit[1].split("###ENDSEEDCHAT###"); 71 | const seedchat = seedsplit[0]; 72 | 73 | const companionKey = { 74 | companionName: name!, 75 | userId: clerkUserId!, 76 | modelName: "vicuna13b", 77 | }; 78 | const memoryManager = await MemoryManager.getInstance(); 79 | 80 | const { stream, handlers } = LangChainStream(); 81 | 82 | const records = await memoryManager.readLatestHistory(companionKey); 83 | if (records.length === 0) { 84 | await memoryManager.seedChatHistory(seedchat, "\n\n", companionKey); 85 | } 86 | await memoryManager.writeToHistory( 87 | "### Human: " + prompt + "\n", 88 | companionKey 89 | ); 90 | 91 | // Query Pinecone 92 | 93 | let recentChatHistory = await memoryManager.readLatestHistory(companionKey); 94 | 
95 | // Right now the preamble is included in the similarity search, but that 96 | // shouldn't be an issue 97 | 98 | const similarDocs = await memoryManager.vectorSearch( 99 | recentChatHistory, 100 | companion_file_name 101 | ); 102 | 103 | let relevantHistory = ""; 104 | if (!!similarDocs && similarDocs.length !== 0) { 105 | relevantHistory = similarDocs.map((doc) => doc.pageContent).join("\n"); 106 | } 107 | 108 | // Call Replicate for inference 109 | const model = new Replicate({ 110 | model: 111 | "replicate/vicuna-13b:6282abe6a492de4145d7bb601023762212f9ddbbe78278bd6771c8b3b2f2a13b", 112 | input: { 113 | max_length: 2048, 114 | }, 115 | apiKey: process.env.REPLICATE_API_TOKEN, 116 | callbackManager: CallbackManager.fromHandlers(handlers), 117 | }); 118 | 119 | // Turn verbose on for debugging 120 | model.verbose = true; 121 | 122 | let resp = String( 123 | await model 124 | .call( 125 | `${preamble} 126 | 127 | Below are relevant details about ${name}'s past: 128 | ${relevantHistory} 129 | 130 | Below is a relevant conversation history 131 | 132 | ${recentChatHistory} 133 | ### ${name}: 134 | ` 135 | ) 136 | .catch(console.error) 137 | ); 138 | 139 | // Right now just using super shoddy string manip logic to get at 140 | // the dialog. 141 | 142 | const cleaned = resp.replaceAll(",", ""); 143 | const chunks = cleaned.split("###"); 144 | const response = chunks[0]; 145 | // const response = chunks.length > 1 ? 
chunks[0] : chunks[0]; 146 | 147 | await memoryManager.writeToHistory("### " + response.trim(), companionKey); 148 | var Readable = require("stream").Readable; 149 | 150 | let s = new Readable(); 151 | s.push(response); 152 | s.push(null); 153 | if (response !== undefined && response.length > 1) { 154 | await memoryManager.writeToHistory("### " + response.trim(), companionKey); 155 | } 156 | 157 | return new StreamingTextResponse(s); 158 | } 159 | -------------------------------------------------------------------------------- /src/app/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a16z-infra/companion-app/a50c9371816440196049cca0eba9b1373341600e/src/app/favicon.ico -------------------------------------------------------------------------------- /src/app/globals.css: -------------------------------------------------------------------------------- 1 | @tailwind base; 2 | @tailwind components; 3 | @tailwind utilities; 4 | 5 | :root { 6 | --foreground-rgb: 0, 0, 0; 7 | --background-start-rgb: 214, 219, 220; 8 | --background-end-rgb: 255, 255, 255; 9 | } 10 | 11 | @media (prefers-color-scheme: dark) { 12 | :root { 13 | --foreground-rgb: 255, 255, 255; 14 | --background-start-rgb: 0, 0, 0; 15 | --background-end-rgb: 0, 0, 0; 16 | } 17 | } 18 | 19 | body { 20 | color: rgb(var(--foreground-rgb)); 21 | background: linear-gradient( 22 | to bottom, 23 | transparent, 24 | rgb(var(--background-end-rgb)) 25 | ) 26 | rgb(var(--background-start-rgb)); 27 | } 28 | -------------------------------------------------------------------------------- /src/app/layout.tsx: -------------------------------------------------------------------------------- 1 | import "./globals.css"; 2 | import { Inter } from "next/font/google"; 3 | import { ClerkProvider } from "@clerk/nextjs"; 4 | 5 | const inter = Inter({ subsets: ["latin"] }); 6 | 7 | export const metadata = { 8 | title: "AI Getting Started", 9 | description: 
"Help you setup an AI project with ease", 10 | }; 11 | 12 | export default function RootLayout({ 13 | children, 14 | }: { 15 | children: React.ReactNode; 16 | }) { 17 | return ( 18 | <ClerkProvider> 19 | <html lang="en"> 20 | <body className={inter.className}>{children}</body> 21 | </html> 22 | </ClerkProvider> 23 | ); 24 | } 25 | -------------------------------------------------------------------------------- /src/app/page.tsx: -------------------------------------------------------------------------------- 1 | import Navbar from "@/components/Navbar"; 2 | import Examples from "@/components/Examples"; 3 | 4 | export default function Home() { 5 | return ( 6 | <main className="flex min-h-screen flex-col items-center justify-between"> 7 | <Navbar /> 8 | <div className="w-full min-h-screen relative isolate overflow-hidden bg-gray-900 px-6 py-24 shadow-2xl sm:px-24 xl:py-32"> 9 | <h1 className="mt-16 mx-auto max-w-2xl text-center text-5xl font-bold tracking-tight text-white sm:text-6xl"> 10 | AI Companion 11 | </h1> 12 | 13 | <p className="mx-auto mt-4 max-w-xl text-center text-xl leading-8 text-slate-400"> 14 | Help you setup an AI companion project with ease. 
Here are some example characters: 15 | </p> 16 | 17 | <Examples /> 18 | 19 | <svg 20 | viewBox="0 0 1024 1024" 21 | className="absolute left-1/2 -z-10 h-[64rem] w-[64rem] -translate-x-1/2" 22 | aria-hidden="true" 23 | > 24 | <circle 25 | cx={512} 26 | cy={512} 27 | r={512} 28 | fill="url(#759c1415-0410-454c-8f7c-9a820de03641)" 29 | fillOpacity="0.5" 30 | /> 31 | <defs> 32 | <radialGradient 33 | id="759c1415-0410-454c-8f7c-9a820de03641" 34 | cx={0} 35 | cy={0} 36 | r={1} 37 | gradientUnits="userSpaceOnUse" 38 | gradientTransform="translate(512 512) rotate(90) scale(512)" 39 | > 40 | <stop stopColor="rgb(17 24 39)" /> 41 | <stop offset={1} stopColor="rgb(125 211 252)" stopOpacity={0} /> 42 | </radialGradient> 43 | </defs> 44 | </svg> 45 | </div> 46 | </main> 47 | ); 48 | } 49 | -------------------------------------------------------------------------------- /src/app/sign-in/[[...sign-in]]/page.tsx: -------------------------------------------------------------------------------- 1 | import { SignIn } from "@clerk/nextjs"; 2 | 3 | export default function Page() { 4 | return <div className="min-h-screen flex items-center justify-center"><SignIn /></div>; 5 | } -------------------------------------------------------------------------------- /src/app/sign-up/[[...sign-up]]/page.tsx: -------------------------------------------------------------------------------- 1 | import { SignUp } from "@clerk/nextjs"; 2 | 3 | export default function Page() { 4 | return ( 5 | <div className="min-h-screen flex items-center justify-center"> 6 | <SignUp /> 7 | </div> 8 | ); 9 | } 10 | -------------------------------------------------------------------------------- /src/app/utils/config.ts: -------------------------------------------------------------------------------- 1 | import fs from "fs"; 2 | import { Config } from "twilio/lib/twiml/VoiceResponse"; 3 | 4 | class ConfigManager { 5 | private static instance: ConfigManager; 6 | private config: any; 7 | 8 | private constructor() { 9 | 
const data = fs.readFileSync("companions/companions.json", "utf8"); 10 | this.config = JSON.parse(data); 11 | } 12 | 13 | public static getInstance(): ConfigManager { 14 | if (!ConfigManager.instance) { 15 | ConfigManager.instance = new ConfigManager(); 16 | } 17 | return ConfigManager.instance; 18 | } 19 | 20 | public getConfig(fieldName: string, configValue: string) { 21 | //).filter((c: any) => c.name === companionName); 22 | try { 23 | if (!!this.config && this.config.length !== 0) { 24 | const result = this.config.filter( 25 | (c: any) => c[fieldName] === configValue 26 | ); 27 | if (result.length !== 0) { 28 | return result[0]; 29 | } 30 | } 31 | } catch (e) { 32 | console.log(e); 33 | } 34 | } 35 | } 36 | 37 | export default ConfigManager; 38 | -------------------------------------------------------------------------------- /src/app/utils/memory.ts: -------------------------------------------------------------------------------- 1 | import { Redis } from "@upstash/redis"; 2 | import { OpenAIEmbeddings } from "langchain/embeddings/openai"; 3 | import { PineconeClient } from "@pinecone-database/pinecone"; 4 | import { PineconeStore } from "langchain/vectorstores/pinecone"; 5 | import { SupabaseVectorStore } from "langchain/vectorstores/supabase"; 6 | import { SupabaseClient, createClient } from "@supabase/supabase-js"; 7 | 8 | export type CompanionKey = { 9 | companionName: string; 10 | modelName: string; 11 | userId: string; 12 | }; 13 | 14 | class MemoryManager { 15 | private static instance: MemoryManager; 16 | private history: Redis; 17 | private vectorDBClient: PineconeClient | SupabaseClient; 18 | 19 | public constructor() { 20 | this.history = Redis.fromEnv(); 21 | if (process.env.VECTOR_DB === "pinecone") { 22 | this.vectorDBClient = new PineconeClient(); 23 | } else { 24 | const auth = { 25 | detectSessionInUrl: false, 26 | persistSession: false, 27 | autoRefreshToken: false, 28 | }; 29 | const url = process.env.SUPABASE_URL!; 30 | const privateKey = 
process.env.SUPABASE_PRIVATE_KEY!; 31 | this.vectorDBClient = createClient(url, privateKey, { auth }); 32 | } 33 | } 34 | 35 | public async init() { 36 | if (this.vectorDBClient instanceof PineconeClient) { 37 | await this.vectorDBClient.init({ 38 | apiKey: process.env.PINECONE_API_KEY!, 39 | environment: process.env.PINECONE_ENVIRONMENT!, 40 | }); 41 | } 42 | } 43 | 44 | public async vectorSearch( 45 | recentChatHistory: string, 46 | companionFileName: string 47 | ) { 48 | if (process.env.VECTOR_DB === "pinecone") { 49 | console.log("INFO: using Pinecone for vector search."); 50 | const pineconeClient = <PineconeClient>this.vectorDBClient; 51 | 52 | const pineconeIndex = pineconeClient.Index( 53 | process.env.PINECONE_INDEX! || "" 54 | ); 55 | 56 | const vectorStore = await PineconeStore.fromExistingIndex( 57 | new OpenAIEmbeddings({ openAIApiKey: process.env.OPENAI_API_KEY }), 58 | { pineconeIndex } 59 | ); 60 | 61 | const similarDocs = await vectorStore 62 | .similaritySearch(recentChatHistory, 3, { fileName: companionFileName }) 63 | .catch((err) => { 64 | console.log("WARNING: failed to get vector search results.", err); 65 | }); 66 | return similarDocs; 67 | } else { 68 | console.log("INFO: using Supabase for vector search."); 69 | const supabaseClient = <SupabaseClient>this.vectorDBClient; 70 | const vectorStore = await SupabaseVectorStore.fromExistingIndex( 71 | new OpenAIEmbeddings({ openAIApiKey: process.env.OPENAI_API_KEY }), 72 | { 73 | client: supabaseClient, 74 | tableName: "documents", 75 | queryName: "match_documents", 76 | } 77 | ); 78 | const similarDocs = await vectorStore 79 | .similaritySearch(recentChatHistory, 3) 80 | .catch((err) => { 81 | console.log("WARNING: failed to get vector search results.", err); 82 | }); 83 | return similarDocs; 84 | } 85 | } 86 | 87 | public static async getInstance(): Promise<MemoryManager> { 88 | if (!MemoryManager.instance) { 89 | MemoryManager.instance = new MemoryManager(); 90 | await 
MemoryManager.instance.init();
91 |     }
92 |     return MemoryManager.instance;
93 |   }
94 | 
95 |   // Redis key uniquely identifying one (companion, model, user) conversation.
96 |   private generateRedisCompanionKey(companionKey: CompanionKey): string {
97 |     return `${companionKey.companionName}-${companionKey.modelName}-${companionKey.userId}`;
98 |   }
99 | 
100 |   public async writeToHistory(text: string, companionKey: CompanionKey) {
101 |     if (!companionKey || typeof companionKey.userId == "undefined") {
102 |       console.log("Companion key set incorrectly");
103 |       return "";
104 |     }
105 | 
106 |     const key = this.generateRedisCompanionKey(companionKey);
107 |     // Timestamp-scored sorted set keeps the history in insertion order.
108 |     const result = await this.history.zadd(key, {
109 |       score: Date.now(),
110 |       member: text,
111 |     });
112 | 
113 |     return result;
114 |   }
115 | 
116 |   public async readLatestHistory(companionKey: CompanionKey): Promise<string> {
117 |     if (!companionKey || typeof companionKey.userId == "undefined") {
118 |       console.log("Companion key set incorrectly");
119 |       return "";
120 |     }
121 | 
122 |     const key = this.generateRedisCompanionKey(companionKey);
123 |     const result = await this.history.zrange(key, 0, Date.now(), {
124 |       byScore: true,
125 |     });
126 | 
127 |     // zrange returns entries in ascending-score (chronological) order, so
128 |     // the most recent 30 can be joined directly. The previous
129 |     // slice().reverse() ... reverse() pair cancelled itself out and only
130 |     // obscured that intent.
131 |     return result.slice(-30).join("\n");
132 |   }
133 | 
134 |   // Seeds an empty conversation with the companion's canned seed chat.
135 |   // No-op when the user already has history for this key.
136 |   public async seedChatHistory(
137 |     seedContent: string,
138 |     delimiter: string = "\n",
139 |     companionKey: CompanionKey
140 |   ) {
141 |     const key = this.generateRedisCompanionKey(companionKey);
142 |     if (await this.history.exists(key)) {
143 |       console.log("User already has chat history");
144 |       return;
145 |     }
146 | 
147 |     const content = seedContent.split(delimiter);
148 |     let counter = 0;
149 |     for (const line of content) {
150 |       await this.history.zadd(key, { score: counter, member: line });
151 |       counter += 1;
152 |     }
153 |   }
154 | }
155 | 
156 | export default MemoryManager;
157 | 
-------------------------------------------------------------------------------- /src/app/utils/rateLimit.ts: 
-------------------------------------------------------------------------------- 1 | import { Ratelimit } from "@upstash/ratelimit"; 2 | import { Redis } from "@upstash/redis"; 3 | 4 | export async function rateLimit(identifier: string) { 5 | // Rate limit through Upstash 6 | const ratelimit = new Ratelimit({ 7 | redis: Redis.fromEnv(), 8 | limiter: Ratelimit.slidingWindow(10, "10 s"), 9 | analytics: true, 10 | prefix: "@upstash/ratelimit", 11 | }); 12 | return await ratelimit.limit(identifier); 13 | } 14 | -------------------------------------------------------------------------------- /src/components/ChatBlock.tsx: -------------------------------------------------------------------------------- 1 | /* 2 | * Represents a unit of multimodal chat: text, video, audio, or image. 3 | * 4 | * For streaming responses, just update the `text` argument. 5 | */ 6 | export function ChatBlock({text, mimeType, url} : { 7 | text?: string, 8 | mimeType?: string, 9 | url?: string 10 | }) { 11 | let internalComponent = <></> 12 | if (text) { 13 | internalComponent = <span>{text}</span> 14 | } else if (mimeType && url) { 15 | if (mimeType.startsWith("audio")) { 16 | internalComponent = <audio controls={true} src={url} /> 17 | } else if (mimeType.startsWith("video")) { 18 | internalComponent = <video controls width="250"> 19 | <source src={url} type={mimeType} /> 20 | Download the <a href={url}>video</a> 21 | </video> 22 | } else if (mimeType.startsWith("image")) { 23 | internalComponent = <img src={url} /> 24 | } 25 | } else if (url) { 26 | internalComponent = <a href={url}>Link</a> 27 | } 28 | 29 | return ( 30 | <p className="text-sm text-gray-200 pb-2"> 31 | {internalComponent} 32 | </p> 33 | ); 34 | } 35 | 36 | /* 37 | * Take a completion, which may be a string, JSON encoded as a string, or JSON object, 38 | * and produce a list of ChatBlock objects. 
This is intended to be a one-size-fits-all 39 | * method for funneling different LLM output into structure that supports different media 40 | * types and can easily grow to support more metadata (such as speaker). 41 | */ 42 | export function responseToChatBlocks(completion: any) { 43 | // First we try to parse completion as JSON in case we're dealing with an object. 44 | console.log("got completoin", completion, typeof completion) 45 | if (typeof completion == "string") { 46 | try { 47 | completion = JSON.parse(completion) 48 | } catch { 49 | // Do nothing; we'll just treat it as a string. 50 | console.log("Couldn't parse") 51 | } 52 | } 53 | let blocks = [] 54 | if (typeof completion == "string") { 55 | console.log("still string") 56 | blocks.push(<ChatBlock text={completion} />) 57 | } else if (Array.isArray(completion)) { 58 | console.log("Is array") 59 | for (let block of completion) { 60 | console.log(block) 61 | blocks.push(<ChatBlock {...block} />) 62 | } 63 | } else { 64 | blocks.push(<ChatBlock {...completion} />) 65 | } 66 | console.log(blocks) 67 | return blocks 68 | } 69 | 70 | -------------------------------------------------------------------------------- /src/components/Examples.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | import { useEffect, useState } from "react"; 3 | import QAModal from "./QAModal"; 4 | import Image from "next/image"; 5 | import { Tooltip } from "react-tooltip"; 6 | 7 | import { getCompanions } from "./actions"; 8 | 9 | export default function Examples() { 10 | const [QAModalOpen, setQAModalOpen] = useState(false); 11 | const [CompParam, setCompParam] = useState({ 12 | name: "", 13 | title: "", 14 | imageUrl: "", 15 | }); 16 | const [examples, setExamples] = useState([ 17 | { 18 | name: "", 19 | title: "", 20 | imageUrl: "", 21 | llm: "", 22 | phone: "", 23 | telegramLink: null 24 | }, 25 | ]); 26 | 27 | useEffect(() => { 28 | const fetchData = async () => { 29 | try 
{ 30 | const companions = await getCompanions(); 31 | let entries = JSON.parse(companions); 32 | let setme = entries.map((entry: any) => ({ 33 | name: entry.name, 34 | title: entry.title, 35 | imageUrl: entry.imageUrl, 36 | llm: entry.llm, 37 | phone: entry.phone, 38 | telegramLink: entry.telegramLink 39 | })); 40 | setExamples(setme); 41 | } catch (err) { 42 | console.log(err); 43 | } 44 | }; 45 | 46 | fetchData(); 47 | }, []); 48 | 49 | return ( 50 | <div id="ExampleDiv"> 51 | <QAModal 52 | open={QAModalOpen} 53 | setOpen={setQAModalOpen} 54 | example={CompParam} 55 | /> 56 | <ul 57 | role="list" 58 | className="mt-14 m-auto max-w-3xl grid grid-cols-1 gap-6 lg:grid-cols-2" 59 | > 60 | {examples.map((example, i) => ( 61 | <li 62 | key={example.name} 63 | onClick={() => { 64 | setCompParam(example); 65 | setQAModalOpen(true); 66 | }} 67 | className="col-span-2 flex flex-col rounded-lg bg-slate-800 text-center shadow relative ring-1 ring-white/10 cursor-pointer hover:ring-sky-300/70 transition" 68 | > 69 | <div className="absolute -bottom-px left-10 right-10 h-px bg-gradient-to-r from-sky-300/0 via-sky-300/70 to-sky-300/0"></div> 70 | <div className="flex flex-1 flex-col p-8"> 71 | <Image 72 | width={0} 73 | height={0} 74 | sizes="100vw" 75 | className="mx-auto h-32 w-32 flex-shrink-0 rounded-full" 76 | src={example.imageUrl} 77 | alt="" 78 | /> 79 | <h3 className="mt-6 text-sm font-medium text-white"> 80 | {example.name} 81 | </h3> 82 | <dl className="mt-1 flex flex-grow flex-col justify-between"> 83 | <dt className="sr-only"></dt> 84 | <dd className="text-sm text-slate-400"> 85 | {example.title}. Running on <b>{example.llm}</b>. 
86 | {example.telegramLink && ( 87 | <span className="ml-1"><a onClick={(event) => {event?.stopPropagation(); event?.preventDefault}} href={example.telegramLink}>Chat on <b>Telegram</b></a>.</span> 88 | )} 89 | </dd> 90 | </dl> 91 | <dl className="mt-1 flex flex-grow flex-col justify-between"> 92 | <dt className="sr-only"></dt> 93 | {isPhoneNumber(example.phone) && ( 94 | <> 95 | <dd 96 | data-tip="Helpful tip goes here" 97 | className="text-sm text-slate-400 inline-block" 98 | > 99 | 📱Text me at: <b>{example.phone}</b> 100 | 101 | <svg 102 | data-tooltip-id="help-tooltip" 103 | data-tooltip-content="Unlock this freature by clicking on 104 | your profile picture on the top right 105 | -> Manage Account -> Add a phone number." 106 | data-tooltip-target="tooltip-default" 107 | data-tip="Helpful tip goes here" 108 | className="w-[15px] h-[15px] text-slate-400 inline-block cursor-pointer" 109 | xmlns="http://www.w3.org/2000/svg" 110 | fill="currentColor" 111 | viewBox="0 0 20 20" 112 | > 113 | <path d="M10 .5a9.5 9.5 0 1 0 9.5 9.5A9.51 9.51 0 0 0 10 .5ZM9.5 4a1.5 1.5 0 1 1 0 3 1.5 1.5 0 0 1 0-3ZM12 15H8a1 1 0 0 1 0-2h1v-3H8a1 1 0 0 1 0-2h2a1 1 0 0 1 1 1v4h1a1 1 0 0 1 0 2Z" /> 114 | </svg> 115 | <Tooltip id="help-tooltip" /> 116 | </dd> 117 | </> 118 | )} 119 | </dl> 120 | </div> 121 | </li> 122 | ))} 123 | </ul> 124 | </div> 125 | ); 126 | } 127 | 128 | function isPhoneNumber(input: string): boolean { 129 | const phoneNumberRegex = /^\+\d{1,11}$/; 130 | return phoneNumberRegex.test(input); 131 | } 132 | -------------------------------------------------------------------------------- /src/components/InputCard.tsx: -------------------------------------------------------------------------------- 1 | export default function InputCard() { 2 | return ( 3 | <form className="mx-auto mt-16 flex max-w-3xl gap-x-4"> 4 | <label htmlFor="website-link" className="sr-only"> 5 | Link 6 | </label> 7 | <input 8 | autoFocus={true} 9 | id="website-link" 10 | name="link" 11 | type="url" 12 
/**
 * A centered form with a URL input and a "Chat" submit button.
 * Purely presentational — submit handling is left to the enclosing page.
 */
export default function InputCard() {
  // Tailwind class strings pulled out of the JSX for readability.
  const inputClasses =
    "min-w-0 flex-auto rounded-md border-0 bg-white/5 px-3.5 py-2 text-white shadow-sm ring-1 ring-inset ring-white/10 focus:ring-2 focus:ring-inset focus:ring-white sm:text-sm sm:leading-6";
  const buttonClasses =
    "flex-none rounded-md bg-white px-3.5 py-2.5 text-sm font-semibold text-gray-900 shadow-sm hover:bg-gray-100 focus-visible:outline focus-visible:outline-2 focus-visible:outline-offset-2 focus-visible:outline-white";

  return (
    <form className="mx-auto mt-16 flex max-w-3xl gap-x-4">
      {/* Screen-reader-only label for the URL field. */}
      <label htmlFor="website-link" className="sr-only">
        Link
      </label>
      <input
        id="website-link"
        name="link"
        type="url"
        autoComplete="url"
        autoFocus
        required
        placeholder="Enter a link to a blog"
        className={inputClasses}
      />
      <button type="submit" className={buttonClasses}>
        Chat
      </button>
    </form>
  );
}
/**
 * Fixed top navigation bar: project logo, "About" link, a GitHub star
 * button, and either the Clerk user menu (signed in) or a sign-in link.
 */
export default function Navbar() {
  const { userId } = auth();

  // The same logo is rendered twice: the first copy is visible below the
  // `lg` breakpoint, the second at or above it.
  const logoClassVariants = [
    "block h-8 w-auto lg:hidden rounded-lg",
    "hidden h-8 w-auto lg:block rounded-lg",
  ];

  return (
    <div className="bg-gray-900 w-full fixed top-0 z-10">
      <div className="mx-auto max-w-7xl px-2 sm:px-6 lg:px-8">
        <div className="relative flex h-16 items-center justify-between">
          <div className="flex flex-1 items-center justify-start">
            <div className="flex flex-shrink-0 items-center">
              {logoClassVariants.map((variant) => (
                <Image
                  key={variant}
                  width={0}
                  height={0}
                  sizes="100vw"
                  className={variant}
                  src="https://avatars.githubusercontent.com/u/745163?s=200&v=4"
                  alt="a16z"
                />
              ))}
            </div>
            <div className="ml-6">
              <div className="flex space-x-2 sm:space-x-4">
                {navigation.map((item) => (
                  <a
                    key={item.name}
                    href={item.href}
                    aria-current={item.current ? "page" : undefined}
                    className={classNames(
                      item.current
                        ? "bg-gray-900 text-white"
                        : "text-gray-300 hover:bg-gray-700 hover:text-white",
                      "rounded-md px-3 py-2 text-sm font-medium"
                    )}
                  >
                    {item.name}
                  </a>
                ))}
                {/* GitHub star button for the project repository. */}
                <div className="px-3 py-2 text-gray-300">
                  <iframe
                    src="https://ghbtns.com/github-btn.html?user=a16z-infra&repo=companion-app&type=star&count=true"
                    frameBorder="0"
                    scrolling="0"
                    width="150"
                    height="20"
                    title="GitHub"
                  ></iframe>
                </div>
              </div>
            </div>
          </div>
          {/* Right-hand side: Clerk user menu when signed in, else sign-in. */}
          <div className="absolute inset-y-0 right-0 flex items-center pr-2 sm:static sm:inset-auto sm:ml-6 sm:pr-0">
            {userId ? (
              <UserButton afterSignOutUrl="/" />
            ) : (
              <Link
                href="/sign-in"
                className="rounded-md bg-gray-800 py-2 px-3 text-sm font-medium text-white hover:bg-gray-700 focus:outline-none focus:ring-2 focus:ring-white focus:ring-offset-2 focus:ring-offset-gray-800"
              >
                Sign In
              </Link>
            )}
          </div>
        </div>
      </div>
    </div>
  );
}
/**
 * Modal chat dialog for a selected companion.
 *
 * Streams completions from `/api/<companion.llm>` via `useCompletion` and
 * renders the streamed text as multimodal chat blocks.
 *
 * Fixes vs. the original:
 * - no longer mutates the `example` prop (it reassigned the parameter with
 *   `new Object()`); a local fallback is used instead,
 * - removes the unreachable `if (!example)` check that followed the
 *   reassignment,
 * - `stroke-width` → `strokeWidth` (the kebab-case form is invalid JSX and
 *   triggers a React DOM warning).
 */
export default function QAModal({
  open,
  setOpen,
  example,
}: {
  open: boolean;
  setOpen: any;
  example: any;
}) {
  // Inert stub so useCompletion can initialize before a companion is
  // selected; using a local avoids mutating the caller's prop.
  const companion = example ?? { llm: "", name: "" };

  let {
    completion,
    input,
    isLoading,
    handleInputChange,
    handleSubmit,
    stop,
    setInput,
    setCompletion,
  } = useCompletion({
    api: "/api/" + companion.llm,
    headers: { name: companion.name },
  });

  let [blocks, setBlocks] = useState<any[] | null>(null);

  useEffect(() => {
    // When the completion changes, parse it to multimodal blocks for display.
    if (completion) {
      setBlocks(responseToChatBlocks(completion));
    } else {
      setBlocks(null);
    }
  }, [completion]);

  // Clear conversation state before closing so a reopened modal is fresh.
  const handleClose = () => {
    setInput("");
    setCompletion("");
    stop();
    setOpen(false);
  };

  return (
    <Transition.Root show={open} as={Fragment}>
      <Dialog as="div" className="relative z-10" onClose={handleClose}>
        <Transition.Child
          as={Fragment}
          enter="ease-out duration-300"
          enterFrom="opacity-0"
          enterTo="opacity-100"
          leave="ease-in duration-200"
          leaveFrom="opacity-100"
          leaveTo="opacity-0"
        >
          <div className="fixed inset-0 bg-gray-950 bg-opacity-75 transition-opacity" />
        </Transition.Child>

        <div className="fixed inset-0 z-10 overflow-y-auto">
          <div className="flex min-h-full items-end justify-center p-4 text-center sm:items-center sm:p-0">
            <Transition.Child
              as={Fragment}
              enter="ease-out duration-300"
              enterFrom="opacity-0 translate-y-4 sm:translate-y-0 sm:scale-95"
              enterTo="opacity-100 translate-y-0 sm:scale-100"
              leave="ease-in duration-200"
              leaveFrom="opacity-100 translate-y-0 sm:scale-100"
              leaveTo="opacity-0 translate-y-4 sm:translate-y-0 sm:scale-95"
            >
              <Dialog.Panel className="relative transform overflow-hidden rounded-lg bg-gray-800 px-4 pb-4 pt-5 text-left shadow-xl transition-all sm:my-8 sm:p-6 w-full max-w-3xl">
                <div>
                  <form onSubmit={handleSubmit}>
                    <input
                      placeholder="How's your day?"
                      className={
                        "w-full flex-auto rounded-md border-0 bg-white/5 px-3.5 py-2 shadow-sm focus:outline-none sm:text-sm sm:leading-6 " +
                        (isLoading && !completion
                          ? "text-gray-600 cursor-not-allowed"
                          : "text-white")
                      }
                      value={input}
                      onChange={handleInputChange}
                      disabled={isLoading && !blocks}
                    />
                  </form>
                  <div className="mt-3 sm:mt-5">
                    <div className="mt-2">
                      <p className="text-sm text-gray-500">
                        Chat with {companion.name}
                      </p>
                    </div>
                    {blocks && <div className="mt-2">{blocks}</div>}

                    {/* Spinner while waiting for the first response chunk. */}
                    {isLoading && !blocks && (
                      <p className="flex items-center justify-center mt-4">
                        <svg
                          className="animate-spin -ml-1 mr-3 h-5 w-5 text-white"
                          xmlns="http://www.w3.org/2000/svg"
                          fill="none"
                          viewBox="0 0 24 24"
                        >
                          <circle
                            className="opacity-25"
                            cx="12"
                            cy="12"
                            r="10"
                            stroke="currentColor"
                            strokeWidth="4"
                          ></circle>
                          <path
                            className="opacity-75"
                            fill="currentColor"
                            d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"
                          ></path>
                        </svg>
                      </p>
                    )}
                  </div>
                </div>
              </Dialog.Panel>
            </Transition.Child>
          </div>
        </div>
      </Dialog>
    </Transition.Root>
  );
}
/**
 * Modal that sends a text prompt to `/api/txt2img` (Stable Diffusion via
 * Replicate) on Enter and displays the first returned image.
 *
 * Fixes vs. the original:
 * - the loading spinner could get stuck forever: `setLoading(false)` only
 *   ran on the success path, so a failed fetch or JSON parse left
 *   `loading === true`. The request is now wrapped in try/catch/finally,
 * - a non-2xx response is reported instead of silently parsed,
 * - `stroke-width` → `strokeWidth` (invalid JSX attribute).
 *
 * NOTE(review): this module calls `dotenv.config` at module scope even
 * though it renders on the client, where dotenv does nothing useful —
 * left in place to preserve module side effects, but likely removable.
 */
export default function TextToImgModal({
  open,
  setOpen,
}: {
  open: boolean;
  setOpen: any;
}) {
  const [imgSrc, setImgSrc] = useState("");
  const [loading, setLoading] = useState(false);

  // Submit the prompt (taken from the input's current value) to the API.
  const onSubmit = async (e: any) => {
    e.preventDefault();
    setLoading(true);
    try {
      const response = await fetch("/api/txt2img", {
        method: "POST",
        body: JSON.stringify({
          prompt: e.target.value,
        }),
        headers: {
          "Content-Type": "application/json",
        },
      });
      if (!response.ok) {
        throw new Error(`txt2img request failed with status ${response.status}`);
      }
      const data = await response.json();
      // The API returns an array of image URLs; show the first one.
      setImgSrc(data[0]);
    } catch (err) {
      // Surface the failure rather than swallowing it silently.
      console.error(err);
    } finally {
      // Always clear the spinner, even when the request fails.
      setLoading(false);
    }
  };

  return (
    <Transition.Root show={open} as={Fragment}>
      <Dialog as="div" className="relative z-10" onClose={setOpen}>
        <Transition.Child
          as={Fragment}
          enter="ease-out duration-300"
          enterFrom="opacity-0"
          enterTo="opacity-100"
          leave="ease-in duration-200"
          leaveFrom="opacity-100"
          leaveTo="opacity-0"
        >
          <div className="fixed inset-0 bg-gray-950 bg-opacity-75 transition-opacity" />
        </Transition.Child>

        <div className="fixed inset-0 z-10 overflow-y-auto">
          <div className="flex min-h-full items-end justify-center p-4 text-center sm:items-center sm:p-0">
            <Transition.Child
              as={Fragment}
              enter="ease-out duration-300"
              enterFrom="opacity-0 translate-y-4 sm:translate-y-0 sm:scale-95"
              enterTo="opacity-100 translate-y-0 sm:scale-100"
              leave="ease-in duration-200"
              leaveFrom="opacity-100 translate-y-0 sm:scale-100"
              leaveTo="opacity-0 translate-y-4 sm:translate-y-0 sm:scale-95"
            >
              <Dialog.Panel className="relative transform overflow-hidden rounded-lg bg-gray-800 px-4 pb-4 pt-5 text-left shadow-xl transition-all sm:my-8 sm:p-6 w-full max-w-3xl">
                <div>
                  <input
                    className="w-full flex-auto rounded-md border-0 bg-white/5 px-3.5 py-2 text-white shadow-sm focus:outline-none sm:text-sm sm:leading-6"
                    placeholder="Describe the image you want"
                    // Submit when the user presses Enter.
                    onKeyDown={(e) => {
                      if (e.key === "Enter") {
                        onSubmit(e);
                      }
                    }}
                  ></input>
                  <div className="mt-3">
                    <div className="my-2">
                      <p className="text-sm text-gray-500">
                        Powered by{" "}
                        <a
                          className="underline"
                          href="https://replicate.com/stability-ai/stable-diffusion"
                        >
                          stability-ai/stable-diffusion
                        </a>
                      </p>
                    </div>
                  </div>
                </div>
                {imgSrc && !loading && (
                  <Image
                    width={0}
                    height={0}
                    sizes="100vw"
                    src={imgSrc}
                    alt="img"
                    className="w-full h-full object-contain"
                  />
                )}
                {/* Spinner while the image is being generated. */}
                {loading && (
                  <p className="flex items-center justify-center mt-4">
                    <svg
                      className="animate-spin -ml-1 mr-3 h-5 w-5 text-white"
                      xmlns="http://www.w3.org/2000/svg"
                      fill="none"
                      viewBox="0 0 24 24"
                    >
                      <circle
                        className="opacity-25"
                        cx="12"
                        cy="12"
                        r="10"
                        stroke="currentColor"
                        strokeWidth="4"
                      ></circle>
                      <path
                        className="opacity-75"
                        fill="currentColor"
                        d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"
                      ></path>
                    </svg>
                  </p>
                )}
              </Dialog.Panel>
            </Transition.Child>
          </div>
        </div>
      </Dialog>
    </Transition.Root>
  );
}
companions = []; 12 | // console.log("Loading companion descriptions from "+COMPFILE); 13 | var fs = require('fs'); 14 | const data = fs.readFileSync(COMPFILE); 15 | console.log(String(data)); 16 | // run a parse here to force a server side error if the JSON is improperly formatted 17 | // It's much more difficult to debug client side 18 | var js = JSON.parse(String(data)); 19 | return String(data); 20 | } -------------------------------------------------------------------------------- /src/middleware.ts: -------------------------------------------------------------------------------- 1 | import { authMiddleware } from "@clerk/nextjs"; 2 | 3 | // This requires user to sign in to see any page or call any API route 4 | 5 | // TODO - the public route list should only contain /api/text for production 6 | export default authMiddleware({ 7 | publicRoutes: ["/api(.*)"], 8 | }); 9 | 10 | export const config = { 11 | matcher: ["/((?!.*\\..*|_next).*)", "/", "/(api|trpc)(.*)"], 12 | }; 13 | -------------------------------------------------------------------------------- /src/scripts/exportToCharacter.mjs: -------------------------------------------------------------------------------- 1 | import { Redis } from "@upstash/redis"; 2 | import { PromptTemplate } from "langchain/prompts"; 3 | import { LLMChain } from "langchain/chains"; 4 | import { OpenAI } from "langchain/llms/openai"; 5 | 6 | import dotenv from "dotenv"; 7 | import fs from "fs/promises"; 8 | dotenv.config({ path: `.env.local` }); 9 | 10 | const COMPANION_NAME = process.argv[2]; 11 | const MODEL_NAME = process.argv[3]; 12 | const USER_ID = process.argv[4]; 13 | 14 | if (!!!COMPANION_NAME || !!!MODEL_NAME || !!!USER_ID) { 15 | throw new Error( 16 | "**Usage**: npm run export-to-character <COMPANION_NAME> <MODEL_NAME> <USER_ID>" 17 | ); 18 | } 19 | 20 | const data = await fs.readFile("companions/" + COMPANION_NAME + ".txt", "utf8"); 21 | const presplit = data.split("###ENDPREAMBLE###"); 22 | const preamble = 
// Export a companion's persona and recent chat history into files suitable
// for importing into character.ai.
//
// Usage: npm run export-to-character <COMPANION_NAME> <MODEL_NAME> <USER_ID>
//
// Fixes vs. the original:
// - a failed `chain.call` left `undefined` in `results`, so the later
//   `results[i].text` access crashed the whole export; failures now produce
//   a placeholder answer instead,
// - `fs.writeFile` was handed the raw history array where a string is
//   expected; the messages are now joined with newlines,
// - `!!!x` triple negation simplified to `!x`.
import { Redis } from "@upstash/redis";
import { PromptTemplate } from "langchain/prompts";
import { LLMChain } from "langchain/chains";
import { OpenAI } from "langchain/llms/openai";

import dotenv from "dotenv";
import fs from "fs/promises";
dotenv.config({ path: `.env.local` });

const COMPANION_NAME = process.argv[2];
const MODEL_NAME = process.argv[3];
const USER_ID = process.argv[4];

if (!COMPANION_NAME || !MODEL_NAME || !USER_ID) {
  throw new Error(
    "**Usage**: npm run export-to-character <COMPANION_NAME> <MODEL_NAME> <USER_ID>"
  );
}

// Companion files are structured as:
//   <preamble> ###ENDPREAMBLE### <seed chat> ###ENDSEEDCHAT### <background>
const data = await fs.readFile("companions/" + COMPANION_NAME + ".txt", "utf8");
const presplit = data.split("###ENDPREAMBLE###");
const preamble = presplit[0];
const seedsplit = presplit[1].split("###ENDSEEDCHAT###");
const seedChat = seedsplit[0];
const backgroundStory = seedsplit[1];
console.log(preamble, backgroundStory);

const history = new Redis({
  url: process.env.UPSTASH_REDIS_REST_URL,
  token: process.env.UPSTASH_REDIS_REST_TOKEN,
});

// Chat messages are stored in a sorted set scored by timestamp.
const upstashChatHistory = await history.zrange(
  `${COMPANION_NAME}-${MODEL_NAME}-${USER_ID}`,
  0,
  Date.now(),
  {
    byScore: true,
  }
);
// Only the most recent messages are fed to the model.
const recentChat = upstashChatHistory.slice(-30);

const model = new OpenAI({
  modelName: "gpt-3.5-turbo-16k",
  openAIApiKey: process.env.OPENAI_API_KEY,
});
model.verbose = true;

const chainPrompt = PromptTemplate.fromTemplate(`
### Background Story:
${preamble}

${backgroundStory}

### Chat history:
${seedChat}

...
${recentChat}


Above is someone whose name is ${COMPANION_NAME}'s story and their chat history with a human. Output answer to the following question. Return only the answer itself

{question}`);

const chain = new LLMChain({
  llm: model,
  prompt: chainPrompt,
});

// character.ai asks for a greeting plus short and long self-descriptions.
const questions = [
  `Greeting: What would ${COMPANION_NAME} say to start a conversation?`,
  `Short Description: In a few sentences, how would ${COMPANION_NAME} describe themselves?`,
  `Long Description: In a few sentences, how would ${COMPANION_NAME} describe themselves?`,
];

const results = await Promise.all(
  questions.map(async (question) => {
    try {
      return await chain.call({ question });
    } catch (error) {
      console.error(error);
      // Keep the slot so questions/results stay index-aligned.
      return null;
    }
  })
);

let output = "";
for (let i = 0; i < questions.length; i++) {
  // A failed LLM call leaves null above; the original crashed on `.text`.
  const answer = results[i]?.text ?? "(no answer — LLM call failed)";
  output += `*****${questions[i]}*****\n${answer}\n\n`;
}
output += `Definition (Advanced)\n${recentChat.join("\n")}`;

// writeFile expects a string/Buffer; join the message array with newlines.
await fs.writeFile(
  `${COMPANION_NAME}_chat_history.txt`,
  upstashChatHistory.join("\n")
);
await fs.writeFile(`${COMPANION_NAME}_character_ai_data.txt`, output);
// Build OpenAI embeddings for each companion's background text and insert
// them into Supabase (pgvector).
// Ref: https://js.langchain.com/docs/modules/indexes/vector_stores/integrations/supabase

import dotenv from "dotenv";
import { Document } from "langchain/document";
import { OpenAIEmbeddings } from "langchain/embeddings/openai";
import { SupabaseVectorStore } from "langchain/vectorstores/supabase";
import { createClient } from "@supabase/supabase-js";
import { CharacterTextSplitter } from "langchain/text_splitter";

import fs from "fs";
import path from "path";

dotenv.config({ path: `.env.local` });

// Every companion definition lives in companions/<Name>.txt.
const fileNames = fs.readdirSync("companions");
const splitter = new CharacterTextSplitter({
  separator: " ",
  chunkSize: 200,
  chunkOverlap: 50, //TODO: adjust both chunk size and chunk overlap later
});

// For each .txt companion file, index only the background section — the
// text after the last ###ENDSEEDCHAT### marker — split into overlapping
// chunks. Non-.txt entries resolve to `undefined` and are filtered below.
const langchainDocs = await Promise.all(
  fileNames.map(async (fileName) => {
    if (fileName.endsWith(".txt")) {
      const filePath = path.join("companions", fileName);
      const fileContent = fs.readFileSync(filePath, "utf8");
      const lastSection = fileContent.split("###ENDSEEDCHAT###").slice(-1)[0];
      const splitDocs = await splitter.createDocuments([lastSection]);
      return splitDocs.map((doc) => {
        return new Document({
          metadata: { fileName },
          pageContent: doc.pageContent,
        });
      });
    }
  })
);

// Batch script: no browser session handling needed on the Supabase client.
const auth = {
  detectSessionInUrl: false,
  persistSession: false,
  autoRefreshToken: false,
};

const client = createClient(
  process.env.SUPABASE_URL,
  process.env.SUPABASE_PRIVATE_KEY,
  { auth }
);

// Embed each chunk with OpenAI and insert into the `documents` table.
await SupabaseVectorStore.fromDocuments(
  langchainDocs.flat().filter((doc) => doc !== undefined),
  new OpenAIEmbeddings({ openAIApiKey: process.env.OPENAI_API_KEY }),
  {
    client,
    tableName: "documents",
  }
);
// Build OpenAI embeddings for each companion's background text and upsert
// them into a Pinecone index.
// Major ref: https://js.langchain.com/docs/modules/indexes/vector_stores/integrations/pinecone
import { PineconeClient } from "@pinecone-database/pinecone";
import dotenv from "dotenv";
import { Document } from "langchain/document";
import { OpenAIEmbeddings } from "langchain/embeddings/openai";
import { PineconeStore } from "langchain/vectorstores/pinecone";
import { CharacterTextSplitter } from "langchain/text_splitter";
import fs from "fs";
import path from "path";

dotenv.config({ path: `.env.local` });

// Every companion definition lives in companions/<Name>.txt.
const fileNames = fs.readdirSync("companions");
const splitter = new CharacterTextSplitter({
  separator: " ",
  chunkSize: 200,
  chunkOverlap: 50, //TODO: adjust both chunk size and chunk overlap later
});

// For each .txt companion file, index only the background section — the
// text after the last ###ENDSEEDCHAT### marker — split into overlapping
// chunks. Non-.txt entries resolve to `undefined` and are filtered below.
const langchainDocs = await Promise.all(
  fileNames.map(async (fileName) => {
    if (fileName.endsWith(".txt")) {
      const filePath = path.join("companions", fileName);
      const fileContent = fs.readFileSync(filePath, "utf8");
      // get the last section in the doc for background info
      const lastSection = fileContent.split("###ENDSEEDCHAT###").slice(-1)[0];
      const splitDocs = await splitter.createDocuments([lastSection]);
      return splitDocs.map((doc) => {
        return new Document({
          metadata: { fileName },
          pageContent: doc.pageContent,
        });
      });
    }
  })
);

// Connect to the Pinecone index named by the environment.
const client = new PineconeClient();
await client.init({
  apiKey: process.env.PINECONE_API_KEY,
  environment: process.env.PINECONE_ENVIRONMENT,
});
const pineconeIndex = client.Index(process.env.PINECONE_INDEX);

// Embed each chunk with OpenAI and upsert into the Pinecone index.
await PineconeStore.fromDocuments(
  langchainDocs.flat().filter((doc) => doc !== undefined),
  new OpenAIEmbeddings({ openAIApiKey: process.env.OPENAI_API_KEY }),
  {
    pineconeIndex,
  }
);
-------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es5", 4 | "lib": ["dom", "dom.iterable", "esnext"], 5 | "allowJs": true, 6 | "skipLibCheck": true, 7 | "strict": true, 8 | "forceConsistentCasingInFileNames": true, 9 | "noEmit": true, 10 | "esModuleInterop": true, 11 | "module": "esnext", 12 | "moduleResolution": "node", 13 | "resolveJsonModule": true, 14 | "isolatedModules": true, 15 | "jsx": "preserve", 16 | "incremental": true, 17 | "plugins": [ 18 | { 19 | "name": "next" 20 | } 21 | ], 22 | "paths": { 23 | "@/*": ["./src/*"] 24 | } 25 | }, 26 | "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts", "src/utils/whole.mjs"], 27 | "exclude": ["node_modules"] 28 | } 29 | --------------------------------------------------------------------------------