├── .editorconfig ├── .env.example ├── .eslintignore ├── .eslintrc.json ├── .github └── lint.yml ├── .gitignore ├── .prettierignore ├── .vscode └── settings.json ├── README.md ├── app ├── api │ ├── ingest │ │ ├── upload │ │ │ └── route.ts │ │ └── wikipedia │ │ │ └── route.ts │ └── query │ │ ├── route.ts │ │ └── stream │ │ └── route.ts ├── components │ ├── config-context.tsx │ ├── document-card.tsx │ ├── embedding-model.tsx │ ├── ingest-upload.tsx │ ├── ingest-wikipedia.tsx │ ├── query-config.tsx │ ├── query.tsx │ ├── sidebar.tsx │ └── store-config.tsx ├── layout.tsx └── page.tsx ├── components.json ├── components ├── cell.tsx ├── code-editor.tsx ├── icons.tsx ├── main-nav.tsx ├── monaco-theme.ts ├── site-header.tsx ├── tailwind-indicator.tsx ├── theme-provider.tsx ├── theme-toggle.tsx └── ui │ ├── badge.tsx │ ├── button.tsx │ ├── card.tsx │ ├── dialog.tsx │ ├── form.tsx │ ├── input.tsx │ ├── label.tsx │ ├── select.tsx │ ├── separator.tsx │ ├── slider.tsx │ ├── switch.tsx │ ├── table.tsx │ ├── tabs.tsx │ ├── textarea.tsx │ ├── toast.tsx │ ├── toaster.tsx │ └── use-toast.ts ├── config └── site.ts ├── lib ├── axgen-utils.ts ├── fonts.ts ├── ingest.ts ├── query.ts ├── types.ts └── utils.ts ├── next-env.d.ts ├── next.config.mjs ├── package-lock.json ├── package.json ├── postcss.config.js ├── prettier.config.js ├── prisma └── schema.prisma ├── public ├── axilla-logo-text-white.png ├── demo-screenshot.png ├── favicon.ico └── github-mark-white.png ├── styles └── globals.css ├── tailwind.config.js ├── tsconfig.json ├── tsconfig.tsbuildinfo └── types ├── nav.ts └── store.ts /.editorconfig: -------------------------------------------------------------------------------- 1 | # editorconfig.org 2 | root = true 3 | 4 | [*] 5 | charset = utf-8 6 | end_of_line = lf 7 | indent_size = 2 8 | indent_style = space 9 | insert_final_newline = true 10 | trim_trailing_whitespace = true 11 | -------------------------------------------------------------------------------- /.env.example: 
-------------------------------------------------------------------------------- 1 | ## Pinecone settings 2 | # Pinecone secret API key, e.g.: (not a real key) 3 | PINECONE_API_KEY= 4 | # Pinecone environment 5 | PINECONE_ENVIRONMENT=northamerica-northeast1-gcp 6 | # Pinecone index name, can be whatever you want. 7 | PINECONE_INDEX= 8 | # Pinecone vector dimensions. This will be coupled to the embedding model you use. 9 | # For example, OpenAI's text-embedding-ada-002 is 1536 dimensions. 10 | PINECONE_INDEX_DIMENSION=1536 11 | # Pinecone namespace, "default" by default. 12 | PINECONE_NAMESPACE=default 13 | 14 | ## OpenAI settings 15 | OPENAI_API_KEY=sk-fake 16 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | dist/* 2 | .cache 3 | public 4 | node_modules 5 | *.esm.js 6 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/eslintrc", 3 | "root": true, 4 | "extends": [ 5 | "next/core-web-vitals", 6 | "plugin:@typescript-eslint/recommended", 7 | "prettier", 8 | "plugin:tailwindcss/recommended" 9 | ], 10 | "plugins": ["tailwindcss", "@typescript-eslint"], 11 | "rules": { 12 | "@next/next/no-html-link-for-pages": "off", 13 | "react/jsx-key": "off", 14 | "tailwindcss/no-custom-classname": "off", 15 | "@typescript-eslint/ban-ts-comment": "off" 16 | }, 17 | "settings": { 18 | "tailwindcss": { 19 | "callees": ["cn"], 20 | "config": "tailwind.config.js" 21 | }, 22 | "next": { 23 | "rootDir": ["./"] 24 | } 25 | }, 26 | "overrides": [ 27 | { 28 | "files": ["*.ts", "*.tsx"], 29 | "parser": "@typescript-eslint/parser" 30 | } 31 | ] 32 | } 33 | -------------------------------------------------------------------------------- /.github/lint.yml: 
-------------------------------------------------------------------------------- 1 | name: Lint 2 | on: [pull_request] 3 | jobs: 4 | pr_lint: 5 | runs-on: ubuntu-22.04 6 | steps: 7 | - uses: actions/checkout@v3 8 | - name: setup node 9 | uses: actions/setup-node@v3 10 | with: 11 | node-version: 18 12 | - name: Run linter 13 | run: npm run lint 14 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | node_modules 5 | .pnp 6 | .pnp.js 7 | 8 | # testing 9 | coverage 10 | 11 | # next.js 12 | .next/ 13 | out/ 14 | build 15 | 16 | # misc 17 | .DS_Store 18 | *.pem 19 | 20 | # debug 21 | npm-debug.log* 22 | yarn-debug.log* 23 | yarn-error.log* 24 | .pnpm-debug.log* 25 | 26 | # local env files 27 | .env.local 28 | .env.development.local 29 | .env.test.local 30 | .env.production.local 31 | 32 | # turbo 33 | .turbo 34 | 35 | .contentlayer 36 | .env -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | cache 2 | .cache 3 | package.json 4 | package-lock.json 5 | public 6 | CHANGELOG.md 7 | .yarn 8 | dist 9 | node_modules 10 | .next 11 | build 12 | .contentlayer -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "typescript.tsdk": "../../node_modules/.pnpm/typescript@4.9.5/node_modules/typescript/lib", 3 | "typescript.enablePromptUseWorkspaceTsdk": true 4 | } 5 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Axilla demo UI 2 | 3 | ### [Demo
video 🎥](https://www.loom.com/share/458f9b6679b740f0a5c78a33fffee3dc) 4 | 5 | This demo UI showcases how to build RAG (retrieval augmented generation) workflows using the [axgen](https://github.com/axilla-io/axgen) library. 6 | 7 | ![Demo UI Screenshot](./public/demo-screenshot.png) 8 | 9 | The UI covers the usual flow, which has 2 separate parts: 10 | 11 | 1. Ingest documents into a vector store (this demo shows ingesting from files and from wikipedia, but you could plug in any data source) 12 | 2. Ask questions with augmented context for retrieval (by fetching chunks of the ingested documents to enrich the answer) 13 | 14 | You can easily toggle document inclusion on/off, to see the difference. The UI also shows the documents that were retrieved, which helps troubleshoot why the answer is what it is. 15 | 16 | [Axgen](https://github.com/axilla-io/axgen) is fully configurable, as this UI demonstrates. 17 | 18 | Please give us any feedback (bugs, requests, questions) at hello@axilla.io. We love talking to our users so don't be shy. 19 | 20 | ## Axilla 21 | 22 | At [Axilla](https://axilla.io), we are building an opinionated end-to-end framework to work with LLMs in TypeScript. 23 | Our first open source module is [axgen](https://github.com/axilla-io/axgen), focused on document ingestion and retrieval. Giving it a star ⭐️ is very helpful for our visibility, so we appreciate it if you can spare one! 24 | 25 | ## Usage 26 | 27 | This is a simple Next.js application that was tested using Node 18. 28 | 29 | ### Steps 30 | 31 | 1. Clone the repo: `git clone https://github.com/axilla-io/demo-ui.git` 32 | 2. Ensure you have the right environment variables setup: `cp .env.example .env` 33 | 3. Install packages: `npm i` 34 | 4. Run it: `npm run dev # will run on localhost:3300` 35 | 36 | ## License 37 | 38 | Licensed under the [MIT license](https://github.com/shadcn/ui/blob/main/LICENSE.md).
39 | -------------------------------------------------------------------------------- /app/api/ingest/upload/route.ts: -------------------------------------------------------------------------------- 1 | import { NextRequest, NextResponse } from 'next/server'; 2 | import { ingestFile } from '@/lib/ingest'; 3 | 4 | /** 5 | * POST /api/ingest/upload 6 | * Uploads a file, chunks it to the specified store 7 | */ 8 | export async function POST(request: NextRequest) { 9 | const formData = await request.formData(); 10 | const file = formData.get('file'); 11 | const storeName = formData.get('store'); 12 | const filename = formData.get('filename'); 13 | 14 | if (['pinecone', 'pgvector'].includes(storeName as string) === false) { 15 | return NextResponse.json({ error: 'Invalid store name' }, { status: 400 }); 16 | } 17 | 18 | if (!file || typeof file === 'string') { 19 | return NextResponse.json({ error: 'Error reading file' }, { status: 400 }); 20 | } 21 | 22 | // Convert file to Buffer 23 | const chunks: Buffer[] = []; 24 | for await (const chunk of file.stream()) { 25 | chunks.push(chunk as Buffer); 26 | } 27 | const fileContentBuffer = Buffer.concat(chunks); 28 | 29 | try { 30 | const content = fileContentBuffer.toString(); 31 | await ingestFile(storeName as 'pinecone' | 'pgvector', content, `file://${filename}`); // interpolate the uploaded file's name into the document URL 32 | return NextResponse.json({ content, store: storeName }, { status: 200 }); 33 | } catch { 34 | return NextResponse.json({ error: 'Error ingesting file' }, { status: 400 }); 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /app/api/ingest/wikipedia/route.ts: -------------------------------------------------------------------------------- 1 | import { NextRequest, NextResponse } from 'next/server'; 2 | import { ingestWikipedia } from '@/lib/ingest'; 3 | 4 | export async function POST(request: NextRequest) { 5 | const { term, store } = await request.json(); 6 | try { 7 | await ingestWikipedia(term, store); 8 |
} catch (error: unknown) { 9 | let message = 'Unknown Error'; 10 | if (error instanceof Error) message = error.message; 11 | 12 | return NextResponse.json({ error: message }, { status: 500 }); 13 | } 14 | 15 | return NextResponse.json({ term }, { status: 200 }); 16 | } 17 | -------------------------------------------------------------------------------- /app/api/query/route.ts: -------------------------------------------------------------------------------- 1 | import { NextRequest, NextResponse } from 'next/server'; 2 | import { queryCompletion, queryChat } from '@/lib/query'; 3 | 4 | export async function POST(request: NextRequest) { 5 | const { modelType, question, llmOnly, topK, model, temperature, maxTokens, store } = 6 | await request.json(); 7 | try { 8 | let response; 9 | switch (modelType) { 10 | case 'completion': 11 | response = await queryCompletion({ 12 | query: question, 13 | model, 14 | store, 15 | llmOnly, 16 | topK, 17 | temperature, 18 | maxTokens, 19 | }); 20 | break; 21 | case 'chat': 22 | response = await queryChat({ 23 | query: question, 24 | model, 25 | llmOnly, 26 | topK, 27 | temperature, 28 | store, 29 | maxTokens, 30 | }); 31 | break; 32 | default: 33 | return NextResponse.json({ error: 'Invalid model type' }, { status: 400 }); 34 | } 35 | return NextResponse.json({ response }, { status: 200 }); 36 | } catch (error: unknown) { 37 | let message = 'Unknown Error'; 38 | if (error instanceof Error) message = error.message; 39 | 40 | console.error(error); 41 | return NextResponse.json({ error: message }, { status: 500 }); 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /app/api/query/stream/route.ts: -------------------------------------------------------------------------------- 1 | import { NextRequest, NextResponse } from 'next/server'; 2 | import { queryChatStream, queryCompletionStream } from '@/lib/query'; 3 | // Separators recommended from 4 |
https://stackoverflow.com/questions/6319551/whats-the-best-separator-delimiter-characters-for-a-plaintext-db-file 5 | const JSON_STREAM_SEPARATOR = Uint8Array.from([0x1d]); 6 | const CONTENT_STREAM_SEPARATOR = Uint8Array.from([0x1e]); 7 | 8 | type ChatResponse = { 9 | choices: Array<{ 10 | delta: { 11 | content: string; 12 | }; 13 | }>; 14 | }; 15 | 16 | type InfoContext = { 17 | context?: Array; 18 | }; 19 | 20 | function iterableToStream(iterable: AsyncIterable, info: InfoContext) { 21 | const encoder = new TextEncoder(); 22 | return new ReadableStream({ 23 | async pull(controller) { 24 | for await (const value of iterable) { 25 | const chunk = typeof value === 'string' ? value : value.choices[0].delta.content; 26 | controller.enqueue(encoder.encode(chunk)); 27 | } 28 | 29 | controller.enqueue(CONTENT_STREAM_SEPARATOR); 30 | const documents = info.context; 31 | if (documents) { 32 | for (const document of documents) { 33 | controller.enqueue(encoder.encode(JSON.stringify(document))); 34 | controller.enqueue(JSON_STREAM_SEPARATOR); 35 | } 36 | } 37 | 38 | controller.close(); 39 | }, 40 | }); 41 | } 42 | 43 | type RequestBody = { 44 | question: string; 45 | model: string; 46 | store: string; 47 | llmOnly: boolean; 48 | temperature: number; 49 | maxTokens: number; 50 | topK: number; 51 | }; 52 | 53 | export async function handleCompletion(body: RequestBody) { 54 | const { question, llmOnly, topK, model, temperature, maxTokens, store } = body; 55 | const { result: iterable, info } = await queryCompletionStream({ 56 | query: question, 57 | model: model, 58 | store, 59 | llmOnly, 60 | temperature, 61 | maxTokens, 62 | topK, 63 | }); 64 | const stream = iterableToStream(iterable, info); 65 | return new Response(stream); 66 | } 67 | 68 | export async function handleChat(body: RequestBody) { 69 | const { question, llmOnly, topK, model, temperature, maxTokens, store } = body; 70 | const { result: iterable, info } = await queryChatStream({ 71 | query: question, 72 | model: 
model, 73 | store, 74 | llmOnly, 75 | temperature, 76 | maxTokens, 77 | topK, 78 | }); 79 | const stream = iterableToStream(iterable, info); 80 | return new Response(stream); 81 | } 82 | 83 | export async function POST(request: NextRequest) { 84 | const body = await request.json(); 85 | 86 | switch (body.modelType) { 87 | case 'completion': 88 | return handleCompletion(body); 89 | 90 | case 'chat': 91 | return handleChat(body); 92 | 93 | default: 94 | return NextResponse.json({ error: 'Invalid model type' }, { status: 400 }); 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /app/components/config-context.tsx: -------------------------------------------------------------------------------- 1 | 'use client'; 2 | import { createContext, useContext, useState, ReactNode } from 'react'; 3 | 4 | export interface ConfigInterface { 5 | chatModel: string; 6 | setChatModel: (model: string) => void; 7 | completionModel: string; 8 | setCompletionModel: (model: string) => void; 9 | temperature: number; 10 | setTemperature: (temperature: number) => void; 11 | topK: number; 12 | setTopK: (topK: number) => void; 13 | store: string; 14 | setStore: (store: string) => void; 15 | embeddingModel: string; 16 | setEmbeddingModel: (embeddingModel: string) => void; 17 | includeDocs: boolean; 18 | setIncludeDocs: (includeDocs: boolean) => void; 19 | dimensions: number; 20 | maxTokens: number; 21 | setMaxTokens: (maxTokens: number) => void; 22 | } 23 | 24 | const notImplemented = () => { 25 | throw new Error('Not implemented'); 26 | }; 27 | 28 | export const DEFAULT_COMPLETION_MODEL = 'text-davinci-003'; 29 | export const DEFAULT_CHAT_MODEL = 'gpt-4'; 30 | 31 | const ConfigContext = createContext({ 32 | // Default values 33 | completionModel: '', // we default to chat models 34 | setCompletionModel: notImplemented, 35 | chatModel: DEFAULT_CHAT_MODEL, 36 | setChatModel: notImplemented, 37 | temperature: 0, 38 | setTemperature: notImplemented, 
39 | topK: 2, 40 | setTopK: notImplemented, 41 | store: 'pinecone', 42 | setStore: notImplemented, 43 | embeddingModel: 'text-embedding-ada-002', 44 | setEmbeddingModel: notImplemented, 45 | includeDocs: true, 46 | setIncludeDocs: notImplemented, 47 | dimensions: 1536, 48 | maxTokens: 150, 49 | setMaxTokens: notImplemented, 50 | }); 51 | 52 | export const useConfig = () => { 53 | return useContext(ConfigContext); 54 | }; 55 | 56 | export const ConfigProvider = ({ children }: { children: ReactNode }): JSX.Element => { 57 | const [completionModel, setCompletionModel] = useState(''); 58 | const [chatModel, setChatModel] = useState(DEFAULT_CHAT_MODEL); 59 | const [temperature, setTemperature] = useState(0); 60 | const [topK, setTopK] = useState(2); 61 | const [store, setStore] = useState('pinecone'); 62 | const [embeddingModel, setEmbeddingModel] = useState('text-embedding-ada-002'); 63 | const [includeDocs, setIncludeDocs] = useState(true); 64 | const [maxTokens, setMaxTokens] = useState(150); 65 | 66 | const context: ConfigInterface = { 67 | completionModel: completionModel, 68 | setCompletionModel: setCompletionModel, 69 | chatModel: chatModel, 70 | setChatModel: setChatModel, 71 | temperature, 72 | setTemperature, 73 | topK, 74 | setTopK, 75 | store, 76 | setStore, 77 | embeddingModel, 78 | setEmbeddingModel, 79 | includeDocs, 80 | setIncludeDocs, 81 | // This is fixed. 
82 | dimensions: 1536, 83 | maxTokens, 84 | setMaxTokens, 85 | }; 86 | 87 | return {children}; 88 | }; 89 | -------------------------------------------------------------------------------- /app/components/document-card.tsx: -------------------------------------------------------------------------------- 1 | import type { ContextDocument } from '@/lib/types'; 2 | import { Card, CardContent, CardFooter, CardHeader, CardTitle } from '@/components/ui/card'; 3 | import { 4 | Dialog, 5 | DialogContent, 6 | DialogDescription, 7 | DialogHeader, 8 | DialogTitle, 9 | DialogTrigger, 10 | } from '@/components/ui/dialog'; 11 | 12 | export default function DocumentCard({ 13 | document, 14 | idx, 15 | }: { 16 | document: ContextDocument; 17 | idx: number; 18 | }) { 19 | return ( 20 | 21 | 22 | 23 | 24 | Chunk #{idx + 1} 25 | 26 | 27 | {document.chunk.text.slice(0, 150)}... 28 | 29 | {document.similarity && ( 30 | 31 |

similarity:

32 |
{document.similarity}
33 |
34 | )} 35 |
36 |
37 | 38 | 39 | Chunk #{idx + 1} 40 | 41 | Source:{' '} 42 | 43 | {document.chunk.url} 44 | 45 | 46 | 47 |
{document.chunk.text}
48 |
49 |
50 | ); 51 | } 52 | -------------------------------------------------------------------------------- /app/components/embedding-model.tsx: -------------------------------------------------------------------------------- 1 | 'use client'; 2 | import { useConfig } from '@/app/components/config-context'; 3 | import { 4 | Select, 5 | SelectContent, 6 | SelectItem, 7 | SelectTrigger, 8 | SelectValue, 9 | } from '@/components/ui/select'; 10 | import { Label } from '@/components/ui/label'; 11 | 12 | export function EmbeddingModel() { 13 | const { embeddingModel, setEmbeddingModel } = useConfig(); 14 | 15 | return ( 16 |
17 |
18 | 19 | 27 |
28 |
29 | ); 30 | } 31 | -------------------------------------------------------------------------------- /app/components/ingest-upload.tsx: -------------------------------------------------------------------------------- 1 | 'use client'; 2 | import { useState } from 'react'; 3 | import { SubmitHandler, useForm } from 'react-hook-form'; 4 | import { Input } from '@/components/ui/input'; 5 | import { Button } from '@/components/ui/button'; 6 | import { Label } from '@/components/ui/label'; 7 | import { useConfig } from '@/app/components/config-context'; 8 | import { toast } from '@/components/ui/use-toast'; 9 | import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card'; 10 | 11 | export function IngestDocumentUpload() { 12 | const { store } = useConfig(); 13 | const [ingesting, setIngesting] = useState(false); 14 | interface IFormInput { 15 | document: FileList; 16 | } 17 | const { 18 | register, 19 | handleSubmit, 20 | formState: { errors }, 21 | } = useForm(); 22 | 23 | const onUpload: SubmitHandler = async (data) => { 24 | setIngesting(true); 25 | 26 | const firstDoc = data.document[0]; 27 | const formData = new FormData(); 28 | formData.append('file', firstDoc); 29 | formData.append('store', store); 30 | formData.append('filename', firstDoc.name); 31 | 32 | const response = await window.fetch('/api/ingest/upload', { 33 | method: 'POST', 34 | body: formData, 35 | }); 36 | const responseData = await response.json(); 37 | 38 | if (responseData.error) { 39 | toast({ 40 | variant: 'destructive', 41 | title: 'Error ingesting', 42 | description:

{responseData.error}

, 43 | }); 44 | } else { 45 | toast({ 46 | title: `Uploaded file`, 47 | description:

{firstDoc.name}

, 48 | }); 49 | } 50 | setIngesting(false); 51 | }; 52 | 53 | const validateFile = (file: FileList) => { 54 | if (file.length !== 1) { 55 | return false; 56 | } 57 | 58 | const filename = file[0].name; 59 | if (!filename.endsWith('.md') && !filename.endsWith('.mdx')) { 60 | return false; 61 | } 62 | return true; 63 | }; 64 | 65 | return ( 66 |
67 | 68 | 69 | Upload a file 70 | 71 | Select a markdown file to upload. It will get chunked and ingested into the store. 72 | 73 | 74 | 75 |
76 |
77 |
78 | 81 | 86 |
87 |

88 | We currently only support markdown files (.md) 89 |

90 | {errors.document?.type === 'required' && ( 91 |

This field is required

92 | )} 93 | {errors.document?.type === 'validate' && ( 94 |

Please upload a markdown file

95 | )} 96 |
97 | 98 |
99 | 102 |
103 |
104 |
105 |
106 |
107 | ); 108 | } 109 | -------------------------------------------------------------------------------- /app/components/ingest-wikipedia.tsx: -------------------------------------------------------------------------------- 1 | 'use client'; 2 | import { useState } from 'react'; 3 | import { zodResolver } from '@hookform/resolvers/zod'; 4 | import { useConfig } from '@/app/components/config-context'; 5 | import { useForm } from 'react-hook-form'; 6 | import * as z from 'zod'; 7 | import { 8 | Form, 9 | FormControl, 10 | FormField, 11 | FormItem, 12 | FormLabel, 13 | FormMessage, 14 | } from '@/components/ui/form'; 15 | 16 | import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card'; 17 | import { Input } from '@/components/ui/input'; 18 | import { Button } from '@/components/ui/button'; 19 | import { toast } from '@/components/ui/use-toast'; 20 | 21 | export function IngestWikipedia() { 22 | const [loading, setLoading] = useState(false); 23 | const { store } = useConfig(); 24 | 25 | const WikipediaFormSchema = z.object({ 26 | term: z.string({ 27 | required_error: 'Please select a term to search wikipedia for', 28 | }), 29 | }); 30 | 31 | const wikipediaForm = useForm>({ 32 | resolver: zodResolver(WikipediaFormSchema), 33 | defaultValues: { term: 'San Francisco' }, 34 | }); 35 | 36 | async function onSubmitWikipedia(data: z.infer) { 37 | setLoading(true); 38 | const response = await fetch('/api/ingest/wikipedia', { 39 | method: 'POST', 40 | headers: { 'Content-type': 'application/json' }, 41 | body: JSON.stringify({ term: data.term, store }), 42 | }); 43 | const json = await response.json(); 44 | 45 | if (json.error) { 46 | toast({ 47 | variant: 'destructive', 48 | title: 'Error ingesting', 49 | description:

{json.error}

, 50 | }); 51 | } else { 52 | toast({ 53 | title: 'Ingested wikipedia page', 54 | description:

{data.term}

, 55 | }); 56 | } 57 | setLoading(false); 58 | } 59 | 60 | return ( 61 |
62 | 63 | 64 | Ingest from wikipedia 65 | 66 | Simply enter a wikipedia term and the app will fetch it, chunk it, and ingest the chunks 67 | into the store. 68 | 69 | 70 | 71 |
72 | 73 | ( 77 | 78 |
79 | wikipedia term 80 | 81 | 82 | 83 |
84 | 85 |
86 | )} 87 | /> 88 |
89 | 92 |
93 | 94 | 95 |
96 |
97 |
98 | ); 99 | } 100 | -------------------------------------------------------------------------------- /app/components/query-config.tsx: -------------------------------------------------------------------------------- 1 | 'use client'; 2 | import { ChangeEvent } from 'react'; 3 | import { 4 | useConfig, 5 | DEFAULT_CHAT_MODEL, 6 | DEFAULT_COMPLETION_MODEL, 7 | } from '@/app/components/config-context'; 8 | import { 9 | Select, 10 | SelectContent, 11 | SelectItem, 12 | SelectTrigger, 13 | SelectValue, 14 | } from '@/components/ui/select'; 15 | import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs'; 16 | import { Input } from '@/components/ui/input'; 17 | import { Label } from '@/components/ui/label'; 18 | import { Slider } from '@/components/ui/slider'; 19 | import { toast } from '@/components/ui/use-toast'; 20 | import { Switch } from '@/components/ui/switch'; 21 | 22 | export function QueryConfigForm() { 23 | const { completionModel, chatModel, topK, temperature, includeDocs, maxTokens } = useConfig(); 24 | const { 25 | setCompletionModel, 26 | setChatModel, 27 | setTopK, 28 | setTemperature, 29 | setIncludeDocs, 30 | setMaxTokens, 31 | } = useConfig(); 32 | 33 | const sendToast = (key: string, value: string) => { 34 | return toast({ 35 | title: `Updated query config ${key}`, 36 | description: `${key} = ${value}`, 37 | }); 38 | }; 39 | 40 | const validateAndSubmitTopK = (e: ChangeEvent) => { 41 | const value = e.target.value; 42 | if (isNaN(Number(value))) { 43 | return; 44 | } 45 | if (Number(value) < 0 || Number(value) > 10) { 46 | return; 47 | } 48 | setTopK(Number(value)); 49 | sendToast('topK', value); 50 | }; 51 | 52 | const validateAndSubmitMaxTokens = (e: ChangeEvent) => { 53 | const value = e.target.value; 54 | if (isNaN(Number(value))) { 55 | return; 56 | } 57 | if (Number(value) < 0 || Number(value) > 1000) { 58 | return; 59 | } 60 | setMaxTokens(Number(value)); 61 | sendToast('maxTokens', value); 62 | }; 63 | 64 | const 
updateConfig = (value: string) => { 65 | if (value === 'chat') { 66 | setChatModel(DEFAULT_CHAT_MODEL); 67 | setCompletionModel(''); 68 | } else if (value === 'completion') { 69 | setCompletionModel(DEFAULT_COMPLETION_MODEL); 70 | setChatModel(''); 71 | } else { 72 | throw new Error('Invalid model type'); 73 | } 74 | }; 75 | 76 | return ( 77 | 78 |
79 |
80 | 81 | 82 | Chat 83 | Completion 84 | 85 |
86 |
87 | 88 |
89 | 90 | 105 |
106 |
107 | 108 |
109 | 110 | 126 |
127 |
128 |
129 | 130 |
131 | 132 | 133 |
134 | 135 |
136 | 137 |

{temperature}

138 | { 143 | setTemperature(e[0]), sendToast('temperature', e[0].toString()); 144 | }} 145 | /> 146 |
147 | 148 |
149 | 150 |

{includeDocs}

151 | { 154 | setIncludeDocs(e); 155 | sendToast('include docs', e.toString()); 156 | }} 157 | /> 158 |
159 | 160 | {includeDocs && ( 161 |
162 | 163 | 164 |
165 | )} 166 |
167 |
168 | ); 169 | } 170 | -------------------------------------------------------------------------------- /app/components/query.tsx: -------------------------------------------------------------------------------- 1 | 'use client'; 2 | import { useState } from 'react'; 3 | import { useConfig } from '@/app/components/config-context'; 4 | import { zodResolver } from '@hookform/resolvers/zod'; 5 | import { useForm } from 'react-hook-form'; 6 | import * as z from 'zod'; 7 | import type { ContextDocument } from '@/lib/types'; 8 | import { 9 | Form, 10 | FormControl, 11 | FormDescription, 12 | FormField, 13 | FormItem, 14 | FormLabel, 15 | FormMessage, 16 | } from '@/components/ui/form'; 17 | import { Textarea } from '@/components/ui/textarea'; 18 | import { Button } from '@/components/ui/button'; 19 | import { Label } from '@/components/ui/label'; 20 | import DocumentCard from '@/app/components/document-card'; 21 | 22 | const JSON_SEPARATOR_CODE_POINT = String.fromCharCode(parseInt('1d', 16)); 23 | const CONTENT_SEPARATOR_CODE_POINT = String.fromCharCode(parseInt('1e', 16)); 24 | 25 | export function QueryWidget() { 26 | const [response, setResponse] = useState(''); 27 | const [querying, setQuerying] = useState(false); 28 | const [docs, setDocs] = useState>([]); 29 | const { topK, temperature, completionModel, chatModel, includeDocs, store, maxTokens } = 30 | useConfig(); 31 | 32 | const QuerySchema = z.object({ 33 | prompt: z 34 | .string({ 35 | required_error: 'Please write your question', 36 | }) 37 | .min(10, { message: 'The minimum prompt length is 10 characters.' }) 38 | .max(3000, { message: 'The maximum prompt length is 3000 characters.' }), 39 | }); 40 | 41 | const queryForm = useForm>({ 42 | resolver: zodResolver(QuerySchema), 43 | }); 44 | 45 | async function onSubmit(data: z.infer) { 46 | setQuerying(true); 47 | setResponse(''); 48 | setDocs([]); 49 | 50 | const modelType = chatModel ? 
'chat' : 'completion'; 51 | const res = await fetch('/api/query/stream', { 52 | method: 'POST', 53 | body: JSON.stringify({ 54 | modelType: modelType, 55 | question: data.prompt, 56 | llmOnly: !includeDocs, 57 | topK, 58 | temperature, 59 | model: modelType === 'chat' ? chatModel : completionModel, 60 | store, 61 | maxTokens, 62 | }), 63 | }); 64 | 65 | if (!res?.body) { 66 | return; 67 | } 68 | 69 | const reader = res.body.getReader(); 70 | const chunks = []; 71 | 72 | while (true) { 73 | const { done, value } = await reader.read(); 74 | 75 | if (done) { 76 | const finishedChunks = new Uint8Array( 77 | ([] as number[]).concat(...chunks.map((chunk) => Array.from(chunk))) 78 | ); 79 | const decodedData = new TextDecoder().decode(finishedChunks); 80 | const chainedDocuments = decodedData.split(CONTENT_SEPARATOR_CODE_POINT)[1]; 81 | const documents = chainedDocuments?.split(JSON_SEPARATOR_CODE_POINT); 82 | 83 | setDocs(documents?.filter((doc) => doc).map((doc) => JSON.parse(doc))); 84 | break; 85 | } 86 | 87 | chunks.push(value); 88 | const concatenatedChunks = new Uint8Array( 89 | ([] as number[]).concat(...chunks.map((chunk) => Array.from(chunk))) 90 | ); 91 | 92 | const decodedData = new TextDecoder().decode(concatenatedChunks); 93 | const text = decodedData.split(CONTENT_SEPARATOR_CODE_POINT)[0]; 94 | 95 | setResponse(text); 96 | } 97 | 98 | setQuerying(false); 99 | } 100 | 101 | return ( 102 |
103 |
104 |

105 | Ask a question 106 |

107 |
108 |
109 | 110 | ( 114 | 115 |
116 | Question 117 | 118 |