├── .env.example
├── .eslintrc.json
├── .gitignore
├── .vscode
├── launch.json
└── settings.json
├── README.md
├── globals.d.ts
├── next.config.js
├── package-lock.json
├── package.json
├── public
├── favicon.ico
├── next.svg
├── thirteen.svg
└── vercel.svg
├── src
├── crawler.ts
├── pages
│ ├── _document.tsx
│ ├── api
│ │ ├── chat.ts
│ │ ├── conversationLog.ts
│ │ ├── crawl.ts
│ │ ├── matches.ts
│ │ ├── summarizer.ts
│ │ └── templates.ts
│ ├── index.tsx
│ └── test.tsx
├── styles
│ └── globals.css
├── types
│ └── supabase.ts
└── utils
│ ├── supabaseAdmin.ts
│ └── supabaseBrowser.ts
├── supabase
├── .gitignore
├── config.toml
├── migrations
│ ├── 20230612070906_langchain.sql
│ └── 20230613075114_dbinit.sql
└── seed.sql
└── tsconfig.json
/.env.example:
--------------------------------------------------------------------------------
1 | OPENAI_API_KEY=
2 |
3 | NEXT_PUBLIC_SUPABASE_URL=
4 | NEXT_PUBLIC_SUPABASE_ANON_KEY=
5 | SUPABASE_SERVICE_ROLE_KEY=
6 |
7 | API_ROOT=
--------------------------------------------------------------------------------
/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "next/core-web-vitals"
3 | }
4 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 |
3 | # dependencies
4 | /node_modules
5 | /.pnp
6 | .pnp.js
7 |
8 | # testing
9 | /coverage
10 |
11 | # next.js
12 | /.next/
13 | /out/
14 |
15 | # production
16 | /build
17 |
18 | # misc
19 | .DS_Store
20 | *.pem
21 |
22 | # debug
23 | npm-debug.log*
24 | yarn-debug.log*
25 | yarn-error.log*
26 | .pnpm-debug.log*
27 |
28 | # local env files
29 | .env*.local
30 |
31 | # vercel
32 | .vercel
33 |
34 | # typescript
35 | *.tsbuildinfo
36 | next-env.d.ts
37 | .env
--------------------------------------------------------------------------------
/.vscode/launch.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "0.2.0",
3 | "configurations": [
4 | {
5 | "name": "Next.js: debug server-side",
6 | "type": "node-terminal",
7 | "request": "launch",
8 | "command": "npm run dev"
9 | },
10 | {
11 | "name": "Next.js: debug client-side",
12 | "type": "chrome",
13 | "request": "launch",
14 | "url": "http://localhost:3000"
15 | },
16 | {
17 | "name": "Next.js: debug full stack",
18 | "type": "node-terminal",
19 | "request": "launch",
20 | "command": "npm run dev",
21 | "serverReadyAction": {
22 | "pattern": "started server on .+, url: (https?://.+)",
23 | "uriFormat": "%s",
24 | "action": "debugWithChrome"
25 | }
26 | }
27 | ]
28 | }
29 |
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "dotenv.enableAutocloaking": false
3 | }
4 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Supabase Langchain Chatbot Demo
2 |
3 | To run this demo, you need to have:
4 |
5 | 1. An OpenAI account. If you don't have one, you can sign up for free at [openai.com](https://www.openai.com).
6 | 2. [Optional] A [Supabase account](https://app.supabase.io/register). Only needed if you want to use the hosted Supabase service.
7 |
8 | ## Setup
9 |
10 | 1. Clone this repository
11 |
12 | ```bash
13 | git clone git@github.com:thorwebdev/langchain-chatbot-demo.git
14 | ```
15 |
16 | 2. Install dependencies
17 |
18 | ```bash
19 | cd langchain-chatbot-demo
20 | npm install
21 | ```
22 |
23 | 3. Start Supabase
24 |
25 | ```bash
26 | supabase start
27 | ```
28 |
29 | 4. Create a `.env` file in the root directory of the project and add your API keys:
30 |
31 | ```
32 | OPENAI_API_KEY=...
33 | NEXT_PUBLIC_SUPABASE_URL=...
34 | NEXT_PUBLIC_SUPABASE_ANON_KEY=...
35 | SUPABASE_SERVICE_ROLE_KEY=...
36 | API_ROOT="http://localhost:3000"
37 | ```
38 |
39 | When running Supabase locally you can run `supabase status` to get the local credentials.
40 |
41 | 5. [Optional] generate types (only needed after making db schema changes)
42 |
43 | ```bash
44 | npx supabase gen types typescript --local --schema public > src/types/supabase.ts
45 | ```
46 |
47 | ## Start the development server
48 |
49 | ```bash
50 | npm run dev
51 | ```
52 |
--------------------------------------------------------------------------------
/globals.d.ts:
--------------------------------------------------------------------------------
1 | declare module '*.css';
2 |
--------------------------------------------------------------------------------
/next.config.js:
--------------------------------------------------------------------------------
1 | /** @type {import('next').NextConfig} */
2 | const nextConfig = {
3 | reactStrictMode: true,
4 | publicRuntimeConfig: {
5 | apiUrl: process.env.API_ROOT || "http://localhost:3000",
6 | },
7 | };
8 |
9 | module.exports = nextConfig;
10 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "chatbot",
3 | "version": "0.1.0",
4 | "private": true,
5 | "scripts": {
6 | "dev": "NODE_INSPECT=true & next dev",
7 | "build": "next build",
8 | "start": "next start",
9 | "lint": "next lint"
10 | },
11 | "dependencies": {
12 | "@chatscope/chat-ui-kit-react": "^1.10.1",
13 | "@langchain/community": "^0.0.30",
14 | "@langchain/core": "^0.1.30",
15 | "@langchain/openai": "^0.0.14",
16 | "@next/font": "13.1.6",
17 | "@supabase/auth-helpers-nextjs": "^0.7.2",
18 | "@supabase/auth-ui-react": "^0.4.2",
19 | "@supabase/auth-ui-shared": "^0.1.6",
20 | "@supabase/supabase-js": "^2.24.0",
21 | "bottleneck": "^2.19.5",
22 | "cheerio": "^1.0.0-rc.12",
23 | "langchain": "^0.1.20",
24 | "next": "13.1.6",
25 | "node-spider": "^1.4.1",
26 | "react": "18.2.0",
27 | "react-dom": "18.2.0",
28 | "react-markdown": "^8.0.5",
29 | "rehype-katex": "^6.0.2",
30 | "remark-math": "^5.1.1",
31 | "timeago": "^1.6.7",
32 | "timeago.js": "^4.0.2",
33 | "turndown": "^7.1.1",
34 | "url-parse": "^1.5.10"
35 | },
36 | "devDependencies": {
37 | "@types/async": "^3.2.18",
38 | "@types/node": "^18.16.11",
39 | "@types/react": "18.0.28",
40 | "@types/react-dom": "18.0.11",
41 | "@types/url-parse": "^1.4.8",
42 | "eslint": "8.34.0",
43 | "eslint-config-next": "13.1.6",
44 | "typescript": "4.9.5"
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/supabase-community/langchain-chatbot-demo/59a381ed330495c32c118e80422bf18bd22f43a8/public/favicon.ico
--------------------------------------------------------------------------------
/public/next.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/public/thirteen.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/public/vercel.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/crawler.ts:
--------------------------------------------------------------------------------
1 | //@ts-ignore
2 | import * as Spider from 'node-spider'
3 | //@ts-ignore
4 | import * as TurndownService from 'turndown'
5 | import * as cheerio from 'cheerio'
6 | import parse from 'url-parse'
7 | const turndownService = new TurndownService();
8 |
9 | export type Page = {
10 | url: string,
11 | text: string,
12 | title: string,
13 | }
14 | class Crawler {
15 | pages: Page[] = [];
16 | limit: number = 1000;
17 | urls: string[] = [];
18 | spider: Spider | null = {};
19 | count: number = 0;
20 | textLengthMinimum: number = 200;
21 |
22 | constructor(urls: string[], limit: number = 1000, textLengthMinimum: number = 200) {
23 | this.urls = urls;
24 | this.limit = limit
25 | this.textLengthMinimum = textLengthMinimum
26 |
27 | this.count = 0
28 | this.pages = [];
29 | this.spider = {}
30 | }
31 |
32 | handleRequest = (doc: any) => {
33 | const $ = cheerio.load(doc.res.body);
34 | $("script").remove();
35 | $("#hub-sidebar").remove();
36 | $("header").remove();
37 | $("nav").remove();
38 | $("img").remove();
39 | const title = $("title").text() || $(".article-title").text();
40 | const html = $("body").html();
41 | const text = turndownService.turndown(html);
42 | console.log("crawling ", doc.url)
43 | const page: Page = {
44 | url: doc.url,
45 | text,
46 | title,
47 | };
48 | if (text.length > this.textLengthMinimum) {
49 | this.pages.push(page);
50 | }
51 |
52 |
53 | doc.$("a").each((i: number, elem: any) => {
54 | var href = doc.$(elem).attr("href")?.split("#")[0];
55 | var targetUrl = href && doc.resolve(href);
56 | // crawl more
57 | if (targetUrl && this.urls.some(u => {
58 | const targetUrlParts = parse(targetUrl);
59 | const uParts = parse(u);
60 | return targetUrlParts.hostname === uParts.hostname
61 | }) && this.count < this.limit) {
62 | this.spider.queue(targetUrl, this.handleRequest);
63 | this.count = this.count + 1
64 | }
65 | });
66 | };
67 |
68 | start = async () => {
69 | this.pages = []
70 | return new Promise((resolve, reject) => {
71 | this.spider = new Spider({
72 | concurrent: 5,
73 | delay: 0,
74 | allowDuplicates: false,
75 | catchErrors: true,
76 | addReferrer: false,
77 | xhr: false,
78 | keepAlive: false,
79 | error: (err: any, url: string) => {
80 | console.log(err, url);
81 | reject(err)
82 | },
83 | // Called when there are no more requests
84 | done: () => {
85 | resolve(this.pages)
86 | },
87 | headers: { "user-agent": "node-spider" },
88 | encoding: "utf8",
89 | });
90 | this.urls.forEach((url) => {
91 | this.spider.queue(url, this.handleRequest);
92 | });
93 | })
94 | }
95 | }
96 |
97 | export { Crawler };
98 |
--------------------------------------------------------------------------------
/src/pages/_document.tsx:
--------------------------------------------------------------------------------
1 | import { Html, Head, Main, NextScript } from "next/document";
2 |
3 | export default function Document() {
4 | return (
5 |
6 |
7 |
12 |
13 |
14 |
15 |
16 |
17 |
18 | );
19 | }
20 |
--------------------------------------------------------------------------------
/src/pages/api/chat.ts:
--------------------------------------------------------------------------------
1 | import { CallbackManager } from "langchain/callbacks";
2 | import { LLMChain } from "langchain/chains";
3 | import { ChatOpenAI } from "langchain/chat_models/openai";
4 | import { OpenAI } from "@langchain/openai";
5 | import { PromptTemplate } from "langchain/prompts";
6 | import type { NextApiRequest, NextApiResponse } from "next";
7 | import { summarizeLongDocument } from "./summarizer";
8 | import {
9 | createPagesServerClient,
10 | SupabaseClient,
11 | } from "@supabase/auth-helpers-nextjs";
12 |
13 | import { ConversationLog } from "./conversationLog";
14 | import { Metadata, getMatchesFromEmbeddings } from "./matches";
15 | import { templates } from "./templates";
16 |
17 | const llm = new OpenAI({});
18 |
19 | const handleRequest = async ({
20 | prompt,
21 | userId,
22 | supabaseAuthedClient,
23 | }: {
24 | prompt: string;
25 | userId: string;
26 | supabaseAuthedClient: SupabaseClient;
27 | }) => {
28 | try {
29 | const channel = supabaseAuthedClient.channel(userId);
30 | const { data } = await supabaseAuthedClient
31 | .from("conversations")
32 | .insert({ speaker: "ai", user_id: userId })
33 | .select()
34 | .single()
35 | .throwOnError();
36 | const interactionId = data?.id;
37 |
38 | // Retrieve the conversation log and save the user's prompt
39 | const conversationLog = new ConversationLog(userId);
40 | const conversationHistory = await conversationLog.getConversation({
41 | limit: 10,
42 | });
43 | await conversationLog.addEntry({ entry: prompt, speaker: "user" });
44 |
45 | // Build an LLM chain that will improve the user prompt
46 | const inquiryChain = new LLMChain({
47 | llm,
48 | prompt: new PromptTemplate({
49 | template: templates.inquiryTemplate,
50 | inputVariables: ["userPrompt", "conversationHistory"],
51 | }),
52 | });
53 | const inquiryChainResult = await inquiryChain.call({
54 | userPrompt: prompt,
55 | conversationHistory,
56 | });
57 | const inquiry: string = inquiryChainResult.text;
58 |
59 | channel.subscribe(async (status) => {
60 | if (status === "SUBSCRIBED") {
61 | await channel.send({
62 | type: "broadcast",
63 | event: "chat",
64 | payload: {
65 | event: "status",
66 | message: "Finding matches...",
67 | },
68 | });
69 |
70 | const matches = await getMatchesFromEmbeddings(
71 | inquiry,
72 | supabaseAuthedClient,
73 | 2
74 | );
75 |
76 | const urls =
77 | matches &&
78 | Array.from(
79 | new Set(
80 | matches.map((match) => {
81 | const metadata = match.metadata as Metadata;
82 | const { url } = metadata;
83 | return url;
84 | })
85 | )
86 | );
87 |
88 | console.log(urls);
89 |
90 | const docs =
91 | matches &&
92 | Array.from(
93 | matches.reduce((map, match) => {
94 | const metadata = match.metadata as Metadata;
95 | const { text, url } = metadata;
96 | if (!map.has(url)) {
97 | map.set(url, text);
98 | }
99 | return map;
100 | }, new Map())
101 | ).map(([_, text]) => text);
102 |
103 | const promptTemplate = new PromptTemplate({
104 | template: templates.qaTemplate,
105 | inputVariables: [
106 | "summaries",
107 | "question",
108 | "conversationHistory",
109 | "urls",
110 | ],
111 | });
112 |
113 | let i = 0;
114 | const chat = new ChatOpenAI({
115 | streaming: true,
116 | verbose: true,
117 | modelName: "gpt-3.5-turbo",
118 | callbackManager: CallbackManager.fromHandlers({
119 | async handleLLMNewToken(token) {
120 | await channel.send({
121 | type: "broadcast",
122 | event: "chat",
123 | payload: {
124 | event: "response",
125 | token,
126 | interactionId,
127 | },
128 | });
129 | },
130 | async handleLLMEnd(result) {
131 | // Store answer in DB
132 | await supabaseAuthedClient
133 | .from("conversations")
134 | .update({ entry: result.generations[0][0].text })
135 | .eq("id", interactionId);
136 | await channel.send({
137 | type: "broadcast",
138 | event: "chat",
139 | payload: {
140 | event: "responseEnd",
141 | token: "END",
142 | interactionId,
143 | },
144 | });
145 | },
146 | }),
147 | });
148 |
149 | const chain = new LLMChain({
150 | prompt: promptTemplate,
151 | llm: chat,
152 | });
153 |
154 | const allDocs = docs.join("\n");
155 | if (allDocs.length > 4000) {
156 | await channel.send({
157 | type: "broadcast",
158 | event: "chat",
159 | payload: {
160 | event: "status",
161 | message: `Just a second, forming final answer...`,
162 | },
163 | });
164 | }
165 |
166 | const summary =
167 | allDocs.length > 4000
168 | ? await summarizeLongDocument({ document: allDocs, inquiry })
169 | : allDocs;
170 |
171 | await chain.call({
172 | summaries: summary,
173 | question: prompt,
174 | conversationHistory,
175 | urls,
176 | });
177 | }
178 | });
179 | } catch (error) {
180 | //@ts-ignore
181 | console.error(error);
182 | // @ts-ignore
183 | console.error("Something went wrong with OpenAI: ", error.message);
184 | }
185 | };
186 |
187 | export default async function handler(
188 | req: NextApiRequest,
189 | res: NextApiResponse
190 | ) {
191 | // Create authenticated Supabase Client
192 | const supabase = createPagesServerClient(
193 | { req, res },
194 | {
195 | options: {
196 | realtime: {
197 | params: {
198 | eventsPerSecond: -1,
199 | },
200 | },
201 | },
202 | }
203 | );
204 | // Check if we have a session
205 | const {
206 | data: { session },
207 | } = await supabase.auth.getSession();
208 |
209 | if (!session)
210 | return res.status(401).json({
211 | error: "not_authenticated",
212 | description:
213 | "The user does not have an active session or is not authenticated",
214 | });
215 |
216 | // Run queries with RLS on the server
217 | const { body } = req;
218 | const { prompt } = body;
219 | await handleRequest({
220 | prompt,
221 | userId: session.user.id,
222 | supabaseAuthedClient: supabase,
223 | });
224 | res.status(200).json({ message: "started" });
225 | }
226 |
--------------------------------------------------------------------------------
/src/pages/api/conversationLog.ts:
--------------------------------------------------------------------------------
1 | import { supabaseAdminClient } from "utils/supabaseAdmin";
2 |
3 | class ConversationLog {
4 | constructor(public userId: string) {
5 | this.userId = userId;
6 | }
7 |
8 | public async addEntry({
9 | entry,
10 | speaker,
11 | }: {
12 | entry: string;
13 | speaker: "user" | "ai";
14 | }) {
15 | try {
16 | await supabaseAdminClient
17 | .from("conversations")
18 | .insert({ user_id: this.userId, entry, speaker })
19 | .throwOnError();
20 | } catch (e) {
21 | console.log(`Error adding entry: ${e}`);
22 | }
23 | }
24 |
25 | public async getConversation({
26 | limit,
27 | }: {
28 | limit: number;
29 | }): Promise {
30 | const { data: history } = await supabaseAdminClient
31 | .from("conversations")
32 | .select("entry, speaker, created_at")
33 | .eq("user_id", this.userId)
34 | .order("created_at", { ascending: false })
35 | .limit(limit)
36 | .throwOnError();
37 |
38 | const response = history
39 | ? history
40 | .map((entry) => {
41 | return `${entry.speaker.toUpperCase()}: ${entry.entry}`;
42 | })
43 | .reverse()
44 | : [];
45 | return response;
46 | }
47 |
48 | public async clearConversation() {
49 | await supabaseAdminClient
50 | .from("conversations")
51 | .delete()
52 | .eq("user_id", this.userId)
53 | .throwOnError();
54 | }
55 | }
56 |
57 | export { ConversationLog };
58 |
--------------------------------------------------------------------------------
/src/pages/api/crawl.ts:
--------------------------------------------------------------------------------
1 | import { NextApiRequest, NextApiResponse } from "next";
2 | import { Crawler, Page } from "crawler";
3 | import { Document } from "langchain/document";
4 | import { OpenAIEmbeddings } from "@langchain/openai";
5 | import { SupabaseVectorStore } from "@langchain/community/vectorstores/supabase";
6 | import { supabaseAdminClient } from "utils/supabaseAdmin";
7 | import { TokenTextSplitter } from "langchain/text_splitter";
8 | import { summarizeLongDocument } from "./summarizer";
9 |
10 | // The TextEncoder instance enc is created and its encode() method is called on the input string.
11 | // The resulting Uint8Array is then sliced, and the TextDecoder instance decodes the sliced array in a single line of code.
12 | const truncateStringByBytes = (str: string, bytes: number) => {
13 | const enc = new TextEncoder();
14 | return new TextDecoder("utf-8").decode(enc.encode(str).slice(0, bytes));
15 | };
16 |
17 | export default async function handler(
18 | req: NextApiRequest,
19 | res: NextApiResponse
20 | ) {
21 | const { query } = req;
22 | const { urls: urlString, limit, indexName, summmarize } = query;
23 | const urls = (urlString as string).split(",");
24 | const crawlLimit = parseInt(limit as string) || 100;
25 | const shouldSummarize = summmarize === "true";
26 |
27 | const crawler = new Crawler(urls, crawlLimit, 200);
28 | const pages = (await crawler.start()) as Page[];
29 |
30 | const documentCollection = await Promise.all(
31 | pages.map(async (row) => {
32 | // console.log(row);
33 | const splitter = new TokenTextSplitter({
34 | encodingName: "gpt2",
35 | chunkSize: 300,
36 | chunkOverlap: 20,
37 | });
38 |
39 | const pageContent = shouldSummarize
40 | ? await summarizeLongDocument({ document: row.text })
41 | : row.text;
42 |
43 | const docs = splitter.splitDocuments([
44 | new Document({
45 | pageContent,
46 | metadata: {
47 | url: row.url,
48 | text: truncateStringByBytes(pageContent, 36000),
49 | },
50 | }),
51 | ]);
52 | return docs;
53 | })
54 | );
55 |
56 | try {
57 | const embeddings = new OpenAIEmbeddings();
58 |
59 | const store = new SupabaseVectorStore(embeddings, {
60 | client: supabaseAdminClient,
61 | tableName: "documents",
62 | });
63 | console.log("Storing embeddings now ");
64 | try {
65 | await Promise.all(
66 | documentCollection.map(async (documents) => {
67 | console.log(documents);
68 | await store.addDocuments(documents);
69 | })
70 | );
71 | console.log("Process has completed");
72 | res.status(200).json({ message: "Done" });
73 | } catch (e) {
74 | console.log(e);
75 | res.status(500).json({ message: `Error ${JSON.stringify(e)}` });
76 | }
77 | } catch (e) {
78 | console.log(e);
79 | }
80 | }
81 |
--------------------------------------------------------------------------------
/src/pages/api/matches.ts:
--------------------------------------------------------------------------------
1 | import { SupabaseClient } from "@supabase/supabase-js";
2 | import { OpenAIEmbeddings } from "@langchain/openai";
3 | import { SupabaseVectorStore } from "@langchain/community/vectorstores/supabase";
4 |
5 | export type Metadata = {
6 | url: string;
7 | text: string;
8 | chunk: string;
9 | };
10 |
11 | const getMatchesFromEmbeddings = async (
12 | inquiry: string,
13 | client: SupabaseClient,
14 | topK: number
15 | ) => {
16 | const embeddings = new OpenAIEmbeddings();
17 |
18 | const store = new SupabaseVectorStore(embeddings, {
19 | client,
20 | tableName: "documents",
21 | });
22 | try {
23 | const queryResult = await store.similaritySearch(inquiry, topK);
24 | return (
25 | queryResult.map((match) => ({
26 | ...match,
27 | metadata: match.metadata as Metadata,
28 | })) || []
29 | );
30 | } catch (e) {
31 | console.log("Error querying embeddings: ", e);
32 | throw new Error(`Error querying embeddings: ${e}`);
33 | }
34 | };
35 |
36 | export { getMatchesFromEmbeddings };
37 |
--------------------------------------------------------------------------------
/src/pages/api/summarizer.ts:
--------------------------------------------------------------------------------
1 | import { OpenAI } from "@langchain/openai";
2 | import { templates } from "./templates";
3 | import { LLMChain } from "langchain/chains";
4 | import { PromptTemplate } from "@langchain/core/prompts";
5 | import Bottleneck from "bottleneck";
6 | import { StructuredOutputParser } from "langchain/output_parsers";
7 |
8 | const llm = new OpenAI({
9 | concurrency: 10,
10 | temperature: 0,
11 | modelName: "gpt-3.5-turbo",
12 | });
13 |
14 | const { summarizerTemplate, summarizerDocumentTemplate } = templates;
15 |
16 | const parser = StructuredOutputParser.fromNamesAndDescriptions({
17 | answer: "answer to the user's question",
18 | source: "source used to answer the user's question, should be a website.",
19 | });
20 |
21 | const formatInstructions = parser.getFormatInstructions();
22 |
23 | const limiter = new Bottleneck({
24 | minTime: 5050,
25 | });
26 |
27 | console.log(summarizerDocumentTemplate.length);
28 | const chunkSubstr = (str: string, size: number) => {
29 | const numChunks = Math.ceil(str.length / size);
30 | const chunks = new Array(numChunks);
31 |
32 | for (let i = 0, o = 0; i < numChunks; ++i, o += size) {
33 | chunks[i] = str.substr(o, size);
34 | }
35 |
36 | return chunks;
37 | };
38 |
39 | const summarize = async ({
40 | document,
41 | inquiry,
42 | onSummaryDone,
43 | }: {
44 | document: string;
45 | inquiry?: string;
46 | onSummaryDone?: Function;
47 | }) => {
48 | console.log("summarizing ", document.length);
49 | const promptTemplate = new PromptTemplate({
50 | template: inquiry ? summarizerTemplate : summarizerDocumentTemplate,
51 | inputVariables: inquiry ? ["document", "inquiry"] : ["document"],
52 | });
53 | const chain = new LLMChain({
54 | prompt: promptTemplate,
55 | llm,
56 | });
57 |
58 | try {
59 | const result = await chain.call({
60 | prompt: promptTemplate,
61 | document,
62 | inquiry,
63 | });
64 |
65 | onSummaryDone && onSummaryDone(result.text);
66 | return result.text;
67 | } catch (e) {
68 | console.log(e);
69 | }
70 | };
71 |
72 | const rateLimitedSummarize = limiter.wrap(summarize);
73 |
74 | const summarizeLongDocument = async ({
75 | document,
76 | inquiry,
77 | onSummaryDone,
78 | }: {
79 | document: string;
80 | inquiry?: string;
81 | onSummaryDone?: Function;
82 | }): Promise => {
83 | // Chunk document into 4000 character chunks
84 | const templateLength = inquiry
85 | ? summarizerTemplate.length
86 | : summarizerDocumentTemplate.length;
87 | try {
88 | if (document.length + templateLength > 4000) {
89 | console.log("document is long and has to be shortened", document.length);
90 | const chunks = chunkSubstr(document, 4000 - templateLength - 1);
91 | let summarizedChunks: string[] = [];
92 | summarizedChunks = await Promise.all(
93 | chunks.map(async (chunk) => {
94 | let result;
95 | if (inquiry) {
96 | result = await rateLimitedSummarize({
97 | document: chunk,
98 | inquiry,
99 | onSummaryDone,
100 | });
101 | } else {
102 | result = await rateLimitedSummarize({
103 | document: chunk,
104 | onSummaryDone,
105 | });
106 | }
107 | return result;
108 | })
109 | );
110 |
111 | const result = summarizedChunks.join("\n");
112 | console.log(result.length);
113 |
114 | if (result.length + templateLength > 4000) {
115 | console.log("document is STILL long and has to be shortened further");
116 | return await summarizeLongDocument({
117 | document: result,
118 | inquiry,
119 | onSummaryDone,
120 | });
121 | } else {
122 | console.log("done");
123 | return result;
124 | }
125 | } else {
126 | return document;
127 | }
128 | } catch (e) {
129 | throw new Error(e as string);
130 | }
131 | };
132 |
133 | export { summarizeLongDocument };
134 |
--------------------------------------------------------------------------------
/src/pages/api/templates.ts:
--------------------------------------------------------------------------------
1 | const templates = {
2 | qaTemplate: `Answer the question based on the context below. You should follow ALL the following rules when generating an answer:
3 | - There will be a CONVERSATION LOG, CONTEXT, and a QUESTION.
4 | - The final answer must always be styled using markdown.
5 | - Your main goal is to point the user to the right source of information (the source is always a URL) based on the CONTEXT you are given.
6 | - Your secondary goal is to provide the user with an answer that is relevant to the question.
7 | - Provide the user with a code example that is relevant to the question, if the context contains relevant code examples. Do not make up any code examples on your own.
8 | - Take into account the entire conversation so far, marked as CONVERSATION LOG, but prioritize the CONTEXT.
9 | - Based on the CONTEXT, choose the source that is most relevant to the QUESTION.
10 | - Do not make up any answers if the CONTEXT does not have relevant information.
11 | - Use bullet points, lists, paragraphs and text styling to present the answer in markdown.
12 | - The CONTEXT is a set of JSON objects, each includes the field "text" where the content is stored, and "url" where the url of the page is stored.
13 | - The URLs are the URLs of the pages that contain the CONTEXT. Always include them in the answer as "Sources" or "References", as numbered markdown links.
14 | - Do not mention the CONTEXT or the CONVERSATION LOG in the answer, but use them to generate the answer.
15 | - ALWAYS prefer the result with the highest "score" value.
16 | - Ignore any content that is stored in html tables.
17 | - The answer should only be based on the CONTEXT. Do not use any external sources. Do not generate the response based on the question without clear reference to the context.
18 | - Summarize the CONTEXT to make it easier to read, but don't omit any information.
19 | - It is IMPERATIVE that any link provided is found in the CONTEXT. Prefer not to provide a link if it is not found in the CONTEXT.
20 |
21 | CONVERSATION LOG: {conversationHistory}
22 |
23 | CONTEXT: {summaries}
24 |
25 | QUESTION: {question}
26 |
27 | URLS: {urls}
28 |
29 | Final Answer: `,
30 | summarizerTemplate: `Shorten the text in the CONTENT, attempting to answer the INQUIRY. You should follow the following rules when generating the summary:
31 | - Any code found in the CONTENT should ALWAYS be preserved in the summary, unchanged.
32 | - Code will be surrounded by backticks (\`) or triple backticks (\`\`\`).
33 | - Summary should include code examples that are relevant to the INQUIRY, based on the content. Do not make up any code examples on your own.
34 | - The summary will answer the INQUIRY. If it cannot be answered, the summary should be empty, AND NO TEXT SHOULD BE RETURNED IN THE FINAL ANSWER AT ALL.
35 | - If the INQUIRY cannot be answered, the final answer should be empty.
36 | - The summary should be under 4000 characters.
37 | - The summary should be 2000 characters long, if possible.
38 |
39 | INQUIRY: {inquiry}
40 | CONTENT: {document}
41 |
42 | Final answer:
43 | `,
44 | summarizerDocumentTemplate: `Summarize the text in the CONTENT. You should follow the following rules when generating the summary:
45 | - Any code found in the CONTENT should ALWAYS be preserved in the summary, unchanged.
46 | - Code will be surrounded by backticks (\`) or triple backticks (\`\`\`).
47 | - Summary should include code examples when possible. Do not make up any code examples on your own.
48 | - The summary should be under 4000 characters.
49 | - The summary should be at least 1500 characters long, if possible.
50 |
51 | CONTENT: {document}
52 |
53 | Final answer:
54 | `,
55 | inquiryTemplate: `Given the following user prompt and conversation log, formulate a question that would be the most relevant to provide the user with an answer from a knowledge base.
56 | You should follow the following rules when generating an answer:
57 | - Always prioritize the user prompt over the conversation log.
58 | - Ignore any conversation log that is not directly related to the user prompt.
59 | - Only attempt to answer if a question was posed.
60 | - The question should be a single sentence
61 | - You should remove any punctuation from the question
62 | - You should remove any words that are not relevant to the question
63 | - If you are unable to formulate a question, respond with the same USER PROMPT you got.
64 |
65 | USER PROMPT: {userPrompt}
66 |
67 | CONVERSATION LOG: {conversationHistory}
68 |
69 | Final answer:
70 | `,
71 | summerierTemplate: `Summarize the following text. You should follow the following rules when generating an answer:`
72 | }
73 |
74 | export { templates }
--------------------------------------------------------------------------------
/src/pages/index.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import Head from "next/head";
4 | import { useEffect, useState } from "react";
5 | import ReactMarkdown from "react-markdown";
6 | import remarkMath from "remark-math";
7 | import rehypeKatex from "rehype-katex";
8 | import * as timeago from "timeago.js";
9 | import {
10 | MainContainer,
11 | ChatContainer,
12 | MessageList,
13 | Message,
14 | MessageInput,
15 | ConversationHeader,
16 | TypingIndicator,
17 | } from "@chatscope/chat-ui-kit-react";
18 | import { supabaseBrowserClient } from "utils/supabaseBrowser";
19 | import { Auth } from "@supabase/auth-ui-react";
20 | import {
21 | // Import predefined theme
22 | ThemeSupa,
23 | } from "@supabase/auth-ui-shared";
24 |
25 | import styles from "@chatscope/chat-ui-kit-styles/dist/default/styles.min.css";
26 |
27 | type ConversationEntry = {
28 | message: string;
29 | speaker: "bot" | "user";
30 | date: Date;
31 | id?: string;
32 | };
33 |
34 | const updateChatbotMessage = (
35 | conversation: ConversationEntry[],
36 | message: { interactionId: string; token: string; event: "response" }
37 | ): ConversationEntry[] => {
38 | const interactionId = message.interactionId;
39 |
40 | const updatedConversation = conversation.reduce(
41 | (acc: ConversationEntry[], e: ConversationEntry) => [
42 | ...acc,
43 | e.id === interactionId ? { ...e, message: e.message + message.token } : e,
44 | ],
45 | []
46 | );
47 |
48 | return conversation.some((e) => e.id === interactionId)
49 | ? updatedConversation
50 | : [
51 | ...updatedConversation,
52 | {
53 | id: interactionId,
54 | message: message.token,
55 | speaker: "bot",
56 | date: new Date(),
57 | },
58 | ];
59 | };
60 |
// Chat page. After Supabase auth resolves a user id, prompts are POSTed to
// /api/chat and the bot's reply streams back token-by-token over a Supabase
// Realtime broadcast channel named after the user's id.
export default function Home() {
  const [text, setText] = useState("");
  // NOTE(review): useState([]) is inferred as never[]; consider
  // useState<ConversationEntry[]>([]) so entries are type-checked.
  const [conversation, setConversation] = useState([]);
  const [botIsTyping, setBotIsTyping] = useState(false);
  const [statusMessage, setStatusMessage] = useState("Waiting for query...");
  const [userId, setUserId] = useState();

  // Resolve the current session once on mount; if there is none yet, watch
  // auth state changes (fired after login) to pick up the user id.
  useEffect(() => {
    supabaseBrowserClient.auth.getSession().then(({ data: { session } }) => {
      if (!session) {
        supabaseBrowserClient.auth.onAuthStateChange((_e, newSession) =>
          setUserId(newSession?.user.id)
        );
      } else {
        setUserId(session?.user.id);
      }
    });
  }, []);

  // Not signed in yet: render the auth UI instead of the chat.
  if (!userId)
    return (

    );

  // NOTE(review): this channel is created and `.subscribe()`d on EVERY
  // render, and never unsubscribed — it should live in a useEffect keyed on
  // userId with a cleanup that removes the channel. TODO confirm and fix.
  const channel = supabaseBrowserClient.channel(userId);

  channel
    .on("broadcast", { event: "chat" }, ({ payload }) => {
      switch (payload.event) {
        case "response":
          // Streamed token: append it to (or start) the bot's message.
          setConversation((state) => updateChatbotMessage(state, payload));
          break;
        case "status":
          // Progress text surfaced in the conversation header.
          setStatusMessage(payload.message);
          break;
        case "responseEnd":
        default:
          // Stream finished (or unknown event): reset the typing indicator.
          setBotIsTyping(false);
          setStatusMessage("Waiting for query...");
      }
    })
    .subscribe();

  // Append the user's message locally, then POST it to the chat API; the
  // bot's answer arrives asynchronously via the realtime channel above.
  const submit = async () => {
    setConversation((state) => [
      ...state,
      {
        message: text,
        speaker: "user",
        date: new Date(),
      },
    ]);
    try {
      setBotIsTyping(true);
      const response = await fetch("/api/chat", {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
        },
        body: JSON.stringify({ prompt: text }),
      });

      await response.json();
    } catch (error) {
      console.error("Error submitting message:", error);
    }
    setText("");
  };

  return (
    <>

Langchain Supa GPT












) : null
}
>
{conversation.map((entry, index) => {
return (



{entry.message}




);
})}

{
setText(text);
}}
sendButton={true}
autoFocus
/>




>
);
}
206 |
--------------------------------------------------------------------------------
/src/pages/test.tsx:
--------------------------------------------------------------------------------
//@ts-nocheck
import { useEffect, useState } from "react";
import getConfig from "next/config";

// API base URL exposed through next.config.js publicRuntimeConfig.
const { publicRuntimeConfig } = getConfig();
const { apiUrl } = publicRuntimeConfig;

// Dev/test page: polls `${apiUrl}/api/test` roughly once a second and renders
// every response received so far.
function MyComponent() {
  const [data, setData] = useState([]);

  // Fetch one result, append it, then schedule the next poll.
  const fetchData = async () => {
    try {
      const response = await fetch(`${apiUrl}/api/test`);
      const result = await response.json();
      // FIXME(review): `data` is captured from the render that created this
      // closure, so this spread re-uses a stale array and drops earlier
      // results — use the functional form setData((prev) => [...prev, result]).
      setData([...data, result]);

      // Schedule the next fetch
      // NOTE(review): the timeout id is never kept or cleared, so polling
      // continues after unmount; store it and clear in a useEffect cleanup.
      setTimeout(fetchData, 1000);
    } catch (error) {
      console.error("Error fetching data:", error);
      setTimeout(fetchData, 5000); // Retry after 5 seconds
    }
  };

  // Kick off the polling loop once on mount.
  useEffect(() => {
    fetchData();
  }, []);

  return (


Streaming data:

{data.map((item, index) => (
- {JSON.stringify(item)}
))}


  );
}

export default MyComponent;
42 |
--------------------------------------------------------------------------------
/src/styles/globals.css:
--------------------------------------------------------------------------------
html {
  /* Generic family keywords (sans-serif, serif, …) must NOT be quoted:
     a quoted "sans-serif" is looked up as a font literally named
     sans-serif, so the fallback silently never applied. */
  font-family: "mediumllweb", sans-serif;
}
/* Underline and tint links that appear inside body copy. */
p > a {
  color: #192bd5;
  text-decoration: underline;
}
8 |
--------------------------------------------------------------------------------
/src/types/supabase.ts:
--------------------------------------------------------------------------------
// Any JSON-compatible value, as stored in Postgres json/jsonb columns.
export type Json =
  | string
  | number
  | boolean
  | null
  | { [key: string]: Json | undefined }
  | Json[]
8 |
// Typed mirror of the project's Postgres schema, in the shape produced by
// `supabase gen types typescript`.
// NOTE(review): files like this are normally auto-generated — prefer
// regenerating over hand-editing so it stays in sync with migrations.
export interface Database {
  public: {
    Tables: {
      // Chat history: one row per user/ai utterance (see
      // supabase/migrations dbinit).
      conversations: {
        Row: {
          created_at: string
          entry: string | null
          id: string
          speaker: Database["public"]["Enums"]["speaker"]
          user_id: string
        }
        Insert: {
          created_at?: string
          entry?: string | null
          id?: string
          speaker: Database["public"]["Enums"]["speaker"]
          user_id: string
        }
        Update: {
          created_at?: string
          entry?: string | null
          id?: string
          speaker?: Database["public"]["Enums"]["speaker"]
          user_id?: string
        }
        Relationships: [
          {
            foreignKeyName: "conversations_user_id_fkey"
            columns: ["user_id"]
            isOneToOne: false
            referencedRelation: "users"
            referencedColumns: ["id"]
          }
        ]
      }
      // Vector store for langchain documents; `embedding` is a pgvector
      // column serialized as a string on the wire.
      documents: {
        Row: {
          content: string | null
          embedding: string | null
          id: number
          metadata: Json | null
        }
        Insert: {
          content?: string | null
          embedding?: string | null
          id?: number
          metadata?: Json | null
        }
        Update: {
          content?: string | null
          embedding?: string | null
          id?: number
          metadata?: Json | null
        }
        Relationships: []
      }
    }
    Views: {
      [_ in never]: never
    }
    // Database functions callable via supabase.rpc(); the vector_* and
    // *handler entries come from the pgvector extension.
    Functions: {
      hnswhandler: {
        Args: {
          "": unknown
        }
        Returns: unknown
      }
      ivfflathandler: {
        Args: {
          "": unknown
        }
        Returns: unknown
      }
      // Similarity search over `documents` (defined in the langchain
      // migration).
      match_documents: {
        Args: {
          query_embedding: string
          match_count?: number
          filter?: Json
        }
        Returns: {
          id: number
          content: string
          metadata: Json
          similarity: number
        }[]
      }
      vector_avg: {
        Args: {
          "": number[]
        }
        Returns: string
      }
      vector_dims: {
        Args: {
          "": string
        }
        Returns: number
      }
      vector_norm: {
        Args: {
          "": string
        }
        Returns: number
      }
      vector_out: {
        Args: {
          "": string
        }
        Returns: unknown
      }
      vector_send: {
        Args: {
          "": string
        }
        Returns: string
      }
      vector_typmod_in: {
        Args: {
          "": unknown[]
        }
        Returns: number
      }
    }
    Enums: {
      speaker: "user" | "ai"
    }
    CompositeTypes: {
      [_ in never]: never
    }
  }
}
140 |
// Convenience lookup: Tables<"conversations"> resolves to that table's (or
// view's) Row type; the { schema: ... } form addresses non-public schemas.
export type Tables<
  PublicTableNameOrOptions extends
    | keyof (Database["public"]["Tables"] & Database["public"]["Views"])
    | { schema: keyof Database },
  TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
    ? keyof (Database[PublicTableNameOrOptions["schema"]]["Tables"] &
        Database[PublicTableNameOrOptions["schema"]]["Views"])
    : never = never
> = PublicTableNameOrOptions extends { schema: keyof Database }
  ? (Database[PublicTableNameOrOptions["schema"]]["Tables"] &
      Database[PublicTableNameOrOptions["schema"]]["Views"])[TableName] extends {
      Row: infer R
    }
    ? R
    : never
  : PublicTableNameOrOptions extends keyof (Database["public"]["Tables"] &
      Database["public"]["Views"])
  ? (Database["public"]["Tables"] &
      Database["public"]["Views"])[PublicTableNameOrOptions] extends {
      Row: infer R
    }
    ? R
    : never
  : never
// Resolves a table's Insert payload type by name (public schema) or via the
// { schema: ... } option form.
export type TablesInsert<
  PublicTableNameOrOptions extends
    | keyof Database["public"]["Tables"]
    | { schema: keyof Database },
  TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
    ? keyof Database[PublicTableNameOrOptions["schema"]]["Tables"]
    : never = never
> = PublicTableNameOrOptions extends { schema: keyof Database }
  ? Database[PublicTableNameOrOptions["schema"]]["Tables"][TableName] extends {
      Insert: infer I
    }
    ? I
    : never
  : PublicTableNameOrOptions extends keyof Database["public"]["Tables"]
  ? Database["public"]["Tables"][PublicTableNameOrOptions] extends {
      Insert: infer I
    }
    ? I
    : never
  : never
186 |
// Resolves a table's Update payload type by name (public schema) or via the
// { schema: ... } option form.
export type TablesUpdate<
  PublicTableNameOrOptions extends
    | keyof Database["public"]["Tables"]
    | { schema: keyof Database },
  TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
    ? keyof Database[PublicTableNameOrOptions["schema"]]["Tables"]
    : never = never
> = PublicTableNameOrOptions extends { schema: keyof Database }
  ? Database[PublicTableNameOrOptions["schema"]]["Tables"][TableName] extends {
      Update: infer U
    }
    ? U
    : never
  : PublicTableNameOrOptions extends keyof Database["public"]["Tables"]
  ? Database["public"]["Tables"][PublicTableNameOrOptions] extends {
      Update: infer U
    }
    ? U
    : never
  : never
207 |
// Resolves an enum's union type by name, e.g. Enums<"speaker"> -> "user" | "ai".
export type Enums<
  PublicEnumNameOrOptions extends
    | keyof Database["public"]["Enums"]
    | { schema: keyof Database },
  EnumName extends PublicEnumNameOrOptions extends { schema: keyof Database }
    ? keyof Database[PublicEnumNameOrOptions["schema"]]["Enums"]
    : never = never
> = PublicEnumNameOrOptions extends { schema: keyof Database }
  ? Database[PublicEnumNameOrOptions["schema"]]["Enums"][EnumName]
  : PublicEnumNameOrOptions extends keyof Database["public"]["Enums"]
  ? Database["public"]["Enums"][PublicEnumNameOrOptions]
  : never
221 |
--------------------------------------------------------------------------------
/src/utils/supabaseAdmin.ts:
--------------------------------------------------------------------------------
1 | import { createClient } from "@supabase/supabase-js";
2 | import { Database } from "types/supabase";
3 |
4 | const supabasePrivateKey = process.env.SUPABASE_SERVICE_ROLE_KEY;
5 | if (!supabasePrivateKey)
6 | throw new Error(`Expected env var SUPABASE_SERVICE_ROLE_KEY`);
7 |
8 | const supabaseUrl = process.env.NEXT_PUBLIC_SUPABASE_URL;
9 | if (!supabaseUrl) throw new Error(`Expected env var NEXT_PUBLIC_SUPABASE_URL`);
10 |
11 | const supabaseAdminClient = createClient(
12 | supabaseUrl,
13 | supabasePrivateKey,
14 | {
15 | auth: { persistSession: false },
16 | realtime: {
17 | params: {
18 | eventsPerSecond: -1,
19 | },
20 | },
21 | }
22 | );
23 |
24 | export { supabaseAdminClient };
25 |
--------------------------------------------------------------------------------
/src/utils/supabaseBrowser.ts:
--------------------------------------------------------------------------------
1 | import { createClientComponentClient } from "@supabase/auth-helpers-nextjs";
2 | import { Database } from "types/supabase";
3 |
4 | const supabaseBrowserClient = createClientComponentClient({
5 | options: {
6 | realtime: {
7 | params: {
8 | eventsPerSecond: -1,
9 | },
10 | },
11 | },
12 | });
13 |
14 | export { supabaseBrowserClient };
15 |
--------------------------------------------------------------------------------
/supabase/.gitignore:
--------------------------------------------------------------------------------
1 | # Supabase
2 | .branches
3 | .temp
4 |
--------------------------------------------------------------------------------
/supabase/config.toml:
--------------------------------------------------------------------------------
1 | # A string used to distinguish different Supabase projects on the same host. Defaults to the working
2 | # directory name when running `supabase init`.
3 | project_id = "langchain-chatbot-demo"
4 |
5 | [api]
6 | # Port to use for the API URL.
7 | port = 54321
8 | # Schemas to expose in your API. Tables, views and stored procedures in this schema will get API
9 | # endpoints. public and storage are always included.
10 | schemas = ["public", "storage", "graphql_public"]
11 | # Extra schemas to add to the search_path of every request. public is always included.
12 | extra_search_path = ["public", "extensions"]
13 | # The maximum number of rows returned from a view, table, or stored procedure. Limits payload size
14 | # for accidental or malicious requests.
15 | max_rows = 1000
16 |
17 | [db]
18 | # Port to use for the local database URL.
19 | port = 54322
20 | # The database major version to use. This has to be the same as your remote database's. Run `SHOW
21 | # server_version;` on the remote database to check.
22 | major_version = 15
23 |
24 | [studio]
25 | # Port to use for Supabase Studio.
26 | port = 54323
27 |
28 | # Email testing server. Emails sent with the local dev setup are not actually sent - rather, they
29 | # are monitored, and you can view the emails that would have been sent from the web interface.
30 | [inbucket]
31 | # Port to use for the email testing server web interface.
32 | port = 54324
33 | smtp_port = 54325
34 | pop3_port = 54326
35 |
36 | [storage]
37 | # The maximum file size allowed (e.g. "5MB", "500KB").
38 | file_size_limit = "50MiB"
39 |
40 | [auth]
41 | # The base URL of your website. Used as an allow-list for redirects and for constructing URLs used
42 | # in emails.
43 | site_url = "http://localhost:3000"
44 | # A list of *exact* URLs that auth providers are permitted to redirect to post authentication.
45 | additional_redirect_urls = ["https://localhost:3000"]
46 | # How long tokens are valid for, in seconds. Defaults to 3600 (1 hour), maximum 604,800 seconds (one
47 | # week).
48 | jwt_expiry = 3600
49 | # Allow/disallow new user signups to your project.
50 | enable_signup = true
51 |
52 | [auth.email]
53 | # Allow/disallow new user signups via email to your project.
54 | enable_signup = true
55 | # If enabled, a user will be required to confirm any email change on both the old, and new email
56 | # addresses. If disabled, only the new email is required to confirm.
57 | double_confirm_changes = true
58 | # If enabled, users need to confirm their email address before signing in.
59 | enable_confirmations = true
60 |
61 | # Use an external OAuth provider. The full list of providers are: `apple`, `azure`, `bitbucket`,
62 | # `discord`, `facebook`, `github`, `gitlab`, `google`, `keycloak`, `linkedin`, `notion`, `twitch`,
63 | # `twitter`, `slack`, `spotify`, `workos`, `zoom`.
64 | [auth.external.apple]
65 | enabled = false
66 | client_id = ""
67 | secret = ""
68 | # Overrides the default auth redirectUrl.
69 | redirect_uri = ""
70 | # Overrides the default auth provider URL. Used to support self-hosted gitlab, single-tenant Azure,
71 | # or any other third-party OIDC providers.
72 | url = ""
73 |
74 | [analytics]
75 | enabled = false
76 | port = 54327
77 | vector_port = 54328
78 | # Setup BigQuery project to enable log viewer on local development stack.
79 | # See: https://supabase.com/docs/guides/getting-started/local-development#enabling-local-logging
80 | gcp_project_id = ""
81 | gcp_project_number = ""
82 | gcp_jwt_path = "supabase/gcloud.json"
83 |
--------------------------------------------------------------------------------
/supabase/migrations/20230612070906_langchain.sql:
--------------------------------------------------------------------------------
-- Enable the pgvector extension to work with embedding vectors.
-- IF NOT EXISTS keeps the migration from failing when the extension is
-- already enabled (e.g. via the Supabase dashboard).
create extension if not exists vector;

-- Create a table to store your documents
create table documents (
  id bigserial primary key,
  content text, -- corresponds to Document.pageContent
  metadata jsonb, -- corresponds to Document.metadata
  embedding vector(1536) -- 1536 works for OpenAI embeddings, change if needed
);

-- Set up Row Level Security (RLS)
-- See https://supabase.com/docs/guides/auth/row-level-security for more details.
alter table documents
  enable row level security;

-- Read-only access for any signed-in user; writes remain blocked by RLS.
CREATE POLICY "Allow langchain querying for authenticated users" ON "public"."documents"
AS PERMISSIVE FOR SELECT
TO authenticated
USING (true);

-- Create a function to search for documents by cosine similarity.
-- `filter` is matched against metadata with the jsonb containment operator,
-- and a null match_count means "no limit".
create function match_documents (
  query_embedding vector(1536),
  match_count int default null,
  filter jsonb DEFAULT '{}'
) returns table (
  id bigint,
  content text,
  metadata jsonb,
  similarity float
)
language plpgsql
as $$
#variable_conflict use_column
begin
  return query
  select
    id,
    content,
    metadata,
    -- <=> is pgvector cosine distance; 1 - distance = similarity.
    1 - (documents.embedding <=> query_embedding) as similarity
  from documents
  where metadata @> filter
  order by documents.embedding <=> query_embedding
  limit match_count;
end;
$$;
--------------------------------------------------------------------------------
/supabase/migrations/20230613075114_dbinit.sql:
--------------------------------------------------------------------------------
-- Who produced a conversation entry: the human user or the AI assistant.
CREATE TYPE speaker AS ENUM ('user', 'ai');

-- Chat history: one row per utterance, owned by an authenticated user.
CREATE TABLE conversations (
  id uuid not null default gen_random_uuid (),
  user_id uuid references auth.users not null,
  entry text,
  speaker speaker not null,
  created_at timestamp with time zone not null default timezone ('utc'::text, now()),
  constraint conversations_pkey primary key (id)
);

-- Set up Row Level Security (RLS)
-- See https://supabase.com/docs/guides/auth/row-level-security for more details.
alter table conversations
  enable row level security;

-- Users can read and write only their own conversation rows.
CREATE POLICY "Allow users access to own conversations" ON "public"."conversations"
AS PERMISSIVE FOR ALL
TO authenticated
USING (auth.uid() = user_id)
WITH CHECK (auth.uid() = user_id); -- terminating semicolon was missing
--------------------------------------------------------------------------------
/supabase/seed.sql:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/supabase-community/langchain-chatbot-demo/59a381ed330495c32c118e80422bf18bd22f43a8/supabase/seed.sql
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "es5",
4 | "lib": ["dom", "dom.iterable", "esnext"],
5 | "allowJs": true,
6 | "skipLibCheck": true,
7 | "strict": true,
8 | "forceConsistentCasingInFileNames": true,
9 | "noEmit": true,
10 | "esModuleInterop": true,
11 | "module": "esnext",
12 | "moduleResolution": "node",
13 | "resolveJsonModule": true,
14 | "isolatedModules": true,
15 | "jsx": "preserve",
16 | "incremental": true,
17 | "baseUrl": "./src",
18 | "paths": {
19 |       "@/*": ["./*"]
20 | }
21 | },
22 | "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx"],
23 | "exclude": ["node_modules"]
24 | }
25 |
--------------------------------------------------------------------------------