├── .dockerignore
├── .env.example
├── .eslintrc.json
├── .gitignore
├── Dockerfile
├── README.md
├── data
│   ├── args.json
│   ├── docstore.json
│   └── hnswlib.index
├── fly.toml
├── ingest.ts
├── next.config.js
├── package.json
├── pages
│   ├── _app.tsx
│   ├── _document.tsx
│   ├── api
│   │   ├── chat-stream.ts
│   │   ├── chat.ts
│   │   └── util.ts
│   └── index.tsx
├── public
│   ├── android-chrome-192x192.png
│   ├── android-chrome-512x512.png
│   ├── apple-touch-icon.png
│   ├── chatIcon.png
│   ├── favicon-16x16.png
│   ├── favicon-32x32.png
│   ├── favicon.ico
│   ├── og-image.svg
│   ├── robots.txt
│   ├── site.webmanifest
│   └── usericon.png
├── styles
│   ├── Home.module.css
│   └── globals.css
├── tsconfig.json
├── vercel.json
└── yarn.lock
/.dockerignore:
--------------------------------------------------------------------------------
1 | fly.toml
2 | Dockerfile
3 | .dockerignore
4 | node_modules
5 | npm-debug.log
6 | README.md
7 | .next
8 | .git
9 | .env
--------------------------------------------------------------------------------
/.env.example:
--------------------------------------------------------------------------------
1 | OPENAI_API_KEY=""
--------------------------------------------------------------------------------
/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "next/core-web-vitals"
3 | }
4 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 |
3 | # dependencies
4 | /node_modules
5 | /.pnp
6 | .pnp.js
7 |
8 | # testing
9 | /coverage
10 |
11 | # next.js
12 | /.next/
13 | /out/
14 |
15 | # production
16 | /build
17 |
18 | # misc
19 | .DS_Store
20 | *.pem
21 |
22 | # debug
23 | npm-debug.log*
24 | yarn-debug.log*
25 | yarn-error.log*
26 | .pnpm-debug.log*
27 |
28 | # local env files
29 | .env*.local
30 |
31 | # Used env file
32 | .env
33 |
34 | # vercel
35 | .vercel
36 |
37 | # typescript
38 | *.tsbuildinfo
39 | next-env.d.ts
40 |
41 | NavalAlmanac.md
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # Install dependencies only when needed
2 | FROM node:16-alpine AS builder
3 | # Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
4 | RUN apk add --no-cache libc6-compat
5 |
6 | # Add build dependencies for HNSWLib
7 | ENV PYTHONUNBUFFERED=1
8 | RUN apk add --no-cache make g++
9 | RUN apk add --update --no-cache python3 && ln -sf python3 /usr/bin/python
10 | RUN python3 -m ensurepip
11 | RUN pip3 install --no-cache --upgrade pip setuptools
12 |
13 | WORKDIR /app
14 | COPY . .
15 | RUN yarn install --frozen-lockfile
16 |
17 | # If using npm with a `package-lock.json` comment out above and use below instead
18 | # RUN npm ci
19 |
20 | ENV NEXT_TELEMETRY_DISABLED 1
21 |
22 | # Add `ARG` instructions below if you need `NEXT_PUBLIC_` variables
23 | # then put the value on your fly.toml
24 | # Example:
25 | # ARG NEXT_PUBLIC_EXAMPLE="value here"
26 |
27 | RUN yarn build
28 |
29 | # If using npm comment out above and use below instead
30 | # RUN npm run build
31 |
32 |
33 | # Production image, copy all the files and run next
34 | FROM node:16-alpine AS runner
35 | WORKDIR /app
36 |
37 | ENV NODE_ENV production
38 | ENV NEXT_TELEMETRY_DISABLED 1
39 |
40 | RUN addgroup --system --gid 1001 nodejs
41 | RUN adduser --system --uid 1001 nextjs
42 |
43 | COPY --chown=nextjs:nodejs --from=builder /app ./
44 |
45 | USER nextjs
46 |
47 | CMD ["yarn", "start"]
48 |
49 | # If using npm comment out above and use below instead
50 | # CMD ["npm", "run", "start"]
51 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app).
2 |
3 | ## Getting Started
4 |
5 | First, create a new `.env` file from `.env.example` and add your OpenAI API key, which you can find [here](https://platform.openai.com/account/api-keys).
6 |
7 | ```bash
8 | cp .env.example .env
9 | ```
10 |
11 | ### Prerequisites
12 |
13 | - [Node.js](https://nodejs.org/en/download/)
14 | - [Yarn](https://classic.yarnpkg.com/en/docs/install/#mac-stable)
15 | - `wget` (on macOS, you can install this with `brew install wget`)
16 |
17 | Next, we'll need to load our data source.
18 |
19 | ### Data Ingestion
20 |
21 | Data ingestion happens in two steps.
22 |
23 | First, you should download the book / source and format it into something readable. In my case I downloaded the book from [here](https://www.navalmanack.com/) and converted it into `md` format using Calibre. Add that source to the project folder and update `FILENAME` in `ingest.ts` to match the filename.
24 |
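For reference, the source file name is hard-coded at the top of `ingest.ts`; point it at your own file:

```ts
const FILENAME = "NavalAlmanac.md"; // replace with your own source file
```
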
25 | Next, install dependencies and run the ingestion script:
26 |
27 | ```bash
28 | yarn && yarn ingest
29 | ```
30 |
31 | This will parse the data, split the text, create embeddings, store them in a vectorstore, and
32 | then save it to the `data/` directory.
33 |
34 | We save it to a directory because we only want to run the (expensive) data ingestion process once.
35 |
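After a successful run, `data/` should contain the serialized vectorstore:

```
data/
├── args.json      # index settings ({"space":"ip","numDimensions":1536})
├── docstore.json  # the stored text chunks, keyed by vector id
└── hnswlib.index  # the HNSW index itself
```
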
36 | The Next.js server relies on the presence of the `data/` directory. Please
37 | make sure to run this before moving on to the next step.
38 |
39 | ### Running the Server
40 |
41 | Then, run the development server:
42 |
43 | ```bash
44 | yarn dev
45 | ```
46 |
47 | Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
48 |
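Under the hood, the UI posts to `/api/chat` and reads the answer back as a server-sent-event stream. You can exercise the endpoint directly (the question here is just an example):

```bash
curl -N -X POST http://localhost:3000/api/chat \
  -H 'Content-Type: application/json' \
  -d '{"question": "What is specific knowledge?", "history": []}'
```

Tokens arrive as `data: {"data":"..."}` lines, and the stream is terminated with `data: [DONE]`.
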
49 | ### Deploying the server
50 |
51 | The production version of this repo is hosted on
52 | [fly](https://chat-langchainjs.fly.dev/). To deploy your own server on Fly, you
53 | can use the provided `fly.toml` and `Dockerfile` as a starting point.
54 |
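Assuming you have [flyctl](https://fly.io/docs/hands-on/install-flyctl/) installed, a deploy looks roughly like:

```bash
fly launch --no-deploy                  # or reuse the provided fly.toml
fly secrets set OPENAI_API_KEY="sk-..." # your own key here
fly deploy
```
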
55 | **Note:** As a Next.js app, Vercel seems like the natural place to
56 | host this site. Unfortunately, there are
57 | [limitations](https://github.com/websockets/ws/issues/1786#issuecomment-678315435)
58 | to using secure websockets via `ws` with Next.js: it requires a custom
59 | server, which cannot be hosted on Vercel. And even with server-sent
60 | events, Vercel's serverless functions appear to prohibit streaming
61 | responses (e.g. see
62 | [here](https://github.com/vercel/next.js/issues/9965#issuecomment-820156947)).
63 |
64 | ## Inspirations
65 |
66 | This repo borrows heavily from
67 |
68 | - [ChatLangChain](https://github.com/hwchase17/chat-langchain) - for the backend and data ingestion logic
69 | - [LangChain Chat NextJS](https://github.com/zahidkhawaja/langchain-chat-nextjs) - for the frontend.
70 |
71 | ## How To Run on Your Example
72 |
73 | If you'd like to chat with your own data, you need to:
74 |
75 | 1. Set up your own ingestion pipeline, and create a similar `data/` directory with a vectorstore in it.
76 | 2. Change the prompt used in `pages/api/util.ts` - right now it tells the chatbot to only answer questions about the Almanac of Naval Ravikant, so you'll need to update it to match your data (see the sketch below).
77 |
78 | The server should work just the same 😄
79 |
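For example, the sketch below (topic and wording are placeholders) shows the kind of edit to make to `SYSTEM_MESSAGE` in `pages/api/util.ts`:

```ts
const SYSTEM_MESSAGE = PromptTemplate.fromTemplate(
  `You are an AI assistant for <your book or docs here>.
You are given extracted parts of that source between two '========='.
Provide conversational answers in Markdown syntax.
If the context is empty or you don't know the answer, just say you didn't find anything on that topic. Don't try to make up an answer.
=========
{context}
=========`);
```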
--------------------------------------------------------------------------------
/data/args.json:
--------------------------------------------------------------------------------
1 | {"space":"ip","numDimensions":1536}
--------------------------------------------------------------------------------
/data/hnswlib.index:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/progremir/navalmanac/f64ba7f2415fe8db36c4d76a6b975b60caa9be5a/data/hnswlib.index
--------------------------------------------------------------------------------
/fly.toml:
--------------------------------------------------------------------------------
1 | # fly.toml file generated for navalmanac on 2023-03-05T23:11:37+03:00
2 |
3 | app = "navalmanac"
4 | kill_signal = "SIGINT"
5 | kill_timeout = 5
6 | processes = []
7 |
8 | [build]
9 | [build.args]
10 | NEXT_PUBLIC_EXAMPLE = "Value goes here"
11 |
12 | [env]
13 | PORT = "8080"
14 |
15 | [experimental]
16 | auto_rollback = true
17 |
18 | [[services]]
19 | http_checks = []
20 | internal_port = 8080
21 | processes = ["app"]
22 | protocol = "tcp"
23 | script_checks = []
24 | [services.concurrency]
25 | hard_limit = 25
26 | soft_limit = 20
27 | type = "connections"
28 |
29 | [[services.ports]]
30 | force_https = true
31 | handlers = ["http"]
32 | port = 80
33 |
34 | [[services.ports]]
35 | handlers = ["tls", "http"]
36 | port = 443
37 |
38 | [[services.tcp_checks]]
39 | grace_period = "1s"
40 | interval = "15s"
41 | restart_limit = 0
42 | timeout = "2s"
43 |
--------------------------------------------------------------------------------
/ingest.ts:
--------------------------------------------------------------------------------
1 | import { HNSWLib } from "langchain/vectorstores";
2 | import { OpenAIEmbeddings } from "langchain/embeddings";
3 | import { RecursiveCharacterTextSplitter } from "langchain/text_splitter";
4 | import { TextLoader } from "langchain/document_loaders";
5 |
6 | const FILENAME = "NavalAlmanac.md";
7 |
8 | export const run = async () => {
9 | const loader = new TextLoader(FILENAME);
10 | const rawDocs = await loader.load();
11 | console.log("Loader created.");
12 | /* Split the text into chunks */
13 | const textSplitter = new RecursiveCharacterTextSplitter({
14 | chunkSize: 1000,
15 | chunkOverlap: 200,
16 | });
17 | const docs = await textSplitter.splitDocuments(rawDocs);
18 | console.log("Docs split.");
19 |
20 | console.log("Creating vector store...");
21 | /* Create the vectorstore */
22 | const vectorStore = await HNSWLib.fromDocuments(docs, new OpenAIEmbeddings());
23 | await vectorStore.save("data");
24 | };
25 |
26 | (async () => {
27 | await run();
28 | console.log("done");
29 | })();
30 |
--------------------------------------------------------------------------------
/next.config.js:
--------------------------------------------------------------------------------
1 | /** @type {import('next').NextConfig} */
2 | const nextConfig = {
3 | reactStrictMode: true,
4 | }
5 |
6 | export default nextConfig
7 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "navalmanac",
3 | "version": "0.1.0",
4 | "private": true,
5 | "type": "module",
6 | "scripts": {
7 | "dev": "next dev",
8 | "build": "next build",
9 | "start": "next start",
10 | "lint": "next lint",
11 | "download": "sh ingest/download.sh",
12 | "ingest": "tsx -r dotenv/config ingest.ts"
13 | },
14 | "dependencies": {
15 | "@emotion/react": "^11.10.5",
16 | "@emotion/styled": "^11.10.5",
17 | "@microsoft/fetch-event-source": "^2.0.1",
18 | "@mui/material": "^5.11.4",
19 | "@next/font": "13.1.6",
20 | "dotenv": "^16.0.3",
21 | "eslint": "8.34.0",
22 | "eslint-config-next": "13.1.6",
23 | "hnswlib-node": "^1.2.0",
24 | "langchain": "0.0.22",
25 | "next": "13.1.6",
26 | "openai": "^3.1.0",
27 | "react": "18.2.0",
28 | "react-dom": "18.2.0",
29 | "react-markdown": "^8.0.5",
30 | "remark-gfm": "^3.0.1",
31 | "sharp": "^0.31.3",
32 | "ws": "^8.12.1"
33 | },
34 | "devDependencies": {
35 | "@types/adm-zip": "^0.5.0",
36 | "@types/node": "18.13.0",
37 | "@types/react": "18.0.28",
38 | "@types/react-dom": "18.0.11",
39 | "@types/ws": "^8.5.4",
40 | "cohere-ai": "^5.0.2",
41 | "ts-node": "^10.9.1",
42 | "tsx": "^3.12.3",
43 | "typescript": "4.9.5"
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/pages/_app.tsx:
--------------------------------------------------------------------------------
1 | import '@/styles/globals.css'
2 | import type { AppProps } from 'next/app'
3 | import Script from 'next/script'
4 |
5 | export default function App({ Component, pageProps }: AppProps) {
6 | return (
 7 |     <>
 8 |       {/* Two <Script> blocks (likely analytics) were stripped during extraction; */}
 9 |       {/* only the essential app shell is reconstructed here. */}
10 |       <Component {...pageProps} />
11 |     </>
12 |   )
13 | }
14 |
--------------------------------------------------------------------------------
/pages/_document.tsx:
--------------------------------------------------------------------------------
1 | import { Html, Head, Main, NextScript } from 'next/document'
2 |
3 | export default function Document() {
4 | return (
 5 |     <Html lang="en">
 6 |       <Head />
 7 |       <body>
 8 |         <Main />
 9 |         <NextScript />
10 |       </body>
11 |     </Html>
12 |   )
13 | }
14 |
--------------------------------------------------------------------------------
/pages/api/chat-stream.ts:
--------------------------------------------------------------------------------
1 | // Next.js API route support: https://nextjs.org/docs/api-routes/introduction
2 | import type { NextApiRequest, NextApiResponse } from 'next'
3 | import type { Server as HttpServer } from "http";
4 | import type { Server as HttpsServer } from "https";
5 | import { WebSocketServer } from 'ws';
6 | import { HNSWLib } from "langchain/vectorstores";
7 | import { OpenAIEmbeddings } from 'langchain/embeddings';
8 | import { makeChain } from "./util";
9 |
10 | export default async function handler(req: NextApiRequest, res: NextApiResponse) {
11 | if ((res.socket as any).server.wss) {
12 | res.end();
13 | return;
14 | }
15 |
16 | const server = (res.socket as any).server as HttpsServer | HttpServer;
17 | const wss = new WebSocketServer({ noServer: true });
18 | (res.socket as any).server.wss = wss;
19 |
20 | server.on('upgrade', (req, socket, head) => {
21 | if (!req.url?.includes('/_next/webpack-hmr')) {
22 | wss.handleUpgrade(req, socket, head, (ws) => {
23 | wss.emit('connection', ws, req);
24 | });
25 | }
26 | });
27 |
28 | wss.on('connection', (ws) => {
29 | const sendResponse = ({ sender, message, type }: { sender: string, message: string, type: string }) => {
30 | ws.send(JSON.stringify({ sender, message, type }));
31 | };
32 |
33 | const onNewToken = (token: string) => {
34 | sendResponse({ sender: 'bot', message: token, type: 'stream' });
35 | }
36 |
37 | const chainPromise = HNSWLib.load("data", new OpenAIEmbeddings()).then((vs) => makeChain(vs, onNewToken));
38 | const chatHistory: [string, string][] = [];
39 | const encoder = new TextEncoder();
40 |
41 |
42 | ws.on('message', async (data) => {
43 | try {
44 | const question = data.toString();
45 | sendResponse({ sender: 'you', message: question, type: 'stream' });
46 |
47 | sendResponse({ sender: 'bot', message: "", type: 'start' });
48 | const chain = await chainPromise;
49 |
50 | const result = await chain.call({
51 | question,
52 | chat_history: chatHistory,
53 | });
54 | chatHistory.push([question, result.answer]);
55 |
56 | sendResponse({ sender: 'bot', message: "", type: 'end' });
57 | } catch (e) {
58 | sendResponse({
59 | sender: 'bot',
60 | message: "Sorry, something went wrong. Try again.",
61 | type: 'error'
62 | });
63 | }
64 | })
65 | });
66 |
67 | res.end();
68 | }
69 |
--------------------------------------------------------------------------------
/pages/api/chat.ts:
--------------------------------------------------------------------------------
1 | // Next.js API route support: https://nextjs.org/docs/api-routes/introduction
2 | import type { NextApiRequest, NextApiResponse } from "next";
3 | import path from "path";
4 | import { HNSWLib } from "langchain/vectorstores";
5 | import { OpenAIEmbeddings } from "langchain/embeddings";
6 | import { makeChain } from "./util";
7 |
8 | export default async function handler(
9 | req: NextApiRequest,
10 | res: NextApiResponse
11 | ) {
12 | const body = req.body;
13 | const dir = path.resolve(process.cwd(), "data");
14 |
15 | const vectorstore = await HNSWLib.load(dir, new OpenAIEmbeddings());
16 | res.writeHead(200, {
17 | "Content-Type": "text/event-stream",
18 | // Important to set no-transform to avoid compression, which will delay
19 | // writing response chunks to the client.
20 | // See https://github.com/vercel/next.js/issues/9965
21 | "Cache-Control": "no-cache, no-transform",
22 | Connection: "keep-alive",
23 | });
24 |
25 | const sendData = (data: string) => {
26 | res.write(`data: ${data}\n\n`);
27 | };
28 |
29 | sendData(JSON.stringify({ data: "" }));
30 | const chain = makeChain(vectorstore, (token: string) => {
31 | sendData(JSON.stringify({ data: token }));
32 | });
33 |
34 | try {
35 | await chain.call({
36 | question: body.question,
37 | chat_history: body.history,
38 | });
39 | } catch (err) {
40 | console.error(err);
41 | // Ignore error
42 | } finally {
43 | sendData("[DONE]");
44 | res.end();
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/pages/api/util.ts:
--------------------------------------------------------------------------------
1 | import { OpenAIChat, BaseLLM } from "langchain/llms";
2 | import { Document } from "langchain/document";
3 | import { LLMChain, VectorDBQAChain, ChainValues, StuffDocumentsChain } from "langchain/chains";
4 | import { HNSWLib } from "langchain/vectorstores";
5 | import { PromptTemplate } from "langchain/prompts";
6 | import { LLMChainInput } from "langchain/dist/chains/llm_chain";
7 |
8 | const SYSTEM_MESSAGE = PromptTemplate.fromTemplate(
9 | `You are an AI assistant for the "Almanac of Naval Ravikant" book. This book collects and curates Naval’s wisdom from Twitter, Podcasts, and Essays over the past decade.
10 | The entirety of the book (and bonus content!) is free to read on https://www.navalmanack.com/, as well as complete pdf and e-reader versions for free download.
11 | You are given the following extracted parts of the book. The context is between two '========='. Provide conversational answers in Markdown syntax with links formatted as hyperlinks.
12 | If the context is empty or you don't know the answer, just tell them that you didn't find anything regarding that topic. Don't try to make up an answer.
13 | If the question is not about the book's content or has nothing to do with Naval Ravikant himself, politely inform them that you are tuned to only answer questions about the Almanac of Naval Ravikant's content.
14 | =========
15 | {context}
16 | =========`);
17 |
18 | const QA_PROMPT = PromptTemplate.fromTemplate(`{question}`);
19 |
20 | // VectorDBQAChain uses a vector store to find the documents most similar to the question,
21 | // then uses a documents chain to combine those documents into a single string,
22 | // and finally uses an LLMChain to generate the answer.
23 | // Before: based on the chat history, make a standalone question -> find related docs from the question -> combine docs and insert them as context -> generate answer
24 | // After: find related docs from the question -> combine docs and insert them into a predefined system message -> pass in the chat history -> generate answer
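// For example (illustrative values): with chat_history = [["What is leverage?", "Leverage comes from..."]],
// the chat model ultimately receives messages in this order:
//   system (instructions + retrieved {context}) -> canned assistant greeting
//   -> user question 1 -> assistant answer 1 -> the new question (formatted via QA_PROMPT)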
25 |
26 | export class OpenAIChatLLMChain extends LLMChain implements LLMChainInput {
27 | async _call(values: ChainValues): Promise<ChainValues> {
28 | let stop;
29 | if ("stop" in values && Array.isArray(values.stop)) {
30 | stop = values.stop;
31 | }
32 | const { chat_history } = values;
33 | const prefixMessages = chat_history.map((message: string[]) => {
34 | return [
35 | {
36 | role: "user",
37 | content: message[0]
38 | },
39 | {
40 | role: "assistant",
41 | content: message[1]
42 | }
43 | ]
44 | }).flat();
45 |
46 | const formattedSystemMessage = await SYSTEM_MESSAGE.format({ context: values.context })
47 | // @ts-ignore
48 | this.llm.prefixMessages = [
49 | {
50 | role: "system",
51 | content: formattedSystemMessage
52 | },
53 | {
54 | role: "assistant",
55 | content: "Hi, I'm an AI assistant for the Almanac of Naval Ravikant. How can I help you?"
56 | },
57 | ...prefixMessages];
58 | const formattedString = await this.prompt.format(values);
59 | const llmResult = await this.llm.call(formattedString, stop);
60 | const result = { [this.outputKey]: llmResult };
61 | return result;
62 | }
63 | }
64 |
65 | class ChatStuffDocumentsChain extends StuffDocumentsChain {
66 | async _call(values: ChainValues): Promise<ChainValues> {
67 | if (!(this.inputKey in values)) {
68 | throw new Error(`Document key ${this.inputKey} not found.`);
69 | }
70 | const { [this.inputKey]: docs, ...rest } = values;
71 | const texts = (docs as Document[]).map(({ pageContent }) => pageContent);
72 | const text = texts.join("\n\n");
73 | const result = await this.llmChain.call({
74 | ...rest,
75 | [this.documentVariableName]: text,
76 | });
77 | return result;
78 | }
79 | }
80 |
81 | class OpenAIChatVectorDBQAChain extends VectorDBQAChain {
82 | async _call(values: ChainValues): Promise<ChainValues> {
83 | if (!(this.inputKey in values)) {
84 | throw new Error(`Question key ${this.inputKey} not found.`);
85 | }
86 | const question: string = values[this.inputKey];
87 | const docs = await this.vectorstore.similaritySearch(question, this.k);
88 | // all of this just to pass chat history to the LLMChain
89 | const inputs = { question, input_documents: docs, chat_history: values.chat_history };
90 | const result = await this.combineDocumentsChain.call(inputs);
91 | return result;
92 | }
93 | }
94 |
95 | interface qaParams {
96 | prompt?: PromptTemplate
97 | }
98 |
99 | // use this custom qa chain instead of the default one
100 | const loadQAChain = (llm: BaseLLM, params: qaParams = {}) => {
101 | const { prompt = QA_PROMPT } = params;
102 | const llmChain = new OpenAIChatLLMChain({ prompt, llm });
103 | const chain = new ChatStuffDocumentsChain({ llmChain });
104 | return chain;
105 | }
106 |
107 |
108 | export const makeChain = (vectorstore: HNSWLib, onTokenStream?: (token: string) => void) => {
109 | const docChain = loadQAChain(
110 | new OpenAIChat({
111 | temperature: 0,
112 | streaming: Boolean(onTokenStream),
113 | callbackManager: {
114 | handleNewToken: onTokenStream,
115 | }
116 | }),
117 | { prompt: QA_PROMPT },
118 | );
119 |
120 | return new OpenAIChatVectorDBQAChain({
121 | vectorstore,
122 | combineDocumentsChain: docChain,
123 | inputKey: 'question',
124 | });
125 | }
126 |
--------------------------------------------------------------------------------
/pages/index.tsx:
--------------------------------------------------------------------------------
1 | import { useState, useRef, useEffect, useMemo } from 'react'
2 | import Head from 'next/head'
3 | import styles from '../styles/Home.module.css'
4 | import Image from 'next/image';
5 | import Link from 'next/link';
6 | import ReactMarkdown from 'react-markdown';
7 | import CircularProgress from '@mui/material/CircularProgress';
8 | import { fetchEventSource } from '@microsoft/fetch-event-source';
9 | import remarkGfm from "remark-gfm";
10 |
11 | type Message = {
12 | type: "apiMessage" | "userMessage";
13 | message: string;
14 | isStreaming?: boolean;
15 | }
16 |
17 | export default function Home() {
18 | const [userInput, setUserInput] = useState("");
19 | const [loading, setLoading] = useState(false);
20 | const [messageState, setMessageState] = useState<{ messages: Message[], pending?: string, history: [string, string][] }>({
21 | messages: [{
22 | "message": "Hi, I'm an AI assistant for the Almanac of Naval Ravikant. How can I help you?",
23 | "type": "apiMessage"
24 | }],
25 | history: []
26 | });
27 | const { messages, pending, history } = messageState;
28 |
29 | const messageListRef = useRef<HTMLDivElement>(null);
30 | const textAreaRef = useRef<HTMLTextAreaElement>(null);
31 |
32 | // Auto scroll chat to bottom
33 | useEffect(() => {
34 | const messageList = messageListRef.current;
35 | if (messageList) {
36 | messageList.scrollTop = messageList.scrollHeight;
37 | }
38 | }, [pending]);
39 |
40 | // Focus on text field on load
41 | useEffect(() => {
42 | textAreaRef.current?.focus();
43 | }, [loading]);
44 |
45 | // Handle form submission
46 | const handleSubmit = async (e: any) => {
47 | e.preventDefault();
48 |
49 | const question = userInput.trim();
50 | if (question === "") {
51 | return;
52 | }
53 |
54 | setMessageState(state => ({
55 | ...state,
56 | messages: [...state.messages, {
57 | type: "userMessage",
58 | message: question
59 | }],
60 | pending: undefined
61 | }));
62 |
63 | setLoading(true);
64 | setUserInput("");
65 | setMessageState(state => ({ ...state, pending: "" }));
66 |
67 | const ctrl = new AbortController();
68 |
69 | fetchEventSource('/api/chat', {
70 | method: 'POST',
71 | headers: {
72 | 'Content-Type': 'application/json',
73 | },
74 | body: JSON.stringify({
75 | question,
76 | history
77 | }),
78 | signal: ctrl.signal,
79 | onmessage: (event) => {
80 | if (event.data === "[DONE]") {
81 | setMessageState(state => ({
82 | history: [...state.history, [question, state.pending ?? ""]],
83 | messages: [...state.messages, {
84 | type: "apiMessage",
85 | message: state.pending ?? "",
86 | }],
87 | pending: undefined
88 | }));
89 | setLoading(false);
90 | ctrl.abort();
91 | } else {
92 | const data = JSON.parse(event.data);
93 | setMessageState(state => ({
94 | ...state,
95 | pending: (state.pending ?? "") + data.data,
96 | }));
97 | }
98 | }
99 | });
100 | }
101 |
102 | // Prevent blank submissions and allow for multiline input
103 | const handleEnter = (e: any) => {
104 | if (e.key === "Enter" && userInput) {
105 | if (!e.shiftKey) {
106 | handleSubmit(e);
107 | }
108 | } else if (e.key === "Enter") {
109 | e.preventDefault();
110 | }
111 | };
112 |
113 | const chatMessages = useMemo(() => {
114 | return [...messages, ...(pending ? [{ type: "apiMessage", message: pending }] : [])];
115 | }, [messages, pending]);
116 |
117 |   return (
118 |     <>
119 |       {/* The original <Head>, nav, form, and footer markup was stripped during
120 |           extraction; the JSX below is a minimal reconstruction from the surviving
121 |           logic, class names, and assets (icon sizes, placeholder text, and the
122 |           send-button icon are assumptions). */}
123 |       <Head>
124 |         <title>Almanac of Naval Ravikant: Chatbot</title>
125 |       </Head>
126 |       <div className={styles.topnav}>
127 |         <div className={styles.navlogo}>
128 |           <Link href="/">Almanac of Naval Ravikant: Chatbot</Link>
129 |         </div>
130 |       </div>
131 |       <main className={styles.main}>
132 |         <div className={styles.cloud}>
133 |           <div ref={messageListRef} className={styles.messagelist}>
134 |             {chatMessages.map((message, index) => {
135 |               let icon;
136 |               let className;
137 |
138 |               if (message.type === "apiMessage") {
139 |                 icon = <Image src="/chatIcon.png" alt="AI" width={30} height={30} className={styles.boticon} priority />;
140 |                 className = styles.apimessage;
141 |               } else {
142 |                 icon = <Image src="/usericon.png" alt="Me" width={30} height={30} className={styles.usericon} priority />;
143 |                 // The latest message sent by the user will be animated while waiting for a response
144 |                 className = loading && index === chatMessages.length - 1
145 |                   ? styles.usermessagewaiting
146 |                   : styles.usermessage;
147 |               }
148 |               return (
149 |                 <div key={index} className={className}>
150 |                   {icon}
151 |                   <div className={styles.markdownanswer}>
152 |                     <ReactMarkdown remarkPlugins={[remarkGfm]}>
153 |                       {message.message}
154 |                     </ReactMarkdown>
155 |                   </div>
156 |                 </div>
157 |               )
158 |             })}
159 |           </div>
160 |         </div>
161 |         <div className={styles.center}>
162 |           <form onSubmit={handleSubmit} className={styles.cloudform}>
163 |             <textarea
164 |               ref={textAreaRef}
165 |               disabled={loading}
166 |               rows={1}
167 |               placeholder={loading ? "Waiting for response..." : "Type your question..."}
168 |               value={userInput}
169 |               onChange={(e) => setUserInput(e.target.value)}
170 |               onKeyDown={handleEnter}
171 |               className={styles.textarea}
172 |             />
173 |             <button type="submit" disabled={loading} className={styles.generatebutton}>
174 |               {loading
175 |                 ? <div className={styles.loadingwheel}><CircularProgress color="inherit" size={20} /></div>
176 |                 : <span className={styles.svgicon}>→</span>}
177 |             </button>
178 |           </form>
179 |         </div>
180 |       </main>
181 |     </>
182 |   )
183 | }
184 |
--------------------------------------------------------------------------------
/public/android-chrome-192x192.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/progremir/navalmanac/f64ba7f2415fe8db36c4d76a6b975b60caa9be5a/public/android-chrome-192x192.png
--------------------------------------------------------------------------------
/public/android-chrome-512x512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/progremir/navalmanac/f64ba7f2415fe8db36c4d76a6b975b60caa9be5a/public/android-chrome-512x512.png
--------------------------------------------------------------------------------
/public/apple-touch-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/progremir/navalmanac/f64ba7f2415fe8db36c4d76a6b975b60caa9be5a/public/apple-touch-icon.png
--------------------------------------------------------------------------------
/public/chatIcon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/progremir/navalmanac/f64ba7f2415fe8db36c4d76a6b975b60caa9be5a/public/chatIcon.png
--------------------------------------------------------------------------------
/public/favicon-16x16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/progremir/navalmanac/f64ba7f2415fe8db36c4d76a6b975b60caa9be5a/public/favicon-16x16.png
--------------------------------------------------------------------------------
/public/favicon-32x32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/progremir/navalmanac/f64ba7f2415fe8db36c4d76a6b975b60caa9be5a/public/favicon-32x32.png
--------------------------------------------------------------------------------
/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/progremir/navalmanac/f64ba7f2415fe8db36c4d76a6b975b60caa9be5a/public/favicon.ico
--------------------------------------------------------------------------------
/public/og-image.svg:
--------------------------------------------------------------------------------
1 | <!-- SVG markup was stripped during extraction -->
--------------------------------------------------------------------------------
/public/robots.txt:
--------------------------------------------------------------------------------
1 | # Allow all crawlers
2 | User-agent: *
3 | Allow: /
--------------------------------------------------------------------------------
/public/site.webmanifest:
--------------------------------------------------------------------------------
1 | {"name":"Almanac of Naval Ravikant: Chatbot","icons":[{"src":"/android-chrome-192x192.png","sizes":"192x192","type":"image/png"},{"src":"/android-chrome-512x512.png","sizes":"512x512","type":"image/png"}],"theme_color":"#ffffff","background_color":"#ffffff","display":"standalone"}
--------------------------------------------------------------------------------
/public/usericon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/progremir/navalmanac/f64ba7f2415fe8db36c4d76a6b975b60caa9be5a/public/usericon.png
--------------------------------------------------------------------------------
/styles/Home.module.css:
--------------------------------------------------------------------------------
1 | .main {
2 | display: flex;
3 | flex-direction: column;
4 | justify-content: space-between;
5 | align-items: center;
6 | padding: 2rem;
7 | }
8 |
9 | .header {
10 | width: auto;
11 | }
12 |
13 | .header p {
14 | text-align: center;
15 | }
16 |
17 | .topnav {
18 | background-color: #141619;
19 | border-bottom: 1px solid #30373d;
20 | display: flex;
21 | justify-content: space-between;
22 | padding: 1rem 0.75rem 1rem 0.75rem;
23 | align-items: center;
24 | }
25 |
26 | .navlogo, .navlinks a {
27 | font-weight: 500;
28 | }
29 |
30 | .navlogo {
31 | font-size: 1.25rem;
32 | margin-left: 1rem;
33 | }
34 |
35 | .navlinks {
36 | width: 20rem;
37 | display: flex;
38 | justify-content: space-evenly;
39 | align-items: center;
40 | }
41 |
42 | .apptitle {
43 | font-size: 2.5rem;
44 | font-weight: 500;
45 | display: flex;
46 | justify-content: center;
47 | }
48 |
49 | .appdescription {
50 | font-size: 1.1rem;
51 | margin: 1rem;
52 | }
53 |
54 | .link {
55 | font-weight: 500;
56 | }
57 |
58 | .cloudform {
59 | position: relative;
60 | }
61 |
62 | .textarea {
63 | position: relative;
64 | resize: none;
65 | font-size: 1.1rem;
66 | padding: 1rem 2rem 1rem 2rem;
67 | width: 75vw;
68 | border-radius: 0.5rem;
69 | border: 1px solid #30373d;
70 | background: #070809;
71 | color: #ECECF1;
72 | outline: none;
73 | }
74 |
75 | .textarea:disabled {
76 | opacity: 0.5;
77 | }
78 |
79 | .textarea::placeholder {
80 | color: #5f6368;
81 | }
82 |
83 | .generatebutton {
84 | position: absolute;
85 | top: 0.87rem;
86 | right: 1rem;
87 | color: rgb(165, 162, 162);
88 | background: none;
89 | padding: 0.3rem;
90 | border: none;
91 | display: flex;
92 | }
93 |
94 | .loadingwheel {
95 | position: absolute;
96 | top: 0.2rem;
97 | right: 0.25rem;
98 | }
99 |
100 | .svgicon {
101 | transform: rotate(90deg);
102 | width: 1.2em;
103 | height: 1.2em;
104 | fill: currentColor;
105 | }
106 |
107 | .generatebutton:hover {
108 | background: #1f2227;
109 | border-radius: 0.2rem;
110 | }
111 |
112 | .generatebutton:disabled {
113 | opacity: 0.9;
114 | cursor: not-allowed;
115 | background: none;
116 | }
117 |
118 | .messagelist {
119 | width: 100%;
120 | height: 100%;
121 | overflow-y: scroll;
122 | border-radius: 0.5rem;
123 | }
124 |
125 | .messagelistloading {
126 | display: flex;
127 | width: 100%;
128 | justify-content: center;
129 | margin-top: 1rem;
130 | }
131 |
132 | .usermessage {
133 | background: #070809;
134 | padding: 1.5rem;
135 | color: #ECECF1;
136 | }
137 |
138 | .usermessagewaiting {
139 | padding: 1.5rem;
140 | color: #ECECF1;
141 | background: linear-gradient(to left, #070809, #1a1c20, #070809);
142 | background-size: 200% 200%;
143 | background-position: -100% 0;
144 | animation: loading-gradient 2s ease-in-out infinite;
145 | animation-direction: alternate;
146 | animation-name: loading-gradient;
147 | }
148 |
149 | @keyframes loading-gradient {
150 | 0% {
151 | background-position: -100% 0;
152 | }
153 | 100% {
154 | background-position: 100% 0;
155 | }
156 | }
157 |
158 | .apimessage {
159 | background: #141619;
160 | padding: 1.5rem;
161 | color: #ECECF1;
162 | animation: fadein 0.5s;
163 | }
164 |
165 | @keyframes fadein {
166 | from { opacity: 0; }
167 | to { opacity: 1; }
168 | }
169 |
170 | .apimessage, .usermessage, .usermessagewaiting {
171 | display: flex;
172 | }
173 |
174 | .markdownanswer {
175 | line-height: 1.75;
176 | }
177 |
178 | .markdownanswer a:hover {
179 | opacity: 0.8;
180 | }
181 |
182 | .markdownanswer a {
183 | color: #16bed7;
184 | font-weight: 500;
185 | }
186 |
187 | .markdownanswer code {
188 | color: #15cb19;
189 | font-weight: 500;
190 | white-space: pre-wrap !important;
191 | }
192 |
193 | .markdownanswer ol, .markdownanswer ul {
194 | margin: 1rem;
195 | }
196 |
197 | .boticon, .usericon {
198 | margin-right: 1rem;
199 | border-radius: 0.1rem;
200 | }
201 |
202 | .markdownanswer h1, .markdownanswer h2, .markdownanswer h3 {
203 | font-size: inherit;
204 | }
205 |
206 |
207 | .center {
208 | display: flex;
209 | justify-content: center;
210 | align-items: center;
211 | position: relative;
212 | padding: 2rem 0;
213 | flex-direction: column;
214 | }
215 |
216 | .cloud {
217 | width: 75vw;
218 | height: 65vh;
219 | background: #070809;
220 | border-radius: 0.5rem;
221 | border: 1px solid #30373d;
222 | display: flex;
223 | justify-content: center;
224 | align-items: center;
225 | }
226 |
227 | .pointsnormal {
228 | width: 90%;
229 | height: 90%;
230 | }
231 |
232 | .pointsdim {
233 | width: 90%;
234 | height: 90%;
235 | opacity: 0.25;
236 | }
237 |
238 | .footer {
239 | color: #5f6368;
240 | font-size: 0.8rem;
241 | margin: 1.5rem;
242 | }
243 |
244 | .footer a {
245 | font-weight: 500;
246 | color: #7a7d81;
247 | }
248 |
249 | .footer a:hover {
250 | opacity: 0.8;
251 | }
252 |
253 | /* Mobile optimization */
254 | @media (max-width: 600px) {
255 |
256 | .main {
257 | padding: 1rem;
258 | max-height: 90vh;
259 | }
260 |
261 | .cloud {
262 | width: 22rem;
263 | height: 28rem;
264 | }
265 | .textarea {
266 | width: 22rem;
267 | }
268 | .topnav {
269 | border: 1px solid black;
270 | align-items: center;
271 | padding: 0.85rem 0.75rem 0.85rem 0.75rem;
272 | }
273 |
274 | .markdownanswer code {
275 | white-space: pre-wrap !important;
276 | }
277 |
278 | .footer {
279 | font-size: 0.7rem;
280 | width: 100%;
281 | text-align: center;
282 | }
283 | }
284 |
285 | @keyframes fadeIn {
286 | from {
287 | opacity: 0;
288 | }
289 | to {
290 | opacity: 1;
291 | }
292 | }
293 |
294 | .fadeIn {
295 | animation: fadeIn 1s ease-in-out forwards;
296 | opacity: 0;
297 | }
298 |
299 |
--------------------------------------------------------------------------------
/styles/globals.css:
--------------------------------------------------------------------------------
1 | @import url('https://fonts.googleapis.com/css2?family=Inter:wght@400;500&display=swap');
2 |
3 | * {
4 | box-sizing: border-box;
5 | padding: 0;
6 | margin: 0;
7 | font-family: 'Inter', sans-serif;
8 | }
9 |
10 | html,
11 | body {
12 | max-width: 100vw;
13 | overflow-x: hidden;
14 | }
15 |
16 | body {
17 | color: snow;
18 | background: #070809;
19 | }
20 |
21 | a {
22 | color: inherit;
23 | text-decoration: none;
24 | }
25 |
26 | a:hover {
27 | opacity: 0.8;
28 | }
29 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "es6",
4 | "lib": ["dom", "dom.iterable", "esnext"],
5 | "allowJs": true,
6 | "skipLibCheck": true,
7 | "strict": true,
8 | "forceConsistentCasingInFileNames": true,
9 | "noEmit": true,
10 | "esModuleInterop": true,
11 | "module": "esnext",
12 | "moduleResolution": "node",
13 | "resolveJsonModule": true,
14 | "isolatedModules": true,
15 | "jsx": "preserve",
16 | "incremental": true,
17 | "baseUrl": ".",
18 | "paths": {
19 | "@/*": ["./*"]
20 | }
21 | },
22 | "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx"],
23 | "exclude": ["node_modules"]
24 | }
25 |
--------------------------------------------------------------------------------
/vercel.json:
--------------------------------------------------------------------------------
1 | {
2 | "functions": {
3 | "pages/api/chat.js": {
4 | "includeFiles": "data/**"
5 | }
6 | }
7 | }
8 |
9 |
--------------------------------------------------------------------------------