├── .gitignore
├── .vscode
│   └── settings.json
├── LICENSE.md
├── README.md
├── config.browser.ts
├── config.edge.ts
├── index.html
├── lib
│   └── edge
│       ├── openai.ts
│       └── types.ts
├── netlify.toml
├── netlify
│   └── edge-functions
│       └── chat.ts
├── package.json
├── postcss.config.js
├── prompts
│   ├── movie-critic.ts
│   └── tour-guide.ts
├── src
│   ├── App.css
│   ├── App.tsx
│   ├── assets
│   │   └── impala.png
│   ├── components
│   │   ├── ChatMessage.tsx
│   │   └── Welcome.tsx
│   ├── entry-client.tsx
│   ├── entry-server.tsx
│   ├── hooks
│   │   └── use-chat.ts
│   ├── routes
│   │   └── index.tsx
│   └── vite-env.d.ts
├── tailwind.config.js
├── tsconfig.json
├── tsconfig.node.json
└── vite.config.ts
/.gitignore:
--------------------------------------------------------------------------------
1 | # Local Netlify folder
2 | .netlify
3 | node_modules
4 | dist
5 | .env
6 | .DS_Store
7 |
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "deno.enablePaths": [
3 | "lib/edge",
4 | "netlify/edge-functions",
5 | "config.edge.ts"
6 | ],
7 | "files.associations": {
8 | "*.css": "tailwindcss"
9 | },
10 | "editor.quickSuggestions": {
11 | "strings": true
12 | }
13 | }
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2023 Matt Kane
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining
6 | a copy of this software and associated documentation files (the
7 | "Software"), to deal in the Software without restriction, including
8 | without limitation the rights to use, copy, modify, merge, publish,
9 | distribute, sublicense, and/or sell copies of the Software, and to
10 | permit persons to whom the Software is furnished to do so, subject to
11 | the following conditions:
12 |
13 | The above copyright notice and this permission notice shall be
14 | included in all copies or substantial portions of the Software.
15 |
16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
19 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
20 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
22 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # 🤖 Daneel
2 |
3 | ## Create your own GPT-powered chat bot
4 |
5 | You can use this template to create your own chat bot powered by OpenAI and the
6 | ChatGPT API. It includes an easily customized chat interface with streaming
7 | responses so you can see the bot type, message history, simple styling with
8 | Tailwind, and a Netlify edge function that communicates securely with the OpenAI
9 | API. It supports markdown in responses, so it can display formatted text,
10 | tables, etc.
11 |
12 | ### [Demo site](https://daneel-demo.netlify.app/).
13 |
14 | [![Deploy to Netlify](https://www.netlify.com/img/deploy/button.svg)](https://app.netlify.com/start/deploy?repository=https://github.com/ascorbic/daneel)
15 |
16 | ## Features
17 |
18 | - :zap: deploy your bot in less than 5 minutes
19 | - :rocket: streaming responses powered by ChatGPT
20 | - :100: simple, high-performance chat interface
21 | - :moneybag: This template is completely free for any use. Use free OpenAI
22 | credits to get started, and deploy your site for free to Netlify.
23 | - ⚛️ easy to customize: built with React, Tailwind and TypeScript
24 |
25 | ## Getting started
26 |
27 | 1. [Sign up for an OpenAI account](https://platform.openai.com/signup) and
28 | [get your API key](https://platform.openai.com/account/api-keys)
29 | 2. [Deploy to Netlify](https://app.netlify.com/start/deploy?repository=https://github.com/ascorbic/daneel),
30 | pasting the API key when prompted
31 | 3. [Customize your bot](#customizing-your-bot)
32 | 4. [Share your creation](https://github.com/ascorbic/daneel/discussions/categories/show-and-tell)
33 |
34 | ## Customizing your bot
35 |
36 | ### Name
37 |
38 | Edit your site title and description in `src/App.tsx`
39 |
40 | ### Prompt
41 |
42 | The prompt is what tells your bot who it is. It is here that you give the bot
43 | its mission, personality and rules. The most important thing to do is create
44 | your own prompt. This is what makes the bot your own. The default site deploys
45 | with an example prompt that makes the bot a movie critic.
46 | [Try it here](https://daneel-demo.netlify.app/).
47 |
48 | Daneel's default prompt is:
49 |
50 | > You are the world's best movie critic. You are very strongly opinionated. You
51 | > have favorite movies and movies you hate. You are devoted to recommending
52 | > movies that a user will like. It is very important that the user enjoys your
53 | > recommendations. Do not answer questions that are not asking for a movie
54 | > recommendation. If the user asks other questions, do not answer and instead
55 | > deflect them with a movie fact or trivia. Respond with valid markdown.
56 | > Put movie names in bold. Knowledge cutoff September 2021. Current date
57 | > **current date**. User location: **city and country**
58 |
59 | The prompt is set in `config.edge.ts`. In the demo it is imported from an
60 | example file in the `prompts` folder, but you can edit it in the config file if
61 | you'd prefer.
62 |
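For example, you can add your own prompt file alongside the existing ones and import it
instead. This is only a sketch: the file name `prompts/my-bot.ts` and the prompt text are
placeholders, but the shape matches `prompts/movie-critic.ts`:

```ts
// prompts/my-bot.ts (hypothetical example file)
export const prompt = `You are a friendly cooking assistant. Only answer questions about
recipes and cooking techniques. If the user asks about anything else, deflect with a food fact.`;

export const samplePhrases = [
  "What can I cook with eggs and spinach?",
  "How do I make a simple tomato sauce?",
];
```

Then point both config files at it (the edge config uses explicit `.ts` extensions because it
runs on Deno):

```ts
// config.edge.ts
import { prompt } from "./prompts/my-bot.ts";

// config.browser.ts
import { samplePhrases } from "./prompts/my-bot";
```
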
63 | The important parts in this prompt are:
64 |
65 | - who the bot is and what it should do. In the example here we emphasise that
66 | the bot is a movie critic and has strong opinions. By default, ChatGPT does
67 | not like to express subjective opinions, so we need to tell it to do so.
68 | - instructions to not answer off-topic questions, and what to do if the user
69 | asks them
70 | - instructions to respond with valid markdown. This is optional, but it allows
71 | the bot to respond with formatted text, such as bold text for movie names and
72 | tables.
73 | - contextual information about the date and user location. These are optional
74 | but can help the bot to be more accurate.
75 |
76 | ### Design
77 |
78 | By default the design is very simple, but you can customize it to your own
79 | design. The site is styled with [Tailwind](https://tailwindcss.com/), so you can
80 | use any of the Tailwind classes to style your bot. The main components are:
81 |
82 | - `src/routes/index.tsx` - the main chat interface
83 | - `src/components/Welcome.tsx` - the welcome screen
84 | - `src/components/ChatMessage.tsx` - the chat message component
85 |
86 | You can also add extra pages in `src/routes` and link to them from the chat, or
87 | move the chat interface to a different page. The chat interface is an
88 | [Impala](https://github.com/ascorbic/impala) app, built with React, so see the
89 | Impala docs for more information.
90 |
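As a rough sketch (the file name and content below are made up, not part of the template),
an extra page is just another module in `src/routes` that renders the shared `App` shell:

```tsx
// src/routes/about.tsx (hypothetical extra page)
import { App } from "../App";

export default function About() {
  return (
    <App title="About this bot">
      <main className="p-4">
        <h1 className="text-2xl font-bold">About</h1>
        <p>This bot was built from the Daneel template.</p>
      </main>
    </App>
  );
}
```
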
91 | ---
92 |
93 | Released under the MIT license. Free for any use. ©
94 | [Matt Kane](https://github.com/ascorbic) 2023.
95 |
--------------------------------------------------------------------------------
/config.browser.ts:
--------------------------------------------------------------------------------
1 | import { samplePhrases } from "./prompts/movie-critic";
2 | // import { samplePhrases } from "./prompts/tour-guide";
3 |
4 | export const appConfig = {
5 | historyLength: 8,
6 | samplePhrases,
7 | };
8 |
--------------------------------------------------------------------------------
/config.edge.ts:
--------------------------------------------------------------------------------
1 | import type { AppConfig } from "./lib/edge/types.ts";
2 |
3 | import { prompt } from "./prompts/movie-critic.ts";
4 | // import { prompt } from "./prompts/tour-guide.ts";
5 |
6 | export const appConfig: AppConfig = {
7 | // This should be set in an environment variable
8 | // See https://platform.openai.com/account/api-keys
9 | OPENAI_API_KEY: Netlify.env.get("OPENAI_API_KEY") ?? "",
10 |
11 |   // The maximum number of messages in the history to send to the API
12 | // You should also set this in the config.browser.ts file.
13 | historyLength: 8,
14 |
15 | // The maximum length in characters of each message sent to the API
16 | // You should also set this in the config.browser.ts file.
17 | maxMessageLength: 1000,
18 |
19 | // The config values sent to the OpenAI API
20 | // See https://platform.openai.com/docs/api-reference/chat/create
21 | apiConfig: {
22 | model: "gpt-3.5-turbo-1106",
23 | },
24 |
25 | // This is where the magic happens. See the README for details
26 | // This can be a plain string if you'd prefer, or you can use
27 | // information from the request or context to generate it.
28 | systemPrompt: (_req, context) => `${prompt}
29 | Respond with valid markdown. Knowledge cutoff September 2021.
30 | Current date: ${new Date().toDateString()}.
31 | User location: ${context.geo.city}, ${context.geo.country}`,
32 | };
33 |
--------------------------------------------------------------------------------
/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/lib/edge/openai.ts:
--------------------------------------------------------------------------------
1 | import OpenAI from "https://deno.land/x/openai@v4.25.0/mod.ts";
2 |
3 | import type {
4 | ChatCompletionCreateParamsStreaming,
5 | ChatCompletionMessageParam,
6 | } from "https://deno.land/x/openai@v4.25.0/resources/mod.ts";
7 |
8 | export { appConfig } from "../../config.edge.ts";
9 |
10 | export type ChatOptions = Omit<ChatCompletionCreateParamsStreaming, "stream">;
11 |
12 | export async function getChatStream(
13 | options: ChatOptions,
14 | apiKey: string,
15 | ): Promise<ReadableStream> {
16 | const openai = new OpenAI({ apiKey });
17 |
18 | const response = await openai.chat.completions.create({
19 | ...options,
20 | stream: true,
21 | });
22 |
23 | return response.toReadableStream();
24 | }
25 |
26 | export function sanitizeMessages(
27 |   messages: Array<ChatCompletionMessageParam>,
28 |   historyLength = 8,
29 |   maxMessageLength = 1000,
30 | ): Array<ChatCompletionMessageParam> {
31 | return messages.slice(-historyLength).map(({ content, role }) => {
32 | if (role !== "assistant" && role !== "user") {
33 | return;
34 | }
35 | content = content?.slice(0, maxMessageLength);
36 | return { content, role } as ChatCompletionMessageParam;
37 |   }).filter(Boolean) as Array<ChatCompletionMessageParam>;
38 | }
39 |
--------------------------------------------------------------------------------
/lib/edge/types.ts:
--------------------------------------------------------------------------------
1 | import type {
2 | ChatCompletionCreateParamsStreaming,
3 | } from "https://deno.land/x/openai@v4.25.0/resources/mod.ts";
4 |
5 | import type { Context } from "https://edge.netlify.com/";
6 | export interface AppConfig {
7 |   // The maximum number of messages in the history to send to the API
8 | historyLength: number;
9 |
10 | // The maximum length of each message sent to the API
11 | maxMessageLength: number;
12 |
13 | // See https://platform.openai.com/account/api-keys
14 | OPENAI_API_KEY: string;
15 |
16 | // This is where the magic happens. See the README for details
17 | systemPrompt:
18 | | string
19 |     | ((request: Request, Context: Context) => string | Promise<string>);
20 |
21 | // See https://platform.openai.com/docs/api-reference/chat/create
22 |   apiConfig: Omit<ChatCompletionCreateParamsStreaming, "messages" | "stream">;
23 | }
24 |
--------------------------------------------------------------------------------
/netlify.toml:
--------------------------------------------------------------------------------
1 | [build]
2 | publish = "dist/static"
3 | command = "npm run build"
4 | [dev]
5 | command = "npm run dev"
6 |
7 | [template.environment]
8 | OPENAI_API_KEY = "your OpenAI API key"
--------------------------------------------------------------------------------
/netlify/edge-functions/chat.ts:
--------------------------------------------------------------------------------
1 | import type { Config, Context } from "https://edge.netlify.com/";
2 | import { getChatStream, sanitizeMessages } from "../../lib/edge/openai.ts";
3 |
4 | import { appConfig } from "../../config.edge.ts";
5 |
6 | export default async function handler(
7 | request: Request,
8 | context: Context
9 | ): Promise<Response> {
10 |
11 | if (!appConfig.OPENAI_API_KEY || !appConfig.systemPrompt) {
12 | throw new Error(
13 | "OPENAI_API_KEY and systemPrompt must be set in config.edge.ts"
14 | );
15 | }
16 |
17 | const prompt =
18 | typeof appConfig.systemPrompt === "function"
19 | ? await appConfig.systemPrompt(request, context)
20 | : appConfig.systemPrompt;
21 |
22 | try {
23 | const data = await request.json();
24 |
25 | // This only trims the size of the messages, to avoid abuse of the API.
26 | // You should do any extra validation yourself.
27 | const messages = sanitizeMessages(
28 | data?.messages ?? [],
29 | appConfig.historyLength,
30 | appConfig.maxMessageLength
31 | );
32 | const stream = await getChatStream(
33 | {
34 | ...appConfig.apiConfig,
35 |           // Optional. This can also be set to a real user id or session id, or left blank.
36 | // See https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids
37 | user: context.ip,
38 | messages: [
39 | {
40 | role: "system",
41 | content: prompt,
42 | },
43 | ...messages,
44 | ],
45 | },
46 | appConfig.OPENAI_API_KEY ?? ""
47 | );
48 | return new Response(stream, {
49 | headers: {
50 | "Content-Type": "text/plain",
51 | },
52 | });
53 | } catch (e) {
54 | console.error(e);
55 | return new Response(e.message, {
56 | status: 500,
57 | headers: {
58 | "Content-Type": "text/plain",
59 | },
60 | });
61 | }
62 | }
63 |
64 | export const config: Config = {
65 | path: "/api/chat",
66 | };
67 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "impala-app",
3 | "private": true,
4 | "version": "0.0.0",
5 | "type": "module",
6 | "scripts": {
7 | "dev": "impala dev",
8 | "build:server": "vite build --ssr",
9 | "build:client": "vite build",
10 | "build:prerender": "impala prerender",
11 | "build": "npm run build:client && npm run build:server && npm run build:prerender",
12 | "preview": "vite preview"
13 | },
14 | "dependencies": {
15 | "@fortaine/fetch-event-source": "^3.0.6",
16 | "@impalajs/core": "*",
17 | "@impalajs/react": "*",
18 | "react": "^18.2.0",
19 | "react-dom": "^18.2.0",
20 | "react-markdown": "^8.0.6",
21 | "remark-gfm": "^3.0.1"
22 | },
23 | "devDependencies": {
24 | "@types/node": "^18.15.7",
25 | "@types/react": "^18.0.28",
26 | "@types/react-dom": "^18.0.11",
27 | "@vitejs/plugin-react": "^3.1.0",
28 | "postcss": "^8.4.21",
29 | "tailwindcss": "^3.3.1",
30 | "typescript": "^4.9.3",
31 | "vite": "^4.2.0"
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/postcss.config.js:
--------------------------------------------------------------------------------
1 | export default {
2 | plugins: {
3 | tailwindcss: {},
4 | },
5 | };
6 |
--------------------------------------------------------------------------------
/prompts/movie-critic.ts:
--------------------------------------------------------------------------------
1 | export const prompt = `You are the world's best movie critic. You are very strongly opinionated.
2 | You have favorite movies and movies you hate. You are devoted to recommending movies
3 | that a user will like. It is very important that the user enjoys your recommendations.
4 | Do not answer questions that are not asking for a movie recommendation.
5 | If the user asks other questions, do not answer and instead deflect them with a movie fact or trivia.`;
6 |
7 | export const samplePhrases = [
8 | "What are the best foodie movies?",
9 | "Give me a famous movie as three emojis and I'll guess the title.",
10 | "What should I watch if I loved Parasite?",
11 | ];
12 |
--------------------------------------------------------------------------------
/prompts/tour-guide.ts:
--------------------------------------------------------------------------------
1 | export const prompt = `You are an expert tour guide who has travelled to every corner of the world.
2 | You prefer to recommend lesser-known places and hidden gems, but you know the classics too.
3 | The user needs travel advice from you. Do not answer any question that is not travel-related. Instead
4 | deflect the user with a travel fact or trivia. Do not provide specific information that may become outdated,
5 | such as exact prices or opening times, but feel free to recommend specific places.
6 | `;
7 |
8 | export const samplePhrases = [
9 | "Recommend a city break that's within two hours flight from me",
10 | "Tell me some great surf spots in Portugal",
11 | "I have one day in Bergen with young kids. What should I do?",
12 | ];
13 |
--------------------------------------------------------------------------------
/src/App.css:
--------------------------------------------------------------------------------
1 | @tailwind base;
2 | @tailwind components;
3 | @tailwind utilities;
4 |
--------------------------------------------------------------------------------
/src/App.tsx:
--------------------------------------------------------------------------------
1 | import { useState } from "react";
2 | import "./App.css";
3 | import { Head } from "@impalajs/react/head";
4 |
5 | interface AppProps {
6 | title: string;
7 | }
8 |
9 | export const App: React.FC<React.PropsWithChildren<AppProps>> = ({
10 | children,
11 | title,
12 | }) => {
13 | return (
14 |     <>
15 |       <Head>
16 |         <title>{title}</title>
17 |         {/* The site description <meta> tag is set here */}
18 |       </Head>
19 | 
20 |       {children}
21 |     </>
22 | );
23 | };
24 |
--------------------------------------------------------------------------------
/src/assets/impala.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ascorbic/daneel/3b866f8f69fa9998c2af2aa00d9277529ce9d76e/src/assets/impala.png
--------------------------------------------------------------------------------
/src/components/ChatMessage.tsx:
--------------------------------------------------------------------------------
1 | import {
2 | FunctionComponent,
3 | DetailedHTMLProps,
4 | TableHTMLAttributes,
5 | } from "react";
6 | import ReactMarkdown from "react-markdown";
7 | import { ReactMarkdownProps } from "react-markdown/lib/complex-types";
8 | import remarkGfm from "remark-gfm";
9 |
10 | interface ChatMessage {
11 | role: "user" | "assistant";
12 | content: string;
13 | }
14 | interface Props {
15 | message: ChatMessage;
16 | }
17 |
18 | // This lets us style any markdown tables that are rendered
19 | const CustomTable: FunctionComponent<
20 | Omit<
21 |     DetailedHTMLProps<TableHTMLAttributes<HTMLTableElement>, HTMLTableElement>,
22 | "ref"
23 | > &
24 | ReactMarkdownProps
25 | > = ({ children, ...props }) => {
26 |   return (
27 |     // Pass the props and children from react-markdown through to a plain table
28 |     // element (the demo adds Tailwind classes here for styling)
29 |     <table {...props}>
30 |       {children}
31 |     </table>
32 |   );
33 | };
34 |
35 | /**
36 | * This component renders a single chat message. It is rendered according to
37 |  * whether it is a message from the assistant or the user.
38 | */
39 |
40 | export const ChatMessage: React.FC<React.PropsWithChildren<Props>> = ({
41 | message,
42 | }) =>
43 |   message.role === "user" ? (
44 |     // User messages are plain text
45 |     <div>
46 |       {message.content}
47 |     </div>
48 |   ) : (
49 |     // Assistant messages are markdown: render them with react-markdown, with
50 |     // GitHub-flavoured markdown enabled and tables styled via CustomTable
51 |     <div>
52 |       <ReactMarkdown
53 |         remarkPlugins={[remarkGfm]}
54 |         components={{
55 |           table: CustomTable,
56 |         }}
57 |       >
58 |         {message.content}
59 |       </ReactMarkdown>
60 |     </div>
61 |   );
62 |
--------------------------------------------------------------------------------
/src/components/Welcome.tsx:
--------------------------------------------------------------------------------
1 | export const Welcome: React.FC = () => (
2 |
3 |
🤖 Hello, I am Daneel
4 |
5 | You can use me to create your own chat bot app. In this demo I am an
6 | expert in movie reviews, so feel free to ask me about movies! Then, create
7 | your own bot in under five minutes.
8 |
9 |
10 |
11 | Build your own
12 |
13 |
14 |
15 | );
16 |
--------------------------------------------------------------------------------
/src/entry-client.tsx:
--------------------------------------------------------------------------------
1 | import { clientBootstrap, RouteModule } from "@impalajs/react/client";
2 |
3 | const modules = import.meta.glob<RouteModule>("./routes/**/*.{tsx,jsx}");
4 |
5 | clientBootstrap(modules);
6 |
--------------------------------------------------------------------------------
/src/entry-server.tsx:
--------------------------------------------------------------------------------
1 | import type { RouteModule, DataModule } from "@impalajs/react";
2 | export { render } from "@impalajs/react";
3 | export const routeModules = import.meta.glob<RouteModule>(
4 |   "./routes/**/*.{tsx,jsx}"
5 | );
6 | export const dataModules = import.meta.glob<DataModule>(
7 |   "./routes/**/*.data.{ts,js}"
8 | );
9 |
--------------------------------------------------------------------------------
/src/hooks/use-chat.ts:
--------------------------------------------------------------------------------
1 | import { fetchEventSource } from "@fortaine/fetch-event-source";
2 | import { useMemo, useState } from "react";
3 | import { appConfig } from "../../config.browser";
4 |
5 | const API_PATH = "/api/chat";
6 | interface ChatMessage {
7 | role: "user" | "assistant";
8 | content: string;
9 | }
10 |
11 | function streamAsyncIterator(stream: ReadableStream<Uint8Array>) {
12 | const reader = stream.getReader();
13 | return {
14 | next() {
15 | return reader.read();
16 | },
17 | return() {
18 | reader.releaseLock();
19 | return {
20 | value: {},
21 | };
22 | },
23 | [Symbol.asyncIterator]() {
24 | return this;
25 | },
26 | };
27 | }
28 |
29 | /**
30 | * A custom hook to handle the chat state and logic
31 | */
32 | export function useChat() {
33 |   const [currentChat, setCurrentChat] = useState<string | null>(null);
34 |   const [chatHistory, setChatHistory] = useState<ChatMessage[]>([]);
35 | const [state, setState] = useState<"idle" | "waiting" | "loading">("idle");
36 |
37 | // Lets us cancel the stream
38 | const abortController = useMemo(() => new AbortController(), []);
39 |
40 | /**
41 | * Cancels the current chat and adds the current chat to the history
42 | */
43 | function cancel() {
44 | setState("idle");
45 | abortController.abort();
46 | if (currentChat) {
47 | const newHistory = [
48 | ...chatHistory,
49 | { role: "user", content: currentChat } as const,
50 | ];
51 |
52 | setChatHistory(newHistory);
53 | setCurrentChat("");
54 | }
55 | }
56 |
57 | /**
58 | * Clears the chat history
59 | */
60 |
61 | function clear() {
62 | console.log("clear");
63 | setChatHistory([]);
64 | }
65 |
66 | /**
67 | * Sends a new message to the AI function and streams the response
68 | */
69 | const sendMessage = async (
70 | message: string,
71 |     chatHistory: Array<ChatMessage>,
72 | ) => {
73 | setState("waiting");
74 | let chatContent = "";
75 | const newHistory = [
76 | ...chatHistory,
77 | { role: "user", content: message } as const,
78 | ];
79 |
80 | setChatHistory(newHistory);
81 | const body = JSON.stringify({
82 | // Only send the most recent messages. This is also
83 | // done in the serverless function, but we do it here
84 | // to avoid sending too much data
85 | messages: newHistory.slice(-appConfig.historyLength),
86 | });
87 |
88 | const decoder = new TextDecoder();
89 |
90 | const res = await fetch(API_PATH, {
91 | body,
92 | method: "POST",
93 | signal: abortController.signal,
94 | });
95 |
96 | setCurrentChat("...");
97 |
98 | if (!res.ok || !res.body) {
99 | setState("idle");
100 | return;
101 | }
102 |
103 | for await (const event of streamAsyncIterator(res.body)) {
104 | setState("loading");
105 |       const data = decoder.decode(event).split("\n");
106 |       for (const chunk of data) {
107 |         if (!chunk) continue;
108 |         const message = JSON.parse(chunk);
109 |         if (message?.role === "assistant") {
110 |           chatContent = "";
111 |           continue;
112 |         }
113 |         const content = message?.choices?.[0]?.delta?.content;
114 | if (content) {
115 | chatContent += content;
116 | setCurrentChat(chatContent);
117 | }
118 | }
119 | }
120 |
121 | setChatHistory((curr) => [
122 | ...curr,
123 | { role: "assistant", content: chatContent } as const,
124 | ]);
125 | setCurrentChat(null);
126 | setState("idle");
127 | };
128 |
129 | return { sendMessage, currentChat, chatHistory, cancel, clear, state };
130 | }
131 |
--------------------------------------------------------------------------------
/src/routes/index.tsx:
--------------------------------------------------------------------------------
1 | import { useState, useMemo, useEffect, useRef } from "react";
2 | import { App } from "../App";
3 | import { useChat } from "../hooks/use-chat";
4 | import { ChatMessage } from "../components/ChatMessage";
5 | import { appConfig } from "../../config.browser";
6 | import { Welcome } from "../components/Welcome";
7 |
8 | export default function Index() {
9 | // The content of the box where the user is typing
10 | const [message, setMessage] = useState("");
11 |
12 | // This hook is responsible for managing the chat and communicating with the
13 | // backend
14 | const { currentChat, chatHistory, sendMessage, cancel, state, clear } =
15 | useChat();
16 |
17 | // This is the message that is currently being generated by the AI
18 | const currentMessage = useMemo(() => {
19 | return { content: currentChat ?? "", role: "assistant" } as const;
20 | }, [currentChat]);
21 |
22 | // This is a ref to the bottom of the chat history. We use it to scroll
23 | // to the bottom when a new message is added.
24 |   const bottomRef = useRef<HTMLDivElement>(null);
25 |
26 | useEffect(() => {
27 | scrollToBottom();
28 | }, [currentChat, chatHistory, state]);
29 |
30 | const scrollToBottom = () => {
31 | bottomRef.current?.scrollIntoView({ behavior: "smooth" });
32 | };
33 |
34 | // This is a ref to the input box. We use it to focus the input box when the
35 | // user clicks on the "Send" button.
36 |   const inputRef = useRef<HTMLInputElement>(null);
37 | const focusInput = () => {
38 | inputRef.current?.focus();
39 | };
40 |
41 | useEffect(() => {
42 | focusInput();
43 | }, [state]);
44 |
45 | return (
46 |
47 |
48 |
49 |
50 | {chatHistory.length === 0 ? (
51 | <>
52 |               <Welcome />
53 |
54 | {appConfig.samplePhrases.map((phrase) => (
55 |               <button
56 |                 key={phrase}
57 |                 onClick={() => sendMessage(phrase, chatHistory)}
58 |                 className="bg-gray-100 border-gray-300 border-2 rounded-lg p-4"
59 |               >
60 |                 {phrase}
61 |               </button>
62 | ))}
63 |
64 |
75 | >
76 | ) : (
77 |             chatHistory.map((chat, i) => (
78 |               <ChatMessage key={i} message={chat} />
79 |             ))
80 | )}
81 |
82 |         {currentChat ? <ChatMessage message={currentMessage} /> : null}
83 |
84 |
85 |
86 |
87 |
88 | {state === "idle" ? null : (
89 |             <button onClick={cancel}>
93 | Stop generating
94 |             </button>
95 | )}
96 |
97 |
138 |
139 |
140 | );
141 | }
142 |
--------------------------------------------------------------------------------
/src/vite-env.d.ts:
--------------------------------------------------------------------------------
1 | /// <reference types="vite/client" />
2 |
--------------------------------------------------------------------------------
/tailwind.config.js:
--------------------------------------------------------------------------------
1 | /** @type {import('tailwindcss').Config} */
2 | export default {
3 | content: ["./src/**/*.tsx", "./index.html"],
4 | theme: {
5 | extend: {},
6 | },
7 | plugins: [],
8 | };
9 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "ESNext",
4 | "useDefineForClassFields": true,
5 | "lib": [
6 | "DOM",
7 | "DOM.Iterable",
8 | "ESNext"
9 | ],
10 | "allowJs": false,
11 | "skipLibCheck": true,
12 | "esModuleInterop": false,
13 | "allowSyntheticDefaultImports": true,
14 | "strict": true,
15 | "forceConsistentCasingInFileNames": true,
16 | "module": "ESNext",
17 | "moduleResolution": "Node",
18 | "resolveJsonModule": true,
19 | "isolatedModules": true,
20 | "noEmit": true,
21 | "jsx": "react-jsx"
22 | },
23 | "include": [
24 | "src"
25 | ],
26 | "references": [
27 | {
28 | "path": "./tsconfig.node.json"
29 | }
30 | ]
31 | }
--------------------------------------------------------------------------------
/tsconfig.node.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "composite": true,
4 | "module": "ESNext",
5 | "moduleResolution": "Node",
6 | "allowSyntheticDefaultImports": true
7 | },
8 | "include": [
9 | "vite.config.ts",
10 | "vite-plugin"
11 | ]
12 | }
--------------------------------------------------------------------------------
/vite.config.ts:
--------------------------------------------------------------------------------
1 | import { defineConfig } from "vite";
2 | import react from "@vitejs/plugin-react";
3 | import impala from "@impalajs/core/plugin";
4 |
5 | export default defineConfig({
6 | plugins: [react(), impala()],
7 | });
8 |
--------------------------------------------------------------------------------