├── .eslintrc.json ├── .gitignore ├── README.md ├── components ├── Generation.tsx ├── History.tsx ├── ImageAnswer.tsx ├── Prompt.tsx └── TextAnswer.tsx ├── next.config.js ├── package-lock.json ├── package.json ├── pages ├── _app.tsx ├── _document.tsx └── index.tsx ├── postcss.config.js ├── public └── favicon.ico ├── styles └── globals.css ├── tailwind.config.js └── tsconfig.json /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "next/core-web-vitals" 3 | } 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | 8 | # testing 9 | /coverage 10 | 11 | # next.js 12 | /.next/ 13 | /out/ 14 | 15 | # production 16 | /build 17 | 18 | # misc 19 | .DS_Store 20 | *.pem 21 | 22 | # debug 23 | npm-debug.log* 24 | yarn-debug.log* 25 | yarn-error.log* 26 | .pnpm-debug.log* 27 | 28 | # local env files 29 | .env*.local 30 | 31 | # vercel 32 | .vercel 33 | 34 | # typescript 35 | *.tsbuildinfo 36 | next-env.d.ts 37 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Simple AI 2 | 3 | Welcome to another one of my weekend projects exploring how humans can interact with state of the art AI models. This week explores what it would be like to interact with many models through only a text input and minimalistic UI. Just type what you want to do. Anything is fair game ;). Built using zustand, nextjs, cloudflare workers, framer motion, StabilityAI API, and OpenAI API. 
4 | 5 | > You can see a live version of the site at 6 | 7 | ![](https://kaj.has.rocks/r/ad09f2afb8.png) 8 | 9 | ## Contributing 10 | 11 | Just make a PR and try to follow the functional paradigm. All skill levels welcome. Let's build AI interactions together :). 12 | -------------------------------------------------------------------------------- /components/Generation.tsx: -------------------------------------------------------------------------------- 1 | import create from "zustand"; 2 | import { ImageAnswer } from "./ImageAnswer"; 3 | import { Prompt } from "./Prompt"; 4 | import { TextAnswer } from "./TextAnswer"; 5 | import { History } from "./History"; 6 | 7 | export type GenerationState = { 8 | loading: boolean; 9 | task: string | null; 10 | setLoading: (loading: boolean, task?: string | null) => void; 11 | }; 12 | 13 | export namespace Generation { 14 | export const API_BASE = "https://api.prototyped.ai"; 15 | 16 | export const use = create()((set) => ({ 17 | loading: false, 18 | task: null, 19 | setLoading: (loading, task) => set({ loading, task: task || null }), 20 | })); 21 | 22 | export type Artifact = { 23 | image: string; 24 | seed: number; 25 | }; 26 | 27 | export async function gpt3(prompt: string): Promise { 28 | const response = await fetch(`${API_BASE}/text`, { 29 | method: "POST", 30 | headers: { 31 | "Content-Type": "application/json", 32 | }, 33 | body: JSON.stringify({ prompt }), 34 | }).then((res) => res.json()); 35 | 36 | return response.choices.pop().text; 37 | } 38 | 39 | export async function stable( 40 | prompt: string, 41 | amount?: number 42 | ): Promise { 43 | const response = await fetch(`${API_BASE}/image`, { 44 | method: "POST", 45 | headers: { 46 | "Content-Type": "application/json", 47 | }, 48 | body: JSON.stringify({ 49 | prompt: prompt, 50 | count: amount || 1, 51 | generate_negative_prompt: true 52 | }), 53 | }).then((res) => res.json()); 54 | 55 | return response; 56 | } 57 | 58 | export async function interrogate(image: 
string): Promise { 59 | // turn image into base64 60 | if (!image.startsWith("data:image/")) { 61 | const response = await fetch(image); 62 | const blob = await response.blob(); 63 | image = await new Promise((resolve) => { 64 | const reader = new FileReader(); 65 | reader.onloadend = () => resolve(reader.result as string); 66 | reader.readAsDataURL(blob); 67 | }); 68 | } 69 | 70 | const response = await fetch(`${API_BASE}/interrogate`, { 71 | method: "POST", 72 | headers: { 73 | "Content-Type": "application/json", 74 | }, 75 | body: JSON.stringify({ 76 | image: image, 77 | }), 78 | }).then((res) => res.json()); 79 | 80 | return response.prompt; 81 | } 82 | 83 | // string type 84 | export type GPTResponse = 85 | | `Assistant: ${string}` 86 | | `Generation: ${number}x"${string}"`; 87 | 88 | export async function generate(prompt: string) { 89 | use.getState().setLoading(true, "Thinking..."); 90 | 91 | const hist = History.getLastItems(10); 92 | 93 | const init_prompt = `${Prompt.INITIAL.replace( 94 | "{DATE}", 95 | `${new Date().toLocaleDateString()} ${new Date().toLocaleTimeString()}` 96 | )}${ 97 | hist && hist.length > 0 98 | ? hist 99 | .map( 100 | (h) => 101 | `User: ${h.user}\n${h.type}: ${ 102 | h.type === "Assistant" 103 | ? 
h.text 104 | : `${h.images.length}x"${h.prompt}"` 105 | }` 106 | ) 107 | .join("\n\n") 108 | : "" 109 | }\n\nUser: ${prompt}\n`; 110 | 111 | const response = await gpt3(init_prompt); 112 | 113 | console.log(init_prompt); 114 | console.log(response); 115 | 116 | if (RegExp(/Generation:\s(\d+)x"(.+)"/).test(response)) { 117 | Generation.use.getState().setLoading(true, "Imagining..."); 118 | 119 | const [count, imgPrompt] = response 120 | .replace("Generation: ", "") 121 | .replace(/"/g, "") 122 | .split("x"); 123 | const artifacts = await stable(imgPrompt, parseInt(count)); 124 | 125 | use.getState().setLoading(false); 126 | 127 | ImageAnswer.use.getState().setArtifacts(artifacts); 128 | 129 | History.use.getState().addItem({ 130 | user: prompt, 131 | prompt: imgPrompt, 132 | images: artifacts.map((a) => a.image), 133 | type: "Generation", 134 | } as History.ImageHistoryItem); 135 | 136 | return artifacts; 137 | } else { 138 | use.getState().setLoading(false); 139 | 140 | const text = response.replace("Assistant: ", ""); 141 | 142 | TextAnswer.use.getState().setValue(text); 143 | 144 | History.use.getState().addItem({ 145 | user: prompt, 146 | text, 147 | type: "Assistant", 148 | } as History.TextHistoryItem); 149 | 150 | return text; 151 | } 152 | } 153 | } 154 | -------------------------------------------------------------------------------- /components/History.tsx: -------------------------------------------------------------------------------- 1 | /* eslint-disable @next/next/no-img-element */ 2 | import create from "zustand"; 3 | 4 | export function History() { 5 | const { history } = History.use(); 6 | 7 | console.log(history); 8 | 9 | if (history.length <= 1) return null; 10 | 11 | return ( 12 |
13 | {/* show the last couple history items blurred on top of each other */} 14 | {history 15 | .reverse() 16 | .slice(1) 17 | .slice(history.length - 3) 18 | .map((item, index) => { 19 | if ("images" in item) { 20 | return ( 21 |
30 | {item.images.map((artifact) => ( 31 | history image 37 | ))} 38 |
39 | ); 40 | } else { 41 | return ( 42 |
51 |
52 |
53 |
54 | {item.user} 55 |
56 |
57 | {item.text} 58 |
59 |
60 |
61 | ); 62 | } 63 | })} 64 |
65 | ); 66 | } 67 | 68 | export type HistoryState = { 69 | history: History.HistoryItem[]; 70 | setHistory: (history: History.HistoryItem[]) => void; 71 | addItem: (item: History.HistoryItem) => void; 72 | addItems: (items: History.HistoryItem[]) => void; 73 | }; 74 | 75 | export namespace History { 76 | export const use = create()((set) => ({ 77 | history: [], 78 | setHistory: (history) => set({ history }), 79 | addItem: (item) => set((state) => ({ history: [...state.history, item] })), 80 | addItems: (items) => 81 | set((state) => ({ history: [...state.history, ...items] })), 82 | })); 83 | 84 | export const useLastItem = () => { 85 | const { history } = History.use(); 86 | 87 | return history[history.length - 1]; 88 | }; 89 | 90 | export const getLastTextItems = (n: number) => { 91 | return History.use 92 | .getState() 93 | .history.filter((item) => item.type === "Assistant") 94 | .slice(-n) as TextHistoryItem[]; 95 | }; 96 | 97 | export const getLastItems = (n: number) => { 98 | return History.use.getState().history.slice(-n) as HistoryItem[]; 99 | }; 100 | 101 | export type ImageHistoryItem = { 102 | type: "Generation"; 103 | user: string; 104 | prompt: string; 105 | images: string[]; 106 | }; 107 | 108 | export type TextHistoryItem = { 109 | type: "Assistant"; 110 | user: string; 111 | text: string; 112 | }; 113 | 114 | export type HistoryItem = ImageHistoryItem | TextHistoryItem; 115 | } 116 | -------------------------------------------------------------------------------- /components/ImageAnswer.tsx: -------------------------------------------------------------------------------- 1 | import { AnimatePresence, motion } from "framer-motion"; 2 | import { X } from "lucide-react"; 3 | import { useState } from "react"; 4 | import create from "zustand"; 5 | import { Generation } from "./Generation"; 6 | import { History } from "./History"; 7 | 8 | function Artifact({ artifact }: { artifact: Generation.Artifact }) { 9 | const [loaded, setLoaded] = useState(false); 
10 | 11 | return ( 12 | setLoaded(true)} 27 | /> 28 | ); 29 | } 30 | 31 | export function ImageAnswer() { 32 | const { artifacts, setArtifacts } = ImageAnswer.use(); 33 | 34 | return ( 35 | 36 | {artifacts && artifacts.length > 0 && ( 37 | 49 | {artifacts.map((artifact) => ( 50 | 51 | ))} 52 |
53 | setArtifacts([])} /> 54 |
55 |
56 | )} 57 |
58 | ); 59 | } 60 | 61 | export type ImageAnswerState = { 62 | artifacts: Generation.Artifact[]; 63 | setArtifacts: (artifacts: Generation.Artifact[]) => void; 64 | }; 65 | 66 | export namespace ImageAnswer { 67 | export const use = create()((set) => ({ 68 | artifacts: [], 69 | setArtifacts: (artifacts) => set({ artifacts }), 70 | })); 71 | 72 | export async function addFiles(files: FileList) { 73 | // convert to blobs, get object urls, and add to artifacts + history 74 | Generation.use.getState().setLoading(true, "Pondering images..."); 75 | 76 | const artifacts = await Promise.all( 77 | Array.from(files).map(async (file) => { 78 | const blob = await file.arrayBuffer(); 79 | const url = URL.createObjectURL(new Blob([blob])); 80 | 81 | const prompt = await Generation.interrogate(url); 82 | 83 | History.use.getState().addItem({ 84 | type: "Generation", 85 | user: "*added files*", 86 | images: [url], 87 | prompt, 88 | } as History.ImageHistoryItem); 89 | 90 | return { seed: 0, image: url }; 91 | }) 92 | ); 93 | 94 | ImageAnswer.use.setState((state) => ({ 95 | artifacts: [...state.artifacts, ...artifacts], 96 | })); 97 | 98 | Generation.use.getState().setLoading(false); 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /components/Prompt.tsx: -------------------------------------------------------------------------------- 1 | import { AnimatePresence, motion } from "framer-motion"; 2 | import { useEffect, useRef } from "react"; 3 | import create from "zustand"; 4 | import { Generation } from "./Generation"; 5 | import { History } from "./History"; 6 | import { ImageAnswer } from "./ImageAnswer"; 7 | import { TextAnswer } from "./TextAnswer"; 8 | 9 | export type PromptState = { 10 | value: string; 11 | setValue: (value: string) => void; 12 | }; 13 | 14 | export type Prompt = string; 15 | 16 | export function Prompt() { 17 | const textareaRef = useRef(null); 18 | const timeoutRef = useRef(null); 19 | const { value, 
setValue } = Prompt.use(); 20 | const { loading, task } = Generation.use(); 21 | 22 | const textAnswer = TextAnswer.use(); 23 | const imageAnswer = ImageAnswer.use(); 24 | 25 | const lastItem = History.useLastItem(); 26 | 27 | useEffect(() => { 28 | if (textareaRef.current) { 29 | const styles = window.getComputedStyle(textareaRef.current); 30 | textareaRef.current.style.height = "auto"; 31 | 32 | const newHeight = 33 | textareaRef.current.scrollHeight + 34 | parseInt(styles.paddingTop) + 35 | parseInt(styles.paddingBottom); 36 | 37 | textareaRef.current.style.height = newHeight + "px"; 38 | } 39 | }, [value, textareaRef]); 40 | 41 | return ( 42 | 43 | 0) || 57 | (textAnswer.value && textAnswer.value.length > 0) 58 | ? "justify-end" 59 | : "justify-center" 60 | }`} 61 | > 62 | 74 | {loading && task} 75 | 76 | { 81 | setValue(e.target.value); 82 | 83 | if (timeoutRef.current) { 84 | clearTimeout(timeoutRef.current); 85 | } 86 | 87 | timeoutRef.current = setTimeout(() => { 88 | if (textareaRef.current && textareaRef.current.value.length > 0) { 89 | Generation.generate(value); 90 | } 91 | }, 7000); 92 | }} 93 | placeholder={ 94 | loading 95 | ? "The computer is thinking..." 96 | : "The world awaits your wisdom..." 
97 | } 98 | className={`bg-transparent text-4xl font-medium w-full h-full resize-none outline-none focus:outline-none text-center placeholder:opacity-50`} 99 | autoFocus 100 | disabled={loading} 101 | onFocus={(e) => { 102 | if ( 103 | textAnswer.value.length > 0 && 104 | imageAnswer.artifacts.length === 0 105 | ) { 106 | textAnswer.setValue(""); 107 | } 108 | // if (imageAnswer.artifacts.length > 0) { 109 | // imageAnswer.setArtifacts([]); 110 | // } 111 | e.target.setSelectionRange(0, e.target.value.length); 112 | }} 113 | onBlur={(e) => { 114 | if (lastItem) { 115 | if (lastItem.type === "Assistant") { 116 | textAnswer.setValue(lastItem.text); 117 | } else if (lastItem.type === "Generation") { 118 | imageAnswer.setArtifacts( 119 | lastItem.images.map((i) => ({ image: i, seed: 0 })) 120 | ); 121 | } 122 | } 123 | }} 124 | onKeyDown={(e) => { 125 | if (e.key === "Enter" && !e.shiftKey) { 126 | e.preventDefault(); 127 | e.stopPropagation(); 128 | 129 | if (value.trim().length > 0) { 130 | if (timeoutRef.current) { 131 | clearTimeout(timeoutRef.current); 132 | } 133 | 134 | Generation.generate(value); 135 | } 136 | } 137 | }} 138 | /> 139 | 140 | 141 | ); 142 | } 143 | 144 | export namespace Prompt { 145 | export const use = create()((set) => ({ 146 | value: "", 147 | setValue: (value) => set({ value }), 148 | })); 149 | 150 | export const INITIAL = 151 | `Determine the specific action of a user's message based on the following list and format the response correctly using the previous history as context | Date: {DATE} 152 | 153 | Actions: 154 | - Assistant chat (Assistant: {message}) 155 | - Image Generation (Generation: {count}x"{prompt}") 156 | 157 | Example: 158 | User: Hello computer 159 | Assistant: Hi there! How can I help you? 160 | 161 | User: Show me an image of a dog 162 | Generation: 1x"A black and white border collie running through a meadow, chasing a butterfly" 163 | 164 | User: What is Mars? 
165 | Assistant: Mars is the fourth planet from the Sun and the second smallest planet in the Solar System. It is a terrestrial planet, meaning it is composed of mostly rock. It has a thin atmosphere and is home to the largest volcano in the Solar System, Olympus Mons. 166 | 167 | User: What's it look like? 168 | Generation: 3x"A vivid photo of the red planet Mars, taken from a telescope" 169 | 170 | User: *added files* 171 | Generation: 2x"An image of a dog and a cat sitting on a couch together" 172 | 173 | User: What do these images have in common? 174 | Assistant: They are both animals. 175 | 176 | New Context (no memory of previous prompts): 177 | ` as const; 178 | 179 | export const TEXT = 180 | `You are Assistant. A Large Language Model (LLM) Trained to answer questions and provide feedback | Date: {DATE} | Browsing: Disabled` as const; 181 | } 182 | -------------------------------------------------------------------------------- /components/TextAnswer.tsx: -------------------------------------------------------------------------------- 1 | import { AnimatePresence, motion } from "framer-motion"; 2 | import create from "zustand"; 3 | 4 | export function TextAnswer() { 5 | const { value, setValue } = TextAnswer.use(); 6 | 7 | return ( 8 | 9 | {value && value.length > 0 && ( 10 | 22 | {value} 23 | 24 | )} 25 | 26 | ); 27 | } 28 | 29 | export type TextAnswerState = { 30 | value: string; 31 | setValue: (value: string) => void; 32 | }; 33 | 34 | export namespace TextAnswer { 35 | export const use = create()((set) => ({ 36 | value: "", 37 | setValue: (value) => set({ value }), 38 | })); 39 | } 40 | -------------------------------------------------------------------------------- /next.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('next').NextConfig} */ 2 | const nextConfig = { 3 | reactStrictMode: true, 4 | } 5 | 6 | module.exports = nextConfig 7 | 
-------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "simple-ai", 3 | "version": "0.1.0", 4 | "private": true, 5 | "scripts": { 6 | "dev": "next dev", 7 | "build": "next build", 8 | "start": "next start", 9 | "lint": "next lint" 10 | }, 11 | "dependencies": { 12 | "@next/font": "13.1.0", 13 | "@types/node": "18.11.17", 14 | "@types/react": "18.0.26", 15 | "@types/react-dom": "18.0.9", 16 | "eslint": "8.30.0", 17 | "eslint-config-next": "13.1.0", 18 | "framer-motion": "^8.0.1", 19 | "lucide-react": "^0.105.0-alpha.4", 20 | "next": "13.1.0", 21 | "react": "18.2.0", 22 | "react-dom": "18.2.0", 23 | "typescript": "4.9.4", 24 | "zustand": "^4.1.5" 25 | }, 26 | "devDependencies": { 27 | "autoprefixer": "^10.4.13", 28 | "postcss": "^8.4.20", 29 | "tailwindcss": "^3.2.4" 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /pages/_app.tsx: -------------------------------------------------------------------------------- 1 | import '../styles/globals.css' 2 | import type { AppProps } from 'next/app' 3 | 4 | export default function App({ Component, pageProps }: AppProps) { 5 | return 6 | } 7 | -------------------------------------------------------------------------------- /pages/_document.tsx: -------------------------------------------------------------------------------- 1 | import { Html, Head, Main, NextScript } from 'next/document' 2 | 3 | export default function Document() { 4 | return ( 5 | 6 | 7 | 8 |
9 | 10 | 11 | 12 | ) 13 | } 14 | -------------------------------------------------------------------------------- /pages/index.tsx: -------------------------------------------------------------------------------- 1 | import Head from "next/head"; 2 | import { useEffect } from "react"; 3 | import { Generation } from "../components/Generation"; 4 | import { History } from "../components/History"; 5 | import { ImageAnswer } from "../components/ImageAnswer"; 6 | import { Prompt } from "../components/Prompt"; 7 | import { TextAnswer } from "../components/TextAnswer"; 8 | 9 | export default function Home() { 10 | const { loading } = Generation.use(); 11 | const { artifacts } = ImageAnswer.use(); 12 | const { value } = TextAnswer.use(); 13 | 14 | useEffect(() => { 15 | // Add event listeners for file drop and dragover 16 | document.addEventListener("drop", interrogate); 17 | document.addEventListener("dragover", onDragOver); 18 | 19 | // Remove event listeners when the component is unmounted 20 | return () => { 21 | document.removeEventListener("drop", interrogate); 22 | document.removeEventListener("dragover", onDragOver); 23 | }; 24 | }, []); // Run only once 25 | 26 | const interrogate = (e: any) => { 27 | e.preventDefault(); 28 | e.stopPropagation(); 29 | const { files } = e.dataTransfer; 30 | if (files && files.length > 0) { 31 | ImageAnswer.addFiles(files); 32 | } 33 | }; 34 | 35 | const onDragOver = (event: any) => { 36 | // Prevent default behavior (e.g., open the file in the browser) 37 | event.preventDefault(); 38 | }; 39 | 40 | return ( 41 | <> 42 | 43 | Simple AI 44 | 45 | 46 | 47 | 48 |
51 | 52 |
53 | 54 |
55 |
56 | 57 |
58 | {/* */} 59 |
60 | 61 | ); 62 | } 63 | -------------------------------------------------------------------------------- /postcss.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | plugins: { 3 | tailwindcss: {}, 4 | autoprefixer: {}, 5 | }, 6 | } 7 | -------------------------------------------------------------------------------- /public/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KAJdev/simple-ai/dd69a2bdd6f277f0b4d4a012b28439068fd25b2a/public/favicon.ico -------------------------------------------------------------------------------- /styles/globals.css: -------------------------------------------------------------------------------- 1 | @tailwind base; 2 | @tailwind components; 3 | @tailwind utilities; 4 | 5 | * { 6 | box-sizing: border-box; 7 | padding: 0; 8 | margin: 0; 9 | } 10 | 11 | html, 12 | body { 13 | max-width: 100vw; 14 | overflow-x: hidden; 15 | } 16 | 17 | a { 18 | color: inherit; 19 | text-decoration: none; 20 | } 21 | 22 | @media (prefers-color-scheme: dark) { 23 | html { 24 | color-scheme: dark; 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /tailwind.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('tailwindcss').Config} */ 2 | module.exports = { 3 | content: [ 4 | "./pages/**/*.{js,ts,jsx,tsx}", 5 | "./components/**/*.{js,ts,jsx,tsx}", 6 | ], 7 | theme: { 8 | extend: {}, 9 | }, 10 | plugins: [], 11 | } -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es5", 4 | "lib": ["dom", "dom.iterable", "esnext"], 5 | "allowJs": true, 6 | "skipLibCheck": true, 7 | "strict": true, 8 | "forceConsistentCasingInFileNames": true, 9 | 
"noEmit": true, 10 | "esModuleInterop": true, 11 | "module": "esnext", 12 | "moduleResolution": "node", 13 | "resolveJsonModule": true, 14 | "isolatedModules": true, 15 | "jsx": "preserve", 16 | "incremental": true 17 | }, 18 | "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx"], 19 | "exclude": ["node_modules"] 20 | } 21 | --------------------------------------------------------------------------------