├── chatapp
│   ├── .envExample
│   ├── .eslintrc.json
│   ├── public
│   │   ├── favicon.ico
│   │   ├── vercel.svg
│   │   └── next.svg
│   ├── next.config.js
│   ├── pages
│   │   ├── _document.tsx
│   │   ├── _app.tsx
│   │   ├── api
│   │   │   └── chat.ts
│   │   └── index.tsx
│   ├── .gitignore
│   ├── tsconfig.json
│   ├── package.json
│   └── styles
│       ├── globals.css
│       └── Home.module.css
├── .gitattributes
├── runpod
│   ├── Dockerfile
│   └── handler.py
└── README.md

/chatapp/.envExample:
--------------------------------------------------------------------------------
1 | RUNPOD_KEY=
2 | RUNPOD_ID=
--------------------------------------------------------------------------------
/chatapp/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 |   "extends": "next/core-web-vitals"
3 | }
4 | 
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 | 
--------------------------------------------------------------------------------
/chatapp/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aleemrehmtulla/stablechat/HEAD/chatapp/public/favicon.ico
--------------------------------------------------------------------------------
/chatapp/next.config.js:
--------------------------------------------------------------------------------
1 | /** @type {import('next').NextConfig} */
2 | const nextConfig = {
3 |   reactStrictMode: true,
4 | }
5 | 
6 | module.exports = nextConfig
7 | 
--------------------------------------------------------------------------------
/runpod/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11.1-buster
2 | 
3 | WORKDIR /
4 | 
5 | RUN pip install runpod
6 | 
7 | RUN pip install torch
8 | 
9 | RUN pip install transformers
10 | 
11 | RUN pip install numpy
12 | 
13 | RUN pip install Flask
14 | 
15 | ADD handler.py .
16 | 
17 | CMD [ "python", "-u", "/handler.py" ]
--------------------------------------------------------------------------------
/chatapp/pages/_document.tsx:
--------------------------------------------------------------------------------
1 | import { Html, Head, Main, NextScript } from 'next/document'
2 | 
3 | export default function Document() {
4 |   return (
5 |     <Html lang="en">
6 |       <Head />
7 |       <body>
8 |         <Main />
9 |         <NextScript />
10 |       </body>
11 |     </Html>
12 |   )
13 | }
14 | 
--------------------------------------------------------------------------------
/chatapp/pages/_app.tsx:
--------------------------------------------------------------------------------
1 | import "@/styles/globals.css";
2 | import type { AppProps } from "next/app";
3 | import { ChakraProvider } from "@chakra-ui/react";
4 | import { Analytics } from "@vercel/analytics/react";
5 | 
6 | export default function App({ Component, pageProps }: AppProps) {
7 |   return (
8 |     <ChakraProvider>
9 |       <Component {...pageProps} />
10 |       <Analytics />
11 |     </ChakraProvider>
12 |   );
13 | }
14 | 
--------------------------------------------------------------------------------
/chatapp/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 | 
3 | # dependencies
4 | /node_modules
5 | /.pnp
6 | .pnp.js
7 | 
8 | # testing
9 | /coverage
10 | 
11 | # next.js
12 | /.next/
13 | /out/
14 | 
15 | # production
16 | /build
17 | 
18 | # misc
19 | .DS_Store
20 | *.pem
21 | 
22 | # debug
23 | npm-debug.log*
24 | yarn-debug.log*
25 | yarn-error.log*
26 | 
27 | # local env files
28 | .env*.local
29 | 
30 | # vercel
31 | .vercel
32 | 
33 | # typescript
34 | *.tsbuildinfo
35 | next-env.d.ts
36 | 
37 | # env
38 | .env
--------------------------------------------------------------------------------
/chatapp/public/vercel.svg:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/chatapp/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 |   "compilerOptions": {
3 |     "target": "es5",
4 |     "lib": ["dom", "dom.iterable", "esnext"],
5 |     "allowJs": true,
6 |     "skipLibCheck": true,
7 |     "strict": true,
8 |     "forceConsistentCasingInFileNames": true,
9 |     "noEmit": true,
10 |     "esModuleInterop": true,
11 |     "module": "esnext",
12 |     "moduleResolution": "node",
13 |     "resolveJsonModule": true,
14 |     "isolatedModules": true,
15 |     "jsx": "preserve",
16 |     "incremental": true,
17 |     "paths": {
18 |       "@/*": ["./*"]
19 |     }
20 |   },
21 |   "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx"],
22 |   "exclude": ["node_modules"]
23 | }
24 | 
--------------------------------------------------------------------------------
/chatapp/package.json:
--------------------------------------------------------------------------------
1 | {
2 |   "name": "chatapp",
3 |   "version": "0.1.0",
4 |   "private": true,
5 |   "scripts": {
6 |     "dev": "next dev",
7 |     "build": "next build",
8 |     "start": "next start",
9 |     "lint": "next lint"
10 |   },
11 |   "dependencies": {
12 |     "@chakra-ui/react": "^2.5.5",
13 |     "@emotion/react": "^11.10.6",
14 |     "@emotion/styled": "^11.10.6",
15 |     "@types/node": "18.15.12",
16 |     "@types/react": "18.0.37",
17 |     "@types/react-dom": "18.0.11",
18 |     "@vercel/analytics": "^1.0.0",
19 |     "eslint": "8.38.0",
20 |     "eslint-config-next": "13.3.0",
21 |     "framer-motion": "^10.12.4",
22 |     "next": "13.3.0",
23 |     "react": "18.2.0",
24 |     "react-dom": "18.2.0",
25 |     "react-icons": "^4.8.0",
26 |     "typescript": "5.0.4"
27 |   }
28 | }
29 | 
--------------------------------------------------------------------------------
/chatapp/pages/api/chat.ts:
--------------------------------------------------------------------------------
1 | import type { NextApiRequest, NextApiResponse } from "next";
2 | 
3 | type Data = {
4 |   data: string;
5 | };
6 | 
7 | export default async function handler(
8 |   req: NextApiRequest,
9 |   res: NextApiResponse<Data>
10 | ) {
11 |   const response = await fetch(
12 |     `https://api.runpod.ai/v2/${process.env.RUNPOD_ID}/runsync`,
13 |     {
14 |       method: "POST",
15 |       headers: {
16 |         Authorization: process.env.RUNPOD_KEY as string,
17 |         "Content-Type": "application/json",
18 |       },
19 |       body: JSON.stringify({
20 |         input: {
21 |           prompt: req.body.prompt,
22 |         },
23 |       }),
24 |     }
25 |   );
26 | 
27 |   const data = await response.json();
28 |   const output = data.output.output;
29 | 
30 |   console.log("Success:", JSON.stringify(data));
31 | 
32 |   res.status(200).json({ data: output });
33 | }
34 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # [DEPLOYMENT INACTIVE - MUST RUN LOCALLY]
2 | 
3 | 
4 | # Stable Chat 🌍🦒
5 | 
6 | Stability AI, the company behind Stable Diffusion, just launched its own LLM, StableLM. This repo includes a Runpod.io worker to host the model on their serverless GPUs, along with a chatbot interface for talking to the model in a friendly way (:
7 | 
8 | ## How to use
9 | 
10 | 1. Fork this repo
11 | 2. Navigate to /runpod
12 | 3. Upload that folder to Runpod using this guide: https://docs.runpod.io/docs/worker-image-creation
13 | 4. Navigate to /chatapp
14 | 5. Copy `.envExample` to `.env` and fill in your Runpod API key (`RUNPOD_KEY`) and endpoint ID (`RUNPOD_ID`)
15 | 6. Run `npm install` and then `npm run dev`
16 | 7. Open `localhost:3000` in your browser and chat with the model!
17 | 
18 | ## Tech Stack
19 | 
20 | - Next.js
21 | - Chakra UI
22 | - Python
23 | - Vercel (for hosting the API and frontend)
24 | - Runpod (for hosting the model)
25 | 
26 | ps! star this repo if you like it ⭐
27 | 
--------------------------------------------------------------------------------
/chatapp/public/next.svg:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/runpod/handler.py:
--------------------------------------------------------------------------------
1 | import runpod
2 | import torch
3 | from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline, StoppingCriteria, StoppingCriteriaList
4 | 
5 | model_name = "stabilityai/stablelm-tuned-alpha-7b"
6 | m = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16).cuda()
7 | tok = AutoTokenizer.from_pretrained(model_name)
8 | generator = pipeline('text-generation', model=m, tokenizer=tok, device=0)
9 | 
10 | 
11 | def generate(text):
12 |     generated_text = ""
13 |     while not generated_text:
14 |         result = generator(text, max_new_tokens=1024, num_return_sequences=1, num_beams=1, do_sample=True,
15 |                            temperature=1.0, top_p=0.95, top_k=1000)
16 |         generated_text = result[0]["generated_text"].replace(text, "")
17 |     return generated_text
18 | 
19 | def handler(event):
20 |     print(event)
21 | 
22 |     input_text = event.get('input', {}).get('prompt', None)
23 | 
24 |     print(input_text)
25 | 
26 |     if not input_text:
27 |         return { "error": "No input text provided" }
28 | 
29 |     start_message = """# StableAssistant
30 | 
31 | - StableAssistant is a helpful and harmless Open Source AI Language Model developed by Stability and CarperAI.
32 | 
33 | - StableAssistant is excited to be able to help the user, and answers any questions the user may have.
34 | 
35 | - StableAssistant is more than just an information source, StableAssistant is also able to write code, short stories, and make jokes.
36 | 
37 | - StableAssistant responds in a concise and clear manner, like a human and friend.
38 | """ 39 | input_text = f"{start_message}\n{input_text}" 40 | 41 | try: 42 | output_text = generate(input_text) 43 | return { "output": output_text } 44 | except Exception as e: 45 | return { "error": str(e) } 46 | 47 | 48 | runpod.serverless.start({ 49 | "handler": handler 50 | }) -------------------------------------------------------------------------------- /chatapp/styles/globals.css: -------------------------------------------------------------------------------- 1 | :root { 2 | --max-width: 1100px; 3 | --border-radius: 12px; 4 | --font-mono: ui-monospace, Menlo, Monaco, 'Cascadia Mono', 'Segoe UI Mono', 5 | 'Roboto Mono', 'Oxygen Mono', 'Ubuntu Monospace', 'Source Code Pro', 6 | 'Fira Mono', 'Droid Sans Mono', 'Courier New', monospace; 7 | 8 | --foreground-rgb: 0, 0, 0; 9 | --background-start-rgb: 214, 219, 220; 10 | --background-end-rgb: 255, 255, 255; 11 | 12 | --primary-glow: conic-gradient( 13 | from 180deg at 50% 50%, 14 | #16abff33 0deg, 15 | #0885ff33 55deg, 16 | #54d6ff33 120deg, 17 | #0071ff33 160deg, 18 | transparent 360deg 19 | ); 20 | --secondary-glow: radial-gradient( 21 | rgba(255, 255, 255, 1), 22 | rgba(255, 255, 255, 0) 23 | ); 24 | 25 | --tile-start-rgb: 239, 245, 249; 26 | --tile-end-rgb: 228, 232, 233; 27 | --tile-border: conic-gradient( 28 | #00000080, 29 | #00000040, 30 | #00000030, 31 | #00000020, 32 | #00000010, 33 | #00000010, 34 | #00000080 35 | ); 36 | 37 | --callout-rgb: 238, 240, 241; 38 | --callout-border-rgb: 172, 175, 176; 39 | --card-rgb: 180, 185, 188; 40 | --card-border-rgb: 131, 134, 135; 41 | } 42 | 43 | @media (prefers-color-scheme: dark) { 44 | :root { 45 | --foreground-rgb: 255, 255, 255; 46 | --background-start-rgb: 0, 0, 0; 47 | --background-end-rgb: 0, 0, 0; 48 | 49 | --primary-glow: radial-gradient(rgba(1, 65, 255, 0.4), rgba(1, 65, 255, 0)); 50 | --secondary-glow: linear-gradient( 51 | to bottom right, 52 | rgba(1, 65, 255, 0), 53 | rgba(1, 65, 255, 0), 54 | rgba(1, 65, 255, 0.3) 55 | ); 56 | 57 | --tile-start-rgb: 2, 13, 46; 58 | --tile-end-rgb: 2, 5, 19; 59 | --tile-border: conic-gradient( 60 | #ffffff80, 61 | #ffffff40, 62 | #ffffff30, 63 | #ffffff20, 64 | #ffffff10, 65 | #ffffff10, 66 | #ffffff80 67 | ); 68 | 69 | --callout-rgb: 20, 20, 20; 70 | --callout-border-rgb: 108, 108, 108; 71 | --card-rgb: 100, 100, 100; 72 | --card-border-rgb: 200, 200, 200; 73 | } 74 | } 75 | 76 | * { 77 | box-sizing: border-box; 78 | padding: 0; 79 | margin: 0; 80 | } 81 | 82 | html, 83 | body { 84 | max-width: 100vw; 85 | overflow-x: hidden; 86 | } 87 | 88 | body { 89 | color: rgb(var(--foreground-rgb)); 90 | background: linear-gradient( 91 | to bottom, 92 | transparent, 93 | rgb(var(--background-end-rgb)) 94 | ) 95 | rgb(var(--background-start-rgb)); 96 | } 97 | 98 | a { 99 | color: inherit; 100 | text-decoration: none; 101 | } 102 | 103 | @media (prefers-color-scheme: dark) { 104 | html { 105 | color-scheme: dark; 106 | } 107 | } 108 | -------------------------------------------------------------------------------- /chatapp/styles/Home.module.css: -------------------------------------------------------------------------------- 1 | .main { 2 | display: flex; 3 | flex-direction: column; 4 | justify-content: space-between; 5 | align-items: center; 6 | padding: 6rem; 7 | min-height: 100vh; 8 | } 9 | 10 | .description { 11 | display: inherit; 12 | justify-content: inherit; 13 | align-items: inherit; 14 | font-size: 0.85rem; 15 | max-width: var(--max-width); 16 | width: 100%; 17 | z-index: 2; 18 | font-family: var(--font-mono); 19 | } 20 | 21 | 
.description a { 22 | display: flex; 23 | justify-content: center; 24 | align-items: center; 25 | gap: 0.5rem; 26 | } 27 | 28 | .description p { 29 | position: relative; 30 | margin: 0; 31 | padding: 1rem; 32 | background-color: rgba(var(--callout-rgb), 0.5); 33 | border: 1px solid rgba(var(--callout-border-rgb), 0.3); 34 | border-radius: var(--border-radius); 35 | } 36 | 37 | .code { 38 | font-weight: 700; 39 | font-family: var(--font-mono); 40 | } 41 | 42 | .grid { 43 | display: grid; 44 | grid-template-columns: repeat(4, minmax(25%, auto)); 45 | width: var(--max-width); 46 | max-width: 100%; 47 | } 48 | 49 | .card { 50 | padding: 1rem 1.2rem; 51 | border-radius: var(--border-radius); 52 | background: rgba(var(--card-rgb), 0); 53 | border: 1px solid rgba(var(--card-border-rgb), 0); 54 | transition: background 200ms, border 200ms; 55 | } 56 | 57 | .card span { 58 | display: inline-block; 59 | transition: transform 200ms; 60 | } 61 | 62 | .card h2 { 63 | font-weight: 600; 64 | margin-bottom: 0.7rem; 65 | } 66 | 67 | .card p { 68 | margin: 0; 69 | opacity: 0.6; 70 | font-size: 0.9rem; 71 | line-height: 1.5; 72 | max-width: 30ch; 73 | } 74 | 75 | .center { 76 | display: flex; 77 | justify-content: center; 78 | align-items: center; 79 | position: relative; 80 | padding: 4rem 0; 81 | } 82 | 83 | .center::before { 84 | background: var(--secondary-glow); 85 | border-radius: 50%; 86 | width: 480px; 87 | height: 360px; 88 | margin-left: -400px; 89 | } 90 | 91 | .center::after { 92 | background: var(--primary-glow); 93 | width: 240px; 94 | height: 180px; 95 | z-index: -1; 96 | } 97 | 98 | .center::before, 99 | .center::after { 100 | content: ''; 101 | left: 50%; 102 | position: absolute; 103 | filter: blur(45px); 104 | transform: translateZ(0); 105 | } 106 | 107 | .logo { 108 | position: relative; 109 | } 110 | /* Enable hover only on non-touch devices */ 111 | @media (hover: hover) and (pointer: fine) { 112 | .card:hover { 113 | background: rgba(var(--card-rgb), 0.1); 114 | border: 1px solid rgba(var(--card-border-rgb), 0.15); 115 | } 116 | 117 | .card:hover span { 118 | transform: translateX(4px); 119 | } 120 | } 121 | 122 | @media (prefers-reduced-motion) { 123 | .card:hover span { 124 | transform: none; 125 | } 126 | } 127 | 128 | /* Mobile */ 129 | @media (max-width: 700px) { 130 | .content { 131 | padding: 4rem; 132 | } 133 | 134 | .grid { 135 | grid-template-columns: 1fr; 136 | margin-bottom: 120px; 137 | max-width: 320px; 138 | text-align: center; 139 | } 140 | 141 | .card { 142 | padding: 1rem 2.5rem; 143 | } 144 | 145 | .card h2 { 146 | margin-bottom: 0.5rem; 147 | } 148 | 149 | .center { 150 | padding: 8rem 0 6rem; 151 | } 152 | 153 | .center::before { 154 | transform: none; 155 | height: 300px; 156 | } 157 | 158 | .description { 159 | font-size: 0.8rem; 160 | } 161 | 162 | .description a { 163 | padding: 1rem; 164 | } 165 | 166 | .description p, 167 | .description div { 168 | display: flex; 169 | justify-content: center; 170 | position: fixed; 171 | width: 100%; 172 | } 173 | 174 | .description p { 175 | align-items: center; 176 | inset: 0 0 auto; 177 | padding: 2rem 1rem 1.4rem; 178 | border-radius: 0; 179 | border: none; 180 | border-bottom: 1px solid rgba(var(--callout-border-rgb), 0.25); 181 | background: linear-gradient( 182 | to bottom, 183 | rgba(var(--background-start-rgb), 1), 184 | rgba(var(--callout-rgb), 0.5) 185 | ); 186 | background-clip: padding-box; 187 | backdrop-filter: blur(24px); 188 | } 189 | 190 | .description div { 191 | align-items: flex-end; 192 | 
pointer-events: none; 193 | inset: auto 0 0; 194 | padding: 2rem; 195 | height: 200px; 196 | background: linear-gradient( 197 | to bottom, 198 | transparent 0%, 199 | rgb(var(--background-end-rgb)) 40% 200 | ); 201 | z-index: 1; 202 | } 203 | } 204 | 205 | /* Tablet and Smaller Desktop */ 206 | @media (min-width: 701px) and (max-width: 1120px) { 207 | .grid { 208 | grid-template-columns: repeat(2, 50%); 209 | } 210 | } 211 | 212 | @media (prefers-color-scheme: dark) { 213 | .vercelLogo { 214 | filter: invert(1); 215 | } 216 | 217 | .logo { 218 | filter: invert(1) drop-shadow(0 0 0.3rem #ffffff70); 219 | } 220 | } 221 | 222 | @keyframes rotate { 223 | from { 224 | transform: rotate(360deg); 225 | } 226 | to { 227 | transform: rotate(0deg); 228 | } 229 | } 230 | -------------------------------------------------------------------------------- /chatapp/pages/index.tsx: -------------------------------------------------------------------------------- 1 | // NOTE: this code is disgustingly bad 2 | // but it does work. i had little time <:( 3 | 4 | import { useEffect, useState } from "react"; 5 | 6 | import { 7 | Box, 8 | Heading, 9 | HStack, 10 | Image, 11 | Input, 12 | Link, 13 | Text, 14 | VStack, 15 | } from "@chakra-ui/react"; 16 | import { AiOutlineSend } from "react-icons/ai"; 17 | 18 | type Message = { 19 | text: string; 20 | person: string; 21 | avatar: string; 22 | createdAt: number; 23 | }; 24 | 25 | const PEOPLE = [ 26 | { 27 | name: "BOT", 28 | avatar: 29 | "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcSATcdI4GCFzAyjnPG9nU3SX45TJqPy2K2a2g&usqp=CAU", 30 | }, 31 | { 32 | name: "USER", 33 | avatar: 34 | "https://i.pinimg.com/originals/47/2d/63/472d637717c3a23875e9b00d4692be3e.jpg", 35 | }, 36 | ]; 37 | 38 | const Chat = () => { 39 | const [messages, setMessages] = useState([]); 40 | const [message, setMessage] = useState(""); 41 | 42 | const getMessage = async () => { 43 | if (!messages) return; 44 | 45 | const last4 = messages.slice(-4); 46 | 47 | const prompt = last4 48 | .map((message) => { 49 | if (message.person === "BOT") { 50 | return `<|ASSISTANT|>${message?.text.substring(0, 40)}`; 51 | } else { 52 | return `<|USER|>${message?.text.substring(0, 40)}`; 53 | } 54 | }) 55 | .join(""); 56 | 57 | const hit = await fetch("/api/chat", { 58 | method: "POST", 59 | headers: { 60 | "Content-Type": "application/json", 61 | }, 62 | body: JSON.stringify({ 63 | prompt: prompt, 64 | }), 65 | }); 66 | 67 | setMessage(""); 68 | 69 | const res = await hit.json(); 70 | 71 | setMessages((messages) => [ 72 | ...messages, 73 | { 74 | text: res.data, 75 | person: PEOPLE[0].name, 76 | avatar: PEOPLE[0].avatar, 77 | createdAt: Date.now(), 78 | }, 79 | ]); 80 | setTimeout(() => { 81 | const chat = document.getElementById("chat"); 82 | chat && chat.scrollTo({ top: chat.scrollHeight, behavior: "smooth" }); 83 | }, 500); 84 | }; 85 | 86 | useEffect(() => { 87 | if ( 88 | messages && 89 | messages.length && 90 | messages[messages.length - 1].person !== PEOPLE[0].name 91 | ) { 92 | getMessage(); 93 | const chat = document.getElementById("chat"); 94 | chat && chat.scrollTo({ top: chat.scrollHeight, behavior: "smooth" }); 95 | } 96 | 97 | // eslint-disable-next-line react-hooks/exhaustive-deps 98 | }, [messages]); 99 | 100 | useEffect(() => { 101 | const input = document.getElementById("chatinput"); 102 | input && input.focus(); 103 | }, []); 104 | 105 | return ( 106 | 116 | 117 | 118 | Stable Chat 🌍🦒 119 | 120 | 125 | (powered by, but not affiliated with stability.ai) 126 | 127 | 128 | 129 | 139 | 
{messages && 140 | messages.map((message) => ( 141 | 142 | pfp 149 | 150 | {message?.text} 151 | 152 | 153 | ))} 154 | 155 | 156 | 168 | setMessage(e.target.value)} 185 | value={message} 186 | onKeyDown={async (e) => { 187 | if (e.key === "Enter") { 188 | if (messages == undefined) { 189 | setMessages([ 190 | { 191 | text: message, 192 | person: PEOPLE[1].name, 193 | avatar: PEOPLE[1].avatar, 194 | createdAt: Date.now(), 195 | }, 196 | ]); 197 | } else { 198 | setMessages((messages) => [ 199 | ...messages, 200 | { 201 | text: message, 202 | person: PEOPLE[1].name, 203 | avatar: PEOPLE[1].avatar, 204 | createdAt: Date.now(), 205 | }, 206 | ]); 207 | } 208 | } 209 | }} 210 | /> 211 | { 217 | if (messages == undefined) { 218 | setMessages([ 219 | { 220 | text: message, 221 | person: PEOPLE[1].name, 222 | avatar: PEOPLE[1].avatar, 223 | createdAt: Date.now(), 224 | }, 225 | ]); 226 | } else { 227 | setMessages((messages) => [ 228 | ...messages, 229 | { 230 | text: message, 231 | person: "aleem", 232 | avatar: PEOPLE[1].avatar, 233 | createdAt: Date.now(), 234 | }, 235 | ]); 236 | } 237 | }} 238 | _hover={{ 239 | color: "gray.500", 240 | }} 241 | /> 242 | 243 | 250 | due to low interest -- i've shutdown always-on workers. it'll 251 | take 3-7 minutes for a coldstart. you can also run locally{" "} 252 | 257 | here 258 | 259 | 260 | 261 | ); 262 | }; 263 | 264 | export default Chat; 265 | --------------------------------------------------------------------------------
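A quick way to smoke-test the Runpod endpoint without running the Next.js app is sketched below. It is not part of the repo: it simply mirrors the request that chatapp/pages/api/chat.ts sends, assumes Node 18+ (built-in fetch) with ts-node available, and reads the same RUNPOD_ID and RUNPOD_KEY values you put in .env.

// smoke-test.ts -- hypothetical helper, not included in the repository
// Sends a single prompt to the Runpod /runsync endpoint, same request shape as pages/api/chat.ts.
const endpoint = `https://api.runpod.ai/v2/${process.env.RUNPOD_ID}/runsync`;

async function main() {
  const response = await fetch(endpoint, {
    method: "POST",
    headers: {
      Authorization: process.env.RUNPOD_KEY as string,
      "Content-Type": "application/json",
    },
    // handler.py expects the prompt under input.prompt; the <|USER|> prefix matches how index.tsx builds prompts.
    body: JSON.stringify({ input: { prompt: "<|USER|>Tell me a joke" } }),
  });

  const data = await response.json();
  // handler.py returns { "output": ... } and runsync wraps it again, hence output.output (as in chat.ts).
  console.log(data.output.output);
}

main();

Run it with something like `npx ts-node smoke-test.ts` after exporting RUNPOD_ID and RUNPOD_KEY. As the frontend notes, a cold start on the serverless worker can take several minutes before the first response comes back.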