├── .github └── workflows │ └── release.yml ├── .gitignore ├── LICENSE ├── README.md ├── assets ├── demo.webp └── extension-demo.webp ├── package.json ├── packages ├── extension │ ├── .gitignore │ ├── README.md │ ├── package.json │ ├── postcss.config.js │ ├── public │ │ ├── icons │ │ │ ├── icon128.png │ │ │ ├── icon16.png │ │ │ └── icon48.png │ │ ├── index.html │ │ ├── manifest.json │ │ └── models │ │ │ ├── sentencepiece.js │ │ │ ├── tvmjs_runtime.wasi.js │ │ │ └── vicuna-7b-v1 │ │ │ ├── tokenizer.model │ │ │ └── vicuna-7b-v1_webgpu.wasm │ ├── rollup.config.mjs │ ├── src │ │ ├── App.js │ │ ├── Page.jsx │ │ ├── background │ │ │ └── index.js │ │ ├── globals.css │ │ ├── index.js │ │ └── pages │ │ │ ├── AboutPage.jsx │ │ │ ├── LoadingPage.jsx │ │ │ ├── MainPage.jsx │ │ │ ├── NewPromptPage.jsx │ │ │ ├── OptionsPage.jsx │ │ │ └── Textarea.jsx │ └── tailwind.config.js ├── headless │ ├── .eslintrc.json │ ├── .gitignore │ ├── LICENSE │ ├── README.md │ ├── dist │ │ ├── comlink-225113f4.js │ │ ├── index.d.ts │ │ ├── index.js │ │ ├── types │ │ │ └── src │ │ │ │ ├── hooks │ │ │ │ ├── useConversationStore.d.ts │ │ │ │ ├── useLLM.d.ts │ │ │ │ ├── usePersistantConversationStore.d.ts │ │ │ │ └── useStore.d.ts │ │ │ │ ├── index.d.ts │ │ │ │ ├── providers │ │ │ │ └── ModelProvider.d.ts │ │ │ │ └── worker │ │ │ │ └── worker.d.ts │ │ └── worker-e4952377.js │ ├── package.json │ ├── rollup.config.js │ ├── src │ │ ├── hooks │ │ │ ├── useConversationStore.tsx │ │ │ ├── useLLM.tsx │ │ │ ├── usePersistantConversationStore.tsx │ │ │ └── useStore.tsx │ │ ├── index.d.ts │ │ ├── index.ts │ │ ├── providers │ │ │ └── ModelProvider.tsx │ │ └── worker │ │ │ └── worker.ts │ └── tsconfig.json ├── model │ ├── dist │ │ ├── index.d.ts │ │ ├── index.esm.js │ │ ├── index.esm.js.map │ │ ├── index.js │ │ └── index.js.map │ ├── package.json │ ├── rollup.config.js │ ├── src │ │ ├── index.ts │ │ ├── model │ │ │ └── llm.ts │ │ ├── tvm │ │ │ ├── compact.d.ts │ │ │ ├── compact.ts │ │ │ ├── ctypes.d.ts │ │ │ ├── ctypes.ts │ │ │ ├── environment.d.ts │ │ │ ├── environment.ts │ │ │ ├── index.d.ts │ │ │ ├── index.ts │ │ │ ├── memory.d.ts │ │ │ ├── memory.ts │ │ │ ├── rpc_server.d.ts │ │ │ ├── rpc_server.ts │ │ │ ├── runtime.d.ts │ │ │ ├── runtime.ts │ │ │ ├── support.d.ts │ │ │ ├── support.ts │ │ │ ├── types.d.ts │ │ │ ├── types.ts │ │ │ ├── webgpu.d.ts │ │ │ └── webgpu.ts │ │ └── types │ │ │ └── modelApi.ts │ └── tsconfig.json └── retro-ui │ ├── .eslintrc.json │ ├── .gitignore │ ├── LICENSE │ ├── README.md │ ├── next.config.js │ ├── package.json │ ├── postcss.config.js │ ├── public │ ├── buddy88.gif │ ├── favicon.ico │ ├── sounds │ │ ├── imrcv.wav │ │ └── imsend.wav │ └── xp.jpeg │ ├── src │ ├── app │ │ ├── layout.tsx │ │ └── page.jsx │ ├── assets │ │ └── fonts │ │ │ └── ms_sans_serif.woff2 │ ├── components │ │ ├── Chat.tsx │ │ ├── ChatWindow.jsx │ │ ├── ConversationList.jsx │ │ ├── Loader.jsx │ │ ├── MessageList.jsx │ │ └── Options.jsx │ └── styles │ │ └── globals.css │ ├── tailwind.config.js │ └── tsconfig.json ├── pnpm-lock.yaml ├── pnpm-workspace.yaml └── tsconfig.base.json /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Publish Package to npmjs 2 | on: 3 | release: 4 | types: [published] 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v3 10 | - uses: actions/setup-node@v3 11 | with: 12 | node-version: "16.x" 13 | registry-url: "https://registry.npmjs.org" 14 | - uses: pnpm/action-setup@v2 15 | name: Install pnpm 16 | id: 
pnpm-install 17 | with: 18 | version: 8 19 | run_install: false 20 | 21 | - name: Get pnpm store directory 22 | id: pnpm-cache 23 | shell: bash 24 | run: | 25 | echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT 26 | 27 | - uses: actions/cache@v3 28 | name: Setup pnpm cache 29 | with: 30 | path: ${{ steps.pnpm-cache.outputs.STORE_PATH }} 31 | key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} 32 | restore-keys: | 33 | ${{ runner.os }}-pnpm-store- 34 | 35 | - name: Install dependencies 36 | run: pnpm install 37 | 38 | - name: Build 39 | run: pnpm build 40 | - name: Publish to npmjs 41 | run: pnpm publish 42 | env: 43 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} 44 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | **/*.tsbuildinfo 3 | **/**/.next 4 | **/**/node_modules 5 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Matt Rickard 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # @react-llm/headless 2 | 3 | Easy-to-use headless React Hooks to run LLMs in the browser with WebGPU. As simple as `useLLM()`. 4 | 5 | ### [**Live Demo**](https://chat.matt-rickard.com) 6 | 7 | ![image](assets/demo.webp) 8 | 9 | **Features**: 10 | 11 | * Supports [Vicuna 7B](https://lmsys.org/blog/2023-03-30-vicuna/) 12 | * Use custom system prompts and "user:"/"assistant:" role names 13 | * Completion options like `max tokens` and `stop sequences` 14 | * No data leaves the browser. Accelerated via WebGPU. 15 | * Hooks built to 'Bring your own UI' 16 | * Persistent storage for conversations in browser storage. Hooks for loading and saving conversations. 
17 | * Model caching for faster subsequent loads 18 | 19 | ## Installation 20 | 21 | ```bash 22 | npm install @react-llm/headless 23 | ``` 24 | 25 | ## Packages in this repository 26 | - [@react-llm/model](packages/model) - The LLM model and tokenizer compiled for the browser 27 | - [@react-llm/retro-ui](packages/retro-ui) - Retro-themed UI for the hooks 28 | - [@react-llm/extension](packages/extension) - Chrome Extension that uses the hooks 29 | - [@react-llm/headless](packages/headless) - Headless React Hooks for running LLMs in the browser 30 | 31 | 32 | ## **useLLM** API 33 | ### Types 34 | ```typescript 35 | // Model Initialization 36 | init: () => void; 37 | 38 | // Model Generation 39 | send: (msg: string, maxTokens: number, stopSequences: string[]) => void; 40 | onMessage: (msg: GenerateTextResponse) => void; 41 | setOnMessage: (cb: (msg: GenerateTextResponse) => void) => void; 42 | 43 | // Model Status 44 | loadingStatus: InitProgressReport; 45 | isGenerating: boolean; 46 | gpuDevice: GPUDeviceInfo; 47 | 48 | // Model Configuration 49 | userRoleName: string; 50 | setUserRoleName: (roleName: string) => void; 51 | assistantRoleName: string; 52 | setAssistantRoleName: (roleName: string) => void; 53 | 54 | // Conversation Management 55 | conversation: Conversation | undefined; 56 | allConversations: Conversation[] | undefined; 57 | createConversation: (title?: string, prompt?: string) => void; 58 | setConversationId: (conversationId: string) => void; 59 | deleteConversation: (conversationId: string) => void; 60 | deleteAllConversations: () => void; 61 | deleteMessages: () => void; 62 | setConversationTitle: (conversationId: string, title: string) => void; 63 | ``` 64 | 65 | ### Hooks 66 | ```typescript 67 | import useLLM from '@react-llm/headless'; 68 | 69 | const MyComponent = () => { 70 | const { 71 | conversation, 72 | allConversations, 73 | loadingStatus, 74 | isGenerating, 75 | createConversation, 76 | setConversationId, 77 | deleteConversation, 78 | deleteAllConversations, 79 | deleteMessages, 80 | setConversationTitle, 81 | onMessage, 82 | setOnMessage, 83 | userRoleName, 84 | setUserRoleName, 85 | assistantRoleName, 86 | setAssistantRoleName, 87 | gpuDevice, 88 | send, 89 | init, 90 | } = useLLM(); 91 | 92 | // Component logic... 93 | 94 | return null; 95 | }; 96 | ``` 97 | 98 | ### Provider 99 | ```typescript 100 | import { ModelProvider } from "@react-llm/headless"; 101 | 102 | export default function Home() { 103 | return ( 104 | 120 | 121 | 122 | ); 123 | } 124 | ``` 125 | 126 | ### Packages 127 | 128 | * `@react-llm/headless` - Headless React Hooks for running LLMs in the browser 129 | * `@react-llm/retro-ui` - Retro-themed UI for the hooks 130 | 131 | ## How does it work? 132 | 133 | This library is a set of React Hooks that provide a simple interface to run LLMs in the browser. It uses Vicuna 7B. 134 | 135 | * SentencePiece tokenizer (compiled for the browser via Emscripten) 136 | * Vicuna 7B (transformed to Apache TVM format) 137 | * Apache TVM and MLC Relax (compiled for the browser via Emscripten) 138 | * Off-the-main-thread WebWorker to run the model (bundled with the library) 139 | 140 | 141 | The model, tokenizer, and TVM runtime are loaded from a CDN (huggingface). The model is cached in browser storage for faster subsequent loads. 142 | 143 | 144 | 145 | 146 | ### Example 147 | See [packages/retro-ui](packages/retro-ui) for the full demo code. This is a simple example of how to use the hooks.
To run it, after cloning the repo, 148 | 149 | ```bash 150 | cd packages/retro-ui 151 | pnpm install 152 | pnpm dev 153 | ``` 154 | 155 | 156 | ### License 157 | MIT 158 | 159 | The code under `packages/headless/worker/lib/tvm` is licensed under Apache 2.0. 160 | -------------------------------------------------------------------------------- /assets/demo.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/r2d4/react-llm/fbf85ddeed749ec44683e70308d61560983516c1/assets/demo.webp -------------------------------------------------------------------------------- /assets/extension-demo.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/r2d4/react-llm/fbf85ddeed749ec44683e70308d61560983516c1/assets/extension-demo.webp -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@react-llm/workspace", 3 | "version": "0.0.1", 4 | "type": "module", 5 | "main": "dist/bundle.cjs.js", 6 | "module": "dist/bundle.esm.js", 7 | "author": "Matt Rickard ", 8 | "license": "MIT", 9 | "private": true, 10 | "workspaces": [ 11 | "packages/model", 12 | "packages/headless", 13 | "packages/retro-ui", 14 | "packages/extension" 15 | ], 16 | "scripts": { 17 | "publish": "pnpm -r publish --access public", 18 | "build": "pnpm recursive run clean && pnpm recursive run build", 19 | "dev": "pnpm recursive run --no-bail --workspace-concurrency Infinity dev" 20 | }, 21 | "devDependencies": { 22 | "typescript": "^5.0.4" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /packages/extension/.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | 8 | # testing 9 | /coverage 10 | 11 | # production 12 | /build 13 | 14 | # misc 15 | .DS_Store 16 | .env.local 17 | .env.development.local 18 | .env.test.local 19 | .env.production.local 20 | 21 | npm-debug.log* 22 | yarn-debug.log* 23 | yarn-error.log* 24 | 25 | dist/ 26 | -------------------------------------------------------------------------------- /packages/extension/README.md: -------------------------------------------------------------------------------- 1 | # LLaMaTab Extension 2 | 3 | A large language model that runs entirely inside a Chrome Extension. It runs the instruction-tuned Vicuna 7B model, based on LLaMA. 4 | 5 | ![image](../../assets/extension-demo.webp) 6 | 7 | ## Installation 8 | To install, head to the [releases](https://github.com/r2d4/react-llm/releases/latest) tab and download the latest `llamatab-extension.zip` file. Then, in Chrome, go to `chrome://extensions`, enable developer mode, and click "Load unpacked". Unzip the downloaded file and select the extracted `llamatab-extension` folder. You should now see the LLaMaTab extension in your toolbar. 9 | 10 | ## Building from source 11 | 12 | ```bash 13 | pnpm install 14 | pnpm build:extension 15 | ``` 16 | 17 | The extension zip file will be built to `dist/llamatab-extension.zip`.
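For orientation when working on the extension: the popup UI never runs the model itself. It talks to the long-lived background page, which owns the `LLMInstance` and exposes it through a global `API` object (see `src/background/index.js` and `src/App.js`). The sketch below is illustrative only; the request fields passed to `generate()` are placeholder assumptions, not the extension's actual call.

```js
// Popup-side sketch: reach the model through the background page's global API.
// Assumes the API shape defined in src/background/index.js
// (init, generate, addInitListener); the generate() request fields below are
// illustrative placeholders, not the extension's real request shape.
chrome.runtime.getBackgroundPage((backgroundPage) => {
  const api = backgroundPage.API;

  // Kick off model loading; progress callbacks receive { progress: 0..1 }.
  api.init((status) => {
    if (status.progress < 1) return;

    // Model is ready: request a completion and log responses as they arrive.
    api.generate({ prompt: "Hello!", maxTokens: 100 }, (resp) => {
      console.log("generate:", resp);
    });
  });
});
```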
18 | -------------------------------------------------------------------------------- /packages/extension/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@react-llm/llamatab", 3 | "version": "0.0.1", 4 | "private": true, 5 | "dependencies": { 6 | "@react-llm/headless": "workspace:*", 7 | "@react-llm/model": "workspace:^", 8 | "@types/chrome": "^0.0.236", 9 | "autoprefixer": "^10.4.14", 10 | "comlink": "^4.4.1", 11 | "comlink-extension": "^1.0.8", 12 | "postcss": "^8.4.23", 13 | "react": "^18.2.0", 14 | "react-dom": "^18.2.0", 15 | "react-scripts": "5.0.1", 16 | "tailwindcss": "^3.3.2" 17 | }, 18 | "scripts": { 19 | "start": "react-scripts start", 20 | "build": "react-scripts build && npx rollup -c", 21 | "test": "react-scripts test", 22 | "eject": "react-scripts eject", 23 | "dev": "nodemon --watch src --watch public --watch node_modules -e js,jsx,css,html,json,ts,tsx --exec 'pnpm run build'", 24 | "build:extension": "mkdir -p dist/ && pnpm run build && zip -r dist/llamatab-extension.zip build" 25 | }, 26 | "eslintConfig": { 27 | "extends": [ 28 | "react-app", 29 | "react-app/jest" 30 | ] 31 | }, 32 | "browserslist": { 33 | "production": [ 34 | ">0.2%", 35 | "not dead", 36 | "not op_mini all" 37 | ], 38 | "development": [ 39 | "last 1 chrome version", 40 | "last 1 firefox version", 41 | "last 1 safari version" 42 | ] 43 | }, 44 | "devDependencies": { 45 | "@rollup/plugin-babel": "^6.0.3", 46 | "@rollup/plugin-commonjs": "^24.1.0", 47 | "@rollup/plugin-dynamic-import-vars": "^2.0.3", 48 | "@rollup/plugin-node-resolve": "^15.0.2", 49 | "nodemon": "^2.0.22", 50 | "rollup": "^3.21.6" 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /packages/extension/postcss.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | plugins: { 3 | tailwindcss: {}, 4 | autoprefixer: {}, 5 | }, 6 | } 7 | -------------------------------------------------------------------------------- /packages/extension/public/icons/icon128.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/r2d4/react-llm/fbf85ddeed749ec44683e70308d61560983516c1/packages/extension/public/icons/icon128.png -------------------------------------------------------------------------------- /packages/extension/public/icons/icon16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/r2d4/react-llm/fbf85ddeed749ec44683e70308d61560983516c1/packages/extension/public/icons/icon16.png -------------------------------------------------------------------------------- /packages/extension/public/icons/icon48.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/r2d4/react-llm/fbf85ddeed749ec44683e70308d61560983516c1/packages/extension/public/icons/icon48.png -------------------------------------------------------------------------------- /packages/extension/public/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 12 | 13 | 17 | 18 | 27 | React App 28 | 29 | 30 | 31 |
32 | 42 | 43 | 44 | -------------------------------------------------------------------------------- /packages/extension/public/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "manifest_version": 2, 3 | "name": "LlamaTab", 4 | "version": "1.0", 5 | "description": "Run a large language model in your browser.", 6 | "background": { 7 | "scripts": [ 8 | "models/sentencepiece.js", 9 | "models/tvmjs_runtime.wasi.js", 10 | "background.bundle.js" 11 | ], 12 | "persistent": true 13 | }, 14 | "permissions": ["tabs", "storage"], 15 | "browser_action": { 16 | "default_title": "LlamaTab", 17 | "default_popup": "index.html", 18 | "default_icon": { 19 | "16": "icons/icon16.png", 20 | "48": "icons/icon48.png", 21 | "128": "icons/icon128.png" 22 | } 23 | }, 24 | "content_security_policy": "script-src 'self' 'unsafe-eval'; object-src 'self'" 25 | } 26 | -------------------------------------------------------------------------------- /packages/extension/public/models/vicuna-7b-v1/tokenizer.model: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/r2d4/react-llm/fbf85ddeed749ec44683e70308d61560983516c1/packages/extension/public/models/vicuna-7b-v1/tokenizer.model -------------------------------------------------------------------------------- /packages/extension/public/models/vicuna-7b-v1/vicuna-7b-v1_webgpu.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/r2d4/react-llm/fbf85ddeed749ec44683e70308d61560983516c1/packages/extension/public/models/vicuna-7b-v1/vicuna-7b-v1_webgpu.wasm -------------------------------------------------------------------------------- /packages/extension/rollup.config.mjs: -------------------------------------------------------------------------------- 1 | import babel from "@rollup/plugin-babel"; 2 | import commonjs from "@rollup/plugin-commonjs"; 3 | import resolve from "@rollup/plugin-node-resolve"; 4 | 5 | // eslint-disable-next-line import/no-anonymous-default-export 6 | export default { 7 | input: "src/background/index.js", // Replace with the path to your background script 8 | output: { 9 | file: "build/background.bundle.js", // Replace with the desired output path and filename 10 | format: "iife", 11 | }, 12 | external: ["react", "react-dom"], 13 | plugins: [ 14 | resolve({ 15 | browser: true, 16 | esmoduleInterop: true, 17 | }), 18 | commonjs({ 19 | transformMixedEsModules: true, 20 | }), 21 | babel({ 22 | babelHelpers: "bundled", 23 | presets: [["@babel/preset-env", { modules: false }]], 24 | }), 25 | ], 26 | }; 27 | -------------------------------------------------------------------------------- /packages/extension/src/App.js: -------------------------------------------------------------------------------- 1 | import { ModelProvider } from "@react-llm/headless"; 2 | import { useEffect, useState } from "react"; 3 | import Page from "./Page"; 4 | 5 | function App() { 6 | const [api, setApi] = useState(null); 7 | const [loadingStatus, setLoadingStatus] = useState({ progress: 0 }); 8 | useEffect(() => { 9 | // eslint-disable-next-line no-undef 10 | chrome.runtime.getBackgroundPage((backgroundPage) => { 11 | setApi(backgroundPage.API); 12 | backgroundPage.API.addInitListener(setLoadingStatus); 13 | }); 14 | return () => { 15 | // eslint-disable-next-line no-undef 16 | chrome.runtime.getBackgroundPage((backgroundPage) => { 17 | backgroundPage.API.removeInitListener(setLoadingStatus); 18 | 
}); 19 | }; 20 | }, []); 21 | 22 | if (!api) return null; 23 | return ( 24 |
25 | 31 |
32 | { 37 | api.promptTemplates = templates; 38 | }} 39 | setPersistedSystemPrompt={(systemPrompt) => { 40 | api.systemPrompt = systemPrompt; 41 | }} 42 | /> 43 |
44 |
45 |
46 | ); 47 | } 48 | 49 | export default App; 50 | -------------------------------------------------------------------------------- /packages/extension/src/Page.jsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | import { useLLM } from "@react-llm/headless"; 3 | import { useCallback, useEffect, useState } from "react"; 4 | import AboutPage from "./pages/AboutPage"; 5 | import LoadingPage from "./pages/LoadingPage"; 6 | import MainPage from "./pages/MainPage"; 7 | import NewPromptPage from "./pages/NewPromptPage"; 8 | import OptionsPage from "./pages/OptionsPage"; 9 | 10 | export default function Page({ 11 | loadingStatus, 12 | loadedSystemPrompt, 13 | loadedPromptList, 14 | setPersistedPromptList, 15 | setPersistedSystemPrompt, 16 | }) { 17 | const { 18 | send, 19 | setOnMessage, 20 | isGenerating, 21 | setConversationPrompt, 22 | conversation, 23 | } = useLLM(); 24 | const [text, setText] = useState(""); 25 | const [response, setResponse] = useState(""); 26 | const [page, setPage] = useState("main"); 27 | const [maxTokens, setMaxTokens] = useState(400); 28 | const [systemPrompt, setSystemPrompt] = useState(loadedSystemPrompt); 29 | const [promptList, setPromptList] = useState(loadedPromptList); 30 | const [prompt, setPrompt] = useState(loadedPromptList && loadedPromptList[0]); 31 | 32 | useEffect(() => { 33 | return () => { 34 | setPersistedPromptList(promptList); 35 | setPersistedSystemPrompt(systemPrompt); 36 | }; 37 | }); 38 | 39 | useEffect(() => { 40 | setOnMessage(() => (data) => { 41 | console.log(data); 42 | setResponse(data); 43 | }); 44 | }, [setOnMessage]); 45 | 46 | useEffect(() => { 47 | if (conversation?.systemPrompt !== systemPrompt) { 48 | setConversationPrompt(systemPrompt); 49 | } 50 | }, [systemPrompt, conversation]); 51 | 52 | const handleSubmit = useCallback( 53 | (e) => { 54 | e?.preventDefault(); 55 | const tmpl = prompt ? prompt.replace(/\$TEXT/g, text) : text; 56 | send(tmpl, maxTokens); 57 | }, 58 | [send, prompt, text, maxTokens] 59 | ); 60 | 61 | if (loadingStatus.progress < 1) { 62 | return ; 63 | } 64 | 65 | switch (page) { 66 | case "about": 67 | return ; 68 | case "options": 69 | return ( 70 | 77 | ); 78 | case "newPrompt": 79 | return ( 80 | 85 | ); 86 | default: 87 | return ( 88 | 99 | ); 100 | } 101 | } 102 | -------------------------------------------------------------------------------- /packages/extension/src/background/index.js: -------------------------------------------------------------------------------- 1 | import { LLMInstance } from "@react-llm/model"; 2 | 3 | const defaultWorkerConfig = { 4 | kvConfig: { 5 | numLayers: 64, 6 | shape: [32, 32, 128], 7 | dtype: "float32", 8 | }, 9 | wasmUrl: "/models/vicuna-7b-v1/vicuna-7b-v1_webgpu.wasm", 10 | cacheUrl: 11 | "https://huggingface.co/mrick/react-llm/resolve/main/models/vicuna-7b-v1/params/", 12 | tokenizerUrl: "/models/vicuna-7b-v1/tokenizer.model", 13 | sentencePieceJsUrl: "/models/sentencepiece.js", 14 | tvmRuntimeJsUrl: "/models/tvmjs_runtime.wasi.js", 15 | maxWindowSize: 2048, 16 | }; 17 | 18 | const defaultSystemPrompt = 19 | "A chat between a curious user and an artificial intelligence assistant. 
The assistant gives helpful, detailed, and polite answers to the user's questions and always follows the user's instructions."; 20 | 21 | const API = { 22 | instance: null, 23 | initialized: false, 24 | loadingStatus: { 25 | progress: 0, 26 | }, 27 | initListeners: [], 28 | systemPrompt: defaultSystemPrompt, 29 | promptTemplates: ["$TEXT"], 30 | addInitListener(cb) { 31 | this.initListeners.push(cb); 32 | cb(this.loadingStatus); 33 | }, 34 | removeInitListener(cb) { 35 | this.initListeners = this.initListeners.filter((c) => c !== cb); 36 | }, 37 | init(cb = console.log, config = defaultWorkerConfig) { 38 | if (this.initialized) return; 39 | this.instance = new LLMInstance( 40 | config, 41 | // eslint-disable-next-line no-undef 42 | () => globalThis.sentencepiece.sentencePieceProcessor 43 | ); 44 | this.initialized = true; 45 | this.instance.init((resp) => { 46 | cb(resp); 47 | this.loadingStatus = resp; 48 | console.log("init: ", resp); 49 | this.initListeners.forEach((cb) => cb(resp)); 50 | }); 51 | }, 52 | generate(request, cb = console.log) { 53 | console.log("generate request: ", request); 54 | this.instance?.generate(request, (resp) => { 55 | console.log("generate: ", resp); 56 | cb(resp); 57 | }); 58 | }, 59 | }; 60 | 61 | // eslint-disable-next-line no-undef 62 | globalThis.API = API; 63 | -------------------------------------------------------------------------------- /packages/extension/src/globals.css: -------------------------------------------------------------------------------- 1 | @import "tailwindcss/base"; 2 | @import "tailwindcss/components"; 3 | @import "tailwindcss/utilities"; 4 | -------------------------------------------------------------------------------- /packages/extension/src/index.js: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import ReactDOM from "react-dom/client"; 3 | import App from "./App"; 4 | import "./globals.css"; 5 | 6 | const root = ReactDOM.createRoot(document.getElementById("root")); 7 | root.render( 8 | <React.StrictMode> 9 | <App /> 10 | </React.StrictMode> 11 | ); 12 | -------------------------------------------------------------------------------- /packages/extension/src/pages/AboutPage.jsx: -------------------------------------------------------------------------------- 1 | const AboutPage = ({ setPage }) => { 2 | return ( 3 |
4 |
5 |
6 | 7 | 8 | 9 | 14 | 15 | 16 | 17 | 22 | 23 | 24 | 25 | 33 | 34 | 35 | 36 | 37 | 38 |
Author: 10 | 11 | Matt Rickard 12 | 13 |
Twitter: 18 | 19 | @mattrickard 20 | 21 |
GitHub: 26 | 30 | r2d4/react-llm 31 | 32 |
License:MIT
39 |
40 |
41 | 44 |
45 |
46 |
47 | ); 48 | }; 49 | 50 | export default AboutPage; 51 | -------------------------------------------------------------------------------- /packages/extension/src/pages/LoadingPage.jsx: -------------------------------------------------------------------------------- 1 | import { useLLM } from "@react-llm/headless"; 2 | 3 | const LoadingPage = ({ progress }) => { 4 | const { init, gpuDevice } = useLLM(); 5 | 6 | if (gpuDevice?.unsupportedReason !== null && gpuDevice?.checked) { 7 | return ( 8 |
9 |

10 | {JSON.stringify(gpuDevice)} 11 | Sorry! LLamaTab is not supported on your device. Reason: 12 | {gpuDevice.unsupportedReason}. LLamaTab requires a device with WebGPU 13 | support. 14 |

15 |
16 | ); 17 | } 18 | if (progress === 0) 19 | return ( 20 |
21 |

22 | A Large Language Model that runs entirely in the browser via WebGPU. 23 |

24 |

25 | No data is sent to a server. Loading the model for the first time may 26 | take a few minutes. Afterwards, the model will load instantly. 27 |

28 | 34 | 35 |
36 |
37 | ); 38 | 39 | if (progress > 0 && progress < 1) { 40 | return ( 41 |
42 |
43 |

Loading model...

44 | {progress < 0.5 &&

Reticulating splines...

} 45 | {progress > 0.5 &&

Herding Llamas...

} 46 |
47 | 48 |
49 |
55 |
56 |

{Math.floor(progress * 100 * 100) / 100}%

57 |
58 |
59 |
60 | ); 61 | } 62 | }; 63 | 64 | export default LoadingPage; 65 | -------------------------------------------------------------------------------- /packages/extension/src/pages/MainPage.jsx: -------------------------------------------------------------------------------- 1 | import React, { useEffect, useRef } from "react"; 2 | import TextArea from "./Textarea"; 3 | const MainPage = ({ 4 | prompt, 5 | setPrompt, 6 | handleSubmit, 7 | promptList, 8 | setPage, 9 | response, 10 | text, 11 | setText, 12 | isGenerating, 13 | }) => { 14 | const scrollRef = useRef(null); 15 | 16 | const scrollToBottom = () => { 17 | if (scrollRef.current) { 18 | scrollRef.current.scrollIntoView(); 19 | } 20 | }; 21 | 22 | useEffect(() => { 23 | scrollToBottom(); 24 | }, [response, isGenerating]); 25 | 26 | useEffect(() => { 27 | const handleKeyPress = (event) => { 28 | console.log(event); 29 | if (event.metaKey && event.key === "Enter") { 30 | event.preventDefault(); 31 | handleSubmit(); 32 | } 33 | }; 34 | 35 | document.addEventListener("keydown", handleKeyPress); 36 | 37 | return () => { 38 | document.removeEventListener("keydown", handleKeyPress); 39 | }; 40 | }, [handleSubmit]); 41 | 42 | return ( 43 |
44 |
45 |
setPage("about")} 48 | > 49 | ? 50 |
51 |
52 |
Text
53 |