├── .changeset
├── README.md
└── config.json
├── .eslintrc.json
├── .github
└── workflows
│ └── release.yml
├── .gitignore
├── CHANGELOG.md
├── LICENSE.md
├── README.md
├── docs
├── current-state-example-2.png
├── current-state-example.png
├── inform-ai-chat-example.png
└── magic-square.png
├── jest.config.ts
├── jest.setup.ts
├── package.json
├── pnpm-lock.yaml
├── postcss.config.js
├── rollup.config.mjs
├── src
├── InformAIContext.tsx
├── createInformAI.tsx
├── index.ts
├── test
│ ├── ChatBox.test.tsx
│ ├── ChatWrapper.test.tsx
│ ├── CurrentState.test.tsx
│ ├── InformAIContext.test.tsx
│ ├── Messages.test.tsx
│ ├── useInformAI.test.tsx
│ └── utils.test.tsx
├── types.ts
├── ui
│ ├── ChatBox.tsx
│ ├── ChatWrapper.tsx
│ ├── CurrentState.tsx
│ ├── InformAI.tsx
│ ├── Messages.tsx
│ ├── index.ts
│ └── main.css
├── useInformAI.tsx
├── useStreamableContent.tsx
└── utils.tsx
├── tailwind.config.js
└── tsconfig.json
/.changeset/README.md:
--------------------------------------------------------------------------------
1 | # Changesets
2 |
3 | Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a build tool that works
4 | with multi-package repos, or single-package repos to help you version and publish your code. You can
5 | find the full documentation for it [in our repository](https://github.com/changesets/changesets)
6 |
7 | We have a quick list of common questions to get you started engaging with this project in
8 | [our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md)
9 |
--------------------------------------------------------------------------------
/.changeset/config.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://unpkg.com/@changesets/config@3.0.2/schema.json",
3 | "changelog": "@changesets/cli/changelog",
4 | "commit": false,
5 | "fixed": [],
6 | "linked": [],
7 | "access": "public",
8 | "baseBranch": "main",
9 | "updateInternalDependencies": "patch",
10 | "ignore": []
11 | }
12 |
--------------------------------------------------------------------------------
/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "env": {
3 | "browser": true,
4 | "es2022": true
5 | },
6 | "extends": ["eslint:recommended", "plugin:react/recommended", "plugin:react-hooks/recommended"],
7 | "parserOptions": {
8 | "ecmaFeatures": {
9 | "jsx": true
10 | },
11 | "ecmaVersion": "latest",
12 | "sourceType": "module"
13 | },
14 | "plugins": ["react", "react-hooks"],
15 | "rules": {
16 | "react/jsx-uses-react": "error",
17 | "react/jsx-uses-vars": "error"
18 | },
19 | "settings": {
20 | "react": {
21 | "version": "detect"
22 | }
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: Test and Release
2 |
3 | on:
4 | push:
5 | branches:
6 | - '**' # Runs on all branches
7 |
8 | concurrency:
9 | group: ${{ github.workflow }}-${{ github.ref }}
10 | cancel-in-progress: true
11 |
12 | jobs:
13 | test:
14 | runs-on: ubuntu-latest
15 |
16 | strategy:
17 | matrix:
18 | node-version: [18.x, 20.x, 22.x]
19 |
20 | steps:
21 | - name: Checkout code
22 | uses: actions/checkout@v4
23 |
24 | - uses: pnpm/action-setup@v4
25 | with:
26 | version: 9.7.0
27 |
28 | - name: Set up Node.js
29 | uses: actions/setup-node@v4
30 | with:
31 | node-version: ${{ matrix.node-version }}
32 | cache: "pnpm"
33 |
34 | - name: Install dependencies
35 | run: pnpm install
36 |
37 | - name: Run tests
38 | run: npx jest
39 |
40 | release:
41 | name: Release
42 | runs-on: ubuntu-latest
43 | needs: test
44 | if: github.ref == 'refs/heads/main' # Only run this job on the main branch
45 | steps:
46 | - name: Checkout Repo
47 | uses: actions/checkout@v4
48 |
49 | - uses: pnpm/action-setup@v4
50 | with:
51 | version: 9.7.0
52 |
53 | - name: Setup Node.js
54 | uses: actions/setup-node@v4
55 | with:
56 | node-version: "20"
57 | cache: "pnpm"
58 | registry-url: "https://registry.npmjs.org"
59 |
60 | - name: Install dependencies
61 | run: pnpm install
62 |
63 | - name: Create Release Pull Request or Publish to npm
64 | id: changesets
65 | uses: changesets/action@v1
66 | with:
67 | publish: npm run ci:publish
68 | env:
69 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
70 | NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
71 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
72 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Node.js
2 | node_modules/
3 |
4 | # Logs
5 | logs/
6 | *.log
7 | npm-debug.log*
8 | pnpm-debug.log*
9 | yarn-debug.log*
10 | yarn-error.log*
11 |
12 | # Dependency directories
13 | jspm_packages/
14 |
15 | # Optional npm cache directory
16 | .npm/
17 |
18 | # Optional eslint cache
19 | .eslintcache
20 |
21 | # Optional REPL history
22 | .node_repl_history
23 |
24 | # Environment variables
25 | .env
26 | .env.test
27 | .env.production
28 |
29 | # Dist directory
30 | dist/
31 |
32 | # Build directories
33 | build/
34 | .tmp/
35 | out/
36 |
37 | # Coverage directory used by tools like istanbul or cypress
38 | coverage/
39 |
40 | # Temporary files
41 | tmp/
42 | temp/
43 | *.tmp
44 | *.temp
45 |
46 | # IDE files
47 | .vscode/
48 | .idea/
49 | .DS_Store
50 |
51 | # macOS specific files
52 | ._*
53 | .Spotlight-V100
54 | .Trashes
55 |
56 | # Windows specific files
57 | Thumbs.db
58 | Desktop.ini
59 |
60 | # Optional lock files
61 | package-lock.json
62 | yarn.lock
63 | .pnpm-lock.yaml
64 |
65 | # TypeScript build artifacts
66 | *.tsbuildinfo
67 |
68 | # Lint and formatting results
69 | stylelint.cache
70 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # inform-ai
2 |
3 | ## 0.5.4
4 |
5 | ### Patch Changes
6 |
7 | - 57f065e: Fix AI SDK dep again
8 |
9 | ## 0.5.3
10 |
11 | ### Patch Changes
12 |
13 | - 35fac59: Fixed Vercel AI SDK version
14 |
15 | ## 0.5.2
16 |
17 | ### Patch Changes
18 |
19 | - 1014721: Fixed CJS output
20 |
21 | ## 0.5.1
22 |
23 | ### Patch Changes
24 |
25 | - bc1e6e9: Fix scrolling overflow on Messages UI component
26 |
27 | ## 0.5.0
28 |
29 | ### Minor Changes
30 |
31 | - 8b87064: Allow customization of placeholder and send button text
32 |
33 | ## 0.4.1
34 |
35 | ### Patch Changes
36 |
37 | - fc1246a: README improvements
38 |
39 | ## 0.4.0
40 |
41 | ### Minor Changes
42 |
43 | - b7c17d0: Adopt MIT license
44 |
45 | ### Patch Changes
46 |
47 | - b7c17d0: Better README
48 |
49 | ## 0.3.2
50 |
51 | ### Patch Changes
52 |
53 | - c2bb195: Delete vestigial example dir
54 |
55 | ## 0.3.1
56 |
57 | ### Patch Changes
58 |
59 | - 7ce83dd: Rearrange dependencies, devDependencies and peerDependencies
60 |
61 | ## 0.3.0
62 |
63 | ### Minor Changes
64 |
65 | - 1254ce9: Built-in ChatWrapper component
66 |
67 | ### Patch Changes
68 |
69 | - 1254ce9: More jest tests
70 | - 1254ce9: Docs and tests
71 |
72 | ## 0.2.9
73 |
74 | ### Patch Changes
75 |
76 | - a172371: CI changes
77 |
78 | ## 0.2.8
79 |
80 | ### Patch Changes
81 |
82 | - 5e92ebf: Refactor randomId into utils
83 | - 8e819a3: Fix JsonView CSS import
84 | - 8e819a3: Create shorter IDs for components and messages
85 |
86 | ## 0.2.7
87 |
88 | ### Patch Changes
89 |
90 | - 5f744ee: Fix CSS build
91 |
92 | ## 0.2.6
93 |
94 | ### Patch Changes
95 |
96 | - 1dff333: CSS fixes
97 |
98 | ## 0.2.5
99 |
100 | ### Patch Changes
101 |
102 | - ffe5ae6: Use nanoid for generating ids
103 | - 59ade55: Simplify InformAIProvider
104 | - ffe5ae6: Add some tests
105 |
106 | ## 0.2.4
107 |
108 | ### Patch Changes
109 |
110 | - d40bed0: Allow ChatBox placeholder customization
111 | - d40bed0: nicer chevron for CurrentState
112 | - d40bed0: Show no-messages message
113 | - d40bed0: Allow ChatBox to be autoFocus or not
114 |
115 | ## 0.2.3
116 |
117 | ### Patch Changes
118 |
119 | - 0f987fb: Allow Messages to accept a className
120 | - 324abd8: Better-looking collapsed state for CurrentState
121 |
122 | ## 0.2.2
123 |
124 | ### Patch Changes
125 |
126 | - 0b50a6c: Allow CurrentState to be collapsed
127 | - 0b50a6c: Refactored mapComponentState and related functions into separate file
128 | - 64e9e3e: Remove .npmignore, whitelist files in package.json
129 | - 0b50a6c: Expanded README, swapped streamMulti for streamUI
130 | - 1b0aaee: Migrate to tailwind apply statements rather than inline classNames
131 |
132 | ## 0.2.1
133 |
134 | ### Patch Changes
135 |
136 | - 4e5de0f: Docs, make createInformAI have optional argument
137 |
138 | ## 0.2.0
139 |
140 | ### Minor Changes
141 |
142 | - d0c9d64: Added the InformAI component
143 |
144 | ### Patch Changes
145 |
146 | - a749126: Add better JSON view
147 | - 7dcb831: Fix leading zero on last sent in CurrentState
148 |
149 | ## 0.1.0
150 |
151 | ### Minor Changes
152 |
153 | - 53e9a35: Add noState option, return assigned componentId
154 |
155 | ## 0.0.4
156 |
157 | ### Patch Changes
158 |
159 | - c69d2f9: Added tailwind and postfix, incorporated into rollup build
160 |
161 | ## 0.0.3
162 |
163 | ### Patch Changes
164 |
165 | - f22bdb9: Add package scripts
166 |
167 | ## 0.0.2
168 |
169 | ### Patch Changes
170 |
171 | - 9a51acc: Github CI
172 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | # MIT License
2 |
3 | Copyright 2024 Ed Spencer
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
6 |
7 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
8 |
9 | THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
10 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # InformAI - Context-Aware AI Integration for React Apps
2 |
3 | **InformAI** is a tool that enables seamless integration of context-aware AI into any React application. It's designed to work effortlessly with the [Vercel AI SDK](https://sdk.vercel.ai/docs/introduction), but is also compatible with other AI SDK providers.
4 |
5 | ### Key Features:
6 |
7 | - **Contextual AI Integration**: Easily expose the state of your React components to an LLM (Large Language Model) or other AI, providing valuable context with minimal effort.
8 | - **Event Publishing**: Allow your components to publish events, like user interactions, in an LLM-optimized format.
9 | - **Flexible Usage**: Works well with both client-side React components and React Server Components. Though it excels with Next.js and React Server Components, these are not required.
10 |
11 | InformAI doesn't directly send data to your LLM but simplifies integration with tools like the Vercel AI SDK, making it easy to incorporate AI into your app.
12 |
13 | If ChatGPT and other LLMs can read and write text, and Vercel AI SDK adds the ability to write UI by streaming React components as part of an LLM response, InformAI fills in the missing piece by allowing the LLM to read your UI as well as write to it:
14 |
15 | ![InformAI read/write UI](/docs/magic-square.png)
16 |
17 | ## Installation
18 |
19 | Install the NPM package:
20 |
21 | ```sh
22 | npm install inform-ai
23 | ```
24 |
25 | Include the stylesheet if you plan to use the included UI components (or don't, if you want to use them but customize their appearance):
26 |
27 | ```tsx
28 | import "inform-ai/dist/main.css";
29 | ```
30 |
31 | ## Installing the Provider
32 |
33 | InformAI can be used via either the `<InformAI />` Component or the `useInformAI` hook. Either way, you need to wrap any components using InformAI inside an `InformAIProvider`:
34 |
35 | ```tsx
36 | import { InformAIProvider } from "inform-ai";
37 |
38 | //somewhere in your layout.tsx or similar:
39 | <InformAIProvider>{children}</InformAIProvider>;
40 | ```
41 |
42 | ## Exposing Component state
43 |
44 | Now, within any React component that will be rendered inside that `InformAIProvider`, you can insert a `<InformAI />` node:
45 |
46 | ```tsx
47 | import { InformAI } from "inform-ai";
48 |
49 | const prompt = "Shows the life history of a person, including their name, title and age";
50 |
51 | export function Bio({ name, title, age }) {
52 | return (
53 |   <div>
54 |     <InformAI name="Bio" props={{ name, title, age }} prompt={prompt} />
55 |     //... rest of the component here
56 |   </div>
57 | );
58 | }
59 | ```
60 |
61 | By adding the `<InformAI />` tag to our component, we were able to tell the LLM 3 things about our component:
62 |
63 | - **name** - a meaningful name for this specific component instance
64 | - **props** - any props we want to pass to the LLM (must be JSON-serializable)
65 | - **prompt** - a string to help the LLM understand what the component does
66 |
67 | ### useInformAI
68 |
69 | An alternative to the `<InformAI />` component is to use the `useInformAI` hook. `useInformAI` is a little more versatile than `<InformAI />`. Here's a slightly simplified example taken from the [backups table from lansaver](https://github.com/edspencer/lansaver/blob/main/components/backup/table.tsx), showing how to use `useInformAI` instead of `<InformAI />`:
70 |
71 | ```tsx
72 | import { useInformAI } from "inform-ai";
73 |
74 | const prompt =
75 | "This table displays a list of backups taken for various devices. The data will be provided to you in JSON format";
76 |
77 | export function BackupsTable({
78 | name = "Backups Table",
79 | backups,
80 | showDevice = false,
81 | }: {
82 | name?: string;
83 | backups: BackupWithDevice[];
84 | showDevice?: boolean;
85 | }) {
86 | useInformAI({
87 | name,
88 | prompt,
89 | props: {
90 | backups,
91 | },
92 | });
93 |
94 | if (condensed) {
95 | return <CondensedBackupsTable backups={backups} />;
96 | }
97 |
98 | return (
  <table>
    //your table implementation
  </table>
);
99 | }
100 | ```
101 |
102 | It was useful to use the hook in this case as we render a different table if `condensed` is set to true, but we wanted to surface the same information either way to InformAI, so by using 'useInformAI' we didn't need to maintain 2 duplicate copies of an `<InformAI />` tag in our 2 table components.
103 |
104 | ## Exposing Component events
105 |
106 | Another possibility that is unlocked by using `useInformAI` is telling the LLM about component events like clicks or other user interactions:
107 |
108 | Here's an example of a different Table component, which can render arbitrary data and exposes `click` events when the user clicks on a table cell:
109 |
110 | ```tsx
111 | const defaultPrompt = `This component is a table that displays data in a tabular format.
112 | It takes two props: data and colHeaders. The data prop is an array of objects, where
113 | each object represents a row in the table. The colHeaders prop is an optional
114 | array of strings that represent the column headers of the table.
115 | If the colHeaders prop is not provided, the component will use the
116 | keys of the first object in the data array as the column headers.
117 | The component will render the table with the provided data and column headers.`;
118 |
119 | export function Table({ data, colHeaders, name = "Table", informPrompt = defaultPrompt, header }: TableProps) {
120 | const { addEvent } = useInformAI({
121 | name,
122 | prompt: informPrompt,
123 | props: {
124 | data,
125 | colHeaders,
126 | },
127 | });
128 |
129 | //adds a new hint to the AI
130 | const cellClicked = (e: React.MouseEvent) => {
131 | addEvent({
132 | type: "user-click",
133 | description: "User clicked on a cell with data: " + (e.target as HTMLElement).innerHTML,
134 | });
135 | };
136 |
137 | return (
138 |
161 | );
162 | }
163 | ```
164 |
165 | The `type` and `description` we pass can be any strings we like.
166 |
167 | ## Viewing Current State
168 |
169 | Under the covers, InformAI collects together all of the component state and event messages that are published by `` and `useInformAI`. While in development, it's useful to be able to see what InformAI is aware of, and what will be sent with the next user message to the LLM.
170 |
171 | InformAI ships with a small React component called `<CurrentState />` which can be rendered anywhere inside your component tree, and will show you all of the component states and events that InformAI has collected.
172 |
173 | Drop this into your layout.tsx like so:
174 |
175 | ```tsx
176 | import "inform-ai/dist/main.css";
177 | import "./globals.css";
178 |
179 | //optionally include the CurrentState component for easier InformAI debugging
180 | import { InformAIProvider, CurrentState } from "inform-ai";
181 |
182 | export default function RootLayout({
183 | children,
184 | }: Readonly<{
185 | children: React.ReactNode;
186 | }>) {
187 | return (
188 |   <html lang="en">
189 |     <body>
190 |       <InformAIProvider>
191 |         {children}
192 |         <CurrentState className="fixed top-3 right-3" />
193 |       </InformAIProvider>
194 |     </body>
195 |   </html>
196 | );
197 | }
198 | ```
199 |
200 | `<CurrentState />` accepts a `className` so you can style/position it however you like (this example has it pinned top right). It will collapse/expand when you click the component heading if it's getting in the way.
201 |
202 | ![CurrentState component](/docs/current-state-example.png)
203 |
204 | `<CurrentState />` is intended to help understand/debug in development, and is not something you'd likely ship to your users. Each time a component registers a state or event update, a row is added to CurrentState with the ability to dig down into a JSON view of all of the information.
205 |
206 | ## Adding a Chatbot
207 |
208 | How you add your Chatbot UI is completely up to you. InformAI works well alongside the Vercel AI SDK (`npm install ai`), and provides a couple of rudimentary chatbot UI components out of the box that use Vercel AI SDK.
209 |
210 | Here's how you can use that to create your own simple `ChatBot` component using the Vercel AI SDK and InformAI:
211 |
212 | ```tsx
213 | "use client";
214 |
215 | import { ChatWrapper } from "inform-ai";
216 | import { useActions, useUIState } from "ai/rsc";
217 |
218 | export function ChatBot() {
219 | const { submitUserMessage } = useActions();
220 | const [messages, setMessages] = useUIState();
221 |
222 | return <ChatWrapper submitUserMessage={submitUserMessage} messages={messages} setMessages={setMessages} />;
223 | }
224 | ```
225 |
226 | InformAI exposes `ChatBox` and `Messages` components, along with a `ChatWrapper` that just combines them both into an easy package. `ChatBox` is a fairly simple form with a text input and a button to submit the user's message, and `Messages` just renders the conversation between the user and the LLM assistant.
227 |
228 | Because the Vercel AI SDK is awesome, `Messages` can handle streaming LLM responses as well as streaming React Server Components (if you're using nextjs or similar). Here's an example of a conversation using `ChatWrapper`:
229 |
230 | ![ChatWrapper conversation example](/docs/inform-ai-chat-example.png)
231 |
232 | You're highly encouraged to check out the [ChatWrapper source](/src/ui/ChatWrapper.tsx) as well as that for [ChatBox](/src/ui/ChatBox.tsx) and [Messages](/src/ui/Messages.tsx) - they're all pretty straightforward components so you can use all, some or none of them in your app.
233 |
234 | ### Vercel AI backend for this example
235 |
236 | To get that `ChatBot` component to work, we actually need 2 more things:
237 |
238 | - A Vercel `<AIProvider>` in our React tree
239 | - A `submitUserMessage` function
240 |
241 | We can define those both in a single file, something like this:
242 |
243 | ```tsx
244 | "use server";
245 |
246 | import { CoreMessage, generateId } from "ai";
247 | import { createAI } from "ai/rsc";
248 | import { AssistantMessage } from "inform-ai";
249 |
250 | export type ClientMessage = CoreMessage & {
251 | id: string;
252 | };
253 |
254 | export type AIState = {
255 | chatId: string;
256 | messages: ClientMessage[];
257 | };
258 |
259 | export type UIState = {
260 | id: string;
261 | role?: string;
262 | content: React.ReactNode;
263 | }[];
264 |
265 | export async function submitUserMessage(messages: ClientMessage[]) {
266 | const aiState = getMutableAIState();
267 |
268 | //add the new messages to the AI State so the user can refresh and not lose the context
269 | aiState.update({
270 | ...aiState.get(),
271 | messages: [...aiState.get().messages, ...messages],
272 | });
273 |
274 | //set up our streaming LLM response using Vercel AI SDK
275 | const result = await streamUI({
276 | model: openai("gpt-4o-2024-08-06"),
277 | system: "You are a helpful assistant who blah blah blah",
278 | messages: aiState.get().messages,
279 | text: ({ content, done }) => {
280 | if (done) {
281 | //save the LLM's response to our AIState
282 | aiState.done({
283 | ...aiState.get(),
284 | messages: [...aiState.get().messages, { role: "assistant", content }],
285 | });
286 | }
287 |
288 | //AssistantMessage is a simple, styled component that supports streaming text/UI responses
289 | return <AssistantMessage content={content} />;
290 | },
291 | });
292 |
293 | return {
294 | id: generateId(),
295 | content: result.value,
296 | };
297 | }
298 |
299 | export const AIProvider = createAI({
300 | actions: {
301 | submitUserMessage,
302 | },
303 | initialUIState: [] as UIState,
304 | initialAIState: { chatId: generateId(), messages: [] } as AIState,
305 | });
306 | ```
307 |
308 | This gives us our `submitUserMessage` and `<AIProvider>` exports. All you need to do now is add the `<AIProvider>` into your React component tree, just like we did with `<InformAIProvider>`, and everything should Just Work. The `useActions()` hook we used in our `ChatBot.tsx` will be able to pull out our `submitUserMessage` function and pass it to `ChatWrapper`, which will then call it when the user enters and sends a message.
309 |
310 | The AIState management we do there is to keep a running context of the conversation so far - see the [Vercel AI SDK AIState docs](https://sdk.vercel.ai/examples/next-app/state-management/ai-ui-states) if you're not familiar with that pattern.
311 |
312 | The `text` prop we passed to `streamUI` is doing 2 things - rendering a pretty `<AssistantMessage />` bubble for the streaming LLM response, and saving the finished LLM response into the AIState history when the LLM has finished its answer. This allows the LLM to see the whole conversation when the user sends follow-up messages, without the client needing to send the entire conversation each time.
313 |
314 | ### Tips & Tricks
315 |
316 | #### Page-level integration
317 |
318 | The fastest way to add InformAI to your app is by doing so at the page level. Below is an example from the [lansaver application](https://github.com/edspencer/lansaver), which is a nextjs app that backs up configurations for network devices like firewalls and managed switches ([see the full SchedulePage component here](https://github.com/edspencer/lansaver/blob/main/app/schedules/%5Bid%5D/page.tsx)).
319 |
320 | This is a React Server Component, rendered on the server. It imports the `<InformAI />` React component, defines a `prompt` string to help the LLM understand what this component does, and then renders `<InformAI />` with a meaningful component `name`, the `prompt`, and an arbitrary `props` object, which is passed to the LLM in addition to the name and prompt:
321 |
322 | ```tsx
323 | import { InformAI } from "inform-ai";
324 |
325 | const prompt = `A page that shows the details of a schedule. It should show the schedule's configuration, the devices in the schedule, and recent jobs for the schedule. It should also have buttons to run the schedule, edit the schedule, and delete the schedule.`;
326 |
327 | export default async function SchedulePage({ params: { id } }: { params: { id: string } }) {
328 | const schedule = await getSchedule(parseInt(id, 10));
329 |
330 | if (!schedule) {
331 | return notFound();
332 | }
333 |
334 | const devices = await getScheduleDevices(schedule.id);
335 | const jobs = await recentJobs(schedule.id);
336 |
337 | return (
338 |
358 | );
359 | }
360 | ```
361 |
362 | In this case we passed the `schedule` (a row from the database), `devices` (an array of device database rows) and `jobs` (an array of recent backup jobs) to the LLM, but we could have passed anything into `props`, so long as it is serializable into JSON. Next time the user sends the LLM a message, it will also receive all of the context we just exposed to it about this page, so can answer questions about what the user is looking at.
363 |
364 | When possible, it is usually better to use InformAI at the component level rather than the page level to take advantage of React's composability, but it's really up to you.
365 |
366 | #### Streaming UI using Tools
367 |
368 | We can extend our use of streamUI and other functions like it by providing tools definitions for the LLM to choose from. The streamUI() function and its UI streaming capabilities are 100% Vercel AI SDK functionality and not InformAI itself, but InformAI plays well with it and supports streaming UI instead of/in addition to streaming text responses from the LLM:
369 |
370 | ```tsx
371 | //inside our submitUserMessage function
372 | const result = await streamUI({
373 | model: openai("gpt-4o-2024-08-06"),
374 | system: "You are a helpful assistant who blah blah blah",
375 | messages: aiState.get().messages,
376 | text: ({ content, done }) => {
377 | if (done) {
378 | //save the LLM's response to our AIState
379 | aiState.done({
380 | ...aiState.get(),
381 | messages: [...aiState.get().messages, { role: "assistant", content }],
382 | });
383 | }
384 |
385 | return <AssistantMessage content={content} />;
386 | },
387 | tools: {
388 | redirect: RedirectTool,
389 | backupsTable: BackupsTableTool,
390 | },
391 | });
392 | ```
393 |
394 | In the code snippet above we defined 2 tools that the LLM can execute if it thinks it makes sense to do so. If the tool has a `generate` function, it can render arbitrary React components that will be streamed to the browser. Tools can be defined inline but they're easier to read, test and swap in/out when extracted into their own files (tool-calling LLMs like those from OpenAI are still not great at picking the right tool when given too many options).
395 |
396 | Here's a real-world example of a tool definition used in the [LANsaver](https://github.com/edspencer/lansaver) project ([see the full tool source](https://github.com/edspencer/lansaver/blob/main/app/tools/BackupsTable.tsx)). Most of this file is just textual description telling the LLM what the tool is and how to use it. The important part of the tool is the `generate` function:
397 |
398 | ```tsx
399 | import { z } from "zod";
400 | import { BackupsTable } from "@/components/backup/table";
401 | import { getDeviceBackups, getDeviceByHostname } from "@/models/device";
402 | import { getPaginatedBackups } from "@/models/backup";
403 | import { Spinner } from "@/components/common/spinner";
404 |
405 | type BackupsTableToolParameters = {
406 | condensed?: boolean;
407 | showDevice?: boolean;
408 | deviceId?: number;
409 | deviceName?: string;
410 | perPage?: number;
411 | name?: string;
412 | };
413 |
414 | const BackupsTableTool = {
415 | //tells the LLM what this tool is and when to use it
416 | description:
417 | "Renders a table of backups. Optionally, you can show the device column and condense the table. If the user requests to see all backups, do not pass in a deviceId.",
418 |
419 | //tells the LLM how to invoke the tool, what the arguments are and which are optional
420 | parameters: z.object({
421 | condensed: z
422 | .boolean()
423 | .optional()
424 | .describe("Set to true to condense the table and hide some of the extraneous columns"),
425 | showDevice: z.boolean().optional().describe("Set to true to show the device column"),
426 | deviceId: z
427 | .number()
428 | .optional()
429 | .describe("The ID of the device to show backups for (do not set to show all backups)"),
430 | deviceName: z
431 | .string()
432 | .optional()
433 | .describe(
434 | "The name of the device to show backups for. Pass this if the user asks for backups for a device by name. The tool will perform a fuzzy search for this device"
435 | ),
436 | perPage: z.number().optional().describe("The number of backups to show per page (defaults to 5)"),
437 | name: z.string().optional().describe("A name to give to this table. For example, 'Recent Backups for Device X'"),
438 | }),
439 |
440 | //if the LLM decides to call this tool, generate will be called with the arguments the LLM decided to use
441 | //this function can yield a temporary piece of UI, like a spinner, and then return the permanent UI when ready
442 | generate: async function* (config: BackupsTableToolParameters) {
443 | const { condensed, showDevice, deviceId, deviceName, perPage = 5, name } = config;
444 |
445 | let backups;
446 |
447 | yield <Spinner />;
448 |
449 | if (deviceName) {
450 | // Perform a fuzzy search for the device
451 | const device = await getDeviceByHostname(deviceName);
452 |
453 | if (device) {
454 | backups = await getDeviceBackups(device.id, { take: perPage });
455 | }
456 | } else if (deviceId) {
457 | backups = await getDeviceBackups(deviceId, { take: perPage });
458 | }
459 |
460 | if (!backups) {
461 | backups = (await getPaginatedBackups({ includeDevice: true, page: 1, perPage })).backups;
462 | }
463 |
464 | return <BackupsTable name={name} backups={backups} condensed={condensed} showDevice={showDevice} />;
465 | },
466 | };
467 |
468 | export default BackupsTableTool;
469 | ```
470 |
471 | Note that this is all just vanilla Vercel AI SDK functionality, and you can read more about it [in their docs](https://sdk.vercel.ai/examples/next-app/interface/route-components). Basically, though, this function `yield`s a Spinner component while it is loading the data for the real component it will show, then does some basic fuzzy searching, then finally returns the `<BackupsTable />` component, which will be streamed to the UI.
472 |
473 | The [Render Interface During Tool Call](https://sdk.vercel.ai/examples/next-app/tools/render-interface-during-tool-call) documentation in the Vercel AI SDK is a good thing to read if you're not familiar with what that can do already.
474 |
475 | ### What the LLM sees
476 |
477 | Here's an example of a quick conversation with the LLM after just integrating InformAI into the `SchedulePage` component that we showed above. `SchedulePage` is just a Next JS page (therefore a React component) that shows some simple details about a backup schedule for devices on a network:
478 |
479 | 
480 |
481 | By just adding the `<InformAI />` tag to our `SchedulePage`, we were able to have this conversation with the LLM about its contents, without having to do any other integration. But because we also defined our `FirewallsTableTool` tool, the LLM knew how to stream our `<FirewallsTable />` component back instead of a text response.
482 |
483 | Because `<FirewallsTable />` also uses InformAI, via the `useInformAI` hook, the LLM also sees all of the information in the React component that it just streamed back, so it was able to answer questions about that too ("did all of those complete successfully?").
484 |
485 | As our project was using the bundled `<CurrentState />` component while we had this conversation, we could easily see what the state sent to the LLM looks like directly in our UI:
486 |
487 | 
488 |
489 | Here you can see that 4 `state` messages were published to InformAI - the first 2 were for the `SchedulePage` (which has name=`Schedule Detail Page`), and 2 for the freshly-streamed `<FirewallsTable />` that the LLM sent back. Expanding the last message there, we can see that the LLM gave the streamed component a sensible name based on the user's request, and also has the `prompt` and `props` that we supply it in `useInformAI`.
490 |
491 | The 'Last Sent' message at the bottom tells us that all of the messages above were already sent to the LLM, as they were popped off the stack using `popRecentMessages` in our `<ChatWrapper>` component. `ChatWrapper` also did some deduping and conversion of the messages into an LLM-friendly format. If we add `console.log(aiState.get().messages)` to our `submitUserMessage` function we will see something like this:
492 |
493 | ```
494 | [
495 | {
496 | id: '492b4wc',
497 | content: 'Component adc51d has updated its state\n' +
498 | 'Component Name: Schedule Detail Page\n' +
499 | "Component self-description: A page that shows the details of a schedule. It should show the schedule's configuration, the devices in the schedule, and recent jobs for the schedule. It should also have buttons to run the schedule, edit the schedule, and delete the schedule.\n" +
500 | 'Component props: {"schedule":{"id":6,"disabled":false,"cron":"0 0 * * *","name":"Daily","devices":"21,19,83","createdAt":"2024-06-14T19:55:29.825Z","updatedAt":"2024-08-13T21:17:43.936Z"},"devices":[{"id":21,"type":"tplink","hostname":"masterclosetswitch.local","credentials":"fdbec89f1c1bb41b0e55d60f46092da3:7391a495c95411ebc5f087c4b8f5bcfb2b903d370cedd6a819a9e69b15f03999b9fbc4378a5254751ddb038bfec87facd5b642d0aa28b48b9ecf675b0deceb28","config":"{}","createdAt":"2024-06-14T19:55:29.824Z","updatedAt":"2024-06-14T19:55:29.824Z"},{"id":19,"type":"opnsense","hostname":"firewall.local","credentials":"dd503eaafa2acdae999023a63735b7b8:9af028d461a8b3aea27c6edc013d64e98d33476d8614bdd0ad1cab42601a2517c01cc0342b6946fee8bb5a31ceaa26a659b37051da1584ba163360f9465997154ff7f9344ff5726683fe6183e6e7054f622aeeaaa2402dc416e5ae6edea6cb34ff9d80720bb9942d2ccb90015821f8fa103ec0f116bcc3532b2ff285dad80ec56c90503996b094daf52b5775b224b137a8ba0dc13d29e2e4c37d244ff10bda30bc7ed892390efc3e3ac19dd0845e7cb0e6b3cd88c2f126d2f8d9b7191f85f72f","config":"{}","createdAt":"2024-06-14T19:55:29.823Z","updatedAt":"2024-06-16T16:06:39.091Z"},{"id":83,"type":"hass","hostname":"13232pct.duckdns.org","credentials":"6628e50a7bd550741dd1963ef98bfb67:107376648f66355e19787eb82036ea506a9cae6152ed98f1f1739640d2a930f30c54683c9bc3eaebd49043434afeed16b7928ba31b44048477b40d68f2a1638d83a9e1aaf83f015ffc53ed5114eb77fd90e06cfe3f52f804d9433056b985a0f00358e42d04733440e7c3c245a926266e3f5d1232022850baa970e38d8a33b032e1ccdadc563574420447cacb8498dbb637dfdfa19272cf226df112730cd8e4282e09ce99c30e0854c7ca5144426ad8f7f349fcffea7da3f7970c3ad5af9b33023ad7f057ad4144817f9df0e4c69e1466","config":"{\\"port\\":\\"3000\\"}","createdAt":"2024-07-16T17:13:33.455Z","updatedAt":"2024-07-16T17:13:33.455Z"}],"jobs":[{"id":24,"createdAt":"2024-08-13T21:17:54.387Z","updatedAt":"2024-08-13T21:18:31.499Z","startedAt":"2024-08-13T21:17:54.400Z","finishedAt":"2024-08-13T21:18:31.496Z","status":"completed","scheduleId":6,"_count":{"backups":3}},{"id":
23,"createdAt":"2024-08-13T21:15:46.991Z","updatedAt":"2024-08-13T21:15:47.571Z","startedAt":"2024-08-13T21:15:47.004Z","finishedAt":"2024-08-13T21:15:47.570Z","status":"completed","scheduleId":6,"_count":{"backups":2}},{"id":22,"createdAt":"2024-08-13T21:09:42.083Z","updatedAt":"2024-08-13T21:09:42.083Z","startedAt":null,"finishedAt":null,"status":"pending","scheduleId":6,"_count":{"backups":1}},{"id":18,"createdAt":"2024-07-15T15:37:38.955Z","updatedAt":"2024-07-15T15:38:14.366Z","startedAt":"2024-07-15T15:37:38.967Z","finishedAt":"2024-07-15T15:38:14.365Z","status":"completed","scheduleId":6,"_count":{"backups":2}},{"id":17,"createdAt":"2024-07-15T15:36:30.814Z","updatedAt":"2024-07-15T15:37:06.483Z","startedAt":"2024-07-15T15:36:30.828Z","finishedAt":"2024-07-15T15:37:06.482Z","status":"completed","scheduleId":6,"_count":{"backups":2}}]}',
501 | role: 'system'
502 | },
503 | {
504 | id: 'QfhU2Z2',
505 | content: 'how many devices does this schedule back up?',
506 | role: 'user'
507 | },
508 | { role: 'assistant', content: 'This schedule backs up 3 devices.' },
509 | {
510 | id: '3Rytlyw',
511 | content: 'Component 2b5e1c has updated its state\n' +
512 | 'Component Name: 3 Most Recent Backups for firewall.local\n' +
513 | 'Component self-description: This table displays a list of backups taken for various devices. The data will be provided to you in JSON format\n' +
514 | 'Component props: {"backups":[{"id":65,"jobId":24,"deviceId":19,"createdAt":"2024-08-13T21:17:54.391Z","updatedAt":"2024-08-13T21:17:54.626Z","status":"completed","bytes":57117},{"id":63,"jobId":23,"deviceId":19,"createdAt":"2024-08-13T21:15:46.996Z","updatedAt":"2024-08-13T21:15:47.571Z","status":"completed","bytes":57117},{"id":61,"jobId":22,"deviceId":19,"createdAt":"2024-08-13T21:09:42.091Z","updatedAt":"2024-08-13T21:09:42.091Z","status":"pending","bytes":null}]}',
515 | role: 'system'
516 | },
517 | {
518 | id: 'Min6j9V',
519 | content: 'did all of those complete successfully?',
520 | role: 'user'
521 | },
522 | {
523 | role: 'assistant',
524 | content: 'Out of the three most recent backups for the device "firewall.local," two backups completed successfully, and one is still pending:\n' +
525 | '\n' +
526 | '1. Backup ID 65: Completed\n' +
527 | '2. Backup ID 63: Completed\n' +
528 | '3. Backup ID 61: Pending'
529 | }
530 | ]
531 | ```
532 |
It's a little dense to the human eye, but here we can see the first message is from the `system` role, and is a string representation of the content that we supplied to `<InformAI>` in our `SchedulePage` React component. After that we see our user message, followed by another `system` message that InformAI injected for us because the `<FirewallsTable>` was streamed in as a response and published data to InformAI.
534 |
535 | The internal messages stored by InformAI are converted into LLM-friendly strings via the [mapComponentMessages](https://github.com/edspencer/inform-ai/blob/main/src/utils.tsx) function, but it's easy to swap that out for any function you like. The default `mapComponentMessages` function just delegates to a function that looks like this:
536 |
537 | ```tsx
538 | export function mapStateToContent(state: StateMessage) {
539 | const content = [];
540 |
541 | const { name, componentId, prompt, props } = state.content;
542 |
543 | content.push(`Component ${componentId} has updated its state`);
544 |
545 | if (name) {
546 | content.push(`Component Name: ${name}`);
547 | }
548 |
549 | if (prompt) {
550 | content.push(`Component self-description: ${prompt}`);
551 | }
552 |
553 | if (props) {
554 | content.push(`Component props: ${JSON.stringify(props)}`);
555 | }
556 |
557 | return content.join("\n");
558 | }
559 | ```
560 |
--------------------------------------------------------------------------------
/docs/current-state-example-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/edspencer/inform-ai/8f36699a9d71c257109b6cb885c9f5146878bbc8/docs/current-state-example-2.png
--------------------------------------------------------------------------------
/docs/current-state-example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/edspencer/inform-ai/8f36699a9d71c257109b6cb885c9f5146878bbc8/docs/current-state-example.png
--------------------------------------------------------------------------------
/docs/inform-ai-chat-example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/edspencer/inform-ai/8f36699a9d71c257109b6cb885c9f5146878bbc8/docs/inform-ai-chat-example.png
--------------------------------------------------------------------------------
/docs/magic-square.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/edspencer/inform-ai/8f36699a9d71c257109b6cb885c9f5146878bbc8/docs/magic-square.png
--------------------------------------------------------------------------------
/jest.config.ts:
--------------------------------------------------------------------------------
1 | import type { JestConfigWithTsJest } from "ts-jest";
2 |
3 | const jestConfig: JestConfigWithTsJest = {
4 | preset: "ts-jest",
5 | testEnvironment: "jsdom",
6 | moduleNameMapper: {
7 | "^ai/rsc$": "/node_modules/ai/rsc/dist",
8 | "^@/(.*)$": "/$1",
9 | "\\.(css|less|sass|scss)$": "identity-obj-proxy",
10 | },
11 | modulePaths: [""],
12 | transformIgnorePatterns: ["/node_modules/(?!react18-json-view)"],
13 | testPathIgnorePatterns: ["/node_modules/", "/dist/"],
14 |
15 | setupFilesAfterEnv: ["/jest.setup.ts"],
16 | };
17 |
18 | export default jestConfig;
19 |
--------------------------------------------------------------------------------
/jest.setup.ts:
--------------------------------------------------------------------------------
1 | import "@testing-library/jest-dom/jest-globals";
2 | import "@testing-library/jest-dom";
3 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "inform-ai",
3 | "version": "0.5.4",
4 | "description": "A collection of hooks and utilities to easily add contextual AI to React applications",
5 | "main": "./dist/index.cjs.js",
6 | "module": "./dist/index.js",
7 | "types": "./dist/types/index.d.ts",
8 | "exports": {
9 | ".": {
10 | "import": "./dist/index.js",
11 | "require": "./dist/index.cjs.js"
12 | }
13 | },
14 | "scripts": {
15 | "test": "npx jest",
16 | "build": "tsc && rollup -c",
17 | "build:watch": "concurrently \"tsc --watch\" \"rollup -c --watch\"",
18 | "ci:version": "changeset version",
19 | "ci:publish": "tsc && rollup -c && changeset publish"
20 | },
21 | "keywords": [
22 | "ai",
23 | "react"
24 | ],
25 | "files": [
26 | "/dist/**/*"
27 | ],
28 | "repository": {
29 | "type": "git",
30 | "url": "git+https://github.com/edspencer/inform-ai.git"
31 | },
32 | "author": "Ed Spencer",
33 | "license": "MIT",
34 | "devDependencies": {
35 | "@changesets/cli": "^2.27.7",
36 | "@jest/globals": "^29.7.0",
37 | "@rollup/plugin-commonjs": "^26.0.1",
38 | "@rollup/plugin-node-resolve": "^15.2.3",
39 | "@testing-library/dom": "^10.4.0",
40 | "@testing-library/jest-dom": "^6.4.8",
41 | "@testing-library/react": "^16.0.0",
42 | "@types/jest": "^29.5.12",
43 | "@types/react": "^18.3.4",
44 | "@types/react-dom": "^18.3.0",
45 | "@types/uuid": "^10.0.0",
46 | "ai": ">=3.3.17",
47 | "autoprefixer": "^10.4.20",
48 | "concurrently": "^8.2.2",
49 | "eslint": "^8.57.0",
50 | "eslint-plugin-react": "^7.35.0",
51 | "eslint-plugin-react-hooks": "^4.6.2",
52 | "identity-obj-proxy": "^3.0.0",
53 | "jest": "^29.7.0",
54 | "jest-environment-jsdom": "^29.7.0",
55 | "postcss": "^8.4.41",
56 | "react": "^18 || ^19",
57 | "react-dom": "^18 || ^19",
58 | "rollup": "^4.20.0",
59 | "rollup-plugin-peer-deps-external": "^2.2.4",
60 | "rollup-plugin-postcss": "^4.0.2",
61 | "rollup-plugin-typescript2": "^0.36.0",
62 | "tailwindcss": "^3.4.9",
63 | "ts-jest": "^29.2.4",
64 | "ts-node": "^10.9.2",
65 | "typescript": "^5.5.4"
66 | },
67 | "dependencies": {
68 | "clsx": "^2.1.1",
69 | "react18-json-view": "^0.2.8",
70 | "uuid": "^10.0.0"
71 | },
72 | "peerDependencies": {
73 | "ai": ">=3.3.17",
74 | "react": "^18 || ^19",
75 | "react-dom": "^18 || ^19"
76 | }
77 | }
78 |
--------------------------------------------------------------------------------
/postcss.config.js:
--------------------------------------------------------------------------------
// PostCSS pipeline used when building the packaged stylesheet (dist/main.css).
module.exports = {
  plugins: {
    // Flattens nested CSS rules; must run before Tailwind itself.
    "tailwindcss/nesting": {},
    tailwindcss: {},
    // Adds vendor prefixes based on the project's browserslist targets.
    autoprefixer: {},
  },
};
8 |
--------------------------------------------------------------------------------
/rollup.config.mjs:
--------------------------------------------------------------------------------
1 | import typescript from "rollup-plugin-typescript2";
2 | import commonjs from "@rollup/plugin-commonjs";
3 | import peerDepsExternal from "rollup-plugin-peer-deps-external";
4 | import postcss from "rollup-plugin-postcss";
5 |
// Watch settings shared by the two JavaScript builds.
const watchSources = { include: "src/**" };

// Factory producing the plugin pipeline for the JS builds. Called once per
// build so each output gets its own plugin instances.
const jsPlugins = () => [peerDepsExternal(), typescript({ useTsconfigDeclarationDir: true }), commonjs()];

export default [
  // Standalone stylesheet bundle, extracted and minified to dist/main.css.
  {
    input: "src/ui/main.css",
    output: [{ file: "dist/main.css", format: "es" }],
    plugins: [
      postcss({
        extract: true,
        minimize: true,
      }),
    ],
  },
  // ESM Build
  {
    input: "src/index.ts",
    output: { file: "dist/index.js", format: "esm", sourcemap: true },
    plugins: jsPlugins(),
    watch: watchSources,
  },
  // CommonJS Build
  {
    input: "src/index.ts",
    output: { file: "dist/index.cjs.js", format: "cjs", sourcemap: true },
    plugins: jsPlugins(),
    watch: watchSources,
  },
];
44 |
--------------------------------------------------------------------------------
/src/InformAIContext.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import { createContext, useContext, useState, ReactNode } from "react";
4 | import { randomId } from "./utils";
5 |
6 | import { StateMessage, EventMessage, Message, ComponentState, ComponentEvent, Conversation } from "./types";
7 |
8 | /**
9 | * Defines the shape of the InformAIContext.
10 | */
export interface InformAIContextType {
  /** All state/event messages published so far, oldest first. */
  messages: Message[];
  /** Metadata about the current conversation (id, createdAt, lastSentAt). */
  conversation: Conversation;
  /** Appends a fully-formed message to the message list. */
  addMessage: (message: Message) => void;
  /** Wraps a component state in a new state-type message and appends it. */
  addState: (state: ComponentState) => void;
  /** Wraps a component event in a new event-type message and appends it. */
  addEvent: (event: ComponentEvent) => void;
  /** Appends an already-wrapped state message. */
  addStateMessage: (state: StateMessage) => void;
  /** Appends an already-wrapped event message and invokes the onEvent callback, if any. */
  addEventMessage: (event: EventMessage) => void;
  /** Optional accessor for event messages. */
  getEvents?: () => Message[];
  /** Returns the most recently published state for the given componentId. */
  getState: (componentId: string) => ComponentState | undefined;
  /** Merges updates over the most recent state for componentId (or creates a new one). */
  updateState: (componentId: string, updates: object) => void;
  /** Optional callback fired whenever an event message is added. */
  onEvent?: (event: any) => void;
  /** Returns messages created at or after the given date (inclusive). */
  getMessagesSince: (since: Date) => Message[];
  /** Returns messages newer than `since` (default lastSentAt) and advances lastSentAt. */
  popRecentMessages: (since?: Date) => Message[];
  /** Removes messages newer than `since` (default lastSentAt) from the list. */
  clearRecentMessages: (since?: Date) => void;
  /** Returns messages created since the conversation's lastSentAt. */
  getRecentMessages: () => Message[];
}
28 |
29 | /**
30 | * The InformAIContext that provides access to messages and conversation state.
31 | */
32 | export const InformAIContext = createContext(undefined);
33 |
34 | /**
35 | * Props for the InformAIProvider component.
36 | */
interface InformAIProviderProps {
  /** React subtree that will have access to the InformAI context. */
  children?: ReactNode;
  /** Optional callback invoked whenever an event message is published. */
  onEvent?: (event: any) => void;
}
41 |
42 | /**
43 | * The internal implementation of the InformAIProvider component. Sample usage:
44 | *
45 | * import { InformAIProvider } from 'inform-ai';
46 | *
47 | * export default function MyComponent() {
 *   return (
 *     <InformAIProvider>
 *       {children}
 *     </InformAIProvider>
 *   );
53 | * }
54 | *
 * Now within child React components you can use useInformAI() or <InformAI /> to surface
56 | * information about your components to the LLM.
57 | */
export const InformAIProvider = ({ children, onEvent }: InformAIProviderProps) => {
  // NOTE(review): these useState calls appear to have lost their generic type
  // arguments (e.g. useState<Message[]>([])) in this copy — verify upstream.
  const [messages, setMessages] = useState([]);
  const [conversation, setConversation] = useState({
    id: randomId(8),
    createdAt: new Date(),
    lastSentAt: new Date(),
  });

  /**
   * Retrieves the most recently published state of the component with the
   * specified componentId.
   * @param componentId The ID of the component.
   * @returns The state of the component, if found; otherwise, undefined.
   */
  function getState(componentId: string) {
    // NOTE(review): Array.prototype.reverse() mutates `messages` — the React
    // state array — in place as a side effect of every lookup. Consider
    // [...messages].reverse(); confirm nothing relies on the reversal.
    return messages
      .reverse()
      .filter((message) => message.type === "state")
      .map((message) => message as StateMessage)
      .find((message) => message.content.componentId === componentId)?.content;
  }

  /**
   * Adds a message to the list of messages.
   * @param message The message to add.
   */
  function addMessage(message: Message) {
    setMessages((prevMessages) => [...prevMessages, message]);
  }

  /**
   * Wraps a ComponentState in a new state-type Message (fresh id + timestamp)
   * and appends it to the message list.
   * @param state The component state to add.
   */
  function addState(state: ComponentState) {
    addMessage({
      id: randomId(8),
      createdAt: new Date(),
      type: "state",
      content: state,
    });
  }

  /**
   * Updates the state of a component with the specified componentId by
   * appending a new state message that merges `updates` over the most recent
   * state; if the component has no prior state, `updates` becomes the state.
   * @param componentId The ID of the component.
   * @param updates The updates to apply to the state.
   */
  function updateState(componentId: string, updates: object) {
    const mostRecentState = getState(componentId);

    if (mostRecentState) {
      return addState({ ...mostRecentState, ...updates });
    } else {
      return addState(updates as ComponentState);
    }
  }

  /**
   * Clears the recent messages up to the specified date. Only messages
   * strictly older than the cutoff are kept.
   * @param since The cutoff date. Defaults to conversation.lastSentAt.
   */
  function clearRecentMessages(since?: Date) {
    const cutoff = since || conversation.lastSentAt;
    setMessages((prevMessages) => prevMessages.filter((message) => message.createdAt < cutoff));
  }

  /**
   * Returns messages newer than the cutoff and advances
   * conversation.lastSentAt so they will not be considered "recent" again.
   * Note: the messages themselves are NOT removed from the messages array.
   * @param since The cutoff date. Defaults to conversation.lastSentAt.
   * @returns Messages whose createdAt is strictly after the cutoff.
   */
  function popRecentMessages(since?: Date) {
    const cutoff = since || conversation.lastSentAt;
    const recentMessages = messages.filter((message) => message.createdAt > cutoff);

    setConversation((prevConversation) => ({
      ...prevConversation,
      lastSentAt: new Date(),
    }));

    return recentMessages;
  }

  /**
   * Wraps a ComponentEvent in a new event-type Message (fresh id + timestamp)
   * and appends it to the message list.
   * @param event The component event to add.
   */
  function addEvent(event: ComponentEvent) {
    addMessage({
      id: randomId(8),
      createdAt: new Date(),
      type: "event",
      content: event,
    });
  }

  /**
   * Adds an already-wrapped state message to the list of messages.
   * @param state The state message to add.
   */
  function addStateMessage(state: StateMessage) {
    addMessage(state);
  }

  /**
   * Adds an already-wrapped event message to the list of messages and triggers
   * the onEvent callback, if one was supplied to the provider.
   * @param event The event message to add.
   */
  function addEventMessage(event: EventMessage) {
    addMessage(event);

    if (onEvent) {
      onEvent(event);
    }
  }

  /**
   * Retrieves the messages created at or since the specified date.
   * (Inclusive `>=`, unlike popRecentMessages which uses a strict `>`.)
   * @param since The date to retrieve messages from.
   * @returns The messages since the specified date.
   */
  function getMessagesSince(since: Date) {
    return messages.filter((message) => message.createdAt >= since);
  }

  /**
   * Retrieves the messages created since the conversation was last sent.
   * @returns The recent messages.
   */
  function getRecentMessages() {
    return getMessagesSince(conversation.lastSentAt);
  }

  // NOTE(review): the JSX below is truncated in this copy of the file — the
  // <InformAIContext.Provider value={...}> element (original lines 193-209)
  // that passes the functions above into context is missing. Restore from
  // upstream before building.
  return (

    {children}

  );
};
214 |
215 | /**
216 | * Removes duplicate state messages from the list of messages, keeping only the most recent.
217 | * Keeps all the event messages intact.
218 | * @param messages The list of messages.
219 | * @returns The deduplicated list of messages.
220 | */
221 | export function dedupeMessages(messages: Message[]) {
222 | const seen = new Set();
223 | return messages
224 | .reverse()
225 | .filter((message) => {
226 | if (message.type === "state") {
227 | const { componentId } = message.content;
228 |
229 | if (componentId) {
230 | if (seen.has(componentId)) {
231 | return false;
232 | }
233 | seen.add(componentId);
234 | }
235 | }
236 |
237 | return true;
238 | })
239 | .reverse();
240 | }
241 |
242 | /**
243 | * Custom hook for accessing the InformAIContext.
244 | * @returns The InformAIContext.
245 | * @throws An error if used outside of an InformAIProvider.
246 | */
247 | export const useInformAIContext = () => {
248 | const context = useContext(InformAIContext);
249 | if (!context) {
250 | throw new Error("useInformAIContext must be used within an InformAIProvider");
251 | }
252 | return context;
253 | };
254 |
--------------------------------------------------------------------------------
/src/createInformAI.tsx:
--------------------------------------------------------------------------------
1 | import { InformAIProvider } from "./InformAIContext";
2 |
/**
 * Component type returned by createInformAI: a provider that wires an optional
 * onEvent callback into the InformAI context.
 *
 * Fix: renamed from `InformAIProvider`, which collided with the imported
 * `InformAIProvider` value/type from ./InformAIContext (duplicate identifier).
 */
type InformAIComponent = (props: {
  onEvent?: (message: string) => void;
  children: React.ReactNode;
}) => React.ReactElement;

/**
 * Creates a pre-configured InformAI provider component.
 *
 * @param onEvent Optional callback invoked whenever an event message is published.
 * @returns A React component that renders an InformAIProvider around its children.
 */
export function createInformAI({ onEvent }: { onEvent?: (event: any) => void } = {}) {
  const InformAI: InformAIComponent = (props) => {
    // Restored JSX (stripped in this copy): forward onEvent to the provider.
    return <InformAIProvider onEvent={onEvent}>{props.children}</InformAIProvider>;
  };

  return InformAI;
}
15 |
--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
// Public entry point for the inform-ai package: re-exports the provider
// factory, UI components, context provider/hooks, shared types, and utilities.
export * from "./createInformAI";
export * from "./ui";
export * from "./InformAIContext";
export * from "./types";
export * from "./utils";
export * from "./useInformAI";
7 |
--------------------------------------------------------------------------------
/src/test/ChatBox.test.tsx:
--------------------------------------------------------------------------------
1 | import { render, screen, fireEvent, waitFor } from "@testing-library/react";
2 | import { ChatBox } from "../ui/ChatBox";
3 |
4 | describe("ChatBox", () => {
5 | it("renders correctly", () => {
6 | render( true} />);
7 | expect(screen.getByRole("textbox")).toBeInTheDocument();
8 | });
9 |
10 | it("honors autoFocus", () => {
11 | render( true} autoFocus={true} />);
12 | expect(screen.getByRole("textbox")).toHaveFocus();
13 | });
14 |
15 | it("honors placeholder", () => {
16 | render( true} placeholder="test placeholder" />);
17 | expect(screen.getByPlaceholderText("test placeholder")).toBeInTheDocument();
18 | });
19 |
20 | it('clears the input if "onSubmit" returns true', async () => {
21 | const onSubmit = jest.fn(async () => true);
22 |
23 | render();
24 | const input = screen.getByRole("textbox");
25 | const button = screen.getByRole("button");
26 |
27 | fireEvent.change(input, { target: { value: "test" } });
28 | expect(input).toHaveValue("test");
29 |
30 | fireEvent.click(button);
31 |
32 | await waitFor(() => {
33 | expect(input).toHaveValue("");
34 | });
35 | expect(onSubmit).toHaveBeenCalledTimes(1);
36 | });
37 |
38 | it('does not clear the input if "onSubmit" returns false', async () => {
39 | const onSubmit = jest.fn(async () => false);
40 |
41 | render();
42 | const input = screen.getByRole("textbox");
43 | const form = screen.getByRole("form");
44 |
45 | fireEvent.change(input, { target: { value: "test" } });
46 |
47 | expect(input).toHaveValue("test");
48 |
49 | fireEvent.submit(form);
50 |
51 | expect(onSubmit).toHaveBeenCalledTimes(1);
52 | expect(input).toHaveValue("test");
53 | });
54 | });
55 |
--------------------------------------------------------------------------------
/src/test/ChatWrapper.test.tsx:
--------------------------------------------------------------------------------
1 | import { render, screen, fireEvent, waitFor, act } from "@testing-library/react";
2 | import { ChatWrapper } from "../ui/ChatWrapper";
3 | import { InformAIProvider, useInformAIContext } from "../InformAIContext";
4 | import { useState } from "react";
5 | import { useInformAI } from "../useInformAI";
6 | import { FormattedMessage } from "../types";
7 |
8 | describe("ChatWrapper", () => {
9 | let currentMessages: any[];
10 |
11 | const AppChatWrapper = ({ submitUserMessage = jest.fn() }: { submitUserMessage?: jest.Mock }) => {
12 | const [messages, setMessages] = useState([]);
13 |
14 | currentMessages = messages;
15 |
16 | return ;
17 | };
18 |
19 | it("renders correctly", () => {
20 | render(
21 |
22 |
23 |
24 | );
25 |
26 | //just checks that the component renders the ChatBox
27 | expect(screen.getByRole("textbox")).toBeInTheDocument();
28 | });
29 |
30 | it("renders user messages correctly", async () => {
31 | render(
32 |
33 |
34 |
35 | );
36 |
37 | fireEvent.change(screen.getByRole("textbox"), {
38 | target: { value: "test message" },
39 | });
40 | fireEvent.submit(screen.getByRole("form"));
41 |
42 | await waitFor(() => screen.getByText("test message"));
43 | });
44 |
45 | describe("collecting messages to send to the LLM", () => {
46 | let contextValues: ReturnType | undefined = undefined;
47 | let mockSubmitUserMessage: jest.Mock;
48 |
49 | const AppComponent = () => {
50 | const { addState } = useInformAI({
51 | name: "MyAppComponent",
52 | props: { key: "value" },
53 | prompt: "MyAppComponent prompt",
54 | });
55 |
56 | contextValues = useInformAIContext();
57 |
58 | const handleClick = () => {
59 | addState({ props: { key: "newValue" } });
60 | };
61 |
62 | return
clickable element
;
63 | };
64 |
65 | beforeEach(async () => {
66 | mockSubmitUserMessage = jest.fn(async () => ({
67 | id: "response-id",
68 | content: "response message",
69 | role: "assistant",
70 | }));
71 |
72 | render(
73 |
74 |
75 |
76 |
77 | );
78 |
79 | //after this we should have 2 state messages in the context
80 | fireEvent.click(screen.getByText("clickable element"));
81 |
82 | expect(contextValues!.messages.length).toBe(2);
83 |
84 | //submit a message from the user
85 | fireEvent.change(screen.getByRole("textbox"), {
86 | target: { value: "test message" },
87 | });
88 |
89 | await act(async () => {
90 | fireEvent.submit(screen.getByRole("form"));
91 | });
92 | });
93 |
94 | it("sends the correct deduped state and user messages to submitUserMessage", async () => {
95 | //test that the correct messages were sent to submitUserMessage
96 | expect(mockSubmitUserMessage).toHaveBeenCalledTimes(1);
97 | const [submittedMessages] = mockSubmitUserMessage.mock.calls[0];
98 |
99 | //the 2 state messages for the component should have been deduped into 1 (plus the user message)
100 | expect(submittedMessages.length).toBe(2);
101 |
102 | const stateMessage = submittedMessages[0] as FormattedMessage;
103 | expect(stateMessage.content).toContain('Component props: {"key":"newValue"}');
104 | expect(stateMessage.role).toEqual("system");
105 | expect(stateMessage.id.length).toEqual(10);
106 |
107 | const userMessage = submittedMessages[1] as FormattedMessage;
108 | expect(userMessage.content).toEqual("test message");
109 | expect(userMessage.role).toEqual("user");
110 | expect(userMessage.id.length).toEqual(10);
111 | });
112 |
113 | it("ends up with the correct messages", async () => {
114 | //we should end up with 3 messages - a state message, a user message, and an LLM response
115 | expect(currentMessages.length).toBe(3);
116 |
117 | const stateMessage = currentMessages[0] as FormattedMessage;
118 | expect(stateMessage.content).toContain('Component props: {"key":"newValue"}');
119 | expect(stateMessage.role).toEqual("system");
120 | expect(stateMessage.id.length).toEqual(10);
121 |
122 | const userMessage = currentMessages[1] as FormattedMessage;
123 | expect(userMessage.role).toEqual("user");
124 | expect(userMessage.id.length).toEqual(10);
125 |
126 | const responseMessage = currentMessages[2] as FormattedMessage;
127 | expect(responseMessage.role).toEqual("assistant");
128 | expect(responseMessage.content).toEqual("response message");
129 | expect(responseMessage.id).toEqual("response-id");
130 | });
131 |
132 | it("clears the text input", async () => {
133 | expect(screen.getByRole("textbox")).toHaveValue("");
134 | });
135 | });
136 | });
137 |
--------------------------------------------------------------------------------
/src/test/CurrentState.test.tsx:
--------------------------------------------------------------------------------
1 | import { render, screen, act, fireEvent, waitFor } from "@testing-library/react";
2 | import { CurrentState } from "../ui/CurrentState";
3 | import { InformAIProvider } from "../InformAIContext";
4 | import { useInformAI } from "../useInformAI";
5 |
6 | describe("CurrentState", () => {
7 | it("renders correctly", () => {
8 | render(
9 |
10 |
11 |
12 | );
13 | expect(screen.getByText("Current InformAI State")).toBeInTheDocument();
14 | });
15 |
16 | it("renders the current state", async () => {
17 | const name = "TestComponentName";
18 |
19 | const TestComponent = () => {
20 | useInformAI({
21 | name,
22 | props: {
23 | key: "value",
24 | },
25 | prompt: "This is a test component",
26 | });
27 |
28 | return