├── .env.local.example ├── .eslintrc.json ├── .gitignore ├── README.md ├── next.config.mjs ├── package-lock.json ├── package.json ├── postcss.config.js ├── public ├── next.svg └── vercel.svg ├── renovate.json ├── src └── app │ ├── actions │ ├── useAppendSlide.tsx │ └── useUpdateSlide.tsx │ ├── api │ ├── copilotkit │ │ ├── research.ts │ │ └── route.ts │ ├── transcribe │ │ └── route.ts │ └── tts │ │ └── route.ts │ ├── components │ ├── buttons │ │ ├── ActionButton.tsx │ │ ├── AddSlideButton.tsx │ │ ├── DeleteSlideButton.tsx │ │ ├── GenerateSlideButton.tsx │ │ ├── NavButton.tsx │ │ ├── PerformResearchSwitch.tsx │ │ └── SpeakCurrentSlideButton.tsx │ ├── main │ │ ├── Header.tsx │ │ ├── Presentation.tsx │ │ └── Slide.tsx │ └── misc │ │ ├── SlideNumberIndicator.tsx │ │ └── SlidePreview.tsx │ ├── favicon.ico │ ├── globals.css │ ├── layout.tsx │ ├── page.tsx │ ├── styles.css │ ├── types.ts │ └── utils │ └── globalAudio.tsx ├── tailwind.config.ts └── tsconfig.json /.env.local.example: -------------------------------------------------------------------------------- 1 | OPENAI_API_KEY=xxxxxxx 2 | OPENAI_MODEL=gpt-4-1106-preview 3 | TAVILY_API_KEY=xxxxxxx 4 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "next/core-web-vitals" 3 | } 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | .yarn/install-state.gz 8 | 9 | # testing 10 | /coverage 11 | 12 | # next.js 13 | /.next/ 14 | /out/ 15 | 16 | # production 17 | /build 18 | 19 | # misc 20 | .DS_Store 21 | *.pem 22 | 23 | # debug 24 | npm-debug.log* 25 | yarn-debug.log* 26 | yarn-error.log* 27 | 28 | # local env files 29 | .env*.local 30 | 31 | # vercel 32 | .vercel 33 | 34 | # typescript 35 | *.tsbuildinfo 36 | next-env.d.ts 37 | 38 | TODO.local.md 39 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | This is a demo that showcases using CopilotKit to build a PowerPoint like web app. 2 | 3 | ## Deploy with Vercel 4 | 5 | To deploy with Vercel, click the button below: 6 | 7 | [![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FCopilotKit%2Fdemo-presentation&env=NEXT_PUBLIC_COPILOT_CLOUD_API_KEY,TAVILY_API_KEY,OPENAI_API_KEY&envDescription=By%20setting%20the%20TAVILY_API_KEY%2C%20you%20control%20whether%20the%20web%20search%20capabilities%20are%20enabled.%20Set%20it%20to%20NONE%20to%20disable%20this%20feature.%20To%20use%20TTS%2C%20set%20OPENAI_API%20key%2C%20otherwise%20set%20it%20to%20NONE.&project-name=copilotkit-demo-presentation&repository-name=copilotkit-demo-presentation) 8 | 9 | ## Getting Started` 10 | 11 | ### 1. install the needed package: 12 | 13 | ```bash 14 | npm i 15 | ``` 16 | 17 | ### 2. Set the required environment variables: 18 | 19 | copy `.env.local.example` to `.env.local` and populate the required environment variables. 20 | 21 | > ⚠️ **Important:** Not all users have access to the GPT-4 model yet. If you don't have access, you can use GPT-3 by setting `OPENAI_MODEL` to `gpt-3.5-turbo` in the `.env.local` file. 
22 | 23 | **If you want online research to work, you only need a tavily API key, which you can obtain here: https://tavily.com/** 24 | 25 | ### 3. Run the app 26 | 27 | ```bash 28 | npm run dev 29 | ``` 30 | 31 | Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. 32 | 33 | You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file. 34 | 35 | ## Zoom in on the CopilotKit code 36 | 37 | 1. Search for `useMakeCopilotReadable` to see where frontend application information is being made accessible to the Copilot engine 38 | 39 | 2. Search for `useAppendSlide` and `useUpdateSlide` to see where the frontend application action is made accessible to the Copilot engine. 40 | 41 | 3. In `route.ts`, see how the backend-running `researchAction` is defined against the `research.ts` agent, powered by LangChain's LangGraph and by Tavily research API. 42 | 43 | ``` 44 | 45 | ## Learn More 46 | 47 | To learn more about CopilotKit, take a look at the following resources: 48 | 49 | - [CopilotKit Documentation](https://docs.copilotkit.ai/getting-started/quickstart-chatbot) - learn about CopilotKit features and API. 50 | - [GitHub](https://github.com/CopilotKit/CopilotKit) - Check out the CopilotKit GitHub repository. 51 | - [Discord](https://discord.gg/6dffbvGU3D) - Join the CopilotKit Discord community. 
52 | ``` 53 | -------------------------------------------------------------------------------- /next.config.mjs: -------------------------------------------------------------------------------- 1 | /** @type {import('next').NextConfig} */ 2 | const nextConfig = {}; 3 | 4 | export default nextConfig; 5 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "copilotkit-demo", 3 | "version": "0.1.0", 4 | "private": true, 5 | "scripts": { 6 | "dev": "next dev", 7 | "build": "next build", 8 | "start": "next start", 9 | "lint": "next lint" 10 | }, 11 | "dependencies": { 12 | "@copilotkit/runtime": "1.3.1", 13 | "@copilotkit/react-core": "1.3.1", 14 | "@copilotkit/react-textarea": "1.3.1", 15 | "@copilotkit/react-ui": "1.3.1", 16 | "@copilotkit/shared": "1.3.1", 17 | "@heroicons/react": "^2.1.1", 18 | "@langchain/community": "^0.0.29", 19 | "@langchain/core": "^0.1.29", 20 | "@langchain/langgraph": "^0.0.7", 21 | "@langchain/openai": "^0.0.14", 22 | "clsx": "^2.1.0", 23 | "langchain": "^0.1.19", 24 | "next": "14.1.0", 25 | "openai": "^4.28.0", 26 | "react": "^18", 27 | "react-dom": "^18" 28 | }, 29 | "devDependencies": { 30 | "@types/node": "^20.11.19", 31 | "@types/react": "^18.2.55", 32 | "@types/react-dom": "^18.2.19", 33 | "autoprefixer": "^10.0.1", 34 | "eslint": "^8", 35 | "eslint-config-next": "14.1.0", 36 | "postcss": "^8", 37 | "tailwindcss": "^3.4.3", 38 | "typescript": "^5" 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /postcss.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | plugins: { 3 | tailwindcss: {}, 4 | autoprefixer: {}, 5 | }, 6 | }; 7 | -------------------------------------------------------------------------------- /public/next.svg: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /public/vercel.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "dependencyDashboard": true, 4 | "extends": [ 5 | "config:recommended" 6 | ], 7 | "packageRules": [ 8 | { 9 | "enabled": false, 10 | "matchPackageNames": [ 11 | "*" 12 | ], 13 | "labels": ["dependencies"] 14 | }, 15 | { 16 | "enabled": true, 17 | "matchPackageNames": [ 18 | "/^@copilotkit/" 19 | ], 20 | "labels": ["copilotkit"], 21 | "groupName": "CopilotKit dependencies" 22 | } 23 | ] 24 | } -------------------------------------------------------------------------------- /src/app/actions/useAppendSlide.tsx: -------------------------------------------------------------------------------- 1 | import { useCopilotAction } from "@copilotkit/react-core"; 2 | import { SlideModel } from "../types"; 3 | import { SlidePreview } from "../components/misc/SlidePreview"; 4 | 5 | interface AppendSlideParams { 6 | setSlides: (fn: (slides: SlideModel[]) => SlideModel[]) => void; 7 | setCurrentSlideIndex: (fn: (i: number) => number) => void; 8 | slides: SlideModel[]; 9 | } 10 | 11 | export default function useAppendSlide({ 12 | setSlides, 13 | setCurrentSlideIndex, 14 | slides, 15 | }: AppendSlideParams) { 16 | useCopilotAction({ 17 | name: "appendSlide", 18 | description: 19 | "Add a slide after all the existing slides. Call this function multiple times to add multiple slides.", 20 | parameters: [ 21 | { 22 | name: "content", 23 | description: 24 | "The content of the slide. MUST consist of a title, then an empty newline, then a few bullet points. 
Always between 1-3 bullet points - no more, no less.", 25 | }, 26 | { 27 | name: "backgroundImageUrl", 28 | description: 29 | "The url of the background image for the slide. Use the getImageUrl tool to retrieve a URL for a topic.", 30 | }, 31 | { 32 | name: "spokenNarration", 33 | description: 34 | "The text to read while presenting the slide. Should be distinct from the slide's content, " + 35 | "and can include additional context, references, etc. Will be read aloud as-is. " + 36 | "Should be a few sentences long, clear, and smooth to read." + 37 | "DO NOT include meta-commentary, such as 'in this slide', 'we explore', etc.", 38 | }, 39 | ], 40 | 41 | handler: async ({ content, backgroundImageUrl, spokenNarration }) => { 42 | const newSlide: SlideModel = { 43 | content, 44 | backgroundImageUrl, 45 | spokenNarration, 46 | }; 47 | 48 | setSlides((slides) => [...slides, newSlide]); 49 | setCurrentSlideIndex((i) => slides.length); 50 | }, 51 | render: (props) => { 52 | return ( 53 | 54 | ); 55 | }, 56 | }); 57 | } 58 | -------------------------------------------------------------------------------- /src/app/actions/useUpdateSlide.tsx: -------------------------------------------------------------------------------- 1 | import { useCopilotAction } from "@copilotkit/react-core"; 2 | import { SlideModel } from "../types"; 3 | import { SlidePreview } from "../components/misc/SlidePreview"; 4 | 5 | interface UpdateSlideParams { 6 | partialUpdateSlide: (partialSlide: Partial) => void; 7 | } 8 | 9 | export default function useUpdateSlide({ 10 | partialUpdateSlide, 11 | }: UpdateSlideParams) { 12 | useCopilotAction({ 13 | name: "updateSlide", 14 | description: "Update the current slide.", 15 | parameters: [ 16 | { 17 | name: "content", 18 | description: 19 | "The content of the slide. Should generally consist of a few bullet points.", 20 | }, 21 | { 22 | name: "backgroundImageUrl", 23 | description: 24 | "The url of the background image for the slide. 
Use the getImageUrl tool to retrieve a URL for a topic.", 25 | }, 26 | { 27 | name: "spokenNarration", 28 | description: 29 | "The spoken narration for the slide. This is what the user will hear when the slide is shown.", 30 | }, 31 | ], 32 | handler: async ({ content, backgroundImageUrl, spokenNarration }) => { 33 | partialUpdateSlide({ 34 | content, 35 | backgroundImageUrl, 36 | spokenNarration, 37 | }); 38 | }, 39 | render: (props) => { 40 | return ( 41 | 42 | ); 43 | }, 44 | }); 45 | } 46 | -------------------------------------------------------------------------------- /src/app/api/copilotkit/research.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * This is a port of GPT Newspaper to LangGraph JS, adapted from the original Python code. 3 | * 4 | * https://github.com/assafelovic/gpt-newspaper 5 | */ 6 | import { HumanMessage, SystemMessage } from "@langchain/core/messages"; 7 | import { ChatOpenAI } from "@langchain/openai"; 8 | import { StateGraph, END } from "@langchain/langgraph"; 9 | import { RunnableLambda } from "@langchain/core/runnables"; 10 | import { TavilySearchAPIRetriever } from "@langchain/community/retrievers/tavily_search_api"; 11 | 12 | interface AgentState { 13 | topic: string; 14 | searchResults?: string; 15 | article?: string; 16 | critique?: string; 17 | } 18 | 19 | function model() { 20 | return new ChatOpenAI({ 21 | temperature: 0, 22 | modelName: "gpt-3.5-turbo-0125", 23 | }); 24 | } 25 | 26 | async function search(state: { 27 | agentState: AgentState; 28 | }): Promise<{ agentState: AgentState }> { 29 | const retriever = new TavilySearchAPIRetriever({ 30 | k: 10, 31 | }); 32 | let topic = state.agentState.topic; 33 | // must be at least 5 characters long 34 | if (topic.length < 5) { 35 | topic = "topic: " + topic; 36 | } 37 | console.log("searching for topic:", topic); 38 | const docs = await retriever.getRelevantDocuments(topic); 39 | console.log("search result length:", docs.length); 40 
| return { 41 | agentState: { 42 | ...state.agentState, 43 | searchResults: JSON.stringify(docs), 44 | }, 45 | }; 46 | } 47 | 48 | async function curate(state: { 49 | agentState: AgentState; 50 | }): Promise<{ agentState: AgentState }> { 51 | console.log("curating search results"); 52 | const response = await model().invoke( 53 | [ 54 | new SystemMessage( 55 | `You are a personal newspaper editor. 56 | Your sole task is to return a list of URLs of the 5 most relevant articles for the provided topic or query as a JSON list of strings 57 | in this format: 58 | { 59 | urls: ["url1", "url2", "url3", "url4", "url5"] 60 | } 61 | .`.replace(/\s+/g, " ") 62 | ), 63 | new HumanMessage( 64 | `Today's date is ${new Date().toLocaleDateString("en-GB")}. 65 | Topic or Query: ${state.agentState.topic} 66 | 67 | Here is a list of articles: 68 | ${state.agentState.searchResults}`.replace(/\s+/g, " ") 69 | ), 70 | ], 71 | { 72 | response_format: { 73 | type: "json_object", 74 | }, 75 | } 76 | ); 77 | const urls = JSON.parse(response.content as string).urls; 78 | const searchResults = JSON.parse(state.agentState.searchResults!); 79 | const newSearchResults = searchResults.filter((result: any) => { 80 | return urls.includes(result.metadata.source); 81 | }); 82 | console.log("curated search results:", newSearchResults); 83 | return { 84 | agentState: { 85 | ...state.agentState, 86 | searchResults: JSON.stringify(newSearchResults), 87 | }, 88 | }; 89 | } 90 | 91 | async function critique(state: { 92 | agentState: AgentState; 93 | }): Promise<{ agentState: AgentState }> { 94 | console.log("critiquing article"); 95 | let feedbackInstructions = ""; 96 | if (state.agentState.critique) { 97 | feedbackInstructions = 98 | `The writer has revised the article based on your previous critique: ${state.agentState.critique} 99 | The writer might have left feedback for you encoded between tags. 100 | The feedback is only for you to see and will be removed from the final article. 
101 | `.replace(/\s+/g, " "); 102 | } 103 | const response = await model().invoke([ 104 | new SystemMessage( 105 | `You are a personal newspaper writing critique. Your sole purpose is to provide short feedback on a written 106 | article so the writer will know what to fix. 107 | Today's date is ${new Date().toLocaleDateString("en-GB")} 108 | Your task is to provide a really short feedback on the article only if necessary. 109 | if you think the article is good, please return [DONE]. 110 | you can provide feedback on the revised article or just 111 | return [DONE] if you think the article is good. 112 | Please return a string of your critique or [DONE].`.replace(/\s+/g, " ") 113 | ), 114 | new HumanMessage( 115 | `${feedbackInstructions} 116 | This is the article: ${state.agentState.article}` 117 | ), 118 | ]); 119 | const content = response.content as string; 120 | console.log("critique:", content); 121 | return { 122 | agentState: { 123 | ...state.agentState, 124 | critique: content.includes("[DONE]") ? undefined : content, 125 | }, 126 | }; 127 | } 128 | 129 | async function write(state: { 130 | agentState: AgentState; 131 | }): Promise<{ agentState: AgentState }> { 132 | console.log("writing article"); 133 | const response = await model().invoke([ 134 | new SystemMessage( 135 | `You are a personal newspaper writer. Your sole purpose is to write a well-written article about a 136 | topic using a list of articles. Write 5 paragraphs in markdown.`.replace( 137 | /\s+/g, 138 | " " 139 | ) 140 | ), 141 | new HumanMessage( 142 | `Today's date is ${new Date().toLocaleDateString("en-GB")}. 143 | Your task is to write a critically acclaimed article for me about the provided query or 144 | topic based on the sources. 
145 | Here is a list of articles: ${state.agentState.searchResults} 146 | This is the topic: ${state.agentState.topic} 147 | Please return a well-written article based on the provided information.`.replace( 148 | /\s+/g, 149 | " " 150 | ) 151 | ), 152 | ]); 153 | const content = response.content as string; 154 | console.log("article:", content); 155 | return { 156 | agentState: { 157 | ...state.agentState, 158 | article: content, 159 | }, 160 | }; 161 | } 162 | 163 | async function revise(state: { 164 | agentState: AgentState; 165 | }): Promise<{ agentState: AgentState }> { 166 | console.log("revising article"); 167 | const response = await model().invoke([ 168 | new SystemMessage( 169 | `You are a personal newspaper editor. Your sole purpose is to edit a well-written article about a 170 | topic based on given critique.`.replace(/\s+/g, " ") 171 | ), 172 | new HumanMessage( 173 | `Your task is to edit the article based on the critique given. 174 | This is the article: ${state.agentState.article} 175 | This is the critique: ${state.agentState.critique} 176 | Please return the edited article based on the critique given. 177 | You may leave feedback about the critique encoded between tags like this: 178 | here goes the feedback ...`.replace(/\s+/g, " ") 179 | ), 180 | ]); 181 | const content = response.content as string; 182 | console.log("revised article:", content); 183 | return { 184 | agentState: { 185 | ...state.agentState, 186 | article: content, 187 | }, 188 | }; 189 | } 190 | 191 | const agentState = { 192 | agentState: { 193 | value: (x: AgentState, y: AgentState) => y, 194 | default: () => ({ 195 | topic: "", 196 | }), 197 | }, 198 | }; 199 | 200 | // Define the function that determines whether to continue or not 201 | const shouldContinue = (state: { agentState: AgentState }) => { 202 | const result = state.agentState.critique === undefined ? 
"end" : "continue"; 203 | return result; 204 | }; 205 | 206 | const workflow = new StateGraph({ 207 | channels: agentState, 208 | }); 209 | 210 | workflow.addNode("search", new RunnableLambda({ func: search }) as any); 211 | workflow.addNode("curate", new RunnableLambda({ func: curate }) as any); 212 | workflow.addNode("write", new RunnableLambda({ func: write }) as any); 213 | workflow.addNode("critique", new RunnableLambda({ func: critique }) as any); 214 | workflow.addNode("revise", new RunnableLambda({ func: revise }) as any); 215 | 216 | workflow.addEdge("search", "curate"); 217 | workflow.addEdge("curate", "write"); 218 | workflow.addEdge("write", "critique"); 219 | 220 | // We now add a conditional edge 221 | workflow.addConditionalEdges( 222 | // First, we define the start node. We use `agent`. 223 | // This means these are the edges taken after the `agent` node is called. 224 | "critique", 225 | // Next, we pass in the function that will determine which node is called next. 226 | shouldContinue, 227 | // Finally we pass in a mapping. 228 | // The keys are strings, and the values are other nodes. 229 | // END is a special node marking that the graph should finish. 230 | // What will happen is we will call `should_continue`, and then the output of that 231 | // will be matched against the keys in this mapping. 232 | // Based on which one it matches, that node will then be called. 233 | { 234 | // If `tools`, then we call the tool node. 235 | continue: "revise", 236 | // Otherwise we finish. 
237 | end: END, 238 | } 239 | ); 240 | 241 | workflow.addEdge("revise", "critique"); 242 | 243 | workflow.setEntryPoint("search"); 244 | const app = workflow.compile(); 245 | 246 | export async function researchWithLangGraph(topic: string) { 247 | const inputs = { 248 | agentState: { 249 | topic, 250 | }, 251 | }; 252 | const result = await app.invoke(inputs); 253 | const regex = /<FEEDBACK>[\s\S]*?<\/FEEDBACK>/g; 254 | const article = result.agentState.article.replace(regex, ""); 255 | return article; 256 | } 257 | -------------------------------------------------------------------------------- /src/app/api/copilotkit/route.ts: -------------------------------------------------------------------------------- 1 | import { researchWithLangGraph } from "./research"; 2 | import { Action } from "@copilotkit/shared"; 3 | import { NextRequest } from "next/server"; 4 | import { 5 | CopilotRuntime, 6 | copilotRuntimeNextJSAppRouterEndpoint, 7 | OpenAIAdapter, 8 | } from "@copilotkit/runtime"; 9 | 10 | const UNSPLASH_ACCESS_KEY_ENV = "UNSPLASH_ACCESS_KEY"; 11 | const UNSPLASH_ACCESS_KEY = process.env[UNSPLASH_ACCESS_KEY_ENV]; 12 | 13 | const researchAction: Action = { 14 | name: "research", 15 | description: 16 | "Call this function to conduct research on a certain topic. Respect other notes about when to call this function", 17 | parameters: [ 18 | { 19 | name: "topic", 20 | type: "string", 21 | description: "The topic to research. 
5 characters or longer.", 22 | }, 23 | ], 24 | handler: async ({ topic }) => { 25 | console.log("Researching topic: ", topic); 26 | return await researchWithLangGraph(topic); 27 | }, 28 | }; 29 | 30 | export const POST = async (req: NextRequest) => { 31 | const actions: Action[] = [ 32 | { 33 | name: "getImageUrl", 34 | description: "Get an image url for a topic", 35 | parameters: [ 36 | { 37 | name: "topic", 38 | description: "The topic of the image", 39 | }, 40 | ], 41 | handler: async ({ topic }) => { 42 | if (UNSPLASH_ACCESS_KEY) { 43 | const response = await fetch( 44 | `https://api.unsplash.com/search/photos?query=${encodeURIComponent( 45 | topic 46 | )}&per_page=10&order_by=relevant&content_filter=high`, 47 | { 48 | headers: { 49 | Authorization: `Client-ID ${UNSPLASH_ACCESS_KEY}`, 50 | }, 51 | } 52 | ); 53 | const data = await response.json(); 54 | if (data.results && data.results.length > 0) { 55 | const randomIndex = Math.floor(Math.random() * data.results.length); 56 | return data.results[randomIndex].urls.regular; 57 | } 58 | } 59 | return ( 60 | 'url("https://loremflickr.com/800/600/' + 61 | encodeURIComponent(topic) + 62 | '")' 63 | ); 64 | }, 65 | }, 66 | ]; 67 | 68 | if ( 69 | process.env["TAVILY_API_KEY"] && 70 | process.env["TAVILY_API_KEY"] !== "NONE" 71 | ) { 72 | actions.push(researchAction); 73 | } 74 | 75 | const openaiModel = process.env["OPENAI_MODEL"]; 76 | 77 | console.log("ENV.COPILOT_CLOUD_API_KEY", process.env.COPILOT_CLOUD_API_KEY); 78 | 79 | const { handleRequest } = copilotRuntimeNextJSAppRouterEndpoint({ 80 | runtime: new CopilotRuntime({ actions }), 81 | serviceAdapter: new OpenAIAdapter({ model: openaiModel }), 82 | endpoint: req.nextUrl.pathname, 83 | }); 84 | 85 | return handleRequest(req); 86 | }; 87 | -------------------------------------------------------------------------------- /src/app/api/transcribe/route.ts: -------------------------------------------------------------------------------- 1 | import { OpenAI } from 
"openai"; 2 | 3 | // export const runtime = "edge"; 4 | 5 | const openai = new OpenAI(); 6 | 7 | export async function POST(req: Request): Promise { 8 | try { 9 | const formData = await req.formData(); 10 | const file = formData.get("file") as File; 11 | 12 | if (!file) { 13 | return new Response("File not provided", { status: 400 }); 14 | } 15 | 16 | const transcription = await openai.audio.transcriptions.create({ 17 | file, 18 | model: "whisper-1", 19 | }); 20 | 21 | return new Response(JSON.stringify(transcription), { 22 | status: 200, 23 | headers: { "Content-Type": "application/json" }, 24 | }); 25 | } catch (error: any) { 26 | return new Response(JSON.stringify({ error: error.message }), { 27 | status: 500, 28 | headers: { "Content-Type": "application/json" }, 29 | }); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/app/api/tts/route.ts: -------------------------------------------------------------------------------- 1 | import { OpenAI } from "openai"; 2 | 3 | export const runtime = "edge"; 4 | 5 | export async function GET(req: Request): Promise { 6 | const openai = new OpenAI(); 7 | 8 | const url = new URL(req.url); 9 | const text = url.searchParams.get("text"); // 'text' is the query parameter name 10 | 11 | if (!text) { 12 | return new Response("Text parameter is missing", { status: 400 }); 13 | } 14 | 15 | const response = await openai.audio.speech.create({ 16 | voice: "alloy", 17 | input: text, 18 | model: "tts-1", 19 | }); 20 | 21 | return response; 22 | } 23 | -------------------------------------------------------------------------------- /src/app/components/buttons/ActionButton.tsx: -------------------------------------------------------------------------------- 1 | import clsx from "clsx"; 2 | 3 | interface ActionButtonProps { 4 | children: React.ReactNode; 5 | onClick?: () => void; 6 | disabled?: boolean; 7 | inProgress?: boolean; 8 | } 9 | 10 | export function ActionButton({ 11 | children, 
12 | onClick, 13 | disabled, 14 | inProgress, 15 | }: ActionButtonProps) { 16 | return ( 17 | 31 | ); 32 | } 33 | -------------------------------------------------------------------------------- /src/app/components/buttons/AddSlideButton.tsx: -------------------------------------------------------------------------------- 1 | import { SlideModel } from "@/app/types"; 2 | import { ActionButton } from "./ActionButton"; 3 | import { PlusCircleIcon } from "@heroicons/react/24/outline"; 4 | 5 | interface AddSlideButtonProps { 6 | currentSlideIndex: number; 7 | setCurrentSlideIndex: (fn: (i: number) => number) => void; 8 | setSlides: (fn: (slides: SlideModel[]) => SlideModel[]) => void; 9 | } 10 | 11 | export function AddSlideButton({ 12 | currentSlideIndex, 13 | setCurrentSlideIndex, 14 | setSlides, 15 | }: AddSlideButtonProps) { 16 | return ( 17 | { 19 | const newSlide: SlideModel = { 20 | content: "", 21 | backgroundImageUrl: 22 | "https://loremflickr.com/cache/resized/65535_53415810728_d1db6e2660_h_800_600_nofilter.jpg", 23 | spokenNarration: "", 24 | }; 25 | setSlides((slides) => [ 26 | ...slides.slice(0, currentSlideIndex + 1), 27 | newSlide, 28 | ...slides.slice(currentSlideIndex + 1), 29 | ]); 30 | setCurrentSlideIndex((i) => i + 1); 31 | }} 32 | > 33 | 34 | 35 | ); 36 | } 37 | -------------------------------------------------------------------------------- /src/app/components/buttons/DeleteSlideButton.tsx: -------------------------------------------------------------------------------- 1 | import { SlideModel } from "@/app/types"; 2 | import { ActionButton } from "./ActionButton"; 3 | import { TrashIcon } from "@heroicons/react/24/outline"; 4 | 5 | interface DeleteSlideButtonProps { 6 | currentSlideIndex: number; 7 | setCurrentSlideIndex: (fn: (i: number) => number) => void; 8 | slides: SlideModel[]; 9 | setSlides: (fn: (slides: SlideModel[]) => SlideModel[]) => void; 10 | } 11 | 12 | export function DeleteSlideButton({ 13 | currentSlideIndex, 14 | 
setCurrentSlideIndex, 15 | slides, 16 | setSlides, 17 | }: DeleteSlideButtonProps) { 18 | return ( 19 | { 22 | // delete the current slide 23 | setSlides((slides) => [ 24 | ...slides.slice(0, currentSlideIndex), 25 | ...slides.slice(currentSlideIndex + 1), 26 | ]); 27 | setCurrentSlideIndex((i) => 0); 28 | }} 29 | > 30 | 31 | 32 | ); 33 | } 34 | -------------------------------------------------------------------------------- /src/app/components/buttons/GenerateSlideButton.tsx: -------------------------------------------------------------------------------- 1 | import { CopilotContextParams, CopilotTask } from "@copilotkit/react-core"; 2 | import { useState } from "react"; 3 | import { ActionButton } from "./ActionButton"; 4 | import { SparklesIcon } from "@heroicons/react/24/outline"; 5 | 6 | interface GenerateSlideButtonProps { 7 | context: CopilotContextParams; 8 | } 9 | 10 | export function GenerateSlideButton({ context }: GenerateSlideButtonProps) { 11 | const [isGeneratingSlide, setIsGeneratingSlide] = useState(false); 12 | return ( 13 | { 16 | try { 17 | let slideContent = prompt("What should the new slide be about?"); 18 | if (slideContent === null) { 19 | return; 20 | } 21 | setIsGeneratingSlide(true); 22 | const generateSlideTask = new CopilotTask({ 23 | instructions: 24 | "Make a new slide given this user input: " + 25 | slideContent + 26 | "\n DO NOT carry out research", 27 | }); 28 | await generateSlideTask.run(context); 29 | } finally { 30 | setIsGeneratingSlide(false); 31 | } 32 | }} 33 | > 34 | 35 | 36 | ); 37 | } 38 | -------------------------------------------------------------------------------- /src/app/components/buttons/NavButton.tsx: -------------------------------------------------------------------------------- 1 | import clsx from "clsx"; 2 | 3 | interface NavButtonProps { 4 | children: React.ReactNode; 5 | onClick?: () => void; 6 | disabled?: boolean; 7 | } 8 | 9 | export function NavButton({ children, onClick, disabled }: NavButtonProps) 
{ 10 | return ( 11 | 24 | ); 25 | } 26 | -------------------------------------------------------------------------------- /src/app/components/buttons/PerformResearchSwitch.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState } from "react"; 2 | 3 | interface PerformResearchSwitchProps { 4 | isEnabled: boolean; 5 | setIsEnabled: (fn: (b: boolean) => boolean) => void; 6 | } 7 | 8 | export const PerformResearchSwitch = ({ 9 | isEnabled, 10 | setIsEnabled, 11 | }: PerformResearchSwitchProps) => { 12 | return ( 13 | 35 | ); 36 | }; 37 | -------------------------------------------------------------------------------- /src/app/components/buttons/SpeakCurrentSlideButton.tsx: -------------------------------------------------------------------------------- 1 | import { useState } from "react"; 2 | import { ActionButton } from "./ActionButton"; 3 | import { SpeakerWaveIcon } from "@heroicons/react/24/outline"; 4 | import { resetGlobalAudio, speak } from "@/app/utils/globalAudio"; 5 | 6 | interface SpeakCurrentSlideButtonProps { 7 | spokenNarration: string; 8 | } 9 | 10 | export function SpeakCurrentSlideButton({ 11 | spokenNarration, 12 | }: SpeakCurrentSlideButtonProps) { 13 | const [isSpeaking, setIsSpeaking] = useState(false); 14 | return ( 15 | 16 | { 19 | resetGlobalAudio(); 20 | try { 21 | setIsSpeaking(true); 22 | await speak(spokenNarration); 23 | } finally { 24 | setIsSpeaking(false); 25 | } 26 | }} 27 | /> 28 | 29 | ); 30 | } 31 | -------------------------------------------------------------------------------- /src/app/components/main/Header.tsx: -------------------------------------------------------------------------------- 1 | import clsx from "clsx"; 2 | import { SlideModel } from "@/app/types"; 3 | import { useMemo } from "react"; 4 | import { useCopilotContext } from "@copilotkit/react-core"; 5 | import { SlideNumberIndicator } from "../misc/SlideNumberIndicator"; 6 | import { GenerateSlideButton } 
from "../buttons/GenerateSlideButton"; 7 | import { SpeakCurrentSlideButton } from "../buttons/SpeakCurrentSlideButton"; 8 | import { DeleteSlideButton } from "../buttons/DeleteSlideButton"; 9 | import { NavButton } from "../buttons/NavButton"; 10 | import { ChevronLeftIcon, ChevronRightIcon } from "@heroicons/react/24/outline"; 11 | import { PerformResearchSwitch } from "../buttons/PerformResearchSwitch"; 12 | import { AddSlideButton } from "../buttons/AddSlideButton"; 13 | 14 | interface HeaderProps { 15 | currentSlideIndex: number; 16 | setCurrentSlideIndex: (fn: (i: number) => number) => void; 17 | slides: SlideModel[]; 18 | setSlides: (fn: (slides: SlideModel[]) => SlideModel[]) => void; 19 | performResearch: boolean; 20 | setPerformResearch: (fn: (b: boolean) => boolean) => void; 21 | } 22 | 23 | export function Header({ 24 | currentSlideIndex, 25 | setCurrentSlideIndex, 26 | slides, 27 | setSlides, 28 | performResearch, 29 | setPerformResearch, 30 | }: HeaderProps) { 31 | const currentSlide = useMemo( 32 | () => slides[currentSlideIndex], 33 | [slides, currentSlideIndex] 34 | ); 35 | 36 | /** 37 | * We need to get the context here to run a Copilot task for generating a slide 38 | **/ 39 | const context = useCopilotContext(); 40 | 41 | return ( 42 |
43 |
44 | {/* Back */} 45 | setCurrentSlideIndex((i) => i - 1)} 48 | > 49 | 50 | 51 | 52 | {/* Forward */} 53 | setCurrentSlideIndex((i) => i + 1)} 56 | > 57 | 58 | 59 | 60 | {/* Perform Research */} 61 | 65 |
66 | 67 | 70 | 71 |
72 | 75 | 76 | 77 | 78 | 81 | 82 | 85 |
86 |
87 | ); 88 | } 89 | -------------------------------------------------------------------------------- /src/app/components/main/Presentation.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | import { useCopilotReadable } from "@copilotkit/react-core"; 3 | import { useCallback, useMemo, useState } from "react"; 4 | import { Slide } from "./Slide"; 5 | import { Header } from "./Header"; 6 | import useAppendSlide from "../../actions/useAppendSlide"; 7 | import { SlideModel } from "@/app/types"; 8 | 9 | interface PresentationProps { 10 | performResearch: boolean; 11 | setPerformResearch: (fn: (b: boolean) => boolean) => void; 12 | } 13 | 14 | export const Presentation = ({ 15 | performResearch, 16 | setPerformResearch, 17 | }: PresentationProps) => { 18 | const [slides, setSlides] = useState([ 19 | { 20 | content: "This is the first slide.", 21 | backgroundImageUrl: 22 | "https://loremflickr.com/cache/resized/65535_53415810728_d1db6e2660_h_800_600_nofilter.jpg", 23 | spokenNarration: "This is the first slide. Welcome to our presentation!", 24 | }, 25 | ]); 26 | const [currentSlideIndex, setCurrentSlideIndex] = useState(0); 27 | const currentSlide = useMemo( 28 | () => slides[currentSlideIndex], 29 | [slides, currentSlideIndex] 30 | ); 31 | 32 | /** 33 | * This makes all slides available to the Copilot. 34 | */ 35 | useCopilotReadable({ 36 | description: "These are all the slides", 37 | value: slides, 38 | }); 39 | 40 | /** 41 | * This makes the current slide available to the Copilot. 42 | */ 43 | useCopilotReadable({ 44 | description: "This is the current slide", 45 | value: currentSlide, 46 | }); 47 | 48 | /** 49 | * This action allows the Copilot to append a new slide to the presentation. 
50 | */ 51 | useAppendSlide({ 52 | setSlides, 53 | setCurrentSlideIndex, 54 | slides, 55 | }); 56 | 57 | const updateCurrentSlide = useCallback( 58 | (partialSlide: Partial) => { 59 | setSlides((slides) => [ 60 | ...slides.slice(0, currentSlideIndex), 61 | { ...slides[currentSlideIndex], ...partialSlide }, 62 | ...slides.slice(currentSlideIndex + 1), 63 | ]); 64 | }, 65 | [currentSlideIndex, setSlides] 66 | ); 67 | 68 | return ( 69 |
75 |
83 |
87 |
91 | 92 |
93 |
94 |
95 | ); 96 | }; 97 | -------------------------------------------------------------------------------- /src/app/components/main/Slide.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | import useUpdateSlide from "../../actions/useUpdateSlide"; 3 | import { SlideModel } from "@/app/types"; 4 | 5 | export interface SlideProps { 6 | slide: SlideModel; 7 | partialUpdateSlide: (partialSlide: Partial) => void; 8 | } 9 | 10 | export const Slide = (props: SlideProps) => { 11 | const backgroundImage = `url("${props.slide.backgroundImageUrl}")`; 12 | 13 | /** 14 | * This action allows the Copilot to update the current slide. 15 | */ 16 | useUpdateSlide({ partialUpdateSlide: props.partialUpdateSlide }); 17 | 18 | return ( 19 |
20 |
21 | { 24 | props.partialUpdateSlide({ content: newContent }); 25 | }} 26 | /> 27 | { 30 | props.partialUpdateSlide({ spokenNarration: newSpokenNarration }); 31 | }} 32 | /> 33 |
34 | 35 |
36 | ); 37 | }; 38 | 39 | function SlideImage({ backgroundImage }: { backgroundImage: string }) { 40 | return ( 41 |
50 | ); 51 | } 52 | 53 | interface SpeakerNotesProps { 54 | spokenNarration: string; 55 | onChange: (newSpokenNarration: string) => void; 56 | } 57 | 58 | function SlideSpeakerNotes({ spokenNarration, onChange }: SpeakerNotesProps) { 59 | return ( 60 |
61 |