├── .gitignore
├── Dockerfile
├── README.md
├── package-lock.json
├── package.json
├── public
│   ├── favicon.ico
│   ├── favicon2.ico
│   ├── index.html
│   ├── logo192.png
│   ├── logo512.png
│   ├── manifest.json
│   └── robots.txt
└── src
    ├── App.js
    ├── App.test.js
    ├── index.css
    ├── index.js
    ├── logo.svg
    ├── reportWebVitals.js
    └── setupTests.js

/.gitignore:
--------------------------------------------------------------------------------
 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
 2 | 
 3 | # dependencies
 4 | /node_modules
 5 | /.pnp
 6 | .pnp.js
 7 | 
 8 | # testing
 9 | /coverage
10 | 
11 | # production
12 | /build
13 | 
14 | # misc
15 | .DS_Store
16 | .env
17 | .env.local
18 | .env.development.local
19 | .env.test.local
20 | .env.production.local
21 | 
22 | npm-debug.log*
23 | yarn-debug.log*
24 | yarn-error.log*
25 | 
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
 1 | # Use an official Node.js runtime as the base image
 2 | FROM node:14
 3 | 
 4 | # Set the working directory in the container
 5 | WORKDIR /app
 6 | 
 7 | # Copy package.json and package-lock.json to the working directory
 8 | COPY package*.json ./
 9 | 
10 | # Install the application dependencies
11 | RUN npm install
12 | 
13 | # Copy the rest of the application code to the working directory
14 | COPY . .
15 | 
16 | # Declare the API keys as build arguments so they can be passed with --build-arg
17 | ARG REACT_APP_OPENAI_API_KEY
18 | ARG REACT_APP_ANTHROPIC_API_KEY
19 | 
20 | # Create a .env file and set environment variables
21 | # Note: In a production environment, you should use secrets management instead
22 | RUN echo "REACT_APP_OPENAI_API_KEY=${REACT_APP_OPENAI_API_KEY}" > .env && \
23 |     echo "REACT_APP_ANTHROPIC_API_KEY=${REACT_APP_ANTHROPIC_API_KEY}" >> .env
24 | 
25 | # Build the application
26 | RUN npm run build
27 | 
28 | # Install a simple HTTP server for serving static content
29 | RUN npm install -g serve
30 | 
31 | # Expose the port the app runs on
32 | EXPOSE 3000
33 | 
34 | # Define the command to run the app
35 | CMD ["serve", "-s", "build", "-l", "3000"]
36 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
 1 | # Prompt Engineering Tool
 2 | 
 3 | ## Description
 4 | 
 5 | The Prompt Engineering Tool is a web-based application designed to help users experiment with and optimize prompts for various large language models (LLMs). It allows users to:
 6 | 
 7 | ![image](https://github.com/teknium1/Prompt-Engineering-Toolkit/assets/127238744/9611af16-a950-41e6-91d2-22f077e6496d)
 8 | 
 9 | - Test prompts across multiple LLM providers simultaneously
10 | - Save and load prompt templates
11 | - Manage variables for dynamic prompt generation
12 | - Save and load model configurations
13 | - Compare outputs from different models side by side
14 | 
15 | This tool is particularly useful for developers, researchers, and content creators working with AI language models to refine their prompts and achieve better results.
16 | 
17 | ## Features
18 | 
19 | - Support for multiple LLM providers (currently OpenAI and Anthropic)
20 | - Global and individual prompt modes
21 | - Variable management for dynamic prompt generation
22 | - Save and load functionality for prompts, variables, and model configurations
23 | - Responsive design with resizable panels
24 | - Temperature adjustment for each model
25 | 
26 | ## Setup
27 | 
28 | ### Prerequisites
29 | 
30 | - Node.js (v14 or later)
31 | - npm (v6 or later)
32 | 
33 | ### Installation
34 | 
35 | 1.
Clone the repository: 36 | 37 | ```bash 38 | git clone https://github.com/Teknium1/prompt-engineering-toolkit.git 39 | ``` 40 | 41 | 2. Navigate to the project directory: 42 | 43 | ```bash 44 | cd prompt-engineering-toolkit 45 | ``` 46 | 47 | 3. Install the dependencies: 48 | ```bash 49 | npm install 50 | ``` 51 | 52 | This will install the following main libraries: 53 | - `react` and `react-dom`: For building the user interface 54 | - `@mui/material` and `@emotion/react`: For Material-UI components and styling 55 | - `axios`: For making HTTP requests to the LLM APIs 56 | - `react-resizable-panels`: For the resizable panel layout 57 | 58 | 4. Create a `.env` file in the root directory and add your API keys: 59 | ``` 60 | REACT_APP_OPENAI_API_KEY=your_openai_api_key_here 61 | REACT_APP_ANTHROPIC_API_KEY=your_anthropic_api_key_here 62 | ``` 63 | 64 | 5. Start the development server: 65 | ```bash 66 | npm start 67 | ``` 68 | 69 | 6. Open your browser and visit `http://localhost:3000` to use the application. 70 | 71 | ## Usage 72 | 73 | 1. Configure your API keys for the LLM providers you want to use (OpenAI, Anthropic, etc.) in the "Model Configurations" section. 74 | 75 | 2. Create variables if needed in the "Variables" section. 76 | 77 | 3. Enter your prompt in the main prompt area or use the global prompt feature. 78 | 79 | 4. Click "Run Prompt" to send the prompt to the configured models. 80 | 81 | 5. View the outputs in the respective model sections. 82 | 83 | 6. Save prompts, variables, or model configurations for future use. 84 | 85 | ## Contributing 86 | 87 | Contributions are welcome! Please feel free to submit a Pull Request. 88 | 89 | ## License 90 | 91 | This project is licensed under the MIT License. 92 | 93 | ``` 94 | MIT License 95 | Permission is hereby granted, free of charge, to any person obtaining a copy 96 | of this software and associated documentation files (the "Software"), to deal 97 | in the Software without restriction, including without limitation the rights 98 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 99 | copies of the Software, and to permit persons to whom the Software is 100 | furnished to do so, subject to the following conditions: 101 | The above copyright notice and this permission notice shall be included in all 102 | copies or substantial portions of the Software. 103 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 104 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 105 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE
106 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
107 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
108 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
109 | SOFTWARE.
110 | ```
111 | 
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "name": "prompt-engineering-tool",
 3 |   "version": "0.1.0",
 4 |   "private": true,
 5 |   "dependencies": {
 6 |     "@emotion/react": "^11.11.4",
 7 |     "@emotion/styled": "^11.11.5",
 8 |     "@mui/icons-material": "^5.15.20",
 9 |     "@mui/material": "^5.15.20",
10 |     "@testing-library/jest-dom": "^5.17.0",
11 |     "@testing-library/react": "^13.4.0",
12 |     "@testing-library/user-event": "^13.5.0",
13 |     "axios": "^1.7.2",
14 |     "react": "^18.3.1",
15 |     "react-dom": "^18.3.1",
16 |     "react-resizable-panels": "^2.0.19",
17 |     "react-scripts": "5.0.1",
18 |     "uuid": "^9.0.1",
19 |     "web-vitals": "^2.1.4"
20 |   },
21 |   "scripts": {
22 |     "start": "react-scripts start",
23 |     "build": "react-scripts build",
24 |     "test": "react-scripts test",
25 |     "eject": "react-scripts eject"
26 |   },
27 |   "eslintConfig": {
28 |     "extends": [
29 |       "react-app",
30 |       "react-app/jest"
31 |     ]
32 |   },
33 |   "browserslist": {
34 |     "production": [
35 |       ">0.2%",
36 |       "not dead",
37 |       "not op_mini all"
38 |     ],
39 |     "development": [
40 |       "last 1 chrome version",
41 |       "last 1 firefox version",
42 |       "last 1 safari version"
43 |     ]
44 |   }
45 | }
46 | 
--------------------------------------------------------------------------------
/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/teknium1/Prompt-Engineering-Toolkit/57e235acd0c81fa09ac0d9892146ba8a9f0fb7f3/public/favicon.ico
--------------------------------------------------------------------------------
/public/favicon2.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/teknium1/Prompt-Engineering-Toolkit/57e235acd0c81fa09ac0d9892146ba8a9f0fb7f3/public/favicon2.ico
--------------------------------------------------------------------------------
/public/index.html:
--------------------------------------------------------------------------------
 1 | <!DOCTYPE html>
 2 | <html lang="en">
 3 |   <head>
 4 |     <meta charset="utf-8" />
 5 |     <link rel="icon" href="%PUBLIC_URL%/favicon.ico" />
 6 |     <meta name="viewport" content="width=device-width, initial-scale=1" />
 7 |     <meta name="theme-color" content="#000000" />
 8 |     <meta
 9 |       name="description"
10 |       content="Web site created using create-react-app"
11 |     />
12 |     <link rel="apple-touch-icon" href="%PUBLIC_URL%/logo192.png" />
13 |     <!--
14 |       manifest.json provides metadata used when your web app is installed on a
15 |       user's mobile device or added to a home screen. See https://developers.google.com/web/fundamentals/web-app-manifest/
16 |     -->
17 |     <link rel="manifest" href="%PUBLIC_URL%/manifest.json" />
18 |     <!--
19 |       Notice the use of %PUBLIC_URL% in the tags above.
20 |       It will be replaced with the URL of the `public` folder during the build.
21 |       Only files inside the `public` folder can be referenced from the HTML.
22 | 
23 |       Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
24 |       work correctly both with client-side routing and a non-root public URL.
25 |       Learn how to configure a non-root public URL by running `npm run build`.
26 |     -->
27 |     <title>React App</title>
28 |   </head>
29 |   <body>
30 |     <noscript>You need to enable JavaScript to run this app.</noscript>
31 |     <div id="root"></div>
32 |     <!--
33 |       This HTML file is a template.
34 |       If you open it directly in the browser, you will see an empty page.
35 | 
36 |       You can add webfonts, meta tags, or analytics to this file.
37 |       The build step will place the bundled scripts into the <body> tag.
38 | 
39 |       To begin the development, run `npm start` or `yarn start`.
40 |       To create a production bundle, use `npm run build` or `yarn build`.
41 |     -->
42 |   </body>
43 | </html>
44 | 
--------------------------------------------------------------------------------
/public/logo192.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/teknium1/Prompt-Engineering-Toolkit/57e235acd0c81fa09ac0d9892146ba8a9f0fb7f3/public/logo192.png
--------------------------------------------------------------------------------
/public/logo512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/teknium1/Prompt-Engineering-Toolkit/57e235acd0c81fa09ac0d9892146ba8a9f0fb7f3/public/logo512.png
--------------------------------------------------------------------------------
/public/manifest.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "short_name": "React App",
 3 |   "name": "Create React App Sample",
 4 |   "icons": [
 5 |     {
 6 |       "src": "favicon.ico",
 7 |       "sizes": "64x64 32x32 24x24 16x16",
 8 |       "type": "image/x-icon"
 9 |     },
10 |     {
11 |       "src": "logo192.png",
12 |       "type": "image/png",
13 |       "sizes": "192x192"
14 |     },
15 |     {
16 |       "src": "logo512.png",
17 |       "type": "image/png",
18 |       "sizes": "512x512"
19 |     }
20 |   ],
21 |   "start_url": ".",
22 |   "display": "standalone",
23 |   "theme_color": "#000000",
24 |   "background_color": "#ffffff"
25 | }
26 | 
--------------------------------------------------------------------------------
/public/robots.txt:
--------------------------------------------------------------------------------
 1 | # https://www.robotstxt.org/robotstxt.html
 2 | User-agent: *
 3 | Disallow:
 4 | 
--------------------------------------------------------------------------------
/src/App.js:
--------------------------------------------------------------------------------
 1 | import React, { useState, useEffect } from 'react';
 2 | import {
 3 |   Paper, TextField, Button, Typography,
 4 |   List, ListItem, IconButton, Box, Select, MenuItem, FormControl, InputLabel, Slider, Dialog, DialogTitle, DialogContent, DialogActions,
 5 |   Switch, FormControlLabel, Accordion, AccordionSummary, AccordionDetails,
 6 |   AppBar, Toolbar
 7 | } from '@mui/material';
 8 | import DeleteIcon from '@mui/icons-material/Delete';
 9 | import AddIcon from '@mui/icons-material/Add';
10 | import SaveIcon from '@mui/icons-material/Save';
11 | import DownloadIcon from '@mui/icons-material/Download';
12 | import ExpandMoreIcon from '@mui/icons-material/ExpandMore';
13 | import axios from 'axios';
14 | import { v4 as uuidv4 } from 'uuid';
15 | import { Panel, PanelGroup, PanelResizeHandle } from 'react-resizable-panels';
16 | 
17 | function App() {
18 |   const [modelConfigs, setModelConfigs] = useState([
19 |     {
20 |       id: Date.now(),
21 |       provider: 'openai',
22 |       model: 'gpt-3.5-turbo',
23 |       temperature: 0.7,
24 |       maxTokens: 1000,
25 |       apiKey: '',
26 |       endpoint: 'https://api.openai.com/v1/chat/completions',
27 |       prompts: [{ id: Date.now(), systemPrompt: '', userPrompt: '', output: '' }]
28 |     }
29 |   ]);
30 |   const [variables, setVariables] = useState([]);
31 |   const [savedPrompts, setSavedPrompts] = useState([]);
32 |   const [savedModels, setSavedModels] = useState([]);
33 |   const [savedVariables, setSavedVariables] = useState([]);
34 |   const [openSaveModelDialog, setOpenSaveModelDialog] = useState(false);
35 |   const [openSaveVariablesDialog, setOpenSaveVariablesDialog] = useState(false);
36 |   const [openSavePromptDialog, setOpenSavePromptDialog] = useState(false);
37 |   const [modelNameToSave, setModelNameToSave] = useState('');
38 |   const
[variablesNameToSave, setVariablesNameToSave] = useState(''); 39 | const [promptNameToSave, setPromptNameToSave] = useState(''); 40 | const [modelToSave, setModelToSave] = useState(null); 41 | const [promptToSave, setPromptToSave] = useState(null); 42 | const [globalSystemPrompt, setGlobalSystemPrompt] = useState(''); 43 | const [globalUserPrompt, setGlobalUserPrompt] = useState(''); 44 | const [useGlobalPrompt, setUseGlobalPrompt] = useState(false); 45 | const [selectedReviewModel, setSelectedReviewModel] = useState(''); 46 | const [promptReviewSuggestion, setPromptReviewSuggestion] = useState(''); 47 | 48 | useEffect(() => { 49 | const saved = localStorage.getItem('savedPrompts'); 50 | if (saved) setSavedPrompts(JSON.parse(saved)); 51 | const savedModelsData = localStorage.getItem('savedModels'); 52 | if (savedModelsData) setSavedModels(JSON.parse(savedModelsData)); 53 | const savedVariablesData = localStorage.getItem('savedVariables'); 54 | if (savedVariablesData) setSavedVariables(JSON.parse(savedVariablesData)); 55 | }, []); 56 | 57 | const handleAddVariable = () => setVariables([...variables, { name: '', value: '' }]); 58 | 59 | const handleVariableChange = (index, field, value) => { 60 | const newVariables = [...variables]; 61 | newVariables[index][field] = value; 62 | setVariables(newVariables); 63 | }; 64 | 65 | const handleDeleteVariable = (index) => { 66 | const newVariables = variables.filter((_, i) => i !== index); 67 | setVariables(newVariables); 68 | }; 69 | 70 | const handleModelConfigChange = (id, field, value) => { 71 | const newConfigs = modelConfigs.map(config => 72 | config.id === id ? {...config, [field]: value} : config 73 | ); 74 | setModelConfigs(newConfigs); 75 | }; 76 | 77 | const handleAddModel = () => { 78 | const newModel = { 79 | id: Date.now(), 80 | provider: 'openai', 81 | model: 'gpt-3.5-turbo', 82 | temperature: 0.7, 83 | maxTokens: 1000, 84 | apiKey: '', 85 | endpoint: 'https://api.openai.com/v1/chat/completions', 86 | prompts: [{ id: Date.now(), systemPrompt: '', userPrompt: '', output: '' }] 87 | }; 88 | setModelConfigs([...modelConfigs, newModel]); 89 | }; 90 | 91 | const handleDeleteModel = (id) => { 92 | const newConfigs = modelConfigs.filter(config => config.id !== id); 93 | setModelConfigs(newConfigs); 94 | }; 95 | 96 | const handleAddPrompt = (modelId) => { 97 | const model = modelConfigs.find(m => m.id === modelId); 98 | if (model.prompts[model.prompts.length - 1].output) { 99 | const newConfigs = modelConfigs.map(config => { 100 | if (config.id === modelId) { 101 | return { 102 | ...config, 103 | prompts: [...config.prompts, { id: Date.now(), userPrompt: '', output: '' }] 104 | }; 105 | } 106 | return config; 107 | }); 108 | setModelConfigs(newConfigs); 109 | } 110 | }; 111 | 112 | const handleRemoveLastTurn = (modelId) => { 113 | const newConfigs = modelConfigs.map(config => { 114 | if (config.id === modelId && config.prompts.length > 1) { 115 | return { 116 | ...config, 117 | prompts: config.prompts.slice(0, -1) 118 | }; 119 | } 120 | return config; 121 | }); 122 | setModelConfigs(newConfigs); 123 | }; 124 | 125 | const handlePromptChange = (modelId, promptId, field, value) => { 126 | const newConfigs = modelConfigs.map(config => { 127 | if (config.id === modelId) { 128 | return { 129 | ...config, 130 | prompts: config.prompts.map(prompt => 131 | prompt.id === promptId ? 
{...prompt, [field]: value} : prompt 132 | ) 133 | }; 134 | } 135 | return config; 136 | }); 137 | setModelConfigs(newConfigs); 138 | }; 139 | 140 | const openSavePromptDialogHandler = (modelId, promptId) => { 141 | setPromptToSave({ modelId, promptId }); 142 | setPromptNameToSave(''); 143 | setOpenSavePromptDialog(true); 144 | }; 145 | 146 | const savePrompt = () => { 147 | if (promptNameToSave) { 148 | let promptContent; 149 | if (useGlobalPrompt) { 150 | promptContent = { systemPrompt: globalSystemPrompt, userPrompt: globalUserPrompt }; 151 | } else if (promptToSave) { 152 | const { modelId, promptId } = promptToSave; 153 | const model = modelConfigs.find(m => m.id === modelId); 154 | const prompt = model.prompts.find(p => p.id === promptId); 155 | promptContent = { systemPrompt: prompt.systemPrompt, userPrompt: prompt.userPrompt }; 156 | } 157 | const newSavedPrompts = [...savedPrompts, { name: promptNameToSave, content: promptContent }]; 158 | setSavedPrompts(newSavedPrompts); 159 | localStorage.setItem('savedPrompts', JSON.stringify(newSavedPrompts)); 160 | setOpenSavePromptDialog(false); 161 | } 162 | }; 163 | 164 | const loadPrompt = (savedPrompt) => { 165 | if (useGlobalPrompt) { 166 | setGlobalSystemPrompt(savedPrompt.content.systemPrompt); 167 | setGlobalUserPrompt(savedPrompt.content.userPrompt); 168 | } else { 169 | const newConfigs = modelConfigs.map(config => ({ 170 | ...config, 171 | prompts: config.prompts.map(prompt => ({ 172 | ...prompt, 173 | systemPrompt: savedPrompt.content.systemPrompt, 174 | userPrompt: savedPrompt.content.userPrompt 175 | })) 176 | })); 177 | setModelConfigs(newConfigs); 178 | } 179 | }; 180 | 181 | const removeSavedPrompt = (index) => { 182 | const newSavedPrompts = savedPrompts.filter((_, i) => i !== index); 183 | setSavedPrompts(newSavedPrompts); 184 | localStorage.setItem('savedPrompts', JSON.stringify(newSavedPrompts)); 185 | }; 186 | 187 | const openSaveModelDialogHandler = (model) => { 188 | setModelToSave(model); 189 | setModelNameToSave(''); 190 | setOpenSaveModelDialog(true); 191 | }; 192 | 193 | const saveModel = () => { 194 | if (modelNameToSave && modelToSave) { 195 | const modelToSaveWithoutId = {...modelToSave, id: undefined}; 196 | const newSavedModels = [...savedModels, { name: modelNameToSave, config: modelToSaveWithoutId }]; 197 | setSavedModels(newSavedModels); 198 | localStorage.setItem('savedModels', JSON.stringify(newSavedModels)); 199 | setOpenSaveModelDialog(false); 200 | } 201 | }; 202 | 203 | const loadModel = (savedModel, modelId) => { 204 | const newConfigs = modelConfigs.map(config => { 205 | if (config.id === modelId) { 206 | return { 207 | ...config, 208 | ...savedModel.config, 209 | id: config.id, 210 | prompts: config.prompts 211 | }; 212 | } 213 | return config; 214 | }); 215 | setModelConfigs(newConfigs); 216 | }; 217 | 218 | const removeSavedModel = (index) => { 219 | const newSavedModels = savedModels.filter((_, i) => i !== index); 220 | setSavedModels(newSavedModels); 221 | localStorage.setItem('savedModels', JSON.stringify(newSavedModels)); 222 | }; 223 | 224 | const openSaveVariablesDialogHandler = () => { 225 | setVariablesNameToSave(''); 226 | setOpenSaveVariablesDialog(true); 227 | }; 228 | 229 | const saveVariables = () => { 230 | if (variablesNameToSave) { 231 | const newSavedVariables = [...savedVariables, { name: variablesNameToSave, variables: variables }]; 232 | setSavedVariables(newSavedVariables); 233 | localStorage.setItem('savedVariables', JSON.stringify(newSavedVariables)); 234 | 
setOpenSaveVariablesDialog(false); 235 | } 236 | }; 237 | 238 | const loadVariables = (savedVariableSet) => { 239 | setVariables(savedVariableSet.variables); 240 | }; 241 | 242 | const removeSavedVariables = (index) => { 243 | const newSavedVariables = savedVariables.filter((_, i) => i !== index); 244 | setSavedVariables(newSavedVariables); 245 | localStorage.setItem('savedVariables', JSON.stringify(newSavedVariables)); 246 | }; 247 | 248 | const toggleGlobalPrompt = () => { 249 | setUseGlobalPrompt(!useGlobalPrompt); 250 | }; 251 | 252 | const runPrompt = async (modelId, promptId) => { 253 | const model = modelConfigs.find(m => m.id === modelId); 254 | const prompt = model.prompts.find(p => p.id === promptId); 255 | let messages = []; 256 | 257 | if (promptId === model.prompts[0].id && useGlobalPrompt) { 258 | messages = [ 259 | { role: 'system', content: globalSystemPrompt }, 260 | { role: 'user', content: globalUserPrompt } 261 | ]; 262 | } else { 263 | const previousPrompts = model.prompts.slice(0, model.prompts.findIndex(p => p.id === promptId) + 1); 264 | messages = previousPrompts.flatMap((p, index) => { 265 | if (index === 0 && !useGlobalPrompt) { 266 | return [ 267 | { role: 'system', content: p.systemPrompt }, 268 | { role: 'user', content: p.userPrompt } 269 | ]; 270 | } else { 271 | return [ 272 | { role: 'user', content: p.userPrompt }, 273 | ...(p.output ? [{ role: 'assistant', content: p.output }] : []) 274 | ]; 275 | } 276 | }); 277 | } 278 | 279 | // Apply variables 280 | messages = messages.map(message => ({ 281 | ...message, 282 | content: variables.reduce((content, variable) => 283 | content.replace(new RegExp(`\\{${variable.name}\\}`, 'g'), variable.value), 284 | message.content 285 | ) 286 | })); 287 | 288 | try { 289 | let response; 290 | switch (model.provider) { 291 | case 'openai': 292 | response = await axios.post(model.endpoint, { 293 | model: model.model, 294 | messages: messages, 295 | temperature: model.temperature, 296 | max_tokens: model.maxTokens, 297 | }, { 298 | headers: { 'Authorization': `Bearer ${model.apiKey}` } 299 | }); 300 | break; 301 | case 'anthropic': 302 | // Implement Anthropic API call here 303 | break; 304 | case 'azure': 305 | response = await axios.post( 306 | `https://${model.resourceName}.openai.azure.com/openai/deployments/${model.deploymentId}/chat/completions?api-version=2023-05-15`, 307 | { 308 | messages: messages, 309 | temperature: model.temperature, 310 | max_tokens: model.maxTokens, 311 | }, 312 | { 313 | headers: { 'api-key': model.apiKey } 314 | } 315 | ); 316 | break; 317 | case 'bedrock': 318 | // This is a placeholder and won't work as-is 319 | // You'll need to use AWS SDK for JavaScript v3 for this 320 | console.log('Amazon Bedrock API call not implemented'); 321 | break; 322 | default: 323 | throw new Error('Unknown provider'); 324 | } 325 | 326 | const output = response.data.choices[0].message.content; 327 | setModelConfigs(prevConfigs => prevConfigs.map(config => { 328 | if (config.id === modelId) { 329 | return { 330 | ...config, 331 | prompts: config.prompts.map(p => 332 | p.id === promptId ? {...p, output} : p 333 | ) 334 | }; 335 | } 336 | return config; 337 | })); 338 | } catch (error) { 339 | console.error('Error calling API:', error); 340 | const errorOutput = `Error: ${error.message}`; 341 | setModelConfigs(prevConfigs => prevConfigs.map(config => { 342 | if (config.id === modelId) { 343 | return { 344 | ...config, 345 | prompts: config.prompts.map(p => 346 | p.id === promptId ? 
{...p, output: errorOutput} : p 347 | ) 348 | }; 349 | } 350 | return config; 351 | })); 352 | } 353 | }; 354 | 355 | const runAllPrompts = async () => { 356 | for (const config of modelConfigs) { 357 | for (const prompt of config.prompts) { 358 | await runPrompt(config.id, prompt.id); 359 | } 360 | } 361 | }; 362 | 363 | const reviewPrompt = async () => { 364 | if (!selectedReviewModel) return; 365 | 366 | const model = savedModels.find(m => m.name === selectedReviewModel)?.config; 367 | if (!model) return; 368 | 369 | const prompt = `Please review and suggest improvements for the following prompt: 370 | System Prompt: ${globalSystemPrompt} 371 | User Prompt: ${globalUserPrompt} 372 | 373 | Provide concise suggestions to improve the prompt's effectiveness.`; 374 | 375 | try { 376 | const response = await axios.post(model.endpoint, { 377 | model: model.model, 378 | messages: [{ role: 'user', content: prompt }], 379 | temperature: model.temperature, 380 | max_tokens: model.maxTokens, 381 | }, { 382 | headers: { 'Authorization': `Bearer ${model.apiKey}` } 383 | }); 384 | setPromptReviewSuggestion(response.data.choices[0].message.content); 385 | } catch (error) { 386 | console.error('Error reviewing prompt:', error); 387 | setPromptReviewSuggestion(`Error reviewing prompt: ${error.message}`); 388 | } 389 | }; 390 | 391 | const saveConversationData = () => { 392 | const conversationData = modelConfigs.map(config => { 393 | const modelName = savedModels.find(m => m.config.model === config.model)?.name || 394 | `${config.provider}-Endpoint-${config.endpoint.split('/').pop()}-${config.model}-temp-${config.temperature}`; 395 | 396 | const conversations = config.prompts.flatMap(prompt => [ 397 | { role: "system", content: prompt.systemPrompt }, 398 | { role: "user", content: prompt.userPrompt }, 399 | { role: "assistant", content: prompt.output } 400 | ]); 401 | 402 | return { 403 | conversations, 404 | model: modelName 405 | }; 406 | }); 407 | 408 | const jsonData = JSON.stringify(conversationData, null, 2); 409 | const blob = new Blob([jsonData], { type: 'application/json' }); 410 | const url = URL.createObjectURL(blob); 411 | const link = document.createElement('a'); 412 | link.href = url; 413 | link.download = `conversation-${uuidv4()}.json`; 414 | document.body.appendChild(link); 415 | link.click(); 416 | document.body.removeChild(link); 417 | }; 418 | 419 | return ( 420 | 421 | Prompt Engineering Tool 422 | 423 | 424 | 425 | 426 | Saved Prompts 427 | 428 | {savedPrompts.map((savedPrompt, index) => ( 429 | 430 | 433 | removeSavedPrompt(index)}> 434 | 435 | ))} 436 | 437 | 438 | 439 | 440 | 441 | 442 | 443 | } 445 | label="Use Global Prompt" 446 | /> 447 | {useGlobalPrompt && ( 448 | <> 449 | setGlobalSystemPrompt(e.target.value)} 457 | variant="outlined" 458 | sx={{ mt: 2, mb: 2 }} 459 | /> 460 | setGlobalUserPrompt(e.target.value)} 468 | variant="outlined" 469 | sx={{ mb: 2 }} 470 | /> 471 | 472 | )} 473 | 474 | 477 | 480 | 481 | 482 | 483 | {modelConfigs.map((config) => ( 484 | 485 | 486 | 487 | Model 488 | 505 | 506 | {!useGlobalPrompt ? 
( 507 | config.prompts.map((prompt, index) => ( 508 | 509 | Turn {index + 1} 510 | {index === 0 && ( 511 | handlePromptChange(config.id, prompt.id, 'systemPrompt', e.target.value)} 519 | variant="outlined" 520 | sx={{ mb: 2 }} 521 | /> 522 | )} 523 | handlePromptChange(config.id, prompt.id, 'userPrompt', e.target.value)} 531 | variant="outlined" 532 | sx={{ mb: 2 }} 533 | /> 534 | 535 | 538 | 541 | 542 | 553 | 554 | )) 555 | ) : ( 556 | 567 | )} 568 | {!useGlobalPrompt && ( 569 | 570 | {config.prompts[config.prompts.length - 1].output && ( 571 | 574 | )} 575 | {config.prompts.length > 1 && ( 576 | 579 | )} 580 | 581 | )} 582 | 583 | 584 | ))} 585 | 586 | 587 | 590 | 591 | 592 | 593 | 594 | 595 | 596 | 597 | Variables 598 | 599 | {variables.map((variable, index) => ( 600 | 601 | 602 | handleVariableChange(index, 'name', e.target.value)} 608 | sx={{ mb: 1 }} 609 | /> 610 | handleVariableChange(index, 'value', e.target.value)} 618 | /> 619 | handleDeleteVariable(index)} 621 | sx={{ position: 'absolute', top: 0, right: 0 }} 622 | > 623 | 624 | 625 | 626 | 627 | ))} 628 | 629 | 630 | 633 | 636 | 637 | 638 | 639 | Saved Variables 640 | 641 | {savedVariables.map((savedVarSet, index) => ( 642 | 643 | 646 | removeSavedVariables(index)}> 647 | 648 | ))} 649 | 650 | 651 | 652 | Model Configurations 653 | {modelConfigs.map((config) => ( 654 | 655 | 656 | Provider 657 | 664 | 665 | handleModelConfigChange(config.id, 'model', e.target.value)} 667 | /> 668 | {config.provider === 'azure' && ( 669 | <> 670 | handleModelConfigChange(config.id, 'resourceName', e.target.value)} 672 | /> 673 | handleModelConfigChange(config.id, 'deploymentId', e.target.value)} 675 | /> 676 | 677 | )} 678 | {config.provider === 'bedrock' && ( 679 | <> 680 | handleModelConfigChange(config.id, 'region', e.target.value)} 682 | /> 683 | handleModelConfigChange(config.id, 'modelId', e.target.value)} 685 | /> 686 | 687 | )} 688 | handleModelConfigChange(config.id, 'apiKey', e.target.value)} 690 | type="password" 691 | /> 692 | handleModelConfigChange(config.id, 'endpoint', e.target.value)} 694 | /> 695 | handleModelConfigChange(config.id, 'maxTokens', parseInt(e.target.value))} 697 | /> 698 | Temperature: {config.temperature} 699 | handleModelConfigChange(config.id, 'temperature', newValue)} 702 | min={0} max={1} step={0.1} 703 | /> 704 | 705 | 708 | 711 | 712 | 713 | ))} 714 | 715 | } 717 | aria-controls="saved-models-content" 718 | id="saved-models-header" 719 | > 720 | Saved Models 721 | 722 | 723 | {savedModels.map((savedModel, index) => ( 724 | 725 | {savedModel.name} 726 | Provider: {savedModel.config.provider} 727 | Model: {savedModel.config.model} 728 | Temperature: {savedModel.config.temperature} 729 | Max Tokens: {savedModel.config.maxTokens} 730 | 731 | 734 | 737 | 738 | 739 | ))} 740 | 741 | 742 | 743 | 744 | 745 | 746 | 747 | 748 | 749 | 750 | Review Model 751 | 760 | 761 | 764 | {promptReviewSuggestion} 765 | 768 | 769 | 770 | setOpenSavePromptDialog(false)}> 771 | Save Prompt 772 | 773 | setPromptNameToSave(e.target.value)} 782 | /> 783 | 784 | 785 | 786 | 787 | 788 | 789 | setOpenSaveModelDialog(false)}> 790 | Save Model Configuration 791 | 792 | setModelNameToSave(e.target.value)} 801 | /> 802 | 803 | 804 | 805 | 806 | 807 | 808 | setOpenSaveVariablesDialog(false)}> 809 | Save Variables 810 | 811 | setVariablesNameToSave(e.target.value)} 820 | /> 821 | 822 | 823 | 824 | 825 | 826 | 827 | 828 | ); 829 | } 830 | 831 | export default App; -------------------------------------------------------------------------------- 
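The `anthropic` case inside `runPrompt` in `src/App.js` is currently a stub (`// Implement Anthropic API call here`), so selecting that provider falls through to the error handler. Below is a minimal sketch of what that branch could look like, assuming the Anthropic Messages API at `https://api.anthropic.com/v1/messages`; the `callAnthropic` helper name is illustrative and not part of the app.

```javascript
// Sketch only: one possible implementation of the 'anthropic' branch in runPrompt.
// Assumes model.apiKey holds an Anthropic key; the helper name is hypothetical.
import axios from 'axios';

async function callAnthropic(model, messages) {
  // The Messages API expects the system prompt as a top-level `system` field,
  // not as a message with role "system".
  const systemPrompt = messages.find(m => m.role === 'system')?.content || '';
  const chatMessages = messages.filter(m => m.role !== 'system');

  const response = await axios.post(
    'https://api.anthropic.com/v1/messages',
    {
      model: model.model,
      system: systemPrompt,
      messages: chatMessages,
      max_tokens: model.maxTokens,
      temperature: model.temperature,
    },
    {
      headers: {
        'x-api-key': model.apiKey,
        'anthropic-version': '2023-06-01',
        'content-type': 'application/json',
      },
    }
  );

  // Unlike OpenAI's choices[0].message.content, Anthropic returns an array of
  // content blocks; the first block's text is the assistant reply.
  return response.data.content[0].text;
}
```

Note that calling the API directly from the browser may be blocked by CORS, so in practice the request usually needs to go through a small backend proxy.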
/src/App.test.js:
--------------------------------------------------------------------------------
 1 | import { render, screen } from '@testing-library/react';
 2 | import App from './App';
 3 | 
 4 | test('renders the app title', () => {
 5 |   render(<App />);
 6 |   const titleElement = screen.getByText(/prompt engineering tool/i);
 7 |   expect(titleElement).toBeInTheDocument();
 8 | });
 9 | 
--------------------------------------------------------------------------------
/src/index.css:
--------------------------------------------------------------------------------
 1 | body {
 2 |   margin: 0;
 3 |   font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen',
 4 |     'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue',
 5 |     sans-serif;
 6 |   -webkit-font-smoothing: antialiased;
 7 |   -moz-osx-font-smoothing: grayscale;
 8 | }
 9 | 
10 | code {
11 |   font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New',
12 |     monospace;
13 | }
14 | 
--------------------------------------------------------------------------------
/src/index.js:
--------------------------------------------------------------------------------
 1 | import React from 'react';
 2 | import { createRoot } from 'react-dom/client';
 3 | import CssBaseline from '@mui/material/CssBaseline';
 4 | import { ThemeProvider, createTheme } from '@mui/material/styles';
 5 | import App from './App';
 6 | 
 7 | const theme = createTheme();
 8 | 
 9 | const root = createRoot(document.getElementById('root'));
10 | root.render(
11 |   <React.StrictMode>
12 |     <ThemeProvider theme={theme}>
13 |       <CssBaseline />
14 |       <App />
15 |     </ThemeProvider>
16 |   </React.StrictMode>
17 | );
--------------------------------------------------------------------------------
/src/logo.svg:
--------------------------------------------------------------------------------
 1 | 
--------------------------------------------------------------------------------
/src/reportWebVitals.js:
--------------------------------------------------------------------------------
 1 | const reportWebVitals = onPerfEntry => {
 2 |   if (onPerfEntry && onPerfEntry instanceof Function) {
 3 |     import('web-vitals').then(({ getCLS, getFID, getFCP, getLCP, getTTFB }) => {
 4 |       getCLS(onPerfEntry);
 5 |       getFID(onPerfEntry);
 6 |       getFCP(onPerfEntry);
 7 |       getLCP(onPerfEntry);
 8 |       getTTFB(onPerfEntry);
 9 |     });
10 |   }
11 | };
12 | 
13 | export default reportWebVitals;
14 | 
--------------------------------------------------------------------------------
/src/setupTests.js:
--------------------------------------------------------------------------------
 1 | // jest-dom adds custom jest matchers for asserting on DOM nodes.
 2 | // allows you to do things like:
 3 | // expect(element).toHaveTextContent(/react/i)
 4 | // learn more: https://github.com/testing-library/jest-dom
 5 | import '@testing-library/jest-dom';
 6 | 
--------------------------------------------------------------------------------
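Assuming the Dockerfile declares `REACT_APP_OPENAI_API_KEY` and `REACT_APP_ANTHROPIC_API_KEY` as build arguments (`ARG`), the image can be built and served as sketched below. The values shown are placeholders, and, as the Dockerfile itself notes, baking real keys into an image is not suitable for production.

```bash
# Build the image, passing the keys as build arguments (placeholder values)
docker build \
  --build-arg REACT_APP_OPENAI_API_KEY=your_openai_api_key_here \
  --build-arg REACT_APP_ANTHROPIC_API_KEY=your_anthropic_api_key_here \
  -t prompt-engineering-toolkit .

# Serve the production build on http://localhost:3000
docker run --rm -p 3000:3000 prompt-engineering-toolkit
```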