├── .env
├── .gitignore
├── README.md
├── index.html
├── package-lock.json
├── package.json
├── postcss.config.cjs
├── public
│   ├── favicon-dark
│   │   ├── android-chrome-192x192.png
│   │   ├── android-chrome-512x512.png
│   │   ├── apple-touch-icon.png
│   │   ├── favicon-16x16.png
│   │   ├── favicon-32x32.png
│   │   ├── favicon.ico
│   │   └── site.webmanifest
│   ├── favicon-light
│   │   ├── android-chrome-192x192.png
│   │   ├── android-chrome-512x512.png
│   │   ├── apple-touch-icon.png
│   │   ├── favicon-16x16.png
│   │   ├── favicon-32x32.png
│   │   ├── favicon.ico
│   │   └── site.webmanifest
│   └── images
│       └── site-screenshot.png
├── src
│   ├── App.tsx
│   ├── design_system
│   │   ├── Button.tsx
│   │   ├── Message.tsx
│   │   └── SyntaxHighlighter.tsx
│   ├── hooks
│   │   └── useVoices.ts
│   ├── index.css
│   ├── lib
│   │   ├── api.ts
│   │   ├── config.ts
│   │   ├── storage.ts
│   │   └── voice.ts
│   ├── main.tsx
│   └── vite-env.d.ts
├── tailwind.config.cjs
├── tsconfig.json
├── tsconfig.node.json
└── vite.config.ts
/.env:
--------------------------------------------------------------------------------
1 | VITE_IS_LOCAL_SETUP_REQUIRED="0"
2 | VITE_API_HOST="https://chatgpt-voice-server.herokuapp.com"
3 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | pnpm-debug.log*
8 | lerna-debug.log*
9 |
10 | node_modules
11 | dist
12 | dist-ssr
13 | *.local
14 |
15 | # Editor directories and files
16 | .vscode/*
17 | !.vscode/extensions.json
18 | .idea
19 | .DS_Store
20 | *.suo
21 | *.ntvs*
22 | *.njsproj
23 | *.sln
24 | *.sw?
25 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 | Have a conversation with ChatGPT. Casually 🔈 🤖 ⚡️
10 |
11 |
12 |
13 | Website | Backend
14 |
17 |
18 |
19 |
20 | ## Features
21 |
22 | - 📣 Conversation with ChatGPT, with full context, in a neat UI.
23 | - ⚙️ Customizable voice and speech rate.
24 |
25 |
26 |
27 |
28 |
29 | ## Development
30 |
31 | You can also set up this project locally to play with it, contribute to it, or hack it to your heart's content. Simply clone it, install dependencies, then start the dev server.
32 |
33 | ```bash
34 | git clone https://github.com/sonngdev/chatgpt-voice.git
35 | cd chatgpt-voice
36 | npm install
37 | npm run dev
38 | ```
39 |
40 | [A backend server](https://github.com/sonngdev/chatgpt-server) accompanies this frontend client. See its `README` file for an installation guide; a sample local `.env` for this client is sketched below.
41 |
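If you want the client to talk to a locally running backend instead of the hosted API, here is a hypothetical `.env` for local development (not the checked-in config, which points at the hosted backend). `src/lib/config.ts` requires both variables to be defined, and `src/App.tsx` only uses the host and port from the Settings modal when `VITE_IS_LOCAL_SETUP_REQUIRED` is truthy:

```bash
# Illustrative local-development .env — adjust to taste.
VITE_IS_LOCAL_SETUP_REQUIRED="1"       # "1" => the app calls the host/port configured in the Settings modal
VITE_API_HOST="http://localhost:8000"  # still required by src/lib/config.ts, but unused while local setup is on
```

With this in place, the client defaults to `http://localhost:8000` (the `host` and `port` defaults in `src/App.tsx`), which matches the `chatgpt-server` setup described in the Settings modal.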
--------------------------------------------------------------------------------
/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 | ChatGPT With Voice
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "chatgpt-voice",
3 | "private": true,
4 | "version": "0.0.0",
5 | "type": "module",
6 | "scripts": {
7 | "dev": "vite",
8 | "build": "tsc && vite build",
9 | "preview": "vite preview"
10 | },
11 | "dependencies": {
12 | "@radix-ui/react-dialog": "^1.0.2",
13 | "@radix-ui/react-select": "^1.1.2",
14 | "@radix-ui/react-slider": "^1.1.0",
15 | "@radix-ui/react-tooltip": "^1.0.2",
16 | "react": "^18.2.0",
17 | "react-device-detect": "^2.2.2",
18 | "react-dom": "^18.2.0",
19 | "react-feather": "^2.0.10",
20 | "react-speech-recognition": "^3.10.0",
21 | "react-syntax-highlighter": "^15.5.0",
22 | "regenerator-runtime": "^0.13.11"
23 | },
24 | "devDependencies": {
25 | "@types/react": "^18.0.26",
26 | "@types/react-dom": "^18.0.9",
27 | "@types/react-speech-recognition": "^3.9.0",
28 | "@types/react-syntax-highlighter": "^15.5.6",
29 | "@vitejs/plugin-react": "^3.0.0",
30 | "autoprefixer": "^10.4.13",
31 | "postcss": "^8.4.21",
32 | "tailwindcss": "^3.2.4",
33 | "typescript": "^4.9.3",
34 | "vite": "^4.0.0"
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/postcss.config.cjs:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | plugins: {
3 | tailwindcss: {},
4 | autoprefixer: {},
5 | },
6 | }
7 |
--------------------------------------------------------------------------------
/public/favicon-dark/android-chrome-192x192.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sonngdev/chatgpt-voice/a6d19826d47f8ff67181b4a5a3c17194cbaa6506/public/favicon-dark/android-chrome-192x192.png
--------------------------------------------------------------------------------
/public/favicon-dark/android-chrome-512x512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sonngdev/chatgpt-voice/a6d19826d47f8ff67181b4a5a3c17194cbaa6506/public/favicon-dark/android-chrome-512x512.png
--------------------------------------------------------------------------------
/public/favicon-dark/apple-touch-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sonngdev/chatgpt-voice/a6d19826d47f8ff67181b4a5a3c17194cbaa6506/public/favicon-dark/apple-touch-icon.png
--------------------------------------------------------------------------------
/public/favicon-dark/favicon-16x16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sonngdev/chatgpt-voice/a6d19826d47f8ff67181b4a5a3c17194cbaa6506/public/favicon-dark/favicon-16x16.png
--------------------------------------------------------------------------------
/public/favicon-dark/favicon-32x32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sonngdev/chatgpt-voice/a6d19826d47f8ff67181b4a5a3c17194cbaa6506/public/favicon-dark/favicon-32x32.png
--------------------------------------------------------------------------------
/public/favicon-dark/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sonngdev/chatgpt-voice/a6d19826d47f8ff67181b4a5a3c17194cbaa6506/public/favicon-dark/favicon.ico
--------------------------------------------------------------------------------
/public/favicon-dark/site.webmanifest:
--------------------------------------------------------------------------------
1 | {"name":"","short_name":"","icons":[{"src":"/android-chrome-192x192.png","sizes":"192x192","type":"image/png"},{"src":"/android-chrome-512x512.png","sizes":"512x512","type":"image/png"}],"theme_color":"#ffffff","background_color":"#ffffff","display":"standalone"}
--------------------------------------------------------------------------------
/public/favicon-light/android-chrome-192x192.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sonngdev/chatgpt-voice/a6d19826d47f8ff67181b4a5a3c17194cbaa6506/public/favicon-light/android-chrome-192x192.png
--------------------------------------------------------------------------------
/public/favicon-light/android-chrome-512x512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sonngdev/chatgpt-voice/a6d19826d47f8ff67181b4a5a3c17194cbaa6506/public/favicon-light/android-chrome-512x512.png
--------------------------------------------------------------------------------
/public/favicon-light/apple-touch-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sonngdev/chatgpt-voice/a6d19826d47f8ff67181b4a5a3c17194cbaa6506/public/favicon-light/apple-touch-icon.png
--------------------------------------------------------------------------------
/public/favicon-light/favicon-16x16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sonngdev/chatgpt-voice/a6d19826d47f8ff67181b4a5a3c17194cbaa6506/public/favicon-light/favicon-16x16.png
--------------------------------------------------------------------------------
/public/favicon-light/favicon-32x32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sonngdev/chatgpt-voice/a6d19826d47f8ff67181b4a5a3c17194cbaa6506/public/favicon-light/favicon-32x32.png
--------------------------------------------------------------------------------
/public/favicon-light/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sonngdev/chatgpt-voice/a6d19826d47f8ff67181b4a5a3c17194cbaa6506/public/favicon-light/favicon.ico
--------------------------------------------------------------------------------
/public/favicon-light/site.webmanifest:
--------------------------------------------------------------------------------
1 | {"name":"","short_name":"","icons":[{"src":"/android-chrome-192x192.png","sizes":"192x192","type":"image/png"},{"src":"/android-chrome-512x512.png","sizes":"512x512","type":"image/png"}],"theme_color":"#ffffff","background_color":"#ffffff","display":"standalone"}
--------------------------------------------------------------------------------
/public/images/site-screenshot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sonngdev/chatgpt-voice/a6d19826d47f8ff67181b4a5a3c17194cbaa6506/public/images/site-screenshot.png
--------------------------------------------------------------------------------
/src/App.tsx:
--------------------------------------------------------------------------------
1 | import {
2 | Fragment,
3 | useCallback,
4 | useEffect,
5 | useMemo,
6 | useRef,
7 | useState,
8 | } from 'react';
9 | import { useSpeechRecognition } from 'react-speech-recognition';
10 | import {
11 | GitHub,
12 | Settings,
13 | FilePlus,
14 | Mic,
15 | Activity,
16 | Loader,
17 | AlertTriangle,
18 | X,
19 | ChevronDown,
20 | ChevronUp,
21 | Check,
22 | Headphones,
23 | Info,
24 | } from 'react-feather';
25 | import * as Tooltip from '@radix-ui/react-tooltip';
26 | import * as Dialog from '@radix-ui/react-dialog';
27 | import * as Slider from '@radix-ui/react-slider';
28 | import * as Select from '@radix-ui/react-select';
29 | import { isDesktop, isMobile } from 'react-device-detect';
30 |
31 | import Button from './design_system/Button';
32 | import SyntaxHighlighter from './design_system/SyntaxHighlighter';
33 | import Message from './design_system/Message';
34 | import API from './lib/api';
35 | import Config from './lib/config';
36 | import Storage from './lib/storage';
37 | import Voice from './lib/voice';
38 | import useVoices from './hooks/useVoices';
39 |
40 | interface CreateChatGPTMessageResponse {
41 | answer: string;
42 | messageId: string;
43 | }
44 |
45 | interface Message {
46 | type: 'prompt' | 'response';
47 | text: string;
48 | }
49 |
50 | interface VoiceMappings {
51 | [group: string]: SpeechSynthesisVoice[];
52 | }
53 |
54 | enum State {
55 | IDLE,
56 | LISTENING,
57 | PROCESSING,
58 | }
59 |
60 | const savedData = Storage.load();
61 |
62 | function App() {
63 | const {
64 | browserSupportsSpeechRecognition,
65 | isMicrophoneAvailable,
66 | transcript,
67 | listening,
68 | finalTranscript,
69 | } = useSpeechRecognition();
70 |
71 | const initialMessages: Message[] = [
72 | { type: 'response', text: 'Try speaking to the microphone.' },
73 | ];
74 | const defaultSettingsRef = useRef({
75 | host: 'http://localhost',
76 | port: 8000,
77 | voiceURI: '',
78 | voiceSpeed: 1,
79 | });
80 | const [state, setState] = useState(State.IDLE);
81 | const [messages, setMessages] = useState(initialMessages);
82 | const [settings, setSettings] = useState({
83 | host: (savedData?.host as string) ?? defaultSettingsRef.current.host,
84 | port: (savedData?.port as number) ?? defaultSettingsRef.current.port,
85 | voiceURI:
86 | (savedData?.voiceURI as string) ?? defaultSettingsRef.current.voiceURI,
87 | voiceSpeed:
88 | (savedData?.voiceSpeed as number) ??
89 | defaultSettingsRef.current.voiceSpeed,
90 | });
91 | const [isModalVisible, setIsModalVisible] = useState(false);
92 | const [isTooltipVisible, setIsTooltipVisible] = useState(
93 | Config.IS_LOCAL_SETUP_REQUIRED,
94 | );
95 | const { voices, defaultVoice } = useVoices();
96 | const abortRef = useRef<AbortController | null>(null);
97 | const conversationRef = useRef({ currentMessageId: '' });
98 | const bottomDivRef = useRef<HTMLDivElement | null>(null);
99 |
100 | const availableVoices = useMemo(() => {
101 | const englishTypes = new Map<string, string>();
102 | englishTypes.set('en-AU', 'English (Australia)');
103 | englishTypes.set('en-CA', 'English (Canada)');
104 | englishTypes.set('en-GB', 'English (United Kingdom)');
105 | englishTypes.set('en-IE', 'English (Ireland)');
106 | englishTypes.set('en-IN', 'English (India)');
107 | englishTypes.set('en-NZ', 'English (New Zealand)');
108 | englishTypes.set('en-US', 'English (United States)');
109 |
110 | const localEnglishVoices = voices.filter(
111 | (voice) => voice.localService && voice.lang.startsWith('en-'),
112 | );
113 |
114 | const result: VoiceMappings = {};
115 | for (let voice of localEnglishVoices) {
116 | const label = englishTypes.get(voice.lang);
117 | if (typeof label !== 'string') {
118 | continue;
119 | }
120 | if (!result[label]) {
121 | result[label] = [];
122 | }
123 | result[label].push(voice);
124 | }
125 | return result;
126 | }, [voices]);
127 |
128 | const selectedVoice = useMemo(() => {
129 | return voices.find((voice) => voice.voiceURI === settings.voiceURI);
130 | }, [voices, settings.voiceURI]);
131 |
132 | const recognizeSpeech = () => {
133 | if (state === State.IDLE) {
134 | Voice.enableAutoplay();
135 | Voice.startListening();
136 | } else if (state === State.LISTENING) {
137 | Voice.stopListening();
138 | }
139 | };
140 |
141 | const speak = useCallback(
142 | (text: string) => {
143 | Voice.speak(text, { voice: selectedVoice, rate: settings.voiceSpeed });
144 | },
145 | [selectedVoice, settings.voiceSpeed],
146 | );
147 |
148 | const resetConversation = () => {
149 | setState(State.IDLE);
150 | setMessages(initialMessages);
151 | conversationRef.current = { currentMessageId: '' };
152 |
153 | Voice.idle();
154 | abortRef.current?.abort();
155 | };
156 |
157 | const handleModalOpenChange = (isOpen: boolean) => {
158 | setIsModalVisible(isOpen);
159 | Storage.save(settings);
160 | };
161 |
162 | const resetSetting = (setting: keyof typeof settings) => {
163 | setSettings({
164 | ...settings,
165 | [setting]: defaultSettingsRef.current[setting],
166 | });
167 | };
168 |
169 | useEffect(() => {
170 | setState((oldState) => {
171 | if (listening) {
172 | return State.LISTENING;
173 | }
174 | if (
175 | (oldState === State.LISTENING && transcript) || // At this point finalTranscript may not have a value yet
176 | oldState === State.PROCESSING // Avoid setting state to IDLE when transcript is set to '' while processing
177 | ) {
178 | return State.PROCESSING;
179 | }
180 | return State.IDLE;
181 | });
182 | }, [listening, transcript, finalTranscript]);
183 |
184 | // Scroll to bottom when user is speaking a prompt
185 | useEffect(() => {
186 | if (state === State.LISTENING) {
187 | bottomDivRef.current?.scrollIntoView({ behavior: 'smooth' });
188 | }
189 | }, [state]);
190 |
191 | // Scroll to bottom when there is a new response
192 | useEffect(() => {
193 | bottomDivRef.current?.scrollIntoView({ behavior: 'smooth' });
194 | }, [messages.length]);
195 |
196 | useEffect(() => {
197 | if (!defaultVoice) {
198 | return;
199 | }
200 |
201 | defaultSettingsRef.current.voiceURI = defaultVoice.voiceURI;
202 | setSettings((oldSettings) => {
203 | // If a preferred voice is already set, keep it
204 | if (oldSettings.voiceURI) {
205 | return oldSettings;
206 | }
207 | return {
208 | ...oldSettings,
209 | voiceURI: defaultVoice.voiceURI,
210 | };
211 | });
212 | }, [defaultVoice]);
213 |
214 | useEffect(() => {
215 | if (state !== State.PROCESSING || !finalTranscript) {
216 | return;
217 | }
218 |
219 | setMessages((oldMessages) => [
220 | ...oldMessages,
221 | { type: 'prompt', text: finalTranscript },
222 | ]);
223 |
224 | const host = Config.IS_LOCAL_SETUP_REQUIRED
225 | ? `${settings.host}:${settings.port}`
226 | : Config.API_HOST;
227 | const { response, abortController } = API.sendMessage(host, {
228 | text: finalTranscript,
229 | parentMessageId: conversationRef.current.currentMessageId || undefined,
230 | });
231 | abortRef.current = abortController;
232 |
233 | response
234 | .then((res) => res.json())
235 | .then((res: CreateChatGPTMessageResponse) => {
236 | conversationRef.current.currentMessageId = res.messageId;
237 | setMessages((oldMessages) => [
238 | ...oldMessages,
239 | { type: 'response', text: res.answer },
240 | ]);
241 | speak(res.answer);
242 | })
243 | .catch((err: unknown) => {
244 | console.warn(err);
245 | let response: string;
246 |
247 | // Ignore aborted request
248 | if (abortController.signal.aborted) {
249 | return;
250 | }
251 |
252 | // Connection refused
253 | if (err instanceof TypeError && Config.IS_LOCAL_SETUP_REQUIRED) {
254 | response =
255 | 'Local server needs to be set up first. Click on the Settings button to see how.';
256 | setIsTooltipVisible(true);
257 | } else {
258 | response = 'Failed to get the response, please try again.';
259 | }
260 | setMessages((oldMessages) => [
261 | ...oldMessages,
262 | { type: 'response', text: response },
263 | ]);
264 | speak(response);
265 | })
266 | .finally(() => {
267 | setState(State.IDLE);
268 | });
269 | }, [state, finalTranscript, settings, speak]);
270 |
271 | if (!browserSupportsSpeechRecognition) {
272 | return (
273 |
274 | This browser doesn't support speech recognition. Please use Chrome.
275 |
276 | );
277 | }
278 |
279 | return (
280 |
281 |
282 | {/* w-64 so text will break after ChatGPT */}
283 |
284 | ChatGPT With Voice
285 |
286 |
287 |
292 |
293 |
294 |
295 | {messages.map(({ type, text }, index) => {
296 | const getIsActive = () => {
297 | switch (state) {
298 | case State.IDLE: {
299 | if (type === 'prompt') {
300 | return index === messages.length - 2;
301 | } else if (type === 'response') {
302 | return index === messages.length - 1;
303 | }
304 | return false;
305 | }
306 |
307 | case State.LISTENING:
308 | return false;
309 |
310 | case State.PROCESSING:
311 | return type === 'prompt' && index === messages.length - 1;
312 |
313 | default:
314 | return false;
315 | }
316 | };
317 | return (
318 |
325 | );
326 | })}
327 | {state === State.LISTENING && (
328 |
329 | )}
330 |
331 |
332 |
333 |
334 |
335 | {!isMicrophoneAvailable && (
336 |
337 |
340 |
341 | Please allow microphone permission for this app to work
342 | properly.
343 |
344 |
345 | )}
346 |
347 |
348 |
349 |
350 | {/**
351 | * We want a tooltip that positions itself against the Settings button.
352 | * However, we don't want the tooltip to display each time we hover on it.
353 | * So, an invisible div that is right on top of the Settings button is
354 | * used here as the tooltip's target.
355 | */}
356 |
357 |
361 |
362 |
363 |
364 |
365 |
371 | {isMobile
372 | ? 'Run a local server on Desktop to see this work.'
373 | : 'Set up local server first.'}
374 |
375 |
376 |
377 |
378 |
379 |
380 |
setIsModalVisible(true)}
383 | >
384 |
385 |
386 |
387 |
388 |
411 | {state === State.IDLE ? (
412 |
413 | ) : state === State.LISTENING ? (
414 |
417 | ) : state === State.PROCESSING ? (
418 |
419 |
420 |
421 | ) : null}
422 |
423 |
424 |
425 |
426 |
427 |
428 |
429 |
430 | {/* Settings modal */}
431 |
432 |
433 |
434 |
439 |
440 | Settings
441 |
442 |
443 | {Config.IS_LOCAL_SETUP_REQUIRED && (
444 |
445 | Set up local server on Desktop in 3 easy steps.
446 |
447 | )}
448 |
449 |
450 | {Config.IS_LOCAL_SETUP_REQUIRED && (
451 |
452 |
Step 1
453 |
454 | Clone chatgpt-server
repo.
455 |
456 |
457 | git clone https://github.com/sonngdev/chatgpt-server.git
458 |
459 |
460 |
Step 2
461 |
462 | Create .env
file in the project's root. You
463 | need an{' '}
464 |
465 | OpenAI account
466 |
467 | .
468 |
469 |
470 | {[
471 | 'PORT=8000 # Or whichever port available',
472 | 'OPENAI_EMAIL=""',
473 | 'OPENAI_PASSWORD=""',
474 | ].join('\n')}
475 |
476 |
477 |
Step 3
478 |
479 | Start the server - done! Make sure you are using Node 18 or
480 | higher.
481 |
482 |
483 | {['npm install', 'npm run build', 'npm run start'].join(
484 | '\n',
485 | )}
486 |
487 |
488 | )}
489 |
490 |
491 | {Config.IS_LOCAL_SETUP_REQUIRED && isDesktop && (
492 |
493 |
Server
494 |
495 |
496 | Host
497 |
498 | {
502 | setSettings({ ...settings, host: e.target.value });
503 | }}
504 | className="border border-dark border-r-0 rounded-l-md bg-transparent p-2 flex-1"
505 | />
506 | resetSetting('host')}
510 | >
511 | Reset
512 |
513 |
514 |
515 |
516 | Port
517 |
518 | {
523 | setSettings({
524 | ...settings,
525 | port: Number(e.target.value),
526 | });
527 | }}
528 | className="border border-dark border-r-0 rounded-l-md bg-transparent p-2 flex-1"
529 | />
530 | resetSetting('port')}
534 | >
535 | Reset
536 |
537 |
538 |
539 |
540 |
541 |
542 | This app will find the server at{' '}
543 | {`${settings.host}:${settings.port}`}
544 |
545 |
546 | )}
547 |
548 |
549 |
Voice
550 |
551 |
552 | Name
553 |
554 | {
557 | setSettings({
558 | ...settings,
559 | voiceURI: value,
560 | });
561 | }}
562 | >
563 |
568 |
569 |
570 |
571 |
572 |
573 |
574 |
575 |
576 |
577 |
578 |
579 | {Object.entries(availableVoices).map(
580 | ([group, voicesInGroup], index) => (
581 |
582 | {index > 0 && (
583 |
584 | )}
585 |
586 |
587 |
588 | {group}
589 |
590 | {voicesInGroup.map((voice) => (
591 |
596 |
597 | {voice.name}
598 |
599 |
600 |
601 |
602 |
603 | ))}
604 |
605 |
606 | ),
607 | )}
608 |
609 |
610 |
611 |
612 |
613 |
614 |
615 | resetSetting('voiceURI')}
619 | >
620 | Reset
621 |
622 |
623 |
624 |
625 |
626 | Speed
627 |
628 |
{
633 | setSettings({ ...settings, voiceSpeed: newSpeed });
634 | }}
635 | max={2}
636 | min={0.5}
637 | step={0.1}
638 | aria-label="Voice speed"
639 | >
640 |
641 |
642 |
643 |
644 |
645 |
646 | {`${settings.voiceSpeed.toFixed(2)}x`}
647 |
648 |
resetSetting('voiceSpeed')}
651 | >
652 | Reset
653 |
654 |
655 |
656 |
657 |
speak('It was a dark and stormy night')}
661 | >
662 |
663 | Try speaking
664 |
665 |
666 |
667 |
668 |
669 |
670 |
675 |
676 |
677 |
678 |
679 |
680 |
681 |
682 | );
683 | }
684 |
685 | export default App;
686 |
--------------------------------------------------------------------------------
/src/design_system/Button.tsx:
--------------------------------------------------------------------------------
1 | import { ButtonHTMLAttributes, DetailedHTMLProps, forwardRef } from 'react';
2 |
3 | interface ButtonProps
4 | extends DetailedHTMLProps<
5 | ButtonHTMLAttributes<HTMLButtonElement>,
6 | HTMLButtonElement
7 | > {
8 | size?: 'large' | 'normal' | 'small';
9 | variant?: 'outline' | 'solid';
10 | iconOnly?: boolean;
11 | }
12 |
13 | const Button = forwardRef<HTMLButtonElement, ButtonProps>((props, ref) => {
14 | const {
15 | size = 'normal',
16 | variant = 'outline',
17 | iconOnly = true,
18 | className = '',
19 | ...rest
20 | } = props;
21 |
22 | const getClassNameFromSize = () => {
23 | if (size === 'normal') {
24 | if (iconOnly) {
25 | return 'w-11 h-11';
26 | }
27 | return 'px-3 py-2 rounded-md';
28 | }
29 | if (size === 'small') {
30 | if (iconOnly) {
31 | return 'w-6 h-6';
32 | }
33 | return 'px-2 py-1 text-xs rounded-sm';
34 | }
35 | return '';
36 | };
37 |
38 | const getClassNameFromVariant = () => {
39 | if (variant === 'outline') {
40 | return 'border border-dark bg-transparent hover:opacity-60 focus:opacity-60';
41 | }
42 | return '';
43 | };
44 |
45 | const getClassNameFromIconOnly = () => {
46 | if (iconOnly) {
47 | return 'rounded-full';
48 | }
49 | return '';
50 | };
51 |
52 | const cn = [
53 | getClassNameFromSize(),
54 | getClassNameFromVariant(),
55 | getClassNameFromIconOnly(),
56 | 'flex justify-center items-center transition-opacity',
57 | className,
58 | ].join(' ');
59 |
60 | return <button ref={ref} className={cn} {...rest} />;
61 | });
62 |
63 | export default Button;
64 |
--------------------------------------------------------------------------------
/src/design_system/Message.tsx:
--------------------------------------------------------------------------------
1 | import { KeyboardEventHandler } from 'react';
2 | import { DollarSign, Terminal } from 'react-feather';
3 |
4 | interface MessageProps {
5 | type: 'prompt' | 'response';
6 | text: string;
7 | isActive: boolean;
8 | onClick?(text: string): void;
9 | }
10 |
11 | export default function Message({
12 | type,
13 | text,
14 | isActive,
15 | onClick,
16 | }: MessageProps) {
17 | const handleClick = () => {
18 | if (onClick) {
19 | onClick(text);
20 | }
21 | };
22 |
23 | const handleKeyDown: KeyboardEventHandler = (e) => {
24 | if (e.key === 'Enter' && onClick) {
25 | onClick(text);
26 | }
27 | };
28 |
29 | return (
30 |
39 |
40 | {type === 'prompt' ? (
41 |
42 |
43 |
44 | ) : (
45 |
46 |
47 |
48 | )}
49 |
50 |
{text}
51 |
52 | );
53 | }
54 |
--------------------------------------------------------------------------------
/src/design_system/SyntaxHighlighter.tsx:
--------------------------------------------------------------------------------
1 | import {
2 | Prism as SH,
3 | SyntaxHighlighterProps as SHProps,
4 | } from 'react-syntax-highlighter';
5 |
6 | /**
7 | * Based on Atom Dark stylesheet
8 | * @see https://github.com/react-syntax-highlighter/react-syntax-highlighter/blob/699056430175e7a09668f85b8ccad9f18b186066/src/styles/prism/atom-dark.js
9 | */
10 | const defaultStyle = {
11 | 'code[class*="language-"]': {
12 | color: '#191308', // Dark
13 | fontFamily: '"DM Mono", monospace',
14 | direction: 'ltr',
15 | textAlign: 'left',
16 | whiteSpace: 'pre',
17 | wordSpacing: 'normal',
18 | wordBreak: 'normal',
19 | lineHeight: '1.5',
20 | MozTabSize: '4',
21 | OTabSize: '4',
22 | tabSize: '4',
23 | WebkitHyphens: 'none',
24 | MozHyphens: 'none',
25 | msHyphens: 'none',
26 | hyphens: 'none',
27 | },
28 | 'pre[class*="language-"]': {
29 | color: '#191308', // Dark
30 | fontFamily: '"DM Mono", monospace',
31 | direction: 'ltr',
32 | textAlign: 'left',
33 | whiteSpace: 'pre',
34 | wordSpacing: 'normal',
35 | wordBreak: 'normal',
36 | lineHeight: '1.5',
37 | MozTabSize: '4',
38 | OTabSize: '4',
39 | tabSize: '4',
40 | WebkitHyphens: 'none',
41 | MozHyphens: 'none',
42 | msHyphens: 'none',
43 | hyphens: 'none',
44 | padding: '0.5em',
45 | margin: '.5em 0',
46 | overflow: 'auto',
47 | borderRadius: '0.375em',
48 | background: 'transparent',
49 | border: 'solid 1px #191308', // Dark
50 | },
51 | ':not(pre) > code[class*="language-"]': {
52 | background: '#191308', // Dark
53 | padding: '.1em',
54 | borderRadius: '.3em',
55 | },
56 | comment: {
57 | color: '#7C7C7C',
58 | },
59 | prolog: {
60 | color: '#7C7C7C',
61 | },
62 | doctype: {
63 | color: '#7C7C7C',
64 | },
65 | cdata: {
66 | color: '#7C7C7C',
67 | },
68 | punctuation: {
69 | color: '#191308', // Dark
70 | },
71 | '.namespace': {
72 | Opacity: '.7',
73 | },
74 | property: {
75 | color: '#96CBFE',
76 | },
77 | keyword: {
78 | color: '#96CBFE',
79 | },
80 | tag: {
81 | color: '#96CBFE',
82 | },
83 | 'class-name': {
84 | color: '#FFFFB6',
85 | textDecoration: 'underline',
86 | },
87 | boolean: {
88 | color: '#99CC99',
89 | },
90 | constant: {
91 | color: '#99CC99',
92 | },
93 | symbol: {
94 | color: '#f92672',
95 | },
96 | deleted: {
97 | color: '#f92672',
98 | },
99 | number: {
100 | color: '#C64191', // Accent 2
101 | },
102 | selector: {
103 | color: '#028090', // Accent 1
104 | },
105 | 'attr-name': {
106 | color: '#028090', // Accent 1
107 | },
108 | string: {
109 | color: '#028090', // Accent 1
110 | },
111 | char: {
112 | color: '#028090', // Accent 1
113 | },
114 | builtin: {
115 | color: '#028090', // Accent 1
116 | },
117 | inserted: {
118 | color: '#028090', // Accent 1
119 | },
120 | variable: {
121 | color: '#191308', // Dark
122 | },
123 | operator: {
124 | color: '#191308', // Dark
125 | },
126 | entity: {
127 | color: '#FFFFB6',
128 | cursor: 'help',
129 | },
130 | url: {
131 | color: '#96CBFE',
132 | },
133 | '.language-css .token.string': {
134 | color: '#87C38A',
135 | },
136 | '.style .token.string': {
137 | color: '#87C38A',
138 | },
139 | atrule: {
140 | color: '#F9EE98',
141 | },
142 | 'attr-value': {
143 | color: '#F9EE98',
144 | },
145 | function: {
146 | color: '#191308', // Dark
147 | },
148 | regex: {
149 | color: '#E9C062',
150 | },
151 | important: {
152 | color: '#fd971f',
153 | fontWeight: 'bold',
154 | },
155 | bold: {
156 | fontWeight: 'bold',
157 | },
158 | italic: {
159 | fontStyle: 'italic',
160 | },
161 | };
162 |
163 | export default function SyntaxHighlighter(props: SHProps) {
164 | const { style = defaultStyle, ...rest } = props;
165 | // @ts-ignore
166 | return <SH style={style} {...rest} />;
167 | }
168 |
--------------------------------------------------------------------------------
/src/hooks/useVoices.ts:
--------------------------------------------------------------------------------
1 | import { useEffect, useState } from 'react';
2 | import { isMobile, isSafari } from 'react-device-detect';
3 |
4 | export default function useVoices() {
5 | const [voices, setVoices] = useState<SpeechSynthesisVoice[]>([]);
6 |
7 | const defaultVoice = voices.find(
8 | (voice) => voice.default && voice.lang.startsWith('en-'),
9 | );
10 |
11 | // Display voices when they become available
12 | useEffect(() => {
13 | const updateVoiceSettings = () => {
14 | const newVoices = window.speechSynthesis.getVoices();
15 | setVoices(newVoices);
16 | };
17 |
18 | // Safari doesn't support the `voiceschanged` event, so we have to
19 | // periodically check whether voices have loaded.
20 | // Neither does any mobile browser on iOS.
21 | if (isSafari || isMobile) {
22 | let interval = setInterval(() => {
23 | const newVoices = window.speechSynthesis.getVoices();
24 | if (newVoices.length > 0) {
25 | clearInterval(interval);
26 | updateVoiceSettings();
27 | }
28 | }, 100);
29 | // Stop checking after 10 seconds
30 | setTimeout(() => clearInterval(interval), 10_000);
31 |
32 | return () => clearInterval(interval);
33 | }
34 |
35 | window.speechSynthesis.addEventListener(
36 | 'voiceschanged',
37 | updateVoiceSettings,
38 | );
39 |
40 | return () => {
41 | window.speechSynthesis.removeEventListener(
42 | 'voiceschanged',
43 | updateVoiceSettings,
44 | );
45 | };
46 | }, []);
47 |
48 | return { voices, defaultVoice };
49 | }
50 |
--------------------------------------------------------------------------------
/src/index.css:
--------------------------------------------------------------------------------
1 | @import url('https://fonts.googleapis.com/css2?family=DM+Mono:wght@400;500&family=Major+Mono+Display&display=swap');
2 |
3 | @tailwind base;
4 | @tailwind components;
5 | @tailwind utilities;
6 |
7 | @layer base {
8 | :root {
9 | @apply font-mono;
10 | @apply bg-light;
11 | @apply text-dark;
12 | }
13 |
14 | body {
15 | @apply text-sm;
16 | }
17 |
18 | a {
19 | @apply underline;
20 | @apply underline-offset-2;
21 | @apply cursor-pointer;
22 | }
23 |
24 | code {
25 | @apply text-accent2;
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/src/lib/api.ts:
--------------------------------------------------------------------------------
1 | interface SendMessagePayload {
2 | text: string;
3 | parentMessageId?: string;
4 | }
5 |
6 | class APIClient {
7 | sendMessage(host: string, payload: SendMessagePayload) {
8 | const abortController = new AbortController();
9 |
10 | const response = fetch(`${host}/chatgpt/messages`, {
11 | method: 'POST',
12 | headers: {
13 | 'Content-Type': 'application/json',
14 | },
15 | body: JSON.stringify(payload),
16 | signal: abortController.signal,
17 | });
18 |
19 | return { response, abortController };
20 | }
21 | }
22 |
23 | const API = new APIClient();
24 |
25 | export default API;
26 |
--------------------------------------------------------------------------------
/src/lib/config.ts:
--------------------------------------------------------------------------------
1 | if (import.meta.env.VITE_IS_LOCAL_SETUP_REQUIRED === undefined) {
2 | throw new Error('Env variable VITE_IS_LOCAL_SETUP_REQUIRED is required.');
3 | }
4 | if (import.meta.env.VITE_API_HOST === undefined) {
5 | throw new Error('Env variable VITE_API_HOST is required.');
6 | }
7 |
8 | const Config = {
9 | IS_LOCAL_SETUP_REQUIRED: Boolean(
10 | Number(import.meta.env.VITE_IS_LOCAL_SETUP_REQUIRED),
11 | ),
12 | API_HOST: import.meta.env.VITE_API_HOST,
13 | };
14 |
15 | export default Config;
16 |
--------------------------------------------------------------------------------
/src/lib/storage.ts:
--------------------------------------------------------------------------------
1 | const LOCAL_STORAGE_KEY = 'CHATGPT_WITH_VOICE';
2 |
3 | interface PersistableData {
4 | [key: string]: any;
5 | }
6 |
7 | class StorageManager {
8 | save(data: PersistableData) {
9 | const serialized = JSON.stringify(data);
10 | localStorage.setItem(LOCAL_STORAGE_KEY, serialized);
11 | }
12 |
13 | load(): PersistableData | null {
14 | const serialized = localStorage.getItem(LOCAL_STORAGE_KEY);
15 | if (!serialized) {
16 | return null;
17 | }
18 | try {
19 | return JSON.parse(serialized);
20 | } catch {
21 | return null;
22 | }
23 | }
24 | }
25 |
26 | const Storage = new StorageManager();
27 |
28 | export default Storage;
29 |
--------------------------------------------------------------------------------
/src/lib/voice.ts:
--------------------------------------------------------------------------------
1 | import SpeechRecognition from 'react-speech-recognition';
2 |
3 | interface SpeakOptions {
4 | voice?: SpeechSynthesisVoice;
5 | rate?: number;
6 | }
7 |
8 | class VoiceManager {
9 | private isAutoplayEnabled = false;
10 |
11 | enableAutoplay() {
12 | if (!this.isAutoplayEnabled) {
13 | this.speak('');
14 | this.isAutoplayEnabled = true;
15 | }
16 | }
17 |
18 | startListening() {
19 | window.speechSynthesis.cancel();
20 | SpeechRecognition.startListening();
21 | }
22 |
23 | stopListening() {
24 | SpeechRecognition.stopListening();
25 | }
26 |
27 | speak(text: string, options: SpeakOptions | undefined = undefined) {
28 | window.speechSynthesis.cancel();
29 | const utterance = new SpeechSynthesisUtterance(text);
30 | if (options?.voice) {
31 | utterance.voice = options.voice;
32 | }
33 | if (options?.rate) {
34 | utterance.rate = options.rate;
35 | }
36 | window.speechSynthesis.speak(utterance);
37 | }
38 |
39 | idle() {
40 | window.speechSynthesis.cancel();
41 | SpeechRecognition.abortListening();
42 | }
43 | }
44 |
45 | const Voice = new VoiceManager();
46 |
47 | export default Voice;
48 |
--------------------------------------------------------------------------------
/src/main.tsx:
--------------------------------------------------------------------------------
1 | // Keep this import at the top of this file
2 | // for 'react-speech-recognition' to work.
3 | import 'regenerator-runtime/runtime';
4 |
5 | import React from 'react'
6 | import ReactDOM from 'react-dom/client'
7 | import App from './App'
8 | import './index.css'
9 |
10 | ReactDOM.createRoot(document.getElementById('root') as HTMLElement).render(
11 | <React.StrictMode>
12 | <App />
13 | </React.StrictMode>,
14 | )
15 |
--------------------------------------------------------------------------------
/src/vite-env.d.ts:
--------------------------------------------------------------------------------
1 | /// <reference types="vite/client" />
2 |
3 | interface ImportMetaEnv {
4 | readonly VITE_IS_LOCAL_SETUP_REQUIRED: string;
5 | readonly VITE_API_HOST: string;
6 | }
7 |
8 | interface ImportMeta {
9 | readonly env: ImportMetaEnv;
10 | }
11 |
--------------------------------------------------------------------------------
/tailwind.config.cjs:
--------------------------------------------------------------------------------
1 | /** @type {import('tailwindcss').Config} */
2 | const colors = require('tailwindcss/colors');
3 |
4 | module.exports = {
5 | content: ['./index.html', './src/**/*.{js,ts,jsx,tsx}'],
6 | theme: {
7 | extend: {
8 | fontFamily: {
9 | mono: ['DM Mono', 'monospace'],
10 | title: ['Major Mono Display', 'monospace'],
11 | },
12 | colors: {
13 | light: '#FFFCE8',
14 | dark: '#191308',
15 | accent1: '#028090',
16 | accent2: '#C64191',
17 | danger: colors.red[700],
18 | },
19 | boxShadow: {
20 | solid: '3px 3px 0 #191308',
21 | },
22 | animation: {
23 | blink: 'blink 2s infinite',
24 | 'spin-2': 'spin 2s linear infinite',
25 | 'fade-in': 'fade-in 300ms cubic-bezier(0.16, 1, 0.3, 1)',
26 | 'rise-up': 'rise-up 300ms cubic-bezier(0.16, 1, 0.3, 1)',
27 | },
28 | keyframes: {
29 | blink: {
30 | '0%, 100%': { opacity: 0 },
31 | '50%': { opacity: 1 },
32 | },
33 | 'fade-in': {
34 | '0%': { opacity: 0 },
35 | '100%': { opacity: 100 },
36 | },
37 | 'rise-up': {
38 | '0%': { opacity: 0, transform: 'translate(-50%, -48%) scale(0.96)' },
39 | '100%': { opacity: 1, transform: 'translate(-50%, -50%) scale(1)' },
40 | },
41 | },
42 | },
43 | },
44 | plugins: [],
45 | };
46 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "ESNext",
4 | "useDefineForClassFields": true,
5 | "lib": ["DOM", "DOM.Iterable", "ESNext"],
6 | "allowJs": false,
7 | "skipLibCheck": true,
8 | "esModuleInterop": false,
9 | "allowSyntheticDefaultImports": true,
10 | "strict": true,
11 | "forceConsistentCasingInFileNames": true,
12 | "module": "ESNext",
13 | "moduleResolution": "Node",
14 | "resolveJsonModule": true,
15 | "isolatedModules": true,
16 | "noEmit": true,
17 | "jsx": "react-jsx"
18 | },
19 | "include": ["src"],
20 | "references": [{ "path": "./tsconfig.node.json" }]
21 | }
22 |
--------------------------------------------------------------------------------
/tsconfig.node.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "composite": true,
4 | "module": "ESNext",
5 | "moduleResolution": "Node",
6 | "allowSyntheticDefaultImports": true
7 | },
8 | "include": ["vite.config.ts"]
9 | }
10 |
--------------------------------------------------------------------------------
/vite.config.ts:
--------------------------------------------------------------------------------
1 | import { defineConfig } from 'vite'
2 | import react from '@vitejs/plugin-react'
3 |
4 | // https://vitejs.dev/config/
5 | export default defineConfig({
6 | plugins: [react()],
7 | })
8 |
--------------------------------------------------------------------------------