├── public
│   ├── icon16.png
│   ├── icon32.png
│   ├── icon64.png
│   ├── icon128.png
│   ├── background.js
│   ├── index.html
│   ├── content.css
│   ├── manifest.json
│   ├── README.md
│   └── content.js
├── src
│   ├── fonts
│   │   └── UbuntuMono-Regular.ttf
│   ├── index.css
│   ├── index.js
│   ├── App.css
│   ├── themeOptions.js
│   └── App.js
├── .prettierrc
├── .gitignore
├── package.json
└── README.md
/public/icon16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ordinath/Whisper_to_ChatGPT/HEAD/public/icon16.png
--------------------------------------------------------------------------------
/public/icon32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ordinath/Whisper_to_ChatGPT/HEAD/public/icon32.png
--------------------------------------------------------------------------------
/public/icon64.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ordinath/Whisper_to_ChatGPT/HEAD/public/icon64.png
--------------------------------------------------------------------------------
/public/icon128.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ordinath/Whisper_to_ChatGPT/HEAD/public/icon128.png
--------------------------------------------------------------------------------
/src/fonts/UbuntuMono-Regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ordinath/Whisper_to_ChatGPT/HEAD/src/fonts/UbuntuMono-Regular.ttf
--------------------------------------------------------------------------------
/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "trailingComma": "es5",
3 | "tabWidth": 4,
4 | "semi": true,
5 | "singleQuote": true,
6 | "printWidth": 160,
7 | "endOfLine": "lf"
8 | }
--------------------------------------------------------------------------------
/src/index.css:
--------------------------------------------------------------------------------
/* Use a locally-installed Ubuntu Mono when available, otherwise load the bundled TTF. */
@font-face {
    font-family: 'Ubuntu Mono';
    src: local('UbuntuMono-Regular'), url(./fonts/UbuntuMono-Regular.ttf) format('truetype');
}

/* Dark grey application background. */
body {
    background-color: rgb(51, 49, 53);
}
9 |
--------------------------------------------------------------------------------
/src/index.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import ReactDOM from 'react-dom/client';
3 | import './index.css';
4 | import App from './App';
5 |
6 | const root = ReactDOM.createRoot(document.getElementById('root'));
7 | root.render(
8 |
9 |
10 |
11 | );
12 |
--------------------------------------------------------------------------------
/public/background.js:
--------------------------------------------------------------------------------
// Log once when the extension is first installed (visible in the
// service-worker console).
// eslint-disable-next-line no-undef
chrome.runtime.onInstalled.addListener(function () {
    console.log('Extension installed!');
});

// Echo every runtime message to the service-worker console for debugging.
// eslint-disable-next-line no-undef
chrome.runtime.onMessage.addListener(function (message, sender, sendResponse) {
    console.log(message);
});
--------------------------------------------------------------------------------
/public/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
45 |
46 |
47 |
48 |
49 |
54 |
`;
55 | }
56 |
57 | return `
58 |
59 |
60 |
61 | Enjoying Whisper To ChatGPT?
62 |
63 |
64 | Try our
Desktop App and dictate anywhere!
65 |
66 |
67 |
72 |
`;
73 | };
74 |
/**
 * Write a namespaced error to the console.
 * @param {string} message - Human-readable description of what failed.
 * @param {*} error - The underlying error object or value.
 */
function logError(message, error) {
    const prefixed = `[Whisper to ChatGPT] ${message}`;
    console.error(prefixed, error);
}
78 |
/**
 * Read a single value from chrome.storage.sync.
 * @param {string} key - Storage key to look up.
 * @returns {Promise<*>} Resolves with the stored value, or undefined when absent.
 */
async function retrieveFromStorage(key) {
    const result = await new Promise((settle) => {
        chrome.storage.sync.get(key, settle);
    });
    return result[key];
}
86 |
/**
 * Records microphone audio, uploads it to the OpenAI Whisper API, and inserts
 * the transcription into ChatGPT's prompt input. Also owns the promotional
 * popup lifecycle and the optional keyboard shortcut for toggling recording.
 *
 * Depends on module-level constants declared elsewhere in this file:
 * SVG_* icon markup, TRANSCRIPTION_URL / TRANSLATION_URL endpoints,
 * USAGE_COUNT_KEY / POPUP_* storage keys, and the *_MICROPHONE_BUTTON_CLASSES
 * class-name lists.
 */
class AudioRecorder {
    constructor() {
        this.recording = false; // true while audio capture is in progress
        this.mediaRecorder = null; // active MediaRecorder, or null when idle
        this.textarea = null; // ChatGPT prompt element the result is inserted into
        this.micButton = null; // the injected microphone <button>
        this.token = null; // unused cache slot; the token is re-read per request
        this.snippetButtons = []; // legacy snippet buttons repositioned on resize
        this.popupContainer = null; // host element for promo popups
        this.activePopup = null; // popup currently in the DOM, if any
    }

    /**
     * Registers the configurable keyboard shortcut that toggles recording.
     * No-op when the shortcut is disabled or no key has been configured.
     */
    async listenForKeyboardShortcut() {
        if (!(await this.shortcutEnabled())) return;

        const shortcutFirstKey = await retrieveFromStorage('config_shortcut_first_key');
        const shortcutFirstModifier = await retrieveFromStorage('config_shortcut_first_modifier');
        const shortcutSecondModifier = await retrieveFromStorage('config_shortcut_second_modifier');

        // FIX: without a configured key, `shortcutFirstKey.toUpperCase()` below
        // would throw a TypeError on every keydown in the original code.
        if (!shortcutFirstKey) return;

        document.addEventListener('keydown', (event) => {
            if (event.code !== `Key${shortcutFirstKey.toUpperCase()}`) return;
            if (shortcutFirstModifier && shortcutFirstModifier !== 'none' && !event[shortcutFirstModifier]) return;
            if (shortcutSecondModifier && shortcutSecondModifier !== 'none' && !event[shortcutSecondModifier]) return;

            event.preventDefault();

            // Find our microphone button based on the current UI structure.
            const micButton = document.querySelector('.microphone_button');
            if (micButton) {
                micButton.click();
            } else {
                // Button not injected yet: locate the prompt input, inject it,
                // then click after a small delay so the DOM settles.
                const promptTextarea = document.querySelector('div[contenteditable="true"][id="prompt-textarea"]');
                if (promptTextarea) {
                    addMicrophoneButton(promptTextarea, 'main');
                    setTimeout(() => {
                        const newMicButton = document.querySelector('.microphone_button');
                        if (newMicButton) {
                            newMicButton.click();
                        }
                    }, 100);
                }
            }
        });
    }

    /**
     * Builds the microphone <button> (stored on this.micButton) and wires it
     * to toggle recording on click.
     * @param {string} inputType - 'main' for the primary prompt input.
     * @param {string} version - 'PRO' selects the PRO styling for the main input.
     */
    createMicButton(inputType, version) {
        this.micButton = document.createElement('button');
        if (inputType === 'main') {
            this.micButton.className = `microphone_button ${version === 'PRO' ? PRO_MAIN_MICROPHONE_BUTTON_CLASSES : NON_PRO_MAIN_MICROPHONE_BUTTON_CLASSES}`;
        } else {
            this.micButton.className = `microphone_button ${SECONDARY_MICROPHONE_BUTTON_CLASSES}`;
        }
        this.micButton.innerHTML = SVG_MIC_HTML;
        this.micButton.addEventListener('click', (e) => {
            // Prevent the click from submitting the surrounding form.
            e.preventDefault();
            this.toggleRecording();
        });
    }

    /**
     * Repositions the legacy snippet buttons as the prompt element grows.
     * Assumes a 24px line height when deriving the row count — TODO confirm.
     */
    updateButtonGridPosition() {
        const textareaRows = this.textarea.clientHeight / 24;

        if (this.snippetButtons) {
            this.snippetButtons.forEach((buttonObj) => {
                buttonObj.y = buttonObj.initialY - (textareaRows - 1) * 1.5;
                buttonObj.button.style.transform = `translate(${buttonObj.x}rem, ${buttonObj.y}rem)`;
            });
        }
    }

    /** Starts watching the prompt element so snippet buttons track its height. */
    observeTextareaResize() {
        this.resizeObserver = new ResizeObserver(() => {
            this.updateButtonGridPosition();
        });
        this.resizeObserver.observe(this.textarea);
    }

    /** @returns {Promise<boolean|undefined>} whether auto-download of recordings is enabled. */
    async downloadEnabled() {
        return await retrieveFromStorage('config_enable_download');
    }

    /** @returns {Promise<boolean|undefined>} whether translation (vs transcription) is enabled. */
    async translationEnabled() {
        return await retrieveFromStorage('config_enable_translation');
    }

    /**
     * Returns whether the keyboard shortcut is enabled. As a side effect,
     * seeds a per-platform default key combination for first-time users.
     */
    async shortcutEnabled() {
        const shortcutEnabled = await retrieveFromStorage('config_enable_shortcut');
        // Initialize the shortcut keys if they are not set (first-time user).
        const shortcutFirstKey = await retrieveFromStorage('config_shortcut_first_key');
        const shortcutFirstModifier = await retrieveFromStorage('config_shortcut_first_modifier');
        const shortcutSecondModifier = await retrieveFromStorage('config_shortcut_second_modifier');
        if (!shortcutFirstKey && !shortcutFirstModifier && !shortcutSecondModifier) {
            // FIX: navigator.userAgentData is not available in all browsers
            // (e.g. Firefox); fall back to navigator.platform instead of
            // throwing a TypeError here.
            const platform = (navigator.userAgentData?.platform ?? navigator.platform ?? '').toLowerCase();
            if (platform.indexOf('mac') > -1) {
                // FIX: omit the no-op callback so chrome.storage returns a
                // promise and the await actually waits for the write.
                await chrome.storage?.sync.set({
                    config_shortcut_first_modifier: 'ctrlKey',
                    config_shortcut_first_key: 'r',
                });
            } else if (platform.indexOf('win') > -1) {
                await chrome.storage?.sync.set({
                    config_shortcut_first_modifier: 'shiftKey',
                    config_shortcut_second_modifier: 'altKey',
                    config_shortcut_first_key: 'r',
                });
            }
        }
        return shortcutEnabled;
    }

    /** @returns {Promise<string|undefined>} the stored OpenAI API token. */
    async retrieveToken() {
        return await retrieveFromStorage('openai_token');
    }

    /**
     * Returns the currently selected Whisper prompt, seeding a default prompt
     * list for first-time users (and migrating the single prompt stored by
     * version 1.0 when present).
     * @returns {Promise<{title: string, content: string}>}
     */
    async getSelectedPrompt() {
        const selectedPrompt = await retrieveFromStorage('openai_selected_prompt');
        const prompts = await retrieveFromStorage('openai_prompts');
        // if (!prompts) we initialize the prompts (first-time user)
        if (!prompts || !selectedPrompt) {
            // Backwards compatibility with the 1.0 version.
            const previousVersionPrompt = await retrieveFromStorage('openai_prompt');

            const initialPrompt = {
                title: 'Initial prompt',
                content: previousVersionPrompt
                    ? previousVersionPrompt
                    : `The transcript is about OpenAI which makes technology like DALL·E, GPT-3, and ChatGPT with the hope of one day building an AGI system that benefits all of humanity.`,
            };
            // FIX: promise form (no callback) so the await is meaningful.
            await chrome.storage?.sync.set({
                openai_prompts: [initialPrompt],
                openai_selected_prompt: 0,
            });
            return initialPrompt;
        } else {
            return prompts[selectedPrompt];
        }
    }

    /**
     * Bumps the stored usage counter and decides whether the promo popup
     * should be shown: once at POPUP_THRESHOLD uses, then every
     * POPUP_FREQUENCY uses thereafter, unless permanently dismissed.
     */
    async incrementUsageCount() {
        const currentCount = (await retrieveFromStorage(USAGE_COUNT_KEY)) || 0;
        const newCount = currentCount + 1;
        await chrome.storage.sync.set({ [USAGE_COUNT_KEY]: newCount });

        const dismissed = await retrieveFromStorage(POPUP_DISMISSED_KEY);
        const lastShown = (await retrieveFromStorage(POPUP_LAST_SHOWN_KEY)) || 0;

        if (!dismissed) {
            // Show popup for first-time users at the threshold.
            if (newCount >= POPUP_THRESHOLD && lastShown === 0) {
                this.showPopup(true);
                await chrome.storage.sync.set({ [POPUP_LAST_SHOWN_KEY]: newCount });
            }
            // After the threshold, show popup every POPUP_FREQUENCY uses.
            else if (newCount >= POPUP_THRESHOLD && newCount - lastShown >= POPUP_FREQUENCY) {
                this.showPopup(false);
                await chrome.storage.sync.set({ [POPUP_LAST_SHOWN_KEY]: newCount });
            }
        }
    }

    /**
     * Shows the desktop-app promo popup inside this.popupContainer. At every
     * POPUP_MIN_CLOSES_FOR_DONT_SHOW-th close the popup offers a
     * "don't show again" checkbox that permanently dismisses it.
     * @param {boolean} firstTime - True when shown at the initial usage threshold.
     */
    async showPopup(firstTime = false) {
        // Only one popup at a time.
        if (this.activePopup && document.contains(this.activePopup)) {
            return;
        }

        const closeCount = (await retrieveFromStorage(POPUP_CLOSE_COUNT_KEY)) || 0;

        const popupElement = document.createElement('div');
        popupElement.className = 'whisper-popup';

        // Offer the "don't show again" checkbox after every Nth close.
        const showDontShowOption = closeCount > 0 && closeCount % POPUP_MIN_CLOSES_FOR_DONT_SHOW === 0;
        popupElement.innerHTML = getPopupHtml(showDontShowOption);
        const popup = popupElement.firstElementChild;

        if (this.popupContainer) {
            this.popupContainer.appendChild(popup);
            this.activePopup = popup; // remember the active popup
        }

        // Handle popup close (and the optional checkbox).
        const closeButton = popup.querySelector('.whisper-popup-close');
        if (closeButton) {
            closeButton.addEventListener('click', async (e) => {
                e.preventDefault();
                e.stopPropagation();

                if (showDontShowOption) {
                    // Permanently dismiss when the checkbox is ticked.
                    const checkbox = popup.querySelector('#whisper-dont-show');
                    if (checkbox && checkbox.checked) {
                        await chrome.storage.sync.set({ [POPUP_DISMISSED_KEY]: true });
                    }
                    // Reset the close counter after the "don't show again" round.
                    await chrome.storage.sync.set({ [POPUP_CLOSE_COUNT_KEY]: 0 });
                } else {
                    const newCloseCount = closeCount + 1;
                    await chrome.storage.sync.set({ [POPUP_CLOSE_COUNT_KEY]: newCloseCount });

                    // On hitting the threshold, immediately replace this popup
                    // with the "don't show again" variant.
                    if (newCloseCount % POPUP_MIN_CLOSES_FOR_DONT_SHOW === 0) {
                        this.activePopup = null;
                        popup.remove();
                        this.showPopup(false);
                        return;
                    }
                }

                // Record when the popup was last shown, then remove it.
                const currentCount = (await retrieveFromStorage(USAGE_COUNT_KEY)) || 0;
                await chrome.storage.sync.set({ [POPUP_LAST_SHOWN_KEY]: currentCount });
                this.activePopup = null;
                popup.remove();
            });
        }

        // Clear this.activePopup if the popup is removed from the DOM externally.
        const observer = new MutationObserver((mutations) => {
            mutations.forEach((mutation) => {
                mutation.removedNodes.forEach((node) => {
                    if (node === popup) {
                        this.activePopup = null;
                        observer.disconnect();
                    }
                });
            });
        });

        if (popup.parentNode) {
            observer.observe(popup.parentNode, { childList: true });
        }
    }

    /**
     * Requests microphone access, starts a MediaRecorder, and wires the
     * 'stop' handler that uploads the audio to the Whisper API and inserts
     * the transcription (or an error message) into the prompt.
     */
    async startRecording() {
        try {
            const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
            this.mediaRecorder = new MediaRecorder(stream);
            let chunks = [];
            this.mediaRecorder.addEventListener('dataavailable', (event) => chunks.push(event.data));

            this.mediaRecorder.addEventListener('stop', async () => {
                this.setButtonState('loading');
                const audioBlob = new Blob(chunks, { type: 'audio/webm' });
                if (await this.downloadEnabled()) {
                    downloadFile(audioBlob);
                }

                const storedToken = await this.retrieveToken();
                const storedPrompt = await this.getSelectedPrompt();
                const headers = new Headers({
                    Authorization: `Bearer ${storedToken}`,
                });
                const formData = new FormData();
                formData.append('file', audioBlob, 'recording.webm');
                formData.append('model', 'whisper-1');
                formData.append('prompt', storedPrompt.content);

                const requestOptions = {
                    method: 'POST',
                    headers,
                    body: formData,
                    redirect: 'follow',
                };

                // Transcription and translation share the request shape; only the URL differs.
                const requestUrl = (await this.translationEnabled()) ? TRANSLATION_URL : TRANSCRIPTION_URL;

                try {
                    const response = await fetch(requestUrl, requestOptions);
                    this.setButtonState('ready');
                    if (response.status === 200) {
                        const result = await response.json();
                        this.insertTextResult(result.text);
                    } else {
                        const errorMessage = getErrorMessage(response.status);
                        this.insertTextResult(errorMessage);
                    }
                } catch (error) {
                    this.insertTextResult('Network error! Please check your internet connection and try again.');
                } finally {
                    this.recording = false;
                    // Release the microphone so the browser indicator turns off.
                    stream.getTracks().forEach((track) => track.stop());
                }

                await this.incrementUsageCount();
            });

            this.mediaRecorder.start();
            this.setButtonState('recording');
            this.recording = true;
        } catch (error) {
            // getUserMedia rejection (permission denied, no device, etc.).
            console.error(error);
        }
    }

    /** Stops capture and restores the idle microphone icon. */
    stopRecording() {
        // FIX: guard against a null/already-stopped recorder so a stray toggle
        // (recording flag out of sync) cannot throw.
        if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
            this.mediaRecorder.stop();
        }
        this.micButton.innerHTML = SVG_MIC_HTML;
        this.recording = false;
    }

    /** Starts recording when idle, stops it when active. */
    toggleRecording() {
        if (!this.recording) {
            this.startRecording();
        } else {
            this.stopRecording();
        }
    }

    /**
     * Inserts transcribed text into the prompt element at the caret (when
     * focused) or appends it at the end, then fires an 'input' event so the
     * page's framework notices the change.
     * @param {string} resultText - Text returned by the Whisper API.
     */
    insertTextResult(resultText) {
        const inputElement = this.textarea;

        // Contenteditable div (current ChatGPT UI).
        if (inputElement.isContentEditable) {
            // Focusing first ensures a selection/caret exists in the element.
            inputElement.focus();

            const selection = window.getSelection();
            const range = selection.getRangeAt(0);

            const textNode = document.createTextNode(resultText);
            range.insertNode(textNode);

            // Move the caret to the end of the inserted text.
            range.setStartAfter(textNode);
            range.setEndAfter(textNode);
            selection.removeAllRanges();
            selection.addRange(range);

            // Notify ChatGPT that the content changed.
            const inputEvent = new Event('input', { bubbles: true, cancelable: true });
            inputElement.dispatchEvent(inputEvent);
        } else {
            // NOTE(review): this branch predates the contenteditable UI and is
            // presumably only reached on older ChatGPT layouts — confirm before
            // removing.
            const isInputFocused = document.activeElement === inputElement;

            if (isInputFocused) {
                // If focused, insert at the caret position.
                const selection = window.getSelection();
                const range = selection.getRangeAt(0);

                const textNode = document.createTextNode(resultText);
                range.insertNode(textNode);

                range.setStartAfter(textNode);
                range.setEndAfter(textNode);
                selection.removeAllRanges();
                selection.addRange(range);
            } else {
                // If not focused, append to the last paragraph (or the element itself).
                const lastParagraph = inputElement.querySelector('p:last-child') || inputElement;

                const textNode = document.createTextNode(resultText);
                lastParagraph.appendChild(textNode);

                // Place the caret after the appended text.
                const range = document.createRange();
                range.selectNodeContents(lastParagraph);
                range.collapse(false);
                const selection = window.getSelection();
                selection.removeAllRanges();
                selection.addRange(range);
            }

            // Trigger an input event to notify any listeners.
            const inputEvent = new Event('input', { bubbles: true, cancelable: true });
            inputElement.dispatchEvent(inputEvent);

            inputElement.focus();
        }
    }

    /**
     * Switches the mic button between 'recording', 'loading', and 'ready'
     * visuals, disabling it while a transcription request is in flight.
     * @param {string} state - One of 'recording', 'loading', 'ready'.
     */
    setButtonState(state) {
        const hoverClasses = ['hover:bg-gray-100', 'dark:hover:text-gray-400', 'dark:hover:bg-gray-900'];
        switch (state) {
            case 'recording':
                this.micButton.disabled = false;
                this.micButton.innerHTML = SVG_MIC_SPINNING_HTML;
                break;
            case 'loading':
                this.micButton.disabled = true;
                this.micButton.innerHTML = SVG_SPINNER_HTML;
                this.micButton.classList.remove(...hoverClasses);
                break;
            case 'ready':
            default:
                this.micButton.disabled = false;
                this.micButton.innerHTML = SVG_MIC_HTML;
                this.micButton.classList.add(...hoverClasses);
                break;
        }
    }
}
512 |
// Singleton recorder shared by every injection attempt.
let globalRecorder = null;

/**
 * Creates (or reuses) the global AudioRecorder and inserts the microphone
 * button, together with its popup host, into ChatGPT's prompt button area.
 * Silently returns when the button already exists or the expected DOM
 * structure cannot be found.
 * @param {HTMLElement} inputElement - The prompt input element.
 * @param {string} inputType - 'main' for the primary prompt, anything else for secondary inputs.
 */
function addMicrophoneButton(inputElement, inputType) {
    try {
        // Already injected? Nothing to do.
        if (document.querySelector(MIC_BUTTON_SELECTOR)) return;

        // The overall container is needed to locate the buttons area.
        const overallInputAndButtonContainer = inputElement.closest(PARENT_CONTAINER_SELECTOR);
        if (!overallInputAndButtonContainer) return;

        const buttonsArea = overallInputAndButtonContainer.querySelector(BUTTONS_AREA_SELECTOR);
        if (!buttonsArea) return;

        // Create the recorder once; afterwards only retarget its textarea.
        if (!globalRecorder) {
            globalRecorder = new AudioRecorder();
            globalRecorder.textarea = inputElement;
            globalRecorder.listenForKeyboardShortcut();
        } else {
            globalRecorder.textarea = inputElement;
        }

        globalRecorder.createMicButton(inputType, 'NON-PRO');

        // Wrapper that positions the popup relative to the mic button.
        const micWrapper = document.createElement('div');
        micWrapper.className = 'relative flex items-center';

        // Popup host, absolutely positioned to the left of the button.
        const popupContainer = document.createElement('div');
        popupContainer.className = 'whitespace-nowrap z-10';
        popupContainer.style.position = 'absolute';
        popupContainer.style.bottom = '0';
        popupContainer.style.right = '100%';
        popupContainer.style.marginRight = '0.5rem';

        // Container holding just the mic button.
        const micContainer = document.createElement('div');
        micContainer.className = 'min-w-9';
        micContainer.appendChild(globalRecorder.micButton);

        micWrapper.appendChild(popupContainer);
        micWrapper.appendChild(micContainer);

        // Place the complete wrapper first in the buttons row.
        buttonsArea.insertBefore(micWrapper, buttonsArea.firstChild);
        globalRecorder.popupContainer = popupContainer;
    } catch (error) {
        console.log('[Whisper to ChatGPT] Non-critical error in button addition:', error);
    }
}
576 |
/**
 * Injects the microphone button when the prompt input and its button area
 * are present and the button has not been added yet. Safe to call repeatedly
 * (idempotent), so it can run on every DOM mutation.
 */
function tryAddButton() {
    try {
        if (document.querySelector(MIC_BUTTON_SELECTOR)) return;

        const inputElement = document.querySelector(INPUT_SELECTOR);
        if (!inputElement) return;

        const container = inputElement.closest(PARENT_CONTAINER_SELECTOR);
        if (!container) return;

        // Only inject once the buttons area is actually rendered.
        if (container.querySelector(BUTTONS_AREA_SELECTOR)) {
            addMicrophoneButton(inputElement, 'main');
        }
    } catch (error) {
        console.log('[Whisper to ChatGPT] Non-critical error in tryAddButton:', error);
    }
}
599 |
/**
 * Watches the whole document for DOM changes and re-attempts microphone
 * button injection on every mutation (ChatGPT re-renders its UI frequently).
 */
function observeDOM() {
    try {
        const observer = new MutationObserver(() => {
            tryAddButton();
        });
        observer.observe(document.body, { childList: true, subtree: true });

        // Attempt once immediately in case the input already exists.
        tryAddButton();
    } catch (error) {
        logError('Failed to observe DOM', error);
    }
}
617 |
/**
 * Entry point: optionally clears storage in test builds, starts DOM
 * observation, and re-checks button injection whenever the prompt input
 * is clicked.
 */
async function init() {
    try {
        if (TESTING) {
            chrome.storage.sync.clear();
        }

        observeDOM();

        document.addEventListener('click', ({ target }) => {
            if (target.closest(INPUT_SELECTOR)) {
                tryAddButton();
            }
        });

        console.log('[Whisper to ChatGPT] Extension initialized successfully');
    } catch (error) {
        logError('Failed to initialize extension', error);
    }
}
637 |
/**
 * Triggers a browser download of the recorded audio blob.
 * The file name embeds the current date/time in en-US format,
 * e.g. "Recording 03/24/2023, 01:00 PM.webm".
 * @param {Blob} file - The recorded audio data.
 */
function downloadFile(file) {
    const fileName = `Recording ${new Date().toLocaleString('en-US', {
        year: 'numeric',
        month: '2-digit',
        day: '2-digit',
        hour: '2-digit',
        minute: '2-digit',
        hour12: true,
    })}.webm`;

    // Download via a temporary anchor element.
    const url = URL.createObjectURL(file);
    const a = document.createElement('a');
    a.href = url;
    a.download = fileName;
    a.click();

    // FIX: release the object URL so the blob can be garbage-collected; the
    // original leaked one object URL per download. Deferred so the click's
    // download has started before revocation.
    setTimeout(() => URL.revokeObjectURL(url), 1000);
}
655 |
/**
 * Maps an HTTP status code from the Whisper API to a user-facing message.
 * @param {number} status - HTTP response status code.
 * @returns {string} Explanation suitable for inserting into the prompt box.
 */
const getErrorMessage = (status) => {
    const messagesByStatus = {
        400: 'Bad request! The audio file may be too large or in an unsupported format.',
        401: 'Authentication error! Please check if your OpenAI API key is valid in the extension settings.',
        429: 'Too many requests to OpenAI server. Please wait a moment and try again.',
        500: 'OpenAI server error. Please try again later.',
        503: 'OpenAI service is temporarily unavailable. Please try again later.',
    };
    return messagesByStatus[status] ?? `Error ${status}: Unable to process audio. Please check your API key or try again later.`;
};
672 |
673 | init();
674 |
--------------------------------------------------------------------------------