11 | Story ID
12 |
13 | Story API Key
14 |
15 | only needed for draft or unpublished versions
16 |
17 |
18 | Version
19 |
20 | use -1 for draft, or undefined for most recent published
21 |
22 |
23 | StartGraphReferenceId
24 | only for pro stories
25 |
26 |
11 | Story ID
12 |
13 | Story API Key
14 |
15 | only needed for draft or unpublished versions
16 |
17 |
18 | Version
19 |
20 | use -1 for draft, or undefined for most recent published
21 |
22 |
23 | StartGraphReferenceId
24 | only for pro stories
25 |
26 |
11 | Story ID
12 |
13 | Story API Key
14 |
15 | only needed for draft or unpublished versions
16 |
17 |
18 | Version
19 |
20 | use -1 for draft, or undefined for most recent published
21 |
22 |
23 | StartGraphReferenceId
24 | only for pro stories
25 |
26 |
27 |
28 |
29 |
30 |
35 |
36 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@charisma-ai/sdk",
3 | "version": "7.0.1",
  "description": "Charisma.ai SDK for JavaScript (browser)",
5 | "source": "src/index.ts",
6 | "type": "module",
7 | "exports": "./dist/index.js",
8 | "types": "./dist/index.d.ts",
9 | "sideEffects": false,
10 | "repository": "https://github.com/charisma-ai/charisma-sdk-js",
11 | "author": "Charisma Entertainment Ltd (https://charisma.ai)",
12 | "license": "MIT",
13 | "keywords": [
14 | "ai",
15 | "character",
16 | "npc",
17 | "virtual beings",
18 | "voice"
19 | ],
20 | "files": [
21 | "dist/*"
22 | ],
23 | "scripts": {
24 | "build": "tsc && swc src -d dist",
25 | "cm": "git-cz",
26 | "lint": "eslint --fix --ext js,ts,tsx .",
27 | "prebuild": "rimraf dist",
28 | "prepublish": "pnpm run build",
29 | "semantic-release": "semantic-release",
30 | "test": "jest"
31 | },
32 | "dependencies": {
33 | "colyseus.js": "^0.14.13",
34 | "eventemitter3": "^5.0.1",
35 | "jwt-decode": "^3.1.2",
36 | "p-queue": "^7.3.4",
37 | "socket.io-client": "^4.7.5"
38 | },
39 | "devDependencies": {
40 | "@swc/cli": "^0.1.62",
41 | "@swc/core": "^1.3.57",
42 | "@types/jest": "^29.5.12",
43 | "@typescript-eslint/eslint-plugin": "^5.59.5",
44 | "@typescript-eslint/parser": "^5.59.5",
45 | "commitizen": "^4.3.0",
46 | "cz-conventional-changelog": "^3.3.0",
47 | "eslint": "^8.40.0",
48 | "eslint-config-airbnb-base": "^15.0.0",
49 | "eslint-config-prettier": "^8.8.0",
50 | "eslint-plugin-import": "^2.27.5",
51 | "eslint-plugin-jsx-a11y": "^6.7.1",
52 | "eslint-plugin-prettier": "^4.2.1",
53 | "husky": "^8.0.3",
54 | "jest": "^29.7.0",
55 | "jest-environment-jsdom": "^29.7.0",
56 | "lint-staged": "^13.2.2",
57 | "parcel": "2.0.0-nightly.1296",
58 | "prettier": "^2.8.8",
59 | "rimraf": "^5.0.0",
60 | "semantic-release": "^21.0.2",
61 | "ts-jest": "^29.2.3",
62 | "typescript": "^5.0.4"
63 | },
64 | "config": {
65 | "commitizen": {
66 | "path": "./node_modules/cz-conventional-changelog"
67 | }
68 | },
69 | "@parcel/resolver-default": {
70 | "packageExports": true
71 | },
72 | "packageManager": "pnpm@9.15.3+sha512.1f79bc245a66eb0b07c5d4d83131240774642caaa86ef7d0434ab47c0d16f66b04e21e0c086eb61e62c77efc4d7f7ec071afad3796af64892fae66509173893a"
73 | }
74 |
--------------------------------------------------------------------------------
/src/speech-types.ts:
--------------------------------------------------------------------------------
// Error codes the recognizer can report (the Web Speech API's
// SpeechRecognitionErrorCode values).
export type SpeechRecognitionErrorCode =
  | "aborted"
  | "audio-capture"
  | "bad-grammar"
  | "language-not-supported"
  | "network"
  | "no-speech"
  | "not-allowed"
  | "service-not-allowed";

// Init dictionary for constructing a SpeechRecognitionErrorEvent.
export interface SpeechRecognitionErrorEventInit extends EventInit {
  error: SpeechRecognitionErrorCode;
  message?: string;
}

// Init dictionary for constructing a SpeechRecognitionEvent.
export interface SpeechRecognitionEventInit extends EventInit {
  resultIndex?: number;
  results: SpeechRecognitionResultList;
}

// A single grammar entry: a source URI plus its relative weight.
export interface SpeechGrammar {
  src: string;
  weight: number;
}

// Array-like collection of SpeechGrammar entries.
export interface SpeechGrammarList {
  readonly length: number;
  addFromString(string: string, weight?: number): void;
  addFromURI(src: string, weight?: number): void;
  item(index: number): SpeechGrammar;
  [index: number]: SpeechGrammar;
}

// Event delivered to the recognizer's "error" handler.
export interface SpeechRecognitionErrorEvent extends Event {
  readonly error: SpeechRecognitionErrorCode;
  readonly message: string;
}

// Event delivered when the recognizer produces (interim or final) results.
export interface SpeechRecognitionEvent extends Event {
  readonly resultIndex: number;
  readonly results: SpeechRecognitionResultList;
}

// Maps recognizer event names to their event object types; used to type
// the addEventListener/removeEventListener overloads below.
interface SpeechRecognitionEventMap {
  audioend: Event;
  audiostart: Event;
  end: Event;
  error: SpeechRecognitionErrorEvent;
  nomatch: SpeechRecognitionEvent;
  result: SpeechRecognitionEvent;
  soundend: Event;
  soundstart: Event;
  speechend: Event;
  speechstart: Event;
  start: Event;
}
57 |
58 | export interface SpeechRecognition extends EventTarget {
59 | continuous: boolean;
60 | grammars: SpeechGrammarList;
61 | interimResults: boolean;
62 | lang: string;
63 | maxAlternatives: number;
64 | onaudioend: ((this: SpeechRecognition, ev: Event) => any) | null;
65 | onaudiostart: ((this: SpeechRecognition, ev: Event) => any) | null;
66 | onend: ((this: SpeechRecognition, ev: Event) => any) | null;
67 | onerror:
68 | | ((this: SpeechRecognition, ev: SpeechRecognitionErrorEvent) => any)
69 | | null;
70 | onnomatch:
71 | | ((this: SpeechRecognition, ev: SpeechRecognitionEvent) => any)
72 | | null;
73 | onresult:
74 | | ((this: SpeechRecognition, ev: SpeechRecognitionEvent) => any)
75 | | null;
76 | onsoundend: ((this: SpeechRecognition, ev: Event) => any) | null;
77 | onsoundstart: ((this: SpeechRecognition, ev: Event) => any) | null;
78 | onspeechend: ((this: SpeechRecognition, ev: Event) => any) | null;
79 | onspeechstart: ((this: SpeechRecognition, ev: Event) => any) | null;
80 | onstart: ((this: SpeechRecognition, ev: Event) => any) | null;
81 | abort(): void;
82 | start(): void;
83 | stop(): void;
84 | addEventListener(
85 | type: K,
86 | listener: (
87 | this: SpeechRecognition,
88 | ev: SpeechRecognitionEventMap[K],
89 | ) => any,
90 | options?: boolean | AddEventListenerOptions,
91 | ): void;
92 | addEventListener(
93 | type: string,
94 | listener: EventListenerOrEventListenerObject,
95 | options?: boolean | AddEventListenerOptions,
96 | ): void;
97 | removeEventListener(
98 | type: K,
99 | listener: (
100 | this: SpeechRecognition,
101 | ev: SpeechRecognitionEventMap[K],
102 | ) => any,
103 | options?: boolean | EventListenerOptions,
104 | ): void;
105 | removeEventListener(
106 | type: string,
107 | listener: EventListenerOrEventListenerObject,
108 | options?: boolean | EventListenerOptions,
109 | ): void;
110 | }
111 |
--------------------------------------------------------------------------------
/demos/no_stt/src/main.js:
--------------------------------------------------------------------------------
1 | import "./style.css";
2 | import {
3 | Playthrough,
4 | AudioManager,
5 | createPlaythroughToken,
6 | createConversation,
7 | } from "@charisma-ai/sdk";
8 |
9 | const messagesDiv = document.getElementById("messages");
10 |
// Append a chat message to the #messages container.
// Uses textContent rather than innerHTML so message text coming from the
// server (or typed by the player) cannot inject markup/scripts into the
// page — the interpolated strings here are untrusted input.
const appendMessage = (message, className, name) => {
  const div = document.createElement("div");
  div.classList.add(className, "message");
  div.textContent = `${name ? `${name}:` : ""} ${message}`;
  messagesDiv?.appendChild(div);
};
17 |
18 | // Setup the audio manager.
19 | const audioManager = new AudioManager({});
20 |
21 | let playthrough;
22 | let conversation;
23 |
// Entry point wired to the page's Start button: reads the form fields,
// creates a playthrough + conversation, hooks up message/audio handling,
// and connects.
window.start = async function start() {
  // In order to play audio, this method must be called by a user interaction.
  // This is due to a security restriction in some browsers.
  audioManager.initialise();

  // Read the story configuration out of the form inputs.
  const storyIdInput = document.getElementById("story-id");
  const storyId = Number(storyIdInput.value);
  const storyApiKeyInput = document.getElementById("story-api-key");
  const storyApiKey = storyApiKeyInput.value;
  const storyVersionInput = document.getElementById("version");
  // NOTE(review): Number("") is 0 (falsy), so an empty field becomes
  // undefined (most recent published version) — but a literal "0" entry
  // is also coerced to undefined. Confirm that is intended.
  const storyVersion = Number(storyVersionInput.value) || undefined;
  const StartGraphReferenceIdInput = document.getElementById(
    "startGraphReferenceId",
  );
  const startGraphReferenceId = StartGraphReferenceIdInput.value;

  const { token } = await createPlaythroughToken({
    storyId,
    apiKey: storyApiKey,
    version: storyVersion,
  });

  const { conversationUuid } = await createConversation(token);
  playthrough = new Playthrough(token);
  conversation = playthrough.joinConversation(conversationUuid);

  // Request speech audio (as a buffer) alongside character message text.
  conversation.setSpeechConfig({
    encoding: ["mp3", "wav"],
    output: "buffer",
  });

  conversation.on("message", (message) => {
    const characterMessage =
      message.type === "character" ? message.message : null;

    // For this demo, we only care about character messages.
    if (!characterMessage) return;

    // Put the character message on the page.
    appendMessage(
      characterMessage.text,
      "character-message",
      characterMessage.character?.name,
    );

    // Play character speech.
    if (characterMessage.speech) {
      audioManager.playCharacterSpeech(characterMessage.speech.audio, {
        trackId: String(characterMessage.character?.id),
        interrupt: "track",
      });
    }

    if (characterMessage.media) {
      if (characterMessage.media.stopAllAudio) {
        audioManager.mediaAudioStopAll();
      }

      // Play media audio if it exists in the node.
      audioManager.mediaAudioPlay(characterMessage.media.audioTracks);
    }
  });

  conversation.on("problem", console.warn);

  // Listen for the playthrough to connect and start the conversation when it does.
  // The `started` flag guards against re-starting on later reconnects.
  let started = false;
  playthrough.on("connection-status", (status) => {
    appendMessage(
      status,
      status === "disconnected" ? "disconnected-message" : "connected-message",
    );

    if (status === "connected" && !started) {
      const conversationParameters = startGraphReferenceId
        ? { startGraphReferenceId }
        : undefined;
      conversation.start(conversationParameters);
      started = true;
    }
  });

  await playthrough.connect();
};
108 |
// Send whatever is typed in #reply-input to the conversation, then echo
// it on the page as a player message.
const reply = () => {
  // Nothing to reply to until start() has created these.
  if (!conversation || !playthrough) return;

  const input = document.getElementById("reply-input");
  const text = input.value;
  if (!text.trim()) return;

  conversation.reply({ text });
  input.value = "";

  // Put player message on the page.
  appendMessage(text, "player-message", "You");
};
123 |
124 | // Handle the Enter key press.
// Submit the current reply when the user presses Enter in the input.
window.onKeyPress = function onKeyPress(event) {
  const hasTarget = Boolean(event && event.currentTarget);
  if (hasTarget && event.key === "Enter") reply();
};
131 |
132 | window.reply = reply;
133 |
134 |
--------------------------------------------------------------------------------
/src/AudioInputsBrowser.ts:
--------------------------------------------------------------------------------
1 | import { EventEmitter } from "eventemitter3";
2 |
3 | import type {
4 | SpeechRecognition,
5 | SpeechRecognitionErrorCode,
6 | SpeechRecognitionEvent,
7 | } from "./speech-types.js";
8 |
9 | interface Constructable {
10 | new (): T;
11 | }
12 |
13 | interface WindowWithSpeechRecognition extends Window {
14 | SpeechRecognition?: Constructable;
15 | webkitSpeechRecognition?: Constructable;
16 | }
17 |
18 | declare const window: WindowWithSpeechRecognition;
19 |
20 | const SpeechRecognitionClass =
21 | typeof window !== "undefined"
22 | ? window.SpeechRecognition || window.webkitSpeechRecognition
23 | : undefined;
24 |
// Options passed through to the underlying SpeechRecognition instance.
export interface SpeechRecognitionOptions {
  continuous?: boolean;
  interimResults?: boolean;
  lang?: string;
}

export interface SpeechRecognitionStopOptions {
  // When true, stop gracefully so the final recognition result is still
  // delivered; when false, abort immediately and drop pending results.
  waitForLastResult?: boolean;
}

// Event payloads emitted by AudioInputsBrowser.
type AudioInputsBrowserEvents = {
  result: [SpeechRecognitionEvent];
  transcript: [string];
  "transcript-interim": [string];
  error: [SpeechRecognitionErrorCode];
  timeout: [];
  start: [];
  stop: [];
};
44 |
45 | class AudioInputsBrowser extends EventEmitter {
46 | private recognition = SpeechRecognitionClass
47 | ? new SpeechRecognitionClass()
48 | : undefined;
49 |
50 | private timeoutId: number | undefined;
51 |
52 | public isSupported = SpeechRecognitionClass !== undefined;
53 |
54 | public startListening = (
55 | timeout = 10000,
56 | {
57 | continuous = false,
58 | interimResults = true,
59 | lang = "en-GB",
60 | }: SpeechRecognitionOptions = {},
61 | ): void => {
62 | if (!this.recognition) {
63 | return;
64 | }
65 |
66 | if (this.timeoutId !== undefined) {
67 | clearTimeout(this.timeoutId);
68 | }
69 |
70 | const { recognition } = this;
71 | recognition.continuous = continuous;
72 | recognition.interimResults = interimResults;
73 | recognition.lang = lang;
74 | recognition.onresult = this.onRecognitionResult;
75 | recognition.onstart = (): void => {
76 | this.emit("start");
77 | };
78 | recognition.onend = (): void => {
79 | this.emit("stop");
80 | recognition.start();
81 | };
82 | recognition.onerror = (event): void => {
83 | this.emit("error", event.error);
84 | };
85 |
86 | try {
87 | recognition.start();
88 | } catch (err) {
89 | // this is fine, it just means we tried to start/stop a stream when it was already started/stopped
90 | }
91 |
92 | if (timeout !== undefined) {
93 | this.timeoutId = window.setTimeout(this.onTimeout, timeout);
94 | }
95 | };
96 |
97 | public stopListening = ({
98 | waitForLastResult = false,
99 | }: SpeechRecognitionStopOptions = {}): void => {
100 | if (this.timeoutId !== undefined) {
101 | clearTimeout(this.timeoutId);
102 | }
103 |
104 | const { recognition } = this;
105 | if (recognition) {
106 | if (!waitForLastResult) {
107 | recognition.onresult = (): void => undefined;
108 | }
109 | recognition.onend = (): void => {
110 | this.emit("stop");
111 | };
112 | try {
113 | if (waitForLastResult) {
114 | recognition.stop();
115 | } else {
116 | recognition.abort();
117 | }
118 | } catch (err) {
119 | // this is fine, it just means we tried to start/stop a stream when it was already started/stopped
120 | }
121 | }
122 | };
123 |
124 | public resetTimeout = (timeout: number): void => {
125 | if (this.timeoutId !== undefined) {
126 | clearTimeout(this.timeoutId);
127 | }
128 |
129 | this.timeoutId = window.setTimeout(this.onTimeout, timeout);
130 | };
131 |
132 | private onTimeout = (): void => {
133 | this.timeoutId = undefined;
134 | this.emit("timeout");
135 | this.stopListening();
136 | };
137 |
138 | private onRecognitionResult = (event: SpeechRecognitionEvent): void => {
139 | this.emit("result", event);
140 |
141 | if (event.results.length === 0) {
142 | return;
143 | }
144 |
145 | const lastResult = event.results[event.results.length - 1];
146 | const message = lastResult[0].transcript.trim();
147 | if (lastResult.isFinal) {
148 | this.emit("transcript", message);
149 | } else {
150 | this.emit("transcript-interim", message);
151 | }
152 | };
153 | }
154 |
155 | export default AudioInputsBrowser;
156 |
--------------------------------------------------------------------------------
/src/AudioTrackManager.test.ts:
--------------------------------------------------------------------------------
1 | /* eslint-disable dot-notation */
2 | import AudioTrackManager from "./AudioTrackManager";
3 | import { AudioTrackBehaviour } from "./types";
4 |
// Minimal AudioContext mock: just enough of the Web Audio graph (gain
// node, buffer source, decode) for AudioTrackManager to run under jsdom.
globalThis.AudioContext = jest.fn().mockImplementation(() => {
  const gainNodeMock = {
    gain: { value: 1, setValueAtTime: jest.fn() },
    connect: jest.fn().mockReturnThis(), // return `this` to allow chaining
  };

  const bufferSourceMock = {
    buffer: null,
    loop: false,
    connect: jest.fn().mockReturnValue(gainNodeMock), // Mock to allow chaining
    start: jest.fn(),
    stop: jest.fn(),
    onended: jest.fn(),
  };

  return {
    createGain: jest.fn().mockReturnValue(gainNodeMock),
    createBufferSource: jest.fn().mockReturnValue(bufferSourceMock),
    decodeAudioData: jest.fn().mockImplementation(() =>
      Promise.resolve({
        duration: 120,
        sampleRate: 44100,
        length: 5292000,
        numberOfChannels: 2,
        getChannelData: jest.fn(),
      }),
    ),
    destination: {
      connect: jest.fn(), // Mock connect on the destination as well
    },
  };
});

// Stub fetch so track loading resolves a small ArrayBuffer without any
// network access.
globalThis.fetch = jest.fn(() =>
  Promise.resolve({
    ok: true,
    status: 200,
    statusText: "OK",
    headers: new Headers(),
    url: "",
    redirected: false,
    type: "basic",
    body: null,
    bodyUsed: false,
    clone: jest.fn(),
    arrayBuffer: () => Promise.resolve(new ArrayBuffer(8)),
    json: jest.fn(),
    text: jest.fn(),
    formData: jest.fn(),
    blob: jest.fn(),
  } as unknown as Response),
);

describe("AudioTrackManager", () => {
  afterEach(() => {
    jest.clearAllMocks();
  });

  test("should initialize with isPlaying as false and no currentAudio", () => {
    const audioTrackManager = new AudioTrackManager();

    expect(audioTrackManager.isPlaying).toBe(false);
    expect(audioTrackManager["currentAudio"]).toEqual([]);
  });

  test("should play new audio tracks", async () => {
    const audioTrackManager = new AudioTrackManager();

    const audioTracks = [
      {
        url: "track1.mp3",
        loop: false,
        volume: 0.5,
        behaviour: AudioTrackBehaviour.Restart,
        stopPlaying: false,
      },
      {
        url: "track2.mp3",
        loop: true,
        volume: 0.8,
        behaviour: AudioTrackBehaviour.Continue,
        stopPlaying: false,
      },
    ];

    await audioTrackManager.play(audioTracks);

    expect(audioTrackManager.isPlaying).toBe(true);
    expect(audioTrackManager["currentAudio"]).toHaveLength(2);
  });

  test("should not play audio if audioTracks array is empty", () => {
    const audioTrackManager = new AudioTrackManager();

    // NOTE(review): play() returns a promise (it is awaited in the other
    // tests) but is not awaited here — the assertion below relies on
    // isPlaying remaining false synchronously for empty input. Confirm,
    // or make this test async and await the call.
    audioTrackManager.play([]);

    expect(audioTrackManager.isPlaying).toBe(false);
  });

  test("should stop all currently playing audio tracks", async () => {
    const audioTrackManager = new AudioTrackManager();
    const audioTracks = [
      {
        url: "track1.mp3",
        loop: false,
        volume: 0.5,
        behaviour: AudioTrackBehaviour.Restart,
        stopPlaying: false,
      },
      {
        url: "track2.mp3",
        loop: true,
        volume: 0.8,
        behaviour: AudioTrackBehaviour.Continue,
        stopPlaying: false,
      },
    ];

    await audioTrackManager.play(audioTracks);
    audioTrackManager.stopAll();

    expect(audioTrackManager.isPlaying).toBe(false);
    expect(audioTrackManager["currentAudio"]).toEqual([]);
  });

  test("should restart an audio track when behaviour is set to 'restart'", async () => {
    const audioTrackManager = new AudioTrackManager();
    const audioTracks = [
      {
        url: "track1.mp3",
        loop: false,
        volume: 0.5,
        behaviour: AudioTrackBehaviour.Restart,
        stopPlaying: false,
      },
      {
        url: "track2.mp3",
        loop: true,
        volume: 0.8,
        behaviour: AudioTrackBehaviour.Continue,
        stopPlaying: false,
      },
    ];

    await audioTrackManager.play(audioTracks);

    // Play the same track again, triggering the restart behavior
    // NOTE(review): this play() call is not awaited, so the length
    // assertion may run before the restart completes — potential
    // flakiness; confirm whether the synchronous portion of play()
    // guarantees the state asserted below.
    audioTrackManager.play([audioTracks[0]]);

    expect(audioTrackManager["currentAudio"]).toHaveLength(1);
  });
});
157 |
--------------------------------------------------------------------------------
/src/Conversation.ts:
--------------------------------------------------------------------------------
1 | import { EventEmitter } from "eventemitter3";
2 | import PQueue from "p-queue";
3 |
4 | import Playthrough from "./Playthrough.js";
5 | import {
6 | StartEvent,
7 | ReplyEvent,
8 | ActionEvent,
9 | SpeechConfig,
10 | MessageEvent,
11 | StartTypingEvent,
12 | StopTypingEvent,
13 | EpisodeCompleteEvent,
14 | ConfirmActionEvent,
15 | ConfirmReplyEvent,
16 | ConfirmResumeEvent,
17 | ConfirmStartEvent,
18 | ConfirmTapEvent,
19 | ReplyIntermediateEvent,
20 | ProblemEvent,
21 | Message,
22 | } from "./types.js";
23 |
// Per-conversation options; speechConfig is merged into every outgoing
// event sent through this conversation.
export interface ConversationOptions {
  speechConfig?: SpeechConfig;
}

// Event map for Conversation's EventEmitter: event name -> argument tuple.
export type ConversationEvents = {
  // Events sent from server
  message: [MessageEvent];
  "start-typing": [StartTypingEvent];
  "stop-typing": [StopTypingEvent];
  "episode-complete": [EpisodeCompleteEvent];
  problem: [ProblemEvent];
  // Confirmation events sent from server
  action: [ConfirmActionEvent];
  reply: [ConfirmReplyEvent];
  resume: [ConfirmResumeEvent];
  start: [ConfirmStartEvent];
  tap: [ConfirmTapEvent];
  // Local events
  "playback-start": [];
  "playback-stop": [];
};
45 |
46 | export class Conversation extends EventEmitter {
47 | private uuid: string;
48 |
49 | private eventQueue: PQueue = new PQueue();
50 |
51 | private lastEventId?: string;
52 |
53 | private playthroughInstance: Playthrough;
54 |
55 | private options: ConversationOptions = {};
56 |
57 | public constructor(
58 | conversationUuid: string,
59 | playthroughInstance: Playthrough,
60 | options?: ConversationOptions,
61 | ) {
62 | super();
63 |
64 | this.uuid = conversationUuid;
65 | this.playthroughInstance = playthroughInstance;
66 |
67 | if (options) {
68 | this.options = options;
69 | }
70 |
71 | // Whenever we emit a message, store the last event id so we know where to
72 | // restore from if a disconnection occurs.
73 | this.on("message", (message) => {
74 | this.lastEventId = message.eventId;
75 | });
76 |
77 | // Please excuse this ghastly hack, but Babel complains about
78 | // transforming a function class property with an arrow function inside
79 | // (only on non-"modern" builds)
80 | this.addIncomingEvent = this.addIncomingEvent.bind(this);
81 | }
82 |
83 | public addIncomingEvent<
84 | T extends EventEmitter.EventNames,
85 | >(
86 | eventName: T,
87 | ...eventArgs: EventEmitter.EventArgs
88 | ): true {
89 | this.eventQueue.add(() => this.emit(eventName, ...eventArgs));
90 | return true;
91 | }
92 |
93 | public start = (event: StartEvent = {}): void => {
94 | return this.playthroughInstance.addOutgoingEvent("start", {
95 | ...this.options,
96 | ...event,
97 | conversationUuid: this.uuid,
98 | });
99 | };
100 |
101 | public reply = (event: ReplyEvent): void => {
102 | return this.playthroughInstance.addOutgoingEvent("reply", {
103 | ...this.options,
104 | ...event,
105 | conversationUuid: this.uuid,
106 | });
107 | };
108 |
109 | public replyIntermediate = (event: ReplyIntermediateEvent): void => {
110 | return this.playthroughInstance.addOutgoingEvent("reply-intermediate", {
111 | ...this.options,
112 | ...event,
113 | conversationUuid: this.uuid,
114 | });
115 | };
116 |
117 | public tap = (): void => {
118 | return this.playthroughInstance.addOutgoingEvent("tap", {
119 | ...this.options,
120 | conversationUuid: this.uuid,
121 | });
122 | };
123 |
124 | public action = (event: ActionEvent): void => {
125 | return this.playthroughInstance.addOutgoingEvent("action", {
126 | ...this.options,
127 | ...event,
128 | conversationUuid: this.uuid,
129 | });
130 | };
131 |
132 | public resume = (): void => {
133 | return this.playthroughInstance.addOutgoingEvent("resume", {
134 | ...this.options,
135 | conversationUuid: this.uuid,
136 | });
137 | };
138 |
139 | public setSpeechConfig = (speechConfig: SpeechConfig | undefined): void => {
140 | this.options.speechConfig = speechConfig;
141 | };
142 |
143 | public reconnect = async (): Promise => {
144 | // If we haven't received any messages so far, there's nowhere to playback from.
145 | if (typeof this.lastEventId === "string") {
146 | // Receiving new events when trying to playback is confusing, so pause the event queue.
147 | this.eventQueue.pause();
148 | try {
149 | const { events } = await this.playthroughInstance.getEventHistory({
150 | conversationUuid: this.uuid,
151 | minEventId: this.lastEventId,
152 | limit: 1000,
153 | eventTypes: ["message_character"],
154 | });
155 | if (events.length > 0) {
156 | this.emit("playback-start");
157 | events.forEach((event) => {
158 | // If we've emitted a new message since playback started, let's ignore playback ones.
159 | if (BigInt(event.id) > BigInt(this.lastEventId as string)) {
160 | this.emit("message", {
161 | ...(event.payload as Message),
162 | conversationUuid: this.uuid,
163 | });
164 | }
165 | });
166 | this.emit("playback-stop");
167 | }
168 | } finally {
169 | // We can restart the queue now playback is finished.
170 | this.eventQueue.start();
171 | }
172 | }
173 | };
174 | }
175 |
176 | export default Conversation;
177 |
--------------------------------------------------------------------------------
/demos/browser_stt/src/main.ts:
--------------------------------------------------------------------------------
1 | import "./style.css";
2 | import {
3 | Playthrough,
4 | AudioManager,
5 | createPlaythroughToken,
6 | createConversation,
7 | Conversation,
8 | Message,
9 | } from "@charisma-ai/sdk";
10 |
11 | // In this demo, we'll extend the global "window" with the functions we need so we can call them from the HTML.
12 | declare global {
13 | interface Window {
14 | start: () => Promise;
15 | reply: () => void;
16 | onKeyPress: (event: KeyboardEvent) => void;
17 | toggleMicrophone: (event: Event) => void;
18 | }
19 | }
20 |
21 | const messagesDiv = document.getElementById("messages");
22 | const recordButton = document.getElementById("record-button");
23 |
24 | const appendMessage = (message: string, className: string, name?: string) => {
25 | const div = document.createElement("div");
26 | div.classList.add(className, "message");
27 | div.innerHTML = `${name ? `${name}:` : ""} ${message}`;
28 | messagesDiv?.appendChild(div);
29 | };
30 |
// Keep track of the recording statuses of the microphone so we can update the UI accordingly.
let recordingStatus: "recording" | "off" | "starting" = "off";

// Passed to the AudioManager below as handleStartSTT: flips the UI into
// the recording state once the STT service is actually capturing.
const handleStartSTT = () => {
  recordingStatus = "recording";
  if (recordButton) recordButton.innerHTML = "Stop";
};

// Passed to the AudioManager below as handleStopSTT: resets the UI when
// capturing ends.
const handleStopSTT = () => {
  recordingStatus = "off";
  if (recordButton) recordButton.innerHTML = "Record";
};
43 |
44 | const handleTranscript = (transcript: string) => {
45 | const replyInput = document.getElementById("reply-input");
46 | if (replyInput) {
47 | replyInput.value = transcript;
48 | }
49 | };
50 |
51 | // Setup the audio manager.
52 | const audioManager = new AudioManager({
53 | duckVolumeLevel: 0.1,
54 | sttService: "browser",
55 | streamTimeslice: 100,
56 | handleTranscript,
57 | handleStartSTT,
58 | handleStopSTT,
59 | });
60 |
61 | if (!audioManager.browserIsSupported()) {
62 | appendMessage(
63 | "Your browser does not support the browser STT service.",
64 | "error-message",
65 | );
66 | }
67 |
68 | let playthrough: Playthrough;
69 | let conversation: Conversation;
70 |
// Entry point wired to the page's Start button: reads the form fields,
// creates a playthrough + conversation, hooks up message/audio handling,
// and connects.
window.start = async function start() {
  // In order to play audio, this method must be called by a user interaction.
  // This is due to a security restriction in some browsers.
  audioManager.initialise();

  // Read the story configuration out of the form inputs.
  const storyIdInput = document.getElementById("story-id") as HTMLInputElement;
  const storyId = Number(storyIdInput.value);
  const storyApiKeyInput = document.getElementById(
    "story-api-key",
  ) as HTMLInputElement;
  const storyApiKey = storyApiKeyInput.value;
  const storyVersionInput = document.getElementById(
    "version",
  ) as HTMLInputElement;
  // NOTE(review): Number("") is 0 (falsy), so an empty field becomes
  // undefined (most recent published version) — but a literal "0" entry
  // is also coerced to undefined. Confirm that is intended.
  const storyVersion = Number(storyVersionInput.value) || undefined;
  const StartGraphReferenceIdInput = document.getElementById(
    "startGraphReferenceId",
  ) as HTMLInputElement;
  const startGraphReferenceId = StartGraphReferenceIdInput.value;

  const { token } = await createPlaythroughToken({
    storyId,
    apiKey: storyApiKey,
    version: storyVersion,
  });

  const { conversationUuid } = await createConversation(token);
  playthrough = new Playthrough(token);
  conversation = playthrough.joinConversation(conversationUuid);

  // Request speech audio (as a buffer) alongside character message text.
  conversation.setSpeechConfig({
    encoding: ["mp3", "wav"],
    output: "buffer",
  });

  conversation.on("message", (message: Message) => {
    const characterMessage =
      message.type === "character" ? message.message : null;

    // For this demo, we only care about character messages.
    if (!characterMessage) return;

    // Put the character message on the page.
    appendMessage(
      characterMessage.text,
      "character-message",
      characterMessage.character?.name,
    );

    // Play character speech.
    if (characterMessage.speech) {
      audioManager.playCharacterSpeech(
        characterMessage.speech.audio as ArrayBuffer,
        {
          trackId: String(characterMessage.character?.id),
          interrupt: "track",
        },
      );
    }

    if (characterMessage.media) {
      if (characterMessage.media.stopAllAudio) {
        audioManager.mediaAudioStopAll();
      }

      // Play media audio if it exists in the node.
      audioManager.mediaAudioPlay(characterMessage.media.audioTracks);
    }
  });

  conversation.on("problem", console.warn);

  // Listen for the playthrough to connect and start the conversation when it does.
  // The `started` flag guards against re-starting on later reconnects.
  let started = false;
  playthrough.on("connection-status", (status) => {
    appendMessage(
      status,
      status === "disconnected" ? "disconnected-message" : "connected-message",
    );

    if (status === "connected" && !started) {
      const conversationParameters = startGraphReferenceId
        ? { startGraphReferenceId }
        : undefined;
      conversation.start(conversationParameters);
      started = true;
    }
  });

  await playthrough.connect();
};
162 |
163 | const reply = () => {
164 | if (!playthrough || !conversation) return;
165 |
166 | // Stop listening when you send a message.
167 | audioManager.stopListening();
168 |
169 | const replyInput = document.getElementById("reply-input");
170 | const text = replyInput.value;
171 |
172 | if (text.trim() === "") return;
173 |
174 | conversation.reply({ text });
175 | replyInput.value = "";
176 |
177 | // Put player message on the page.
178 | appendMessage(text, "player-message", "You");
179 | };
180 |
181 | // Handle the Enter key press.
182 | window.onKeyPress = function onKeyPress(event) {
183 | if (!event || !event.currentTarget) return;
184 | if (event.key === "Enter") {
185 | reply();
186 | }
187 | };
188 |
189 | window.reply = reply;
190 |
191 | // Toggling the microphone will request the stt service to connect.
192 | window.toggleMicrophone = () => {
193 | if (!recordButton) return;
194 |
195 | if (recordingStatus === "off") {
196 | audioManager.startListening();
197 | recordingStatus = "starting";
198 | recordButton.innerHTML = "...";
199 | } else if (recordingStatus === "recording") {
200 | audioManager.stopListening();
201 | recordingStatus = "off";
202 | recordButton.innerHTML = "Record";
203 | }
204 | };
205 |
--------------------------------------------------------------------------------
/src/AudioTrackManager.ts:
--------------------------------------------------------------------------------
1 | import { AudioTrack } from "./types.js";
2 |
3 | interface Constructable {
4 | new (): T;
5 | }
6 |
7 | interface WindowWithAudioContext extends Window {
8 | AudioContext?: Constructable;
9 | webkitAudioContext?: Constructable;
10 | }
11 |
12 | declare const window: WindowWithAudioContext;
13 |
14 | class AudioTrackManager {
15 | private audioContext: AudioContext | undefined;
16 |
17 | private muteForClientGainNode: GainNode | null = null;
18 |
19 | private duckForMicrophoneGainNode: GainNode | null = null;
20 |
21 | private clientVolumeGainNode: GainNode | null = null;
22 |
23 | public isPlaying: boolean;
24 |
25 | private duckControlCurrentGainVolume = 1;
26 |
27 | private clientSetVolume = 1;
28 |
29 | private clientSetMuted = false;
30 |
31 | private currentAudio: {
32 | source: AudioBufferSourceNode;
33 | originalGainNode: GainNode;
34 | url: string;
35 | originalVolume: number;
36 | }[];
37 |
38 | constructor() {
39 | this.isPlaying = false;
40 | this.currentAudio = [];
41 | }
42 |
43 | private async loadAudioBuffer(url: string): Promise {
44 | if (this.audioContext === undefined) return undefined;
45 |
46 | const response = await fetch(url);
47 | const arrayBuffer = await response.arrayBuffer();
48 | return this.audioContext.decodeAudioData(arrayBuffer);
49 | }
50 |
51 | private async playNewSource(audioTrack: AudioTrack): Promise {
52 | if (!audioTrack.url) return;
53 |
54 | const audioBuffer = await this.loadAudioBuffer(audioTrack.url);
55 | if (this.audioContext === undefined) return;
56 |
57 | const sourceGainNode = this.audioContext.createGain();
58 | sourceGainNode.gain.value = audioTrack.volume;
59 |
60 | const source = this.audioContext.createBufferSource();
61 | if (audioBuffer === undefined) return;
62 | source.buffer = audioBuffer;
63 | source.loop = audioTrack.loop;
64 | // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
65 | source.connect(sourceGainNode).connect(this.muteForClientGainNode!);
66 | source.start(0);
67 |
68 | source.onended = () => {
69 | this.currentAudio = this.currentAudio.filter(
70 | (currentAudio) => currentAudio.source !== source,
71 | );
72 |
73 | if (this.currentAudio.length === 0) {
74 | this.isPlaying = false;
75 | }
76 | };
77 |
78 | this.currentAudio.push({
79 | source,
80 | originalGainNode: sourceGainNode,
81 | url: audioTrack.url,
82 | originalVolume: audioTrack.volume,
83 | });
84 | }
85 |
86 | public getAudioContext = (): AudioContext => {
87 | if (this.audioContext) {
88 | return this.audioContext;
89 | }
90 |
91 | const AudioContextClass = window.AudioContext || window.webkitAudioContext;
92 |
93 | if (!AudioContextClass) {
94 | throw new Error("AudioContext isn't supported in this browser.");
95 | }
96 |
97 | this.audioContext = new AudioContextClass();
98 | this.clientVolumeGainNode = this.audioContext.createGain();
99 | this.duckForMicrophoneGainNode = this.audioContext.createGain();
100 | this.muteForClientGainNode = this.audioContext.createGain();
101 |
102 | this.muteForClientGainNode.gain.setValueAtTime(
103 | this.clientSetMuted ? 0 : 1,
104 | this.audioContext.currentTime,
105 | );
106 | this.duckForMicrophoneGainNode.gain.setValueAtTime(
107 | this.duckControlCurrentGainVolume,
108 | this.audioContext.currentTime,
109 | );
110 | this.clientVolumeGainNode.gain.setValueAtTime(
111 | this.clientSetVolume,
112 | this.audioContext.currentTime,
113 | );
114 |
115 | this.muteForClientGainNode.connect(this.duckForMicrophoneGainNode);
116 | this.duckForMicrophoneGainNode.connect(this.clientVolumeGainNode);
117 | this.clientVolumeGainNode.connect(this.audioContext.destination);
118 |
119 | return this.audioContext;
120 | };
121 |
122 | public async play(audioTracks: AudioTrack[]): Promise {
123 | if (audioTracks.length === 0) {
124 | return;
125 | }
126 | if (this.audioContext === undefined) {
127 | this.getAudioContext();
128 | }
129 |
130 | this.isPlaying = true;
131 |
132 | await Promise.all(
133 | audioTracks.map(async (audioTrack) => {
134 | if (!audioTrack.url) return;
135 |
136 | const index = this.currentAudio.findIndex(
137 | (currentAudio) => currentAudio.url === audioTrack.url,
138 | );
139 |
140 | if (index === -1) {
141 | await this.playNewSource(audioTrack);
142 | } else {
143 | if (audioTrack.stopPlaying) {
144 | this.currentAudio[index].source.stop();
145 | this.currentAudio = this.currentAudio.filter(
146 | (currentAudio) => currentAudio.url !== audioTrack.url,
147 | );
148 | return;
149 | }
150 |
151 | if (audioTrack.behaviour === "restart") {
152 | this.currentAudio[index].source.stop();
153 | this.currentAudio = this.currentAudio.filter(
154 | (currentAudio) => currentAudio.url !== audioTrack.url,
155 | );
156 | await this.playNewSource(audioTrack);
157 | }
158 | }
159 | }),
160 | );
161 |
162 | if (this.currentAudio.length === 0) {
163 | this.isPlaying = false;
164 | }
165 | }
166 |
167 | public pause(): void {
168 | this.isPlaying = false;
169 | this.currentAudio.forEach(({ source }) => {
170 | source.stop();
171 | });
172 | }
173 |
174 | public stopAll(): void {
175 | this.currentAudio.forEach(({ source }) => {
176 | source.stop();
177 | });
178 | this.currentAudio = [];
179 | this.isPlaying = false;
180 | }
181 |
182 | public get isMutedByClient(): boolean {
183 | return this.clientSetMuted;
184 | }
185 |
186 | public set isMutedByClient(muted: boolean) {
187 | this.clientSetMuted = muted;
188 | if (!this.audioContext || !this.muteForClientGainNode) {
189 | return;
190 | }
191 | this.muteForClientGainNode.gain.setValueAtTime(
192 | this.clientSetMuted ? 0 : 1,
193 | this.audioContext.currentTime + 0.1,
194 | );
195 | }
196 |
197 | public get normalVolume(): number {
198 | return this.clientSetVolume;
199 | }
200 |
201 | public set normalVolume(volume: number) {
202 | const clampedVolume = Math.max(0, Math.min(1, volume));
203 |
204 | this.clientSetVolume = clampedVolume;
205 | if (!this.audioContext || !this.clientVolumeGainNode) {
206 | return;
207 | }
208 | this.clientVolumeGainNode.gain.setValueAtTime(
209 | this.clientSetVolume,
210 | this.audioContext.currentTime + 0.1,
211 | );
212 | }
213 |
214 | public duckTo(volume: number): void {
215 | this.duckControlCurrentGainVolume = volume;
216 | if (!this.audioContext || !this.duckForMicrophoneGainNode) {
217 | return;
218 | }
219 | this.duckForMicrophoneGainNode.gain.setValueAtTime(
220 | this.duckControlCurrentGainVolume,
221 | this.audioContext.currentTime + 0.05,
222 | );
223 | }
224 |
225 | public duckOff(): void {
226 | this.duckControlCurrentGainVolume = 1;
227 | if (!this.audioContext || !this.duckForMicrophoneGainNode) {
228 | return;
229 | }
230 | this.duckForMicrophoneGainNode.gain.setValueAtTime(
231 | this.duckControlCurrentGainVolume,
232 | this.audioContext.currentTime + 0.01,
233 | );
234 | }
235 | }
236 |
237 | export default AudioTrackManager;
238 |
--------------------------------------------------------------------------------
/src/types.ts:
--------------------------------------------------------------------------------
// A value that can round-trip through JSON.parse / JSON.stringify.
export type JSONValue =
  | string
  | number
  | boolean
  | null
  | JSONValue[]
  | { [key: string]: JSONValue };

// Message field types

// Kind of media referenced by a media message URL.
export type MediaType =
  | "image"
  | "video"
  | "audio"
  | "youtube"
  | "vimeo"
  | "unknown";

// A feeling currently applied to a character, with remaining duration.
export type ActiveFeelingEffect = {
  feeling: string;
  intensity: number;
  duration: number;
  durationRemaining: number;
};

// Snapshot of a character's emotional state at a point in the story.
export type Emotion = {
  id: number;
  name: string;
  avatar: string | null;
  moodPositivity: number;
  moodEnergy: number;
  playerRelationship: number;
  activeEffects: ActiveFeelingEffect[];
};

// A stored memory: the key it is recalled by and the value saved under it.
export type Memory = {
  id: number;
  recallValue: string;
  saveValue: JSONValue | null;
};

// One step (node or edge) in the graph path a message traversed.
export type MessagePathItem = {
  id: number;
  type: "node" | "edge";
  graphId: number;
};
export type MessagePath = MessagePathItem[];

export type Impact = {
  id: number;
  impact: string;
  isImpactShareable: boolean;
  impactImageUrl: string | null;
};

// Free-form string key/value metadata attached to a message.
export type Metadata = {
  [key: string]: string | undefined;
};

export type Character = {
  id: number;
  name: string;
  avatar: string | null;
};

// Synthesised speech for a message: raw audio data or a URL, depending on
// the SpeechConfig `output` setting.
export type Speech = {
  audio: ArrayBuffer | string;
  duration: number;
};

export type BubblePoints = [number, number, number];
export type BubbleTailPosition = string;
export type BubbleStyle = string;

// Two [x, y] corner points defining an image layer's placement.
export type ImageLayerPoints = [[number, number], [number, number]];
export enum ImageLayerResizeMode {
  Contain = "contain",
  Cover = "cover",
}
export type ImageLayerOrientation = "portrait" | "landscape";
export type ImageLayer = {
  url: string | null;
  points: ImageLayerPoints;
  resizeMode: ImageLayerResizeMode;
  orientation: ImageLayerOrientation;
};

// Whether a track already playing should continue or restart when replayed.
export enum AudioTrackBehaviour {
  Continue = "continue",
  Restart = "restart",
}
export type AudioTrack = {
  url: string | null;
  behaviour: AudioTrackBehaviour;
  loop: boolean;
  volume: number;
  stopPlaying: boolean;
};

// Visual and audio presentation options attached to a message.
export type Media = {
  animationIn: string | null;
  animationOut: string | null;
  bubblePoints: BubblePoints | null;
  bubbleTailPosition: BubbleTailPosition | null;
  bubbleStyle: BubbleStyle | null;
  imageLayers: ImageLayer[];
  audioTracks: AudioTrack[];
  stopAllAudio: boolean;
};

// Message types

export type MessageCharacter = {
  text: string;
  character: Character | null;
  metadata: Metadata;
  speech: Speech | null;
  media: Media;
  impact: Impact | null;
};

export type MessagePanel = {
  metadata: Metadata;
  media: Media;
  impact: Impact | null;
};

export type MessageMedia = {
  url: string;
  mediaType: MediaType;
};
132 |
133 | type GenericMessage = {
134 | type: T;
135 | message: S;
136 | eventId: string;
137 | timestamp: number;
138 | endStory: boolean;
139 | tapToContinue: boolean;
140 | path: MessagePath;
141 | emotions: Emotion[];
142 | memories: Memory[];
143 | };
144 |
145 | export type Message =
146 | | GenericMessage<"character", MessageCharacter>
147 | | GenericMessage<"panel", MessagePanel>
148 | | GenericMessage<"media", MessageMedia>;
149 |
// Speech config (set on Conversation)

export type SpeechEncoding = "mp3" | "ogg" | "pcm" | "wav";
export type SpeechOutput = "url" | "buffer";
export interface SpeechConfig {
  encoding?: SpeechEncoding | SpeechEncoding[];
  output?: SpeechOutput;
}

// Events sent to server

export interface StartEvent {
  sceneIndex?: number;
  startGraphId?: number;
  startGraphReferenceId?: string;
  startNodeId?: number;
}

// How the player's reply text was produced.
export type InputType = "keyboard" | "microphone";

export interface ReplyEvent {
  text: string;
  inputType?: InputType;
}

// An in-progress (not yet final) reply, e.g. a live STT transcript.
export interface ReplyIntermediateEvent {
  text: string;
  inputType: InputType;
}

export interface ActionEvent {
  action: string;
}

// Events sent to client

export interface StartTypingEvent {
  conversationUuid: string;
}

export interface StopTypingEvent {
  conversationUuid: string;
}

// A Message scoped to the conversation it belongs to.
export type MessageEvent = {
  conversationUuid: string;
} & Message;

export type CharacterMoodChange = {
  characterId: number;
  characterName: string | null;
  characterAvatar: string | null;
  // moodChange: Partial;
};
export type CharacterMoodChanges = CharacterMoodChange[];

export interface EpisodeCompleteEvent {
  conversationUuid: string;
  impacts: Impact[];
  completedEpisodeId: number;
  nextEpisodeId: number | null;
  characterMoodChanges: CharacterMoodChanges;
}

// A server-reported problem, optionally scoped to one conversation.
export type ProblemEvent = {
  code: string;
  error: string;
  conversationUuid?: string;
};

// Confirmation events sent from server

export type ConfirmActionEventPayload = {
  conversationUuid: string;
  action: string;
};

export type ConfirmReplyEventPayload = {
  conversationUuid: string;
  text: string;
};

export type ConfirmResumeEventPayload = {
  conversationUuid: string;
};

export type ConfirmStartEventPayload = {
  conversationUuid: string;
  startGraphId?: number;
  startGraphReferenceId?: string;
  startNodeId?: number;
  sceneIndex?: number;
  resetEmotions?: boolean;
};

export type ConfirmTapEventPayload = {
  conversationUuid: string;
};
248 |
249 | type ConfirmEvent> = {
250 | eventId: string;
251 | timestamp: number;
252 | playerId: string | null;
253 | } & S;
254 |
255 | export type ConfirmActionEvent = ConfirmEvent;
256 | export type ConfirmReplyEvent = ConfirmEvent;
257 | export type ConfirmResumeEvent = ConfirmEvent;
258 | export type ConfirmStartEvent = ConfirmEvent;
259 | export type ConfirmTapEvent = ConfirmEvent;
260 |
261 | export type SpeechRecognitionStartEvent = {
262 | service: "unified" | "unified:google" | "unified:aws" | "unified:deepgram";
263 | sampleRate?: number;
264 | languageCode?: string;
265 | encoding?: string;
266 | customServiceParameters?: Record;
267 | returnRaw?: boolean;
268 | traceId?: number;
269 | };
270 |
271 | export type SpeechRecognitionResponse = {
272 | confidence?: number;
273 | durationInSeconds?: number;
274 | speechFinal: boolean;
275 | isFinal: boolean;
276 | text: string;
277 | traceId?: number;
278 | };
279 |
280 | type SpeechRecognitionParameters = {
281 | sampleRate: number;
282 | languageCode: string;
283 | encoding: string;
284 | customServiceParameters: unknown;
285 | returnRaw: boolean;
286 | traceId?: number;
287 | };
288 |
289 | export type SpeechRecognitionStarted = {
290 | id: string;
291 | playerSessionId: string;
292 | service: string;
293 | parameters: SpeechRecognitionParameters;
294 | startedAt: Date;
295 | };
296 |
297 | export type SpeechRecognitionStopped = {
298 | id: string;
299 | playerSessionId: string;
300 | service: string;
301 | parameters: SpeechRecognitionParameters;
302 | startedAt: Date;
303 | endedAt: Date;
304 | creditCount: number;
305 | };
306 |
--------------------------------------------------------------------------------
/demos/deepgram_stt_and_audio_out_controls/src/main.ts:
--------------------------------------------------------------------------------
1 | import "./style.css";
2 | import {
3 | Playthrough,
4 | AudioManager,
5 | createPlaythroughToken,
6 | createConversation,
7 | Conversation,
8 | Message,
9 | } from "@charisma-ai/sdk";
10 |
11 | // In this demo, we'll extend the global "window" with the functions we need so we can call them from the HTML.
12 | declare global {
13 | interface Window {
14 | start: () => Promise;
15 | reply: () => void;
16 | onKeyPress: (event: KeyboardEvent) => void;
17 | setBackgroundAudio: (audioOn: boolean) => void;
18 | setCharacterAudio: (audioOn: boolean) => void;
19 | toggleMicrophone: (event: Event) => void;
20 | setBackgroundAudioVolume: (volume: number) => void;
21 | setCharacterAudioVolume: (volume: number) => void;
22 | }
23 | }
24 |
// DOM elements used throughout the demo.
const messagesDiv = document.getElementById("messages");
const recordButton = document.getElementById("record-button");

// Appends one chat line to the messages list, optionally prefixed by a name.
const appendMessage = (message: string, className: string, name?: string) => {
  const div = document.createElement("div");
  div.classList.add(className, "message");
  div.innerHTML = `${name ? `${name}:` : ""} ${message}`;
  messagesDiv?.appendChild(div);
};

// Keep track of the recording statuses of the microphone so we can update the UI accordingly.
let recordingStatus: "recording" | "off" | "starting" = "off";
// Transcript state: `confirmedText` accumulates finalised STT results,
// `volatileText` holds the current interim hypothesis.
let confirmedText = "";
let volatileText = "";
39 |
40 | const handleStartSTT = () => {
41 | recordingStatus = "recording";
42 | if (recordButton) recordButton.innerHTML = "Stop";
43 | const replyInput = document.getElementById("reply-input");
44 |
45 | if (replyInput) {
46 | replyInput.value = "";
47 | }
48 | };
49 |
50 | const handleStopSTT = () => {
51 | recordingStatus = "off";
52 | if (recordButton) recordButton.innerHTML = "Record";
53 | };
54 |
55 | const handleTranscript = (transcript: string) => {
56 | confirmedText = `${confirmedText} ${transcript}`;
57 | volatileText = "";
58 | const replyInput = document.getElementById("reply-input");
59 | if (replyInput) {
60 | replyInput.value = confirmedText;
61 | }
62 | };
63 |
64 | const handleInterimTranscript = (interimTranscript: string) => {
65 | volatileText = interimTranscript;
66 | const replyInput = document.getElementById("reply-input");
67 | if (replyInput) {
68 | replyInput.value = `${confirmedText} ${volatileText}`;
69 | }
70 | };
71 |
// Setup the audio manager.
const audioManager = new AudioManager({
  duckVolumeLevel: 0.1,
  sttService: "charisma/deepgram",
  streamTimeslice: 100,
  handleTranscript,
  handleInterimTranscript,
  handleStartSTT,
  handleStopSTT,
  handleDisconnect: (message: string) =>
    appendMessage(message, "disconnected-message"),
  handleConnect: (message: string) =>
    appendMessage(message, "connected-message"),
  // Prefix debug output with an HH:MM:SS.mmm timestamp for easier tracing.
  debugLogFunction: (message: string) =>
    console.log(
      `${new Date().toISOString().split("T")[1].slice(0, 12)} ${message}`,
    ),
});

// Populated once start() has created a playthrough and joined a conversation.
let playthrough: Playthrough;
let conversation: Conversation;
93 |
// Entry point, triggered by the Start button: reads the form, creates a
// playthrough + conversation, wires message handling, and connects.
window.start = async function start() {
  // In order to play audio, this method must be called by a user interaction.
  // This is due to a security restriction in some browsers.
  audioManager.initialise();

  // Read the connection settings from the form fields.
  const storyIdInput = document.getElementById("story-id") as HTMLInputElement;
  const storyId = Number(storyIdInput.value);
  const storyApiKeyInput = document.getElementById(
    "story-api-key",
  ) as HTMLInputElement;
  const storyApiKey = storyApiKeyInput.value;
  const storyVersionInput = document.getElementById(
    "version",
  ) as HTMLInputElement;
  // `|| undefined` maps an empty (or zero) version field to "most recent
  // published"; -1 still selects the draft version.
  const storyVersion = Number(storyVersionInput.value) || undefined;
  const StartGraphReferenceIdInput = document.getElementById(
    "startGraphReferenceId",
  ) as HTMLInputElement;
  const startGraphReferenceId = StartGraphReferenceIdInput.value;

  // Exchange the API key for a playthrough token, then open a conversation.
  const { token } = await createPlaythroughToken({
    storyId,
    apiKey: storyApiKey,
    version: storyVersion,
  });

  const { conversationUuid } = await createConversation(token);
  playthrough = new Playthrough(token);
  conversation = playthrough.joinConversation(conversationUuid);

  // Ask the server for speech audio as raw buffers (mp3 or wav).
  conversation.setSpeechConfig({
    encoding: ["mp3", "wav"],
    output: "buffer",
  });

  conversation.on("message", (message: Message) => {
    const characterMessage =
      message.type === "character" ? message.message : null;

    // For this demo, we only care about character messages.
    if (!characterMessage) return;

    // Put the character message on the page.
    appendMessage(
      characterMessage.text,
      "character-message",
      characterMessage.character?.name,
    );

    // Play character speech.
    if (characterMessage.speech) {
      audioManager.playCharacterSpeech(
        characterMessage.speech.audio as ArrayBuffer,
        {
          // One track per character so a character never talks over themselves.
          trackId: String(characterMessage.character?.id),
          interrupt: "track",
        },
      );
    }

    if (characterMessage.media) {
      if (characterMessage.media.stopAllAudio) {
        audioManager.mediaAudioStopAll();
      }

      // Play media audio if it exists in the node.
      audioManager.mediaAudioPlay(characterMessage.media.audioTracks);
    }
  });

  conversation.on("problem", console.warn);

  // Listen for the playthrough to connect and start the conversation when it does.
  let started = false;
  playthrough.on("connection-status", (status) => {
    appendMessage(
      status,
      status === "disconnected" ? "disconnected-message" : "connected-message",
    );

    if (status === "connected" && !started) {
      const conversationParameters = startGraphReferenceId
        ? { startGraphReferenceId }
        : undefined;
      conversation.start(conversationParameters);
      started = true;
    }
  });

  // Connect, then hand the player session to the audio manager so speech
  // recognition can stream over the same playthrough.
  const { playerSessionId } = await playthrough.connect();
  audioManager.connect(token, playerSessionId);
};
186 |
187 | const reply = () => {
188 | if (!playthrough || !conversation) return;
189 |
190 | // Stop listening when you send a message.
191 | audioManager.stopListening();
192 |
193 | const replyInput = document.getElementById("reply-input");
194 | const text = replyInput.value;
195 |
196 | if (text.trim() === "") return;
197 |
198 | conversation.reply({ text });
199 |
200 | // Put player message on the page.
201 | appendMessage(text, "player-message", "You");
202 | replyInput.value = "";
203 | };
204 |
205 | // Handle the Enter key press.
206 | window.onKeyPress = function onKeyPress(event) {
207 | if (!event || !event.currentTarget) return;
208 | if (event.key === "Enter") {
209 | reply();
210 | }
211 | };
212 |
213 | window.reply = reply;
214 |
215 | // Toggling the microphone will request the stt service to connect.
216 | window.toggleMicrophone = () => {
217 | if (!recordButton) return;
218 |
219 | if (recordingStatus === "off") {
220 | audioManager.startListening();
221 | confirmedText = "";
222 | volatileText = "";
223 | recordingStatus = "starting";
224 | recordButton.innerHTML = "...";
225 | } else if (recordingStatus === "recording") {
226 | audioManager.stopListening();
227 | recordingStatus = "off";
228 | recordButton.innerHTML = "Record";
229 | }
230 | };
231 |
// UI hooks: mute/unmute and volume controls for background (media) audio
// and character speech, called from controls in the HTML.
window.setBackgroundAudio = (audioOn: boolean) => {
  audioManager.mediaAudioIsMuted = !audioOn;
};

window.setBackgroundAudioVolume = (volume: number) => {
  audioManager.mediaAudioVolume = volume;
};

window.setCharacterAudio = (audioOn: boolean) => {
  audioManager.characterSpeechIsMuted = !audioOn;
};

window.setCharacterAudioVolume = (volume: number) => {
  audioManager.characterSpeechVolume = volume;
};
247 |
--------------------------------------------------------------------------------
/src/AudioOutputsService.ts:
--------------------------------------------------------------------------------
1 | import { EventEmitter } from "eventemitter3";
2 |
3 | interface Constructable {
4 | new (): T;
5 | }
6 |
7 | interface WindowWithAudioContext extends Window {
8 | AudioContext?: Constructable;
9 | webkitAudioContext?: Constructable;
10 | }
11 |
12 | declare const window: WindowWithAudioContext;
13 |
// Events emitted by AudioOutputsService: "start" when playback begins from
// silence, "stop" when the last playing source finishes.
type AudioOutputsServiceEvents = {
  start: [];
  stop: [];
};

export type AudioOutputsServicePlayOptions = {
  /**
   * Whether to interrupt the same track as the `trackId` passed (`track`), all currently playing audio (`all`), or not to interrupt anything (`none`). Default is `none`.
   */
  interrupt?: "track" | "all" | "none";
  /**
   * If you want to prevent a particular character to speak over themselves, a `trackId` can be set to a unique string. When playing another speech clip, if the same `trackId` is passed and `interrupt` is set to `true`, then the previous clip will stop playing. Default is unset.
   */
  trackId?: string;
};

// A playing buffer source plus the optional track it belongs to.
type AudioOutputsServiceSource = {
  sourceNode: AudioBufferSourceNode;
  trackId?: string;
};
34 |
35 | class AudioOutputsService extends EventEmitter {
36 | private audioContext: AudioContext | undefined;
37 |
38 | private muteForMicrophoneGainNode: GainNode | null = null;
39 |
40 | private muteForClientGainNode: GainNode | null = null;
41 |
42 | private volumeGainNode: GainNode | null = null;
43 |
44 | private analyserNode: AnalyserNode | null = null;
45 |
46 | private clientSetVolume = 1;
47 |
48 | private clientSetMuted: boolean;
49 |
50 | private currentSources: AudioOutputsServiceSource[] = [];
51 |
52 | private debugLogFunction: (message: string) => void;
53 |
54 | constructor(
55 | debugLogFunction: (message: string) => void,
56 | muteCharacterAudio: boolean,
57 | ) {
58 | super();
59 | this.debugLogFunction = debugLogFunction;
60 | this.clientSetMuted = muteCharacterAudio;
61 | }
62 |
63 | public getAudioContext = (): AudioContext => {
64 | this.debugLogFunction("AudioOutputsService getAudioContext");
65 | if (this.audioContext) {
66 | return this.audioContext;
67 | }
68 |
69 | const AudioContextClass = window.AudioContext || window.webkitAudioContext;
70 |
71 | if (!AudioContextClass) {
72 | throw new Error("AudioContext isn't supported in this browser.");
73 | }
74 |
75 | this.audioContext = new AudioContextClass();
76 |
77 | // Create and store the gain nodes.
78 | this.muteForMicrophoneGainNode = this.audioContext.createGain();
79 | this.muteForClientGainNode = this.audioContext.createGain();
80 | this.volumeGainNode = this.audioContext.createGain();
81 | this.analyserNode = this.audioContext.createAnalyser();
82 |
83 | this.muteForMicrophoneGainNode.gain.setValueAtTime(
84 | 1,
85 | this.audioContext.currentTime,
86 | );
87 | this.muteForClientGainNode.gain.setValueAtTime(
88 | this.clientSetMuted ? 0 : 1,
89 | this.audioContext.currentTime,
90 | );
91 | this.volumeGainNode.gain.setValueAtTime(
92 | this.normalVolume,
93 | this.audioContext.currentTime,
94 | );
95 |
96 | this.volumeGainNode
97 | .connect(this.muteForClientGainNode)
98 | .connect(this.muteForMicrophoneGainNode)
99 | .connect(this.analyserNode)
100 | .connect(this.audioContext.destination);
101 |
102 | return this.audioContext;
103 | };
104 |
105 | public play = async (
106 | audio: ArrayBuffer,
107 | options: boolean | AudioOutputsServicePlayOptions = {},
108 | ): Promise => {
109 | this.debugLogFunction("AudioOutputsService play");
110 |
111 | // Backwards-compatible with the old boolean `interrupt` parameter
112 | if (typeof options === "boolean") {
113 | console.warn(
114 | "Passing a boolean as the second parameter to `speaker.play()` is deprecated, and should be updated to use an `options` object.",
115 | );
116 | // eslint-disable-next-line no-param-reassign
117 | options = { interrupt: options ? "all" : "none" };
118 | }
119 |
120 | const { interrupt = "none", trackId } = options;
121 |
122 | const audioContext = this.getAudioContext();
123 |
124 | if (!this.volumeGainNode) {
125 | throw new Error("volumeGainNode is not initialized.");
126 | }
127 |
128 | const source = audioContext.createBufferSource();
129 | source.connect(this.volumeGainNode);
130 | source.buffer = await new Promise((resolve, reject): void => {
131 | audioContext.decodeAudioData(audio, resolve, reject);
132 | });
133 |
134 | return new Promise((resolve): void => {
135 | source.onended = (): void => {
136 | resolve();
137 | this.currentSources = this.currentSources.filter(
138 | (currentSource) => currentSource.sourceNode !== source,
139 | );
140 | if (this.currentSources.length === 0) {
141 | this.emit("stop");
142 | }
143 | };
144 | if (this.currentSources.length > 0 && interrupt !== "none") {
145 | this.currentSources.forEach((currentSource) => {
146 | if (
147 | interrupt === "all" ||
148 | (interrupt === "track" && currentSource.trackId === trackId)
149 | ) {
150 | currentSource.sourceNode.stop();
151 | }
152 | });
153 | }
154 | if (this.currentSources.length === 0) {
155 | this.emit("start");
156 | }
157 | this.currentSources.push({ sourceNode: source, trackId });
158 | source.start();
159 | });
160 | };
161 |
162 | public get normalVolume(): number {
163 | return this.clientSetVolume;
164 | }
165 |
166 | public set normalVolume(volume: number) {
167 | const clampedVolume = Math.max(0, Math.min(1, volume));
168 |
169 | this.clientSetVolume = clampedVolume;
170 |
171 | if (!this.volumeGainNode || !this.audioContext) {
172 | return;
173 | }
174 |
175 | // smooth ramp to new value
176 | this.volumeGainNode.gain.setValueAtTime(
177 | this.volumeGainNode.gain.value,
178 | this.audioContext.currentTime,
179 | );
180 | this.volumeGainNode.gain.linearRampToValueAtTime(
181 | clampedVolume,
182 | this.audioContext.currentTime + 0.1,
183 | );
184 | }
185 |
186 | public get isMutedByClient(): boolean {
187 | return this.clientSetMuted;
188 | }
189 |
190 | public set isMutedByClient(value: boolean) {
191 | this.debugLogFunction(`AudioOutputsService setIsMutedByClient ${value}`);
192 |
193 | this.clientSetMuted = value;
194 |
195 | if (!this.muteForClientGainNode || !this.audioContext) {
196 | return;
197 | }
198 |
199 | // smooth ramp to new value
200 | this.muteForClientGainNode.gain.setValueAtTime(
201 | this.muteForClientGainNode.gain.value,
202 | this.audioContext.currentTime,
203 | );
204 | this.muteForClientGainNode.gain.linearRampToValueAtTime(
205 | value ? 0 : 1,
206 | this.audioContext.currentTime + 0.1,
207 | );
208 | }
209 |
210 | public beginMutingForMicrophone = (): void => {
211 | this.debugLogFunction(`AudioOutputsService beginMuting`);
212 | if (!this.muteForMicrophoneGainNode || !this.audioContext) return;
213 |
214 | // Fade out quickly
215 | this.muteForMicrophoneGainNode.gain.setValueAtTime(
216 | this.muteForMicrophoneGainNode.gain.value,
217 | this.audioContext.currentTime,
218 | );
219 | this.muteForMicrophoneGainNode.gain.linearRampToValueAtTime(
220 | 0,
221 | this.audioContext.currentTime + 0.05,
222 | );
223 | };
224 |
225 | public endMutingForMicrophone = (): void => {
226 | this.debugLogFunction(`AudioOutputsService endMuting`);
227 | if (!this.muteForMicrophoneGainNode || !this.audioContext) return;
228 |
229 | // Fade in very quickly
230 | this.muteForMicrophoneGainNode.gain.setValueAtTime(
231 | this.muteForMicrophoneGainNode.gain.value,
232 | this.audioContext.currentTime,
233 | );
234 | this.muteForMicrophoneGainNode.gain.linearRampToValueAtTime(
235 | 1,
236 | this.audioContext.currentTime + 0.01,
237 | );
238 | };
239 |
240 | public getAnalyserNode = (): AnalyserNode | null => {
241 | this.debugLogFunction("AudioOutputsService getAnalyserNode");
242 | return this.analyserNode || null;
243 | };
244 | }
245 |
246 | export default AudioOutputsService;
247 |
--------------------------------------------------------------------------------
/src/AudioManager.test.ts:
--------------------------------------------------------------------------------
1 | /* eslint-disable dot-notation */
2 | import MockAudioInputsService from "./__mocks__/MockAudioInputsService";
3 | import MockAudioInputsBrowser from "./__mocks__/MockAudioInputsBrowser";
4 |
5 | import AudioManager, { AudioManagerOptions } from "./AudioManager";
6 |
// Replace the real audio-input services with mocks so AudioManager can be
// constructed in tests without touching browser microphone/STT APIs.
jest.mock("./AudioInputsService", () => ({
  __esModule: true,
  default: MockAudioInputsService,
}));

jest.mock("./AudioInputsBrowser", () => ({
  __esModule: true,
  default: MockAudioInputsBrowser,
}));
17 | describe("AudioManager", () => {
18 | afterEach(() => {
19 | jest.clearAllMocks();
20 | });
21 |
22 | test("should initialise with default options", () => {
23 | const defaultOptions: AudioManagerOptions = {};
24 | const audioManager = new AudioManager(defaultOptions);
25 |
26 | expect(audioManager["duckVolumeLevel"]).toBe(0);
27 | expect(audioManager["sttService"]).toBe("charisma/deepgram");
28 | });
29 |
30 | test("should initialise with provided options", () => {
31 | const mockOptions: AudioManagerOptions = {
32 | duckVolumeLevel: 0.2,
33 | sttService: "browser",
34 | };
35 |
36 | const audioManager = new AudioManager(mockOptions);
37 |
38 | expect(audioManager["duckVolumeLevel"]).toBe(0.2);
39 | expect(audioManager["sttService"]).toBe("browser");
40 | });
41 |
42 | test("microphone methods should call on audioInputsBrowser when browser is used", () => {
43 | const mockOptions: AudioManagerOptions = {
44 | sttService: "browser",
45 | };
46 |
47 | const mockAudioInputsBrowserInstance = new MockAudioInputsBrowser();
48 | const mockAudioInputsServiceInstance = new MockAudioInputsService();
49 | const audioManager = new AudioManager(mockOptions);
50 |
51 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
52 | (audioManager as any).audioInputsBrowser = mockAudioInputsBrowserInstance;
53 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
54 | (audioManager as any).audioInputsService = mockAudioInputsServiceInstance;
55 |
56 | audioManager.startListening();
57 | audioManager.stopListening();
58 | audioManager.resetTimeout(100);
59 |
60 | expect(mockAudioInputsBrowserInstance.startListening).toHaveBeenCalled();
61 | expect(mockAudioInputsBrowserInstance.stopListening).toHaveBeenCalled();
62 | expect(mockAudioInputsBrowserInstance.resetTimeout).toHaveBeenCalledWith(
63 | 100,
64 | );
65 |
66 | expect(
67 | mockAudioInputsServiceInstance.startListening,
68 | ).not.toHaveBeenCalled();
69 | expect(mockAudioInputsServiceInstance.stopListening).not.toHaveBeenCalled();
70 | expect(mockAudioInputsServiceInstance.resetTimeout).not.toHaveBeenCalled();
71 | });
72 |
73 | test("startListening should call startListening on audioInputsService", () => {
74 | const mockOptions: AudioManagerOptions = {
75 | sttService: "charisma/deepgram",
76 | };
77 |
78 | const mockAudioInputsBrowserInstance = new MockAudioInputsBrowser();
79 | const mockAudioInputsServiceInstance = new MockAudioInputsService();
80 | const audioManager = new AudioManager(mockOptions);
81 |
82 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
83 | (audioManager as any).audioInputsBrowser = mockAudioInputsBrowserInstance;
84 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
85 | (audioManager as any).audioInputsService = mockAudioInputsServiceInstance;
86 |
87 | audioManager.startListening();
88 |
89 | expect(mockAudioInputsServiceInstance.startListening).toHaveBeenCalled();
90 |
91 | expect(
92 | mockAudioInputsBrowserInstance.startListening,
93 | ).not.toHaveBeenCalled();
94 | });
95 |
96 | test("stopListening should call stopListening on audioInputsService", () => {
97 | const mockOptions: AudioManagerOptions = {
98 | sttService: "charisma/deepgram",
99 | };
100 |
101 | const mockAudioInputsBrowserInstance = new MockAudioInputsBrowser();
102 | const mockAudioInputsServiceInstance = new MockAudioInputsService();
103 | const audioManager = new AudioManager(mockOptions);
104 |
105 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
106 | (audioManager as any).audioInputsBrowser = mockAudioInputsBrowserInstance;
107 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
108 | (audioManager as any).audioInputsService = mockAudioInputsServiceInstance;
109 |
110 | audioManager.stopListening();
111 |
112 | expect(mockAudioInputsServiceInstance.stopListening).toHaveBeenCalled();
113 |
114 | expect(mockAudioInputsBrowserInstance.stopListening).not.toHaveBeenCalled();
115 | });
116 |
117 | test("resetTimeout should call connect on audioInputsService", () => {
118 | const mockOptions: AudioManagerOptions = {
119 | sttService: "charisma/deepgram",
120 | };
121 |
122 | const mockAudioInputsBrowserInstance = new MockAudioInputsBrowser();
123 | const mockAudioInputsServiceInstance = new MockAudioInputsService();
124 | const audioManager = new AudioManager(mockOptions);
125 |
126 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
127 | (audioManager as any).audioInputsBrowser = mockAudioInputsBrowserInstance;
128 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
129 | (audioManager as any).audioInputsService = mockAudioInputsServiceInstance;
130 |
131 | audioManager.resetTimeout(100);
132 |
133 | expect(mockAudioInputsServiceInstance.resetTimeout).toHaveBeenCalledWith(
134 | 100,
135 | );
136 |
137 | expect(mockAudioInputsBrowserInstance.resetTimeout).not.toHaveBeenCalled();
138 | });
139 |
140 | test("startListening should call startListening on audioInputsBrowser", () => {
141 | const mockOptions: AudioManagerOptions = {
142 | sttService: "browser",
143 | };
144 |
145 | const mockAudioInputsBrowserInstance = new MockAudioInputsBrowser();
146 | const mockAudioInputsServiceInstance = new MockAudioInputsService();
147 | const audioManager = new AudioManager(mockOptions);
148 |
149 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
150 | (audioManager as any).audioInputsBrowser = mockAudioInputsBrowserInstance;
151 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
152 | (audioManager as any).audioInputsService = mockAudioInputsServiceInstance;
153 |
154 | audioManager.startListening();
155 |
156 | expect(mockAudioInputsBrowserInstance.startListening).toHaveBeenCalled();
157 |
158 | expect(
159 | mockAudioInputsServiceInstance.startListening,
160 | ).not.toHaveBeenCalled();
161 | });
162 |
163 | test("stopListening should call stopListening on audioInputsBrowser", () => {
164 | const mockOptions: AudioManagerOptions = {
165 | sttService: "browser",
166 | };
167 |
168 | const mockAudioInputsBrowserInstance = new MockAudioInputsBrowser();
169 | const mockAudioInputsServiceInstance = new MockAudioInputsService();
170 | const audioManager = new AudioManager(mockOptions);
171 |
172 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
173 | (audioManager as any).audioInputsBrowser = mockAudioInputsBrowserInstance;
174 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
175 | (audioManager as any).audioInputsService = mockAudioInputsServiceInstance;
176 |
177 | audioManager.stopListening();
178 |
179 | expect(mockAudioInputsBrowserInstance.stopListening).toHaveBeenCalled();
180 |
181 | expect(mockAudioInputsServiceInstance.stopListening).not.toHaveBeenCalled();
182 | });
183 |
184 | test("resetTimeout should call connect on audioInputsBRowser", () => {
185 | const mockOptions: AudioManagerOptions = {
186 | sttService: "browser",
187 | };
188 |
189 | const mockAudioInputsBrowserInstance = new MockAudioInputsBrowser();
190 | const mockAudioInputsServiceInstance = new MockAudioInputsService();
191 | const audioManager = new AudioManager(mockOptions);
192 |
193 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
194 | (audioManager as any).audioInputsBrowser = mockAudioInputsBrowserInstance;
195 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
196 | (audioManager as any).audioInputsService = mockAudioInputsServiceInstance;
197 |
198 | audioManager.resetTimeout(100);
199 |
200 | expect(mockAudioInputsBrowserInstance.resetTimeout).toHaveBeenCalledWith(
201 | 100,
202 | );
203 |
204 | expect(mockAudioInputsServiceInstance.resetTimeout).not.toHaveBeenCalled();
205 | });
206 |
207 | test("connect should call AudioInputsService.connect with the correct token", () => {
208 | const mockAudioInputsServiceInstance = new MockAudioInputsService();
209 | const audioManager = new AudioManager({});
210 |
211 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
212 | (audioManager as any).audioInputsService = mockAudioInputsServiceInstance;
213 |
214 | const token = "test-token";
215 | const playerSessionId = "test-player-session-id";
216 |
217 | audioManager.connect(token, playerSessionId);
218 |
219 | expect(mockAudioInputsServiceInstance.connect).toHaveBeenCalledWith(
220 | token,
221 | playerSessionId,
222 | );
223 | });
224 |
225 | test("browserIsSupported should return the value from AudioInputsBrowser", () => {
226 | const mockOptions: AudioManagerOptions = {
227 | sttService: "browser",
228 | };
229 |
230 | const mockAudioInputsBrowserInstance = new MockAudioInputsBrowser();
231 | const audioManager = new AudioManager(mockOptions);
232 |
233 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
234 | (audioManager as any).audioInputsBrowser = mockAudioInputsBrowserInstance;
235 |
236 | mockAudioInputsBrowserInstance.isSupported = false;
237 |
238 | expect(audioManager.browserIsSupported()).toBe(false);
239 | });
240 | });
241 |
--------------------------------------------------------------------------------
/src/AudioManager.ts:
--------------------------------------------------------------------------------
1 | import AudioTrackManager from "./AudioTrackManager.js";
2 | import AudioInputsService from "./AudioInputsService.js";
3 | import AudioOutputsService, {
4 | AudioOutputsServicePlayOptions,
5 | } from "./AudioOutputsService.js";
6 | import AudioInputsBrowser from "./AudioInputsBrowser.js";
7 | import { AudioTrack } from "./types.js";
8 |
/**
 * Configuration for {@link AudioManager}. All fields are optional; defaults
 * are applied in the AudioManager constructor.
 */
export interface AudioManagerOptions {
  /** Volume media tracks are ducked to while the microphone is on. Defaults to 0. */
  duckVolumeLevel?: number;
  /** Which speech-to-text backend to route listening calls to. Defaults to "charisma/deepgram". */
  sttService?: "browser" | "charisma/deepgram";
  /** Forwarded to AudioInputsService: MediaRecorder chunk interval in ms (default 100). */
  streamTimeslice?: number;
  /** Forwarded to AudioInputsService: total time (ms) allowed for reconnect attempts (default 60s). */
  reconnectAttemptsTimeout?: number;
  /** Forwarded to AudioInputsService: speech-to-text server URL (default "https://stt.charisma.ai"). */
  sttUrl?: string;
  // Forwarded to AudioOutputsService; presumably starts character audio muted — TODO confirm.
  muteCharacterAudio?: boolean;
  /** Called when speech-to-text starts capturing (both backends). */
  handleStartSTT?: () => void;
  /** Called when speech-to-text stops capturing (both backends). */
  handleStopSTT?: () => void;
  /** Called with each final transcript. */
  handleTranscript?: (transcript: string) => void;
  /** Called with each interim (non-final) transcript. */
  handleInterimTranscript?: (transcript: string) => void;
  /** Called with error messages from either input backend. */
  handleError?: (error: string) => void;
  /** Called when the STT socket disconnects. */
  handleDisconnect?: (message: string) => void;
  /** Called when the STT socket connects. */
  handleConnect?: (message: string) => void;
  /** Sink for verbose debug logging; defaults to a no-op. */
  debugLogFunction?: (message: string) => void;
}
25 |
/**
 * Facade that wires together the four audio subsystems — socket-based
 * speech-to-text input, browser speech recognition, character speech output,
 * and media tracks — and coordinates microphone muting/ducking between them.
 */
class AudioManager {
  // Socket-based ("charisma/deepgram") speech-to-text backend.
  private audioInputsService: AudioInputsService;

  // Browser SpeechRecognition-based speech-to-text backend.
  private audioInputsBrowser: AudioInputsBrowser;

  // Plays character speech audio.
  private audioOutputsService: AudioOutputsService;

  // Plays background/media audio tracks.
  private audioTrackManager: AudioTrackManager;

  // Volume media tracks are ducked to while the microphone is on.
  private duckVolumeLevel: number;

  // Which backend startListening/stopListening/resetTimeout route to.
  private sttService: "browser" | "charisma/deepgram";

  // Tracked so output "start"/"stop" events can re-apply mic muting.
  private microphoneIsOn = false;

  private debugLogFunction: (message: string) => void;

  constructor(options: AudioManagerOptions) {
    // eslint-disable-next-line @typescript-eslint/no-empty-function
    this.debugLogFunction = options.debugLogFunction || (() => {});
    this.debugLogFunction("AudioManager running constructor");
    this.duckVolumeLevel = options.duckVolumeLevel ?? 0;
    this.sttService = options.sttService ?? "charisma/deepgram";

    this.audioInputsService = new AudioInputsService(
      options.streamTimeslice,
      options.reconnectAttemptsTimeout,
      options.sttUrl,
      this.debugLogFunction,
    );
    this.audioInputsBrowser = new AudioInputsBrowser();
    const muteCharacterAudio = !!options.muteCharacterAudio;
    this.audioOutputsService = new AudioOutputsService(
      this.debugLogFunction,
      muteCharacterAudio,
    );
    this.audioTrackManager = new AudioTrackManager();

    // Listen to events from the AudioInputsService
    this.audioInputsService.on(
      "start",
      options.handleStartSTT ??
        (() => console.error("handleStartSTT() is not setup")),
    );
    this.audioInputsService.on(
      "stop",
      options.handleStopSTT ??
        (() => console.error("handleStopSTT() is not setup")),
    );
    this.audioInputsService.on(
      "transcript",
      options.handleTranscript ??
        (() => console.error("handleTranscript() is not setup.")),
    );
    this.audioInputsService.on(
      "transcript-interim",
      options.handleInterimTranscript ??
        (() => console.log("handleInterimTranscript() is not setup.")),
    );
    this.audioInputsService.on("error", options.handleError ?? console.error);
    this.audioInputsService.on(
      "disconnect",
      options.handleDisconnect ?? console.error,
    );
    this.audioInputsService.on("connect", options.handleConnect ?? console.log);

    // Listen to events from the AudioInputsBrowser
    this.audioInputsBrowser.on(
      "start",
      options.handleStartSTT ??
        (() => console.error("handleStartSTT() is not setup")),
    );
    this.audioInputsBrowser.on(
      "stop",
      options.handleStopSTT ??
        (() => console.error("handleStopSTT() is not setup")),
    );
    this.audioInputsBrowser.on(
      "transcript",
      options.handleTranscript ??
        (() => console.error("handleTranscript() is not setup")),
    );
    this.audioInputsBrowser.on(
      "transcript-interim",
      options.handleInterimTranscript ??
        (() => console.log("handleInterimTranscript() is not setup")),
    );
    this.audioInputsBrowser.on("error", options.handleError ?? console.error);

    // Listen to events from the AudioOutputsService.
    // NOTE(review): the "start" and "stop" handlers below are identical —
    // both simply re-apply muting based on microphoneIsOn. Possibly intended,
    // but could share one handler; confirm before changing.
    this.audioOutputsService.on("start", () => {
      if (this.microphoneIsOn) {
        this.audioOutputsService.beginMutingForMicrophone();
      } else {
        this.audioOutputsService.endMutingForMicrophone();
      }
    });
    this.audioOutputsService.on("stop", () => {
      if (this.microphoneIsOn) {
        this.audioOutputsService.beginMutingForMicrophone();
      } else {
        this.audioOutputsService.endMutingForMicrophone();
      }
    });
    this.debugLogFunction("AudioManager finished constructor");
  }

  // **
  // ** Audio Input ** //
  // **

  /**
   * Starts speech capture on the configured STT backend, mutes character
   * speech for the microphone, and ducks any playing media tracks.
   */
  public startListening = (timeout?: number): void => {
    this.debugLogFunction("AudioManager startListening");
    if (this.sttService === "browser") {
      this.audioInputsBrowser.startListening(timeout);
    } else if (this.sttService === "charisma/deepgram") {
      this.audioInputsService.startListening(timeout);
    }

    this.microphoneIsOn = true;
    this.audioOutputsService.beginMutingForMicrophone();

    if (this.audioTrackManager.isPlaying) {
      this.audioTrackManager.duckTo(this.duckVolumeLevel);
    }
  };

  /**
   * Stops speech capture, unmutes character speech, and restores media
   * track volume.
   */
  public stopListening = (): void => {
    this.debugLogFunction("AudioManager stopListening");
    if (this.sttService === "browser") {
      this.audioInputsBrowser.stopListening();
    } else if (this.sttService === "charisma/deepgram") {
      this.audioInputsService.stopListening();
    }

    this.microphoneIsOn = false;

    this.audioOutputsService.endMutingForMicrophone();

    if (this.audioTrackManager.isPlaying) {
      this.audioTrackManager.duckOff();
    }
  };

  /** Connects the socket STT backend. No-op for the browser backend. */
  public connect = (token: string, playerSessionId: string): void => {
    this.debugLogFunction("AudioManager connect");
    if (this.sttService === "charisma/deepgram") {
      this.audioInputsService.connect(token, playerSessionId);
    }
  };

  /** Disconnects the socket STT backend. No-op for the browser backend. */
  public disconnect = (): void => {
    this.debugLogFunction("AudioManager disconnect");
    if (this.sttService === "charisma/deepgram") {
      this.audioInputsService.disconnect();
    }
  };

  /** Restarts the listening timeout on whichever STT backend is configured. */
  public resetTimeout = (timeout: number): void => {
    this.debugLogFunction("AudioManager resetTimeout");
    if (this.sttService === "charisma/deepgram") {
      this.audioInputsService.resetTimeout(timeout);
    } else {
      this.audioInputsBrowser.resetTimeout(timeout);
    }
  };

  // **
  // ** Browser STT Service ** //
  // **

  /** True if the browser's speech recognition backend is available. */
  public browserIsSupported = (): boolean => {
    this.debugLogFunction("AudioManager browserIsSupported");
    return this.audioInputsBrowser.isSupported;
  };

  // **
  // ** Initialise Audio
  // **

  /**
   * Arranges for the output and track AudioContexts to be resumed on the
   * first user gesture (pointerdown/keydown), as required by browser
   * autoplay policies.
   */
  public initialise = (): void => {
    this.debugLogFunction("AudioManager initialise");
    const outputContext = this.audioOutputsService.getAudioContext();
    const trackContext = this.audioTrackManager.getAudioContext();
    const resumeAudio = () => {
      outputContext.resume();
      trackContext.resume();
    };
    document.addEventListener("pointerdown", resumeAudio, { once: true });
    document.addEventListener("keydown", resumeAudio, { once: true });
  };

  // **
  // ** Audio Outputs Service ** //
  // **

  // Plays a buffer of character speech via AudioOutputsService.
  // NOTE(review): the return type reads bare `Promise` with no type argument
  // (likely `Promise<void>` lost in extraction) — confirm against VCS.
  public playCharacterSpeech = (
    audio: ArrayBuffer,
    options: boolean | AudioOutputsServicePlayOptions,
  ): Promise => {
    this.debugLogFunction("AudioManager playCharacterSpeech");
    return this.audioOutputsService.play(audio, options);
  };

  /** Character speech volume when not muted/ducked. */
  public get characterSpeechVolume(): number {
    return this.audioOutputsService.normalVolume;
  }

  public set characterSpeechVolume(volume: number) {
    this.audioOutputsService.normalVolume = volume;
  }

  /** Client-requested mute state for character speech. */
  public get characterSpeechIsMuted(): boolean {
    return this.audioOutputsService.isMutedByClient;
  }

  public set characterSpeechIsMuted(value: boolean) {
    this.debugLogFunction(`AudioManager characterSpeechIsMuted set ${value}`);
    this.audioOutputsService.isMutedByClient = value;
  }

  /** Analyser node for character speech (e.g. for visualisations), or null. */
  public getCharacterSpeechAnalyserNode = (): AnalyserNode | null => {
    this.debugLogFunction("AudioManager getCharacterSpeechAnalyserNode");
    return this.audioOutputsService.getAnalyserNode();
  };

  // **
  // ** Audio Track Manager ** //
  // **

  /** Starts playback of the given media audio tracks. */
  public mediaAudioPlay = (audioTracks: AudioTrack[]): void => {
    this.debugLogFunction("AudioManager mediaAudioPlay");
    this.audioTrackManager.play(audioTracks);
  };

  /** Media track volume when not muted/ducked. */
  public get mediaAudioVolume(): number {
    return this.audioTrackManager.normalVolume;
  }

  public set mediaAudioVolume(volume: number) {
    this.audioTrackManager.normalVolume = volume;
  }

  /** Client-requested mute state for media tracks. */
  public get mediaAudioIsMuted(): boolean {
    return this.audioTrackManager.isMutedByClient;
  }

  public set mediaAudioIsMuted(value: boolean) {
    this.debugLogFunction(`AudioManager mediaAudioIsMuted set ${value}`);
    this.audioTrackManager.isMutedByClient = value;
  }

  /** Stops all playing media tracks. */
  public mediaAudioStopAll = (): void => {
    this.debugLogFunction("AudioManager mediaAudioStopAll");
    this.audioTrackManager.stopAll();
  };
}
278 |
279 | export default AudioManager;
280 |
--------------------------------------------------------------------------------
/src/AudioInputsService.ts:
--------------------------------------------------------------------------------
1 | import { EventEmitter } from "eventemitter3";
2 | import { io, type Socket } from "socket.io-client";
3 | import type { SpeechRecognitionEvent } from "./speech-types.js";
4 |
// Event map for AudioInputsService: each key is an event name and each value
// the listener argument tuple (eventemitter3 style).
type AudioInputsServiceEvents = {
  result: [SpeechRecognitionEvent];
  transcript: [string];
  "transcript-interim": [string];
  error: [string];
  timeout: [];
  start: [];
  stop: [];
  disconnect: [string];
  connect: [string];
};
16 |
// Requests microphone access (with echo cancellation, noise suppression and
// auto gain) and wraps the stream in a MediaRecorder. Rejects if the user
// denies permission (error is handled by the caller).
// NOTE(review): the return type reads bare `Promise` — presumably
// `Promise<MediaRecorder>` lost in extraction; confirm against VCS.
const setupMicrophone = async (): Promise => {
  const userMedia = await navigator.mediaDevices.getUserMedia({
    audio: {
      echoCancellation: true,
      noiseSuppression: true,
      autoGainControl: true,
    },
  });

  const mediaRecorder = new MediaRecorder(userMedia);
  return mediaRecorder;
};
29 |
/**
 * Streams microphone audio over a socket.io connection to the Charisma
 * speech-to-text server and re-emits transcripts/lifecycle events.
 *
 * NOTE(review): `extends EventEmitter` appears to have lost its generic
 * argument (likely `EventEmitter<AudioInputsServiceEvents>`) — confirm
 * against VCS.
 */
class AudioInputsService extends EventEmitter {
  // Pending listening-timeout handle from window.setTimeout.
  private timeoutId?: number;

  // Lazily created by startListening via setupMicrophone().
  private microphone?: MediaRecorder;

  private socket?: Socket;

  // Interval (ms) between MediaRecorder data chunks.
  private streamTimeslice: number;

  // Total time (ms) allowed for reconnect attempts before giving up.
  private reconnectAttemptsTimeout: number;

  // True once the server connection is usable (set ~2s after "connect").
  private ready = false;

  private playthroughToken?: string;

  private playerSessionId?: string;

  private sttUrl: string;

  private debugLogFunction: (message: string) => void;

  constructor(
    streamTimeslice: number | undefined,
    reconnectAttemptsTimeout: number | undefined,
    sttUrl: string | undefined,
    debugLogFunction: (message: string) => void,
  ) {
    super();

    this.debugLogFunction = debugLogFunction;
    this.debugLogFunction("AudioInputsService running constructor");

    this.streamTimeslice = streamTimeslice ?? 100;
    this.reconnectAttemptsTimeout = reconnectAttemptsTimeout ?? 60 * 1000;
    this.sttUrl = sttUrl ?? "https://stt.charisma.ai";
  }

  private isReconnecting = false;

  /**
   * Retries connect() with exponential backoff (base 2s, max 5 attempts),
   * bounded overall by reconnectAttemptsTimeout.
   *
   * NOTE(review): the outer setTimeout below is never cancelled by
   * endReconnect(), so even after a successful reconnect an
   * "Reconnect attempts timed out." error is emitted once the timeout
   * elapses — confirm and consider clearing it on success.
   */
  private attemptReconnect = (): void => {
    this.debugLogFunction("AudioInputsService attemptReconnect");
    if (this.playthroughToken === undefined || this.isReconnecting) return;

    const reconnectIntervalBase = 2000;
    const maxAttempts = 5;

    const reconnectAttempts = 0;
    let shouldTryAgain = true;

    this.isReconnecting = true;

    const endReconnect = () => {
      shouldTryAgain = false;
      this.isReconnecting = false;
    };

    const tryReconnect = (attempt: number) => {
      this.debugLogFunction(
        `AudioInputsService tryReconnect attempt ${attempt}`,
      );
      if (!shouldTryAgain) return;

      if (attempt >= maxAttempts) {
        this.emit("error", "Maximum reconnect attempts reached.");
        endReconnect();
        return;
      }

      this.connect(
        this.playthroughToken as string,
        this.playerSessionId as string,
      )
        .then(() => {
          this.debugLogFunction("Reconnected Successfully");
          console.log("Reconnected successfully!");
          endReconnect();
        })
        .catch(() => {
          // Exponentially back off the next reconnection attempt
          const nextInterval = reconnectIntervalBase * 2 ** attempt;
          console.log(
            `Reconnect attempt failed. Trying again in ${
              nextInterval / 1000
            } seconds...`,
          );

          if (shouldTryAgain) {
            setTimeout(() => tryReconnect(attempt + 1), nextInterval);
          }
        });
    };

    tryReconnect(reconnectAttempts);

    setTimeout(() => {
      this.debugLogFunction("Reconnect attempts timed out");
      this.emit("error", "Reconnect attempts timed out.");
      endReconnect();
    }, this.reconnectAttemptsTimeout);
  };

  /**
   * Opens a socket.io connection to the STT server and wires up transcript
   * and lifecycle handlers. Resolves ~2s after the socket connects (Deepgram
   * needs a short interval before data is sent); rejects on socket "error".
   *
   * NOTE(review): the return type reads bare `Promise` (likely
   * `Promise<void>` lost in extraction) — confirm against VCS.
   */
  public connect = (token: string, playerSessionId: string): Promise => {
    this.debugLogFunction(`AudioInputService connect to ${this.sttUrl}`);

    this.playthroughToken = token;
    this.playerSessionId = playerSessionId;

    return new Promise((resolve, reject) => {
      // NOTE(review): this early resolve() is not followed by a `return`, so
      // when a socket already exists a NEW socket is still created below,
      // replacing (without closing) the old one — looks like a bug; confirm.
      if (this.socket) {
        this.debugLogFunction("Socket already connected");
        console.log("Socket already connected");
        resolve();
      }

      this.socket = io(this.sttUrl, {
        transports: ["websocket"],
        query: {
          token,
          playerSessionId,
        },
        reconnection: false,
      });

      this.socket.on("error", (error: string) => {
        this.debugLogFunction(`AudioInputService error: ${error}`);
        console.error(error);
        this.emit("error", error);
        reject(error);
      });

      this.socket.on("transcript", (transcript: string) => {
        this.debugLogFunction(`AudioInputService transcript: ${transcript}`);
        if (transcript) {
          queueMicrotask(() => this.emit("transcript", transcript));
        }
      });

      this.socket.on("transcript-interim", (transcript: string) => {
        this.debugLogFunction(
          `AudioInputService interim transcript: ${transcript}`,
        );
        if (transcript) {
          queueMicrotask(() => this.emit("transcript-interim", transcript));
        }
      });

      // Attempts to reconnect to the stt server if the connection is lost and we DO have internet.
      this.socket.on("disconnect", (reason) => {
        this.debugLogFunction(`AudioInputService disconnect. ${reason}`);
        console.log("Socket disconnected. Reason:", reason);

        this.emit("disconnect", "Disconnected from speech-to-text server.");
        this.ready = false;

        if (this.socket) {
          this.socket.close();
          this.socket = undefined;
        }

        this.microphone = undefined;

        this.attemptReconnect();
      });

      this.socket.on("connect", () => {
        this.debugLogFunction(
          "AudioInputService connected to speech-to-text service.",
        );
        this.emit("connect", "Connected to speech-to-text service.");

        // Deepgram requires a short interval before data is sent.
        setTimeout(() => {
          this.ready = true;
          resolve();
        }, 2000);
      });
    });
  };

  /** Closes the socket, clears the microphone, and emits "disconnect". */
  public disconnect = () => {
    this.debugLogFunction("AudioInputService disconnect");
    this.ready = false;

    if (this.socket) {
      this.socket.close();
      this.socket = undefined;
    }

    this.microphone = undefined;
    this.debugLogFunction(
      "AudioInputService disconnected from speech-to-text server.",
    );
    this.emit("disconnect", "Disconnected from speech-to-text server.");
  };

  /**
   * Starts streaming microphone audio to the server. No-op until connect()
   * has completed (ready). Sets up the MediaRecorder on first use and arms a
   * listening timeout (default 10s) that stops listening when it fires.
   *
   * NOTE(review): return type reads bare `Promise` (likely `Promise<void>`)
   * — confirm against VCS.
   */
  public startListening = async (timeout = 10000): Promise => {
    this.debugLogFunction("AudioInputService startListening");
    if (!this.ready) {
      this.debugLogFunction("AudioInputService startListening not ready");
      return;
    }

    try {
      if (!this.microphone) {
        this.debugLogFunction(
          "AudioInputService startListening setting up microphone",
        );
        this.microphone = await setupMicrophone();
      }
    } catch (error) {
      this.debugLogFunction(
        "AudioInputService startListening failed to access microphone",
      );
      console.error("Failed to access microphone:", error);
      this.emit("error", "Failed to access microphone");
      return;
    }

    if (this.timeoutId !== undefined) {
      clearTimeout(this.timeoutId);
    }

    if (timeout !== undefined) {
      this.timeoutId = window.setTimeout(this.onTimeout, timeout);
    }

    // Forward each audio chunk to the server as it becomes available.
    this.microphone.ondataavailable = (event) => {
      if (!this.socket || event.data.size === 0) {
        return;
      }
      this.socket.emit("packet-sent", event.data);
    };

    this.microphone.onstart = () => {
      this.emit("start");
    };

    this.microphone.onstop = () => {
      this.emit("stop");
    };

    this.microphone.onpause = () => {
      this.emit("stop");
    };

    this.microphone.onresume = () => {
      this.emit("start");
    };

    this.microphone.addEventListener("error", (error) => {
      this.emit("error", error.toString());
      this.stopListening();
    });

    this.microphone.start(this.streamTimeslice);
  };

  /**
   * Stops the MediaRecorder (emitting "stop" directly if none exists) and
   * tells the server to finish the current transcription.
   */
  public stopListening = (): void => {
    this.debugLogFunction("AudioInputService stopListening");
    if (this.timeoutId !== undefined) {
      clearTimeout(this.timeoutId);
    }

    if (!this.microphone) {
      this.debugLogFunction("AudioInputService stopListening !this.microphone");
      this.emit("stop");
      return;
    }

    this.microphone.stop();

    if (!this.socket) {
      return;
    }
    this.debugLogFunction("end-current-transcription");
    this.socket.emit("end-current-transcription");
  };

  /** Replaces any pending listening timeout with a new one. */
  public resetTimeout = (timeout: number): void => {
    this.debugLogFunction("AudioInputService resetTimeout");
    if (this.timeoutId !== undefined) {
      clearTimeout(this.timeoutId);
    }

    this.timeoutId = window.setTimeout(this.onTimeout, timeout);
  };

  /** Fired when the listening timeout elapses: emits "timeout", then stops. */
  private onTimeout = (): void => {
    this.debugLogFunction("AudioInputService onTimeout");
    this.timeoutId = undefined;
    this.emit("timeout");
    this.stopListening();
  };
}
324 |
325 | export default AudioInputsService;
326 |
--------------------------------------------------------------------------------
/src/api.ts:
--------------------------------------------------------------------------------
1 | import type { Emotion, Impact, JSONValue, Memory } from "./types.js";
2 |
// Serialises a params object into URLSearchParams: strings/numbers are
// appended directly, arrays append one entry per element, and any other
// value type is skipped.
// NOTE(review): the constraint reads `Params extends Record,` — the type
// arguments of Record appear to have been lost in extraction; confirm
// against VCS.
const createSearchParams = <
  Params extends Record,
>(
  params: Params,
) => {
  const query = new URLSearchParams();
  Object.entries(params).forEach(([key, value]) => {
    if (typeof value === "string" || typeof value === "number") {
      query.append(key, value.toString());
    } else if (Array.isArray(value)) {
      value.forEach((valueInner) => query.append(key, valueInner));
    }
  });
  return query;
};
18 |
// Thin wrapper around fetch used by every API helper: defaults to
// `Accept: application/json` (plus `Content-Type: application/json` on POST),
// tolerates empty/non-JSON bodies, and throws an Error built from the
// response's `error` field when the status is not ok.
// NOTE(review): the return type reads bare `Promise` and `headers` reads
// bare `Record` — generic arguments (likely `Promise<T>` /
// `Record<string, string>`) appear lost in extraction; confirm against VCS.
const fetchHelper = async (
  endpoint: string,
  options: RequestInit = {},
): Promise => {
  // Always default to `Accept: application/json`
  let headers: Record = {
    Accept: "application/json",
    ...(options.headers as Record),
  };
  if (
    typeof options.method === "string" &&
    options.method.toLowerCase() === "post"
  ) {
    // If it's a POST method, default to `Content-Type: application/json` for the body
    headers = { "Content-Type": "application/json", ...headers };
  }

  const response = await fetch(endpoint, { mode: "cors", ...options, headers });

  let data: unknown = {};
  try {
    // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
    data = await response.json();
  } catch (err) {
    // Some endpoints just return a status code and no JSON body data.
  }

  if (!response.ok) {
    throw new Error(
      (data as { error?: string }).error ||
        `Something went wrong calling \`${endpoint}\``,
    );
  }

  return data as T;
};
55 |
// Base URL used by every API helper unless overridden per call via
// `apiOptions.baseUrl`.
let globalBaseUrl = "https://play.charisma.ai";

/** Returns the current global API base URL. */
export const getGlobalBaseUrl = (): string => globalBaseUrl;

/** Overrides the global API base URL for all subsequent API calls. */
export const setGlobalBaseUrl = (newBaseUrl: string): void => {
  globalBaseUrl = newBaseUrl;
};
63 |
/** Options shared by all API helpers. */
export type CommonApiOptions = {
  /** Per-call override of the global API base URL. */
  baseUrl?: string;
};
67 |
68 | export type CreatePlaythroughTokenOptions = {
69 | /**
70 | * The `id` of the story that you want to create a new playthrough for. The story must be published, unless a Charisma.ai user token has been passed and the user matches the owner of the story.
71 | */
72 | storyId: number;
73 | /**
74 | * The `version` of the story that you want to create a new playthrough for. If omitted, it will default to the most recent published version. To get the draft version of a story, pass `-1` and an `apiKey`.
75 | */
76 | version?: number;
77 | /**
78 | * It is recommended to use the more secure `apiKey` instead of `userToken`. To access draft, test or unpublished versions of your story, pass a `userToken`.
79 | */
80 | userToken?: string;
81 | /**
82 | * To access draft, test or unpublished versions of your story, pass an `apiKey`. The API key can be found on the story overview page.
83 | */
84 | apiKey?: string;
85 | /**
86 | * To play a story in a language other than English (`en`, the default), pass a BCP-47 `languageCode`. For example, to play in Italian, use `it`.
87 | */
88 | languageCode?: string;
89 | };
90 |
91 | export type CreatePlaythroughTokenResult = {
92 | /**
93 | * The playthrough token, used for connecting to this playthrough. It never expires,
94 | * so can be saved in a secure place for players to continue playing between sessions.
95 | *
96 | * To create a playthrough with the token, use `new Playthrough(token)`.
97 | */
98 | token: string;
99 | /**
100 | * The unique identifier of the playthrough, encoded inside the token. It can be useful
101 | * as a debugging tool.
102 | */
103 | playthroughUuid: string;
104 | };
105 |
106 | export async function createPlaythroughToken(
107 | options: CreatePlaythroughTokenOptions,
108 | apiOptions?: CommonApiOptions,
109 | ): Promise {
110 | if (
111 | options.version === -1 &&
112 | options.userToken === undefined &&
113 | options.apiKey === undefined
114 | ) {
115 | throw new Error(
116 | "To play the draft version (-1) of a story, an `apiKey` or `userToken` must also be passed.",
117 | );
118 | }
119 |
120 | let authHeader: string | undefined;
121 | if (options.apiKey) {
122 | authHeader = `API-Key ${options.apiKey}`;
123 | } else if (options.userToken) {
124 | authHeader = `Bearer ${options.userToken}`;
125 | }
126 |
127 | try {
128 | const result = await fetchHelper<{
129 | token: string;
130 | playthroughUuid: string;
131 | }>(`${apiOptions?.baseUrl || globalBaseUrl}/play/token`, {
132 | body: JSON.stringify({
133 | storyId: options.storyId,
134 | version: options.version,
135 | languageCode: options.languageCode,
136 | }),
137 | headers: authHeader ? { Authorization: authHeader } : undefined,
138 | method: "POST",
139 | });
140 | return result;
141 | } catch (err) {
142 | throw new Error(`A playthrough token could not be generated: ${err}`);
143 | }
144 | }
145 |
146 | export type CreateConversationResult = {
147 | /**
148 | * The unique identifier of the created conversation. Pass this into `playthrough.joinConversation`
149 | * to get a scoped `Conversation` instance.
150 | */
151 | conversationUuid: string;
152 | };
153 |
154 | export async function createConversation(
155 | token: string,
156 | apiOptions?: CommonApiOptions,
157 | ): Promise {
158 | const result = await fetchHelper<{
159 | conversationUuid: string;
160 | }>(`${apiOptions?.baseUrl || globalBaseUrl}/play/conversation`, {
161 | body: JSON.stringify({}),
162 | headers: { Authorization: `Bearer ${token}` },
163 | method: "POST",
164 | });
165 | return result;
166 | }
167 |
168 | export type CreateCharacterConversationResult = {
169 | /**
170 | * The unique identifier of the created conversation. Pass this into `playthrough.joinConversation`
171 | * to get a scoped `Conversation` instance.
172 | */
173 | conversationUuid: string;
174 | };
175 |
176 | export async function createCharacterConversation(
177 | token: string,
178 | characterId: number,
179 | apiOptions?: CommonApiOptions,
180 | ): Promise {
181 | const result = await fetchHelper<{
182 | conversationUuid: string;
183 | }>(`${apiOptions?.baseUrl || globalBaseUrl}/play/conversation/character`, {
184 | body: JSON.stringify({ characterId }),
185 | headers: { Authorization: `Bearer ${token}` },
186 | method: "POST",
187 | });
188 | return result;
189 | }
190 |
191 | export type EventType =
192 | | "start"
193 | | "message_player"
194 | | "message_character"
195 | | "set_mood"
196 | | "resume"
197 | | "set_memory"
198 | | "tap"
199 | | "restart"
200 | | "episode_complete"
201 | | "fork";
202 |
203 | export type Event = {
204 | id: string;
205 | type: EventType;
206 | timestamp: string;
207 | conversationUuid: string | null;
208 | playthroughUuid: string;
209 | payload: unknown;
210 | };
211 |
212 | export type GetEventHistoryOptions = {
213 | conversationUuid?: string;
214 | eventTypes?: EventType[];
215 | minEventId?: string;
216 | maxEventId?: string;
217 | limit: number;
218 | sort?: "asc" | "desc";
219 | };
220 |
221 | export type GetEventHistoryResult = {
222 | events: Event[];
223 | };
224 |
225 | /**
226 | * Gets the events that have happened in the playthrough, such as character and player messages amongst others. The returned events can be filtered by using options.
227 | */
228 | export async function getEventHistory(
229 | token: string,
230 | // eslint-disable-next-line default-param-last
231 | options: GetEventHistoryOptions = { limit: 1000 },
232 | apiOptions?: CommonApiOptions,
233 | ): Promise {
234 | const query = createSearchParams(options);
235 | const result = await fetchHelper(
236 | `${
237 | apiOptions?.baseUrl || globalBaseUrl
238 | }/play/event-history?${query.toString()}`,
239 | {
240 | headers: { Authorization: `Bearer ${token}` },
241 | method: "GET",
242 | },
243 | );
244 | return result;
245 | }
246 |
247 | export type GetPlaythroughInfoResult = {
248 | emotions: Emotion[];
249 | memories: Memory[];
250 | impacts: Impact[];
251 | };
252 |
253 | /**
254 | * Returns current information about the playthrough, such as character emotions and memories.
255 | */
256 | export async function getPlaythroughInfo(
257 | token: string,
258 | apiOptions?: CommonApiOptions,
259 | ): Promise {
260 | const result = await fetchHelper(
261 | `${
262 | apiOptions?.baseUrl || globalBaseUrl
263 | }/play/playthrough-info?use_typed_memories=1`,
264 | {
265 | headers: { Authorization: `Bearer ${token}` },
266 | method: "GET",
267 | },
268 | );
269 | return result;
270 | }
271 |
272 | export type MemoryToSet = { recallValue: string; saveValue: JSONValue | null };
273 |
274 | /**
275 | * Directly sets a memory in Charisma. The promise resolves when the memory has been committed so is guaranteed to be set, but it may take a short amount of time (usually < 1s) for the updated value to propagate to any active playthrough instances.
276 | *
277 | * It is highly recommended to call `setMemory` with an array instead of calling `setMemory` multiple times, to only cause one refetch of the current memory values in the chat engine.
278 | */
279 | export async function setMemory(
280 | token: string,
281 | memoryRecallValue: string,
282 | saveValue: string | null,
283 | apiOptions?: CommonApiOptions,
284 | ): Promise;
285 | export async function setMemory(
286 | token: string,
287 | memoriesToSet: MemoryToSet[],
288 | apiOptions?: CommonApiOptions,
289 | ): Promise;
290 | export async function setMemory(
291 | token: string,
292 | memoryRecallValueOrMemories: string | MemoryToSet[],
293 | saveValueOrApiOptions?: string | null | CommonApiOptions,
294 | apiOptions?: CommonApiOptions,
295 | ): Promise {
296 | let resolvedApiOptions = apiOptions;
297 |
298 | let memories: MemoryToSet[] = [];
299 | if (Array.isArray(memoryRecallValueOrMemories)) {
300 | memories = memoryRecallValueOrMemories;
301 | resolvedApiOptions = saveValueOrApiOptions as CommonApiOptions | undefined;
302 | } else {
303 | memories = [
304 | {
305 | recallValue: memoryRecallValueOrMemories,
306 | saveValue: saveValueOrApiOptions as JSONValue | null,
307 | },
308 | ];
309 | }
310 |
311 | await fetchHelper(
312 | `${resolvedApiOptions?.baseUrl || globalBaseUrl}/play/set-memory`,
313 | {
314 | body: JSON.stringify({
315 | memories,
316 | }),
317 | headers: { Authorization: `Bearer ${token}` },
318 | method: "POST",
319 | },
320 | );
321 | }
322 |
323 | export async function restartFromEpisodeId(
324 | token: string,
325 | episodeId: number,
326 | apiOptions?: CommonApiOptions,
327 | ): Promise {
328 | await fetchHelper(
329 | `${apiOptions?.baseUrl || globalBaseUrl}/play/restart-from-episode`,
330 | {
331 | body: JSON.stringify({ episodeId }),
332 | headers: { Authorization: `Bearer ${token}` },
333 | method: "POST",
334 | },
335 | );
336 | }
337 |
338 | export async function restartFromEpisodeIndex(
339 | token: string,
340 | episodeIndex: number,
341 | apiOptions?: CommonApiOptions,
342 | ): Promise {
343 | await fetchHelper(
344 | `${apiOptions?.baseUrl || globalBaseUrl}/play/restart-from-episode`,
345 | {
346 | body: JSON.stringify({ episodeIndex }),
347 | headers: { Authorization: `Bearer ${token}` },
348 | method: "POST",
349 | },
350 | );
351 | }
352 |
353 | export async function restartFromEventId(
354 | token: string,
355 | eventId: string,
356 | apiOptions?: CommonApiOptions,
357 | ): Promise {
358 | await fetchHelper(
359 | `${apiOptions?.baseUrl || globalBaseUrl}/play/restart-from-event`,
360 | {
361 | body: JSON.stringify({ eventId }),
362 | headers: { Authorization: `Bearer ${token}` },
363 | method: "POST",
364 | },
365 | );
366 | }
367 |
368 | export type ForkPlaythroughTokenResult = {
369 | /**
370 | * The playthrough token, used for connecting to this playthrough. It never expires,
371 | * so can be saved in a secure place for players to continue playing between sessions.
372 | *
373 | * To create a playthrough with the token, use `new Playthrough(token)`.
374 | */
375 | token: string;
376 | /**
377 | * The unique identifier of the playthrough, encoded inside the token. It can be useful
378 | * as a debugging tool.
379 | */
380 | playthroughUuid: string;
381 | };
382 |
383 | /**
384 | * Creates a clone of the playthrough, including memories and emotions, and returns a new playthrough linked to the latest promoted story version.
385 | *
386 | * This is useful when you've published a new story version. Since playthroughs are bound to a particular story version, you need to "fork" the playthrough in order to move a player over to the newly published version.
387 | */
388 | export async function forkPlaythroughToken(
389 | token: string,
390 | apiOptions?: CommonApiOptions,
391 | ): Promise {
392 | const result = await fetchHelper<{
393 | token: string;
394 | playthroughUuid: string;
395 | }>(`${apiOptions?.baseUrl || globalBaseUrl}/play/fork-playthrough`, {
396 | body: JSON.stringify({}),
397 | headers: { Authorization: `Bearer ${token}` },
398 | method: "POST",
399 | });
400 | return result;
401 | }
402 |
403 | export type ResetPlaythroughOptions = {
404 | /**
405 | * The event ID to reset the playthrough to.
406 | */
407 | eventId: string;
408 | };
409 |
410 | /**
411 | * Resets a playthrough's state to a particular event ID. If this playthrough has been forked, the event ID can be from any of this playthrough's ancestors. This resets memories and emotions **ONLY**.
412 | */
413 | export async function resetPlaythrough(
414 | token: string,
415 | options: ResetPlaythroughOptions,
416 | apiOptions?: CommonApiOptions,
417 | ): Promise {
418 | await fetchHelper(
419 | `${apiOptions?.baseUrl || globalBaseUrl}/play/reset-playthrough`,
420 | {
421 | body: JSON.stringify(options),
422 | headers: { Authorization: `Bearer ${token}` },
423 | method: "POST",
424 | },
425 | );
426 | }
427 |
--------------------------------------------------------------------------------
/src/Playthrough.ts:
--------------------------------------------------------------------------------
1 | import { EventEmitter } from "eventemitter3";
2 | import * as Colyseus from "colyseus.js";
3 | import jwtDecode from "jwt-decode";
4 |
5 | import * as api from "./api.js";
6 |
7 | import {
8 | StartTypingEvent,
9 | StopTypingEvent,
10 | MessageEvent,
11 | EpisodeCompleteEvent,
12 | ConfirmActionEvent,
13 | ConfirmReplyEvent,
14 | ConfirmResumeEvent,
15 | ConfirmStartEvent,
16 | ConfirmTapEvent,
17 | ProblemEvent,
18 | JSONValue,
19 | } from "./types.js";
20 | // eslint-disable-next-line import/no-named-as-default
21 | import Conversation, { ConversationOptions } from "./Conversation.js";
22 |
23 | export type ConnectionStatus = "disconnected" | "connecting" | "connected";
24 |
25 | const sdkInfo = {
26 | sdkId: "js",
27 | sdkVersion: "7.0.1",
28 | protocolVersion: 2,
29 | };
30 |
31 | type PlaythroughEvents = {
32 | "connection-status": [ConnectionStatus];
33 | error: [any];
34 | problem: [{ code: string; error: string }];
35 | };
36 |
37 | class Playthrough extends EventEmitter {
38 | private token: string;
39 |
40 | private uuid: string;
41 |
42 | private baseUrl?: string;
43 |
44 | private client: Colyseus.Client | undefined;
45 |
46 | private room: Colyseus.Room | undefined;
47 |
48 | private connectionStatus: ConnectionStatus = "disconnected";
49 |
50 | private shouldReconnect = true;
51 |
52 | private activeConversations = new Map();
53 |
54 | public playerSessionId?: string;
55 |
56 | public constructor(token: string, baseUrl?: string) {
57 | super();
58 |
59 | this.token = token;
60 |
61 | const { playthrough_uuid: playthroughUuid } = jwtDecode<{
62 | // eslint-disable-next-line camelcase
63 | playthrough_uuid: string;
64 | }>(this.token);
65 |
66 | this.uuid = playthroughUuid;
67 |
68 | this.baseUrl = baseUrl;
69 | }
70 |
71 | public async getPlayerSessionId(): Promise {
72 | const DELAY = 100;
73 | const MAX_ATTEMPTS = 100;
74 |
75 | for (let attempts = 0; attempts < MAX_ATTEMPTS; attempts += 1) {
76 | if (this.playerSessionId !== undefined) {
77 | return this.playerSessionId;
78 | }
79 |
80 | // eslint-disable-next-line no-await-in-loop
81 | await new Promise((resolve) => {
82 | setTimeout(resolve, DELAY);
83 | });
84 | }
85 |
86 | throw new Error(
87 | `Could not get player session id after ${MAX_ATTEMPTS} attempts.`,
88 | );
89 | }
90 |
91 | public createConversation(): ReturnType {
92 | return api.createConversation(this.token, { baseUrl: this.baseUrl });
93 | }
94 |
95 | public createCharacterConversation(
96 | characterId: number,
97 | ): ReturnType {
98 | return api.createCharacterConversation(this.token, characterId, {
99 | baseUrl: this.baseUrl,
100 | });
101 | }
102 |
103 | public getEventHistory(
104 | options: api.GetEventHistoryOptions,
105 | ): ReturnType {
106 | return api.getEventHistory(this.token, options, {
107 | baseUrl: this.baseUrl,
108 | });
109 | }
110 |
111 | public getPlaythroughInfo(): ReturnType {
112 | return api.getPlaythroughInfo(this.token, { baseUrl: this.baseUrl });
113 | }
114 |
115 | public setMemory(
116 | recallValue: string,
117 | saveValue: JSONValue | null,
118 | ): ReturnType;
119 |
120 | public setMemory(
121 | memories: api.MemoryToSet[],
122 | ): ReturnType;
123 |
124 | public setMemory(
125 | memoryRecallValueOrMemories: string | api.MemoryToSet[],
126 | saveValue?: JSONValue | null,
127 | ): ReturnType {
128 | let memories: api.MemoryToSet[] = [];
129 | if (Array.isArray(memoryRecallValueOrMemories)) {
130 | memories = memoryRecallValueOrMemories;
131 | } else {
132 | memories = [
133 | {
134 | recallValue: memoryRecallValueOrMemories,
135 | saveValue: saveValue as JSONValue | null,
136 | },
137 | ];
138 | }
139 |
140 | return api.setMemory(this.token, memories, {
141 | baseUrl: this.baseUrl,
142 | });
143 | }
144 |
145 | public restartFromEpisodeId(
146 | episodeId: number,
147 | ): ReturnType {
148 | return api.restartFromEpisodeId(this.token, episodeId, {
149 | baseUrl: this.baseUrl,
150 | });
151 | }
152 |
153 | public restartFromEpisodeIndex(
154 | episodeIndex: number,
155 | ): ReturnType {
156 | return api.restartFromEpisodeIndex(this.token, episodeIndex, {
157 | baseUrl: this.baseUrl,
158 | });
159 | }
160 |
161 | public restartFromEventId(
162 | eventId: string,
163 | ): ReturnType {
164 | return api.restartFromEventId(this.token, eventId, {
165 | baseUrl: this.baseUrl,
166 | });
167 | }
168 |
169 | public joinConversation = (
170 | conversationUuid: string,
171 | options?: ConversationOptions,
172 | ): Conversation => {
173 | const conversation = new Conversation(conversationUuid, this, options);
174 | if (this.activeConversations.has(conversationUuid)) {
175 | return this.activeConversations.get(conversationUuid) as Conversation;
176 | }
177 | this.activeConversations.set(conversationUuid, conversation);
178 | return conversation;
179 | };
180 |
181 | public leaveConversation = (conversationUuid: string): void => {
182 | if (!this.activeConversations.has(conversationUuid)) {
183 | throw new Error(
184 | `The conversation with id \`${conversationUuid}\` has not been joined, so cannot be left.`,
185 | );
186 | }
187 | this.activeConversations.delete(conversationUuid);
188 | };
189 |
190 | public getConversation = (
191 | conversationUuid: string,
192 | ): Conversation | undefined => {
193 | return this.activeConversations.get(conversationUuid);
194 | };
195 |
196 | public addOutgoingEvent = (eventName: string, eventData?: unknown): void => {
197 | if (this.room) {
198 | if (this.connectionStatus === "connected") {
199 | this.room.send(eventName, eventData);
200 | } else {
201 | console.warn(
202 | `Event \`${eventName}\` was not sent as the socket was not ready. Wait for the \`connection-status\` event to be called with \`connected\` before sending events.`,
203 | );
204 | }
205 | } else {
206 | console.log(
207 | `Event \`${eventName}\` was not sent as the socket was not initialised. Call \`playthrough.connect()\` to connect the socket.`,
208 | );
209 | }
210 | };
211 |
212 | public connect = async (): Promise<{ playerSessionId: string }> => {
213 | const baseUrl = this.baseUrl || api.getGlobalBaseUrl();
214 |
215 | if (!this.client) {
216 | this.client = new Colyseus.Client(baseUrl.replace(/^http/, "ws"));
217 | }
218 |
219 | this.room = await this.client.joinOrCreate("chat", {
220 | playthroughId: this.uuid,
221 | token: this.token,
222 | sdkInfo,
223 | });
224 |
225 | this.attachRoomHandlers(this.room);
226 |
227 | this.shouldReconnect = true;
228 |
229 | const playerSessionId = await this.getPlayerSessionId();
230 |
231 | return { playerSessionId };
232 | };
233 |
234 | public pause = (): void => {
235 | this.addOutgoingEvent("pause");
236 | };
237 |
238 | public play = (): void => {
239 | this.addOutgoingEvent("play");
240 | };
241 |
242 | private attachRoomHandlers = (room: Colyseus.Room) => {
243 | room.onMessage("status", this.onConnected);
244 | room.onMessage("problem", this.onProblem);
245 | room.onMessage("start-typing", this.onStartTyping);
246 | room.onMessage("stop-typing", this.onStopTyping);
247 | room.onMessage("message", this.onMessage);
248 | room.onMessage("episode-complete", this.onEpisodeComplete);
249 |
250 | room.onMessage("action", this.onAction);
251 | room.onMessage("reply", this.onReply);
252 | room.onMessage("resume", this.onResume);
253 | room.onMessage("start", this.onStart);
254 | room.onMessage("tap", this.onTap);
255 | room.onMessage("player-session-id", (playerSessionId: string) => {
256 | this.playerSessionId = playerSessionId;
257 | });
258 |
259 | room.onError(this.onError);
260 |
261 | // eslint-disable-next-line @typescript-eslint/no-misused-promises
262 | room.onLeave(async (code) => {
263 | room.removeAllListeners();
264 | this.room = undefined;
265 |
266 | // Normal disconnection codes (i.e. user chose to disconnect explicitly)
267 | if (code === 4000 || !this.shouldReconnect) {
268 | this.onDisconnect();
269 | return;
270 | }
271 |
272 | let roomExpired = false;
273 |
274 | for (let attempts = 0; attempts < 20; attempts += 1) {
275 | if (!roomExpired) {
276 | try {
277 | // Try to reconnect into the same room.
278 | this.onReconnecting();
279 | // eslint-disable-next-line no-await-in-loop
280 | const newRoom = await this.client?.reconnect(
281 | room.id,
282 | room.sessionId,
283 | );
284 | if (newRoom) {
285 | this.attachRoomHandlers(newRoom);
286 | this.room = newRoom;
287 | this.onReconnect();
288 | this.onConnected();
289 | return;
290 | }
291 | } catch (err) {
292 | if (/room ".*" not found/.test((err as Error).message)) {
293 | roomExpired = true;
294 | }
295 | }
296 | }
297 |
298 | // If we could reconnect (network is up), but the exact room no longer exists (it expired), try and create a new room.
299 | if (roomExpired) {
300 | try {
301 | // eslint-disable-next-line no-await-in-loop
302 | const newRoom = await this.client?.joinOrCreate("chat", {
303 | playthroughId: this.uuid,
304 | token: this.token,
305 | sdkInfo,
306 | });
307 | if (newRoom) {
308 | this.attachRoomHandlers(newRoom);
309 | this.room = newRoom;
310 | this.onReconnect();
311 | this.onConnected();
312 | return;
313 | }
314 | } catch (err2) {
315 | console.error(
316 | "Could not reconnect to a Charisma playthrough.",
317 | err2,
318 | );
319 | }
320 | }
321 |
322 | // eslint-disable-next-line no-await-in-loop
323 | await new Promise((resolve) => {
324 | setTimeout(() => resolve(), 5000 + Math.floor(Math.random() * 1000));
325 | });
326 | }
327 |
328 | // We failed to both reconnect into the same room, and a new room, so disconnect.
329 | this.onDisconnect();
330 | });
331 | };
332 |
333 | public disconnect = (): void => {
334 | this.shouldReconnect = false;
335 |
336 | if (this.room) {
337 | this.room.leave();
338 | }
339 | };
340 |
341 | private changeConnectionStatus = (newStatus: ConnectionStatus): void => {
342 | if (newStatus !== this.connectionStatus) {
343 | this.connectionStatus = newStatus;
344 | this.emit("connection-status", newStatus);
345 | }
346 | };
347 |
348 | private onReconnect = (): void => {
349 | this.activeConversations.forEach((conversation) => {
350 | conversation.reconnect().catch((err) => {
351 | console.error(
352 | `Something went wrong reconnecting to conversation:`,
353 | err,
354 | );
355 | });
356 | });
357 | };
358 |
359 | private onReconnecting = (): void => {
360 | this.changeConnectionStatus("connecting");
361 | };
362 |
363 | private onDisconnect = (): void => {
364 | this.changeConnectionStatus("disconnected");
365 | };
366 |
367 | private onConnected = (): void => {
368 | this.changeConnectionStatus("connected");
369 | };
370 |
371 | private onError = (code: number, message?: string): void => {
372 | this.emit("error", { message, code });
373 | };
374 |
375 | private onProblem = (event: ProblemEvent): void => {
376 | this.emit("problem", event);
377 | if (event.conversationUuid) {
378 | const conversation = this.activeConversations.get(event.conversationUuid);
379 | if (conversation) {
380 | conversation.addIncomingEvent("problem", event);
381 | }
382 | }
383 | };
384 |
385 | private onStartTyping = (event: StartTypingEvent): void => {
386 | const conversation = this.activeConversations.get(event.conversationUuid);
387 | if (conversation) {
388 | conversation.addIncomingEvent("start-typing", event);
389 | }
390 | };
391 |
392 | private onStopTyping = (event: StopTypingEvent): void => {
393 | const conversation = this.activeConversations.get(event.conversationUuid);
394 | if (conversation) {
395 | conversation.addIncomingEvent("stop-typing", event);
396 | }
397 | };
398 |
399 | private onMessage = (event: MessageEvent): void => {
400 | const conversation = this.activeConversations.get(event.conversationUuid);
401 | if (conversation) {
402 | conversation.addIncomingEvent("message", event);
403 | }
404 | };
405 |
406 | private onEpisodeComplete = (event: EpisodeCompleteEvent): void => {
407 | const conversation = this.activeConversations.get(event.conversationUuid);
408 | if (conversation) {
409 | conversation.addIncomingEvent("episode-complete", event);
410 | }
411 | };
412 |
413 | private onAction = (event: ConfirmActionEvent): void => {
414 | const conversation = this.activeConversations.get(event.conversationUuid);
415 | if (conversation) {
416 | conversation.addIncomingEvent("action", event);
417 | }
418 | };
419 |
420 | private onResume = (event: ConfirmResumeEvent): void => {
421 | const conversation = this.activeConversations.get(event.conversationUuid);
422 | if (conversation) {
423 | conversation.addIncomingEvent("resume", event);
424 | }
425 | };
426 |
427 | private onReply = (event: ConfirmReplyEvent): void => {
428 | const conversation = this.activeConversations.get(event.conversationUuid);
429 | if (conversation) {
430 | conversation.addIncomingEvent("reply", event);
431 | }
432 | };
433 |
434 | private onStart = (event: ConfirmStartEvent): void => {
435 | const conversation = this.activeConversations.get(event.conversationUuid);
436 | if (conversation) {
437 | conversation.addIncomingEvent("start", event);
438 | }
439 | };
440 |
441 | private onTap = (event: ConfirmTapEvent): void => {
442 | const conversation = this.activeConversations.get(event.conversationUuid);
443 | if (conversation) {
444 | conversation.addIncomingEvent("tap", event);
445 | }
446 | };
447 | }
448 |
449 | export default Playthrough;
450 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Charisma.ai SDK for JavaScript
2 |
3 | ```
4 | pnpm i @charisma-ai/sdk
5 | ```
6 |
7 | ## Usage
8 |
9 | ```js
10 | // main.js
11 | import {
12 | Playthrough,
13 | createPlaythroughToken,
14 | createConversation,
15 | } from "@charisma-ai/sdk";
16 |
17 | let conversation;
18 |
19 | async function start() {
20 | // Get a unique token for the playthrough.
21 | const { token } = await createPlaythroughToken({ storyId: 4 });
22 |
23 | // Create a new conversation.
24 | const { conversationUuid } = await createConversation(token);
25 |
26 | // Create a new playthrough.
27 | const playthrough = new Playthrough(token);
28 |
29 | // Join the conversation.
30 | conversation = playthrough.joinConversation(conversationUuid);
31 |
32 | // Handle messages in the conversation.
33 | conversation.on("message", (message) => {
34 | console.log(message.message.text);
35 | });
36 |
37 | conversation.on("problem", console.warn);
38 |
39 | // Prepare the listener to start the conversation when the playthrough is connected.
40 | playthrough.on("connection-status", (status) => {
41 | if (status === "connected") {
42 | conversation.start();
43 | }
44 | });
45 |
46 | await playthrough.connect();
47 | }
48 |
49 | // Send the reply to charisma.
50 | function reply(message) {
51 | conversation.reply({ text: message });
52 | }
53 | ```
54 |
55 | ## API Reference
56 |
57 | There are two ways to use the API directly, either by importing `api`, which includes all the API methods, or you can import API methods individually, like `createPlaythroughToken`.
58 |
59 | ```js
60 | import { api, createPlaythroughToken } from "@charisma-ai/sdk";
61 |
62 | api.createPlaythroughToken();
63 | createPlaythroughToken();
64 | ```
65 |
66 | Most API methods are also callable using an instance of the `Playthrough` class, which automatically scopes the API calls to the playthrough `token` passed when creating the instance:
67 |
68 | ```js
69 | const playthrough = new Playthrough(token);
70 | // No need to pass `token` here!
71 | playthrough.createConversation();
72 | ```
73 |
74 | #### createPlaythroughToken
75 |
76 | Use this to set up a new playthrough.
77 |
78 | - `storyId` (`number`): The `id` of the story that you want to create a new playthrough for. The story must be published, unless a Charisma.ai user token has been passed and the user matches the owner of the story.
79 | - `version` (`number`, optional): The `version` of the story that you want to create a new playthrough for. If omitted, it will default to the most recent published version. To get the draft version of a story, pass `-1` and an `apiKey`.
80 | - `apiKey` (`string`, optional): To access draft, test or unpublished versions of your story, pass an `apiKey`. The API key can be found on the story overview page.
81 | - `languageCode` (`string`, optional): To play a story in a language other than English (`en`, the default), pass a BCP-47 `languageCode`. For example, to play in Italian, use `it`.
82 |
Returns a promise that resolves with an object containing the playthrough `token` and its `playthroughUuid`.
84 |
85 | ```js
86 | const { token } = await createPlaythroughToken({
87 | storyId: 12,
88 | version: 4,
89 | apiKey: "...",
90 | languageCode: "en",
91 | });
92 | ```
93 |
94 | #### createConversation
95 |
96 | A playthrough can have many simultaneous conversations. In order to start interacting, a conversation needs to be created, which can then be joined.
97 |
98 | - `playthroughToken` (`string`): The token generated with `createPlaythroughToken`.
99 |
100 | ```js
101 | const { conversationUuid } = await createConversation(token);
102 | ```
103 |
104 | ## Playthrough
105 |
106 | Create a new `Playthrough` instance to connect to a playthrough and interact with the chat engine.
107 |
108 | - `playthroughToken` (`string`): The `token` generated in `createPlaythroughToken`.
109 |
110 | #### Playthrough.joinConversation
111 |
112 | This makes the `Playthrough` instance listen out for events for a particular conversation, and returns a `Conversation` that events can be called on and event listeners attached.
113 |
114 | - `conversationUuid` (`string`): The conversation UUID generated with `createConversation`.
115 |
116 | Returns a `Conversation`, which can be used to send and receive events bound to that conversation.
117 |
118 | ```js
119 | playthrough.joinConversation(conversationUuid);
120 | ```
121 |
122 | #### Playthrough.connect
123 |
124 | This is what kicks off the connection to the chat engine. Call this once you're ready to start sending and receiving events.
125 |
126 | Returns an object with a `playerSessionId` property.
127 |
128 | ```js
129 | await playthrough.connect();
130 | ```
131 |
132 | #### Playthrough.disconnect
133 |
134 | If you want to end the connection to the playthrough, you can call `playthrough.disconnect()`.
135 |
136 | ```js
137 | playthrough.disconnect();
138 | ```
139 |
140 | ## Events
141 |
142 | To interact with the story, events are sent to and from the server that the WebSocket is connected to.
143 |
144 | ### Events sent from client
145 |
146 | #### conversation.start({ ... })
147 |
148 | ```js
149 | {
150 | // For Pro stories, start the story at a particular subplot with the `startGraphReferenceId`.
151 | // It can be found by clicking '...' next to the subplot in the sidebar, and clicking 'Edit details'.
152 | // For Web Comic stories do not provide `startGraphReferenceId`, the story will start automatically from the first scene
153 | "startGraphReferenceId": "my-id", // Optional, default undefined
154 | }
155 | ```
156 |
157 | #### conversation.reply({ ... })
158 |
159 | ```js
160 | {
161 | "text": "Please reply to this!"
162 | }
163 | ```
164 |
165 | #### conversation.tap({ ... })
166 |
167 | This event has no fields.
168 |
169 | #### conversation.action({ ... })
170 |
171 | ```js
172 | {
173 | "action": "pick-up-book"
174 | }
175 | ```
176 |
177 | #### conversation.resume({ ... })
178 |
179 | This event has no fields.
180 |
181 | ### Events received by client
182 |
183 | #### conversation.on('message', (event) => { ... })
184 |
185 | ```js
186 | {
187 | "message": {
188 | "text": "Greetings and good day.",
189 | "character": {
190 | "id": 20,
191 | "name": "Ted Baker",
192 | "avatar": "https://s3.charisma.ai/..."
193 | },
194 | "speech": {
195 | "duration": 203,
196 | "audio": /* either a buffer, or a URL */,
    },
198 | "metadata": {
199 | "myMetadata": "someValue"
200 | },
201 | "media": null
202 | },
203 | "endStory": false,
204 | "path": [{ "id": 1, "type": "edge" }, { "id": 2, "type": "node" }]
205 | }
206 | ```
207 |
208 | #### conversation.on('start-typing', () => { ... })
209 |
210 | This event has no additional data.
211 |
212 | #### conversation.on('stop-typing', () => { ... })
213 |
214 | This event has no additional data.
215 |
216 | #### conversation.on('action', (event) => { ... })
217 |
218 | #### conversation.on('reply', (event) => { ... })
219 |
220 | #### conversation.on('resume', (event) => { ... })
221 |
222 | #### conversation.on('start', (event) => { ... })
223 |
224 | #### conversation.on('tap', (event) => { ... })
225 |
226 | When another player sends specific events to a Charisma playthrough, they are sent back to all other connected players, so that other players can perform actions based on the events, such as displaying their messages in UI.
227 |
228 | The events that are currently echoed to all clients are `action`, `reply`, `resume`, `start` and `tap`.
229 |
230 | **Important:** These events are **not** emitted for the player that sent the original corresponding event!
231 |
232 | Each event includes its committed `eventId` and `timestamp` as well as the original payload (excluding the `speechConfig`).
233 |
234 | #### conversation.on('problem', (event) => { ... })
235 |
236 | If a problem occurs during a conversation, such as a pathway not being found after submitting a player message, `problem` will be emitted.
237 |
238 | ### Conversation helpers
239 |
240 | #### conversation.setSpeechConfig(config)
241 |
242 | This sets the speech configuration to use for all events in the conversation until set otherwise:
243 |
244 | ```json
245 | {
246 | "encoding": ["ogg", "mp3"],
247 | "output": "buffer"
248 | }
249 | ```
250 |
251 | `encoding` is the file format of the resulting speech: `mp3`, `ogg`, `wav` or `pcm`. If an array, Charisma will use the first encoding that the voice supports, useful for cases where a voice synthesis service of a particular voice does not support the "default" encoding you wish to use.
252 |
253 | `output` determines whether the speech received back is a `buffer` (a byte array) or whether it should instead be a `url` pointing to the audio file.
254 |
255 | ## AudioManager
256 |
257 | The audio manager will handle the audio from characters, media and speech-to-text functionality.
258 |
259 | ```js
260 | import { AudioManager } from "@charisma-ai/sdk";
261 |
262 | const audio = new AudioManager({
263 | // AudioManager options
264 | handleTranscript: (transcript: string) => {
265 | console.log(transcript);
266 | },
267 | });
268 | ```
269 |
270 | #### AudioManager Options
271 |
272 | | Option | Type | Default | Description |
273 | | ------------------ | ---------------------------------- | --------------------------- | --------------------------------------------------------------------------------------------------------------------------------------- |
274 | | `debugLogFunction` | `(message: string) => void` | `() => {}` | Callback to handle log messages for debugging. |
275 | | `duckVolumeLevel`  | `number`                           | 0                           | Background audio volume level to use when ducking for microphone input (0 to 1).                                                        |
276 | | `handleConnect` | `(message: string) => void` | `console.log(message)` | Callback to handle when the transcription service connects. |
277 | | `handleError` | `(error: string) => void` | `console.error(error)` | Callback to handle errors. |
278 | | `handleDisconnect` | `(message: string) => void` | `console.error(message)` | Callback to handle when the transcription service disconnects. |
279 | | `handleStartSTT` | `() => void` | | Callback to handle when speech-to-text starts. Can be used to update the UI. |
280 | | `handleStopSTT` | `() => void` | | Callback to handle when speech-to-text stops. |
281 | | `handleTranscript` | `(transcript: string) => void` | | Callback to handle transcripts. |
282 | | `streamTimeslice` | `number` | 100 | The number of milliseconds to record into each Blob. See https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder/start#timeslice |
283 | | `sttService` | `"charisma/deepgram" \| "browser"` | `"charisma/deepgram"` | Speech-to-text service to use (see below). |
284 | | `sttUrl` | `string` | `"https://stt.charisma.ai"` | Speech-to-text service URL. |
285 |
286 | There are currently two speech-to-text services available:
287 |
288 | - `charisma/deepgram`: Deepgram is a neural network based speech-to-text service that can be accessed through Charisma.ai.
289 | - `browser`: Some browsers have built-in speech recognition, which can be used to provide speech-to-text functionality. **This is only available in browsers that support `SpeechRecognition`. Please refer to [this browser compatibility table](https://developer.mozilla.org/en-US/docs/Web/API/SpeechRecognition#browser_compatibility) for more details.**
290 |
291 | ### Speech-to-text
292 |
293 | #### audioManager.startListening(timeout?: number)
294 |
295 | Starts listening for speech. This will call handleStartSTT() when the speech-to-text service starts.
296 | Takes a `timeout` argument in milliseconds, which will automatically stop the speech-to-text service after the timeout. Defaults to 10000 (ten seconds) if not provided.
297 |
298 | #### audioManager.stopListening()
299 |
300 | Stops listening for speech. This will call handleStopSTT() when the speech-to-text service stops.
301 |
302 | #### audioManager.connect(token: string, playerSessionId: string)
303 |
304 | Connects to the speech-to-text service using the playthrough token and player session id to validate. This is only needed when using the `charisma/deepgram` speech-to-text service.
305 |
306 | The `playerSessionId` is returned from `playthrough.connect()`. See the `deepgram-stt` demo for an example.
307 |
308 | #### audioManager.disconnect()
309 |
310 | Disconnects from the speech-to-text service.
311 |
312 | #### audioManager.resetTimeout(timeout: number)
313 |
314 | Resets the timeout for the speech-to-text service to `timeout` in milliseconds. If this is not run, the speech-to-text service will default to a timeout of 10 seconds.
315 | After the timeout, the speech-to-text service will automatically stop listening.
316 |
317 | #### audioManager.browserIsSupported(): boolean
318 |
319 | Returns `true` if the browser supports the `browser` speech recognition service.
320 |
321 | ### General Output
322 |
323 | #### audioManager.initialise()
324 |
325 | Initialises the audio for characters and media. This method _must_ be called before attempting to play audio from media nodes or character speech.
326 |
327 | This method _must_ also be called from a user interaction event, such as a click or a keypress. This is due to a security restriction in some browsers (especially mobile). We recommend adding it to the "start" button that sets up your playthrough. See the demos for an example.
328 |
329 | ### Character Audio
330 |
331 | #### audioManager.playCharacterSpeech(audio: ArrayBuffer, options: AudioOutputsServicePlayOptions): Promise
332 |
333 | This plays the generated speech in the message event. Typically, you would want to use this in combination with a `message` conversation handler.
334 |
335 | Returns a Promise that resolves once the speech has ended.
336 |
337 | `options` is an object with two properties:
338 |
339 | ```ts
340 | type SpeakerPlayOptions = {
341 | /**
342 | * Whether to interrupt the same track as the `trackId` passed (`track`), all currently playing audio (`all`), or not to interrupt anything (`none`). Default is `none`.
343 | */
344 | interrupt?: "track" | "all" | "none";
345 | /**
346 |  * If you want to prevent a particular character from speaking over themselves, a `trackId` can be set to a unique string. When playing another speech clip, if the same `trackId` is passed and `interrupt` is set to `track`, then the previous clip will stop playing. Default is unset.
347 | */
348 | trackId?: string;
349 | };
350 | ```
351 |
352 | #### audioManager.characterSpeechVolume
353 |
354 | Get or set the volume of the character speech. Must be a number between 0 and 1.
355 |
356 | #### audioManager.characterSpeechIsMuted
357 |
358 | Get or set whether character speech is muted. Must be a boolean.
359 |
360 | ### Media Track Audio
361 |
362 | #### audioManager.mediaAudioPlay(audioTracks: AudioTrack[]): void
363 |
364 | Will play the audio tracks in a message event. An empty array can also be passed here so it can be called on every message event.
365 |
366 | #### audioManager.mediaAudioVolume
367 |
368 | Get or set the volume of all non-character audio. Must be a number between 0 and 1.
369 |
370 | #### audioManager.mediaAudioIsMuted
371 |
372 | Get or set whether all non-character audio is muted. Must be a boolean.
373 |
374 | #### audioManager.mediaAudioStopAll()
375 |
376 | Will stop all media audio tracks.
377 |
378 | ## Questions
379 |
380 | For further details or any questions, feel free to get in touch at [hello@charisma.ai](mailto:hello@charisma.ai), or head to the [Charisma docs](https://charisma.ai/docs)!
381 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | ### vNext
4 |
5 | ### v7.0.1
6 |
7 | - Expose Character Speech Analyser node.
8 |
9 | ### v7.0.0
10 |
11 | - Add improved volume and mute controls for character and background audio outputs
12 |
13 | ### v6.0.0
14 |
15 | - Add interim transcripts from the STT service
16 | - Update the deepgram-stt demo to use interim transcripts
17 | - Stop the microphone and emit end-current-transcription, and start the microphone again for next transcription. This uses new functionality on our STT server so the same playthrough-validated socket is kept but new downstream connections to the STT service can be triggered. Without this the results from earlier recordings can bleed into the current transcription.
18 | - **BREAKING** rename and rework character speech. No longer use audioManager outputServicePlay and now use audioManager playCharacterSpeech and volume will automatically go back to normal level after interruption from microphone.
19 | - **BREAKING** use characterSpeechVolume get and set rather than having a setCharacterSpeechVolume
20 |
21 | ### v5.0.8
22 |
23 | - Emit "stop" event when stopListening is called without a microphone.
24 |
25 | ### v5.0.7
26 |
27 | - Add `disconnect` method to disconnect from the server.
28 |
29 | ### v5.0.6
30 |
31 | - Add `timeout` argument to `startListening` methods.
32 |
33 | ### v5.0.5
34 |
35 | - Fix reconnection logic with a timer between attempts.
36 |
37 | ### v5.0.4
38 |
39 | - Add `audio.outputServiceSetVolume` to set the volume of character speech.
40 |
41 | ### v5.0.3
42 |
43 | - Add sttUrl to AudioManagerOptions.
44 |
45 | ### v5.0.2
46 |
47 | - Add missing file extensions
48 |
49 | ### v5.0.1
50 |
51 | - Add exports for `AudioManagerOptions` and `AudioOutputsServicePlayOptions`.
52 |
53 | ### v5.0.0
54 |
55 | **BREAKING**
56 |
57 | - Added `AudioManager` class to handle audio input and output.
58 | - Removed `Microphone` and `Speaker` classes. Replaced with `AudioManager` methods.
59 | - Use AudioContext API for better browser compatibility.
60 |
61 | ### v4.0.5
62 |
63 | - Updated types SpeechRecognitionStartEvent, SpeechRecognitionResponse, SpeechRecognitionParameters with traceId.
64 | - Add orientation type to ImageLayer.
65 |
66 | ### v4.0.3
67 |
68 | - Speech recognition now uses the correct sample rate in Firefox.
69 |
70 | ### v4.0.2
71 |
72 | - Stopping the microphone now stops the underlying `MediaStreamTrack`, removing the red 'listening' icon.
73 |
74 | ### v4.0.1
75 |
76 | - Fixed an issue where speech recognition was using an incorrect sample rate.
77 |
78 | ### v4.0.0
79 |
80 | - **BREAKING:** This package now exports ES Module only.
81 | - **BREAKING:** An implementation of `fetch` is no longer included with this library. Consumers of this library should ensure their environment supports `fetch`.
82 | - **BREAKING:** This library now relies on having `URLSearchParams` available in the environment.
83 | - **BREAKING:** This library now relies on having `BigInt` available in the environment.
84 | - **BREAKING:** `playthroughId`s and `conversationId`s have been changed from `number` to `string` type everywhere in this SDK, and renamed to `playthroughUuid` and `conversationUuid`.
85 | - **BREAKING:** `api.createPlaythroughToken`, `api.createConversation` and `api.createCharacterConversation` all now return an object instead of a scalar, to facilitate any future changes to these methods.
86 | - **BREAKING:** `memories` in `getPlaythroughInfo` and in the `message` event now return `saveValue`s as JSON instead of only as strings. For example, counter memories are now actually `number`s and boolean memories are now actually `boolean`s.
87 | - **BREAKING:** `getMessageHistory` has been removed, and `getEventHistory` added as a more fully-featured alternative with much greater support for filtering, and can return all event types.
88 | - `setMemory` now accepts any JSON values instead of only strings.
89 | - **BREAKING:** Speech recognition stream now uses common objects to start up, deliver results and stop, regardless of which downstream service is selected.
90 | - Add start and stop events to the speech recognition stream.
91 |
92 | ### v3.9.0
93 |
94 | - Added `result` event to `Microphone` so clients can subscribe to raw `SpeechRecognition` events.
95 | - `recognise` and `recognise-interim` now emit the text of the _last_ result instead of the _first_ result in the `SpeechRecognition` event if `continuous` is `true`.
96 |
97 | ### v3.8.0
98 |
99 | - Multiple memories can now be set at once using the `setMemory` call.
100 |
101 | ### v3.7.0
102 |
103 | - Add support for `forkPlaythrough` API. This enables a player to upgrade to the latest published version from their old playthrough, copying across memories and emotions into the new playthrough, and returning the new token. Note that conversations are not carried across.
104 |
105 | ### v3.6.1
106 |
107 | - `problem` events scoped to a conversation can now be listened to via `conversation.on("problem", ...)`
108 |
109 | ### v3.6.0
110 |
111 | - It's now possible to specify multiple supported speech encodings in `speechConfig` by passing an array instead of a string. Charisma will use the first encoding that the voice synthesis service supports.
112 | - Added experimental support for intermediate client events. These events can be sent to Charisma to prevent characters from talking if the player is still speaking or typing. This can only be enabled for a story by getting in touch at [hello@charisma.ai](mailto:hello@charisma.ai).
113 |
114 | ### v3.5.0
115 |
116 | - Added support for Decentraland.
117 |
118 | ### v3.4.2
119 |
120 | - SDK info is now also sent upon reconnection to the room.
121 |
122 | ### v3.4.1
123 |
124 | - `package.json` now references correct emitted types location.
125 |
126 | ### v3.4.0
127 |
128 | - Added `languageCode` option to `createPlaythroughToken`, to play Charisma stories in languages other than English.
129 | - Added SDK info to joining a room, for Charisma to track which SDK versions are in use.
130 |
131 | ### v3.3.0
132 |
133 | - It's now possible to subscribe to events that are sent from other players, such as other players' messages. This can be done by adding a subscriber to a conversation to listen for the corresponding event, e.g. `conversation.on("reply", () => { /* remote player's reply */ })`. These handlers will _not_ be fired for messages sent from the local connected client, only for remote clients.
134 | - Add missing `graphId: number` to `MessagePathItem` type.
135 | - Updated dependencies.
136 |
137 | ### v3.2.0
138 |
139 | - Added `startGraphId` and `startGraphReferenceId` to `StartEvent` to start from a specific graph ID.
140 | - Added experimental `pause` and `play` methods to `Playthrough`.
141 |
142 | ### v3.1.0
143 |
144 | - Support for action node/event.
145 | - `SpeechRecognitionStopOptions` is now exported.
146 |
147 | ### v3.0.0
148 |
149 | There is a new emotion engine in Charisma! As a result...
150 |
151 | - `message.characterMoods` has been removed and replaced with `message.emotions`. This contains each character's current mood and relationship with the player, and any active feeling effects.
152 | - `setMood` has been removed. We may add an equivalent API for the new emotion engine in the future. Let us know about your use case if this interests you!
153 |
154 | ### v2.3.0
155 |
156 | - `Microphone.stopListening()` now accepts an `options` parameter with a single option `waitForLastResult`. If set to `true`, then the `recognise` will be called a final time with the result of the audio captured so far. If `false`, the operation will be aborted, so no additional `recognise` event will occur.
157 |
158 | ### v2.2.0
159 |
160 | - `Speaker.play()` now accepts an `options` parameter as its second parameter instead of a boolean value (which used to represent `interrupt`). This change is backwards compatible, but the old boolean way is deprecated and will be removed in the next major release.
161 | - `options` contains two parameters: `trackId` and `interrupt`. `trackId` can be used to interrupt only a particular track, for example, to prevent a character talking over themselves. `interrupt` can now be configured to `all` (interrupt all playing audio), `track` (interrupt the specified `trackId` if playing), or `none` (don't interrupt any audio).
162 |
163 | ### v2.1.0
164 |
165 | - Adds the option to pass an `apiKey` to use for authentication for playthrough token creation. This is now the recommended way to authenticate as API keys do not expire (unless regenerated) and are more secure than the `userToken`. `userToken` should no longer be used.
166 |
167 | ### v2.0.0
168 |
169 | This release makes **several breaking changes**. The main change is replacing `socket.io` with `colyseus.js`.
170 |
171 | - Replaces `socket.io` with `colyseus.js`.
172 | - Due to how Colyseus serializes data, `audio` is now an `ArrayBuffer` instead of an object with the `data` property.
173 | - API methods and the `Playthrough` constructor now accept a `baseUrl` option, which is used in preference to `globalBaseUrl`. `globalBaseUrl` is now set with `setGlobalBaseUrl` instead of `setBaseUrl`.
174 | - API methods are now individually exported instead of being static methods on the Charisma class, as well as being exported under a single `api` object.
175 | - Improved the implementation of `Microphone`.
176 | - Replace multiple connection events from `Charisma` (`connect`, `disconnect` etc) with single `connection-status` event.
177 | - The `Charisma` class has been renamed to `Playthrough`.
178 | - The `cleanup` function has been renamed to `disconnect`.
179 |
180 | ### v1.10.0
181 |
182 | - Change `imageLayers` field to an array of object, each including `url`, `resizeMode` and `points`.
183 |
184 | ### v1.9.1
185 |
186 | - Add `isImpactShareable` and `impactImageUrl` fields to impacts, and fix the type of impact `id`s to be `string`s.
187 |
188 | ### v1.9.0
189 |
190 | - **BREAKING CHANGE**: `eventId`s are now emitted as `string`s. Please upgrade to this version to continue using the reconnection "catch-up" logic (though everything else should work).
191 |
192 | ### v1.8.1
193 |
194 | - `Speaker` will no longer try to play audio if the context's state is not `running`. This resolves an issue where the user has not granted permission for the audio context to play sound, and so the `play(...)` promise never resolves.
195 |
196 | ### v1.8.0
197 |
198 | - Reconnecting will now fetch and emit messages that were emitted from the server after the last received message.
199 | - Add `impacts` field to `GetPlaythroughResult` type.
200 |
201 | ### v1.7.0
202 |
203 | - Pass through more events: `reconnect`, `reconnecting`, `disconnect` and `problem`.
204 | - Added types for new `panel` message, and added bubble-related types onto the `media` key.
205 | - Adjusted `setMemory` type to accept `null`.
206 | - Removes `scene-complete` event and `stopOnSceneComplete` option.
207 | - Adds `episode-complete` event. The chat engine automatically stops on episode end if the episode is started by an app user.
208 | - Adds `restartFromEpisodeId` and `restartFromEpisodeIndex` methods and removes `restartFromScene` method.
209 |
210 | ### v1.6.1
211 |
212 | - GET requests no longer try to add a body to the request.
213 |
214 | ### v1.6.0
215 |
216 | - Add `start` and `stop` events to `Speaker`.
217 |
218 | ### v1.5.0
219 |
220 | - Adds support for use in Node.js.
221 |
222 | ### v1.4.0
223 |
224 | - `Microphone.startListening(timeout)` now has a timeout parameter to automatically stop the microphone after `timeout` milliseconds.
225 | - `Microphone.resetTimeout(timeout)` will reset the timeout to `timeout` milliseconds.
226 | - Microphone now emits `start` and `stop` events, particularly useful in conjunction with timeout.
227 |
228 | ### v1.3.0
229 |
230 | - Add an `interrupt` option to `Speaker` to ensure replies don't overlap.
231 | - **Breaking**: Target ES2018; drop support for pre-ES2018 environments.
232 |
233 | ### v1.2.0
234 |
235 | - Add `restartFromScene` method to SDK. This can be used to reset the playthrough to the state it was in at the beginning of a particular scene.
236 | - Exports more types and adjusts message types to include `eventId`, `timestamp` and `memories`.
237 |
238 | ### v1.1.0
239 |
240 | - Add `resume` event to SDK. This can be used to resume a conversation from where it left off.
241 |
242 | ### v1.0.5
243 |
244 | - Use `webkitAudioContext` for `Speaker` on Safari.
245 |
246 | ### v1.0.4
247 |
248 | - Export the `Impact` type.
249 |
250 | ### v1.0.3
251 |
252 | - `impacts` are now objects containing their ID as well as the `impact` string.
253 |
254 | ### v1.0.2
255 |
256 | - Rename `setStopOnSceneEnd` to `setStopOnSceneComplete` to ensure consistency with the event name.
257 |
258 | ### v1.0.1
259 |
260 | - Fix `createPlaythroughToken` throwing an error when both `version` and `userToken` are not provided.
261 |
262 | ### v1.0.0
263 |
264 | - Initial stable release.
265 | - Completely overhauls the SDK API, please see the [README](./README.md) for more details on how to use the newer, conversation-based API.
266 |
267 | ## Past major versions
268 |
269 | ### v0.9.2
270 |
271 | - Pass data (containing `impacts`) through on scene complete event.
272 |
273 | ### v0.9.1
274 |
275 | - Pass `stopOnSceneComplete` through to the `CharismaInstance`.
276 |
277 | ### v0.9.0
278 |
279 | - Add `stopOnSceneComplete` option to prevent automatically continuing between scenes.
280 |
281 | ### v0.8.3
282 |
283 | - Add `media` field onto the character message type.
284 |
285 | ### v0.8.2
286 |
287 | - Add `tapToContinue` to message history type.
288 |
289 | ### v0.8.1
290 |
291 | - Add `timestamp` to messages returned from `getMessageHistory`.
292 | - Improved type for `getMessageHistory`.
293 |
294 | ### v0.8.0
295 |
296 | - Can now specify `playthroughToken` to re-use a playthrough instead of creating a new one when connecting.
297 | - Can now fetch message history of the playthrough using `charisma.getMessageHistory`.
298 |
299 | ### v0.7.3
300 |
301 | - Fix `IMessageCharacter.speech` type.
302 |
303 | ### v0.7.2
304 |
305 | - `ISynthesisConfig` and some additional types are now exported.
306 |
307 | ### v0.7.1
308 |
309 | - `speech` now takes a config object as well as a boolean. It can specify the audio encoding to use and whether it returns the raw audio data, or a link to an audio file.
310 |
311 | ### v0.7.0
312 |
313 | - BREAKING: The `reply` event has been renamed to `message`, and now has a `type` field on the payload to distinguish between `character` and `media` events. Other fields have been refactored, such as `character` and `speech`. Please consult [src/types.ts](src/types.ts) to find the new message format.
314 | - A new `tap` event is available for the client to send.
315 |
316 | ### v0.6.0
317 |
318 | - Accidentally published version, but never tagged as `latest`.
319 |
320 | ### v0.5.1
321 |
322 | - Fix broken 0.5.0 publish.
323 |
324 | ### v0.5.0
325 |
326 | - Removed `browser` field from `package.json`. Consumers can use the UMD directly from unpkg.
327 | - Removed `actIndex` as it is no longer supported.
328 |
329 | ### v0.4.2
330 |
331 | - Buffer `set-memory` events until `status: 'ready'` is received.
332 |
333 | ### v0.4.1
334 |
335 | - `actIndex` and `sceneIndex` can now be set on the `start` event to start from a specific story scene.
336 |
337 | ### v0.4.0
338 |
339 | - **BREAKING**: UMD name changed from `Charisma` to `CharismaSDK`. The ES/CJS builds now don't bundle their dependencies.
340 | - Added `setMemory` method to directly set a memory.
341 | - Fixed all ID types to be `number`, not `string`.
342 |
343 | ### v0.3.1
344 |
345 | - Passing no `version` to the `connect` method now results in using the latest published version, rather than the draft version.
346 |
347 | ### v0.3.0
348 |
349 | - Package renamed (rescoped) to `@charisma-ai/sdk`.
350 |
351 | ### v0.2.0
352 |
353 | - The `debug` option has been replaced with the `version` option, which defaults to `undefined` (the latest published story version).
354 |
355 | ### v0.1.2
356 |
357 | - The microphone now stops listening when a reply with `endStory` set to `true` is emitted.
358 |
359 | ### v0.1.1
360 |
361 | - `AudioContext` is now created on-demand rather than on initialisation.
362 |
363 | ### v0.1.0
364 |
365 | - Socket.io now forces websockets, skipping the long-polling check.
366 |
367 | ### v0.0.4
368 |
369 | - Fixed issue where audio was not working on Safari.
370 |
371 | ### v0.0.3
372 |
373 | - Microphone keeps better track of whether to resume speech recognition after speaking.
374 |
375 | ### v0.0.2
376 |
377 | - Support for recording speech-to-text via Chrome SpeechRecognition.
378 | - New events `recognise-interim` and `recognise`.
379 | - Speech recognition is paused while the audio is played.
380 |
381 | ### v0.0.1
382 |
383 | - Initial release.
384 | - Support for `reply` and `start` client events, and `reply`, `start-typing` and `stop-typing` server events.
385 | - Support for playing text-to-speech audio.
386 |
--------------------------------------------------------------------------------
/demos/no_stt/pnpm-lock.yaml:
--------------------------------------------------------------------------------
1 | lockfileVersion: '9.0'
2 |
3 | settings:
4 | autoInstallPeers: true
5 | excludeLinksFromLockfile: false
6 |
7 | importers:
8 |
9 | .:
10 | dependencies:
11 | '@charisma-ai/sdk':
12 | specifier: link:../..
13 | version: link:../..
14 | devDependencies:
15 | vite:
16 | specifier: ^5.3.1
17 | version: 5.3.3
18 |
19 | packages:
20 |
21 | '@esbuild/aix-ppc64@0.21.5':
22 | resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==, tarball: https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz}
23 | engines: {node: '>=12'}
24 | cpu: [ppc64]
25 | os: [aix]
26 |
27 | '@esbuild/android-arm64@0.21.5':
28 | resolution: {integrity: sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==, tarball: https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz}
29 | engines: {node: '>=12'}
30 | cpu: [arm64]
31 | os: [android]
32 |
33 | '@esbuild/android-arm@0.21.5':
34 | resolution: {integrity: sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==, tarball: https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz}
35 | engines: {node: '>=12'}
36 | cpu: [arm]
37 | os: [android]
38 |
39 | '@esbuild/android-x64@0.21.5':
40 | resolution: {integrity: sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==, tarball: https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz}
41 | engines: {node: '>=12'}
42 | cpu: [x64]
43 | os: [android]
44 |
45 | '@esbuild/darwin-arm64@0.21.5':
46 | resolution: {integrity: sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==, tarball: https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz}
47 | engines: {node: '>=12'}
48 | cpu: [arm64]
49 | os: [darwin]
50 |
51 | '@esbuild/darwin-x64@0.21.5':
52 | resolution: {integrity: sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==, tarball: https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz}
53 | engines: {node: '>=12'}
54 | cpu: [x64]
55 | os: [darwin]
56 |
57 | '@esbuild/freebsd-arm64@0.21.5':
58 | resolution: {integrity: sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==, tarball: https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz}
59 | engines: {node: '>=12'}
60 | cpu: [arm64]
61 | os: [freebsd]
62 |
63 | '@esbuild/freebsd-x64@0.21.5':
64 | resolution: {integrity: sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==, tarball: https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz}
65 | engines: {node: '>=12'}
66 | cpu: [x64]
67 | os: [freebsd]
68 |
69 | '@esbuild/linux-arm64@0.21.5':
70 | resolution: {integrity: sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==, tarball: https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz}
71 | engines: {node: '>=12'}
72 | cpu: [arm64]
73 | os: [linux]
74 |
75 | '@esbuild/linux-arm@0.21.5':
76 | resolution: {integrity: sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==, tarball: https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz}
77 | engines: {node: '>=12'}
78 | cpu: [arm]
79 | os: [linux]
80 |
81 | '@esbuild/linux-ia32@0.21.5':
82 | resolution: {integrity: sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==, tarball: https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz}
83 | engines: {node: '>=12'}
84 | cpu: [ia32]
85 | os: [linux]
86 |
87 | '@esbuild/linux-loong64@0.21.5':
88 | resolution: {integrity: sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==, tarball: https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz}
89 | engines: {node: '>=12'}
90 | cpu: [loong64]
91 | os: [linux]
92 |
93 | '@esbuild/linux-mips64el@0.21.5':
94 | resolution: {integrity: sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==, tarball: https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz}
95 | engines: {node: '>=12'}
96 | cpu: [mips64el]
97 | os: [linux]
98 |
99 | '@esbuild/linux-ppc64@0.21.5':
100 | resolution: {integrity: sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==, tarball: https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz}
101 | engines: {node: '>=12'}
102 | cpu: [ppc64]
103 | os: [linux]
104 |
105 | '@esbuild/linux-riscv64@0.21.5':
106 | resolution: {integrity: sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==, tarball: https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz}
107 | engines: {node: '>=12'}
108 | cpu: [riscv64]
109 | os: [linux]
110 |
111 | '@esbuild/linux-s390x@0.21.5':
112 | resolution: {integrity: sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==, tarball: https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz}
113 | engines: {node: '>=12'}
114 | cpu: [s390x]
115 | os: [linux]
116 |
117 | '@esbuild/linux-x64@0.21.5':
118 | resolution: {integrity: sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==, tarball: https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz}
119 | engines: {node: '>=12'}
120 | cpu: [x64]
121 | os: [linux]
122 |
123 | '@esbuild/netbsd-x64@0.21.5':
124 | resolution: {integrity: sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==, tarball: https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz}
125 | engines: {node: '>=12'}
126 | cpu: [x64]
127 | os: [netbsd]
128 |
129 | '@esbuild/openbsd-x64@0.21.5':
130 | resolution: {integrity: sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==, tarball: https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz}
131 | engines: {node: '>=12'}
132 | cpu: [x64]
133 | os: [openbsd]
134 |
135 | '@esbuild/sunos-x64@0.21.5':
136 | resolution: {integrity: sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==, tarball: https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz}
137 | engines: {node: '>=12'}
138 | cpu: [x64]
139 | os: [sunos]
140 |
141 | '@esbuild/win32-arm64@0.21.5':
142 | resolution: {integrity: sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==, tarball: https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz}
143 | engines: {node: '>=12'}
144 | cpu: [arm64]
145 | os: [win32]
146 |
147 | '@esbuild/win32-ia32@0.21.5':
148 | resolution: {integrity: sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==, tarball: https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz}
149 | engines: {node: '>=12'}
150 | cpu: [ia32]
151 | os: [win32]
152 |
153 | '@esbuild/win32-x64@0.21.5':
154 | resolution: {integrity: sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==, tarball: https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz}
155 | engines: {node: '>=12'}
156 | cpu: [x64]
157 | os: [win32]
158 |
159 | '@rollup/rollup-android-arm-eabi@4.18.1':
160 | resolution: {integrity: sha512-lncuC4aHicncmbORnx+dUaAgzee9cm/PbIqgWz1PpXuwc+sa1Ct83tnqUDy/GFKleLiN7ZIeytM6KJ4cAn1SxA==, tarball: https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.18.1.tgz}
161 | cpu: [arm]
162 | os: [android]
163 |
164 | '@rollup/rollup-android-arm64@4.18.1':
165 | resolution: {integrity: sha512-F/tkdw0WSs4ojqz5Ovrw5r9odqzFjb5LIgHdHZG65dFI1lWTWRVy32KDJLKRISHgJvqUeUhdIvy43fX41znyDg==, tarball: https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.18.1.tgz}
166 | cpu: [arm64]
167 | os: [android]
168 |
169 | '@rollup/rollup-darwin-arm64@4.18.1':
170 | resolution: {integrity: sha512-vk+ma8iC1ebje/ahpxpnrfVQJibTMyHdWpOGZ3JpQ7Mgn/3QNHmPq7YwjZbIE7km73dH5M1e6MRRsnEBW7v5CQ==, tarball: https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.18.1.tgz}
171 | cpu: [arm64]
172 | os: [darwin]
173 |
174 | '@rollup/rollup-darwin-x64@4.18.1':
175 | resolution: {integrity: sha512-IgpzXKauRe1Tafcej9STjSSuG0Ghu/xGYH+qG6JwsAUxXrnkvNHcq/NL6nz1+jzvWAnQkuAJ4uIwGB48K9OCGA==, tarball: https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.18.1.tgz}
176 | cpu: [x64]
177 | os: [darwin]
178 |
179 | '@rollup/rollup-linux-arm-gnueabihf@4.18.1':
180 | resolution: {integrity: sha512-P9bSiAUnSSM7EmyRK+e5wgpqai86QOSv8BwvkGjLwYuOpaeomiZWifEos517CwbG+aZl1T4clSE1YqqH2JRs+g==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.18.1.tgz}
181 | cpu: [arm]
182 | os: [linux]
183 |
184 | '@rollup/rollup-linux-arm-musleabihf@4.18.1':
185 | resolution: {integrity: sha512-5RnjpACoxtS+aWOI1dURKno11d7krfpGDEn19jI8BuWmSBbUC4ytIADfROM1FZrFhQPSoP+KEa3NlEScznBTyQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.18.1.tgz}
186 | cpu: [arm]
187 | os: [linux]
188 |
189 | '@rollup/rollup-linux-arm64-gnu@4.18.1':
190 | resolution: {integrity: sha512-8mwmGD668m8WaGbthrEYZ9CBmPug2QPGWxhJxh/vCgBjro5o96gL04WLlg5BA233OCWLqERy4YUzX3bJGXaJgQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.18.1.tgz}
191 | cpu: [arm64]
192 | os: [linux]
193 |
194 | '@rollup/rollup-linux-arm64-musl@4.18.1':
195 | resolution: {integrity: sha512-dJX9u4r4bqInMGOAQoGYdwDP8lQiisWb9et+T84l2WXk41yEej8v2iGKodmdKimT8cTAYt0jFb+UEBxnPkbXEQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.18.1.tgz}
196 | cpu: [arm64]
197 | os: [linux]
198 |
199 | '@rollup/rollup-linux-powerpc64le-gnu@4.18.1':
200 | resolution: {integrity: sha512-V72cXdTl4EI0x6FNmho4D502sy7ed+LuVW6Ym8aI6DRQ9hQZdp5sj0a2usYOlqvFBNKQnLQGwmYnujo2HvjCxQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.18.1.tgz}
201 | cpu: [ppc64]
202 | os: [linux]
203 |
204 | '@rollup/rollup-linux-riscv64-gnu@4.18.1':
205 | resolution: {integrity: sha512-f+pJih7sxoKmbjghrM2RkWo2WHUW8UbfxIQiWo5yeCaCM0TveMEuAzKJte4QskBp1TIinpnRcxkquY+4WuY/tg==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.18.1.tgz}
206 | cpu: [riscv64]
207 | os: [linux]
208 |
209 | '@rollup/rollup-linux-s390x-gnu@4.18.1':
210 | resolution: {integrity: sha512-qb1hMMT3Fr/Qz1OKovCuUM11MUNLUuHeBC2DPPAWUYYUAOFWaxInaTwTQmc7Fl5La7DShTEpmYwgdt2hG+4TEg==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.18.1.tgz}
211 | cpu: [s390x]
212 | os: [linux]
213 |
214 | '@rollup/rollup-linux-x64-gnu@4.18.1':
215 | resolution: {integrity: sha512-7O5u/p6oKUFYjRbZkL2FLbwsyoJAjyeXHCU3O4ndvzg2OFO2GinFPSJFGbiwFDaCFc+k7gs9CF243PwdPQFh5g==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.18.1.tgz}
216 | cpu: [x64]
217 | os: [linux]
218 |
219 | '@rollup/rollup-linux-x64-musl@4.18.1':
220 | resolution: {integrity: sha512-pDLkYITdYrH/9Cv/Vlj8HppDuLMDUBmgsM0+N+xLtFd18aXgM9Nyqupb/Uw+HeidhfYg2lD6CXvz6CjoVOaKjQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.18.1.tgz}
221 | cpu: [x64]
222 | os: [linux]
223 |
224 | '@rollup/rollup-win32-arm64-msvc@4.18.1':
225 | resolution: {integrity: sha512-W2ZNI323O/8pJdBGil1oCauuCzmVd9lDmWBBqxYZcOqWD6aWqJtVBQ1dFrF4dYpZPks6F+xCZHfzG5hYlSHZ6g==, tarball: https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.18.1.tgz}
226 | cpu: [arm64]
227 | os: [win32]
228 |
229 | '@rollup/rollup-win32-ia32-msvc@4.18.1':
230 | resolution: {integrity: sha512-ELfEX1/+eGZYMaCIbK4jqLxO1gyTSOIlZr6pbC4SRYFaSIDVKOnZNMdoZ+ON0mrFDp4+H5MhwNC1H/AhE3zQLg==, tarball: https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.18.1.tgz}
231 | cpu: [ia32]
232 | os: [win32]
233 |
234 | '@rollup/rollup-win32-x64-msvc@4.18.1':
235 | resolution: {integrity: sha512-yjk2MAkQmoaPYCSu35RLJ62+dz358nE83VfTePJRp8CG7aMg25mEJYpXFiD+NcevhX8LxD5OP5tktPXnXN7GDw==, tarball: https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.18.1.tgz}
236 | cpu: [x64]
237 | os: [win32]
238 |
239 | '@types/estree@1.0.5':
240 | resolution: {integrity: sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==, tarball: https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz}
241 |
242 | esbuild@0.21.5:
243 | resolution: {integrity: sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==, tarball: https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz}
244 | engines: {node: '>=12'}
245 | hasBin: true
246 |
247 | fsevents@2.3.3:
248 | resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==, tarball: https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz}
249 | engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
250 | os: [darwin]
251 |
252 | nanoid@3.3.7:
253 | resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==, tarball: https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz}
254 | engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1}
255 | hasBin: true
256 |
257 | picocolors@1.0.1:
258 | resolution: {integrity: sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==, tarball: https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz}
259 |
260 | postcss@8.4.39:
261 | resolution: {integrity: sha512-0vzE+lAiG7hZl1/9I8yzKLx3aR9Xbof3fBHKunvMfOCYAtMhrsnccJY2iTURb9EZd5+pLuiNV9/c/GZJOHsgIw==, tarball: https://registry.npmjs.org/postcss/-/postcss-8.4.39.tgz}
262 | engines: {node: ^10 || ^12 || >=14}
263 |
264 | rollup@4.18.1:
265 | resolution: {integrity: sha512-Elx2UT8lzxxOXMpy5HWQGZqkrQOtrVDDa/bm9l10+U4rQnVzbL/LgZ4NOM1MPIDyHk69W4InuYDF5dzRh4Kw1A==, tarball: https://registry.npmjs.org/rollup/-/rollup-4.18.1.tgz}
266 | engines: {node: '>=18.0.0', npm: '>=8.0.0'}
267 | hasBin: true
268 |
269 | source-map-js@1.2.0:
270 | resolution: {integrity: sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==, tarball: https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz}
271 | engines: {node: '>=0.10.0'}
272 |
273 | vite@5.3.3:
274 | resolution: {integrity: sha512-NPQdeCU0Dv2z5fu+ULotpuq5yfCS1BzKUIPhNbP3YBfAMGJXbt2nS+sbTFu+qchaqWTD+H3JK++nRwr6XIcp6A==, tarball: https://registry.npmjs.org/vite/-/vite-5.3.3.tgz}
275 | engines: {node: ^18.0.0 || >=20.0.0}
276 | hasBin: true
277 | peerDependencies:
278 | '@types/node': ^18.0.0 || >=20.0.0
279 | less: '*'
280 | lightningcss: ^1.21.0
281 | sass: '*'
282 | stylus: '*'
283 | sugarss: '*'
284 | terser: ^5.4.0
285 | peerDependenciesMeta:
286 | '@types/node':
287 | optional: true
288 | less:
289 | optional: true
290 | lightningcss:
291 | optional: true
292 | sass:
293 | optional: true
294 | stylus:
295 | optional: true
296 | sugarss:
297 | optional: true
298 | terser:
299 | optional: true
300 |
301 | snapshots:
302 |
303 | '@esbuild/aix-ppc64@0.21.5':
304 | optional: true
305 |
306 | '@esbuild/android-arm64@0.21.5':
307 | optional: true
308 |
309 | '@esbuild/android-arm@0.21.5':
310 | optional: true
311 |
312 | '@esbuild/android-x64@0.21.5':
313 | optional: true
314 |
315 | '@esbuild/darwin-arm64@0.21.5':
316 | optional: true
317 |
318 | '@esbuild/darwin-x64@0.21.5':
319 | optional: true
320 |
321 | '@esbuild/freebsd-arm64@0.21.5':
322 | optional: true
323 |
324 | '@esbuild/freebsd-x64@0.21.5':
325 | optional: true
326 |
327 | '@esbuild/linux-arm64@0.21.5':
328 | optional: true
329 |
330 | '@esbuild/linux-arm@0.21.5':
331 | optional: true
332 |
333 | '@esbuild/linux-ia32@0.21.5':
334 | optional: true
335 |
336 | '@esbuild/linux-loong64@0.21.5':
337 | optional: true
338 |
339 | '@esbuild/linux-mips64el@0.21.5':
340 | optional: true
341 |
342 | '@esbuild/linux-ppc64@0.21.5':
343 | optional: true
344 |
345 | '@esbuild/linux-riscv64@0.21.5':
346 | optional: true
347 |
348 | '@esbuild/linux-s390x@0.21.5':
349 | optional: true
350 |
351 | '@esbuild/linux-x64@0.21.5':
352 | optional: true
353 |
354 | '@esbuild/netbsd-x64@0.21.5':
355 | optional: true
356 |
357 | '@esbuild/openbsd-x64@0.21.5':
358 | optional: true
359 |
360 | '@esbuild/sunos-x64@0.21.5':
361 | optional: true
362 |
363 | '@esbuild/win32-arm64@0.21.5':
364 | optional: true
365 |
366 | '@esbuild/win32-ia32@0.21.5':
367 | optional: true
368 |
369 | '@esbuild/win32-x64@0.21.5':
370 | optional: true
371 |
372 | '@rollup/rollup-android-arm-eabi@4.18.1':
373 | optional: true
374 |
375 | '@rollup/rollup-android-arm64@4.18.1':
376 | optional: true
377 |
378 | '@rollup/rollup-darwin-arm64@4.18.1':
379 | optional: true
380 |
381 | '@rollup/rollup-darwin-x64@4.18.1':
382 | optional: true
383 |
384 | '@rollup/rollup-linux-arm-gnueabihf@4.18.1':
385 | optional: true
386 |
387 | '@rollup/rollup-linux-arm-musleabihf@4.18.1':
388 | optional: true
389 |
390 | '@rollup/rollup-linux-arm64-gnu@4.18.1':
391 | optional: true
392 |
393 | '@rollup/rollup-linux-arm64-musl@4.18.1':
394 | optional: true
395 |
396 | '@rollup/rollup-linux-powerpc64le-gnu@4.18.1':
397 | optional: true
398 |
399 | '@rollup/rollup-linux-riscv64-gnu@4.18.1':
400 | optional: true
401 |
402 | '@rollup/rollup-linux-s390x-gnu@4.18.1':
403 | optional: true
404 |
405 | '@rollup/rollup-linux-x64-gnu@4.18.1':
406 | optional: true
407 |
408 | '@rollup/rollup-linux-x64-musl@4.18.1':
409 | optional: true
410 |
411 | '@rollup/rollup-win32-arm64-msvc@4.18.1':
412 | optional: true
413 |
414 | '@rollup/rollup-win32-ia32-msvc@4.18.1':
415 | optional: true
416 |
417 | '@rollup/rollup-win32-x64-msvc@4.18.1':
418 | optional: true
419 |
420 | '@types/estree@1.0.5': {}
421 |
422 | esbuild@0.21.5:
423 | optionalDependencies:
424 | '@esbuild/aix-ppc64': 0.21.5
425 | '@esbuild/android-arm': 0.21.5
426 | '@esbuild/android-arm64': 0.21.5
427 | '@esbuild/android-x64': 0.21.5
428 | '@esbuild/darwin-arm64': 0.21.5
429 | '@esbuild/darwin-x64': 0.21.5
430 | '@esbuild/freebsd-arm64': 0.21.5
431 | '@esbuild/freebsd-x64': 0.21.5
432 | '@esbuild/linux-arm': 0.21.5
433 | '@esbuild/linux-arm64': 0.21.5
434 | '@esbuild/linux-ia32': 0.21.5
435 | '@esbuild/linux-loong64': 0.21.5
436 | '@esbuild/linux-mips64el': 0.21.5
437 | '@esbuild/linux-ppc64': 0.21.5
438 | '@esbuild/linux-riscv64': 0.21.5
439 | '@esbuild/linux-s390x': 0.21.5
440 | '@esbuild/linux-x64': 0.21.5
441 | '@esbuild/netbsd-x64': 0.21.5
442 | '@esbuild/openbsd-x64': 0.21.5
443 | '@esbuild/sunos-x64': 0.21.5
444 | '@esbuild/win32-arm64': 0.21.5
445 | '@esbuild/win32-ia32': 0.21.5
446 | '@esbuild/win32-x64': 0.21.5
447 |
448 | fsevents@2.3.3:
449 | optional: true
450 |
451 | nanoid@3.3.7: {}
452 |
453 | picocolors@1.0.1: {}
454 |
455 | postcss@8.4.39:
456 | dependencies:
457 | nanoid: 3.3.7
458 | picocolors: 1.0.1
459 | source-map-js: 1.2.0
460 |
461 | rollup@4.18.1:
462 | dependencies:
463 | '@types/estree': 1.0.5
464 | optionalDependencies:
465 | '@rollup/rollup-android-arm-eabi': 4.18.1
466 | '@rollup/rollup-android-arm64': 4.18.1
467 | '@rollup/rollup-darwin-arm64': 4.18.1
468 | '@rollup/rollup-darwin-x64': 4.18.1
469 | '@rollup/rollup-linux-arm-gnueabihf': 4.18.1
470 | '@rollup/rollup-linux-arm-musleabihf': 4.18.1
471 | '@rollup/rollup-linux-arm64-gnu': 4.18.1
472 | '@rollup/rollup-linux-arm64-musl': 4.18.1
473 | '@rollup/rollup-linux-powerpc64le-gnu': 4.18.1
474 | '@rollup/rollup-linux-riscv64-gnu': 4.18.1
475 | '@rollup/rollup-linux-s390x-gnu': 4.18.1
476 | '@rollup/rollup-linux-x64-gnu': 4.18.1
477 | '@rollup/rollup-linux-x64-musl': 4.18.1
478 | '@rollup/rollup-win32-arm64-msvc': 4.18.1
479 | '@rollup/rollup-win32-ia32-msvc': 4.18.1
480 | '@rollup/rollup-win32-x64-msvc': 4.18.1
481 | fsevents: 2.3.3
482 |
483 | source-map-js@1.2.0: {}
484 |
485 | vite@5.3.3:
486 | dependencies:
487 | esbuild: 0.21.5
488 | postcss: 8.4.39
489 | rollup: 4.18.1
490 | optionalDependencies:
491 | fsevents: 2.3.3
492 |
--------------------------------------------------------------------------------
/demos/browser_stt/pnpm-lock.yaml:
--------------------------------------------------------------------------------
1 | lockfileVersion: '9.0'
2 |
3 | settings:
4 | autoInstallPeers: true
5 | excludeLinksFromLockfile: false
6 |
7 | importers:
8 |
9 | .:
10 | dependencies:
11 | '@charisma-ai/sdk':
12 | specifier: link:../..
13 | version: link:../..
14 | devDependencies:
15 | typescript:
16 | specifier: ^5.2.2
17 | version: 5.5.3
18 | vite:
19 | specifier: ^5.3.1
20 | version: 5.3.3
21 |
22 | packages:
23 |
24 | '@esbuild/aix-ppc64@0.21.5':
25 | resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==, tarball: https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz}
26 | engines: {node: '>=12'}
27 | cpu: [ppc64]
28 | os: [aix]
29 |
30 | '@esbuild/android-arm64@0.21.5':
31 | resolution: {integrity: sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==, tarball: https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz}
32 | engines: {node: '>=12'}
33 | cpu: [arm64]
34 | os: [android]
35 |
36 | '@esbuild/android-arm@0.21.5':
37 | resolution: {integrity: sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==, tarball: https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz}
38 | engines: {node: '>=12'}
39 | cpu: [arm]
40 | os: [android]
41 |
42 | '@esbuild/android-x64@0.21.5':
43 | resolution: {integrity: sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==, tarball: https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz}
44 | engines: {node: '>=12'}
45 | cpu: [x64]
46 | os: [android]
47 |
48 | '@esbuild/darwin-arm64@0.21.5':
49 | resolution: {integrity: sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==, tarball: https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz}
50 | engines: {node: '>=12'}
51 | cpu: [arm64]
52 | os: [darwin]
53 |
54 | '@esbuild/darwin-x64@0.21.5':
55 | resolution: {integrity: sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==, tarball: https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz}
56 | engines: {node: '>=12'}
57 | cpu: [x64]
58 | os: [darwin]
59 |
60 | '@esbuild/freebsd-arm64@0.21.5':
61 | resolution: {integrity: sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==, tarball: https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz}
62 | engines: {node: '>=12'}
63 | cpu: [arm64]
64 | os: [freebsd]
65 |
66 | '@esbuild/freebsd-x64@0.21.5':
67 | resolution: {integrity: sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==, tarball: https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz}
68 | engines: {node: '>=12'}
69 | cpu: [x64]
70 | os: [freebsd]
71 |
72 | '@esbuild/linux-arm64@0.21.5':
73 | resolution: {integrity: sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==, tarball: https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz}
74 | engines: {node: '>=12'}
75 | cpu: [arm64]
76 | os: [linux]
77 |
78 | '@esbuild/linux-arm@0.21.5':
79 | resolution: {integrity: sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==, tarball: https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz}
80 | engines: {node: '>=12'}
81 | cpu: [arm]
82 | os: [linux]
83 |
84 | '@esbuild/linux-ia32@0.21.5':
85 | resolution: {integrity: sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==, tarball: https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz}
86 | engines: {node: '>=12'}
87 | cpu: [ia32]
88 | os: [linux]
89 |
90 | '@esbuild/linux-loong64@0.21.5':
91 | resolution: {integrity: sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==, tarball: https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz}
92 | engines: {node: '>=12'}
93 | cpu: [loong64]
94 | os: [linux]
95 |
96 | '@esbuild/linux-mips64el@0.21.5':
97 | resolution: {integrity: sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==, tarball: https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz}
98 | engines: {node: '>=12'}
99 | cpu: [mips64el]
100 | os: [linux]
101 |
102 | '@esbuild/linux-ppc64@0.21.5':
103 | resolution: {integrity: sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==, tarball: https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz}
104 | engines: {node: '>=12'}
105 | cpu: [ppc64]
106 | os: [linux]
107 |
108 | '@esbuild/linux-riscv64@0.21.5':
109 | resolution: {integrity: sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==, tarball: https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz}
110 | engines: {node: '>=12'}
111 | cpu: [riscv64]
112 | os: [linux]
113 |
114 | '@esbuild/linux-s390x@0.21.5':
115 | resolution: {integrity: sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==, tarball: https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz}
116 | engines: {node: '>=12'}
117 | cpu: [s390x]
118 | os: [linux]
119 |
120 | '@esbuild/linux-x64@0.21.5':
121 | resolution: {integrity: sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==, tarball: https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz}
122 | engines: {node: '>=12'}
123 | cpu: [x64]
124 | os: [linux]
125 |
126 | '@esbuild/netbsd-x64@0.21.5':
127 | resolution: {integrity: sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==, tarball: https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz}
128 | engines: {node: '>=12'}
129 | cpu: [x64]
130 | os: [netbsd]
131 |
132 | '@esbuild/openbsd-x64@0.21.5':
133 | resolution: {integrity: sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==, tarball: https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz}
134 | engines: {node: '>=12'}
135 | cpu: [x64]
136 | os: [openbsd]
137 |
138 | '@esbuild/sunos-x64@0.21.5':
139 | resolution: {integrity: sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==, tarball: https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz}
140 | engines: {node: '>=12'}
141 | cpu: [x64]
142 | os: [sunos]
143 |
144 | '@esbuild/win32-arm64@0.21.5':
145 | resolution: {integrity: sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==, tarball: https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz}
146 | engines: {node: '>=12'}
147 | cpu: [arm64]
148 | os: [win32]
149 |
150 | '@esbuild/win32-ia32@0.21.5':
151 | resolution: {integrity: sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==, tarball: https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz}
152 | engines: {node: '>=12'}
153 | cpu: [ia32]
154 | os: [win32]
155 |
156 | '@esbuild/win32-x64@0.21.5':
157 | resolution: {integrity: sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==, tarball: https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz}
158 | engines: {node: '>=12'}
159 | cpu: [x64]
160 | os: [win32]
161 |
162 | '@rollup/rollup-android-arm-eabi@4.18.1':
163 | resolution: {integrity: sha512-lncuC4aHicncmbORnx+dUaAgzee9cm/PbIqgWz1PpXuwc+sa1Ct83tnqUDy/GFKleLiN7ZIeytM6KJ4cAn1SxA==, tarball: https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.18.1.tgz}
164 | cpu: [arm]
165 | os: [android]
166 |
167 | '@rollup/rollup-android-arm64@4.18.1':
168 | resolution: {integrity: sha512-F/tkdw0WSs4ojqz5Ovrw5r9odqzFjb5LIgHdHZG65dFI1lWTWRVy32KDJLKRISHgJvqUeUhdIvy43fX41znyDg==, tarball: https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.18.1.tgz}
169 | cpu: [arm64]
170 | os: [android]
171 |
172 | '@rollup/rollup-darwin-arm64@4.18.1':
173 | resolution: {integrity: sha512-vk+ma8iC1ebje/ahpxpnrfVQJibTMyHdWpOGZ3JpQ7Mgn/3QNHmPq7YwjZbIE7km73dH5M1e6MRRsnEBW7v5CQ==, tarball: https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.18.1.tgz}
174 | cpu: [arm64]
175 | os: [darwin]
176 |
177 | '@rollup/rollup-darwin-x64@4.18.1':
178 | resolution: {integrity: sha512-IgpzXKauRe1Tafcej9STjSSuG0Ghu/xGYH+qG6JwsAUxXrnkvNHcq/NL6nz1+jzvWAnQkuAJ4uIwGB48K9OCGA==, tarball: https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.18.1.tgz}
179 | cpu: [x64]
180 | os: [darwin]
181 |
182 | '@rollup/rollup-linux-arm-gnueabihf@4.18.1':
183 | resolution: {integrity: sha512-P9bSiAUnSSM7EmyRK+e5wgpqai86QOSv8BwvkGjLwYuOpaeomiZWifEos517CwbG+aZl1T4clSE1YqqH2JRs+g==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.18.1.tgz}
184 | cpu: [arm]
185 | os: [linux]
186 |
187 | '@rollup/rollup-linux-arm-musleabihf@4.18.1':
188 | resolution: {integrity: sha512-5RnjpACoxtS+aWOI1dURKno11d7krfpGDEn19jI8BuWmSBbUC4ytIADfROM1FZrFhQPSoP+KEa3NlEScznBTyQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.18.1.tgz}
189 | cpu: [arm]
190 | os: [linux]
191 |
192 | '@rollup/rollup-linux-arm64-gnu@4.18.1':
193 | resolution: {integrity: sha512-8mwmGD668m8WaGbthrEYZ9CBmPug2QPGWxhJxh/vCgBjro5o96gL04WLlg5BA233OCWLqERy4YUzX3bJGXaJgQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.18.1.tgz}
194 | cpu: [arm64]
195 | os: [linux]
196 |
197 | '@rollup/rollup-linux-arm64-musl@4.18.1':
198 | resolution: {integrity: sha512-dJX9u4r4bqInMGOAQoGYdwDP8lQiisWb9et+T84l2WXk41yEej8v2iGKodmdKimT8cTAYt0jFb+UEBxnPkbXEQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.18.1.tgz}
199 | cpu: [arm64]
200 | os: [linux]
201 |
202 | '@rollup/rollup-linux-powerpc64le-gnu@4.18.1':
203 | resolution: {integrity: sha512-V72cXdTl4EI0x6FNmho4D502sy7ed+LuVW6Ym8aI6DRQ9hQZdp5sj0a2usYOlqvFBNKQnLQGwmYnujo2HvjCxQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.18.1.tgz}
204 | cpu: [ppc64]
205 | os: [linux]
206 |
207 | '@rollup/rollup-linux-riscv64-gnu@4.18.1':
208 | resolution: {integrity: sha512-f+pJih7sxoKmbjghrM2RkWo2WHUW8UbfxIQiWo5yeCaCM0TveMEuAzKJte4QskBp1TIinpnRcxkquY+4WuY/tg==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.18.1.tgz}
209 | cpu: [riscv64]
210 | os: [linux]
211 |
212 | '@rollup/rollup-linux-s390x-gnu@4.18.1':
213 | resolution: {integrity: sha512-qb1hMMT3Fr/Qz1OKovCuUM11MUNLUuHeBC2DPPAWUYYUAOFWaxInaTwTQmc7Fl5La7DShTEpmYwgdt2hG+4TEg==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.18.1.tgz}
214 | cpu: [s390x]
215 | os: [linux]
216 |
217 | '@rollup/rollup-linux-x64-gnu@4.18.1':
218 | resolution: {integrity: sha512-7O5u/p6oKUFYjRbZkL2FLbwsyoJAjyeXHCU3O4ndvzg2OFO2GinFPSJFGbiwFDaCFc+k7gs9CF243PwdPQFh5g==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.18.1.tgz}
219 | cpu: [x64]
220 | os: [linux]
221 |
222 | '@rollup/rollup-linux-x64-musl@4.18.1':
223 | resolution: {integrity: sha512-pDLkYITdYrH/9Cv/Vlj8HppDuLMDUBmgsM0+N+xLtFd18aXgM9Nyqupb/Uw+HeidhfYg2lD6CXvz6CjoVOaKjQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.18.1.tgz}
224 | cpu: [x64]
225 | os: [linux]
226 |
227 | '@rollup/rollup-win32-arm64-msvc@4.18.1':
228 | resolution: {integrity: sha512-W2ZNI323O/8pJdBGil1oCauuCzmVd9lDmWBBqxYZcOqWD6aWqJtVBQ1dFrF4dYpZPks6F+xCZHfzG5hYlSHZ6g==, tarball: https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.18.1.tgz}
229 | cpu: [arm64]
230 | os: [win32]
231 |
232 | '@rollup/rollup-win32-ia32-msvc@4.18.1':
233 | resolution: {integrity: sha512-ELfEX1/+eGZYMaCIbK4jqLxO1gyTSOIlZr6pbC4SRYFaSIDVKOnZNMdoZ+ON0mrFDp4+H5MhwNC1H/AhE3zQLg==, tarball: https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.18.1.tgz}
234 | cpu: [ia32]
235 | os: [win32]
236 |
237 | '@rollup/rollup-win32-x64-msvc@4.18.1':
238 | resolution: {integrity: sha512-yjk2MAkQmoaPYCSu35RLJ62+dz358nE83VfTePJRp8CG7aMg25mEJYpXFiD+NcevhX8LxD5OP5tktPXnXN7GDw==, tarball: https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.18.1.tgz}
239 | cpu: [x64]
240 | os: [win32]
241 |
242 | '@types/estree@1.0.5':
243 | resolution: {integrity: sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==, tarball: https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz}
244 |
245 | esbuild@0.21.5:
246 | resolution: {integrity: sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==, tarball: https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz}
247 | engines: {node: '>=12'}
248 | hasBin: true
249 |
250 | fsevents@2.3.3:
251 | resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==, tarball: https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz}
252 | engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
253 | os: [darwin]
254 |
255 | nanoid@3.3.7:
256 | resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==, tarball: https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz}
257 | engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1}
258 | hasBin: true
259 |
260 | picocolors@1.0.1:
261 | resolution: {integrity: sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==, tarball: https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz}
262 |
263 | postcss@8.4.39:
264 | resolution: {integrity: sha512-0vzE+lAiG7hZl1/9I8yzKLx3aR9Xbof3fBHKunvMfOCYAtMhrsnccJY2iTURb9EZd5+pLuiNV9/c/GZJOHsgIw==, tarball: https://registry.npmjs.org/postcss/-/postcss-8.4.39.tgz}
265 | engines: {node: ^10 || ^12 || >=14}
266 |
267 | rollup@4.18.1:
268 | resolution: {integrity: sha512-Elx2UT8lzxxOXMpy5HWQGZqkrQOtrVDDa/bm9l10+U4rQnVzbL/LgZ4NOM1MPIDyHk69W4InuYDF5dzRh4Kw1A==, tarball: https://registry.npmjs.org/rollup/-/rollup-4.18.1.tgz}
269 | engines: {node: '>=18.0.0', npm: '>=8.0.0'}
270 | hasBin: true
271 |
272 | source-map-js@1.2.0:
273 | resolution: {integrity: sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==, tarball: https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz}
274 | engines: {node: '>=0.10.0'}
275 |
276 | typescript@5.5.3:
277 | resolution: {integrity: sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ==, tarball: https://registry.npmjs.org/typescript/-/typescript-5.5.3.tgz}
278 | engines: {node: '>=14.17'}
279 | hasBin: true
280 |
281 | vite@5.3.3:
282 | resolution: {integrity: sha512-NPQdeCU0Dv2z5fu+ULotpuq5yfCS1BzKUIPhNbP3YBfAMGJXbt2nS+sbTFu+qchaqWTD+H3JK++nRwr6XIcp6A==, tarball: https://registry.npmjs.org/vite/-/vite-5.3.3.tgz}
283 | engines: {node: ^18.0.0 || >=20.0.0}
284 | hasBin: true
285 | peerDependencies:
286 | '@types/node': ^18.0.0 || >=20.0.0
287 | less: '*'
288 | lightningcss: ^1.21.0
289 | sass: '*'
290 | stylus: '*'
291 | sugarss: '*'
292 | terser: ^5.4.0
293 | peerDependenciesMeta:
294 | '@types/node':
295 | optional: true
296 | less:
297 | optional: true
298 | lightningcss:
299 | optional: true
300 | sass:
301 | optional: true
302 | stylus:
303 | optional: true
304 | sugarss:
305 | optional: true
306 | terser:
307 | optional: true
308 |
309 | snapshots:
310 |
311 | '@esbuild/aix-ppc64@0.21.5':
312 | optional: true
313 |
314 | '@esbuild/android-arm64@0.21.5':
315 | optional: true
316 |
317 | '@esbuild/android-arm@0.21.5':
318 | optional: true
319 |
320 | '@esbuild/android-x64@0.21.5':
321 | optional: true
322 |
323 | '@esbuild/darwin-arm64@0.21.5':
324 | optional: true
325 |
326 | '@esbuild/darwin-x64@0.21.5':
327 | optional: true
328 |
329 | '@esbuild/freebsd-arm64@0.21.5':
330 | optional: true
331 |
332 | '@esbuild/freebsd-x64@0.21.5':
333 | optional: true
334 |
335 | '@esbuild/linux-arm64@0.21.5':
336 | optional: true
337 |
338 | '@esbuild/linux-arm@0.21.5':
339 | optional: true
340 |
341 | '@esbuild/linux-ia32@0.21.5':
342 | optional: true
343 |
344 | '@esbuild/linux-loong64@0.21.5':
345 | optional: true
346 |
347 | '@esbuild/linux-mips64el@0.21.5':
348 | optional: true
349 |
350 | '@esbuild/linux-ppc64@0.21.5':
351 | optional: true
352 |
353 | '@esbuild/linux-riscv64@0.21.5':
354 | optional: true
355 |
356 | '@esbuild/linux-s390x@0.21.5':
357 | optional: true
358 |
359 | '@esbuild/linux-x64@0.21.5':
360 | optional: true
361 |
362 | '@esbuild/netbsd-x64@0.21.5':
363 | optional: true
364 |
365 | '@esbuild/openbsd-x64@0.21.5':
366 | optional: true
367 |
368 | '@esbuild/sunos-x64@0.21.5':
369 | optional: true
370 |
371 | '@esbuild/win32-arm64@0.21.5':
372 | optional: true
373 |
374 | '@esbuild/win32-ia32@0.21.5':
375 | optional: true
376 |
377 | '@esbuild/win32-x64@0.21.5':
378 | optional: true
379 |
380 | '@rollup/rollup-android-arm-eabi@4.18.1':
381 | optional: true
382 |
383 | '@rollup/rollup-android-arm64@4.18.1':
384 | optional: true
385 |
386 | '@rollup/rollup-darwin-arm64@4.18.1':
387 | optional: true
388 |
389 | '@rollup/rollup-darwin-x64@4.18.1':
390 | optional: true
391 |
392 | '@rollup/rollup-linux-arm-gnueabihf@4.18.1':
393 | optional: true
394 |
395 | '@rollup/rollup-linux-arm-musleabihf@4.18.1':
396 | optional: true
397 |
398 | '@rollup/rollup-linux-arm64-gnu@4.18.1':
399 | optional: true
400 |
401 | '@rollup/rollup-linux-arm64-musl@4.18.1':
402 | optional: true
403 |
404 | '@rollup/rollup-linux-powerpc64le-gnu@4.18.1':
405 | optional: true
406 |
407 | '@rollup/rollup-linux-riscv64-gnu@4.18.1':
408 | optional: true
409 |
410 | '@rollup/rollup-linux-s390x-gnu@4.18.1':
411 | optional: true
412 |
413 | '@rollup/rollup-linux-x64-gnu@4.18.1':
414 | optional: true
415 |
416 | '@rollup/rollup-linux-x64-musl@4.18.1':
417 | optional: true
418 |
419 | '@rollup/rollup-win32-arm64-msvc@4.18.1':
420 | optional: true
421 |
422 | '@rollup/rollup-win32-ia32-msvc@4.18.1':
423 | optional: true
424 |
425 | '@rollup/rollup-win32-x64-msvc@4.18.1':
426 | optional: true
427 |
428 | '@types/estree@1.0.5': {}
429 |
430 | esbuild@0.21.5:
431 | optionalDependencies:
432 | '@esbuild/aix-ppc64': 0.21.5
433 | '@esbuild/android-arm': 0.21.5
434 | '@esbuild/android-arm64': 0.21.5
435 | '@esbuild/android-x64': 0.21.5
436 | '@esbuild/darwin-arm64': 0.21.5
437 | '@esbuild/darwin-x64': 0.21.5
438 | '@esbuild/freebsd-arm64': 0.21.5
439 | '@esbuild/freebsd-x64': 0.21.5
440 | '@esbuild/linux-arm': 0.21.5
441 | '@esbuild/linux-arm64': 0.21.5
442 | '@esbuild/linux-ia32': 0.21.5
443 | '@esbuild/linux-loong64': 0.21.5
444 | '@esbuild/linux-mips64el': 0.21.5
445 | '@esbuild/linux-ppc64': 0.21.5
446 | '@esbuild/linux-riscv64': 0.21.5
447 | '@esbuild/linux-s390x': 0.21.5
448 | '@esbuild/linux-x64': 0.21.5
449 | '@esbuild/netbsd-x64': 0.21.5
450 | '@esbuild/openbsd-x64': 0.21.5
451 | '@esbuild/sunos-x64': 0.21.5
452 | '@esbuild/win32-arm64': 0.21.5
453 | '@esbuild/win32-ia32': 0.21.5
454 | '@esbuild/win32-x64': 0.21.5
455 |
456 | fsevents@2.3.3:
457 | optional: true
458 |
459 | nanoid@3.3.7: {}
460 |
461 | picocolors@1.0.1: {}
462 |
463 | postcss@8.4.39:
464 | dependencies:
465 | nanoid: 3.3.7
466 | picocolors: 1.0.1
467 | source-map-js: 1.2.0
468 |
469 | rollup@4.18.1:
470 | dependencies:
471 | '@types/estree': 1.0.5
472 | optionalDependencies:
473 | '@rollup/rollup-android-arm-eabi': 4.18.1
474 | '@rollup/rollup-android-arm64': 4.18.1
475 | '@rollup/rollup-darwin-arm64': 4.18.1
476 | '@rollup/rollup-darwin-x64': 4.18.1
477 | '@rollup/rollup-linux-arm-gnueabihf': 4.18.1
478 | '@rollup/rollup-linux-arm-musleabihf': 4.18.1
479 | '@rollup/rollup-linux-arm64-gnu': 4.18.1
480 | '@rollup/rollup-linux-arm64-musl': 4.18.1
481 | '@rollup/rollup-linux-powerpc64le-gnu': 4.18.1
482 | '@rollup/rollup-linux-riscv64-gnu': 4.18.1
483 | '@rollup/rollup-linux-s390x-gnu': 4.18.1
484 | '@rollup/rollup-linux-x64-gnu': 4.18.1
485 | '@rollup/rollup-linux-x64-musl': 4.18.1
486 | '@rollup/rollup-win32-arm64-msvc': 4.18.1
487 | '@rollup/rollup-win32-ia32-msvc': 4.18.1
488 | '@rollup/rollup-win32-x64-msvc': 4.18.1
489 | fsevents: 2.3.3
490 |
491 | source-map-js@1.2.0: {}
492 |
493 | typescript@5.5.3: {}
494 |
495 | vite@5.3.3:
496 | dependencies:
497 | esbuild: 0.21.5
498 | postcss: 8.4.39
499 | rollup: 4.18.1
500 | optionalDependencies:
501 | fsevents: 2.3.3
502 |
--------------------------------------------------------------------------------