├── .eslintignore ├── .eslintrc ├── .github └── workflows │ └── run-tests.yml ├── .gitignore ├── .husky ├── .gitignore └── pre-commit ├── .lintstagedrc.json ├── .npmrc ├── .prettierrc.json ├── .swcrc ├── CHANGELOG.md ├── LICENSE.md ├── README.md ├── demos ├── browser-stt │ ├── .gitignore │ ├── index.html │ ├── package.json │ ├── pnpm-lock.yaml │ ├── readme.md │ ├── src │ │ ├── main.ts │ │ ├── style.css │ │ └── vite-env.d.ts │ └── tsconfig.json ├── deepgram-stt │ ├── .gitignore │ ├── index.html │ ├── package.json │ ├── pnpm-lock.yaml │ ├── readme.md │ ├── src │ │ ├── main.ts │ │ ├── style.css │ │ └── vite-env.d.ts │ └── tsconfig.json └── no-stt │ ├── .gitignore │ ├── index.html │ ├── package.json │ ├── pnpm-lock.yaml │ ├── readme.md │ └── src │ ├── main.js │ └── style.css ├── jest.config.js ├── package.json ├── pnpm-lock.yaml ├── src ├── AudioInputsBrowser.ts ├── AudioInputsService.ts ├── AudioManager.test.ts ├── AudioManager.ts ├── AudioOutputsService.ts ├── AudioTrackManager.test.ts ├── AudioTrackManager.ts ├── Conversation.ts ├── Playthrough.ts ├── __mocks__ │ ├── MockAudioInputsBrowser.ts │ └── MockAudioInputsService.ts ├── api.ts ├── index.ts ├── speech-types.ts └── types.ts ├── tsconfig.eslint.json └── tsconfig.json /.eslintignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "browser": true, 4 | "node": true 5 | }, 6 | "extends": [ 7 | "airbnb-base", 8 | "plugin:@typescript-eslint/eslint-recommended", 9 | "plugin:@typescript-eslint/recommended", 10 | "plugin:@typescript-eslint/recommended-requiring-type-checking", 11 | "plugin:import/typescript", 12 | "plugin:prettier/recommended" 13 | ], 14 | "parser": "@typescript-eslint/parser", 15 | "parserOptions": { 16 | "project": "./tsconfig.eslint.json" 17 | }, 18 | 
"plugins": ["@typescript-eslint"], 19 | "rules": { 20 | "@typescript-eslint/no-floating-promises": "off", 21 | "@typescript-eslint/no-shadow": "error", 22 | "@typescript-eslint/restrict-template-expressions": "off", 23 | "import/extensions": ["error", "never"], 24 | "import/no-cycle": "off", 25 | "import/no-unresolved": "off", 26 | "no-console": "off", 27 | "no-shadow": "off" 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /.github/workflows/run-tests.yml: -------------------------------------------------------------------------------- 1 | name: Run tests 2 | on: push 3 | jobs: 4 | run-tests: 5 | name: Runs test suite 6 | runs-on: ubuntu-latest 7 | steps: 8 | - name: Check out code 9 | uses: actions/checkout@v3 10 | with: 11 | persist-credentials: false 12 | - uses: pnpm/action-setup@v4 13 | - uses: actions/setup-node@v4 14 | with: 15 | cache: "pnpm" 16 | node-version: "22.x" 17 | - name: Install dependencies 18 | run: pnpm install --frozen-lockfile 19 | - name: Run tests 20 | run: timeout 10s pnpm test 21 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Node-related 2 | node_modules/* 3 | npm-debug.log 4 | yarn-error.log 5 | 6 | # Build-related 7 | dist/* 8 | test/dist/* 9 | .parcel-cache 10 | 11 | # IDE 12 | .vscode/* 13 | 14 | .DS_Store -------------------------------------------------------------------------------- /.husky/.gitignore: -------------------------------------------------------------------------------- 1 | _ 2 | -------------------------------------------------------------------------------- /.husky/pre-commit: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | . 
"$(dirname "$0")/_/husky.sh" 3 | 4 | pnpm lint-staged 5 | -------------------------------------------------------------------------------- /.lintstagedrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "src/**/*.ts": ["eslint --fix"], 3 | "*.{md,json}": ["prettier --trailing-comma all --write"] 4 | } 5 | -------------------------------------------------------------------------------- /.npmrc: -------------------------------------------------------------------------------- 1 | enable-pre-post-scripts=true 2 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "trailingComma": "all" 3 | } 4 | -------------------------------------------------------------------------------- /.swcrc: -------------------------------------------------------------------------------- 1 | { 2 | "jsc": { 3 | "parser": { 4 | "syntax": "typescript" 5 | }, 6 | "experimental": { 7 | "keepImportAssertions": true 8 | }, 9 | "target": "es2022" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ### v6.0.0 4 | 5 | - Add interim transcripts from the STT service 6 | - Update the deepgram-stt demo to use interim transcripts 7 | - Stop the microphone and emit end-current-transcription, and start the microphone again for next transcription. This uses new functionality on our STT server so the same playthrough-validated socket is kept but new downstream connections to the STT service can be triggered. Without this the results from from earlier recordings can bleed into the current transcription. 8 | - **BREAKING** rename and rework character speech. 
No longer use audioManager outputServicePlay and now use audioManager playCharacterSpeech and volume will automatically go back to normal level after interruption from microphone. 9 | - **BREAKING** use characterSpeechVolume get and set rather than having a setCharacterSpeechVolume 10 | 11 | ### v5.0.8 12 | 13 | - Emit "stop" event when stopListening is called without a microphone. 14 | 15 | ### v5.0.7 16 | 17 | - Add `disconnect` method to disconnect from the server. 18 | 19 | ### v5.0.6 20 | 21 | - Add `timeout` argument to `startListening` methods. 22 | 23 | ### v5.0.5 24 | 25 | - Fix reconnection logic with a timer between attempts. 26 | 27 | ### v5.0.4 28 | 29 | - Add `audio.outputServiceSetVolume` to set the volume of character speech. 30 | 31 | ### v5.0.3 32 | 33 | - Add sttUrl to AudioManagerOptions. 34 | 35 | ### v5.0.2 36 | 37 | - Add missing file extensions 38 | 39 | ### v5.0.1 40 | 41 | - Add exports for `AudioManagerOptions` and `AudioOutputsServicePlayOptions`. 42 | 43 | ### v5.0.0 44 | 45 | **BREAKING** 46 | 47 | - Added `AudioManager` class to handle audio input and output. 48 | - Removed `Microphone` and `Speaker` classes. Replaced with `AudioManager` methods. 49 | - Use AudioContext API for better browser compatibility. 50 | 51 | ### v4.0.5 52 | 53 | - Updated types SpeechRecognitionStartEvent, SpeechRecognitionResponse, SpeechRecognitionParameters with traceId. 54 | - Add orientation type to ImageLayer. 55 | 56 | ### v4.0.3 57 | 58 | - Speech recognition now uses the correct sample rate in Firefox. 59 | 60 | ### v4.0.2 61 | 62 | - Stopping the microphone now stops the underlying `MediaStreamTrack`, removing the red 'listening' icon. 63 | 64 | ### v4.0.1 65 | 66 | - Fixed an issue where speech recognition was using an incorrect sample rate. 67 | 68 | ### v4.0.0 69 | 70 | - **BREAKING:** This packages now exports ES Module only. 71 | - **BREAKING:** An implementation of `fetch` is no longer included with this library. 
Consumers of this library should ensure their environment supports `fetch`. 72 | - **BREAKING:** This library now relies on having `URLSearchParams` available in the environment. 73 | - **BREAKING:** This library now relies on having `BigInt` available in the environment. 74 | - **BREAKING:** `playthroughId`s and `conversationId`s have been changed from `number` to `string` type everywhere in this SDK, and renamed to `playthroughUuid` and `conversationUuid`. 75 | - **BREAKING:** `api.createPlaythroughToken`, `api.createConversation` and `api.createCharacterConversation` all now return an object instead of a scalar, to facilitate any future changes to these methods. 76 | - **BREAKING:** `memories` in `getPlaythroughInfo` and in the `message` event now return `saveValue`s as JSON instead of only as strings. For example, counter memories are now actually `number`s and boolean memories are now actually `boolean`s. 77 | - **BREAKING:** `getMessageHistory` has been removed, and `getEventHistory` added as a more fully-featured alternative with much greater support for filtering, and can return all event types. 78 | - `setMemory` now accepts any JSON values instead of only strings. 79 | - **BREAKING:** Speech recognition stream now uses common objects to start up, deliver results and stop, regardless of which downstream service is selected. 80 | - Add start and stop events to the speech recognition stream. 81 | 82 | ### v3.9.0 83 | 84 | - Added `result` event to `Microphone` so clients can subscribe to raw `SpeechRecognition` events. 85 | - `recognise` and `recognise-interim` now emit the text of the _last_ result instead of the _first_ result in the `SpeechRecognition` event if `continuous` is `true`. 86 | 87 | ### v3.8.0 88 | 89 | - Multiple memories can now be set at once using the `setMemory` call. 90 | 91 | ### v3.7.0 92 | 93 | - Add support for `forkPlaythrough` API. 
This enables a player to upgrade to the latest published version from their old playthrough, copying across memories and emotions into the new playthrough, and returning the new token. Note that conversations are not carried across. 94 | 95 | ### v3.6.1 96 | 97 | - `problem` events scoped to a conversation can now be listened to via `conversation.on("problem", ...)` 98 | 99 | ### v3.6.0 100 | 101 | - It's now possible to specify multiple supported speech encodings in `speechConfig` by passing an array instead of a string. Charisma will use the first encoding that the voice synthesis service supports. 102 | - Added experimental support for intermediate client events. These events can be sent to Charisma to prevent characters from talking if the player is still speaking or typing. This can only be enabled for a story by getting in touch at [hello@charisma.ai](mailto:hello@charisma.ai). 103 | 104 | ### v3.5.0 105 | 106 | - Added support for Decentraland. 107 | 108 | ### v3.4.2 109 | 110 | - SDK info is now also sent upon reconnection to the room. 111 | 112 | ### v3.4.1 113 | 114 | - `package.json` now references correct emitted types location. 115 | 116 | ### v3.4.0 117 | 118 | - Added `languageCode` option to `createPlaythroughToken`, to play Charisma stories in languages other than English. 119 | - Added SDK info to joining a room, for Charisma to track which SDK versions are in use. 120 | 121 | ### v3.3.0 122 | 123 | - It's now possible to subscribe to events that are sent from other players, such as other players' messages. This can be done by adding a subscriber to a conversation to listen for the corresponding event, e.g. `conversation.on("reply", () => { /* remote player's reply */ })`. These handlers will _not_ be fired for messages sent from the local connected client, only for remote clients. 124 | - Add missing `graphId: number` to `MessagePathItem` type. 125 | - Updated dependencies. 
126 | 127 | ### v3.2.0 128 | 129 | - Added `startGraphId` and `startGraphReferenceId` to `StartEvent` to start from a specific graph ID. 130 | - Added experimental `pause` and `play` methods to `Playthrough`. 131 | 132 | ### v3.1.0 133 | 134 | - Support for action node/event. 135 | - `SpeechRecognitionStopOptions` is now exported. 136 | 137 | ### v3.0.0 138 | 139 | There is a new emotion engine in Charisma! As a result... 140 | 141 | - `message.characterMoods` has been removed and replaced with `message.emotions`. This contains each character's current mood and relationship with the player, and any active feeling effects. 142 | - `setMood` has been removed. We may add an equivalent API for the new emotion engine in the future. Let us know about your use case if this interests you! 143 | 144 | ### v2.3.0 145 | 146 | - `Microphone.stopListening()` now accepts an `options` parameter with a single option `waitForLastResult`. If set to `true`, then the `recognise` will be called a final time with the result of the audio captured so far. If `false`, the operation will be aborted, so no additional `recognise` event will occur. 147 | 148 | ### v2.2.0 149 | 150 | - `Speaker.play()` now accepts an `options` parameter as its second parameter instead of a boolean value (which used to represent `interrupt`). This change is backwards compatible, but the old boolean way is deprecated and will be removed in the next major release. 151 | - `options` contains two parameters: `trackId` and `interrupt`. `trackId` can be used to interrupt only a particular track, for example, to prevent a character talking over themselves. `interrupt` can now be configured to `all` (interrupt all playing audio), `track` (interrupt the specified `trackId` if playing), or `none` (don't interrupt any audio). 152 | 153 | ### v2.1.0 154 | 155 | - Adds the option to pass an `apiKey` to use for authentication for playthrough token creation. 
This is now the recommended way to authenticate as API keys do not expire (unless regenerated) and are more secure than the `userToken`. `userToken` should no longer be used. 156 | 157 | ### v2.0.0 158 | 159 | This release makes **several breaking changes**. The main change is replacing `socket.io` with `colyseus.js`. 160 | 161 | - Replaces `socket.io` with `colyseus.js`. 162 | - Due to how Colyseus serializes data, `audio` is now an `ArrayBuffer` instead of an object with the `data` property. 163 | - API methods and the `Playthrough` constructor now accept a `baseUrl` option, which is used in preference to `globalBaseUrl`. `globalBaseUrl` is now set with `setGlobalBaseUrl` instead of `setBaseUrl`. 164 | - API methods are now individually exported instead of being static methods on the Charisma class, as well as being exported under a bracket `api` object. 165 | - Improved the implementation of `Microphone`. 166 | - Replace multiple connection events from `Charisma` (`connect`, `disconnect` etc) with single `connection-status` event. 167 | - The `Charisma` class has been renamed to `Playthrough`. 168 | - The `cleanup` function has been renamed to `disconnect`. 169 | 170 | ### v1.10.0 171 | 172 | - Change `imageLayers` field to an array of object, each including `url`, `resizeMode` and `points`. 173 | 174 | ### v1.9.1 175 | 176 | - Add `isImpactShareable` and `impactImageUrl` fields to impacts, and fix the type of impact `id`s to be `string`s. 177 | 178 | ### v1.9.0 179 | 180 | - **BREAKING CHANGE**: `eventId`s are now emitted as `string`s. Please upgrade to this version to continue using the reconnection "catch-up" logic (though everything else should work). 181 | 182 | ### v1.8.1 183 | 184 | - `Speaker` will no longer try to play audio if the context's state is not `running`. This resolves an issue where the user has not granted permission for the audio context to play sound, and so the `play(...)` promise never resolves. 
185 | 186 | ### v1.8.0 187 | 188 | - Reconnecting will now fetch and emit messages that were emitted from the server after the last received message. 189 | - Add `impacts` field to `GetPlaythroughResult` type. 190 | 191 | ### v1.7.0 192 | 193 | - Pass through more events: `reconnect`, `reconnecting`, `disconnect` and `problem`. 194 | - Added types for new `panel` message, and added bubble-related types onto the `media` key. 195 | - Adjusted `setMemory` type to accept `null`. 196 | - Removes `scene-complete` event and `stopOnSceneComplete` option. 197 | - Adds `episode-complete` event. The chat engine automatically stops on episode end if the episode is started by an app user. 198 | - Adds `restartFromEpisodeId` and `restartFromEpisodeIndex` methods and removes `restartFromScene` method. 199 | 200 | ### v1.6.1 201 | 202 | - GET requests no longer try to add a body to the request. 203 | 204 | ### v1.6.0 205 | 206 | - Add `start` and `stop` events to `Speaker`. 207 | 208 | ### v1.5.0 209 | 210 | - Adds support for use in Node.js. 211 | 212 | ### v1.4.0 213 | 214 | - `Microphone.startListening(timeout)` now has a timeout parameter to automatically stop the microphone after `timeout` milliseconds. 215 | - `Microphone.resetTimeout(timeout)` will reset the timeout to `timeout` milliseconds. 216 | - Microphone now emits `start` and `stop` events, particularly useful in conjuction with timeout. 217 | 218 | ### v1.3.0 219 | 220 | - Add an `interrupt` option to `Speaker` to ensure replies don't overlap. 221 | - **Breaking**: Target ES2018; drop support for pre-ES2018 environments. 222 | 223 | ### v1.2.0 224 | 225 | - Add `restartFromScene` method to SDK. This can be used to reset the playthrough to the state it was in at the beginning of a particular scene. 226 | - Exports more types and adjusts message types to include `eventId`, `timestamp` and `memories`. 227 | 228 | ### v1.1.0 229 | 230 | - Add `resume` event to SDK. 
This can be used to resume a conversation from where it left off. 231 | 232 | ### v1.0.5 233 | 234 | - Use `webkitAudioContext` for `Speaker` on Safari. 235 | 236 | ### v1.0.4 237 | 238 | - Export the `Impact` type. 239 | 240 | ### v1.0.3 241 | 242 | - `impacts` are now objects containing their ID as well as the `impact` string. 243 | 244 | ### v1.0.2 245 | 246 | - Rename `setStopOnSceneEnd` to `setStopOnSceneComplete` to ensure consistency with the event name. 247 | 248 | ### v1.0.1 249 | 250 | - Fix `createPlaythroughToken` throwing an error when both `version` and `userToken` are not provided. 251 | 252 | ### v1.0.0 253 | 254 | - Initial stable release. 255 | - Completely overhauls the SDK API, please see the [README](./README.md) for more details on how to use the newer, conversation-based API. 256 | 257 | ## Past major versions 258 | 259 | ### v0.9.2 260 | 261 | - Pass data (containing `impacts`) through on scene complete event. 262 | 263 | ### v0.9.1 264 | 265 | - Pass `stopOnSceneComplete` through to the `CharismaInstance`. 266 | 267 | ### v0.9.0 268 | 269 | - Add `stopOnSceneComplete` option to prevent automatically continuing between scenes. 270 | 271 | ### v0.8.3 272 | 273 | - Add `media` field onto the character message type. 274 | 275 | ### v0.8.2 276 | 277 | - Add `tapToContinue` to message history type. 278 | 279 | ### v0.8.1 280 | 281 | - Add `timestamp` to messages returned from `getMessageHistory`. 282 | - Improved type for `getMessageHistory`. 283 | 284 | ### v0.8.0 285 | 286 | - Can now specify `playthroughToken` to re-use a playthrough instead of creating a new one when connecting. 287 | - Can now fetch message history of the playthrough using `charisma.getMessageHistory`. 288 | 289 | ### v0.7.3 290 | 291 | - Fix `IMessageCharacter.speech` type. 292 | 293 | ### v0.7.2 294 | 295 | - `ISynthesisConfig` and some additional types are now exported. 296 | 297 | ### v0.7.1 298 | 299 | - `speech` now takes a config object as well as a boolean. 
It can specify the audio encoding to use and whether it returns the raw audio data, or a link to an audio file. 300 | 301 | ### v0.7.0 302 | 303 | - BREAKING: The `reply` event has been renamed to `message`, and now has a `type` field on the payload to distinguish between `character` and `media` events. Other fields have been refactored, such as `character` and `speech`. Please consult [src/types.ts](src/types.ts) to find the new message format. 304 | - A new `tap` event is available for the client to send. 305 | 306 | ### v0.6.0 307 | 308 | - Accidentally published version, but never tagged as `latest`. 309 | 310 | ### v0.5.1 311 | 312 | - Fix broken 0.5.0 publish. 313 | 314 | ### v0.5.0 315 | 316 | - Removed `browser` field from `package.json`. Consumers can use the UMD directly from unpkg. 317 | - Removed `actIndex` as it is no longer supported. 318 | 319 | ### v0.4.2 320 | 321 | - Buffer `set-memory` events until `status: 'ready'` is received. 322 | 323 | ### v0.4.1 324 | 325 | - `actIndex` and `sceneIndex` can now be set on the `start` event to start from a specific story scene. 326 | 327 | ### v0.4.0 328 | 329 | - **BREAKING**: UMD name changed from `Charisma` to `CharismaSDK`. The ES/CJS builds now don't bundle their dependencies. 330 | - Added `setMemory` method to directly set a memory. 331 | - Fixed all ID types to be `number`, not `string`. 332 | 333 | ### v0.3.1 334 | 335 | - Passing no `version` to the `connect` method now results in using the latest published version, rather than the draft version. 336 | 337 | ### v0.3.0 338 | 339 | - Package renamed (rescoped) to `@charisma-ai/sdk`. 340 | 341 | ### v0.2.0 342 | 343 | - The `debug` option has been replaced with the `version` option, which defaults to `undefined` (the latest published story version). 344 | 345 | ### v0.1.2 346 | 347 | - The microphone now stops listening when a reply with `endStory` set to `true` is emitted. 
348 | 349 | ### v0.1.1 350 | 351 | - `AudioContext` is now created on-demand rather than on initialisation. 352 | 353 | ### v0.1.0 354 | 355 | - Socket.io now forces websockets, skipping the long-polling check. 356 | 357 | ### v0.0.4 358 | 359 | - Fixed issue where audio was not working on Safari. 360 | 361 | ### v0.0.3 362 | 363 | - Microphone keeps better track of whether to resume speech recognition after speaking. 364 | 365 | ### v0.0.2 366 | 367 | - Support for recording speech-to-text via Chrome SpeechRecognition. 368 | - New events `recognise-interim` and `recognise`. 369 | - Speech recognition is paused while the audio is played. 370 | 371 | ### v0.0.1 372 | 373 | - Initial release. 374 | - Support for `reply` and `start` client events, and `reply`, `start-typing` and `stop-typing` server events. 375 | - Support for playing text-to-speech audio. 376 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021-2023 Charisma Entertainment Ltd 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Charisma.ai SDK for JavaScript 2 | 3 | ``` 4 | pnpm i @charisma-ai/sdk 5 | ``` 6 | 7 | ## Usage 8 | 9 | ```js 10 | // main.js 11 | import { 12 | Playthrough, 13 | createPlaythroughToken, 14 | createConversation, 15 | } from "@charisma-ai/sdk"; 16 | 17 | let conversation; 18 | 19 | async function start() { 20 | // Get a unique token for the playthrough. 21 | const { token } = await createPlaythroughToken({ storyId: 4 }); 22 | 23 | // Create a new conversation. 24 | const { conversationUuid } = await createConversation(token); 25 | 26 | // Create a new playthrough. 27 | const playthrough = new Playthrough(token); 28 | 29 | // Join the conversation. 30 | conversation = playthrough.joinConversation(conversationUuid); 31 | 32 | // Handle messages in the conversation. 33 | conversation.on("message", (message) => { 34 | console.log(message.message.text); 35 | }); 36 | 37 | conversation.on("problem", console.warn); 38 | 39 | // Prepare the listener to start the conversation when the playthrough is connected. 40 | playthrough.on("connection-status", (status) => { 41 | if (status === "connected") { 42 | conversation.start(); 43 | } 44 | }); 45 | 46 | await playthrough.connect(); 47 | } 48 | 49 | // Send the reply to charisma. 
50 | function reply(message) { 51 | conversation.reply({ text: message }); 52 | } 53 | ``` 54 | 55 | ## API Reference 56 | 57 | There are two ways to use the API directly, either by importing `api`, which includes all the API methods, or you can import API methods individually, like `createPlaythroughToken`. 58 | 59 | ```js 60 | import { api, createPlaythroughToken } from "@charisma-ai/sdk"; 61 | 62 | api.createPlaythroughToken(); 63 | createPlaythroughToken(); 64 | ``` 65 | 66 | Most API methods are also callable using an instance of the `Playthrough` class, which automatically scopes the API calls to the playthrough `token` passed when creating the instance: 67 | 68 | ```js 69 | const playthrough = new Playthrough(token); 70 | // No need to pass `token` here! 71 | playthrough.createConversation(); 72 | ``` 73 | 74 | #### createPlaythroughToken 75 | 76 | Use this to set up a new playthrough. 77 | 78 | - `storyId` (`number`): The `id` of the story that you want to create a new playthrough for. The story must be published, unless a Charisma.ai user token has been passed and the user matches the owner of the story. 79 | - `version` (`number`, optional): The `version` of the story that you want to create a new playthrough for. If omitted, it will default to the most recent published version. To get the draft version of a story, pass `-1` and an `apiKey`. 80 | - `apiKey` (`string`, optional): To access draft, test or unpublished versions of your story, pass an `apiKey`. The API key can be found on the story overview page. 81 | - `languageCode` (`string`, optional): To play a story in a language other than English (`en`, the default), pass a BCP-47 `languageCode`. For example, to play in Italian, use `it`. 82 | 83 | Returns a promise that resolves with the token. 
84 | 85 | ```js 86 | const { token } = await createPlaythroughToken({ 87 | storyId: 12, 88 | version: 4, 89 | apiKey: "...", 90 | languageCode: "en", 91 | }); 92 | ``` 93 | 94 | #### createConversation 95 | 96 | A playthrough can have many simultaneous conversations. In order to start interacting, a conversation needs to be created, which can then be joined. 97 | 98 | - `playthroughToken` (`string`): The token generated with `createPlaythroughToken`. 99 | 100 | ```js 101 | const { conversationUuid } = await createConversation(token); 102 | ``` 103 | 104 | ## Playthrough 105 | 106 | Create a new `Playthrough` instance to connect to a playthrough and interact with the chat engine. 107 | 108 | - `playthroughToken` (`string`): The `token` generated in `createPlaythroughToken`. 109 | 110 | #### Playthrough.joinConversation 111 | 112 | This makes the `Playthrough` instance listen out for events for a particular conversation, and returns a `Conversation` that events can be called on and event listeners attached. 113 | 114 | - `conversationUuid` (`string`): The conversation UUID generated with `createConversation`. 115 | 116 | Returns a `Conversation`, which can be used to send and receive events bound to that conversation. 117 | 118 | ```js 119 | playthrough.joinConversation(conversationUuid); 120 | ``` 121 | 122 | #### Playthrough.connect 123 | 124 | This is what kicks off the connection to the chat engine. Call this once you're ready to start sending and receiving events. 125 | 126 | Returns an object with a `playerSessionId` property. 127 | 128 | ```js 129 | await playthrough.connect(); 130 | ``` 131 | 132 | #### Playthrough.disconnect 133 | 134 | If you want to end the connection to the playthrough, you can call `playthrough.disconnect()`. 135 | 136 | ```js 137 | playthrough.disconnect(); 138 | ``` 139 | 140 | ## Events 141 | 142 | To interact with the story, events are sent to and from the server that the WebSocket is connected to. 
143 | 144 | ### Events sent from client 145 | 146 | #### conversation.start({ ... }) 147 | 148 | ```js 149 | { 150 | // For Pro stories, start the story at a particular subplot with the `startGraphReferenceId`. 151 | // It can be found by clicking '...' next to the subplot in the sidebar, and clicking 'Edit details'. 152 | // For Web Comic stories do not provide `startGraphReferenceId`, the story will start automatically from the first scene 153 | "startGraphReferenceId": "my-id", // Optional, default undefined 154 | } 155 | ``` 156 | 157 | #### conversation.reply({ ... }) 158 | 159 | ```js 160 | { 161 | "text": "Please reply to this!" 162 | } 163 | ``` 164 | 165 | #### conversation.tap({ ... }) 166 | 167 | This event has no fields. 168 | 169 | #### conversation.action({ ... }) 170 | 171 | ```js 172 | { 173 | "action": "pick-up-book" 174 | } 175 | ``` 176 | 177 | #### conversation.resume({ ... }) 178 | 179 | This event has no fields. 180 | 181 | ### Events received by client 182 | 183 | #### conversation.on('message', (event) => { ... }) 184 | 185 | ```js 186 | { 187 | "message": { 188 | "text": "Greetings and good day.", 189 | "character": { 190 | "id": 20, 191 | "name": "Ted Baker", 192 | "avatar": "https://s3.charisma.ai/..." 193 | }, 194 | "speech": { 195 | "duration": 203, 196 | "audio": /* either a buffer, or a URL */, 197 | } 198 | "metadata": { 199 | "myMetadata": "someValue" 200 | }, 201 | "media": null 202 | }, 203 | "endStory": false, 204 | "path": [{ "id": 1, "type": "edge" }, { "id": 2, "type": "node" }] 205 | } 206 | ``` 207 | 208 | #### conversation.on('start-typing', () => { ... }) 209 | 210 | This event has no additional data. 211 | 212 | #### conversation.on('stop-typing', () => { ... }) 213 | 214 | This event has no additional data. 215 | 216 | #### conversation.on('action', (event) => { ... }) 217 | 218 | #### conversation.on('reply', (event) => { ... }) 219 | 220 | #### conversation.on('resume', (event) => { ... 
}) 221 | 222 | #### conversation.on('start', (event) => { ... }) 223 | 224 | #### conversation.on('tap', (event) => { ... }) 225 | 226 | When another player sends specific events to a Charisma playthrough, they are sent back to all other connected players, so that other players can perform actions based on the events, such as displaying their messages in UI. 227 | 228 | The events that are currently echoed to all clients are `action`, `reply`, `resume`, `start` and `tap`. 229 | 230 | **Important:** These events are **not** emitted for the player that sent the original corresponding event! 231 | 232 | Each event includes its committed `eventId` and `timestamp` as well as the original payload (excluding the `speechConfig`). 233 | 234 | #### conversation.on('problem', (event) => { ... }) 235 | 236 | If a problem occurs during a conversation, such as a pathway not being found after submitting a player message, `problem` will be emitted. 237 | 238 | ### Conversation helpers 239 | 240 | #### conversation.setSpeechConfig(config) 241 | 242 | This sets the speech configuration to use for all events in the conversation until set otherwise: 243 | 244 | ```json 245 | { 246 | "encoding": ["ogg", "mp3"], 247 | "output": "buffer" 248 | } 249 | ``` 250 | 251 | `encoding` is the file format of the resulting speech: `mp3`, `ogg`, `wav` or `pcm`. If an array, Charisma will use the first encoding that the voice supports, useful for cases where a voice synthesis service of a particular voice does not support the "default" encoding you wish to use. 252 | 253 | `output` determines whether the speech received back is a `buffer` (a byte array) or whether it should instead be a `url` pointing to the audio file. 254 | 255 | ## AudioManager 256 | 257 | The audio manager will handle the audio from characters, media and speech-to-text functionality. 
258 | 259 | ```js 260 | import { AudioManager } from "@charisma-ai/sdk"; 261 | 262 | const audio = new AudioManager({ 263 | // AudioManager options 264 | handleTranscript: (transcript: string) => { 265 | console.log(transcript); 266 | }, 267 | }); 268 | ``` 269 | 270 | #### AudioManager Options 271 | 272 | | Option | Type | Default | Description | 273 | | ------------------- | ---------------------------------- | --------------------------- | --------------------------------------------------------------------------------------------------------------------------------------- | 274 | | `duckVolumeLevel` | `number` | 0 | Volume level when ducking (0 to 1) | 275 | | `normalVolumeLevel` | `number` | 1 | Regular volume level (0 to 1) | 276 | | `sttService` | `"charisma/deepgram" \| "browser"` | `"charisma/deepgram"` | Speech-to-text service to use (see below). | 277 | | `sttUrl` | `string` | `"https://stt.charisma.ai"` | Speech-to-text service URL. | 278 | | `streamTimeslice` | `number` | 100 | The number of milliseconds to record into each Blob. See https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder/start#timeslice | 279 | | `handleTranscript` | `(transcript: string) => void` | | Callback to handle transcripts. | 280 | | `handleStartSTT` | `() => void` | | Callback to handle when speech-to-text starts. Can be used to update the UI. | 281 | | `handleStopSTT` | `() => void` | | Callback to handle when speech-to-text stops. | 282 | | `handleError` | `(error: string) => void` | `console.error(error)` | Callback to handle errors. | 283 | | `handleDisconnect` | `(message: string) => void` | `console.error(message)` | Callback to handle when the transcription service disconnects. | 284 | | `handleConnect` | `(message: string) => void` | `console.log(message)` | Callback to handle when the transcription service connects. | 285 | | `debugLogFunction` | `(message: string) => void` | `() => {}` | Callback to handle log messages for debugging. 
| 286 | 287 | There are currently two speech-to-text services available: 288 | 289 | - `charisma/deepgram`: Deepgram is a neural network based speech-to-text service that can be accessed through Charisma.ai. 290 | - `browser`: Some browsers have built-in speech recognition, which can be used to provide speech-to-text functionality. **This is only available in browsers that support `SpeechRecognition`. Please refer to [this browser compatibility table](https://developer.mozilla.org/en-US/docs/Web/API/SpeechRecognition#browser_compatibility) for more details.** 291 | 292 | ### Speech-to-text 293 | 294 | #### audio.startListening(timeout?: number) 295 | 296 | Starts listening for speech. This will call handleStartSTT() when the speech-to-text service starts. 297 | Takes a `timeout` argument in milliseconds, which will automatically stop the speech-to-text service after the timeout. Defaults to 10000 (ten seconds) if not provided. 298 | 299 | #### audio.stopListening() 300 | 301 | Stops listening for speech. This will call handleStopSTT() when the speech-to-text service stops. 302 | 303 | #### audio.connect(token: string, playerSessionId: string) 304 | 305 | Connects to the speech-to-text service using the playthrough token and player session id to validate. This is only needed when using the `charisma/deepgram` speech-to-text service. 306 | 307 | The `playerSessionId` is returned from `playthrough.connect()`. See the `deepgram-stt` demo for an example. 308 | 309 | #### audio.disconnect() 310 | 311 | Disconnects from the speech-to-text service. 312 | 313 | #### audio.resetTimeout(timeout: number) 314 | 315 | Resets the timeout for the speech-to-text service to `timeout` in milliseconds. If this is not run, the speech-to-text service will default to a timeout of 10 seconds. 316 | After the timeout, the speech-to-text service will automatically stop listening.
317 | 318 | #### audio.browserIsSupported(): boolean 319 | 320 | Returns `true` if the browser supports the `browser` speech recognition service. 321 | 322 | ### Audio Outputs Service 323 | 324 | #### audio.initialise() 325 | 326 | Initialises the audio for characters and media. This method _must_ be called before attempting to play audio from media nodes or character speech. 327 | 328 | This method _must_ also be called from a user interaction event, such as a click or a keypress. This is due to a security restriction in some browsers. We recommend adding it to the "start" button that sets up your playthrough. See the demos for an example. 329 | 330 | #### audio.playCharacterSpeech(audio: ArrayBuffer, options: AudioOutputsServicePlayOptions): Promise 331 | 332 | This plays the generated speech in the message event. Typically, you would want to use this in combination with a `message` conversation handler. 333 | 334 | Returns a Promise that resolves once the speech has ended. 335 | 336 | `options` is an object with two properties: 337 | 338 | ```ts 339 | type SpeakerPlayOptions = { 340 | /** 341 | * Whether to interrupt the same track as the `trackId` passed (`track`), all currently playing audio (`all`), or not to interrupt anything (`none`). Default is `none`. 342 | */ 343 | interrupt?: "track" | "all" | "none"; 344 | /** 345 | * If you want to prevent a particular character from speaking over themselves, a `trackId` can be set to a unique string. When playing another speech clip, if the same `trackId` is passed and `interrupt` is set to `"track"`, then the previous clip will stop playing. Default is unset. 346 | */ 347 | trackId?: string; 348 | }; 349 | ``` 350 | 351 | #### audio.characterSpeechVolume 352 | 353 | Get or set the volume of the character speech. Must be a number between 0 and 1. 354 | 355 | ### Media Track Audio 356 | 357 | #### audio.mediaAudioPlay(audioTracks: AudioTrack[]): void 358 | 359 | Will play the audio tracks in a message event.
An empty array can also be passed here so it can be called on every message event. 360 | 361 | #### audio.mediaAudioSetVolume(volume: number): void 362 | 363 | Sets the volume of all media audio tracks. Must be a number between 0 and 1. 364 | 365 | The volume set here will be multiplied by the volume set in the graph editor for each track. For example, if you set the graph editor volume to 0.5 and the SDK volume to 1, the final volume will be 0.5. If you set the graph editor volume to 0.5 and the SDK volume to 0.5, the final volume will be 0.25. 366 | 367 | #### audio.mediaAudioToggleMute() 368 | 369 | Will mute and unmute all media audio tracks. 370 | 371 | #### audio.mediaAudioStopAll() 372 | 373 | Will stop all media audio tracks. 374 | 375 | ## Questions 376 | 377 | For further details or any questions, feel free to get in touch at [hello@charisma.ai](mailto:hello@charisma.ai), or head to the [Charisma docs](https://charisma.ai/docs)! 378 | -------------------------------------------------------------------------------- /demos/browser-stt/.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | pnpm-debug.log* 8 | lerna-debug.log* 9 | 10 | node_modules 11 | dist 12 | dist-ssr 13 | *.local 14 | 15 | # Editor directories and files 16 | .vscode/* 17 | !.vscode/extensions.json 18 | .idea 19 | .DS_Store 20 | *.suo 21 | *.ntvs* 22 | *.njsproj 23 | *.sln 24 | *.sw? 25 | 26 | .env 27 | -------------------------------------------------------------------------------- /demos/browser-stt/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Charisma JS SDK Demo 7 | 8 | 9 |
10 |
11 | Story ID 12 | 13 | Story API Key 14 | 15 | only needed for draft or unpublished versions 16 | 17 | 18 | Version 19 | 20 | use -1 for draft, or undefined for most recent published 21 | 22 | 23 | StartGraphReferenceId 24 | only for pro stories 25 | 26 |
27 | 28 |
29 |
30 | 35 |
36 | 39 | 40 |
41 | 44 |
45 | 46 | 47 | 48 | -------------------------------------------------------------------------------- /demos/browser-stt/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "test", 3 | "private": true, 4 | "version": "0.0.0", 5 | "type": "module", 6 | "scripts": { 7 | "dev": "vite --port 3000", 8 | "build": "tsc && vite build", 9 | "preview": "vite preview" 10 | }, 11 | "devDependencies": { 12 | "typescript": "^5.2.2", 13 | "vite": "^5.3.1" 14 | }, 15 | "dependencies": { 16 | "@charisma-ai/sdk": "link:../.." 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /demos/browser-stt/pnpm-lock.yaml: -------------------------------------------------------------------------------- 1 | lockfileVersion: '9.0' 2 | 3 | settings: 4 | autoInstallPeers: true 5 | excludeLinksFromLockfile: false 6 | 7 | importers: 8 | 9 | .: 10 | dependencies: 11 | '@charisma-ai/sdk': 12 | specifier: link:../.. 13 | version: link:../.. 
14 | devDependencies: 15 | typescript: 16 | specifier: ^5.2.2 17 | version: 5.5.3 18 | vite: 19 | specifier: ^5.3.1 20 | version: 5.3.3 21 | 22 | packages: 23 | 24 | '@esbuild/aix-ppc64@0.21.5': 25 | resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==, tarball: https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz} 26 | engines: {node: '>=12'} 27 | cpu: [ppc64] 28 | os: [aix] 29 | 30 | '@esbuild/android-arm64@0.21.5': 31 | resolution: {integrity: sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==, tarball: https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz} 32 | engines: {node: '>=12'} 33 | cpu: [arm64] 34 | os: [android] 35 | 36 | '@esbuild/android-arm@0.21.5': 37 | resolution: {integrity: sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==, tarball: https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz} 38 | engines: {node: '>=12'} 39 | cpu: [arm] 40 | os: [android] 41 | 42 | '@esbuild/android-x64@0.21.5': 43 | resolution: {integrity: sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==, tarball: https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz} 44 | engines: {node: '>=12'} 45 | cpu: [x64] 46 | os: [android] 47 | 48 | '@esbuild/darwin-arm64@0.21.5': 49 | resolution: {integrity: sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==, tarball: https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz} 50 | engines: {node: '>=12'} 51 | cpu: [arm64] 52 | os: [darwin] 53 | 54 | '@esbuild/darwin-x64@0.21.5': 55 | resolution: {integrity: sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==, tarball: https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz} 56 | engines: {node: 
'>=12'} 57 | cpu: [x64] 58 | os: [darwin] 59 | 60 | '@esbuild/freebsd-arm64@0.21.5': 61 | resolution: {integrity: sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==, tarball: https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz} 62 | engines: {node: '>=12'} 63 | cpu: [arm64] 64 | os: [freebsd] 65 | 66 | '@esbuild/freebsd-x64@0.21.5': 67 | resolution: {integrity: sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==, tarball: https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz} 68 | engines: {node: '>=12'} 69 | cpu: [x64] 70 | os: [freebsd] 71 | 72 | '@esbuild/linux-arm64@0.21.5': 73 | resolution: {integrity: sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==, tarball: https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz} 74 | engines: {node: '>=12'} 75 | cpu: [arm64] 76 | os: [linux] 77 | 78 | '@esbuild/linux-arm@0.21.5': 79 | resolution: {integrity: sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==, tarball: https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz} 80 | engines: {node: '>=12'} 81 | cpu: [arm] 82 | os: [linux] 83 | 84 | '@esbuild/linux-ia32@0.21.5': 85 | resolution: {integrity: sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==, tarball: https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz} 86 | engines: {node: '>=12'} 87 | cpu: [ia32] 88 | os: [linux] 89 | 90 | '@esbuild/linux-loong64@0.21.5': 91 | resolution: {integrity: sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==, tarball: https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz} 92 | engines: {node: '>=12'} 93 | cpu: [loong64] 94 | os: [linux] 95 | 96 | '@esbuild/linux-mips64el@0.21.5': 97 | resolution: 
{integrity: sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==, tarball: https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz} 98 | engines: {node: '>=12'} 99 | cpu: [mips64el] 100 | os: [linux] 101 | 102 | '@esbuild/linux-ppc64@0.21.5': 103 | resolution: {integrity: sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==, tarball: https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz} 104 | engines: {node: '>=12'} 105 | cpu: [ppc64] 106 | os: [linux] 107 | 108 | '@esbuild/linux-riscv64@0.21.5': 109 | resolution: {integrity: sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==, tarball: https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz} 110 | engines: {node: '>=12'} 111 | cpu: [riscv64] 112 | os: [linux] 113 | 114 | '@esbuild/linux-s390x@0.21.5': 115 | resolution: {integrity: sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==, tarball: https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz} 116 | engines: {node: '>=12'} 117 | cpu: [s390x] 118 | os: [linux] 119 | 120 | '@esbuild/linux-x64@0.21.5': 121 | resolution: {integrity: sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==, tarball: https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz} 122 | engines: {node: '>=12'} 123 | cpu: [x64] 124 | os: [linux] 125 | 126 | '@esbuild/netbsd-x64@0.21.5': 127 | resolution: {integrity: sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==, tarball: https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz} 128 | engines: {node: '>=12'} 129 | cpu: [x64] 130 | os: [netbsd] 131 | 132 | '@esbuild/openbsd-x64@0.21.5': 133 | resolution: {integrity: 
sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==, tarball: https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz} 134 | engines: {node: '>=12'} 135 | cpu: [x64] 136 | os: [openbsd] 137 | 138 | '@esbuild/sunos-x64@0.21.5': 139 | resolution: {integrity: sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==, tarball: https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz} 140 | engines: {node: '>=12'} 141 | cpu: [x64] 142 | os: [sunos] 143 | 144 | '@esbuild/win32-arm64@0.21.5': 145 | resolution: {integrity: sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==, tarball: https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz} 146 | engines: {node: '>=12'} 147 | cpu: [arm64] 148 | os: [win32] 149 | 150 | '@esbuild/win32-ia32@0.21.5': 151 | resolution: {integrity: sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==, tarball: https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz} 152 | engines: {node: '>=12'} 153 | cpu: [ia32] 154 | os: [win32] 155 | 156 | '@esbuild/win32-x64@0.21.5': 157 | resolution: {integrity: sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==, tarball: https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz} 158 | engines: {node: '>=12'} 159 | cpu: [x64] 160 | os: [win32] 161 | 162 | '@rollup/rollup-android-arm-eabi@4.18.1': 163 | resolution: {integrity: sha512-lncuC4aHicncmbORnx+dUaAgzee9cm/PbIqgWz1PpXuwc+sa1Ct83tnqUDy/GFKleLiN7ZIeytM6KJ4cAn1SxA==, tarball: https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.18.1.tgz} 164 | cpu: [arm] 165 | os: [android] 166 | 167 | '@rollup/rollup-android-arm64@4.18.1': 168 | resolution: {integrity: 
sha512-F/tkdw0WSs4ojqz5Ovrw5r9odqzFjb5LIgHdHZG65dFI1lWTWRVy32KDJLKRISHgJvqUeUhdIvy43fX41znyDg==, tarball: https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.18.1.tgz} 169 | cpu: [arm64] 170 | os: [android] 171 | 172 | '@rollup/rollup-darwin-arm64@4.18.1': 173 | resolution: {integrity: sha512-vk+ma8iC1ebje/ahpxpnrfVQJibTMyHdWpOGZ3JpQ7Mgn/3QNHmPq7YwjZbIE7km73dH5M1e6MRRsnEBW7v5CQ==, tarball: https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.18.1.tgz} 174 | cpu: [arm64] 175 | os: [darwin] 176 | 177 | '@rollup/rollup-darwin-x64@4.18.1': 178 | resolution: {integrity: sha512-IgpzXKauRe1Tafcej9STjSSuG0Ghu/xGYH+qG6JwsAUxXrnkvNHcq/NL6nz1+jzvWAnQkuAJ4uIwGB48K9OCGA==, tarball: https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.18.1.tgz} 179 | cpu: [x64] 180 | os: [darwin] 181 | 182 | '@rollup/rollup-linux-arm-gnueabihf@4.18.1': 183 | resolution: {integrity: sha512-P9bSiAUnSSM7EmyRK+e5wgpqai86QOSv8BwvkGjLwYuOpaeomiZWifEos517CwbG+aZl1T4clSE1YqqH2JRs+g==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.18.1.tgz} 184 | cpu: [arm] 185 | os: [linux] 186 | 187 | '@rollup/rollup-linux-arm-musleabihf@4.18.1': 188 | resolution: {integrity: sha512-5RnjpACoxtS+aWOI1dURKno11d7krfpGDEn19jI8BuWmSBbUC4ytIADfROM1FZrFhQPSoP+KEa3NlEScznBTyQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.18.1.tgz} 189 | cpu: [arm] 190 | os: [linux] 191 | 192 | '@rollup/rollup-linux-arm64-gnu@4.18.1': 193 | resolution: {integrity: sha512-8mwmGD668m8WaGbthrEYZ9CBmPug2QPGWxhJxh/vCgBjro5o96gL04WLlg5BA233OCWLqERy4YUzX3bJGXaJgQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.18.1.tgz} 194 | cpu: [arm64] 195 | os: [linux] 196 | 197 | '@rollup/rollup-linux-arm64-musl@4.18.1': 198 | resolution: {integrity: 
sha512-dJX9u4r4bqInMGOAQoGYdwDP8lQiisWb9et+T84l2WXk41yEej8v2iGKodmdKimT8cTAYt0jFb+UEBxnPkbXEQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.18.1.tgz} 199 | cpu: [arm64] 200 | os: [linux] 201 | 202 | '@rollup/rollup-linux-powerpc64le-gnu@4.18.1': 203 | resolution: {integrity: sha512-V72cXdTl4EI0x6FNmho4D502sy7ed+LuVW6Ym8aI6DRQ9hQZdp5sj0a2usYOlqvFBNKQnLQGwmYnujo2HvjCxQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.18.1.tgz} 204 | cpu: [ppc64] 205 | os: [linux] 206 | 207 | '@rollup/rollup-linux-riscv64-gnu@4.18.1': 208 | resolution: {integrity: sha512-f+pJih7sxoKmbjghrM2RkWo2WHUW8UbfxIQiWo5yeCaCM0TveMEuAzKJte4QskBp1TIinpnRcxkquY+4WuY/tg==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.18.1.tgz} 209 | cpu: [riscv64] 210 | os: [linux] 211 | 212 | '@rollup/rollup-linux-s390x-gnu@4.18.1': 213 | resolution: {integrity: sha512-qb1hMMT3Fr/Qz1OKovCuUM11MUNLUuHeBC2DPPAWUYYUAOFWaxInaTwTQmc7Fl5La7DShTEpmYwgdt2hG+4TEg==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.18.1.tgz} 214 | cpu: [s390x] 215 | os: [linux] 216 | 217 | '@rollup/rollup-linux-x64-gnu@4.18.1': 218 | resolution: {integrity: sha512-7O5u/p6oKUFYjRbZkL2FLbwsyoJAjyeXHCU3O4ndvzg2OFO2GinFPSJFGbiwFDaCFc+k7gs9CF243PwdPQFh5g==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.18.1.tgz} 219 | cpu: [x64] 220 | os: [linux] 221 | 222 | '@rollup/rollup-linux-x64-musl@4.18.1': 223 | resolution: {integrity: sha512-pDLkYITdYrH/9Cv/Vlj8HppDuLMDUBmgsM0+N+xLtFd18aXgM9Nyqupb/Uw+HeidhfYg2lD6CXvz6CjoVOaKjQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.18.1.tgz} 224 | cpu: [x64] 225 | os: [linux] 226 | 227 | '@rollup/rollup-win32-arm64-msvc@4.18.1': 228 | resolution: {integrity: 
sha512-W2ZNI323O/8pJdBGil1oCauuCzmVd9lDmWBBqxYZcOqWD6aWqJtVBQ1dFrF4dYpZPks6F+xCZHfzG5hYlSHZ6g==, tarball: https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.18.1.tgz} 229 | cpu: [arm64] 230 | os: [win32] 231 | 232 | '@rollup/rollup-win32-ia32-msvc@4.18.1': 233 | resolution: {integrity: sha512-ELfEX1/+eGZYMaCIbK4jqLxO1gyTSOIlZr6pbC4SRYFaSIDVKOnZNMdoZ+ON0mrFDp4+H5MhwNC1H/AhE3zQLg==, tarball: https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.18.1.tgz} 234 | cpu: [ia32] 235 | os: [win32] 236 | 237 | '@rollup/rollup-win32-x64-msvc@4.18.1': 238 | resolution: {integrity: sha512-yjk2MAkQmoaPYCSu35RLJ62+dz358nE83VfTePJRp8CG7aMg25mEJYpXFiD+NcevhX8LxD5OP5tktPXnXN7GDw==, tarball: https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.18.1.tgz} 239 | cpu: [x64] 240 | os: [win32] 241 | 242 | '@types/estree@1.0.5': 243 | resolution: {integrity: sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==, tarball: https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz} 244 | 245 | esbuild@0.21.5: 246 | resolution: {integrity: sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==, tarball: https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz} 247 | engines: {node: '>=12'} 248 | hasBin: true 249 | 250 | fsevents@2.3.3: 251 | resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==, tarball: https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz} 252 | engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} 253 | os: [darwin] 254 | 255 | nanoid@3.3.7: 256 | resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==, tarball: https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz} 257 | engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} 258 | hasBin: true 259 | 260 | picocolors@1.0.1: 
261 | resolution: {integrity: sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==, tarball: https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz} 262 | 263 | postcss@8.4.39: 264 | resolution: {integrity: sha512-0vzE+lAiG7hZl1/9I8yzKLx3aR9Xbof3fBHKunvMfOCYAtMhrsnccJY2iTURb9EZd5+pLuiNV9/c/GZJOHsgIw==, tarball: https://registry.npmjs.org/postcss/-/postcss-8.4.39.tgz} 265 | engines: {node: ^10 || ^12 || >=14} 266 | 267 | rollup@4.18.1: 268 | resolution: {integrity: sha512-Elx2UT8lzxxOXMpy5HWQGZqkrQOtrVDDa/bm9l10+U4rQnVzbL/LgZ4NOM1MPIDyHk69W4InuYDF5dzRh4Kw1A==, tarball: https://registry.npmjs.org/rollup/-/rollup-4.18.1.tgz} 269 | engines: {node: '>=18.0.0', npm: '>=8.0.0'} 270 | hasBin: true 271 | 272 | source-map-js@1.2.0: 273 | resolution: {integrity: sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==, tarball: https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz} 274 | engines: {node: '>=0.10.0'} 275 | 276 | typescript@5.5.3: 277 | resolution: {integrity: sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ==, tarball: https://registry.npmjs.org/typescript/-/typescript-5.5.3.tgz} 278 | engines: {node: '>=14.17'} 279 | hasBin: true 280 | 281 | vite@5.3.3: 282 | resolution: {integrity: sha512-NPQdeCU0Dv2z5fu+ULotpuq5yfCS1BzKUIPhNbP3YBfAMGJXbt2nS+sbTFu+qchaqWTD+H3JK++nRwr6XIcp6A==, tarball: https://registry.npmjs.org/vite/-/vite-5.3.3.tgz} 283 | engines: {node: ^18.0.0 || >=20.0.0} 284 | hasBin: true 285 | peerDependencies: 286 | '@types/node': ^18.0.0 || >=20.0.0 287 | less: '*' 288 | lightningcss: ^1.21.0 289 | sass: '*' 290 | stylus: '*' 291 | sugarss: '*' 292 | terser: ^5.4.0 293 | peerDependenciesMeta: 294 | '@types/node': 295 | optional: true 296 | less: 297 | optional: true 298 | lightningcss: 299 | optional: true 300 | sass: 301 | optional: true 302 | stylus: 303 | optional: true 304 | sugarss: 305 | optional: 
true 306 | terser: 307 | optional: true 308 | 309 | snapshots: 310 | 311 | '@esbuild/aix-ppc64@0.21.5': 312 | optional: true 313 | 314 | '@esbuild/android-arm64@0.21.5': 315 | optional: true 316 | 317 | '@esbuild/android-arm@0.21.5': 318 | optional: true 319 | 320 | '@esbuild/android-x64@0.21.5': 321 | optional: true 322 | 323 | '@esbuild/darwin-arm64@0.21.5': 324 | optional: true 325 | 326 | '@esbuild/darwin-x64@0.21.5': 327 | optional: true 328 | 329 | '@esbuild/freebsd-arm64@0.21.5': 330 | optional: true 331 | 332 | '@esbuild/freebsd-x64@0.21.5': 333 | optional: true 334 | 335 | '@esbuild/linux-arm64@0.21.5': 336 | optional: true 337 | 338 | '@esbuild/linux-arm@0.21.5': 339 | optional: true 340 | 341 | '@esbuild/linux-ia32@0.21.5': 342 | optional: true 343 | 344 | '@esbuild/linux-loong64@0.21.5': 345 | optional: true 346 | 347 | '@esbuild/linux-mips64el@0.21.5': 348 | optional: true 349 | 350 | '@esbuild/linux-ppc64@0.21.5': 351 | optional: true 352 | 353 | '@esbuild/linux-riscv64@0.21.5': 354 | optional: true 355 | 356 | '@esbuild/linux-s390x@0.21.5': 357 | optional: true 358 | 359 | '@esbuild/linux-x64@0.21.5': 360 | optional: true 361 | 362 | '@esbuild/netbsd-x64@0.21.5': 363 | optional: true 364 | 365 | '@esbuild/openbsd-x64@0.21.5': 366 | optional: true 367 | 368 | '@esbuild/sunos-x64@0.21.5': 369 | optional: true 370 | 371 | '@esbuild/win32-arm64@0.21.5': 372 | optional: true 373 | 374 | '@esbuild/win32-ia32@0.21.5': 375 | optional: true 376 | 377 | '@esbuild/win32-x64@0.21.5': 378 | optional: true 379 | 380 | '@rollup/rollup-android-arm-eabi@4.18.1': 381 | optional: true 382 | 383 | '@rollup/rollup-android-arm64@4.18.1': 384 | optional: true 385 | 386 | '@rollup/rollup-darwin-arm64@4.18.1': 387 | optional: true 388 | 389 | '@rollup/rollup-darwin-x64@4.18.1': 390 | optional: true 391 | 392 | '@rollup/rollup-linux-arm-gnueabihf@4.18.1': 393 | optional: true 394 | 395 | '@rollup/rollup-linux-arm-musleabihf@4.18.1': 396 | optional: true 397 | 398 | 
'@rollup/rollup-linux-arm64-gnu@4.18.1': 399 | optional: true 400 | 401 | '@rollup/rollup-linux-arm64-musl@4.18.1': 402 | optional: true 403 | 404 | '@rollup/rollup-linux-powerpc64le-gnu@4.18.1': 405 | optional: true 406 | 407 | '@rollup/rollup-linux-riscv64-gnu@4.18.1': 408 | optional: true 409 | 410 | '@rollup/rollup-linux-s390x-gnu@4.18.1': 411 | optional: true 412 | 413 | '@rollup/rollup-linux-x64-gnu@4.18.1': 414 | optional: true 415 | 416 | '@rollup/rollup-linux-x64-musl@4.18.1': 417 | optional: true 418 | 419 | '@rollup/rollup-win32-arm64-msvc@4.18.1': 420 | optional: true 421 | 422 | '@rollup/rollup-win32-ia32-msvc@4.18.1': 423 | optional: true 424 | 425 | '@rollup/rollup-win32-x64-msvc@4.18.1': 426 | optional: true 427 | 428 | '@types/estree@1.0.5': {} 429 | 430 | esbuild@0.21.5: 431 | optionalDependencies: 432 | '@esbuild/aix-ppc64': 0.21.5 433 | '@esbuild/android-arm': 0.21.5 434 | '@esbuild/android-arm64': 0.21.5 435 | '@esbuild/android-x64': 0.21.5 436 | '@esbuild/darwin-arm64': 0.21.5 437 | '@esbuild/darwin-x64': 0.21.5 438 | '@esbuild/freebsd-arm64': 0.21.5 439 | '@esbuild/freebsd-x64': 0.21.5 440 | '@esbuild/linux-arm': 0.21.5 441 | '@esbuild/linux-arm64': 0.21.5 442 | '@esbuild/linux-ia32': 0.21.5 443 | '@esbuild/linux-loong64': 0.21.5 444 | '@esbuild/linux-mips64el': 0.21.5 445 | '@esbuild/linux-ppc64': 0.21.5 446 | '@esbuild/linux-riscv64': 0.21.5 447 | '@esbuild/linux-s390x': 0.21.5 448 | '@esbuild/linux-x64': 0.21.5 449 | '@esbuild/netbsd-x64': 0.21.5 450 | '@esbuild/openbsd-x64': 0.21.5 451 | '@esbuild/sunos-x64': 0.21.5 452 | '@esbuild/win32-arm64': 0.21.5 453 | '@esbuild/win32-ia32': 0.21.5 454 | '@esbuild/win32-x64': 0.21.5 455 | 456 | fsevents@2.3.3: 457 | optional: true 458 | 459 | nanoid@3.3.7: {} 460 | 461 | picocolors@1.0.1: {} 462 | 463 | postcss@8.4.39: 464 | dependencies: 465 | nanoid: 3.3.7 466 | picocolors: 1.0.1 467 | source-map-js: 1.2.0 468 | 469 | rollup@4.18.1: 470 | dependencies: 471 | '@types/estree': 1.0.5 472 | 
optionalDependencies: 473 | '@rollup/rollup-android-arm-eabi': 4.18.1 474 | '@rollup/rollup-android-arm64': 4.18.1 475 | '@rollup/rollup-darwin-arm64': 4.18.1 476 | '@rollup/rollup-darwin-x64': 4.18.1 477 | '@rollup/rollup-linux-arm-gnueabihf': 4.18.1 478 | '@rollup/rollup-linux-arm-musleabihf': 4.18.1 479 | '@rollup/rollup-linux-arm64-gnu': 4.18.1 480 | '@rollup/rollup-linux-arm64-musl': 4.18.1 481 | '@rollup/rollup-linux-powerpc64le-gnu': 4.18.1 482 | '@rollup/rollup-linux-riscv64-gnu': 4.18.1 483 | '@rollup/rollup-linux-s390x-gnu': 4.18.1 484 | '@rollup/rollup-linux-x64-gnu': 4.18.1 485 | '@rollup/rollup-linux-x64-musl': 4.18.1 486 | '@rollup/rollup-win32-arm64-msvc': 4.18.1 487 | '@rollup/rollup-win32-ia32-msvc': 4.18.1 488 | '@rollup/rollup-win32-x64-msvc': 4.18.1 489 | fsevents: 2.3.3 490 | 491 | source-map-js@1.2.0: {} 492 | 493 | typescript@5.5.3: {} 494 | 495 | vite@5.3.3: 496 | dependencies: 497 | esbuild: 0.21.5 498 | postcss: 8.4.39 499 | rollup: 4.18.1 500 | optionalDependencies: 501 | fsevents: 2.3.3 502 | -------------------------------------------------------------------------------- /demos/browser-stt/readme.md: -------------------------------------------------------------------------------- 1 | # Demo for Web Speech API SpeechRecognition 2 | NOTE: This demo is currently not supported on Firefox or Opera browsers, due to those browsers not fully supporting the Web API for SpeechRecognition. https://developer.mozilla.org/en-US/docs/Web/API/SpeechRecognition#browser_compatibility 3 | 4 | ## Setup 5 | ``` 6 | pnpm install 7 | ``` 8 | 9 | ## Run Locally 10 | ``` 11 | pnpm run dev 12 | ``` 13 | 14 | Note: The demo uses this local version of the SDK (`"link:../.."` in package.json). 
If you make changes to the SDK, you'll need to build the SDK again: 15 | ``` 16 | cd ../ 17 | pnpm run build 18 | ``` 19 | -------------------------------------------------------------------------------- /demos/browser-stt/src/main.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/no-unsafe-call */ 2 | 3 | import "./style.css"; 4 | import { 5 | Playthrough, 6 | AudioManager, 7 | createPlaythroughToken, 8 | createConversation, 9 | Conversation, 10 | Message, 11 | } from "@charisma-ai/sdk"; 12 | 13 | // In this demo, we'll extend the global "window" with the functions we need so we can call them from the HTML. 14 | declare global { 15 | interface Window { 16 | start: () => Promise; 17 | reply: () => void; 18 | onKeyPress: (event: KeyboardEvent) => void; 19 | toggleMuteBackgroundAudio: () => void; 20 | toggleMicrophone: (event: Event) => void; 21 | } 22 | } 23 | 24 | const messagesDiv = document.getElementById("messages"); 25 | const recordButton = document.getElementById("record-button"); 26 | 27 | const appendMessage = (message: string, className: string, name?: string) => { 28 | const div = document.createElement("div"); 29 | div.classList.add(className, "message"); 30 | div.innerHTML = `${name ? `${name}:` : ""} ${message}`; 31 | messagesDiv?.appendChild(div); 32 | }; 33 | 34 | // Keep track of the recording statuses of the microphone so we can update the UI accordingly. 
35 | let recordingStatus: "recording" | "off" | "starting" = "off"; 36 | 37 | const handleStartSTT = () => { 38 | recordingStatus = "recording"; 39 | if (recordButton) recordButton.innerHTML = "Stop"; 40 | }; 41 | 42 | const handleStopSTT = () => { 43 | recordingStatus = "off"; 44 | if (recordButton) recordButton.innerHTML = "Record"; 45 | }; 46 | 47 | const handleTranscript = (transcript: string) => { 48 | const replyInput = document.getElementById("reply-input"); 49 | if (replyInput) { 50 | replyInput.value = transcript; 51 | } 52 | }; 53 | 54 | // Setup the audio manager. 55 | const audioManager = new AudioManager({ 56 | duckVolumeLevel: 0.1, 57 | normalVolumeLevel: 1, 58 | sttService: "browser", 59 | streamTimeslice: 100, 60 | handleTranscript, 61 | handleStartSTT, 62 | handleStopSTT, 63 | }); 64 | 65 | if (!audioManager.browserIsSupported()) { 66 | appendMessage( 67 | "Your browser does not support the browser STT service.", 68 | "error-message", 69 | ); 70 | } 71 | 72 | let playthrough: Playthrough; 73 | let conversation: Conversation; 74 | 75 | window.start = async function start() { 76 | // In order to play audio, this method must be called by a user interaction. 77 | // This is due to a security restriction in some browsers. 
78 | audioManager.initialise(); 79 | 80 | const storyIdInput = document.getElementById("story-id"); 81 | const storyId = Number(storyIdInput.value); 82 | const storyApiKeyInput = document.getElementById("story-api-key"); 83 | const storyApiKey = storyApiKeyInput.value; 84 | const storyVersionInput = document.getElementById("version"); 85 | const storyVersion = Number(storyVersionInput.value) || undefined; 86 | const StartGraphReferenceIdInput = document.getElementById( 87 | "startGraphReferenceId", 88 | ); 89 | const startGraphReferenceId = StartGraphReferenceIdInput.value; 90 | 91 | const { token } = await createPlaythroughToken({ 92 | storyId, 93 | apiKey: storyApiKey, 94 | version: storyVersion, 95 | }); 96 | 97 | const { conversationUuid } = await createConversation(token); 98 | playthrough = new Playthrough(token); 99 | conversation = playthrough.joinConversation(conversationUuid); 100 | 101 | conversation.setSpeechConfig({ 102 | encoding: ["mp3", "wav"], 103 | output: "buffer", 104 | }); 105 | 106 | conversation.on("message", (message: Message) => { 107 | const characterMessage = 108 | message.type === "character" ? message.message : null; 109 | 110 | // For this demo, we only care about character messages. 111 | if (!characterMessage) return; 112 | 113 | // Put the character message on the page. 114 | appendMessage( 115 | characterMessage.text, 116 | "character-message", 117 | characterMessage.character?.name, 118 | ); 119 | 120 | // Play character speech. 121 | if (characterMessage.speech) { 122 | audioManager.playCharacterSpeech( 123 | characterMessage.speech.audio as ArrayBuffer, 124 | { 125 | trackId: String(characterMessage.character?.id), 126 | interrupt: "track", 127 | }, 128 | ); 129 | } 130 | 131 | if (characterMessage.media) { 132 | if (characterMessage.media.stopAllAudio) { 133 | audioManager.mediaAudioStopAll(); 134 | } 135 | 136 | // Play media audio if it exists in the node. 
137 | audioManager.mediaAudioPlay(characterMessage.media.audioTracks); 138 | } 139 | }); 140 | 141 | conversation.on("problem", console.warn); 142 | 143 | // Listen for the playthrough to connect and start the conversation when it does. 144 | let started = false; 145 | playthrough.on("connection-status", (status) => { 146 | appendMessage( 147 | status, 148 | status === "disconnected" ? "disconnected-message" : "connected-message", 149 | ); 150 | 151 | if (status === "connected" && !started) { 152 | const conversationParameters = startGraphReferenceId 153 | ? { startGraphReferenceId } 154 | : undefined; 155 | conversation.start(conversationParameters); 156 | started = true; 157 | } 158 | }); 159 | 160 | await playthrough.connect(); 161 | }; 162 | 163 | const reply = () => { 164 | if (!playthrough || !conversation) return; 165 | 166 | // Stop listening when you send a message. 167 | audioManager.stopListening(); 168 | 169 | const replyInput = document.getElementById("reply-input"); 170 | const text = replyInput.value; 171 | 172 | if (text.trim() === "") return; 173 | 174 | conversation.reply({ text }); 175 | replyInput.value = ""; 176 | 177 | // Put player message on the page. 178 | appendMessage(text, "player-message", "You"); 179 | }; 180 | 181 | // Handle the Enter key press. 182 | window.onKeyPress = function onKeyPress(event) { 183 | if (!event || !event.currentTarget) return; 184 | if (event.key === "Enter") { 185 | reply(); 186 | } 187 | }; 188 | 189 | window.reply = reply; 190 | 191 | // Toggling the microphone will request the stt service to connect. 
192 | window.toggleMicrophone = () => { 193 | if (!recordButton) return; 194 | 195 | if (recordingStatus === "off") { 196 | audioManager.startListening(); 197 | recordingStatus = "starting"; 198 | recordButton.innerHTML = "..."; 199 | } else if (recordingStatus === "recording") { 200 | audioManager.stopListening(); 201 | recordingStatus = "off"; 202 | recordButton.innerHTML = "Record"; 203 | } 204 | }; 205 | 206 | window.toggleMuteBackgroundAudio = () => { 207 | audioManager.mediaAudioToggleMute(); 208 | }; 209 | -------------------------------------------------------------------------------- /demos/browser-stt/src/style.css: -------------------------------------------------------------------------------- 1 | body { 2 | font-family: Inter, system-ui, Avenir, Helvetica, Arial, sans-serif; 3 | margin: 0; 4 | display: flex; 5 | place-items: center; 6 | flex-direction: column; 7 | min-width: 320px; 8 | min-height: 100vh; 9 | } 10 | 11 | h1 { 12 | font-size: 3.2em; 13 | line-height: 1.1; 14 | } 15 | 16 | #app { 17 | min-width: 40vw; 18 | max-width: 1280px; 19 | margin: 0 auto; 20 | padding: 1rem; 21 | text-align: center; 22 | } 23 | 24 | button { 25 | border-radius: 8px; 26 | border: 1px solid transparent; 27 | padding: 0.6em 1.2em; 28 | font-size: 1em; 29 | font-weight: 500; 30 | font-family: inherit; 31 | background-color: #1a1a1a; 32 | cursor: pointer; 33 | transition: border-color 0.25s; 34 | background-color: #f9f9f9; 35 | margin: 0.5em; 36 | } 37 | button:hover { 38 | background-color: #f2f2f2; 39 | } 40 | 41 | #messages { 42 | margin: 1em 0; 43 | border: 1px solid #ccc; 44 | padding: 1em 0; 45 | width: 100%; 46 | height: 200px; 47 | overflow-y: scroll; 48 | text-align: left; 49 | } 50 | 51 | .message { 52 | margin: 0.5em; 53 | } 54 | 55 | .connected-message { 56 | color: green; 57 | font-size: 0.8em; 58 | padding-left: 0.5em; 59 | } 60 | 61 | .disconnected-message { 62 | color: red; 63 | font-size: 0.8em; 64 | padding-left: 0.5em; 65 | } 66 | 67 | .error-message 
{ 68 | color: red; 69 | padding-left: 0.5em; 70 | } 71 | 72 | .story-parameters-container { 73 | display: flex; 74 | flex-direction: column; 75 | } 76 | 77 | .story-parameters-container input { 78 | margin-bottom: 0.5em; 79 | } 80 | 81 | #reply-input { 82 | width: 100%; 83 | } 84 | 85 | .comment { 86 | font-size: 0.6em; 87 | color: #999; 88 | } 89 | -------------------------------------------------------------------------------- /demos/browser-stt/src/vite-env.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | -------------------------------------------------------------------------------- /demos/browser-stt/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2020", 4 | "useDefineForClassFields": true, 5 | "module": "ESNext", 6 | "lib": ["ES2020", "DOM", "DOM.Iterable"], 7 | "skipLibCheck": true, 8 | 9 | /* Bundler mode */ 10 | "moduleResolution": "bundler", 11 | "allowImportingTsExtensions": true, 12 | "resolveJsonModule": true, 13 | "isolatedModules": true, 14 | "moduleDetection": "force", 15 | "noEmit": true, 16 | 17 | /* Linting */ 18 | "strict": true, 19 | "noUnusedLocals": true, 20 | "noUnusedParameters": true, 21 | "noFallthroughCasesInSwitch": true 22 | }, 23 | "include": ["src"] 24 | } 25 | -------------------------------------------------------------------------------- /demos/deepgram-stt/.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | pnpm-debug.log* 8 | lerna-debug.log* 9 | 10 | node_modules 11 | dist 12 | dist-ssr 13 | *.local 14 | 15 | # Editor directories and files 16 | .vscode/* 17 | !.vscode/extensions.json 18 | .idea 19 | .DS_Store 20 | *.suo 21 | *.ntvs* 22 | *.njsproj 23 | *.sln 24 | *.sw? 
25 | 26 | .env 27 | -------------------------------------------------------------------------------- /demos/deepgram-stt/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Charisma JS SDK Demo 7 | 8 | 9 |
10 |
11 | Story ID 12 | 13 | Story API Key 14 | 15 | 16 | only needed for draft or unpublished versions 17 | 18 | 19 | Version 20 | 21 | use -1 for draft, or undefined for most recent published 22 | 23 | 24 | StartGraphReferenceId 25 | only for pro stories 26 | 27 |
28 | 29 |
30 |
31 | 36 |
37 | 40 | 41 |
42 | 45 |
46 | 47 | 48 | 49 | -------------------------------------------------------------------------------- /demos/deepgram-stt/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "test", 3 | "private": true, 4 | "version": "0.0.0", 5 | "type": "module", 6 | "scripts": { 7 | "dev": "vite --port 3000", 8 | "build": "tsc && vite build", 9 | "preview": "vite preview" 10 | }, 11 | "devDependencies": { 12 | "typescript": "^5.2.2", 13 | "vite": "^5.3.1" 14 | }, 15 | "dependencies": { 16 | "@charisma-ai/sdk": "link:../.." 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /demos/deepgram-stt/readme.md: -------------------------------------------------------------------------------- 1 | # Demo Project for Deepgram Speech-to-Text 2 | 3 | ## Setup 4 | ``` 5 | pnpm install 6 | ``` 7 | 8 | ## Run Locally 9 | ``` 10 | pnpm run dev 11 | ``` 12 | 13 | Note: The demo uses this local version of the SDK (`"link:../.."` in package.json). If you make changes to the SDK, you'll need to build the SDK again: 14 | ``` 15 | cd ../ 16 | pnpm run build 17 | ``` 18 | -------------------------------------------------------------------------------- /demos/deepgram-stt/src/main.ts: -------------------------------------------------------------------------------- 1 | import "./style.css"; 2 | import { 3 | Playthrough, 4 | AudioManager, 5 | createPlaythroughToken, 6 | createConversation, 7 | Conversation, 8 | Message, 9 | } from "@charisma-ai/sdk"; 10 | 11 | // In this demo, we'll extend the global "window" with the functions we need so we can call them from the HTML. 
12 | declare global { 13 | interface Window { 14 | start: () => Promise; 15 | reply: () => void; 16 | onKeyPress: (event: KeyboardEvent) => void; 17 | toggleMuteBackgroundAudio: () => void; 18 | toggleMicrophone: (event: Event) => void; 19 | } 20 | } 21 | 22 | const messagesDiv = document.getElementById("messages"); 23 | const recordButton = document.getElementById("record-button"); 24 | 25 | const appendMessage = (message: string, className: string, name?: string) => { 26 | const div = document.createElement("div"); 27 | div.classList.add(className, "message"); 28 | div.innerHTML = `${name ? `${name}:` : ""} ${message}`; 29 | messagesDiv?.appendChild(div); 30 | }; 31 | 32 | // Keep track of the recording statuses of the microphone so we can update the UI accordingly. 33 | let recordingStatus: "recording" | "off" | "starting" = "off"; 34 | let confirmedText = ""; 35 | let volatileText = ""; 36 | 37 | const handleStartSTT = () => { 38 | recordingStatus = "recording"; 39 | if (recordButton) recordButton.innerHTML = "Stop"; 40 | const replyInput = document.getElementById("reply-input"); 41 | 42 | if (replyInput) { 43 | replyInput.value = ""; 44 | } 45 | }; 46 | 47 | const handleStopSTT = () => { 48 | recordingStatus = "off"; 49 | if (recordButton) recordButton.innerHTML = "Record"; 50 | }; 51 | 52 | const handleTranscript = (transcript: string) => { 53 | confirmedText = `${confirmedText} ${transcript}`; 54 | volatileText = ""; 55 | const replyInput = document.getElementById("reply-input"); 56 | if (replyInput) { 57 | replyInput.value = confirmedText; 58 | } 59 | }; 60 | 61 | const handleInterimTranscript = (interimTranscript: string) => { 62 | volatileText = interimTranscript; 63 | const replyInput = document.getElementById("reply-input"); 64 | if (replyInput) { 65 | replyInput.value = `${confirmedText} ${volatileText}`; 66 | } 67 | }; 68 | 69 | // Setup the audio manager. 
70 | const audioManager = new AudioManager({ 71 | duckVolumeLevel: 0.1, 72 | normalVolumeLevel: 1, 73 | sttService: "charisma/deepgram", 74 | streamTimeslice: 100, 75 | handleTranscript, 76 | handleInterimTranscript, 77 | handleStartSTT, 78 | handleStopSTT, 79 | handleDisconnect: (message: string) => 80 | appendMessage(message, "disconnected-message"), 81 | handleConnect: (message: string) => 82 | appendMessage(message, "connected-message"), 83 | debugLogFunction: (message: string) => 84 | console.log( 85 | `${new Date().toISOString().split("T")[1].slice(0, 12)} ${message}`, 86 | ), 87 | }); 88 | 89 | let playthrough: Playthrough; 90 | let conversation: Conversation; 91 | 92 | window.start = async function start() { 93 | // In order to play audio, this method must be called by a user interaction. 94 | // This is due to a security restriction in some browsers. 95 | audioManager.initialise(); 96 | 97 | const storyIdInput = document.getElementById("story-id"); 98 | const storyId = Number(storyIdInput.value); 99 | const storyApiKeyInput = ( 100 | document.getElementById("story-api-key") 101 | ); 102 | const storyApiKey = storyApiKeyInput.value; 103 | const storyVersionInput = document.getElementById("version"); 104 | const storyVersion = Number(storyVersionInput.value) || undefined; 105 | const StartGraphReferenceIdInput = document.getElementById( 106 | "startGraphReferenceId", 107 | ); 108 | const startGraphReferenceId = StartGraphReferenceIdInput.value; 109 | 110 | const { token } = await createPlaythroughToken({ 111 | storyId, 112 | apiKey: storyApiKey, 113 | version: storyVersion, 114 | }); 115 | 116 | const { conversationUuid } = await createConversation(token); 117 | playthrough = new Playthrough(token); 118 | conversation = playthrough.joinConversation(conversationUuid); 119 | 120 | conversation.setSpeechConfig({ 121 | encoding: ["mp3", "wav"], 122 | output: "buffer", 123 | }); 124 | 125 | conversation.on("message", (message: Message) => { 126 | const 
characterMessage = 127 | message.type === "character" ? message.message : null; 128 | 129 | // For this demo, we only care about character messages. 130 | if (!characterMessage) return; 131 | 132 | // Put the character message on the page. 133 | appendMessage( 134 | characterMessage.text, 135 | "character-message", 136 | characterMessage.character?.name, 137 | ); 138 | 139 | // Play character speech. 140 | if (characterMessage.speech) { 141 | audioManager.playCharacterSpeech( 142 | characterMessage.speech.audio as ArrayBuffer, 143 | { 144 | trackId: String(characterMessage.character?.id), 145 | interrupt: "track", 146 | }, 147 | ); 148 | } 149 | 150 | if (characterMessage.media) { 151 | if (characterMessage.media.stopAllAudio) { 152 | audioManager.mediaAudioStopAll(); 153 | } 154 | 155 | // Play media audio if it exists in the node. 156 | audioManager.mediaAudioPlay(characterMessage.media.audioTracks); 157 | } 158 | }); 159 | 160 | conversation.on("problem", console.warn); 161 | 162 | // Listen for the playthrough to connect and start the conversation when it does. 163 | let started = false; 164 | playthrough.on("connection-status", (status) => { 165 | appendMessage( 166 | status, 167 | status === "disconnected" ? "disconnected-message" : "connected-message", 168 | ); 169 | 170 | if (status === "connected" && !started) { 171 | const conversationParameters = startGraphReferenceId 172 | ? { startGraphReferenceId } 173 | : undefined; 174 | conversation.start(conversationParameters); 175 | started = true; 176 | } 177 | }); 178 | 179 | const { playerSessionId } = await playthrough.connect(); 180 | audioManager.connect(token, playerSessionId); 181 | }; 182 | 183 | const reply = () => { 184 | if (!playthrough || !conversation) return; 185 | 186 | // Stop listening when you send a message. 
187 | audioManager.stopListening(); 188 | 189 | const replyInput = document.getElementById("reply-input"); 190 | const text = replyInput.value; 191 | 192 | if (text.trim() === "") return; 193 | 194 | conversation.reply({ text }); 195 | 196 | // Put player message on the page. 197 | appendMessage(text, "player-message", "You"); 198 | replyInput.value = ""; 199 | }; 200 | 201 | // Handle the Enter key press. 202 | window.onKeyPress = function onKeyPress(event) { 203 | if (!event || !event.currentTarget) return; 204 | if (event.key === "Enter") { 205 | reply(); 206 | } 207 | }; 208 | 209 | window.reply = reply; 210 | 211 | // Toggling the microphone will request the stt service to connect. 212 | window.toggleMicrophone = () => { 213 | if (!recordButton) return; 214 | 215 | if (recordingStatus === "off") { 216 | audioManager.startListening(); 217 | confirmedText = ""; 218 | volatileText = ""; 219 | recordingStatus = "starting"; 220 | recordButton.innerHTML = "..."; 221 | } else if (recordingStatus === "recording") { 222 | audioManager.stopListening(); 223 | recordingStatus = "off"; 224 | recordButton.innerHTML = "Record"; 225 | } 226 | }; 227 | 228 | window.toggleMuteBackgroundAudio = () => { 229 | audioManager.mediaAudioToggleMute(); 230 | }; 231 | -------------------------------------------------------------------------------- /demos/deepgram-stt/src/style.css: -------------------------------------------------------------------------------- 1 | body { 2 | font-family: Inter, system-ui, Avenir, Helvetica, Arial, sans-serif; 3 | margin: 0; 4 | display: flex; 5 | place-items: center; 6 | flex-direction: column; 7 | min-width: 320px; 8 | min-height: 100vh; 9 | } 10 | 11 | h1 { 12 | font-size: 3.2em; 13 | line-height: 1.1; 14 | } 15 | 16 | #app { 17 | min-width: 40vw; 18 | max-width: 1280px; 19 | margin: 0 auto; 20 | padding: 1rem; 21 | text-align: center; 22 | } 23 | 24 | button { 25 | border-radius: 8px; 26 | border: 1px solid transparent; 27 | padding: 0.6em 1.2em; 28 
| font-size: 1em; 29 | font-weight: 500; 30 | font-family: inherit; 31 | background-color: #1a1a1a; 32 | cursor: pointer; 33 | transition: border-color 0.25s; 34 | background-color: #f9f9f9; 35 | margin: 0.5em; 36 | } 37 | button:hover { 38 | background-color: #f2f2f2; 39 | } 40 | 41 | #messages { 42 | margin: 1em 0; 43 | border: 1px solid #ccc; 44 | padding: 1em 0; 45 | width: 100%; 46 | height: 200px; 47 | overflow-y: scroll; 48 | text-align: left; 49 | } 50 | 51 | .message { 52 | margin: 0.5em; 53 | } 54 | 55 | .connected-message { 56 | color: green; 57 | font-size: 0.8em; 58 | padding-left: 0.5em; 59 | } 60 | 61 | .disconnected-message { 62 | color: red; 63 | font-size: 0.8em; 64 | padding-left: 0.5em; 65 | } 66 | 67 | .story-parameters-container { 68 | display: flex; 69 | flex-direction: column; 70 | } 71 | 72 | .story-parameters-container input { 73 | margin-bottom: 0.5em; 74 | } 75 | 76 | #reply-input { 77 | width: 100%; 78 | } 79 | 80 | .comment { 81 | font-size: 0.6em; 82 | color: #999; 83 | } 84 | -------------------------------------------------------------------------------- /demos/deepgram-stt/src/vite-env.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | -------------------------------------------------------------------------------- /demos/deepgram-stt/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2020", 4 | "useDefineForClassFields": true, 5 | "module": "ESNext", 6 | "lib": ["ES2020", "DOM", "DOM.Iterable"], 7 | "skipLibCheck": true, 8 | 9 | /* Bundler mode */ 10 | "moduleResolution": "bundler", 11 | "allowImportingTsExtensions": true, 12 | "resolveJsonModule": true, 13 | "isolatedModules": true, 14 | "moduleDetection": "force", 15 | "noEmit": true, 16 | 17 | /* Linting */ 18 | "strict": true, 19 | "noUnusedLocals": true, 20 | "noUnusedParameters": true, 21 | "noFallthroughCasesInSwitch": true 22 | }, 23 
| "include": ["src"] 24 | } 25 | -------------------------------------------------------------------------------- /demos/no-stt/.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | pnpm-debug.log* 8 | lerna-debug.log* 9 | 10 | node_modules 11 | dist 12 | dist-ssr 13 | *.local 14 | 15 | # Editor directories and files 16 | .vscode/* 17 | !.vscode/extensions.json 18 | .idea 19 | .DS_Store 20 | *.suo 21 | *.ntvs* 22 | *.njsproj 23 | *.sln 24 | *.sw? 25 | 26 | .env 27 | -------------------------------------------------------------------------------- /demos/no-stt/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Charisma JS SDK Demo 7 | 8 | 9 |
10 |
11 | Story ID 12 | 13 | Story API Key 14 | 15 | only needed for draft or unpublished versions 16 | 17 | 18 | Version 19 | 20 | use -1 for draft, or undefined for most recent published 21 | 22 | 23 | StartGraphReferenceId 24 | only for pro stories 25 | 26 |
27 | 28 |
29 |
30 | 35 |
36 | 37 |
38 | 41 |
42 | 43 | 44 | 45 | -------------------------------------------------------------------------------- /demos/no-stt/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "test", 3 | "private": true, 4 | "version": "0.0.0", 5 | "type": "module", 6 | "scripts": { 7 | "dev": "vite --port 3000", 8 | "build": "vite build", 9 | "preview": "vite preview" 10 | }, 11 | "devDependencies": { 12 | "vite": "^5.3.1" 13 | }, 14 | "dependencies": { 15 | "@charisma-ai/sdk": "link:../.." 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /demos/no-stt/pnpm-lock.yaml: -------------------------------------------------------------------------------- 1 | lockfileVersion: '9.0' 2 | 3 | settings: 4 | autoInstallPeers: true 5 | excludeLinksFromLockfile: false 6 | 7 | importers: 8 | 9 | .: 10 | dependencies: 11 | '@charisma-ai/sdk': 12 | specifier: link:../.. 13 | version: link:../.. 14 | devDependencies: 15 | vite: 16 | specifier: ^5.3.1 17 | version: 5.3.3 18 | 19 | packages: 20 | 21 | '@esbuild/aix-ppc64@0.21.5': 22 | resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==, tarball: https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz} 23 | engines: {node: '>=12'} 24 | cpu: [ppc64] 25 | os: [aix] 26 | 27 | '@esbuild/android-arm64@0.21.5': 28 | resolution: {integrity: sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==, tarball: https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz} 29 | engines: {node: '>=12'} 30 | cpu: [arm64] 31 | os: [android] 32 | 33 | '@esbuild/android-arm@0.21.5': 34 | resolution: {integrity: sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==, tarball: https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz} 35 | engines: {node: '>=12'} 36 | cpu: [arm] 37 
| os: [android] 38 | 39 | '@esbuild/android-x64@0.21.5': 40 | resolution: {integrity: sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==, tarball: https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz} 41 | engines: {node: '>=12'} 42 | cpu: [x64] 43 | os: [android] 44 | 45 | '@esbuild/darwin-arm64@0.21.5': 46 | resolution: {integrity: sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==, tarball: https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz} 47 | engines: {node: '>=12'} 48 | cpu: [arm64] 49 | os: [darwin] 50 | 51 | '@esbuild/darwin-x64@0.21.5': 52 | resolution: {integrity: sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==, tarball: https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz} 53 | engines: {node: '>=12'} 54 | cpu: [x64] 55 | os: [darwin] 56 | 57 | '@esbuild/freebsd-arm64@0.21.5': 58 | resolution: {integrity: sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==, tarball: https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz} 59 | engines: {node: '>=12'} 60 | cpu: [arm64] 61 | os: [freebsd] 62 | 63 | '@esbuild/freebsd-x64@0.21.5': 64 | resolution: {integrity: sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==, tarball: https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz} 65 | engines: {node: '>=12'} 66 | cpu: [x64] 67 | os: [freebsd] 68 | 69 | '@esbuild/linux-arm64@0.21.5': 70 | resolution: {integrity: sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==, tarball: https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz} 71 | engines: {node: '>=12'} 72 | cpu: [arm64] 73 | os: [linux] 74 | 75 | '@esbuild/linux-arm@0.21.5': 76 | resolution: {integrity: 
sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==, tarball: https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz} 77 | engines: {node: '>=12'} 78 | cpu: [arm] 79 | os: [linux] 80 | 81 | '@esbuild/linux-ia32@0.21.5': 82 | resolution: {integrity: sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==, tarball: https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz} 83 | engines: {node: '>=12'} 84 | cpu: [ia32] 85 | os: [linux] 86 | 87 | '@esbuild/linux-loong64@0.21.5': 88 | resolution: {integrity: sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==, tarball: https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz} 89 | engines: {node: '>=12'} 90 | cpu: [loong64] 91 | os: [linux] 92 | 93 | '@esbuild/linux-mips64el@0.21.5': 94 | resolution: {integrity: sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==, tarball: https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz} 95 | engines: {node: '>=12'} 96 | cpu: [mips64el] 97 | os: [linux] 98 | 99 | '@esbuild/linux-ppc64@0.21.5': 100 | resolution: {integrity: sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==, tarball: https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz} 101 | engines: {node: '>=12'} 102 | cpu: [ppc64] 103 | os: [linux] 104 | 105 | '@esbuild/linux-riscv64@0.21.5': 106 | resolution: {integrity: sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==, tarball: https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz} 107 | engines: {node: '>=12'} 108 | cpu: [riscv64] 109 | os: [linux] 110 | 111 | '@esbuild/linux-s390x@0.21.5': 112 | resolution: {integrity: 
sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==, tarball: https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz} 113 | engines: {node: '>=12'} 114 | cpu: [s390x] 115 | os: [linux] 116 | 117 | '@esbuild/linux-x64@0.21.5': 118 | resolution: {integrity: sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==, tarball: https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz} 119 | engines: {node: '>=12'} 120 | cpu: [x64] 121 | os: [linux] 122 | 123 | '@esbuild/netbsd-x64@0.21.5': 124 | resolution: {integrity: sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==, tarball: https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz} 125 | engines: {node: '>=12'} 126 | cpu: [x64] 127 | os: [netbsd] 128 | 129 | '@esbuild/openbsd-x64@0.21.5': 130 | resolution: {integrity: sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==, tarball: https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz} 131 | engines: {node: '>=12'} 132 | cpu: [x64] 133 | os: [openbsd] 134 | 135 | '@esbuild/sunos-x64@0.21.5': 136 | resolution: {integrity: sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==, tarball: https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz} 137 | engines: {node: '>=12'} 138 | cpu: [x64] 139 | os: [sunos] 140 | 141 | '@esbuild/win32-arm64@0.21.5': 142 | resolution: {integrity: sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==, tarball: https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz} 143 | engines: {node: '>=12'} 144 | cpu: [arm64] 145 | os: [win32] 146 | 147 | '@esbuild/win32-ia32@0.21.5': 148 | resolution: {integrity: sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==, 
tarball: https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz} 149 | engines: {node: '>=12'} 150 | cpu: [ia32] 151 | os: [win32] 152 | 153 | '@esbuild/win32-x64@0.21.5': 154 | resolution: {integrity: sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==, tarball: https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz} 155 | engines: {node: '>=12'} 156 | cpu: [x64] 157 | os: [win32] 158 | 159 | '@rollup/rollup-android-arm-eabi@4.18.1': 160 | resolution: {integrity: sha512-lncuC4aHicncmbORnx+dUaAgzee9cm/PbIqgWz1PpXuwc+sa1Ct83tnqUDy/GFKleLiN7ZIeytM6KJ4cAn1SxA==, tarball: https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.18.1.tgz} 161 | cpu: [arm] 162 | os: [android] 163 | 164 | '@rollup/rollup-android-arm64@4.18.1': 165 | resolution: {integrity: sha512-F/tkdw0WSs4ojqz5Ovrw5r9odqzFjb5LIgHdHZG65dFI1lWTWRVy32KDJLKRISHgJvqUeUhdIvy43fX41znyDg==, tarball: https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.18.1.tgz} 166 | cpu: [arm64] 167 | os: [android] 168 | 169 | '@rollup/rollup-darwin-arm64@4.18.1': 170 | resolution: {integrity: sha512-vk+ma8iC1ebje/ahpxpnrfVQJibTMyHdWpOGZ3JpQ7Mgn/3QNHmPq7YwjZbIE7km73dH5M1e6MRRsnEBW7v5CQ==, tarball: https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.18.1.tgz} 171 | cpu: [arm64] 172 | os: [darwin] 173 | 174 | '@rollup/rollup-darwin-x64@4.18.1': 175 | resolution: {integrity: sha512-IgpzXKauRe1Tafcej9STjSSuG0Ghu/xGYH+qG6JwsAUxXrnkvNHcq/NL6nz1+jzvWAnQkuAJ4uIwGB48K9OCGA==, tarball: https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.18.1.tgz} 176 | cpu: [x64] 177 | os: [darwin] 178 | 179 | '@rollup/rollup-linux-arm-gnueabihf@4.18.1': 180 | resolution: {integrity: sha512-P9bSiAUnSSM7EmyRK+e5wgpqai86QOSv8BwvkGjLwYuOpaeomiZWifEos517CwbG+aZl1T4clSE1YqqH2JRs+g==, tarball: 
https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.18.1.tgz} 181 | cpu: [arm] 182 | os: [linux] 183 | 184 | '@rollup/rollup-linux-arm-musleabihf@4.18.1': 185 | resolution: {integrity: sha512-5RnjpACoxtS+aWOI1dURKno11d7krfpGDEn19jI8BuWmSBbUC4ytIADfROM1FZrFhQPSoP+KEa3NlEScznBTyQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.18.1.tgz} 186 | cpu: [arm] 187 | os: [linux] 188 | 189 | '@rollup/rollup-linux-arm64-gnu@4.18.1': 190 | resolution: {integrity: sha512-8mwmGD668m8WaGbthrEYZ9CBmPug2QPGWxhJxh/vCgBjro5o96gL04WLlg5BA233OCWLqERy4YUzX3bJGXaJgQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.18.1.tgz} 191 | cpu: [arm64] 192 | os: [linux] 193 | 194 | '@rollup/rollup-linux-arm64-musl@4.18.1': 195 | resolution: {integrity: sha512-dJX9u4r4bqInMGOAQoGYdwDP8lQiisWb9et+T84l2WXk41yEej8v2iGKodmdKimT8cTAYt0jFb+UEBxnPkbXEQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.18.1.tgz} 196 | cpu: [arm64] 197 | os: [linux] 198 | 199 | '@rollup/rollup-linux-powerpc64le-gnu@4.18.1': 200 | resolution: {integrity: sha512-V72cXdTl4EI0x6FNmho4D502sy7ed+LuVW6Ym8aI6DRQ9hQZdp5sj0a2usYOlqvFBNKQnLQGwmYnujo2HvjCxQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.18.1.tgz} 201 | cpu: [ppc64] 202 | os: [linux] 203 | 204 | '@rollup/rollup-linux-riscv64-gnu@4.18.1': 205 | resolution: {integrity: sha512-f+pJih7sxoKmbjghrM2RkWo2WHUW8UbfxIQiWo5yeCaCM0TveMEuAzKJte4QskBp1TIinpnRcxkquY+4WuY/tg==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.18.1.tgz} 206 | cpu: [riscv64] 207 | os: [linux] 208 | 209 | '@rollup/rollup-linux-s390x-gnu@4.18.1': 210 | resolution: {integrity: sha512-qb1hMMT3Fr/Qz1OKovCuUM11MUNLUuHeBC2DPPAWUYYUAOFWaxInaTwTQmc7Fl5La7DShTEpmYwgdt2hG+4TEg==, tarball: 
https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.18.1.tgz} 211 | cpu: [s390x] 212 | os: [linux] 213 | 214 | '@rollup/rollup-linux-x64-gnu@4.18.1': 215 | resolution: {integrity: sha512-7O5u/p6oKUFYjRbZkL2FLbwsyoJAjyeXHCU3O4ndvzg2OFO2GinFPSJFGbiwFDaCFc+k7gs9CF243PwdPQFh5g==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.18.1.tgz} 216 | cpu: [x64] 217 | os: [linux] 218 | 219 | '@rollup/rollup-linux-x64-musl@4.18.1': 220 | resolution: {integrity: sha512-pDLkYITdYrH/9Cv/Vlj8HppDuLMDUBmgsM0+N+xLtFd18aXgM9Nyqupb/Uw+HeidhfYg2lD6CXvz6CjoVOaKjQ==, tarball: https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.18.1.tgz} 221 | cpu: [x64] 222 | os: [linux] 223 | 224 | '@rollup/rollup-win32-arm64-msvc@4.18.1': 225 | resolution: {integrity: sha512-W2ZNI323O/8pJdBGil1oCauuCzmVd9lDmWBBqxYZcOqWD6aWqJtVBQ1dFrF4dYpZPks6F+xCZHfzG5hYlSHZ6g==, tarball: https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.18.1.tgz} 226 | cpu: [arm64] 227 | os: [win32] 228 | 229 | '@rollup/rollup-win32-ia32-msvc@4.18.1': 230 | resolution: {integrity: sha512-ELfEX1/+eGZYMaCIbK4jqLxO1gyTSOIlZr6pbC4SRYFaSIDVKOnZNMdoZ+ON0mrFDp4+H5MhwNC1H/AhE3zQLg==, tarball: https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.18.1.tgz} 231 | cpu: [ia32] 232 | os: [win32] 233 | 234 | '@rollup/rollup-win32-x64-msvc@4.18.1': 235 | resolution: {integrity: sha512-yjk2MAkQmoaPYCSu35RLJ62+dz358nE83VfTePJRp8CG7aMg25mEJYpXFiD+NcevhX8LxD5OP5tktPXnXN7GDw==, tarball: https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.18.1.tgz} 236 | cpu: [x64] 237 | os: [win32] 238 | 239 | '@types/estree@1.0.5': 240 | resolution: {integrity: sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==, tarball: https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz} 241 | 242 | esbuild@0.21.5: 243 | resolution: 
{integrity: sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==, tarball: https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz} 244 | engines: {node: '>=12'} 245 | hasBin: true 246 | 247 | fsevents@2.3.3: 248 | resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==, tarball: https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz} 249 | engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} 250 | os: [darwin] 251 | 252 | nanoid@3.3.7: 253 | resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==, tarball: https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz} 254 | engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} 255 | hasBin: true 256 | 257 | picocolors@1.0.1: 258 | resolution: {integrity: sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==, tarball: https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz} 259 | 260 | postcss@8.4.39: 261 | resolution: {integrity: sha512-0vzE+lAiG7hZl1/9I8yzKLx3aR9Xbof3fBHKunvMfOCYAtMhrsnccJY2iTURb9EZd5+pLuiNV9/c/GZJOHsgIw==, tarball: https://registry.npmjs.org/postcss/-/postcss-8.4.39.tgz} 262 | engines: {node: ^10 || ^12 || >=14} 263 | 264 | rollup@4.18.1: 265 | resolution: {integrity: sha512-Elx2UT8lzxxOXMpy5HWQGZqkrQOtrVDDa/bm9l10+U4rQnVzbL/LgZ4NOM1MPIDyHk69W4InuYDF5dzRh4Kw1A==, tarball: https://registry.npmjs.org/rollup/-/rollup-4.18.1.tgz} 266 | engines: {node: '>=18.0.0', npm: '>=8.0.0'} 267 | hasBin: true 268 | 269 | source-map-js@1.2.0: 270 | resolution: {integrity: sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==, tarball: https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz} 271 | engines: {node: '>=0.10.0'} 272 | 273 | vite@5.3.3: 274 | resolution: {integrity: 
sha512-NPQdeCU0Dv2z5fu+ULotpuq5yfCS1BzKUIPhNbP3YBfAMGJXbt2nS+sbTFu+qchaqWTD+H3JK++nRwr6XIcp6A==, tarball: https://registry.npmjs.org/vite/-/vite-5.3.3.tgz} 275 | engines: {node: ^18.0.0 || >=20.0.0} 276 | hasBin: true 277 | peerDependencies: 278 | '@types/node': ^18.0.0 || >=20.0.0 279 | less: '*' 280 | lightningcss: ^1.21.0 281 | sass: '*' 282 | stylus: '*' 283 | sugarss: '*' 284 | terser: ^5.4.0 285 | peerDependenciesMeta: 286 | '@types/node': 287 | optional: true 288 | less: 289 | optional: true 290 | lightningcss: 291 | optional: true 292 | sass: 293 | optional: true 294 | stylus: 295 | optional: true 296 | sugarss: 297 | optional: true 298 | terser: 299 | optional: true 300 | 301 | snapshots: 302 | 303 | '@esbuild/aix-ppc64@0.21.5': 304 | optional: true 305 | 306 | '@esbuild/android-arm64@0.21.5': 307 | optional: true 308 | 309 | '@esbuild/android-arm@0.21.5': 310 | optional: true 311 | 312 | '@esbuild/android-x64@0.21.5': 313 | optional: true 314 | 315 | '@esbuild/darwin-arm64@0.21.5': 316 | optional: true 317 | 318 | '@esbuild/darwin-x64@0.21.5': 319 | optional: true 320 | 321 | '@esbuild/freebsd-arm64@0.21.5': 322 | optional: true 323 | 324 | '@esbuild/freebsd-x64@0.21.5': 325 | optional: true 326 | 327 | '@esbuild/linux-arm64@0.21.5': 328 | optional: true 329 | 330 | '@esbuild/linux-arm@0.21.5': 331 | optional: true 332 | 333 | '@esbuild/linux-ia32@0.21.5': 334 | optional: true 335 | 336 | '@esbuild/linux-loong64@0.21.5': 337 | optional: true 338 | 339 | '@esbuild/linux-mips64el@0.21.5': 340 | optional: true 341 | 342 | '@esbuild/linux-ppc64@0.21.5': 343 | optional: true 344 | 345 | '@esbuild/linux-riscv64@0.21.5': 346 | optional: true 347 | 348 | '@esbuild/linux-s390x@0.21.5': 349 | optional: true 350 | 351 | '@esbuild/linux-x64@0.21.5': 352 | optional: true 353 | 354 | '@esbuild/netbsd-x64@0.21.5': 355 | optional: true 356 | 357 | '@esbuild/openbsd-x64@0.21.5': 358 | optional: true 359 | 360 | '@esbuild/sunos-x64@0.21.5': 361 | optional: true 362 | 363 | 
'@esbuild/win32-arm64@0.21.5': 364 | optional: true 365 | 366 | '@esbuild/win32-ia32@0.21.5': 367 | optional: true 368 | 369 | '@esbuild/win32-x64@0.21.5': 370 | optional: true 371 | 372 | '@rollup/rollup-android-arm-eabi@4.18.1': 373 | optional: true 374 | 375 | '@rollup/rollup-android-arm64@4.18.1': 376 | optional: true 377 | 378 | '@rollup/rollup-darwin-arm64@4.18.1': 379 | optional: true 380 | 381 | '@rollup/rollup-darwin-x64@4.18.1': 382 | optional: true 383 | 384 | '@rollup/rollup-linux-arm-gnueabihf@4.18.1': 385 | optional: true 386 | 387 | '@rollup/rollup-linux-arm-musleabihf@4.18.1': 388 | optional: true 389 | 390 | '@rollup/rollup-linux-arm64-gnu@4.18.1': 391 | optional: true 392 | 393 | '@rollup/rollup-linux-arm64-musl@4.18.1': 394 | optional: true 395 | 396 | '@rollup/rollup-linux-powerpc64le-gnu@4.18.1': 397 | optional: true 398 | 399 | '@rollup/rollup-linux-riscv64-gnu@4.18.1': 400 | optional: true 401 | 402 | '@rollup/rollup-linux-s390x-gnu@4.18.1': 403 | optional: true 404 | 405 | '@rollup/rollup-linux-x64-gnu@4.18.1': 406 | optional: true 407 | 408 | '@rollup/rollup-linux-x64-musl@4.18.1': 409 | optional: true 410 | 411 | '@rollup/rollup-win32-arm64-msvc@4.18.1': 412 | optional: true 413 | 414 | '@rollup/rollup-win32-ia32-msvc@4.18.1': 415 | optional: true 416 | 417 | '@rollup/rollup-win32-x64-msvc@4.18.1': 418 | optional: true 419 | 420 | '@types/estree@1.0.5': {} 421 | 422 | esbuild@0.21.5: 423 | optionalDependencies: 424 | '@esbuild/aix-ppc64': 0.21.5 425 | '@esbuild/android-arm': 0.21.5 426 | '@esbuild/android-arm64': 0.21.5 427 | '@esbuild/android-x64': 0.21.5 428 | '@esbuild/darwin-arm64': 0.21.5 429 | '@esbuild/darwin-x64': 0.21.5 430 | '@esbuild/freebsd-arm64': 0.21.5 431 | '@esbuild/freebsd-x64': 0.21.5 432 | '@esbuild/linux-arm': 0.21.5 433 | '@esbuild/linux-arm64': 0.21.5 434 | '@esbuild/linux-ia32': 0.21.5 435 | '@esbuild/linux-loong64': 0.21.5 436 | '@esbuild/linux-mips64el': 0.21.5 437 | '@esbuild/linux-ppc64': 0.21.5 438 | 
'@esbuild/linux-riscv64': 0.21.5 439 | '@esbuild/linux-s390x': 0.21.5 440 | '@esbuild/linux-x64': 0.21.5 441 | '@esbuild/netbsd-x64': 0.21.5 442 | '@esbuild/openbsd-x64': 0.21.5 443 | '@esbuild/sunos-x64': 0.21.5 444 | '@esbuild/win32-arm64': 0.21.5 445 | '@esbuild/win32-ia32': 0.21.5 446 | '@esbuild/win32-x64': 0.21.5 447 | 448 | fsevents@2.3.3: 449 | optional: true 450 | 451 | nanoid@3.3.7: {} 452 | 453 | picocolors@1.0.1: {} 454 | 455 | postcss@8.4.39: 456 | dependencies: 457 | nanoid: 3.3.7 458 | picocolors: 1.0.1 459 | source-map-js: 1.2.0 460 | 461 | rollup@4.18.1: 462 | dependencies: 463 | '@types/estree': 1.0.5 464 | optionalDependencies: 465 | '@rollup/rollup-android-arm-eabi': 4.18.1 466 | '@rollup/rollup-android-arm64': 4.18.1 467 | '@rollup/rollup-darwin-arm64': 4.18.1 468 | '@rollup/rollup-darwin-x64': 4.18.1 469 | '@rollup/rollup-linux-arm-gnueabihf': 4.18.1 470 | '@rollup/rollup-linux-arm-musleabihf': 4.18.1 471 | '@rollup/rollup-linux-arm64-gnu': 4.18.1 472 | '@rollup/rollup-linux-arm64-musl': 4.18.1 473 | '@rollup/rollup-linux-powerpc64le-gnu': 4.18.1 474 | '@rollup/rollup-linux-riscv64-gnu': 4.18.1 475 | '@rollup/rollup-linux-s390x-gnu': 4.18.1 476 | '@rollup/rollup-linux-x64-gnu': 4.18.1 477 | '@rollup/rollup-linux-x64-musl': 4.18.1 478 | '@rollup/rollup-win32-arm64-msvc': 4.18.1 479 | '@rollup/rollup-win32-ia32-msvc': 4.18.1 480 | '@rollup/rollup-win32-x64-msvc': 4.18.1 481 | fsevents: 2.3.3 482 | 483 | source-map-js@1.2.0: {} 484 | 485 | vite@5.3.3: 486 | dependencies: 487 | esbuild: 0.21.5 488 | postcss: 8.4.39 489 | rollup: 4.18.1 490 | optionalDependencies: 491 | fsevents: 2.3.3 492 | -------------------------------------------------------------------------------- /demos/no-stt/readme.md: -------------------------------------------------------------------------------- 1 | # Demo Project without Speech-to-Text 2 | 3 | ## Setup 4 | ``` 5 | pnpm install 6 | ``` 7 | 8 | ## Run Locally 9 | ``` 10 | pnpm run dev 11 | ``` 12 | 13 | Note: The demo 
uses this local version of the SDK (`"link:../.."` in package.json). If you make changes to the SDK, you'll need to build the SDK again: 14 | ``` 15 | cd ../ 16 | pnpm run build 17 | ``` 18 | -------------------------------------------------------------------------------- /demos/no-stt/src/main.js: -------------------------------------------------------------------------------- 1 | import "./style.css"; 2 | import { 3 | Playthrough, 4 | AudioManager, 5 | createPlaythroughToken, 6 | createConversation, 7 | } from "@charisma-ai/sdk"; 8 | 9 | const messagesDiv = document.getElementById("messages"); 10 | 11 | const appendMessage = (message, className, name) => { 12 | const div = document.createElement("div"); 13 | div.classList.add(className, "message"); 14 | div.innerHTML = `${name ? `${name}:` : ""} ${message}`; 15 | messagesDiv?.appendChild(div); 16 | }; 17 | 18 | // Setup the audio manager. 19 | const audioManager = new AudioManager({ 20 | duckVolumeLevel: 0.1, 21 | normalVolumeLevel: 1, 22 | sttService: "browser", 23 | streamTimeslice: 100, 24 | }); 25 | 26 | let playthrough; 27 | let conversation; 28 | 29 | window.start = async function start() { 30 | // In order to play audio, this method must be called by a user interaction. 31 | // This is due to a security restriction in some browsers. 
32 | audioManager.initialise(); 33 | 34 | const storyIdInput = document.getElementById("story-id"); 35 | const storyId = Number(storyIdInput.value); 36 | const storyApiKeyInput = document.getElementById("story-api-key"); 37 | const storyApiKey = storyApiKeyInput.value; 38 | const storyVersionInput = document.getElementById("version"); 39 | const storyVersion = Number(storyVersionInput.value) || undefined; 40 | const StartGraphReferenceIdInput = document.getElementById( 41 | "startGraphReferenceId", 42 | ); 43 | const startGraphReferenceId = StartGraphReferenceIdInput.value; 44 | 45 | const { token } = await createPlaythroughToken({ 46 | storyId, 47 | apiKey: storyApiKey, 48 | version: storyVersion, 49 | }); 50 | 51 | const { conversationUuid } = await createConversation(token); 52 | playthrough = new Playthrough(token); 53 | conversation = playthrough.joinConversation(conversationUuid); 54 | 55 | conversation.setSpeechConfig({ 56 | encoding: ["mp3", "wav"], 57 | output: "buffer", 58 | }); 59 | 60 | conversation.on("message", (message) => { 61 | const characterMessage = 62 | message.type === "character" ? message.message : null; 63 | 64 | // For this demo, we only care about character messages. 65 | if (!characterMessage) return; 66 | 67 | // Put the character message on the page. 68 | appendMessage( 69 | characterMessage.text, 70 | "character-message", 71 | characterMessage.character?.name, 72 | ); 73 | 74 | // Play character speech. 75 | if (characterMessage.speech) { 76 | audioManager.playCharacterSpeech(characterMessage.speech.audio, { 77 | trackId: String(characterMessage.character?.id), 78 | interrupt: "track", 79 | }); 80 | } 81 | 82 | if (characterMessage.media) { 83 | if (characterMessage.media.stopAllAudio) { 84 | audioManager.mediaAudioStopAll(); 85 | } 86 | 87 | // Play media audio if it exists in the node. 
88 | audioManager.mediaAudioPlay(characterMessage.media.audioTracks); 89 | } 90 | }); 91 | 92 | conversation.on("problem", console.warn); 93 | 94 | // Listen for the playthrough to connect and start the conversation when it does. 95 | let started = false; 96 | playthrough.on("connection-status", (status) => { 97 | appendMessage( 98 | status, 99 | status === "disconnected" ? "disconnected-message" : "connected-message", 100 | ); 101 | 102 | if (status === "connected" && !started) { 103 | const conversationParameters = startGraphReferenceId 104 | ? { startGraphReferenceId } 105 | : undefined; 106 | conversation.start(conversationParameters); 107 | started = true; 108 | } 109 | }); 110 | 111 | await playthrough.connect(); 112 | }; 113 | 114 | const reply = () => { 115 | if (!playthrough || !conversation) return; 116 | 117 | const replyInput = document.getElementById("reply-input"); 118 | const text = replyInput.value; 119 | 120 | if (text.trim() === "") return; 121 | 122 | conversation.reply({ text }); 123 | replyInput.value = ""; 124 | 125 | // Put player message on the page. 126 | appendMessage(text, "player-message", "You"); 127 | }; 128 | 129 | // Handle the Enter key press. 
130 | window.onKeyPress = function onKeyPress(event) { 131 | if (!event || !event.currentTarget) return; 132 | if (event.key === "Enter") { 133 | reply(); 134 | } 135 | }; 136 | 137 | window.reply = reply; 138 | 139 | window.toggleMuteBackgroundAudio = () => { 140 | audioManager.mediaAudioToggleMute(); 141 | }; 142 | -------------------------------------------------------------------------------- /demos/no-stt/src/style.css: -------------------------------------------------------------------------------- 1 | body { 2 | font-family: Inter, system-ui, Avenir, Helvetica, Arial, sans-serif; 3 | margin: 0; 4 | display: flex; 5 | place-items: center; 6 | flex-direction: column; 7 | min-width: 320px; 8 | min-height: 100vh; 9 | } 10 | 11 | h1 { 12 | font-size: 3.2em; 13 | line-height: 1.1; 14 | } 15 | 16 | #app { 17 | min-width: 40vw; 18 | max-width: 1280px; 19 | margin: 0 auto; 20 | padding: 1rem; 21 | text-align: center; 22 | } 23 | 24 | button { 25 | border-radius: 8px; 26 | border: 1px solid transparent; 27 | padding: 0.6em 1.2em; 28 | font-size: 1em; 29 | font-weight: 500; 30 | font-family: inherit; 31 | background-color: #1a1a1a; 32 | cursor: pointer; 33 | transition: border-color 0.25s; 34 | background-color: #f9f9f9; 35 | margin: 0.5em; 36 | } 37 | button:hover { 38 | background-color: #f2f2f2; 39 | } 40 | 41 | #messages { 42 | margin: 1em 0; 43 | border: 1px solid #ccc; 44 | padding: 1em 0; 45 | width: 100%; 46 | height: 200px; 47 | overflow-y: scroll; 48 | text-align: left; 49 | } 50 | 51 | .message { 52 | margin: 0.5em; 53 | } 54 | 55 | .connected-message { 56 | color: green; 57 | font-size: 0.8em; 58 | padding-left: 0.5em; 59 | } 60 | 61 | .disconnected-message { 62 | color: red; 63 | font-size: 0.8em; 64 | padding-left: 0.5em; 65 | } 66 | 67 | .error-message { 68 | color: red; 69 | padding-left: 0.5em; 70 | } 71 | 72 | .story-parameters-container { 73 | display: flex; 74 | flex-direction: column; 75 | } 76 | 77 | .story-parameters-container input { 78 | 
margin-bottom: 0.5em; 79 | } 80 | 81 | #reply-input { 82 | width: 100%; 83 | } 84 | 85 | .comment { 86 | font-size: 0.6em; 87 | color: #999; 88 | } 89 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | export default { 2 | preset: "ts-jest", 3 | testEnvironment: "jsdom", 4 | testMatch: ["**/?(*.)+(spec|test).[tj]s?(x)"], 5 | moduleFileExtensions: ["ts", "js", "json", "node"], 6 | moduleNameMapper: { 7 | "^(.*)\\.js$": "$1", 8 | }, 9 | modulePathIgnorePatterns: ["/dist/"], 10 | globals: { 11 | "ts-jest": { 12 | isolatedModules: true, 13 | tsconfig: "tsconfig.json", 14 | }, 15 | }, 16 | }; 17 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@charisma-ai/sdk", 3 | "version": "6.0.0", 4 | "description": "Charisma.ai SDK for Javascript (browser)", 5 | "source": "src/index.ts", 6 | "type": "module", 7 | "exports": "./dist/index.js", 8 | "types": "./dist/index.d.ts", 9 | "sideEffects": false, 10 | "repository": "https://github.com/charisma-ai/charisma-sdk-js", 11 | "author": "Charisma Entertainment Ltd (https://charisma.ai)", 12 | "license": "MIT", 13 | "keywords": [ 14 | "ai", 15 | "character", 16 | "npc", 17 | "virtual beings", 18 | "voice" 19 | ], 20 | "files": [ 21 | "dist/*" 22 | ], 23 | "scripts": { 24 | "build": "tsc && swc src -d dist", 25 | "cm": "git-cz", 26 | "lint": "eslint --fix --ext js,ts,tsx .", 27 | "prebuild": "rimraf dist", 28 | "prepublish": "pnpm run build", 29 | "semantic-release": "semantic-release", 30 | "test": "jest" 31 | }, 32 | "dependencies": { 33 | "colyseus.js": "^0.14.13", 34 | "eventemitter3": "^5.0.1", 35 | "jwt-decode": "^3.1.2", 36 | "p-queue": "^7.3.4", 37 | "socket.io-client": "^4.7.5" 38 | }, 39 | "devDependencies": { 40 | "@swc/cli": "^0.1.62", 41 | 
"@swc/core": "^1.3.57", 42 | "@types/jest": "^29.5.12", 43 | "@typescript-eslint/eslint-plugin": "^5.59.5", 44 | "@typescript-eslint/parser": "^5.59.5", 45 | "commitizen": "^4.3.0", 46 | "cz-conventional-changelog": "^3.3.0", 47 | "eslint": "^8.40.0", 48 | "eslint-config-airbnb-base": "^15.0.0", 49 | "eslint-config-prettier": "^8.8.0", 50 | "eslint-plugin-import": "^2.27.5", 51 | "eslint-plugin-jsx-a11y": "^6.7.1", 52 | "eslint-plugin-prettier": "^4.2.1", 53 | "husky": "^8.0.3", 54 | "jest": "^29.7.0", 55 | "jest-environment-jsdom": "^29.7.0", 56 | "lint-staged": "^13.2.2", 57 | "parcel": "2.0.0-nightly.1296", 58 | "prettier": "^2.8.8", 59 | "rimraf": "^5.0.0", 60 | "semantic-release": "^21.0.2", 61 | "ts-jest": "^29.2.3", 62 | "typescript": "^5.0.4" 63 | }, 64 | "config": { 65 | "commitizen": { 66 | "path": "./node_modules/cz-conventional-changelog" 67 | } 68 | }, 69 | "@parcel/resolver-default": { 70 | "packageExports": true 71 | }, 72 | "packageManager": "pnpm@9.15.3+sha512.1f79bc245a66eb0b07c5d4d83131240774642caaa86ef7d0434ab47c0d16f66b04e21e0c086eb61e62c77efc4d7f7ec071afad3796af64892fae66509173893a" 73 | } 74 | -------------------------------------------------------------------------------- /src/AudioInputsBrowser.ts: -------------------------------------------------------------------------------- 1 | import { EventEmitter } from "eventemitter3"; 2 | 3 | import type { 4 | SpeechRecognition, 5 | SpeechRecognitionErrorCode, 6 | SpeechRecognitionEvent, 7 | } from "./speech-types.js"; 8 | 9 | interface Constructable { 10 | new (): T; 11 | } 12 | 13 | interface WindowWithSpeechRecognition extends Window { 14 | SpeechRecognition?: Constructable; 15 | webkitSpeechRecognition?: Constructable; 16 | } 17 | 18 | declare const window: WindowWithSpeechRecognition; 19 | 20 | const SpeechRecognitionClass = 21 | typeof window !== "undefined" 22 | ? 
window.SpeechRecognition || window.webkitSpeechRecognition 23 | : undefined; 24 | 25 | export interface SpeechRecognitionOptions { 26 | continuous?: boolean; 27 | interimResults?: boolean; 28 | lang?: string; 29 | } 30 | 31 | export interface SpeechRecognitionStopOptions { 32 | waitForLastResult?: boolean; 33 | } 34 | 35 | type AudioInputsBrowserEvents = { 36 | result: [SpeechRecognitionEvent]; 37 | transcript: [string]; 38 | "transcript-interim": [string]; 39 | error: [SpeechRecognitionErrorCode]; 40 | timeout: []; 41 | start: []; 42 | stop: []; 43 | }; 44 | 45 | class AudioInputsBrowser extends EventEmitter { 46 | private recognition = SpeechRecognitionClass 47 | ? new SpeechRecognitionClass() 48 | : undefined; 49 | 50 | private timeoutId: number | undefined; 51 | 52 | public isSupported = SpeechRecognitionClass !== undefined; 53 | 54 | public startListening = ( 55 | timeout = 10000, 56 | { 57 | continuous = false, 58 | interimResults = true, 59 | lang = "en-GB", 60 | }: SpeechRecognitionOptions = {}, 61 | ): void => { 62 | if (!this.recognition) { 63 | return; 64 | } 65 | 66 | if (this.timeoutId !== undefined) { 67 | clearTimeout(this.timeoutId); 68 | } 69 | 70 | const { recognition } = this; 71 | recognition.continuous = continuous; 72 | recognition.interimResults = interimResults; 73 | recognition.lang = lang; 74 | recognition.onresult = this.onRecognitionResult; 75 | recognition.onstart = (): void => { 76 | this.emit("start"); 77 | }; 78 | recognition.onend = (): void => { 79 | this.emit("stop"); 80 | recognition.start(); 81 | }; 82 | recognition.onerror = (event): void => { 83 | this.emit("error", event.error); 84 | }; 85 | 86 | try { 87 | recognition.start(); 88 | } catch (err) { 89 | // this is fine, it just means we tried to start/stop a stream when it was already started/stopped 90 | } 91 | 92 | if (timeout !== undefined) { 93 | this.timeoutId = window.setTimeout(this.onTimeout, timeout); 94 | } 95 | }; 96 | 97 | public stopListening = ({ 98 | 
waitForLastResult = false, 99 | }: SpeechRecognitionStopOptions = {}): void => { 100 | if (this.timeoutId !== undefined) { 101 | clearTimeout(this.timeoutId); 102 | } 103 | 104 | const { recognition } = this; 105 | if (recognition) { 106 | if (!waitForLastResult) { 107 | recognition.onresult = (): void => undefined; 108 | } 109 | recognition.onend = (): void => { 110 | this.emit("stop"); 111 | }; 112 | try { 113 | if (waitForLastResult) { 114 | recognition.stop(); 115 | } else { 116 | recognition.abort(); 117 | } 118 | } catch (err) { 119 | // this is fine, it just means we tried to start/stop a stream when it was already started/stopped 120 | } 121 | } 122 | }; 123 | 124 | public resetTimeout = (timeout: number): void => { 125 | if (this.timeoutId !== undefined) { 126 | clearTimeout(this.timeoutId); 127 | } 128 | 129 | this.timeoutId = window.setTimeout(this.onTimeout, timeout); 130 | }; 131 | 132 | private onTimeout = (): void => { 133 | this.timeoutId = undefined; 134 | this.emit("timeout"); 135 | this.stopListening(); 136 | }; 137 | 138 | private onRecognitionResult = (event: SpeechRecognitionEvent): void => { 139 | this.emit("result", event); 140 | 141 | if (event.results.length === 0) { 142 | return; 143 | } 144 | 145 | const lastResult = event.results[event.results.length - 1]; 146 | const message = lastResult[0].transcript.trim(); 147 | if (lastResult.isFinal) { 148 | this.emit("transcript", message); 149 | } else { 150 | this.emit("transcript-interim", message); 151 | } 152 | }; 153 | } 154 | 155 | export default AudioInputsBrowser; 156 | -------------------------------------------------------------------------------- /src/AudioInputsService.ts: -------------------------------------------------------------------------------- 1 | import { EventEmitter } from "eventemitter3"; 2 | import { io, type Socket } from "socket.io-client"; 3 | import type { SpeechRecognitionEvent } from "./speech-types.js"; 4 | 5 | type AudioInputsServiceEvents = { 6 | result: 
[SpeechRecognitionEvent]; 7 | transcript: [string]; 8 | "transcript-interim": [string]; 9 | error: [string]; 10 | timeout: []; 11 | start: []; 12 | stop: []; 13 | disconnect: [string]; 14 | connect: [string]; 15 | }; 16 | 17 | const setupMicrophone = async (): Promise => { 18 | const userMedia = await navigator.mediaDevices.getUserMedia({ 19 | audio: { 20 | echoCancellation: true, 21 | noiseSuppression: true, 22 | autoGainControl: true, 23 | }, 24 | }); 25 | 26 | const mediaRecorder = new MediaRecorder(userMedia); 27 | return mediaRecorder; 28 | }; 29 | 30 | class AudioInputsService extends EventEmitter { 31 | private timeoutId?: number; 32 | 33 | private microphone?: MediaRecorder; 34 | 35 | private socket?: Socket; 36 | 37 | private streamTimeslice: number; 38 | 39 | private reconnectAttemptsTimeout: number; 40 | 41 | private ready = false; 42 | 43 | private playthroughToken?: string; 44 | 45 | private playerSessionId?: string; 46 | 47 | private sttUrl: string; 48 | 49 | private debugLogFunction: (message: string) => void; 50 | 51 | constructor( 52 | streamTimeslice: number | undefined, 53 | reconnectAttemptsTimeout: number | undefined, 54 | sttUrl: string | undefined, 55 | debugLogFunction: (message: string) => void, 56 | ) { 57 | super(); 58 | 59 | this.debugLogFunction = debugLogFunction; 60 | this.debugLogFunction("AudioInputsService running constructor"); 61 | 62 | this.streamTimeslice = streamTimeslice ?? 100; 63 | this.reconnectAttemptsTimeout = reconnectAttemptsTimeout ?? 60 * 1000; 64 | this.sttUrl = sttUrl ?? 
"https://stt.charisma.ai"; 65 | } 66 | 67 | private isReconnecting = false; 68 | 69 | private attemptReconnect = (): void => { 70 | this.debugLogFunction("AudioInputsService attemptReconnect"); 71 | if (this.playthroughToken === undefined || this.isReconnecting) return; 72 | 73 | const reconnectIntervalBase = 2000; 74 | const maxAttempts = 5; 75 | 76 | const reconnectAttempts = 0; 77 | let shouldTryAgain = true; 78 | 79 | this.isReconnecting = true; 80 | 81 | const endReconnect = () => { 82 | shouldTryAgain = false; 83 | this.isReconnecting = false; 84 | }; 85 | 86 | const tryReconnect = (attempt: number) => { 87 | this.debugLogFunction( 88 | `AudioInputsService tryReconnect attempt ${attempt}`, 89 | ); 90 | if (!shouldTryAgain) return; 91 | 92 | if (attempt >= maxAttempts) { 93 | this.emit("error", "Maximum reconnect attempts reached."); 94 | endReconnect(); 95 | return; 96 | } 97 | 98 | this.connect( 99 | this.playthroughToken as string, 100 | this.playerSessionId as string, 101 | ) 102 | .then(() => { 103 | this.debugLogFunction("Reconnected Successfully"); 104 | console.log("Reconnected successfully!"); 105 | endReconnect(); 106 | }) 107 | .catch(() => { 108 | // Exponentially back off the next reconnection attempt 109 | const nextInterval = reconnectIntervalBase * 2 ** attempt; 110 | console.log( 111 | `Reconnect attempt failed. 
Trying again in ${
            nextInterval / 1000
          } seconds...`,
          );

          // Schedule the next attempt only if nothing has ended the
          // reconnect loop in the meantime.
          if (shouldTryAgain) {
            setTimeout(() => tryReconnect(attempt + 1), nextInterval);
          }
        });
    };

    tryReconnect(reconnectAttempts);

    // Hard deadline: abandon reconnecting entirely after
    // reconnectAttemptsTimeout ms, whatever state the attempts are in.
    setTimeout(() => {
      this.debugLogFunction("Reconnect attempts timed out");
      this.emit("error", "Reconnect attempts timed out.");
      endReconnect();
    }, this.reconnectAttemptsTimeout);
  };

  /**
   * Opens a socket to the speech-to-text server. Resolves once the server
   * has accepted the connection and is ready to receive audio.
   *
   * @param token - playthrough token used to authenticate with the server.
   * @param playerSessionId - session identifier passed through to the server.
   */
  public connect = (token: string, playerSessionId: string): Promise<void> => {
    this.debugLogFunction(`AudioInputService connect to ${this.sttUrl}`);

    this.playthroughToken = token;
    this.playerSessionId = playerSessionId;

    return new Promise((resolve, reject) => {
      if (this.socket) {
        this.debugLogFunction("Socket already connected");
        console.log("Socket already connected");
        resolve();
        // Bug fix: previously execution fell through here and created a
        // second socket over the top of the live one, leaking the old
        // connection and stacking duplicate event handlers.
        return;
      }

      this.socket = io(this.sttUrl, {
        transports: ["websocket"],
        query: {
          token,
          playerSessionId,
        },
        reconnection: false,
      });

      this.socket.on("error", (error: string) => {
        this.debugLogFunction(`AudioInputService error: ${error}`);
        console.error(error);
        this.emit("error", error);
        reject(error);
      });

      this.socket.on("transcript", (transcript: string) => {
        this.debugLogFunction(`AudioInputService transcript: ${transcript}`);
        if (transcript) {
          queueMicrotask(() => this.emit("transcript", transcript));
        }
      });

      this.socket.on("transcript-interim", (transcript: string) => {
        this.debugLogFunction(
          `AudioInputService interim transcript: ${transcript}`,
        );
        if (transcript) {
          queueMicrotask(() => this.emit("transcript-interim", transcript));
        }
      });

      // Attempts to reconnect to the stt server if the connection is lost and we DO have internet.
177 | this.socket.on("disconnect", (reason) => { 178 | this.debugLogFunction(`AudioInputService disconnect. ${reason}`); 179 | console.log("Socket disconnected. Reason:", reason); 180 | 181 | this.emit("disconnect", "Disconnected from speech-to-text server."); 182 | this.ready = false; 183 | 184 | if (this.socket) { 185 | this.socket.close(); 186 | this.socket = undefined; 187 | } 188 | 189 | this.microphone = undefined; 190 | 191 | this.attemptReconnect(); 192 | }); 193 | 194 | this.socket.on("connect", () => { 195 | this.debugLogFunction( 196 | "AudioInputService connected to speech-to-text service.", 197 | ); 198 | this.emit("connect", "Connected to speech-to-text service."); 199 | 200 | // Deepgram requires a short interval before data is sent. 201 | setTimeout(() => { 202 | this.ready = true; 203 | resolve(); 204 | }, 2000); 205 | }); 206 | }); 207 | }; 208 | 209 | public disconnect = () => { 210 | this.debugLogFunction("AudioInputService disconnect"); 211 | this.ready = false; 212 | 213 | if (this.socket) { 214 | this.socket.close(); 215 | this.socket = undefined; 216 | } 217 | 218 | this.microphone = undefined; 219 | this.debugLogFunction( 220 | "AudioInputService disconnected from speech-to-text server.", 221 | ); 222 | this.emit("disconnect", "Disconnected from speech-to-text server."); 223 | }; 224 | 225 | public startListening = async (timeout = 10000): Promise => { 226 | this.debugLogFunction("AudioInputService startListening"); 227 | if (!this.ready) { 228 | this.debugLogFunction("AudioInputService startListening not ready"); 229 | return; 230 | } 231 | 232 | try { 233 | if (!this.microphone) { 234 | this.debugLogFunction( 235 | "AudioInputService startListening setting up microphone", 236 | ); 237 | this.microphone = await setupMicrophone(); 238 | } 239 | } catch (error) { 240 | this.debugLogFunction( 241 | "AudioInputService startListening failed to access microphone", 242 | ); 243 | console.error("Failed to access microphone:", error); 244 | 
this.emit("error", "Failed to access microphone"); 245 | return; 246 | } 247 | 248 | if (this.timeoutId !== undefined) { 249 | clearTimeout(this.timeoutId); 250 | } 251 | 252 | if (timeout !== undefined) { 253 | this.timeoutId = window.setTimeout(this.onTimeout, timeout); 254 | } 255 | 256 | this.microphone.ondataavailable = (event) => { 257 | if (!this.socket || event.data.size === 0) { 258 | return; 259 | } 260 | this.socket.emit("packet-sent", event.data); 261 | }; 262 | 263 | this.microphone.onstart = () => { 264 | this.emit("start"); 265 | }; 266 | 267 | this.microphone.onstop = () => { 268 | this.emit("stop"); 269 | }; 270 | 271 | this.microphone.onpause = () => { 272 | this.emit("stop"); 273 | }; 274 | 275 | this.microphone.onresume = () => { 276 | this.emit("start"); 277 | }; 278 | 279 | this.microphone.addEventListener("error", (error) => { 280 | this.emit("error", error.toString()); 281 | this.stopListening(); 282 | }); 283 | 284 | this.microphone.start(this.streamTimeslice); 285 | }; 286 | 287 | public stopListening = (): void => { 288 | this.debugLogFunction("AudioInputService stopListening"); 289 | if (this.timeoutId !== undefined) { 290 | clearTimeout(this.timeoutId); 291 | } 292 | 293 | if (!this.microphone) { 294 | this.debugLogFunction("AudioInputService stopListening !this.microphone"); 295 | this.emit("stop"); 296 | return; 297 | } 298 | 299 | this.microphone.stop(); 300 | 301 | if (!this.socket) { 302 | return; 303 | } 304 | this.debugLogFunction("end-current-transcription"); 305 | this.socket.emit("end-current-transcription"); 306 | }; 307 | 308 | public resetTimeout = (timeout: number): void => { 309 | this.debugLogFunction("AudioInputService resetTimeout"); 310 | if (this.timeoutId !== undefined) { 311 | clearTimeout(this.timeoutId); 312 | } 313 | 314 | this.timeoutId = window.setTimeout(this.onTimeout, timeout); 315 | }; 316 | 317 | private onTimeout = (): void => { 318 | this.debugLogFunction("AudioInputService onTimeout"); 319 | 
this.timeoutId = undefined; 320 | this.emit("timeout"); 321 | this.stopListening(); 322 | }; 323 | } 324 | 325 | export default AudioInputsService; 326 | -------------------------------------------------------------------------------- /src/AudioManager.test.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable dot-notation */ 2 | import MockAudioInputsService from "./__mocks__/MockAudioInputsService"; 3 | import MockAudioInputsBrowser from "./__mocks__/MockAudioInputsBrowser"; 4 | 5 | import AudioManager, { AudioManagerOptions } from "./AudioManager"; 6 | 7 | jest.mock("./AudioInputsService", () => ({ 8 | __esModule: true, 9 | default: MockAudioInputsService, 10 | })); 11 | 12 | jest.mock("./AudioInputsBrowser", () => ({ 13 | __esModule: true, 14 | default: MockAudioInputsBrowser, 15 | })); 16 | 17 | describe("AudioManager", () => { 18 | afterEach(() => { 19 | jest.clearAllMocks(); 20 | }); 21 | 22 | test("should initialise with default options", () => { 23 | const defaultOptions: AudioManagerOptions = {}; 24 | const audioManager = new AudioManager(defaultOptions); 25 | 26 | expect(audioManager["duckVolumeLevel"]).toBe(0); 27 | expect(audioManager["normalVolumeLevel"]).toBe(1); 28 | expect(audioManager["sttService"]).toBe("charisma/deepgram"); 29 | }); 30 | 31 | test("should initialise with provided options", () => { 32 | const mockOptions: AudioManagerOptions = { 33 | duckVolumeLevel: 0.2, 34 | normalVolumeLevel: 2, 35 | sttService: "browser", 36 | }; 37 | 38 | const audioManager = new AudioManager(mockOptions); 39 | 40 | expect(audioManager["duckVolumeLevel"]).toBe(0.2); 41 | expect(audioManager["normalVolumeLevel"]).toBe(2); 42 | expect(audioManager["sttService"]).toBe("browser"); 43 | }); 44 | 45 | test("microphone methods should call on audioInputsBrowser when browser is used", () => { 46 | const mockOptions: AudioManagerOptions = { 47 | sttService: "browser", 48 | }; 49 | 50 | const 
mockAudioInputsBrowserInstance = new MockAudioInputsBrowser(); 51 | const mockAudioInputsServiceInstance = new MockAudioInputsService(); 52 | const audioManager = new AudioManager(mockOptions); 53 | 54 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access 55 | (audioManager as any).audioInputsBrowser = mockAudioInputsBrowserInstance; 56 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access 57 | (audioManager as any).audioInputsService = mockAudioInputsServiceInstance; 58 | 59 | audioManager.startListening(); 60 | audioManager.stopListening(); 61 | audioManager.resetTimeout(100); 62 | 63 | expect(mockAudioInputsBrowserInstance.startListening).toHaveBeenCalled(); 64 | expect(mockAudioInputsBrowserInstance.stopListening).toHaveBeenCalled(); 65 | expect(mockAudioInputsBrowserInstance.resetTimeout).toHaveBeenCalledWith( 66 | 100, 67 | ); 68 | 69 | expect( 70 | mockAudioInputsServiceInstance.startListening, 71 | ).not.toHaveBeenCalled(); 72 | expect(mockAudioInputsServiceInstance.stopListening).not.toHaveBeenCalled(); 73 | expect(mockAudioInputsServiceInstance.resetTimeout).not.toHaveBeenCalled(); 74 | }); 75 | 76 | test("startListening should call startListening on audioInputsService", () => { 77 | const mockOptions: AudioManagerOptions = { 78 | sttService: "charisma/deepgram", 79 | }; 80 | 81 | const mockAudioInputsBrowserInstance = new MockAudioInputsBrowser(); 82 | const mockAudioInputsServiceInstance = new MockAudioInputsService(); 83 | const audioManager = new AudioManager(mockOptions); 84 | 85 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access 86 | (audioManager as any).audioInputsBrowser = mockAudioInputsBrowserInstance; 87 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access 88 | (audioManager as any).audioInputsService = mockAudioInputsServiceInstance; 89 | 90 | audioManager.startListening(); 91 | 92 | expect(mockAudioInputsServiceInstance.startListening).toHaveBeenCalled(); 93 | 94 
| expect( 95 | mockAudioInputsBrowserInstance.startListening, 96 | ).not.toHaveBeenCalled(); 97 | }); 98 | 99 | test("stopListening should call stopListening on audioInputsService", () => { 100 | const mockOptions: AudioManagerOptions = { 101 | sttService: "charisma/deepgram", 102 | }; 103 | 104 | const mockAudioInputsBrowserInstance = new MockAudioInputsBrowser(); 105 | const mockAudioInputsServiceInstance = new MockAudioInputsService(); 106 | const audioManager = new AudioManager(mockOptions); 107 | 108 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access 109 | (audioManager as any).audioInputsBrowser = mockAudioInputsBrowserInstance; 110 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access 111 | (audioManager as any).audioInputsService = mockAudioInputsServiceInstance; 112 | 113 | audioManager.stopListening(); 114 | 115 | expect(mockAudioInputsServiceInstance.stopListening).toHaveBeenCalled(); 116 | 117 | expect(mockAudioInputsBrowserInstance.stopListening).not.toHaveBeenCalled(); 118 | }); 119 | 120 | test("resetTimeout should call connect on audioInputsService", () => { 121 | const mockOptions: AudioManagerOptions = { 122 | sttService: "charisma/deepgram", 123 | }; 124 | 125 | const mockAudioInputsBrowserInstance = new MockAudioInputsBrowser(); 126 | const mockAudioInputsServiceInstance = new MockAudioInputsService(); 127 | const audioManager = new AudioManager(mockOptions); 128 | 129 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access 130 | (audioManager as any).audioInputsBrowser = mockAudioInputsBrowserInstance; 131 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access 132 | (audioManager as any).audioInputsService = mockAudioInputsServiceInstance; 133 | 134 | audioManager.resetTimeout(100); 135 | 136 | expect(mockAudioInputsServiceInstance.resetTimeout).toHaveBeenCalledWith( 137 | 100, 138 | ); 139 | 140 | 
expect(mockAudioInputsBrowserInstance.resetTimeout).not.toHaveBeenCalled(); 141 | }); 142 | 143 | test("startListening should call startListening on audioInputsBrowser", () => { 144 | const mockOptions: AudioManagerOptions = { 145 | sttService: "browser", 146 | }; 147 | 148 | const mockAudioInputsBrowserInstance = new MockAudioInputsBrowser(); 149 | const mockAudioInputsServiceInstance = new MockAudioInputsService(); 150 | const audioManager = new AudioManager(mockOptions); 151 | 152 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access 153 | (audioManager as any).audioInputsBrowser = mockAudioInputsBrowserInstance; 154 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access 155 | (audioManager as any).audioInputsService = mockAudioInputsServiceInstance; 156 | 157 | audioManager.startListening(); 158 | 159 | expect(mockAudioInputsBrowserInstance.startListening).toHaveBeenCalled(); 160 | 161 | expect( 162 | mockAudioInputsServiceInstance.startListening, 163 | ).not.toHaveBeenCalled(); 164 | }); 165 | 166 | test("stopListening should call stopListening on audioInputsBrowser", () => { 167 | const mockOptions: AudioManagerOptions = { 168 | sttService: "browser", 169 | }; 170 | 171 | const mockAudioInputsBrowserInstance = new MockAudioInputsBrowser(); 172 | const mockAudioInputsServiceInstance = new MockAudioInputsService(); 173 | const audioManager = new AudioManager(mockOptions); 174 | 175 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access 176 | (audioManager as any).audioInputsBrowser = mockAudioInputsBrowserInstance; 177 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access 178 | (audioManager as any).audioInputsService = mockAudioInputsServiceInstance; 179 | 180 | audioManager.stopListening(); 181 | 182 | expect(mockAudioInputsBrowserInstance.stopListening).toHaveBeenCalled(); 183 | 184 | expect(mockAudioInputsServiceInstance.stopListening).not.toHaveBeenCalled(); 185 | }); 186 | 187 | 
test("resetTimeout should call connect on audioInputsBRowser", () => { 188 | const mockOptions: AudioManagerOptions = { 189 | sttService: "browser", 190 | }; 191 | 192 | const mockAudioInputsBrowserInstance = new MockAudioInputsBrowser(); 193 | const mockAudioInputsServiceInstance = new MockAudioInputsService(); 194 | const audioManager = new AudioManager(mockOptions); 195 | 196 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access 197 | (audioManager as any).audioInputsBrowser = mockAudioInputsBrowserInstance; 198 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access 199 | (audioManager as any).audioInputsService = mockAudioInputsServiceInstance; 200 | 201 | audioManager.resetTimeout(100); 202 | 203 | expect(mockAudioInputsBrowserInstance.resetTimeout).toHaveBeenCalledWith( 204 | 100, 205 | ); 206 | 207 | expect(mockAudioInputsServiceInstance.resetTimeout).not.toHaveBeenCalled(); 208 | }); 209 | 210 | test("connect should call AudioInputsService.connect with the correct token", () => { 211 | const mockAudioInputsServiceInstance = new MockAudioInputsService(); 212 | const audioManager = new AudioManager({}); 213 | 214 | // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access 215 | (audioManager as any).audioInputsService = mockAudioInputsServiceInstance; 216 | 217 | const token = "test-token"; 218 | const playerSessionId = "test-player-session-id"; 219 | 220 | audioManager.connect(token, playerSessionId); 221 | 222 | expect(mockAudioInputsServiceInstance.connect).toHaveBeenCalledWith( 223 | token, 224 | playerSessionId, 225 | ); 226 | }); 227 | 228 | test("browserIsSupported should return the value from AudioInputsBrowser", () => { 229 | const mockOptions: AudioManagerOptions = { 230 | sttService: "browser", 231 | }; 232 | 233 | const mockAudioInputsBrowserInstance = new MockAudioInputsBrowser(); 234 | const audioManager = new AudioManager(mockOptions); 235 | 236 | // eslint-disable-next-line 
@typescript-eslint/no-unsafe-member-access 237 | (audioManager as any).audioInputsBrowser = mockAudioInputsBrowserInstance; 238 | 239 | mockAudioInputsBrowserInstance.isSupported = false; 240 | 241 | expect(audioManager.browserIsSupported()).toBe(false); 242 | }); 243 | }); 244 | -------------------------------------------------------------------------------- /src/AudioManager.ts: -------------------------------------------------------------------------------- 1 | import AudioTrackManager from "./AudioTrackManager.js"; 2 | import AudioInputsService from "./AudioInputsService.js"; 3 | import AudioOutputsService, { 4 | AudioOutputsServicePlayOptions, 5 | } from "./AudioOutputsService.js"; 6 | import AudioInputsBrowser from "./AudioInputsBrowser.js"; 7 | import { AudioTrack } from "./types.js"; 8 | 9 | export interface AudioManagerOptions { 10 | duckVolumeLevel?: number; 11 | normalVolumeLevel?: number; 12 | sttService?: "browser" | "charisma/deepgram"; 13 | streamTimeslice?: number; 14 | reconnectAttemptsTimeout?: number; 15 | sttUrl?: string; 16 | handleStartSTT?: () => void; 17 | handleStopSTT?: () => void; 18 | handleTranscript?: (transcript: string) => void; 19 | handleInterimTranscript?: (transcript: string) => void; 20 | handleError?: (error: string) => void; 21 | handleDisconnect?: (message: string) => void; 22 | handleConnect?: (message: string) => void; 23 | debugLogFunction?: (message: string) => void; 24 | } 25 | 26 | class AudioManager { 27 | private audioInputsService: AudioInputsService; 28 | 29 | private audioInputsBrowser: AudioInputsBrowser; 30 | 31 | private audioOutputsService: AudioOutputsService; 32 | 33 | private audioTrackManager: AudioTrackManager; 34 | 35 | private duckVolumeLevel: number; 36 | 37 | private normalVolumeLevel: number; 38 | 39 | private sttService: "browser" | "charisma/deepgram"; 40 | 41 | private microphoneIsOn = false; 42 | 43 | private debugLogFunction: (message: string) => void; 44 | 45 | constructor(options: 
AudioManagerOptions) { 46 | // eslint-disable-next-line @typescript-eslint/no-empty-function 47 | this.debugLogFunction = options.debugLogFunction || (() => {}); 48 | this.debugLogFunction("AudioManager running constructor"); 49 | this.duckVolumeLevel = options.duckVolumeLevel ?? 0; 50 | this.normalVolumeLevel = options.normalVolumeLevel ?? 1; 51 | this.sttService = options.sttService ?? "charisma/deepgram"; 52 | 53 | this.audioInputsService = new AudioInputsService( 54 | options.streamTimeslice, 55 | options.reconnectAttemptsTimeout, 56 | options.sttUrl, 57 | this.debugLogFunction, 58 | ); 59 | this.audioInputsBrowser = new AudioInputsBrowser(); 60 | this.audioOutputsService = new AudioOutputsService(this.debugLogFunction); 61 | this.audioTrackManager = new AudioTrackManager(); 62 | 63 | // Listen to events from the AudioInputsService 64 | this.audioInputsService.on( 65 | "start", 66 | options.handleStartSTT ?? 67 | (() => console.error("handleStartSTT() is not setup")), 68 | ); 69 | this.audioInputsService.on( 70 | "stop", 71 | options.handleStopSTT ?? 72 | (() => console.error("handleStopSTT() is not setup")), 73 | ); 74 | this.audioInputsService.on( 75 | "transcript", 76 | options.handleTranscript ?? 77 | (() => console.error("handleTranscript() is not setup.")), 78 | ); 79 | this.audioInputsService.on( 80 | "transcript-interim", 81 | options.handleInterimTranscript ?? 82 | (() => console.log("handleInterimTranscript() is not setup.")), 83 | ); 84 | this.audioInputsService.on("error", options.handleError ?? console.error); 85 | this.audioInputsService.on( 86 | "disconnect", 87 | options.handleDisconnect ?? console.error, 88 | ); 89 | this.audioInputsService.on("connect", options.handleConnect ?? console.log); 90 | 91 | // Listen to events from the AudioInputsBrowser 92 | this.audioInputsBrowser.on( 93 | "start", 94 | options.handleStartSTT ?? 
95 | (() => console.error("handleStartSTT() is not setup")), 96 | ); 97 | this.audioInputsBrowser.on( 98 | "stop", 99 | options.handleStopSTT ?? 100 | (() => console.error("handleStopSTT() is not setup")), 101 | ); 102 | this.audioInputsBrowser.on( 103 | "transcript", 104 | options.handleTranscript ?? 105 | (() => console.error("handleTranscript() is not setup")), 106 | ); 107 | this.audioInputsBrowser.on( 108 | "transcript-interim", 109 | options.handleInterimTranscript ?? 110 | (() => console.log("handleInterimTranscript() is not setup")), 111 | ); 112 | this.audioInputsBrowser.on("error", options.handleError ?? console.error); 113 | 114 | // Listen to events from the AudioOutputsService 115 | this.audioOutputsService.on("start", () => { 116 | if (this.microphoneIsOn) { 117 | this.audioOutputsService.beginMuting(); 118 | } else { 119 | this.audioOutputsService.endMuting(); 120 | } 121 | }); 122 | this.audioOutputsService.on("stop", () => { 123 | if (this.microphoneIsOn) { 124 | this.audioOutputsService.beginMuting(); 125 | } else { 126 | this.audioOutputsService.endMuting(); 127 | } 128 | }); 129 | this.debugLogFunction("AudioManager finished constructor"); 130 | } 131 | 132 | // ** 133 | // ** Audio Input ** // 134 | // ** 135 | public startListening = (timeout?: number): void => { 136 | this.debugLogFunction("AudioManager startListening"); 137 | if (this.sttService === "browser") { 138 | this.audioInputsBrowser.startListening(timeout); 139 | } else if (this.sttService === "charisma/deepgram") { 140 | this.audioInputsService.startListening(timeout); 141 | } 142 | 143 | this.microphoneIsOn = true; 144 | this.audioOutputsService.beginMuting(); 145 | 146 | if (this.audioTrackManager.isPlaying) { 147 | this.audioTrackManager.setVolume(this.duckVolumeLevel); 148 | } 149 | }; 150 | 151 | public stopListening = (): void => { 152 | this.debugLogFunction("AudioManager stopListening"); 153 | if (this.sttService === "browser") { 154 | 
this.audioInputsBrowser.stopListening(); 155 | } else if (this.sttService === "charisma/deepgram") { 156 | this.audioInputsService.stopListening(); 157 | } 158 | 159 | this.microphoneIsOn = false; 160 | 161 | this.audioOutputsService.endMuting(); 162 | 163 | if (this.audioTrackManager.isPlaying) { 164 | this.audioTrackManager.setVolume(this.normalVolumeLevel); 165 | } 166 | }; 167 | 168 | public connect = (token: string, playerSessionId: string): void => { 169 | this.debugLogFunction("AudioManager connect"); 170 | if (this.sttService === "charisma/deepgram") { 171 | this.audioInputsService.connect(token, playerSessionId); 172 | } 173 | }; 174 | 175 | public disconnect = (): void => { 176 | this.debugLogFunction("AudioManager disconnect"); 177 | if (this.sttService === "charisma/deepgram") { 178 | this.audioInputsService.disconnect(); 179 | } 180 | }; 181 | 182 | public resetTimeout = (timeout: number): void => { 183 | this.debugLogFunction("AudioManager resetTimeout"); 184 | if (this.sttService === "charisma/deepgram") { 185 | this.audioInputsService.resetTimeout(timeout); 186 | } else { 187 | this.audioInputsBrowser.resetTimeout(timeout); 188 | } 189 | }; 190 | 191 | // ** 192 | // ** Browser STT Service ** // 193 | // ** 194 | public browserIsSupported = (): boolean => { 195 | this.debugLogFunction("AudioManager browserIsSupported"); 196 | return this.audioInputsBrowser.isSupported; 197 | }; 198 | 199 | // ** 200 | // ** Initialise Audio 201 | // ** 202 | public initialise = (): void => { 203 | this.debugLogFunction("AudioManager initialise"); 204 | const outputContext = this.audioOutputsService.getAudioContext(); 205 | const trackContext = this.audioTrackManager.getAudioContext(); 206 | const resumeAudio = () => { 207 | outputContext.resume(); 208 | trackContext.resume(); 209 | }; 210 | document.addEventListener("pointerdown", resumeAudio, { once: true }); 211 | document.addEventListener("keydown", resumeAudio, { once: true }); 212 | }; 213 | 214 | // ** 215 | 
// ** Audio Outputs Service ** // 216 | // ** 217 | public playCharacterSpeech = ( 218 | audio: ArrayBuffer, 219 | options: boolean | AudioOutputsServicePlayOptions, 220 | ): Promise => { 221 | this.debugLogFunction("AudioManager playCharacterSpeech"); 222 | return this.audioOutputsService.play(audio, options); 223 | }; 224 | 225 | public get characterSpeechVolume(): number { 226 | return this.audioOutputsService.normalVolume; 227 | } 228 | 229 | public set characterSpeechVolume(volume: number) { 230 | this.debugLogFunction("AudioManager outputServiceSetVolume"); 231 | this.audioOutputsService.setNormalVolume(volume); 232 | } 233 | 234 | // ** 235 | // ** Audio Track Manager ** // 236 | // ** 237 | public mediaAudioPlay = (audioTracks: AudioTrack[]): void => { 238 | this.debugLogFunction("AudioManager mediaAudioPlay"); 239 | this.audioTrackManager.play(audioTracks); 240 | }; 241 | 242 | public mediaAudioSetVolume = (volume: number): void => { 243 | this.debugLogFunction("AudioManager mediaAudioSetVolume"); 244 | this.audioTrackManager.setVolume(volume); 245 | }; 246 | 247 | public mediaAudioToggleMute = (): void => { 248 | this.debugLogFunction("AudioManager mediaAudioToggleMute"); 249 | this.audioTrackManager.toggleMute(); 250 | }; 251 | 252 | public mediaAudioStopAll = (): void => { 253 | this.debugLogFunction("AudioManager mediaAudioStopAll"); 254 | this.audioTrackManager.stopAll(); 255 | }; 256 | } 257 | 258 | export default AudioManager; 259 | -------------------------------------------------------------------------------- /src/AudioOutputsService.ts: -------------------------------------------------------------------------------- 1 | import { EventEmitter } from "eventemitter3"; 2 | 3 | interface Constructable { 4 | new (): T; 5 | } 6 | 7 | interface WindowWithAudioContext extends Window { 8 | AudioContext?: Constructable; 9 | webkitAudioContext?: Constructable; 10 | } 11 | 12 | declare const window: WindowWithAudioContext; 13 | 14 | type 
AudioOutputsServiceEvents = { 15 | start: []; 16 | stop: []; 17 | }; 18 | 19 | export type AudioOutputsServicePlayOptions = { 20 | /** 21 | * Whether to interrupt the same track as the `trackId` passed (`track`), all currently playing audio (`all`), or not to interrupt anything (`none`). Default is `none`. 22 | */ 23 | interrupt?: "track" | "all" | "none"; 24 | /** 25 | * If you want to prevent a particular character to speak over themselves, a `trackId` can be set to a unique string. When playing another speech clip, if the same `trackId` is passed and `interrupt` is set to `true`, then the previous clip will stop playing. Default is unset. 26 | */ 27 | trackId?: string; 28 | }; 29 | 30 | type AudioOutputsServiceSource = { 31 | sourceNode: AudioBufferSourceNode; 32 | trackId?: string; 33 | }; 34 | 35 | class AudioOutputsService extends EventEmitter { 36 | private audioContext: AudioContext | undefined; 37 | 38 | private muteGainNode: GainNode | null = null; 39 | 40 | private volumeGainNode: GainNode | null = null; 41 | 42 | public normalVolume = 1; 43 | 44 | private currentSources: AudioOutputsServiceSource[] = []; 45 | 46 | private debugLogFunction: (message: string) => void; 47 | 48 | constructor(debugLogFunction: (message: string) => void) { 49 | super(); 50 | this.debugLogFunction = debugLogFunction; 51 | } 52 | 53 | public getAudioContext = (): AudioContext => { 54 | this.debugLogFunction("AudioOutputsService getAudioContext"); 55 | if (this.audioContext) { 56 | return this.audioContext; 57 | } 58 | 59 | const AudioContextClass = window.AudioContext || window.webkitAudioContext; 60 | 61 | if (!AudioContextClass) { 62 | throw new Error("AudioContext isn't supported in this browser."); 63 | } 64 | 65 | this.audioContext = new AudioContextClass(); 66 | 67 | // Create and store the gain node. 
68 | this.muteGainNode = this.audioContext.createGain(); 69 | this.volumeGainNode = this.audioContext.createGain(); 70 | 71 | this.muteGainNode.gain.setValueAtTime(1, this.audioContext.currentTime); 72 | this.volumeGainNode.gain.setValueAtTime( 73 | this.normalVolume, 74 | this.audioContext.currentTime, 75 | ); 76 | 77 | this.volumeGainNode.connect(this.muteGainNode); 78 | this.muteGainNode.connect(this.audioContext.destination); 79 | 80 | return this.audioContext; 81 | }; 82 | 83 | public play = async ( 84 | audio: ArrayBuffer, 85 | options: boolean | AudioOutputsServicePlayOptions = {}, 86 | ): Promise => { 87 | this.debugLogFunction("AudioOutputsService play"); 88 | 89 | // Backwards-compatible with the old boolean `interrupt` parameter 90 | if (typeof options === "boolean") { 91 | console.warn( 92 | "Passing a boolean as the second parameter to `speaker.play()` is deprecated, and should be updated to use an `options` object.", 93 | ); 94 | // eslint-disable-next-line no-param-reassign 95 | options = { interrupt: options ? 
"all" : "none" }; 96 | } 97 | 98 | const { interrupt = "none", trackId } = options; 99 | 100 | const audioContext = this.getAudioContext(); 101 | 102 | if (!this.volumeGainNode) { 103 | throw new Error("volumeGainNode is not initialized."); 104 | } 105 | 106 | const source = audioContext.createBufferSource(); 107 | source.connect(this.volumeGainNode); 108 | source.buffer = await new Promise((resolve, reject): void => { 109 | audioContext.decodeAudioData(audio, resolve, reject); 110 | }); 111 | 112 | return new Promise((resolve): void => { 113 | source.onended = (): void => { 114 | resolve(); 115 | this.currentSources = this.currentSources.filter( 116 | (currentSource) => currentSource.sourceNode !== source, 117 | ); 118 | if (this.currentSources.length === 0) { 119 | this.emit("stop"); 120 | } 121 | }; 122 | if (this.currentSources.length > 0 && interrupt !== "none") { 123 | this.currentSources.forEach((currentSource) => { 124 | if ( 125 | interrupt === "all" || 126 | (interrupt === "track" && currentSource.trackId === trackId) 127 | ) { 128 | currentSource.sourceNode.stop(); 129 | } 130 | }); 131 | } 132 | if (this.currentSources.length === 0) { 133 | this.emit("start"); 134 | } 135 | this.currentSources.push({ sourceNode: source, trackId }); 136 | source.start(); 137 | }); 138 | }; 139 | 140 | public setNormalVolume = (volume: number): void => { 141 | this.debugLogFunction(`AudioOutputsService setNormalVolume ${volume}`); 142 | if (!this.volumeGainNode || !this.audioContext) return; 143 | 144 | // Clamp the volume to the range [0, 1] 145 | const clampedVolume = Math.max(0, Math.min(1, volume)); 146 | 147 | // record volume on a variable in case volume is requested before ramp has finished 148 | this.normalVolume = clampedVolume; 149 | 150 | // smooth ramp to new value 151 | this.volumeGainNode.gain.setValueAtTime( 152 | this.volumeGainNode.gain.value, 153 | this.audioContext.currentTime, 154 | ); 155 | this.volumeGainNode.gain.linearRampToValueAtTime( 156 | 
clampedVolume, 157 | this.audioContext.currentTime + 0.1, 158 | ); 159 | }; 160 | 161 | public beginMuting = (): void => { 162 | this.debugLogFunction(`AudioOutputsService beginMuting`); 163 | if (!this.muteGainNode || !this.audioContext) return; 164 | 165 | // Fade out quickly 166 | this.muteGainNode.gain.setValueAtTime( 167 | this.muteGainNode.gain.value, 168 | this.audioContext.currentTime, 169 | ); 170 | this.muteGainNode.gain.linearRampToValueAtTime( 171 | 0, 172 | this.audioContext.currentTime + 0.05, 173 | ); 174 | }; 175 | 176 | public endMuting = (): void => { 177 | this.debugLogFunction(`AudioOutputsService endMuting`); 178 | if (!this.muteGainNode || !this.audioContext) return; 179 | 180 | // Fade in very quickly 181 | this.muteGainNode.gain.setValueAtTime( 182 | this.muteGainNode.gain.value, 183 | this.audioContext.currentTime, 184 | ); 185 | this.muteGainNode.gain.linearRampToValueAtTime( 186 | 1, 187 | this.audioContext.currentTime + 0.01, 188 | ); 189 | }; 190 | } 191 | 192 | export default AudioOutputsService; 193 | -------------------------------------------------------------------------------- /src/AudioTrackManager.test.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable dot-notation */ 2 | import AudioTrackManager from "./AudioTrackManager"; 3 | import { AudioTrackBehaviour } from "./types"; 4 | 5 | globalThis.AudioContext = jest.fn().mockImplementation(() => { 6 | const gainNodeMock = { 7 | gain: { value: 1 }, 8 | connect: jest.fn().mockReturnThis(), // return `this` to allow chaining 9 | }; 10 | 11 | const bufferSourceMock = { 12 | buffer: null, 13 | loop: false, 14 | connect: jest.fn().mockReturnValue(gainNodeMock), // Mock to allow chaining 15 | start: jest.fn(), 16 | stop: jest.fn(), 17 | onended: jest.fn(), 18 | }; 19 | 20 | return { 21 | createGain: jest.fn().mockReturnValue(gainNodeMock), 22 | createBufferSource: jest.fn().mockReturnValue(bufferSourceMock), 23 | decodeAudioData: 
jest.fn().mockImplementation(() => 24 | Promise.resolve({ 25 | duration: 120, 26 | sampleRate: 44100, 27 | length: 5292000, 28 | numberOfChannels: 2, 29 | getChannelData: jest.fn(), 30 | }), 31 | ), 32 | destination: { 33 | connect: jest.fn(), // Mock connect on the destination as well 34 | }, 35 | }; 36 | }); 37 | 38 | globalThis.fetch = jest.fn(() => 39 | Promise.resolve({ 40 | ok: true, 41 | status: 200, 42 | statusText: "OK", 43 | headers: new Headers(), 44 | url: "", 45 | redirected: false, 46 | type: "basic", 47 | body: null, 48 | bodyUsed: false, 49 | clone: jest.fn(), 50 | arrayBuffer: () => Promise.resolve(new ArrayBuffer(8)), 51 | json: jest.fn(), 52 | text: jest.fn(), 53 | formData: jest.fn(), 54 | blob: jest.fn(), 55 | } as unknown as Response), 56 | ); 57 | 58 | describe("AudioTrackManager", () => { 59 | afterEach(() => { 60 | jest.clearAllMocks(); 61 | }); 62 | 63 | test("should initialize with isPlaying as false and no currentAudio", () => { 64 | const audioTrackManager = new AudioTrackManager(); 65 | 66 | expect(audioTrackManager.isPlaying).toBe(false); 67 | expect(audioTrackManager["currentAudio"]).toEqual([]); 68 | }); 69 | 70 | test("should play new audio tracks", async () => { 71 | const audioTrackManager = new AudioTrackManager(); 72 | 73 | const audioTracks = [ 74 | { 75 | url: "track1.mp3", 76 | loop: false, 77 | volume: 0.5, 78 | behaviour: AudioTrackBehaviour.Restart, 79 | stopPlaying: false, 80 | }, 81 | { 82 | url: "track2.mp3", 83 | loop: true, 84 | volume: 0.8, 85 | behaviour: AudioTrackBehaviour.Continue, 86 | stopPlaying: false, 87 | }, 88 | ]; 89 | 90 | await audioTrackManager.play(audioTracks); 91 | 92 | expect(audioTrackManager.isPlaying).toBe(true); 93 | expect(audioTrackManager["currentAudio"]).toHaveLength(2); 94 | }); 95 | 96 | test("should not play audio if audioTracks array is empty", () => { 97 | const audioTrackManager = new AudioTrackManager(); 98 | 99 | audioTrackManager.play([]); 100 | 101 | 
expect(audioTrackManager.isPlaying).toBe(false); 102 | }); 103 | 104 | test("should stop all currently playing audio tracks", async () => { 105 | const audioTrackManager = new AudioTrackManager(); 106 | const audioTracks = [ 107 | { 108 | url: "track1.mp3", 109 | loop: false, 110 | volume: 0.5, 111 | behaviour: AudioTrackBehaviour.Restart, 112 | stopPlaying: false, 113 | }, 114 | { 115 | url: "track2.mp3", 116 | loop: true, 117 | volume: 0.8, 118 | behaviour: AudioTrackBehaviour.Continue, 119 | stopPlaying: false, 120 | }, 121 | ]; 122 | 123 | await audioTrackManager.play(audioTracks); 124 | audioTrackManager.stopAll(); 125 | 126 | expect(audioTrackManager.isPlaying).toBe(false); 127 | expect(audioTrackManager["currentAudio"]).toEqual([]); 128 | }); 129 | 130 | test("should toggle mute on all currently playing audio tracks", async () => { 131 | const audioTrackManager = new AudioTrackManager(); 132 | const audioTracks = [ 133 | { 134 | url: "track1.mp3", 135 | loop: false, 136 | volume: 0.5, 137 | behaviour: AudioTrackBehaviour.Restart, 138 | stopPlaying: false, 139 | }, 140 | ]; 141 | 142 | await audioTrackManager.play(audioTracks); 143 | 144 | audioTrackManager.toggleMute(); 145 | expect(audioTrackManager["currentAudio"][0].gainNode.gain.value).toBe(0); 146 | 147 | audioTrackManager.toggleMute(); 148 | expect(audioTrackManager["currentAudio"][0].gainNode.gain.value).toBe(1); 149 | }); 150 | 151 | test("should set the volume for all currently playing audio tracks", async () => { 152 | const audioTrackManager = new AudioTrackManager(); 153 | const audioTracks = [ 154 | { 155 | url: "track2.mp3", 156 | loop: true, 157 | volume: 0.8, 158 | behaviour: AudioTrackBehaviour.Continue, 159 | stopPlaying: false, 160 | }, 161 | ]; 162 | 163 | await audioTrackManager.play(audioTracks); 164 | 165 | audioTrackManager.setVolume(0.5); 166 | 167 | expect(audioTrackManager["currentAudio"][0].gainNode.gain.value).toBe(0.4); 168 | 169 | audioTrackManager.setVolume(0.25); 170 | 171 | 
expect(audioTrackManager["currentAudio"][0].gainNode.gain.value).toBe(0.2); 172 | 173 | audioTrackManager.setVolume(1); 174 | 175 | expect(audioTrackManager["currentAudio"][0].gainNode.gain.value).toBe(0.8); 176 | }); 177 | 178 | test("should restart an audio track when behaviour is set to 'restart'", async () => { 179 | const audioTrackManager = new AudioTrackManager(); 180 | const audioTracks = [ 181 | { 182 | url: "track1.mp3", 183 | loop: false, 184 | volume: 0.5, 185 | behaviour: AudioTrackBehaviour.Restart, 186 | stopPlaying: false, 187 | }, 188 | { 189 | url: "track2.mp3", 190 | loop: true, 191 | volume: 0.8, 192 | behaviour: AudioTrackBehaviour.Continue, 193 | stopPlaying: false, 194 | }, 195 | ]; 196 | 197 | await audioTrackManager.play(audioTracks); 198 | 199 | // Play the same track again, triggering the restart behavior 200 | audioTrackManager.play([audioTracks[0]]); 201 | 202 | expect(audioTrackManager["currentAudio"]).toHaveLength(1); 203 | }); 204 | }); 205 | -------------------------------------------------------------------------------- /src/AudioTrackManager.ts: -------------------------------------------------------------------------------- 1 | import { AudioTrack } from "./types.js"; 2 | 3 | interface Constructable { 4 | new (): T; 5 | } 6 | 7 | interface WindowWithAudioContext extends Window { 8 | AudioContext?: Constructable; 9 | webkitAudioContext?: Constructable; 10 | } 11 | 12 | declare const window: WindowWithAudioContext; 13 | 14 | class AudioTrackManager { 15 | private audioContext: AudioContext | undefined; 16 | 17 | public isPlaying: boolean; 18 | 19 | private currentAudio: { 20 | source: AudioBufferSourceNode; 21 | gainNode: GainNode; 22 | url: string; 23 | originalVolume: number; 24 | }[]; 25 | 26 | private muted = false; 27 | 28 | constructor() { 29 | this.isPlaying = false; 30 | this.currentAudio = []; 31 | } 32 | 33 | private async loadAudioBuffer(url: string): Promise { 34 | if (this.audioContext === undefined) return undefined; 
35 | 36 | const response = await fetch(url); 37 | const arrayBuffer = await response.arrayBuffer(); 38 | return this.audioContext.decodeAudioData(arrayBuffer); 39 | } 40 | 41 | private async playNewSource(audioTrack: AudioTrack): Promise { 42 | if (!audioTrack.url) return; 43 | 44 | const audioBuffer = await this.loadAudioBuffer(audioTrack.url); 45 | if (this.audioContext === undefined) return; 46 | 47 | const gainNode = this.audioContext.createGain(); 48 | gainNode.gain.value = audioTrack.volume; 49 | 50 | const source = this.audioContext.createBufferSource(); 51 | if (audioBuffer === undefined) return; 52 | source.buffer = audioBuffer; 53 | source.loop = audioTrack.loop; 54 | source.connect(gainNode).connect(this.audioContext.destination); 55 | source.start(0); 56 | 57 | source.onended = () => { 58 | this.currentAudio = this.currentAudio.filter( 59 | (currentAudio) => currentAudio.source !== source, 60 | ); 61 | }; 62 | 63 | this.currentAudio.push({ 64 | source, 65 | gainNode, 66 | url: audioTrack.url, 67 | originalVolume: audioTrack.volume, 68 | }); 69 | } 70 | 71 | public getAudioContext = (): AudioContext => { 72 | if (this.audioContext) { 73 | return this.audioContext; 74 | } 75 | 76 | const AudioContextClass = window.AudioContext || window.webkitAudioContext; 77 | 78 | if (!AudioContextClass) { 79 | throw new Error("AudioContext isn't supported in this browser."); 80 | } 81 | 82 | this.audioContext = new AudioContextClass(); 83 | 84 | return this.audioContext; 85 | }; 86 | 87 | public async play(audioTracks: AudioTrack[]): Promise { 88 | if (audioTracks.length === 0) return; 89 | if (this.audioContext === undefined) { 90 | this.getAudioContext(); 91 | } 92 | 93 | this.isPlaying = true; 94 | 95 | await Promise.all( 96 | audioTracks.map(async (audioTrack) => { 97 | if (!audioTrack.url) return; 98 | 99 | const index = this.currentAudio.findIndex( 100 | (currentAudio) => currentAudio.url === audioTrack.url, 101 | ); 102 | 103 | if (index === -1) { 104 | await 
this.playNewSource(audioTrack); 105 | } else { 106 | if (audioTrack.stopPlaying) { 107 | this.currentAudio[index].source.stop(); 108 | this.currentAudio = this.currentAudio.filter( 109 | (currentAudio) => currentAudio.url !== audioTrack.url, 110 | ); 111 | return; 112 | } 113 | 114 | if (audioTrack.behaviour === "restart") { 115 | this.currentAudio[index].source.stop(); 116 | this.currentAudio = this.currentAudio.filter( 117 | (currentAudio) => currentAudio.url !== audioTrack.url, 118 | ); 119 | await this.playNewSource(audioTrack); 120 | } 121 | } 122 | }), 123 | ); 124 | } 125 | 126 | public pause(): void { 127 | this.isPlaying = false; 128 | this.currentAudio.forEach(({ source }) => { 129 | source.stop(); 130 | }); 131 | } 132 | 133 | public stopAll(): void { 134 | this.currentAudio.forEach(({ source }) => { 135 | source.stop(); 136 | }); 137 | this.currentAudio = []; 138 | this.isPlaying = false; 139 | } 140 | 141 | public toggleMute(): void { 142 | this.muted = !this.muted; 143 | 144 | this.currentAudio.forEach(({ gainNode }) => { 145 | // eslint-disable-next-line no-param-reassign 146 | gainNode.gain.value = this.muted ? 
0 : 1; 147 | }); 148 | } 149 | 150 | public setVolume(volume: number): void { 151 | this.currentAudio.forEach(({ gainNode, originalVolume }) => { 152 | // eslint-disable-next-line no-param-reassign 153 | gainNode.gain.value = originalVolume * volume; 154 | }); 155 | } 156 | } 157 | 158 | export default AudioTrackManager; 159 | -------------------------------------------------------------------------------- /src/Conversation.ts: -------------------------------------------------------------------------------- 1 | import { EventEmitter } from "eventemitter3"; 2 | import PQueue from "p-queue"; 3 | 4 | import Playthrough from "./Playthrough.js"; 5 | import { 6 | StartEvent, 7 | ReplyEvent, 8 | ActionEvent, 9 | SpeechConfig, 10 | MessageEvent, 11 | StartTypingEvent, 12 | StopTypingEvent, 13 | EpisodeCompleteEvent, 14 | ConfirmActionEvent, 15 | ConfirmReplyEvent, 16 | ConfirmResumeEvent, 17 | ConfirmStartEvent, 18 | ConfirmTapEvent, 19 | ReplyIntermediateEvent, 20 | ProblemEvent, 21 | Message, 22 | } from "./types.js"; 23 | 24 | export interface ConversationOptions { 25 | speechConfig?: SpeechConfig; 26 | } 27 | 28 | export type ConversationEvents = { 29 | // Events sent from server 30 | message: [MessageEvent]; 31 | "start-typing": [StartTypingEvent]; 32 | "stop-typing": [StopTypingEvent]; 33 | "episode-complete": [EpisodeCompleteEvent]; 34 | problem: [ProblemEvent]; 35 | // Confirmation events sent from server 36 | action: [ConfirmActionEvent]; 37 | reply: [ConfirmReplyEvent]; 38 | resume: [ConfirmResumeEvent]; 39 | start: [ConfirmStartEvent]; 40 | tap: [ConfirmTapEvent]; 41 | // Local events 42 | "playback-start": []; 43 | "playback-stop": []; 44 | }; 45 | 46 | export class Conversation extends EventEmitter { 47 | private uuid: string; 48 | 49 | private eventQueue: PQueue = new PQueue(); 50 | 51 | private lastEventId?: string; 52 | 53 | private playthroughInstance: Playthrough; 54 | 55 | private options: ConversationOptions = {}; 56 | 57 | public constructor( 58 | 
    conversationUuid: string,
    playthroughInstance: Playthrough,
    options?: ConversationOptions,
  ) {
    super();

    this.uuid = conversationUuid;
    this.playthroughInstance = playthroughInstance;

    if (options) {
      this.options = options;
    }

    // Whenever we emit a message, store the last event id so we know where to
    // restore from if a disconnection occurs.
    this.on("message", (message) => {
      this.lastEventId = message.eventId;
    });

    // Please excuse this ghastly hack, but Babel complains about
    // transforming a function class property with an arrow function inside
    // (only on non-"modern" builds)
    this.addIncomingEvent = this.addIncomingEvent.bind(this);
  }

  /**
   * Queues a server event for emission on this conversation. Events flow
   * through `eventQueue` so that `reconnect` can pause delivery while it
   * replays missed history.
   */
  public addIncomingEvent<
    T extends EventEmitter.EventNames<ConversationEvents>,
  >(
    eventName: T,
    ...eventArgs: EventEmitter.EventArgs<ConversationEvents, T>
  ): true {
    this.eventQueue.add(() => this.emit(eventName, ...eventArgs));
    return true;
  }

  // Each outgoing event below merges this conversation's options (e.g.
  // speechConfig), the caller's event payload, and the conversation uuid.
  public start = (event: StartEvent = {}): void => {
    return this.playthroughInstance.addOutgoingEvent("start", {
      ...this.options,
      ...event,
      conversationUuid: this.uuid,
    });
  };

  public reply = (event: ReplyEvent): void => {
    return this.playthroughInstance.addOutgoingEvent("reply", {
      ...this.options,
      ...event,
      conversationUuid: this.uuid,
    });
  };

  public replyIntermediate = (event: ReplyIntermediateEvent): void => {
    return this.playthroughInstance.addOutgoingEvent("reply-intermediate", {
      ...this.options,
      ...event,
      conversationUuid: this.uuid,
    });
  };

  public tap = (): void => {
    return this.playthroughInstance.addOutgoingEvent("tap", {
      ...this.options,
      conversationUuid: this.uuid,
    });
  };

  public action = (event: ActionEvent): void => {
    return this.playthroughInstance.addOutgoingEvent("action", {
      ...this.options,
      ...event,
      conversationUuid: this.uuid,
    });
  };

  public resume = (): void => {
    return this.playthroughInstance.addOutgoingEvent("resume", {
      ...this.options,
      conversationUuid: this.uuid,
    });
  };

  // Replaces the speech configuration used for subsequent outgoing events.
  public setSpeechConfig = (speechConfig: SpeechConfig | undefined): void => {
    this.options.speechConfig = speechConfig;
  };

  /**
   * After a socket reconnect, fetches character messages that happened after
   * `lastEventId` and re-emits them, bracketed by `playback-start`/`-stop`.
   * The incoming-event queue is paused for the duration so live events don't
   * interleave with playback.
   */
  public reconnect = async (): Promise<void> => {
    // If we haven't received any messages so far, there's nowhere to playback from.
    if (typeof this.lastEventId === "string") {
      // Receiving new events when trying to playback is confusing, so pause the event queue.
      this.eventQueue.pause();
      try {
        const { events } = await this.playthroughInstance.getEventHistory({
          conversationUuid: this.uuid,
          minEventId: this.lastEventId,
          limit: 1000,
          eventTypes: ["message_character"],
        });
        if (events.length > 0) {
          this.emit("playback-start");
          events.forEach((event) => {
            // If we've emitted a new message since playback started, let's ignore playback ones.
            // (Emitting "message" updates lastEventId via the constructor's
            // listener, so this comparison advances as playback proceeds.)
            if (BigInt(event.id) > BigInt(this.lastEventId as string)) {
              this.emit("message", {
                ...(event.payload as Message),
                conversationUuid: this.uuid,
              });
            }
          });
          this.emit("playback-stop");
        }
      } finally {
        // We can restart the queue now playback is finished.
        this.eventQueue.start();
      }
    }
  };
}

export default Conversation;
--------------------------------------------------------------------------------
/src/Playthrough.ts:
--------------------------------------------------------------------------------
import { EventEmitter } from "eventemitter3";
import * as Colyseus from "colyseus.js";
import jwtDecode from "jwt-decode";

import * as api from "./api.js";

import {
  StartTypingEvent,
  StopTypingEvent,
  MessageEvent,
  EpisodeCompleteEvent,
  ConfirmActionEvent,
  ConfirmReplyEvent,
  ConfirmResumeEvent,
  ConfirmStartEvent,
  ConfirmTapEvent,
  ProblemEvent,
  JSONValue,
} from "./types.js";
// eslint-disable-next-line import/no-named-as-default
import Conversation, { ConversationOptions } from "./Conversation.js";

export type ConnectionStatus = "disconnected" | "connecting" | "connected";

// Sent to the server on room join so it knows which SDK/protocol is speaking.
const sdkInfo = {
  sdkId: "js",
  sdkVersion: "6.0.0",
  protocolVersion: 2,
};

type PlaythroughEvents = {
  "connection-status": [ConnectionStatus];
  error: [any];
  problem: [{ code: string; error: string }];
};

class Playthrough extends EventEmitter<PlaythroughEvents> {
  private token: string;

  private uuid: string;

  private baseUrl?: string;

  private client: Colyseus.Client | undefined;

  private room: Colyseus.Room | undefined;

  private connectionStatus: ConnectionStatus = "disconnected";

  private shouldReconnect = true;

  private activeConversations = new Map<string, Conversation>();

  public playerSessionId?: string;

  public constructor(token: string, baseUrl?: string) {
    super();

    this.token = token;

    // The playthrough uuid is encoded inside the JWT itself.
    const { playthrough_uuid: playthroughUuid } = jwtDecode<{
      // eslint-disable-next-line camelcase
      playthrough_uuid: string;
    }>(this.token);

    this.uuid =
playthroughUuid; 67 | 68 | this.baseUrl = baseUrl; 69 | } 70 | 71 | public async getPlayerSessionId(): Promise { 72 | const DELAY = 100; 73 | const MAX_ATTEMPTS = 100; 74 | 75 | for (let attempts = 0; attempts < MAX_ATTEMPTS; attempts += 1) { 76 | if (this.playerSessionId !== undefined) { 77 | return this.playerSessionId; 78 | } 79 | 80 | // eslint-disable-next-line no-await-in-loop 81 | await new Promise((resolve) => { 82 | setTimeout(resolve, DELAY); 83 | }); 84 | } 85 | 86 | throw new Error( 87 | `Could not get player session id after ${MAX_ATTEMPTS} attempts.`, 88 | ); 89 | } 90 | 91 | public createConversation(): ReturnType { 92 | return api.createConversation(this.token, { baseUrl: this.baseUrl }); 93 | } 94 | 95 | public createCharacterConversation( 96 | characterId: number, 97 | ): ReturnType { 98 | return api.createCharacterConversation(this.token, characterId, { 99 | baseUrl: this.baseUrl, 100 | }); 101 | } 102 | 103 | public getEventHistory( 104 | options: api.GetEventHistoryOptions, 105 | ): ReturnType { 106 | return api.getEventHistory(this.token, options, { 107 | baseUrl: this.baseUrl, 108 | }); 109 | } 110 | 111 | public getPlaythroughInfo(): ReturnType { 112 | return api.getPlaythroughInfo(this.token, { baseUrl: this.baseUrl }); 113 | } 114 | 115 | public setMemory( 116 | recallValue: string, 117 | saveValue: JSONValue | null, 118 | ): ReturnType; 119 | 120 | public setMemory( 121 | memories: api.MemoryToSet[], 122 | ): ReturnType; 123 | 124 | public setMemory( 125 | memoryRecallValueOrMemories: string | api.MemoryToSet[], 126 | saveValue?: JSONValue | null, 127 | ): ReturnType { 128 | let memories: api.MemoryToSet[] = []; 129 | if (Array.isArray(memoryRecallValueOrMemories)) { 130 | memories = memoryRecallValueOrMemories; 131 | } else { 132 | memories = [ 133 | { 134 | recallValue: memoryRecallValueOrMemories, 135 | saveValue: saveValue as JSONValue | null, 136 | }, 137 | ]; 138 | } 139 | 140 | return api.setMemory(this.token, memories, { 141 | 
baseUrl: this.baseUrl, 142 | }); 143 | } 144 | 145 | public restartFromEpisodeId( 146 | episodeId: number, 147 | ): ReturnType { 148 | return api.restartFromEpisodeId(this.token, episodeId, { 149 | baseUrl: this.baseUrl, 150 | }); 151 | } 152 | 153 | public restartFromEpisodeIndex( 154 | episodeIndex: number, 155 | ): ReturnType { 156 | return api.restartFromEpisodeIndex(this.token, episodeIndex, { 157 | baseUrl: this.baseUrl, 158 | }); 159 | } 160 | 161 | public restartFromEventId( 162 | eventId: string, 163 | ): ReturnType { 164 | return api.restartFromEventId(this.token, eventId, { 165 | baseUrl: this.baseUrl, 166 | }); 167 | } 168 | 169 | public joinConversation = ( 170 | conversationUuid: string, 171 | options?: ConversationOptions, 172 | ): Conversation => { 173 | const conversation = new Conversation(conversationUuid, this, options); 174 | if (this.activeConversations.has(conversationUuid)) { 175 | return this.activeConversations.get(conversationUuid) as Conversation; 176 | } 177 | this.activeConversations.set(conversationUuid, conversation); 178 | return conversation; 179 | }; 180 | 181 | public leaveConversation = (conversationUuid: string): void => { 182 | if (!this.activeConversations.has(conversationUuid)) { 183 | throw new Error( 184 | `The conversation with id \`${conversationUuid}\` has not been joined, so cannot be left.`, 185 | ); 186 | } 187 | this.activeConversations.delete(conversationUuid); 188 | }; 189 | 190 | public getConversation = ( 191 | conversationUuid: string, 192 | ): Conversation | undefined => { 193 | return this.activeConversations.get(conversationUuid); 194 | }; 195 | 196 | public addOutgoingEvent = (eventName: string, eventData?: unknown): void => { 197 | if (this.room) { 198 | if (this.connectionStatus === "connected") { 199 | this.room.send(eventName, eventData); 200 | } else { 201 | console.warn( 202 | `Event \`${eventName}\` was not sent as the socket was not ready. 
Wait for the \`connection-status\` event to be called with \`connected\` before sending events.`, 203 | ); 204 | } 205 | } else { 206 | console.log( 207 | `Event \`${eventName}\` was not sent as the socket was not initialised. Call \`playthrough.connect()\` to connect the socket.`, 208 | ); 209 | } 210 | }; 211 | 212 | public connect = async (): Promise<{ playerSessionId: string }> => { 213 | const baseUrl = this.baseUrl || api.getGlobalBaseUrl(); 214 | 215 | if (!this.client) { 216 | this.client = new Colyseus.Client(baseUrl.replace(/^http/, "ws")); 217 | } 218 | 219 | this.room = await this.client.joinOrCreate("chat", { 220 | playthroughId: this.uuid, 221 | token: this.token, 222 | sdkInfo, 223 | }); 224 | 225 | this.attachRoomHandlers(this.room); 226 | 227 | this.shouldReconnect = true; 228 | 229 | const playerSessionId = await this.getPlayerSessionId(); 230 | 231 | return { playerSessionId }; 232 | }; 233 | 234 | public pause = (): void => { 235 | this.addOutgoingEvent("pause"); 236 | }; 237 | 238 | public play = (): void => { 239 | this.addOutgoingEvent("play"); 240 | }; 241 | 242 | private attachRoomHandlers = (room: Colyseus.Room) => { 243 | room.onMessage("status", this.onConnected); 244 | room.onMessage("problem", this.onProblem); 245 | room.onMessage("start-typing", this.onStartTyping); 246 | room.onMessage("stop-typing", this.onStopTyping); 247 | room.onMessage("message", this.onMessage); 248 | room.onMessage("episode-complete", this.onEpisodeComplete); 249 | 250 | room.onMessage("action", this.onAction); 251 | room.onMessage("reply", this.onReply); 252 | room.onMessage("resume", this.onResume); 253 | room.onMessage("start", this.onStart); 254 | room.onMessage("tap", this.onTap); 255 | room.onMessage("player-session-id", (playerSessionId: string) => { 256 | this.playerSessionId = playerSessionId; 257 | }); 258 | 259 | room.onError(this.onError); 260 | 261 | // eslint-disable-next-line @typescript-eslint/no-misused-promises 262 | room.onLeave(async (code) 
=> {
      // The old room is unusable after leave; detach everything.
      room.removeAllListeners();
      this.room = undefined;

      // Normal disconnection codes (i.e. user chose to disconnect explicitly)
      if (code === 4000 || !this.shouldReconnect) {
        this.onDisconnect();
        return;
      }

      let roomExpired = false;

      // Up to 20 attempts, each followed by a ~5-6s jittered backoff.
      for (let attempts = 0; attempts < 20; attempts += 1) {
        if (!roomExpired) {
          try {
            // Try to reconnect into the same room.
            this.onReconnecting();
            // eslint-disable-next-line no-await-in-loop
            const newRoom = await this.client?.reconnect(
              room.id,
              room.sessionId,
            );
            if (newRoom) {
              this.attachRoomHandlers(newRoom);
              this.room = newRoom;
              this.onReconnect();
              this.onConnected();
              return;
            }
          } catch (err) {
            // The server reports a vanished room with this message shape;
            // from then on we only try to create a fresh room.
            if (/room ".*" not found/.test((err as Error).message)) {
              roomExpired = true;
            }
          }
        }

        // If we could reconnect (network is up), but the exact room no longer exists (it expired), try and create a new room.
        if (roomExpired) {
          try {
            // eslint-disable-next-line no-await-in-loop
            const newRoom = await this.client?.joinOrCreate("chat", {
              playthroughId: this.uuid,
              token: this.token,
              sdkInfo,
            });
            if (newRoom) {
              this.attachRoomHandlers(newRoom);
              this.room = newRoom;
              this.onReconnect();
              this.onConnected();
              return;
            }
          } catch (err2) {
            console.error(
              "Could not reconnect to a Charisma playthrough.",
              err2,
            );
          }
        }

        // eslint-disable-next-line no-await-in-loop
        await new Promise<void>((resolve) => {
          setTimeout(() => resolve(), 5000 + Math.floor(Math.random() * 1000));
        });
      }

      // We failed to both reconnect into the same room, and a new room, so disconnect.
      this.onDisconnect();
    });
  };

  // Leaves the room without triggering the automatic reconnection above.
  public disconnect = (): void => {
    this.shouldReconnect = false;

    if (this.room) {
      this.room.leave();
    }
  };

  // Emits `connection-status` only on genuine transitions.
  private changeConnectionStatus = (newStatus: ConnectionStatus): void => {
    if (newStatus !== this.connectionStatus) {
      this.connectionStatus = newStatus;
      this.emit("connection-status", newStatus);
    }
  };

  // After a socket-level reconnect, each conversation replays missed events.
  private onReconnect = (): void => {
    this.activeConversations.forEach((conversation) => {
      conversation.reconnect().catch((err) => {
        console.error(
          `Something went wrong reconnecting to conversation:`,
          err,
        );
      });
    });
  };

  private onReconnecting = (): void => {
    this.changeConnectionStatus("connecting");
  };

  private onDisconnect = (): void => {
    this.changeConnectionStatus("disconnected");
  };

  private onConnected = (): void => {
    this.changeConnectionStatus("connected");
  };

  private onError = (code: number, message?: string): void => {
    this.emit("error", { message, code });
  };

  // Problems are emitted on the playthrough itself and, when the event names
  // a conversation, forwarded to that conversation too.
  private onProblem = (event: ProblemEvent): void => {
    this.emit("problem", event);
    if (event.conversationUuid) {
      const conversation = this.activeConversations.get(event.conversationUuid);
      if (conversation) {
        conversation.addIncomingEvent("problem", event);
      }
    }
  };

  // Each handler below routes a room message to the conversation it belongs
  // to; events for conversations that haven't been joined are dropped.
  private onStartTyping = (event: StartTypingEvent): void => {
    const conversation = this.activeConversations.get(event.conversationUuid);
    if (conversation) {
      conversation.addIncomingEvent("start-typing", event);
    }
  };

  private onStopTyping = (event: StopTypingEvent): void => {
    const conversation = this.activeConversations.get(event.conversationUuid);
    if (conversation) {
      conversation.addIncomingEvent("stop-typing", event);
    }
  };

  private onMessage = (event: MessageEvent): void => {
    const conversation = this.activeConversations.get(event.conversationUuid);
    if (conversation) {
      conversation.addIncomingEvent("message", event);
    }
  };

  private onEpisodeComplete = (event: EpisodeCompleteEvent): void => {
    const conversation = this.activeConversations.get(event.conversationUuid);
    if (conversation) {
      conversation.addIncomingEvent("episode-complete", event);
    }
  };

  private onAction = (event: ConfirmActionEvent): void => {
    const conversation = this.activeConversations.get(event.conversationUuid);
    if (conversation) {
      conversation.addIncomingEvent("action", event);
    }
  };

  private onResume = (event: ConfirmResumeEvent): void => {
    const conversation = this.activeConversations.get(event.conversationUuid);
    if (conversation) {
      conversation.addIncomingEvent("resume", event);
    }
  };

  private onReply = (event: ConfirmReplyEvent): void => {
    const conversation = this.activeConversations.get(event.conversationUuid);
    if (conversation) {
      conversation.addIncomingEvent("reply", event);
    }
  };

  private onStart = (event: ConfirmStartEvent): void => {
    const conversation = this.activeConversations.get(event.conversationUuid);
    if (conversation) {
      conversation.addIncomingEvent("start", event);
    }
  };

  private onTap = (event: ConfirmTapEvent): void => {
    const conversation = this.activeConversations.get(event.conversationUuid);
    if (conversation) {
      conversation.addIncomingEvent("tap", event);
    }
  };
}

export default Playthrough;
--------------------------------------------------------------------------------
/src/__mocks__/MockAudioInputsBrowser.ts:
--------------------------------------------------------------------------------
import { EventEmitter } from "eventemitter3";

// Jest stand-in for AudioInputsBrowser: same public surface, every method a
// jest.fn() so tests can assert call patterns.
export default class MockAudioInputsBrowser extends EventEmitter {
  public isSupported = true;

  public startListening = jest.fn();

  public stopListening = jest.fn();

  public resetTimeout = jest.fn();
}
--------------------------------------------------------------------------------
/src/__mocks__/MockAudioInputsService.ts:
--------------------------------------------------------------------------------
import { EventEmitter } from "eventemitter3";

// Jest stand-in for AudioInputsService.
// NOTE(review): `on = jest.fn()` shadows EventEmitter.on, so listeners
// registered in tests are recorded but never invoked — presumably
// intentional for call-tracking; confirm before relying on emitted events.
export default class MockAudioInputsService extends EventEmitter {
  public startListening = jest.fn();

  public stopListening = jest.fn();

  public resetTimeout = jest.fn();

  public connect = jest.fn();

  public on = jest.fn();
}
--------------------------------------------------------------------------------
/src/api.ts:
--------------------------------------------------------------------------------
import type { Emotion, Impact, JSONValue, Memory } from "./types.js";

// Flattens an options object into URLSearchParams: strings/numbers become a
// single pair, arrays append one pair per element, other values are skipped.
// NOTE(review): the generic constraint was lost in extraction and has been
// reconstructed — confirm against the original source.
const createSearchParams = <
  Params extends Record<string, string | number | string[] | undefined>,
>(
  params: Params,
) => {
  const query = new URLSearchParams();
  Object.entries(params).forEach(([key, value]) => {
    if (typeof value === "string" || typeof value === "number") {
      query.append(key, value.toString());
    } else if (Array.isArray(value)) {
      value.forEach((valueInner) => query.append(key, valueInner));
    }
  });
  return query;
};

// Thin wrapper around fetch: defaults JSON Accept/Content-Type headers,
// tolerates empty response bodies, and throws (using the server-provided
// `error` field when present) on non-2xx responses.
const fetchHelper = async <T>(
  endpoint: string,
  options: RequestInit = {},
): Promise<T> => {
  // Always default to `Accept: application/json`
  let headers: Record<string, string> = {
    Accept: "application/json",
    ...(options.headers as Record<string, string>),
  };
  if (
    typeof options.method === "string" &&
    options.method.toLowerCase() === "post"
  ) {
    // If it's a POST method, default to `Content-Type: application/json` for the body
    headers = { "Content-Type": "application/json", ...headers };
  }

  const response = await fetch(endpoint, { mode: "cors", ...options, headers });

  let data: unknown = {};
  try {
    // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
    data = await response.json();
  } catch (err) {
    // Some endpoints just return a status code and no JSON body data.
  }

  if (!response.ok) {
    throw new Error(
      (data as { error?: string }).error ||
        `Something went wrong calling \`${endpoint}\``,
    );
  }

  return data as T;
};

// Module-level default base URL, overridable per-call via CommonApiOptions.
let globalBaseUrl = "https://play.charisma.ai";

export const getGlobalBaseUrl = (): string => globalBaseUrl;

export const setGlobalBaseUrl = (newBaseUrl: string): void => {
  globalBaseUrl = newBaseUrl;
};

export type CommonApiOptions = {
  baseUrl?: string;
};

export type CreatePlaythroughTokenOptions = {
  /**
   * The `id` of the story that you want to create a new playthrough for. The story must be published, unless a Charisma.ai user token has been passed and the user matches the owner of the story.
   */
  storyId: number;
  /**
   * The `version` of the story that you want to create a new playthrough for. If omitted, it will default to the most recent published version. To get the draft version of a story, pass `-1` and an `apiKey`.
   */
  version?: number;
  /**
   * It is recommended to use the more secure `apiKey` instead of `userToken`. To access draft, test or unpublished versions of your story, pass a `userToken`.
   */
  userToken?: string;
  /**
   * To access draft, test or unpublished versions of your story, pass an `apiKey`. The API key can be found on the story overview page.
   */
  apiKey?: string;
  /**
   * To play a story in a language other than English (`en`, the default), pass a BCP-47 `languageCode`. For example, to play in Italian, use `it`.
   */
  languageCode?: string;
};

export type CreatePlaythroughTokenResult = {
  /**
   * The playthrough token, used for connecting to this playthrough. It never expires,
   * so can be saved in a secure place for players to continue playing between sessions.
   *
   * To create a playthrough with the token, use `new Playthrough(token)`.
   */
  token: string;
  /**
   * The unique identifier of the playthrough, encoded inside the token. It can be useful
   * as a debugging tool.
   */
  playthroughUuid: string;
};

export async function createPlaythroughToken(
  options: CreatePlaythroughTokenOptions,
  apiOptions?: CommonApiOptions,
): Promise<CreatePlaythroughTokenResult> {
  // Draft access requires credentials; fail fast with a helpful message.
  if (
    options.version === -1 &&
    options.userToken === undefined &&
    options.apiKey === undefined
  ) {
    throw new Error(
      "To play the draft version (-1) of a story, an `apiKey` or `userToken` must also be passed.",
    );
  }

  // Prefer the API key over the (less secure) user token when both are given.
  let authHeader: string | undefined;
  if (options.apiKey) {
    authHeader = `API-Key ${options.apiKey}`;
  } else if (options.userToken) {
    authHeader = `Bearer ${options.userToken}`;
  }

  try {
    const result = await fetchHelper<{
      token: string;
      playthroughUuid: string;
    }>(`${apiOptions?.baseUrl || globalBaseUrl}/play/token`, {
      body: JSON.stringify({
        storyId: options.storyId,
        version: options.version,
        languageCode: options.languageCode,
      }),
      headers: authHeader ? { Authorization: authHeader } : undefined,
      method: "POST",
    });
    return result;
  } catch (err) {
    throw new Error(`A playthrough token could not be generated: ${err}`);
  }
}

export type CreateConversationResult = {
  /**
   * The unique identifier of the created conversation. Pass this into `playthrough.joinConversation`
   * to get a scoped `Conversation` instance.
   */
  conversationUuid: string;
};

export async function createConversation(
  token: string,
  apiOptions?: CommonApiOptions,
): Promise<CreateConversationResult> {
  const result = await fetchHelper<{
    conversationUuid: string;
  }>(`${apiOptions?.baseUrl || globalBaseUrl}/play/conversation`, {
    body: JSON.stringify({}),
    headers: { Authorization: `Bearer ${token}` },
    method: "POST",
  });
  return result;
}

export type CreateCharacterConversationResult = {
  /**
   * The unique identifier of the created conversation. Pass this into `playthrough.joinConversation`
   * to get a scoped `Conversation` instance.
   */
  conversationUuid: string;
};

export async function createCharacterConversation(
  token: string,
  characterId: number,
  apiOptions?: CommonApiOptions,
): Promise<CreateCharacterConversationResult> {
  const result = await fetchHelper<{
    conversationUuid: string;
  }>(`${apiOptions?.baseUrl || globalBaseUrl}/play/conversation/character`, {
    body: JSON.stringify({ characterId }),
    headers: { Authorization: `Bearer ${token}` },
    method: "POST",
  });
  return result;
}

export type EventType =
  | "start"
  | "message_player"
  | "message_character"
  | "set_mood"
  | "resume"
  | "set_memory"
  | "tap"
  | "restart"
  | "episode_complete"
  | "fork";

export type Event = {
  id: string;
  type: EventType;
  timestamp: string;
  conversationUuid: string | null;
  playthroughUuid: string;
  payload: unknown;
};

export type GetEventHistoryOptions = {
  conversationUuid?: string;
  eventTypes?: EventType[];
  minEventId?: string;
  maxEventId?: string;
  limit: number;
  sort?: "asc" | "desc";
};

export type GetEventHistoryResult = {
  events: Event[];
};

/**
 * Gets the events that have happened in the playthrough, such as character and player messages amongst others. The returned events can be filtered by using options.
 */
export async function getEventHistory(
  token: string,
  // eslint-disable-next-line default-param-last
  options: GetEventHistoryOptions = { limit: 1000 },
  apiOptions?: CommonApiOptions,
): Promise<GetEventHistoryResult> {
  // Options are serialised into the query string; arrays append one pair per value.
  const query = createSearchParams(options);
  const result = await fetchHelper<GetEventHistoryResult>(
    `${
      apiOptions?.baseUrl || globalBaseUrl
    }/play/event-history?${query.toString()}`,
    {
      headers: { Authorization: `Bearer ${token}` },
      method: "GET",
    },
  );
  return result;
}

export type GetPlaythroughInfoResult = {
  emotions: Emotion[];
  memories: Memory[];
  impacts: Impact[];
};

/**
 * Returns current information about the playthrough, such as character emotions and memories.
 */
export async function getPlaythroughInfo(
  token: string,
  apiOptions?: CommonApiOptions,
): Promise<GetPlaythroughInfoResult> {
  const result = await fetchHelper<GetPlaythroughInfoResult>(
    `${
      apiOptions?.baseUrl || globalBaseUrl
    }/play/playthrough-info?use_typed_memories=1`,
    {
      headers: { Authorization: `Bearer ${token}` },
      method: "GET",
    },
  );
  return result;
}

export type MemoryToSet = { recallValue: string; saveValue: JSONValue | null };

/**
 * Directly sets a memory in Charisma. The promise resolves when the memory has been committed so is guaranteed to be set, but it may take a short amount of time (usually < 1s) for the updated value to propagate to any active playthrough instances.
 *
 * It is highly recommended to call `setMemory` with an array instead of calling `setMemory` multiple times, to only cause one refetch of the current memory values in the chat engine.
 */
export async function setMemory(
  token: string,
  memoryRecallValue: string,
  saveValue: string | null,
  apiOptions?: CommonApiOptions,
): Promise<void>;
export async function setMemory(
  token: string,
  memoriesToSet: MemoryToSet[],
  apiOptions?: CommonApiOptions,
): Promise<void>;
export async function setMemory(
  token: string,
  memoryRecallValueOrMemories: string | MemoryToSet[],
  saveValueOrApiOptions?: string | null | CommonApiOptions,
  apiOptions?: CommonApiOptions,
): Promise<void> {
  let resolvedApiOptions = apiOptions;

  // In the batch (array) form, the second positional argument is actually
  // the apiOptions, so re-resolve it here.
  let memories: MemoryToSet[] = [];
  if (Array.isArray(memoryRecallValueOrMemories)) {
    memories = memoryRecallValueOrMemories;
    resolvedApiOptions = saveValueOrApiOptions as CommonApiOptions | undefined;
  } else {
    memories = [
      {
        recallValue: memoryRecallValueOrMemories,
        saveValue: saveValueOrApiOptions as JSONValue | null,
      },
    ];
  }

  await fetchHelper(
    `${resolvedApiOptions?.baseUrl || globalBaseUrl}/play/set-memory`,
    {
      body: JSON.stringify({
        memories,
      }),
      headers: { Authorization: `Bearer ${token}` },
      method: "POST",
    },
  );
}

export async function restartFromEpisodeId(
  token: string,
  episodeId: number,
  apiOptions?: CommonApiOptions,
): Promise<void> {
  await fetchHelper(
    `${apiOptions?.baseUrl || globalBaseUrl}/play/restart-from-episode`,
    {
      body: JSON.stringify({ episodeId }),
      headers: { Authorization: `Bearer ${token}` },
      method: "POST",
    },
  );
}

export async function restartFromEpisodeIndex(
  token: string,
  episodeIndex: number,
  apiOptions?: CommonApiOptions,
): Promise<void> {
  // Same endpoint as restartFromEpisodeId; the server distinguishes by payload key.
  await fetchHelper(
    `${apiOptions?.baseUrl || globalBaseUrl}/play/restart-from-episode`,
    {
      body: JSON.stringify({ episodeIndex }),
      headers: { Authorization: `Bearer ${token}` },
      method: "POST",
    },
  );
}

export async function restartFromEventId(
  token: string,
  eventId: string,
  apiOptions?: CommonApiOptions,
): Promise<void> {
  await fetchHelper(
    `${apiOptions?.baseUrl || globalBaseUrl}/play/restart-from-event`,
    {
      body: JSON.stringify({ eventId }),
      headers: { Authorization: `Bearer ${token}` },
      method: "POST",
    },
  );
}

export type ForkPlaythroughTokenResult = {
  /**
   * The playthrough token, used for connecting to this playthrough. It never expires,
   * so can be saved in a secure place for players to continue playing between sessions.
   *
   * To create a playthrough with the token, use `new Playthrough(token)`.
   */
  token: string;
  /**
   * The unique identifier of the playthrough, encoded inside the token. It can be useful
   * as a debugging tool.
   */
  playthroughUuid: string;
};

/**
 * Creates a clone of the playthrough, including memories and emotions, and returns a new playthrough linked to the latest promoted story version.
 *
 * This is useful when you've published a new story version. Since playthroughs are bound to a particular story version, you need to "fork" the playthrough in order to move a player over to the newly published version.
387 | */ 388 | export async function forkPlaythroughToken( 389 | token: string, 390 | apiOptions?: CommonApiOptions, 391 | ): Promise { 392 | const result = await fetchHelper<{ 393 | token: string; 394 | playthroughUuid: string; 395 | }>(`${apiOptions?.baseUrl || globalBaseUrl}/play/fork-playthrough`, { 396 | body: JSON.stringify({}), 397 | headers: { Authorization: `Bearer ${token}` }, 398 | method: "POST", 399 | }); 400 | return result; 401 | } 402 | 403 | export type ResetPlaythroughOptions = { 404 | /** 405 | * The event ID to reset the playthrough to. 406 | */ 407 | eventId: string; 408 | }; 409 | 410 | /** 411 | * Resets a playthrough's state to a particular event ID. If this playthrough has been forked, the event ID can be from any of this playthrough's ancestors. This resets memories and emotions **ONLY**. 412 | */ 413 | export async function resetPlaythrough( 414 | token: string, 415 | options: ResetPlaythroughOptions, 416 | apiOptions?: CommonApiOptions, 417 | ): Promise { 418 | await fetchHelper( 419 | `${apiOptions?.baseUrl || globalBaseUrl}/play/reset-playthrough`, 420 | { 421 | body: JSON.stringify(options), 422 | headers: { Authorization: `Bearer ${token}` }, 423 | method: "POST", 424 | }, 425 | ); 426 | } 427 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | export * as api from "./api.js"; 2 | export * from "./api.js"; 3 | 4 | export { 5 | default as Playthrough, 6 | type ConnectionStatus, 7 | } from "./Playthrough.js"; 8 | 9 | export { default as AudioManager } from "./AudioManager.js"; 10 | export type { AudioManagerOptions } from "./AudioManager.js"; 11 | 12 | export type { AudioOutputsServicePlayOptions } from "./AudioOutputsService.js"; 13 | 14 | export { 15 | Conversation, 16 | type ConversationOptions, 17 | type ConversationEvents, 18 | } from "./Conversation.js"; 19 | 20 | export type * from "./types.js"; 21 | 
-------------------------------------------------------------------------------- /src/speech-types.ts: -------------------------------------------------------------------------------- 1 | export type SpeechRecognitionErrorCode = 2 | | "aborted" 3 | | "audio-capture" 4 | | "bad-grammar" 5 | | "language-not-supported" 6 | | "network" 7 | | "no-speech" 8 | | "not-allowed" 9 | | "service-not-allowed"; 10 | 11 | export interface SpeechRecognitionErrorEventInit extends EventInit { 12 | error: SpeechRecognitionErrorCode; 13 | message?: string; 14 | } 15 | 16 | export interface SpeechRecognitionEventInit extends EventInit { 17 | resultIndex?: number; 18 | results: SpeechRecognitionResultList; 19 | } 20 | 21 | export interface SpeechGrammar { 22 | src: string; 23 | weight: number; 24 | } 25 | 26 | export interface SpeechGrammarList { 27 | readonly length: number; 28 | addFromString(string: string, weight?: number): void; 29 | addFromURI(src: string, weight?: number): void; 30 | item(index: number): SpeechGrammar; 31 | [index: number]: SpeechGrammar; 32 | } 33 | 34 | export interface SpeechRecognitionErrorEvent extends Event { 35 | readonly error: SpeechRecognitionErrorCode; 36 | readonly message: string; 37 | } 38 | 39 | export interface SpeechRecognitionEvent extends Event { 40 | readonly resultIndex: number; 41 | readonly results: SpeechRecognitionResultList; 42 | } 43 | 44 | interface SpeechRecognitionEventMap { 45 | audioend: Event; 46 | audiostart: Event; 47 | end: Event; 48 | error: SpeechRecognitionErrorEvent; 49 | nomatch: SpeechRecognitionEvent; 50 | result: SpeechRecognitionEvent; 51 | soundend: Event; 52 | soundstart: Event; 53 | speechend: Event; 54 | speechstart: Event; 55 | start: Event; 56 | } 57 | 58 | export interface SpeechRecognition extends EventTarget { 59 | continuous: boolean; 60 | grammars: SpeechGrammarList; 61 | interimResults: boolean; 62 | lang: string; 63 | maxAlternatives: number; 64 | onaudioend: ((this: SpeechRecognition, ev: Event) => any) | 
null; 65 | onaudiostart: ((this: SpeechRecognition, ev: Event) => any) | null; 66 | onend: ((this: SpeechRecognition, ev: Event) => any) | null; 67 | onerror: 68 | | ((this: SpeechRecognition, ev: SpeechRecognitionErrorEvent) => any) 69 | | null; 70 | onnomatch: 71 | | ((this: SpeechRecognition, ev: SpeechRecognitionEvent) => any) 72 | | null; 73 | onresult: 74 | | ((this: SpeechRecognition, ev: SpeechRecognitionEvent) => any) 75 | | null; 76 | onsoundend: ((this: SpeechRecognition, ev: Event) => any) | null; 77 | onsoundstart: ((this: SpeechRecognition, ev: Event) => any) | null; 78 | onspeechend: ((this: SpeechRecognition, ev: Event) => any) | null; 79 | onspeechstart: ((this: SpeechRecognition, ev: Event) => any) | null; 80 | onstart: ((this: SpeechRecognition, ev: Event) => any) | null; 81 | abort(): void; 82 | start(): void; 83 | stop(): void; 84 | addEventListener( 85 | type: K, 86 | listener: ( 87 | this: SpeechRecognition, 88 | ev: SpeechRecognitionEventMap[K], 89 | ) => any, 90 | options?: boolean | AddEventListenerOptions, 91 | ): void; 92 | addEventListener( 93 | type: string, 94 | listener: EventListenerOrEventListenerObject, 95 | options?: boolean | AddEventListenerOptions, 96 | ): void; 97 | removeEventListener( 98 | type: K, 99 | listener: ( 100 | this: SpeechRecognition, 101 | ev: SpeechRecognitionEventMap[K], 102 | ) => any, 103 | options?: boolean | EventListenerOptions, 104 | ): void; 105 | removeEventListener( 106 | type: string, 107 | listener: EventListenerOrEventListenerObject, 108 | options?: boolean | EventListenerOptions, 109 | ): void; 110 | } 111 | -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | export type JSONValue = 2 | | string 3 | | number 4 | | boolean 5 | | null 6 | | JSONValue[] 7 | | { [key: string]: JSONValue }; 8 | 9 | // Message field types 10 | 11 | export type MediaType = 12 | | "image" 13 | | 
"video" 14 | | "audio" 15 | | "youtube" 16 | | "vimeo" 17 | | "unknown"; 18 | 19 | export type ActiveFeelingEffect = { 20 | feeling: string; 21 | intensity: number; 22 | duration: number; 23 | durationRemaining: number; 24 | }; 25 | 26 | export type Emotion = { 27 | id: number; 28 | name: string; 29 | avatar: string | null; 30 | moodPositivity: number; 31 | moodEnergy: number; 32 | playerRelationship: number; 33 | activeEffects: ActiveFeelingEffect[]; 34 | }; 35 | 36 | export type Memory = { 37 | id: number; 38 | recallValue: string; 39 | saveValue: JSONValue | null; 40 | }; 41 | 42 | export type MessagePathItem = { 43 | id: number; 44 | type: "node" | "edge"; 45 | graphId: number; 46 | }; 47 | export type MessagePath = MessagePathItem[]; 48 | 49 | export type Impact = { 50 | id: number; 51 | impact: string; 52 | isImpactShareable: boolean; 53 | impactImageUrl: string | null; 54 | }; 55 | 56 | export type Metadata = { 57 | [key: string]: string | undefined; 58 | }; 59 | 60 | export type Character = { 61 | id: number; 62 | name: string; 63 | avatar: string | null; 64 | }; 65 | 66 | export type Speech = { 67 | audio: ArrayBuffer | string; 68 | duration: number; 69 | }; 70 | 71 | export type BubblePoints = [number, number, number]; 72 | export type BubbleTailPosition = string; 73 | export type BubbleStyle = string; 74 | 75 | export type ImageLayerPoints = [[number, number], [number, number]]; 76 | export enum ImageLayerResizeMode { 77 | Contain = "contain", 78 | Cover = "cover", 79 | } 80 | export type ImageLayerOrientation = "portrait" | "landscape"; 81 | export type ImageLayer = { 82 | url: string | null; 83 | points: ImageLayerPoints; 84 | resizeMode: ImageLayerResizeMode; 85 | orientation: ImageLayerOrientation; 86 | }; 87 | 88 | export enum AudioTrackBehaviour { 89 | Continue = "continue", 90 | Restart = "restart", 91 | } 92 | export type AudioTrack = { 93 | url: string | null; 94 | behaviour: AudioTrackBehaviour; 95 | loop: boolean; 96 | volume: number; 97 | 
stopPlaying: boolean; 98 | }; 99 | 100 | export type Media = { 101 | animationIn: string | null; 102 | animationOut: string | null; 103 | bubblePoints: BubblePoints | null; 104 | bubbleTailPosition: BubbleTailPosition | null; 105 | bubbleStyle: BubbleStyle | null; 106 | imageLayers: ImageLayer[]; 107 | audioTracks: AudioTrack[]; 108 | stopAllAudio: boolean; 109 | }; 110 | 111 | // Message types 112 | 113 | export type MessageCharacter = { 114 | text: string; 115 | character: Character | null; 116 | metadata: Metadata; 117 | speech: Speech | null; 118 | media: Media; 119 | impact: Impact | null; 120 | }; 121 | 122 | export type MessagePanel = { 123 | metadata: Metadata; 124 | media: Media; 125 | impact: Impact | null; 126 | }; 127 | 128 | export type MessageMedia = { 129 | url: string; 130 | mediaType: MediaType; 131 | }; 132 | 133 | type GenericMessage = { 134 | type: T; 135 | message: S; 136 | eventId: string; 137 | timestamp: number; 138 | endStory: boolean; 139 | tapToContinue: boolean; 140 | path: MessagePath; 141 | emotions: Emotion[]; 142 | memories: Memory[]; 143 | }; 144 | 145 | export type Message = 146 | | GenericMessage<"character", MessageCharacter> 147 | | GenericMessage<"panel", MessagePanel> 148 | | GenericMessage<"media", MessageMedia>; 149 | 150 | // Speech config (set on Conversation) 151 | 152 | export type SpeechEncoding = "mp3" | "ogg" | "pcm" | "wav"; 153 | export type SpeechOutput = "url" | "buffer"; 154 | export interface SpeechConfig { 155 | encoding?: SpeechEncoding | SpeechEncoding[]; 156 | output?: SpeechOutput; 157 | } 158 | 159 | // Events sent to server 160 | 161 | export interface StartEvent { 162 | sceneIndex?: number; 163 | startGraphId?: number; 164 | startGraphReferenceId?: string; 165 | startNodeId?: number; 166 | } 167 | 168 | export type InputType = "keyboard" | "microphone"; 169 | 170 | export interface ReplyEvent { 171 | text: string; 172 | inputType?: InputType; 173 | } 174 | 175 | export interface ReplyIntermediateEvent { 
176 | text: string; 177 | inputType: InputType; 178 | } 179 | 180 | export interface ActionEvent { 181 | action: string; 182 | } 183 | 184 | // Events sent to client 185 | 186 | export interface StartTypingEvent { 187 | conversationUuid: string; 188 | } 189 | 190 | export interface StopTypingEvent { 191 | conversationUuid: string; 192 | } 193 | 194 | export type MessageEvent = { 195 | conversationUuid: string; 196 | } & Message; 197 | 198 | export type CharacterMoodChange = { 199 | characterId: number; 200 | characterName: string | null; 201 | characterAvatar: string | null; 202 | // moodChange: Partial; 203 | }; 204 | export type CharacterMoodChanges = CharacterMoodChange[]; 205 | 206 | export interface EpisodeCompleteEvent { 207 | conversationUuid: string; 208 | impacts: Impact[]; 209 | completedEpisodeId: number; 210 | nextEpisodeId: number | null; 211 | characterMoodChanges: CharacterMoodChanges; 212 | } 213 | 214 | export type ProblemEvent = { 215 | code: string; 216 | error: string; 217 | conversationUuid?: string; 218 | }; 219 | 220 | // Confirmation events sent from server 221 | 222 | export type ConfirmActionEventPayload = { 223 | conversationUuid: string; 224 | action: string; 225 | }; 226 | 227 | export type ConfirmReplyEventPayload = { 228 | conversationUuid: string; 229 | text: string; 230 | }; 231 | 232 | export type ConfirmResumeEventPayload = { 233 | conversationUuid: string; 234 | }; 235 | 236 | export type ConfirmStartEventPayload = { 237 | conversationUuid: string; 238 | startGraphId?: number; 239 | startGraphReferenceId?: string; 240 | startNodeId?: number; 241 | sceneIndex?: number; 242 | resetEmotions?: boolean; 243 | }; 244 | 245 | export type ConfirmTapEventPayload = { 246 | conversationUuid: string; 247 | }; 248 | 249 | type ConfirmEvent> = { 250 | eventId: string; 251 | timestamp: number; 252 | playerId: string | null; 253 | } & S; 254 | 255 | export type ConfirmActionEvent = ConfirmEvent; 256 | export type ConfirmReplyEvent = 
ConfirmEvent; 257 | export type ConfirmResumeEvent = ConfirmEvent; 258 | export type ConfirmStartEvent = ConfirmEvent; 259 | export type ConfirmTapEvent = ConfirmEvent; 260 | 261 | export type SpeechRecognitionStartEvent = { 262 | service: "unified" | "unified:google" | "unified:aws" | "unified:deepgram"; 263 | sampleRate?: number; 264 | languageCode?: string; 265 | encoding?: string; 266 | customServiceParameters?: Record; 267 | returnRaw?: boolean; 268 | traceId?: number; 269 | }; 270 | 271 | export type SpeechRecognitionResponse = { 272 | confidence?: number; 273 | durationInSeconds?: number; 274 | speechFinal: boolean; 275 | isFinal: boolean; 276 | text: string; 277 | traceId?: number; 278 | }; 279 | 280 | type SpeechRecognitionParameters = { 281 | sampleRate: number; 282 | languageCode: string; 283 | encoding: string; 284 | customServiceParameters: unknown; 285 | returnRaw: boolean; 286 | traceId?: number; 287 | }; 288 | 289 | export type SpeechRecognitionStarted = { 290 | id: string; 291 | playerSessionId: string; 292 | service: string; 293 | parameters: SpeechRecognitionParameters; 294 | startedAt: Date; 295 | }; 296 | 297 | export type SpeechRecognitionStopped = { 298 | id: string; 299 | playerSessionId: string; 300 | service: string; 301 | parameters: SpeechRecognitionParameters; 302 | startedAt: Date; 303 | endedAt: Date; 304 | creditCount: number; 305 | }; 306 | -------------------------------------------------------------------------------- /tsconfig.eslint.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.json", 3 | "include": ["**/*.js", "**/*.ts"], 4 | "compilerOptions": { 5 | "allowJs": true 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "declaration": true, 4 | "declarationDir": "dist", 5 | 
"emitDeclarationOnly": true, 6 | "esModuleInterop": true, 7 | "isolatedModules": true, 8 | "lib": ["ES2022", "dom"], 9 | "module": "esnext", 10 | "moduleResolution": "node", 11 | "noFallthroughCasesInSwitch": true, 12 | "noImplicitReturns": true, 13 | "noUnusedLocals": true, 14 | "noUnusedParameters": true, 15 | "outDir": "dist", 16 | "resolveJsonModule": true, 17 | "sourceMap": true, 18 | "skipLibCheck": true, 19 | "strict": true, 20 | "target": "esnext" 21 | }, 22 | "include": ["src"], 23 | "exclude": ["node_modules", "dist", "**/__mocks__"] 24 | } 25 | --------------------------------------------------------------------------------