├── .prettierrc.json ├── lib ├── src │ ├── index.test.ts │ ├── types.ts │ ├── processor │ │ ├── processor.ts │ │ ├── LFO.ts │ │ ├── insertSorted.ts │ │ ├── logger.ts │ │ ├── SynthProcessor.ts │ │ ├── SynthEventScheduler.test.ts │ │ ├── SampleTable.ts │ │ ├── SynthEventScheduler.ts │ │ ├── WavetableOscillator.ts │ │ ├── AmplitudeEnvelope.ts │ │ ├── SynthEventHandler.ts │ │ └── SynthProcessorCore.ts │ ├── @types │ │ ├── string-to-arraybuffer.d.ts │ │ └── webaudioapi.d.ts │ ├── index.ts │ ├── helper │ │ └── math.ts │ ├── renderer │ │ ├── conversion.ts │ │ ├── FastSleep.ts │ │ ├── message.ts │ │ ├── rendererWorker.ts │ │ └── renderAudio.ts │ ├── soundfont │ │ ├── getInstrumentZones.ts │ │ ├── getPresetZones.ts │ │ └── loader.ts │ └── SynthEvent.ts ├── jest.config.js ├── tsconfig.json ├── rollup.config.js └── package.json ├── example ├── .gitignore ├── public │ ├── midi │ │ ├── song.mid │ │ └── example.mid │ └── index.html ├── tsconfig.json ├── package.json ├── rollup.config.js ├── cert │ ├── localhost+1.pem │ └── localhost+1-key.pem └── src │ ├── midiToSynthEvents.ts │ ├── EventScheduler.ts │ ├── MIDIPlayer.ts │ └── index.ts ├── README.md ├── .vscode └── settings.json ├── package.json ├── .github └── workflows │ ├── npm-publish.yml │ └── node.js.yml ├── .gitignore └── LICENSE /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "semi": false 3 | } 4 | -------------------------------------------------------------------------------- /lib/src/index.test.ts: -------------------------------------------------------------------------------- 1 | describe("wavelet", () => { 2 | it("has a test", () => {}) 3 | }) 4 | -------------------------------------------------------------------------------- /example/.gitignore: -------------------------------------------------------------------------------- 1 | public/js 2 | public/midi-js-soundfonts-with-drums 3 | public/soundfonts 4 | 
-------------------------------------------------------------------------------- /lib/jest.config.js: -------------------------------------------------------------------------------- 1 | export default { 2 | preset: "ts-jest", 3 | testEnvironment: "node", 4 | } 5 | -------------------------------------------------------------------------------- /example/public/midi/song.mid: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ryohey/wavelet/HEAD/example/public/midi/song.mid -------------------------------------------------------------------------------- /example/public/midi/example.mid: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ryohey/wavelet/HEAD/example/public/midi/example.mid -------------------------------------------------------------------------------- /lib/src/types.ts: -------------------------------------------------------------------------------- 1 | export type DistributiveOmit = T extends any 2 | ? 
Omit 3 | : never 4 | -------------------------------------------------------------------------------- /lib/src/processor/processor.ts: -------------------------------------------------------------------------------- 1 | import { SynthProcessor } from "./SynthProcessor" 2 | 3 | registerProcessor("synth-processor", SynthProcessor) 4 | -------------------------------------------------------------------------------- /lib/src/@types/string-to-arraybuffer.d.ts: -------------------------------------------------------------------------------- 1 | declare module "string-to-arraybuffer" { 2 | export default function stringToArrayBuffer(arg: string): ArrayBuffer 3 | } 4 | -------------------------------------------------------------------------------- /lib/src/@types/webaudioapi.d.ts: -------------------------------------------------------------------------------- 1 | class AudioWorkletProcessor { 2 | port: MessagePort 3 | } 4 | declare function registerProcessor(name: string, processorCtor: any): void 5 | -------------------------------------------------------------------------------- /lib/src/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./renderer/conversion" 2 | export * from "./renderer/message" 3 | export * from "./renderer/renderAudio" 4 | export * from "./soundfont/loader" 5 | export * from "./SynthEvent" 6 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Wavelet 2 | 3 | The single AudioWorkletProcessor which plays midi 4 | 5 | ## Features 6 | 7 | - Wavetable synthesizer 8 | - Never stops the UI thread 9 | - Ability to specify playback timing to prevent rhythm disruptions 10 | - Multi-channel support 11 | - Faster offline rendering 12 | -------------------------------------------------------------------------------- /.vscode/settings.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "editor.tabSize": 2, 3 | "editor.formatOnSave": true, 4 | "editor.defaultFormatter": "esbenp.prettier-vscode", 5 | "[javascript]": { 6 | "editor.defaultFormatter": "esbenp.prettier-vscode" 7 | }, 8 | "editor.codeActionsOnSave": { 9 | "source.organizeImports": "explicit" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /lib/src/helper/math.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * This is a custom implementation of Math.max to prevent call stack size exceeded error 3 | * when using Math.max(...arr). 4 | */ 5 | export function max(arr: number[]): number | undefined { 6 | if (arr.length === 0) { 7 | return undefined 8 | } 9 | let max = arr[0] 10 | for (let i = 1; i < arr.length; i++) { 11 | if (arr[i] > max) { 12 | max = arr[i] 13 | } 14 | } 15 | return max 16 | } 17 | -------------------------------------------------------------------------------- /lib/src/processor/LFO.ts: -------------------------------------------------------------------------------- 1 | export class LFO { 2 | // Hz 3 | frequency = 5 4 | private phase = 0 5 | private readonly sampleRate: number 6 | 7 | constructor(sampleRate: number) { 8 | this.sampleRate = sampleRate 9 | } 10 | 11 | getValue(bufferSize: number) { 12 | const phase = this.phase 13 | this.phase += 14 | ((Math.PI * 2 * this.frequency) / this.sampleRate) * bufferSize 15 | return Math.sin(phase) 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /lib/src/processor/insertSorted.ts: -------------------------------------------------------------------------------- 1 | // https://gist.github.com/fmal/763d9c953c5a5f8b8f9099dbc58da55e 2 | export function insertSorted(arr: T[], item: T, prop: keyof T) { 3 | let low = 0 4 | let high = arr.length 5 | let mid 6 | while (low < high) { 7 | mid = (low + 
high) >>> 1 // like (num / 2) but faster 8 | if (arr[mid][prop] < item[prop]) { 9 | low = mid + 1 10 | } else { 11 | high = mid 12 | } 13 | } 14 | arr.splice(low, 0, item) 15 | } 16 | -------------------------------------------------------------------------------- /lib/src/processor/logger.ts: -------------------------------------------------------------------------------- 1 | export class Logger { 2 | enabled = true 3 | 4 | log(...args: any) { 5 | if (this.enabled) { 6 | console.log(...args) 7 | } 8 | } 9 | 10 | warn(...args: any) { 11 | if (this.enabled) { 12 | console.warn(...args) 13 | } 14 | } 15 | 16 | error(...args: any) { 17 | if (this.enabled) { 18 | console.error(...args) 19 | } 20 | } 21 | } 22 | 23 | export const logger = new Logger() 24 | logger.enabled = false 25 | -------------------------------------------------------------------------------- /lib/src/renderer/conversion.ts: -------------------------------------------------------------------------------- 1 | import { AudioData } from "./message" 2 | 3 | export const audioDataToAudioBuffer = (audioData: AudioData): AudioBuffer => { 4 | const audioBuffer = new AudioBuffer({ 5 | length: audioData.length, 6 | sampleRate: audioData.sampleRate, 7 | numberOfChannels: 2, 8 | }) 9 | audioBuffer.copyToChannel(new Float32Array(audioData.leftData), 0) 10 | audioBuffer.copyToChannel(new Float32Array(audioData.rightData), 1) 11 | return audioBuffer 12 | } 13 | -------------------------------------------------------------------------------- /lib/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "incremental": true, 4 | "target": "ESNext", 5 | "module": "ESNext", 6 | "rootDir": "./src", 7 | "declaration": true, 8 | "inlineSourceMap": true, 9 | "inlineSources": true, 10 | "outDir": "./dist", 11 | "esModuleInterop": true, 12 | "forceConsistentCasingInFileNames": true, 13 | "strict": true, 14 | "skipLibCheck": true, 15 | 
"moduleResolution": "node" 16 | }, 17 | "include": ["./src"] 18 | } 19 | -------------------------------------------------------------------------------- /example/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "incremental": true, 4 | "target": "ESNext", 5 | "module": "ESNext", 6 | "rootDir": "./src", 7 | "declaration": false, 8 | "inlineSourceMap": true, 9 | "inlineSources": true, 10 | "outDir": "./public/js", 11 | "esModuleInterop": true, 12 | "forceConsistentCasingInFileNames": true, 13 | "strict": true, 14 | "skipLibCheck": true, 15 | "moduleResolution": "node" 16 | }, 17 | "include": ["./src"] 18 | } 19 | -------------------------------------------------------------------------------- /lib/src/renderer/FastSleep.ts: -------------------------------------------------------------------------------- 1 | // https://stackoverflow.com/a/61339321/1567777 2 | export class FastSleep { 3 | private readonly channel = new MessageChannel() 4 | private promiseResolver: (() => void) | undefined 5 | 6 | constructor() { 7 | this.channel.port2.onmessage = () => { 8 | this.promiseResolver?.() 9 | } 10 | } 11 | 12 | async wait() { 13 | const promise = new Promise((resolve) => { 14 | this.promiseResolver = resolve 15 | }) 16 | this.channel.port1.postMessage(null) 17 | await promise 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /lib/src/processor/SynthProcessor.ts: -------------------------------------------------------------------------------- 1 | import { SynthMessage } from ".." 
2 | import { SynthProcessorCore } from "./SynthProcessorCore" 3 | 4 | export class SynthProcessor extends AudioWorkletProcessor { 5 | private readonly synth = new SynthProcessorCore( 6 | sampleRate, 7 | () => currentFrame 8 | ) 9 | 10 | constructor() { 11 | super() 12 | 13 | this.port.onmessage = (e: MessageEvent) => { 14 | this.synth.addEvent(e.data) 15 | } 16 | } 17 | 18 | process(_inputs: Float32Array[][], outputs: Float32Array[][]) { 19 | this.synth.process(outputs[0]) 20 | return true 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "workspace", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "private": true, 7 | "scripts": { 8 | "start": "concurrently \"npm run start:lib\" \"npm run start:example\"", 9 | "start:lib": "npm start --workspace=lib", 10 | "start:example": "npm start --workspace=example", 11 | "build": "npm run build --workspace=lib", 12 | "publish": "npm publish --workspace=lib", 13 | "test": "npm run test --workspace=lib" 14 | }, 15 | "author": "", 16 | "license": "MIT", 17 | "workspaces": [ 18 | "lib", 19 | "example" 20 | ], 21 | "devDependencies": { 22 | "concurrently": "^9.0.0" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /.github/workflows/npm-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will run tests using node and then publish a package to GitHub Packages when a release is created 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/publishing-nodejs-packages 3 | 4 | name: Node.js Package 5 | 6 | on: 7 | release: 8 | types: [created] 9 | 10 | jobs: 11 | publish-npm: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@v3 15 | - uses: actions/setup-node@v3 16 | with: 17 | 
node-version: 18 18 | registry-url: https://registry.npmjs.org/ 19 | - run: npm ci 20 | - run: npm run build 21 | - run: npm run publish 22 | env: 23 | NODE_AUTH_TOKEN: ${{secrets.npm_token}} 24 | -------------------------------------------------------------------------------- /lib/src/soundfont/getInstrumentZones.ts: -------------------------------------------------------------------------------- 1 | import { 2 | createGeneraterObject, 3 | getInstrumentGenerators, 4 | ParseResult, 5 | } from "@ryohey/sf2parser" 6 | 7 | export function getInstrumentZones(parsed: ParseResult, instrumentID: number) { 8 | const instrumentGenerators = getInstrumentGenerators(parsed, instrumentID) 9 | const zones = instrumentGenerators.map(createGeneraterObject) 10 | 11 | // If the first zone does not have sampleID, it is a global instrument zone. 12 | let globalZone: any | undefined 13 | const firstInstrumentZone = zones[0] 14 | if (firstInstrumentZone.sampleID === undefined) { 15 | globalZone = zones[0] 16 | } 17 | 18 | return { 19 | zones: zones.filter((zone) => zone.sampleID !== undefined), 20 | globalZone, 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /example/public/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 |
17 |
18 | 19 |
20 |
21 | 22 | 23 | 24 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | 6 | # Runtime data 7 | pids 8 | *.pid 9 | *.seed 10 | 11 | # Directory for instrumented libs generated by jscoverage/JSCover 12 | lib-cov 13 | 14 | # Coverage directory used by tools like istanbul 15 | coverage 16 | 17 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 18 | .grunt 19 | 20 | # node-waf configuration 21 | .lock-wscript 22 | 23 | # Compiled binary addons (http://nodejs.org/api/addons.html) 24 | build/Release 25 | 26 | # Dependency directories 27 | node_modules 28 | jspm_packages 29 | 30 | # Optional npm cache directory 31 | .npm 32 | 33 | # Optional REPL history 34 | .node_repl_history 35 | 36 | static/bundle.js 37 | esdoc 38 | pkg 39 | build 40 | dist 41 | 42 | **/tsconfig.tsbuildinfo 43 | .rollup.cache 44 | -------------------------------------------------------------------------------- /example/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "wavelet-example", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "type": "module", 7 | "scripts": { 8 | "start": "rollup --config --watch" 9 | }, 10 | "keywords": [], 11 | "author": "", 12 | "license": "ISC", 13 | "private": true, 14 | "dependencies": { 15 | "midifile-ts": "^1.5.1", 16 | "wav-encoder": "^1.3.0" 17 | }, 18 | "devDependencies": { 19 | "@guanghechen/rollup-plugin-copy": "^5.0.12", 20 | "@rollup/plugin-commonjs": "^26.0.1", 21 | "@rollup/plugin-node-resolve": "^15.2.3", 22 | "@rollup/plugin-typescript": "^11.1.6", 23 | "@types/wav-encoder": "^1.3.3", 24 | "rollup": "^4.21.2", 25 | "rollup-plugin-serve": "^2.0.2", 26 | "tslib": "^2.7.0", 27 | "typescript": "^5.6.2" 28 | } 29 | } 30 | 
-------------------------------------------------------------------------------- /lib/rollup.config.js: -------------------------------------------------------------------------------- 1 | import commonjs from "@rollup/plugin-commonjs" 2 | import { nodeResolve } from "@rollup/plugin-node-resolve" 3 | import rollupTypescript from "@rollup/plugin-typescript" 4 | 5 | const output = { 6 | dir: "dist", 7 | sourcemap: true, 8 | } 9 | 10 | const plugins = [ 11 | nodeResolve({ preferBuiltins: false, browser: true }), 12 | commonjs(), 13 | rollupTypescript(), 14 | ] 15 | 16 | export default [ 17 | { 18 | input: "src/index.ts", 19 | output: { 20 | ...output, 21 | format: "esm", 22 | }, 23 | plugins, 24 | }, 25 | { 26 | input: "src/processor/processor.ts", 27 | output: { 28 | ...output, 29 | format: "iife", 30 | }, 31 | plugins, 32 | }, 33 | { 34 | input: "src/renderer/rendererWorker.ts", 35 | output: { 36 | ...output, 37 | format: "iife", 38 | }, 39 | plugins, 40 | }, 41 | ] 42 | -------------------------------------------------------------------------------- /lib/src/renderer/message.ts: -------------------------------------------------------------------------------- 1 | import { 2 | LoadSampleEvent, 3 | SampleParameterEvent, 4 | SynthEvent, 5 | } from "../SynthEvent" 6 | 7 | export type InMessage = StartMessage | CancelMessage 8 | export type OutMessage = ProgressMessage | CompleteMessage 9 | 10 | export interface StartMessage { 11 | type: "start" 12 | samples: (LoadSampleEvent | SampleParameterEvent)[] 13 | events: SynthEvent[] 14 | sampleRate: number 15 | bufferSize?: number 16 | } 17 | 18 | export interface CancelMessage { 19 | type: "cancel" 20 | } 21 | 22 | export interface ProgressMessage { 23 | type: "progress" 24 | numBytes: number 25 | totalBytes: number 26 | } 27 | 28 | export interface AudioData { 29 | length: number 30 | sampleRate: number 31 | leftData: ArrayBuffer // Float32Array PCM 32 | rightData: ArrayBuffer // Float32Array PCM 33 | } 34 | 35 | export type 
CompleteMessage = { 36 | type: "complete" 37 | audioData: AudioData 38 | } 39 | -------------------------------------------------------------------------------- /lib/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@ryohey/wavelet", 3 | "version": "0.7.5", 4 | "description": "A wavetable synthesizer that never stops the UI thread created by AudioWorklet.", 5 | "main": "dist/index.js", 6 | "types": "dist/index.d.ts", 7 | "type": "module", 8 | "scripts": { 9 | "start": "rollup --config --watch", 10 | "build": "rollup --config", 11 | "test": "jest --roots ./src" 12 | }, 13 | "author": "ryohey", 14 | "license": "MIT", 15 | "dependencies": { 16 | "@ryohey/sf2parser": "^1.2.1", 17 | "midifile-ts": "^1.5.1" 18 | }, 19 | "devDependencies": { 20 | "@rollup/plugin-commonjs": "^26.0.1", 21 | "@rollup/plugin-node-resolve": "^15.2.3", 22 | "@rollup/plugin-typescript": "^11.1.6", 23 | "@types/audioworklet": "^0.0.60", 24 | "@types/jest": "^29.5.12", 25 | "jest": "^29.7.0", 26 | "rollup": "^4.21.2", 27 | "ts-jest": "^29.2.5", 28 | "tslib": "^2.7.0", 29 | "typescript": "^5.6.2" 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /.github/workflows/node.js.yml: -------------------------------------------------------------------------------- 1 | # This workflow will do a clean installation of node dependencies, cache/restore them, build the source code and run tests across different versions of node 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions 3 | 4 | name: Node.js CI 5 | 6 | on: 7 | push: 8 | branches: ["main"] 9 | pull_request: 10 | branches: ["main"] 11 | 12 | jobs: 13 | build: 14 | runs-on: ubuntu-latest 15 | 16 | strategy: 17 | matrix: 18 | node-version: [18.x] 19 | # See supported Node.js release schedule at https://nodejs.org/en/about/releases/ 20 | 21 | steps: 22 | - uses: 
actions/checkout@v3 23 | - name: Use Node.js ${{ matrix.node-version }} 24 | uses: actions/setup-node@v3 25 | with: 26 | node-version: ${{ matrix.node-version }} 27 | cache: "npm" 28 | - run: npm ci 29 | - run: npm run build --if-present 30 | - run: npm test 31 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2016 ryohey 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /lib/src/soundfont/getPresetZones.ts: -------------------------------------------------------------------------------- 1 | import { GeneratorParams } from "@ryohey/sf2parser" 2 | import { GeneratorList } from "@ryohey/sf2parser/bin/Structs" 3 | 4 | export function getPresetZones(generators: GeneratorList[]) { 5 | let globalZone: Partial = {} 6 | const zones: (Partial & { instrument: number })[] = [] 7 | let params: Partial = {} 8 | let zoneCount = 0 9 | 10 | for (const gen of generators) { 11 | const type = gen.type 12 | 13 | if (type === undefined) { 14 | continue 15 | } 16 | 17 | // keyRange or velRange must be the first of zone 18 | if (type === "keyRange" || type === "velRange") { 19 | if (zoneCount === 1 && zones.length === 0) { 20 | // treat previous zone as global zone if it is the first zone and not ended with instrument 21 | globalZone = params 22 | } 23 | params = {} 24 | zoneCount++ 25 | } 26 | 27 | // instrument must be the last of zone 28 | if (type === "instrument") { 29 | zones.push({ ...params, instrument: gen.value as number }) 30 | } 31 | 32 | params[type] = gen.value 33 | } 34 | 35 | return { zones, globalZone } 36 | } 37 | -------------------------------------------------------------------------------- /example/rollup.config.js: -------------------------------------------------------------------------------- 1 | import { copy } from "@guanghechen/rollup-plugin-copy" 2 | import commonjs from "@rollup/plugin-commonjs" 3 | import { nodeResolve } from "@rollup/plugin-node-resolve" 4 | import rollupTypescript from "@rollup/plugin-typescript" 5 | import fs from "fs" 6 | import serve from "rollup-plugin-serve" 7 | 8 | const plugins = [ 9 | nodeResolve({ preferBuiltins: false, browser: true }), 10 | commonjs(), 11 | rollupTypescript(), 12 | ] 13 | 14 | export default { 15 | input: "src/index.ts", 16 | output: { 17 | dir: "public/js", 18 | sourcemap: true, 19 | format: 
"iife", 20 | }, 21 | plugins: [ 22 | ...plugins, 23 | copy({ 24 | targets: [ 25 | { 26 | src: "../node_modules/@ryohey/wavelet/dist/processor.*", 27 | dest: "public/js", 28 | }, 29 | { 30 | src: "../node_modules/@ryohey/wavelet/dist/rendererWorker.*", 31 | dest: "public/js", 32 | }, 33 | ], 34 | }), 35 | serve({ 36 | contentBase: "public", 37 | open: true, 38 | https: { 39 | key: fs.readFileSync("./cert/localhost+1-key.pem"), 40 | cert: fs.readFileSync("./cert/localhost+1.pem"), 41 | }, 42 | }), 43 | ], 44 | } 45 | -------------------------------------------------------------------------------- /lib/src/renderer/rendererWorker.ts: -------------------------------------------------------------------------------- 1 | import { InMessage } from ".." 2 | import { FastSleep } from "./FastSleep" 3 | import { CompleteMessage, ProgressMessage } from "./message" 4 | import { renderAudio } from "./renderAudio" 5 | 6 | declare global { 7 | function postMessage( 8 | message: ProgressMessage | CompleteMessage, 9 | transfer?: Transferable[] | undefined 10 | ): void 11 | } 12 | 13 | let cancelled: boolean = false 14 | 15 | const fastSleep = new FastSleep() 16 | 17 | onmessage = async (e: MessageEvent) => { 18 | switch (e.data.type) { 19 | case "cancel": { 20 | cancelled = true 21 | break 22 | } 23 | case "start": { 24 | const { samples, events, sampleRate, bufferSize } = e.data 25 | 26 | try { 27 | const audioData = await renderAudio(samples, events, { 28 | sampleRate, 29 | bufferSize, 30 | cancel: () => cancelled, 31 | waitForEventLoop: async () => await fastSleep.wait(), 32 | onProgress: (numBytes, totalBytes) => 33 | postMessage({ 34 | type: "progress", 35 | numBytes, 36 | totalBytes, 37 | }), 38 | }) 39 | postMessage({ type: "complete", audioData }, [ 40 | audioData.leftData, 41 | audioData.rightData, 42 | ]) 43 | } catch (e) { 44 | console.error((e as Error).message) 45 | } 46 | close() 47 | break 48 | } 49 | } 50 | } 51 | 
-------------------------------------------------------------------------------- /example/cert/localhost+1.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIEbTCCAtWgAwIBAgIQZvFN8mrBF12AYt5P1YvF0TANBgkqhkiG9w0BAQsFADCB 3 | mzEeMBwGA1UEChMVbWtjZXJ0IGRldmVsb3BtZW50IENBMTgwNgYDVQQLDC9ERVNL 4 | VE9QLUEwTzVQSzZcd2FyaW5zaWRlQERFU0tUT1AtQTBPNVBLNiAociBrKTE/MD0G 5 | A1UEAww2bWtjZXJ0IERFU0tUT1AtQTBPNVBLNlx3YXJpbnNpZGVAREVTS1RPUC1B 6 | ME81UEs2IChyIGspMB4XDTIxMDkxMTA1MzM1M1oXDTIzMTIxMTA1MzM1M1owYzEn 7 | MCUGA1UEChMebWtjZXJ0IGRldmVsb3BtZW50IGNlcnRpZmljYXRlMTgwNgYDVQQL 8 | DC9ERVNLVE9QLUEwTzVQSzZcd2FyaW5zaWRlQERFU0tUT1AtQTBPNVBLNiAociBr 9 | KTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMCOwTPutedyCLJcvWXz 10 | qxjx/J4xJUiqJpgxcAf1Ao+tsy8d43u2dWu+JnTdOauNqvel46VMvf7GGNFrNx/h 11 | PfYlJf8BgL6b4IMLULEQJEp8EmISR9Hk4NGMMiwVqn9Za+taHxUWG7d/djMvYzVu 12 | 7vVN8K0gqjuZpHeZO9NVQqDTTaugz5XIFTPn6SyAqa6OVP5Iv+3qx0BlK4gYCwow 13 | 2yeoQWOe+03KfoWhYiNx1lqRbEomZZvQ05GEdr97PHIHJsW7GPf0VP9FMmY8SeP4 14 | Ulkcq/jFJ5GT/708QoImYqPp1hZPOP9wB8bDYm7+eI29+8EF+kXT3emO/kKg410m 15 | FJkCAwEAAaNkMGIwDgYDVR0PAQH/BAQDAgWgMBMGA1UdJQQMMAoGCCsGAQUFBwMB 16 | MB8GA1UdIwQYMBaAFAL059dlwqqwlCusQktazagw0m/UMBoGA1UdEQQTMBGCCWxv 17 | Y2FsaG9zdIcEfwAAATANBgkqhkiG9w0BAQsFAAOCAYEAL+4O4eWa4X2d3RbrD7aI 18 | 7L3tEB8XvKj/GnPI6Y8Kb7qVN/lM9yOg7cx7BcA0Egi6PHM1QH6b8fTT7PbYNY68 19 | lY593MOSCUVkdyY9KlUshW5l2H2kv592C1YY1SOHjXzbUNPMUA/W6sXwjx9fUfAC 20 | FEBgqlqtqzLHAlpjet0vuRzZFuCXFhSIQ30U8KLCD7DtLr8TtPqvtHoYHJLRO1dD 21 | tfBOAFIN0Zgp4NnCojorFcncmgNf86dn2EA3/XNbNl1rDI7j/Xbl2UQ3KW4dj0tk 22 | uFzySfUZHHfEo1kHsk4EzI3Pg1UGGts8vUQNfaFuWN5yTMgH2lQ/yZGOAXV6OS1b 23 | ROHJdFJrKyp8O+BLwFaqjnnBoPc6GJlVcp5VqLnUsPUUgUiXdhs5CgGyXkxLm4fj 24 | BNd7V5w/1iwaWylmrbL8JZG6YudHSk/VDjomCVoUySO6O3xA20NRbAOhXxPWsIkL 25 | qPvqVKc9+vGMDl4RbVhTnRJvwsFbtPlvySHbwQ9YOPCO 26 | -----END CERTIFICATE----- 27 | -------------------------------------------------------------------------------- 
/lib/src/processor/SynthEventScheduler.test.ts: -------------------------------------------------------------------------------- 1 | import { ImmediateEvent, MIDIEventBody } from "../SynthEvent" 2 | import { SynthEventScheduler } from "./SynthEventScheduler" 3 | 4 | describe("SynthEventScheduler", () => { 5 | it("should schedules events", () => { 6 | let currentFrame = 0 7 | let onImmediateEvent = jest.fn((_e: ImmediateEvent) => {}) 8 | let onDelayableEvent = jest.fn((_e: MIDIEventBody) => {}) 9 | const scheduler = new SynthEventScheduler( 10 | () => currentFrame, 11 | (e) => onImmediateEvent(e), 12 | (e) => onDelayableEvent(e) 13 | ) 14 | scheduler.addEvent({ 15 | type: "midi", 16 | midi: { 17 | type: "channel", 18 | subtype: "noteOn", 19 | channel: 1, 20 | noteNumber: 60, 21 | velocity: 100, 22 | }, 23 | delayTime: 10, 24 | sequenceNumber: 0, 25 | }) 26 | scheduler.addEvent({ 27 | type: "midi", 28 | midi: { 29 | type: "channel", 30 | subtype: "noteOff", 31 | channel: 1, 32 | noteNumber: 60, 33 | velocity: 0, 34 | }, 35 | delayTime: 100, 36 | sequenceNumber: 1, 37 | }) 38 | scheduler.addEvent({ 39 | type: "midi", 40 | midi: { 41 | type: "channel", 42 | subtype: "noteOn", 43 | channel: 1, 44 | noteNumber: 60, 45 | velocity: 100, 46 | }, 47 | delayTime: 101, // This event should be ignored in first process 48 | sequenceNumber: 2, 49 | }) 50 | currentFrame = 100 51 | scheduler.processScheduledEvents() 52 | expect(onDelayableEvent.mock.calls.length).toBe(2) 53 | expect(onDelayableEvent.mock.calls[0][0].subtype).toBe("noteOn") 54 | expect(onDelayableEvent.mock.calls[1][0].subtype).toBe("noteOff") 55 | }) 56 | }) 57 | -------------------------------------------------------------------------------- /example/cert/localhost+1-key.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN PRIVATE KEY----- 2 | MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDAjsEz7rXncgiy 3 | 
XL1l86sY8fyeMSVIqiaYMXAH9QKPrbMvHeN7tnVrviZ03Tmrjar3peOlTL3+xhjR 4 | azcf4T32JSX/AYC+m+CDC1CxECRKfBJiEkfR5ODRjDIsFap/WWvrWh8VFhu3f3Yz 5 | L2M1bu71TfCtIKo7maR3mTvTVUKg002roM+VyBUz5+ksgKmujlT+SL/t6sdAZSuI 6 | GAsKMNsnqEFjnvtNyn6FoWIjcdZakWxKJmWb0NORhHa/ezxyBybFuxj39FT/RTJm 7 | PEnj+FJZHKv4xSeRk/+9PEKCJmKj6dYWTzj/cAfGw2Ju/niNvfvBBfpF093pjv5C 8 | oONdJhSZAgMBAAECggEAPjii054qJ2ygxMnU5bHUMs5W++MTkiRxLjKjad7RAg51 9 | 3R7CWEKANN2P3alyri2r9jQjmFFw/3eRo3IKsQF+nSAp4/nfWThTVuT8IQr/PxZS 10 | dmBD1t4ni27K85XYryf9CGLpsk2fjS7hye5hF24ORzP1Nmxctb+S9paMT2C4k/DD 11 | QYpw9cP3erRg3teuzfJWkDiXIN+onFxbnfvFOv/E/psEAQ3CxBqhBPlnxQS9l2gR 12 | 3Ug98TwqTBSB6Y6rnJRzD629Gu3KhNq7OSSYYvznK5Db0DIB2neE1iy9dTRemPnx 13 | 2Nbqm8pUthwl/ZW/ur6cKUAAP0kmSaJ7pWTyvmblgQKBgQDGlNBT6PcsqK7/gSJo 14 | 4vr1Fct20THG4NLsg+UouyNI0HZh91WbIcT3NOnnVODoVKskrcCioLoRoV2WI/UB 15 | yR7/iJRS/7Gq1CuDUWb43wZly5pUkxRzto2emAQj1Pgq3iv83KnFQcpqNpBVUiD7 16 | ozQp9CI6uMrZzOfnRSr5j7STcQKBgQD4PBAfmxtuei3bWpXmsP28ZzyCvrZZI7uv 17 | pEJRjP6z5/SczUzlR/EDQ64aIRzogkVgmxpNTgPqyE9sLnptuyXVZcibGHc1WToz 18 | hoQ7X6qWXhjsoVNOOJkhmXGqe04cJVTirlo1E74Z1CB7YEAHwnwZoeaSKrnderqV 19 | 1kxpWE8vqQKBgQCsB2LqLhlioMbpJS9v+aI6CyV1ywf6J+3RYvl0bZnFjIHhoGiS 20 | uZBuL3KqKoej/SU94x/MQryuZUIzvPaVE2w9Jk6IEGYTfTYszqoL0vkx2k3aEohX 21 | LcBFsZ90gxxx1oH4zhhsf/AgTyB+KWXVWK7p4Up94HQqdi431V3DUFC4oQKBgBin 22 | dn59HDtbvSQ1GgBAgeLtafAeWuP9jY2DRebtlvptRjkwlruK6qAFX8xOj510Gsne 23 | fb1By1fNLz4yW4x8cuR/6G2m/2f4BbTKXmW7OakXjWgrGagCLQtOPyDMgFSBTK36 24 | 3RC6dj4AeH1ocNOUeFjfYJQR1EXwA4oQL6Si1bKJAoGAPFaprxZdIcp+Y19nM7fZ 25 | WZFejWlcl9CMctx0KuS0rOTHwoEBPz3U+mkV6ZoukMEIDLIPbJ1946ijk4xkXIUq 26 | +grjq4EAn+awKWHXTwyDyg/gwnPTDyGM56UX2f2qdcAgh1fRQiX6QMGEVjO11Xe7 27 | igW4Lz4G8zsm6zQGYFN4fIQ= 28 | -----END PRIVATE KEY----- 29 | -------------------------------------------------------------------------------- /example/src/midiToSynthEvents.ts: -------------------------------------------------------------------------------- 1 | import { SynthEvent } from "@ryohey/wavelet" 2 | import { 
AnyEvent, MidiFile } from "midifile-ts" 3 | 4 | interface Tick { 5 | tick: number 6 | track: number 7 | } 8 | 9 | function addTick(events: AnyEvent[], track: number): (AnyEvent & Tick)[] { 10 | let tick = 0 11 | return events.map((e) => { 12 | tick += e.deltaTime 13 | return { ...e, tick, track } 14 | }) 15 | } 16 | 17 | const tickToMillisec = (tick: number, bpm: number, timebase: number) => 18 | (tick / (timebase / 60) / bpm) * 1000 19 | 20 | interface Keyframe { 21 | tick: number 22 | bpm: number 23 | timestamp: number 24 | } 25 | 26 | export const midiToSynthEvents = ( 27 | midi: MidiFile, 28 | sampleRate: number 29 | ): SynthEvent[] => { 30 | const events = midi.tracks.flatMap(addTick).sort((a, b) => a.tick - b.tick) 31 | let keyframe: Keyframe = { 32 | tick: 0, 33 | bpm: 120, 34 | timestamp: 0, 35 | } 36 | 37 | const synthEvents: SynthEvent[] = [] 38 | 39 | // channel イベントを MIDI Output に送信 40 | // Send Channel Event to MIDI OUTPUT 41 | for (const e of events) { 42 | const timestamp = 43 | tickToMillisec( 44 | e.tick - keyframe.tick, 45 | keyframe.bpm, 46 | midi.header.ticksPerBeat 47 | ) + keyframe.timestamp 48 | const delayTime = (timestamp * sampleRate) / 1000 49 | 50 | switch (e.type) { 51 | case "channel": 52 | synthEvents.push({ 53 | type: "midi", 54 | midi: e, 55 | delayTime, 56 | }) 57 | case "meta": 58 | switch (e.subtype) { 59 | case "setTempo": 60 | keyframe = { 61 | tick: e.tick, 62 | bpm: (60 * 1000000) / e.microsecondsPerBeat, 63 | timestamp, 64 | } 65 | break 66 | } 67 | } 68 | } 69 | 70 | return synthEvents 71 | } 72 | -------------------------------------------------------------------------------- /lib/src/SynthEvent.ts: -------------------------------------------------------------------------------- 1 | import { AnyChannelEvent } from "midifile-ts" 2 | import { AmplitudeEnvelopeParameter } from "./processor/AmplitudeEnvelope" 3 | import { DistributiveOmit } from "./types" 4 | 5 | export type SampleLoop = 6 | | { 7 | type: "no_loop" 8 | } 9 | | 
{ 10 | type: "loop_continuous" | "loop_sustain" 11 | start: number 12 | end: number 13 | } 14 | 15 | export interface SampleParameter { 16 | name: string 17 | sampleID: number 18 | pitch: number 19 | loop: SampleLoop 20 | sampleStart: number 21 | sampleEnd: number 22 | sampleRate: number 23 | amplitudeEnvelope: AmplitudeEnvelopeParameter 24 | // This parameter represents the degree to which MIDI key number influences pitch. 25 | // A value of zero indicates that MIDI key number has no effect on pitch 26 | // a value of 1 represents the usual tempered semitone scale. 27 | scaleTuning: number 28 | pan: number 29 | exclusiveClass?: number | undefined 30 | volume: number // 0 to 1 31 | } 32 | 33 | export interface SampleRange { 34 | bank: number 35 | instrument: number // GM Patch Number 36 | keyRange: [number, number] 37 | velRange: [number, number] 38 | } 39 | 40 | export interface LoadSampleEvent { 41 | type: "loadSample" 42 | data: ArrayBuffer 43 | sampleID: number 44 | } 45 | 46 | export interface SampleParameterEvent { 47 | type: "sampleParameter" 48 | parameter: SampleParameter 49 | range: SampleRange 50 | } 51 | 52 | export type MIDIEventBody = DistributiveOmit 53 | 54 | export type MIDIEvent = { 55 | type: "midi" 56 | midi: MIDIEventBody 57 | // Time to delay the playback of an event. Number of frames 58 | // delayInSeconds = delayTime / sampleRate 59 | delayTime: number 60 | } 61 | 62 | export type ImmediateEvent = LoadSampleEvent | SampleParameterEvent 63 | export type SynthEvent = ImmediateEvent | MIDIEvent 64 | 65 | // the type to be sent by postMessage 66 | export type SynthMessage = SynthEvent & { 67 | // A number assigned to each message to ensure the order in which they are sent is preserved upon reception. 
68 | sequenceNumber: number 69 | } 70 | 71 | export const DrumInstrumentNumber = 128 72 | -------------------------------------------------------------------------------- /lib/src/processor/SampleTable.ts: -------------------------------------------------------------------------------- 1 | import { SampleParameter, SampleRange } from "../SynthEvent" 2 | 3 | export type SampleTableItem = SampleParameter & { 4 | velRange: [number, number] 5 | } 6 | 7 | export type Sample = SampleParameter & { 8 | buffer: Float32Array 9 | } 10 | 11 | export class SampleTable { 12 | private samples: { 13 | [sampleID: number]: Float32Array 14 | } = {} 15 | 16 | private sampleParameters: { 17 | [bank: number]: { 18 | [instrument: number]: { [pitch: number]: SampleTableItem[] } 19 | } 20 | } = {} 21 | 22 | addSample(data: Float32Array, sampleID: number) { 23 | this.samples[sampleID] = data 24 | } 25 | 26 | addSampleParameter(parameter: SampleParameter, range: SampleRange) { 27 | const { bank, instrument, keyRange, velRange } = range 28 | for (let i = keyRange[0]; i <= keyRange[1]; i++) { 29 | if (this.sampleParameters[bank] === undefined) { 30 | this.sampleParameters[bank] = {} 31 | } 32 | if (this.sampleParameters[bank][instrument] === undefined) { 33 | this.sampleParameters[bank][instrument] = {} 34 | } 35 | if (this.sampleParameters[bank][instrument][i] === undefined) { 36 | this.sampleParameters[bank][instrument][i] = [] 37 | } 38 | this.sampleParameters[bank][instrument][i].push({ 39 | ...parameter, 40 | velRange, 41 | }) 42 | } 43 | } 44 | 45 | getSamples( 46 | bank: number, 47 | instrument: number, 48 | pitch: number, 49 | velocity: number 50 | ): Sample[] { 51 | const instrumentParameters = 52 | this.sampleParameters[bank]?.[instrument] ?? 53 | this.sampleParameters[0]?.[instrument] ?? // fallback to bank 0 54 | null 55 | 56 | const parameters = 57 | instrumentParameters?.[pitch]?.filter( 58 | (s) => velocity >= s.velRange[0] && velocity <= s.velRange[1] 59 | ) ?? 
[] 60 | 61 | const samples: Sample[] = [] 62 | 63 | for (const parameter of parameters) { 64 | const buffer = this.samples[parameter.sampleID] 65 | if (buffer === undefined) { 66 | console.warn(`sample not found: ${parameter.sampleID}`) 67 | continue 68 | } 69 | samples.push({ 70 | ...parameter, 71 | buffer, 72 | }) 73 | } 74 | 75 | return samples 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /example/src/EventScheduler.ts: -------------------------------------------------------------------------------- 1 | export interface SchedulableEvent { 2 | tick: number 3 | } 4 | 5 | /** 6 | * Player でイベントを随時読み取るためのクラス 7 | * 精確にスケジューリングするために先読みを行う 8 | * https://www.html5rocks.com/ja/tutorials/audio/scheduling/ 9 | */ 10 | /** 11 | * Player Classes for reading events at any time 12 | * Perform prefetching for accurate scheduling 13 | * https://www.html5rocks.com/ja/tutorials/audio/scheduling/ 14 | */ 15 | export default class EventScheduler { 16 | // 先読み時間 (ms) 17 | // Leading time (MS) 18 | lookAheadTime = 100 19 | 20 | // 1/4 拍子ごとの tick 数 21 | // 1/4 TICK number for each beat 22 | timebase = 480 23 | 24 | private _currentTick = 0 25 | private _scheduledTick = 0 26 | private _prevTime: number | undefined = undefined 27 | private _events: E[] 28 | 29 | constructor(events: E[] = [], tick = 0, timebase = 480, lookAheadTime = 100) { 30 | this._events = events 31 | this._currentTick = tick 32 | this._scheduledTick = tick 33 | this.timebase = timebase 34 | this.lookAheadTime = lookAheadTime 35 | } 36 | 37 | get currentTick() { 38 | return this._currentTick 39 | } 40 | 41 | millisecToTick(ms: number, bpm: number) { 42 | return (((ms / 1000) * bpm) / 60) * this.timebase 43 | } 44 | 45 | tickToMillisec(tick: number, bpm: number) { 46 | return (tick / (this.timebase / 60) / bpm) * 1000 47 | } 48 | 49 | seek(tick: number) { 50 | this._currentTick = this._scheduledTick = Math.max(0, tick) 51 | } 52 | 53 | readNextEvents(bpm: number, 
timestamp: number) { 54 | if (this._prevTime === undefined) { 55 | this._prevTime = timestamp 56 | } 57 | const delta = timestamp - this._prevTime 58 | const nowTick = Math.floor( 59 | this._currentTick + Math.max(0, this.millisecToTick(delta, bpm)) 60 | ) 61 | 62 | // 先読み時間 63 | // Leading time 64 | const lookAheadTick = Math.floor( 65 | this.millisecToTick(this.lookAheadTime, bpm) 66 | ) 67 | 68 | // 前回スケジュール済みの時点から、 69 | // From the previous scheduled point, 70 | // 先読み時間までを処理の対象とする 71 | // Target of processing up to read time 72 | const startTick = this._scheduledTick 73 | const endTick = nowTick + lookAheadTick 74 | 75 | this._prevTime = timestamp 76 | this._currentTick = nowTick 77 | this._scheduledTick = endTick 78 | 79 | return this._events 80 | .filter((e) => e && e.tick >= startTick && e.tick < endTick) 81 | .map((e) => { 82 | const waitTick = e.tick - nowTick 83 | const delayedTime = 84 | timestamp + Math.max(0, this.tickToMillisec(waitTick, bpm)) 85 | return { event: e, timestamp: delayedTime } 86 | }) 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /lib/src/processor/SynthEventScheduler.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ImmediateEvent, 3 | MIDIEvent, 4 | MIDIEventBody, 5 | SynthEvent, 6 | } from "../SynthEvent" 7 | import { insertSorted } from "./insertSorted" 8 | import { logger } from "./logger" 9 | 10 | type DelayedEvent = MIDIEvent & { 11 | scheduledFrame: number 12 | sequenceNumber: number 13 | } 14 | 15 | export class SynthEventScheduler { 16 | private scheduledEvents: DelayedEvent[] = [] 17 | private currentEvents: DelayedEvent[] = [] 18 | 19 | constructor( 20 | private readonly getCurrentFrame: () => number, 21 | private readonly onImmediateEvent: (e: ImmediateEvent) => void, 22 | private readonly onDelayableEvent: (e: MIDIEventBody) => void 23 | ) {} 24 | 25 | private get currentFrame(): number { 26 | return 
this.getCurrentFrame() 27 | } 28 | 29 | addEvent(e: SynthEvent & { sequenceNumber: number }) { 30 | logger.log(e) 31 | 32 | if ("delayTime" in e) { 33 | // handle in process 34 | insertSorted( 35 | this.scheduledEvents, 36 | { 37 | ...e, 38 | scheduledFrame: this.currentFrame + e.delayTime, 39 | }, 40 | "scheduledFrame" 41 | ) 42 | } else { 43 | this.onImmediateEvent(e) 44 | } 45 | } 46 | 47 | processScheduledEvents() { 48 | if (this.scheduledEvents.length === 0) { 49 | return 50 | } 51 | 52 | while (true) { 53 | const e = this.scheduledEvents[0] 54 | if (e === undefined || e.scheduledFrame > this.currentFrame) { 55 | // scheduledEvents are sorted by scheduledFrame, 56 | // so we can break early instead of iterating through all scheduledEvents, 57 | break 58 | } 59 | this.scheduledEvents.shift() 60 | this.currentEvents.push(e) 61 | } 62 | 63 | this.currentEvents.sort(sortEvents) 64 | 65 | while (true) { 66 | const e = this.currentEvents.shift() 67 | if (e === undefined) { 68 | break 69 | } 70 | this.onDelayableEvent(e.midi) 71 | } 72 | } 73 | 74 | removeScheduledEvents(channel: number) { 75 | this.scheduledEvents = this.scheduledEvents.filter( 76 | (e) => e.midi.channel !== channel 77 | ) 78 | this.currentEvents = this.currentEvents.filter( 79 | (e) => e.midi.channel !== channel 80 | ) 81 | } 82 | } 83 | 84 | function sortEvents< 85 | T extends { scheduledFrame: number; sequenceNumber: number } 86 | >(a: T, b: T): number { 87 | // First, compare by scheduledFrame. 88 | if (a.scheduledFrame < b.scheduledFrame) { 89 | return -1 90 | } else if (a.scheduledFrame > b.scheduledFrame) { 91 | return 1 92 | } 93 | 94 | // If scheduledFrame is the same, compare by sequenceNumber. 95 | if (a.sequenceNumber < b.sequenceNumber) { 96 | return -1 97 | } else if (a.sequenceNumber > b.sequenceNumber) { 98 | return 1 99 | } 100 | 101 | // If both fields are the same. 
102 | return 0 103 | } 104 | -------------------------------------------------------------------------------- /lib/src/renderer/renderAudio.ts: -------------------------------------------------------------------------------- 1 | import { 2 | AudioData, 3 | LoadSampleEvent, 4 | SampleParameterEvent, 5 | SynthEvent, 6 | } from ".." 7 | import { max } from "../helper/math" 8 | import { SynthProcessorCore } from "../processor/SynthProcessorCore" 9 | 10 | // returns in frame unit 11 | const getSongLength = (events: SynthEvent[]) => 12 | max(events.map((e) => (e.type === "midi" ? e.delayTime : 0))) ?? 0 13 | 14 | // Maximum time to wait for the note release sound to become silent 15 | const silentTimeoutSec = 5 16 | 17 | export interface RenderAudioOptions { 18 | sampleRate?: number 19 | onProgress?: (numFrames: number, totalFrames: number) => void 20 | cancel?: () => boolean 21 | bufferSize?: number 22 | waitForEventLoop?: () => Promise 23 | } 24 | 25 | const isArrayZero = (arr: ArrayLike) => { 26 | for (let i = 0; i < arr.length; i++) { 27 | if (arr[i] !== 0) { 28 | return false 29 | } 30 | } 31 | return true 32 | } 33 | 34 | export const renderAudio = async ( 35 | samples: (LoadSampleEvent | SampleParameterEvent)[], 36 | events: SynthEvent[], 37 | options?: RenderAudioOptions 38 | ): Promise => { 39 | let currentFrame = 0 40 | const sampleRate = options?.sampleRate ?? 44100 41 | const bufSize = options?.bufferSize ?? 
500 42 | 43 | const synth = new SynthProcessorCore(sampleRate, () => currentFrame) 44 | 45 | let sequenceNumber = 0 46 | samples.forEach((e) => 47 | synth.addEvent({ ...e, sequenceNumber: sequenceNumber++ }) 48 | ) 49 | events.forEach((e) => 50 | synth.addEvent({ ...e, sequenceNumber: sequenceNumber++ }) 51 | ) 52 | 53 | const songLengthFrame = getSongLength(events) 54 | const iterCount = Math.ceil(songLengthFrame / bufSize) 55 | const additionalIterCount = Math.ceil( 56 | (silentTimeoutSec * sampleRate) / bufSize 57 | ) 58 | const allIterCount = iterCount + additionalIterCount 59 | const audioBufferSize = allIterCount * bufSize 60 | 61 | const leftData = new Float32Array(audioBufferSize) 62 | const rightData = new Float32Array(audioBufferSize) 63 | 64 | const buffer = [new Float32Array(bufSize), new Float32Array(bufSize)] 65 | 66 | for (let i = 0; i < allIterCount; i++) { 67 | buffer[0].fill(0) 68 | buffer[1].fill(0) 69 | synth.process(buffer) 70 | const offset = i * bufSize 71 | leftData.set(buffer[0], offset) 72 | rightData.set(buffer[1], offset) 73 | currentFrame += bufSize 74 | 75 | // Wait for silence after playback is complete. 
76 | if (i > iterCount && isArrayZero(buffer[0]) && isArrayZero(buffer[1])) { 77 | console.log(`early break ${i} in ${iterCount + additionalIterCount}`) 78 | break 79 | } 80 | 81 | // give a chance to terminate the loop or update progress 82 | if (i % 1000 === 0) { 83 | await options?.waitForEventLoop?.() 84 | 85 | options?.onProgress?.(offset, audioBufferSize) 86 | 87 | if (options?.cancel?.()) { 88 | throw new Error("renderAudio cancelled") 89 | } 90 | } 91 | } 92 | 93 | // slice() to delete silent parts 94 | const trimmedLeft = leftData.slice(0, currentFrame) 95 | const trimmedRight = rightData.slice(0, currentFrame) 96 | 97 | return { 98 | length: trimmedLeft.length, 99 | leftData: trimmedLeft.buffer, 100 | rightData: trimmedRight.buffer, 101 | sampleRate, 102 | } 103 | } 104 | -------------------------------------------------------------------------------- /lib/src/processor/WavetableOscillator.ts: -------------------------------------------------------------------------------- 1 | import { AmplitudeEnvelope } from "./AmplitudeEnvelope" 2 | import { LFO } from "./LFO" 3 | import { Sample } from "./SampleTable" 4 | 5 | export class WavetableOscillator { 6 | readonly sample: Sample 7 | private sampleIndex = 0 8 | private _isPlaying = false 9 | private _isNoteOff = false 10 | private baseSpeed = 1 11 | private readonly envelope: AmplitudeEnvelope 12 | private readonly pitchLFO: LFO 13 | private readonly sampleRate: number 14 | 15 | speed = 1 16 | // 0 to 1 17 | private velocity = 1 18 | // 0 to 1 19 | volume = 1 20 | 21 | modulation = 0 22 | 23 | // cent 24 | modulationDepthRange = 50 25 | 26 | // -1 to 1 27 | pan = 0 28 | 29 | // This oscillator should be note off when hold pedal off 30 | isHold = false 31 | 32 | constructor(sample: Sample, sampleRate: number) { 33 | this.sample = sample 34 | this.sampleRate = sampleRate 35 | this.envelope = new AmplitudeEnvelope(sample.amplitudeEnvelope, sampleRate) 36 | this.pitchLFO = new LFO(sampleRate) 37 | } 38 | 39 | 
noteOn(pitch: number, velocity: number) { 40 | this.velocity = velocity 41 | this._isPlaying = true 42 | this.sampleIndex = this.sample.sampleStart 43 | this.baseSpeed = Math.pow( 44 | 2, 45 | ((pitch - this.sample.pitch) / 12) * this.sample.scaleTuning 46 | ) 47 | this.pitchLFO.frequency = 5 48 | this.envelope.noteOn() 49 | } 50 | 51 | noteOff() { 52 | this.envelope.noteOff() 53 | this._isNoteOff = true 54 | } 55 | 56 | forceStop() { 57 | this.envelope.forceStop() 58 | } 59 | 60 | process(outputs: Float32Array[]) { 61 | if (!this._isPlaying) { 62 | return 63 | } 64 | 65 | const speed = 66 | (this.baseSpeed * this.speed * this.sample.sampleRate) / this.sampleRate 67 | const volume = (this.velocity * this.volume) ** 2 * this.sample.volume 68 | 69 | // zero to pi/2 70 | const panTheta = 71 | ((Math.min(1, Math.max(-1, this.pan + this.sample.pan)) + 1) * Math.PI) / 72 | 4 73 | const leftPanVolume = Math.cos(panTheta) 74 | const rightPanVolume = Math.sin(panTheta) 75 | const gain = this.envelope.getAmplitude(outputs[0].length) 76 | const leftGain = gain * volume * leftPanVolume 77 | const rightGain = gain * volume * rightPanVolume 78 | 79 | const pitchLFOValue = this.pitchLFO.getValue(outputs[0].length) 80 | const pitchModulation = 81 | pitchLFOValue * this.modulation * (this.modulationDepthRange / 1200) 82 | const modulatedSpeed = speed * (1 + pitchModulation) 83 | 84 | for (let i = 0; i < outputs[0].length; ++i) { 85 | const index = Math.floor(this.sampleIndex) 86 | const advancedIndex = this.sampleIndex + modulatedSpeed 87 | let loopIndex: number | null = null 88 | 89 | if ( 90 | (this.sample.loop.type === "loop_continuous" || 91 | (this.sample.loop.type === "loop_sustain" && !this._isNoteOff)) && 92 | advancedIndex >= this.sample.loop.end 93 | ) { 94 | loopIndex = 95 | this.sample.loop.start + (advancedIndex - Math.floor(advancedIndex)) 96 | } 97 | 98 | const nextIndex = 99 | loopIndex !== null 100 | ? 
Math.floor(loopIndex) 101 | : Math.min(index + 1, this.sample.sampleEnd - 1) 102 | 103 | // linear interpolation 104 | const current = this.sample.buffer[index] 105 | const next = this.sample.buffer[nextIndex] 106 | const level = current + (next - current) * (this.sampleIndex - index) 107 | 108 | outputs[0][i] += level * leftGain 109 | outputs[1][i] += level * rightGain 110 | 111 | this.sampleIndex = loopIndex ?? advancedIndex 112 | 113 | if (this.sampleIndex >= this.sample.sampleEnd) { 114 | this._isPlaying = false 115 | break 116 | } 117 | } 118 | } 119 | 120 | get isPlaying() { 121 | return this._isPlaying && this.envelope.isPlaying 122 | } 123 | 124 | get isNoteOff() { 125 | return this._isNoteOff 126 | } 127 | 128 | get exclusiveClass() { 129 | return this.sample.exclusiveClass 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /example/src/MIDIPlayer.ts: -------------------------------------------------------------------------------- 1 | import { SynthEvent } from "@ryohey/wavelet" 2 | import { 3 | AnyEvent, 4 | EndOfTrackEvent, 5 | MIDIControlEvents, 6 | MidiFile, 7 | } from "midifile-ts" 8 | import EventScheduler from "./EventScheduler" 9 | 10 | interface Tick { 11 | tick: number 12 | track: number 13 | } 14 | 15 | function addTick(events: AnyEvent[], track: number): (AnyEvent & Tick)[] { 16 | let tick = 0 17 | return events.map((e) => { 18 | tick += e.deltaTime 19 | return { ...e, tick, track } 20 | }) 21 | } 22 | 23 | export const isEndOfTrackEvent = (e: AnyEvent): e is EndOfTrackEvent => 24 | "subtype" in e && e.subtype === "endOfTrack" 25 | 26 | const TIMER_INTERVAL = 100 27 | const LOOK_AHEAD_TIME = 50 28 | 29 | export class MIDIPlayer { 30 | private output: (e: SynthEvent) => void 31 | private tempo = 120 32 | private interval: number | undefined 33 | private midi: MidiFile 34 | private sampleRate: number 35 | private tickedEvents: (AnyEvent & Tick)[] 36 | private scheduler: EventScheduler 37 | 
private endOfSong: number 38 | onProgress?: (progress: number) => void 39 | 40 | constructor( 41 | midi: MidiFile, 42 | sampleRate: number, 43 | output: (e: SynthEvent) => void 44 | ) { 45 | this.midi = midi 46 | this.sampleRate = sampleRate 47 | this.output = output 48 | this.tickedEvents = midi.tracks 49 | .flatMap(addTick) 50 | .sort((a, b) => a.tick - b.tick) 51 | this.scheduler = new EventScheduler( 52 | this.tickedEvents, 53 | 0, 54 | this.midi.header.ticksPerBeat, 55 | TIMER_INTERVAL + LOOK_AHEAD_TIME 56 | ) 57 | this.endOfSong = Math.max( 58 | ...this.tickedEvents.filter(isEndOfTrackEvent).map((e) => e.tick) 59 | ) 60 | this.resetControllers() 61 | } 62 | 63 | resume() { 64 | if (this.interval === undefined) { 65 | this.interval = window.setInterval(() => this.onTimer(), TIMER_INTERVAL) 66 | } 67 | } 68 | 69 | pause() { 70 | clearInterval(this.interval) 71 | this.interval = undefined 72 | this.allSoundsOff() 73 | } 74 | 75 | stop() { 76 | this.pause() 77 | this.resetControllers() 78 | this.scheduler.seek(0) 79 | this.onProgress?.(0) 80 | } 81 | 82 | // 0: start, 1: end 83 | seek(position: number) { 84 | this.allSoundsOff() 85 | this.scheduler.seek(position * this.endOfSong) 86 | } 87 | 88 | private allSoundsOff() { 89 | for (let i = 0; i < 16; i++) { 90 | this.output({ 91 | type: "midi", 92 | midi: { 93 | type: "channel", 94 | subtype: "controller", 95 | controllerType: MIDIControlEvents.ALL_SOUNDS_OFF, 96 | channel: i, 97 | value: 0, 98 | }, 99 | delayTime: 0, 100 | }) 101 | } 102 | } 103 | 104 | private resetControllers() { 105 | for (let i = 0; i < 16; i++) { 106 | this.output({ 107 | type: "midi", 108 | midi: { 109 | type: "channel", 110 | subtype: "controller", 111 | controllerType: MIDIControlEvents.RESET_CONTROLLERS, 112 | channel: i, 113 | value: 0, 114 | }, 115 | delayTime: 0, 116 | }) 117 | } 118 | } 119 | 120 | private onTimer() { 121 | const now = performance.now() 122 | const events = this.scheduler.readNextEvents(this.tempo, now) 123 | 124 | 
// channel イベントを MIDI Output に送信 125 | // Send Channel Event to MIDI OUTPUT 126 | events.forEach(({ event, timestamp }) => { 127 | const delayTime = ((timestamp - now) / 1000) * this.sampleRate 128 | const synthEvent = this.handleEvent(event, delayTime) 129 | if (synthEvent !== null) { 130 | this.output(synthEvent) 131 | } 132 | }) 133 | 134 | if (this.scheduler.currentTick >= this.endOfSong) { 135 | clearInterval(this.interval) 136 | this.interval = undefined 137 | } 138 | 139 | this.onProgress?.(this.scheduler.currentTick / this.endOfSong) 140 | } 141 | 142 | private handleEvent( 143 | e: AnyEvent & Tick, 144 | delayTime: number 145 | ): SynthEvent | null { 146 | switch (e.type) { 147 | case "channel": 148 | return { 149 | type: "midi", 150 | midi: e, 151 | delayTime, 152 | } 153 | case "meta": 154 | switch (e.subtype) { 155 | case "setTempo": 156 | this.tempo = (60 * 1000000) / e.microsecondsPerBeat 157 | break 158 | default: 159 | console.warn(`not supported meta event`, e) 160 | break 161 | } 162 | } 163 | return null 164 | } 165 | } 166 | -------------------------------------------------------------------------------- /lib/src/processor/AmplitudeEnvelope.ts: -------------------------------------------------------------------------------- 1 | export interface AmplitudeEnvelopeParameter { 2 | attackTime: number 3 | holdTime: number 4 | decayTime: number 5 | sustainLevel: number 6 | releaseTime: number 7 | } 8 | 9 | enum EnvelopePhase { 10 | attack, 11 | hold, 12 | decay, 13 | sustain, 14 | release, 15 | forceStop, 16 | stopped, 17 | } 18 | 19 | const forceStopReleaseTime = 0.1 20 | 21 | export class AmplitudeEnvelope { 22 | private readonly parameter: AmplitudeEnvelopeParameter 23 | private _phase = EnvelopePhase.stopped 24 | private isNoteOff = false 25 | private phaseTime = 0 26 | private decayLevel = 0 // amplitude level at the end of decay phase 27 | private lastAmplitude = 0 28 | private readonly sampleRate: number 29 | 30 | constructor(parameter: 
AmplitudeEnvelopeParameter, sampleRate: number) { 31 | this.parameter = parameter 32 | this.sampleRate = sampleRate 33 | } 34 | 35 | private get phase() { 36 | return this._phase 37 | } 38 | 39 | private set phase(phase: EnvelopePhase) { 40 | if (this._phase === phase) { 41 | return 42 | } 43 | this._phase = phase 44 | this.phaseTime = 0 45 | } 46 | 47 | noteOn() { 48 | this.phase = EnvelopePhase.attack 49 | this.isNoteOff = false 50 | this.phaseTime = 0 51 | this.decayLevel = this.parameter.sustainLevel 52 | } 53 | 54 | noteOff() { 55 | this.isNoteOff = true 56 | } 57 | 58 | // Rapidly decrease the volume. This method ignores release time parameter 59 | forceStop() { 60 | this.phase = EnvelopePhase.forceStop 61 | } 62 | 63 | calculateAmplitude(bufferSize: number): number { 64 | const { attackTime, holdTime, decayTime, sustainLevel, releaseTime } = 65 | this.parameter 66 | const { sampleRate } = this 67 | 68 | if ( 69 | this.isNoteOff && 70 | (this.phase === EnvelopePhase.decay || 71 | this.phase === EnvelopePhase.sustain) 72 | ) { 73 | this.phase = EnvelopePhase.release 74 | this.decayLevel = this.lastAmplitude 75 | } 76 | 77 | // Attack 78 | switch (this.phase) { 79 | case EnvelopePhase.attack: { 80 | const amplificationPerFrame = 81 | (1 / (attackTime * sampleRate)) * bufferSize 82 | const value = this.lastAmplitude + amplificationPerFrame 83 | if (value >= 1) { 84 | this.phase = EnvelopePhase.hold 85 | return 1 86 | } 87 | return value 88 | } 89 | case EnvelopePhase.hold: { 90 | if (this.phaseTime >= holdTime) { 91 | this.phase = EnvelopePhase.decay 92 | } 93 | return this.lastAmplitude 94 | } 95 | case EnvelopePhase.decay: { 96 | const attenuationDecibel = linearToDecibel(sustainLevel / 1) 97 | const value = logAttenuation( 98 | 1.0, 99 | attenuationDecibel, 100 | decayTime, 101 | this.phaseTime 102 | ) 103 | if (this.phaseTime > decayTime) { 104 | if (sustainLevel <= 0) { 105 | this.phase = EnvelopePhase.stopped 106 | return 0 107 | } else { 108 | this.phase 
= EnvelopePhase.sustain 109 | return sustainLevel 110 | } 111 | } 112 | return value 113 | } 114 | case EnvelopePhase.sustain: { 115 | return sustainLevel 116 | } 117 | case EnvelopePhase.release: { 118 | const value = logAttenuation( 119 | this.decayLevel, 120 | -100, // -100dB means almost silence 121 | releaseTime, 122 | this.phaseTime 123 | ) 124 | if (this.phaseTime > releaseTime || value <= 0) { 125 | this.phase = EnvelopePhase.stopped 126 | return 0 127 | } 128 | return value 129 | } 130 | case EnvelopePhase.forceStop: { 131 | const attenuationPerFrame = 132 | (1 / (forceStopReleaseTime * sampleRate)) * bufferSize 133 | const value = this.lastAmplitude - attenuationPerFrame 134 | if (value <= 0) { 135 | this.phase = EnvelopePhase.stopped 136 | return 0 137 | } 138 | return value 139 | } 140 | case EnvelopePhase.stopped: { 141 | return 0 142 | } 143 | } 144 | } 145 | 146 | getAmplitude(bufferSize: number): number { 147 | const value = this.calculateAmplitude(bufferSize) 148 | this.lastAmplitude = value 149 | // bug fix: was bare `sampleRate`, which only resolves to the AudioWorkletGlobalScope global (ReferenceError during offline rendering in a worker; wrong rate if the worklet context differs from the configured one) 150 | this.phaseTime += bufferSize / this.sampleRate 151 | return value 152 | } 153 | 154 | get isPlaying() { 155 | return this.phase !== EnvelopePhase.stopped 156 | } 157 | } 158 | 159 | // An exponential decay function. It attenuates the value of decibel over the duration time.
159 | function logAttenuation( 160 | fromLevel: number, 161 | attenuationDecibel: number, 162 | duration: number, 163 | time: number 164 | ): number { 165 | return fromLevel * decibelToLinear((attenuationDecibel / duration) * time) 166 | } 167 | 168 | function linearToDecibel(value: number): number { 169 | return 20 * Math.log10(value) 170 | } 171 | 172 | function decibelToLinear(value: number): number { 173 | return Math.pow(10, value / 20) 174 | } 175 | -------------------------------------------------------------------------------- /lib/src/processor/SynthEventHandler.ts: -------------------------------------------------------------------------------- 1 | import { ControllerEvent, MIDIControlEvents } from "midifile-ts" 2 | import { ImmediateEvent, MIDIEventBody } from "../SynthEvent" 3 | import { DistributiveOmit } from "../types" 4 | import { SynthProcessorCore } from "./SynthProcessorCore" 5 | import { logger } from "./logger" 6 | 7 | type RPNControllerEvent = DistributiveOmit 8 | 9 | interface RPN { 10 | rpnMSB?: RPNControllerEvent 11 | rpnLSB?: RPNControllerEvent 12 | dataMSB?: RPNControllerEvent 13 | dataLSB?: RPNControllerEvent 14 | } 15 | 16 | export class SynthEventHandler { 17 | private rpnEvents: { [channel: number]: RPN | undefined } = {} 18 | private bankSelectMSB: { [channel: number]: number | undefined } = {} 19 | 20 | constructor(private readonly processor: SynthProcessorCore) {} 21 | 22 | handleImmediateEvent(e: ImmediateEvent) { 23 | switch (e.type) { 24 | case "sampleParameter": 25 | this.processor.addSampleParameter(e.parameter, e.range) 26 | break 27 | case "loadSample": 28 | this.processor.addSample(e.data, e.sampleID) 29 | break 30 | } 31 | } 32 | 33 | handleDelayableEvent(e: MIDIEventBody) { 34 | logger.log("handle delayable event", e) 35 | 36 | switch (e.type) { 37 | case "channel": { 38 | switch (e.subtype) { 39 | case "noteOn": 40 | this.processor.noteOn(e.channel, e.noteNumber, e.velocity) 41 | break 42 | case "noteOff": 43 | 
this.processor.noteOff(e.channel, e.noteNumber) 44 | break 45 | case "pitchBend": 46 | this.processor.pitchBend(e.channel, e.value) 47 | break 48 | case "programChange": 49 | this.processor.programChange(e.channel, e.value) 50 | break 51 | case "controller": { 52 | switch (e.controllerType) { 53 | case MIDIControlEvents.NONREG_PARM_NUM_MSB: 54 | case MIDIControlEvents.NONREG_PARM_NUM_LSB: // NRPN LSB 55 | // Delete the rpn for do not send NRPN data events 56 | delete this.rpnEvents[e.channel] 57 | break 58 | case MIDIControlEvents.REGIST_PARM_NUM_MSB: { 59 | if (e.value === 127) { 60 | delete this.rpnEvents[e.channel] 61 | } else { 62 | this.rpnEvents[e.channel] = { 63 | ...this.rpnEvents[e.channel], 64 | rpnMSB: e, 65 | } 66 | } 67 | break 68 | } 69 | case MIDIControlEvents.REGIST_PARM_NUM_LSB: { 70 | if (e.value === 127) { 71 | delete this.rpnEvents[e.channel] 72 | } else { 73 | this.rpnEvents[e.channel] = { 74 | ...this.rpnEvents[e.channel], 75 | rpnLSB: e, 76 | } 77 | } 78 | break 79 | } 80 | case MIDIControlEvents.MSB_DATA_ENTRY: { 81 | const rpn = { 82 | ...this.rpnEvents[e.channel], 83 | dataMSB: e, 84 | } 85 | this.rpnEvents[e.channel] = rpn 86 | 87 | // In case of pitch bend sensitivity, 88 | // send without waiting for Data LSB event 89 | if (rpn.rpnLSB?.value === 0) { 90 | this.processor.setPitchBendSensitivity( 91 | e.channel, 92 | rpn.dataMSB.value 93 | ) 94 | } 95 | break 96 | } 97 | case MIDIControlEvents.LSB_DATA_ENTRY: { 98 | this.rpnEvents[e.channel] = { 99 | ...this.rpnEvents[e.channel], 100 | dataLSB: e, 101 | } 102 | // TODO: Send other RPN events 103 | break 104 | } 105 | case MIDIControlEvents.MSB_MAIN_VOLUME: 106 | this.processor.setMainVolume(e.channel, e.value) 107 | break 108 | case MIDIControlEvents.MSB_EXPRESSION: 109 | this.processor.expression(e.channel, e.value) 110 | break 111 | case MIDIControlEvents.ALL_SOUNDS_OFF: 112 | this.processor.allSoundsOff(e.channel) 113 | break 114 | case MIDIControlEvents.ALL_NOTES_OFF: 115 | 
this.processor.allNotesOff(e.channel) 116 | break 117 | case MIDIControlEvents.SUSTAIN: 118 | this.processor.hold(e.channel, e.value) 119 | break 120 | case MIDIControlEvents.MSB_PAN: 121 | this.processor.setPan(e.channel, e.value) 122 | break 123 | case MIDIControlEvents.MSB_MODWHEEL: 124 | this.processor.modulation(e.channel, e.value) 125 | break 126 | case MIDIControlEvents.MSB_BANK: 127 | this.bankSelectMSB[e.channel] = e.value 128 | break 129 | case MIDIControlEvents.LSB_BANK: { 130 | const msb = this.bankSelectMSB[e.channel] 131 | if (msb !== undefined) { 132 | const bank = (msb << 7) + e.value 133 | this.processor.bankSelect(e.channel, bank) 134 | } 135 | break 136 | } 137 | case MIDIControlEvents.RESET_CONTROLLERS: 138 | this.processor.resetChannel(e.channel) 139 | break 140 | } 141 | break 142 | } 143 | } 144 | break 145 | } 146 | } 147 | } 148 | } 149 | -------------------------------------------------------------------------------- /lib/src/soundfont/loader.ts: -------------------------------------------------------------------------------- 1 | import { 2 | defaultInstrumentZone, 3 | GeneratorParams, 4 | getPresetGenerators, 5 | parse, 6 | } from "@ryohey/sf2parser" 7 | import { AmplitudeEnvelopeParameter } from "../processor/AmplitudeEnvelope" 8 | import { 9 | LoadSampleEvent, 10 | SampleLoop, 11 | SampleParameter, 12 | SampleParameterEvent, 13 | SampleRange, 14 | } from "../SynthEvent" 15 | import { getInstrumentZones } from "./getInstrumentZones" 16 | import { getPresetZones } from "./getPresetZones" 17 | 18 | export interface BufferCreator { 19 | createBuffer( 20 | numberOfChannels: number, 21 | length: number, 22 | sampleRate: number 23 | ): AudioBuffer 24 | } 25 | 26 | const parseSamplesFromSoundFont = (data: Uint8Array) => { 27 | const parsed = parse(data) 28 | const result: { parameter: SampleParameter; range: SampleRange }[] = [] 29 | const convertedSampleBuffers: { [key: number]: Float32Array } = {} 30 | 31 | function 
addSampleIfNeeded(sampleID: number) { 32 | const cached = convertedSampleBuffers[sampleID] 33 | if (cached) { 34 | return cached 35 | } 36 | 37 | const sample = parsed.samples[sampleID] 38 | const audioData = new Float32Array(sample.length) 39 | for (let i = 0; i < sample.length; i++) { 40 | audioData[i] = sample[i] / 32767 41 | } 42 | 43 | convertedSampleBuffers[sampleID] = audioData 44 | return audioData 45 | } 46 | 47 | for (let i = 0; i < parsed.presetHeaders.length; i++) { 48 | const presetHeader = parsed.presetHeaders[i] 49 | const presetGenerators = getPresetGenerators(parsed, i) 50 | 51 | const presetZones = getPresetZones(presetGenerators) 52 | 53 | for (const presetZone of presetZones.zones) { 54 | const presetGen = { 55 | ...removeUndefined(presetZones.globalZone ?? {}), 56 | ...removeUndefined(presetZone), 57 | } 58 | 59 | const instrumentID = presetZone.instrument 60 | const instrumentZones = getInstrumentZones(parsed, instrumentID) 61 | 62 | for (const zone of instrumentZones.zones) { 63 | const sampleID = zone.sampleID! 64 | const sampleHeader = parsed.sampleHeaders[sampleID] 65 | 66 | const { velRange: defaultVelRange, ...generatorDefault } = 67 | defaultInstrumentZone 68 | 69 | const gen = { 70 | ...generatorDefault, 71 | ...removeUndefined(instrumentZones.globalZone ?? {}), 72 | ...removeUndefined(zone), 73 | } 74 | 75 | // inherit preset's velRange 76 | gen.velRange = gen.velRange ?? presetGen.velRange ?? defaultVelRange 77 | 78 | // add presetGenerator value 79 | for (const key of Object.keys(gen) as (keyof GeneratorParams)[]) { 80 | if ( 81 | key in presetGen && 82 | typeof gen[key] === "number" && 83 | typeof presetGen[key] === "number" 84 | ) { 85 | gen[key] += presetGen[key] 86 | } 87 | } 88 | 89 | const tune = gen.coarseTune + gen.fineTune / 100 90 | 91 | const basePitch = 92 | tune + 93 | sampleHeader.pitchCorrection / 100 - 94 | (gen.overridingRootKey ?? 
sampleHeader.originalPitch) 95 | 96 | const sampleStart = 97 | gen.startAddrsCoarseOffset * 32768 + gen.startAddrsOffset 98 | 99 | const sampleEnd = gen.endAddrsCoarseOffset * 32768 + gen.endAddrsOffset 100 | 101 | const loopStart = 102 | sampleHeader.loopStart + 103 | gen.startloopAddrsCoarseOffset * 32768 + 104 | gen.startloopAddrsOffset 105 | 106 | const loopEnd = 107 | sampleHeader.loopEnd + 108 | gen.endloopAddrsCoarseOffset * 32768 + 109 | gen.endloopAddrsOffset 110 | 111 | const audioData = addSampleIfNeeded(sampleID) 112 | 113 | const amplitudeEnvelope: AmplitudeEnvelopeParameter = { 114 | attackTime: timeCentToSec(gen.attackVolEnv), 115 | holdTime: timeCentToSec(gen.holdVolEnv), 116 | decayTime: timeCentToSec(gen.decayVolEnv), 117 | sustainLevel: 1 / centibelToLinear(gen.sustainVolEnv), 118 | releaseTime: timeCentToSec(gen.releaseVolEnv), 119 | } 120 | 121 | const loop: SampleLoop = (() => { 122 | switch (gen.sampleModes) { 123 | case 0: 124 | // no_loop 125 | break 126 | case 1: 127 | if (loopEnd > 0) { 128 | return { 129 | type: "loop_continuous", 130 | start: loopStart, 131 | end: loopEnd, 132 | } 133 | } 134 | case 3: 135 | if (loopEnd > 0) { 136 | return { 137 | type: "loop_sustain", 138 | start: loopStart, 139 | end: loopEnd, 140 | } 141 | } 142 | break 143 | } 144 | // fallback as no_loop 145 | return { type: "no_loop" } 146 | })() 147 | 148 | const parameter: SampleParameter = { 149 | sampleID: sampleID, 150 | pitch: -basePitch, 151 | name: sampleHeader.sampleName, 152 | sampleStart, 153 | sampleEnd: sampleEnd === 0 ? audioData.length : sampleEnd, 154 | loop, 155 | sampleRate: sampleHeader.sampleRate, 156 | amplitudeEnvelope, 157 | scaleTuning: gen.scaleTuning / 100, 158 | pan: (gen.pan ?? 
0) / 500, 159 | exclusiveClass: gen.exclusiveClass, 160 | volume: centibelToLinear(-gen.initialAttenuation), 161 | } 162 | 163 | const range: SampleRange = { 164 | instrument: presetHeader.preset, 165 | bank: presetHeader.bank, 166 | keyRange: [gen.keyRange.lo, gen.keyRange.hi], 167 | velRange: [gen.velRange.lo, gen.velRange.hi], 168 | } 169 | 170 | result.push({ parameter, range }) 171 | } 172 | } 173 | } 174 | 175 | return { 176 | parameters: result, 177 | samples: convertedSampleBuffers, 178 | } 179 | } 180 | 181 | export const getSampleEventsFromSoundFont = ( 182 | data: Uint8Array 183 | ): { 184 | event: LoadSampleEvent | SampleParameterEvent 185 | transfer?: Transferable[] 186 | }[] => { 187 | const { samples, parameters } = parseSamplesFromSoundFont(data) 188 | 189 | const loadSampleEvents: LoadSampleEvent[] = Object.entries(samples).map( 190 | ([key, value]) => ({ 191 | type: "loadSample", 192 | sampleID: Number(key), 193 | data: value.buffer, 194 | }) 195 | ) 196 | 197 | const sampleParameterEvents: SampleParameterEvent[] = parameters.map( 198 | ({ parameter, range }) => ({ type: "sampleParameter", parameter, range }) 199 | ) 200 | 201 | return [ 202 | ...loadSampleEvents.map((event) => ({ event, transfer: [event.data] })), 203 | ...sampleParameterEvents.map((event) => ({ event })), 204 | ] 205 | } 206 | 207 | function convertTime(value: number) { 208 | return Math.pow(2, value / 1200) 209 | } 210 | 211 | function timeCentToSec(value: number) { 212 | if (value <= -32768) { 213 | return 0 214 | } 215 | 216 | if (value < -12000) { 217 | value = -12000 218 | } 219 | 220 | if (value > 8000) { 221 | value = 8000 222 | } 223 | 224 | return convertTime(value) 225 | } 226 | 227 | function centibelToLinear(value: number) { 228 | return Math.pow(10, value / 200) 229 | } 230 | 231 | function removeUndefined(obj: T) { 232 | const result: Partial = {} 233 | for (let key in obj) { 234 | if (obj[key] !== undefined) { 235 | result[key] = obj[key] 236 | } 237 | } 238 | 
return result 239 | } 240 | -------------------------------------------------------------------------------- /lib/src/processor/SynthProcessorCore.ts: -------------------------------------------------------------------------------- 1 | import { SampleParameter, SampleRange, SynthEvent } from "../SynthEvent" 2 | import { logger } from "./logger" 3 | import { Sample, SampleTable } from "./SampleTable" 4 | import { SynthEventHandler } from "./SynthEventHandler" 5 | import { SynthEventScheduler } from "./SynthEventScheduler" 6 | import { WavetableOscillator } from "./WavetableOscillator" 7 | 8 | interface ChannelState { 9 | volume: number // 0 to 1 10 | bank: number 11 | instrument: number 12 | pitchBend: number // in semitone 13 | pitchBendSensitivity: number // in semitone 14 | expression: number // 0 to 1 15 | pan: number // -1 to 1 16 | modulation: number 17 | oscillators: { [key: number]: WavetableOscillator[] } 18 | hold: boolean 19 | } 20 | 21 | const initialChannelState = (): ChannelState => ({ 22 | volume: 1, 23 | bank: 0, 24 | instrument: 0, 25 | pitchBend: 0, 26 | pitchBendSensitivity: 2, 27 | oscillators: {}, 28 | expression: 1, 29 | pan: 0, 30 | modulation: 0, 31 | hold: false, 32 | }) 33 | 34 | const RHYTHM_CHANNEL = 9 35 | const RHYTHM_BANK = 128 36 | 37 | export class SynthProcessorCore { 38 | private sampleTable = new SampleTable() 39 | private channels: { [key: number]: ChannelState } = {} 40 | private readonly eventScheduler: SynthEventScheduler 41 | 42 | constructor( 43 | private readonly sampleRate: number, 44 | private readonly getCurrentFrame: () => number 45 | ) { 46 | const eventHandler = new SynthEventHandler(this) 47 | this.eventScheduler = new SynthEventScheduler( 48 | getCurrentFrame, 49 | (e) => eventHandler.handleImmediateEvent(e), 50 | (e) => eventHandler.handleDelayableEvent(e) 51 | ) 52 | this.sampleRate = sampleRate 53 | this.getCurrentFrame = getCurrentFrame 54 | } 55 | 56 | get currentFrame(): number { 57 | return 
this.getCurrentFrame() 58 | } 59 | 60 | private getSamples( 61 | channel: number, 62 | pitch: number, 63 | velocity: number 64 | ): Sample[] { 65 | const state = this.getChannelState(channel) 66 | // Play drums for CH.10 67 | const bank = channel === RHYTHM_CHANNEL ? RHYTHM_BANK : state.bank 68 | return this.sampleTable.getSamples(bank, state.instrument, pitch, velocity) 69 | } 70 | 71 | addSample(data: ArrayBuffer, sampleID: number) { 72 | this.sampleTable.addSample(new Float32Array(data), sampleID) 73 | } 74 | 75 | addSampleParameter(parameter: SampleParameter, range: SampleRange) { 76 | this.sampleTable.addSampleParameter(parameter, range) 77 | } 78 | 79 | addEvent(e: SynthEvent & { sequenceNumber: number }) { 80 | this.eventScheduler.addEvent(e) 81 | } 82 | 83 | noteOn(channel: number, pitch: number, velocity: number) { 84 | const state = this.getChannelState(channel) 85 | 86 | const samples = this.getSamples(channel, pitch, velocity) 87 | 88 | if (samples.length === 0) { 89 | logger.warn( 90 | `There is no sample for noteNumber ${pitch} in instrument ${state.instrument} in bank ${state.bank}` 91 | ) 92 | return 93 | } 94 | 95 | for (const sample of samples) { 96 | const oscillator = new WavetableOscillator(sample, this.sampleRate) 97 | 98 | const volume = velocity / 127 99 | oscillator.noteOn(pitch, volume) 100 | 101 | if (state.oscillators[pitch] === undefined) { 102 | state.oscillators[pitch] = [] 103 | } 104 | 105 | if (sample.exclusiveClass !== undefined) { 106 | for (const key in state.oscillators) { 107 | for (const osc of state.oscillators[key]) { 108 | if (osc.exclusiveClass === sample.exclusiveClass) { 109 | osc.forceStop() 110 | } 111 | } 112 | } 113 | } 114 | 115 | state.oscillators[pitch].push(oscillator) 116 | } 117 | } 118 | 119 | noteOff(channel: number, pitch: number) { 120 | const state = this.getChannelState(channel) 121 | 122 | if (state.oscillators[pitch] === undefined) { 123 | return 124 | } 125 | 126 | for (const osc of 
state.oscillators[pitch]) { 127 | if (!osc.isNoteOff) { 128 | if (state.hold) { 129 | osc.isHold = true 130 | } else { 131 | osc.noteOff() 132 | } 133 | } 134 | } 135 | } 136 | 137 | pitchBend(channel: number, value: number) { 138 | const state = this.getChannelState(channel) 139 | state.pitchBend = (value / 0x2000 - 1) * state.pitchBendSensitivity 140 | } 141 | 142 | programChange(channel: number, value: number) { 143 | const state = this.getChannelState(channel) 144 | state.instrument = value 145 | } 146 | 147 | setPitchBendSensitivity(channel: number, value: number) { 148 | const state = this.getChannelState(channel) 149 | state.pitchBendSensitivity = value 150 | } 151 | 152 | setMainVolume(channel: number, value: number) { 153 | const state = this.getChannelState(channel) 154 | state.volume = value / 127 155 | } 156 | 157 | expression(channel: number, value: number) { 158 | const state = this.getChannelState(channel) 159 | state.expression = value / 127 160 | } 161 | 162 | allSoundsOff(channel: number) { 163 | this.eventScheduler.removeScheduledEvents(channel) 164 | const state = this.getChannelState(channel) 165 | 166 | for (const key in state.oscillators) { 167 | for (const osc of state.oscillators[key]) { 168 | osc.forceStop() 169 | } 170 | } 171 | } 172 | 173 | allNotesOff(channel: number) { 174 | const state = this.getChannelState(channel) 175 | 176 | for (const key in state.oscillators) { 177 | for (const osc of state.oscillators[key]) { 178 | osc.noteOff() 179 | } 180 | } 181 | } 182 | 183 | hold(channel: number, value: number) { 184 | const hold = value >= 64 185 | const state = this.getChannelState(channel) 186 | state.hold = hold 187 | 188 | if (hold) { 189 | return 190 | } 191 | 192 | for (const key in state.oscillators) { 193 | for (const osc of state.oscillators[key]) { 194 | if (osc.isHold) { 195 | osc.noteOff() 196 | } 197 | } 198 | } 199 | } 200 | 201 | setPan(channel: number, value: number) { 202 | const state = this.getChannelState(channel) 
203 | state.pan = (value / 127 - 0.5) * 2 204 | } 205 | 206 | bankSelect(channel: number, value: number) { 207 | const state = this.getChannelState(channel) 208 | state.bank = value 209 | } 210 | 211 | modulation(channel: number, value: number) { 212 | const state = this.getChannelState(channel) 213 | state.modulation = value / 127 214 | } 215 | 216 | resetChannel(channel: number) { 217 | delete this.channels[channel] 218 | } 219 | 220 | private getChannelState(channel: number): ChannelState { 221 | const state = this.channels[channel] 222 | if (state !== undefined) { 223 | return state 224 | } 225 | const newState = initialChannelState() 226 | this.channels[channel] = newState 227 | return newState 228 | } 229 | 230 | process(outputs: Float32Array[]): void { 231 | this.eventScheduler.processScheduledEvents() 232 | 233 | for (const channel in this.channels) { 234 | const state = this.channels[channel] 235 | 236 | for (let key in state.oscillators) { 237 | state.oscillators[key] = state.oscillators[key].filter((oscillator) => { 238 | oscillator.speed = Math.pow(2, state.pitchBend / 12) 239 | oscillator.volume = state.volume * state.expression 240 | oscillator.pan = state.pan 241 | oscillator.modulation = state.modulation 242 | oscillator.process([outputs[0], outputs[1]]) 243 | 244 | if (!oscillator.isPlaying) { 245 | return false 246 | } 247 | return true 248 | }) 249 | } 250 | } 251 | } 252 | } 253 | -------------------------------------------------------------------------------- /example/src/index.ts: -------------------------------------------------------------------------------- 1 | import { 2 | AudioData, 3 | audioDataToAudioBuffer, 4 | CancelMessage, 5 | getSampleEventsFromSoundFont, 6 | OutMessage, 7 | renderAudio, 8 | StartMessage, 9 | SynthEvent, 10 | } from "@ryohey/wavelet" 11 | import { deserialize, MidiFile, read, Stream } from "midifile-ts" 12 | import { encode } from "wav-encoder" 13 | import { MIDIPlayer } from "./MIDIPlayer" 14 | import { 
midiToSynthEvents } from "./midiToSynthEvents" 15 | 16 | // const soundFontUrl = "soundfonts/A320U.sf2" 17 | const soundFontUrl = "soundfonts/SGM-V2.01.sf2" 18 | 19 | const Sleep = (time: number) => 20 | new Promise((resolve) => setTimeout(resolve, time)) 21 | 22 | const waitForAnimationFrame = () => 23 | new Promise((resolve) => window.requestAnimationFrame(() => resolve())) 24 | 25 | const main = async () => { 26 | const context = new AudioContext() 27 | let synth: AudioWorkletNode 28 | let soundFontData: ArrayBuffer | null = null 29 | 30 | const setup = async () => { 31 | try { 32 | await context.audioWorklet.addModule("js/processor.js") 33 | } catch (e) { 34 | console.error("Failed to add AudioWorklet module", e) 35 | } 36 | synth = new AudioWorkletNode(context, "synth-processor", { 37 | numberOfInputs: 0, 38 | outputChannelCount: [2], 39 | } as any) 40 | synth.connect(context.destination) 41 | } 42 | 43 | let sequenceNumber = 0 44 | 45 | const postSynthMessage = (e: SynthEvent, transfer?: Transferable[]) => { 46 | synth.port.postMessage( 47 | { ...e, sequenceNumber: sequenceNumber++ }, 48 | transfer ?? [] 49 | ) 50 | } 51 | 52 | const loadSoundFont = async () => { 53 | let startDate = Date.now() 54 | console.log("Loading soundfont...") 55 | soundFontData = await (await fetch(soundFontUrl)).arrayBuffer() 56 | console.log( 57 | `Soundfont loaded. (${Date.now() - startDate}ms, ${ 58 | soundFontData.byteLength 59 | } bytes)` 60 | ) 61 | 62 | startDate = Date.now() 63 | console.log("Parsing soundfont...") 64 | const sampleEvents = getSampleEventsFromSoundFont( 65 | new Uint8Array(soundFontData) 66 | ) 67 | console.log(`Soundfont parsed. 
(${Date.now() - startDate}ms)`) 68 | 69 | for (const event of sampleEvents) { 70 | postSynthMessage(event.event, event.transfer) 71 | } 72 | } 73 | 74 | const setupMIDIInput = async () => { 75 | const midiAccess = await (navigator as any).requestMIDIAccess({ 76 | sysex: false, 77 | }) 78 | 79 | midiAccess.inputs.forEach((entry: any) => { 80 | entry.onmidimessage = (event: any) => { 81 | const e = deserialize(new Stream(event.data), 0, () => {}) 82 | if ("channel" in e) { 83 | postSynthMessage({ type: "midi", midi: e, delayTime: 0 }) 84 | } 85 | } 86 | }) 87 | } 88 | 89 | await setup() 90 | 91 | loadSoundFont().catch((e) => console.error(e)) 92 | setupMIDIInput().catch((e) => console.error(e)) 93 | 94 | const fileInput = document.getElementById("open")! 95 | const playButton = document.getElementById("button-play")! 96 | const pauseButton = document.getElementById("button-pause")! 97 | const stopButton = document.getElementById("button-stop")! 98 | const exampleButton = document.getElementById("button-example")! 99 | const exportButton = document.getElementById("button-export")! 100 | const exportPanel = document.getElementById("export-panel")! 101 | const benchmarkButton = document.getElementById("button-benchmark")! 102 | const workerBenchmarkButton = document.getElementById( 103 | "button-benchmark-worker" 104 | )! 105 | 106 | const seekbar = document.getElementById("seekbar")! 
as HTMLInputElement 107 | seekbar.setAttribute("max", "1") 108 | seekbar.setAttribute("step", "0.0001") 109 | seekbar.addEventListener("change", (e) => { 110 | midiPlayer?.seek(seekbar.valueAsNumber) 111 | }) 112 | let isSeekbarDragging = false 113 | seekbar.addEventListener("mousedown", () => { 114 | isSeekbarDragging = true 115 | }) 116 | seekbar.addEventListener("mouseup", () => { 117 | isSeekbarDragging = false 118 | }) 119 | 120 | let midiPlayer: MIDIPlayer | null = null 121 | let midi: MidiFile | null = null 122 | 123 | const playMIDI = (midi: MidiFile) => { 124 | midiPlayer?.pause() 125 | context.resume() 126 | midiPlayer = new MIDIPlayer(midi, context.sampleRate, postSynthMessage) 127 | midiPlayer.onProgress = (progress) => { 128 | if (!isSeekbarDragging) { 129 | seekbar.valueAsNumber = progress 130 | } 131 | } 132 | midiPlayer?.resume() 133 | } 134 | 135 | fileInput.addEventListener("change", (e) => { 136 | context.resume() 137 | const reader = new FileReader() 138 | reader.onload = async () => { 139 | midi = read(reader.result as ArrayBuffer) 140 | playMIDI(midi) 141 | } 142 | const input = e.currentTarget as HTMLInputElement 143 | const file = input.files?.[0] 144 | reader.readAsArrayBuffer(file!) 
145 | }) 146 | 147 | exampleButton.addEventListener("click", async () => { 148 | const midiData = await (await fetch("/midi/example.mid")).arrayBuffer() 149 | midi = read(midiData) 150 | playMIDI(midi) 151 | }) 152 | 153 | playButton.addEventListener("click", () => { 154 | context.resume() 155 | midiPlayer?.resume() 156 | }) 157 | 158 | pauseButton.addEventListener("click", () => { 159 | midiPlayer?.pause() 160 | }) 161 | 162 | stopButton.addEventListener("click", () => { 163 | midiPlayer?.stop() 164 | }) 165 | 166 | const exportAudio = async (midi: MidiFile, type: "worker" | "mainthread") => { 167 | if (soundFontData === null) { 168 | return 169 | } 170 | const sampleEvents = getSampleEventsFromSoundFont( 171 | new Uint8Array(soundFontData) 172 | ) 173 | const sampleRate = 44100 174 | const events = midiToSynthEvents(midi, sampleRate) 175 | 176 | const progress = document.createElement("progress") 177 | progress.value = 0 178 | exportPanel.appendChild(progress) 179 | 180 | const exportOnMainThread = async () => { 181 | const cancelButton = document.createElement("button") 182 | cancelButton.textContent = "cancel" 183 | let cancel = false 184 | cancelButton.onclick = () => (cancel = true) 185 | exportPanel.appendChild(cancelButton) 186 | 187 | const result = await renderAudio( 188 | sampleEvents.map((event) => event.event), 189 | events, 190 | { 191 | sampleRate, 192 | bufferSize: 256, 193 | cancel: () => cancel, 194 | waitForEventLoop: waitForAnimationFrame, 195 | onProgress: (numFrames, totalFrames) => 196 | (progress.value = numFrames / totalFrames), 197 | } 198 | ) 199 | 200 | cancelButton.remove() 201 | 202 | return result 203 | } 204 | 205 | const exportOnWorker = () => 206 | new Promise((resolve) => { 207 | if (soundFontData === null) { 208 | return 209 | } 210 | const worker = new Worker("/js/rendererWorker.js") 211 | const sampleEvents = getSampleEventsFromSoundFont( 212 | new Uint8Array(soundFontData) 213 | ) 214 | const sampleRate = 44100 215 | const 
events = midiToSynthEvents(midi, sampleRate) 216 | const message: StartMessage = { 217 | type: "start", 218 | samples: sampleEvents.map((e) => e.event), 219 | events, 220 | sampleRate, 221 | bufferSize: 128, 222 | } 223 | worker.postMessage(message) 224 | 225 | const cancelButton = document.createElement("button") 226 | cancelButton.textContent = "cancel" 227 | cancelButton.onclick = () => { 228 | const message: CancelMessage = { 229 | type: "cancel", 230 | } 231 | worker.postMessage(message) 232 | } 233 | exportPanel.appendChild(cancelButton) 234 | 235 | worker.onmessage = async (e: MessageEvent) => { 236 | switch (e.data.type) { 237 | case "progress": { 238 | progress.value = e.data.numBytes / e.data.totalBytes 239 | break 240 | } 241 | case "complete": { 242 | progress.remove() 243 | cancelButton.remove() 244 | resolve(e.data.audioData) 245 | break 246 | } 247 | } 248 | } 249 | }) 250 | 251 | let audioData: AudioData 252 | 253 | switch (type) { 254 | case "mainthread": 255 | audioData = await exportOnMainThread() 256 | break 257 | case "worker": 258 | audioData = await exportOnWorker() 259 | break 260 | } 261 | 262 | progress.remove() 263 | 264 | const audioBuffer = audioDataToAudioBuffer(audioData) 265 | 266 | const wavData = await encode({ 267 | sampleRate: audioBuffer.sampleRate, 268 | channelData: [ 269 | audioBuffer.getChannelData(0), 270 | audioBuffer.getChannelData(1), 271 | ], 272 | }) 273 | 274 | const blob = new Blob([wavData], { type: "audio/wav" }) 275 | const audio = new Audio() 276 | const url = window.URL.createObjectURL(blob) 277 | audio.src = url 278 | audio.controls = true 279 | exportPanel.appendChild(audio) 280 | 281 | return audioData 282 | } 283 | 284 | exportButton.addEventListener("click", async () => { 285 | if (midi === null || soundFontData === null) { 286 | return 287 | } 288 | await exportAudio(midi, "worker") 289 | }) 290 | 291 | const benchmark = async (type: "mainthread" | "worker") => { 292 | if (soundFontData === null) { 293 | 
console.error("SoundFont is not loaded") 294 | return 295 | } 296 | const midiData = await (await fetch("/midi/song.mid")).arrayBuffer() 297 | const midi = read(midiData) 298 | 299 | exportPanel.innerHTML += "

Benchmark test started.

" 300 | const startTime = performance.now() 301 | 302 | const result = await exportAudio(midi, type) 303 | 304 | if (result === undefined) { 305 | return 306 | } 307 | 308 | const endTime = performance.now() 309 | const songLength = result.length / result.sampleRate 310 | const processTime = endTime - startTime 311 | exportPanel.innerHTML += ` 312 |

Benchmark test completed.

313 |
    314 |
  • ${ 315 | result.rightData.byteLength + result.leftData.byteLength 316 | } bytes
  • 317 |
  • ${result.length} frames
  • 318 |
  • ${songLength} seconds
  • 319 |
  • Take ${processTime} milliseconds
  • 320 |
  • x${songLength / (processTime / 1000)} speed
  • 321 |
322 | ` 323 | } 324 | 325 | benchmarkButton.addEventListener("click", async () => { 326 | benchmark("mainthread") 327 | }) 328 | 329 | workerBenchmarkButton.addEventListener("click", async () => { 330 | benchmark("worker") 331 | }) 332 | } 333 | 334 | main().catch((e) => { 335 | console.error(e) 336 | }) 337 | --------------------------------------------------------------------------------