├── packages
├── recordy
│ ├── .npmignore
│ ├── dist
│ │ └── index.html
│ ├── package.json
│ ├── src
│ │ ├── getInput.js
│ │ └── index.js
│ ├── package-lock.json
│ ├── webpack.config.js
│ └── README.md
├── wmstr
│ ├── .npmignore
│ ├── dist
│ │ └── index.html
│ ├── package.json
│ ├── src
│ │ └── index.js
│ ├── README.md
│ ├── package-lock.json
│ └── webpack.config.js
├── audiolooper
│ ├── .npmignore
│ ├── graphic.ods
│ ├── graphic.png
│ ├── dist
│ │ ├── index.html
│ │ └── audiolooper.min.js
│ ├── package.json
│ ├── webpack.config.js
│ ├── src
│ │ └── index.js
│ └── README.md
├── soundcyclejs
│ ├── .npmignore
│ ├── package.json
│ ├── src
│ │ ├── index.js
│ │ └── soundcycle.js
│ ├── package-lock.json
│ ├── webpack.config.js
│ └── README.md
├── wrecorder
│ ├── src
│ │ ├── index.js
│ │ └── recorder.js
│ ├── README.md
│ ├── package.json
│ └── webpack.config.js
├── webaudio-chnl
│ ├── dist
│ │ └── index.html
│ ├── package.json
│ ├── package-lock.json
│ ├── src
│ │ └── index.js
│ ├── webpack.config.js
│ └── README.md
├── audiochnl
│ ├── dist
│ │ └── index.html
│ ├── package.json
│ ├── README.md
│ ├── src
│ │ └── index.js
│ ├── package-lock.json
│ └── webpack.config.js
├── audiobufferchnl
│ ├── dist
│ │ └── index.html
│ ├── package.json
│ ├── README.md
│ ├── package-lock.json
│ ├── src
│ │ └── index.js
│ └── webpack.config.js
├── webaudio-effect-unit
│ ├── package.json
│ ├── src
│ │ ├── index.js
│ │ ├── util.js
│ │ └── EffectUnit.js
│ ├── webpack.config.js
│ ├── README.md
│ └── dist
│ │ └── webaudio-effect-unit.min.js
└── webaudio-effect-units-collection
│ ├── package-lock.json
│ ├── package.json
│ ├── src
│ ├── effects
│ │ ├── highpass.js
│ │ ├── lowpass.js
│ │ ├── gain.js
│ │ ├── pizzicato.js
│ │ ├── moog.js
│ │ ├── bitcrusher.js
│ │ ├── chorus.js
│ │ ├── tremolo.js
│ │ ├── pingPongDelay.js
│ │ ├── delay.js
│ │ ├── phaser.js
│ │ ├── wahwah.js
│ │ ├── compressor.js
│ │ ├── reverb.js
│ │ └── dubDelay.js
│ └── index.js
│ ├── webpack.config.js
│ └── README.md
├── .babelrc
├── GIT.md
├── .eslintrc
├── config
├── webpack.config.dev.js
├── webpack.config.prod.js
├── webpack.config.js
├── README.md
├── webpack.config.common.js
└── util.js
├── .gitignore
├── package.json
├── playground.js
└── README.md
/packages/recordy/.npmignore:
--------------------------------------------------------------------------------
1 | js/
2 |
--------------------------------------------------------------------------------
/packages/wmstr/.npmignore:
--------------------------------------------------------------------------------
1 | src/
2 |
--------------------------------------------------------------------------------
/packages/audiolooper/.npmignore:
--------------------------------------------------------------------------------
1 | js/
2 | graphic.ods
3 | graphic.png
4 |
--------------------------------------------------------------------------------
/packages/soundcyclejs/.npmignore:
--------------------------------------------------------------------------------
1 | js/
2 | graphic.ods
3 | graphic.png
4 |
--------------------------------------------------------------------------------
/packages/wrecorder/src/index.js:
--------------------------------------------------------------------------------
1 | import recorder from './recorder';
2 |
3 | export default recorder;
4 |
--------------------------------------------------------------------------------
/packages/audiolooper/graphic.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scriptify/sountility/HEAD/packages/audiolooper/graphic.ods
--------------------------------------------------------------------------------
/packages/audiolooper/graphic.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scriptify/sountility/HEAD/packages/audiolooper/graphic.png
--------------------------------------------------------------------------------
/packages/wmstr/dist/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | wmstr
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/packages/recordy/dist/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | recordy
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/packages/webaudio-chnl/dist/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | chnl
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/packages/audiochnl/dist/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | audiochnl
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/packages/audiolooper/dist/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | audiolooper
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/packages/audiobufferchnl/dist/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | audiobufferchnl
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/.babelrc:
--------------------------------------------------------------------------------
1 | {
2 | "presets": [
3 | "es2015",
4 | "stage-0"
5 | ],
6 | "plugins": [
7 | "transform-async-to-generator",
8 | "transform-async-to-module-method",
9 | ["transform-runtime", {
10 | "polyfill": false,
11 | "regenerator": true
12 | }]
13 | ]
14 | }
15 |
--------------------------------------------------------------------------------
/GIT.md:
--------------------------------------------------------------------------------
1 | # Rules for git
2 | ## Branches naming conventions
3 |
4 | - __Bugfixes etc.__: bug/[bug-name]
5 | - __New features__: feat/[feature-name]
6 | - __Testing area/experiments__: exp/[experiment-name]
7 | - __Drastic changes/Total rewrites__: new/[rewrite-name]
8 | - __Other changes__: other/[descriptive-name]
9 |
--------------------------------------------------------------------------------
/packages/audiobufferchnl/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "audiobufferchnl",
3 | "version": "0.0.2",
4 | "description": "AudioBufferChnl = Chnl + BufferSourceNode",
5 | "main": "dist/audiobufferchnl.js",
6 | "scripts": {
7 | "start:dev": "webpack-dev-server",
8 | "start:prod": "webpack"
9 | },
10 | "keywords": [],
11 | "author": "Maximilian Torggler",
12 | "license": "ISC",
13 | "dependencies": {
14 | "webaudio-chnl": "0.0.8"
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/.eslintrc:
--------------------------------------------------------------------------------
1 | {
2 | "parser": "babel-eslint",
3 | "extends": "airbnb",
4 | "env": {
5 | "browser": true,
6 | "node": true
7 | },
8 | "rules": {
9 | "linebreak-style": 0,
10 | "no-plusplus": 0,
11 | "max-len": 0,
12 | "comma-dangle": 0,
13 | "quotes": ["warn", "backtick"],
14 | "curly": ["error", "multi-or-nest"],
15 | "no-param-reassign": 0,
16 | "no-use-before-define": 1,
17 | "no-shadow": 1,
18 | "no-console": 1
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/packages/wrecorder/README.md:
--------------------------------------------------------------------------------
1 | # Wrecorder
2 | ## means Webaudio-recorder
3 |
4 | This is a fork of Matt Diamond's recorderjs. Due to the fact that he dropped support for this project, I forked my own copy to have the possibility to fix bugs and add customizations.
5 | For an official documentation refer to this repository:
6 | [Matt Diamond's recorderjs](https://github.com/mattdiamond/Recorderjs)
7 |
8 | ## Bugs fixed
9 | - The click event in the forceDownload-function didn't work properly. Replaced the event.
10 |
--------------------------------------------------------------------------------
/packages/audiolooper/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "audiolooper",
3 | "version": "0.0.10",
4 | "description": "AudioLooper - Loop your tracks with automatic synchronization.",
5 | "main": "dist/audiolooper.js",
6 | "scripts": {
7 | "start:dev": "webpack-dev-server",
8 | "start:prod": "webpack"
9 | },
10 | "keywords": [],
11 | "author": "Maximilian Torggler",
12 | "license": "ISC",
13 | "repository": {
14 | "type": "git",
15 | "url": "https://github.com/scriptify/audiolooper.git"
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/packages/audiochnl/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "audiochnl",
3 | "version": "0.0.10",
4 | "description": "AudioChnl - One Audio object, multiple effects. Easy.",
5 | "main": "dist/audiochnl.js",
6 | "scripts": {
7 | "start:dev": "webpack-dev-server",
8 | "start:prod": "webpack"
9 | },
10 | "keywords": [],
11 | "author": "Maximilian Torggler",
12 | "license": "ISC",
13 | "repository": {
14 | "type": "git",
15 | "url": "https://github.com/scriptify/AudioChnl.git"
16 | },
17 | "dependencies": {
18 | "webaudio-chnl": "0.0.8"
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/packages/webaudio-chnl/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "webaudio-chnl",
3 | "version": "0.0.8",
4 | "description": "Chnl - one channel, all effects.",
5 | "main": "dist/webaudio-chnl.js",
6 | "scripts": {
7 | "start:dev": "webpack-dev-server",
8 | "start:prod": "webpack"
9 | },
10 | "keywords": [],
11 | "author": "Maximilian Torggler",
12 | "license": "ISC",
13 | "repository": {
14 | "type": "git",
15 | "url": "https://github.com/scriptify/Chnl.git"
16 | },
17 | "dependencies": {
18 | "webaudio-effect-units-collection": "^1.0.5"
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/packages/wrecorder/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "wrecorder",
3 | "version": "1.0.0",
4 | "description": "A fork of mattdiamonds recorderjs. I made some customizations and fixed a few bugs.",
5 | "main": "build/bundle.js",
6 | "scripts": {
7 | "start:dev": "webpack-dev-server",
8 | "start:prod": "webpack"
9 | },
10 | "keywords": [],
11 | "author": "Maximilian Torggler",
12 | "license": "ISC",
13 | "repository": {
14 | "type": "git",
15 | "url": ""
16 | },
17 | "dependencies": {
18 | "inline-worker": "^1.1.0"
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/packages/wmstr/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "wmstr",
3 | "version": "0.0.5",
4 | "description": "Wmstr - Webaudio Master Channel. Managing audio.",
5 | "main": "dist/wmstr.js",
6 | "scripts": {
7 | "start:dev": "webpack-dev-server",
8 | "start:prod": "webpack"
9 | },
10 | "keywords": [],
11 | "author": "Maximilian Torggler",
12 | "license": "ISC",
13 | "repository": {
14 | "type": "git",
15 | "url": "https://github.com/scriptify/Wmstr.git"
16 | },
17 | "dependencies": {
18 | "webaudio-chnl": "0.0.8",
19 | "wrecorder": "github:scriptify/Wrecorder"
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/packages/recordy/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "recordy",
3 | "version": "0.0.5",
4 | "description": "Record your microphone (or any other input) and add effects to it!",
5 | "main": "dist/recordy.js",
6 | "scripts": {
7 | "start:dev": "webpack-dev-server",
8 | "start:prod": "webpack"
9 | },
10 | "keywords": [],
11 | "author": "Maximilian Torggler",
12 | "license": "ISC",
13 | "repository": {
14 | "type": "git",
15 | "url": "https://github.com/scriptify/Recordy.git"
16 | },
17 | "dependencies": {
18 | "webaudio-chnl": "0.0.8",
19 | "wrecorder": "github:scriptify/Wrecorder"
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-unit/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "webaudio-effect-unit",
3 | "version": "1.1.4",
4 | "description": "Effect unit to add functionality to the WebAudio API. The effect unit lets you enable/disable effects and alter the interior state with custom methods.",
5 | "main": "dist/webaudio-effect-unit.js",
6 | "scripts": {
7 | "start:dev": "webpack-dev-server",
8 | "start:prod": "webpack"
9 | },
10 | "keywords": [],
11 | "author": "Maximilian Torggler",
12 | "license": "ISC",
13 | "repository": {
14 | "type": "git",
15 | "url": "https://github.com/scriptify/webaudio-effect-unit.git"
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/config/webpack.config.dev.js:
--------------------------------------------------------------------------------
1 | const path = require(`path`);
2 | const webpack = require(`webpack`);
3 | const { PATHS } = require(`./util.js`);
4 |
5 | module.exports = {
6 | entry: path.resolve(__dirname, `..`, `playground.js`),
7 | output: {
8 | path: PATHS.playgroundBuild,
9 | filename: `bundle.js`
10 | },
11 | devServer: {
12 | contentBase: PATHS.playgroundBuild,
13 | historyApiFallback: true,
14 | hot: true,
15 | inline: true,
16 | stats: `errors-only`,
17 | host: `0.0.0.0`
18 | },
19 | plugins: [
20 | new webpack.HotModuleReplacementPlugin()
21 | ],
22 | devtool: `eval-source-map`
23 | };
24 |
--------------------------------------------------------------------------------
/packages/soundcyclejs/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "soundcyclejs",
3 | "version": "0.0.2",
4 | "description": "soundcyclejs - a loopstation entirely written in js, for browsers.",
5 | "main": "dist/soundcyclejs.js",
6 | "scripts": {
7 | "start:dev": "webpack-dev-server",
8 | "start:prod": "webpack"
9 | },
10 | "keywords": [],
11 | "author": "Maximilian Torggler",
12 | "license": "ISC",
13 | "repository": {
14 | "url": "",
15 | "type": "git"
16 | },
17 | "dependencies": {
18 | "audiobufferchnl": "0.0.2",
19 | "audiolooper": "0.0.10",
20 | "recordy": "0.0.5",
21 | "uuid": "^3.1.0",
22 | "wmstr": "0.0.5"
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-units-collection/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "webaudio-effect-units-collection",
3 | "version": "1.0.5",
4 | "lockfileVersion": 1,
5 | "requires": true,
6 | "dependencies": {
7 | "tunajs": {
8 | "version": "https://registry.npmjs.org/tunajs/-/tunajs-1.0.0.tgz",
9 | "integrity": "sha1-SI8AoPvpWkwuMKqpEEdr29Xyclg="
10 | },
11 | "webaudio-effect-unit": {
12 | "version": "1.1.4",
13 | "resolved": "https://registry.npmjs.org/webaudio-effect-unit/-/webaudio-effect-unit-1.1.4.tgz",
14 | "integrity": "sha512-cACj0AwMWV5xAfbpsOZAoN/QmadVV31oUjJB86/3dTUrdv20uZEkUrCYXiO/Do3GnDZXJWKlQQnY7IczYoxEYw=="
15 | }
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-units-collection/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "webaudio-effect-units-collection",
3 | "version": "1.0.5",
4 | "description": "A bunch of different EffectUnits which can be used right away. They are all based on the webaudio-effect-unit.",
5 | "main": "dist/webaudio-effect-units-collection.js",
6 | "scripts": {
7 | "start:dev": "webpack-dev-server",
8 | "start:prod": "webpack"
9 | },
10 | "keywords": [],
11 | "author": "Maximilian Torggler",
12 | "license": "ISC",
13 | "dependencies": {
14 | "tunajs": "*",
15 | "webaudio-effect-unit": "^1.1.4"
16 | },
17 | "repository": {
18 | "type": "git",
19 | "url": "https://github.com/scriptify/webaudio-effect-units-collection.git"
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-unit/src/index.js:
--------------------------------------------------------------------------------
1 | import EffectUnit from './EffectUnit';
2 |
3 | export default EffectUnit;
4 |
5 | /* const audioCtx = new AudioContext();
6 | const effect = new EffectUnit({
7 | name: 'gain',
8 | effectChain: {
9 | gain: audioCtx.createGain()
10 | },
11 | values: [
12 | {
13 | name: 'gain',
14 | options: {
15 | defaultValue: 1
16 | },
17 | set: (effectChain, val) => {
18 | effectChain.gain.gain.value = val;
19 | }
20 | }
21 | ]
22 | }, audioCtx);
23 |
24 | effect.setValue('gain', 0.8);
25 |
26 | const osci = audioCtx.createOscillator();
27 | osci.frequency.value = 600;
28 | osci.connect(effect.input);
29 | effect.connect(audioCtx.destination);
30 | osci.start();
31 |
32 | window.setTimeout(() => {
33 | effect.disconnect();
34 | }, 1000);*/
35 |
--------------------------------------------------------------------------------
/packages/wmstr/src/index.js:
--------------------------------------------------------------------------------
1 | import Chnl from 'webaudio-chnl';
2 | import Recorder from 'wrecorder';
3 |
4 | export default class Wmstr extends Chnl {
5 |
6 | recorder;
7 |
8 | constructor(audioCtx, connectToSpeakers = true) {
9 | super(audioCtx);
10 |
11 | if (connectToSpeakers)
12 | this.connect(audioCtx.destination);
13 |
14 | this.recorder = new Recorder(this);
15 | }
16 |
17 | startRecording() {
18 | this.recorder.record();
19 | }
20 |
21 | stopRecording(filename = ``) {
22 | return new Promise((resolve) => {
23 | this.recorder.stop();
24 |
25 | this.recorder.exportWAV((blob) => {
26 | if (filename !== ``)
27 | Recorder.forceDownload(blob, filename);
28 |
29 | resolve(blob);
30 | this.recorder.clear();
31 | });
32 | });
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 |
6 | # Runtime data
7 | pids
8 | *.pid
9 | *.seed
10 |
11 | # Directory for instrumented libs generated by jscoverage/JSCover
12 | lib-cov
13 |
14 | # Coverage directory used by tools like istanbul
15 | coverage
16 |
17 | # nyc test coverage
18 | .nyc_output
19 |
23 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
24 | .grunt
25 |
26 | # node-waf configuration
27 | .lock-wscript
28 |
29 | # Compiled binary addons (http://nodejs.org/api/addons.html)
30 | build/Release
31 |
32 | # Dependency directories
33 | node_modules
34 | jspm_packages
35 |
36 | # Optional npm cache directory
37 | .npm
38 |
39 | # Optional REPL history
40 | .node_repl_history
41 |
--------------------------------------------------------------------------------
/packages/recordy/src/getInput.js:
--------------------------------------------------------------------------------
// Requests an audio input stream (microphone) via getUserMedia.
// Prefers the modern promise-based navigator.mediaDevices API and falls
// back to the legacy, vendor-prefixed callback variants.
// Returns a Promise resolving to a MediaStream; rejects when no
// getUserMedia implementation is available.
export default function getInput() {
  const getUserMedia = (navigator.getUserMedia ||
    navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia);

  const constraints = {
    audio: {
      mandatory: {
        googEchoCancellation: `false`,
        googAutoGainControl: `false`,
        googNoiseSuppression: `false`,
        googHighpassFilter: `false`
      },
      optional: []
    },
  };

  // Modern API: already promise-based, use it directly.
  if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia)
    return navigator.mediaDevices.getUserMedia(constraints);

  // BUGFIX: the rejected promise was previously created but not returned,
  // so unsupported browsers silently fell through to the legacy call below.
  if (!getUserMedia)
    return Promise.reject(new Error(`getUserMedia not supported!`));

  return new Promise((resolve, reject) => {
    // BUGFIX: `constraints` was accidentally passed twice, shifting
    // resolve/reject out of position in the legacy signature
    // getUserMedia(constraints, successCallback, errorCallback).
    getUserMedia.call(navigator, constraints, resolve, reject);
  });
}
--------------------------------------------------------------------------------
/packages/soundcyclejs/src/index.js:
--------------------------------------------------------------------------------
1 | import SoundCycle from './soundcycle';
2 | /*
3 | const soundcycle = new SoundCycle();
4 |
5 | soundcycle.startRecording();
6 |
7 | window.setTimeout(() => {
8 | soundcycle.stopRecording()
9 | .then(res => {
10 | soundcycle.setCurrentLane(res.laneId);
11 | soundcycle.setMode(soundcycle.MODES.ADD_TO_LANE);
12 | soundcycle.startRecording();
13 |
14 | window.setTimeout(() => {
15 | soundcycle.stopRecording()
16 | .then(res1 => {
17 | soundcycle.stopTrack({ id: res.chnlId });
18 | soundcycle.stopTrack({ id: res1.chnlId });
19 | window.setTimeout(() => {
20 | console.log('play again')
21 | soundcycle.playTrack({ id: res.chnlId });
22 | }, 1000);
23 | });
24 | }, 1000);
25 | });
26 | }, 1000);*/
27 |
28 | export default SoundCycle;
29 |
--------------------------------------------------------------------------------
/config/webpack.config.prod.js:
--------------------------------------------------------------------------------
1 | const path = require(`path`);
2 | const { PATHS } = require(`./util.js`);
3 | const webpack = require(`webpack`);
4 |
5 | module.exports = function createProductionConfiguration(packageName) {
6 | return {
7 | entry: {
8 | [packageName]: path.resolve(PATHS.packages, `${packageName}/src/index.js`),
9 | [`${packageName}.min`]: path.resolve(PATHS.packages, `${packageName}/src/index.js`)
10 | },
11 | output: {
12 | path: path.resolve(PATHS.packages, `${packageName}/dist`),
13 | filename: `[name].js`,
14 | libraryTarget: `umd`,
15 | library: packageName,
16 | umdNamedDefine: true
17 | },
18 | plugins: [
19 | new webpack.DefinePlugin({
20 | 'process.env': {
21 | NODE_ENV: JSON.stringify(`production`)
22 | }
23 | }),
24 | new webpack.optimize.UglifyJsPlugin({
25 | include: /\.min\.js$/
26 | })
27 | ]
28 | };
29 | };
30 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-units-collection/src/effects/highpass.js:
--------------------------------------------------------------------------------
1 | import EffectUnit from 'webaudio-effect-unit';
2 |
3 | const DEFAULT_FREQUENCY = 0;
4 |
5 | export const highpassData = {
6 | name: `highpass`,
7 | values: [
8 |
9 | {
10 | name: `frequency`,
11 | options: {
12 | type: `range`,
13 | defaultValue: DEFAULT_FREQUENCY,
14 | min: 0,
15 | max: 20000,
16 | step: 20
17 | },
18 | set: (effectChain, value) => {
19 | effectChain.highpass.frequency.value = value;
20 | }
21 | }
22 |
23 | ]
24 | };
25 |
// Creates the highpass EffectUnit for the collection.
// BUGFIX: this factory was (mis)named `createLowpass` — a copy/paste
// leftover from lowpass.js. It is an anonymous-default export, so the
// rename to `createHighpass` is transparent to all importers.
export default function createHighpass(audioCtx) {
  return new EffectUnit({
    ...highpassData,
    effectChain: {
      // Lazily builds the underlying BiquadFilterNode in highpass mode.
      highpass: () => {
        const hp = audioCtx.createBiquadFilter();
        hp.type = `highpass`;
        hp.frequency.value = DEFAULT_FREQUENCY;
        return hp;
      }
    }
  }, audioCtx);
}
39 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-units-collection/src/effects/lowpass.js:
--------------------------------------------------------------------------------
1 | import EffectUnit from 'webaudio-effect-unit';
2 |
3 | const DEFAULT_FREQUENCY = 20000;
4 |
5 | export const lowpassData = {
6 | name: `lowpass`,
7 | values: [
8 |
9 | {
10 | name: `frequency`,
11 | options: {
12 | type: `range`,
13 | defaultValue: DEFAULT_FREQUENCY,
14 | min: 0,
15 | max: 20000,
16 | step: 20
17 | },
18 | set: (effectChain, value) => {
19 | effectChain.lowpass.frequency.value = value;
20 | }
21 | }
22 |
23 | ]
24 | };
25 |
26 | export default function createLowpass(audioCtx) {
27 | return new EffectUnit({
28 | ...lowpassData,
29 | effectChain: {
30 | lowpass: () => {
31 | const lp = audioCtx.createBiquadFilter();
32 | lp.type = `lowpass`;
33 | lp.frequency.value = DEFAULT_FREQUENCY;
34 | return lp;
35 | }
36 | }
37 | }, audioCtx);
38 | }
39 |
--------------------------------------------------------------------------------
/config/webpack.config.js:
--------------------------------------------------------------------------------
1 | const merge = require(`webpack-merge`);
2 |
3 | const DEV_CONFIGURATION = require(`./webpack.config.dev.js`);
4 | const COMMON_CONFIGURATION = require(`./webpack.config.common.js`);
5 | const createProductionConfiguration = require(`./webpack.config.prod.js`);
6 | const { commandObject } = require(`./util.js`);
7 |
8 | // Maybe remove with new version:
9 | process.noDeprecation = true;
10 |
11 | switch (commandObject.cmd) {
12 |
13 | case `dev`:
14 | module.exports = merge(COMMON_CONFIGURATION, DEV_CONFIGURATION);
15 | break;
16 |
17 | case `prod`:
18 | if (commandObject.params.length < 1)
19 | throw new Error(`Missing parameter to build the package: start:prod:`);
20 |
21 | console.log(`Building: ${commandObject.params}`);
22 |
23 | module.exports = merge(COMMON_CONFIGURATION, createProductionConfiguration(commandObject.params[0]));
24 | break;
25 |
26 | default:
27 | throw new Error(`No such command: ${commandObject.cmd}`);
28 | }
29 |
--------------------------------------------------------------------------------
/packages/audiochnl/README.md:
--------------------------------------------------------------------------------
1 | # AudioChnl
2 | ### One Audio object, multiple effects. Easy.
3 |
4 | ## What is an AudioChnl?
5 | It's an extension of the chnl module: This module is made for playing, pausing, stopping and manipulating Audio-objects.
6 |
7 | If you are not familiar with the [chnl module](../webaudio-chnl/README.md), just have a look at it.
8 |
9 | ## Usage
10 | ### Creating an AudioChnl
11 | The constructor requires 2 arguments:
12 | 1. Your AudioContext
13 | 2. Your Audio-object
14 |
15 | ```javascript
16 | const audioCtx = new AudioContext();
17 | const audio = new Audio('song.mp3');
18 | const audioChnl = new AudioChnl(audioCtx, audio);
19 | ```
20 |
21 | ### Methods
22 | ```javascript
23 | .start()
24 | ```
25 | ```javascript
26 | .stop()
27 | ```
28 | ```javascript
29 | .pause()
30 | ```
31 | ### Example
32 | ```javascript
33 | const audioCtx = new AudioContext();
34 | const audio = new Audio('song.mp3');
35 | const audioChnl = new AudioChnl(audioCtx, audio);
36 | audioChnl.start();
37 | ```
38 |
--------------------------------------------------------------------------------
/packages/webaudio-chnl/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "webaudio-chnl",
3 | "version": "0.0.8",
4 | "lockfileVersion": 1,
5 | "requires": true,
6 | "dependencies": {
7 | "tunajs": {
8 | "version": "1.0.0",
9 | "resolved": "https://registry.npmjs.org/tunajs/-/tunajs-1.0.0.tgz",
10 | "integrity": "sha1-SI8AoPvpWkwuMKqpEEdr29Xyclg="
11 | },
12 | "webaudio-effect-unit": {
13 | "version": "1.1.4",
14 | "resolved": "https://registry.npmjs.org/webaudio-effect-unit/-/webaudio-effect-unit-1.1.4.tgz",
15 | "integrity": "sha512-cACj0AwMWV5xAfbpsOZAoN/QmadVV31oUjJB86/3dTUrdv20uZEkUrCYXiO/Do3GnDZXJWKlQQnY7IczYoxEYw=="
16 | },
17 | "webaudio-effect-units-collection": {
18 | "version": "1.0.5",
19 | "resolved": "https://registry.npmjs.org/webaudio-effect-units-collection/-/webaudio-effect-units-collection-1.0.5.tgz",
20 | "integrity": "sha512-CrD3eEoJwzI2XA85I/a/7UF3ZThECGE4Gu9Z2fZGoZTzm9w7UxVDJzr6loGtUeCW71xHYuHnky4CY/A89Sbdbw==",
21 | "requires": {
22 | "tunajs": "1.0.0",
23 | "webaudio-effect-unit": "1.1.4"
24 | }
25 | }
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "soundcyclejs-monorepo",
3 | "version": "0.0.1",
4 | "description": "",
5 | "main": "build/bundle.js",
6 | "scripts": {
7 | "start:dev": "webpack-dev-server --config config/webpack.config.js",
8 | "start:prod": "webpack --config config/webpack.config.js"
9 | },
10 | "devDependencies": {
11 | "babel-core": "*",
12 | "babel-eslint": "^7.2.1",
13 | "babel-loader": "*",
14 | "babel-plugin-transform-async-to-generator": "*",
15 | "babel-plugin-transform-async-to-module-method": "*",
16 | "babel-plugin-transform-runtime": "*",
17 | "babel-polyfill": "*",
18 | "babel-preset-es2015": "*",
19 | "babel-preset-stage-0": "*",
20 | "eslint": "^3.18.0",
21 | "eslint-config-airbnb": "^14.1.0",
22 | "eslint-loader": "^1.7.0",
23 | "eslint-plugin-import": "^2.2.0",
24 | "eslint-plugin-jsx-a11y": "^4.0.0",
25 | "eslint-plugin-react": "^6.10.3",
26 | "file-loader": "*",
27 | "webpack": "*",
28 | "webpack-dev-server": "*",
29 | "webpack-merge": "*",
30 | "worker-loader": "*"
31 | },
32 | "author": "",
33 | "license": "ISC"
34 | }
35 |
--------------------------------------------------------------------------------
/config/README.md:
--------------------------------------------------------------------------------
1 | # How to maintain this monorepo
2 |
3 | ## Adding a new package
4 | Create a new folder with the packagename as the folder name under the 'packages' directory.
5 | The newly created folder must have the following folder structure:
6 | ```
7 |
8 | dist
9 | src
10 | package.json
11 | ```
12 |
13 | That's it.
14 | Now you can require the package from any other package or in the playground!
15 |
16 | ## Publishing a package
17 |
18 | First, build the package:
19 |
20 | ```
21 | npm run start:prod -- [package-name]
22 | ```
23 |
24 | Now, in the dist folder of the package there are two files:
25 |
26 | ```
27 |
28 | [package-name].js
29 | [package-name].min.js
30 | ```
31 |
32 | Before you publish it to npm, one important step must be fulfilled:
33 | All local dependencies (so all packages in the monorepo on which the actual package depends) must be added to the package.json as a dependency. This requires those packages, on which the actual one is dependent, to be already hosted on npm. This is required because if someone installs this package, the required dependencies must be installed.
34 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-units-collection/src/effects/gain.js:
--------------------------------------------------------------------------------
1 | import EffectUnit from 'webaudio-effect-unit';
2 |
3 | export const gainData = {
4 | name: `gain`,
5 | values: [
6 |
7 | {
8 | name: `gain`,
9 | options: {
10 | type: `range`,
11 | defaultValue: 1,
12 | min: 0,
13 | max: 1,
14 | step: 0.01
15 | },
16 | set: (effectChain, value) => {
17 | effectChain.gain.gain.value = value;
18 | }
19 | },
20 |
21 | {
22 | name: `muted`,
23 | options: {
24 | type: `single`,
25 | defaultValue: false
26 | },
27 | set: (effectChain, value) => {
28 | effectChain.gain.gain.value = value ? 0 : 1;
29 | }
30 | }
31 |
32 | ]
33 | };
34 |
35 | export default function createGain(audioCtx) {
36 | const gainNode = new EffectUnit({
37 | ...gainData,
38 | effectChain: {
39 | gain: function createGainNode() {
40 | return audioCtx.createGain();
41 | }
42 | }
43 | }, audioCtx);
44 |
45 | gainNode.enable();
46 |
47 | return gainNode;
48 | }
49 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-units-collection/src/effects/pizzicato.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable */
2 |
// Prototype object shared by Pizzicato-style effects: routing helpers that
// delegate to the effect instance's `output` node. Built on a null prototype
// so nothing from Object.prototype leaks through; both methods are enumerable
// and (by descriptor default) non-writable and non-configurable.
export var baseEffect = Object.defineProperties(Object.create(null), {

  connect: {
    enumerable: true,

    // Routes this effect's output into the given AudioNode; chainable.
    value: function (audioNode) {
      this.output.connect(audioNode);
      return this;
    }
  },

  disconnect: {
    enumerable: true,

    // Detaches this effect's output from the given AudioNode; chainable.
    value: function (audioNode) {
      this.output.disconnect(audioNode);
      return this;
    }
  }
});
23 |
// True only for primitive, non-NaN number values (the `arg === +arg`
// check rejects NaN and boxed Number objects).
// Fix: the original called the bare global `toString` binding, which only
// works when Object.prototype.toString is reachable through the global
// object; call it explicitly so the check is robust under strict/bundled
// environments and linters.
function isNumber(arg) {
  return Object.prototype.toString.call(arg) === '[object Number]' && arg === +arg;
}
27 |
// Whether `arg` lies within the inclusive interval [min, max].
// Returns false whenever any argument is not a real number.
export function isInRange(arg, min, max) {
  const allNumbers = isNumber(arg) && isNumber(min) && isNumber(max);
  return allNumbers ? (min <= arg && arg <= max) : false;
}
34 |
// Wet (effected) signal level for a dry/wet mix in [0, 1].
// Invalid input yields 0; mixes of 0.5 and above are fully wet (1);
// below 0.5 the wet level ramps linearly up towards 1.
export function getWetLevel(mix) {
  if (!isNumber(mix) || mix > 1 || mix < 0) return 0;

  // Note: the exact expression is kept so floating-point results match.
  return mix >= 0.5 ? 1 : 1 - ((0.5 - mix) * 2);
}
44 |
// Dry (unprocessed) signal level for a dry/wet mix in [0, 1].
// Invalid input yields 0; mixes of 0.5 and below are fully dry (1);
// above 0.5 the dry level ramps linearly down towards 0.
export function getDryLevel(mix) {
  if (!isNumber(mix) || mix > 1 || mix < 0) return 0;

  // Note: the exact expression is kept so floating-point results match.
  return mix <= 0.5 ? 1 : 1 - ((mix - 0.5) * 2);
}
54 |
--------------------------------------------------------------------------------
/packages/audiochnl/src/index.js:
--------------------------------------------------------------------------------
1 | import Chnl from 'webaudio-chnl';
2 |
3 |
// A Chnl whose input is fed by an HTMLAudioElement (via a
// MediaElementSourceNode), adding simple transport controls.
export default class AudioChnl extends Chnl {

  // The wrapped Audio/HTMLAudioElement used as the audio source.
  audioObj;
  // Becomes true once the element's metadata (duration etc.) has loaded.
  isReady = false;

  // audioCtx: AudioContext passed through to the Chnl base class.
  // audioObj: an Audio/HTMLAudioElement to wrap.
  // loaded: optional callback invoked after `loadedmetadata` fires.
  constructor(audioCtx, audioObj, loaded = () => {}) {
    super(audioCtx);
    this.audioObj = audioObj;

    this.audioObj.addEventListener(`loadedmetadata`, () => {
      this.isReady = true;
      loaded();
    });

    // Route the element's audio into this channel's processing graph.
    // NOTE(review): createMediaElementSource can only be called once per
    // element — constructing two AudioChnls from the same element throws.
    const mediaSource = audioCtx.createMediaElementSource(audioObj);
    mediaSource.connect(this.input);
  }

  // Starts (or resumes) playback of the underlying element.
  start() {
    this.audioObj.play();
  }

  // Stops playback and rewinds to the beginning.
  stop() {
    this.audioObj.currentTime = 0;
    this.pause();
  }

  // Pauses playback, keeping the current position.
  pause() {
    this.audioObj.pause();
  }

  // Jumps to the given playback position.
  seek(time) {
    // Time in seconds
    this.audioObj.currentTime = time;
  }

}
41 |
42 | /* const audioCtx = new AudioContext();
43 | const audio = new Audio(song);
44 | const audioChnl = new AudioChnl(audioCtx, audio);
45 | audioChnl.start();
46 | window.setTimeout(() => {
47 | audioChnl.pause();
48 | window.setTimeout(() => {
49 | audioChnl.start();
50 | }, 4000);
51 | }, 6000); */
52 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-unit/src/util.js:
--------------------------------------------------------------------------------
// Returns a copy of `values` where every entry's `set` callback is
// pre-bound to `param` as its first argument (`this` fixed to undefined).
// Throws if any entry's `set` field is not a function.
export const bindMethodsToValues = (values, param) =>
  values.map((value) => {
    const { set } = value;
    if (typeof set !== `function`)
      throw new Error(`The specified value for the 'set'-field of the '${value.name}' - value is not a function!`);

    return { ...value, set: set.bind(undefined, param) };
  });
11 |
// Shallow-copies `obj`, replacing every function-valued member with the
// result of calling it with no arguments; other members pass through
// untouched. The input object itself is never mutated.
export const functionsToValues = (obj) => {
  const copy = { ...obj };

  for (const key of Object.keys(copy)) {
    if (typeof copy[key] === `function`)
      copy[key] = copy[key]();
  }

  return copy;
};
23 |
// Collects the values of an object's own enumerable string-keyed
// properties into an array, in the same order as Object.keys.
// Idiom fix: the manual keys+push loop is exactly Object.values.
export const objToArray = (obj) => Object.values(obj);
31 |
// Looks up the value entry whose `name` matches `valueName`.
// Idiom/perf fix: Array#find stops at the first hit instead of
// building a full filtered array and taking element 0.
// @throws {Error} when no entry with that name exists.
export const filterValue = (values, valueName) => {
  const match = values.find(val => val.name === valueName);

  if (!match)
    throw new Error(`Tried to access inexistent value '${valueName}'.`);

  return match;
};
40 |
--------------------------------------------------------------------------------
/packages/audiobufferchnl/README.md:
--------------------------------------------------------------------------------
1 | # audiobufferchnl
2 | ## audiobufferchnl = BufferSourceNode + chnl
3 |
4 | This package extends the chnl module and adds the possibility to turn a webaudio BufferSourceNode into a chnl.
5 | This adds a lot of advantages. For further information, have a look at [chnl](../webaudio-chnl/README.md).
6 |
7 | __audiobufferchnl is a part of the sountility collection!__
8 |
9 | ## Methods
10 |
11 | ### Constructor
12 | ```javascript
13 | new AudioBufferChnl(audioCtx, bufferSourceNode)
14 | ```
15 |
16 | To create a new instance, use the constructor like this.
17 | The first parameter _audioCtx_ must be an _AudioContext_ object.
18 | The second parameter _bufferSourceNode_ must be a BufferSourceNode object.
19 |
20 | ### Change the BufferSourceNode
21 | ```javascript
22 | .setBufferSourceNode(bufferSourceNode)
23 | ```
24 |
25 | ## Accessing the BufferSourceNode
To directly access the BufferSourceNode object, use the _bufferSourceNode_ field.
27 |
28 | ```javascript
29 | .bufferSourceNode
30 | ```
31 |
32 | ## Example
33 | ```javascript
34 | const audioCtx = new AudioContext();
35 | const buffer = audioCtx.createBuffer(2, audioCtx.sampleRate * 2.0, audioCtx.sampleRate);
36 | const sourceNode = audioCtx.createBufferSource();
37 | sourceNode.buffer = buffer;
38 |
const bufferChnl = new AudioBufferChnl(audioCtx, sourceNode);
40 | // Do something with the bufferChnl
41 | ```
42 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-units-collection/src/effects/moog.js:
--------------------------------------------------------------------------------
1 | import EffectUnit from 'webaudio-effect-unit';
2 | import Tuna from 'tunajs';
3 |
4 | const DEFAULT_CUTOFF = 0.065;
5 | const DEFAULT_RESONANCE = 3.5;
6 |
// Descriptor for the Moog-style filter effect: `cutoff` and `resonance`
// are exposed as user-editable range parameters; each `set` writes
// through to the tuna MoogFilter node in the effect chain.
export const moogData = {
  name: `moog`,
  values: [

    {
      // Normalized filter cutoff (0 to 1).
      name: `cutoff`,
      options: {
        type: `range`,
        defaultValue: DEFAULT_CUTOFF,
        min: 0,
        max: 1,
        step: 0.01
      },
      set: (effectChain, value) => {
        effectChain.moog.cutoff = value;
      }
    },

    {
      // Filter resonance (0 to 4).
      name: `resonance`,
      options: {
        type: `range`,
        defaultValue: DEFAULT_RESONANCE,
        min: 0,
        max: 4,
        step: 0.01
      },
      set: (effectChain, value) => {
        effectChain.moog.resonance = value;
      }
    }

  ]
};
41 |
/**
 * Creates a Moog-style filter EffectUnit backed by tuna's MoogFilter.
 * @param {AudioContext} audioCtx
 * @param {Tuna} [tuna] - optional shared Tuna instance; a fresh one is
 *                        created from audioCtx when omitted.
 * @returns {EffectUnit}
 */
export default function createMoog(audioCtx, tuna = new Tuna(audioCtx)) {
  return new EffectUnit({
    ...moogData,
    effectChain: {
      // Consistency fix: use the shared DEFAULT_* constants (previously
      // hard-coded copies of the same values, 0.065 and 3.5) so the node's
      // initial state stays in sync with moogData's declared defaults.
      moog: () => new tuna.MoogFilter({
        cutoff: DEFAULT_CUTOFF, // 0 to 1
        resonance: DEFAULT_RESONANCE, // 0 to 4
        bufferSize: 4096 // 256 to 16384, NOT INCLUDED AS EDITABLE!
      })
    }
  },
  audioCtx);
}
57 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-units-collection/src/effects/bitcrusher.js:
--------------------------------------------------------------------------------
1 | import EffectUnit from 'webaudio-effect-unit';
2 | import Tuna from 'tunajs';
3 |
4 | const DEFAULT_BITS = 4;
5 | const DEFAULT_NORMFREQ = 0.1;
6 |
// Descriptor for the bitcrusher effect: `bits` and `normfreq` are exposed
// as user-editable range parameters; each `set` writes through to the
// tuna Bitcrusher node in the effect chain.
export const bitcrusherData = {
  name: `bitcrusher`,
  values: [

    {
      // Bit depth of the crushed signal (1 to 16).
      name: `bits`,
      options: {
        type: `range`,
        defaultValue: DEFAULT_BITS,
        min: 1,
        max: 16,
        step: 1
      },
      set: (effectChain, value) => {
        effectChain.bitcrusher.bits = value;
      }
    },

    {
      // Normalized frequency (editable range here is 0.1 to 1).
      name: `normfreq`,
      options: {
        type: `range`,
        defaultValue: DEFAULT_NORMFREQ,
        min: 0.1,
        max: 1,
        step: 0.01
      },
      set: (effectChain, value) => {
        effectChain.bitcrusher.normfreq = value;
      }
    }

  ]
};
41 |
/**
 * Creates a bitcrusher EffectUnit backed by tuna's Bitcrusher.
 * @param {AudioContext} audioCtx
 * @param {Tuna} [tuna] - optional shared Tuna instance; a fresh one is
 *                        created from audioCtx when omitted.
 * @returns {EffectUnit}
 */
export default function createBitcrusher(audioCtx, tuna = new Tuna(audioCtx)) {
  return new EffectUnit({
    ...bitcrusherData,
    effectChain: {
      // Consistency fix: use the shared DEFAULT_* constants (previously
      // hard-coded copies of the same values, 4 and 0.1) so the node's
      // initial state stays in sync with bitcrusherData's declared defaults.
      bitcrusher: () => new tuna.Bitcrusher({
        bits: DEFAULT_BITS, // 1 to 16
        normfreq: DEFAULT_NORMFREQ, // 0 to 1
        bufferSize: 4096 // 256 to 16384, NOT INCLUDED AS EDITABLE!
      })
    }
  },
  audioCtx);
}
57 |
--------------------------------------------------------------------------------
/packages/audiochnl/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "audiochnl",
3 | "version": "0.0.10",
4 | "lockfileVersion": 1,
5 | "requires": true,
6 | "dependencies": {
7 | "tunajs": {
8 | "version": "1.0.0",
9 | "resolved": "https://registry.npmjs.org/tunajs/-/tunajs-1.0.0.tgz",
10 | "integrity": "sha1-SI8AoPvpWkwuMKqpEEdr29Xyclg="
11 | },
12 | "webaudio-chnl": {
13 | "version": "0.0.8",
14 | "resolved": "https://registry.npmjs.org/webaudio-chnl/-/webaudio-chnl-0.0.8.tgz",
15 | "integrity": "sha512-y4Mg46CM1yp8DweigkD3dQlCv8fVqMLsqV7Kf4XFT02tfO0ayiqityCjFr+6eCOFajSqhd7Uzpu69R+M1NHsGA==",
16 | "requires": {
17 | "webaudio-effect-units-collection": "1.0.5"
18 | }
19 | },
20 | "webaudio-effect-unit": {
21 | "version": "1.1.4",
22 | "resolved": "https://registry.npmjs.org/webaudio-effect-unit/-/webaudio-effect-unit-1.1.4.tgz",
23 | "integrity": "sha512-cACj0AwMWV5xAfbpsOZAoN/QmadVV31oUjJB86/3dTUrdv20uZEkUrCYXiO/Do3GnDZXJWKlQQnY7IczYoxEYw=="
24 | },
25 | "webaudio-effect-units-collection": {
26 | "version": "1.0.5",
27 | "resolved": "https://registry.npmjs.org/webaudio-effect-units-collection/-/webaudio-effect-units-collection-1.0.5.tgz",
28 | "integrity": "sha512-CrD3eEoJwzI2XA85I/a/7UF3ZThECGE4Gu9Z2fZGoZTzm9w7UxVDJzr6loGtUeCW71xHYuHnky4CY/A89Sbdbw==",
29 | "requires": {
30 | "tunajs": "1.0.0",
31 | "webaudio-effect-unit": "1.1.4"
32 | }
33 | }
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/packages/audiobufferchnl/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "audiobufferchnl",
3 | "version": "0.0.2",
4 | "lockfileVersion": 1,
5 | "requires": true,
6 | "dependencies": {
7 | "tunajs": {
8 | "version": "1.0.0",
9 | "resolved": "https://registry.npmjs.org/tunajs/-/tunajs-1.0.0.tgz",
10 | "integrity": "sha1-SI8AoPvpWkwuMKqpEEdr29Xyclg="
11 | },
12 | "webaudio-chnl": {
13 | "version": "0.0.8",
14 | "resolved": "https://registry.npmjs.org/webaudio-chnl/-/webaudio-chnl-0.0.8.tgz",
15 | "integrity": "sha512-y4Mg46CM1yp8DweigkD3dQlCv8fVqMLsqV7Kf4XFT02tfO0ayiqityCjFr+6eCOFajSqhd7Uzpu69R+M1NHsGA==",
16 | "requires": {
17 | "webaudio-effect-units-collection": "1.0.5"
18 | }
19 | },
20 | "webaudio-effect-unit": {
21 | "version": "1.1.4",
22 | "resolved": "https://registry.npmjs.org/webaudio-effect-unit/-/webaudio-effect-unit-1.1.4.tgz",
23 | "integrity": "sha512-cACj0AwMWV5xAfbpsOZAoN/QmadVV31oUjJB86/3dTUrdv20uZEkUrCYXiO/Do3GnDZXJWKlQQnY7IczYoxEYw=="
24 | },
25 | "webaudio-effect-units-collection": {
26 | "version": "1.0.5",
27 | "resolved": "https://registry.npmjs.org/webaudio-effect-units-collection/-/webaudio-effect-units-collection-1.0.5.tgz",
28 | "integrity": "sha512-CrD3eEoJwzI2XA85I/a/7UF3ZThECGE4Gu9Z2fZGoZTzm9w7UxVDJzr6loGtUeCW71xHYuHnky4CY/A89Sbdbw==",
29 | "requires": {
30 | "tunajs": "1.0.0",
31 | "webaudio-effect-unit": "1.1.4"
32 | }
33 | }
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/playground.js:
--------------------------------------------------------------------------------
/* eslint import/no-extraneous-dependencies: 0 */
/* eslint import/no-unresolved: 0 */
/* eslint import/extensions: 0 */

// Manual playground script: routes microphone input (Recordy) through a
// Chnl with two effects enabled, and renders a full-window frequency-bar
// visualization driven by requestAnimationFrame.

import Chnl from 'webaudio-chnl';
import Recordy from 'recordy';

const audioCtx = new AudioContext();
const recordy = new Recordy(audioCtx);


const chnl = new Chnl(audioCtx);
const analyser = chnl.getAnalyser();
// Byte-frequency buffer reused on every animation frame.
const data = new Uint8Array(analyser.frequencyBinCount);

// getInput() is async (presumably waits for mic permission — see recordy);
// only after it resolves is the recorder routed into the channel.
recordy.getInput()
  .then(() => recordy.connect(chnl));

/* const osci = audioCtx.createOscillator();
osci.frequency.value = 20;

osci.connect(chnl);*/
// Audio graph: chnl -> analyser -> speakers.
chnl.connect(analyser);
analyser.connect(audioCtx.destination);
// chnl.connect(audioCtx.destination);

// Activate effects
chnl.addEffect(`pingPongDelay`);
chnl.addEffect(`tremolo`);

// osci.start();

// Strip default page margins so the bars fill the whole viewport.
const body = document.querySelector(`body`);
body.setAttribute(`style`, `padding: 0; margin: 0;`);

// Renders one frame of the visualization: one div per frequency bin,
// colored by the bin's magnitude, then schedules the next frame.
// NOTE(review): rebuilding body.innerHTML every frame is expensive;
// fine for a playground, not for production.
function draw() {
  analyser.getByteFrequencyData(data);
  const divW = window.innerWidth / data.length;
  body.innerHTML = ``;
  data.forEach((val) => {
    const div = document.createElement(`div`);
    div.setAttribute(`style`, `width: ${divW}px; height: ${window.innerHeight}px; display: inline-block; background: rgb(${val - 100},${val - 50},${val - 12});`);
    body.appendChild(div);
  });
  window.requestAnimationFrame(draw);
  /* if (osci.frequency.value <= 1000)
    osci.frequency.value += 20;
  else {
    const multi = (Math.random() >= 0.5) ? 1 : -1;
    osci.frequency.value += Math.random() * 10 * multi;
  }*/
}

window.requestAnimationFrame(draw);
55 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-units-collection/src/effects/chorus.js:
--------------------------------------------------------------------------------
1 | import EffectUnit from 'webaudio-effect-unit';
2 | import Tuna from 'tunajs';
3 |
4 | const DEFAULT_RATE = 1.5;
5 | const DEFAULT_FEEDBACK = 0.2;
6 | const DEFAULT_DELAY = 0.0045;
7 |
// Descriptor for the chorus effect: `rate`, `feedback` and `delay` are
// exposed as user-editable range parameters; each `set` writes through
// to the tuna Chorus node in the effect chain.
export const chorusData = {
  name: `chorus`,
  values: [

    {
      // Modulation rate (0.01 to 8).
      name: `rate`,
      options: {
        type: `range`,
        defaultValue: DEFAULT_RATE,
        min: 0.01,
        max: 8,
        step: 0.01
      },
      set: (effectChain, value) => {
        effectChain.chorus.rate = value;
      }
    },

    {
      // Feedback amount (0 to 1).
      name: `feedback`,
      options: {
        type: `range`,
        defaultValue: DEFAULT_FEEDBACK,
        min: 0,
        max: 1,
        step: 0.01
      },
      set: (effectChain, value) => {
        effectChain.chorus.feedback = value;
      }
    },

    {
      // Chorus delay (0 to 1).
      name: `delay`,
      options: {
        type: `range`,
        defaultValue: DEFAULT_DELAY,
        min: 0,
        max: 1,
        step: 0.01
      },
      set: (effectChain, value) => {
        effectChain.chorus.delay = value;
      }
    }

  ]
};
56 |
/**
 * Creates a chorus EffectUnit backed by tuna's Chorus node.
 * @param {AudioContext} audioCtx
 * @param {Tuna} [tuna] - optional shared Tuna instance; a fresh one is
 *                        created from audioCtx when omitted.
 * @returns {EffectUnit}
 */
export default function createChorus(audioCtx, tuna = new Tuna(audioCtx)) {
  const effectChain = {
    chorus() {
      return new tuna.Chorus({
        rate: DEFAULT_RATE, // 0.01 - 8
        feedback: DEFAULT_FEEDBACK, // 0 - 1
        delay: DEFAULT_DELAY // 0 - 1
      });
    }
  };

  return new EffectUnit({ ...chorusData, effectChain }, audioCtx);
}
72 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-units-collection/src/effects/tremolo.js:
--------------------------------------------------------------------------------
1 | import EffectUnit from 'webaudio-effect-unit';
2 | import Tuna from 'tunajs';
3 |
4 | const DEFAULT_INTENSITY = 0.3;
5 | const DEFAULT_RATE = 4;
6 | const DEFAULT_STEREOPHASE = 0;
7 |
// Descriptor for the tremolo effect: `intensity`, `rate` and `stereoPhase`
// are exposed as user-editable range parameters; each `set` writes through
// to the tuna Tremolo node in the effect chain.
export const tremoloData = {
  name: `tremolo`,
  values: [

    {
      // Tremolo depth (0.0001 to 1 in the UI range).
      name: `intensity`,
      options: {
        type: `range`,
        defaultValue: DEFAULT_INTENSITY,
        min: 0.0001,
        max: 1,
        step: 0.01
      },
      set: (effectChain, value) => {
        effectChain.tremolo.intensity = value;
      }
    },

    {
      // Modulation rate (0.001 to 8).
      name: `rate`,
      options: {
        type: `range`,
        defaultValue: DEFAULT_RATE,
        min: 0.001,
        max: 8,
        step: 0.01
      },
      set: (effectChain, value) => {
        effectChain.tremolo.rate = value;
      }
    },

    {
      // Phase offset between channels in degrees (0 to 180).
      name: `stereoPhase`,
      options: {
        type: `range`,
        defaultValue: DEFAULT_STEREOPHASE,
        min: 0,
        max: 180,
        step: 1
      },
      set: (effectChain, value) => {
        effectChain.tremolo.stereoPhase = value;
      }
    }

  ]
};
56 |
/**
 * Creates a tremolo EffectUnit backed by tuna's Tremolo node.
 * @param {AudioContext} audioCtx
 * @param {Tuna} [tuna] - optional shared Tuna instance; a fresh one is
 *                        created from audioCtx when omitted.
 * @returns {EffectUnit}
 */
export default function createTremolo(audioCtx, tuna = new Tuna(audioCtx)) {
  const effectChain = {
    tremolo() {
      return new tuna.Tremolo({
        intensity: DEFAULT_INTENSITY, // 0 to 1
        rate: DEFAULT_RATE, // 0.001 to 8
        stereoPhase: DEFAULT_STEREOPHASE // 0 to 180
      });
    }
  };

  return new EffectUnit({ ...tremoloData, effectChain }, audioCtx);
}
72 |
--------------------------------------------------------------------------------
/packages/audiobufferchnl/src/index.js:
--------------------------------------------------------------------------------
1 | import Chnl from 'webaudio-chnl';
2 |
// A Chnl whose input is fed by a webaudio BufferSourceNode, with simple
// play/stop control. Because BufferSourceNodes are single-use, play()
// swaps in a fresh node after starting the current one.
export default class AudioBufferChnl extends Chnl {

  // The current (most recently attached) BufferSourceNode.
  bufferSourceNode;
  // Guards stop(): only true before the first play().
  canStop = true;

  // audioCtx: AudioContext passed through to the Chnl base class.
  // bufferSourceNode: the initial source node to wire into this channel.
  constructor(audioCtx, bufferSourceNode) {
    super(audioCtx);
    this.setBufferSourceNode(bufferSourceNode);
  }

  // Replaces the tracked source node and connects it to this channel's
  // input. NOTE(review): the previous node is never disconnected here.
  setBufferSourceNode(bufferSourceNode) {
    this.bufferSourceNode = bufferSourceNode;
    this.bufferSourceNode.connect(this.input);
  }

  // Stops the current source node, but only while `canStop` is true.
  // NOTE(review): play() sets canStop = false and never resets it, which
  // makes stop() a permanent no-op after the first play — confirm intended.
  stop() {
    /* const newAudioBuffer = this.context.createBuffer(this.bufferSourceNode.buffer.numberOfChannels, this.bufferSourceNode.buffer.length, this.bufferSourceNode.buffer.sampleRate);

    for (let channel = 0; channel < newAudioBuffer.numberOfChannels; channel++) {
      const channelDataNew = newAudioBuffer.getChannelData(channel);
      const channelDataCurrent = this.bufferSourceNode.buffer.getChannelData(channel);
      for (let i = 0; i < channelDataCurrent.length; i++)
        channelDataNew[i] = channelDataCurrent[i];
    } */
    if (this.canStop)
      this.bufferSourceNode.stop();
  }

  // Starts the current node, then prepares a fresh node (same buffer and
  // loop flag) for the next play, since a BufferSourceNode can only be
  // started once. NOTE(review): after this, `bufferSourceNode` refers to
  // the not-yet-started replacement, not the node that is playing.
  play() {
    this.bufferSourceNode.start(0);

    const newBufferSource = this.context.createBufferSource();
    newBufferSource.buffer = this.bufferSourceNode.buffer;
    newBufferSource.loop = this.bufferSourceNode.loop;
    this.setBufferSourceNode(newBufferSource);
    this.canStop = false;
  }

}
42 |
43 | /* const audioCtx = new AudioContext();
44 | const buffer = audioCtx.createBuffer(2, audioCtx.sampleRate * 2.0, audioCtx.sampleRate);
45 | const sourceNode = audioCtx.createBufferSource();
46 | sourceNode.buffer = buffer;
47 |
48 | const bufferChnl = new AudioBufferChnl(sourceNode, audioCtx);
49 | console.log(bufferChnl); */
50 |
--------------------------------------------------------------------------------
/config/webpack.config.common.js:
--------------------------------------------------------------------------------
1 | const webpack = require(`webpack`);
2 | const { packagesToAliases, getPackages, commandObject, PATHS } = require(`./util.js`);
3 |
4 | const packages = getPackages(PATHS.packages);
5 |
6 | module.exports = {
7 | resolve: {
8 | extensions: [`.js`, `.jsx`], // Resolve these extensions
9 | alias: packagesToAliases(packages, PATHS.packages)
10 | },
11 | module: {
12 | rules: [
13 | {
14 | test: /\.jsx?$/,
15 | use: [
16 | {
17 | loader: `babel-loader`,
18 | options: {
19 | cacheDirectory: true
20 | }
21 | },
22 | {
23 | loader: `eslint-loader`
24 | }
25 | ],
26 | include: PATHS.packages,
27 | exclude: /node_modules/
28 | },
29 | {
30 | test: /\.(jpe?g|png|gif|svg)$/i,
31 | use: [
32 | {
33 | loader: `file-loader`,
34 | options: {
35 | hash: `sha512`,
36 | digest: `hex`,
37 | name: `[hash].[ext]`
38 | }
39 | },
40 | {
41 | loader: `image-webpack-loader`,
42 | options: {
43 | bypassOnDebug: true
44 | }
45 | }
46 | ],
47 | include: PATHS.packages,
48 | exclude: /node_modules/
49 | }
50 | ]
51 | },
52 | plugins: [
53 | new webpack.DefinePlugin({
54 | ENVIRONMENT: JSON.stringify(commandObject.cmd === `dev` ? `development` : `production`)
55 | }),
56 | new webpack.LoaderOptionsPlugin({
57 | test: /\.(jpe?g|png|gif|svg)$/i,
58 | options: {
59 | imageWebpackLoader: {
60 | gifsicle: {
61 | interlaced: false
62 | },
63 | optipng: {
64 | optimizationLevel: 7
65 | }
66 | }
67 | }
68 | })
69 | ]
70 | };
71 |
--------------------------------------------------------------------------------
/packages/wmstr/README.md:
--------------------------------------------------------------------------------
1 | # Wmstr - Webaudio Master Channel
2 |
3 | ## What is Wmstr?
4 | The scope of this module is to manage the input of many audio-channels in one instance.
5 | It's just a simple extension of the [chnl](../chnl/README.md) module, with the only difference that you can record all the input to it and output the recorded data directly to a file.
6 |
__Attention__: Since the [webaudio-effect-unit](../webaudio-effect-unit/README.md) has reached v.1.1.0, the way the effects work has changed. Have a look at its repository for more details. Make sure to do this BEFORE you update. If you have difficulties or questions, just open an issue! I am always glad if I can help. :smile:
8 |
9 | ## Installation
10 | The package is hosted on npm. You can consume it with any package manager supporting npm packages.
11 | ```bash
12 | npm i wmstr
13 | ```
14 |
15 | ## Usage
16 | ### Constructing
17 | ```javascript
18 | new Wmstr(audioCtx, connectToSpeakers)
19 | ```
20 |
There are exactly two arguments.
22 | The first one has to be an AudioContext-object.
The second one is optional, as it has a default value of _true_. If this parameter evaluates to true, this channel will automatically connect to the speakers (audioCtx.destination). If it evaluates to false, the channel won't be connected to the speakers.
24 |
25 | Now, you can use this object like a normal Chnl-object and use the extra methods.
26 |
27 | ### Start recording
28 | ```javascript
29 | .startRecording()
30 | ```
31 |
32 | Simply starts recording the output of this channel.
33 |
34 | ### Stop recording
35 | ```javascript
36 | .stopRecording(filename)
37 | ```
38 |
39 | This method stops the recording you previously started.
40 | You can pass one parameter, which is __optional__.
41 | If it has a value, the recorded audio gets automatically downloaded with the specified filename.
42 |
The method returns a Promise which resolves with the recorded audio as binary data (blob).
44 |
--------------------------------------------------------------------------------
/packages/recordy/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "recordy",
3 | "version": "0.0.4",
4 | "lockfileVersion": 1,
5 | "requires": true,
6 | "dependencies": {
7 | "inline-worker": {
8 | "version": "https://registry.npmjs.org/inline-worker/-/inline-worker-1.1.0.tgz",
9 | "integrity": "sha1-VelvVJFaZCsAhyotqm/oMrQkyY0="
10 | },
11 | "tunajs": {
12 | "version": "1.0.0",
13 | "resolved": "https://registry.npmjs.org/tunajs/-/tunajs-1.0.0.tgz",
14 | "integrity": "sha1-SI8AoPvpWkwuMKqpEEdr29Xyclg="
15 | },
16 | "webaudio-chnl": {
17 | "version": "0.0.8",
18 | "resolved": "https://registry.npmjs.org/webaudio-chnl/-/webaudio-chnl-0.0.8.tgz",
19 | "integrity": "sha512-y4Mg46CM1yp8DweigkD3dQlCv8fVqMLsqV7Kf4XFT02tfO0ayiqityCjFr+6eCOFajSqhd7Uzpu69R+M1NHsGA==",
20 | "requires": {
21 | "webaudio-effect-units-collection": "1.0.5"
22 | }
23 | },
24 | "webaudio-effect-unit": {
25 | "version": "1.1.4",
26 | "resolved": "https://registry.npmjs.org/webaudio-effect-unit/-/webaudio-effect-unit-1.1.4.tgz",
27 | "integrity": "sha512-cACj0AwMWV5xAfbpsOZAoN/QmadVV31oUjJB86/3dTUrdv20uZEkUrCYXiO/Do3GnDZXJWKlQQnY7IczYoxEYw=="
28 | },
29 | "webaudio-effect-units-collection": {
30 | "version": "1.0.5",
31 | "resolved": "https://registry.npmjs.org/webaudio-effect-units-collection/-/webaudio-effect-units-collection-1.0.5.tgz",
32 | "integrity": "sha512-CrD3eEoJwzI2XA85I/a/7UF3ZThECGE4Gu9Z2fZGoZTzm9w7UxVDJzr6loGtUeCW71xHYuHnky4CY/A89Sbdbw==",
33 | "requires": {
34 | "tunajs": "1.0.0",
35 | "webaudio-effect-unit": "1.1.4"
36 | }
37 | },
38 | "wrecorder": {
39 | "version": "git://github.com/scriptify/Wrecorder.git#46a04b521ab92ef3efc24d18b4b84f36ac7a362b",
40 | "integrity": "sha1-wVxOCPf2l6qFseQ5MbykVnSIifY=",
41 | "requires": {
42 | "inline-worker": "https://registry.npmjs.org/inline-worker/-/inline-worker-1.1.0.tgz"
43 | }
44 | }
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/packages/wmstr/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "wmstr",
3 | "version": "0.0.4",
4 | "lockfileVersion": 1,
5 | "requires": true,
6 | "dependencies": {
7 | "inline-worker": {
8 | "version": "https://registry.npmjs.org/inline-worker/-/inline-worker-1.1.0.tgz",
9 | "integrity": "sha1-VelvVJFaZCsAhyotqm/oMrQkyY0="
10 | },
11 | "tunajs": {
12 | "version": "1.0.0",
13 | "resolved": "https://registry.npmjs.org/tunajs/-/tunajs-1.0.0.tgz",
14 | "integrity": "sha1-SI8AoPvpWkwuMKqpEEdr29Xyclg="
15 | },
16 | "webaudio-chnl": {
17 | "version": "0.0.8",
18 | "resolved": "https://registry.npmjs.org/webaudio-chnl/-/webaudio-chnl-0.0.8.tgz",
19 | "integrity": "sha512-y4Mg46CM1yp8DweigkD3dQlCv8fVqMLsqV7Kf4XFT02tfO0ayiqityCjFr+6eCOFajSqhd7Uzpu69R+M1NHsGA==",
20 | "requires": {
21 | "webaudio-effect-units-collection": "1.0.5"
22 | }
23 | },
24 | "webaudio-effect-unit": {
25 | "version": "1.1.4",
26 | "resolved": "https://registry.npmjs.org/webaudio-effect-unit/-/webaudio-effect-unit-1.1.4.tgz",
27 | "integrity": "sha512-cACj0AwMWV5xAfbpsOZAoN/QmadVV31oUjJB86/3dTUrdv20uZEkUrCYXiO/Do3GnDZXJWKlQQnY7IczYoxEYw=="
28 | },
29 | "webaudio-effect-units-collection": {
30 | "version": "1.0.5",
31 | "resolved": "https://registry.npmjs.org/webaudio-effect-units-collection/-/webaudio-effect-units-collection-1.0.5.tgz",
32 | "integrity": "sha512-CrD3eEoJwzI2XA85I/a/7UF3ZThECGE4Gu9Z2fZGoZTzm9w7UxVDJzr6loGtUeCW71xHYuHnky4CY/A89Sbdbw==",
33 | "requires": {
34 | "tunajs": "1.0.0",
35 | "webaudio-effect-unit": "1.1.4"
36 | }
37 | },
38 | "wrecorder": {
39 | "version": "git://github.com/scriptify/Wrecorder.git#46a04b521ab92ef3efc24d18b4b84f36ac7a362b",
40 | "integrity": "sha1-cjNheeNb5/imA95eFpa0Pk2l8ms=",
41 | "requires": {
42 | "inline-worker": "https://registry.npmjs.org/inline-worker/-/inline-worker-1.1.0.tgz"
43 | }
44 | }
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/packages/wrecorder/webpack.config.js:
--------------------------------------------------------------------------------
1 | var path = require('path'),
2 | webpack = require('webpack'), // Da bundling modules!
3 | NpmInstallPlugin = require('npm-install-webpack-plugin'), // Install client dependencies automatically!
4 | merge = require('webpack-merge'); // Merge together configurations!
5 |
// Paths used throughout this configuration.
const PATHS = {
  js: path.join(__dirname, './js/'),
  build: path.join(__dirname, './build')
};

// Name of the npm script that launched this build (e.g. 'start:dev').
const TARGET = process.env.npm_lifecycle_event;

// Settings shared by the dev-server and production builds
// (legacy webpack 1 syntax: `loaders`, '' entry in resolve.extensions).
const COMMON_CONFIGURATION = {
  entry: {
    app: PATHS.js
  },
  resolve: {
    extensions: ['', '.js'], // Resolve these extensions
  },
  output: {
    path: PATHS.build,
    filename: 'bundle.js',
    libraryTarget: 'umd',
    // NOTE(review): the UMD global 'EffectUnit' looks copy-pasted from the
    // webaudio-effect-unit config — confirm the intended name for wrecorder.
    library: 'EffectUnit'
  },
  module: {
    loaders: [
      {
        test: /\.js$/,
        loaders: ['babel?cacheDirectory'],
        include: PATHS.js
      }
    ]
  }
};

switch(TARGET) {
  // Which procedure was started?
  // Unknown targets fall through to the dev configuration.
  default:
  case 'start:dev': {
    // Development build: hot-reloading dev server with eval source maps.
    module.exports = merge(COMMON_CONFIGURATION, {
      devServer: {
        contentBase: PATHS.build,
        historyApiFallback: true,
        hot: true,
        inline: true,
        progress: true,
        stats: 'errors-only'
      },
      plugins: [
        new webpack.HotModuleReplacementPlugin(),
        new NpmInstallPlugin({
          save: true
        })
      ],
      devtool: 'eval-source-map'
    });
  }
  break;
  case 'start:prod': {
    // Production build: NODE_ENV baked in, minified and deduplicated.
    module.exports = merge(COMMON_CONFIGURATION, {
      plugins: [
        new webpack.DefinePlugin({
          'process.env': {
            'NODE_ENV': JSON.stringify('production')
          }
        }),
        new webpack.optimize.UglifyJsPlugin({
          compress: { warnings: false }
        }),
        new webpack.optimize.DedupePlugin()
      ]
    });
  }
}
76 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-unit/webpack.config.js:
--------------------------------------------------------------------------------
1 | var path = require('path'),
2 | webpack = require('webpack'), // Da bundling modules!
3 | NpmInstallPlugin = require('npm-install-webpack-plugin'), // Install client dependencies automatically!
4 | merge = require('webpack-merge'); // Merge together configurations!
5 |
// Paths used throughout this configuration.
const PATHS = {
  js: path.join(__dirname, './js/'),
  build: path.join(__dirname, './build')
};

// Name of the npm script that launched this build (e.g. 'start:dev').
const TARGET = process.env.npm_lifecycle_event;

// Settings shared by the dev-server and production builds
// (legacy webpack 1 syntax: `loaders`, '' entry in resolve.extensions).
const COMMON_CONFIGURATION = {
  entry: {
    app: PATHS.js
  },
  resolve: {
    extensions: ['', '.js'], // Resolve these extensions
  },
  output: {
    path: PATHS.build,
    filename: 'bundle.js',
    libraryTarget: 'umd',
    // UMD global name under which the bundle is exposed.
    library: 'EffectUnit'
  },
  module: {
    loaders: [
      {
        test: /\.js$/,
        loaders: ['babel?cacheDirectory'],
        include: PATHS.js
      }
    ]
  }
};

switch(TARGET) {
  // Which procedure was started?
  // Unknown targets fall through to the dev configuration.
  default:
  case 'start:dev': {
    // Development build: hot-reloading dev server with eval source maps.
    module.exports = merge(COMMON_CONFIGURATION, {
      devServer: {
        contentBase: PATHS.build,
        historyApiFallback: true,
        hot: true,
        inline: true,
        progress: true,
        stats: 'errors-only'
      },
      plugins: [
        new webpack.HotModuleReplacementPlugin(),
        new NpmInstallPlugin({
          save: true
        })
      ],
      devtool: 'eval-source-map'
    });
  }
  break;
  case 'start:prod': {
    // Production build: NODE_ENV baked in, minified and deduplicated.
    module.exports = merge(COMMON_CONFIGURATION, {
      plugins: [
        new webpack.DefinePlugin({
          'process.env': {
            'NODE_ENV': JSON.stringify('production')
          }
        }),
        new webpack.optimize.UglifyJsPlugin({
          compress: { warnings: false }
        }),
        new webpack.optimize.DedupePlugin()
      ]
    });
  }
}
76 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-units-collection/src/index.js:
--------------------------------------------------------------------------------
1 | import Tuna from 'tunajs';
2 | import createChorus, { chorusData } from './effects/chorus';
3 | import createDelay, { delayData } from './effects/delay';
4 | import createPhaser, { phaserData } from './effects/phaser';
5 | import createCompressor, { compressorData } from './effects/compressor';
6 | import createTremolo, { tremoloData } from './effects/tremolo';
7 | import createWahWah, { wahWahData } from './effects/wahwah';
8 | import createBitcrusher, { bitcrusherData } from './effects/bitcrusher';
9 | import createMoog, { moogData } from './effects/moog';
10 | import createPingPongDelay, { pingPongDelayData } from './effects/pingPongDelay';
11 |
12 | import createGain, { gainData } from './effects/gain';
13 | import createLowpass, { lowpassData } from './effects/lowpass';
14 | import createHighpass, { highpassData } from './effects/highpass';
15 | import createDubDelay, { dubDelayData } from './effects/dubDelay';
16 | import createReverb, { reverbData } from './effects/reverb';
17 |
// Aggregated parameter descriptors for every effect in this collection;
// the order matches the construction order in createEffectCollection.
export const EFFECT_DATA = [
  gainData,
  highpassData,
  lowpassData,
  dubDelayData,
  reverbData,
  chorusData,
  delayData,
  phaserData,
  compressorData,
  tremoloData,
  wahWahData,
  bitcrusherData,
  moogData,
  pingPongDelayData
];
34 |
/**
 * Instantiates every effect of the collection for the given AudioContext.
 * @param {AudioContext} audioCtx
 * @returns {Object} map from effect name to its constructed effect unit.
 */
export default function createEffectCollection(audioCtx) {
  // One Tuna instance is created up front and shared by all effects that
  // take a tuna argument.
  const tuna = new Tuna(audioCtx);

  // Effects created without the shared tuna instance.
  const gain = createGain(audioCtx);
  const lowpass = createLowpass(audioCtx);
  const highpass = createHighpass(audioCtx);
  const dubDelay = createDubDelay(audioCtx);
  const reverb = createReverb(audioCtx);

  // Effects that reuse the shared tuna instance.
  const chorus = createChorus(audioCtx, tuna);
  const delay = createDelay(audioCtx, tuna);
  const phaser = createPhaser(audioCtx, tuna);
  const compressor = createCompressor(audioCtx, tuna);
  const tremolo = createTremolo(audioCtx, tuna);
  const wahwah = createWahWah(audioCtx, tuna);
  const bitcrusher = createBitcrusher(audioCtx, tuna);
  const moog = createMoog(audioCtx, tuna);
  const pingPongDelay = createPingPongDelay(audioCtx, tuna);

  return {
    gain,
    lowpass,
    highpass,
    dubDelay,
    reverb,
    chorus,
    delay,
    phaser,
    compressor,
    tremolo,
    wahwah,
    bitcrusher,
    moog,
    pingPongDelay
  };
}
55 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-units-collection/webpack.config.js:
--------------------------------------------------------------------------------
// Webpack build configuration for webaudio-effect-units-collection.
// The npm lifecycle event (start:dev / start:prod) selects which config
// is exported. Uses webpack 1.x conventions (`loaders`, '' in extensions).

var path = require('path'),
  webpack = require('webpack'), // Da bundling modules!
  NpmInstallPlugin = require('npm-install-webpack-plugin'), // Install client dependencies automatically!
  merge = require('webpack-merge'); // Merge together configurations!

// NOTE(review): entry points at ./js/, but this package's sources live in
// ./src — confirm the intended entry directory.
const PATHS = {
  js: path.join(__dirname, './js/'),
  build: path.join(__dirname, './build')
};

// The npm script that launched this build (e.g. `start:dev`).
const TARGET = process.env.npm_lifecycle_event;

const COMMON_CONFIGURATION = {
  entry: {
    app: PATHS.js
  },
  resolve: {
    extensions: ['', '.js'], // Resolve these extensions
  },
  // UMD bundle exposed under the global `EffectUnit`.
  output: {
    path: PATHS.build,
    filename: 'bundle.js',
    libraryTarget: 'umd',
    library: 'EffectUnit'
  },
  module: {
    loaders: [
      {
        test: /\.js$/,
        loaders: ['babel?cacheDirectory'],
        include: PATHS.js
      },
      {
        // Copy media/font assets into the build via file-loader.
        test: /\.jpe?g$|\.gif$|\.png$|\.svg$|\.woff$|\.ttf$|\.wav$|\.mp3$/,
        loader: 'file'
      }
    ]
  }
};

switch(TARGET) {
  // Which procedure was started?
  // Unknown scripts fall through to the dev configuration.
  default:
  case 'start:dev': {
    module.exports = merge(COMMON_CONFIGURATION, {
      devServer: {
        contentBase: PATHS.build,
        historyApiFallback: true,
        hot: true,
        inline: true,
        progress: true,
        stats: 'errors-only'
      },
      plugins: [
        new webpack.HotModuleReplacementPlugin(),
        new NpmInstallPlugin({
          save: true // Persist auto-installed packages to package.json.
        })
      ],
      devtool: 'eval-source-map'
    });
  }
  break;
  case 'start:prod': {
    module.exports = merge(COMMON_CONFIGURATION, {
      plugins: [
        new webpack.DefinePlugin({
          'process.env': {
            'NODE_ENV': JSON.stringify('production')
          }
        }),
        new webpack.optimize.UglifyJsPlugin({
          compress: { warnings: false }
        }),
        // webpack 1.x only: dedupes identical modules in the bundle.
        new webpack.optimize.DedupePlugin()
      ]
    });
  }
}
--------------------------------------------------------------------------------
/packages/webaudio-effect-units-collection/src/effects/pingPongDelay.js:
--------------------------------------------------------------------------------
1 | import EffectUnit from 'webaudio-effect-unit';
2 | import Tuna from 'tunajs';
3 |
const DEFAULT_WETLEVEL = 0.5;
const DEFAULT_FEEDBACK = 0.3;
const DEFAULT_DELAYTIMELEFT = 150;
const DEFAULT_DELAYTIMERIGHT = 150;

/**
 * EffectUnit descriptor for the tuna PingPongDelay effect: UI metadata
 * (ranges, defaults) plus setters that write through to the node held in
 * `effectChain.pingpong`.
 */
export const pingPongDelayData = {
  name: `pingPongDelay`,
  values: [

    {
      name: `wetLevel`,
      options: {
        type: `range`,
        defaultValue: DEFAULT_WETLEVEL,
        min: 0,
        max: 1,
        step: 0.01
      },
      set: (effectChain, value) => {
        effectChain.pingpong.wetLevel = value;
      }
    },

    {
      name: `feedback`,
      options: {
        type: `range`,
        defaultValue: DEFAULT_FEEDBACK,
        min: 0,
        max: 1,
        step: 0.01
      },
      set: (effectChain, value) => {
        effectChain.pingpong.feedback = value;
      }
    },

    {
      name: `delayTimeLeft`,
      options: {
        type: `range`,
        defaultValue: DEFAULT_DELAYTIMELEFT,
        min: 1,
        max: 4000,
        step: 1
      },
      set: (effectChain, value) => {
        effectChain.pingpong.delayTimeLeft = value;
      }
    },

    {
      name: `delayTimeRight`,
      options: {
        type: `range`,
        defaultValue: DEFAULT_DELAYTIMERIGHT,
        min: 1,
        max: 4000,
        step: 1
      },
      set: (effectChain, value) => {
        // BUG FIX: previously assigned `effectChain.pingpong.DEFAULT_DELAYTIMERIGHT`,
        // so changing this parameter never reached the node's delayTimeRight.
        effectChain.pingpong.delayTimeRight = value;
      }
    }

  ]
};
71 |
/**
 * Builds a ping-pong delay EffectUnit.
 *
 * @param {AudioContext} audioCtx - Context the effect is built on.
 * @param {Tuna} [tuna] - Optional shared Tuna instance; created on demand.
 * @returns {EffectUnit}
 */
export default function createPingPongDelay(audioCtx, tuna = new Tuna(audioCtx)) {
  const effectChain = {
    pingpong: () => new tuna.PingPongDelay({
      wetLevel: 0.5, // 0 to 1
      feedback: 0.3, // 0 to 1
      delayTimeLeft: 150, // 1 to 10000 (milliseconds)
      // NOTE(review): 200 differs from DEFAULT_DELAYTIMERIGHT (150) — confirm intended.
      delayTimeRight: 200 // 1 to 10000 (milliseconds)
    })
  };

  return new EffectUnit({ ...pingPongDelayData, effectChain }, audioCtx);
}
--------------------------------------------------------------------------------
/config/util.js:
--------------------------------------------------------------------------------
1 | const path = require(`path`);
2 | const fs = require(`fs`);
3 |
// Shared filesystem locations: `packages` is the monorepo packages root,
// `playgroundBuild` is the playground's build output directory.
const PATHS = {
  packages: path.join(__dirname, `..`, `packages`),
  playgroundBuild: path.join(__dirname, `..`, `build`)
};
8 |
/**
 * Scans `packagesDir` and returns the names of all directories that look
 * like publishable packages: they must contain `src` and `dist`
 * subdirectories and a `package.json` file.
 *
 * @param {string} packagesDir - Path of the directory to scan.
 * @returns {string[]} Names (not full paths) of qualifying package dirs.
 */
function getPackages(packagesDir) {
  const REQUIRED_DIRS = [`src`, `dist`];
  const REQUIRED_FILES = [`package.json`];

  return fs.readdirSync(packagesDir).filter((file) => {
    const fPath = path.join(packagesDir, file);
    if (!fs.statSync(fPath).isDirectory())
      return false;

    // Partition the package's entries into directories and plain files.
    const structure = {
      dirs: [],
      files: []
    };

    fs.readdirSync(fPath).forEach((pFile) => {
      const pPath = path.join(fPath, pFile);
      if (fs.statSync(pPath).isDirectory())
        structure.dirs.push(pFile);
      else
        structure.files.push(pFile);
    });

    // The original used `return 0` inside forEach callbacks, which cannot
    // break the loop; `every` expresses the all-required check directly.
    return REQUIRED_DIRS.every(d => structure.dirs.includes(d))
      && REQUIRED_FILES.every(f => structure.files.includes(f));
  });
}
58 |
59 |
/**
 * Parses an npm lifecycle command of the form `start:<cmd>`.
 *
 * @param {string} command - e.g. `start:dev`.
 * @returns {{cmd: string, params: string[]}} The command suffix plus any
 *   extra CLI arguments (everything from argv index 4 onward).
 * @throws {Error} If the command does not contain `start:`.
 */
function createCommandObject(command) {
  const parts = command.split(`start:`);

  if (parts.length <= 1)
    throw new Error(`Invalid command: ${command}`);

  return {
    cmd: parts[1],
    params: process.argv.filter((arg, i) => i >= 4)
  };
}
74 |
75 |
/**
 * Maps package names to webpack resolve-aliases pointing at each
 * package's `src` directory.
 *
 * @param {string[]} packages - Package directory names.
 * @param {string} packagesPath - Root directory containing the packages.
 * @returns {Object<string,string>} name -> resolved `<packagesPath>/<name>/src`.
 */
function packagesToAliases(packages, packagesPath) {
  // Assign directly instead of re-copying the whole object per package
  // (the original Object.assign-per-iteration was accidentally O(n²)).
  const aliases = {};

  packages.forEach((p) => {
    aliases[p] = path.resolve(packagesPath, `${p}/src`);
  });

  return aliases;
}
87 |
// The npm script that launched this process, e.g. `start:dev`.
const TARGET = process.env.npm_lifecycle_event;
// Parsed eagerly at require time; throws if the current npm script is not
// a `start:*` command.
const commandObject = createCommandObject(TARGET);

module.exports = {
  packagesToAliases,
  commandObject,
  getPackages,
  PATHS
};
97 |
--------------------------------------------------------------------------------
/packages/recordy/src/index.js:
--------------------------------------------------------------------------------
1 | import Chnl from 'webaudio-chnl';
2 | import Recorder from 'wrecorder';
3 | import getInput from './getInput';
4 |
/**
 * A Chnl-based recording channel: captures an input stream, optionally
 * monitors it on the speakers, and exports recordings as blob, Audio
 * element or AudioBuffer.
 */
export default class Recordy extends Chnl {

  recorder;
  directOutGain;

  constructor(audioCtx) {
    super(audioCtx);
    this.recorder = new Recorder(this);

    // Direct monitoring path to the speakers, muted (gain 0) by default
    // so input is not echoed unless toSpeaker() raises the gain.
    this.directOutGain = audioCtx.createGain();
    this.directOutGain.gain.value = 0;
    this.connect(this.directOutGain);
    this.directOutGain.connect(audioCtx.destination);
    this.stream = null;
  }

  /**
   * Sets the volume of the direct input -> speaker monitoring path.
   * @param {number} val - Gain value; 0 mutes direct output.
   */
  toSpeaker(val) {
    this.directOutGain.gain.value = val;
  }

  /**
   * Acquires an audio input stream and connects it to this channel.
   * @returns {Promise<boolean>} Resolves with true once connected.
   */
  async getInput() {
    this.stream = await getInput();
    const mediaStream = this.context.createMediaStreamSource(this.stream);
    mediaStream.connect(this.input);
    return true;
  }

  /** Starts recording this channel's signal. */
  startRecording() {
    this.recorder.record();
  }

  /**
   * Stops recording and exports the result.
   *
   * @param {{type?: string}} [options] - `type` selects the resolved value:
   *   `blob` (default), `audio` (an Audio element) or `buffer` (an AudioBuffer).
   * @returns {Promise<Blob|HTMLAudioElement|AudioBuffer>}
   */
  stopRecording({ type = `blob` } = {}) {
    return new Promise((resolve, reject) => {
      // BUG FIX: the original threw inside the exportWAV callback for an
      // unknown type, which cannot reject the promise — it hung forever.
      // Validate up front and reject instead.
      if (![`blob`, `audio`, `buffer`].includes(type)) {
        reject(new Error(`[Recordy] Invalid type, must be one of those: blob, audio or buffer.`));
        return;
      }

      this.recorder.stop();

      this.recorder.exportWAV((blob) => {
        this.recorder.clear();
        // Stop all input tracks so the browser removes its record indicator.
        if (this.stream) this.stream.getAudioTracks().forEach(track => track.stop());

        switch (type) {

          case `blob`:
            resolve(blob);
            break;

          case `audio`: {
            const url = URL.createObjectURL(blob);
            resolve(new Audio(url));
            break;
          }

          case `buffer`: {
            const fileReader = new FileReader();
            fileReader.addEventListener(`loadend`, () => {
              // Propagate decode failures to the caller as well.
              this.context.decodeAudioData(fileReader.result)
                .then(resolve, reject);
            });
            fileReader.readAsArrayBuffer(blob);
            break;
          }

          default:
            // Unreachable: type was validated above.
            break;

        }
      });
    });
  }

}
79 |
80 | /* const audioCtx = new AudioContext();
81 | const r = new Recordy(audioCtx);
82 |
83 | r.getInput()
84 | .then(val => {
85 | r.startRecording();
86 |
87 | window.setTimeout(() => {
88 | r.stopRecording({ type: 'audio' })
89 | .then(audio => {
90 | audio.play();
91 | });
92 | }, 1000);
93 | });*/
94 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-units-collection/src/effects/delay.js:
--------------------------------------------------------------------------------
1 | import EffectUnit from 'webaudio-effect-unit';
2 | import Tuna from 'tunajs';
3 |
const DEFAULT_FEEDBACK = 0.45;
const DEFAULT_DELAYTIME = 150;
const DEFAULT_WETLEVEL = 0.25;
const DEFAULT_DRYLEVEL = 1;
const DEFAULT_CUTOFF = 2000;

// Builds a setter that writes `prop` on the delay node of the effect chain.
const setDelayProp = prop => (effectChain, value) => {
  effectChain.delay[prop] = value;
};

// Describes one continuous (range) parameter of the delay effect.
const rangeParam = (name, defaultValue, min, max, step) => ({
  name,
  options: { type: `range`, defaultValue, min, max, step },
  set: setDelayProp(name)
});

/**
 * EffectUnit descriptor for the tuna Delay effect: parameter metadata
 * plus setters that write through to `effectChain.delay`.
 */
export const delayData = {
  name: `delay`,
  values: [
    rangeParam(`feedback`, DEFAULT_FEEDBACK, 0, 1, 0.01),
    rangeParam(`delayTime`, DEFAULT_DELAYTIME, 1, 4000, 1),
    rangeParam(`wetLevel`, DEFAULT_WETLEVEL, 0, 1, 0.01),
    rangeParam(`dryLevel`, DEFAULT_DRYLEVEL, 0, 1, 0.01),
    rangeParam(`cutoff`, DEFAULT_CUTOFF, 20, 22050, 1)
  ]
};
86 |
/**
 * Builds a delay EffectUnit.
 *
 * @param {AudioContext} audioCtx - Context the effect is built on.
 * @param {Tuna} [tuna] - Optional shared Tuna instance; created on demand.
 * @returns {EffectUnit}
 */
export default function createDelay(audioCtx, tuna = new Tuna(audioCtx)) {
  const effectChain = {
    delay: () => new tuna.Delay({
      feedback: DEFAULT_FEEDBACK, // 0 to 1+
      delayTime: DEFAULT_DELAYTIME, // 1 to 10000 milliseconds
      wetLevel: DEFAULT_WETLEVEL, // 0 to 1+
      dryLevel: DEFAULT_DRYLEVEL, // 0 to 1+
      cutoff: DEFAULT_CUTOFF // cutoff frequency of the built in lowpass-filter. 20 to 22050
    })
  };

  return new EffectUnit({ ...delayData, effectChain }, audioCtx);
}
104 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-units-collection/src/effects/phaser.js:
--------------------------------------------------------------------------------
1 | import EffectUnit from 'webaudio-effect-unit';
2 | import Tuna from 'tunajs';
3 |
const DEFAULT_RATE = 1.2;
const DEFAULT_DEPTH = 0.3;
const DEFAULT_FEEDBACK = 0.2;
const DEFAULT_STEREOPHASE = 30;
const DEFAULT_BASEMODULATIONFREQUENCY = 700;

// Builds a setter that writes `prop` on the phaser node of the effect chain.
const setPhaserProp = prop => (effectChain, value) => {
  effectChain.phaser[prop] = value;
};

// Describes one continuous (range) parameter of the phaser effect.
const rangeParam = (name, defaultValue, min, max, step) => ({
  name,
  options: { type: `range`, defaultValue, min, max, step },
  set: setPhaserProp(name)
});

/**
 * EffectUnit descriptor for the tuna Phaser effect: parameter metadata
 * plus setters that write through to `effectChain.phaser`.
 */
export const phaserData = {
  name: `phaser`,
  values: [
    rangeParam(`rate`, DEFAULT_RATE, 0.01, 8, 0.01),
    rangeParam(`depth`, DEFAULT_DEPTH, 0, 1, 0.01),
    rangeParam(`feedback`, DEFAULT_FEEDBACK, 0, 1, 0.01),
    rangeParam(`stereoPhase`, DEFAULT_STEREOPHASE, 0, 180, 0.1),
    rangeParam(`baseModulationFrequency`, DEFAULT_BASEMODULATIONFREQUENCY, 500, 1500, 1)
  ]
};
86 |
/**
 * Builds a phaser EffectUnit.
 *
 * @param {AudioContext} audioCtx - Context the effect is built on.
 * @param {Tuna} [tuna] - Optional shared Tuna instance; created on demand.
 * @returns {EffectUnit}
 */
export default function createPhaser(audioCtx, tuna = new Tuna(audioCtx)) {
  // Tuna is optional

  return new EffectUnit({
    ...phaserData,
    effectChain: {
      // Consistency fix: use the declared DEFAULT_* constants (as delay.js
      // and wahwah.js do) instead of repeating the same magic numbers.
      phaser: () => new tuna.Phaser({
        rate: DEFAULT_RATE, // 0.01 to 8 is a decent range, but higher values are possible
        depth: DEFAULT_DEPTH, // 0 to 1
        feedback: DEFAULT_FEEDBACK, // 0 to 1+
        stereoPhase: DEFAULT_STEREOPHASE, // 0 to 180
        baseModulationFrequency: DEFAULT_BASEMODULATIONFREQUENCY // 500 to 1500
      })
    }
  },
  audioCtx);
}
104 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-units-collection/src/effects/wahwah.js:
--------------------------------------------------------------------------------
1 | import EffectUnit from 'webaudio-effect-unit';
2 | import Tuna from 'tunajs';
3 |
const DEFAULT_AUTOMODE = true;
const DEFAULT_BASEFREQUENCY = 0.5;
const DEFAULT_EXCURSIONOCTAVES = 2;
const DEFAULT_SWEEP = 0.2;
const DEFAULT_RESONANCE = 10;
const DEFAULT_SENSITIVITY = 0.5;

// Builds a setter that writes `prop` on the wahwah node of the effect chain.
const setWahWahProp = prop => (effectChain, value) => {
  effectChain.wahwah[prop] = value;
};

// Describes one continuous (range) parameter of the wahwah effect.
const rangeParam = (name, defaultValue, min, max, step) => ({
  name,
  options: { type: `range`, defaultValue, min, max, step },
  set: setWahWahProp(name)
});

/**
 * EffectUnit descriptor for the tuna WahWah effect: parameter metadata
 * plus setters that write through to `effectChain.wahwah`.
 */
export const wahWahData = {
  name: `wahwah`,
  values: [
    {
      name: `automode`,
      options: { type: `single`, defaultValue: DEFAULT_AUTOMODE },
      set: setWahWahProp(`automode`)
    },
    rangeParam(`baseFrequency`, DEFAULT_BASEFREQUENCY, 0, 1, 0.01),
    rangeParam(`excursionOctaves`, DEFAULT_EXCURSIONOCTAVES, 0, 6, 1),
    rangeParam(`sweep`, DEFAULT_SWEEP, 0, 1, 0.01),
    rangeParam(`resonance`, DEFAULT_RESONANCE, 0, 100, 1),
    rangeParam(`sensitivity`, DEFAULT_SENSITIVITY, 0, 1, 0.01)
  ]
};
98 |
/**
 * Builds a wahwah EffectUnit.
 *
 * @param {AudioContext} audioCtx - Context the effect is built on.
 * @param {Tuna} [tuna] - Optional shared Tuna instance; created on demand.
 * @returns {EffectUnit}
 */
export default function createWahWah(audioCtx, tuna = new Tuna(audioCtx)) {
  const effectChain = {
    wahwah: () => new tuna.WahWah({
      automode: DEFAULT_AUTOMODE, // true/false
      baseFrequency: DEFAULT_BASEFREQUENCY, // 0 to 1
      excursionOctaves: DEFAULT_EXCURSIONOCTAVES, // 1 to 6
      sweep: DEFAULT_SWEEP, // 0 to 1
      resonance: DEFAULT_RESONANCE, // 1 to 100
      sensitivity: DEFAULT_SENSITIVITY // -1 to 1
    })
  };

  return new EffectUnit({ ...wahWahData, effectChain }, audioCtx);
}
117 |
--------------------------------------------------------------------------------
/packages/recordy/webpack.config.js:
--------------------------------------------------------------------------------
1 |
// Webpack build configuration for the recordy package. The npm lifecycle
// event (start:dev / start:prod) selects which configuration is exported.

const path = require('path'),
  webpack = require('webpack'), // Da bundling modules!
  NpmInstallPlugin = require('npm-install-webpack-plugin'), // Install client dependencies automatically! NOTE(review): never referenced below — confirm before removing.
  merge = require('webpack-merge'), // Merge together configurations!
  HtmlWebpackExcludeAssetsPlugin = require('html-webpack-exclude-assets-plugin'),
  HtmlWebpackPlugin = require('html-webpack-plugin');

// Source and output locations for this package.
const PATHS = {
  src: path.join(__dirname, 'src'),
  build: path.join(__dirname, 'dist')
};

const moduleName = 'recordy';

// The npm script that launched this build (e.g. `start:dev`).
const TARGET = process.env.npm_lifecycle_event;

const COMMON_CONFIGURATION = {
  // Two bundles from the same entry: plain and minified (`.min`).
  entry: {
    [moduleName]: path.join(PATHS.src, 'index.js'),
    [moduleName + '.min']: path.join(PATHS.src, 'index.js')
  },
  resolve: {
    extensions: ['.js'], // Resolve these extensions
  },
  // UMD build so the library works in CommonJS, AMD and as a global.
  output: {
    path: PATHS.build,
    filename: '[name].js',
    libraryTarget: 'umd',
    library: moduleName,
    umdNamedDefine: true
  },
  module: {
    rules: [
      {
        test: /\.js$/,
        loader: 'babel-loader',
        include: PATHS.src,
        options: {
          cacheDirectory: true
        }
      },
      {
        // Emit media assets as content-hashed files; images additionally
        // pass through image-webpack-loader.
        test: /\.(jpe?g|png|gif|svg|wav|mp3)$/i,
        use: [
          {
            loader: 'file-loader',
            options: {
              hash: 'sha512',
              digest: 'hex',
              name: '[hash].[ext]'
            }
          },
          {
            loader: 'image-webpack-loader',
            options: {
              bypassOnDebug: true
            }
          }
        ],
        include: PATHS.src
      }
    ]
  },
  plugins: [
    // Expose the build mode to the bundled code as ENVIRONMENT.
    new webpack.DefinePlugin({
      ENVIRONMENT: JSON.stringify(TARGET === 'start:dev' ? 'development' : 'production')
    }),
    new webpack.LoaderOptionsPlugin({
      test: /\.(jpe?g|png|gif|svg)$/i,
      options: {
        imageWebpackLoader: {
          gifsicle: {
            interlaced: false
          },
          optipng: {
            optimizationLevel: 7
          }
        }
      }
    }),
    // Generate an index.html that references only the unminified bundle.
    new HtmlWebpackPlugin({
      excludeAssets: [/\.min\.js$/],
      title: moduleName
    }),
    new HtmlWebpackExcludeAssetsPlugin()
  ]
};

// NOTE(review): no `default` case — under any other npm script nothing is
// exported and webpack receives an undefined configuration.
switch(TARGET) {
  case 'start:dev': {
    module.exports = merge(COMMON_CONFIGURATION, {
      devServer: {
        contentBase: PATHS.build,
        historyApiFallback: true,
        hot: true,
        inline: true,
        stats: 'errors-only'
      },
      plugins: [
        new webpack.HotModuleReplacementPlugin()
      ],
      devtool: 'eval-source-map'
    });
  }
  break;
  case 'start:prod': {
    module.exports = merge(COMMON_CONFIGURATION, {
      plugins: [
        new webpack.DefinePlugin({
          'process.env': {
            'NODE_ENV': JSON.stringify('production')
          }
        }),
        // Only minify the `.min` bundle.
        new webpack.optimize.UglifyJsPlugin({
          include: /\.min\.js$/
        })
      ]
    });
  }
}
122 |
--------------------------------------------------------------------------------
/packages/webaudio-chnl/src/index.js:
--------------------------------------------------------------------------------
1 | import createEffects from 'webaudio-effect-units-collection';
2 |
/**
 * A channel strip built on Web Audio: an input gain, a configurable chain
 * of effect units, an output gain and an attached analyser.
 */
export default class Chnl {
  input;
  output;
  effects;
  analyser;
  isChnl = true;

  currentGraph = [];

  constructor(audioCtx) {
    this.context = audioCtx;
    this.input = audioCtx.createGain();
    this.output = audioCtx.createGain();
    this.analyser = audioCtx.createAnalyser();
    this.output.connect(this.analyser);
    this.effects = createEffects(audioCtx);
    // Wire the default chain: input -> gain -> output.
    this.setupGraph([this.input, this.effects.gain, this.output]);
  }

  /**
   * Replaces the current node chain with `graph`, connecting each node to
   * its successor. Effect units are connected via their `input` node.
   */
  setupGraph(graph) {
    // Tear down all outgoing connections of the old chain; the final node
    // (the output) keeps its connections.
    this.currentGraph
      .slice(0, -1)
      .forEach(node => node.disconnect());

    graph.slice(0, -1).forEach((node, idx) => {
      const next = graph[idx + 1];
      node.connect(next.isEffectUnit ? next.input : next);
    });

    this.currentGraph = graph;
  }

  /**
   * Inserts the named effect just before the output.
   * @throws {Error} If no effect with that name exists.
   */
  addEffect(name) {
    const effect = this.effects[name];

    if (!effect)
      throw new Error(`You tried to add an inexistent effect.`);

    // Tag the effect so removeEffect can find it by name later.
    if (!effect.name)
      this.effects[name].name = name;

    // New graph: input -> (effects already in the graph) -> effect -> output.
    const middle = this.currentGraph.filter(
      node => node !== this.input && node !== this.output
    );

    this.setupGraph([this.input, ...middle, effect, this.output]);
  }

  /** Removes the named effect from the chain and rewires the rest. */
  removeEffect(name) {
    this.setupGraph(this.currentGraph.filter(node => node.name !== name));
  }

  /** Connects this channel's output to a node or to another Chnl's input. */
  connect(node) {
    this.output.connect(node.isChnl ? node.input : node);
  }

  /** Returns the analyser attached to the channel output. */
  getAnalyser() {
    return this.analyser;
  }

}
80 |
81 | /* const audioCtx = new AudioContext();
82 | const audioElem = new Audio(song);
83 | const audioElem2 = new Audio(song);
84 | const audio = audioCtx.createMediaElementSource(audioElem);
85 | const audio2 = audioCtx.createMediaElementSource(audioElem2);
86 | const chnl = new Chnl(audioCtx);
87 | const chnl2 = new Chnl(audioCtx);
88 |
89 | audio.connect(chnl);
90 | chnl.connect(audioCtx.destination);
91 | chnl.addEffect(`delay`);
92 |
93 | audio2.connect(chnl2);
94 | chnl2.connect(audioCtx.destination);
95 |
96 | window.setTimeout(() => {
97 | //audioElem2.play();
98 | }, 500)
99 |
100 | audioElem.play();*/
101 |
102 | /*
103 | const audioCtx = new AudioContext();
104 | const chnl = new Chnl(audioCtx);
105 |
106 | const osci = audioCtx.createOscillator();
107 | osci.frequency.value = 300;
108 |
109 | osci.connect(chnl);
110 | chnl.connect(audioCtx.destination);
111 |
112 | // Activate effects
113 | chnl.addEffect(`highpass`);
114 | chnl.addEffect(`bitcrusher`);
115 |
116 | chnl.effects.gain.setValue(`gain`, 0.2);
117 | chnl.effects.highpass.setValue(`frequency`, 500);
118 | chnl.effects.bitcrusher.setValue(`bits`, 4);
119 |
120 | osci.start();
121 | */
122 |
--------------------------------------------------------------------------------
/packages/wmstr/webpack.config.js:
--------------------------------------------------------------------------------
1 |
// Webpack build configuration for the wmstr package. The npm lifecycle
// event (start:dev / start:prod) selects which configuration is exported.

const path = require('path'),
  webpack = require('webpack'), // Da bundling modules!
  NpmInstallPlugin = require('npm-install-webpack-plugin'), // Install client dependencies automatically! NOTE(review): never referenced below — confirm before removing.
  merge = require('webpack-merge'), // Merge together configurations!
  HtmlWebpackExcludeAssetsPlugin = require('html-webpack-exclude-assets-plugin'),
  HtmlWebpackPlugin = require('html-webpack-plugin');

// Source and output locations for this package.
const PATHS = {
  src: path.join(__dirname, 'src'),
  build: path.join(__dirname, 'dist')
};

const moduleName = 'wmstr';

// The npm script that launched this build (e.g. `start:dev`).
const TARGET = process.env.npm_lifecycle_event;

const COMMON_CONFIGURATION = {
  // Two bundles from the same entry: plain and minified (`.min`).
  entry: {
    [moduleName]: path.join(PATHS.src, 'index.js'),
    [moduleName + '.min']: path.join(PATHS.src, 'index.js')
  },
  resolve: {
    extensions: ['.js'], // Resolve these extensions
  },
  // UMD build so the library works in CommonJS, AMD and as a global.
  output: {
    path: PATHS.build,
    filename: '[name].js',
    libraryTarget: 'umd',
    library: moduleName,
    umdNamedDefine: true
  },
  module: {
    rules: [
      {
        test: /\.js$/,
        loader: 'babel-loader',
        include: PATHS.src,
        options: {
          cacheDirectory: true
        }
      },
      {
        // Emit media assets as content-hashed files; images additionally
        // pass through image-webpack-loader.
        test: /\.(jpe?g|png|gif|svg|wav|mp3)$/i,
        use: [
          {
            loader: 'file-loader',
            options: {
              hash: 'sha512',
              digest: 'hex',
              name: '[hash].[ext]'
            }
          },
          {
            loader: 'image-webpack-loader',
            options: {
              bypassOnDebug: true
            }
          }
        ],
        include: PATHS.src
      }
    ]
  },
  plugins: [
    // Expose the build mode to the bundled code as ENVIRONMENT.
    new webpack.DefinePlugin({
      ENVIRONMENT: JSON.stringify(TARGET === 'start:dev' ? 'development' : 'production')
    }),
    new webpack.LoaderOptionsPlugin({
      test: /\.(jpe?g|png|gif|svg)$/i,
      options: {
        imageWebpackLoader: {
          gifsicle: {
            interlaced: false
          },
          optipng: {
            optimizationLevel: 7
          }
        }
      }
    }),
    // Generate an index.html that references only the unminified bundle.
    new HtmlWebpackPlugin({
      excludeAssets: [/\.min\.js$/],
      title: moduleName
    }),
    new HtmlWebpackExcludeAssetsPlugin()
  ]
};

// NOTE(review): no `default` case — under any other npm script nothing is
// exported and webpack receives an undefined configuration.
switch(TARGET) {
  case 'start:dev': {
    module.exports = merge(COMMON_CONFIGURATION, {
      devServer: {
        contentBase: PATHS.build,
        historyApiFallback: true,
        hot: true,
        inline: true,
        stats: 'errors-only'
      },
      plugins: [
        new webpack.HotModuleReplacementPlugin()
      ],
      devtool: 'eval-source-map'
    });
  }
  break;
  case 'start:prod': {
    module.exports = merge(COMMON_CONFIGURATION, {
      plugins: [
        new webpack.DefinePlugin({
          'process.env': {
            'NODE_ENV': JSON.stringify('production')
          }
        }),
        // Only minify the `.min` bundle.
        new webpack.optimize.UglifyJsPlugin({
          include: /\.min\.js$/
        })
      ]
    });
  }
}
122 |
--------------------------------------------------------------------------------
/packages/audiochnl/webpack.config.js:
--------------------------------------------------------------------------------
1 |
// Webpack build configuration for the audiochnl package. The npm lifecycle
// event (start:dev / start:prod) selects which configuration is exported.

const path = require('path'),
  webpack = require('webpack'), // Da bundling modules!
  NpmInstallPlugin = require('npm-install-webpack-plugin'), // Install client dependencies automatically! NOTE(review): never referenced below — confirm before removing.
  merge = require('webpack-merge'), // Merge together configurations!
  HtmlWebpackExcludeAssetsPlugin = require('html-webpack-exclude-assets-plugin'),
  HtmlWebpackPlugin = require('html-webpack-plugin');

// Source and output locations for this package.
const PATHS = {
  src: path.join(__dirname, 'src'),
  build: path.join(__dirname, 'dist')
};

const moduleName = 'audiochnl';

// The npm script that launched this build (e.g. `start:dev`).
const TARGET = process.env.npm_lifecycle_event;

const COMMON_CONFIGURATION = {
  // Two bundles from the same entry: plain and minified (`.min`).
  entry: {
    [moduleName]: path.join(PATHS.src, 'index.js'),
    [moduleName + '.min']: path.join(PATHS.src, 'index.js')
  },
  resolve: {
    extensions: ['.js'], // Resolve these extensions
  },
  // UMD build so the library works in CommonJS, AMD and as a global.
  output: {
    path: PATHS.build,
    filename: '[name].js',
    libraryTarget: 'umd',
    library: moduleName,
    umdNamedDefine: true
  },
  module: {
    rules: [
      {
        test: /\.js$/,
        loader: 'babel-loader',
        include: PATHS.src,
        options: {
          cacheDirectory: true
        }
      },
      {
        // Emit media assets as content-hashed files; images additionally
        // pass through image-webpack-loader.
        test: /\.(jpe?g|png|gif|svg|wav|mp3)$/i,
        use: [
          {
            loader: 'file-loader',
            options: {
              hash: 'sha512',
              digest: 'hex',
              name: '[hash].[ext]'
            }
          },
          {
            loader: 'image-webpack-loader',
            options: {
              bypassOnDebug: true
            }
          }
        ],
        include: PATHS.src
      }
    ]
  },
  plugins: [
    // Expose the build mode to the bundled code as ENVIRONMENT.
    new webpack.DefinePlugin({
      ENVIRONMENT: JSON.stringify(TARGET === 'start:dev' ? 'development' : 'production')
    }),
    new webpack.LoaderOptionsPlugin({
      test: /\.(jpe?g|png|gif|svg)$/i,
      options: {
        imageWebpackLoader: {
          gifsicle: {
            interlaced: false
          },
          optipng: {
            optimizationLevel: 7
          }
        }
      }
    }),
    // Generate an index.html that references only the unminified bundle.
    new HtmlWebpackPlugin({
      excludeAssets: [/\.min\.js$/],
      title: moduleName
    }),
    new HtmlWebpackExcludeAssetsPlugin()
  ]
};

// NOTE(review): no `default` case — under any other npm script nothing is
// exported and webpack receives an undefined configuration.
switch(TARGET) {
  case 'start:dev': {
    module.exports = merge(COMMON_CONFIGURATION, {
      devServer: {
        contentBase: PATHS.build,
        historyApiFallback: true,
        hot: true,
        inline: true,
        stats: 'errors-only'
      },
      plugins: [
        new webpack.HotModuleReplacementPlugin()
      ],
      devtool: 'eval-source-map'
    });
  }
  break;
  case 'start:prod': {
    module.exports = merge(COMMON_CONFIGURATION, {
      plugins: [
        new webpack.DefinePlugin({
          'process.env': {
            'NODE_ENV': JSON.stringify('production')
          }
        }),
        // Only minify the `.min` bundle.
        new webpack.optimize.UglifyJsPlugin({
          include: /\.min\.js$/
        })
      ]
    });
  }
}
122 |
--------------------------------------------------------------------------------
/packages/webaudio-chnl/webpack.config.js:
--------------------------------------------------------------------------------
1 |
// Webpack build configuration for the webaudio-chnl package (bundle name
// `chnl`). The npm lifecycle event (start:dev / start:prod) selects which
// configuration is exported.

const path = require('path'),
  webpack = require('webpack'), // Da bundling modules!
  NpmInstallPlugin = require('npm-install-webpack-plugin'), // Install client dependencies automatically! NOTE(review): never referenced below — confirm before removing.
  merge = require('webpack-merge'), // Merge together configurations!
  HtmlWebpackExcludeAssetsPlugin = require('html-webpack-exclude-assets-plugin'),
  HtmlWebpackPlugin = require('html-webpack-plugin');

// Source and output locations for this package.
const PATHS = {
  src: path.join(__dirname, 'src'),
  build: path.join(__dirname, 'dist')
};

const moduleName = 'chnl';

// The npm script that launched this build (e.g. `start:dev`).
const TARGET = process.env.npm_lifecycle_event;

const COMMON_CONFIGURATION = {
  // Two bundles from the same entry: plain and minified (`.min`).
  entry: {
    [moduleName]: path.join(PATHS.src, 'index.js'),
    [moduleName + '.min']: path.join(PATHS.src, 'index.js')
  },
  resolve: {
    extensions: ['.js'], // Resolve these extensions
  },
  // UMD build so the library works in CommonJS, AMD and as a global.
  output: {
    path: PATHS.build,
    filename: '[name].js',
    libraryTarget: 'umd',
    library: moduleName,
    umdNamedDefine: true
  },
  module: {
    rules: [
      {
        test: /\.js$/,
        loader: 'babel-loader',
        include: PATHS.src,
        options: {
          cacheDirectory: true
        }
      },
      {
        // Emit media assets as content-hashed files; images additionally
        // pass through image-webpack-loader.
        test: /\.(jpe?g|png|gif|svg|wav|mp3)$/i,
        use: [
          {
            loader: 'file-loader',
            options: {
              hash: 'sha512',
              digest: 'hex',
              name: '[hash].[ext]'
            }
          },
          {
            loader: 'image-webpack-loader',
            options: {
              bypassOnDebug: true
            }
          }
        ],
        include: PATHS.src
      }
    ]
  },
  plugins: [
    // Expose the build mode to the bundled code as ENVIRONMENT.
    new webpack.DefinePlugin({
      ENVIRONMENT: JSON.stringify(TARGET === 'start:dev' ? 'development' : 'production')
    }),
    new webpack.LoaderOptionsPlugin({
      test: /\.(jpe?g|png|gif|svg)$/i,
      options: {
        imageWebpackLoader: {
          gifsicle: {
            interlaced: false
          },
          optipng: {
            optimizationLevel: 7
          }
        }
      }
    }),
    // Generate an index.html that references only the unminified bundle.
    new HtmlWebpackPlugin({
      excludeAssets: [/\.min\.js$/],
      title: moduleName
    }),
    new HtmlWebpackExcludeAssetsPlugin()
  ]
};

// NOTE(review): no `default` case — under any other npm script nothing is
// exported and webpack receives an undefined configuration.
switch(TARGET) {
  case 'start:dev': {
    module.exports = merge(COMMON_CONFIGURATION, {
      devServer: {
        contentBase: PATHS.build,
        historyApiFallback: true,
        hot: true,
        inline: true,
        stats: 'errors-only'
      },
      plugins: [
        new webpack.HotModuleReplacementPlugin()
      ],
      devtool: 'eval-source-map'
    });
  }
  break;
  case 'start:prod': {
    module.exports = merge(COMMON_CONFIGURATION, {
      plugins: [
        new webpack.DefinePlugin({
          'process.env': {
            'NODE_ENV': JSON.stringify('production')
          }
        }),
        // Only minify the `.min` bundle.
        new webpack.optimize.UglifyJsPlugin({
          include: /\.min\.js$/
        })
      ]
    });
  }
}
122 |
--------------------------------------------------------------------------------
/packages/audiolooper/webpack.config.js:
--------------------------------------------------------------------------------
1 |
2 | const path = require('path'),
3 | webpack = require('webpack'), // Da bundling modules!
4 | NpmInstallPlugin = require('npm-install-webpack-plugin'), // Install client dependencies automatically!
5 | merge = require('webpack-merge'), // Merge together configurations!
6 | HtmlWebpackExcludeAssetsPlugin = require('html-webpack-exclude-assets-plugin'),
7 | HtmlWebpackPlugin = require('html-webpack-plugin');
8 |
9 | const PATHS = {
10 | src: path.join(__dirname, 'src'),
11 | build: path.join(__dirname, 'dist')
12 | };
13 |
14 | const moduleName = 'audiolooper';
15 |
16 | const TARGET = process.env.npm_lifecycle_event;
17 |
18 | const COMMON_CONFIGURATION = {
19 | entry: {
20 | [moduleName]: path.join(PATHS.src, 'index.js'),
21 | [moduleName + '.min']: path.join(PATHS.src, 'index.js')
22 | },
23 | resolve: {
24 | extensions: ['.js'], // Resolve these extensions
25 | },
26 | output: {
27 | path: PATHS.build,
28 | filename: '[name].js',
29 | libraryTarget: 'umd',
30 | library: moduleName,
31 | umdNamedDefine: true
32 | },
33 | module: {
34 | rules: [
35 | {
36 | test: /\.js$/,
37 | loader: 'babel-loader',
38 | include: PATHS.src,
39 | options: {
40 | cacheDirectory: true
41 | }
42 | },
43 | {
44 | test: /\.(jpe?g|png|gif|svg|wav|mp3)$/i,
45 | use: [
46 | {
47 | loader: 'file-loader',
48 | options: {
49 | hash: 'sha512',
50 | digest: 'hex',
51 | name: '[hash].[ext]'
52 | }
53 | },
54 | {
55 | loader: 'image-webpack-loader',
56 | options: {
57 | bypassOnDebug: true
58 | }
59 | }
60 | ],
61 | include: PATHS.src
62 | }
63 | ]
64 | },
65 | plugins: [
66 | new webpack.DefinePlugin({
67 | ENVIRONMENT: JSON.stringify(TARGET === 'start:dev' ? 'development' : 'production')
68 | }),
69 | new webpack.LoaderOptionsPlugin({
70 | test: /\.(jpe?g|png|gif|svg)$/i,
71 | options: {
72 | imageWebpackLoader: {
73 | gifsicle: {
74 | interlaced: false
75 | },
76 | optipng: {
77 | optimizationLevel: 7
78 | }
79 | }
80 | }
81 | }),
82 | new HtmlWebpackPlugin({
83 | excludeAssets: [/\.min\.js$/],
84 | title: moduleName
85 | }),
86 | new HtmlWebpackExcludeAssetsPlugin()
87 | ]
88 | };
89 |
90 | switch(TARGET) {
91 | case 'start:dev': {
92 | module.exports = merge(COMMON_CONFIGURATION, {
93 | devServer: {
94 | contentBase: PATHS.build,
95 | historyApiFallback: true,
96 | hot: true,
97 | inline: true,
98 | stats: 'errors-only'
99 | },
100 | plugins: [
101 | new webpack.HotModuleReplacementPlugin()
102 | ],
103 | devtool: 'eval-source-map'
104 | });
105 | }
106 | break;
107 | case 'start:prod': {
108 | module.exports = merge(COMMON_CONFIGURATION, {
109 | plugins: [
110 | new webpack.DefinePlugin({
111 | 'process.env': {
112 | 'NODE_ENV': JSON.stringify('production')
113 | }
114 | }),
115 | new webpack.optimize.UglifyJsPlugin({
116 | include: /\.min\.js$/
117 | })
118 | ]
119 | });
120 | }
121 | }
122 |
--------------------------------------------------------------------------------
/packages/soundcyclejs/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "soundcyclejs",
3 | "version": "0.0.2",
4 | "lockfileVersion": 1,
5 | "requires": true,
6 | "dependencies": {
7 | "audiobufferchnl": {
8 | "version": "0.0.2",
9 | "resolved": "https://registry.npmjs.org/audiobufferchnl/-/audiobufferchnl-0.0.2.tgz",
10 | "integrity": "sha512-CLKqmFmBBhNI5924SCzn1q5OXvWGWhQWUE1ZPnyVeoQ59zMO0dIjn68WFkNDnpy4/8fV9H9j9bokzyuOD+27UQ==",
11 | "requires": {
12 | "webaudio-chnl": "0.0.8"
13 | }
14 | },
15 | "audiolooper": {
16 | "version": "0.0.10",
17 | "resolved": "https://registry.npmjs.org/audiolooper/-/audiolooper-0.0.10.tgz",
18 | "integrity": "sha512-RNE/215YQ/1hViQC9j1jmmmep1vHRcT0kSCiPbK5o+7lJPrNAkU7JnnkMI03XnfNzmGQrFibQp+76Wgz1bJsUw=="
19 | },
20 | "inline-worker": {
21 | "version": "1.1.0",
22 | "resolved": "https://registry.npmjs.org/inline-worker/-/inline-worker-1.1.0.tgz",
23 | "integrity": "sha1-VelvVJFaZCsAhyotqm/oMrQkyY0="
24 | },
25 | "recordy": {
26 | "version": "0.0.5",
27 | "resolved": "https://registry.npmjs.org/recordy/-/recordy-0.0.5.tgz",
28 | "integrity": "sha512-oxhK7yJX0mEcSnTL6yaZbma3o+azfnRn4DzpV5J5usj5DHS+y3IN2V9lXECVBTBjExQn4TUNE+GWJk/KVXE7WA==",
29 | "requires": {
30 | "webaudio-chnl": "0.0.8",
31 | "wrecorder": "github:scriptify/Wrecorder#46a04b521ab92ef3efc24d18b4b84f36ac7a362b"
32 | }
33 | },
34 | "tunajs": {
35 | "version": "1.0.0",
36 | "resolved": "https://registry.npmjs.org/tunajs/-/tunajs-1.0.0.tgz",
37 | "integrity": "sha1-SI8AoPvpWkwuMKqpEEdr29Xyclg="
38 | },
39 | "uuid": {
40 | "version": "3.1.0",
41 | "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.1.0.tgz",
42 | "integrity": "sha512-DIWtzUkw04M4k3bf1IcpS2tngXEL26YUD2M0tMDUpnUrz2hgzUBlD55a4FjdLGPvfHxS6uluGWvaVEqgBcVa+g=="
43 | },
44 | "webaudio-chnl": {
45 | "version": "0.0.8",
46 | "resolved": "https://registry.npmjs.org/webaudio-chnl/-/webaudio-chnl-0.0.8.tgz",
47 | "integrity": "sha512-y4Mg46CM1yp8DweigkD3dQlCv8fVqMLsqV7Kf4XFT02tfO0ayiqityCjFr+6eCOFajSqhd7Uzpu69R+M1NHsGA==",
48 | "requires": {
49 | "webaudio-effect-units-collection": "1.0.5"
50 | }
51 | },
52 | "webaudio-effect-unit": {
53 | "version": "1.1.4",
54 | "resolved": "https://registry.npmjs.org/webaudio-effect-unit/-/webaudio-effect-unit-1.1.4.tgz",
55 | "integrity": "sha512-cACj0AwMWV5xAfbpsOZAoN/QmadVV31oUjJB86/3dTUrdv20uZEkUrCYXiO/Do3GnDZXJWKlQQnY7IczYoxEYw=="
56 | },
57 | "webaudio-effect-units-collection": {
58 | "version": "1.0.5",
59 | "resolved": "https://registry.npmjs.org/webaudio-effect-units-collection/-/webaudio-effect-units-collection-1.0.5.tgz",
60 | "integrity": "sha512-CrD3eEoJwzI2XA85I/a/7UF3ZThECGE4Gu9Z2fZGoZTzm9w7UxVDJzr6loGtUeCW71xHYuHnky4CY/A89Sbdbw==",
61 | "requires": {
62 | "tunajs": "1.0.0",
63 | "webaudio-effect-unit": "1.1.4"
64 | }
65 | },
66 | "wmstr": {
67 | "version": "0.0.5",
68 | "resolved": "https://registry.npmjs.org/wmstr/-/wmstr-0.0.5.tgz",
69 | "integrity": "sha512-1t/u/OZIR5nepNfeNFwqRx5v/eJ5iJ2c4YifmI77BrbcBOcWK+K6S06JLULej70v1vsVzSF0ThRORIWPieOfeg==",
70 | "requires": {
71 | "webaudio-chnl": "0.0.8",
72 | "wrecorder": "github:scriptify/Wrecorder#46a04b521ab92ef3efc24d18b4b84f36ac7a362b"
73 | }
74 | },
75 | "wrecorder": {
76 | "version": "github:scriptify/Wrecorder#46a04b521ab92ef3efc24d18b4b84f36ac7a362b",
77 | "requires": {
78 | "inline-worker": "1.1.0"
79 | }
80 | }
81 | }
82 | }
83 |
--------------------------------------------------------------------------------
/packages/soundcyclejs/webpack.config.js:
--------------------------------------------------------------------------------
1 |
2 | const path = require('path'),
3 | webpack = require('webpack'), // Da bundling modules!
4 | NpmInstallPlugin = require('npm-install-webpack-plugin'), // Install client dependencies automatically!
5 | merge = require('webpack-merge'), // Merge together configurations!
6 | HtmlWebpackExcludeAssetsPlugin = require('html-webpack-exclude-assets-plugin'),
7 | HtmlWebpackPlugin = require('html-webpack-plugin');
8 |
9 | const PATHS = {
10 | src: path.join(__dirname, 'src'),
11 | build: path.join(__dirname, 'dist')
12 | };
13 |
14 | const moduleName = 'soundcycle';
15 |
16 | const TARGET = process.env.npm_lifecycle_event;
17 |
18 | const COMMON_CONFIGURATION = {
19 | entry: {
20 | [moduleName]: path.join(PATHS.src, 'index.js'),
21 | [moduleName + '.min']: path.join(PATHS.src, 'index.js')
22 | },
23 | resolve: {
24 | extensions: ['.js'], // Resolve these extensions
25 | },
26 | output: {
27 | path: PATHS.build,
28 | filename: '[name].js',
29 | libraryTarget: 'umd',
30 | library: moduleName,
31 | umdNamedDefine: true
32 | },
33 | module: {
34 | rules: [
35 | {
36 | test: /\.js$/,
37 | loader: 'babel-loader',
38 | include: PATHS.src,
39 | options: {
40 | cacheDirectory: true
41 | }
42 | },
43 | {
44 | test: /\.(jpe?g|png|gif|svg|wav|mp3)$/i,
45 | use: [
46 | {
47 | loader: 'file-loader',
48 | options: {
49 | hash: 'sha512',
50 | digest: 'hex',
51 | name: '[hash].[ext]'
52 | }
53 | },
54 | {
55 | loader: 'image-webpack-loader',
56 | options: {
57 | bypassOnDebug: true
58 | }
59 | }
60 | ],
61 | include: PATHS.src
62 | }
63 | ]
64 | },
65 | plugins: [
66 | new webpack.DefinePlugin({
67 | ENVIRONMENT: JSON.stringify(TARGET === 'start:dev' ? 'development' : 'production')
68 | }),
69 | new webpack.LoaderOptionsPlugin({
70 | test: /\.(jpe?g|png|gif|svg)$/i,
71 | options: {
72 | imageWebpackLoader: {
73 | gifsicle: {
74 | interlaced: false
75 | },
76 | optipng: {
77 | optimizationLevel: 7
78 | }
79 | }
80 | }
81 | }),
82 | new HtmlWebpackPlugin({
83 | excludeAssets: [/\.min\.js$/],
84 | title: moduleName
85 | }),
86 | new HtmlWebpackExcludeAssetsPlugin()
87 | ]
88 | };
89 |
90 | switch(TARGET) {
91 | case 'start:dev': {
92 | module.exports = merge(COMMON_CONFIGURATION, {
93 | devServer: {
94 | contentBase: PATHS.build,
95 | historyApiFallback: true,
96 | hot: true,
97 | inline: true,
98 | stats: 'errors-only'
99 | },
100 | plugins: [
101 | new webpack.HotModuleReplacementPlugin()
102 | ],
103 | devtool: 'eval-source-map'
104 | });
105 | }
106 | break;
107 | case 'start:prod': {
108 | module.exports = merge(COMMON_CONFIGURATION, {
109 | plugins: [
110 | new webpack.DefinePlugin({
111 | 'process.env': {
112 | 'NODE_ENV': JSON.stringify('production')
113 | }
114 | }),
115 | new webpack.optimize.UglifyJsPlugin({
116 | include: /\.min\.js$/
117 | })
118 | ]
119 | });
120 | }
121 | }
122 |
--------------------------------------------------------------------------------
/packages/audiobufferchnl/webpack.config.js:
--------------------------------------------------------------------------------
1 |
2 | const path = require('path'),
3 | webpack = require('webpack'), // Da bundling modules!
4 | NpmInstallPlugin = require('npm-install-webpack-plugin'), // Install client dependencies automatically!
5 | merge = require('webpack-merge'), // Merge together configurations!
6 | HtmlWebpackExcludeAssetsPlugin = require('html-webpack-exclude-assets-plugin'),
7 | HtmlWebpackPlugin = require('html-webpack-plugin');
8 |
9 | const PATHS = {
10 | src: path.join(__dirname, 'src'),
11 | build: path.join(__dirname, 'dist')
12 | };
13 |
14 | const moduleName = 'audiobufferchnl';
15 |
16 | const TARGET = process.env.npm_lifecycle_event;
17 |
18 | const COMMON_CONFIGURATION = {
19 | entry: {
20 | [moduleName]: path.join(PATHS.src, 'index.js'),
21 | [moduleName + '.min']: path.join(PATHS.src, 'index.js')
22 | },
23 | resolve: {
24 | extensions: ['.js'], // Resolve these extensions
25 | },
26 | output: {
27 | path: PATHS.build,
28 | filename: '[name].js',
29 | libraryTarget: 'umd',
30 | library: moduleName,
31 | umdNamedDefine: true
32 | },
33 | module: {
34 | rules: [
35 | {
36 | test: /\.js$/,
37 | loader: 'babel-loader',
38 | include: PATHS.src,
39 | options: {
40 | cacheDirectory: true
41 | }
42 | },
43 | {
44 | test: /\.(jpe?g|png|gif|svg|wav|mp3)$/i,
45 | use: [
46 | {
47 | loader: 'file-loader',
48 | options: {
49 | hash: 'sha512',
50 | digest: 'hex',
51 | name: '[hash].[ext]'
52 | }
53 | },
54 | {
55 | loader: 'image-webpack-loader',
56 | options: {
57 | bypassOnDebug: true
58 | }
59 | }
60 | ],
61 | include: PATHS.src
62 | }
63 | ]
64 | },
65 | plugins: [
66 | new webpack.DefinePlugin({
67 | ENVIRONMENT: JSON.stringify(TARGET === 'start:dev' ? 'development' : 'production')
68 | }),
69 | new webpack.LoaderOptionsPlugin({
70 | test: /\.(jpe?g|png|gif|svg)$/i,
71 | options: {
72 | imageWebpackLoader: {
73 | gifsicle: {
74 | interlaced: false
75 | },
76 | optipng: {
77 | optimizationLevel: 7
78 | }
79 | }
80 | }
81 | }),
82 | new HtmlWebpackPlugin({
83 | excludeAssets: [/\.min\.js$/],
84 | title: moduleName
85 | }),
86 | new HtmlWebpackExcludeAssetsPlugin()
87 | ]
88 | };
89 |
90 | switch(TARGET) {
91 | case 'start:dev': {
92 | module.exports = merge(COMMON_CONFIGURATION, {
93 | devServer: {
94 | contentBase: PATHS.build,
95 | historyApiFallback: true,
96 | hot: true,
97 | inline: true,
98 | stats: 'errors-only'
99 | },
100 | plugins: [
101 | new webpack.HotModuleReplacementPlugin()
102 | ],
103 | devtool: 'eval-source-map'
104 | });
105 | }
106 | break;
107 | case 'start:prod': {
108 | module.exports = merge(COMMON_CONFIGURATION, {
109 | plugins: [
110 | new webpack.DefinePlugin({
111 | 'process.env': {
112 | 'NODE_ENV': JSON.stringify('production')
113 | }
114 | }),
115 | new webpack.optimize.UglifyJsPlugin({
116 | include: /\.min\.js$/
117 | })
118 | ]
119 | });
120 | }
121 | }
122 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # sountility
2 | ### a collection of useful utility libraries for the webaudio-api
3 |
4 | This repository contains many different packages which I created in order to use them in my audio software projects. Initially, those packages were all split and had their own repository. But as you can probably imagine, it's a pain to maintain them in that way. As soon as I realized that, I tried to find a way to manage them more easily. And sountility is a good answer for me.
5 |
6 | ## Packages
7 | ### Here's a list of all packages with a short description.
8 | #### [webaudio-chnl](./packages/webaudio-chnl/README.md)
9 | I needed something with a LOT of audio effects integrated which can be manipulated in many different aspects. And I needed to use this in many different ways: Every native AudioNode should be able to connect to it as it would be a normal AudioNode, but also other Chnls should be able to connect to another Chnl.
10 | So I could simply and intuitively create audio graphs with a set of effects.
11 | No matter if I connect a song, mic input or even a synthesizer.
12 | #### [webaudio-effect-unit](./packages/webaudio-effect-unit/README.md)
13 | Sometimes you want to include some effects or other audioprocessors in an audio graph which can be enabled and disabled.
14 | E.g. a lowpass which can be toggled by the user.
15 | This is currently not directly possible with the Web Audio API.
16 | So The effect unit does this for you.
17 | Additionally, when you create an audio effect with the EffectUnit, you always define clear interfaces which manipulate the effect. Together with some metadata you provide, this can be very powerful, especially if you have a set of effects and a User Interface where the values of those effects should be editable.
18 | #### [webaudio-effect-units-collection](./packages/webaudio-effect-units-collection/README.md)
19 | An effects collection based on the webaudio-effect-unit.
20 | #### [audiobufferchnl](./packages/audiobufferchnl/README.md)
21 | This package extends the chnl module and adds the possibility to turn a webaudio BufferSourceNode into a chnl.
22 | #### [audiochnl](./packages/audiochnl/README.md)
23 | It's an extension of the chnl module: This module is made for playing, pausing, stopping and manipulating Audio-objects.
24 | #### [audiolooper](./packages/audiolooper/README.md)
25 | An audiolooper lets you loop your audiotracks in a very simple and intuitive way.
26 | The looping algorithm keeps the tracks automatically in sync.
27 | #### [recordy](./packages/recordy/README.md)
28 | This module abstracts away the logic needed to record audio in your browser.
29 | Since it's based on the chnl module, a lot of effects can be added to the input.
30 | #### [soundcyclejs](./packages/soundcyclejs/README.md)
31 | This library combines all of the modules of the __sountility__ collection and creates a full-featured looping library.
32 | #### [wmstr](./packages/wmstr/README.md)
33 | The scope of this module is to manage the input of many audio-channels in one instance.
34 | It's just a simple extension of the chnl module, with the only difference that you can record all the input to it and output the recorded data directly to a file.
35 | #### [wrecorder](./packages/wrecorder/README.md)
36 | This is a fork of Matt Diamond's recorderjs. Due to the fact that he dropped support for this project, I forked my own copy to have the possibility to fix bugs and add customizations.
37 |
38 | ## Build instructions
39 | It's easy to build a sountility package.
40 | Just use the following command:
41 | ```bash
42 | npm run start:prod -- package-name
43 | ```
44 | Replace _package-name_ with one of the packages mentioned above.
45 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-units-collection/src/effects/compressor.js:
--------------------------------------------------------------------------------
1 | import EffectUnit from 'webaudio-effect-unit';
2 | import Tuna from 'tunajs';
3 |
// Default settings for the compressor effect.
const DEFAULT_THRESHOLD = -1;
const DEFAULT_MAKEUPGAIN = 1;
const DEFAULT_ATTACK = 1;
const DEFAULT_RELEASE = 0;
const DEFAULT_RATIO = 4;
const DEFAULT_KNEE = 5;
const DEFAULT_AUTOMAKEUP = true;

// UI/metadata description of the compressor effect: every editable value with
// its input type, bounds and a setter that mutates the live effect chain.
export const compressorData = {
  name: 'compressor',
  values: [
    // Level (dB) above which compression starts.
    {
      name: 'threshold',
      options: {
        type: 'range',
        defaultValue: DEFAULT_THRESHOLD,
        min: -100,
        max: 0,
        step: 0.1
      },
      set: (fx, value) => {
        fx.compressor.threshold = value;
      }
    },
    // Gain applied after compression.
    {
      name: 'makeupGain',
      options: {
        type: 'range',
        defaultValue: DEFAULT_MAKEUPGAIN,
        min: 0,
        max: 1,
        step: 0.01
      },
      set: (fx, value) => {
        fx.compressor.makeupGain = value;
      }
    },
    // How quickly compression engages.
    {
      name: 'attack',
      options: {
        type: 'range',
        defaultValue: DEFAULT_ATTACK,
        min: 0,
        max: 1000,
        step: 1
      },
      set: (fx, value) => {
        fx.compressor.attack = value;
      }
    },
    // How quickly compression disengages.
    {
      name: 'release',
      options: {
        type: 'range',
        defaultValue: DEFAULT_RELEASE,
        min: 0,
        max: 1,
        step: 0.01
      },
      set: (fx, value) => {
        fx.compressor.release = value;
      }
    },
    // Compression ratio (input dB per output dB).
    {
      name: 'ratio',
      options: {
        type: 'range',
        defaultValue: DEFAULT_RATIO,
        min: 1,
        max: 20,
        step: 1
      },
      set: (fx, value) => {
        fx.compressor.ratio = value;
      }
    },
    // Softness of the transition around the threshold.
    {
      name: 'knee',
      options: {
        type: 'range',
        defaultValue: DEFAULT_KNEE,
        min: 0,
        max: 40,
        step: 1
      },
      set: (fx, value) => {
        fx.compressor.knee = value;
      }
    },
    // Toggle for automatic makeup gain.
    {
      name: 'automakeup',
      options: {
        type: 'single',
        defaultValue: DEFAULT_AUTOMAKEUP
      },
      set: (fx, value) => {
        fx.compressor.automakeup = value;
      }
    }
  ]
};
113 |
// Builds a ready-to-use compressor EffectUnit on the given AudioContext.
// `tuna` is optional: a fresh Tuna instance bound to `audioCtx` is created
// unless the caller passes one in (e.g. to share it across several effects).
export default function createCompressor(audioCtx, tuna = new Tuna(audioCtx)) {
  const effectChain = {
    compressor: () =>
      new tuna.Compressor({
        threshold: DEFAULT_THRESHOLD, // -100 to 0
        makeupGain: DEFAULT_MAKEUPGAIN, // 0 and up
        attack: DEFAULT_ATTACK, // 0 to 1000
        release: DEFAULT_RELEASE, // 0 to 3000 — NOTE(review): the UI range in compressorData caps this at 1; confirm the intended maximum
        ratio: DEFAULT_RATIO, // 1 to 20
        knee: DEFAULT_KNEE, // 0 to 40
        automakeup: DEFAULT_AUTOMAKEUP // true/false
      })
  };

  return new EffectUnit(
    {
      ...compressorData,
      effectChain
    },
    audioCtx
  );
}
133 |
--------------------------------------------------------------------------------
/packages/recordy/README.md:
--------------------------------------------------------------------------------
1 | # Recordy
2 | ## Recording for browsers - the easy way
3 | This module abstracts away the logic needed to record audio in your browser.
4 | Since it's based on the [chnl](../webaudio-chnl/README.md) module, a lot of effects can be added to the input. For information about this aspect just have a look at the documentation of Chnl.
5 | You can treat any Recordy-instance as a Chnl, because Recordy is extending Chnl.
6 | To record the input, I'm using a fork of the popular recorder.js library from Matt Diamond, [wrecorder](../wrecorder/README.md), which allows us to record the output of WebAudio-nodes. Big thanks for this awesome work!
7 |
8 | __Attention__: Since the [webaudio-effect-unit](../webaudio-effect-unit/README.md) has reached v.1.1.0, the way the effects work has changed. Have a look at its repository for more details. Make sure to do this BEFORE you update. If you have difficulties or questions, just open an issue! I am always glad if I can help. :smile:
9 |
10 | ## Installation
11 | The package is hosted on npm. You can consume it with any package manager supporting npm packages.
12 | ```bash
13 | npm i recordy -S
14 | ```
15 |
16 | ## Usage
17 | ### Creating an instance
18 | ```javascript
19 | new Recordy(audioCtx)
20 | ```
21 |
22 | To create a Recordy-instance, you have to pass exactly one argument to the constructor: an AudioContext object.
23 | Now, you can request audio input(have a look at the example for more information).
24 |
25 | ### Getting input
26 | ```javascript
27 | .getInput()
28 | ```
29 |
30 | This method needs to be executed before you can start recording. It asynchronously requests audio input. So the return value is a __Promise__, which returns a __boolean__ value. This value evaluates to true if the request for the microphone/audio-input was successful and to false if it wasn't.
31 |
32 | ### Start recording
33 | ```javascript
34 | .startRecording()
35 | ```
36 |
37 | This method is really self-explanatory.
38 | Recordy will record until you call the .stopRecording(...) method.
39 |
40 | ### Stop recording
41 | ```javascript
42 | .stopRecording({ type })
43 | ```
44 |
45 | This method stops a previously started recording.
46 | It accepts exactly one parameter: an object with the property _'type'_.
47 | This property can be one of the following: _'blob'_, _'audio'_ or _'buffer'_
48 |
49 | This method, due to its asynchronous nature, returns a promise.
50 |
51 | The promise resolves based on the value of _'type'_.
52 |
53 | ### Outputting to the speakers
54 | ```javascript
55 | .toSpeaker(gainValue)
56 | ```
57 |
58 | Recordy allows you to directly output the audio-input to the speakers, so you could directly hear the effects you apply etc. The method accepts exactly one parameter: The volume of the output. This can be a number from 0 - 1. If you set a value of 0 it's muted, if you set a value of 1 it's the maximal possible volume.
59 | __ATTENTION:__ Due to the lack of support of advanced and latency-free audio protocols like ASIO(...) in the actual browsers, there's a quite high latency between input and output (it's clearly noticeable).
60 | Therefore, it's muted by default.
61 |
62 |
63 | # Example
64 |
65 | This is a simple example which records a one-second-long track. The track gets returned as an Audio-object so it can be directly played. Also, the input is directly outputted to the speakers with a gain of 0.4.
66 | In addition, some functionality of the Chnl module was applied: The bitcrusher effect was enabled.
67 |
68 | ```javascript
69 | const audioCtx = new AudioContext();
70 | const r = new Recordy(audioCtx);
71 |
72 | r.getInput()
73 | .then(val => {
74 | r.startRecording();
75 |
76 | window.setTimeout(() => {
77 | r.stopRecording({ type: `audio` })
78 | .then(audio => {
79 | audio.play();
80 | });
81 | }, 1000);
82 | r.toSpeaker(0.4);
83 | r.effects.bitcrusher.enable();
84 | });
85 | ```
86 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-unit/src/EffectUnit.js:
--------------------------------------------------------------------------------
1 | import { functionsToValues, bindMethodsToValues, objToArray, filterValue } from './util';
2 |
3 |
/**
 * EffectUnit wraps a chain of audio processors so it can be toggled on/off and
 * wired into a WebAudio graph like a regular AudioNode.
 *
 * Signal routing:
 *   input -> effectGain -> [effectChain...] -> output   (wet path)
 *   input -> directGain -> output                       (dry/bypass path)
 * enable()/disable() switch between the two paths by setting the two gains.
 */
class EffectUnit {

  audioCtx;
  name;
  effectChain;
  values;
  isEffectUnit = true; // duck-typing marker so other nodes can detect EffectUnits
  wasInitialized = false;

  effectGain;  // gain 1 ==> effect on; gain 0 ==> effect off
  directGain;  // gain 0 ==> effect on; gain 1 ==> effect off

  output;
  input;

  options;

  /**
   * @param {Object} options - must have the following structure:
   *   {
   *     name: The name of the effect to identify it later,
   *     effectChain: The object which contains the audioprocessors,
   *     values: An array which contains the available values for this effect
   *             and the according methods to edit them
   *   }
   * @param {AudioContext} audioCtx - context all internal nodes are created on.
   * @throws {Error} if no AudioContext is given.
   */
  constructor(options = { name: ``, effectChain: {}, values: [] }, audioCtx) {
    if (!audioCtx)
      throw new Error(`The AudioContext specified (2° parameter) is not defined!`);

    // BUGFIX: was `this.name = name`, which read an undefined/global `name`
    // binding instead of the option passed in by the caller.
    this.name = options.name;
    this.audioCtx = audioCtx;
    this.options = options;

    this.effectGain = this.audioCtx.createGain(); // Set to 1 ==> Effect is on; Set to 0 ==> Effect is off
    this.directGain = this.audioCtx.createGain(); // Set to 0 ==> Effect is on; Set to 1 ==> Effect is off

    this.output = this.audioCtx.createGain();
    this.input = this.audioCtx.createGain();

    this.input.connect(this.effectGain);
    this.input.connect(this.directGain);

    // Connect direct gain to output
    this.directGain.connect(this.output);
  }

  // Connects nodeA to nodeB, where nodeB may be a plain AudioNode, an
  // EffectUnit or anything else exposing an `input` node.
  static connectNodes(nodeA, nodeB) {
    if (nodeB.isEffectUnit || nodeB.input)
      nodeA.connect(nodeB.input);
    else
      nodeA.connect(nodeB);
  }

  // Lazily builds the effect chain and applies declared defaults; runs once.
  init() {
    if (this.wasInitialized)
      return;

    this.effectChain = functionsToValues(this.options.effectChain);

    // Give all 'set'-methods of the specified values the effectChain as the first parameter
    this.values = bindMethodsToValues(this.options.values, this.effectChain);

    // Execute all 'set'-methods of values which declare a 'defaultValue'.
    // BUGFIX: the previous truthiness check skipped legitimate falsy defaults
    // such as 0 or false (e.g. a release of 0).
    this.values.forEach((value) => {
      if (value.options.defaultValue !== undefined)
        value.set(value.options.defaultValue);
    });

    this.setupEffectChain();
    this.wasInitialized = true;
  }

  // Routes the signal through the effect chain (initializing it on first use).
  enable() {
    this.init();
    this.effectGain.gain.value = 1;
    this.directGain.gain.value = 0;
  }

  // Bypasses the effect chain: the dry path carries the signal.
  disable() {
    this.effectGain.gain.value = 0;
    this.directGain.gain.value = 1;
  }

  // Routes this unit's output into `node` (plain AudioNode or EffectUnit).
  connect(node) {
    // Reuse the shared routing logic instead of duplicating it here.
    EffectUnit.connectNodes(this.output, node);
  }

  // Sets the value named `valueName` via its bound 'set' method.
  setValue(valueName, value) {
    filterValue(this.values, valueName).set(value);
  }

  // Returns the options (type, bounds, default) of the value named `valueName`.
  getValueOptions(valueName) {
    return filterValue(this.values, valueName).options;
  }

  // Wires effectGain -> effect1 -> ... -> effectN -> output.
  setupEffectChain() {
    // Connect the effectChain
    const effects = objToArray(this.effectChain);

    // Effect chain not empty?
    if (effects.length >= 1) {
      // Connect effect gain to first effect
      EffectUnit.connectNodes(this.effectGain, effects[0]);
      // Connect all other effect to the following effect
      for (let i = 0; i < (effects.length - 1); i++)
        EffectUnit.connectNodes(effects[i], effects[i + 1]);

      // Connect the last effect to the output gain
      effects[effects.length - 1].connect(this.output);
    }
  }

  disconnect() {
    // Disconnect all outgoing connections
    this.output.disconnect();
  }

}
128 |
129 | export default EffectUnit;
130 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-units-collection/README.md:
--------------------------------------------------------------------------------
1 | # Webaudio effect units collection
2 |
3 | Effects for everyone!
4 |
5 | All effects of this collection are based on the [webaudio-effect-unit](../webaudio-effect-unit/README.md) module.
6 | If you want to gain a deeper knowledge on how you can use those effects, have a look at this repository.
7 |
8 | Here a list of all the effects included in this package:
9 |
10 | * Gain
11 | * Chorus
12 | * Delay
13 | * Phaser
14 | * Overdrive
15 | * Compressor
16 | * Lowpass
17 | * Highpass
18 | * Tremolo
19 | * Wahwah
20 | * Bitcrusher
21 | * Moog
22 | * PingPongDelay
23 | * Reverb
24 | * Dub Delay
25 |
26 | ## Installation
27 | Through npm:
28 | `npm i webaudio-effect-units-collection -S`
29 |
30 | ## Module usage
31 | #### Default export
32 | Per default, the module exports the following function:
33 | ```
34 | createEffectCollection(audioCtx)
35 | ```
36 | _Example_
37 | ```javascript
38 | import createEffectCollection from 'webaudio-effect-units-collection';
39 | const audioCtx = new AudioContext();
40 | const { gain, chorus, delay, phaser } = createEffectCollection(audioCtx)
41 | ```
42 |
43 | This function requires an AudioContext object as its 1° argument.
44 | It returns an object with all effects contained by this package. The property names of this object are equivalent to the effect names.
45 |
46 | #### Effect data
47 | If you are using this package, it is likely that you want to include a possibility to manipulate the effects via a User Interface.
48 | Therefore, the needed information to accordingly represent those effects in an UI is exported manually (so you could seamlessly integrate it in the application state, if needed).
49 |
50 | An array with the name 'EFFECT_DATA' is exported for this purpose.
51 |
52 | To understand the following, you should have read the documentation of the [webaudio-effect-unit](https://github.com/scriptify/webaudio-effect-unit) module.
53 |
54 | Here's an example of an object contained by this array:
55 |
56 | ```javascript
57 | {
58 | name: 'chorus',
59 | values: [
60 |
61 | {
62 | name: 'rate',
63 | options: {
64 | type: 'range',
65 | defaultValue: 1.5,
66 | min: 0.01,
67 | max: 8,
68 | step: 0.01
69 | },
70 | set: (effectChain, value) => {
71 | effectChain.chorus.rate = value;
72 | }
73 | },
74 |
75 | {
76 | name: 'feedback',
77 | options: {
78 | type: 'range',
79 | defaultValue: 0.2,
80 | min: 0,
81 | max: 1,
82 | step: 0.01
83 | },
84 | set: (effectChain, value) => {
85 | effectChain.chorus.feedback = value;
86 | }
87 | },
88 |
89 | {
90 | name: 'delay',
91 | options: {
92 | type: 'range',
93 | defaultValue: 0.0045,
94 | min: 0,
95 | max: 1,
96 | step: 0.01
97 | },
98 | set: (effectChain, value) => {
99 | effectChain.chorus.delay = value;
100 | }
101 | }
102 |
103 | ]
104 | }
105 | ```
106 |
107 | If you correctly understood the [webaudio-effect-unit](https://github.com/scriptify/webaudio-effect-unit) module, you should have got it. If it's too unclear, please feel free to open an issue with questions! I am glad if I can help you. :smile:
108 |
109 | #### Use just single effects
110 | If you don't want to import all effects, you can also only import single ones. The effect-files are contained in the 'effects' directory.
111 |
112 | The exported function to create the EffectUnit follows the following pattern:
113 | __create{EffectName}(audioCtx: AudioContext)__
114 |
115 | To just import the data for an effect, the following pattern applies:
116 | __{EffectName}Data__
117 |
118 | _Example_
119 | ```javascript
120 | import createGain, { gainData } from 'webaudio-effect-units-collection/effects/gain';
122 |
123 | const audioCtx = new AudioContext();
124 | const gainEffect = createGain(audioCtx);
125 |
126 | gainEffect.setValue('gain', 0.4);
127 |
128 | ```
129 |
--------------------------------------------------------------------------------
/packages/webaudio-chnl/README.md:
--------------------------------------------------------------------------------
1 | # Chnl - one channel, all effects.
2 |
3 | ## Why would I ever want a _Chnl_?
4 |
5 | I needed something with a LOT of audio effects integrated which can be manipulated in many different aspects. And I needed to use this in many different ways: Every native AudioNode should be able to connect to it as it would be a normal AudioNode, but also other Chnls should be able to connect to another Chnl.
6 | So I could simply and intuitively create audio graphs with a set of effects.
7 | No matter if I connect a song, mic input or even a synthesizer.
8 |
9 | __Therefore I created _Chnl_.__
10 |
11 | ## Installation
12 | Via npm
13 | ```
14 | npm i webaudio-chnl -S
15 | ```
16 |
17 |
18 | ## Usage
19 | It's really simple. And intuitive.
20 | ### Creating a __Chnl__
21 | You can create a new _Chnl_ instance by constructing the _Chnl_-class with your _AudioContext object_ as the 1° parameter.
22 | ```javascript
23 | new Chnl(audioCtx)
24 | ```
25 |
26 | ### Effects
27 | You have access to __a lot of effects__.
28 | Under the hood, _Chnl_ uses the [webaudio-effect-units-collection](../webaudio-effect-units-collection/README.md) module. So you have access to a lot of effects which can be enabled and disabled.
29 |
30 | You can access the effects with the _effects_ property of your _Chnl_ instance.
31 |
32 |
33 | _Example_
34 | ```javascript
35 | const channel = new Chnl(audioCtx);
36 | const {
37 | gain,
38 | chorus,
39 | delay,
40 | phaser,
41 | overdrive,
42 | compressor,
43 | lowpass,
44 | highpass,
45 | tremolo,
46 | wahwah,
47 | bitcrusher,
48 | moog,
49 | pingPongDelay
50 | } = channel.effects;
51 |
52 | gain.setValue('gain', 0.55);
53 |
54 | channel.addEffect('delay');
55 | delay.setValue('feedback', 0.2);
56 | ```
57 |
58 | ### Connecting
59 | #### Connect to a Chnl
60 | You can connect any _normal AudioNode_ to a _Chnl_:
61 | ```javascript
62 | const channel = new Chnl(audioCtx);
63 | const gain = audioCtx.createGain();
64 | gain.connect(channel);
65 | ```
66 | But you can also connect a _Chnl_ to a _normal AudioNode_:
67 | ```javascript
68 | const channel = new Chnl(audioCtx);
69 | const gain = audioCtx.createGain();
70 | channel.connect(gain);
71 | ```
72 | You can even connect one _Chnl_ to another one:
73 | ```javascript
74 | const channel1 = new Chnl(audioCtx);
75 | const channel2 = new Chnl(audioCtx);
76 | channel1.connect(channel2);
77 | ```
78 | Have fun connecting!
79 |
80 | ### Activating an effect (since v0.0.6)
81 | Per default, no effect is connected in the interior audio graph. In previous versions, this was the case. I decided to revise the way how effects are used. Because if all effects are initially actively connected, there's way more needless audio processing (also if the effects are initially turned off). Therefore I decided to connect the effects only if they are explicitly needed.
82 |
83 | __TLDR:__ Before using an effect, you need to activate it. When activating an effect, the whole audiograph will be rebuilt.
84 |
85 | __Note:__ The 'gain'-effect is already activated by default.
86 |
87 | _Example_:
88 | ```javascript
89 | const chnl = new Chnl(audioCtx);
90 | chnl.addEffect('delay');
91 | chnl.addEffect('chorus');
92 | chnl.effects.delay.setValue('delayTime', 500);
93 | ```
94 |
95 | ### Disabling an effect (since v0.0.6)
96 | Since you can activate an effect, it's no surprise that you can also disable the same effect. When you disable an effect, it will be removed from the audiograph to prevent needless processing.
97 | _Example_:
98 | ```javascript
99 | const chnl = new Chnl(audioCtx);
100 | chnl.addEffect('delay');
101 | chnl.effects.delay.setValue('delayTime', 500);
102 | chnl.removeEffect('delay');
103 | ```
104 |
105 | ### Final example
106 | This a bit more advanced example, which connects an oscillator to a Chnl and applies some effects.
107 | ```javascript
108 | const audioCtx = new AudioContext();
109 | const chnl = new Chnl(audioCtx);
110 |
111 | const osci = audioCtx.createOscillator();
112 | osci.frequency.value = 300;
113 |
114 | osci.connect(chnl);
115 | chnl.connect(audioCtx.destination);
116 |
117 | // Activate effects
118 | chnl.addEffect('highpass');
119 | chnl.addEffect('bitcrusher');
120 |
121 | chnl.effects.gain.setValue('gain', 0.2);
122 | chnl.effects.highpass.setValue('frequency', 500);
123 | chnl.effects.bitcrusher.setValue('bits', 4);
124 |
125 | osci.start();
126 | ```
127 |
--------------------------------------------------------------------------------
/packages/audiolooper/src/index.js:
--------------------------------------------------------------------------------
export default class AudioLooper {

  // Tracks paused via pauseTrack(), keyed by id (kept so they can resume).
  pausedTracks = new Map();
  // Currently playing tracks, keyed by id.
  bufferNodes = new Map();
  audioCtx;
  // The first track ever added; every later track is phase-locked to it.
  firstTrack;

  constructor(audioCtx = new AudioContext()) {
    this.audioCtx = audioCtx;
  }

  /**
   * Adds a track and starts looping it in sync with the first track.
   * @param {Object} params
   * @param {*} params.id - Unique track identifier.
   * @param {AudioBuffer} params.audioBuffer - Audio data to loop (mutated in place by the fade).
   * @param {boolean} [params.doProcessing=true] - When true (and this is not the first
   *   track), the buffer is padded to a whole multiple of the first track's length so
   *   both loops share a common period.
   * @param {Function} [params.trackAdded] - Receives the created AudioBufferSourceNode
   *   so the caller can connect it to the audio graph.
   */
  addTrack({ id, audioBuffer, doProcessing = true, trackAdded = () => {} }) {
    const isFirstTrack = (this.bufferNodes.size === 0);
    let finalAudioBuffer;

    // Step 1: short linear fade-in/fade-out to avoid clicks at the loop seam.
    for (let channel = 0; channel < audioBuffer.numberOfChannels; channel++) {
      const channelData = audioBuffer.getChannelData(channel);
      const FADE_LENGTH = 100;
      for (let i = 0; i < FADE_LENGTH && i < channelData.length; i++) {
        const fadeOutPos = channelData.length - i - 1;
        channelData[i] = (channelData[i] * i) / FADE_LENGTH;
        channelData[fadeOutPos] = (channelData[fadeOutPos] * i) / FADE_LENGTH;
      }
    }

    if (!isFirstTrack && doProcessing) {
      // Step 2: pad the buffer to a whole multiple of the first track's length,
      // so the new loop stays in phase with the first one.
      const firstTrackBuffer = this.firstTrack.bufferNode.buffer;
      const percentualRatio = Math.ceil(audioBuffer.length / firstTrackBuffer.length);
      const newAudioBuffer = this.audioCtx.createBuffer(audioBuffer.numberOfChannels, firstTrackBuffer.length * percentualRatio, firstTrackBuffer.sampleRate);

      for (let channel = 0; channel < newAudioBuffer.numberOfChannels; channel++) {
        const channelDataNew = newAudioBuffer.getChannelData(channel);
        const channelDataCurrent = audioBuffer.getChannelData(channel);
        channelDataNew.set(channelDataCurrent, 0);
      }

      finalAudioBuffer = newAudioBuffer;
    } else
      finalAudioBuffer = audioBuffer;

    // Create the looping source node.
    const bufferNode = this.audioCtx.createBufferSource();
    bufferNode.buffer = finalAudioBuffer;
    bufferNode.loop = true;

    const track = {
      duration: bufferNode.buffer.duration,
      startedAt: this.audioCtx.currentTime,
      getCurrentTime: () => this.audioCtx.currentTime - track.startedAt,
      bufferNode,
      trackAdded // Saved so the node can be re-created when resuming via playTrack().
    };

    this.bufferNodes.set(id, track);

    if (isFirstTrack)
      this.firstTrack = track;

    // Start the new loop at the phase the first track currently has.
    const part = this.audioCtx.currentTime - this.firstTrack.startedAt;
    const numParts = Math.floor(part / this.firstTrack.duration);
    const offset = part - (numParts * this.firstTrack.duration);

    bufferNode.start(0, offset);

    // Hand the bufferNode to the caller so it can be connected etc.
    trackAdded(bufferNode);
  }

  /**
   * Pauses a track: remembers it for later resumption and removes it
   * from the active set. Throws if the track does not exist.
   */
  pauseTrack({ id }) {
    if (this.exists(id)) {
      const track = this.bufferNodes.get(id);
      this.pausedTracks.set(id, track);
      // removeTrack() stops the bufferNode; stopping it here as well would
      // throw an InvalidStateError on the second stop() call.
      this.removeTrack({ id });
    }
  }

  /**
   * Resumes a previously paused track by re-adding its buffer
   * (without re-processing it). Throws if the track was never paused.
   */
  playTrack({ id }) {
    if (this.pausedTracks.has(id)) {
      const { bufferNode: { buffer: audioBuffer }, trackAdded } = this.pausedTracks.get(id);
      this.pausedTracks.delete(id); // No longer paused once resumed.
      this.addTrack({ id, audioBuffer, doProcessing: false, trackAdded });
    } else
      throw new Error(`You tried to play an inexistent track!`);
  }

  /**
   * Stops and removes a track. Throws if the track does not exist.
   */
  removeTrack({ id }) {
    if (this.exists(id)) {
      const track = this.bufferNodes.get(id);
      track.bufferNode.stop();
      this.bufferNodes.delete(id);
    }
  }

  /**
   * Returns the playback position (seconds) of the given track.
   * Throws if the track does not exist.
   */
  getCurrentTime({ id }) {
    if (this.exists(id)) {
      const track = this.bufferNodes.get(id);
      return track.getCurrentTime();
    }
    throw new Error(`You tried to access an inexistent track!`);
  }

  /**
   * Returns true if the track is active; throws otherwise
   * (note: never returns false — callers rely on the throw).
   */
  exists(id) {
    if (!this.bufferNodes.has(id))
      throw new Error(`You tried to access an inexistent track!`);

    return true;
  }

}
119 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-units-collection/src/effects/reverb.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable */
2 |
3 | import { baseEffect, isNumber, isInRange, getWetLevel, getDryLevel } from './pizzicato';
4 | import EffectUnit from 'webaudio-effect-unit';
5 |
6 | let audioCtx;
7 |
/**
 * Convolution-based reverb effect (adapted from Pizzicato.js).
 * NOTE(review): stores the AudioContext in a module-level variable, so all
 * Reverb instances share the context of the most recently constructed one —
 * confirm only one AudioContext is ever used per page.
 * @param {AudioContext} audioCtxA - Context used to create the nodes.
 * @param {Object} [options] - Initial values for mix/time/decay/reverse.
 */
const Reverb = function(audioCtxA, options) {
  var self = this;
  audioCtx = audioCtxA;

  this.options = {};
  options = options || this.options;

  // Defaults applied below for any option not provided.
  var defaults = {
    mix: 0.5,
    time: 0.01,
    decay: 0.01,
    reverse: false
  };

  // Audio graph: the input splits into a dry path and a convolver (wet)
  // path; both are mixed into the output.
  this.input = audioCtx.createGain();
  this.reverbNode = audioCtx.createConvolver();
  this.output = audioCtx.createGain();
  this.wetGainNode = audioCtx.createGain();
  this.dryGainNode = audioCtx.createGain();

  this.input.connect(this.reverbNode);
  this.reverbNode.connect(this.wetGainNode);
  this.input.connect(this.dryGainNode);
  this.dryGainNode.connect(this.output);
  this.wetGainNode.connect(this.output);

  // Assign options, falling back to defaults. Each assignment goes through
  // the accessors defined on the prototype, which update the gain nodes and
  // rebuild the impulse response as needed — so the nodes above must exist
  // before this loop runs.
  for (var key in defaults) {
    this[key] = options[key];
    this[key] = (this[key] === undefined || this[key] === null) ? defaults[key] : this[key];
  }

  // Build the initial impulse response for the convolver.
  (buildImpulse.bind(this))();
};
41 |
// Parameter accessors. Each setter validates its input with isInRange
// (silently ignoring out-of-range values) and applies the change to the
// audio graph, rebuilding the impulse response where necessary.
Reverb.prototype = Object.create(baseEffect, {

  // Dry/wet mix (0..1); updates both mixing gain nodes.
  mix: {
    enumerable: true,

    get: function() {
      return this.options.mix;
    },

    set: function (mix) {
      if (!isInRange(mix, 0, 1))
        return;

      this.options.mix = mix;
      this.dryGainNode.gain.value = getDryLevel(this.mix);
      this.wetGainNode.gain.value = getWetLevel(this.mix);
    }
  },

  // Length of the impulse response in seconds (0.0001..10);
  // changing it rebuilds the impulse buffer.
  time: {
    enumerable: true,

    get: function () {
      return this.options.time;
    },

    set: function (time) {
      if (!isInRange(time, 0.0001, 10))
        return;

      this.options.time = time;
      (buildImpulse.bind(this))();
    }
  },

  // Decay exponent of the impulse envelope (0.0001..10);
  // changing it rebuilds the impulse buffer.
  decay: {
    enumerable: true,

    get: function () {
      return this.options.decay;
    },

    set: function (decay) {
      if (!isInRange(decay, 0.0001, 10))
        return;

      this.options.decay = decay;
      (buildImpulse.bind(this))();
    }

  },

  // Whether the impulse response is reversed (boolean, unvalidated);
  // changing it rebuilds the impulse buffer.
  reverse: {
    enumerable: true,

    get: function () {
      return this.options.reverse;
    },

    set: function (reverse) {
      this.options.reverse = reverse;
      (buildImpulse.bind(this))();
    }
  }

});
108 |
/**
 * Builds a noise-based impulse response and installs it on the convolver.
 * Must be called with `this` bound to a Reverb instance; reads the
 * module-level `audioCtx` plus `this.time`, `this.decay` and `this.reverse`.
 */
function buildImpulse() {
  const sampleCount = audioCtx.sampleRate * this.time;
  const impulse = audioCtx.createBuffer(2, sampleCount, audioCtx.sampleRate);
  const left = impulse.getChannelData(0);
  const right = impulse.getChannelData(1);

  for (let i = 0; i < sampleCount; i++) {
    // Walk the envelope backwards when the reverse flag is set.
    const pos = this.reverse ? sampleCount - i : i;
    const envelope = Math.pow(1 - pos / sampleCount, this.decay);
    left[i] = (Math.random() * 2 - 1) * envelope;
    right[i] = (Math.random() * 2 - 1) * envelope;
  }

  this.reverbNode.buffer = impulse;
}
125 |
/**
 * Metadata and setters describing the Reverb effect for the EffectUnit API.
 * Each `set` function receives the effect chain and writes the new value
 * onto the underlying Reverb node (named `reverb` in the chain).
 */
export const reverbData = {
  name: `reverb`,
  values: [

    {
      name: `mix`,
      options: {
        type: `range`,
        defaultValue: 0.5,
        min: 0,
        max: 1,
        step: 0.01
      },
      set: (effectChain, value) => {
        effectChain.reverb.mix = value;
      }
    },

    {
      name: `time`,
      options: {
        type: `range`,
        defaultValue: 5,
        min: 0.0001,
        max: 10,
        step: 0.001
      },
      set: (effectChain, value) => {
        effectChain.reverb.time = value;
      }
    },

    {
      name: `decay`,
      options: {
        type: `range`,
        defaultValue: 3,
        min: 0.0001,
        max: 10,
        step: 0.001
      },
      set: (effectChain, value) => {
        effectChain.reverb.decay = value;
      }
    },

    {
      name: `reverse`,
      options: {
        type: `switch`,
        defaultValue: false
      },
      set: (effectChain, value) => {
        // Fixed: previously wrote to `effectChain.reverse`, which does not
        // exist — the effect-chain entry is named `reverb`.
        effectChain.reverb.reverse = value;
      }
    }

  ]
};
185 |
/**
 * Creates an EffectUnit wrapping the Reverb effect.
 * @param {AudioContext} audioCtx - Context the effect unit operates in.
 * @returns {EffectUnit} The configured reverb effect unit.
 */
export default function createReverbDelay(audioCtx) {
  const unitOptions = {
    ...reverbData,
    effectChain: {
      reverb: () => new Reverb(audioCtx)
    }
  };

  return new EffectUnit(unitOptions, audioCtx);
}
194 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-units-collection/src/effects/dubDelay.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable */
2 |
3 | import { baseEffect, isNumber, isInRange, getWetLevel, getDryLevel } from './pizzicato';
4 | import EffectUnit from 'webaudio-effect-unit';
5 |
/**
 * Feedback delay with a lowpass-filtered feedback loop ("dub" style delay),
 * adapted from Pizzicato.js.
 * @param {AudioContext} audioCtx - Context used to create the nodes.
 * @param {Object} [options] - Initial values for feedback/time/mix/cutoff.
 */
let DubDelay = function(audioCtx, options) {

  this.options = {};
  options = options || this.options;

  // Defaults applied below for any option not provided.
  var defaults = {
    feedback: 0.6,
    time: 0.7,
    mix: 0.5,
    cutoff: 700
  };

  this.input = audioCtx.createGain();
  this.output = audioCtx.createGain();
  this.dryGainNode = audioCtx.createGain();
  this.wetGainNode = audioCtx.createGain();
  this.feedbackGainNode = audioCtx.createGain();
  this.delayNode = audioCtx.createDelay();
  this.bqFilterNode = audioCtx.createBiquadFilter();


  // dry mix: the input passes straight through to the output.
  this.input.connect(this.dryGainNode);
  this.dryGainNode.connect(this.output);

  // wet mix: the input feeds the feedback loop
  // (feedback gain -> biquad filter -> delay), whose output is mixed into
  // the wet gain and also re-enters the loop.
  this.input.connect(this.wetGainNode);
  this.input.connect(this.feedbackGainNode);

  this.feedbackGainNode.connect(this.bqFilterNode);
  this.bqFilterNode.connect(this.delayNode);
  this.delayNode.connect(this.feedbackGainNode);
  this.delayNode.connect(this.wetGainNode);

  this.wetGainNode.connect(this.output);

  // Assign options, falling back to defaults. Each assignment goes through
  // the accessors on the prototype, which validate the value and apply it
  // to the corresponding node — so the nodes above must exist first.
  for (var key in defaults) {
    this[key] = options[key];
    this[key] = (this[key] === undefined || this[key] === null) ? defaults[key] : this[key];
  }
};
47 |
// Parameter accessors. Each setter validates its input with isInRange
// (silently ignoring out-of-range values) and applies the change to both
// this.options and the corresponding AudioNode.
DubDelay.prototype = Object.create(baseEffect, {

  /**
   * Gets and sets the dry/wet mix (0..1).
   */
  mix: {
    enumerable: true,

    get: function() {
      return this.options.mix;
    },

    set: function(mix) {
      if (!isInRange(mix, 0, 1))
        return;

      this.options.mix = mix;
      this.dryGainNode.gain.value = getDryLevel(this.mix);
      this.wetGainNode.gain.value = getWetLevel(this.mix);
    }
  },

  /**
   * Time between each delayed sound, in seconds (0..180).
   */
  time: {
    enumerable: true,

    get: function() {
      return this.options.time;
    },

    set: function(time) {
      if (!isInRange(time, 0, 180))
        return;

      this.options.time = time;
      this.delayNode.delayTime.value = time;
    }
  },

  /**
   * Strength of each of the echoed delayed sounds (0..1).
   */
  feedback: {
    enumerable: true,

    get: function() {
      return this.options.feedback;
    },

    set: function(feedback) {
      if (!isInRange(feedback, 0, 1))
        return;

      // parseFloat takes a single argument; the former second argument
      // (a parseInt-style radix) was silently ignored.
      this.options.feedback = parseFloat(feedback);
      this.feedbackGainNode.gain.value = this.feedback;
    }
  },

  /**
   * Lowpass cutoff frequency applied to the delay repeats, in Hz (0..4000).
   */
  cutoff: {
    enumerable: true,

    get: function() {
      return this.options.cutoff;
    },

    set: function(cutoff) {
      if (!isInRange(cutoff, 0, 4000))
        return;

      this.options.cutoff = cutoff;
      this.bqFilterNode.frequency.value = this.cutoff;
    }
  }

});
128 |
/**
 * Metadata and setters describing the DubDelay effect for the EffectUnit
 * API. Each `set` callback receives the effect chain and forwards the new
 * value to the underlying DubDelay node (named `dubDelay` in the chain).
 */
export const dubDelayData = {
  name: `dubDelay`,
  values: [
    {
      name: `mix`,
      options: { type: `range`, defaultValue: 0.5, min: 0, max: 1, step: 0.01 },
      // Dry/wet balance of the delay.
      set: (chain, val) => {
        chain.dubDelay.mix = val;
      }
    },
    {
      name: `feedback`,
      options: { type: `range`, defaultValue: 0.6, min: 0, max: 1, step: 0.01 },
      // Strength of each echoed repeat.
      set: (chain, val) => {
        chain.dubDelay.feedback = val;
      }
    },
    {
      name: `time`,
      options: { type: `range`, defaultValue: 0.7, min: 0, max: 180, step: 0.01 },
      // Seconds between repeats.
      set: (chain, val) => {
        chain.dubDelay.time = val;
      }
    },
    {
      name: `cutoff`,
      options: { type: `range`, defaultValue: 700, min: 0, max: 4000, step: 100 },
      // Lowpass cutoff applied to the feedback path.
      set: (chain, val) => {
        chain.dubDelay.cutoff = val;
      }
    }
  ]
};
191 |
/**
 * Creates an EffectUnit wrapping the DubDelay effect.
 * @param {AudioContext} audioCtx - Context the effect unit operates in.
 * @returns {EffectUnit} The configured dub-delay effect unit.
 */
export default function createDubDelay(audioCtx) {
  const unitOptions = {
    ...dubDelayData,
    effectChain: {
      dubDelay: () => new DubDelay(audioCtx)
    }
  };

  return new EffectUnit(unitOptions, audioCtx);
}
200 |
--------------------------------------------------------------------------------
/packages/audiolooper/dist/audiolooper.min.js:
--------------------------------------------------------------------------------
1 | !function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define("audiolooper",[],e):"object"==typeof exports?exports.audiolooper=e():t.audiolooper=e()}(this,function(){return function(t){function e(n){if(r[n])return r[n].exports;var o=r[n]={i:n,l:!1,exports:{}};return t[n].call(o.exports,o,o.exports,e),o.l=!0,o.exports}var r={};return e.m=t,e.c=r,e.i=function(t){return t},e.d=function(t,r,n){e.o(t,r)||Object.defineProperty(t,r,{configurable:!1,enumerable:!0,get:n})},e.n=function(t){var r=t&&t.__esModule?function(){return t.default}:function(){return t};return e.d(r,"a",r),r},e.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},e.p="",e(e.s=8)}([function(t,e,r){t.exports=!r(3)(function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a})},function(t,e){t.exports=function(t){return"object"==typeof t?null!==t:"function"==typeof t}},function(t,e){var r=t.exports={version:"2.4.0"};"number"==typeof __e&&(__e=r)},function(t,e){t.exports=function(t){try{return!!t()}catch(t){return!0}}},function(t,e){var r=t.exports="undefined"!=typeof window&&window.Math==Math?window:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")();"number"==typeof __g&&(__g=r)},function(t,e,r){var n=r(12),o=r(17),u=r(19),i=Object.defineProperty;e.f=r(0)?Object.defineProperty:function(t,e,r){if(n(t),e=u(e,!0),n(r),o)try{return i(t,e,r)}catch(t){}if("get"in r||"set"in r)throw TypeError("Accessors not supported!");return"value"in r&&(t[e]=r.value),t}},function(t,e,r){"use strict";e.__esModule=!0,e.default=function(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}},function(t,e,r){"use strict";e.__esModule=!0;var n=r(9),o=function(t){return t&&t.__esModule?t:{default:t}}(n);e.default=function(){function t(t,e){for(var r=0;r0&&void 0!==arguments[0]?arguments[0]:new AudioContext;(0,u.default)(this,t),this.pausedTracks=new Map,this.bufferNodes=new 
Map,this.audioCtx=e}return(0,f.default)(t,[{key:"addTrack",value:function(t){for(var e=this,r=t.id,n=t.audioBuffer,o=t.doProcessing,u=void 0===o||o,i=t.trackAdded,f=void 0===i?function(){}:i,a=0===this.bufferNodes.size,c=void 0,s=0;s {
130 | if (hasInput)
131 | console.log(`Got mic input!`);
132 | else
133 | console.error(`Could not get mic input.`);
134 | });
135 |
136 | function render() {
137 | const looper = new AudioLooper(audioCtx);
138 |
139 |
140 | const mainDiv = document.createElement(`div`);
141 | mainDiv.class = `main`;
142 |
143 | const recordBtn = document.createElement(`button`);
144 | const stopRecordBtn = document.createElement(`button`);
145 |
146 | recordBtn.textContent = `Start recording`;
147 | stopRecordBtn.textContent = `Stop recording`;
148 |
149 | recordBtn.addEventListener(`click`, () => {
150 | recordy.startRecording();
151 | });
152 |
153 | stopRecordBtn.addEventListener(`click`, () => {
154 | recordy.stopRecording() // TRUE == Create audio object, FALSE = return blob
155 | .then((blob) => {
156 | // create arraybuffer from blob
157 | const fileReader = new FileReader();
158 | fileReader.addEventListener(`loadend`, () => {
159 | audioCtx.decodeAudioData(fileReader.result)
160 | .then((audioBuffer) => {
161 | const id = Math.random() * 1000;
162 |
163 | looper.addTrack({
164 | id,
165 | audioBuffer,
166 | trackAdded: (bufferNode) => {
167 | bufferNode.connect(audioCtx.destination);
168 | }
169 | });
170 | });
171 | });
172 | fileReader.readAsArrayBuffer(blob);
173 | });
174 | });
175 |
176 | mainDiv.appendChild(recordBtn);
177 | mainDiv.appendChild(stopRecordBtn);
178 |
179 | document.querySelector(`body`).appendChild(mainDiv);
180 | }
181 |
182 | render(recordy, audioCtx);
183 | ```
184 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-unit/README.md:
--------------------------------------------------------------------------------
1 | # webaudio effect unit
2 | Simply create effects or other audio processors with the Web Audio API which can be enabled and disabled.
3 |
4 | ## Why?
5 | Sometimes you want to include some effects or other audioprocessors in an audio graph which can be enabled and disabled.
6 | E.g. a lowpass which can be toggled by the user.
7 | This is currently not directly possible with the Web Audio API.
8 | So the EffectUnit does this for you.
9 | Additionally, when you create an audio effect with the EffectUnit, you always define clear interfaces which manipulate the effect. Together with some metadata you provide, this can be very powerful, especially if you have a set of effects and a User Interface where the values of those effects should be editable.
10 | But more on that later.
11 |
12 | ## How?
13 | It's quite simple.
14 | ### Constructor
15 | The constructor of the EffectUnit has the following signature:
16 |
17 | EffectUnit(options: Object, audioCtx: AudioContext)
18 |
19 | ##### The options object
20 | The options object needs to have the following structure:
21 | ```javascript
22 | {
23 | name: String,
24 | effectChain: Object,
25 | values: Array
26 | }
27 | ```
28 | The fields of the options-object are clarified in details below.
29 |
30 | ##### The name field
31 | In the name field, you can specify the name of the EffectUnit you create. This is optional but recommended for a possible identification later on.
32 |
33 | ##### The effectChain field
34 | Each member of the effectChain-object will be a part of the audio graph, so they need to be a valid AudioNode. Note: You can also specify a function which returns one.
35 | See the example at the bottom for more details.
36 |
37 | ##### The values field
38 | The values of an EffectUnit represent the values an EffectUnit has and how they can be edited. Besides fixed values which are essential for the effect (e.g. the frequency of a highpass), you can invent your own values (you could e.g. add a isMuted value to a GainNode).
39 |
40 | The values field __must__ be an array. This array must contain objects with the following structure:
41 |
42 | ```javascript
43 |
44 | {
45 | name: String,
46 | options: {
47 | type: String
48 | defaultValue: ANY,
49 | [...]
50 | },
51 | set: function(effectChain, value)
52 | }
53 |
54 | ```
55 |
56 | - __name__: Specify the name of the value
57 | - __options__: With this object, you define the metadata of this value, which can later on be used e.g. by the User Interface. The whole object, with an exception of the __defaultValue__ field, does not impact the functioning of the EffectUnit: It has just a representational value. The __defaultValue__ field is the only field which makes a difference, because the __set__-function will initially be called with the __defaultValue__ as an argument.
58 | - __set__: Here, the actual function, which manipulates the value, gets implemented. This function receives the __effectChain__ as the 1° argument and the value, which needs to be set, as the 2° argument. Based on the __effectChain__-object you created before and the value, implement this function.
59 |
60 | #### The AudioContext
61 | As the 2° argument you need to specify the AudioContext you want to be used.
62 |
63 | ### Methods
64 | Now, there are a few simple methods which can be executed on an EffectUnit-object:
65 |
66 | #### Enabling
67 |
68 | .enable()
69 | Enable the effect unit.
70 |
71 | #### Disabling
72 |
73 | .disable()
74 | Disable the effect unit.
75 |
76 | #### Connecting from an EffectUnit
77 |
78 | .connect(node: AudioNode || EffectUnit)
79 | Connect the EffectUnit to an AudioNode or to another EffectUnit-object.
80 |
81 | #### Connecting to an EffectUnit
82 |
83 | Ok, good to know. But how can I connect a simple AudioNode to the EffectUnit?
84 | That's also quite simple.
85 | Just use the input field of your EffectUnit-object.
86 |
87 | anAudioNode.connect( anEffectUnit.input );
88 |
89 | #### Disconnecting all outputs
90 |
91 | This method lets you disconnect all outgoing connections of an EffectUnit.
92 |
93 | .disconnect();
94 |
95 | #### Setting a value
96 |
97 | .setValue(valueName:String, value:ANY)
98 |
99 | That's quite easy: If you want to edit a value of an EffectUnit, specify the name you defined before and the value you want to set.
100 |
101 | #### Getting the options-object of a value
102 |
103 | .getValueOptions(valueName:String)
104 |
105 | Sometimes you want to get the options object of an EffectUnit. That's how to do it!
106 |
107 | ## Installation
108 | Simple. Just type:
109 |
110 | npm i webaudio-effect-unit -S
111 |
112 | ## Example
113 | Here is a more advanced example to clarify everything:
114 |
115 | ```javascript
116 | import EffectUnit from 'webaudio-effect-unit'; // Import the effect unit
117 |
118 | const main = () => {
119 | const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
120 |
121 | // Create a gain-EffectUnit
122 | const gainEff = new EffectUnit({
123 | name: 'gain',
124 | values: [
125 | {
126 | name: 'isMuted',
127 | options: {
128 | type: 'single',
129 | defaultValue: true
130 | },
131 | set: (effectChain, val) => {
132 | effectChain.gain.gain.value = val ? 0 : 0.6;
133 | }
134 | },
135 | {
136 | name: 'gain',
137 | options: { // Those values are all just optional metadata for you (e.g. a UI-component could use this to know how to configure the range slider)
138 | type: 'range',
139 | defaultValue: 1, // The only value which could be of real interest here: If this field is present, the 'set'-method gets executed once with it's value
140 | min: 0,
141 | max: 0.6,
142 | step: 0.1
143 | },
144 | set: (effectChain, val) => {
145 | effectChain.gain.gain.value = val;
146 | }
147 | }
148 | ],
149 | effectChain: {
150 | gain: audioCtx.createGain()
151 | }
152 | }, audioCtx);
153 |
154 | const hpEff = new EffectUnit({
155 | name: 'highpass',
156 | effectChain: {
157 | hp: () => {
158 | /*
159 | Because some setup is needed to create a highpass-filter, a function,
160 | which returns a valid AudioNode, can be used.
161 | */
162 | const hp = audioCtx.createBiquadFilter();
163 | hp.type = 'highpass';
164 | return hp;
165 | }
166 | },
167 | values: [
168 | {
169 | name: 'frequency',
170 | options: {
171 | type: 'range',
172 | defaultValue: 0,
173 | min: 0,
174 | max: 200,
175 | step: 10
176 | },
177 | set: (effectChain, value) => {
178 | effectChain.hp.frequency.value = value;
179 | }
180 | }
181 | ]
182 | }, audioCtx);
183 |
184 | const osci = audioCtx.createOscillator();
185 | osci.type = 'square';
186 | osci.frequency.value = 200;
187 | osci.connect(gainEff.input);
188 |
189 | gainEff.connect( hpEff );
190 | hpEff.connect( audioCtx.destination );
191 |
192 | osci.start();
193 |
194 | const hpOptions = hpEff.getValueOptions('frequency');
195 | let currHpFreq = hpOptions.defaultValue;
196 | let up = true;
197 | window.setInterval(() => {
198 | if(up)
199 | currHpFreq += hpOptions.step;
200 | else
201 | currHpFreq -= hpOptions.step;
202 |
203 | if(currHpFreq >= hpOptions.max)
204 | up = false;
205 |
206 | if(currHpFreq <= hpOptions.min)
207 | up = true;
208 |
209 | if(currHpFreq % 100 === 0)
210 | gainEff.setValue('isMuted', true);
211 | else
212 | gainEff.setValue('isMuted', false);
213 |
214 | hpEff.setValue('frequency', currHpFreq);
215 | }, 100);
216 |
217 | };
218 |
219 | main();
220 | ```
221 |
--------------------------------------------------------------------------------
/packages/wrecorder/src/recorder.js:
--------------------------------------------------------------------------------
1 | /* eslint no-use-before-define: 0 */
2 | /* eslint no-shadow: 0 */
3 | import InlineWorker from 'inline-worker';
4 |
5 | export class Recorder {
6 | config = {
7 | bufferLen: 4096,
8 | numChannels: 2,
9 | mimeType: `audio/wav`
10 | };
11 |
12 | recording = false;
13 |
14 | callbacks = {
15 | getBuffer: [],
16 | exportWAV: []
17 | };
18 |
19 | constructor(source, cfg) {
20 | Object.assign(this.config, cfg);
21 | this.context = source.context;
22 | this.node = (this.context.createScriptProcessor ||
23 | this.context.createJavaScriptNode).call(this.context,
24 | this.config.bufferLen, this.config.numChannels, this.config.numChannels);
25 |
26 | this.node.onaudioprocess = (e) => {
27 | if (!this.recording) return;
28 |
29 | const buffer = [];
30 | for (let channel = 0; channel < this.config.numChannels; channel++)
31 | buffer.push(e.inputBuffer.getChannelData(channel));
32 |
33 | this.worker.postMessage({
34 | command: `record`,
35 | buffer
36 | });
37 | };
38 |
39 | source.connect(this.node);
40 | this.node.connect(this.context.destination); // this should not be necessary
41 |
42 | const self = {};
43 | this.worker = new InlineWorker(function on() {
44 | let recLength = 0;
45 | let recBuffers = [];
46 | let sampleRate;
47 | let numChannels;
48 |
49 | this.onmessage = function onmsg(e) {
50 | switch (e.data.command) {
51 | case `init`:
52 | init(e.data.config);
53 | break;
54 | case `record`:
55 | record(e.data.buffer);
56 | break;
57 | case `exportWAV`:
58 | exportWAV(e.data.type);
59 | break;
60 | case `getBuffer`:
61 | getBuffer();
62 | break;
63 | case `clear`:
64 | clear();
65 | break;
66 | default:
67 | throw new Error(`[wrecorder] Invalid command!`);
68 | }
69 | };
70 |
71 | function init(config) {
72 | sampleRate = config.sampleRate;
73 | numChannels = config.numChannels;
74 | initBuffers();
75 | }
76 |
77 | function record(inputBuffer) {
78 | for (let channel = 0; channel < numChannels; channel++)
79 | recBuffers[channel].push(inputBuffer[channel]);
80 |
81 | recLength += inputBuffer[0].length;
82 | }
83 |
84 | function exportWAV(type) {
85 | const buffers = [];
86 | for (let channel = 0; channel < numChannels; channel++)
87 | buffers.push(mergeBuffers(recBuffers[channel], recLength));
88 |
89 | let interleaved;
90 | if (numChannels === 2)
91 | interleaved = interleave(buffers[0], buffers[1]);
92 | else
93 | interleaved = buffers[0];
94 |
95 | const dataview = encodeWAV(interleaved);
96 | const audioBlob = new Blob([dataview], { type });
97 |
98 | this.postMessage({ command: `exportWAV`, data: audioBlob });
99 | }
100 |
101 | function getBuffer() {
102 | const buffers = [];
103 | for (let channel = 0; channel < numChannels; channel++)
104 | buffers.push(mergeBuffers(recBuffers[channel], recLength));
105 |
106 | this.postMessage({ command: `getBuffer`, data: buffers });
107 | }
108 |
109 | function clear() {
110 | recLength = 0;
111 | recBuffers = [];
112 | initBuffers();
113 | }
114 |
115 | function initBuffers() {
116 | for (let channel = 0; channel < numChannels; channel++)
117 | recBuffers[channel] = [];
118 | }
119 |
120 | function mergeBuffers(recBuffers, recLength) {
121 | const result = new Float32Array(recLength);
122 | let offset = 0;
123 | for (let i = 0; i < recBuffers.length; i++) {
124 | result.set(recBuffers[i], offset);
125 | offset += recBuffers[i].length;
126 | }
127 | return result;
128 | }
129 |
130 | function interleave(inputL, inputR) {
131 | const length = inputL.length + inputR.length;
132 | const result = new Float32Array(length);
133 |
134 | let index = 0;
135 | let inputIndex = 0;
136 |
137 | while (index < length) {
138 | result[index++] = inputL[inputIndex];
139 | result[index++] = inputR[inputIndex];
140 | inputIndex++;
141 | }
142 | return result;
143 | }
144 |
145 | function floatTo16BitPCM(output, offset, input) {
146 | for (let i = 0; i < input.length; i++, offset += 2) {
147 | const s = Math.max(-1, Math.min(1, input[i]));
148 | output.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
149 | }
150 | }
151 |
152 | function writeString(view, offset, string) {
153 | for (let i = 0; i < string.length; i++)
154 | view.setUint8(offset + i, string.charCodeAt(i));
155 | }
156 |
157 | function encodeWAV(samples) {
158 | const buffer = new ArrayBuffer(44 + (samples.length * 2));
159 | const view = new DataView(buffer);
160 |
161 | /* RIFF identifier */
162 | writeString(view, 0, `RIFF`);
163 | /* RIFF chunk length */
164 | view.setUint32(4, 36 + (samples.length * 2), true);
165 | /* RIFF type */
166 | writeString(view, 8, `WAVE`);
167 | /* format chunk identifier */
168 | writeString(view, 12, `fmt `);
169 | /* format chunk length */
170 | view.setUint32(16, 16, true);
171 | /* sample format (raw) */
172 | view.setUint16(20, 1, true);
173 | /* channel count */
174 | view.setUint16(22, numChannels, true);
175 | /* sample rate */
176 | view.setUint32(24, sampleRate, true);
177 | /* byte rate (sample rate * block align) */
178 | view.setUint32(28, sampleRate * 4, true);
179 | /* block align (channel count * bytes per sample) */
180 | view.setUint16(32, numChannels * 2, true);
181 | /* bits per sample */
182 | view.setUint16(34, 16, true);
183 | /* data chunk identifier */
184 | writeString(view, 36, `data`);
185 | /* data chunk length */
186 | view.setUint32(40, samples.length * 2, true);
187 |
188 | floatTo16BitPCM(view, 44, samples);
189 |
190 | return view;
191 | }
192 | }, self);
193 |
194 | this.worker.postMessage({
195 | command: `init`,
196 | config: {
197 | sampleRate: this.context.sampleRate,
198 | numChannels: this.config.numChannels
199 | }
200 | });
201 |
202 | this.worker.onmessage = (e) => {
203 | const cb = this.callbacks[e.data.command].pop();
204 | if (typeof cb === `function`)
205 | cb(e.data.data);
206 | };
207 | }
208 |
209 |
  // Begin capturing audio. NOTE(review): this only raises a flag; the
  // audio-processing callback (wired up earlier in the constructor, not
  // fully visible here) presumably consults `this.recording` before
  // forwarding input buffers to the worker — confirm against the constructor.
  record() {
    this.recording = true;
  }
213 |
  // Stop capturing audio by clearing the recording flag. Already-captured
  // buffers stay in the worker until clear() is called.
  stop() {
    this.recording = false;
  }
217 |
  // Discard everything recorded so far: the worker's `clear` command handler
  // (defined in the constructor) resets its length counter and re-initializes
  // the per-channel buffers.
  clear() {
    this.worker.postMessage({ command: `clear` });
  }
221 |
222 | getBuffer(cb) {
223 | cb = cb || this.config.callback;
224 | if (!cb) throw new Error(`Callback not set`);
225 |
226 | this.callbacks.getBuffer.push(cb);
227 |
228 | this.worker.postMessage({ command: `getBuffer` });
229 | }
230 |
231 | exportWAV(cb, mimeType) {
232 | mimeType = mimeType || this.config.mimeType;
233 | cb = cb || this.config.callback;
234 | if (!cb) throw new Error(`Callback not set`);
235 |
236 | this.callbacks.exportWAV.push(cb);
237 |
238 | this.worker.postMessage({
239 | command: `exportWAV`,
240 | type: mimeType
241 | });
242 | }
243 |
244 | static
245 | forceDownload(blob, filename) {
246 | const url = (window.URL || window.webkitURL).createObjectURL(blob);
247 | const link = window.document.createElement(`a`);
248 | link.href = url;
249 | link.download = filename || `output.wav`;
250 | link.click();
251 | }
252 | }
253 |
254 | export default Recorder;
255 |
--------------------------------------------------------------------------------
/packages/webaudio-effect-unit/dist/webaudio-effect-unit.min.js:
--------------------------------------------------------------------------------
1 | !function(t,n){"object"==typeof exports&&"object"==typeof module?module.exports=n():"function"==typeof define&&define.amd?define("webaudio-effect-unit",[],n):"object"==typeof exports?exports["webaudio-effect-unit"]=n():t["webaudio-effect-unit"]=n()}(this,function(){return function(t){function n(r){if(e[r])return e[r].exports;var o=e[r]={i:r,l:!1,exports:{}};return t[r].call(o.exports,o,o.exports,n),o.l=!0,o.exports}var e={};return n.m=t,n.c=e,n.i=function(t){return t},n.d=function(t,e,r){n.o(t,e)||Object.defineProperty(t,e,{configurable:!1,enumerable:!0,get:r})},n.n=function(t){var e=t&&t.__esModule?function(){return t.default}:function(){return t};return n.d(e,"a",e),e},n.o=function(t,n){return Object.prototype.hasOwnProperty.call(t,n)},n.p="",n(n.s=12)}([function(t,n,e){t.exports=!e(2)(function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a})},function(t,n){var e=t.exports={version:"2.4.0"};"number"==typeof __e&&(__e=e)},function(t,n){t.exports=function(t){try{return!!t()}catch(t){return!0}}},function(t,n){var e=t.exports="undefined"!=typeof window&&window.Math==Math?window:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")();"number"==typeof __g&&(__g=e)},function(t,n){t.exports=function(t){return"object"==typeof t?null!==t:"function"==typeof t}},function(t,n){t.exports=function(t){if(void 0==t)throw TypeError("Can't call method on "+t);return t}},function(t,n,e){var r=e(3),o=e(1),i=e(25),u=e(29),f=function(t,n,e){var c,a,s,l=t&f.F,p=t&f.G,d=t&f.S,h=t&f.P,v=t&f.B,y=t&f.W,b=p?o:o[n]||(o[n]={}),x=b.prototype,_=p?r:d?r[n]:(r[n]||{}).prototype;p&&(e=n);for(c in e)(a=!l&&_&&void 0!==_[c])&&c in b||(s=a?_[c]:e[c],b[c]=p&&"function"!=typeof _[c]?e[c]:v&&a?i(s,r):y&&_[c]==s?function(t){var n=function(n,e,r){if(this instanceof t){switch(arguments.length){case 0:return new t;case 1:return new t(n);case 2:return new t(n,e)}return new t(n,e,r)}return t.apply(this,arguments)};return 
n.prototype=t.prototype,n}(s):h&&"function"==typeof s?i(Function.call,s):s,h&&((b.virtual||(b.virtual={}))[c]=s,t&f.R&&x&&!x[c]&&u(x,c,s)))};f.F=1,f.G=2,f.S=4,f.P=8,f.B=16,f.W=32,f.U=64,f.R=128,t.exports=f},function(t,n,e){var r=e(24);t.exports=Object("z").propertyIsEnumerable(0)?Object:function(t){return"String"==r(t)?t.split(""):Object(t)}},function(t,n,e){var r=e(22),o=e(30),i=e(42),u=Object.defineProperty;n.f=e(0)?Object.defineProperty:function(t,n,e){if(r(t),n=i(n,!0),r(e),o)try{return u(t,n,e)}catch(t){}if("get"in e||"set"in e)throw TypeError("Accessors not supported!");return"value"in e&&(t[n]=e.value),t}},function(t,n){var e=Math.ceil,r=Math.floor;t.exports=function(t){return isNaN(t=+t)?0:(t>0?r:e)(t)}},function(t,n,e){var r=e(7),o=e(5);t.exports=function(t){return r(o(t))}},function(t,n,e){"use strict";function r(t){return t&&t.__esModule?t:{default:t}}Object.defineProperty(n,"__esModule",{value:!0});var o=e(16),i=r(o),u=e(17),f=r(u),c=e(13),a=function(){function t(){var n=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{name:"",effectChain:{},values:[]},e=arguments[1];if((0,i.default)(this,t),this.isEffectUnit=!0,this.wasInitialized=!1,!e)throw new Error("The AudioContext specified (3° parameter) is not 
defined!");this.name=name,this.audioCtx=e,this.options=n,this.effectGain=this.audioCtx.createGain(),this.directGain=this.audioCtx.createGain(),this.output=this.audioCtx.createGain(),this.input=this.audioCtx.createGain(),this.input.connect(this.effectGain),this.input.connect(this.directGain),this.directGain.connect(this.output)}return(0,f.default)(t,[{key:"init",value:function(){this.wasInitialized||(this.effectChain=(0,c.functionsToValues)(this.options.effectChain),this.values=(0,c.bindMethodsToValues)(this.options.values,this.effectChain),this.values.forEach(function(t){t.options.defaultValue&&t.set(t.options.defaultValue)}),this.setupEffectChain(),this.wasInitialized=!0)}},{key:"enable",value:function(){this.init(),this.effectGain.gain.value=1,this.directGain.gain.value=0}},{key:"disable",value:function(){this.effectGain.gain.value=0,this.directGain.gain.value=1}},{key:"connect",value:function(t){t.isEffectUnit||t.input?this.output.connect(t.input):this.output.connect(t)}},{key:"setValue",value:function(t,n){(0,c.filterValue)(this.values,t).set(n)}},{key:"getValueOptions",value:function(t){return(0,c.filterValue)(this.values,t).options}},{key:"setupEffectChain",value:function(){var n=(0,c.objToArray)(this.effectChain);if(n.length>=1){t.connectNodes(this.effectGain,n[0]);for(var e=0;es;)if((f=c[s++])!=f)return!0}else for(;a>s;s++)if((t||s in c)&&c[s]===e)return t||s||0;return!t&&-1}}},function(t,n){var e={}.toString;t.exports=function(t){return e.call(t).slice(8,-1)}},function(t,n,e){var r=e(21);t.exports=function(t,n,e){if(r(t),void 0===n)return t;switch(e){case 1:return function(e){return t.call(n,e)};case 2:return function(e,r){return t.call(n,e,r)};case 3:return function(e,r,o){return t.call(n,e,r,o)}}return function(){return t.apply(n,arguments)}}},function(t,n,e){var r=e(4),o=e(3).document,i=r(o)&&r(o.createElement);t.exports=function(t){return 
i?o.createElement(t):{}}},function(t,n){t.exports="constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf".split(",")},function(t,n){var e={}.hasOwnProperty;t.exports=function(t,n){return e.call(t,n)}},function(t,n,e){var r=e(8),o=e(36);t.exports=e(0)?function(t,n,e){return r.f(t,n,o(1,e))}:function(t,n,e){return t[n]=e,t}},function(t,n,e){t.exports=!e(0)&&!e(2)(function(){return 7!=Object.defineProperty(e(26)("div"),"a",{get:function(){return 7}}).a})},function(t,n,e){"use strict";var r=e(34),o=e(32),i=e(35),u=e(41),f=e(7),c=Object.assign;t.exports=!c||e(2)(function(){var t={},n={},e=Symbol(),r="abcdefghijklmnopqrst";return t[e]=7,r.split("").forEach(function(t){n[t]=t}),7!=c({},t)[e]||Object.keys(c({},n)).join("")!=r})?function(t,n){for(var e=u(t),c=arguments.length,a=1,s=o.f,l=i.f;c>a;)for(var p,d=f(arguments[a++]),h=s?r(d).concat(s(d)):r(d),v=h.length,y=0;v>y;)l.call(d,p=h[y++])&&(e[p]=d[p]);return e}:c},function(t,n){n.f=Object.getOwnPropertySymbols},function(t,n,e){var r=e(28),o=e(10),i=e(23)(!1),u=e(37)("IE_PROTO");t.exports=function(t,n){var e,f=o(t),c=0,a=[];for(e in f)e!=u&&r(f,e)&&a.push(e);for(;n.length>c;)r(f,e=n[c++])&&(~i(a,e)||a.push(e));return a}},function(t,n,e){var r=e(33),o=e(27);t.exports=Object.keys||function(t){return r(t,o)}},function(t,n){n.f={}.propertyIsEnumerable},function(t,n){t.exports=function(t,n){return{enumerable:!(1&t),configurable:!(2&t),writable:!(4&t),value:n}}},function(t,n,e){var r=e(38)("keys"),o=e(43);t.exports=function(t){return r[t]||(r[t]=o(t))}},function(t,n,e){var r=e(3),o=r["__core-js_shared__"]||(r["__core-js_shared__"]={});t.exports=function(t){return o[t]||(o[t]={})}},function(t,n,e){var r=e(9),o=Math.max,i=Math.min;t.exports=function(t,n){return t=r(t),t<0?o(t+n,0):i(t,n)}},function(t,n,e){var r=e(9),o=Math.min;t.exports=function(t){return t>0?o(r(t),9007199254740991):0}},function(t,n,e){var r=e(5);t.exports=function(t){return Object(r(t))}},function(t,n,e){var 
r=e(4);t.exports=function(t,n){if(!r(t))return t;var e,o;if(n&&"function"==typeof(e=t.toString)&&!r(o=e.call(t)))return o;if("function"==typeof(e=t.valueOf)&&!r(o=e.call(t)))return o;if(!n&&"function"==typeof(e=t.toString)&&!r(o=e.call(t)))return o;throw TypeError("Can't convert object to primitive value")}},function(t,n){var e=0,r=Math.random();t.exports=function(t){return"Symbol(".concat(void 0===t?"":t,")_",(++e+r).toString(36))}},function(t,n,e){var r=e(6);r(r.S+r.F,"Object",{assign:e(31)})},function(t,n,e){var r=e(6);r(r.S+r.F*!e(0),"Object",{defineProperty:e(8).f})}])});
--------------------------------------------------------------------------------
/packages/soundcyclejs/src/soundcycle.js:
--------------------------------------------------------------------------------
1 | import Recordy from 'recordy';
2 | import AudioLooper from 'audiolooper';
3 | import Wmstr from 'wmstr';
4 | import AudioBufferChnl from 'audiobufferchnl';
5 |
6 | import { v4 } from 'uuid';
7 |
/**
 * SoundCycle — a browser loopstation engine.
 *
 * Combines the sountility modules: Recordy captures microphone input,
 * AudioLooper instances ("lanes") keep looped tracks in sync,
 * AudioBufferChnl wraps single recorded buffers, and Wmstr is the master
 * channel through which all output is routed to the speakers.
 */
export default class SoundCycle {

  recorder;  // Recordy instance (microphone input)
  wmstr;     // Wmstr master channel
  audioCtx;  // shared AudioContext

  // How stopRecording() should turn a finished recording into a track.
  static MODES = {
    ADD_TO_LANE: `ADD_TO_LANE`,
    NEW_LANE: `NEW_LANE`,
    SINGLE_SEQUENCE: `SINGLE_SEQUENCE`,
    FREE_LOOPING: `FREE_LOOPING`
  };

  tracks = new Map();   // trackId -> { chnl, looperId? } (plus master/recorder entries)
  loopers = new Map();  // looperId ("lane") -> AudioLooper
  currentLane = ``;
  currentMode;
  projectName;

  // Reserved ids so the master and recorder chnls can be addressed like tracks.
  static masterChnlId = `MASTER_ID`;
  static recorderChnlId = `RECORDER_ID`;

  /**
   * @param {Function} [readyCb] - Invoked once the user has granted audio
   *   input (receives whatever Recordy#getInput resolves to).
   */
  constructor(readyCb = () => {}) {
    const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
    this.audioCtx = audioCtx;
    this.recorder = new Recordy(audioCtx);
    this.recorder.getInput()
      .then(readyCb);

    this.wmstr = new Wmstr(audioCtx);
    this.currentMode = SoundCycle.MODES.NEW_LANE;

    this.tracks.set(SoundCycle.masterChnlId, this.wmstr);
    this.tracks.set(SoundCycle.recorderChnlId, this.recorder);

    this.wmstr.connect(this.audioCtx.destination);
  }

  setProjectName(name) {
    this.projectName = name;
  }

  /**
   * @returns {string} The project file name, always ending in `.wav`;
   *   falls back to a date-based name when no project name was set.
   */
  getProjectName() {
    if (!this.projectName) {
      const date = new Date();
      return `project-${date.getDate()}-${date.getMonth() + 1}-${date.getFullYear()}.wav`;
    }

    return `${this.projectName}.wav`;
  }

  /* eslint-disable */
  getModes() {
    return SoundCycle.MODES;
  }
  /* eslint-enable */
  // ^ fixed: the original closing comment read `eslint-ensable`, so the
  //   disable above was never re-enabled for the rest of the file.

  getCurrentMode() {
    return this.currentMode;
  }

  setMode(mode) {
    this.currentMode = mode;
  }

  setCurrentLane(laneId) {
    this.currentLane = laneId;
  }

  startRecording() {
    this.recorder.startRecording();
  }

  /**
   * Stop the recorder and turn the recording into a track, depending on the
   * current mode (see SoundCycle.MODES).
   * @returns {Promise<{chnlId: string, laneId?: string}>} Ids of the created
   *   track and, for NEW_LANE, of the created lane.
   * @throws {Error} For an unknown mode, or (ADD_TO_LANE) an inexistent lane.
   */
  async stopRecording() {
    const newTrackId = v4();

    switch (this.currentMode) {

      case SoundCycle.MODES.NEW_LANE: {
        // Create a fresh lane (AudioLooper) and add the recording as its first track.
        const looper = new AudioLooper(this.audioCtx);
        const audioBuffer = await this.recorder.stopRecording({ type: `buffer` });
        const newLooperId = v4();

        looper.addTrack({
          id: newTrackId,
          audioBuffer,
          trackAdded: (bufferSourceNode) => {
            this.trackAddedToLaneCb(bufferSourceNode, newTrackId, newLooperId);

            if (!this.loopers.has(newLooperId))
              this.loopers.set(newLooperId, looper);
          } });

        return {
          chnlId: newTrackId,
          laneId: newLooperId
        };
      }

      case SoundCycle.MODES.SINGLE_SEQUENCE:
        // One-shot track: played on demand, not looped.
        return this.createBufferTrack(newTrackId, { loop: false });

      case SoundCycle.MODES.ADD_TO_LANE: {
        if (!this.loopers.has(this.currentLane))
          throw new Error(`You tried to access an inexistent lane!`);

        const looper = this.loopers.get(this.currentLane);

        const audioBuffer = await this.recorder.stopRecording({ type: `buffer` });

        looper.addTrack({
          id: newTrackId,
          audioBuffer,
          trackAdded: (bufferSourceNode) => {
            this.trackAddedToLaneCb(bufferSourceNode, newTrackId, this.currentLane);
          } });

        return {
          chnlId: newTrackId
        };
      }

      case SoundCycle.MODES.FREE_LOOPING:
        // Independent loop: starts immediately and loops on its own.
        return this.createBufferTrack(newTrackId, { loop: true });

      default:
        throw new Error(`Invalid method!`);

    }
  }

  /**
   * Shared path for SINGLE_SEQUENCE and FREE_LOOPING (the original code
   * duplicated this verbatim): stop the recorder, fade the buffer edges,
   * wrap the buffer in an AudioBufferChnl connected to the master channel
   * and register it as a track.
   * @param {string} trackId - Id under which the track is registered.
   * @param {{loop?: boolean}} [options] - When `loop` is true, the source
   *   loops and playback starts immediately.
   * @returns {Promise<{chnlId: string}>}
   */
  async createBufferTrack(trackId, { loop = false } = {}) {
    const audioBuffer = await this.recorder.stopRecording({ type: `buffer` });

    const bufferNode = this.audioCtx.createBufferSource();
    bufferNode.buffer = audioBuffer;
    if (loop)
      bufferNode.loop = true;

    // Avoid clicks at the buffer edges.
    SoundCycle.applyFade(audioBuffer);

    const audioBufferChnl = new AudioBufferChnl(this.audioCtx, bufferNode);
    audioBufferChnl.connect(this.wmstr);

    if (loop)
      bufferNode.start(0);

    this.tracks.set(trackId, {
      chnl: audioBufferChnl,
    });

    return {
      chnlId: trackId
    };
  }

  /**
   * Apply a linear fade-in and fade-out (in place) to every channel.
   * @param {AudioBuffer} audioBuffer - Anything exposing `numberOfChannels`
   *   and `getChannelData(channel)`.
   * @param {number} [fadeLength] - Fade length in samples (default 100,
   *   as in the original inline loops).
   */
  static applyFade(audioBuffer, fadeLength = 100) {
    for (let channel = 0; channel < audioBuffer.numberOfChannels; channel++) {
      const channelData = audioBuffer.getChannelData(channel);
      for (let i = 0; i < fadeLength && i < channelData.length; i++) {
        const fadeOutPos = channelData.length - i - 1;
        channelData[i] = (channelData[i] * i) / fadeLength;
        channelData[fadeOutPos] = (channelData[fadeOutPos] * i) / fadeLength;
      }
    }
  }

  /**
   * Pause a playing track — either directly (single-sequence / free-looping
   * tracks) or through its lane's looper.
   * @throws {Error} When the id is unknown.
   */
  stopTrack({ id }) {
    if (!this.tracks.has(id))
      throw new Error(`You tried to stop an inexistent track!`);

    const track = this.tracks.get(id);

    if (!track.looperId)
      track.chnl.stop();
    else {
      const looper = this.loopers.get(track.looperId);
      looper.pauseTrack({ id });
    }
  }

  /**
   * (Re)start a track — either directly or through its lane's looper.
   * @throws {Error} When the id is unknown.
   */
  playTrack({ id }) {
    if (!this.tracks.has(id))
      throw new Error(`You tried to play an inexistent track!`);

    const track = this.tracks.get(id);
    if (!track.looperId)
      track.chnl.play();
    else {
      const looper = this.loopers.get(track.looperId);
      looper.playTrack({ id });
    }
  }

  /**
   * Remove a track: detach it from its looper (if any), stop its chnl when
   * it has an active buffer source, and forget it.
   * @throws {Error} When the id is unknown.
   */
  removeTrack({ id }) {
    if (!this.tracks.has(id))
      throw new Error(`You tried to remove an inexistent track!`);

    const track = this.tracks.get(id);
    if (track.looperId) {
      const looper = this.loopers.get(track.looperId);
      looper.removeTrack({ id });
    }
    if (track.chnl.bufferSourceNode)
      track.chnl.stop();

    this.tracks.delete(id);
  }

  /**
   * Remove a whole lane and every track that belongs to it.
   * @throws {Error} When the looperId is unknown.
   */
  removeLane({ looperId }) {
    if (!this.loopers.has(looperId))
      throw new Error(`You tried to remove an inexistent lane!`);

    const looper = this.loopers.get(looperId);

    // Search all tracks of looper and delete them
    this.tracks.forEach(({ looperId: trackLooperId }, trackId) => {
      if (trackLooperId === looperId) {
        looper.removeTrack({ id: trackId });
        this.tracks.delete(trackId);
      }
    });

    this.loopers.delete(looperId);
  }

  // Enable an effect on a chnl (see webaudio-effect-units-collection for names).
  enableEffect({ chnlId, effectName }) {
    const chnlToEdit = this.getChnlById(chnlId);
    chnlToEdit.addEffect(effectName);
    SoundCycle.getEffectByName(chnlToEdit, effectName).enable();
  }

  // Disable an effect on a chnl.
  disableEffect({ chnlId, effectName }) {
    const chnlToEdit = this.getChnlById(chnlId);
    chnlToEdit.removeEffect(effectName);
    SoundCycle.getEffectByName(chnlToEdit, effectName).disable();
  }

  // Set one value of one effect (valueType/value semantics come from
  // webaudio-effect-unit).
  setEffectValue({ chnlId, effectName, valueType, value }) {
    const chnlToEdit = this.getChnlById(chnlId);

    SoundCycle.getEffectByName(chnlToEdit, effectName).setValue(valueType, value);
  }

  /* eslint-disable */
  getMasterChnlId() {
    return SoundCycle.masterChnlId;
  }
  /* eslint-enable */

  /* eslint-disable */
  getRecorderChnlId() {
    return SoundCycle.recorderChnlId;
  }
  /* eslint-enable */

  // Start recording the master output (whole-project recording).
  startProjectRecording() {
    this.wmstr.startRecording();
  }

  // Stop the project recording; Wmstr receives the target file name.
  stopProjectRecording() {
    this.wmstr.stopRecording(this.getProjectName());
  }

  getAnalyser({ chnlId }) {
    return this.getChnlById(chnlId).getAnalyser();
  }

  /* INTERIOR FUNCTIONALITIES */

  /**
   * Resolve a chnl by id; the master and recorder ids map to their objects
   * directly, everything else is looked up in the tracks map.
   * @throws {Error} When the id is unknown.
   */
  getChnlById(id) {
    if (id === SoundCycle.masterChnlId)
      return this.wmstr;
    else if (id === SoundCycle.recorderChnlId)
      return this.recorder;

    if (!this.tracks.has(id))
      throw new Error(`You tried to access an inexistent track!`);
    return this.tracks.get(id).chnl;
  }

  /**
   * @returns {object} The named effect unit of the chnl.
   * @throws {Error} When the chnl has no such effect.
   */
  static getEffectByName(chnl, effectName) {
    if (chnl.effects[effectName])
      return chnl.effects[effectName];

    throw new Error(`You tried to access an inexistent effect!`);
  }

  /**
   * Callback fired by a looper once a track's buffer source exists: either
   * hand the node to the already-registered chnl, or create and register a
   * new AudioBufferChnl connected to the master channel.
   */
  trackAddedToLaneCb(bufferSourceNode, newTrackId, looperId) {
    if (this.tracks.has(newTrackId)) {
      const { chnl } = this.tracks.get(newTrackId);
      chnl.setBufferSourceNode(bufferSourceNode);
    } else {
      const audioBufferChnl = new AudioBufferChnl(this.audioCtx, bufferSourceNode);
      audioBufferChnl.connect(this.wmstr);

      this.tracks.set(newTrackId, {
        chnl: audioBufferChnl,
        looperId
      });
    }
  }

}
332 |
--------------------------------------------------------------------------------
/packages/soundcyclejs/README.md:
--------------------------------------------------------------------------------
1 | # soundcyclejs - cycle up your sounds
2 | ## a javascript looping library for the browser
3 |
4 | # Why soundcyclejs?
5 | Firstly, I was always on the search for a good software loopstation. But I didn't find any real good one. Secondly, I'm a convinced javascript and web developer. That's why I decided to create __soundcyclejs__.
6 | I combined both my passion for beatboxing and my passion for programming into this library. So I am very passionate about it and motivated to continue development till I think this library reached perfection (so maybe never, nothing and nobody except my girlfriend is perfect).
7 |
8 | ## Here's an example of what I was able to create with soundcyclejs
9 | I am beatboxing on the mic and a friend of mine plays the guitar. It's just a short sample which shows what can be done with soundcyclejs with not that much effort.
10 | [Click here to go to the sample](https://scriptify.github.io/files/ohyeaa.wav)
11 |
12 | # How does it work?
13 | soundcyclejs is a looping library which, under the hood, uses a set of modules which could also be used independently. This library is just the product of the combination of those modules.
14 | All those modules, including this one, are now part of the __sountility__ collection.
15 | This module uses every single module of the sountility collection.
16 |
17 | If you want to gain a deeper knowledge on how soundcyclejs works, just have a look at the __sountility__ collection and its modules.
18 |
19 | But essentially, it's dead simple to use it.
20 |
21 | # Using soundcyclejs
22 |
23 | With soundcyclejs you record tracks and specify how they should be handled. In addition, you can control the overall output of the looper with the [Wmstr](../wmstr/README.md) module.
24 |
25 | You can specify if you want your track to be put in a lane, if you want to create a new lane with it or if you want to use it as a single sequence track. More on that later.
26 |
27 | ## Understanding lanes
28 |
29 | The most important concept to understand here is the concept of lanes (I called them like this and during this document I will continue to do so, but you can invent your own name :sunglasses:).
30 |
31 | In this context, a lane is it's own independent looping unit.
32 | To fully understand this chapter, I would recommend you reading the documentation of the [AudioLooper](../audiolooper/README.md) module.
33 |
34 | Now, as you know what an AudioLooper is and how it works, let's continue.
35 | A lane essentially consists of an AudioLooper-object and it's tracks. That's it. Not much magic about it. :sparkles:
36 |
37 | ## Recording and looping tracks
38 |
39 | When you are creating a soundcyclejs-object, you need to construct it as follows:
40 | ### Constructing
41 | ```javascript
42 | new SoundCycle(gotInput)
43 | ```
44 |
1. __gotInput__: This must be a function and gets executed as soon as the user grants audio input in the browser. It can accept one parameter, a __boolean__: If it evaluates to true, the audio input was successfully retrieved. If it evaluates to false, there was an error.
46 |
47 | ### Recording audio
48 | Now let's talk about the more interesting part: Recording.
49 | Under the hood, soundcyclejs uses the [Recordy](../recordy/README.md) module.
50 | But the recorder of soundcyclejs has one important extension:
51 | the _.stopRecording_ method. This method now doesn't simply stop the recording and pass an audio object to the user of the module. You decide what you want to do:
52 | 1. __Create a new lane with the recorded track__
53 | 2. __Add the track to an existing lane__
54 | 3. __Create a single sequence track__
55 |
56 | Use this method as follows:
57 | ```javascript
58 | soundcycle.recorder.startRecording()
59 | ```
60 | The use of this method gets clarified below.
61 |
62 | #### Recording single sequence tracks
63 | Now you will certainly ask: _What the hell are single sequence tracks_:question:
64 |
65 | I use this word to clarify that those tracks are recorded with the intention to be just played once in a while (as soon as the user wants it to be played) and not to be looped over.
66 |
67 | They could e.g. be used for effects or drums (you could even use them like a launchpad).
68 |
And to actually record single sequence tracks, do the following:
70 | ```javascript
71 | mySoundcycleObject.setMode( mySoundcycleObject.getModes().SINGLE_SEQUENCE );
72 | mySoundcycleObject.stopRecording().then(({ chnlId }) => {
73 | // Do something with the chnlId, e.g. saving it somewhere for later use
74 | });
75 | ```
76 | Just set the mode to SINGLE_SEQUENCE and stop recording (of course, you started recording before). The stop recording method then returns a promise which resolves to an object containing the id of the track you just created.
77 |
78 | #### Creating a new lane
79 |
80 | To create a new lane, use the _.stopRecording_ method as follows:
81 |
82 | ```javascript
83 | mySoundcycleObject.setMode( mySoundcycleObject.getModes().NEW_LANE );
84 | mySoundcycleObject.stopRecording().then(({ chnlId, looperId }) => {
85 | // Do something with the chnlId and the looperId, e.g. saving them somewhere for later use
86 | });
87 | ```
88 | Just set the mode to NEW_LANE and stop recording (of course, you started recording before). The stop recording method then returns a promise which resolves to an object containing the id of the track and the id of the looper you just created.
89 |
90 | #### Adding a new track to a lane
91 |
92 | To add a track to an existing lane, use the _.stopRecording_ method as follows:
93 |
94 | ```javascript
95 | mySoundcycleObject.setMode( mySoundcycleObject.getModes().ADD_TO_LANE );
96 | mySoundcycleObject.setCurrentLane(currentLaneId);
97 | mySoundcycleObject.stopRecording().then(({ chnlId }) => {
98 | // Do something with the chnlId, e.g. saving it somewhere for later use
99 | });
100 | ```
Just set the mode to ADD_TO_LANE and stop recording (of course, you started recording before). The stop recording method then returns a promise which resolves to an object containing the id of the track you just created. But before you stop recording, you have to specify the lane you want to add the track to. To do so, use the _.setCurrentLane_ method. This method expects one parameter: The id of the lane (you got it before when you created the lane).
102 |
103 | #### Pausing tracks
104 | ```javascript
105 | .stopTrack({ id })
106 | ```
107 |
108 | To stop any playing track (be it a track of a lane or a single sequence track), use this method. It expects one parameter which has to be an object with a field _id_ (the id of the chnl you got when you stopped recording).
109 |
110 | #### Playing tracks
111 | ```javascript
112 | .playTrack({ id })
113 | ```
114 |
115 | To play any track (be it a paused track of a lane or a single sequence track), use this method. It expects one parameter which has to be an object with a field _id_ (the id of the chnl you got when you stopped recording).
116 |
117 | #### Removing a track
118 | ```javascript
119 | .removeTrack({ id })
120 | ```
121 |
122 | To remove any track (be it a paused track of a lane or a single sequence track), use this method. It expects one parameter which has to be an object with a field _id_ (the id of the chnl you got when you stopped recording).
123 |
124 | #### Removing a lane
125 | ```javascript
126 | .removeLane({ looperId })
127 | ```
128 |
To remove a lane, use this method. It accepts one parameter which must be an object containing the field _looperId_ (the id of the lane you want to delete, you received it when you stopped recording with the __NEW_LANE__ mode).
130 |
131 | ### Handling effects
132 |
133 | #### Enabling effects
134 | ```javascript
135 | .enableEffect({ chnlId, effectName })
136 | ```
137 | All tracks of soundcycle are based on the chnl module, which offers a great number of effects (for further information and a list of all available effects, have a look at the webaudio-effect-units-collection).
138 |
To enable a specific effect, use this method. It accepts one parameter, which must be an object containing the following fields:
140 | 1. __chnlId__: The id of the chnl whose effect you want to enable.
141 | 2. __effectName__: The name of the effect you want to enable.
142 |
143 |
144 | #### Disabling effects
145 | ```javascript
146 | .disableEffect({ chnlId, effectName })
147 | ```
148 |
Besides disabling an effect, this method works exactly the same as _.enableEffect_.
150 |
151 | #### Setting an effects value
152 | ```javascript
153 | .setEffectValue({ chnlId, effectName, valueType, value })
154 | ```
155 |
156 | This method changes the value of a specific effect. To understand what the fields _effectName_, _valueType_ and _value_ are for, refer to the webaudio-effect-unit documentation (since all effects of the chnl module base on it).
157 |
158 | ### Manipulating the recorder and the master chnl
159 | All chnls of soundcycle can be controlled through the same interfaces and since the master channel and the recorder channel are chnls too, they can be manipulated with the same methods. To do so, simply use the according ids for them:
160 | ```javascript
161 | .getMasterChnlId()
162 | .getRecorderChnlId();
163 | ```
164 |
165 | ### Setting/getting the current projects name
166 | ```javascript
167 | .setProjectName(name)
168 | .getProjectName()
169 | ```
170 |
Use those methods to get/set the project's name. If no project name was set, the project's name will default to the current date.
172 |
173 | ### The master chnl
All the output soundcycle produces flows through the master chnl and gets redirected to the speakers. This allows you to control the output of the loopstation centrally.
175 |
176 | In addition, the master chnl enables you recording its output. With that, you can record whole tracks!
177 | Those methods allow you to record the project:
178 |
179 | ```javascript
180 | .startProjectRecording()
181 | /* ... */
182 | .stopProjectRecording()
183 | ```
184 |
When calling the _.stopProjectRecording_ method, a _.wav_ file will automatically be downloaded. The file's name will be the project's name.
186 |
187 |
188 | # Code Example
189 |
190 | This example doesn't essentially make sense, it just aims to show the use of soundcyclejs in a simplified way.
191 |
192 | ```javascript
193 | import SoundCycle from 'soundcyclejs';
194 | const soundcycle = new SoundCycle();
195 |
196 | // Start recording. The default mode is NEW_LANE
197 | soundcycle.startRecording();
198 |
199 | window.setTimeout(() => {
200 | // Stop recording after 1s
201 | soundcycle.stopRecording()
202 | .then((res) => {
203 | // Set current lane to the lane which was just created
204 | soundcycle.setCurrentLane(res.laneId);
205 | // Set the mode to ADD_TO_LANE; so all newly recorded tracks are added to the current lane
206 | soundcycle.setMode(soundcycle.getModes().ADD_TO_LANE);
207 | // Start recording again
208 | soundcycle.startRecording();
209 |
210 | window.setTimeout(() => {
211 | // Stop recording after 1s
212 | soundcycle.stopRecording()
213 | .then((res1) => {
214 | // Stop both tracks
215 | soundcycle.stopTrack({ id: res.chnlId });
216 | soundcycle.stopTrack({ id: res1.chnlId });
217 | window.setTimeout(() => {
218 | // Play the first one again
219 | soundcycle.playTrack({ id: res.chnlId });
220 | }, 1000);
221 | });
222 | }, 1000);
223 | });
224 | }, 1000);
225 | ```
226 |
--------------------------------------------------------------------------------