├── .gitignore ├── LICENSE ├── index.js ├── package.json ├── process.md ├── readme.md ├── test ├── index.js └── sounds │ ├── acid.glsl │ ├── noisy.glsl │ └── sine.glsl └── todo.md /.gitignore: -------------------------------------------------------------------------------- 1 | shadertoy 2 | 3 | # Compiled source # 4 | ################### 5 | *.com 6 | *.class 7 | *.dll 8 | *.exe 9 | *.o 10 | *.so 11 | 12 | # Packages # 13 | ############ 14 | # it's better to unpack these files and commit the raw source 15 | # git has its own built in compression methods 16 | *.7z 17 | *.dmg 18 | *.gz 19 | *.iso 20 | *.jar 21 | *.rar 22 | *.tar 23 | *.zip 24 | 25 | # Logs and databases # 26 | ###################### 27 | *.log 28 | *.sql 29 | *.sqlite 30 | 31 | # OS generated files # 32 | ###################### 33 | .DS_Store 34 | .DS_Store? 35 | ._* 36 | .Spotlight-V100 37 | .Trashes 38 | # Icon? 39 | ehthumbs.db 40 | Thumbs.db 41 | .cache 42 | .project 43 | .settings 44 | .tmproj 45 | *.esproj 46 | nbproject 47 | 48 | # Numerous always-ignore extensions # 49 | ##################################### 50 | *.diff 51 | *.err 52 | *.orig 53 | *.rej 54 | *.swn 55 | *.swo 56 | *.swp 57 | *.vi 58 | *~ 59 | *.sass-cache 60 | *.grunt 61 | *.tmp 62 | 63 | # Dreamweaver added files # 64 | ########################### 65 | _notes 66 | dwsync.xml 67 | 68 | # Komodo # 69 | ########################### 70 | *.komodoproject 71 | .komodotools 72 | 73 | # Node # 74 | ##################### 75 | node_modules 76 | 77 | # Bower # 78 | ##################### 79 | bower_components 80 | 81 | # Folders to ignore # 82 | ##################### 83 | .hg 84 | .svn 85 | .CVS 86 | intermediate 87 | publish 88 | .idea 89 | .graphics 90 | _test 91 | _archive 92 | uploads 93 | tmp 94 | 95 | # Vim files to ignore # 96 | ####################### 97 | .VimballRecord 98 | .netrwhist 99 | 100 | bundle.* 101 | 102 | _demo -------------------------------------------------------------------------------- /LICENSE: 
-------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | Copyright (c) 2018 Dima Yvanow 3 | 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 16 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 17 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 18 | IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, 19 | DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR 20 | OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE 21 | OR OTHER DEALINGS IN THE SOFTWARE. 
22 | 23 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @module audio-shader 3 | */ 4 | 5 | var Through = require('audio-through'); 6 | var inherits = require('inherits'); 7 | var createShader = require('nogl-shader-output'); 8 | var createContext = require('nogl'); 9 | var Shader = require('gl-shader'); 10 | 11 | 12 | /** 13 | * @constructor 14 | * 15 | * @param {Function} fn Shader code 16 | * @param {Object} options Options 17 | */ 18 | function AudioShader (shaderCode, options) { 19 | if (!(this instanceof AudioShader)) return new AudioShader(shaderCode, options); 20 | 21 | options = options || {}; 22 | 23 | Through.call(this, options); 24 | 25 | //refine number of channels - vec4 is max output 26 | var channels = this.format.channels = Math.min(this.format.channels, 4); 27 | 28 | //refine shader code, if not passed 29 | if (!shaderCode) { 30 | var vecType = channels === 1 ? 
'float' : ('vec' + channels); 31 | shaderCode = `${vecType} mainSound( float time ){ 32 | return ${vecType}( sin(6.2831*440.0*time)*exp(-3.0*time) ); 33 | }`; 34 | } 35 | 36 | var gl = options.gl || createContext(); 37 | 38 | var shader = Shader(gl, ` 39 | precision mediump float; 40 | precision mediump int; 41 | 42 | attribute vec2 position; 43 | 44 | uniform float iGlobalTime; 45 | uniform float iSampleRate; 46 | uniform vec3 iResolution; 47 | 48 | varying float time; 49 | 50 | void main (void) { 51 | gl_Position = vec4(position, 0, 1); 52 | time = iGlobalTime + (position.x * 0.5 + 0.5) * iResolution.x / iSampleRate; 53 | } 54 | `, ` 55 | precision mediump float; 56 | precision mediump int; 57 | 58 | uniform vec3 iResolution; // viewport resolution (in pixels) 59 | uniform float iGlobalTime; // shader playback time (in seconds) 60 | uniform int iFrame; // shader playback frame 61 | uniform vec4 iDate; // (year, month, day, time in seconds) 62 | uniform float iSampleRate; // sound sample rate (i.e., 44100) 63 | uniform vec3 iChannelResolution[4]; // channel resolution (in pixels) 64 | // uniform sampler2D iChannel0; // input channel1 65 | // uniform sampler2D iChannel1; // input channel2 66 | // uniform sampler2D iChannel2; // input channel3 67 | // uniform sampler2D iChannel3; // input channel4 68 | 69 | varying float time; 70 | 71 | ${shaderCode} 72 | 73 | void main (void) { 74 | vec4 result = vec4(mainSound(time)${ 75 | channels === 1 ? ', 0, 0, 0' : 76 | channels === 3 ? ', 0' : 77 | channels === 4 ? '' : ', 0, 0' 78 | }); 79 | gl_FragColor = result; 80 | }`); 81 | 82 | //setup shader 83 | this.draw = createShader(shader, { 84 | width: this.format.samplesPerFrame, 85 | height: 1 86 | }); 87 | 88 | //clean on end 89 | this.on('end', function () { 90 | throw Error('Unimplemented'); 91 | }); 92 | } 93 | 94 | inherits(AudioShader, Through); 95 | 96 | 97 | 98 | /** 99 | * Send chunk to audio-shader, invoke done on return. 
100 | * The strategy: render each audio channel to it’s own line in result 101 | * TODO: thing to replace with textures 102 | * TODO: provide input channels as textures 103 | * TODO: provide values of previous input/output to implement filters 104 | */ 105 | AudioShader.prototype.process = function (chunk, done) { 106 | var w = this.format.samplesPerFrame; 107 | var channels = Math.min(chunk.numberOfChannels, this.format.channels); 108 | 109 | //set up current chunk as a channels data 110 | // for (var channel = 0; channel < channels; channel++) { 111 | // var texture = new Texture(gl, chunk.getChannelData(channel)); 112 | // this.shader.uniforms[`iChannel${channel}`] = texture.bind(); 113 | // this.shader.uniforms.iChannelResolution[channel] = [chunk.length, 1, 1]; 114 | // this.shader.uniforms.iChannelTime[channel] = []; 115 | // } 116 | 117 | //preset new time value 118 | var d = new Date(); 119 | 120 | //render chunk 121 | var result = this.draw({ 122 | iResolution: [w, 1, 1], 123 | iSampleRate: this.format.sampleRate, 124 | iGlobalTime: this.time, 125 | iFrame: this.frame, 126 | iDate: [ 127 | d.getFullYear(), // the year (four digits) 128 | d.getMonth(), // the month (from 0-11) 129 | d.getDate(), // the day of the month (from 1-31) 130 | d.getHours()*60.0*60 + d.getMinutes()*60 + d.getSeconds() 131 | ] 132 | }); 133 | 134 | //transform result to buffer channels (color channel per audio channel) 135 | for (var channel = 0; channel < channels; channel++) { 136 | var cData = chunk.getChannelData(channel); 137 | for (var i = 0; i < w; i++) { 138 | cData[i] = result[i * 4 + channel]; 139 | } 140 | } 141 | 142 | done(); 143 | } 144 | 145 | 146 | module.exports = AudioShader; 147 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "audio-shader", 3 | "version": "1.2.3", 4 | "description": "Shader based audio processing 
stream", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "node test/index.js", 8 | "test:browser": "budo test/index.js" 9 | }, 10 | "repository": { 11 | "type": "git", 12 | "url": "git+https://github.com/audio-lab/audio-shader.git" 13 | }, 14 | "browserify": { 15 | "transform": [ 16 | "brfs", 17 | "glslify" 18 | ] 19 | }, 20 | "browser": { 21 | "nogl-shader-output": "gl-shader-output", 22 | "nogl": "webgl-context" 23 | }, 24 | "keywords": [ 25 | "shader", 26 | "audio", 27 | "sound", 28 | "dsp", 29 | "stackgl", 30 | "webgl", 31 | "shadertoy", 32 | "headless-js", 33 | "GPU sound", 34 | "processing", 35 | "gl", 36 | "glsl", 37 | "nogl" 38 | ], 39 | "author": "Dima Yvanow ", 40 | "license": "MIT", 41 | "bugs": { 42 | "url": "https://github.com/audio-lab/audio-shader/issues" 43 | }, 44 | "homepage": "https://github.com/audio-lab/audio-shader#readme", 45 | "dependencies": { 46 | "brfs": "^1.4.3", 47 | "gl-shader": "^4.2.0", 48 | "gl-shader-output": "^2.0.1", 49 | "inherits": "^2.0.1", 50 | "nogl": "^1.1.0", 51 | "nogl-shader-output": "^1.4.0", 52 | "webgl-context": "^2.2.0", 53 | "xtend": "^4.0.1" 54 | }, 55 | "devDependencies": { 56 | "audio-buffer-utils": "^3.0.1", 57 | "audio-context": "^0.1.0", 58 | "audio-generator": "^2.0.2", 59 | "audio-slice": "^1.1.0", 60 | "audio-speaker": "^1.2.2", 61 | "audio-through": "^2.0.0", 62 | "gl-fbo": "^2.0.5", 63 | "gl-texture2d": "^2.0.10", 64 | "glslify": "^5.0.2", 65 | "is-browser": "^2.0.1", 66 | "tst": "^1.1.8" 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /process.md: -------------------------------------------------------------------------------- 1 | ## Questions 2 | 3 | * Can we readPixels data per rendered channel? 4 | * There is a difficulty with that. As far we want to return vec2, vec3 or vec4, our fragshader should encode valuw 5 | * How to walk the audio chunk within the shader? 
6 | * Shadertoy interpolates chunk from the vertex shader by a fragment shader so that it goes for each time value for 2 channels. Just as you would walk the triangle. 7 | * It passes chunk data as a 2d texture of size 512 8 | * How to get the calculated "color" (in fact - sound) value back to audio shader? 9 | * readPixels gets the data from the canvas (current framebuffer) - that is done in shadertoy 10 | * but we should try passing by reference, or modifying texture, or something else 11 | * How to control pressure? 12 | * No need. Shader is always sync-style coded, so each audio-through callback fetches data from the framebuffer, sets new framebuffer processing and sends the fetched data. It causes delay, but it ensures data is processed in a flow with no delays. Pressure is controlled by audio-speaker. 13 | 14 | * What are the principles, the difference between audio-shadertoy and audio-shader? 15 | * Audio-shader can take any input shader, the only contract is that the output is treated as audiobuffer. Shadertoy takes only source code. 16 | * Audio-shader processes rect chunks - triangles, shadertoy processes fragments. 17 | * Channel should not be represented by lines, as vec/mat form is easier to manipulate cross-channels. 18 | -------------------------------------------------------------------------------- /readme.md: -------------------------------------------------------------------------------- 1 | Webgl-based audio processing stream. 
2 | 3 | [![npm install audio-shader](https://nodei.co/npm/audio-shader.png?mini=true)](https://npmjs.org/package/audio-shader/) 4 | 5 | ```js 6 | var AudioShader = require('audio-shader'); 7 | var Speaker = require('audio-speaker'); 8 | 9 | //Create shader stream based on sound processing function 10 | AudioShader(` 11 | vec2 mainSound( float time ){ 12 | return vec2( sin(6.2831*880.0*time)*exp(-3.0*time) ); 13 | } 14 | `) 15 | 16 | //Send generated sound to speaker 17 | .pipe(Speaker()); 18 | ``` 19 | 20 | ### API 21 | 22 | API is fully compatible with [shadertoy](https://www.shadertoy.com/) to copy-paste and run its code locally. Note that shadertoy limits output sound to `60s`, whereas _audio-shader_ runs until it is stopped. 23 | 24 | It may also be helpful to use [glslify](https://www.npmjs.com/package/glslify) to get code inserted neatly: 25 | 26 | ```js 27 | //index.js 28 | var Shader = require('audio-shader'); 29 | var Speaker = require('speaker'); 30 | var glslify = require('glslify'); 31 | 32 | Shader(glslify('./sound.glsl'), options?).pipe(Speaker()); 33 | ``` 34 | 35 | ```glsl 36 | //sound.glsl 37 | vec2 mainSound( float time ){ 38 | return vec2( sin(6.2831*440.0*time)*exp(-3.0*time) ); 39 | } 40 | ``` 41 | 42 | _Audio-shader_ can also be used as a processing stream. It inherits [audio-through](https://github.com/audio-lab/audio-through), which is basically a [transform stream](https://nodejs.org/api/stream.html#stream_class_stream_transform), so it can be used with other node streams. 
43 | 44 | ```js 45 | var MusicXML = require('musicxml-to-pcm'); 46 | var Processor = require('audio-shader'); 47 | var Speaker = require('speaker'); 48 | 49 | MusicXML() 50 | .pipe(Processor(` 51 | vec2 main (float time) { 52 | //TODO test this example and document it, shadertoy is down 53 | return vec2(); 54 | } 55 | `)) 56 | .pipe(Speaker()); 57 | ``` 58 | 59 | 60 | ### Related 61 | 62 | * [nogl-shader-output](http://npmjs.org/nogl-shader-output) — process fragment shader in node. 63 | * [audio-through](https://github.com/audio-lab/audio-through) — audio processing stream for node/browser. 64 | * [gl-compute](https://www.npmjs.com/package/gl-compute) — computations on shaders. 65 | * [shadertoy-audio](https://www.npmjs.org/package/shadertoy-audio) — audio shader for processing shadertoy audio. -------------------------------------------------------------------------------- /test/index.js: -------------------------------------------------------------------------------- 1 | var Shader = require('../'); 2 | var Generator = require('audio-generator'); 3 | var Speaker = require('audio-speaker'); 4 | var glslify = require('glslify'); 5 | var test = require('tst'); 6 | var Slice = require('audio-slice'); 7 | var fs = require('fs'); 8 | var isBrowser = require('is-browser'); 9 | 10 | 11 | test('Inline code', function (done) { 12 | Shader(` 13 | vec2 mainSound( float time ){ 14 | return vec2( sin(6.2831*880.0*time)*exp(-3.0*time) ); 15 | } 16 | `) 17 | .pipe(Slice(1)) 18 | .pipe(Speaker()); 19 | 20 | setTimeout(done, 500); 21 | }); 22 | 23 | test('No-params', function (done) { 24 | Shader() 25 | .pipe(Speaker()); 26 | 27 | setTimeout(done, 500); 28 | }); 29 | 30 | test('Glslify', function (done) { 31 | var src = fs.readFileSync(__dirname + '/sounds/sine.glsl', 'utf-8'); 32 | 33 | // Shader(glslify('./sounds/sine')) 34 | Shader(src) 35 | .pipe(Speaker()); 36 | 37 | setTimeout(done, 500); 38 | }); 39 | 40 | test.skip('Shadertoy env', function (done) { 41 | Shader(` 42 | 
vec2 mainSound (float time) { 43 | return vec2(texture2D(), texture2D()); 44 | } 45 | `) 46 | .pipe(Speaker()); 47 | 48 | setTimeout(done, 1000); 49 | }); 50 | 51 | test('Moving average filter'); 52 | 53 | test.skip('Transform', function (done) { 54 | Shader(` 55 | vec2 mainSound (float time) { 56 | return vec2(texture2D(), texture2D()); 57 | } 58 | `) 59 | .pipe(Speaker()); 60 | 61 | setTimeout(done, 1000); 62 | }); 63 | 64 | test.skip('Multiple inputs', function () { 65 | var mixer = Shader(` 66 | 67 | `) 68 | }); 69 | 70 | test('Mono channel'); 71 | 72 | test('4 channels'); 73 | 74 | test('Different format'); 75 | 76 | test('Being a destination'); 77 | 78 | test.skip('Node-speaker', function () { 79 | 80 | }); 81 | 82 | test('Noisy', function (done) { 83 | //FIXME: this example does not work in node, guess the compiler is guilty 84 | if (!isBrowser) return; 85 | 86 | this.timeout(Infinity); 87 | 88 | var src = fs.readFileSync(__dirname + '/sounds/noisy.glsl', 'utf-8'); 89 | 90 | var s = Shader(src) 91 | 92 | .pipe(Speaker()); 93 | // setTimeout(done, 5000); 94 | // s.process([], function () {}); 95 | }); 96 | 97 | test('audio-shader vs audio-through') 98 | -------------------------------------------------------------------------------- /test/sounds/acid.glsl: -------------------------------------------------------------------------------- 1 | /** 2 | * https://www.shadertoy.com/view/ldfSW2 3 | */ 4 | 5 | // srtuss, 2014 6 | // quick and dirty 303 emulation (sort of) aswell as as some percussion and some other noise 7 | 8 | // most of the code and the values in it are just experimental. i'll tidy it up soon. 
9 | 10 | // number of synthesized harmonics (tune for quality/preformance) 11 | #define NSPC 256 12 | 13 | #define pi2 6.283185307179586476925286766559 14 | 15 | // cheap and unrealistic distortion 16 | float dist(float s, float d) 17 | { 18 | return clamp(s * d, -1.0, 1.0); 19 | } 20 | vec2 dist(vec2 s, float d) 21 | { 22 | return clamp(s * d, -1.0, 1.0); 23 | } 24 | 25 | // quantize 26 | float quan(float s, float c) 27 | { 28 | return floor(s / c) * c; 29 | } 30 | 31 | // a resonant lowpass filter's frequency response 32 | float filter(float h, float cut, float res) 33 | { 34 | cut -= 20.0; 35 | float df = max(h - cut, 0.0), df2 = abs(h - cut); 36 | return exp(-0.005 * df * df) * 0.5 + exp(df2 * df2 * -0.1) * 2.2; 37 | } 38 | 39 | // randomize 40 | float nse(float x) 41 | { 42 | return fract(sin(x * 110.082) * 19871.8972); 43 | //return fract(sin(x * 110.082) * 13485.8372); 44 | } 45 | float nse_slide(float x) 46 | { 47 | float fl = floor(x); 48 | return mix(nse(fl), nse(fl + 1.0), smoothstep(0.0, 1.0, fract(x))); 49 | } 50 | 51 | // note number to frequency 52 | float ntof(float n) 53 | { 54 | return 440.0 * pow(2.0, (n - 69.0) / 12.0); 55 | } 56 | 57 | // tb303 core 58 | vec2 synth(float tseq, float t) 59 | { 60 | vec2 v = vec2(0.0); 61 | 62 | float tnote = fract(tseq); 63 | float dr = 0.26; 64 | float amp = smoothstep(0.05, 0.0, abs(tnote - dr - 0.05) - dr) * exp(tnote * -1.0); 65 | float seqn = nse(floor(tseq)); 66 | //float seqn = nse_slide(tseq); 67 | float n = 20.0 + floor(seqn * 38.0);//50.0 + floor(time * 2.0); 68 | float f = ntof(n); 69 | 70 | float sqr = smoothstep(0.0, 0.01, abs(mod(t * 9.0, 64.0) - 20.0) - 20.0); 71 | 72 | float base = f;//50.0 + sin(sin(t * 0.1) * t) * 20.0; 73 | float flt = exp(tnote * -1.5) * 50.0 + pow(cos(t * 1.0) * 0.5 + 0.5, 4.0) * 80.0 - 0.0; 74 | for(int i = 0; i < NSPC; i ++) 75 | { 76 | float h = float(i + 1); 77 | float inten = 1.0 / h; 78 | //inten *= sin((pow(h, sin(t) * 0.5 + 0.5) + t * 0.5) * pi2) * 0.9 + 0.1; 79 | 80 
| inten = mix(inten, inten * mod(h, 2.0), sqr); 81 | 82 | inten *= exp(-1.0 * max(2.0 - h, 0.0));// + exp(abs(h - flt) * -2.0) * 8.0; 83 | 84 | inten *= filter(h, flt, 4.0); 85 | 86 | 87 | v.x += inten * sin((pi2 + 0.01) * (t * base * h)); 88 | v.y += inten * sin(pi2 * (t * base * h)); 89 | } 90 | 91 | 92 | float o = v.x * amp;//exp(max(tnote - 0.3, 0.0) * -5.0); 93 | 94 | //o = dist(o, 2.5); 95 | 96 | return vec2(dist(v * amp, 2.0)); 97 | } 98 | 99 | // heavy 909-ish bassdrum 100 | float kick(float tb, float time) 101 | { 102 | tb = fract(tb / 4.0) * 0.5; 103 | float aa = 5.0; 104 | tb = sqrt(tb * aa) / aa; 105 | 106 | float amp = exp(max(tb - 0.15, 0.0) * -10.0); 107 | float v = sin(tb * 100.0 * pi2) * amp; 108 | v = dist(v, 4.0) * amp; 109 | v += nse(quan(tb, 0.001)) * nse(quan(tb, 0.00001)) * exp(tb * -20.0) * 2.5; 110 | return v; 111 | } 112 | 113 | // bad 909-ish open hihat 114 | float hat(float tb) 115 | { 116 | tb = fract(tb / 4.0) * 0.5; 117 | float aa = 4.0; 118 | //tb = sqrt(tb * aa) / aa; 119 | return nse(sin(tb * 4000.0) * 0.0001) * smoothstep(0.0, 0.01, tb - 0.25) * exp(tb * -5.0); 120 | } 121 | 122 | float gate1(float t) 123 | { 124 | #define stp 0.0625 125 | float v; 126 | v = abs(t - 0.00 - 0.015) - 0.015; 127 | v = min(v, abs(t - stp*1. - 0.015) - 0.015); 128 | v = min(v, abs(t - stp*2. - 0.015) - 0.015); 129 | v = min(v, abs(t - stp*4. - 0.015) - 0.015); 130 | v = min(v, abs(t - stp*6. - 0.015) - 0.015); 131 | v = min(v, abs(t - stp*8. - 0.05) - 0.05); 132 | v = min(v, abs(t - stp*11. - 0.05) - 0.05); 133 | v = min(v, abs(t - stp*14. 
- 0.05) - 0.05); 134 | 135 | return smoothstep(0.001, 0.0, v); 136 | } 137 | 138 | vec2 synth2(float time) 139 | { 140 | float tb = mod(time * 9.0, 16.0) / 16.0; 141 | 142 | float f = time * pi2 * ntof(87.0 - 12.0 + mod(tb, 4.0)); 143 | float v = dist(sin(f + sin(f * 0.5)), 5.0) * gate1(tb); 144 | 145 | return vec2(v); 146 | } 147 | 148 | vec2 synth2_echo(float time, float tb) 149 | { 150 | vec2 mx; 151 | mx = synth2(time) * 0.5;// + synth2(time) * 0.5; 152 | float ec = 0.3, fb = 0.6, et = 3.0 / 9.0, tm = 2.0 / 9.0; 153 | mx += synth2(time - et) * ec * vec2(1.0, 0.2); ec *= fb; et += tm; 154 | mx += synth2(time - et) * ec * vec2(0.2, 1.0); ec *= fb; et += tm; 155 | mx += synth2(time - et) * ec * vec2(1.0, 0.2); ec *= fb; et += tm; 156 | mx += synth2(time - et) * ec * vec2(0.2, 1.0); ec *= fb; et += tm; 157 | return mx; 158 | } 159 | 160 | // oldschool explosion sound fx 161 | float expl(float tb) 162 | { 163 | //tb = fract(tb / 4.0) * 0.5; 164 | float aa = 20.0; 165 | tb = sqrt(tb * aa) / aa; 166 | 167 | float amp = exp(max(tb - 0.15, 0.0) * -10.0); 168 | float v = nse(quan(mod(tb, 0.1), 0.0001)); 169 | v = dist(v, 4.0) * amp; 170 | return v; 171 | } 172 | 173 | vec2 synth1_echo(float tb, float time) 174 | { 175 | vec2 v; 176 | v = synth(tb, time) * 0.5;// + synth2(time) * 0.5; 177 | float ec = 0.4, fb = 0.6, et = 2.0 / 9.0, tm = 2.0 / 9.0; 178 | v += synth(tb, time - et) * ec * vec2(1.0, 0.5); ec *= fb; et += tm; 179 | v += synth(tb, time - et).yx * ec * vec2(0.5, 1.0); ec *= fb; et += tm; 180 | v += synth(tb, time - et) * ec * vec2(1.0, 0.5); ec *= fb; et += tm; 181 | v += synth(tb, time - et).yx * ec * vec2(0.5, 1.0); ec *= fb; et += tm; 182 | 183 | return v; 184 | } 185 | 186 | vec2 mainSound(float time) 187 | { 188 | vec2 mx = vec2(0.0); 189 | 190 | float tb = mod(time * 9.0, 16.0); 191 | 192 | 193 | mx = synth1_echo(tb, time) * 0.8 * smoothstep(0.0, 0.01, abs(mod(time * 9.0, 256.0) + 8.0 - 128.0) - 8.0); 194 | 195 | float hi = 1.0; 196 | float ki = 
smoothstep(0.01, 0.0, abs(mod(time * 9.0, 256.0) - 64.0 - 128.0) - 64.0); 197 | float s2i = 1.0 - smoothstep(0.01, 0.0, abs(mod(time * 9.0, 256.0) - 64.0 - 128.0) - 64.0); 198 | hi = ki; 199 | 200 | mx += expl(mod(time * 9.0, 64.0) / 4.5) * 0.4 * s2i; 201 | 202 | mx += vec2(hat(tb) * 1.5) * hi; 203 | 204 | //mx += dist(fract(tb / 16.0) * sin(ntof(77.0 - 36.0) * pi2 * time), 8.0) * 0.2; 205 | //mx += expl(tb) * 0.5; 206 | 207 | mx += vec2(synth2_echo(time, tb)) * 0.2 * s2i; 208 | 209 | 210 | mx = mix(mx, mx * (1.0 - fract(tb / 4.0) * 0.5), ki); 211 | float sc = sin(pi2 * tb) * 0.4 + 0.6; 212 | float k = kick(tb, time) * 0.8 * sc * ki;// - kick(tb, time - 0.004) * 0.5 - kick(tb, time - 0.008) * 0.25); 213 | 214 | mx += vec2(k); 215 | 216 | 217 | 218 | mx = dist(mx, 1.00); 219 | 220 | return vec2(mx); 221 | } -------------------------------------------------------------------------------- /test/sounds/noisy.glsl: -------------------------------------------------------------------------------- 1 | /** 2 | * https://www.shadertoy.com/view/4ts3z2 3 | */ 4 | 5 | //Audio by Dave_Hoskins 6 | 7 | 8 | vec2 add = vec2(1.0, 0.0); 9 | #define MOD2 vec2(.16632,.17369) 10 | #define MOD3 vec3(.16532,.17369,.15787) 11 | //---------------------------------------------------------------------------------------- 12 | // 1 out, 1 in ... 13 | 14 | float hash11(float p) 15 | { 16 | vec2 p2 = fract(vec2(p) * MOD2); 17 | p2 += dot(p2.yx, p2.xy+19.19); 18 | 19 | return fract(p2.x * p2.y); 20 | } 21 | 22 | //---------------------------------------------------------------------------------------- 23 | // 2 out, 1 in... 24 | vec2 hash21(float p) 25 | { 26 | //p = fract(p * MOD3); 27 | vec3 p3 = fract(vec3(p) * MOD3); 28 | p3 += dot(p3.xyz, p3.yzx + 19.19); 29 | return fract(vec2(p3.x * p3.y, p3.z*p3.x))-.5; 30 | } 31 | //---------------------------------------------------------------------------------------- 32 | /// 2 out, 2 in... 
33 | vec2 hash22(vec2 p) 34 | { 35 | vec3 p3 = fract(vec3(p.xyx) * MOD3); 36 | p3 += dot(p3.zxy, p3.yxz+19.19); 37 | return fract(vec2(p3.x * p3.y, p3.z*p3.x)); 38 | } 39 | 40 | //---------------------------------------------------------------------------------------- 41 | // 2 out, 1 in... 42 | vec2 Noise21(float x) 43 | { 44 | float p = floor(x); 45 | float f = fract(x); 46 | f = f*f*(3.0-2.0*f); 47 | return mix( hash21(p), hash21(p + 1.0), f)-.5; 48 | 49 | } 50 | 51 | 52 | //---------------------------------------------------------------------------------------- 53 | // 2 out, 1 in... 54 | float Noise11(float x) 55 | { 56 | float p = floor(x); 57 | float f = fract(x); 58 | f = f*f*(3.0-2.0*f); 59 | return mix( hash11(p), hash11(p + 1.0), f)-.5; 60 | 61 | } 62 | 63 | //---------------------------------------------------------------------------------------- 64 | // 2 out, 2 in... 65 | vec2 Noise22(vec2 x) 66 | { 67 | vec2 p = floor(x); 68 | vec2 f = fract(x); 69 | f = f*f*(3.0-2.0*f); 70 | 71 | vec2 res = mix(mix( hash22(p), hash22(p + add.xy),f.x), 72 | mix( hash22(p + add.yx), hash22(p + add.xx),f.x),f.y); 73 | return res-.5; 74 | } 75 | 76 | //---------------------------------------------------------------------------------------- 77 | // Fractal Brownian Motion... 78 | vec2 FBM21(float v) 79 | { 80 | vec2 r = vec2(0.0); 81 | vec2 x = vec2(v, v*1.3+23.333); 82 | float a = .6; 83 | 84 | for (int i = 0; i < 8; i++) 85 | { 86 | r += Noise22(x * a) / a; 87 | a += a; 88 | } 89 | 90 | return r; 91 | } 92 | 93 | //---------------------------------------------------------------------------------------- 94 | // Fractal Brownian Motion... 
95 | vec2 FBM22(vec2 x) 96 | { 97 | vec2 r = vec2(0.0); 98 | 99 | float a = .6; 100 | 101 | for (int i = 0; i < 8; i++) 102 | { 103 | r += Noise22(x * a) / a; 104 | a += a; 105 | } 106 | 107 | return r; 108 | } 109 | 110 | 111 | vec2 mainSound(float time) 112 | { 113 | vec2 audio = vec2(.0); 114 | for (float t = 0.0; t < 1.0; t+=.5) 115 | { 116 | time = time+t; 117 | vec2 n1 = FBM22( time*(Noise21(time*3.25)*40.0+Noise21(time*.03)*3500.0+8500.0)) * (abs(Noise21(time*.8)))*.25; 118 | vec2 n2 = FBM22( time*(Noise21(time*.4)+900.0))*abs(Noise21(time*1.5)); 119 | vec2 n3 = FBM22( time*(Noise21(time*1.3)+Noise21(-time*.03)*200.0+540.0))*(.5+abs(Noise21(time-99.)))*1.5; 120 | vec2 s1 = sin(time*1300.+(Noise21(time*.23))*(Noise21(-time*.12)*3000.0+4000.0))*abs(Noise21(time*42.3+199.))*abs(Noise21(-time*.04+9.))*1.7; 121 | 122 | audio += (n1+n2+n3+s1)/4.0; 123 | } 124 | float foot = sin(time*.9*3.141); 125 | audio += Noise11(time*380.0)*Noise11(time*880.0)* smoothstep(0.7,1.1,abs(foot)) * 2.; 126 | 127 | return clamp(audio, -1.0, 1.0) * smoothstep(0.0, 3.0, time) * smoothstep(60.0, 55.0, time); 128 | } 129 | -------------------------------------------------------------------------------- /test/sounds/sine.glsl: -------------------------------------------------------------------------------- 1 | vec2 mainSound( float time ){ 2 | return vec2( sin(6.2831*660.0*time)*exp(-3.0*time) ); 3 | } -------------------------------------------------------------------------------- /todo.md: -------------------------------------------------------------------------------- 1 | * fix noisy example in node 2 | * think of threads for nogl-shader-output 3 | * try to hook up headless-gl still. It is faster. nogl-shader is unbearably slow. 4 | 5 | * think of providing input channels as multiple connections, where each channel is represented by a texture with N height. It is logical at least. 6 | * think of providing output as a texture of a height N, where each row is for separate channel. 
7 | * facilitate interconnected audio-shaders so that they just pass textures one to another instead of converting/unconverting audio buffers. So to let us process sound in GPU with minimal possible delays. 8 | * ensure timeDelta is delta between chunks, not the rendering time 9 | * add destructors, destruct & clean memory on stream end 10 | * detect output type from shader code automatically 11 | * benchmark comparison with native js, e.g. filter node 12 | * pass input chunk data 13 | * shadertoy tests 14 | * frequency shader, where not time but f-domain data passed --------------------------------------------------------------------------------