├── .DS_Store ├── DesktopTemplate ├── sketch.properties ├── .DS_Store ├── data │ ├── .DS_Store │ └── mykbeat.wav ├── AndroidManifest.xml ├── DesktopTemplate.pde ├── Licence.txt └── MaximJava_api.pde ├── AndroidTemplate ├── sketch.properties ├── .DS_Store ├── data │ ├── .DS_Store │ └── mybeat.wav ├── web-export │ ├── mybeat.wav │ ├── index.html │ └── AndroidTemplate.pde ├── AndroidManifest.xml ├── Licence.txt ├── AndroidTemplate.pde └── android_api.pde ├── JavaScriptTemplate ├── .DS_Store ├── sketch.properties ├── data │ ├── .DS_Store │ └── mykbeat.wav ├── template │ ├── .DS_Store │ └── template.html ├── web-export │ ├── mykbeat.wav │ ├── JavaScriptTemplate.pde │ ├── index.html │ └── maxim.js ├── JavaScriptTemplate.pde ├── Licence.txt └── maxim.js └── README.md /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/micknoise/Maxim/HEAD/.DS_Store -------------------------------------------------------------------------------- /DesktopTemplate/sketch.properties: -------------------------------------------------------------------------------- 1 | mode.id=processing.mode.java.JavaMode 2 | mode=Java 3 | -------------------------------------------------------------------------------- /AndroidTemplate/sketch.properties: -------------------------------------------------------------------------------- 1 | mode.id=processing.mode.android.AndroidMode 2 | mode=Android 3 | -------------------------------------------------------------------------------- /AndroidTemplate/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/micknoise/Maxim/HEAD/AndroidTemplate/.DS_Store -------------------------------------------------------------------------------- /DesktopTemplate/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/micknoise/Maxim/HEAD/DesktopTemplate/.DS_Store -------------------------------------------------------------------------------- /JavaScriptTemplate/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/micknoise/Maxim/HEAD/JavaScriptTemplate/.DS_Store -------------------------------------------------------------------------------- /AndroidTemplate/data/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/micknoise/Maxim/HEAD/AndroidTemplate/data/.DS_Store -------------------------------------------------------------------------------- /AndroidTemplate/data/mybeat.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/micknoise/Maxim/HEAD/AndroidTemplate/data/mybeat.wav -------------------------------------------------------------------------------- /DesktopTemplate/data/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/micknoise/Maxim/HEAD/DesktopTemplate/data/.DS_Store -------------------------------------------------------------------------------- /JavaScriptTemplate/sketch.properties: -------------------------------------------------------------------------------- 1 | mode.id=de.bezier.mode.javascript.JavaScriptMode 2 | mode=JavaScript 3 | -------------------------------------------------------------------------------- /DesktopTemplate/data/mykbeat.wav: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/micknoise/Maxim/HEAD/DesktopTemplate/data/mykbeat.wav -------------------------------------------------------------------------------- /JavaScriptTemplate/data/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/micknoise/Maxim/HEAD/JavaScriptTemplate/data/.DS_Store -------------------------------------------------------------------------------- /JavaScriptTemplate/data/mykbeat.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/micknoise/Maxim/HEAD/JavaScriptTemplate/data/mykbeat.wav -------------------------------------------------------------------------------- /AndroidTemplate/web-export/mybeat.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/micknoise/Maxim/HEAD/AndroidTemplate/web-export/mybeat.wav -------------------------------------------------------------------------------- /JavaScriptTemplate/template/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/micknoise/Maxim/HEAD/JavaScriptTemplate/template/.DS_Store -------------------------------------------------------------------------------- /JavaScriptTemplate/web-export/mykbeat.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/micknoise/Maxim/HEAD/JavaScriptTemplate/web-export/mykbeat.wav -------------------------------------------------------------------------------- /AndroidTemplate/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 2 | 6 | 7 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /DesktopTemplate/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 2 | 6 | 7 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /DesktopTemplate/DesktopTemplate.pde: -------------------------------------------------------------------------------- 1 | //The MIT License (MIT) - See Licence.txt for details 2 | 3 | //Copyright (c) 2013 Mick Grierson, Matthew Yee-King, Marco Gillies 4 | 5 | 6 | Maxim maxim; 7 | 8 | 9 | void setup() 10 | { 11 | size(640, 960); 12 | 13 | background(0); 14 | 15 | 16 | } 17 | 18 | void draw() 19 | { 20 | // code that happens every frame 21 | } 22 | 23 | void mouseDragged() 24 | { 25 | // code that happens when the mouse moves 26 | // with the button down 27 | } 28 | 29 | void mousePressed() 30 | { 31 | // code that happens when the mouse button 32 | // is pressed 33 | } 34 | 35 | void mouseReleased() 36 | { 37 | // code that happens when the mouse button 38 | // is released 39 | } 40 | 41 | -------------------------------------------------------------------------------- /JavaScriptTemplate/JavaScriptTemplate.pde: -------------------------------------------------------------------------------- 1 | //The MIT License (MIT) - See Licence.txt for details 2 | 3 | //Copyright (c) 2013 Mick Grierson, Matthew Yee-King, Marco Gillies 4 | 5 | 6 | Maxim maxim; 7 | 8 | 9 | void setup() 10 | { 11 | size(640, 960); 12 | 13 | background(0); 14 | 15 | 16 | } 17 | 18 | void draw() 19 | { 20 | // code that 
happens every frame 21 | } 22 | 23 | void mouseDragged() 24 | { 25 | // code that happens when the mouse moves 26 | // with the button down 27 | } 28 | 29 | void mousePressed() 30 | { 31 | // code that happens when the mouse button 32 | // is pressed 33 | } 34 | 35 | void mouseReleased() 36 | { 37 | // code that happens when the mouse button 38 | // is released 39 | } 40 | 41 | -------------------------------------------------------------------------------- /JavaScriptTemplate/web-export/JavaScriptTemplate.pde: -------------------------------------------------------------------------------- 1 | //The MIT License (MIT) - See Licence.txt for details 2 | 3 | //Copyright (c) 2013 Mick Grierson, Matthew Yee-King, Marco Gillies 4 | 5 | 6 | Maxim maxim; 7 | 8 | 9 | void setup() 10 | { 11 | size(640, 960); 12 | 13 | background(0); 14 | 15 | 16 | } 17 | 18 | void draw() 19 | { 20 | // code that happens every frame 21 | } 22 | 23 | void mouseDragged() 24 | { 25 | // code that happens when the mouse moves 26 | // with the button down 27 | } 28 | 29 | void mousePressed() 30 | { 31 | // code that happens when the mouse button 32 | // is pressed 33 | } 34 | 35 | void mouseReleased() 36 | { 37 | // code that happens when the mouse button 38 | // is released 39 | } 40 | 41 | 42 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Maxim 2 | ===== 3 | 4 | OK so this was a quickly hacked together thing we did for a MOOC in 2013. I really don't think you want to use this anymore but I'll leave it here for posterity. 5 | 6 | ------------------ 7 | 8 | Cross Platform JavaScript/Java Audio DSP and Mobile Web Development Library Compatible with Processing 9 | 10 | Maxim is designed to make it easier to program cross platform audio for desktops amd mobile platforms. 11 | It provides a single API for building complex audio applications on Android, iOS and the Desktop, using 12 | the WebAudioAPI in combination with traditional Java approaches for compatibility. 13 | 14 | It's a work in progress, but vastly simplifies the process of getting started writing audio and music software 15 | for mobile platforms 16 | 17 | Some notes : 18 | 19 | If you are using javascript mode, make sure your browser supports WebAudioAPI properly. 20 | 21 | See here for a list of browsers that support WebAudio 22 | 23 | http://caniuse.com/audio-api 24 | -------------------------------------------------------------------------------- /AndroidTemplate/Licence.txt: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2013 Mick Grierson, Matthew Yee-King, Marco Gillies 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions: 6 | The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software. 7 | 8 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE. 9 | -------------------------------------------------------------------------------- /DesktopTemplate/Licence.txt: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2013 Mick Grierson, Matthew Yee-King, Marco Gillies 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions: 6 | The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software. 7 | 8 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE. 9 | -------------------------------------------------------------------------------- /JavaScriptTemplate/Licence.txt: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2013 Mick Grierson, Matthew Yee-King, Marco Gillies 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions: 6 | The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software. 7 | 8 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE. 9 | -------------------------------------------------------------------------------- /AndroidTemplate/AndroidTemplate.pde: -------------------------------------------------------------------------------- 1 | //When running on the iPad or iPhone, you won't see anything unless you tap the screen. 2 | //If it doesn't appear to work first time, always try refreshing the browser. 3 | 4 | Maxim maxim; 5 | AudioPlayer player; 6 | float go; 7 | boolean playit; 8 | 9 | int elements = 20;// This is the number of points and lines we will calculate at once. 1000 is alot actually. 10 | 11 | void setup() { 12 | //The size is iPad Portrait. 13 | //If you want landscape, you should swap the values. 14 | size(768, 1024); 15 | 16 | frameRate(25); // this is the framerate. Tweak for performance 17 | maxim = new Maxim(this); 18 | player = maxim.loadFile("mybeat.wav"); 19 | player.setLooping(true); 20 | player.setAnalysing(true); 21 | noStroke(); 22 | rectMode(CENTER); 23 | background(0); 24 | colorMode(HSB); 25 | } 26 | 27 | void draw() { 28 | 29 | if (playit) { 30 | 31 | player.play(); 32 | float power = player.getAveragePower(); 33 | //go+=power*50; 34 | go = power * 50; 35 | translate(width/2, height/2);// we translate the whole sketch to the centre of the screen, so 0,0 is in the middle. 36 | for (int i = elements; i > 0;i--) { 37 | fill((5*i+go)%255, power*512, 255); // this for loop calculates the x and y position for each node in the system and draws a line between it and the next. 38 | ellipse((mouseX-(width/2))*(elements-i)/elements, (mouseY-(height/2))*(elements-i)/elements, width*1.5/elements*i, height*1.5/elements*i); 39 | } 40 | player.speed((float) mouseX / (float) width); 41 | } 42 | } 43 | 44 | void mousePressed() { 45 | 46 | playit = !playit; 47 | 48 | if (playit) { 49 | 50 | player.play(); 51 | } 52 | else { 53 | 54 | player.stop(); 55 | } 56 | } 57 | 58 | -------------------------------------------------------------------------------- /AndroidTemplate/web-export/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | AndroidTemplate : Built with Processing and Processing.js 5 | 6 | 7 | 15 | 46 | 49 | 50 | 54 | 55 | 56 | 57 |
58-75 | [page body markup stripped in this export; the remaining visible text was the canvas fallback message "Your browser does not support the canvas tag.", the sketch title "AndroidTemplate", a "Source code: AndroidTemplate" link, and the credit "Built with Processing and Processing.js"]
76 | 77 | 78 | -------------------------------------------------------------------------------- /JavaScriptTemplate/template/template.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | @@sketch@@ : Built with Processing and Processing.js 6 | 7 | 8 | 9 | 10 | 18 | 71 | 74 | 75 | @@scripts@@ 76 | 77 | 78 |
79-88 | [template body markup stripped in this export; the only remaining visible text was the canvas fallback message "Your browser does not support the canvas tag."]
89 | 90 | 91 | -------------------------------------------------------------------------------- /JavaScriptTemplate/web-export/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | JavaScriptTemplate : Built with Processing and Processing.js 6 | 7 | 8 | 9 | 10 | 18 | 71 | 74 | 75 | 79 | 80 | 81 | 82 | 83 |
84-93 | [page body markup stripped in this export; the only remaining visible text was the canvas fallback message "Your browser does not support the canvas tag."]
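The exported pages above do nothing more than host the sketch in a canvas via Processing.js; the audio behaviour comes from the Maxim API used in the template sketches and implemented in maxim.js (the next file) and android_api.pde (further below). As a quick orientation, here is a minimal sketch of that usage — illustrative only, not a file from the repository — restricted to calls that appear verbatim in those files, and assuming the mybeat.wav sample from the AndroidTemplate data folder:

Maxim maxim;
AudioPlayer player;

void setup() {
  size(640, 960);
  background(0);
  maxim = new Maxim(this);                // start the audio engine for this sketch
  player = maxim.loadFile("mybeat.wav");  // sample shipped in the template's data folder
  player.setLooping(true);
}

void draw() {
  // mouseX scrubs the playback speed (0..1, as AndroidTemplate.pde does);
  // mouseY fades the volume from 1 at the top of the screen to 0 at the bottom
  player.speed((float) mouseX / (float) width);
  player.volume(map(mouseY, 0, height, 1, 0));
}

void mousePressed() {
  // toggle playback on tap / click
  if (player.isPlaying()) player.stop();
  else player.play();
}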
94 | 95 | 96 | -------------------------------------------------------------------------------- /JavaScriptTemplate/web-export/maxim.js: -------------------------------------------------------------------------------- 1 | /* 2 | The MIT License (MIT) 3 | 4 | Copyright (c) 2013 Mick Grierson, Matthew Yee-King, Marco Gillies 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy
of 7 | this software and associated documentation files (the "Software"), to 8 | deal
in the Software without restriction, including without limitation 9 | the rights
to use, copy, modify, merge, publish, distribute, sublicense, 10 | and/or sell
copies of the Software, and to permit persons to whom the 11 | Software is
furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included 14 | in
all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE. 17 | */ 18 | 19 | var mtof = [0, 8.661957, 9.177024, 9.722718, 10.3, 10.913383, 11.562325, 12.25, 12.978271, 13.75, 14.567617, 15.433853, 16.351599, 17.323914, 18.354048, 19.445436, 20.601723, 21.826765, 23.124651, 24.5, 25.956543, 27.5, 29.135235, 30.867706, 32.703197, 34.647827, 36.708096, 38.890873, 41.203445, 43.65353, 46.249302, 49., 51.913086, 55., 58.27047, 61.735413, 65.406395, 69.295654, 73.416191, 77.781746, 82.406891, 87.30706, 92.498604, 97.998856, 103.826172, 110., 116.540939, 123.470825, 130.81279, 138.591309, 146.832382, 155.563492, 164.813782, 174.61412, 184.997208, 195.997711, 207.652344, 220., 233.081879, 246.94165, 261.62558, 277.182617, 293.664764, 311.126984, 329.627563, 349.228241, 369.994415, 391.995422, 415.304688, 440., 466.163757, 493.883301, 523.25116, 554.365234, 587.329529, 622.253967, 659.255127, 698.456482, 739.988831, 783.990845, 830.609375, 880., 932.327515, 987.766602, 1046.502319, 1108.730469, 1174.659058, 1244.507935, 1318.510254, 1396.912964, 1479.977661, 1567.981689, 1661.21875, 1760., 1864.655029, 1975.533203, 2093.004639, 2217.460938, 2349.318115, 2489.015869, 2637.020508, 2793.825928, 2959.955322, 3135.963379, 3322.4375, 3520., 3729.31, 3951.066406, 4186.009277, 4434.921875, 4698.63623, 4978.031738, 5274.041016, 5587.651855, 5919.910645, 6271.926758, 6644.875, 7040., 7458.620117, 7902.132812, 8372.018555, 8869.84375, 9397.272461, 9956.063477, 10548.082031, 11175.303711, 11839.821289, 12543.853516, 13289.75]; 20 | var context = new webkitAudioContext(); 21 | 22 | function Maxim(t) { 23 | 24 | this.loadFile = function(filename) { 25 | var audio = new XMLHttpRequest(); 26 | var source = null; 27 | var myAudioBuffer = null; 28 | var playing=false; 29 | var isLooping=false; 30 | var startTime=0; 31 | var endTime = 0; 32 | var currentSpeed = 1.0; 33 | var sampleLength = 1.0; 34 | var volume = 1.0; 35 | var gainNode = null; 36 | var filter = null; 37 | var analyser = null; 38 | var analysing = true; 39 | var attack = 0; 40 | var release = 0; 41 | var envTime = 0; 42 | var flux = 0; 43 | var averageSpectrumPower = 0; 44 | var FFTData = null; 45 | audio.open('GET', filename, true); 46 | audio.responseType = 'arraybuffer'; 47 | audio.onload = function() { 48 | // alert("sound loaded"); //test 49 | context.decodeAudioData(audio.response, function(buffer) { 50 | myAudioBuffer = buffer; 51 | // alert("sound decoded"); //test 52 | source = context.createBufferSource(); 53 | gainNode = context.createGainNode(); 54 | filter = context.createBiquadFilter(); 55 | analyser = context.createAnalyser(); 56 | filter.type = 0; 57 | filter.frequency.value = 20000; 58 | envTime = 1.0; 59 | source.buffer = myAudioBuffer; 60 | source.playbackRate.value = currentSpeed; 61 | source.connect(filter); 62 | filter.connect(gainNode); 63 | gainNode.gain.value = volume; 64 | gainNode.connect(context.destination); 65 | sampleLength = source.buffer.duration*1000; 66 | } 67 | ); 68 | } 69 | 70 | audio.send(); 71 | audio.isPlaying = function() { 72 | 73 | return playing; 74 | } 75 | 76 | audio.setLooping = function(t) { 77 | isLooping=t; 78 | } 79 | 80 | audio.cue = function(time) { 81 | 82 | startTime=time/1000; 83 | } 84 | 85 | audio.speed = function(speed) { 86 | if (source) { 87 | 88 | currentSpeed = speed; 89 | 90 | source.playbackRate.value = speed; 91 | } 92 | } 93 | 94 | audio.getLengthMs = function() { 95 | if (source) { 96 | // alert(source.buffer.duration*1000); 97 | return sampleLength; 98 | } 99 | } 100 | 101 | audio.volume = function(gain) { 102 | 103 | 
volume=gain; 104 | 105 | if (playing) { 106 | gainNode.gain.value = volume; 107 | } 108 | } 109 | 110 | audio.play = function() { 111 | if (source) { 112 | if (!playing) { 113 | source = context.createBufferSource(); 114 | gainNode = context.createGainNode() 115 | filter = context.createBiquadFilter(); 116 | filter.type = 0; 117 | filter.frequency.value = 20000; 118 | envTime = 1.0; 119 | source.buffer = myAudioBuffer; 120 | source.playbackRate.value = currentSpeed; 121 | sampleLength = source.buffer.duration*1000; 122 | source.connect(filter); 123 | filter.connect(gainNode); 124 | gainNode.connect(context.destination); 125 | gainNode.gain.value = volume; 126 | // alert("source connected"); //test 127 | if (isLooping) source.loop = true; 128 | // source.loopStart = startTime/1000; 129 | // source.loopEnd = source.buffer.duration; 130 | source.noteGrainOn(0, startTime, source.buffer.duration-startTime); 131 | playing=true; 132 | } 133 | if (analysing==true) { 134 | gainNode.connect(analyser); 135 | FFTData = new Float32Array(analyser.frequencyBinCount); 136 | analyser.getFloatFrequencyData(FFTData); 137 | } 138 | } 139 | } 140 | 141 | audio.stop = function() { 142 | if (source) { 143 | source.noteOff(0); 144 | playing=false; 145 | } 146 | } 147 | 148 | audio.setFilter = function(freq, res) { 149 | 150 | filter.frequency.value = freq; 151 | filter.Q.value = res; 152 | } 153 | 154 | audio.filterRamp = function(freq, envTime) { 155 | 156 | filter.frequency.cancelScheduledValues(context.currentTime); 157 | filter.frequency.linearRampToValueAtTime(filter.frequency.value, context.currentTime); // THIS IS THE CHANGE FROM PREVIOUS CODE EXAMPLE 158 | filter.frequency.linearRampToValueAtTime(freq, context.currentTime + envTime/1000.); 159 | } 160 | 161 | //This function allows you to set the amplitude of the waveform 162 | audio.setAmplitude = function(amplitude) { 163 | 164 | gainNode.gain.cancelScheduledValues(context.currentTime); 165 | gainNode.gain.linearRampToValueAtTime(gainNode.gain.value, context.currentTime); 166 | gainNode.gain.linearRampToValueAtTime(amplitude, context.currentTime + 10); 167 | } 168 | 169 | audio.ramp = function(amplitude, envTime) { 170 | 171 | gainNode.gain.cancelScheduledValues(context.currentTime); 172 | gainNode.gain.linearRampToValueAtTime(gainNode.gain.value, context.currentTime); 173 | gainNode.gain.linearRampToValueAtTime(amplitude, context.currentTime + envTime/1000.); 174 | } 175 | 176 | audio.getAveragePower = function() { 177 | averageSpectrumPower = 0 178 | for (var i=0;i= (this.waveFormSize-3) ) this.phase -= (this.waveFormSize-2) ; 255 | remainder = this.phase - Math.floor(this.phase); 256 | data[i]=(1-remainder) * this.wave[1+Math.floor(this.phase)] + remainder * this.wave[2+Math.floor(this.phase)]; 257 | } 258 | // console.log('data = ' + this.frequency); 259 | } 260 | 261 | //This function allows you to 'play' the waveform 262 | Synth.prototype.play = function() { 263 | this.node.connect(this.filter); 264 | this.filter.connect(this.gainNode); 265 | this.gainNode.connect(this.context.destination); 266 | this.gainNode.connect(this.delay); 267 | this.delay.connect(this.delayGain); 268 | this.delayGain.connect(this.delay); 269 | this.delay.connect(this.context.destination); 270 | this.isPlaying=true; 271 | } 272 | 273 | //This function allows you to set the frequency of the waveform 274 | Synth.prototype.setFrequency = function(frequency) { 275 | this.frequency = frequency; 276 | } 277 | 278 | //This function allows you to set the amplitude of the waveform 
279 | Synth.prototype.setAmplitude = function(amplitude) { 280 | 281 | this.gainNode.gain.cancelScheduledValues(context.currentTime); 282 | this.gainNode.gain.linearRampToValueAtTime(this.gainNode.gain.value, context.currentTime); 283 | this.gainNode.gain.linearRampToValueAtTime(amplitude, context.currentTime + 10); 284 | } 285 | 286 | Synth.prototype.ramp = function(amplitude, envTime) { 287 | 288 | this.gainNode.gain.cancelScheduledValues(context.currentTime); 289 | this.gainNode.gain.linearRampToValueAtTime(this.gainNode.gain.value, context.currentTime); 290 | this.gainNode.gain.linearRampToValueAtTime(amplitude, context.currentTime + envTime/1000.); 291 | } 292 | 293 | //This allows us to stop the waveform generator 294 | Synth.prototype.stop = function() { 295 | this.node.disconnect(); 296 | this.isPlaying=false; 297 | } 298 | 299 | Synth.prototype.setDelayTime = function(t) { 300 | 301 | this.delay.delayTime.value = t; 302 | 303 | } 304 | 305 | Synth.prototype.setDelayAmount = function(t) { 306 | 307 | this.delayGain.gain.value = t; 308 | 309 | // this.delayGain.gain.cancelScheduledValues(context.currentTime); 310 | // this.delayGain.gain.linearRampToValueAtTime(this.delayGain.gain.value, context.currentTime); 311 | // this.delayGain.gain.linearRampToValueAtTime(this.delayGain.gain.value, context.currentTime,100); 312 | 313 | } 314 | 315 | Synth.prototype.setFilter = function(freq, res) { 316 | 317 | this.filter.frequency.value = freq; 318 | this.filter.Q.value = res; 319 | } 320 | 321 | Synth.prototype.filterRamp = function(freq, envTime) { 322 | 323 | this.filter.frequency.cancelScheduledValues(context.currentTime); 324 | this.filter.frequency.linearRampToValueAtTime(this.filter.frequency.value, context.currentTime); // THIS IS THE CHANGE FROM PREVIOUS CODE EXAMPLE 325 | this.filter.frequency.linearRampToValueAtTime(freq, context.currentTime + envTime/1000.); 326 | // this.filter.frequency.value = freq; 327 | // this.filter.Q.value = res; 328 | } 329 | -------------------------------------------------------------------------------- /JavaScriptTemplate/maxim.js: -------------------------------------------------------------------------------- 1 | /* 2 | * The MIT License (MIT) 3 | * 4 | * Copyright (c) 2013 Mick Grierson, Matthew Yee-King, Marco Gillies 5 | * 6 | * Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions: 7 | * The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software. 8 | 9 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE. 10 | */ 11 | 12 | 13 | var mtof = [0, 8.661957, 9.177024, 9.722718, 10.3, 10.913383, 11.562325, 12.25, 12.978271, 13.75, 14.567617, 15.433853, 16.351599, 17.323914, 18.354048, 19.445436, 20.601723, 21.826765, 23.124651, 24.5, 25.956543, 27.5, 29.135235, 30.867706, 32.703197, 34.647827, 36.708096, 38.890873, 41.203445, 43.65353, 46.249302, 49., 51.913086, 55., 58.27047, 61.735413, 65.406395, 69.295654, 73.416191, 77.781746, 82.406891, 87.30706, 92.498604, 97.998856, 103.826172, 110., 116.540939, 123.470825, 130.81279, 138.591309, 146.832382, 155.563492, 164.813782, 174.61412, 184.997208, 195.997711, 207.652344, 220., 233.081879, 246.94165, 261.62558, 277.182617, 293.664764, 311.126984, 329.627563, 349.228241, 369.994415, 391.995422, 415.304688, 440., 466.163757, 493.883301, 523.25116, 554.365234, 587.329529, 622.253967, 659.255127, 698.456482, 739.988831, 783.990845, 830.609375, 880., 932.327515, 987.766602, 1046.502319, 1108.730469, 1174.659058, 1244.507935, 1318.510254, 1396.912964, 1479.977661, 1567.981689, 1661.21875, 1760., 1864.655029, 1975.533203, 2093.004639, 2217.460938, 2349.318115, 2489.015869, 2637.020508, 2793.825928, 2959.955322, 3135.963379, 3322.4375, 3520., 3729.31, 3951.066406, 4186.009277, 4434.921875, 4698.63623, 4978.031738, 5274.041016, 5587.651855, 5919.910645, 6271.926758, 6644.875, 7040., 7458.620117, 7902.132812, 8372.018555, 8869.84375, 9397.272461, 9956.063477, 10548.082031, 11175.303711, 11839.821289, 12543.853516, 13289.75]; 14 | var context = new webkitAudioContext() || new AudioContext; 15 | 16 | function Maxim(t) { 17 | 18 | this.loadFile = function(filename) { 19 | var audio = new XMLHttpRequest(); 20 | var source = null; 21 | var myAudioBuffer = null; 22 | var playing=false; 23 | var isLooping=false; 24 | var startTime=0; 25 | var endTime = 0; 26 | var currentSpeed = 1.0; 27 | var sampleLength = 1.0; 28 | var volume = 1.0; 29 | var gainNode = null; 30 | var filter = null; 31 | var analyser = null; 32 | var analysing = true; 33 | var attack = 0; 34 | var release = 0; 35 | var envTime = 0; 36 | var flux = 0; 37 | var averageSpectrumPower = 0; 38 | var FFTData = null; 39 | audio.open('GET', filename, true); 40 | audio.responseType = 'arraybuffer'; 41 | audio.onload = function() { 42 | // alert("sound loaded"); //test 43 | context.decodeAudioData(audio.response, function(buffer) { 44 | myAudioBuffer = buffer; 45 | // alert("sound decoded"); //test 46 | source = context.createBufferSource(); 47 | gainNode = context.createGain(); 48 | filter = context.createBiquadFilter(); 49 | analyser = context.createAnalyser(); 50 | filter.type = "lowpass"; 51 | filter.frequency.value = 20000; 52 | envTime = 1.0; 53 | source.buffer = myAudioBuffer; 54 | source.playbackRate.value = currentSpeed; 55 | source.connect(filter); 56 | filter.connect(gainNode); 57 | gainNode.gain.value = volume; 58 | gainNode.connect(context.destination); 59 | sampleLength = source.buffer.duration*1000; 60 | } 61 | ); 62 | } 63 | 64 | audio.send(); 65 | audio.isPlaying = function() { 66 | 67 | return playing; 68 | } 69 | 70 | audio.setLooping = function(t) { 71 | isLooping=t; 72 | } 73 | 74 | audio.cue = function(time) { 75 | 76 | audio.stop(); 77 | startTime=time/1000; 78 | } 79 | 80 | audio.speed = function(speed) { 81 | if (source) { 82 | 83 | currentSpeed = speed; 84 | 85 | source.playbackRate.value = speed; 86 | } 87 | } 88 | 89 | audio.getLengthMs = function() { 90 | if (source) { 91 | // alert(source.buffer.duration*1000); 92 | return sampleLength; 93 | } 94 | } 95 | 96 | 
audio.setAnalysing = function(analysing_) { 97 | //this.analysing = analysing_; 98 | } 99 | 100 | audio.volume = function(gain) { 101 | 102 | volume=gain; 103 | 104 | if (playing) { 105 | gainNode.gain.value = volume; 106 | } 107 | } 108 | 109 | audio.play = function() { 110 | if (source && !playing) { 111 | source = context.createBufferSource(); 112 | gainNode = context.createGain() 113 | filter = context.createBiquadFilter(); 114 | filter.type = "lowpass"; 115 | filter.frequency.value = 20000; 116 | envTime = 1.0; 117 | source.buffer = myAudioBuffer; 118 | source.playbackRate.value = currentSpeed; 119 | sampleLength = source.buffer.duration*1000; 120 | source.connect(filter); 121 | filter.connect(gainNode); 122 | gainNode.connect(context.destination); 123 | gainNode.gain.value = volume; 124 | // alert("source connected"); //test 125 | if (isLooping) source.loop = true; 126 | // source.loopStart = startTime/1000; 127 | // source.loopEnd = source.buffer.duration; 128 | source.start(0, startTime, source.buffer.duration-startTime); 129 | playing=true; 130 | } 131 | if (analysing==true && playing) { 132 | gainNode.connect(analyser); 133 | FFTData = new Float32Array(analyser.frequencyBinCount); 134 | analyser.getFloatFrequencyData(FFTData); 135 | } 136 | } 137 | 138 | audio.stop = function() { 139 | if (source) { 140 | source.stop(0); 141 | playing=false; 142 | } 143 | } 144 | 145 | audio.setFilter = function(freq, res) { 146 | 147 | filter.frequency.value = freq; 148 | filter.Q.value = res; 149 | } 150 | 151 | audio.filterRamp = function(freq, envTime) { 152 | 153 | filter.frequency.cancelScheduledValues(context.currentTime); 154 | filter.frequency.linearRampToValueAtTime(filter.frequency.value, context.currentTime); // THIS IS THE CHANGE FROM PREVIOUS CODE EXAMPLE 155 | filter.frequency.linearRampToValueAtTime(freq, context.currentTime + envTime/1000.); 156 | } 157 | 158 | //This function allows you to set the amplitude of the waveform 159 | audio.setAmplitude = function(amplitude) { 160 | 161 | gainNode.gain.cancelScheduledValues(context.currentTime); 162 | gainNode.gain.linearRampToValueAtTime(gainNode.gain.value, context.currentTime); 163 | gainNode.gain.linearRampToValueAtTime(amplitude, context.currentTime + 10); 164 | } 165 | 166 | audio.ramp = function(amplitude, envTime) { 167 | 168 | gainNode.gain.cancelScheduledValues(context.currentTime); 169 | gainNode.gain.linearRampToValueAtTime(gainNode.gain.value, context.currentTime); 170 | gainNode.gain.linearRampToValueAtTime(amplitude, context.currentTime + envTime/1000.); 171 | } 172 | 173 | audio.getAveragePower = function() { 174 | if (source) { 175 | averageSpectrumPower = 0 176 | for (var i=0;i= (this.waveFormSize-3) ) this.phase -= (this.waveFormSize-2) ; 262 | remainder = this.phase - Math.floor(this.phase); 263 | data[i]=(1-remainder) * this.wave[1+Math.floor(this.phase)] + remainder * this.wave[2+Math.floor(this.phase)]; 264 | } 265 | // console.log('data = ' + this.frequency); 266 | } 267 | 268 | //This function allows you to 'play' the waveform 269 | Synth.prototype.play = function() { 270 | this.node.connect(this.filter); 271 | this.filter.connect(this.gainNode); 272 | this.gainNode.connect(this.context.destination); 273 | this.gainNode.connect(this.delay); 274 | this.delay.connect(this.delayGain); 275 | this.delayGain.connect(this.delay); 276 | this.delay.connect(this.context.destination); 277 | this.isPlaying=true; 278 | } 279 | 280 | //This function allows you to set the frequency of the waveform 281 | 
Synth.prototype.setFrequency = function(frequency) { 282 | this.frequency = frequency; 283 | } 284 | 285 | //This function allows you to set the amplitude of the waveform 286 | Synth.prototype.setAmplitude = function(amplitude) { 287 | 288 | this.gainNode.gain.cancelScheduledValues(context.currentTime); 289 | this.gainNode.gain.linearRampToValueAtTime(this.gainNode.gain.value, context.currentTime); 290 | this.gainNode.gain.linearRampToValueAtTime(amplitude, context.currentTime + 10); 291 | } 292 | 293 | Synth.prototype.ramp = function(amplitude, envTime) { 294 | 295 | this.gainNode.gain.cancelScheduledValues(context.currentTime); 296 | this.gainNode.gain.linearRampToValueAtTime(this.gainNode.gain.value, context.currentTime); 297 | this.gainNode.gain.linearRampToValueAtTime(amplitude, context.currentTime + envTime/1000.); 298 | } 299 | 300 | //This allows us to stop the waveform generator 301 | Synth.prototype.stop = function() { 302 | this.node.disconnect(); 303 | this.isPlaying=false; 304 | } 305 | 306 | Synth.prototype.setDelayTime = function(t) { 307 | 308 | this.delay.delayTime.value = t; 309 | } 310 | 311 | Synth.prototype.setDelayAmount = function(t) { 312 | 313 | this.delayGain.gain.value = t; 314 | 315 | // this.delayGain.gain.cancelScheduledValues(context.currentTime); 316 | // this.delayGain.gain.linearRampToValueAtTime(this.delayGain.gain.value, context.currentTime); 317 | // this.delayGain.gain.linearRampToValueAtTime(this.delayGain.gain.value, context.currentTime,100); 318 | } 319 | 320 | Synth.prototype.setFilter = function(freq, res) { 321 | 322 | this.filter.frequency.value = freq; 323 | this.filter.Q.value = res; 324 | } 325 | 326 | Synth.prototype.filterRamp = function(freq, envTime) { 327 | 328 | this.filter.frequency.cancelScheduledValues(context.currentTime); 329 | this.filter.frequency.linearRampToValueAtTime(this.filter.frequency.value, context.currentTime); // THIS IS THE CHANGE FROM PREVIOUS CODE EXAMPLE 330 | this.filter.frequency.linearRampToValueAtTime(freq, context.currentTime + envTime/1000.); 331 | // this.filter.frequency.value = freq; 332 | // this.filter.Q.value = res; 333 | } 334 | 335 | -------------------------------------------------------------------------------- /AndroidTemplate/android_api.pde: -------------------------------------------------------------------------------- 1 | /* 2 | The MIT License (MIT) 3 | 4 | Copyright (c) 2013 Mick Grierson, Matthew Yee-King, Marco Gillies 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy
of 7 | this software and associated documentation files (the "Software"), to 8 | deal
in the Software without restriction, including without limitation 9 | the rights
to use, copy, modify, merge, publish, distribute, sublicense, 10 | and/or sell
copies of the Software, and to permit persons to whom the 11 | Software is
furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included 14 | in
all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE. 17 | */ 18 | 19 | 20 | import java.io.File; 21 | import java.io.FileInputStream; 22 | import java.io.FileNotFoundException; 23 | import java.io.IOException; 24 | import java.io.BufferedInputStream; 25 | import java.net.MalformedURLException; 26 | import java.net.URL; 27 | //import android.content.res.Resources; 28 | import android.app.Activity; 29 | import android.os.Bundle; 30 | import android.media.*; 31 | import android.media.audiofx.Visualizer; 32 | import android.content.res.AssetFileDescriptor; 33 | import android.hardware.*; 34 | 35 | 36 | public class Maxim { 37 | 38 | private float sampleRate = 44100; 39 | 40 | public final float[] mtof = { 41 | 0, 8.661957, 9.177024, 9.722718, 10.3, 10.913383, 11.562325, 12.25, 12.978271, 13.75, 14.567617, 15.433853, 16.351599, 17.323914, 18.354048, 19.445436, 20.601723, 21.826765, 23.124651, 24.5, 25.956543, 27.5, 29.135235, 30.867706, 32.703197, 34.647827, 36.708096, 38.890873, 41.203445, 43.65353, 46.249302, 49., 51.913086, 55., 58.27047, 61.735413, 65.406395, 69.295654, 73.416191, 77.781746, 82.406891, 87.30706, 92.498604, 97.998856, 103.826172, 110., 116.540939, 123.470825, 130.81279, 138.591309, 146.832382, 155.563492, 164.813782, 174.61412, 184.997208, 195.997711, 207.652344, 220., 233.081879, 246.94165, 261.62558, 277.182617, 293.664764, 311.126984, 329.627563, 349.228241, 369.994415, 391.995422, 415.304688, 440., 466.163757, 493.883301, 523.25116, 554.365234, 587.329529, 622.253967, 659.255127, 698.456482, 739.988831, 783.990845, 830.609375, 880., 932.327515, 987.766602, 1046.502319, 1108.730469, 1174.659058, 1244.507935, 1318.510254, 1396.912964, 1479.977661, 1567.981689, 1661.21875, 1760., 1864.655029, 1975.533203, 2093.004639, 2217.460938, 2349.318115, 2489.015869, 2637.020508, 2793.825928, 2959.955322, 3135.963379, 3322.4375, 3520., 3729.31, 3951.066406, 4186.009277, 4434.921875, 4698.63623, 4978.031738, 5274.041016, 5587.651855, 5919.910645, 6271.926758, 6644.875, 7040., 7458.620117, 7902.132812, 8372.018555, 8869.84375, 9397.272461, 9956.063477, 10548.082031, 11175.303711, 11839.821289, 12543.853516, 13289.75 42 | }; 43 | 44 | private AndroidAudioThread audioThread; 45 | 46 | public Maxim (PApplet app) { 47 | audioThread = new AndroidAudioThread(sampleRate, 256, false); 48 | audioThread.start(); 49 | } 50 | 51 | public float[] getPowerSpectrum() { 52 | return audioThread.getPowerSpectrum(); 53 | } 54 | 55 | /** 56 | * load the sent file into an audio player and return it. Use 57 | * this if your audio file is not too long want precision control 58 | * over looping and play head position 59 | * @param String filename - the file to load 60 | * @return AudioPlayer - an audio player which can play the file 61 | */ 62 | public AudioPlayer loadFile(String filename) { 63 | // this will load the complete audio file into memory 64 | AudioPlayer ap = new AudioPlayer(filename, sampleRate); 65 | audioThread.addAudioGenerator(ap); 66 | // now we need to tell the audiothread 67 | // to ask the audioplayer for samples 68 | return ap; 69 | } 70 | 71 | /** 72 | * Create a wavetable player object with a wavetable of the sent 73 | * size. Small wavetables (<128) make for a 'nastier' sound! 
74 | * 75 | */ 76 | public WavetableSynth createWavetableSynth(int size) { 77 | // this will load the complete audio file into memory 78 | WavetableSynth ap = new WavetableSynth(size, sampleRate); 79 | audioThread.addAudioGenerator(ap); 80 | // now we need to tell the audiothread 81 | // to ask the audioplayer for samples 82 | return ap; 83 | } 84 | /** 85 | * Create an AudioStreamPlayer which can stream audio from the 86 | * internet as well as local files. Does not provide precise 87 | * control over looping and playhead like AudioPlayer does. Use this for 88 | * longer audio files and audio from the internet. 89 | */ 90 | public AudioStreamPlayer createAudioStreamPlayer(String url) { 91 | AudioStreamPlayer asp = new AudioStreamPlayer(url); 92 | return asp; 93 | } 94 | } 95 | 96 | 97 | 98 | 99 | /** 100 | * This class can play audio files and includes an fx chain 101 | */ 102 | public class AudioPlayer implements Synth, AudioGenerator { 103 | private FXChain fxChain; 104 | private boolean isPlaying; 105 | private boolean isLooping; 106 | private boolean analysing; 107 | private FFT fft; 108 | private int fftInd; 109 | private float[] fftFrame; 110 | private float[] powerSpectrum; 111 | 112 | //private float startTimeSecs; 113 | //private float speed; 114 | private int length; 115 | private short[] audioData; 116 | private float startPos; 117 | private float readHead; 118 | private float dReadHead; 119 | private float sampleRate; 120 | private float masterVolume; 121 | 122 | float x1, x2, y1, y2, x3, y3; 123 | 124 | public AudioPlayer(float sampleRate) { 125 | this.sampleRate = sampleRate; 126 | fxChain = new FXChain(sampleRate); 127 | } 128 | 129 | public AudioPlayer (String filename, float sampleRate) { 130 | //super(filename); 131 | this(sampleRate); 132 | try { 133 | // how long is the file in bytes? 134 | long byteCount = getAssets().openFd(filename).getLength(); 135 | //System.out.println("bytes in "+filename+" "+byteCount); 136 | 137 | // check the format of the audio file first! 138 | // only accept mono 16 bit wavs 139 | InputStream is = getAssets().open(filename); 140 | BufferedInputStream bis = new BufferedInputStream(is); 141 | 142 | // chop!! 143 | 144 | int bitDepth; 145 | int channels; 146 | boolean isPCM; 147 | // allows us to read up to 4 bytes at a time 148 | byte[] byteBuff = new byte[4]; 149 | 150 | // skip 20 bytes to get file format 151 | // (1 byte) 152 | bis.skip(20); 153 | bis.read(byteBuff, 0, 2); // read 2 so we are at 22 now 154 | isPCM = ((short)byteBuff[0]) == 1 ? true:false; 155 | //System.out.println("File isPCM "+isPCM); 156 | 157 | // skip 22 bytes to get # channels 158 | // (1 byte) 159 | bis.read(byteBuff, 0, 2);// read 2 so we are at 24 now 160 | channels = (short)byteBuff[0]; 161 | //System.out.println("#channels "+channels+" "+byteBuff[0]); 162 | // skip 24 bytes to get sampleRate 163 | // (32 bit int) 164 | bis.read(byteBuff, 0, 4); // read 4 so now we are at 28 165 | sampleRate = bytesToInt(byteBuff, 4); 166 | //System.out.println("Sample rate "+sampleRate); 167 | // skip 34 bytes to get bits per sample 168 | // (1 byte) 169 | bis.skip(6); // we were at 28... 170 | bis.read(byteBuff, 0, 2);// read 2 so we are at 36 now 171 | bitDepth = (short)byteBuff[0]; 172 | //System.out.println("bit depth "+bitDepth); 173 | // convert to word count... 
174 | bitDepth /= 8; 175 | // now start processing the raw data 176 | // data starts at byte 36 177 | int sampleCount = (int) ((byteCount - 36) / (bitDepth * channels)); 178 | audioData = new short[sampleCount]; 179 | int skip = (channels -1) * bitDepth; 180 | int sample = 0; 181 | // skip a few sample as it sounds like shit 182 | bis.skip(bitDepth * 4); 183 | while (bis.available () >= (bitDepth+skip)) { 184 | bis.read(byteBuff, 0, bitDepth);// read 2 so we are at 36 now 185 | //int val = bytesToInt(byteBuff, bitDepth); 186 | // resample to 16 bit by casting to a short 187 | audioData[sample] = (short) bytesToInt(byteBuff, bitDepth); 188 | bis.skip(skip); 189 | sample ++; 190 | } 191 | 192 | float secs = (float)sample / (float)sampleRate; 193 | //System.out.println("Read "+sample+" samples expected "+sampleCount+" time "+secs+" secs "); 194 | bis.close(); 195 | 196 | 197 | // unchop 198 | readHead = 0; 199 | startPos = 0; 200 | // default to 1 sample shift per tick 201 | dReadHead = 1; 202 | isPlaying = false; 203 | isLooping = true; 204 | masterVolume = 1; 205 | } 206 | catch (FileNotFoundException e) { 207 | 208 | e.printStackTrace(); 209 | } 210 | catch (IOException e) { 211 | e.printStackTrace(); 212 | } 213 | } 214 | 215 | public void setAnalysing(boolean analysing_) { 216 | this.analysing = analysing_; 217 | if (analysing) {// initialise the fft 218 | fft = new FFT(); 219 | fftInd = 0; 220 | fftFrame = new float[1024]; 221 | powerSpectrum = new float[fftFrame.length/2]; 222 | } 223 | } 224 | 225 | public float getAveragePower() { 226 | if (analysing) { 227 | // calc the average 228 | float sum = 0; 229 | for (int i=0;i=0; i--) { 259 | val <<= 8; 260 | val |= (int)bytes[i] & 0xFF; 261 | } 262 | return val; 263 | } 264 | 265 | /** 266 | * Test if this audioplayer is playing right now 267 | * @return true if it is playing, false otherwise 268 | */ 269 | public boolean isPlaying() { 270 | return isPlaying; 271 | } 272 | 273 | /** 274 | * Set the loop mode for this audio player 275 | * @param looping 276 | */ 277 | public void setLooping(boolean looping) { 278 | isLooping = looping; 279 | } 280 | 281 | /** 282 | * Move the start pointer of the audio player to the sent time in ms 283 | * @param timeMs - the time in ms 284 | */ 285 | public void cue(int timeMs) { 286 | //startPos = ((timeMs / 1000) * sampleRate) % audioData.length; 287 | //readHead = startPos; 288 | //println("AudioPlayer Cueing to "+timeMs); 289 | if (timeMs >= 0) {// ignore crazy values 290 | readHead = (((float)timeMs / 1000f) * sampleRate) % audioData.length; 291 | //println("Read head went to "+readHead); 292 | } 293 | } 294 | 295 | /** 296 | * Set the playback speed, 297 | * @param speed - playback speed where 1 is normal speed, 2 is double speed 298 | */ 299 | public void speed(float speed) { 300 | //println("setting speed to "+speed); 301 | dReadHead = speed; 302 | } 303 | 304 | /** 305 | * Set the master volume of the AudioPlayer 306 | */ 307 | 308 | public void volume(float volume) { 309 | masterVolume = volume; 310 | } 311 | 312 | /** 313 | * Get the length of the audio file in samples 314 | * @return int - the length of the audio file in samples 315 | */ 316 | public int getLength() { 317 | return audioData.length; 318 | } 319 | /** 320 | * Get the length of the sound in ms, suitable for sending to 'cue' 321 | */ 322 | public float getLengthMs() { 323 | return (audioData.length / sampleRate * 1000); 324 | } 325 | 326 | /** 327 | * Start playing the sound. 
328 | */ 329 | public void play() { 330 | isPlaying = true; 331 | } 332 | 333 | /** 334 | * Stop playing the sound 335 | */ 336 | public void stop() { 337 | isPlaying = false; 338 | } 339 | 340 | /** 341 | * implementation of the AudioGenerator interface 342 | */ 343 | public short getSample() { 344 | if (!isPlaying) { 345 | return 0; 346 | } 347 | else { 348 | short sample; 349 | readHead += dReadHead; 350 | if (readHead > (audioData.length - 1)) {// got to the end 351 | //% (float)audioData.length; 352 | if (isLooping) {// back to the start for loop mode 353 | readHead = readHead % (float)audioData.length; 354 | } 355 | else { 356 | readHead = 0; 357 | isPlaying = false; 358 | } 359 | } 360 | 361 | // linear interpolation here 362 | // declaring these at the top... 363 | // easy to understand version... 364 | // float x1, x2, y1, y2, x3, y3; 365 | x1 = floor(readHead); 366 | x2 = x1 + 1; 367 | y1 = audioData[(int)x1]; 368 | y2 = audioData[(int) (x2 % audioData.length)]; 369 | x3 = readHead; 370 | // calc 371 | y3 = y1 + ((x3 - x1) * (y2 - y1)); 372 | y3 *= masterVolume; 373 | sample = fxChain.getSample((short) y3); 374 | if (analysing) { 375 | // accumulate samples for the fft 376 | fftFrame[fftInd] = (float)sample / 32768f; 377 | fftInd ++; 378 | if (fftInd == fftFrame.length - 1) {// got a frame 379 | powerSpectrum = fft.process(fftFrame, true); 380 | fftInd = 0; 381 | } 382 | } 383 | 384 | //return sample; 385 | return (short)y3; 386 | } 387 | } 388 | 389 | public void setAudioData(short[] audioData) { 390 | this.audioData = audioData; 391 | } 392 | 393 | public short[] getAudioData() { 394 | return audioData; 395 | } 396 | 397 | public void setDReadHead(float dReadHead) { 398 | this.dReadHead = dReadHead; 399 | } 400 | 401 | /// 402 | //the synth interface 403 | // 404 | 405 | public void ramp(float val, float timeMs) { 406 | fxChain.ramp(val, timeMs); 407 | } 408 | 409 | 410 | 411 | public void setDelayTime(float delayMs) { 412 | fxChain.setDelayTime( delayMs); 413 | } 414 | 415 | public void setDelayFeedback(float fb) { 416 | fxChain.setDelayFeedback(fb); 417 | } 418 | 419 | public void setFilter(float cutoff, float resonance) { 420 | fxChain.setFilter( cutoff, resonance); 421 | } 422 | } 423 | 424 | /** 425 | * This class can play wavetables and includes an fx chain 426 | */ 427 | public class WavetableSynth extends AudioPlayer { 428 | 429 | private short[] sine; 430 | private short[] saw; 431 | private short[] wavetable; 432 | private float sampleRate; 433 | 434 | public WavetableSynth(int size, float sampleRate) { 435 | super(sampleRate); 436 | sine = new short[size]; 437 | for (float i = 0; i < sine.length; i++) { 438 | float phase; 439 | phase = TWO_PI / size * i; 440 | sine[(int)i] = (short) (sin(phase) * 32768); 441 | } 442 | saw = new short[size]; 443 | for (float i = 0; i 0) { 454 | //println("freq freq "+freq); 455 | setDReadHead((float)getAudioData().length / sampleRate * freq); 456 | } 457 | } 458 | 459 | public void loadWaveForm(float[] wavetable_) { 460 | if (wavetable == null || wavetable_.length != wavetable.length) { 461 | // only reallocate if there is a change in length 462 | wavetable = new short[wavetable_.length]; 463 | } 464 | for (int i=0;i 0) { 552 | for (int j=0;j currentAmp) { 606 | goingUp = true; 607 | } 608 | else { 609 | goingUp = false; 610 | } 611 | } 612 | 613 | 614 | public void setDelayTime(float delayMs) { 615 | } 616 | 617 | public void setDelayFeedback(float fb) { 618 | } 619 | 620 | public void volume(float volume) { 621 | } 622 | 623 | 624 | 
public short getSample(short input) { 625 | float in; 626 | in = (float) input / 32768;// -1 to 1 627 | 628 | in = filter.applyFilter(in); 629 | if (goingUp && currentAmp < targetAmp) { 630 | currentAmp += dAmp; 631 | } 632 | else if (!goingUp && currentAmp > targetAmp) { 633 | currentAmp += dAmp; 634 | } 635 | 636 | if (currentAmp > 1) { 637 | currentAmp = 1; 638 | } 639 | if (currentAmp < 0) { 640 | currentAmp = 0; 641 | } 642 | in *= currentAmp; 643 | return (short) (in * 32768); 644 | } 645 | 646 | public void setFilter(float f, float r) { 647 | filter.setFilter(f, r); 648 | } 649 | } 650 | 651 | 652 | /** 653 | * Represents an audio source is streamed as opposed to being completely loaded (as WavSource is) 654 | */ 655 | public class AudioStreamPlayer { 656 | /** a class from the android API*/ 657 | private MediaPlayer mediaPlayer; 658 | /** a class from the android API*/ 659 | private Visualizer viz; 660 | private byte[] waveformBuffer; 661 | private byte[] fftBuffer; 662 | private byte[] powerSpectrum; 663 | 664 | /** 665 | * create a stream source from the sent url 666 | */ 667 | public AudioStreamPlayer(String url) { 668 | try { 669 | mediaPlayer = new MediaPlayer(); 670 | //mp.setAuxEffectSendLevel(1); 671 | mediaPlayer.setLooping(true); 672 | 673 | // try to parse the URL... if that fails, we assume it 674 | // is a local file in the assets folder 675 | try { 676 | URL uRL = new URL(url); 677 | mediaPlayer.setDataSource(url); 678 | } 679 | catch (MalformedURLException eek) { 680 | // couldn't parse the url, assume its a local file 681 | AssetFileDescriptor afd = getAssets().openFd(url); 682 | //mp.setDataSource(afd.getFileDescriptor(),afd.getStartOffset(),afd.getLength()); 683 | mediaPlayer.setDataSource(afd.getFileDescriptor()); 684 | afd.close(); 685 | } 686 | 687 | mediaPlayer.prepare(); 688 | //mediaPlayer.start(); 689 | //println("Created audio with id "+mediaPlayer.getAudioSessionId()); 690 | viz = new Visualizer(mediaPlayer.getAudioSessionId()); 691 | viz.setEnabled(true); 692 | waveformBuffer = new byte[viz.getCaptureSize()]; 693 | fftBuffer = new byte[viz.getCaptureSize()/2]; 694 | powerSpectrum = new byte[viz.getCaptureSize()/2]; 695 | } 696 | catch (Exception e) { 697 | println("StreamSource could not be initialised. Check url... "+url+ " and that you have added the permission INTERNET, RECORD_AUDIO and MODIFY_AUDIO_SETTINGS to the manifest,"); 698 | e.printStackTrace(); 699 | } 700 | } 701 | 702 | public void play() { 703 | mediaPlayer.start(); 704 | } 705 | 706 | public int getLengthMs() { 707 | return mediaPlayer.getDuration(); 708 | } 709 | 710 | public void cue(float timeMs) { 711 | if (timeMs >= 0 && timeMs < getLengthMs()) {// ignore crazy values 712 | mediaPlayer.seekTo((int)timeMs); 713 | } 714 | } 715 | 716 | /** 717 | * Returns a recent snapshot of the power spectrum as 8 bit values 718 | */ 719 | public byte[] getPowerSpectrum() { 720 | // calculate the spectrum 721 | viz.getFft(fftBuffer); 722 | short real, imag; 723 | for (int i=2;i high resonance! 
824 | r = 1-r; 825 | // remap to appropriate ranges 826 | f = map(f, 0, sampleRate/4, 30, sampleRate / 4); 827 | r = map(r, 0, 1, 0.005, 2); 828 | 829 | println("rlpf: f "+f+" r "+r); 830 | 831 | this.freq = f * TWO_PI / sampleRate; 832 | this.reson = r; 833 | changed = true; 834 | } 835 | 836 | public float applyFilter(float in) { 837 | float y0; 838 | if (changed) { 839 | float D = tan(freq * reson * 0.5f); 840 | float C = ((1.f-D)/(1.f+D)); 841 | float cosf = cos(freq); 842 | b1 = (1.f + C) * cosf; 843 | b2 = -C; 844 | a0 = (1.f + C - b1) * .25f; 845 | changed = false; 846 | } 847 | y0 = a0 * in + b1 * y1 + b2 * y2; 848 | y2 = y1; 849 | y1 = y0; 850 | if (Float.isNaN(y0)) { 851 | reset(); 852 | } 853 | return y0; 854 | } 855 | } 856 | 857 | /** https://github.com/micknoise/Maximilian/blob/master/maximilian.cpp */ 858 | 859 | class MickFilter implements Filter { 860 | 861 | private float f, res; 862 | private float cutoff, z, c, x, y, out; 863 | private float sampleRate; 864 | 865 | MickFilter(float sampleRate) { 866 | this.sampleRate = sampleRate; 867 | } 868 | 869 | void setFilter(float f, float r) { 870 | f = constrain(f, 0, 1); 871 | res = constrain(r, 0, 1); 872 | f = map(f, 0, 1, 25, sampleRate / 4); 873 | r = map(r, 0, 1, 1, 25); 874 | this.f = f; 875 | this.res = r; 876 | 877 | //println("mickF: f "+f+" r "+r); 878 | } 879 | float applyFilter(float in) { 880 | return lores(in, f, res); 881 | } 882 | 883 | float lores(float input, float cutoff1, float resonance) { 884 | //cutoff=cutoff1*0.5; 885 | //if (cutoff<10) cutoff=10; 886 | //if (cutoff>(sampleRate*0.5)) cutoff=(sampleRate*0.5); 887 | //if (resonance<1.) resonance = 1.; 888 | 889 | //if (resonance>2.4) resonance = 2.4; 890 | z=cos(TWO_PI*cutoff/sampleRate); 891 | c=2-2*z; 892 | float r=(sqrt(2.0)*sqrt(-pow((z-1.0), 3.0))+resonance*(z-1))/(resonance*(z-1)); 893 | x=x+(input-y)*c; 894 | y=y+x; 895 | x=x*r; 896 | out=y; 897 | return out; 898 | } 899 | } 900 | 901 | 902 | /* 903 | * This file is part of Beads. See http://www.beadsproject.net for all information. 904 | * CREDIT: This class uses portions of code taken from MPEG7AudioEnc. See readme/CREDITS.txt. 905 | */ 906 | 907 | /** 908 | * FFT performs a Fast Fourier Transform and forwards the complex data to any listeners. 909 | * The complex data is a float of the form float[2][frameSize], with real and imaginary 910 | * parts stored respectively. 911 | * 912 | * @beads.category analysis 913 | */ 914 | public class FFT { 915 | 916 | /** The real part. */ 917 | protected float[] fftReal; 918 | 919 | /** The imaginary part. */ 920 | protected float[] fftImag; 921 | 922 | private float[] dataCopy = null; 923 | private float[][] features; 924 | private float[] powers; 925 | private int numFeatures; 926 | 927 | /** 928 | * Instantiates a new FFT. 
929 | */ 930 | public FFT() { 931 | features = new float[2][]; 932 | } 933 | 934 | /* (non-Javadoc) 935 | * @see com.olliebown.beads.core.UGen#calculateBuffer() 936 | */ 937 | public float[] process(float[] data, boolean direction) { 938 | if (powers == null) powers = new float[data.length/2]; 939 | if (dataCopy==null || dataCopy.length!=data.length) 940 | dataCopy = new float[data.length]; 941 | System.arraycopy(data, 0, dataCopy, 0, data.length); 942 | 943 | fft(dataCopy, dataCopy.length, direction); 944 | numFeatures = dataCopy.length; 945 | fftReal = calculateReal(dataCopy, dataCopy.length); 946 | fftImag = calculateImaginary(dataCopy, dataCopy.length); 947 | features[0] = fftReal; 948 | features[1] = fftImag; 949 | // now calc the powers 950 | return specToPowers(fftReal, fftImag, powers); 951 | } 952 | 953 | public float[] specToPowers(float[] real, float[] imag, float[] powers) { 954 | float re, im; 955 | double pow; 956 | for (int i=0;i>1); 1059 | if (isign) { 1060 | c2 = -.5f; 1061 | four1(data, n>>1, true); 1062 | } 1063 | else { 1064 | c2 = .5f; 1065 | theta = -theta; 1066 | } 1067 | wtemp = Math.sin(.5*theta); 1068 | wpr = -2.*wtemp*wtemp; 1069 | wpi = Math.sin(theta); 1070 | wr = 1. + wpr; 1071 | wi = wpi; 1072 | int np3 = n + 3; 1073 | for (int i=2,imax = n >> 2, i1, i2, i3, i4; i <= imax; ++i) { 1074 | /** @TODO this can be optimized */ 1075 | i4 = 1 + (i3 = np3 - (i2 = 1 + (i1 = i + i - 1))); 1076 | --i4; 1077 | --i2; 1078 | --i3; 1079 | --i1; 1080 | h1i = c1*(data[i2] - data[i4]); 1081 | h2r = -c2*(data[i2] + data[i4]); 1082 | h1r = c1*(data[i1] + data[i3]); 1083 | h2i = c2*(data[i1] - data[i3]); 1084 | data[i1] = (float) ( h1r + wr*h2r - wi*h2i); 1085 | data[i2] = (float) ( h1i + wr*h2i + wi*h2r); 1086 | data[i3] = (float) ( h1r - wr*h2r + wi*h2i); 1087 | data[i4] = (float) (-h1i + wr*h2i + wi*h2r); 1088 | wr = (wtemp=wr)*wpr - wi*wpi + wr; 1089 | wi = wi*wpr + wtemp*wpi + wi; 1090 | } 1091 | if (isign) { 1092 | float tmp = data[0]; 1093 | data[0] += data[1]; 1094 | data[1] = tmp - data[1]; 1095 | } 1096 | else { 1097 | float tmp = data[0]; 1098 | data[0] = c1 * (tmp + data[1]); 1099 | data[1] = c1 * (tmp - data[1]); 1100 | four1(data, n>>1, false); 1101 | } 1102 | } 1103 | 1104 | /** 1105 | * four1 algorithm. 1106 | * 1107 | * @param data 1108 | * the data. 1109 | * @param nn 1110 | * the nn. 1111 | * @param isign 1112 | * regular or inverse. 
1113 | */ 1114 | private void four1(float data[], int nn, boolean isign) { 1115 | int n, mmax, istep; 1116 | double wtemp, wr, wpr, wpi, wi, theta; 1117 | float tempr, tempi; 1118 | 1119 | n = nn << 1; 1120 | for (int i = 1, j = 1; i < n; i += 2) { 1121 | if (j > i) { 1122 | // SWAP(data[j], data[i]); 1123 | float swap = data[j-1]; 1124 | data[j-1] = data[i-1]; 1125 | data[i-1] = swap; 1126 | // SWAP(data[j+1], data[i+1]); 1127 | swap = data[j]; 1128 | data[j] = data[i]; 1129 | data[i] = swap; 1130 | } 1131 | int m = n >> 1; 1132 | while (m >= 2 && j > m) { 1133 | j -= m; 1134 | m >>= 1; 1135 | } 1136 | j += m; 1137 | } 1138 | mmax = 2; 1139 | while (n > mmax) { 1140 | istep = mmax << 1; 1141 | theta = 6.28318530717959 / mmax; 1142 | if (!isign) 1143 | theta = -theta; 1144 | wtemp = Math.sin(0.5 * theta); 1145 | wpr = -2.0 * wtemp * wtemp; 1146 | wpi = Math.sin(theta); 1147 | wr = 1.0; 1148 | wi = 0.0; 1149 | for (int m = 1; m < mmax; m += 2) { 1150 | for (int i = m; i <= n; i += istep) { 1151 | int j = i + mmax; 1152 | tempr = (float) (wr * data[j-1] - wi * data[j]); 1153 | tempi = (float) (wr * data[j] + wi * data[j-1]); 1154 | data[j-1] = data[i-1] - tempr; 1155 | data[j] = data[i] - tempi; 1156 | data[i-1] += tempr; 1157 | data[i] += tempi; 1158 | } 1159 | wr = (wtemp = wr) * wpr - wi * wpi + wr; 1160 | wi = wi * wpr + wtemp * wpi + wi; 1161 | } 1162 | mmax = istep; 1163 | } 1164 | } 1165 | } 1166 | 1167 | 1168 | -------------------------------------------------------------------------------- /DesktopTemplate/MaximJava_api.pde: -------------------------------------------------------------------------------- 1 | //The MIT License (MIT) 2 | 3 | //Copyright (c) 2013 Mick Grierson, Matthew Yee-King, Marco Gillies 4 | 5 | //Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions: 6 | //The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software. 7 | 8 | //THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE. 9 | 10 | 11 | 12 | import java.util.ArrayList; 13 | import java.io.File; 14 | import java.io.FileInputStream; 15 | import java.io.FileNotFoundException; 16 | import java.io.IOException; 17 | import java.io.BufferedInputStream; 18 | import java.net.MalformedURLException; 19 | import java.net.URL; 20 | import javax.sound.sampled.AudioFormat; 21 | import javax.sound.sampled.AudioSystem; 22 | import javax.sound.sampled.DataLine; 23 | import javax.sound.sampled.LineUnavailableException; 24 | import javax.sound.sampled.SourceDataLine; 25 | 26 | import processing.core.*; 27 | import processing.data.*; 28 | import processing.event.*; 29 | import processing.opengl.*; 30 | 31 | //import android.content.res.Resources; 32 | // import android.app.Activity; 33 | // import android.os.Bundle; 34 | // import android.media.*; 35 | // import android.media.audiofx.Visualizer; 36 | // import android.content.res.AssetFileDescriptor; 37 | // import android.hardware.*; 38 | 39 | 40 | public class Maxim { 41 | 42 | private float sampleRate; 43 | 44 | public final float[] mtof = { 45 | 0f, 8.661957f, 9.177024f, 9.722718f, 10.3f, 10.913383f, 11.562325f, 12.25f, 12.978271f, 13.75f, 14.567617f, 15.433853f, 16.351599f, 17.323914f, 18.354048f, 19.445436f, 20.601723f, 21.826765f, 23.124651f, 24.5f, 25.956543f, 27.5f, 29.135235f, 30.867706f, 32.703197f, 34.647827f, 36.708096f, 38.890873f, 41.203445f, 43.65353f, 46.249302f, 49.f, 51.913086f, 55.f, 58.27047f, 61.735413f, 65.406395f, 69.295654f, 73.416191f, 77.781746f, 82.406891f, 87.30706f, 92.498604f, 97.998856f, 103.826172f, 110.f, 116.540939f, 123.470825f, 130.81279f, 138.591309f, 146.832382f, 155.563492f, 164.813782f, 174.61412f, 184.997208f, 195.997711f, 207.652344f, 220.f, 233.081879f, 246.94165f, 261.62558f, 277.182617f, 293.664764f, 311.126984f, 329.627563f, 349.228241f, 369.994415f, 391.995422f, 415.304688f, 440.f, 466.163757f, 493.883301f, 523.25116f, 554.365234f, 587.329529f, 622.253967f, 659.255127f, 698.456482f, 739.988831f, 783.990845f, 830.609375f, 880.f, 932.327515f, 987.766602f, 1046.502319f, 1108.730469f, 1174.659058f, 1244.507935f, 1318.510254f, 1396.912964f, 1479.977661f, 1567.981689f, 1661.21875f, 1760.f, 1864.655029f, 1975.533203f, 2093.004639f, 2217.460938f, 2349.318115f, 2489.015869f, 2637.020508f, 2793.825928f, 2959.955322f, 3135.963379f, 3322.4375f, 3520.f, 3729.31f, 3951.066406f, 4186.009277f, 4434.921875f, 4698.63623f, 4978.031738f, 5274.041016f, 5587.651855f, 5919.910645f, 6271.926758f, 6644.875f, 7040.f, 7458.620117f, 7902.132812f, 8372.018555f, 8869.84375f, 9397.272461f, 9956.063477f, 10548.082031f, 11175.303711f, 11839.821289f, 12543.853516f, 13289.75f 46 | }; 47 | 48 | private AudioThread audioThread; 49 | private PApplet processing; 50 | 51 | public Maxim (PApplet processing) { 52 | this.processing = processing; 53 | sampleRate = 44100f; 54 | audioThread = new AudioThread(sampleRate, 4096, false); 55 | audioThread.start(); 56 | 57 | } 58 | 59 | public float[] getPowerSpectrum() { 60 | return audioThread.getPowerSpectrum(); 61 | } 62 | 63 | /** 64 | * load the sent file into an audio player and return it. 
Use 65 | * this if your audio file is not too long want precision control 66 | * over looping and play head position 67 | * @param String filename - the file to load 68 | * @return AudioPlayer - an audio player which can play the file 69 | */ 70 | public AudioPlayer loadFile(String filename) { 71 | // this will load the complete audio file into memory 72 | AudioPlayer ap = new AudioPlayer(filename, sampleRate, processing); 73 | audioThread.addAudioGenerator(ap); 74 | // now we need to tell the audiothread 75 | // to ask the audioplayer for samples 76 | return ap; 77 | } 78 | 79 | /** 80 | * Create a wavetable player object with a wavetable of the sent 81 | * size. Small wavetables (<128) make for a 'nastier' sound! 82 | * 83 | */ 84 | public WavetableSynth createWavetableSynth(int size) { 85 | // this will load the complete audio file into memory 86 | WavetableSynth ap = new WavetableSynth(size, sampleRate); 87 | audioThread.addAudioGenerator(ap); 88 | // now we need to tell the audiothread 89 | // to ask the audioplayer for samples 90 | return ap; 91 | } 92 | // /** 93 | // * Create an AudioStreamPlayer which can stream audio from the 94 | // * internet as well as local files. Does not provide precise 95 | // * control over looping and playhead like AudioPlayer does. Use this for 96 | // * longer audio files and audio from the internet. 97 | // */ 98 | // public AudioStreamPlayer createAudioStreamPlayer(String url) { 99 | // AudioStreamPlayer asp = new AudioStreamPlayer(url); 100 | // return asp; 101 | // } 102 | } 103 | 104 | 105 | 106 | 107 | /** 108 | * This class can play audio files and includes an fx chain 109 | */ 110 | public class AudioPlayer implements Synth, AudioGenerator { 111 | private FXChain fxChain; 112 | private boolean isPlaying; 113 | private boolean isLooping; 114 | private boolean analysing; 115 | private FFT fft; 116 | private int fftInd; 117 | private float[] fftFrame; 118 | private float[] powerSpectrum; 119 | 120 | //private float startTimeSecs; 121 | //private float speed; 122 | private int length; 123 | private short[] audioData; 124 | private float startPos; 125 | private float readHead; 126 | private float dReadHead; 127 | private float sampleRate; 128 | private float masterVolume; 129 | 130 | float x1, x2, y1, y2, x3, y3; 131 | 132 | public AudioPlayer(float sampleRate) { 133 | fxChain = new FXChain(sampleRate); 134 | this.sampleRate = sampleRate; 135 | } 136 | 137 | public AudioPlayer (String filename, float sampleRate, PApplet processing) { 138 | //super(filename); 139 | this(sampleRate); 140 | try { 141 | // how long is the file in bytes? 142 | //long byteCount = getAssets().openFd(filename).getLength(); 143 | File f = new File(processing.dataPath(filename)); 144 | long byteCount = f.length(); 145 | //System.out.println("bytes in "+filename+" "+byteCount); 146 | 147 | // check the format of the audio file first! 148 | // only accept mono 16 bit wavs 149 | //InputStream is = getAssets().open(filename); 150 | BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f)); 151 | 152 | // chop!! 153 | 154 | int bitDepth; 155 | int channels; 156 | boolean isPCM; 157 | // allows us to read up to 4 bytes at a time 158 | byte[] byteBuff = new byte[4]; 159 | 160 | // skip 20 bytes to get file format 161 | // (1 byte) 162 | bis.skip(20); 163 | bis.read(byteBuff, 0, 2); // read 2 so we are at 22 now 164 | isPCM = ((short)byteBuff[0]) == 1 ? 
true:false; 165 | //System.out.println("File isPCM "+isPCM); 166 | 167 | // skip 22 bytes to get # channels 168 | // (1 byte) 169 | bis.read(byteBuff, 0, 2);// read 2 so we are at 24 now 170 | channels = (short)byteBuff[0]; 171 | //System.out.println("#channels "+channels+" "+byteBuff[0]); 172 | // skip 24 bytes to get sampleRate 173 | // (32 bit int) 174 | bis.read(byteBuff, 0, 4); // read 4 so now we are at 28 175 | sampleRate = bytesToInt(byteBuff, 4); 176 | //System.out.println("Sample rate "+sampleRate); 177 | // skip 34 bytes to get bits per sample 178 | // (1 byte) 179 | bis.skip(6); // we were at 28... 180 | bis.read(byteBuff, 0, 2);// read 2 so we are at 36 now 181 | bitDepth = (short)byteBuff[0]; 182 | //System.out.println("bit depth "+bitDepth); 183 | // convert to word count... 184 | bitDepth /= 8; 185 | // now start processing the raw data 186 | // data starts at byte 36 187 | int sampleCount = (int) ((byteCount - 36) / (bitDepth * channels)); 188 | audioData = new short[sampleCount]; 189 | int skip = (channels -1) * bitDepth; 190 | int sample = 0; 191 | // skip a few sample as it sounds like shit 192 | bis.skip(bitDepth * 4); 193 | while (bis.available () >= (bitDepth+skip)) { 194 | bis.read(byteBuff, 0, bitDepth);// read 2 so we are at 36 now 195 | //int val = bytesToInt(byteBuff, bitDepth); 196 | // resample to 16 bit by casting to a short 197 | audioData[sample] = (short) bytesToInt(byteBuff, bitDepth); 198 | bis.skip(skip); 199 | sample ++; 200 | } 201 | 202 | float secs = (float)sample / (float)sampleRate; 203 | //System.out.println("Read "+sample+" samples expected "+sampleCount+" time "+secs+" secs "); 204 | bis.close(); 205 | 206 | 207 | // unchop 208 | readHead = 0; 209 | startPos = 0; 210 | // default to 1 sample shift per tick 211 | dReadHead = 1; 212 | isPlaying = false; 213 | isLooping = true; 214 | masterVolume = 1; 215 | } 216 | catch (FileNotFoundException e) { 217 | 218 | e.printStackTrace(); 219 | } 220 | catch (IOException e) { 221 | e.printStackTrace(); 222 | } 223 | } 224 | 225 | public void setAnalysing(boolean analysing_) { 226 | this.analysing = analysing_; 227 | if (analysing) {// initialise the fft 228 | fft = new FFT(); 229 | fftInd = 0; 230 | fftFrame = new float[1024]; 231 | powerSpectrum = new float[fftFrame.length/2]; 232 | } 233 | } 234 | 235 | public float getAveragePower() { 236 | if (analysing) { 237 | // calc the average 238 | float sum = 0; 239 | for (int i=0;i=0; i--) { 269 | val <<= 8; 270 | val |= (int)bytes[i] & 0xFF; 271 | } 272 | return val; 273 | } 274 | 275 | /** 276 | * Test if this audioplayer is playing right now 277 | * @return true if it is playing, false otherwise 278 | */ 279 | public boolean isPlaying() { 280 | return isPlaying; 281 | } 282 | 283 | /** 284 | * Set the loop mode for this audio player 285 | * @param looping 286 | */ 287 | public void setLooping(boolean looping) { 288 | isLooping = looping; 289 | } 290 | 291 | /** 292 | * Move the start pointer of the audio player to the sent time in ms 293 | * @param timeMs - the time in ms 294 | */ 295 | public void cue(int timeMs) { 296 | //startPos = ((timeMs / 1000) * sampleRate) % audioData.length; 297 | //readHead = startPos; 298 | //System.out.println("AudioPlayer Cueing to "+timeMs); 299 | if (timeMs >= 0) {// ignore crazy values 300 | readHead = (((float)timeMs / 1000f) * sampleRate) % audioData.length; 301 | //System.out.println("Read head went to "+readHead); 302 | } 303 | } 304 | 305 | /** 306 | * Set the playback speed, 307 | * @param speed - playback speed where 
1 is normal speed, 2 is double speed 308 | */ 309 | public void speed(float speed) { 310 | //System.out.println("setting speed to "+speed); 311 | dReadHead = speed; 312 | } 313 | 314 | /** 315 | * Set the master volume of the AudioPlayer 316 | */ 317 | 318 | public void volume(float volume) { 319 | masterVolume = volume; 320 | } 321 | 322 | /** 323 | * Get the length of the audio file in samples 324 | * @return int - the length of the audio file in samples 325 | */ 326 | public int getLength() { 327 | return audioData.length; 328 | } 329 | /** 330 | * Get the length of the sound in ms, suitable for sending to 'cue' 331 | */ 332 | public float getLengthMs() { 333 | return ((float) audioData.length / sampleRate * 1000f); 334 | } 335 | 336 | /** 337 | * Start playing the sound. 338 | */ 339 | public void play() { 340 | isPlaying = true; 341 | } 342 | 343 | /** 344 | * Stop playing the sound 345 | */ 346 | public void stop() { 347 | isPlaying = false; 348 | } 349 | 350 | /** 351 | * implementation of the AudioGenerator interface 352 | */ 353 | public short getSample() { 354 | if (!isPlaying) { 355 | return 0; 356 | } 357 | else { 358 | short sample; 359 | readHead += dReadHead; 360 | if (readHead > (audioData.length - 1)) {// got to the end 361 | //% (float)audioData.length; 362 | if (isLooping) {// back to the start for loop mode 363 | readHead = readHead % (float)audioData.length; 364 | } 365 | else { 366 | readHead = 0; 367 | isPlaying = false; 368 | } 369 | } 370 | 371 | // linear interpolation here 372 | // declaring these at the top... 373 | // easy to understand version... 374 | // float x1, x2, y1, y2, x3, y3; 375 | x1 = floor(readHead); 376 | x2 = x1 + 1; 377 | y1 = audioData[(int)x1]; 378 | y2 = audioData[(int) (x2 % audioData.length)]; 379 | x3 = readHead; 380 | // calc 381 | y3 = y1 + ((x3 - x1) * (y2 - y1)); 382 | y3 *= masterVolume; 383 | sample = fxChain.getSample((short) y3); 384 | if (analysing) { 385 | // accumulate samples for the fft 386 | fftFrame[fftInd] = (float)sample / 32768f; 387 | fftInd ++; 388 | if (fftInd == fftFrame.length - 1) {// got a frame 389 | powerSpectrum = fft.process(fftFrame, true); 390 | fftInd = 0; 391 | } 392 | } 393 | 394 | //return sample; 395 | return (short)y3; 396 | } 397 | } 398 | 399 | public void setAudioData(short[] audioData) { 400 | this.audioData = audioData; 401 | } 402 | 403 | public short[] getAudioData() { 404 | return audioData; 405 | } 406 | 407 | public void setDReadHead(float dReadHead) { 408 | this.dReadHead = dReadHead; 409 | } 410 | 411 | /// 412 | //the synth interface 413 | // 414 | 415 | public void ramp(float val, float timeMs) { 416 | fxChain.ramp(val, timeMs); 417 | } 418 | 419 | 420 | 421 | public void setDelayTime(float delayMs) { 422 | fxChain.setDelayTime( delayMs); 423 | } 424 | 425 | public void setDelayFeedback(float fb) { 426 | fxChain.setDelayFeedback(fb); 427 | } 428 | 429 | public void setFilter(float cutoff, float resonance) { 430 | fxChain.setFilter( cutoff, resonance); 431 | } 432 | } 433 | 434 | /** 435 | * This class can play wavetables and includes an fx chain 436 | */ 437 | public class WavetableSynth extends AudioPlayer { 438 | 439 | private short[] sine; 440 | private short[] saw; 441 | private short[] wavetable; 442 | private float sampleRate; 443 | 444 | public WavetableSynth(int size, float sampleRate) { 445 | super(sampleRate); 446 | sine = new short[size]; 447 | for (float i = 0; i < sine.length; i++) { 448 | float phase; 449 | phase = TWO_PI / size * i; 450 | sine[(int)i] = (short) (sin(phase) * 
32768); 451 | } 452 | saw = new short[size]; 453 | for (float i = 0; i 0) { 464 | //System.out.println("freq freq "+freq); 465 | setDReadHead((float)getAudioData().length / sampleRate * freq); 466 | } 467 | } 468 | 469 | public void loadWaveForm(float[] wavetable_) { 470 | if (wavetable == null || wavetable_.length != wavetable.length) { 471 | // only reallocate if there is a change in length 472 | wavetable = new short[wavetable_.length]; 473 | } 474 | for (int i=0;i 0) { 558 | for (int j=0;j> 8); 587 | } 588 | return bData; 589 | } 590 | 591 | /** 592 | * Returns a recent snapshot of the power spectrum 593 | */ 594 | public float[] getPowerSpectrum() { 595 | // process the last buffer that was calculated 596 | if (fftFrame == null) { 597 | fftFrame = new float[bufferS.length]; 598 | } 599 | for (int i=0;i currentAmp) { 643 | goingUp = true; 644 | } 645 | else { 646 | goingUp = false; 647 | } 648 | } 649 | 650 | 651 | public void setDelayTime(float delayMs) { 652 | } 653 | 654 | public void setDelayFeedback(float fb) { 655 | } 656 | 657 | public void volume(float volume) { 658 | } 659 | 660 | 661 | public short getSample(short input) { 662 | float in; 663 | in = (float) input / 32768;// -1 to 1 664 | 665 | in = filter.applyFilter(in); 666 | if (goingUp && currentAmp < targetAmp) { 667 | currentAmp += dAmp; 668 | } 669 | else if (!goingUp && currentAmp > targetAmp) { 670 | currentAmp += dAmp; 671 | } 672 | 673 | if (currentAmp > 1) { 674 | currentAmp = 1; 675 | } 676 | if (currentAmp < 0) { 677 | currentAmp = 0; 678 | } 679 | in *= currentAmp; 680 | return (short) (in * 32768); 681 | } 682 | 683 | public void setFilter(float f, float r) { 684 | filter.setFilter(f, r); 685 | } 686 | } 687 | 688 | 689 | // /** 690 | // * Represents an audio source is streamed as opposed to being completely loaded (as WavSource is) 691 | // */ 692 | // public class AudioStreamPlayer { 693 | // /** a class from the android API*/ 694 | // private MediaPlayer mediaPlayer; 695 | // /** a class from the android API*/ 696 | // private Visualizer viz; 697 | // private byte[] waveformBuffer; 698 | // private byte[] fftBuffer; 699 | // private byte[] powerSpectrum; 700 | 701 | // /** 702 | // * create a stream source from the sent url 703 | // */ 704 | // public AudioStreamPlayer(String url) { 705 | // try { 706 | // mediaPlayer = new MediaPlayer(); 707 | // //mp.setAuxEffectSendLevel(1); 708 | // mediaPlayer.setLooping(true); 709 | 710 | // // try to parse the URL... if that fails, we assume it 711 | // // is a local file in the assets folder 712 | // try { 713 | // URL uRL = new URL(url); 714 | // mediaPlayer.setDataSource(url); 715 | // } 716 | // catch (MalformedURLException eek) { 717 | // // couldn't parse the url, assume its a local file 718 | // AssetFileDescriptor afd = getAssets().openFd(url); 719 | // //mp.setDataSource(afd.getFileDescriptor(),afd.getStartOffset(),afd.getLength()); 720 | // mediaPlayer.setDataSource(afd.getFileDescriptor()); 721 | // afd.close(); 722 | // } 723 | 724 | // mediaPlayer.prepare(); 725 | // //mediaPlayer.start(); 726 | // //System.out.println("Created audio with id "+mediaPlayer.getAudioSessionId()); 727 | // viz = new Visualizer(mediaPlayer.getAudioSessionId()); 728 | // viz.setEnabled(true); 729 | // waveformBuffer = new byte[viz.getCaptureSize()]; 730 | // fftBuffer = new byte[viz.getCaptureSize()/2]; 731 | // powerSpectrum = new byte[viz.getCaptureSize()/2]; 732 | // } 733 | // catch (Exception e) { 734 | // System.out.println("StreamSource could not be initialised. 
Check url... "+url+ " and that you have added the permission INTERNET, RECORD_AUDIO and MODIFY_AUDIO_SETTINGS to the manifest,"); 735 | // e.printStackTrace(); 736 | // } 737 | // } 738 | 739 | // public void play() { 740 | // mediaPlayer.start(); 741 | // } 742 | 743 | // public int getLengthMs() { 744 | // return mediaPlayer.getDuration(); 745 | // } 746 | 747 | // public void cue(float timeMs) { 748 | // if (timeMs >= 0 && timeMs < getLengthMs()) {// ignore crazy values 749 | // mediaPlayer.seekTo((int)timeMs); 750 | // } 751 | // } 752 | 753 | // /** 754 | // * Returns a recent snapshot of the power spectrum as 8 bit values 755 | // */ 756 | // public byte[] getPowerSpectrum() { 757 | // // calculate the spectrum 758 | // viz.getFft(fftBuffer); 759 | // short real, imag; 760 | // for (int i=2;i high resonance! 845 | r = 1-r; 846 | // remap to appropriate ranges 847 | f = map(f, 0f, sampleRate/4, 30f, sampleRate / 4); 848 | r = map(r, 0f, 1f, 0.005f, 2f); 849 | 850 | System.out.println("rlpf: f "+f+" r "+r); 851 | 852 | this.freq = f * TWO_PI / sampleRate; 853 | this.reson = r; 854 | changed = true; 855 | } 856 | 857 | public float applyFilter(float in) { 858 | float y0; 859 | if (changed) { 860 | float D = tan(freq * reson * 0.5f); 861 | float C = ((1.f-D)/(1.f+D)); 862 | float cosf = cos(freq); 863 | b1 = (1.f + C) * cosf; 864 | b2 = -C; 865 | a0 = (1.f + C - b1) * .25f; 866 | changed = false; 867 | } 868 | y0 = a0 * in + b1 * y1 + b2 * y2; 869 | y2 = y1; 870 | y1 = y0; 871 | if (Float.isNaN(y0)) { 872 | reset(); 873 | } 874 | return y0; 875 | } 876 | } 877 | 878 | /** https://github.com/micknoise/Maximilian/blob/master/maximilian.cpp */ 879 | 880 | class MickFilter implements Filter { 881 | 882 | private float f, res; 883 | private float cutoff, z, c, x, y, out; 884 | private float sampleRate; 885 | 886 | MickFilter(float sampleRate) { 887 | this.sampleRate = sampleRate; 888 | } 889 | 890 | public void setFilter(float f, float r) { 891 | f = constrain(f, 0, 1); 892 | res = constrain(r, 0, 1); 893 | f = map(f, 0, 1, 25, sampleRate / 4); 894 | r = map(r, 0, 1, 1, 25); 895 | this.f = f; 896 | this.res = r; 897 | 898 | //System.out.println("mickF: f "+f+" r "+r); 899 | } 900 | public float applyFilter(float in) { 901 | return lores(in, f, res); 902 | } 903 | 904 | public float lores(float input, float cutoff1, float resonance) { 905 | //cutoff=cutoff1*0.5; 906 | //if (cutoff<10) cutoff=10; 907 | //if (cutoff>(sampleRate*0.5)) cutoff=(sampleRate*0.5); 908 | //if (resonance<1.) resonance = 1.; 909 | 910 | //if (resonance>2.4) resonance = 2.4; 911 | z=cos(TWO_PI*cutoff/sampleRate); 912 | c=2-2*z; 913 | float r=(sqrt(2.0f)*sqrt(-pow((z-1.0f), 3.0f))+resonance*(z-1))/(resonance*(z-1)); 914 | x=x+(input-y)*c; 915 | y=y+x; 916 | x=x*r; 917 | out=y; 918 | return out; 919 | } 920 | } 921 | 922 | 923 | /* 924 | * This file is part of Beads. See http://www.beadsproject.net for all information. 925 | * CREDIT: This class uses portions of code taken from MPEG7AudioEnc. See readme/CREDITS.txt. 926 | */ 927 | 928 | /** 929 | * FFT performs a Fast Fourier Transform and forwards the complex data to any listeners. 930 | * The complex data is a float of the form float[2][frameSize], with real and imaginary 931 | * parts stored respectively. 932 | * 933 | * @beads.category analysis 934 | */ 935 | public class FFT { 936 | 937 | /** The real part. */ 938 | protected float[] fftReal; 939 | 940 | /** The imaginary part. 
*/ 941 | protected float[] fftImag; 942 | 943 | private float[] dataCopy = null; 944 | private float[][] features; 945 | private float[] powers; 946 | private int numFeatures; 947 | 948 | /** 949 | * Instantiates a new FFT. 950 | */ 951 | public FFT() { 952 | features = new float[2][]; 953 | } 954 | 955 | /* (non-Javadoc) 956 | * @see com.olliebown.beads.core.UGen#calculateBuffer() 957 | */ 958 | public float[] process(float[] data, boolean direction) { 959 | if (powers == null) powers = new float[data.length/2]; 960 | if (dataCopy==null || dataCopy.length!=data.length) 961 | dataCopy = new float[data.length]; 962 | System.arraycopy(data, 0, dataCopy, 0, data.length); 963 | 964 | fft(dataCopy, dataCopy.length, direction); 965 | numFeatures = dataCopy.length; 966 | fftReal = calculateReal(dataCopy, dataCopy.length); 967 | fftImag = calculateImaginary(dataCopy, dataCopy.length); 968 | features[0] = fftReal; 969 | features[1] = fftImag; 970 | // now calc the powers 971 | return specToPowers(fftReal, fftImag, powers); 972 | } 973 | 974 | public float[] specToPowers(float[] real, float[] imag, float[] powers) { 975 | float re, im; 976 | double pow; 977 | for (int i=0;i>1); 1080 | if (isign) { 1081 | c2 = -.5f; 1082 | four1(data, n>>1, true); 1083 | } 1084 | else { 1085 | c2 = .5f; 1086 | theta = -theta; 1087 | } 1088 | wtemp = Math.sin(.5*theta); 1089 | wpr = -2.*wtemp*wtemp; 1090 | wpi = Math.sin(theta); 1091 | wr = 1. + wpr; 1092 | wi = wpi; 1093 | int np3 = n + 3; 1094 | for (int i=2,imax = n >> 2, i1, i2, i3, i4; i <= imax; ++i) { 1095 | /** @TODO this can be optimized */ 1096 | i4 = 1 + (i3 = np3 - (i2 = 1 + (i1 = i + i - 1))); 1097 | --i4; 1098 | --i2; 1099 | --i3; 1100 | --i1; 1101 | h1i = c1*(data[i2] - data[i4]); 1102 | h2r = -c2*(data[i2] + data[i4]); 1103 | h1r = c1*(data[i1] + data[i3]); 1104 | h2i = c2*(data[i1] - data[i3]); 1105 | data[i1] = (float) ( h1r + wr*h2r - wi*h2i); 1106 | data[i2] = (float) ( h1i + wr*h2i + wi*h2r); 1107 | data[i3] = (float) ( h1r - wr*h2r + wi*h2i); 1108 | data[i4] = (float) (-h1i + wr*h2i + wi*h2r); 1109 | wr = (wtemp=wr)*wpr - wi*wpi + wr; 1110 | wi = wi*wpr + wtemp*wpi + wi; 1111 | } 1112 | if (isign) { 1113 | float tmp = data[0]; 1114 | data[0] += data[1]; 1115 | data[1] = tmp - data[1]; 1116 | } 1117 | else { 1118 | float tmp = data[0]; 1119 | data[0] = c1 * (tmp + data[1]); 1120 | data[1] = c1 * (tmp - data[1]); 1121 | four1(data, n>>1, false); 1122 | } 1123 | } 1124 | 1125 | /** 1126 | * four1 algorithm. 1127 | * 1128 | * @param data 1129 | * the data. 1130 | * @param nn 1131 | * the nn. 1132 | * @param isign 1133 | * regular or inverse. 
1134 | */ 1135 | private void four1(float data[], int nn, boolean isign) { 1136 | int n, mmax, istep; 1137 | double wtemp, wr, wpr, wpi, wi, theta; 1138 | float tempr, tempi; 1139 | 1140 | n = nn << 1; 1141 | for (int i = 1, j = 1; i < n; i += 2) { 1142 | if (j > i) { 1143 | // SWAP(data[j], data[i]); 1144 | float swap = data[j-1]; 1145 | data[j-1] = data[i-1]; 1146 | data[i-1] = swap; 1147 | // SWAP(data[j+1], data[i+1]); 1148 | swap = data[j]; 1149 | data[j] = data[i]; 1150 | data[i] = swap; 1151 | } 1152 | int m = n >> 1; 1153 | while (m >= 2 && j > m) { 1154 | j -= m; 1155 | m >>= 1; 1156 | } 1157 | j += m; 1158 | } 1159 | mmax = 2; 1160 | while (n > mmax) { 1161 | istep = mmax << 1; 1162 | theta = 6.28318530717959 / mmax; 1163 | if (!isign) 1164 | theta = -theta; 1165 | wtemp = Math.sin(0.5 * theta); 1166 | wpr = -2.0 * wtemp * wtemp; 1167 | wpi = Math.sin(theta); 1168 | wr = 1.0; 1169 | wi = 0.0; 1170 | for (int m = 1; m < mmax; m += 2) { 1171 | for (int i = m; i <= n; i += istep) { 1172 | int j = i + mmax; 1173 | tempr = (float) (wr * data[j-1] - wi * data[j]); 1174 | tempi = (float) (wr * data[j] + wi * data[j-1]); 1175 | data[j-1] = data[i-1] - tempr; 1176 | data[j] = data[i] - tempi; 1177 | data[i-1] += tempr; 1178 | data[i] += tempi; 1179 | } 1180 | wr = (wtemp = wr) * wpr - wi * wpi + wr; 1181 | wi = wi * wpr + wtemp * wpi + wi; 1182 | } 1183 | mmax = istep; 1184 | } 1185 | } 1186 | } 1187 | 1188 | 1189 | -------------------------------------------------------------------------------- /AndroidTemplate/web-export/AndroidTemplate.pde: -------------------------------------------------------------------------------- 1 | //When running on the iPad or iPhone, you won't see anything unless you tap the screen. 2 | //If it doesn't appear to work first time, always try refreshing the browser. 3 | 4 | Maxim maxim; 5 | AudioPlayer player; 6 | float go; 7 | boolean playit; 8 | 9 | int elements = 20;// This is the number of points and lines we will calculate at once. 1000 is alot actually. 10 | 11 | void setup() { 12 | //The size is iPad Portrait. 13 | //If you want landscape, you should swap the values. 14 | size(768, 1024); 15 | 16 | frameRate(25); // this is the framerate. Tweak for performance 17 | maxim = new Maxim(this); 18 | player = maxim.loadFile("mybeat.wav"); 19 | player.setLooping(true); 20 | player.setAnalysing(true); 21 | noStroke(); 22 | rectMode(CENTER); 23 | background(0); 24 | colorMode(HSB); 25 | } 26 | 27 | void draw() { 28 | 29 | if (playit) { 30 | 31 | player.play(); 32 | float power = player.getAveragePower(); 33 | go+=power*50; 34 | translate(width/2, height/2);// we translate the whole sketch to the centre of the screen, so 0,0 is in the middle. 35 | for (int i = elements; i > 0;i--) { 36 | fill((5*i+go)%255, power*512, 255); // this for loop calculates the x and y position for each node in the system and draws a line between it and the next. 37 | ellipse((mouseX-(width/2))*(elements-i)/elements, (mouseY-(height/2))*(elements-i)/elements, width*1.5/elements*i, height*1.5/elements*i); 38 | } 39 | player.speed((float) mouseX / (float) width); 40 | } 41 | } 42 | 43 | void mousePressed() { 44 | 45 | playit = !playit; 46 | 47 | if (playit) { 48 | 49 | player.play(); 50 | } 51 | else { 52 | 53 | player.stop(); 54 | } 55 | } 56 | 57 | /* 58 | The MIT License (MIT) 59 | 60 | Copyright (c) 2013 Mick Grierson, Matthew Yee-King, Marco Gillies 61 | 62 | Permission is hereby granted, free of charge, to any person obtaining a copy
of 63 | this software and associated documentation files (the "Software"), to 64 | deal
in the Software without restriction, including without limitation 65 | the rights
to use, copy, modify, merge, publish, distribute, sublicense, 66 | and/or sell
copies of the Software, and to permit persons to whom the 67 | Software is
furnished to do so, subject to the following conditions: 68 | 69 | The above copyright notice and this permission notice shall be included 70 | in 
all copies or substantial portions of the Software. 71 | 72 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE. 73 | */ 74 | 75 | 76 | import java.io.File; 77 | import java.io.FileInputStream; 78 | import java.io.FileNotFoundException; 79 | import java.io.IOException; 80 | import java.io.BufferedInputStream; 81 | import java.net.MalformedURLException; 82 | import java.net.URL; 83 | //import android.content.res.Resources; 84 | import android.app.Activity; 85 | import android.os.Bundle; 86 | import android.media.*; 87 | import android.media.audiofx.Visualizer; 88 | import android.content.res.AssetFileDescriptor; 89 | import android.hardware.*; 90 | 91 | 92 | public class Maxim { 93 | 94 | private float sampleRate = 44100; 95 | 96 | public final float[] mtof = { 97 | 0, 8.661957, 9.177024, 9.722718, 10.3, 10.913383, 11.562325, 12.25, 12.978271, 13.75, 14.567617, 15.433853, 16.351599, 17.323914, 18.354048, 19.445436, 20.601723, 21.826765, 23.124651, 24.5, 25.956543, 27.5, 29.135235, 30.867706, 32.703197, 34.647827, 36.708096, 38.890873, 41.203445, 43.65353, 46.249302, 49., 51.913086, 55., 58.27047, 61.735413, 65.406395, 69.295654, 73.416191, 77.781746, 82.406891, 87.30706, 92.498604, 97.998856, 103.826172, 110., 116.540939, 123.470825, 130.81279, 138.591309, 146.832382, 155.563492, 164.813782, 174.61412, 184.997208, 195.997711, 207.652344, 220., 233.081879, 246.94165, 261.62558, 277.182617, 293.664764, 311.126984, 329.627563, 349.228241, 369.994415, 391.995422, 415.304688, 440., 466.163757, 493.883301, 523.25116, 554.365234, 587.329529, 622.253967, 659.255127, 698.456482, 739.988831, 783.990845, 830.609375, 880., 932.327515, 987.766602, 1046.502319, 1108.730469, 1174.659058, 1244.507935, 1318.510254, 1396.912964, 1479.977661, 1567.981689, 1661.21875, 1760., 1864.655029, 1975.533203, 2093.004639, 2217.460938, 2349.318115, 2489.015869, 2637.020508, 2793.825928, 2959.955322, 3135.963379, 3322.4375, 3520., 3729.31, 3951.066406, 4186.009277, 4434.921875, 4698.63623, 4978.031738, 5274.041016, 5587.651855, 5919.910645, 6271.926758, 6644.875, 7040., 7458.620117, 7902.132812, 8372.018555, 8869.84375, 9397.272461, 9956.063477, 10548.082031, 11175.303711, 11839.821289, 12543.853516, 13289.75 98 | }; 99 | 100 | private AndroidAudioThread audioThread; 101 | 102 | public Maxim (PApplet app) { 103 | audioThread = new AndroidAudioThread(sampleRate, 256, false); 104 | audioThread.start(); 105 | } 106 | 107 | public float[] getPowerSpectrum() { 108 | return audioThread.getPowerSpectrum(); 109 | } 110 | 111 | /** 112 | * load the sent file into an audio player and return it. Use 113 | * this if your audio file is not too long want precision control 114 | * over looping and play head position 115 | * @param String filename - the file to load 116 | * @return AudioPlayer - an audio player which can play the file 117 | */ 118 | public AudioPlayer loadFile(String filename) { 119 | // this will load the complete audio file into memory 120 | AudioPlayer ap = new AudioPlayer(filename, sampleRate); 121 | audioThread.addAudioGenerator(ap); 122 | // now we need to tell the audiothread 123 | // to ask the audioplayer for samples 124 | return ap; 125 | } 126 | 127 | /** 128 | * Create a wavetable player object with a wavetable of the sent 129 | * size. Small wavetables (<128) make for a 'nastier' sound! 
130 | * 131 | */ 132 | public WavetableSynth createWavetableSynth(int size) { 133 | // this will load the complete audio file into memory 134 | WavetableSynth ap = new WavetableSynth(size, sampleRate); 135 | audioThread.addAudioGenerator(ap); 136 | // now we need to tell the audiothread 137 | // to ask the audioplayer for samples 138 | return ap; 139 | } 140 | /** 141 | * Create an AudioStreamPlayer which can stream audio from the 142 | * internet as well as local files. Does not provide precise 143 | * control over looping and playhead like AudioPlayer does. Use this for 144 | * longer audio files and audio from the internet. 145 | */ 146 | public AudioStreamPlayer createAudioStreamPlayer(String url) { 147 | AudioStreamPlayer asp = new AudioStreamPlayer(url); 148 | return asp; 149 | } 150 | } 151 | 152 | 153 | 154 | 155 | /** 156 | * This class can play audio files and includes an fx chain 157 | */ 158 | public class AudioPlayer implements Synth, AudioGenerator { 159 | private FXChain fxChain; 160 | private boolean isPlaying; 161 | private boolean isLooping; 162 | private boolean analysing; 163 | private FFT fft; 164 | private int fftInd; 165 | private float[] fftFrame; 166 | private float[] powerSpectrum; 167 | 168 | //private float startTimeSecs; 169 | //private float speed; 170 | private int length; 171 | private short[] audioData; 172 | private float startPos; 173 | private float readHead; 174 | private float dReadHead; 175 | private float sampleRate; 176 | private float masterVolume; 177 | 178 | float x1, x2, y1, y2, x3, y3; 179 | 180 | public AudioPlayer(float sampleRate) { 181 | this.sampleRate = sampleRate; 182 | fxChain = new FXChain(sampleRate); 183 | } 184 | 185 | public AudioPlayer (String filename, float sampleRate) { 186 | //super(filename); 187 | this(sampleRate); 188 | try { 189 | // how long is the file in bytes? 190 | long byteCount = getAssets().openFd(filename).getLength(); 191 | //System.out.println("bytes in "+filename+" "+byteCount); 192 | 193 | // check the format of the audio file first! 194 | // only accept mono 16 bit wavs 195 | InputStream is = getAssets().open(filename); 196 | BufferedInputStream bis = new BufferedInputStream(is); 197 | 198 | // chop!! 199 | 200 | int bitDepth; 201 | int channels; 202 | boolean isPCM; 203 | // allows us to read up to 4 bytes at a time 204 | byte[] byteBuff = new byte[4]; 205 | 206 | // skip 20 bytes to get file format 207 | // (1 byte) 208 | bis.skip(20); 209 | bis.read(byteBuff, 0, 2); // read 2 so we are at 22 now 210 | isPCM = ((short)byteBuff[0]) == 1 ? true:false; 211 | //System.out.println("File isPCM "+isPCM); 212 | 213 | // skip 22 bytes to get # channels 214 | // (1 byte) 215 | bis.read(byteBuff, 0, 2);// read 2 so we are at 24 now 216 | channels = (short)byteBuff[0]; 217 | //System.out.println("#channels "+channels+" "+byteBuff[0]); 218 | // skip 24 bytes to get sampleRate 219 | // (32 bit int) 220 | bis.read(byteBuff, 0, 4); // read 4 so now we are at 28 221 | sampleRate = bytesToInt(byteBuff, 4); 222 | //System.out.println("Sample rate "+sampleRate); 223 | // skip 34 bytes to get bits per sample 224 | // (1 byte) 225 | bis.skip(6); // we were at 28... 226 | bis.read(byteBuff, 0, 2);// read 2 so we are at 36 now 227 | bitDepth = (short)byteBuff[0]; 228 | //System.out.println("bit depth "+bitDepth); 229 | // convert to word count... 
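// The header fields read above (format tag, channel count, sample rate, bits per sample) are
// decoded little-endian by bytesToInt() further down. A standalone sketch of that decode,
// assuming WAV's little-endian byte order (the helper name is illustrative):
//
//   int littleEndianToInt(byte[] bytes, int wordSizeBytes) {
//     int val = 0;
//     for (int i = wordSizeBytes - 1; i >= 0; i--) {  // most significant byte sits last in the file
//       val <<= 8;
//       val |= (int) bytes[i] & 0xFF;                 // mask to stop sign extension of negative bytes
//     }
//     return val;
//   }
//
//   // e.g. the four bytes 0x44 0xAC 0x00 0x00 decode to 44100, a typical sample rate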
230 | bitDepth /= 8; 231 | // now start processing the raw data 232 | // data starts at byte 36 233 | int sampleCount = (int) ((byteCount - 36) / (bitDepth * channels)); 234 | audioData = new short[sampleCount]; 235 | int skip = (channels -1) * bitDepth; 236 | int sample = 0; 237 | // skip a few sample as it sounds like shit 238 | bis.skip(bitDepth * 4); 239 | while (bis.available () >= (bitDepth+skip)) { 240 | bis.read(byteBuff, 0, bitDepth);// read 2 so we are at 36 now 241 | //int val = bytesToInt(byteBuff, bitDepth); 242 | // resample to 16 bit by casting to a short 243 | audioData[sample] = (short) bytesToInt(byteBuff, bitDepth); 244 | bis.skip(skip); 245 | sample ++; 246 | } 247 | 248 | float secs = (float)sample / (float)sampleRate; 249 | //System.out.println("Read "+sample+" samples expected "+sampleCount+" time "+secs+" secs "); 250 | bis.close(); 251 | 252 | 253 | // unchop 254 | readHead = 0; 255 | startPos = 0; 256 | // default to 1 sample shift per tick 257 | dReadHead = 1; 258 | isPlaying = false; 259 | isLooping = true; 260 | masterVolume = 1; 261 | } 262 | catch (FileNotFoundException e) { 263 | 264 | e.printStackTrace(); 265 | } 266 | catch (IOException e) { 267 | e.printStackTrace(); 268 | } 269 | } 270 | 271 | public void setAnalysing(boolean analysing_) { 272 | this.analysing = analysing_; 273 | if (analysing) {// initialise the fft 274 | fft = new FFT(); 275 | fftInd = 0; 276 | fftFrame = new float[1024]; 277 | powerSpectrum = new float[fftFrame.length/2]; 278 | } 279 | } 280 | 281 | public float getAveragePower() { 282 | if (analysing) { 283 | // calc the average 284 | float sum = 0; 285 | for (int i=0;i=0; i--) { 315 | val <<= 8; 316 | val |= (int)bytes[i] & 0xFF; 317 | } 318 | return val; 319 | } 320 | 321 | /** 322 | * Test if this audioplayer is playing right now 323 | * @return true if it is playing, false otherwise 324 | */ 325 | public boolean isPlaying() { 326 | return isPlaying; 327 | } 328 | 329 | /** 330 | * Set the loop mode for this audio player 331 | * @param looping 332 | */ 333 | public void setLooping(boolean looping) { 334 | isLooping = looping; 335 | } 336 | 337 | /** 338 | * Move the start pointer of the audio player to the sent time in ms 339 | * @param timeMs - the time in ms 340 | */ 341 | public void cue(int timeMs) { 342 | //startPos = ((timeMs / 1000) * sampleRate) % audioData.length; 343 | //readHead = startPos; 344 | //println("AudioPlayer Cueing to "+timeMs); 345 | if (timeMs >= 0) {// ignore crazy values 346 | readHead = (((float)timeMs / 1000f) * sampleRate) % audioData.length; 347 | //println("Read head went to "+readHead); 348 | } 349 | } 350 | 351 | /** 352 | * Set the playback speed, 353 | * @param speed - playback speed where 1 is normal speed, 2 is double speed 354 | */ 355 | public void speed(float speed) { 356 | //println("setting speed to "+speed); 357 | dReadHead = speed; 358 | } 359 | 360 | /** 361 | * Set the master volume of the AudioPlayer 362 | */ 363 | 364 | public void volume(float volume) { 365 | masterVolume = volume; 366 | } 367 | 368 | /** 369 | * Get the length of the audio file in samples 370 | * @return int - the length of the audio file in samples 371 | */ 372 | public int getLength() { 373 | return audioData.length; 374 | } 375 | /** 376 | * Get the length of the sound in ms, suitable for sending to 'cue' 377 | */ 378 | public float getLengthMs() { 379 | return (audioData.length / sampleRate * 1000); 380 | } 381 | 382 | /** 383 | * Start playing the sound. 
384 | */ 385 | public void play() { 386 | isPlaying = true; 387 | } 388 | 389 | /** 390 | * Stop playing the sound 391 | */ 392 | public void stop() { 393 | isPlaying = false; 394 | } 395 | 396 | /** 397 | * implementation of the AudioGenerator interface 398 | */ 399 | public short getSample() { 400 | if (!isPlaying) { 401 | return 0; 402 | } 403 | else { 404 | short sample; 405 | readHead += dReadHead; 406 | if (readHead > (audioData.length - 1)) {// got to the end 407 | //% (float)audioData.length; 408 | if (isLooping) {// back to the start for loop mode 409 | readHead = readHead % (float)audioData.length; 410 | } 411 | else { 412 | readHead = 0; 413 | isPlaying = false; 414 | } 415 | } 416 | 417 | // linear interpolation here 418 | // declaring these at the top... 419 | // easy to understand version... 420 | // float x1, x2, y1, y2, x3, y3; 421 | x1 = floor(readHead); 422 | x2 = x1 + 1; 423 | y1 = audioData[(int)x1]; 424 | y2 = audioData[(int) (x2 % audioData.length)]; 425 | x3 = readHead; 426 | // calc 427 | y3 = y1 + ((x3 - x1) * (y2 - y1)); 428 | y3 *= masterVolume; 429 | sample = fxChain.getSample((short) y3); 430 | if (analysing) { 431 | // accumulate samples for the fft 432 | fftFrame[fftInd] = (float)sample / 32768f; 433 | fftInd ++; 434 | if (fftInd == fftFrame.length - 1) {// got a frame 435 | powerSpectrum = fft.process(fftFrame, true); 436 | fftInd = 0; 437 | } 438 | } 439 | 440 | //return sample; 441 | return (short)y3; 442 | } 443 | } 444 | 445 | public void setAudioData(short[] audioData) { 446 | this.audioData = audioData; 447 | } 448 | 449 | public short[] getAudioData() { 450 | return audioData; 451 | } 452 | 453 | public void setDReadHead(float dReadHead) { 454 | this.dReadHead = dReadHead; 455 | } 456 | 457 | /// 458 | //the synth interface 459 | // 460 | 461 | public void ramp(float val, float timeMs) { 462 | fxChain.ramp(val, timeMs); 463 | } 464 | 465 | 466 | 467 | public void setDelayTime(float delayMs) { 468 | fxChain.setDelayTime( delayMs); 469 | } 470 | 471 | public void setDelayFeedback(float fb) { 472 | fxChain.setDelayFeedback(fb); 473 | } 474 | 475 | public void setFilter(float cutoff, float resonance) { 476 | fxChain.setFilter( cutoff, resonance); 477 | } 478 | } 479 | 480 | /** 481 | * This class can play wavetables and includes an fx chain 482 | */ 483 | public class WavetableSynth extends AudioPlayer { 484 | 485 | private short[] sine; 486 | private short[] saw; 487 | private short[] wavetable; 488 | private float sampleRate; 489 | 490 | public WavetableSynth(int size, float sampleRate) { 491 | super(sampleRate); 492 | sine = new short[size]; 493 | for (float i = 0; i < sine.length; i++) { 494 | float phase; 495 | phase = TWO_PI / size * i; 496 | sine[(int)i] = (short) (sin(phase) * 32768); 497 | } 498 | saw = new short[size]; 499 | for (float i = 0; i 0) { 510 | //println("freq freq "+freq); 511 | setDReadHead((float)getAudioData().length / sampleRate * freq); 512 | } 513 | } 514 | 515 | public void loadWaveForm(float[] wavetable_) { 516 | if (wavetable == null || wavetable_.length != wavetable.length) { 517 | // only reallocate if there is a change in length 518 | wavetable = new short[wavetable_.length]; 519 | } 520 | for (int i=0;i 0) { 608 | for (int j=0;j currentAmp) { 662 | goingUp = true; 663 | } 664 | else { 665 | goingUp = false; 666 | } 667 | } 668 | 669 | 670 | public void setDelayTime(float delayMs) { 671 | } 672 | 673 | public void setDelayFeedback(float fb) { 674 | } 675 | 676 | public void volume(float volume) { 677 | } 678 | 679 | 680 | 
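  /**
   * Illustrative sketch only (not called by the chain): one way to derive the per-sample
   * increment that the linear ramp set up in ramp() above and applied in getSample() below
   * relies on, given a target level, the current level, a ramp time in ms and the sample rate.
   */
  private float rampIncrementPerSample(float target, float current, float timeMs, float sampleRate) {
    float samples = (timeMs / 1000f) * sampleRate;  // how many audio ticks the ramp should span
    if (samples < 1f) samples = 1f;                 // guard against a zero-length ramp
    return (target - current) / samples;            // add this once per call to getSample()
  }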
public short getSample(short input) { 681 | float in; 682 | in = (float) input / 32768;// -1 to 1 683 | 684 | in = filter.applyFilter(in); 685 | if (goingUp && currentAmp < targetAmp) { 686 | currentAmp += dAmp; 687 | } 688 | else if (!goingUp && currentAmp > targetAmp) { 689 | currentAmp += dAmp; 690 | } 691 | 692 | if (currentAmp > 1) { 693 | currentAmp = 1; 694 | } 695 | if (currentAmp < 0) { 696 | currentAmp = 0; 697 | } 698 | in *= currentAmp; 699 | return (short) (in * 32768); 700 | } 701 | 702 | public void setFilter(float f, float r) { 703 | filter.setFilter(f, r); 704 | } 705 | } 706 | 707 | 708 | /** 709 | * Represents an audio source is streamed as opposed to being completely loaded (as WavSource is) 710 | */ 711 | public class AudioStreamPlayer { 712 | /** a class from the android API*/ 713 | private MediaPlayer mediaPlayer; 714 | /** a class from the android API*/ 715 | private Visualizer viz; 716 | private byte[] waveformBuffer; 717 | private byte[] fftBuffer; 718 | private byte[] powerSpectrum; 719 | 720 | /** 721 | * create a stream source from the sent url 722 | */ 723 | public AudioStreamPlayer(String url) { 724 | try { 725 | mediaPlayer = new MediaPlayer(); 726 | //mp.setAuxEffectSendLevel(1); 727 | mediaPlayer.setLooping(true); 728 | 729 | // try to parse the URL... if that fails, we assume it 730 | // is a local file in the assets folder 731 | try { 732 | URL uRL = new URL(url); 733 | mediaPlayer.setDataSource(url); 734 | } 735 | catch (MalformedURLException eek) { 736 | // couldn't parse the url, assume its a local file 737 | AssetFileDescriptor afd = getAssets().openFd(url); 738 | //mp.setDataSource(afd.getFileDescriptor(),afd.getStartOffset(),afd.getLength()); 739 | mediaPlayer.setDataSource(afd.getFileDescriptor()); 740 | afd.close(); 741 | } 742 | 743 | mediaPlayer.prepare(); 744 | //mediaPlayer.start(); 745 | //println("Created audio with id "+mediaPlayer.getAudioSessionId()); 746 | viz = new Visualizer(mediaPlayer.getAudioSessionId()); 747 | viz.setEnabled(true); 748 | waveformBuffer = new byte[viz.getCaptureSize()]; 749 | fftBuffer = new byte[viz.getCaptureSize()/2]; 750 | powerSpectrum = new byte[viz.getCaptureSize()/2]; 751 | } 752 | catch (Exception e) { 753 | println("StreamSource could not be initialised. Check url... "+url+ " and that you have added the permission INTERNET, RECORD_AUDIO and MODIFY_AUDIO_SETTINGS to the manifest,"); 754 | e.printStackTrace(); 755 | } 756 | } 757 | 758 | public void play() { 759 | mediaPlayer.start(); 760 | } 761 | 762 | public int getLengthMs() { 763 | return mediaPlayer.getDuration(); 764 | } 765 | 766 | public void cue(float timeMs) { 767 | if (timeMs >= 0 && timeMs < getLengthMs()) {// ignore crazy values 768 | mediaPlayer.seekTo((int)timeMs); 769 | } 770 | } 771 | 772 | /** 773 | * Returns a recent snapshot of the power spectrum as 8 bit values 774 | */ 775 | public byte[] getPowerSpectrum() { 776 | // calculate the spectrum 777 | viz.getFft(fftBuffer); 778 | short real, imag; 779 | for (int i=2;i high resonance! 
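// How setFilter()'s two arguments are interpreted depends on which Filter implementation the
// FX chain wires in: the resonant low-pass being configured here remaps both values internally
// (r is flipped with r = 1-r and both are mapped into its own working ranges), while MickFilter
// below expects normalised 0..1 inputs and does its own mapping. A usage sketch from a
// Processing draw() loop, assuming the normalised 0..1 convention and a loaded AudioPlayer
// named player (both are assumptions, not taken from this file):
//
//   float cutoff    = map(mouseX, 0, width, 0, 1);
//   float resonance = map(mouseY, 0, height, 0, 1);
//   player.setFilter(cutoff, resonance);   // forwarded through the FX chain to the filter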
880 | r = 1-r; 881 | // remap to appropriate ranges 882 | f = map(f, 0, sampleRate/4, 30, sampleRate / 4); 883 | r = map(r, 0, 1, 0.005, 2); 884 | 885 | println("rlpf: f "+f+" r "+r); 886 | 887 | this.freq = f * TWO_PI / sampleRate; 888 | this.reson = r; 889 | changed = true; 890 | } 891 | 892 | public float applyFilter(float in) { 893 | float y0; 894 | if (changed) { 895 | float D = tan(freq * reson * 0.5f); 896 | float C = ((1.f-D)/(1.f+D)); 897 | float cosf = cos(freq); 898 | b1 = (1.f + C) * cosf; 899 | b2 = -C; 900 | a0 = (1.f + C - b1) * .25f; 901 | changed = false; 902 | } 903 | y0 = a0 * in + b1 * y1 + b2 * y2; 904 | y2 = y1; 905 | y1 = y0; 906 | if (Float.isNaN(y0)) { 907 | reset(); 908 | } 909 | return y0; 910 | } 911 | } 912 | 913 | /** https://github.com/micknoise/Maximilian/blob/master/maximilian.cpp */ 914 | 915 | class MickFilter implements Filter { 916 | 917 | private float f, res; 918 | private float cutoff, z, c, x, y, out; 919 | private float sampleRate; 920 | 921 | MickFilter(float sampleRate) { 922 | this.sampleRate = sampleRate; 923 | } 924 | 925 | void setFilter(float f, float r) { 926 | f = constrain(f, 0, 1); 927 | res = constrain(r, 0, 1); 928 | f = map(f, 0, 1, 25, sampleRate / 4); 929 | r = map(r, 0, 1, 1, 25); 930 | this.f = f; 931 | this.res = r; 932 | 933 | //println("mickF: f "+f+" r "+r); 934 | } 935 | float applyFilter(float in) { 936 | return lores(in, f, res); 937 | } 938 | 939 | float lores(float input, float cutoff1, float resonance) { 940 | //cutoff=cutoff1*0.5; 941 | //if (cutoff<10) cutoff=10; 942 | //if (cutoff>(sampleRate*0.5)) cutoff=(sampleRate*0.5); 943 | //if (resonance<1.) resonance = 1.; 944 | 945 | //if (resonance>2.4) resonance = 2.4; 946 | z=cos(TWO_PI*cutoff/sampleRate); 947 | c=2-2*z; 948 | float r=(sqrt(2.0)*sqrt(-pow((z-1.0), 3.0))+resonance*(z-1))/(resonance*(z-1)); 949 | x=x+(input-y)*c; 950 | y=y+x; 951 | x=x*r; 952 | out=y; 953 | return out; 954 | } 955 | } 956 | 957 | 958 | /* 959 | * This file is part of Beads. See http://www.beadsproject.net for all information. 960 | * CREDIT: This class uses portions of code taken from MPEG7AudioEnc. See readme/CREDITS.txt. 961 | */ 962 | 963 | /** 964 | * FFT performs a Fast Fourier Transform and forwards the complex data to any listeners. 965 | * The complex data is a float of the form float[2][frameSize], with real and imaginary 966 | * parts stored respectively. 967 | * 968 | * @beads.category analysis 969 | */ 970 | public class FFT { 971 | 972 | /** The real part. */ 973 | protected float[] fftReal; 974 | 975 | /** The imaginary part. */ 976 | protected float[] fftImag; 977 | 978 | private float[] dataCopy = null; 979 | private float[][] features; 980 | private float[] powers; 981 | private int numFeatures; 982 | 983 | /** 984 | * Instantiates a new FFT. 
985 | */ 986 | public FFT() { 987 | features = new float[2][]; 988 | } 989 | 990 | /* (non-Javadoc) 991 | * @see com.olliebown.beads.core.UGen#calculateBuffer() 992 | */ 993 | public float[] process(float[] data, boolean direction) { 994 | if (powers == null) powers = new float[data.length/2]; 995 | if (dataCopy==null || dataCopy.length!=data.length) 996 | dataCopy = new float[data.length]; 997 | System.arraycopy(data, 0, dataCopy, 0, data.length); 998 | 999 | fft(dataCopy, dataCopy.length, direction); 1000 | numFeatures = dataCopy.length; 1001 | fftReal = calculateReal(dataCopy, dataCopy.length); 1002 | fftImag = calculateImaginary(dataCopy, dataCopy.length); 1003 | features[0] = fftReal; 1004 | features[1] = fftImag; 1005 | // now calc the powers 1006 | return specToPowers(fftReal, fftImag, powers); 1007 | } 1008 | 1009 | public float[] specToPowers(float[] real, float[] imag, float[] powers) { 1010 | float re, im; 1011 | double pow; 1012 | for (int i=0;i>1); 1115 | if (isign) { 1116 | c2 = -.5f; 1117 | four1(data, n>>1, true); 1118 | } 1119 | else { 1120 | c2 = .5f; 1121 | theta = -theta; 1122 | } 1123 | wtemp = Math.sin(.5*theta); 1124 | wpr = -2.*wtemp*wtemp; 1125 | wpi = Math.sin(theta); 1126 | wr = 1. + wpr; 1127 | wi = wpi; 1128 | int np3 = n + 3; 1129 | for (int i=2,imax = n >> 2, i1, i2, i3, i4; i <= imax; ++i) { 1130 | /** @TODO this can be optimized */ 1131 | i4 = 1 + (i3 = np3 - (i2 = 1 + (i1 = i + i - 1))); 1132 | --i4; 1133 | --i2; 1134 | --i3; 1135 | --i1; 1136 | h1i = c1*(data[i2] - data[i4]); 1137 | h2r = -c2*(data[i2] + data[i4]); 1138 | h1r = c1*(data[i1] + data[i3]); 1139 | h2i = c2*(data[i1] - data[i3]); 1140 | data[i1] = (float) ( h1r + wr*h2r - wi*h2i); 1141 | data[i2] = (float) ( h1i + wr*h2i + wi*h2r); 1142 | data[i3] = (float) ( h1r - wr*h2r + wi*h2i); 1143 | data[i4] = (float) (-h1i + wr*h2i + wi*h2r); 1144 | wr = (wtemp=wr)*wpr - wi*wpi + wr; 1145 | wi = wi*wpr + wtemp*wpi + wi; 1146 | } 1147 | if (isign) { 1148 | float tmp = data[0]; 1149 | data[0] += data[1]; 1150 | data[1] = tmp - data[1]; 1151 | } 1152 | else { 1153 | float tmp = data[0]; 1154 | data[0] = c1 * (tmp + data[1]); 1155 | data[1] = c1 * (tmp - data[1]); 1156 | four1(data, n>>1, false); 1157 | } 1158 | } 1159 | 1160 | /** 1161 | * four1 algorithm. 1162 | * 1163 | * @param data 1164 | * the data. 1165 | * @param nn 1166 | * the nn. 1167 | * @param isign 1168 | * regular or inverse. 
1169 | */ 1170 | private void four1(float data[], int nn, boolean isign) { 1171 | int n, mmax, istep; 1172 | double wtemp, wr, wpr, wpi, wi, theta; 1173 | float tempr, tempi; 1174 | 1175 | n = nn << 1; 1176 | for (int i = 1, j = 1; i < n; i += 2) { 1177 | if (j > i) { 1178 | // SWAP(data[j], data[i]); 1179 | float swap = data[j-1]; 1180 | data[j-1] = data[i-1]; 1181 | data[i-1] = swap; 1182 | // SWAP(data[j+1], data[i+1]); 1183 | swap = data[j]; 1184 | data[j] = data[i]; 1185 | data[i] = swap; 1186 | } 1187 | int m = n >> 1; 1188 | while (m >= 2 && j > m) { 1189 | j -= m; 1190 | m >>= 1; 1191 | } 1192 | j += m; 1193 | } 1194 | mmax = 2; 1195 | while (n > mmax) { 1196 | istep = mmax << 1; 1197 | theta = 6.28318530717959 / mmax; 1198 | if (!isign) 1199 | theta = -theta; 1200 | wtemp = Math.sin(0.5 * theta); 1201 | wpr = -2.0 * wtemp * wtemp; 1202 | wpi = Math.sin(theta); 1203 | wr = 1.0; 1204 | wi = 0.0; 1205 | for (int m = 1; m < mmax; m += 2) { 1206 | for (int i = m; i <= n; i += istep) { 1207 | int j = i + mmax; 1208 | tempr = (float) (wr * data[j-1] - wi * data[j]); 1209 | tempi = (float) (wr * data[j] + wi * data[j-1]); 1210 | data[j-1] = data[i-1] - tempr; 1211 | data[j] = data[i] - tempi; 1212 | data[i-1] += tempr; 1213 | data[i] += tempi; 1214 | } 1215 | wr = (wtemp = wr) * wpr - wi * wpi + wr; 1216 | wi = wi * wpr + wtemp * wpi + wi; 1217 | } 1218 | mmax = istep; 1219 | } 1220 | } 1221 | } 1222 | 1223 | 1224 | 1225 | --------------------------------------------------------------------------------
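A minimal Processing sketch showing how the Maxim API defined in these templates is typically driven, adapted from the AndroidTemplate example above ("mybeat.wav" stands in for any mono 16-bit wav in the sketch's data folder):

Maxim maxim;
AudioPlayer player;

void setup() {
  size(768, 1024);
  maxim = new Maxim(this);                 // starts the audio thread
  player = maxim.loadFile("mybeat.wav");   // loads the whole file into memory
  player.setLooping(true);
  player.setAnalysing(true);               // enables the FFT so getAveragePower() returns data
}

void draw() {
  background(0);
  float power = player.getAveragePower();          // rough loudness of the last analysed frame
  ellipse(width/2, height/2, power * width, power * width);
  player.speed(map(mouseX, 0, width, 0.5, 2));     // 1 is normal speed, 2 is double speed
}

void mousePressed() {
  if (player.isPlaying()) player.stop();
  else player.play();
}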