/**
 * THREE.SpectrumShader
 *
 * Shader definition for the audio-spectrum bars.  Each bar is a quad
 * whose vertices carry a per-vertex `translation` direction and a bar
 * index `idx`; the vertex stage scales the translation by the bar's
 * current amplitude (a uniform array of 50 floats filled every frame
 * by main.js), so bars stretch and shrink with the audio signal.  The
 * fragment stage blends the colour between red and green based on the
 * same amplitude.
 */
THREE.SpectrumShader = (function () {

  // Vertex stage: displace every vertex of a bar along its translation
  // vector, scaled by that bar's amplitude.
  var vertexSource = [
    "attribute vec3 translation;",
    "attribute float idx;",
    "uniform float amplitude[ 50 ];",
    "varying vec3 vNormal;",
    "varying float amp;",
    "void main() {",
    "gl_PointSize = 1.0;",
    "vNormal = normal;",
    "highp int index = int(idx);",
    "amp = amplitude[index];",
    "vec3 newPosition = position + normal + vec3(translation * amp);",
    "gl_Position = projectionMatrix * modelViewMatrix * vec4(newPosition, 1.0);",
    "}"
  ].join("\n");

  // Fragment stage: red channel falls and green channel rises as amp
  // grows; alpha comes from the `opacity` uniform.
  var fragmentSource = [
    "uniform float opacity;",
    "varying float amp;",
    "void main() {",
    "gl_FragColor = vec4(-(amp) + 1.0, 0.5 + (amp/2.0), 0.0, opacity);",
    "}"
  ].join("\n");

  return {
    defines: {},

    // Per-vertex custom attributes, pushed from main.js (legacy
    // three.js ShaderMaterial custom-attribute API).
    attributes: {
      "translation": { type: 'v3', value: [] },
      "idx": { type: 'f', value: [] }
    },

    // Per-bar amplitudes (one float per bar) plus overall opacity.
    uniforms: {
      "amplitude": { type: "fv1", value: [] },
      "opacity": { type: "f", value: 1.0 }
    },

    vertexShader: vertexSource,
    fragmentShader: fragmentSource
  };
})();
// main.js — renders 50 audio-spectrum bars with three.js and drives
// them from the microphone through the Web Audio API (AnalyserNode).

// standard global variables
var container, scene, camera, renderer;
// Previously implicit globals — declared explicitly to avoid leaking
// accidental window properties.
var material, geometry, mesh, threeobj;

// SCENE
scene = new THREE.Scene();

// CAMERA — orthographic; top (-403) and bottom (150) are deliberately
// swapped so y grows downward, matching the background artwork layout.
camera = new THREE.OrthographicCamera( -250, 250, -403, 150, 0, 10000 );
scene.add( camera );
camera.position.set( 0, 0, -100 );
camera.lookAt( scene.position );

// RENDERER — alpha so the CSS background image shows through.
renderer = new THREE.WebGLRenderer( { antialias: true, alpha: true } );
renderer.setClearColor( 0xffffff, 0 );
renderer.setSize( 375, 553 );

container = document.body;
container.appendChild( renderer.domElement );

material = new THREE.ShaderMaterial( THREE.SpectrumShader );
geometry = new THREE.Geometry();

/**
 * Append one spectrum bar (a quad built from two triangles) to the
 * shared geometry and register its per-vertex shader attributes.
 *
 * @param {number} x     Left edge of the bar in world units.
 * @param {number} width Bar width in world units (a 1-unit gap is left).
 */
function addBar( x, width ) {
  // Each bar owns 4 consecutive vertices; idx identifies the bar so
  // the vertex shader can look up its amplitude.
  var idx = geometry.vertices.length / 4;

  geometry.vertices.push(
    new THREE.Vector3( x, 0, 0 ),
    new THREE.Vector3( x + width - 1, 0, 0 ),
    new THREE.Vector3( x + width - 1, 150, 0 ),
    new THREE.Vector3( x, 150, 0 )
  );

  // Two triangles forming the quad.
  geometry.faces.push( new THREE.Face3( idx * 4 + 0, idx * 4 + 1, idx * 4 + 2 ) );
  geometry.faces.push( new THREE.Face3( idx * 4 + 0, idx * 4 + 2, idx * 4 + 3 ) );

  geometry.faceVertexUvs[0].push([
    new THREE.Vector2( 0, 1 ),
    new THREE.Vector2( 1, 1 ),
    new THREE.Vector2( 1, 0 )
  ]);

  geometry.faceVertexUvs[0].push([
    new THREE.Vector2( 0, 1 ),
    new THREE.Vector2( 1, 0 ),
    new THREE.Vector2( 0, 0 )
  ]);

  // All 4 vertices of a bar share the same translation direction and
  // bar index; the vertex shader scales the translation by amplitude.
  for ( var i = 0; i < 4; i++ ) {
    material.attributes.translation.value.push( new THREE.Vector3( 0.0, 145.0, 0.0 ) );
    material.attributes.idx.value.push( idx );
  }
  // One amplitude entry per bar, not per vertex (the original guarded
  // this with a redundant `(i % 4) === 0` check inside the loop).
  material.uniforms.amplitude.value.push( 1.0 );
}

// Lay out 50 bars, each 10 units wide, right-to-left across [-250, 250).
for ( var i = 0; i < 50; i += 1 ) {
  addBar( -( ( 10 * i ) - 250 ) - 10, 10 );
}

mesh = new THREE.Mesh( geometry, material );

threeobj = new THREE.Object3D();
threeobj.add( mesh );

scene.add( threeobj );

// Legacy prefixed getUserMedia shim — kept for old browsers; start_mic
// prefers the modern promise-based navigator.mediaDevices API.
navigator.getUserMedia = navigator.getUserMedia ||
                         navigator.webkitGetUserMedia ||
                         navigator.mozGetUserMedia ||
                         navigator.msGetUserMedia;

var audioCtx;
var analyser;
var source;
var bufferLength;
var dataArray;

/**
 * Per-frame loop: sample the analyser, map 50 frequency bins onto the
 * 50 bar amplitudes, and render.  Amplitude is inverted (1 = silent,
 * 0 = loud) because the bars' translation vector points away from
 * their resting position in the shader.
 */
function animate() {
  requestAnimationFrame( animate );

  analyser.getByteFrequencyData( dataArray );

  for ( var i = 0; i < 50; i++ ) {
    // Skip the lowest 10 bins and stride by 2 to cover a wider band.
    material.uniforms.amplitude.value[i] = -( dataArray[ ( i + 10 ) * 2 ] / 255 ) + 1;
  }

  render();
}

function render() {
  renderer.render( scene, camera );
}

render();

/**
 * Request microphone access, wire the stream into an AnalyserNode and
 * start the animation loop.  Prefers the modern promise-based
 * navigator.mediaDevices.getUserMedia, falling back to the legacy
 * callback API shimmed above.
 */
function start_mic() {
  function onStream( stream ) {
    audioCtx = new ( window.AudioContext || window.webkitAudioContext )();
    analyser = audioCtx.createAnalyser();
    source = audioCtx.createMediaStreamSource( stream );

    source.connect( analyser );

    analyser.fftSize = 2048;
    bufferLength = analyser.frequencyBinCount;
    dataArray = new Uint8Array( bufferLength );

    animate();
  }

  function onError( err ) {
    // Previously swallowed silently — surface the failure for debugging.
    console.error( 'getUserMedia failed:', err );
  }

  if ( navigator.mediaDevices && navigator.mediaDevices.getUserMedia ) {
    navigator.mediaDevices.getUserMedia( { audio: true, video: false } )
      .then( onStream )
      .catch( onError );
  } else if ( navigator.getUserMedia ) {
    navigator.getUserMedia( { audio: true, video: false }, onStream, onError );
  } else {
    // No capture support at all — fallback.
  }
}