├── BROKEN MANTRA.mp3
├── BROKEN MANTRA.ogg
├── index.html
└── js
    ├── AudioAnalyser.js
    ├── GPGPU.js
    ├── ModifiedVREffect.js
    ├── VRControls.js
    ├── WebVR.js
    ├── gpgpu
    │   ├── BlurShader.js
    │   ├── CopyShader.js
    │   ├── MixShader.js
    │   └── SimulationShader.js
    ├── text.js
    └── three.min.js
/BROKEN MANTRA.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mrdoob/brokenmantra/528c17bd9a81acc08a6dbe4912c146402aa51d59/BROKEN MANTRA.mp3
--------------------------------------------------------------------------------
/BROKEN MANTRA.ogg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mrdoob/brokenmantra/528c17bd9a81acc08a6dbe4912c146402aa51d59/BROKEN MANTRA.ogg
--------------------------------------------------------------------------------
/index.html:
--------------------------------------------------------------------------------
[ index.html markup was not captured in this dump; only its line numbering survives, running to roughly line 682.
  Recoverable text fragments: "Broken Mantra" near line 7 (likely the page title), "Broken Mantra" again near
  line 64, and "START" near line 67 (apparently the label of the intro overlay's start button). ]
--------------------------------------------------------------------------------
/js/AudioAnalyser.js:
--------------------------------------------------------------------------------
1 | var AudioAnalyser = function ( element ) {
2 |
3 | var AudioContext = window.AudioContext || window.webkitAudioContext;
4 | var context = new AudioContext();
5 |
6 | var analyser = context.createAnalyser();
7 | analyser.fftSize = 32;
8 | analyser.connect( context.destination );
9 |
10 | var source = context.createMediaElementSource( element );
11 | source.connect( analyser );
12 |
13 | //
14 |
15 | var frequencyData = new Uint8Array( analyser.frequencyBinCount );
16 |
17 | var debug = location.search === '?debug';
18 |
19 | if ( debug ) {
20 |
21 | var canvas = document.createElement( 'canvas' );
22 | canvas.width = 128;
23 | canvas.height = 64;
24 | canvas.style.position = 'absolute';
25 | canvas.style.top = '0';
26 | canvas.style.left = '0';
27 | document.body.appendChild( canvas );
28 |
29 | var context = canvas.getContext( '2d' ); // note: reassigns 'context' from the AudioContext to the debug canvas' 2D context
30 |
31 | var bar = {
32 | width: canvas.width / analyser.frequencyBinCount,
33 | height: canvas.height
34 | };
35 |
36 | }
37 |
38 | return {
39 |
40 | getFrequency: function () {
41 |
42 | return frequencyData;
43 |
44 | },
45 | update: function () {
46 |
47 | analyser.getByteFrequencyData( frequencyData );
48 |
49 | if ( debug ) {
50 |
51 | context.fillStyle = 'darkblue';
52 | context.fillRect( 0, 0, canvas.width, canvas.height );
53 |
54 | context.fillStyle = 'blue';
55 |
56 | for ( var i = 0; i < frequencyData.length; i ++ ) {
57 |
58 | var frequency = frequencyData[ i ];
59 | context.fillRect( i * bar.width, ( 1 - ( frequency / 256 ) ) * bar.height, bar.width - 1, ( frequency / 256 ) * bar.height );
60 |
61 | }
62 |
63 | }
64 |
65 | }
66 |
67 | };
68 |
69 | };
70 |
--------------------------------------------------------------------------------
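Usage sketch (not part of the repository): AudioAnalyser wraps a Web Audio AnalyserNode around an existing <audio> element and exposes a 16-bin byte spectrum (fftSize 32, so frequencyBinCount is 16). A minimal driver from a render loop could look like the following; the element id and the use of bin 0 are illustrative assumptions, and appending ?debug to the URL overlays a small spectrum display.

// Hypothetical wiring; assumes index.html provides <audio id="audio" src="BROKEN MANTRA.mp3">.
var audioElement = document.getElementById( 'audio' );
var analyser = new AudioAnalyser( audioElement );

audioElement.play();

function animate() {

	requestAnimationFrame( animate );

	analyser.update();                       // refreshes the shared Uint8Array in place
	var frequency = analyser.getFrequency(); // 16 bins, values 0-255

	var level = frequency[ 0 ] / 255;        // illustrative: lowest bin as a 0-1 control value

	// ... feed `level` into uniforms or scales, then render the scene.

}

animate();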
/js/GPGPU.js:
--------------------------------------------------------------------------------
1 | /**
2 | * @author mrdoob / http://www.mrdoob.com
3 | */
4 |
5 | var GPGPU = function ( renderer ) {
6 |
7 | var camera = new THREE.OrthographicCamera( - 0.5, 0.5, 0.5, - 0.5, 0, 1 );
8 |
9 | var scene = new THREE.Scene();
10 |
11 | var mesh = new THREE.Mesh( new THREE.PlaneBufferGeometry( 1, 1 ) );
12 | scene.add( mesh );
13 |
14 | this.render = function ( _scene, _camera, target ) {
15 |
16 | renderer.render( _scene, _camera, target, false );
17 |
18 | };
19 |
20 | this.pass = function ( shader, target ) {
21 |
22 | mesh.material = shader.material;
23 | renderer.render( scene, camera, target, false );
24 |
25 | };
26 |
27 | this.out = function ( shader ) {
28 |
29 | mesh.material = shader.material;
30 | renderer.render( scene, camera );
31 |
32 | };
33 |
34 | };
35 |
--------------------------------------------------------------------------------
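Usage sketch (not part of the repository): GPGPU is a thin helper that draws either an arbitrary scene (render) or a unit quad carrying a shader's material (pass into a render target, out to the screen), never clearing the target. A minimal ping-pong arrangement, using the CopyShader defined under js/gpgpu/; the render-target parameters are assumptions rather than values taken from index.html, and the .texture property on render targets is assumed to exist in the bundled three.js revision.

var renderer = new THREE.WebGLRenderer();
var gpgpu = new GPGPU( renderer );

// Two float targets to ping-pong between (size and parameters are illustrative).
var options = {
	minFilter: THREE.NearestFilter,
	magFilter: THREE.NearestFilter,
	format: THREE.RGBAFormat,
	type: THREE.FloatType
};
var targetA = new THREE.WebGLRenderTarget( 256, 256, options );
var targetB = new THREE.WebGLRenderTarget( 256, 256, options );

var copy = new GPGPU.CopyShader();

// Read from A, write into B (setTexture returns the shader object, which pass() expects).
gpgpu.pass( copy.setTexture( targetA.texture ), targetB );

// Draw B to the default framebuffer.
gpgpu.out( copy.setTexture( targetB.texture ) );

// Swap for the next frame.
var swap = targetA; targetA = targetB; targetB = swap;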
/js/ModifiedVREffect.js:
--------------------------------------------------------------------------------
1 | /**
2 | * @author dmarcos / https://github.com/dmarcos
3 | * @author mrdoob / http://mrdoob.com
4 | *
5 | * WebVR Spec: http://mozvr.github.io/webvr-spec/webvr.html
6 | *
7 | * Firefox: http://mozvr.com/downloads/
8 | * Chromium: https://drive.google.com/folderview?id=0BzudLt22BqGRbW9WTHMtOWMzNjQ&usp=sharing#list
9 | *
10 | */
11 |
12 | THREE.VREffect = function ( renderer, onError ) {
13 |
14 | var vrHMD;
15 | var isDeprecatedAPI = false;
16 | var eyeTranslationL = new THREE.Vector3();
17 | var eyeTranslationR = new THREE.Vector3();
18 | var renderRectL, renderRectR;
19 | var eyeFOVL, eyeFOVR;
20 |
21 | function gotVRDevices( devices ) {
22 |
23 | for ( var i = 0; i < devices.length; i ++ ) {
24 |
25 | if ( 'VRDisplay' in window && devices[ i ] instanceof VRDisplay ) {
26 |
27 | vrHMD = devices[ i ];
28 | isDeprecatedAPI = false;
29 | break; // We keep the first we encounter
30 |
31 | } else if ( 'HMDVRDevice' in window && devices[ i ] instanceof HMDVRDevice ) {
32 |
33 | vrHMD = devices[ i ];
34 | isDeprecatedAPI = true;
35 | break; // We keep the first we encounter
36 |
37 | }
38 |
39 | }
40 |
41 | if ( vrHMD === undefined ) {
42 |
43 | if ( onError ) onError( 'HMD not available' );
44 |
45 | }
46 |
47 | }
48 |
49 | if ( navigator.getVRDisplays ) {
50 |
51 | navigator.getVRDisplays().then( gotVRDevices );
52 |
53 | } else if ( navigator.getVRDevices ) {
54 |
55 | // Deprecated API.
56 | navigator.getVRDevices().then( gotVRDevices );
57 |
58 | }
59 |
60 | //
61 |
62 | this.scale = 1;
63 |
64 | var isPresenting = false;
65 |
66 | var rendererSize = renderer.getSize();
67 | var rendererPixelRatio = renderer.getPixelRatio();
68 |
69 | this.setSize = function ( width, height ) {
70 |
71 | rendererSize = { width: width, height: height };
72 |
73 | if ( isPresenting ) {
74 |
75 | var eyeParamsL = vrHMD.getEyeParameters( 'left' );
76 | renderer.setPixelRatio( 1 );
77 |
78 | if ( isDeprecatedAPI ) {
79 |
80 | renderer.setSize( eyeParamsL.renderRect.width * 2, eyeParamsL.renderRect.height, false );
81 |
82 | } else {
83 |
84 | renderer.setSize( eyeParamsL.renderWidth * 2, eyeParamsL.renderHeight, false );
85 |
86 | }
87 |
88 |
89 | } else {
90 |
91 | renderer.setPixelRatio( rendererPixelRatio );
92 | renderer.setSize( width, height );
93 |
94 | }
95 |
96 | };
97 |
98 | // fullscreen
99 |
100 | var canvas = renderer.domElement;
101 | var requestFullscreen;
102 | var exitFullscreen;
103 | var fullscreenElement;
104 |
105 | function onFullscreenChange () {
106 |
107 | var wasPresenting = isPresenting;
108 | isPresenting = vrHMD !== undefined && ( vrHMD.isPresenting || ( isDeprecatedAPI && document[ fullscreenElement ] instanceof window.HTMLElement ) );
109 |
110 | if ( wasPresenting === isPresenting ) {
111 |
112 | return;
113 |
114 | }
115 |
116 | if ( isPresenting ) {
117 |
118 | rendererPixelRatio = renderer.getPixelRatio();
119 | rendererSize = renderer.getSize();
120 |
121 | var eyeParamsL = vrHMD.getEyeParameters( 'left' );
122 | var eyeWidth, eyeHeight;
123 |
124 | if ( isDeprecatedAPI ) {
125 |
126 | eyeWidth = eyeParamsL.renderRect.width;
127 | eyeHeight = eyeParamsL.renderRect.height;
128 |
129 | } else {
130 |
131 | eyeWidth = eyeParamsL.renderWidth;
132 | eyeHeight = eyeParamsL.renderHeight;
133 |
134 | }
135 |
136 | renderer.setPixelRatio( 1 );
137 | renderer.setSize( eyeWidth * 2, eyeHeight, false );
138 |
139 | } else {
140 |
141 | renderer.setPixelRatio( rendererPixelRatio );
142 | renderer.setSize( rendererSize.width, rendererSize.height );
143 |
144 | }
145 |
146 | }
147 |
148 | if ( canvas.requestFullscreen ) {
149 |
150 | requestFullscreen = 'requestFullscreen';
151 | fullscreenElement = 'fullscreenElement';
152 | exitFullscreen = 'exitFullscreen';
153 | document.addEventListener( 'fullscreenchange', onFullscreenChange, false );
154 |
155 | } else if ( canvas.mozRequestFullScreen ) {
156 |
157 | requestFullscreen = 'mozRequestFullScreen';
158 | fullscreenElement = 'mozFullScreenElement';
159 | exitFullscreen = 'mozCancelFullScreen';
160 | document.addEventListener( 'mozfullscreenchange', onFullscreenChange, false );
161 |
162 | } else {
163 |
164 | requestFullscreen = 'webkitRequestFullscreen';
165 | fullscreenElement = 'webkitFullscreenElement';
166 | exitFullscreen = 'webkitExitFullscreen';
167 | document.addEventListener( 'webkitfullscreenchange', onFullscreenChange, false );
168 |
169 | }
170 |
171 | window.addEventListener( 'vrdisplaypresentchange', onFullscreenChange, false );
172 |
173 | this.setFullScreen = function ( boolean ) {
174 |
175 | return new Promise( function ( resolve, reject ) {
176 |
177 | if ( vrHMD === undefined ) {
178 |
179 | reject( new Error( 'No VR hardware found.' ) );
180 | return;
181 |
182 | }
183 | if ( isPresenting === boolean ) {
184 |
185 | resolve();
186 | return;
187 |
188 | }
189 |
190 | if ( ! isDeprecatedAPI ) {
191 |
192 | if ( boolean ) {
193 |
194 | resolve( vrHMD.requestPresent( [ { source: canvas } ] ) );
195 |
196 | } else {
197 |
198 | resolve( vrHMD.exitPresent() );
199 |
200 | }
201 |
202 | } else {
203 |
204 | if ( canvas[ requestFullscreen ] ) {
205 |
206 | canvas[ boolean ? requestFullscreen : exitFullscreen ]( { vrDisplay: vrHMD } );
207 | resolve();
208 |
209 | } else {
210 |
211 | console.error( 'No compatible requestFullscreen method found.' );
212 | reject( new Error( 'No compatible requestFullscreen method found.' ) );
213 |
214 | }
215 |
216 | }
217 |
218 | } );
219 |
220 | };
221 |
222 | this.requestPresent = function () {
223 |
224 | return this.setFullScreen( true );
225 |
226 | };
227 |
228 | this.exitPresent = function () {
229 |
230 | return this.setFullScreen( false );
231 |
232 | };
233 |
234 | // render
235 |
236 | var cameraL = new THREE.PerspectiveCamera();
237 | cameraL.layers.enable( 1 );
238 |
239 | var cameraR = new THREE.PerspectiveCamera();
240 | cameraR.layers.enable( 2 );
241 |
242 | this.render = function ( scene, camera ) {
243 |
244 | if ( vrHMD && isPresenting ) {
245 |
246 | var autoUpdate = scene.autoUpdate;
247 |
248 | if ( autoUpdate ) {
249 |
250 | scene.updateMatrixWorld();
251 | scene.autoUpdate = false;
252 |
253 | }
254 |
255 | var eyeParamsL = vrHMD.getEyeParameters( 'left' );
256 | var eyeParamsR = vrHMD.getEyeParameters( 'right' );
257 |
258 | if ( ! isDeprecatedAPI ) {
259 |
260 | eyeTranslationL.fromArray( eyeParamsL.offset );
261 | eyeTranslationR.fromArray( eyeParamsR.offset );
262 | eyeFOVL = eyeParamsL.fieldOfView;
263 | eyeFOVR = eyeParamsR.fieldOfView;
264 |
265 | } else {
266 |
267 | eyeTranslationL.copy( eyeParamsL.eyeTranslation );
268 | eyeTranslationR.copy( eyeParamsR.eyeTranslation );
269 | eyeFOVL = eyeParamsL.recommendedFieldOfView;
270 | eyeFOVR = eyeParamsR.recommendedFieldOfView;
271 |
272 | }
273 |
274 | if ( Array.isArray( scene ) ) {
275 |
276 | console.warn( 'THREE.VREffect.render() no longer supports arrays. Use object.layers instead.' );
277 | scene = scene[ 0 ];
278 |
279 | }
280 |
281 | // When rendering we don't care what the recommended size is, only what the actual size
282 | // of the backbuffer is.
283 | var size = renderer.getSize();
284 | renderRectL = { x: 0, y: 0, width: size.width / 2, height: size.height };
285 | renderRectR = { x: size.width / 2, y: 0, width: size.width / 2, height: size.height };
286 |
287 | renderer.setScissorTest( true );
288 | // renderer.clear(); // NOTE Modification for Broken Mantra
289 |
290 | if ( camera.parent === null ) camera.updateMatrixWorld();
291 |
292 | cameraL.projectionMatrix = fovToProjection( eyeFOVL, true, camera.near, camera.far );
293 | cameraR.projectionMatrix = fovToProjection( eyeFOVR, true, camera.near, camera.far );
294 |
295 | camera.matrixWorld.decompose( cameraL.position, cameraL.quaternion, cameraL.scale );
296 | camera.matrixWorld.decompose( cameraR.position, cameraR.quaternion, cameraR.scale );
297 |
298 | var scale = this.scale;
299 | cameraL.translateOnAxis( eyeTranslationL, scale );
300 | cameraR.translateOnAxis( eyeTranslationR, scale );
301 |
302 |
303 | // render left eye
304 | renderer.setViewport( renderRectL.x, renderRectL.y, renderRectL.width, renderRectL.height );
305 | renderer.setScissor( renderRectL.x, renderRectL.y, renderRectL.width, renderRectL.height );
306 | renderer.render( scene, cameraL );
307 |
308 | // render right eye
309 | renderer.setViewport( renderRectR.x, renderRectR.y, renderRectR.width, renderRectR.height );
310 | renderer.setScissor( renderRectR.x, renderRectR.y, renderRectR.width, renderRectR.height );
311 | renderer.render( scene, cameraR );
312 |
313 | renderer.setScissorTest( false );
314 |
315 | if ( autoUpdate ) {
316 |
317 | scene.autoUpdate = true;
318 |
319 | }
320 |
321 | if ( ! isDeprecatedAPI ) {
322 |
323 | vrHMD.submitFrame();
324 |
325 | }
326 |
327 | return;
328 |
329 | }
330 |
331 | // Regular render mode if not HMD
332 |
333 | renderer.render( scene, camera );
334 |
335 | };
336 |
337 | //
338 |
339 | function fovToNDCScaleOffset( fov ) {
340 |
341 | var pxscale = 2.0 / ( fov.leftTan + fov.rightTan );
342 | var pxoffset = ( fov.leftTan - fov.rightTan ) * pxscale * 0.5;
343 | var pyscale = 2.0 / ( fov.upTan + fov.downTan );
344 | var pyoffset = ( fov.upTan - fov.downTan ) * pyscale * 0.5;
345 | return { scale: [ pxscale, pyscale ], offset: [ pxoffset, pyoffset ] };
346 |
347 | }
348 |
349 | function fovPortToProjection( fov, rightHanded, zNear, zFar ) {
350 |
351 | rightHanded = rightHanded === undefined ? true : rightHanded;
352 | zNear = zNear === undefined ? 0.01 : zNear;
353 | zFar = zFar === undefined ? 10000.0 : zFar;
354 |
355 | var handednessScale = rightHanded ? - 1.0 : 1.0;
356 |
357 | // start with an identity matrix
358 | var mobj = new THREE.Matrix4();
359 | var m = mobj.elements;
360 |
361 | // and with scale/offset info for normalized device coords
362 | var scaleAndOffset = fovToNDCScaleOffset( fov );
363 |
364 | // X result, map clip edges to [-w,+w]
365 | m[ 0 * 4 + 0 ] = scaleAndOffset.scale[ 0 ];
366 | m[ 0 * 4 + 1 ] = 0.0;
367 | m[ 0 * 4 + 2 ] = scaleAndOffset.offset[ 0 ] * handednessScale;
368 | m[ 0 * 4 + 3 ] = 0.0;
369 |
370 | // Y result, map clip edges to [-w,+w]
371 | // Y offset is negated because this proj matrix transforms from world coords with Y=up,
372 | // but the NDC scaling has Y=down (thanks D3D?)
373 | m[ 1 * 4 + 0 ] = 0.0;
374 | m[ 1 * 4 + 1 ] = scaleAndOffset.scale[ 1 ];
375 | m[ 1 * 4 + 2 ] = - scaleAndOffset.offset[ 1 ] * handednessScale;
376 | m[ 1 * 4 + 3 ] = 0.0;
377 |
378 | // Z result (up to the app)
379 | m[ 2 * 4 + 0 ] = 0.0;
380 | m[ 2 * 4 + 1 ] = 0.0;
381 | m[ 2 * 4 + 2 ] = zFar / ( zNear - zFar ) * - handednessScale;
382 | m[ 2 * 4 + 3 ] = ( zFar * zNear ) / ( zNear - zFar );
383 |
384 | // W result (= Z in)
385 | m[ 3 * 4 + 0 ] = 0.0;
386 | m[ 3 * 4 + 1 ] = 0.0;
387 | m[ 3 * 4 + 2 ] = handednessScale;
388 | m[ 3 * 4 + 3 ] = 0.0;
389 |
390 | mobj.transpose();
391 |
392 | return mobj;
393 |
394 | }
395 |
396 | function fovToProjection( fov, rightHanded, zNear, zFar ) {
397 |
398 | var DEG2RAD = Math.PI / 180.0;
399 |
400 | var fovPort = {
401 | upTan: Math.tan( fov.upDegrees * DEG2RAD ),
402 | downTan: Math.tan( fov.downDegrees * DEG2RAD ),
403 | leftTan: Math.tan( fov.leftDegrees * DEG2RAD ),
404 | rightTan: Math.tan( fov.rightDegrees * DEG2RAD )
405 | };
406 |
407 | return fovPortToProjection( fovPort, rightHanded, zNear, zFar );
408 |
409 | }
410 |
411 | };
412 |
--------------------------------------------------------------------------------
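Usage sketch (not part of the repository): THREE.VREffect wraps a WebGLRenderer, splitting the canvas into left/right viewports with per-eye projection matrices while an HMD is presenting, and falling back to a plain render otherwise. The only local modification is the commented-out renderer.clear() noted inline, which lets previously drawn content persist across frames. Typical wiring, with camera parameters as assumptions:

var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera( 70, window.innerWidth / window.innerHeight, 0.1, 1000 );

var renderer = new THREE.WebGLRenderer( { antialias: true } );
document.body.appendChild( renderer.domElement );

var effect = new THREE.VREffect( renderer, function ( error ) {

	console.warn( error );   // e.g. 'HMD not available'

} );
effect.setSize( window.innerWidth, window.innerHeight );

window.addEventListener( 'resize', function () {

	camera.aspect = window.innerWidth / window.innerHeight;
	camera.updateProjectionMatrix();
	effect.setSize( window.innerWidth, window.innerHeight );

}, false );

function animate() {

	requestAnimationFrame( animate );
	effect.render( scene, camera );   // stereo while presenting, mono otherwise

}

animate();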
/js/VRControls.js:
--------------------------------------------------------------------------------
1 | /**
2 | * @author dmarcos / https://github.com/dmarcos
3 | * @author mrdoob / http://mrdoob.com
4 | */
5 |
6 | THREE.VRControls = function ( object, onError ) {
7 |
8 | var scope = this;
9 |
10 | var vrInput;
11 |
12 | var standingMatrix = new THREE.Matrix4();
13 |
14 | function gotVRDevices( devices ) {
15 |
16 | for ( var i = 0; i < devices.length; i ++ ) {
17 |
18 | if ( ( 'VRDisplay' in window && devices[ i ] instanceof VRDisplay ) ||
19 | ( 'PositionSensorVRDevice' in window && devices[ i ] instanceof PositionSensorVRDevice ) ) {
20 |
21 | vrInput = devices[ i ];
22 | break; // We keep the first we encounter
23 |
24 | }
25 |
26 | }
27 |
28 | if ( !vrInput ) {
29 |
30 | if ( onError ) onError( 'VR input not available.' );
31 |
32 | }
33 |
34 | }
35 |
36 | if ( navigator.getVRDisplays ) {
37 |
38 | navigator.getVRDisplays().then( gotVRDevices );
39 |
40 | } else if ( navigator.getVRDevices ) {
41 |
42 | // Deprecated API.
43 | navigator.getVRDevices().then( gotVRDevices );
44 |
45 | }
46 |
47 | // the Rift SDK returns the position in meters
48 | // this scale factor allows the user to define how meters
49 | // are converted to scene units.
50 |
51 | this.scale = 1;
52 |
53 | // If true will use "standing space" coordinate system where y=0 is the
54 | // floor and x=0, z=0 is the center of the room.
55 | this.standing = false;
56 |
57 | // Distance from the users eyes to the floor in meters. Used when
58 | // standing=true but the VRDisplay doesn't provide stageParameters.
59 | this.userHeight = 1.6;
60 |
61 | this.update = function () {
62 |
63 | if ( vrInput ) {
64 |
65 | if ( vrInput.getPose ) {
66 |
67 | var pose = vrInput.getPose();
68 |
69 | if ( pose.orientation !== null ) {
70 |
71 | object.quaternion.fromArray( pose.orientation );
72 |
73 | }
74 |
75 | if ( pose.position !== null ) {
76 |
77 | object.position.fromArray( pose.position );
78 |
79 | } else {
80 |
81 | object.position.set( 0, 0, 0 );
82 |
83 | }
84 |
85 | } else {
86 |
87 | // Deprecated API.
88 | var state = vrInput.getState();
89 |
90 | if ( state.orientation !== null ) {
91 |
92 | object.quaternion.copy( state.orientation );
93 |
94 | }
95 |
96 | if ( state.position !== null ) {
97 |
98 | object.position.copy( state.position );
99 |
100 | } else {
101 |
102 | object.position.set( 0, 0, 0 );
103 |
104 | }
105 |
106 | }
107 |
108 | if ( this.standing ) {
109 |
110 | if ( vrInput.stageParameters ) {
111 |
112 | object.updateMatrix();
113 |
114 | standingMatrix.fromArray(vrInput.stageParameters.sittingToStandingTransform);
115 | object.applyMatrix( standingMatrix );
116 |
117 | } else {
118 |
119 | object.position.setY( object.position.y + this.userHeight );
120 |
121 | }
122 |
123 | }
124 |
125 | object.position.multiplyScalar( scope.scale );
126 |
127 | }
128 |
129 | };
130 |
131 | this.resetPose = function () {
132 |
133 | if ( vrInput ) {
134 |
135 | if ( vrInput.resetPose !== undefined ) {
136 |
137 | vrInput.resetPose();
138 |
139 | } else if ( vrInput.resetSensor !== undefined ) {
140 |
141 | // Deprecated API.
142 | vrInput.resetSensor();
143 |
144 | } else if ( vrInput.zeroSensor !== undefined ) {
145 |
146 | // Really deprecated API.
147 | vrInput.zeroSensor();
148 |
149 | }
150 |
151 | }
152 |
153 | };
154 |
155 | this.resetSensor = function () {
156 |
157 | console.warn( 'THREE.VRControls: .resetSensor() is now .resetPose().' );
158 | this.resetPose();
159 |
160 | };
161 |
162 | this.zeroSensor = function () {
163 |
164 | console.warn( 'THREE.VRControls: .zeroSensor() is now .resetPose().' );
165 | this.resetPose();
166 |
167 | };
168 |
169 | this.dispose = function () {
170 |
171 | vrInput = null;
172 |
173 | };
174 |
175 | };
176 |
--------------------------------------------------------------------------------
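Usage sketch (not part of the repository): VRControls copies the HMD pose onto an object, normally the camera, once per frame. Continuing with the names from the VREffect sketch above:

var controls = new THREE.VRControls( camera, function ( message ) {

	console.warn( message );   // e.g. 'VR input not available.'

} );

// Optional: treat y = 0 as the floor; userHeight is used when the display
// provides no stageParameters.
controls.standing = true;

// Inside the render loop, before effect.render( scene, camera ):
controls.update();   // writes position and quaternion onto `camera`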
/js/WebVR.js:
--------------------------------------------------------------------------------
1 | /**
2 | * @author mrdoob / http://mrdoob.com
3 | * Based on @tojiro's vr-samples-utils.js
4 | */
5 |
6 | var WEBVR = {
7 |
8 | isLatestAvailable: function () {
9 |
10 | return navigator.getVRDisplays !== undefined;
11 |
12 | },
13 |
14 | isAvailable: function () {
15 |
16 | return navigator.getVRDisplays !== undefined || navigator.getVRDevices !== undefined;
17 |
18 | },
19 |
20 | getMessage: function () {
21 |
22 | var message;
23 |
24 | if ( navigator.getVRDisplays ) {
25 |
26 | navigator.getVRDisplays().then( function ( displays ) {
27 |
28 | if ( displays.length === 0 ) message = 'WebVR supported, but no VRDisplays found.';
29 |
30 | } );
31 |
32 | } else if ( navigator.getVRDevices ) {
33 |
34 | message = 'Your browser supports WebVR but not the latest version. See webvr.info for more info.';
35 |
36 | } else {
37 |
38 | message = 'Your browser does not support WebVR. See webvr.info for assistance.';
39 |
40 | }
41 |
42 | if ( message !== undefined ) {
43 |
44 | var container = document.createElement( 'div' );
45 | container.style.position = 'absolute';
46 | container.style.left = '0';
47 | container.style.top = '0';
48 | container.style.right = '0';
49 | container.style.zIndex = '999';
50 | container.align = 'center';
51 |
52 | var error = document.createElement( 'div' );
53 | error.style.fontFamily = 'sans-serif';
54 | error.style.fontSize = '16px';
55 | error.style.fontStyle = 'normal';
56 | error.style.lineHeight = '26px';
57 | error.style.backgroundColor = '#fff';
58 | error.style.color = '#000';
59 | error.style.padding = '10px 20px';
60 | error.style.margin = '50px';
61 | error.style.display = 'inline-block';
62 | error.innerHTML = message;
63 | container.appendChild( error );
64 |
65 | return container;
66 |
67 | }
68 |
69 | },
70 |
71 | getButton: function ( effect ) {
72 |
73 | var button = document.createElement( 'button' );
74 | button.style.position = 'absolute';
75 | button.style.left = 'calc(50% - 30px)';
76 | button.style.bottom = '20px';
77 | button.style.border = '0';
78 | button.style.padding = '8px';
79 | button.style.cursor = 'pointer';
80 | button.style.backgroundColor = '#000';
81 | button.style.color = '#fff';
82 | button.style.fontFamily = 'sans-serif';
83 | button.style.fontSize = '13px';
84 | button.style.fontStyle = 'normal';
85 | button.style.zIndex = '999';
86 | button.textContent = 'ENTER VR';
87 | button.onclick = function() {
88 |
89 | effect.setFullScreen( true );
90 |
91 | };
92 |
93 | return button;
94 |
95 | }
96 |
97 | };
98 |
--------------------------------------------------------------------------------
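Usage sketch (not part of the repository): WEBVR is a capability check plus two DOM helpers, typically called once at startup. Pairing it with the VREffect instance from the earlier sketch is an assumption consistent with getButton() calling effect.setFullScreen( true ):

if ( WEBVR.isAvailable() === true ) {

	// Adds an 'ENTER VR' button that requests presentation through the effect.
	document.body.appendChild( WEBVR.getButton( effect ) );

} else {

	// getMessage() returns an advisory <div>, or undefined when there is nothing to report.
	var message = WEBVR.getMessage();
	if ( message !== undefined ) document.body.appendChild( message );

}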
/js/gpgpu/BlurShader.js:
--------------------------------------------------------------------------------
1 | /**
2 | * @author mrdoob / http://www.mrdoob.com
3 | */
4 |
5 | GPGPU.BlurShader = function () {
6 |
7 | var material = new THREE.ShaderMaterial( {
8 |
9 | uniforms: {
10 | texture: { type: 't', value: null },
11 | delta: { type: 'v2', value: new THREE.Vector2() }
12 | },
13 | vertexShader: [
14 |
15 | 'varying vec2 vUv;',
16 |
17 | 'void main() {',
18 | ' vUv = vec2( uv.x, uv.y );',
19 | ' gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );',
20 | '} '
21 |
22 | ].join( '\n' ),
23 | fragmentShader: [
24 |
25 | 'varying vec2 vUv;',
26 | 'uniform sampler2D texture;',
27 | 'uniform vec2 delta;',
28 |
29 | 'void main() {',
30 | "vec4 color = vec4( 0.0 );",
31 | "color += texture2D( texture, vec2( vUv.x, vUv.y ) - ( delta * 4.0 ) ) * 0.051;",
32 | "color += texture2D( texture, vec2( vUv.x, vUv.y ) - ( delta * 3.0 ) ) * 0.0918;",
33 | "color += texture2D( texture, vec2( vUv.x, vUv.y ) - ( delta * 2.0 ) ) * 0.12245;",
34 | "color += texture2D( texture, vec2( vUv.x, vUv.y ) - delta ) * 0.1531;",
35 | "color += texture2D( texture, vec2( vUv.x, vUv.y ) ) * 0.1633;",
36 | "color += texture2D( texture, vec2( vUv.x, vUv.y ) + delta ) * 0.1531;",
37 | "color += texture2D( texture, vec2( vUv.x, vUv.y ) + ( delta * 2.0 ) ) * 0.12245;",
38 | "color += texture2D( texture, vec2( vUv.x, vUv.y ) + ( delta * 3.0 ) ) * 0.0918;",
39 | "color += texture2D( texture, vec2( vUv.x, vUv.y ) + ( delta * 4.0 ) ) * 0.051;",
40 | ' gl_FragColor = color;',
41 | '}'
42 |
43 | ].join( '\n' )
44 |
45 | } );
46 |
47 | return {
48 |
49 | material: material,
50 |
51 | setTexture: function ( texture ) {
52 |
53 | material.uniforms.texture.value = texture;
54 |
55 | return this;
56 |
57 | },
58 |
59 | setDelta: function ( x, y ) {
60 |
61 | material.uniforms.delta.value.set( x, y );
62 |
63 | return this;
64 |
65 | }
66 |
67 | }
68 |
69 | };
--------------------------------------------------------------------------------
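Usage sketch (not part of the repository): the kernel is a 9-tap Gaussian sampled along whatever direction delta points in, so a full blur takes two passes, horizontal then vertical. The target names and the 1/width, 1/height texel deltas below are assumptions, reusing the gpgpu helper from the earlier sketch.

var blur = new GPGPU.BlurShader();

// Horizontal pass: read sourceTarget, write into tempTarget.
gpgpu.pass( blur.setTexture( sourceTarget.texture ).setDelta( 1 / sourceTarget.width, 0 ), tempTarget );

// Vertical pass: read tempTarget, write back into sourceTarget.
gpgpu.pass( blur.setTexture( tempTarget.texture ).setDelta( 0, 1 / tempTarget.height ), sourceTarget );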
/js/gpgpu/CopyShader.js:
--------------------------------------------------------------------------------
1 | /**
2 | * @author mrdoob / http://www.mrdoob.com
3 | */
4 |
5 | GPGPU.CopyShader = function () {
6 |
7 | var material = new THREE.ShaderMaterial( {
8 |
9 | uniforms: {
10 | texture: { type: 't', value: null }
11 | },
12 | vertexShader: [
13 |
14 | 'varying vec2 vUv;',
15 |
16 | 'void main() {',
17 | ' vUv = uv;',
18 | ' gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );',
19 | '} '
20 |
21 | ].join( '\n' ),
22 | fragmentShader: [
23 |
24 | 'varying vec2 vUv;',
25 | 'uniform sampler2D texture;',
26 |
27 | 'void main() {',
28 | ' gl_FragColor = texture2D( texture, vUv );',
29 | '}'
30 |
31 | ].join( '\n' )
32 |
33 | } );
34 |
35 | return {
36 |
37 | material: material,
38 |
39 | setTexture: function ( texture ) {
40 |
41 | material.uniforms.texture.value = texture;
42 |
43 | return this;
44 |
45 | }
46 |
47 | }
48 |
49 | };
--------------------------------------------------------------------------------
/js/gpgpu/MixShader.js:
--------------------------------------------------------------------------------
1 | /**
2 | * @author mrdoob / http://www.mrdoob.com
3 | */
4 |
5 | GPGPU.MixShader = function () {
6 |
7 | var material = new THREE.ShaderMaterial( {
8 |
9 | uniforms: {
10 | texture1: { type: 't', value: null },
11 | texture2: { type: 't', value: null }
12 | },
13 | vertexShader: [
14 |
15 | 'varying vec2 vUv;',
16 |
17 | 'void main() {',
18 | ' vUv = vec2( uv.x, uv.y );',
19 | ' gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );',
20 | '} '
21 |
22 | ].join( '\n' ),
23 | fragmentShader: [
24 |
25 | 'varying vec2 vUv;',
26 | 'uniform sampler2D texture1;',
27 | 'uniform sampler2D texture2;',
28 |
29 | 'void main() {',
30 | ' vec3 color1 = texture2D( texture1, vUv ).xyz;',
31 | ' vec3 color2 = texture2D( texture2, vUv ).xyz;',
32 | ' gl_FragColor = vec4( color1 + color2 * 1.5, 1.0 );',
33 | '}'
34 |
35 | ].join( '\n' )
36 |
37 | } );
38 |
39 | return {
40 |
41 | material: material,
42 |
43 | setTextures: function ( texture1, texture2 ) {
44 |
45 | material.uniforms.texture1.value = texture1;
46 | material.uniforms.texture2.value = texture2;
47 |
48 | return this;
49 |
50 | }
51 |
52 | }
53 |
54 | };
--------------------------------------------------------------------------------
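Usage sketch (not part of the repository): MixShader adds two textures, weighting the second by 1.5, which makes it a simple additive composite, for instance a scene buffer plus a blurred glow buffer. The buffer names are assumptions.

var mix = new GPGPU.MixShader();

// Additive composite straight to the screen: color1 + color2 * 1.5.
gpgpu.out( mix.setTextures( sceneTarget.texture, glowTarget.texture ) );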
/js/gpgpu/SimulationShader.js:
--------------------------------------------------------------------------------
1 | /**
2 | * @author mrdoob / http://www.mrdoob.com
3 | */
4 |
5 | GPGPU.SimulationShader = function () {
6 |
7 | var material = new THREE.ShaderMaterial( {
8 |
9 | uniforms: {
10 | tPositions: { type: "t", value: null },
11 | tOrigins: { type: "t", value: null },
12 | opacity: { type: "f", value: 0 },
13 | timer: { type: "f", value: 0 }
14 | },
15 | vertexShader: [
16 |
17 | 'varying vec2 vUv;',
18 |
19 | 'void main() {',
20 | ' vUv = vec2( uv.x, 1.0 - uv.y );',
21 | ' gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );',
22 | '}'
23 |
24 | ].join( '\n' ),
25 | fragmentShader: [
26 |
27 | 'uniform float opacity;',
28 |
29 | 'uniform sampler2D tPositions;',
30 | 'uniform sampler2D tOrigins;',
31 |
32 | 'uniform float timer;',
33 |
34 | 'varying vec2 vUv;',
35 |
36 | /*
37 | 'float rand(vec2 co){',
38 | ' return fract(sin(dot(co.xy ,vec2(12.9898,78.233))) * 43758.5453);',
39 | '}',
40 | */
41 |
42 | 'void main() {',
43 |
44 | ' vec4 pos = texture2D( tPositions, vUv );',
45 |
46 | ' if ( pos.w <= 0.0 ) {',
47 |
48 | ' vec4 sample = texture2D( tOrigins, vUv );',
49 | ' pos.xyz = sample.xyz;',
50 | ' pos.w = sample.w * opacity;',
51 |
52 | ' } else {',
53 |
54 | ' float x = pos.x + timer * 5.0;',
55 | ' float y = pos.y;',
56 | ' float z = pos.z + timer * 4.0;',
57 |
58 | ' pos.x += sin( y * 0.133 ) * cos( z * 0.137 ) * 0.4;',
59 | ' pos.y += sin( x * 0.135 ) * cos( x * 0.135 ) * 0.4;',
60 | ' pos.z += sin( x * 0.137 ) * cos( y * 0.133 ) * 0.4;',
61 | ' pos.w -= 0.01;',
62 |
63 | ' }',
64 |
65 | ' gl_FragColor = pos;',
66 |
67 | '}',
68 |
69 | ].join( '\n' )
70 |
71 | } );
72 |
73 | return {
74 |
75 | material: material,
76 |
77 | setPositionsTexture: function ( positions ) {
78 |
79 | material.uniforms.tPositions.value = positions;
80 |
81 | return this;
82 |
83 | },
84 |
85 | setOriginsTexture: function ( origins ) {
86 |
87 | material.uniforms.tOrigins.value = origins;
88 |
89 | return this;
90 |
91 | },
92 |
93 | setOpacity: function ( opacity ) {
94 |
95 | material.uniforms.opacity.value = opacity;
96 |
97 | return this;
98 |
99 | },
100 |
101 | setTimer: function ( timer ) {
102 |
103 | material.uniforms.timer.value = timer;
104 |
105 | return this;
106 |
107 | }
108 |
109 | }
110 |
111 | };
112 |
--------------------------------------------------------------------------------
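Usage sketch (not part of the repository): the simulation stores one particle per texel as (x, y, z, life). Texels with life <= 0 are respawned from tOrigins with their stored life scaled by opacity; live ones drift through a cheap trigonometric flow field driven by timer and lose 0.01 life per step. Running it means ping-ponging two float render targets; the texture size, seed data, and the way timer and opacity are fed below are all assumptions (opacity could, for instance, come from AudioAnalyser).

var SIZE = 256;   // illustrative: SIZE * SIZE particles

// Seed data: xyz spawn position plus an initial life in w (placeholder values).
var data = new Float32Array( SIZE * SIZE * 4 );

for ( var i = 0; i < data.length; i += 4 ) {

	data[ i + 0 ] = ( Math.random() - 0.5 ) * 100;
	data[ i + 1 ] = ( Math.random() - 0.5 ) * 100;
	data[ i + 2 ] = ( Math.random() - 0.5 ) * 100;
	data[ i + 3 ] = Math.random();

}

var origins = new THREE.DataTexture( data, SIZE, SIZE, THREE.RGBAFormat, THREE.FloatType );
origins.needsUpdate = true;

var simulation = new GPGPU.SimulationShader();
simulation.setOriginsTexture( origins );

var current = targetA, previous = targetB;   // float render targets as in the GPGPU sketch

function step( time, level ) {

	simulation
		.setPositionsTexture( previous.texture )
		.setTimer( time )      // advances the flow field
		.setOpacity( level );  // scales the life of respawned particles

	gpgpu.pass( simulation, current );

	// Swap so this frame's output becomes next frame's input.
	var swap = current; current = previous; previous = swap;

}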
/js/text.js:
--------------------------------------------------------------------------------
1 | /**
2 | * @author mrdoob / http://mrdoob.com
3 | */
4 |
5 | ( function () {
6 |
7 | var geometry = null;
8 |
9 | var posx = 0, offsetx = 0;
10 | var posy = 0, offsety = 0;
11 |
12 | var moveTo = function ( x, y ) {
13 |
14 | posx = x;
15 | posy = y;
16 |
17 | };
18 |
19 | var lineTo = function ( x, y ) {
20 |
21 | geometry.vertices.push( new THREE.Vector3( posx + offsetx, posy + offsety ) );
22 | geometry.vertices.push( new THREE.Vector3( x + offsetx, y + offsety ) );
23 |
24 | moveTo( x, y );
25 |
26 | };
27 |
28 | var addChar = function ( string, x, y ) {
29 |
30 | offsetx = x;
31 | offsety = y;
32 |
33 | switch ( string ) {
34 |
35 | case " ":
36 |
37 | break;
38 |
39 | case "A":
40 |
41 | moveTo( 0, 0 );
42 | lineTo( 1, 4 );
43 | lineTo( 2, 4 );
44 | lineTo( 2, 0 );
45 |
46 | break;
47 |
48 | case "B":
49 |
50 | moveTo( 0, 0 );
51 | lineTo( 0, 4 );
52 | lineTo( 2, 4 );
53 | lineTo( 1, 2 );
54 | lineTo( 2, 0 );
55 | lineTo( 0, 0 );
56 |
57 | break;
58 |
59 | case "C":
60 |
61 | moveTo( 2, 0 );
62 | lineTo( 0, 0 );
63 | lineTo( 0, 4 );
64 | lineTo( 2, 4 );
65 |
66 | break;
67 |
68 | case "D":
69 |
70 | moveTo( 0, 0 );
71 | lineTo( 0, 4 );
72 | lineTo( 1, 4 );
73 | lineTo( 2, 0 );
74 | lineTo( 0, 0 );
75 |
76 | break;
77 |
78 | case "E":
79 |
80 | moveTo( 2, 0 );
81 | lineTo( 0, 0 );
82 | lineTo( 0, 4 );
83 | lineTo( 2, 4 );
84 | moveTo( 0, 2 );
85 | lineTo( 1, 2 );
86 |
87 | break;
88 |
89 | case "F":
90 |
91 | moveTo( 0, 0 );
92 | lineTo( 0, 4 );
93 | lineTo( 2, 4 );
94 | moveTo( 0, 2 );
95 | lineTo( 1, 2 );
96 |
97 | break;
98 |
99 | case "G":
100 |
101 | moveTo( 2, 2 );
102 | lineTo( 2, 0 );
103 | lineTo( 0, 0 );
104 | lineTo( 0, 4 );
105 | lineTo( 2, 4 );
106 |
107 | break;
108 |
109 | case "H":
110 |
111 | moveTo( 0, 0 );
112 | lineTo( 0, 4 );
113 | moveTo( 2, 0 );
114 | lineTo( 2, 4 );
115 | moveTo( 0, 2 );
116 | lineTo( 2, 2 );
117 |
118 | break;
119 |
120 | case "I":
121 |
122 | moveTo( 1, 0 );
123 | lineTo( 1, 4 );
124 | moveTo( 0, 4 );
125 | lineTo( 2, 4 );
126 | moveTo( 0, 0 );
127 | lineTo( 2, 0 );
128 |
129 |
130 | break;
131 |
132 | case "J":
133 |
134 | moveTo( 0, 0 );
135 | lineTo( 2, 0 );
136 | lineTo( 2, 4 );
137 |
138 | break;
139 |
140 | case "K":
141 |
142 | moveTo( 0, 0 );
143 | lineTo( 0, 4 );
144 | moveTo( 2, 0 );
145 | lineTo( 1, 2 );
146 | lineTo( 2, 4 );
147 |
148 | break;
149 |
150 | case "L":
151 |
152 | moveTo( 2, 0 );
153 | lineTo( 0, 0 );
154 | lineTo( 0, 4 );
155 |
156 | break;
157 |
158 | case "M":
159 |
160 | moveTo( 0, 0 );
161 | lineTo( 0, 4 );
162 | lineTo( 1, 2 );
163 | lineTo( 2, 4 );
164 | lineTo( 2, 0 );
165 |
166 | break;
167 |
168 | case "N":
169 |
170 | moveTo( 0, 0 );
171 | lineTo( 0, 4 );
172 | lineTo( 2, 0 );
173 | lineTo( 2, 4 );
174 |
175 | break;
176 |
177 | case "O":
178 |
179 | moveTo( 0, 0 );
180 | lineTo( 0, 4 );
181 | lineTo( 2, 4 );
182 | lineTo( 2, 0 );
183 | lineTo( 0, 0 );
184 |
185 | break;
186 |
187 | case "P":
188 |
189 | moveTo( 0, 0 );
190 | lineTo( 0, 4 );
191 | lineTo( 2, 4 );
192 | lineTo( 1, 2 );
193 |
194 | break;
195 |
196 | case "Q":
197 |
198 | moveTo( 0, 0 );
199 | lineTo( 0, 4 );
200 | lineTo( 2, 4 );
201 | lineTo( 2, 0 );
202 | lineTo( 0, 0 );
203 | moveTo( 1, 1 );
204 | lineTo( 2, - 1 );
205 |
206 | break;
207 |
208 | case "R":
209 |
210 | moveTo( 0, 0 );
211 | lineTo( 0, 4 );
212 | lineTo( 2, 4 );
213 | lineTo( 1, 2 );
214 | lineTo( 2, 0 );
215 |
216 | break;
217 |
218 | case "S":
219 |
220 | moveTo( 0, 0 );
221 | lineTo( 2, 0 );
222 | lineTo( 0, 4 );
223 | lineTo( 2, 4 );
224 |
225 | break;
226 |
227 | case "T":
228 |
229 | moveTo( 1, 0 );
230 | lineTo( 1, 4 );
231 | moveTo( 0, 4 );
232 | lineTo( 2, 4 );
233 |
234 | break;
235 |
236 | case "U":
237 |
238 | moveTo( 0, 4 );
239 | lineTo( 0, 0 );
240 | lineTo( 2, 0 );
241 | lineTo( 2, 4 );
242 |
243 | break;
244 |
245 | case "V":
246 |
247 | moveTo( 0, 4 );
248 | lineTo( 1, 0 );
249 | lineTo( 2, 4 );
250 |
251 | break;
252 |
253 | case "W":
254 |
255 | moveTo( 0, 4 );
256 | lineTo( 0, 0 );
257 | lineTo( 1, 2 );
258 | lineTo( 2, 0 );
259 | lineTo( 2, 4 );
260 |
261 | break;
262 |
263 | case "X":
264 |
265 | moveTo( 0, 0 );
266 | lineTo( 2, 4 );
267 | moveTo( 0, 4 );
268 | lineTo( 2, 0 );
269 |
270 | break;
271 |
272 | case "Y":
273 |
274 | moveTo( 0, 4 );
275 | lineTo( 1, 2 );
276 | lineTo( 1, 0 );
277 | moveTo( 2, 4 );
278 | lineTo( 1, 2 );
279 |
280 | break;
281 |
282 | case "Z":
283 |
284 | moveTo( 0, 4 );
285 | lineTo( 2, 4 );
286 | lineTo( 0, 0 );
287 | lineTo( 2, 0 );
288 |
289 | break;
290 |
291 | case "0":
292 |
293 | moveTo( 0, 0 );
294 | lineTo( 0, 4 );
295 | lineTo( 2, 4 );
296 | lineTo( 2, 0 );
297 | lineTo( 0, 0 );
298 |
299 | break;
300 |
301 | case "1":
302 |
303 | moveTo( 1, 0 );
304 | lineTo( 1, 4 );
305 | moveTo( 0, 4 );
306 | lineTo( 1, 4 );
307 |
308 | break;
309 |
310 | case "2":
311 |
312 | moveTo( 0, 4 );
313 | lineTo( 2, 4 );
314 | lineTo( 0, 0 );
315 | lineTo( 2, 0 );
316 |
317 | break;
318 |
319 | case "6":
320 |
321 | moveTo( 0, 2 );
322 | lineTo( 2, 2 );
323 | lineTo( 2, 0 );
324 | lineTo( 0, 0 );
325 | lineTo( 0, 4 );
326 | lineTo( 2, 4 );
327 |
328 | break;
329 |
330 |
331 | case "9":
332 |
333 | moveTo( 2, 2 );
334 | lineTo( 0, 2 );
335 | lineTo( 0, 4 );
336 | lineTo( 2, 4 );
337 | lineTo( 2, 0 );
338 |
339 | break;
340 |
341 | case "/":
342 |
343 | moveTo( 0, 0 );
344 | lineTo( 2, 4 );
345 |
346 | break;
347 |
348 | default:
349 |
350 | moveTo( 0, 1 );
351 | lineTo( 2, 1 );
352 | moveTo( 0, 3 );
353 | lineTo( 2, 3 );
354 |
355 | }
356 |
357 | };
358 |
359 | Text = function ( string ) {
360 |
361 | geometry = new THREE.Geometry();
362 |
363 | var chars = string.split('');
364 |
365 | for ( var i = 0; i < chars.length; i ++ ) {
366 |
367 | addChar( chars[ i ], i * 3, 0 );
368 |
369 | }
370 |
371 | geometry.center();
372 |
373 | return geometry;
374 |
375 | };
376 |
377 | } )();
378 |
--------------------------------------------------------------------------------
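Usage sketch (not part of the repository): Text() builds a centered THREE.Geometry whose vertices come in pairs, one pair per stroke of a blocky 2x4-unit vector font (capital letters, a handful of digits, and '/'; anything else falls back to a two-bar placeholder glyph), so it is meant to be drawn with THREE.LineSegments. The material and scale are assumptions.

var textGeometry = Text( 'BROKEN MANTRA' );   // global installed by text.js

var textLines = new THREE.LineSegments(
	textGeometry,
	new THREE.LineBasicMaterial( { color: 0xffffff } )
);

textLines.scale.set( 0.1, 0.1, 0.1 );   // glyphs are laid out 3 units apart; scale to taste
scene.add( textLines );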