├── .gitignore ├── LICENSE ├── README.md ├── dist ├── three-effects.js ├── three-effects.module.js └── three-effects.old.js ├── examples ├── WebVR.js ├── basic │ └── index.html ├── canary │ ├── index.html │ └── three-effects.js ├── defer │ └── index.html └── madrid │ ├── assets │ ├── column │ │ ├── diffuse.basis │ │ ├── diffuse.png │ │ ├── index.obj │ │ └── normals.png │ ├── ground │ │ ├── arg.basis │ │ ├── arg.png │ │ ├── diffuse.basis │ │ ├── diffuse.jpg │ │ ├── diffuse.png │ │ ├── normals.basis │ │ └── normals.png │ ├── tick.wav │ ├── venus │ │ ├── arg.basis │ │ ├── arg.png │ │ ├── diffuse.basis │ │ ├── diffuse.jpg │ │ ├── diffuse.png │ │ ├── index.obj │ │ └── normals.png │ ├── voop.wav │ ├── woosh.wav │ └── zit.wav │ ├── index.html │ └── src │ ├── ground.js │ ├── index.js │ ├── interact.js │ ├── label.js │ ├── lib │ └── Sky.js │ ├── loader.js │ ├── loader │ ├── BasisTextureLoader.js │ ├── OBJLoader.js │ ├── basis_transcoder.js │ └── basis_transcoder.wasm │ ├── pop.js │ ├── sky.js │ └── statues.js ├── index.js ├── package-lock.json ├── package.json ├── rollup.config.js └── src ├── chunk.js ├── ecs.js ├── fx.js └── lib ├── bloom └── index.js ├── colors └── index.js ├── filmgrain └── index.js ├── fxaa └── index.js ├── glitch └── index.js ├── godrays └── index.js ├── index.js └── outline └── index.js /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | 8 | # Runtime data 9 | pids 10 | *.pid 11 | *.seed 12 | *.pid.lock 13 | 14 | # Directory for instrumented libs generated by jscoverage/JSCover 15 | lib-cov 16 | 17 | # Coverage directory used by tools like istanbul 18 | coverage 19 | 20 | # nyc test coverage 21 | .nyc_output 22 | 23 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 24 | .grunt 25 | 26 | # Bower dependency directory (https://bower.io/) 27 | bower_components 28 | 29 | # node-waf configuration 30 | .lock-wscript 31 | 32 | # Compiled binary addons (https://nodejs.org/api/addons.html) 33 | build/Release 34 | 35 | # Dependency directories 36 | node_modules/ 37 | jspm_packages/ 38 | 39 | # TypeScript v1 declaration files 40 | typings/ 41 | 42 | # Optional npm cache directory 43 | .npm 44 | 45 | # Optional eslint cache 46 | .eslintcache 47 | 48 | # Optional REPL history 49 | .node_repl_history 50 | 51 | # Output of 'npm pack' 52 | *.tgz 53 | 54 | # Yarn Integrity file 55 | .yarn-integrity 56 | 57 | # dotenv environment variables file 58 | .env 59 | 60 | # next.js build output 61 | .next 62 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Samsung Internet Dev Rel 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 |  15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # three-effects 2 | 3 | A minimal framework for three.js development. It eases the implementation of performant post processing and entity component systems. 4 | 5 | ## Post Processing 6 | 7 | The library works by attaching listeners to scene.onBeforeRender and onAfterRender; after that, post processing happens automatically whenever renderer.render is called with the enhanced scene object. 8 | 9 | A single function, attachEffects, is exposed from the module. It takes a THREE.Scene object as its argument and returns a control function/closure tied to the provided scene object. 10 | 11 | Internally, attachEffects binds the scene.onBeforeRender and scene.onAfterRender callback handlers to swap render targets and perform post processing transparently. 12 | 13 | The returned control function is used to set the final composition shader, which outputs to the screen/HMD or whatever render target was bound when renderer.render() was called. 14 | 15 | The full fragment shader needs to be passed to the control function as a string argument. The scene.userData property is used as the uniforms container for the final step. 16 | 17 | Some default uniforms are provided on initialization, colorTexture and depthTexture, to give access to the color and depth textures that are generated from the base rendering. 18 | 19 | ### Multi Pass Compositing 20 | 21 | Effects that need to access surrounding pixels, like FXAA or Glitch, will need to run on a pass of their own after all the pixels of the previous chain have been resolved. 22 | 23 | To deal with this, some simple shader preprocessor logic to split the shader into multiple passes is provided, using an uber-shader approach with preprocessor defines. 24 | 25 | 26 | ```cpp 27 | 28 | void main(void) { 29 | 30 | #if defined FX_PASS_1 31 | //do something like compositing bloom with the base color rendering 32 | #else 33 | // Final pass which needs to check nearby fully resolved pixels like Antialiasing 34 | #endif 35 | 36 | } 37 | 38 | ``` 39 | 40 | Defines like FX_PASS_N will be detected, resulting in the generation of several shaders/passes. The colorTexture uniform will always be the result of the previous pass. 41 | 42 | ### Effect Plugins 43 | 44 | The Scene.onBeforeRender and onAfterRender callbacks are utilized to dispatch several events during rendering to handle multiple recipients (the effect modules). 45 | 46 | Modules attach listeners for the afterRender or afterPass events to perform their operations, like generating textures and setting up uniforms for the final step. 47 | 48 | All communication with the final step is handled via uniforms on the Scene.userData property. Effect modules can thus be entirely independent of the core mechanism. 49 | 50 | Still, a convention is encouraged where modules are defined as functions that get the scene object as an argument and attach listeners to events on the scene.
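Here is a minimal sketch of that module shape. The names used (myEffect, myEffect_strength) are placeholders rather than part of the library, and the fully annotated example further below covers the complete event list and the final shader wiring.

```js
import { attachEffects } from "three-effects";

// Minimal effect module skeleton following the convention above.
// All identifiers here (myEffect, myEffect_strength) are illustrative placeholders.
function myEffect(scene) {

    // Expose a uniform to the final composition shader via scene.userData
    var strengthUniform = { value: 1.0 };
    scene.userData["myEffect_strength"] = strengthUniform;

    function onAfterRender(ev) {
        // Generate textures and/or update uniforms here, once per frame
    }

    scene.addEventListener("afterRender", onAfterRender);

    // Control function: no argument = cleanup, object argument = state update
    return function (args) {
        if (!args) {
            scene.removeEventListener("afterRender", onAfterRender);
            delete scene.userData["myEffect_strength"];
        } else if (args.strength !== undefined) {
            strengthUniform.value = args.strength;
        }
    };
}
```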
51 | 52 | These functions should return a control function that, when run with no argument, removes the event listeners from the scene object and performs any cleanup needed. 53 | 54 | Passing arguments to the control functions can be used for effect state updates. This convention is useful for wrapping the functionality, e.g. as A-Frame components. 55 | 56 | ```js 57 | 58 | import { attachEffects } from "three-effects"; 59 | 60 | var effectModule = function(scene) { 61 | 62 | var textureUniform = { value: ... }; 63 | 64 | // Setup the uniforms to communicate with the final composition step 65 | scene.userData["effect_texture"] = textureUniform; 66 | 67 | function generateTexturesOnAfterRender (ev) { 68 | 69 | /* ev === { 70 | type: "afterRender" || "beforeRender" || "afterPass" || "beforePass" || "afterEffects", 71 | renderer, 72 | scene, 73 | camera, 74 | renderTarget, // This is the final renderTarget, if null it means we output to screen 75 | realTarget // This is the renderTarget that contains the base scene rendering 76 | } */ 77 | 78 | textureUniform.value = someGeneratedTexture; 79 | } 80 | 81 | function generateTexturesOnAfterPass (ev) { 82 | 83 | // afterPass may be dispatched multiple times. You can decide when to actually perform the work based on ev.passId, which will be "main" during the last pass. 84 | if(ev.passId !== "FX_PASS_N") return; 85 | 86 | textureUniform.value = someGeneratedTexture; 87 | 88 | } 89 | 90 | // Attach generateTexturesOnAfterRender on the afterRender event to run it every frame after the scene is rendered (but before the final compositing step passes) 91 | scene.addEventListener("afterRender", generateTexturesOnAfterRender); 92 | 93 | // Alternatively listen on beforePass/afterPass to tap anywhere in the final compositing pipeline. You'll need to check the event.passId property. 94 | scene.addEventListener("afterPass", generateTexturesOnAfterPass); 95 | 96 | // afterEffects is emitted once all the passes are completed and the original renderTarget set and drawn. Text/HUD elements should be drawn here directly. 97 | scene.addEventListener("afterEffects", generateTexturesOnAfterRender); 98 | 99 | // Return a control function to update the effect instance and perform cleanup if/when needed 100 | return function (args) { 101 | if(!args) { 102 | delete scene.userData["effect_texture"]; 103 | scene.removeEventListener("afterRender", generateTexturesOnAfterRender); 104 | } else { 105 | // use args to update the effect instance state 106 | } 107 | } 108 | } 109 | 110 | // Attach the effects core on the scene object and get the control function 111 | var fx = attachEffects(scene); 112 | 113 | 114 | // Attach an effect instance on the scene object. Keep a reference to the instance control function 115 | var controlFunction = effectModule(scene); 116 | 117 | 118 | // Set the final shader through the core control function; a null argument will disable post processing 119 | fx(` 120 | uniform sampler2D effect_texture; 121 | 122 | void main(void) { 123 | 124 | // Fetch the base render, vUv is provided automatically and contains the coordinates 125 | // colorTexture and depthTexture sampler2Ds are also provided to get the base render 126 | 127 | vec4 base_color = texture2D(colorTexture, vUv); 128 | 129 | // Additively blend the effect generated texture with the base one, it could be a bloom effect 130 | 131 | vec4 effect_color = texture2D(effect_texture, vUv); 132 | 133 | gl_FragColor = base_color + effect_color; 134 | } 135 | `); 136 | 137 | 138 | // We don't need to do anything special with the renderer.
Just render the enhanced scene and the output will be post processed. 139 | renderer.render(scene, camera); 140 | 141 | // Passing null will disable post processing for the associated scene 142 | fx(null); 143 | 144 | // This is just a convention but having effects return a control function to configure/cleanup effects is recommended 145 | controlFunction(null); 146 | 147 | ``` 148 | ## Builtin Effects 149 | 150 | The library exposes some builtin effects through an "attach" map of functions using the format described above. Currently bloom, filmgrain, fxaa and glitch are provided. 151 | 152 | ```js 153 | 154 | import {attachEffects, attach} from "three-effects"; 155 | 156 | var fx = attachEffects(scene); 157 | 158 | var bloomControl = attach.bloom(scene); 159 | 160 | bloomControl({ strength: 0.5, radius: 1, threshold: 0.5 }); 161 | 162 | ``` 163 | 164 | ## Entity Component Systems 165 | 166 | TODO 167 | -------------------------------------------------------------------------------- /dist/three-effects.module.js: -------------------------------------------------------------------------------- 1 | import { WebGLRenderTarget, ShaderMaterial, Vector2, Scene, OrthographicCamera, Mesh, PlaneBufferGeometry, ShaderChunk, Math as Math$1, DataTexture, RGBFormat, FloatType, WebGLMultisampleRenderTarget, DepthTexture, DepthStencilFormat, UnsignedInt248Type } from 'three'; 2 | import * as THREE from 'three'; 3 | export { THREE }; 4 | 5 | ShaderChunk["bloom_pars"] = ` 6 | uniform sampler2D bloom_texture; 7 | 8 | void bloom_apply(inout vec4 fragColor, in vec2 uv) { 9 | fragColor.rgb += texture2D(bloom_texture, uv).rgb; 10 | } 11 | `; 12 | 13 | function index (scene, config) { 14 | 15 | config = config || {}; 16 | 17 | var inp = new WebGLRenderTarget(1,1); 18 | var ping = [ new WebGLRenderTarget(1,1), new WebGLRenderTarget(1,1), new WebGLRenderTarget(1,1) ]; 19 | var pong = [ new WebGLRenderTarget(1,1), new WebGLRenderTarget(1,1), new WebGLRenderTarget(1,1) ]; 20 | 21 | var passId = config.before || "main"; 22 | 23 | function getPass(src, uniforms) { 24 | return new ShaderMaterial({ 25 | uniforms: uniforms, 26 | vertexShader: ` 27 | varying vec2 vUv; 28 | 29 | void main(void) { 30 | vUv = uv; 31 | gl_Position = vec4(position.xy, 0., 1.); 32 | } 33 | `, 34 | fragmentShader: "varying vec2 vUv;\n" + src, 35 | depthWrite: false, 36 | depthTest: false 37 | }); 38 | } 39 | 40 | var controlUniforms = {}; 41 | 42 | var preUniforms = config.inputUniforms || { 43 | colorTexture: { value: null }, 44 | depthTexture: { value: null }, 45 | threshold: { value: config.threshold || 0.9 }, 46 | smooth: { value: config.smooth || 0.01 } 47 | }; 48 | 49 | var prePass = getPass(config.inputShader || ` 50 | uniform sampler2D colorTexture; 51 | uniform sampler2D depthTexture; 52 | uniform float threshold; 53 | 54 | void main(void) { 55 | vec4 texel = texture2D( colorTexture, vUv ); 56 | vec3 luma = vec3( 0.299, 0.587, 0.114 ); 57 | float v = dot( texel.xyz, luma ); 58 | vec4 outputColor = vec4( 0., 0., 0., 1.
); 59 | float alpha = smoothstep( threshold, threshold + 0.01, v ); 60 | 61 | gl_FragColor = mix( outputColor, texel, alpha ); 62 | } 63 | `, preUniforms); 64 | 65 | var blurUniforms = { 66 | colorTexture: { value: null }, 67 | direction: { value: new Vector2(1, 0) }, 68 | resolution: { value: new Vector2(1, 1) } 69 | }; 70 | 71 | var blurPasses = [ 72 | getPass(` 73 | #include 74 | #include 75 | 76 | uniform sampler2D colorTexture; 77 | uniform vec2 direction; 78 | uniform vec2 resolution; 79 | 80 | void main(void) { 81 | gl_FragColor = blur5(colorTexture, vUv, direction, resolution); 82 | } 83 | `, blurUniforms), 84 | getPass(` 85 | #include 86 | #include 87 | 88 | uniform sampler2D colorTexture; 89 | uniform vec2 direction; 90 | uniform vec2 resolution; 91 | 92 | void main(void) { 93 | gl_FragColor = blur9(colorTexture, vUv, direction, resolution); 94 | } 95 | `, blurUniforms), 96 | getPass(` 97 | #include 98 | #include 99 | 100 | uniform sampler2D colorTexture; 101 | uniform vec2 direction; 102 | uniform vec2 resolution; 103 | 104 | void main(void) { 105 | gl_FragColor = blur13(colorTexture, vUv, direction, resolution); 106 | } 107 | `, blurUniforms), 108 | ]; 109 | 110 | var postUniforms = { 111 | strength: { value: 0.5 }, 112 | radius: { value: 1 }, 113 | blurTexture1: { value: pong[0].texture }, 114 | blurTexture2: { value: pong[1].texture }, 115 | blurTexture3: { value: pong[2].texture }, 116 | colorTexture: { value: null } 117 | }; 118 | 119 | controlUniforms.strength = scene.userData.bloom_strength = postUniforms.strength; 120 | controlUniforms.radius = scene.userData.bloom_radius = postUniforms.radius; 121 | if (preUniforms.threshold) controlUniforms.threshold = scene.userData.bloom_threshold = preUniforms.threshold; 122 | scene.userData.bloom_texture = { value: ping[0].texture }; 123 | 124 | var postPass = getPass(` 125 | uniform sampler2D blurTexture1; 126 | uniform sampler2D blurTexture2; 127 | uniform sampler2D blurTexture3; 128 | uniform float strength; 129 | uniform float radius; 130 | 131 | float lerpBloomFactor(const in float factor, const in float mirrorFactor) { 132 | return mix(factor, mirrorFactor, radius); 133 | } 134 | 135 | void main() { 136 | gl_FragColor = strength * ( lerpBloomFactor(1., 0.2) * texture2D(blurTexture1, vUv) + \ 137 | lerpBloomFactor(0.2, 0.8) * texture2D(blurTexture2, vUv) + \ 138 | lerpBloomFactor(0.2, 1.) * texture2D(blurTexture3, vUv) );\ 139 | } 140 | `, postUniforms); 141 | 142 | scene.userData.bloom_internal = {prePass, blurPasses, postPass}; 143 | 144 | var _scene = new Scene(); 145 | var _ortho = new OrthographicCamera(1,1,1,1,1,10); 146 | var _quad = new Mesh(new PlaneBufferGeometry(2,2), null); 147 | _quad.frustumCulled = false; 148 | _scene.add(_quad); 149 | 150 | function performPass(renderer, m, inputTarget, outputTarget) { 151 | _quad.material = m; 152 | if (m.uniforms.colorTexture) 153 | m.uniforms.colorTexture.value = inputTarget ? inputTarget.texture : null; 154 | if (m.uniforms.depthTexture) 155 | m.uniforms.depthTexture.value = inputTarget ? 
inputTarget.depthTexture: null; 156 | if (m.uniforms.resolution) 157 | m.uniforms.resolution.value.set(outputTarget.width, outputTarget.height); 158 | renderer.setRenderTarget(outputTarget); 159 | renderer.render(_scene, _ortho); 160 | } 161 | 162 | var fn = function (e) { 163 | if(passId !== e.passId) return; 164 | 165 | blurUniforms.VR = { value: 0 }; 166 | 167 | performPass(e.renderer, prePass, e.renderTarget, inp); 168 | 169 | blurUniforms.VR.value = e.scene.userData.VR.value * 0.25; 170 | 171 | for(var i=0; i< 3; i++) { 172 | blurUniforms.direction.value.set(0, 1); 173 | performPass(e.renderer, blurPasses[i], i ? pong[i - 1] : inp, ping[i]); 174 | 175 | blurUniforms.direction.value.set(1, 0); 176 | performPass(e.renderer, blurPasses[i], ping[i], pong[i]); 177 | blurUniforms.VR.value *= 0.5; 178 | } 179 | 180 | performPass(e.renderer, postPass, false, ping[0]); 181 | }; 182 | 183 | scene.addEventListener("beforePass", fn); 184 | 185 | var fr = function (e) { 186 | var w = e.size.x * 0.5, h = e.size.y * 0.5; 187 | inp.setSize(w, h); 188 | for(var i=0; i< 3; i++) { 189 | w = Math.floor(w * 0.5); 190 | h = Math.floor(h * 0.5); 191 | ping[i].setSize(w, h); 192 | pong[i].setSize(w, h); 193 | } 194 | }; 195 | 196 | scene.addEventListener("resizeEffects", fr); 197 | 198 | return function (arg) { 199 | if ( arg ) { 200 | if(arg.before) passId = arg.before; 201 | for ( var k in arg) { 202 | if (controlUniforms[k]) { 203 | controlUniforms[k].value = arg[k]; 204 | } 205 | } 206 | } else { 207 | scene.removeEventListener("beforePass", fn); 208 | scene.removeEventListener("resizeEffects", fr); 209 | 210 | inp.dispose(); 211 | for(var i = 0; i < 3; i++) { 212 | ping[i].dispose(); 213 | pong[i].dispose(); 214 | blurPasses[i].dispose(); 215 | } 216 | 217 | prePass.dispose(); 218 | postPass.dispose(); 219 | 220 | delete scene.userData.bloom_internal; 221 | delete scene.userData.bloom_strength; 222 | delete scene.userData.bloom_radius; 223 | delete scene.userData.bloom_threshold; 224 | delete scene.userData.bloom_texture; 225 | } 226 | } 227 | } 228 | 229 | ShaderChunk["fxaa_pars"] = ` 230 | #define FXAA_REDUCE_MIN (1.0/ 128.0) 231 | #define FXAA_REDUCE_MUL (1.0 / 8.0) 232 | #define FXAA_SPAN_MAX 8.0 233 | 234 | void fxaa_apply(inout vec4 color, in vec2 uv) 235 | { 236 | vec2 inverseVP = vec2(1.0 / resolution.x, 1.0 / resolution.y); 237 | vec3 rgbNW = texture2D(colorTexture, uv + vec2(-1.0, -1.0) * inverseVP).xyz; 238 | vec3 rgbNE = texture2D(colorTexture, uv + vec2(1.0, -1.0) * inverseVP).xyz; 239 | vec3 rgbSW = texture2D(colorTexture, uv + vec2(-1.0, 1.0) * inverseVP).xyz; 240 | vec3 rgbSE = texture2D(colorTexture, uv + vec2(1.0, 1.0) * inverseVP).xyz; 241 | vec3 rgbM = color.rgb; 242 | vec3 luma = vec3(0.299, 0.587, 0.114); 243 | float lumaNW = dot(rgbNW, luma); 244 | float lumaNE = dot(rgbNE, luma); 245 | float lumaSW = dot(rgbSW, luma); 246 | float lumaSE = dot(rgbSE, luma); 247 | float lumaM = dot(rgbM, luma); 248 | float lumaMin = min(lumaM, min(min(lumaNW, lumaNE), min(lumaSW, lumaSE))); 249 | float lumaMax = max(lumaM, max(max(lumaNW, lumaNE), max(lumaSW, lumaSE))); 250 | 251 | vec2 dir; 252 | dir.x = -((lumaNW + lumaNE) - (lumaSW + lumaSE)); 253 | dir.y = ((lumaNW + lumaSW) - (lumaNE + lumaSE)); 254 | 255 | float dirReduce = max((lumaNW + lumaNE + lumaSW + lumaSE) * 256 | (0.25 * FXAA_REDUCE_MUL), FXAA_REDUCE_MIN); 257 | 258 | float rcpDirMin = 1.0 / (min(abs(dir.x), abs(dir.y)) + dirReduce); 259 | dir = min(vec2(FXAA_SPAN_MAX, FXAA_SPAN_MAX), 260 | max(vec2(-FXAA_SPAN_MAX, -FXAA_SPAN_MAX), 
261 | dir * rcpDirMin)) * inverseVP; 262 | 263 | vec3 rgbA = 0.5 * ( 264 | texture2D(colorTexture, uv + dir * (1.0 / 3.0 - 0.5)).xyz + 265 | texture2D(colorTexture, uv + dir * (2.0 / 3.0 - 0.5)).xyz); 266 | vec3 rgbB = rgbA * 0.5 + 0.25 * ( 267 | texture2D(colorTexture, uv + dir * -0.5).xyz + 268 | texture2D(colorTexture, uv + dir * 0.5).xyz); 269 | 270 | float lumaB = dot(rgbB, luma); 271 | if ((lumaB < lumaMin) || (lumaB > lumaMax)) 272 | color.rgb = rgbA; 273 | else 274 | color.rgb = rgbB; 275 | } 276 | 277 | `; 278 | // FXAA doesn't do any texture generation or need uniforms but we stay consistent with the other effects 279 | function index$1(){ 280 | return function () {} 281 | } 282 | 283 | ShaderChunk["filmgrain_pars"] = ` 284 | uniform float filmgrain_time; 285 | uniform float filmgrain_sCount; 286 | uniform float filmgrain_sIntensity; 287 | uniform float filmgrain_nIntensity; 288 | 289 | void filmgrain_apply(inout vec4 color, in vec2 uv) { 290 | vec4 cTextureScreen = color; 291 | float dx = rand( uv + mod(filmgrain_time, 3.14) ); 292 | vec3 cResult = cTextureScreen.rgb + cTextureScreen.rgb * clamp( 0.1 + dx, 0.0, 1.0 ); 293 | vec2 sc = vec2( sin( uv.y * filmgrain_sCount ), cos( uv.y * filmgrain_sCount ) ); 294 | cResult += cTextureScreen.rgb * vec3( sc.x, sc.y, sc.x ) * filmgrain_sIntensity; 295 | cResult = cTextureScreen.rgb + clamp( filmgrain_nIntensity, 0.0,1.0 ) * ( cResult - cTextureScreen.rgb ); 296 | color.rgb = cResult; 297 | } 298 | `; 299 | 300 | function index$2 (scene, config) { 301 | 302 | var controlUniforms = { 303 | "time": { type: "f", value: 0.0 }, 304 | "nIntensity": { type: "f", value: 0.3 }, 305 | "sIntensity": { type: "f", value: 0.03 }, 306 | "sCount": { type: "f", value: 4096 } 307 | }; 308 | 309 | function handleConf(conf) { 310 | for(var k in conf) { 311 | if(k in controlUniforms){ 312 | controlUniforms[k].value = conf[k]; 313 | } 314 | } 315 | } 316 | 317 | if(config) handleConf(config); 318 | 319 | scene.userData["filmgrain_time"] = controlUniforms["time"]; 320 | scene.userData["filmgrain_sCount"] = controlUniforms["sCount"]; 321 | scene.userData["filmgrain_sIntensity"] = controlUniforms["sIntensity"]; 322 | scene.userData["filmgrain_nIntensity"] = controlUniforms["nIntensity"]; 323 | 324 | return function (arg) { 325 | if(arg) { 326 | handleConf(arg); 327 | return; 328 | } 329 | delete scene.userData["filmgrain_time"]; 330 | delete scene.userData["filmgrain_sCount"]; 331 | delete scene.userData["filmgrain_sIntensity"]; 332 | delete scene.userData["filmgrain_nIntensity"]; 333 | } 334 | 335 | } 336 | 337 | ShaderChunk["glitch_pars"] = ` 338 | uniform sampler2D glitch_tDisp; 339 | uniform float glitch_amount; 340 | uniform float glitch_snow; 341 | uniform float glitch_angle; 342 | uniform float glitch_seed; 343 | uniform float glitch_seed_x; 344 | uniform float glitch_seed_y; 345 | uniform float glitch_distortion_x; 346 | uniform float glitch_distortion_y; 347 | uniform float glitch_col_s; 348 | uniform float glitch_intensity; 349 | 350 | float glitch_rand(vec2 co){ 351 | return fract(sin(dot(co.xy ,vec2(12.9898,78.233))) * 43758.5453); 352 | } 353 | 354 | void glitch_apply(inout vec4 fragColor, vec2 uv) { 355 | vec2 p = uv; 356 | vec2 p2 = p; 357 | float xs = floor(gl_FragCoord.x / 0.5); 358 | float ys = floor(gl_FragCoord.y / 0.5); 359 | 360 | //based on staffantans glitch shader for unity https://github.com/staffantan/unityglitch 361 | vec4 normal = texture2D(glitch_tDisp, p2 * glitch_seed * glitch_seed); 362 | if(p2.y < glitch_distortion_x + 
glitch_col_s && p2.y > glitch_distortion_x - glitch_col_s * glitch_seed) { 363 | if(glitch_seed_x>0.){ 364 | p.y = 1. - (p.y + glitch_distortion_y); 365 | } 366 | else { 367 | p.y = glitch_distortion_y; 368 | } 369 | } 370 | if(p2.x < glitch_distortion_y + glitch_col_s && p2.x > glitch_distortion_y - glitch_col_s * glitch_seed) { 371 | if( glitch_seed_y > 0.){ 372 | p.x = glitch_distortion_x; 373 | } 374 | else { 375 | p.x = 1. - (p.x + glitch_distortion_x); 376 | } 377 | } 378 | p.x+=normal.x* glitch_seed_x * (glitch_seed/5.); 379 | p.y+=normal.y* glitch_seed_y * (glitch_seed/5.); 380 | 381 | //base from RGB shift shader 382 | vec2 offset = glitch_amount * vec2( cos(glitch_angle), sin(glitch_angle)); 383 | vec4 cr = texture2D(colorTexture, p + offset); 384 | vec4 cga = texture2D(colorTexture, p); 385 | vec4 cb = texture2D(colorTexture, p - offset); 386 | vec4 color = vec4(cr.r, cga.g, cb.b, cga.a); 387 | 388 | //add noise 389 | vec4 snow = 200.*glitch_amount*vec4(glitch_rand(vec2(xs * glitch_seed,ys * glitch_seed*50.))*0.2); 390 | color = color + glitch_snow * snow; 391 | 392 | fragColor = mix(fragColor, color, glitch_intensity); 393 | } 394 | `; 395 | 396 | function index$3 (scene, config) { 397 | var curF = 0; 398 | var randX = 0; 399 | 400 | var generateTrigger = function() { 401 | 402 | randX = Math$1.randInt( 120, 240 ); 403 | 404 | }; 405 | 406 | var generateHeightmap = function( dt_size ) { 407 | 408 | var data_arr = new Float32Array( dt_size * dt_size * 3 ); 409 | var length = dt_size * dt_size; 410 | 411 | for ( var i = 0; i < length; i ++ ) { 412 | 413 | var val = Math$1.randFloat( 0, 1 ); 414 | data_arr[ i * 3 + 0 ] = val; 415 | data_arr[ i * 3 + 1 ] = val; 416 | data_arr[ i * 3 + 2 ] = val; 417 | 418 | } 419 | 420 | var texture = new DataTexture( data_arr, dt_size, dt_size, RGBFormat, FloatType ); 421 | texture.needsUpdate = true; 422 | return texture; 423 | 424 | }; 425 | 426 | var controlUniforms = { 427 | "tDisp": { type: "t", value: generateHeightmap( 64 ) }, 428 | "amount": { type: "f", value: 0.08 }, 429 | "snow": { type: "f", value: 0.5 }, 430 | "angle": { type: "f", value: 0.02 }, 431 | "seed": { type: "f", value: 0.02 }, 432 | "seed_x": { type: "f", value: 0.02 },//-1,1 433 | "seed_y": { type: "f", value: 0.02 },//-1,1 434 | "distortion_x": { type: "f", value: 0.5 }, 435 | "distortion_y": { type: "f", value: 0.6 }, 436 | "col_s": { type: "f", value: 0.05 }, 437 | "intensity": { type: "f", value: 0.33 } 438 | }; 439 | 440 | for(var k in controlUniforms) { 441 | scene.userData["glitch_" + k] = controlUniforms[k]; 442 | } 443 | 444 | scene.addEventListener("beforeRender", function () { 445 | controlUniforms[ 'seed' ].value = Math.random();//default seeding 446 | if ( curF % randX == 0) { 447 | controlUniforms[ 'amount' ].value = Math.random() / 30; 448 | controlUniforms[ 'angle' ].value = Math$1.randFloat( - Math.PI, Math.PI ); 449 | controlUniforms[ 'seed_x' ].value = Math$1.randFloat( - 1, 1 ); 450 | controlUniforms[ 'seed_y' ].value = Math$1.randFloat( - 1, 1 ); 451 | controlUniforms[ 'distortion_x' ].value = Math$1.randFloat( 0, 1 ); 452 | controlUniforms[ 'distortion_y' ].value = Math$1.randFloat( 0, 1 ); 453 | curF = 0; 454 | generateTrigger(); 455 | } else if ( curF % randX < randX / 5 ) { 456 | controlUniforms[ 'amount' ].value = Math.random() / 90; 457 | controlUniforms[ 'angle' ].value = Math$1.randFloat( - Math.PI, Math.PI ); 458 | controlUniforms[ 'distortion_x' ].value = Math$1.randFloat( 0, 1 ); 459 | controlUniforms[ 'distortion_y' ].value = 
Math$1.randFloat( 0, 1 ); 460 | controlUniforms[ 'seed_x' ].value = Math$1.randFloat( - 0.3, 0.3 ); 461 | controlUniforms[ 'seed_y' ].value = Math$1.randFloat( - 0.3, 0.3 ); 462 | } 463 | curF++; 464 | }); 465 | 466 | var fn = function (arg) { 467 | if(arg) { 468 | 469 | for(var k in controlUniforms) { 470 | if(arg[k] !== undefined && k in controlUniforms) controlUniforms[k].value = arg[k]; 471 | } 472 | 473 | curF = 0; 474 | generateTrigger(); 475 | } else { 476 | for(k in controlUniforms) { 477 | delete scene.userData["glitch_" + k]; 478 | } 479 | } 480 | }; 481 | 482 | fn(config); 483 | 484 | return fn; 485 | } 486 | 487 | 488 | 489 | var index$4 = /*#__PURE__*/Object.freeze({ 490 | __proto__: null, 491 | bloom: index, 492 | fxaa: index$1, 493 | filmgrain: index$2, 494 | glitch: index$3 495 | }); 496 | 497 | /* 498 | * Copyright (c) 2016-2018, Yannis Gravezas 499 | * Copyright (c) 2019 Samsung Internet 500 | * Available under the MIT license. 501 | */ 502 | 503 | function fx (scene, antialias) { 504 | var renderTargets = [new WebGLRenderTarget(1, 1), new WebGLRenderTarget(1, 1)]; 505 | var multiTarget = new WebGLMultisampleRenderTarget(1, 1); 506 | multiTarget.samples = antialias === true ? 4 : antialias; 507 | var depthTexture = new DepthTexture(); 508 | depthTexture.format = DepthStencilFormat; 509 | depthTexture.type = UnsignedInt248Type; 510 | 511 | renderTargets[0].depthTexture = multiTarget.depthTexture = depthTexture; 512 | 513 | scene.userData.VR = { value: 0 }; 514 | scene.userData.colorTexture = { value: null }; 515 | scene.userData.depthTexture = { value: depthTexture }; 516 | 517 | var passes = []; 518 | 519 | var realTarget; 520 | 521 | var _scene = new Scene(); 522 | var _ortho = new OrthographicCamera(1,1,1,1,1,10); 523 | var _quad = new Mesh(new PlaneBufferGeometry(2,2), null); 524 | _quad.frustumCulled = false; 525 | _scene.add(_quad); 526 | 527 | var vsize = new Vector2(); 528 | 529 | scene.userData.resolution = { value: vsize }; 530 | 531 | var event = { type: "beforeRender", scene: null, renderer: null, camera: null, size: vsize, time: 0 }; 532 | 533 | function dispatch(type) { 534 | event.type = type; 535 | scene.dispatchEvent(event); 536 | } 537 | 538 | scene.onBeforeRender = function (renderer, scene, camera, renderTarget) { 539 | 540 | event.time = window.performance.now(); 541 | 542 | 543 | if (renderTarget) { 544 | vsize.set(renderTarget.width, renderTarget.height); 545 | } else { 546 | renderer.getDrawingBufferSize(vsize); 547 | } 548 | 549 | if (!passes.length) { 550 | dispatch("beforeRender"); 551 | return; 552 | } 553 | 554 | if(vsize.x !== renderTargets[0].width || vsize.y !== renderTargets[0].height) { 555 | renderTargets[0].setSize(vsize.x, vsize.y); 556 | renderTargets[1].setSize(vsize.x, vsize.y); 557 | multiTarget.setSize(vsize.x, vsize.y); 558 | dispatch("resizeEffects"); 559 | } 560 | 561 | scene.userData.VR.value = renderer.vr.isPresenting() ? vsize.x * 0.5 : 0; 562 | 563 | event.renderer = renderer; 564 | event.scene = scene; 565 | event.camera = camera; 566 | realTarget = event.outputTarget = renderTarget; 567 | event.renderTarget = renderTargets[0]; 568 | 569 | 570 | dispatch("beforeRender"); 571 | 572 | renderer.setRenderTarget(antialias && renderer.capabilities.isWebGL2 ? 
multiTarget : renderTargets[0]); 573 | 574 | }; 575 | 576 | scene.onAfterRender = function (renderer, scene, camera) { 577 | if (!passes.length) return; 578 | 579 | var vrEnabled = renderer.vr.enabled; 580 | renderer.vr.enabled = false; 581 | 582 | var u = scene.userData; 583 | event.renderTarget = antialias && renderer.capabilities.isWebGL2 ? multiTarget : renderTargets[0]; 584 | u.colorTexture.value = event.renderTarget.texture; 585 | 586 | dispatch("afterRender"); 587 | 588 | passes.forEach(function (p, i) { 589 | event.passId = p.passId; 590 | dispatch("beforePass"); 591 | 592 | var rt = (i == (passes.length - 1)) ? realTarget : renderTargets[(i + 1) & 1]; 593 | 594 | _quad.material = p; 595 | renderer.setRenderTarget(rt); 596 | //renderer.setViewport(0, 0, vsize.x, vsize.y); 597 | renderer.render(_scene, _ortho); 598 | 599 | u.colorTexture.value = rt ? rt.texture : null; 600 | event.renderTarget = rt; 601 | dispatch("afterPass"); 602 | }); 603 | 604 | delete event.passId; 605 | dispatch("afterEffects"); 606 | 607 | renderer.vr.enabled = vrEnabled; 608 | }; 609 | 610 | var fxPattern = /FX_PASS_[0-9]+/gm; 611 | var symPattern = /^\w+$/; 612 | var uPattern = /^\s*uniform\s+/; 613 | 614 | function parsePasses( src ) { 615 | var arr = src.match(fxPattern); 616 | if(!arr) return ["main"]; 617 | var set = new Set(arr); 618 | arr = [...set]; 619 | arr.sort(function(a, b) { 620 | return a.localeCompare(b); 621 | }); 622 | arr.push("main"); 623 | return arr; 624 | } 625 | 626 | return function ( src ) { 627 | passes.forEach(function(m){ m.dispose(); }); 628 | passes = []; 629 | 630 | if(!src) return; 631 | 632 | if (Array.isArray(src)) { 633 | var head = []; 634 | var body = []; 635 | var bc = 0, c = 0; 636 | 637 | src.forEach(function (s, i) { 638 | if(i && s[0] === "!") bc++; 639 | }); 640 | 641 | if(bc) body.push(`#if defined FX_PASS_${c}`); 642 | 643 | src.forEach(function (s, i) { 644 | 645 | if(bc && i && s[0] === "!") { 646 | body.push(c < bc - 1 ? 
`#elif defined FX_PASS_${++c}` : "#else"); 647 | } 648 | 649 | s = s.replace("!", "").trim(); 650 | 651 | if(!s) return; 652 | 653 | if(s[0] === "#") { 654 | head.push(`#include <${s.replace("#", "")}>`); 655 | } else if(s.match(symPattern)) { 656 | head.push(`#include <${s}_pars>`); 657 | body.push(`\t${s}_apply(color, uv);`); 658 | } else if(s.match(uPattern)){ 659 | head.push(s); 660 | } else { 661 | body.push(s); 662 | } 663 | 664 | }); 665 | 666 | //body.push("fragColor.a = 1.0;") 667 | if(bc) body.push("#endif"); 668 | 669 | src = [ 670 | head.join("\n"), 671 | "", 672 | "void main(void){", 673 | "\tvec2 uv = vUv;", 674 | "\tvec4 color = texture2D(colorTexture, uv);", 675 | body.join("\n"), 676 | "\tgl_FragColor = color;", 677 | "}" 678 | ].join("\n"); 679 | } 680 | 681 | var def = parsePasses(src); 682 | 683 | src = [ 684 | "#include ", 685 | "uniform sampler2D colorTexture;", 686 | "uniform sampler2D depthTexture;", 687 | "uniform vec2 resolution;", 688 | "varying vec2 vUv;", 689 | src 690 | ].join("\n"); 691 | 692 | def.forEach(function (d){ 693 | var defines = {}; 694 | if(d !== "main") defines[d] = 1; 695 | var m = new ShaderMaterial({ 696 | defines: defines, 697 | uniforms: scene.userData, 698 | vertexShader: ` 699 | varying vec2 vUv; 700 | 701 | void main(void) { 702 | vUv = uv; 703 | gl_Position = vec4(position.xy, 0., 1.); 704 | } 705 | `, 706 | fragmentShader: src, 707 | depthWrite: false, 708 | depthTest: false, 709 | extensions: { 710 | derivatives: true, 711 | shaderTextureLOD: true 712 | }, 713 | fog: false, 714 | lights: false 715 | }); 716 | m.passId = d; 717 | passes.push(m); 718 | }); 719 | } 720 | } 721 | 722 | /* 723 | * Copyright (c) 2016-2018, Yannis Gravezas 724 | * Copyright (c) 2019 Samsung Internet 725 | * Available under the MIT license. 726 | */ 727 | 728 | function ecs (obj, name, api) { 729 | 730 | var objects = []; 731 | 732 | var listeners = {}; 733 | 734 | function addListener(lname, fn) { 735 | listeners[lname] = fn; 736 | obj.addEventListener(lname, listeners[lname]); 737 | } 738 | 739 | addListener(name + "/register", function(e) { 740 | var index = objects.indexOf(e.entity); 741 | if( index !== -1) { 742 | objects.splice(index, 1); 743 | if(api.remove) api.remove(e, objects, name); 744 | delete e.entity.userdata[name]; 745 | } 746 | objects.push(e.entity); 747 | e.entity.userData[name] = api.init(e, objects, name, e.reset); 748 | }); 749 | 750 | addListener(name + "/unregister", function(e) { 751 | var index = objects.indexOf(e.entity); 752 | if(index !== -1) { 753 | objects.splice(index, 1); 754 | if(api.remove) api.remove(e, objects, name); 755 | delete e.entity.userData[name]; 756 | } 757 | }); 758 | 759 | for (var k in api) { 760 | switch(k) { 761 | case "init": 762 | case "remove": 763 | case "control": continue; 764 | default: 765 | addListener(k, function(e) { 766 | api[k](e, objects, name); 767 | }); 768 | break; 769 | } 770 | } 771 | 772 | return function (arg) { 773 | if(!arg) { 774 | objects.forEach( function (obj) { 775 | if (api.remove) api.remove({ entity: obj }, objects, name); 776 | delete e.entity.userdata[name]; 777 | }); 778 | for(var k in listeners) { 779 | obj.removeEventListener(k, listeners[k]); 780 | } 781 | } else if (api.control) { 782 | api.control(arg, objects, name); 783 | } 784 | } 785 | } 786 | 787 | ShaderChunk["vr_pars"] = ` 788 | #ifndef VR_PARS 789 | 790 | #define VR_PARS 1 791 | uniform float VR; 792 | 793 | #define selectVR(novr, left, right) ( (VR > 0.) ? ( (gl_FragCoord.x < VR) ? 
(left) : (right) ): (novr)) 794 | 795 | vec4 textureVR(in sampler2D tex, in vec2 uv) { 796 | uv.x = selectVR(uv.x, min(0.5, uv.x), max(0.5, uv.x) ); 797 | return texture2D(tex, uv); 798 | } 799 | 800 | vec4 textureVR(in sampler2D tex, in vec2 uv, float bias) { 801 | uv.x = selectVR(uv.x, min(0.5, uv.x), max(0.5, uv.x)); 802 | return texture2D(tex, uv, bias); 803 | } 804 | 805 | #ifdef TEXTURE_LOD_EXT 806 | 807 | vec4 textureVRLod(in sampler2D tex, in vec2 uv, float lod) { 808 | uv.x = selectVR(uv.x, min(0.5, uv.x), max(0.5, uv.x)); 809 | return texture2DLodEXT(tex, uv, bias); 810 | } 811 | 812 | #endif 813 | 814 | #endif 815 | `; 816 | 817 | ShaderChunk["blur_pars"] = ` 818 | #ifndef BLUR_PARS 819 | 820 | #define BLUR_PARS 1 821 | 822 | #ifndef VR_PARS 823 | #define textureVR(t, u) texture2D(t, u) 824 | #endif 825 | 826 | #ifndef BLUR_WEIGHT 827 | #define BLUR_WEIGHT(v, uv) v.a; 828 | #endif 829 | 830 | #define BLUR_MAX_RADIUS 255 831 | 832 | float blur_gaussian_pdf(in float x, in float sigma) { 833 | return 0.39894 * exp( -0.5 * x * x/( sigma * sigma))/sigma; 834 | } 835 | 836 | vec4 blur_weighted(const float fSigma, const in sampler2D tex, const in vec2 uv, const in vec2 direction, const in vec2 resolution) { 837 | vec2 invSize = 1.0 / resolution; 838 | float weightSum = blur_gaussian_pdf(0.0, fSigma); 839 | vec4 diffuseSum = textureVR( tex, uv) * weightSum; 840 | for( int i = 1; i < BLUR_MAX_RADIUS; i ++ ) { 841 | if(float(i) > fSigma) break; 842 | float x = float(i); 843 | float w = blur_gaussian_pdf(x, fSigma); 844 | vec2 uvOffset = direction * invSize * x; 845 | vec2 uvv = uv + uvOffset; 846 | vec4 sample1 = textureVR( tex, uvv); 847 | float w1 = BLUR_WEIGHT(sample1, uvv); 848 | uvv = uv - uvOffset; 849 | vec4 sample2 = textureVR( tex, uvv); 850 | float w2 = BLUR_WEIGHT(sample1, uvv); 851 | diffuseSum += (sample1 * w1 + sample2 * w2) * w; 852 | weightSum += (w1 + w2) * w; 853 | } 854 | return diffuseSum/weightSum; 855 | } 856 | 857 | vec4 blur(const float fSigma, const in sampler2D tex, const in vec2 uv, const in vec2 direction, const in vec2 resolution) { 858 | vec2 invSize = 1.0 / resolution; 859 | float weightSum = blur_gaussian_pdf(0.0, fSigma); 860 | vec4 diffuseSum = textureVR( tex, uv) * weightSum; 861 | for( int i = 1; i < BLUR_MAX_RADIUS; i ++ ) { 862 | if(float(i) > fSigma) break; 863 | float x = float(i); 864 | float w = blur_gaussian_pdf(x, fSigma); 865 | vec2 uvOffset = direction * invSize * x; 866 | vec4 sample1 = textureVR( tex, uv + uvOffset); 867 | vec4 sample2 = textureVR( tex, uv - uvOffset); 868 | diffuseSum += (sample1 + sample2) * w; 869 | weightSum += 2.0 * w; 870 | } 871 | return diffuseSum/weightSum; 872 | } 873 | 874 | vec4 blur5(const in sampler2D tex, const in vec2 uv, const in vec2 direction, const in vec2 resolution) { 875 | vec4 color = vec4(0.0); 876 | vec2 off1 = vec2(1.3333333333333333) * direction; 877 | color += textureVR(tex, uv) * 0.29411764705882354; 878 | color += textureVR(tex, uv + (off1 / resolution)) * 0.35294117647058826; 879 | color += textureVR(tex, uv - (off1 / resolution)) * 0.35294117647058826; 880 | return color; 881 | } 882 | 883 | vec4 blur9(const in sampler2D tex, const in vec2 uv, const in vec2 direction, const in vec2 resolution) { 884 | vec4 color = vec4(0.0); 885 | vec2 off1 = vec2(1.3846153846) * direction; 886 | vec2 off2 = vec2(3.2307692308) * direction; 887 | color += textureVR(tex, vUv) * 0.2270270270; 888 | color += textureVR(tex, vUv + (off1 / resolution)) * 0.3162162162; 889 | color += textureVR(tex, vUv - (off1 / 
resolution)) * 0.3162162162; 890 | color += textureVR(tex, vUv + (off2 / resolution)) * 0.0702702703; 891 | color += textureVR(tex, vUv - (off2 / resolution)) * 0.0702702703; 892 | return color; 893 | } 894 | 895 | vec4 blur13(const in sampler2D tex, const in vec2 uv, const in vec2 direction, const in vec2 resolution) { 896 | vec4 color = vec4(0.0); 897 | vec2 off1 = vec2(1.411764705882353) * direction; 898 | vec2 off2 = vec2(3.2941176470588234) * direction; 899 | vec2 off3 = vec2(5.176470588235294) * direction; 900 | color += textureVR(tex, vUv) * 0.1964825501511404; 901 | color += textureVR(tex, vUv + (off1 / resolution)) * 0.2969069646728344; 902 | color += textureVR(tex, vUv - (off1 / resolution)) * 0.2969069646728344; 903 | color += textureVR(tex, vUv + (off2 / resolution)) * 0.09447039785044732; 904 | color += textureVR(tex, vUv - (off2 / resolution)) * 0.09447039785044732; 905 | color += textureVR(tex, vUv + (off3 / resolution)) * 0.010381362401148057; 906 | color += textureVR(tex, vUv - (off3 / resolution)) * 0.010381362401148057; 907 | return color; 908 | } 909 | #endif 910 | `; 911 | 912 | export { index$4 as attach, fx as attachEffects, ecs as attachSystem }; 913 | -------------------------------------------------------------------------------- /examples/WebVR.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @author mrdoob / http://mrdoob.com 3 | * @author Mugen87 / https://github.com/Mugen87 4 | * 5 | * Based on @tojiro's vr-samples-utils.js 6 | */ 7 | 8 | 9 | 10 | var WEBVR = { 11 | 12 | createButton: function ( renderer, options ) { 13 | 14 | if ( options && options.referenceSpaceType ) { 15 | 16 | renderer.vr.setReferenceSpaceType( options.referenceSpaceType ); 17 | 18 | } 19 | 20 | function showEnterVR( device ) { 21 | 22 | button.style.display = ''; 23 | 24 | button.style.cursor = 'pointer'; 25 | button.style.left = 'calc(50% - 50px)'; 26 | button.style.width = '100px'; 27 | 28 | button.textContent = 'ENTER VR'; 29 | 30 | button.onmouseenter = function () { 31 | 32 | button.style.opacity = '1.0'; 33 | 34 | }; 35 | 36 | button.onmouseleave = function () { 37 | 38 | button.style.opacity = '0.5'; 39 | 40 | }; 41 | 42 | button.onclick = function () { 43 | 44 | device.isPresenting ? 
device.exitPresent() : device.requestPresent( [ { source: renderer.domElement } ] ); 45 | 46 | }; 47 | 48 | renderer.vr.setDevice( device ); 49 | 50 | } 51 | 52 | function showEnterXR( /*device*/ ) { 53 | 54 | var currentSession = null; 55 | 56 | function onSessionStarted( session ) { 57 | 58 | session.addEventListener( 'end', onSessionEnded ); 59 | 60 | renderer.vr.setSession( session ); 61 | button.textContent = 'EXIT XR'; 62 | 63 | currentSession = session; 64 | 65 | } 66 | 67 | function onSessionEnded( /*event*/ ) { 68 | 69 | currentSession.removeEventListener( 'end', onSessionEnded ); 70 | 71 | renderer.vr.setSession( null ); 72 | button.textContent = 'ENTER XR'; 73 | 74 | currentSession = null; 75 | 76 | } 77 | 78 | // 79 | 80 | button.style.display = ''; 81 | 82 | button.style.cursor = 'pointer'; 83 | button.style.left = 'calc(50% - 50px)'; 84 | button.style.width = '100px'; 85 | 86 | button.textContent = 'ENTER XR'; 87 | 88 | button.onmouseenter = function () { 89 | 90 | button.style.opacity = '1.0'; 91 | 92 | }; 93 | 94 | button.onmouseleave = function () { 95 | 96 | button.style.opacity = '0.5'; 97 | 98 | }; 99 | 100 | button.onclick = function () { 101 | 102 | if ( currentSession === null ) { 103 | 104 | // WebXR's requestReferenceSpace only works if the corresponding feature 105 | // was requested at session creation time. For simplicity, just ask for 106 | // the interesting ones as optional features, but be aware that the 107 | // requestReferenceSpace call will fail if it turns out to be unavailable. 108 | // ('local' is always available for immersive sessions and doesn't need to 109 | // be requested separately.) 110 | 111 | var sessionInit = { optionalFeatures: [ 'local-floor', 'bounded-floor' ] }; 112 | navigator.xr.requestSession( 'immersive-vr', sessionInit ).then( onSessionStarted ); 113 | 114 | } else { 115 | 116 | currentSession.end(); 117 | 118 | } 119 | 120 | }; 121 | 122 | } 123 | 124 | function disableButton() { 125 | 126 | button.style.display = ''; 127 | 128 | button.style.cursor = 'auto'; 129 | button.style.left = 'calc(50% - 75px)'; 130 | button.style.width = '150px'; 131 | 132 | button.onmouseenter = null; 133 | button.onmouseleave = null; 134 | 135 | button.onclick = null; 136 | 137 | } 138 | 139 | function showVRNotFound() { 140 | 141 | disableButton(); 142 | 143 | button.textContent = 'VR NOT FOUND'; 144 | 145 | renderer.vr.setDevice( null ); 146 | 147 | } 148 | 149 | function showXRNotFound() { 150 | 151 | disableButton(); 152 | 153 | button.textContent = 'XR NOT FOUND'; 154 | 155 | } 156 | 157 | function stylizeElement( element ) { 158 | 159 | element.style.position = 'absolute'; 160 | element.style.bottom = '20px'; 161 | element.style.padding = '12px 6px'; 162 | element.style.border = '1px solid #fff'; 163 | element.style.borderRadius = '4px'; 164 | element.style.background = 'rgba(0,0,0,0.1)'; 165 | element.style.color = '#fff'; 166 | element.style.font = 'normal 13px sans-serif'; 167 | element.style.textAlign = 'center'; 168 | element.style.opacity = '0.5'; 169 | element.style.outline = 'none'; 170 | element.style.zIndex = '999'; 171 | 172 | } 173 | 174 | if ( 'xr' in navigator && 'supportsSession' in navigator.xr ) { 175 | 176 | var button = document.createElement( 'button' ); 177 | button.style.display = 'none'; 178 | 179 | stylizeElement( button ); 180 | 181 | navigator.xr.supportsSession( 'immersive-vr' ).then( showEnterXR ).catch( showXRNotFound ); 182 | 183 | return button; 184 | 185 | } else if ( 'getVRDisplays' in navigator ) { 186 | 187 | 
var button = document.createElement( 'button' ); 188 | button.style.display = 'none'; 189 | 190 | stylizeElement( button ); 191 | 192 | window.addEventListener( 'vrdisplayconnect', function ( event ) { 193 | 194 | showEnterVR( event.display ); 195 | 196 | }, false ); 197 | 198 | window.addEventListener( 'vrdisplaydisconnect', function ( /*event*/ ) { 199 | 200 | showVRNotFound(); 201 | 202 | }, false ); 203 | 204 | window.addEventListener( 'vrdisplaypresentchange', function ( event ) { 205 | 206 | button.textContent = event.display.isPresenting ? 'EXIT VR' : 'ENTER VR'; 207 | 208 | }, false ); 209 | 210 | window.addEventListener( 'vrdisplayactivate', function ( event ) { 211 | 212 | event.display.requestPresent( [ { source: renderer.domElement } ] ); 213 | 214 | }, false ); 215 | 216 | navigator.getVRDisplays() 217 | .then( function ( displays ) { 218 | 219 | if ( displays.length > 0 ) { 220 | 221 | showEnterVR( displays[ 0 ] ); 222 | 223 | } else { 224 | 225 | showVRNotFound(); 226 | 227 | } 228 | 229 | } ).catch( showVRNotFound ); 230 | 231 | return button; 232 | 233 | } else { 234 | 235 | var message = document.createElement( 'a' ); 236 | message.href = 'https://webvr.info'; 237 | message.innerHTML = 'WEBVR NOT SUPPORTED'; 238 | 239 | message.style.left = 'calc(50% - 90px)'; 240 | message.style.width = '180px'; 241 | message.style.textDecoration = 'none'; 242 | 243 | stylizeElement( message ); 244 | 245 | return message; 246 | 247 | } 248 | 249 | } 250 | 251 | }; 252 | 253 | export { WEBVR }; 254 | -------------------------------------------------------------------------------- /examples/basic/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Click/trigger to cycle views 5 | 6 | 7 | 8 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 199 | 200 | 201 | -------------------------------------------------------------------------------- /examples/canary/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | three.js effects 5 | 6 | 7 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 207 | 208 | 209 | -------------------------------------------------------------------------------- /examples/defer/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | three.js effects 5 | 6 | 7 | 8 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 216 | 217 | 218 | -------------------------------------------------------------------------------- /examples/madrid/assets/column/diffuse.basis: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SamsungInternet/three-effects/abe587f906054486bcffeeb6f2ccd554f6bb0ad8/examples/madrid/assets/column/diffuse.basis -------------------------------------------------------------------------------- /examples/madrid/assets/column/diffuse.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SamsungInternet/three-effects/abe587f906054486bcffeeb6f2ccd554f6bb0ad8/examples/madrid/assets/column/diffuse.png -------------------------------------------------------------------------------- /examples/madrid/assets/column/normals.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SamsungInternet/three-effects/abe587f906054486bcffeeb6f2ccd554f6bb0ad8/examples/madrid/assets/column/normals.png 
-------------------------------------------------------------------------------- /examples/madrid/assets/ground/arg.basis: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SamsungInternet/three-effects/abe587f906054486bcffeeb6f2ccd554f6bb0ad8/examples/madrid/assets/ground/arg.basis -------------------------------------------------------------------------------- /examples/madrid/assets/ground/arg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SamsungInternet/three-effects/abe587f906054486bcffeeb6f2ccd554f6bb0ad8/examples/madrid/assets/ground/arg.png -------------------------------------------------------------------------------- /examples/madrid/assets/ground/diffuse.basis: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SamsungInternet/three-effects/abe587f906054486bcffeeb6f2ccd554f6bb0ad8/examples/madrid/assets/ground/diffuse.basis -------------------------------------------------------------------------------- /examples/madrid/assets/ground/diffuse.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SamsungInternet/three-effects/abe587f906054486bcffeeb6f2ccd554f6bb0ad8/examples/madrid/assets/ground/diffuse.jpg -------------------------------------------------------------------------------- /examples/madrid/assets/ground/diffuse.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SamsungInternet/three-effects/abe587f906054486bcffeeb6f2ccd554f6bb0ad8/examples/madrid/assets/ground/diffuse.png -------------------------------------------------------------------------------- /examples/madrid/assets/ground/normals.basis: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SamsungInternet/three-effects/abe587f906054486bcffeeb6f2ccd554f6bb0ad8/examples/madrid/assets/ground/normals.basis -------------------------------------------------------------------------------- /examples/madrid/assets/ground/normals.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SamsungInternet/three-effects/abe587f906054486bcffeeb6f2ccd554f6bb0ad8/examples/madrid/assets/ground/normals.png -------------------------------------------------------------------------------- /examples/madrid/assets/tick.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SamsungInternet/three-effects/abe587f906054486bcffeeb6f2ccd554f6bb0ad8/examples/madrid/assets/tick.wav -------------------------------------------------------------------------------- /examples/madrid/assets/venus/arg.basis: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SamsungInternet/three-effects/abe587f906054486bcffeeb6f2ccd554f6bb0ad8/examples/madrid/assets/venus/arg.basis -------------------------------------------------------------------------------- /examples/madrid/assets/venus/arg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SamsungInternet/three-effects/abe587f906054486bcffeeb6f2ccd554f6bb0ad8/examples/madrid/assets/venus/arg.png 
-------------------------------------------------------------------------------- /examples/madrid/assets/venus/diffuse.basis: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SamsungInternet/three-effects/abe587f906054486bcffeeb6f2ccd554f6bb0ad8/examples/madrid/assets/venus/diffuse.basis -------------------------------------------------------------------------------- /examples/madrid/assets/venus/diffuse.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SamsungInternet/three-effects/abe587f906054486bcffeeb6f2ccd554f6bb0ad8/examples/madrid/assets/venus/diffuse.jpg -------------------------------------------------------------------------------- /examples/madrid/assets/venus/diffuse.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SamsungInternet/three-effects/abe587f906054486bcffeeb6f2ccd554f6bb0ad8/examples/madrid/assets/venus/diffuse.png -------------------------------------------------------------------------------- /examples/madrid/assets/venus/normals.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SamsungInternet/three-effects/abe587f906054486bcffeeb6f2ccd554f6bb0ad8/examples/madrid/assets/venus/normals.png -------------------------------------------------------------------------------- /examples/madrid/assets/voop.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SamsungInternet/three-effects/abe587f906054486bcffeeb6f2ccd554f6bb0ad8/examples/madrid/assets/voop.wav -------------------------------------------------------------------------------- /examples/madrid/assets/woosh.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SamsungInternet/three-effects/abe587f906054486bcffeeb6f2ccd554f6bb0ad8/examples/madrid/assets/woosh.wav -------------------------------------------------------------------------------- /examples/madrid/assets/zit.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SamsungInternet/three-effects/abe587f906054486bcffeeb6f2ccd554f6bb0ad8/examples/madrid/assets/zit.wav -------------------------------------------------------------------------------- /examples/madrid/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | three-effects :: Madrid 5 | 6 | 7 | 8 | 53 | 54 | 55 | 56 | 57 | 58 | 59 |
60 |
61 |
62 |
63 |
64 | 156 | 157 | 158 | -------------------------------------------------------------------------------- /examples/madrid/src/ground.js: -------------------------------------------------------------------------------- 1 | import {THREE} from "../../../dist/three-effects.js"; 2 | import pop from "./pop.js"; 3 | 4 | export default function (renderer, scene, camera, assets) { 5 | pop(scene); 6 | 7 | var group = new THREE.Group(); 8 | 9 | var material = new THREE.MeshStandardMaterial({ 10 | roughness: 1, 11 | aoMapIntensity: 2, 12 | aoMap: assets["ground_material"], 13 | map: assets["ground_diffuse"], 14 | roughnessMap: assets["ground_material"], 15 | normalMap: assets["ground_normals"] 16 | }); 17 | 18 | (["map", "roughnessMap", "normalMap", "metalnessMap", "aoMap"]).forEach(function(k){ 19 | if(!material[k]) return; 20 | material[k].wrapS = THREE.MirroredRepeatWrapping; 21 | material[k].wrapT = THREE.MirroredRepeatWrapping; 22 | material[k].anisotropy = 4; 23 | material[k].repeat.set(333, 333); 24 | }); 25 | 26 | var mesh = new THREE.Mesh(new THREE.PlaneBufferGeometry(1000,1000), material); 27 | 28 | mesh.receiveShadow = true; 29 | 30 | mesh.rotation.x = -Math.PI / 2; 31 | //mesh.position.y = -1; 32 | //mesh.visible = false; 33 | 34 | group.add(mesh); 35 | 36 | var pg = new THREE.PlaneBufferGeometry(2,2); 37 | pg.rotateX(-Math.PI / 2); 38 | 39 | 40 | var pointers = ([0, 1]).map(function(i){ 41 | var timeUniform = { value: 0 }; 42 | 43 | var pointer = new THREE.Mesh(pg, new THREE.MeshBasicMaterial({ depthWrite: false, depthTest: true, transparent: true })); 44 | pointer.material.onBeforeCompile = function(shader){ 45 | shader.uniforms.time = timeUniform; 46 | shader.vertexShader = "varying vec2 vUv;\n" + shader.vertexShader.replace("#include ", ` 47 | vUv = uv; 48 | ` 49 | ); 50 | shader.fragmentShader = "varying vec2 vUv;uniform float time;\n" + shader.fragmentShader.replace("gl_FragColor", ` 51 | float d = distance(vUv, vec2(0.5)); 52 | d = min(d, smoothstep(0.5, 0.3 + 0.06 * sin(time), d)) ; 53 | diffuseColor.a = pow(d, 3.) 
* 9.; 54 | gl_FragColor` 55 | ); 56 | }; 57 | 58 | pointer.time = timeUniform; 59 | pointer.material.color.setHex(0xCCCCCC); 60 | pointer.visible = false; 61 | group.add(pointer); 62 | return pointer; 63 | }); 64 | 65 | scene.dispatchEvent({ type: "interact/register", entity: mesh }); 66 | 67 | function getPointer(e) { 68 | return pointers[e.hand.index || 0]; 69 | } 70 | 71 | mesh.addEventListener("interact/enter", function(e){ 72 | getPointer(e).visible = true; 73 | }); 74 | 75 | mesh.addEventListener("interact/leave", function(e){ 76 | getPointer(e).visible = false; 77 | }); 78 | 79 | mesh.addEventListener("interact/move", function(e){ 80 | var pointer = getPointer(e); 81 | if(e.hand.hold) { 82 | var sc = 1 - e.hand.hold; 83 | pointer.scale.set(sc,sc,sc); 84 | } else { 85 | pointer.scale.set(1,1,1); 86 | } 87 | pointer.position.copy(e.point); 88 | pointer.position.y += 0.01; 89 | pointer.time.value += 0.02; 90 | }); 91 | 92 | var evt = { type: "teleport", position: new THREE.Vector3() }; 93 | 94 | mesh.addEventListener("interact/hold", function(e){ 95 | evt.position.copy(e.point); 96 | scene.dispatchEvent(evt); 97 | }); 98 | 99 | assets["column_model"].translate(0, -4.4, 0); 100 | 101 | assets["column_model"].scale(0.66, 0.66, 0.66); 102 | 103 | assets["column_model"].computeBoundingBox(); 104 | 105 | function addColumn(pos) { 106 | var mesh = new THREE.Mesh(assets["column_model"], new THREE.MeshStandardMaterial({ 107 | metalness: 0, 108 | roughness:0.66, 109 | aoMap: assets["column_diffuse"], 110 | map: assets["column_diffuse"], 111 | roughnessMap: assets["column_diffuse"], 112 | normalMap: assets["column_normals"] 113 | })); 114 | 115 | 116 | scene.dispatchEvent({ type: "interact/register", entity: mesh }); 117 | 118 | mesh.addEventListener("interact/enter", function(e){ 119 | mesh.material.color.setHex(0x999999); 120 | scene.dispatchEvent({ type: "audio/tick" }); 121 | }); 122 | 123 | mesh.addEventListener("interact/leave", function(e){ 124 | mesh.material.color.setHex(0xFFFFFF); 125 | }); 126 | 127 | mesh.addEventListener("interact/release", function(e){ 128 | scene.dispatchEvent({ type: "popout/register", entity: mesh }); 129 | //group.remove(mesh); 130 | }); 131 | 132 | mesh.position.copy(pos); 133 | 134 | scene.dispatchEvent({ type: "popin/register", entity: mesh }); 135 | 136 | mesh.castShadow = true; 137 | 138 | mesh.receiveShadow = true; 139 | 140 | renderer.shadowMap.needsUpdate = true; 141 | 142 | scene.dispatchEvent({ type: "audio/voop" }); 143 | 144 | group.add(mesh); 145 | } 146 | 147 | mesh.addEventListener("interact/release", function(e){ 148 | if(e.point) addColumn(e.point); 149 | }); 150 | 151 | return group; 152 | } -------------------------------------------------------------------------------- /examples/madrid/src/index.js: -------------------------------------------------------------------------------- 1 | import { THREE, attachEffects, attach } from "../../../dist/three-effects.js"; 2 | 3 | import initGround from "./ground.js"; 4 | import initSky from "./sky.js"; 5 | import initStatues from "./statues.js"; 6 | import attachInteract from "./interact.js"; 7 | import attachLabel from "./label.js"; 8 | 9 | export default function (renderer, scene, camera, assets) { 10 | 11 | renderer.shadowMap.enabled = true; 12 | renderer.shadowMap.autoUpdate = false; 13 | renderer.shadowMap.needsUpdate = true; 14 | 15 | var user = new THREE.Group(); 16 | user.add(camera); 17 | user.add(renderer.vr.getController(0),renderer.vr.getController(1)); 18 | scene.add(user); 19 | 20 | var 
targetPos = new THREE.Vector3(); 21 | var origPos = new THREE.Vector3(); 22 | 23 | var teleportTime = 0; 24 | 25 | scene.addEventListener("teleport", function(e) { 26 | scene.dispatchEvent({ type: "audio/woosh" }); 27 | origPos.copy(user.position); 28 | targetPos.copy(e.position); 29 | teleportTime = window.performance.now(); 30 | }); 31 | 32 | scene.addEventListener("beforeRender", function(e) { 33 | user.position.copy(origPos); 34 | user.position.lerp( targetPos, 1 - Math.pow( 1 - Math.min(1, (e.time - teleportTime) / 900 ), 6) ); 35 | }); 36 | 37 | 38 | camera.position.y = 1.75; 39 | 40 | var fx = attachEffects(scene); 41 | 42 | window.fx = fx; 43 | 44 | var allFX = { 45 | // ssao: true, 46 | // outline: false, 47 | // godrays: true, 48 | // colors: false, 49 | "!fxaa": false, 50 | bloom: false, 51 | "!glitch": false, 52 | filmgrain: false 53 | } 54 | 55 | attach.bloom(scene, { strength: 0.33, radius: 1, threshold: 0.5 }); 56 | attach.glitch(scene, { snow:0.2 }); 57 | 58 | scene.userData.glitch_intensity.value = 0.8; 59 | window.scene = scene; 60 | scene.userData.bloom_internal.prePass.onBeforeCompile = function (shader) { 61 | shader.fragmentShader = shader.fragmentShader.replace("gl_FragColor", "alpha *= smoothstep(1., 0.999, texture2D(depthTexture, vUv).r);\ngl_FragColor"); 62 | } 63 | 64 | attach.filmgrain(scene); 65 | 66 | attachLabel(scene, assets); 67 | 68 | attachInteract(scene, {debug: true}); 69 | 70 | 71 | function setupFX() { 72 | var arr = []; 73 | for(var k in allFX) { 74 | if(allFX[k]) arr.push(k); 75 | } 76 | fx(arr); 77 | } 78 | 79 | setupFX(); 80 | 81 | //fx(null); 82 | 83 | scene.addEventListener("beforeRender", function(e) { 84 | scene.userData["filmgrain_time"].value = e.time / 1000; 85 | }); 86 | 87 | scene.addEventListener("option", function(e) { 88 | if(e.name in allFX) { 89 | allFX[e.name] = e.value; 90 | setupFX(); 91 | } 92 | }); 93 | 94 | var objects = { 95 | sky: initSky(renderer, scene, camera, assets), 96 | ground: initGround(renderer, scene, camera, assets), 97 | statues: initStatues(renderer, scene, camera, assets) 98 | } 99 | 100 | Object.values(objects).forEach(function (o) { scene.add(o); }); 101 | 102 | var listener; 103 | 104 | var firstClick = function () { 105 | listener = new THREE.AudioListener(); 106 | listener.context.resume(); 107 | camera.add( listener ); 108 | 109 | function attachSound(name) { 110 | var s = new THREE.Audio( listener ); 111 | s.setBuffer(assets[name]); 112 | s.setVolume( 1.0 ); 113 | scene.addEventListener("audio/" + name, function (){ 114 | if(s.isPlaying) { 115 | var s2 = new THREE.Audio( listener ); 116 | s2.setBuffer(assets[name]); 117 | s2.setVolume( 1.0 ); 118 | s2.play(); 119 | } else { 120 | s.play(); 121 | } 122 | }); 123 | } 124 | 125 | (["woosh", "tick", "voop", "zit"]).forEach(attachSound); 126 | 127 | window.removeEventListener("click", firstClick); 128 | } 129 | 130 | window.addEventListener("click", firstClick); 131 | } 132 | -------------------------------------------------------------------------------- /examples/madrid/src/interact.js: -------------------------------------------------------------------------------- 1 | import { THREE, attachSystem } from "../../../dist/three-effects.js"; 2 | 3 | export default function (scene, config) { 4 | config = config || {}; 5 | 6 | var holdDuration = config.holdDuration || 1000; 7 | 8 | var tubeGeometry = new THREE.CylinderBufferGeometry(0.01, 0.01, 1, 12, 1, true); 9 | tubeGeometry.translate(0, 0.5, 0); 10 | tubeGeometry.rotateX(Math.PI / 2); 11 | 12 | function 
getHand(renderer, id) { 13 | var c = renderer ? renderer.vr.getController(id) : new THREE.Group(); 14 | 15 | var ret = { 16 | index: id || 0, 17 | controller: c, 18 | ray: new THREE.Ray(), 19 | armed: false, 20 | pressTime: 0, 21 | pressed: false, 22 | object: null, 23 | mesh: renderer ? new THREE.Mesh(tubeGeometry, new THREE.MeshBasicMaterial({ 24 | color: 0xDDEEFF, 25 | transparent: true, 26 | depthTest: false, 27 | depthWrite: false 28 | })) : new THREE.Group(), 29 | raycaster: new THREE.Raycaster(), 30 | isMouse: !renderer 31 | } 32 | 33 | ret.mesh.visible = false; 34 | 35 | if(ret.mesh.material) { 36 | ret.mesh.material.opacity = 0.1; 37 | scene.add(ret.mesh); 38 | } 39 | 40 | ret.startFn = function (e) { 41 | ret.armed = true; 42 | ret.pressed = true; 43 | ret.pressTime = window.performance.now(); 44 | if(ret.object) dispatch(ret.object, ret.hit || event, "interact/press") 45 | } 46 | 47 | ret.endFn = function (e) { 48 | ret.pressed = false; 49 | if(ret.object && ret.armed) dispatch(ret.object, ret.hit || event, "interact/release"); 50 | ret.armed = false; 51 | } 52 | 53 | c.addEventListener("selectstart", ret.startFn); 54 | c.addEventListener("selectend", ret.endFn); 55 | 56 | return ret; 57 | } 58 | 59 | var nohands = [getHand()]; 60 | var hands; 61 | 62 | var event = { type: ""}; 63 | 64 | function dispatch(obj, ev, s) { 65 | ev.type = s; 66 | if(s !== "interact/move" && config.debug) console.log(s, ev.hand.index); 67 | obj.dispatchEvent(ev); 68 | 69 | } 70 | 71 | var vfrom = new THREE.Vector3(0, 0, 1); 72 | 73 | var mouse = new THREE.Vector2(); 74 | 75 | var isRotating = false; 76 | 77 | if(config.debug) { 78 | 79 | nohands[0].mesh.visible = true; 80 | 81 | function onMouseMove( event ) { 82 | mouse.x = ( event.clientX / window.innerWidth ) * 2 - 1; 83 | mouse.y = - ( event.clientY / window.innerHeight ) * 2 + 1; 84 | 85 | } 86 | 87 | window.addEventListener( 'mousemove', onMouseMove, false ); 88 | 89 | 90 | function onMouseDown( event ) { 91 | if(event.which === 3) { 92 | isRotating = true; 93 | } else { 94 | nohands[0].controller.dispatchEvent({ type: "selectstart" }); 95 | } 96 | } 97 | 98 | window.addEventListener( 'mousedown', onMouseDown, false ); 99 | 100 | function onMouseUp( event ) { 101 | if (event.which === 3) { 102 | isRotating = false; 103 | } else { 104 | nohands[0].controller.dispatchEvent({ type: "selectend" }); 105 | } 106 | } 107 | 108 | window.addEventListener( 'mouseup', onMouseUp, false ); 109 | 110 | document.body.addEventListener("contextmenu", function(evt){evt.preventDefault();return false;}); 111 | } 112 | 113 | var euler = new THREE.Euler( 0, 0, 0, 'YXZ' ); 114 | 115 | attachSystem(scene, "interact", { 116 | init: function (e, objects, name) { 117 | var data = e.data || {}; 118 | return { 119 | important: data.important || false 120 | } 121 | }, 122 | 123 | beforeRender: function (e, objects, name) { 124 | if(!hands) hands = [getHand(e.renderer, 0), getHand(e.renderer, 1)]; 125 | 126 | var t = window.performance.now(); 127 | 128 | event.time = t; 129 | 130 | (e.renderer.vr.isPresenting() ? 
hands : nohands).forEach(function (hand) { 131 | var currentObject = hand.object; 132 | var c = hand.controller; 133 | hand.mesh.visible = c.visible; 134 | 135 | if(!c.visible) return; 136 | 137 | var r = hand.ray; 138 | 139 | if(hand.isMouse) { 140 | if(isRotating) { 141 | euler.y += -mouse.x * 0.01; 142 | euler.x += mouse.y * 0.01; 143 | euler.x = Math.min(Math.PI * 0.49, Math.max(-Math.PI * 0.49, euler.x)); 144 | } 145 | e.camera.quaternion.setFromEuler(euler); 146 | hand.raycaster.setFromCamera( mouse, e.camera ); 147 | } else { 148 | c.getWorldPosition(r.origin); 149 | c.getWorldDirection(r.direction); 150 | r.direction.negate(); 151 | hand.raycaster.ray.origin.lerp(r.origin, 0.3); 152 | hand.raycaster.ray.direction.lerp(r.direction, 0.2); 153 | } 154 | 155 | hand.mesh.quaternion.setFromUnitVectors( vfrom, hand.raycaster.ray.direction ); 156 | 157 | hand.mesh.position.copy( hand.raycaster.ray.origin); 158 | 159 | var intersects = hand.raycaster.intersectObjects( objects ); 160 | 161 | delete hand.hit; 162 | 163 | event.hand = hand; 164 | 165 | if(intersects.length) { 166 | var hit = intersects[0]; 167 | var obj = hit.object; 168 | hit.hand = hand; 169 | hand.hit = hit; 170 | hit.time = t; 171 | 172 | hand.mesh.scale.z = hit.distance; 173 | 174 | if(obj !== currentObject){ 175 | dispatch(obj, hit, "interact/enter"); 176 | if(currentObject) { 177 | hand.armed = false; 178 | dispatch(currentObject, event, "interact/leave"); 179 | } 180 | currentObject = obj; 181 | } 182 | 183 | hand.hold = hand.armed ? (t - hand.pressTime) / holdDuration : 0; 184 | 185 | if(hand.armed && t >= hand.pressTime + holdDuration){ 186 | dispatch(obj, hit, "interact/hold"); 187 | hand.hold = 0; 188 | hand.armed = false; 189 | hand.pressTime = t; 190 | } 191 | 192 | dispatch(currentObject, hit, "interact/move"); 193 | } else if(currentObject){ 194 | hand.armed = false; 195 | hand.hold = 0; 196 | dispatch(currentObject, event, "interact/leave"); 197 | currentObject = null; 198 | } 199 | 200 | hand.object = currentObject; 201 | 202 | if(hand.mesh.material) hand.mesh.material.opacity = (hand.hold ? 0.1 : 0) + (currentObject && currentObject.userData.interact.important ? 0.2 : 0.1); 203 | 204 | //hand.mesh.visible = false; 205 | }) 206 | } 207 | }) 208 | } -------------------------------------------------------------------------------- /examples/madrid/src/label.js: -------------------------------------------------------------------------------- 1 | import { attachSystem, THREE } from "../../../dist/three-effects.js"; 2 | 3 | export default function (scene, config) { 4 | 5 | config = config || {} 6 | 7 | var _scene = new THREE.Scene(); 8 | 9 | var lods = config.lods || [0, 10]; 10 | 11 | var template = config.template || function(d, lod) { 12 | var w = lod ? 256 : 1024; 13 | var h = lod ? 64 : 256; 14 | 15 | var text = lod ? d.text.toUpperCase() : d.text; 16 | 17 | var samsung = ` 18 | 20 | 24 | 25 | 26 | 27 | 29 | 30 | 31 | 32 | 33 | 34 | `; 35 | return `` + 36 | (lod ? 
"" : ` `) + 37 | `${text} 39 | 40 | ${text} 42 | 43 | `; 44 | } 45 | 46 | attachSystem(scene, "label", { 47 | init: function (e, objects, name) { 48 | var ret = Object.assign({}, e); 49 | 50 | if(!ret.visible) ret.visible = false; 51 | 52 | ret[""] = new THREE.LOD(); 53 | 54 | lods.forEach(function (ds, i) { 55 | var mesh = mesh = new THREE.Sprite(new THREE.SpriteMaterial( { 56 | map: new THREE.Texture(new Image()), 57 | color: 0xffffff, 58 | transparent: true 59 | } )); 60 | 61 | mesh.visible = ret.visible; 62 | 63 | mesh.material.map.minFilter = THREE.LinearFilter; 64 | 65 | function isPow2(n) { 66 | return n && (n & (n - 1)) === 0; 67 | } 68 | 69 | mesh.material.map.image.onload = function () { 70 | mesh.material.map.needsUpdate = true; 71 | mesh.scale.set(this.naturalWidth / this.naturalHeight, 1, 1); 72 | if(isPow2(this.naturalWidth) && isPow2(this.naturalHeight)) { 73 | mesh.material.map.minFilter = THREE.LinearMipmapLinearFilter; 74 | console.log(this.naturalWidth, this.naturalHeight) 75 | } else { 76 | mesh.material.map.minFilter = THREE.LinearFilter; 77 | } 78 | }; 79 | 80 | ret[""].addLevel(mesh, ds); 81 | }); 82 | 83 | _scene.add(ret[""]); 84 | 85 | ret.needsUpdate = true; 86 | 87 | return ret; 88 | }, 89 | 90 | remove: function (e, objects, name) { 91 | var d = e.entity.userData[name]; 92 | d.mesh.material.map.dispose(); 93 | d.mesh.material.dispose(); 94 | _scene.remove(d.mesh); 95 | }, 96 | 97 | afterEffects: function(e, objects, name) { 98 | objects.forEach(function (obj) { 99 | var d = obj.userData[name]; 100 | 101 | if(d.needsUpdate) { 102 | d[""].levels.forEach(function(l, i){ 103 | var o = l.object; 104 | o.material.map.image.src = "data:image/svg+xml;utf8," + encodeURIComponent((d.template || template)(d, i)); 105 | document.body.appendChild(o.material.map.image) 106 | }); 107 | delete d.needsUpdate; 108 | }; 109 | 110 | project(obj, d); 111 | }); 112 | 113 | var old = e.renderer.autoClear; 114 | e.renderer.autoClear = false; 115 | e.renderer.render(_scene, e.camera); 116 | e.renderer.autoClear = old; 117 | } 118 | }); 119 | 120 | var box = new THREE.Box3(); 121 | 122 | function project(obj, d) { 123 | var m = d[""]; 124 | if(!d.visible || !obj.visible) { 125 | m.visible = false; 126 | return; 127 | } 128 | 129 | m.visible = true; 130 | 131 | if(!obj.geometry.boundingBox) obj.geometry.computeBoundingBox(); 132 | 133 | box.copy(obj.geometry.boundingBox); 134 | box.applyMatrix4(obj.matrixWorld); 135 | 136 | obj.getWorldPosition(d[""].position); 137 | 138 | d[""].position.y = box.max.y + 1; 139 | } 140 | } -------------------------------------------------------------------------------- /examples/madrid/src/lib/Sky.js: -------------------------------------------------------------------------------- 1 | 2 | /** 3 | * @author zz85 / https://github.com/zz85 4 | * 5 | * Based on "A Practical Analytic Model for Daylight" 6 | * aka The Preetham Model, the de facto standard analytic skydome model 7 | * http://www.cs.utah.edu/~shirley/papers/sunsky/sunsky.pdf 8 | * 9 | * First implemented by Simon Wallner 10 | * http://www.simonwallner.at/projects/atmospheric-scattering 11 | * 12 | * Improved by Martin Upitis 13 | * http://blenderartists.org/forum/showthread.php?245954-preethams-sky-impementation-HDR 14 | * 15 | * Three.js integration by zz85 http://twitter.com/blurspline 16 | */ 17 | 18 | import { THREE } from "../../../../dist/three-effects.js"; 19 | 20 | const { 21 | BackSide, 22 | BoxBufferGeometry, 23 | Mesh, 24 | ShaderMaterial, 25 | UniformsUtils, 26 | Vector3 27 | } = 
THREE; 28 | 29 | 30 | var Sky = function () { 31 | 32 | var shader = Sky.SkyShader; 33 | 34 | var material = new ShaderMaterial( { 35 | fragmentShader: shader.fragmentShader, 36 | vertexShader: shader.vertexShader, 37 | uniforms: UniformsUtils.clone( shader.uniforms ), 38 | side: BackSide 39 | } ); 40 | 41 | Mesh.call( this, new BoxBufferGeometry( 1000, 1000, 1000 ), material ); 42 | 43 | }; 44 | 45 | Sky.prototype = Object.create( Mesh.prototype ); 46 | 47 | Sky.SkyShader = { 48 | 49 | uniforms: { 50 | "luminance": { value: 1 }, 51 | "turbidity": { value: 2 }, 52 | "rayleigh": { value: 1 }, 53 | "mieCoefficient": { value: 0.005 }, 54 | "mieDirectionalG": { value: 0.8 }, 55 | "sunPosition": { value: new Vector3(0, 1, 1).normalize() }, 56 | "up": { value: new Vector3( 0, 1, 0 ) } 57 | }, 58 | 59 | vertexShader: [ 60 | 'uniform vec3 sunPosition;', 61 | 'uniform float rayleigh;', 62 | 'uniform float turbidity;', 63 | 'uniform float mieCoefficient;', 64 | 'uniform vec3 up;', 65 | 66 | 'varying vec3 vWorldPosition;', 67 | 'varying vec3 vSunDirection;', 68 | 'varying float vSunfade;', 69 | 'varying vec3 vBetaR;', 70 | 'varying vec3 vBetaM;', 71 | 'varying float vSunE;', 72 | 73 | // constants for atmospheric scattering 74 | 'const float e = 2.71828182845904523536028747135266249775724709369995957;', 75 | 'const float pi = 3.141592653589793238462643383279502884197169;', 76 | 77 | // wavelength of used primaries, according to preetham 78 | 'const vec3 lambda = vec3( 680E-9, 550E-9, 450E-9 );', 79 | // this pre-calcuation replaces older TotalRayleigh(vec3 lambda) function: 80 | // (8.0 * pow(pi, 3.0) * pow(pow(n, 2.0) - 1.0, 2.0) * (6.0 + 3.0 * pn)) / (3.0 * N * pow(lambda, vec3(4.0)) * (6.0 - 7.0 * pn)) 81 | 'const vec3 totalRayleigh = vec3( 5.804542996261093E-6, 1.3562911419845635E-5, 3.0265902468824876E-5 );', 82 | 83 | // mie stuff 84 | // K coefficient for the primaries 85 | 'const float v = 4.0;', 86 | 'const vec3 K = vec3( 0.686, 0.678, 0.666 );', 87 | // MieConst = pi * pow( ( 2.0 * pi ) / lambda, vec3( v - 2.0 ) ) * K 88 | 'const vec3 MieConst = vec3( 1.8399918514433978E14, 2.7798023919660528E14, 4.0790479543861094E14 );', 89 | 90 | // earth shadow hack 91 | // cutoffAngle = pi / 1.95; 92 | 'const float cutoffAngle = 1.6110731556870734;', 93 | 'const float steepness = 1.5;', 94 | 'const float EE = 1000.0;', 95 | 96 | 'float sunIntensity( float zenithAngleCos ) {', 97 | ' zenithAngleCos = clamp( zenithAngleCos, -1.0, 1.0 );', 98 | ' return EE * max( 0.0, 1.0 - pow( e, -( ( cutoffAngle - acos( zenithAngleCos ) ) / steepness ) ) );', 99 | '}', 100 | 101 | 'vec3 totalMie( float T ) {', 102 | ' float c = ( 0.2 * T ) * 10E-18;', 103 | ' return 0.434 * c * MieConst;', 104 | '}', 105 | 106 | 'void main() {', 107 | 108 | ' vec4 worldPosition = modelMatrix * vec4( position, 1.0 );', 109 | ' vWorldPosition = worldPosition.xyz;', 110 | 111 | ' gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );', 112 | ' gl_Position.z = gl_Position.w;', // set z to camera.far 113 | 114 | ' vSunDirection = normalize( sunPosition );', 115 | ' vSunDirection.x *= -1.;', 116 | ' vSunE = sunIntensity( dot( vSunDirection, up ) );', 117 | 118 | ' vSunfade = 1.0 - clamp( 1.0 - exp( ( vSunDirection.y / 450000.0 ) ), 0.0, 1.0 );', 119 | 120 | ' float rayleighCoefficient = rayleigh - ( 1.0 * ( 1.0 - vSunfade ) );', 121 | 122 | // extinction (absorbtion + out scattering) 123 | // rayleigh coefficients 124 | ' vBetaR = totalRayleigh * rayleighCoefficient;', 125 | 126 | // mie coefficients 127 | ' vBetaM = 
totalMie( turbidity ) * mieCoefficient;', 128 | 129 | '}' 130 | ].join( '\n' ), 131 | 132 | fragmentShader: [ 133 | 'varying vec3 vWorldPosition;', 134 | 'varying vec3 vSunDirection;', 135 | 'varying float vSunfade;', 136 | 'varying vec3 vBetaR;', 137 | 'varying vec3 vBetaM;', 138 | 'varying float vSunE;', 139 | 140 | 'uniform float luminance;', 141 | 'uniform float mieDirectionalG;', 142 | 'uniform vec3 up;', 143 | 144 | 'const vec3 cameraPos = vec3( 0.0, 0.0, 0.0 );', 145 | 146 | // constants for atmospheric scattering 147 | 'const float pi = 3.141592653589793238462643383279502884197169;', 148 | 149 | 'const float n = 1.0003;', // refractive index of air 150 | 'const float N = 2.545E25;', // number of molecules per unit volume for air at 288.15K and 1013mb (sea level -45 celsius) 151 | 152 | // optical length at zenith for molecules 153 | 'const float rayleighZenithLength = 8.4E3;', 154 | 'const float mieZenithLength = 1.25E3;', 155 | // 66 arc seconds -> degrees, and the cosine of that 156 | 'const float sunAngularDiameterCos = 0.999956676946448443553574619906976478926848692873900859324;', 157 | 158 | // 3.0 / ( 16.0 * pi ) 159 | 'const float THREE_OVER_SIXTEENPI = 0.05968310365946075;', 160 | // 1.0 / ( 4.0 * pi ) 161 | 'const float ONE_OVER_FOURPI = 0.07957747154594767;', 162 | 163 | 'float rayleighPhase( float cosTheta ) {', 164 | ' return THREE_OVER_SIXTEENPI * ( 1.0 + pow( cosTheta, 2.0 ) );', 165 | '}', 166 | 167 | 'float hgPhase( float cosTheta, float g ) {', 168 | ' float g2 = pow( g, 2.0 );', 169 | ' float inverse = 1.0 / pow( 1.0 - 2.0 * g * cosTheta + g2, 1.5 );', 170 | ' return ONE_OVER_FOURPI * ( ( 1.0 - g2 ) * inverse );', 171 | '}', 172 | 173 | // Filmic ToneMapping http://filmicgames.com/archives/75 174 | 'const float A = 0.15;', 175 | 'const float B = 0.50;', 176 | 'const float C = 0.10;', 177 | 'const float D = 0.20;', 178 | 'const float E = 0.02;', 179 | 'const float F = 0.30;', 180 | 181 | 'const float whiteScale = 1.0748724675633854;', // 1.0 / Uncharted2Tonemap(1000.0) 182 | 183 | 'vec3 Uncharted2Tonemap( vec3 x ) {', 184 | ' return ( ( x * ( A * x + C * B ) + D * E ) / ( x * ( A * x + B ) + D * F ) ) - E / F;', 185 | '}', 186 | 187 | 188 | 'void main() {', 189 | // optical length 190 | // cutoff angle at 90 to avoid singularity in next formula. 
191 | ' float zenithAngle = acos( max( 0.0, dot( up, normalize( vWorldPosition - cameraPos ) ) ) );', 192 | ' float inverse = 1.0 / ( cos( zenithAngle ) + 0.15 * pow( 93.885 - ( ( zenithAngle * 180.0 ) / pi ), -1.253 ) );', 193 | ' float sR = rayleighZenithLength * inverse;', 194 | ' float sM = mieZenithLength * inverse;', 195 | 196 | // combined extinction factor 197 | ' vec3 Fex = exp( -( vBetaR * sR + vBetaM * sM ) );', 198 | 199 | // in scattering 200 | ' float cosTheta = dot( normalize( vWorldPosition - cameraPos ), vSunDirection );', 201 | 202 | ' float rPhase = rayleighPhase( cosTheta * 0.5 + 0.5 );', 203 | ' vec3 betaRTheta = vBetaR * rPhase;', 204 | 205 | ' float mPhase = hgPhase( cosTheta, mieDirectionalG );', 206 | ' vec3 betaMTheta = vBetaM * mPhase;', 207 | 208 | ' vec3 Lin = pow( vSunE * ( ( betaRTheta + betaMTheta ) / ( vBetaR + vBetaM ) ) * ( 1.0 - Fex ), vec3( 1.5 ) );', 209 | ' Lin *= mix( vec3( 1.0 ), pow( vSunE * ( ( betaRTheta + betaMTheta ) / ( vBetaR + vBetaM ) ) * Fex, vec3( 1.0 / 2.0 ) ), clamp( pow( 1.0 - dot( up, vSunDirection ), 5.0 ), 0.0, 1.0 ) );', 210 | 211 | // nightsky 212 | ' vec3 direction = normalize( vWorldPosition - cameraPos );', 213 | ' float theta = acos( direction.y ); // elevation --> y-axis, [-pi/2, pi/2]', 214 | ' float phi = atan( direction.z, direction.x ); // azimuth --> x-axis [-pi/2, pi/2]', 215 | ' vec2 uv = vec2( phi, theta ) / vec2( 2.0 * pi, pi ) + vec2( 0.5, 0.0 );', 216 | ' vec3 L0 = vec3( 0.1 ) * Fex;', 217 | 218 | // composition + solar disc 219 | ' float sundisk = smoothstep( sunAngularDiameterCos, sunAngularDiameterCos + 0.005, cosTheta );', 220 | ' L0 += ( vSunE * 19000.0 * Fex ) * sundisk;', 221 | 222 | ' vec3 texColor = ( Lin + L0 ) * 0.04 + vec3( 0.0, 0.0003, 0.00075 );', 223 | 224 | ' vec3 curr = Uncharted2Tonemap( ( log2( 2.0 / pow( luminance, 8.0 ) ) ) * texColor );', 225 | ' vec3 color = curr * whiteScale;', 226 | 227 | ' vec3 retColor = pow( color, vec3( 1.0 / ( 1.2 + ( 1.2 * vSunfade ) ) ) );', 228 | 229 | ' gl_FragColor = vec4( retColor, 1.0 );', 230 | 231 | '}' 232 | ].join( '\n' ) 233 | 234 | }; 235 | 236 | export { Sky }; -------------------------------------------------------------------------------- /examples/madrid/src/loader.js: -------------------------------------------------------------------------------- 1 | import { THREE } from "../../../dist/three-effects.js"; 2 | import { OBJLoader } from "./loader/OBJLoader.js"; 3 | import { BasisTextureLoader } from "./loader/BasisTextureLoader.js"; 4 | 5 | var basis_path = './src/loader/'; 6 | 7 | export default function(renderer, files, progressCb) { 8 | var handlers = { 9 | "jpg": THREE.TextureLoader, 10 | "jpeg": THREE.TextureLoader, 11 | "png": THREE.TextureLoader, 12 | "gif": THREE.TextureLoader, 13 | "basis": BasisTextureLoader, 14 | "obj": OBJLoader, 15 | "wav": THREE.AudioLoader, 16 | "mp3": THREE.AudioLoader, 17 | "ogg": THREE.AudioLoader 18 | } 19 | 20 | var assets = {}; 21 | 22 | var total = 0, count = 0; 23 | 24 | function handle (cls, file, key) { 25 | return new Promise(function(resolve){ 26 | var loader = new cls(); 27 | if(cls === BasisTextureLoader) { 28 | loader.setTranscoderPath( basis_path ); 29 | loader.detectSupport( renderer ); 30 | } 31 | total++; 32 | loader.load(file, function ( obj ) { 33 | count++; 34 | assets[key] = obj.type ==="Group" ? 
obj.children[0].geometry : obj; 35 | progressCb(count/total); 36 | resolve(); 37 | }, 38 | undefined, 39 | function ( err ) { 40 | console.error( 'LOAD URL ERROR: ' + file ); 41 | } 42 | ); 43 | }); 44 | 45 | } 46 | 47 | var wp = []; 48 | 49 | for( var k in files) { 50 | var url = files[k]; 51 | var ext = url.split(".").pop().toLowerCase(); 52 | if(ext in handlers) { 53 | wp.push(handle(handlers[ext], url, k)); 54 | } else { 55 | console.warn( 'LOAD EXTENSION UNHANDLED: ' + url ); 56 | } 57 | } 58 | 59 | if(document.fonts) wp.push(document.fonts.ready); 60 | 61 | return Promise.all(wp).then(function () { return assets; }); 62 | } -------------------------------------------------------------------------------- /examples/madrid/src/loader/BasisTextureLoader.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @author Don McCurdy / https://www.donmccurdy.com 3 | * @author Austin Eng / https://github.com/austinEng 4 | * @author Shrek Shao / https://github.com/shrekshao 5 | */ 6 | import { THREE} from "../../../../dist/three-effects.js"; 7 | 8 | const { 9 | CompressedTexture, 10 | FileLoader, 11 | LinearFilter, 12 | LinearMipmapLinearFilter, 13 | Loader, 14 | RGBA_ASTC_4x4_Format, 15 | RGBA_PVRTC_4BPPV1_Format, 16 | RGB_ETC1_Format, 17 | RGB_PVRTC_4BPPV1_Format, 18 | UnsignedByteType 19 | } = THREE; 20 | 21 | /** 22 | * Loader for Basis Universal GPU Texture Codec. 23 | * 24 | * Basis Universal is a "supercompressed" GPU texture and texture video 25 | * compression system that outputs a highly compressed intermediate file format 26 | * (.basis) that can be quickly transcoded to a wide variety of GPU texture 27 | * compression formats. 28 | * 29 | * This loader parallelizes the transcoding process across a configurable number 30 | * of web workers, before transferring the transcoded compressed texture back 31 | * to the main thread. 32 | */ 33 | var BasisTextureLoader = function ( manager ) { 34 | 35 | Loader.call( this, manager ); 36 | 37 | this.transcoderPath = ''; 38 | this.transcoderBinary = null; 39 | this.transcoderPending = null; 40 | 41 | this.workerLimit = 4; 42 | this.workerPool = []; 43 | this.workerNextTaskID = 1; 44 | this.workerSourceURL = ''; 45 | this.workerConfig = { 46 | format: null, 47 | astcSupported: false, 48 | etcSupported: false, 49 | dxtSupported: false, 50 | pvrtcSupported: false, 51 | }; 52 | 53 | }; 54 | 55 | BasisTextureLoader.prototype = Object.assign( Object.create( Loader.prototype ), { 56 | 57 | constructor: BasisTextureLoader, 58 | 59 | setTranscoderPath: function ( path ) { 60 | 61 | this.transcoderPath = path; 62 | 63 | return this; 64 | 65 | }, 66 | 67 | setWorkerLimit: function ( workerLimit ) { 68 | 69 | this.workerLimit = workerLimit; 70 | 71 | return this; 72 | 73 | }, 74 | 75 | detectSupport: function ( renderer ) { 76 | 77 | var config = this.workerConfig; 78 | 79 | config.astcSupported = !! renderer.extensions.get( 'WEBGL_compressed_texture_astc' ); 80 | config.etcSupported = !! renderer.extensions.get( 'WEBGL_compressed_texture_etc1' ); 81 | config.dxtSupported = !! renderer.extensions.get( 'WEBGL_compressed_texture_s3tc' ); 82 | config.pvrtcSupported = !! renderer.extensions.get( 'WEBGL_compressed_texture_pvrtc' ) 83 | || !! 
renderer.extensions.get( 'WEBKIT_WEBGL_compressed_texture_pvrtc' ); 84 | 85 | if ( config.astcSupported ) { 86 | 87 | config.format = BasisTextureLoader.BASIS_FORMAT.cTFASTC_4x4; 88 | 89 | } else if ( config.dxtSupported ) { 90 | 91 | config.format = BasisTextureLoader.BASIS_FORMAT.cTFBC3; 92 | 93 | } else if ( config.pvrtcSupported ) { 94 | 95 | config.format = BasisTextureLoader.BASIS_FORMAT.cTFPVRTC1_4_RGBA; 96 | 97 | } else if ( config.etcSupported ) { 98 | 99 | config.format = BasisTextureLoader.BASIS_FORMAT.cTFETC1; 100 | 101 | } else { 102 | 103 | throw new Error( 'THREE.BasisTextureLoader: No suitable compressed texture format found.' ); 104 | 105 | } 106 | 107 | return this; 108 | 109 | }, 110 | 111 | load: function ( url, onLoad, onProgress, onError ) { 112 | 113 | var loader = new FileLoader( this.manager ); 114 | 115 | loader.setResponseType( 'arraybuffer' ); 116 | 117 | loader.load( url, ( buffer ) => { 118 | 119 | this._createTexture( buffer ) 120 | .then( onLoad ) 121 | .catch( onError ); 122 | 123 | }, onProgress, onError ); 124 | 125 | }, 126 | 127 | /** 128 | * @param {ArrayBuffer} buffer 129 | * @return {Promise} 130 | */ 131 | _createTexture: function ( buffer ) { 132 | 133 | var worker; 134 | var taskID; 135 | 136 | var texturePending = this._getWorker() 137 | .then( ( _worker ) => { 138 | 139 | worker = _worker; 140 | taskID = this.workerNextTaskID ++; 141 | 142 | return new Promise( ( resolve, reject ) => { 143 | 144 | worker._callbacks[ taskID ] = { resolve, reject }; 145 | worker._taskCosts[ taskID ] = buffer.byteLength; 146 | worker._taskLoad += worker._taskCosts[ taskID ]; 147 | 148 | worker.postMessage( { type: 'transcode', id: taskID, buffer }, [ buffer ] ); 149 | 150 | } ); 151 | 152 | } ) 153 | .then( ( message ) => { 154 | 155 | var config = this.workerConfig; 156 | 157 | var { width, height, mipmaps, format } = message; 158 | 159 | var texture; 160 | 161 | switch ( format ) { 162 | 163 | case BasisTextureLoader.BASIS_FORMAT.cTFASTC_4x4: 164 | texture = new CompressedTexture( mipmaps, width, height, RGBA_ASTC_4x4_Format ); 165 | break; 166 | case BasisTextureLoader.BASIS_FORMAT.cTFBC1: 167 | case BasisTextureLoader.BASIS_FORMAT.cTFBC3: 168 | texture = new CompressedTexture( mipmaps, width, height, BasisTextureLoader.DXT_FORMAT_MAP[ config.format ], UnsignedByteType ); 169 | break; 170 | case BasisTextureLoader.BASIS_FORMAT.cTFETC1: 171 | texture = new CompressedTexture( mipmaps, width, height, RGB_ETC1_Format ); 172 | break; 173 | case BasisTextureLoader.BASIS_FORMAT.cTFPVRTC1_4_RGB: 174 | texture = new CompressedTexture( mipmaps, width, height, RGB_PVRTC_4BPPV1_Format ); 175 | break; 176 | case BasisTextureLoader.BASIS_FORMAT.cTFPVRTC1_4_RGBA: 177 | texture = new CompressedTexture( mipmaps, width, height, RGBA_PVRTC_4BPPV1_Format ); 178 | break; 179 | default: 180 | throw new Error( 'THREE.BasisTextureLoader: No supported format available.' ); 181 | 182 | } 183 | 184 | texture.minFilter = mipmaps.length === 1 ? 
LinearFilter : LinearMipmapLinearFilter; 185 | texture.magFilter = LinearFilter; 186 | texture.generateMipmaps = false; 187 | texture.needsUpdate = true; 188 | 189 | return texture; 190 | 191 | } ); 192 | 193 | texturePending 194 | .finally( () => { 195 | 196 | if ( worker && taskID ) { 197 | 198 | worker._taskLoad -= worker._taskCosts[ taskID ]; 199 | delete worker._callbacks[ taskID ]; 200 | delete worker._taskCosts[ taskID ]; 201 | 202 | } 203 | 204 | } ); 205 | 206 | return texturePending; 207 | 208 | }, 209 | 210 | _initTranscoder: function () { 211 | 212 | if ( ! this.transcoderBinary ) { 213 | 214 | // Load transcoder wrapper. 215 | var jsLoader = new FileLoader( this.manager ); 216 | jsLoader.setPath( this.transcoderPath ); 217 | var jsContent = new Promise( ( resolve, reject ) => { 218 | 219 | jsLoader.load( 'basis_transcoder.js', resolve, undefined, reject ); 220 | 221 | } ); 222 | 223 | // Load transcoder WASM binary. 224 | var binaryLoader = new FileLoader( this.manager ); 225 | binaryLoader.setPath( this.transcoderPath ); 226 | binaryLoader.setResponseType( 'arraybuffer' ); 227 | var binaryContent = new Promise( ( resolve, reject ) => { 228 | 229 | binaryLoader.load( 'basis_transcoder.wasm', resolve, undefined, reject ); 230 | 231 | } ); 232 | 233 | this.transcoderPending = Promise.all( [ jsContent, binaryContent ] ) 234 | .then( ( [ jsContent, binaryContent ] ) => { 235 | 236 | var fn = BasisTextureLoader.BasisWorker.toString(); 237 | 238 | var body = [ 239 | '/* basis_transcoder.js */', 240 | jsContent, 241 | '/* worker */', 242 | fn.substring( fn.indexOf( '{' ) + 1, fn.lastIndexOf( '}' ) ) 243 | ].join( '\n' ); 244 | 245 | this.workerSourceURL = URL.createObjectURL( new Blob( [ body ] ) ); 246 | this.transcoderBinary = binaryContent; 247 | 248 | } ); 249 | 250 | } 251 | 252 | return this.transcoderPending; 253 | 254 | }, 255 | 256 | _getWorker: function () { 257 | 258 | return this._initTranscoder().then( () => { 259 | 260 | if ( this.workerPool.length < this.workerLimit ) { 261 | 262 | var worker = new Worker( this.workerSourceURL ); 263 | 264 | worker._callbacks = {}; 265 | worker._taskCosts = {}; 266 | worker._taskLoad = 0; 267 | 268 | worker.postMessage( { 269 | type: 'init', 270 | config: this.workerConfig, 271 | transcoderBinary: this.transcoderBinary, 272 | } ); 273 | 274 | worker.onmessage = function ( e ) { 275 | 276 | var message = e.data; 277 | 278 | switch ( message.type ) { 279 | 280 | case 'transcode': 281 | worker._callbacks[ message.id ].resolve( message ); 282 | break; 283 | 284 | case 'error': 285 | worker._callbacks[ message.id ].reject( message ); 286 | break; 287 | 288 | default: 289 | console.error( 'THREE.BasisTextureLoader: Unexpected message, "' + message.type + '"' ); 290 | 291 | } 292 | 293 | }; 294 | 295 | this.workerPool.push( worker ); 296 | 297 | } else { 298 | 299 | this.workerPool.sort( function ( a, b ) { 300 | 301 | return a._taskLoad > b._taskLoad ? 
- 1 : 1; 302 | 303 | } ); 304 | 305 | } 306 | 307 | return this.workerPool[ this.workerPool.length - 1 ]; 308 | 309 | } ); 310 | 311 | }, 312 | 313 | dispose: function () { 314 | 315 | for ( var i = 0; i < this.workerPool.length; i ++ ) { 316 | 317 | this.workerPool[ i ].terminate(); 318 | 319 | } 320 | 321 | this.workerPool.length = 0; 322 | 323 | return this; 324 | 325 | } 326 | 327 | } ); 328 | 329 | /* CONSTANTS */ 330 | 331 | BasisTextureLoader.BASIS_FORMAT = { 332 | cTFETC1: 0, 333 | cTFETC2: 1, 334 | cTFBC1: 2, 335 | cTFBC3: 3, 336 | cTFBC4: 4, 337 | cTFBC5: 5, 338 | cTFBC7_M6_OPAQUE_ONLY: 6, 339 | cTFBC7_M5: 7, 340 | cTFPVRTC1_4_RGB: 8, 341 | cTFPVRTC1_4_RGBA: 9, 342 | cTFASTC_4x4: 10, 343 | cTFATC_RGB: 11, 344 | cTFATC_RGBA_INTERPOLATED_ALPHA: 12, 345 | cTFRGBA32: 13, 346 | cTFRGB565: 14, 347 | cTFBGR565: 15, 348 | cTFRGBA4444: 16, 349 | }; 350 | 351 | // DXT formats, from: 352 | // http://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_s3tc/ 353 | BasisTextureLoader.DXT_FORMAT = { 354 | COMPRESSED_RGB_S3TC_DXT1_EXT: 0x83F0, 355 | COMPRESSED_RGBA_S3TC_DXT1_EXT: 0x83F1, 356 | COMPRESSED_RGBA_S3TC_DXT3_EXT: 0x83F2, 357 | COMPRESSED_RGBA_S3TC_DXT5_EXT: 0x83F3, 358 | }; 359 | BasisTextureLoader.DXT_FORMAT_MAP = {}; 360 | BasisTextureLoader.DXT_FORMAT_MAP[ BasisTextureLoader.BASIS_FORMAT.cTFBC1 ] = 361 | BasisTextureLoader.DXT_FORMAT.COMPRESSED_RGB_S3TC_DXT1_EXT; 362 | BasisTextureLoader.DXT_FORMAT_MAP[ BasisTextureLoader.BASIS_FORMAT.cTFBC3 ] = 363 | BasisTextureLoader.DXT_FORMAT.COMPRESSED_RGBA_S3TC_DXT5_EXT; 364 | 365 | /* WEB WORKER */ 366 | 367 | BasisTextureLoader.BasisWorker = function () { 368 | 369 | var config; 370 | var transcoderPending; 371 | var _BasisFile; 372 | 373 | onmessage = function ( e ) { 374 | 375 | var message = e.data; 376 | 377 | switch ( message.type ) { 378 | 379 | case 'init': 380 | config = message.config; 381 | init( message.transcoderBinary ); 382 | break; 383 | 384 | case 'transcode': 385 | transcoderPending.then( () => { 386 | 387 | try { 388 | 389 | var { width, height, hasAlpha, mipmaps, format } = transcode( message.buffer ); 390 | 391 | var buffers = []; 392 | 393 | for ( var i = 0; i < mipmaps.length; ++ i ) { 394 | 395 | buffers.push( mipmaps[ i ].data.buffer ); 396 | 397 | } 398 | 399 | self.postMessage( { type: 'transcode', id: message.id, width, height, hasAlpha, mipmaps, format }, buffers ); 400 | 401 | } catch ( error ) { 402 | 403 | console.error( error ); 404 | 405 | self.postMessage( { type: 'error', id: message.id, error: error.message } ); 406 | 407 | } 408 | 409 | } ); 410 | break; 411 | 412 | } 413 | 414 | }; 415 | 416 | function init( wasmBinary ) { 417 | 418 | var BasisModule; 419 | transcoderPending = new Promise( ( resolve ) => { 420 | 421 | BasisModule = { wasmBinary, onRuntimeInitialized: resolve }; 422 | BASIS( BasisModule ); 423 | 424 | } ).then( () => { 425 | 426 | var { BasisFile, initializeBasis } = BasisModule; 427 | 428 | _BasisFile = BasisFile; 429 | 430 | initializeBasis(); 431 | 432 | } ); 433 | 434 | } 435 | 436 | function transcode( buffer ) { 437 | 438 | var basisFile = new _BasisFile( new Uint8Array( buffer ) ); 439 | 440 | var width = basisFile.getImageWidth( 0, 0 ); 441 | var height = basisFile.getImageHeight( 0, 0 ); 442 | var levels = basisFile.getNumLevels( 0 ); 443 | var hasAlpha = basisFile.getHasAlpha(); 444 | 445 | function cleanup() { 446 | 447 | basisFile.close(); 448 | basisFile.delete(); 449 | 450 | } 451 | 452 | if ( ! 
hasAlpha ) { 453 | 454 | switch ( config.format ) { 455 | 456 | case 9: // Hardcoded: BasisTextureLoader.BASIS_FORMAT.cTFPVRTC1_4_RGBA 457 | config.format = 8; // Hardcoded: BasisTextureLoader.BASIS_FORMAT.cTFPVRTC1_4_RGB; 458 | break; 459 | default: 460 | break; 461 | 462 | } 463 | 464 | } 465 | 466 | if ( ! width || ! height || ! levels ) { 467 | 468 | cleanup(); 469 | throw new Error( 'THREE.BasisTextureLoader: Invalid .basis file' ); 470 | 471 | } 472 | 473 | if ( ! basisFile.startTranscoding() ) { 474 | 475 | cleanup(); 476 | throw new Error( 'THREE.BasisTextureLoader: .startTranscoding failed' ); 477 | 478 | } 479 | 480 | var mipmaps = []; 481 | 482 | for ( var mip = 0; mip < levels; mip ++ ) { 483 | 484 | var mipWidth = basisFile.getImageWidth( 0, mip ); 485 | var mipHeight = basisFile.getImageHeight( 0, mip ); 486 | var dst = new Uint8Array( basisFile.getImageTranscodedSizeInBytes( 0, mip, config.format ) ); 487 | 488 | var status = basisFile.transcodeImage( 489 | dst, 490 | 0, 491 | mip, 492 | config.format, 493 | 0, 494 | hasAlpha 495 | ); 496 | 497 | if ( ! status ) { 498 | 499 | cleanup(); 500 | throw new Error( 'THREE.BasisTextureLoader: .transcodeImage failed.' ); 501 | 502 | } 503 | 504 | mipmaps.push( { data: dst, width: mipWidth, height: mipHeight } ); 505 | 506 | } 507 | 508 | cleanup(); 509 | 510 | return { width, height, hasAlpha, mipmaps, format: config.format }; 511 | 512 | } 513 | 514 | }; 515 | 516 | export { BasisTextureLoader }; 517 | -------------------------------------------------------------------------------- /examples/madrid/src/loader/OBJLoader.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @author mrdoob / http://mrdoob.com/ 3 | */ 4 | import { THREE} from "../../../../dist/three-effects.js"; 5 | 6 | const { 7 | BufferGeometry, 8 | FileLoader, 9 | Float32BufferAttribute, 10 | Group, 11 | LineBasicMaterial, 12 | LineSegments, 13 | Loader, 14 | Material, 15 | Mesh, 16 | MeshPhongMaterial, 17 | NoColors, 18 | Points, 19 | PointsMaterial, 20 | VertexColors 21 | } = THREE; 22 | 23 | var OBJLoader = ( function () { 24 | 25 | // o object_name | g group_name 26 | var object_pattern = /^[og]\s*(.+)?/; 27 | // mtllib file_reference 28 | var material_library_pattern = /^mtllib /; 29 | // usemtl material_name 30 | var material_use_pattern = /^usemtl /; 31 | 32 | function ParserState() { 33 | 34 | var state = { 35 | objects: [], 36 | object: {}, 37 | 38 | vertices: [], 39 | normals: [], 40 | colors: [], 41 | uvs: [], 42 | 43 | materialLibraries: [], 44 | 45 | startObject: function ( name, fromDeclaration ) { 46 | 47 | // If the current object (initial from reset) is not from a g/o declaration in the parsed 48 | // file. We need to use it for the first parsed g/o to keep things in sync. 49 | if ( this.object && this.object.fromDeclaration === false ) { 50 | 51 | this.object.name = name; 52 | this.object.fromDeclaration = ( fromDeclaration !== false ); 53 | return; 54 | 55 | } 56 | 57 | var previousMaterial = ( this.object && typeof this.object.currentMaterial === 'function' ? 
this.object.currentMaterial() : undefined ); 58 | 59 | if ( this.object && typeof this.object._finalize === 'function' ) { 60 | 61 | this.object._finalize( true ); 62 | 63 | } 64 | 65 | this.object = { 66 | name: name || '', 67 | fromDeclaration: ( fromDeclaration !== false ), 68 | 69 | geometry: { 70 | vertices: [], 71 | normals: [], 72 | colors: [], 73 | uvs: [] 74 | }, 75 | materials: [], 76 | smooth: true, 77 | 78 | startMaterial: function ( name, libraries ) { 79 | 80 | var previous = this._finalize( false ); 81 | 82 | // New usemtl declaration overwrites an inherited material, except if faces were declared 83 | // after the material, then it must be preserved for proper MultiMaterial continuation. 84 | if ( previous && ( previous.inherited || previous.groupCount <= 0 ) ) { 85 | 86 | this.materials.splice( previous.index, 1 ); 87 | 88 | } 89 | 90 | var material = { 91 | index: this.materials.length, 92 | name: name || '', 93 | mtllib: ( Array.isArray( libraries ) && libraries.length > 0 ? libraries[ libraries.length - 1 ] : '' ), 94 | smooth: ( previous !== undefined ? previous.smooth : this.smooth ), 95 | groupStart: ( previous !== undefined ? previous.groupEnd : 0 ), 96 | groupEnd: - 1, 97 | groupCount: - 1, 98 | inherited: false, 99 | 100 | clone: function ( index ) { 101 | 102 | var cloned = { 103 | index: ( typeof index === 'number' ? index : this.index ), 104 | name: this.name, 105 | mtllib: this.mtllib, 106 | smooth: this.smooth, 107 | groupStart: 0, 108 | groupEnd: - 1, 109 | groupCount: - 1, 110 | inherited: false 111 | }; 112 | cloned.clone = this.clone.bind( cloned ); 113 | return cloned; 114 | 115 | } 116 | }; 117 | 118 | this.materials.push( material ); 119 | 120 | return material; 121 | 122 | }, 123 | 124 | currentMaterial: function () { 125 | 126 | if ( this.materials.length > 0 ) { 127 | 128 | return this.materials[ this.materials.length - 1 ]; 129 | 130 | } 131 | 132 | return undefined; 133 | 134 | }, 135 | 136 | _finalize: function ( end ) { 137 | 138 | var lastMultiMaterial = this.currentMaterial(); 139 | if ( lastMultiMaterial && lastMultiMaterial.groupEnd === - 1 ) { 140 | 141 | lastMultiMaterial.groupEnd = this.geometry.vertices.length / 3; 142 | lastMultiMaterial.groupCount = lastMultiMaterial.groupEnd - lastMultiMaterial.groupStart; 143 | lastMultiMaterial.inherited = false; 144 | 145 | } 146 | 147 | // Ignore objects tail materials if no face declarations followed them before a new o/g started. 148 | if ( end && this.materials.length > 1 ) { 149 | 150 | for ( var mi = this.materials.length - 1; mi >= 0; mi -- ) { 151 | 152 | if ( this.materials[ mi ].groupCount <= 0 ) { 153 | 154 | this.materials.splice( mi, 1 ); 155 | 156 | } 157 | 158 | } 159 | 160 | } 161 | 162 | // Guarantee at least one empty material, this makes the creation later more straight forward. 163 | if ( end && this.materials.length === 0 ) { 164 | 165 | this.materials.push( { 166 | name: '', 167 | smooth: this.smooth 168 | } ); 169 | 170 | } 171 | 172 | return lastMultiMaterial; 173 | 174 | } 175 | }; 176 | 177 | // Inherit previous objects material. 178 | // Spec tells us that a declared material must be set to all objects until a new material is declared. 179 | // If a usemtl declaration is encountered while this new object is being parsed, it will 180 | // overwrite the inherited material. Exception being that there was already face declarations 181 | // to the inherited material, then it will be preserved for proper MultiMaterial continuation. 
182 | 183 | if ( previousMaterial && previousMaterial.name && typeof previousMaterial.clone === 'function' ) { 184 | 185 | var declared = previousMaterial.clone( 0 ); 186 | declared.inherited = true; 187 | this.object.materials.push( declared ); 188 | 189 | } 190 | 191 | this.objects.push( this.object ); 192 | 193 | }, 194 | 195 | finalize: function () { 196 | 197 | if ( this.object && typeof this.object._finalize === 'function' ) { 198 | 199 | this.object._finalize( true ); 200 | 201 | } 202 | 203 | }, 204 | 205 | parseVertexIndex: function ( value, len ) { 206 | 207 | var index = parseInt( value, 10 ); 208 | return ( index >= 0 ? index - 1 : index + len / 3 ) * 3; 209 | 210 | }, 211 | 212 | parseNormalIndex: function ( value, len ) { 213 | 214 | var index = parseInt( value, 10 ); 215 | return ( index >= 0 ? index - 1 : index + len / 3 ) * 3; 216 | 217 | }, 218 | 219 | parseUVIndex: function ( value, len ) { 220 | 221 | var index = parseInt( value, 10 ); 222 | return ( index >= 0 ? index - 1 : index + len / 2 ) * 2; 223 | 224 | }, 225 | 226 | addVertex: function ( a, b, c ) { 227 | 228 | var src = this.vertices; 229 | var dst = this.object.geometry.vertices; 230 | 231 | dst.push( src[ a + 0 ], src[ a + 1 ], src[ a + 2 ] ); 232 | dst.push( src[ b + 0 ], src[ b + 1 ], src[ b + 2 ] ); 233 | dst.push( src[ c + 0 ], src[ c + 1 ], src[ c + 2 ] ); 234 | 235 | }, 236 | 237 | addVertexPoint: function ( a ) { 238 | 239 | var src = this.vertices; 240 | var dst = this.object.geometry.vertices; 241 | 242 | dst.push( src[ a + 0 ], src[ a + 1 ], src[ a + 2 ] ); 243 | 244 | }, 245 | 246 | addVertexLine: function ( a ) { 247 | 248 | var src = this.vertices; 249 | var dst = this.object.geometry.vertices; 250 | 251 | dst.push( src[ a + 0 ], src[ a + 1 ], src[ a + 2 ] ); 252 | 253 | }, 254 | 255 | addNormal: function ( a, b, c ) { 256 | 257 | var src = this.normals; 258 | var dst = this.object.geometry.normals; 259 | 260 | dst.push( src[ a + 0 ], src[ a + 1 ], src[ a + 2 ] ); 261 | dst.push( src[ b + 0 ], src[ b + 1 ], src[ b + 2 ] ); 262 | dst.push( src[ c + 0 ], src[ c + 1 ], src[ c + 2 ] ); 263 | 264 | }, 265 | 266 | addColor: function ( a, b, c ) { 267 | 268 | var src = this.colors; 269 | var dst = this.object.geometry.colors; 270 | 271 | dst.push( src[ a + 0 ], src[ a + 1 ], src[ a + 2 ] ); 272 | dst.push( src[ b + 0 ], src[ b + 1 ], src[ b + 2 ] ); 273 | dst.push( src[ c + 0 ], src[ c + 1 ], src[ c + 2 ] ); 274 | 275 | }, 276 | 277 | addUV: function ( a, b, c ) { 278 | 279 | var src = this.uvs; 280 | var dst = this.object.geometry.uvs; 281 | 282 | dst.push( src[ a + 0 ], src[ a + 1 ] ); 283 | dst.push( src[ b + 0 ], src[ b + 1 ] ); 284 | dst.push( src[ c + 0 ], src[ c + 1 ] ); 285 | 286 | }, 287 | 288 | addUVLine: function ( a ) { 289 | 290 | var src = this.uvs; 291 | var dst = this.object.geometry.uvs; 292 | 293 | dst.push( src[ a + 0 ], src[ a + 1 ] ); 294 | 295 | }, 296 | 297 | addFace: function ( a, b, c, ua, ub, uc, na, nb, nc ) { 298 | 299 | var vLen = this.vertices.length; 300 | 301 | var ia = this.parseVertexIndex( a, vLen ); 302 | var ib = this.parseVertexIndex( b, vLen ); 303 | var ic = this.parseVertexIndex( c, vLen ); 304 | 305 | this.addVertex( ia, ib, ic ); 306 | 307 | if ( this.colors.length > 0 ) { 308 | 309 | this.addColor( ia, ib, ic ); 310 | 311 | } 312 | 313 | if ( ua !== undefined && ua !== '' ) { 314 | 315 | var uvLen = this.uvs.length; 316 | ia = this.parseUVIndex( ua, uvLen ); 317 | ib = this.parseUVIndex( ub, uvLen ); 318 | ic = this.parseUVIndex( uc, uvLen ); 319 | this.addUV( 
ia, ib, ic ); 320 | 321 | } 322 | 323 | if ( na !== undefined && na !== '' ) { 324 | 325 | // Normals are many times the same. If so, skip function call and parseInt. 326 | var nLen = this.normals.length; 327 | ia = this.parseNormalIndex( na, nLen ); 328 | 329 | ib = na === nb ? ia : this.parseNormalIndex( nb, nLen ); 330 | ic = na === nc ? ia : this.parseNormalIndex( nc, nLen ); 331 | 332 | this.addNormal( ia, ib, ic ); 333 | 334 | } 335 | 336 | }, 337 | 338 | addPointGeometry: function ( vertices ) { 339 | 340 | this.object.geometry.type = 'Points'; 341 | 342 | var vLen = this.vertices.length; 343 | 344 | for ( var vi = 0, l = vertices.length; vi < l; vi ++ ) { 345 | 346 | this.addVertexPoint( this.parseVertexIndex( vertices[ vi ], vLen ) ); 347 | 348 | } 349 | 350 | }, 351 | 352 | addLineGeometry: function ( vertices, uvs ) { 353 | 354 | this.object.geometry.type = 'Line'; 355 | 356 | var vLen = this.vertices.length; 357 | var uvLen = this.uvs.length; 358 | 359 | for ( var vi = 0, l = vertices.length; vi < l; vi ++ ) { 360 | 361 | this.addVertexLine( this.parseVertexIndex( vertices[ vi ], vLen ) ); 362 | 363 | } 364 | 365 | for ( var uvi = 0, l = uvs.length; uvi < l; uvi ++ ) { 366 | 367 | this.addUVLine( this.parseUVIndex( uvs[ uvi ], uvLen ) ); 368 | 369 | } 370 | 371 | } 372 | 373 | }; 374 | 375 | state.startObject( '', false ); 376 | 377 | return state; 378 | 379 | } 380 | 381 | // 382 | 383 | function OBJLoader( manager ) { 384 | 385 | Loader.call( this, manager ); 386 | 387 | this.materials = null; 388 | 389 | } 390 | 391 | OBJLoader.prototype = Object.assign( Object.create( Loader.prototype ), { 392 | 393 | constructor: OBJLoader, 394 | 395 | load: function ( url, onLoad, onProgress, onError ) { 396 | 397 | var scope = this; 398 | 399 | var loader = new FileLoader( scope.manager ); 400 | loader.setPath( this.path ); 401 | loader.load( url, function ( text ) { 402 | 403 | onLoad( scope.parse( text ) ); 404 | 405 | }, onProgress, onError ); 406 | 407 | }, 408 | 409 | setMaterials: function ( materials ) { 410 | 411 | this.materials = materials; 412 | 413 | return this; 414 | 415 | }, 416 | 417 | parse: function ( text ) { 418 | 419 | console.time( 'OBJLoader' ); 420 | 421 | var state = new ParserState(); 422 | 423 | if ( text.indexOf( '\r\n' ) !== - 1 ) { 424 | 425 | // This is faster than String.split with regex that splits on both 426 | text = text.replace( /\r\n/g, '\n' ); 427 | 428 | } 429 | 430 | if ( text.indexOf( '\\\n' ) !== - 1 ) { 431 | 432 | // join lines separated by a line continuation character (\) 433 | text = text.replace( /\\\n/g, '' ); 434 | 435 | } 436 | 437 | var lines = text.split( '\n' ); 438 | var line = '', lineFirstChar = ''; 439 | var lineLength = 0; 440 | var result = []; 441 | 442 | // Faster to just trim left side of the line. Use if available. 443 | var trimLeft = ( typeof ''.trimLeft === 'function' ); 444 | 445 | for ( var i = 0, l = lines.length; i < l; i ++ ) { 446 | 447 | line = lines[ i ]; 448 | 449 | line = trimLeft ? 
line.trimLeft() : line.trim(); 450 | 451 | lineLength = line.length; 452 | 453 | if ( lineLength === 0 ) continue; 454 | 455 | lineFirstChar = line.charAt( 0 ); 456 | 457 | // @todo invoke passed in handler if any 458 | if ( lineFirstChar === '#' ) continue; 459 | 460 | if ( lineFirstChar === 'v' ) { 461 | 462 | var data = line.split( /\s+/ ); 463 | 464 | switch ( data[ 0 ] ) { 465 | 466 | case 'v': 467 | state.vertices.push( 468 | parseFloat( data[ 1 ] ), 469 | parseFloat( data[ 2 ] ), 470 | parseFloat( data[ 3 ] ) 471 | ); 472 | if ( data.length >= 7 ) { 473 | 474 | state.colors.push( 475 | parseFloat( data[ 4 ] ), 476 | parseFloat( data[ 5 ] ), 477 | parseFloat( data[ 6 ] ) 478 | 479 | ); 480 | 481 | } 482 | break; 483 | case 'vn': 484 | state.normals.push( 485 | parseFloat( data[ 1 ] ), 486 | parseFloat( data[ 2 ] ), 487 | parseFloat( data[ 3 ] ) 488 | ); 489 | break; 490 | case 'vt': 491 | state.uvs.push( 492 | parseFloat( data[ 1 ] ), 493 | parseFloat( data[ 2 ] ) 494 | ); 495 | break; 496 | 497 | } 498 | 499 | } else if ( lineFirstChar === 'f' ) { 500 | 501 | var lineData = line.substr( 1 ).trim(); 502 | var vertexData = lineData.split( /\s+/ ); 503 | var faceVertices = []; 504 | 505 | // Parse the face vertex data into an easy to work with format 506 | 507 | for ( var j = 0, jl = vertexData.length; j < jl; j ++ ) { 508 | 509 | var vertex = vertexData[ j ]; 510 | 511 | if ( vertex.length > 0 ) { 512 | 513 | var vertexParts = vertex.split( '/' ); 514 | faceVertices.push( vertexParts ); 515 | 516 | } 517 | 518 | } 519 | 520 | // Draw an edge between the first vertex and all subsequent vertices to form an n-gon 521 | 522 | var v1 = faceVertices[ 0 ]; 523 | 524 | for ( var j = 1, jl = faceVertices.length - 1; j < jl; j ++ ) { 525 | 526 | var v2 = faceVertices[ j ]; 527 | var v3 = faceVertices[ j + 1 ]; 528 | 529 | state.addFace( 530 | v1[ 0 ], v2[ 0 ], v3[ 0 ], 531 | v1[ 1 ], v2[ 1 ], v3[ 1 ], 532 | v1[ 2 ], v2[ 2 ], v3[ 2 ] 533 | ); 534 | 535 | } 536 | 537 | } else if ( lineFirstChar === 'l' ) { 538 | 539 | var lineParts = line.substring( 1 ).trim().split( " " ); 540 | var lineVertices = [], lineUVs = []; 541 | 542 | if ( line.indexOf( "/" ) === - 1 ) { 543 | 544 | lineVertices = lineParts; 545 | 546 | } else { 547 | 548 | for ( var li = 0, llen = lineParts.length; li < llen; li ++ ) { 549 | 550 | var parts = lineParts[ li ].split( "/" ); 551 | 552 | if ( parts[ 0 ] !== "" ) lineVertices.push( parts[ 0 ] ); 553 | if ( parts[ 1 ] !== "" ) lineUVs.push( parts[ 1 ] ); 554 | 555 | } 556 | 557 | } 558 | state.addLineGeometry( lineVertices, lineUVs ); 559 | 560 | } else if ( lineFirstChar === 'p' ) { 561 | 562 | var lineData = line.substr( 1 ).trim(); 563 | var pointData = lineData.split( " " ); 564 | 565 | state.addPointGeometry( pointData ); 566 | 567 | } else if ( ( result = object_pattern.exec( line ) ) !== null ) { 568 | 569 | // o object_name 570 | // or 571 | // g group_name 572 | 573 | // WORKAROUND: https://bugs.chromium.org/p/v8/issues/detail?id=2869 574 | // var name = result[ 0 ].substr( 1 ).trim(); 575 | var name = ( " " + result[ 0 ].substr( 1 ).trim() ).substr( 1 ); 576 | 577 | state.startObject( name ); 578 | 579 | } else if ( material_use_pattern.test( line ) ) { 580 | 581 | // material 582 | 583 | state.object.startMaterial( line.substring( 7 ).trim(), state.materialLibraries ); 584 | 585 | } else if ( material_library_pattern.test( line ) ) { 586 | 587 | // mtl file 588 | 589 | state.materialLibraries.push( line.substring( 7 ).trim() ); 590 | 591 | } else if ( 
lineFirstChar === 's' ) { 592 | 593 | result = line.split( ' ' ); 594 | 595 | // smooth shading 596 | 597 | // @todo Handle files that have varying smooth values for a set of faces inside one geometry, 598 | // but does not define a usemtl for each face set. 599 | // This should be detected and a dummy material created (later MultiMaterial and geometry groups). 600 | // This requires some care to not create extra material on each smooth value for "normal" obj files. 601 | // where explicit usemtl defines geometry groups. 602 | // Example asset: examples/models/obj/cerberus/Cerberus.obj 603 | 604 | /* 605 | * http://paulbourke.net/dataformats/obj/ 606 | * or 607 | * http://www.cs.utah.edu/~boulos/cs3505/obj_spec.pdf 608 | * 609 | * From chapter "Grouping" Syntax explanation "s group_number": 610 | * "group_number is the smoothing group number. To turn off smoothing groups, use a value of 0 or off. 611 | * Polygonal elements use group numbers to put elements in different smoothing groups. For free-form 612 | * surfaces, smoothing groups are either turned on or off; there is no difference between values greater 613 | * than 0." 614 | */ 615 | if ( result.length > 1 ) { 616 | 617 | var value = result[ 1 ].trim().toLowerCase(); 618 | state.object.smooth = ( value !== '0' && value !== 'off' ); 619 | 620 | } else { 621 | 622 | // ZBrush can produce "s" lines #11707 623 | state.object.smooth = true; 624 | 625 | } 626 | var material = state.object.currentMaterial(); 627 | if ( material ) material.smooth = state.object.smooth; 628 | 629 | } else { 630 | 631 | // Handle null terminated files without exception 632 | if ( line === '\0' ) continue; 633 | 634 | throw new Error( 'THREE.OBJLoader: Unexpected line: "' + line + '"' ); 635 | 636 | } 637 | 638 | } 639 | 640 | state.finalize(); 641 | 642 | var container = new Group(); 643 | container.materialLibraries = [].concat( state.materialLibraries ); 644 | 645 | for ( var i = 0, l = state.objects.length; i < l; i ++ ) { 646 | 647 | var object = state.objects[ i ]; 648 | var geometry = object.geometry; 649 | var materials = object.materials; 650 | var isLine = ( geometry.type === 'Line' ); 651 | var isPoints = ( geometry.type === 'Points' ); 652 | var hasVertexColors = false; 653 | 654 | // Skip o/g line declarations that did not follow with any faces 655 | if ( geometry.vertices.length === 0 ) continue; 656 | 657 | var buffergeometry = new BufferGeometry(); 658 | 659 | buffergeometry.addAttribute( 'position', new Float32BufferAttribute( geometry.vertices, 3 ) ); 660 | 661 | if ( geometry.normals.length > 0 ) { 662 | 663 | buffergeometry.addAttribute( 'normal', new Float32BufferAttribute( geometry.normals, 3 ) ); 664 | 665 | } else { 666 | 667 | buffergeometry.computeVertexNormals(); 668 | 669 | } 670 | 671 | if ( geometry.colors.length > 0 ) { 672 | 673 | hasVertexColors = true; 674 | buffergeometry.addAttribute( 'color', new Float32BufferAttribute( geometry.colors, 3 ) ); 675 | 676 | } 677 | 678 | if ( geometry.uvs.length > 0 ) { 679 | 680 | buffergeometry.addAttribute( 'uv', new Float32BufferAttribute( geometry.uvs, 2 ) ); 681 | 682 | } 683 | 684 | // Create materials 685 | 686 | var createdMaterials = []; 687 | 688 | for ( var mi = 0, miLen = materials.length; mi < miLen; mi ++ ) { 689 | 690 | var sourceMaterial = materials[ mi ]; 691 | var material = undefined; 692 | 693 | if ( this.materials !== null ) { 694 | 695 | material = this.materials.create( sourceMaterial.name ); 696 | 697 | // mtl etc. 
loaders probably can't create line materials correctly, copy properties to a line material. 698 | if ( isLine && material && ! ( material instanceof LineBasicMaterial ) ) { 699 | 700 | var materialLine = new LineBasicMaterial(); 701 | Material.prototype.copy.call( materialLine, material ); 702 | materialLine.color.copy( material.color ); 703 | material = materialLine; 704 | 705 | } else if ( isPoints && material && ! ( material instanceof PointsMaterial ) ) { 706 | 707 | var materialPoints = new PointsMaterial( { size: 10, sizeAttenuation: false } ); 708 | Material.prototype.copy.call( materialPoints, material ); 709 | materialPoints.color.copy( material.color ); 710 | materialPoints.map = material.map; 711 | material = materialPoints; 712 | 713 | } 714 | 715 | } 716 | 717 | if ( ! material ) { 718 | 719 | if ( isLine ) { 720 | 721 | material = new LineBasicMaterial(); 722 | 723 | } else if ( isPoints ) { 724 | 725 | material = new PointsMaterial( { size: 1, sizeAttenuation: false } ); 726 | 727 | } else { 728 | 729 | material = new MeshPhongMaterial(); 730 | 731 | } 732 | 733 | material.name = sourceMaterial.name; 734 | 735 | } 736 | 737 | material.flatShading = sourceMaterial.smooth ? false : true; 738 | material.vertexColors = hasVertexColors ? VertexColors : NoColors; 739 | 740 | createdMaterials.push( material ); 741 | 742 | } 743 | 744 | // Create mesh 745 | 746 | var mesh; 747 | 748 | if ( createdMaterials.length > 1 ) { 749 | 750 | for ( var mi = 0, miLen = materials.length; mi < miLen; mi ++ ) { 751 | 752 | var sourceMaterial = materials[ mi ]; 753 | buffergeometry.addGroup( sourceMaterial.groupStart, sourceMaterial.groupCount, mi ); 754 | 755 | } 756 | 757 | if ( isLine ) { 758 | 759 | mesh = new LineSegments( buffergeometry, createdMaterials ); 760 | 761 | } else if ( isPoints ) { 762 | 763 | mesh = new Points( buffergeometry, createdMaterials ); 764 | 765 | } else { 766 | 767 | mesh = new Mesh( buffergeometry, createdMaterials ); 768 | 769 | } 770 | 771 | } else { 772 | 773 | if ( isLine ) { 774 | 775 | mesh = new LineSegments( buffergeometry, createdMaterials[ 0 ] ); 776 | 777 | } else if ( isPoints ) { 778 | 779 | mesh = new Points( buffergeometry, createdMaterials[ 0 ] ); 780 | 781 | } else { 782 | 783 | mesh = new Mesh( buffergeometry, createdMaterials[ 0 ] ); 784 | 785 | } 786 | 787 | } 788 | 789 | mesh.name = object.name; 790 | 791 | container.add( mesh ); 792 | 793 | } 794 | 795 | console.timeEnd( 'OBJLoader' ); 796 | 797 | return container; 798 | 799 | } 800 | 801 | } ); 802 | 803 | return OBJLoader; 804 | 805 | } )(); 806 | 807 | export { OBJLoader }; 808 | -------------------------------------------------------------------------------- /examples/madrid/src/loader/basis_transcoder.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SamsungInternet/three-effects/abe587f906054486bcffeeb6f2ccd554f6bb0ad8/examples/madrid/src/loader/basis_transcoder.wasm -------------------------------------------------------------------------------- /examples/madrid/src/pop.js: -------------------------------------------------------------------------------- 1 | import {THREE, attachSystem} from "../../../dist/three-effects.js"; 2 | 3 | export default function(scene) { 4 | 5 | var inDuration = 600; 6 | 7 | attachSystem(scene, "popin", { 8 | init: function(e, objects, name) { 9 | var pos = new THREE.Vector3(); 10 | pos.copy(e.entity.position); 11 | e.entity.position.y += 100; 12 | return { 13 | target: pos, 14 | time: 
window.performance.now() 15 | } 16 | }, 17 | 18 | beforeRender: function (e, objects, name) { 19 | var t = window.performance.now(); 20 | var updateShadow = false; 21 | 22 | objects.forEach(function (obj) { 23 | 24 | var d = obj.userData[name]; 25 | 26 | if(d.finished) return; 27 | 28 | var pc = Math.max(0, Math.min(1, (t - d.time) / inDuration)); 29 | 30 | obj.position.lerp(d.target, pc); 31 | 32 | updateShadow = true; 33 | 34 | if(pc === 1) d.finished = true; 35 | }); 36 | 37 | if(updateShadow) e.renderer.shadowMap.needsUpdate = true; 38 | 39 | } 40 | }); 41 | 42 | var outDuration = 300; 43 | 44 | attachSystem(scene, "popout", { 45 | init: function(e, objects, name) { 46 | return { 47 | time: window.performance.now() 48 | } 49 | }, 50 | 51 | beforeRender: function (e, objects, name) { 52 | var t = window.performance.now(); 53 | var updateShadow = false; 54 | 55 | objects.slice(0).forEach(function (obj) { 56 | 57 | var d = obj.userData[name]; 58 | 59 | var pc = 1 - Math.max(0, Math.min(1, (t - d.time) / outDuration)); 60 | 61 | obj.scale.set(pc, pc, pc); 62 | 63 | updateShadow = true; 64 | 65 | if(pc === 0){ 66 | Object.keys(obj.userData).forEach(function(k) { 67 | scene.dispatchEvent({ type: k + "/unregister", entity: obj }); 68 | }); 69 | obj.parent.remove(obj); 70 | } 71 | }); 72 | 73 | if(updateShadow) e.renderer.shadowMap.needsUpdate = true; 74 | 75 | } 76 | }); 77 | 78 | } -------------------------------------------------------------------------------- /examples/madrid/src/sky.js: -------------------------------------------------------------------------------- 1 | import { THREE } from "../../../dist/three-effects.js"; 2 | 3 | import { Sky } from "./lib/Sky.js"; 4 | 5 | export default function (renderer, scene, camera, assets) { 6 | var group = new THREE.Group(); 7 | 8 | var _scene = new THREE.Scene(); 9 | 10 | var mesh = new Sky(); 11 | 12 | var cubeCamera = new THREE.CubeCamera( 1, 100000, 128 ); 13 | 14 | _scene.add( cubeCamera ); 15 | 16 | _scene.add( mesh ); 17 | 18 | var shader = THREE.ShaderLib[ "cube" ]; 19 | shader.uniforms[ "tCube" ].value = cubeCamera.renderTarget.texture; 20 | 21 | var material = new THREE.ShaderMaterial( { 22 | 23 | fragmentShader: shader.fragmentShader, 24 | vertexShader: shader.vertexShader, 25 | uniforms: shader.uniforms, 26 | depthWrite: false, 27 | side: THREE.BackSide 28 | 29 | }); 30 | 31 | var skybox = new THREE.Mesh( new THREE.BoxBufferGeometry( 1000, 1000, 1000 ), material ); 32 | 33 | group.add(skybox); 34 | 35 | var light = new THREE.DirectionalLight(new THREE.Color(0xFFFFFF), 1); 36 | 37 | light.castShadow = true; 38 | 39 | light.position.set(50, 100, 20).normalize(); 40 | 41 | group.add(light); 42 | 43 | 44 | var isHD = document.location.hash === "#hd"; 45 | 46 | light.shadow.mapSize.width = isHD ? 4096 : 2048; 47 | light.shadow.mapSize.height = isHD ? 4096 : 2048; 48 | light.shadow.type = isHD ? 
THREE.PCFSoftShadowMap : THREE.PCFShadowMap; 49 | light.shadow.camera.near = 1; 50 | light.shadow.camera.far = 200; 51 | light.shadow.camera.left = -50; 52 | light.shadow.camera.top = 50; 53 | light.shadow.camera.right = 50; 54 | light.shadow.camera.bottom = -50; 55 | light.shadow.bias = 0.00001; 56 | light.shadow.radius = 1; 57 | 58 | var hemi = new THREE.HemisphereLight(new THREE.Color(0x888899), new THREE.Color(0x776666), 1); 59 | 60 | group.add(hemi); 61 | 62 | 63 | var ambient = new THREE.AmbientLight( 0x666666 ); 64 | group.add(ambient); 65 | 66 | mesh.material.uniforms.sunPosition.value = light.position; 67 | 68 | var col = new THREE.Color(0xCC7733); 69 | 70 | var a = 1; 71 | 72 | scene.fog = new THREE.FogExp2( 0xFFFFFF,0.0066); 73 | 74 | var needsUpdate = false; 75 | 76 | var fn = function () { 77 | hemi.intensity = 0.1 + a; 78 | light.intensity = 0.1 + a; 79 | ambient.intensity = 0.1 + a; 80 | 81 | light.color.set(0xFFFFFF); 82 | light.color.lerp(col, Math.pow(1 - a, 10) ); 83 | 84 | scene.fog.color.copy(light.color).multiplyScalar(0.33); 85 | scene.userData["bloom_strength"].value = 0.1 + 0.1 * a; 86 | 87 | needsUpdate = true; 88 | 89 | var vrEnabled = renderer.vr.enabled; 90 | 91 | renderer.vr.enabled = false; 92 | 93 | //cubeCamera.update( renderer, _scene ); 94 | 95 | renderer.vr.enabled = vrEnabled; 96 | 97 | 98 | renderer.shadowMap.needsUpdate = true; 99 | }; 100 | 101 | scene.addEventListener("afterRender", function () { 102 | 103 | if(needsUpdate) { 104 | 105 | cubeCamera.update( renderer, _scene ); 106 | 107 | needsUpdate = false; 108 | 109 | } 110 | 111 | }); 112 | 113 | window.setTimeout(fn,0); 114 | 115 | scene.dispatchEvent({ type:"interact/register", entity: skybox}); 116 | 117 | skybox.addEventListener("interact/move", function (e) { 118 | if(!e.hand.pressed) return; 119 | var vec = e.hand.raycaster.ray.direction; 120 | if(vec.y < 0) return; 121 | light.position.copy(vec).multiplyScalar(900); 122 | a = vec.y; 123 | 124 | fn(); 125 | 126 | }); 127 | 128 | return group; 129 | } -------------------------------------------------------------------------------- /examples/madrid/src/statues.js: -------------------------------------------------------------------------------- 1 | import {THREE} from "../../../dist/three-effects.js"; 2 | 3 | export default function (renderer, scene, camera, assets) { 4 | var group = new THREE.Group(); 5 | 6 | var material = new THREE.MeshStandardMaterial({ 7 | metalness: 0, 8 | roughness: 1, 9 | aoMapIntensity: 0.5, 10 | map: assets["venus_diffuse"], 11 | aoMap: assets["venus_material"], 12 | roughnessMap: assets["venus_material"], 13 | normalMap: assets["venus_normals"] 14 | }); 15 | 16 | assets["venus_model"].scale(0.05,0.05,0.05); 17 | assets["venus_model"].computeBoundingBox(); 18 | 19 | var arr = ["bloom", "outline", "ssao", "filmgrain", "!fxaa", "colors", "godrays", "!glitch"]; 20 | 21 | arr.forEach(function(s, i){ 22 | var m = new THREE.Mesh(assets["venus_model"], material.clone()); 23 | m.castShadow = true; 24 | m.receiveShadow = true; 25 | 26 | m.material.color.setHSL(i/arr.length, 0.2, 0.7); 27 | 28 | var a = Math.PI * 2 * (i / arr.length); 29 | m.position.set(Math.sin(a) * 5, 0, Math.cos(a) * 5); 30 | 31 | var r = (0.4 + i / arr.length); 32 | m.rotation.y = Math.PI * 2 * Math.round(r * 4) / 4; 33 | 34 | group.add(m); 35 | 36 | scene.dispatchEvent({ type: "interact/register", entity: m}); 37 | 38 | m.addEventListener("interact/enter",function () { 39 | m.material.emissive.set(0x111111); 40 | scene.dispatchEvent({ type: "audio/tick" 
}); 41 | m.userData.label.visible = true; 42 | }); 43 | 44 | m.addEventListener("interact/leave",function () { 45 | m.material.emissive.set(0x000000); 46 | m.userData.label.visible = false; 47 | }); 48 | 49 | var ev = {type: "option", name: s, value: false}; 50 | 51 | var isActive = false; 52 | 53 | m.material.color.setHSL(i / arr.length, isActive ? 0.8 : 0.2, isActive ? 0.8 : 0.7); 54 | 55 | m.addEventListener("interact/press",function () { 56 | isActive = !isActive; 57 | m.material.color.setHSL(i / arr.length, isActive ? 0.8 : 0.2, isActive ? 0.8 : 0.7); 58 | ev.value = isActive; 59 | scene.dispatchEvent(ev); 60 | scene.dispatchEvent({ type: "audio/zit" }); 61 | m.userData.label.text = s.replace("!", "") + (isActive ? " on" : " off"); 62 | m.userData.label.disabled = !isActive; 63 | m.userData.label.needsUpdate = true; 64 | }); 65 | 66 | scene.dispatchEvent(ev); 67 | 68 | scene.dispatchEvent({ type: "label/register", visible: false, entity: m, text: s.replace("!", "") + " off", scale: 0.33, disabled: !isActive}); 69 | }) 70 | 71 | group.position.y = -0.01; 72 | return group; 73 | } -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016-2018, Yannis Gravezas 3 | * Copyright (c) 2019 Samsung Internet 4 | * Available under the MIT license. 5 | */ 6 | 7 | import * as THREE from 'three'; 8 | import * as attach from './src/lib/index.js'; 9 | import attachEffects from './src/fx.js'; 10 | import attachSystem from './src/ecs.js'; 11 | import './src/chunk.js'; 12 | 13 | export { attachEffects, attachSystem, attach, THREE } -------------------------------------------------------------------------------- /package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "three-effects", 3 | "version": "0.1.0", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "@types/estree": { 8 | "version": "0.0.39", 9 | "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.39.tgz", 10 | "integrity": "sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw==", 11 | "dev": true 12 | }, 13 | "@types/node": { 14 | "version": "12.7.12", 15 | "resolved": "https://registry.npmjs.org/@types/node/-/node-12.7.12.tgz", 16 | "integrity": "sha512-KPYGmfD0/b1eXurQ59fXD1GBzhSQfz6/lKBxkaHX9dKTzjXbK68Zt7yGUxUsCS1jeTy/8aL+d9JEr+S54mpkWQ==", 17 | "dev": true 18 | }, 19 | "@types/resolve": { 20 | "version": "0.0.8", 21 | "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-0.0.8.tgz", 22 | "integrity": "sha512-auApPaJf3NPfe18hSoJkp8EbZzer2ISk7o8mCC3M9he/a04+gbMF97NkpD2S8riMGvm4BMRI59/SZQSaLTKpsQ==", 23 | "dev": true, 24 | "requires": { 25 | "@types/node": "*" 26 | } 27 | }, 28 | "builtin-modules": { 29 | "version": "3.1.0", 30 | "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz", 31 | "integrity": "sha512-k0KL0aWZuBt2lrxrcASWDfwOLMnodeQjodT/1SxEQAXsHANgo6ZC/VEaSEHCXt7aSTZ4/4H5LKa+tBXmW7Vtvw==", 32 | "dev": true 33 | }, 34 | "estree-walker": { 35 | "version": "0.6.1", 36 | "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-0.6.1.tgz", 37 | "integrity": "sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==", 38 | "dev": true 39 | }, 40 | "is-module": { 41 | "version": "1.0.0", 42 | "resolved": "https://registry.npmjs.org/is-module/-/is-module-1.0.0.tgz", 43 
| "integrity": "sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE=", 44 | "dev": true 45 | }, 46 | "path-parse": { 47 | "version": "1.0.6", 48 | "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", 49 | "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==", 50 | "dev": true 51 | }, 52 | "resolve": { 53 | "version": "1.12.0", 54 | "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.12.0.tgz", 55 | "integrity": "sha512-B/dOmuoAik5bKcD6s6nXDCjzUKnaDvdkRyAk6rsmsKLipWj4797iothd7jmmUhWTfinVMU+wc56rYKsit2Qy4w==", 56 | "dev": true, 57 | "requires": { 58 | "path-parse": "^1.0.6" 59 | } 60 | }, 61 | "rollup": { 62 | "version": "1.23.1", 63 | "resolved": "https://registry.npmjs.org/rollup/-/rollup-1.23.1.tgz", 64 | "integrity": "sha512-95C1GZQpr/NIA0kMUQmSjuMDQ45oZfPgDBcN0yZwBG7Kee//m7H68vgIyg+SPuyrTZ5PrXfyLK80OzXeKG5dAA==", 65 | "dev": true, 66 | "requires": { 67 | "@types/estree": "*", 68 | "@types/node": "*", 69 | "acorn": "^7.1.0" 70 | }, 71 | "dependencies": { 72 | "acorn": { 73 | "version": "7.1.0", 74 | "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.0.tgz", 75 | "integrity": "sha512-kL5CuoXA/dgxlBbVrflsflzQ3PAas7RYZB52NOm/6839iVYJgKMJ3cQJD+t2i5+qFa8h3MDpEOJiS64E8JLnSQ==", 76 | "dev": true 77 | } 78 | } 79 | }, 80 | "rollup-plugin-node-resolve": { 81 | "version": "5.2.0", 82 | "resolved": "https://registry.npmjs.org/rollup-plugin-node-resolve/-/rollup-plugin-node-resolve-5.2.0.tgz", 83 | "integrity": "sha512-jUlyaDXts7TW2CqQ4GaO5VJ4PwwaV8VUGA7+km3n6k6xtOEacf61u0VXwN80phY/evMcaS+9eIeJ9MOyDxt5Zw==", 84 | "dev": true, 85 | "requires": { 86 | "@types/resolve": "0.0.8", 87 | "builtin-modules": "^3.1.0", 88 | "is-module": "^1.0.0", 89 | "resolve": "^1.11.1", 90 | "rollup-pluginutils": "^2.8.1" 91 | } 92 | }, 93 | "rollup-pluginutils": { 94 | "version": "2.8.2", 95 | "resolved": "https://registry.npmjs.org/rollup-pluginutils/-/rollup-pluginutils-2.8.2.tgz", 96 | "integrity": "sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ==", 97 | "dev": true, 98 | "requires": { 99 | "estree-walker": "^0.6.1" 100 | } 101 | }, 102 | "three": { 103 | "version": "0.109.0", 104 | "resolved": "https://registry.npmjs.org/three/-/three-0.109.0.tgz", 105 | "integrity": "sha512-XT99T3Hvgh2CEvwPdHYEunNE+clLK6KiT1U8En7YOgIqTUw4MrLeIc8zxQAJ6wbP8hhJaY5+Cff3jwBPpBa0gA==", 106 | "dev": true 107 | } 108 | } 109 | } 110 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "three-effects", 3 | "version": "0.1.0", 4 | "description": "Post processing and entity component systems for Threejs", 5 | "main": "./dist/three-effects.js", 6 | "module": "./dist/three-effects.module.js", 7 | "scripts": { 8 | "dist": "rollup -c" 9 | }, 10 | "repository": { 11 | "type": "git", 12 | "url": "git+https://github.com/SamsungInternet/three-effects.git" 13 | }, 14 | "keywords": [ 15 | "webgl", 16 | "grading", 17 | "postprocessing", 18 | "effect", 19 | "bloom", 20 | "outline", 21 | "sobel", 22 | "freichen", 23 | "edge", 24 | "colors", 25 | "ssao", 26 | "ambient", 27 | "depth", 28 | "occlusion", 29 | "fxaa", 30 | "antialiasing", 31 | "godrays", 32 | "sunshafts", 33 | "three", 34 | "threejs", 35 | "uber", 36 | "shader", 37 | "grain", 38 | "film" 39 | ], 40 | "author": "Yannis Gravezas ", 41 | "license": "MIT", 42 | "bugs": { 43 | "url": 
"https://github.com/SamsungInternet/three-effects/issues" 44 | }, 45 | "homepage": "https://github.com/SamsungInternet/three-effects#readme", 46 | "devDependencies": { 47 | "three": "^0.109.0", 48 | "rollup": "^1.23.1", 49 | "rollup-plugin-node-resolve": "^5.2.0" 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /rollup.config.js: -------------------------------------------------------------------------------- 1 | import nodeResolve from 'rollup-plugin-node-resolve'; 2 | 3 | export default [{ 4 | input: 'index.js', 5 | output: { 6 | file: 'dist/three-effects.js', 7 | format: 'esm' 8 | }, 9 | plugins: [ 10 | nodeResolve() 11 | ] 12 | }, 13 | { 14 | input: 'index.js', 15 | external: ["three"], 16 | output: { 17 | file: 'dist/three-effects.module.js', 18 | format: 'esm' 19 | } 20 | }]; -------------------------------------------------------------------------------- /src/chunk.js: -------------------------------------------------------------------------------- 1 | import { ShaderChunk } from "three"; 2 | 3 | ShaderChunk["vr_pars"] = ` 4 | #ifndef VR_PARS 5 | 6 | #define VR_PARS 1 7 | uniform float VR; 8 | 9 | #define selectVR(novr, left, right) ( (VR > 0.) ? ( (gl_FragCoord.x < VR) ? (left) : (right) ): (novr)) 10 | 11 | vec4 textureVR(in sampler2D tex, in vec2 uv) { 12 | uv.x = selectVR(uv.x, min(0.5, uv.x), max(0.5, uv.x) ); 13 | return texture2D(tex, uv); 14 | } 15 | 16 | vec4 textureVR(in sampler2D tex, in vec2 uv, float bias) { 17 | uv.x = selectVR(uv.x, min(0.5, uv.x), max(0.5, uv.x)); 18 | return texture2D(tex, uv, bias); 19 | } 20 | 21 | #ifdef TEXTURE_LOD_EXT 22 | 23 | vec4 textureVRLod(in sampler2D tex, in vec2 uv, float lod) { 24 | uv.x = selectVR(uv.x, min(0.5, uv.x), max(0.5, uv.x)); 25 | return texture2DLodEXT(tex, uv, bias); 26 | } 27 | 28 | #endif 29 | 30 | #endif 31 | `; 32 | 33 | ShaderChunk["blur_pars"] = ` 34 | #ifndef BLUR_PARS 35 | 36 | #define BLUR_PARS 1 37 | 38 | #ifndef VR_PARS 39 | #define textureVR(t, u) texture2D(t, u) 40 | #endif 41 | 42 | #ifndef BLUR_WEIGHT 43 | #define BLUR_WEIGHT(v, uv) v.a; 44 | #endif 45 | 46 | #define BLUR_MAX_RADIUS 255 47 | 48 | float blur_gaussian_pdf(in float x, in float sigma) { 49 | return 0.39894 * exp( -0.5 * x * x/( sigma * sigma))/sigma; 50 | } 51 | 52 | vec4 blur_weighted(const float fSigma, const in sampler2D tex, const in vec2 uv, const in vec2 direction, const in vec2 resolution) { 53 | vec2 invSize = 1.0 / resolution; 54 | float weightSum = blur_gaussian_pdf(0.0, fSigma); 55 | vec4 diffuseSum = textureVR( tex, uv) * weightSum; 56 | for( int i = 1; i < BLUR_MAX_RADIUS; i ++ ) { 57 | if(float(i) > fSigma) break; 58 | float x = float(i); 59 | float w = blur_gaussian_pdf(x, fSigma); 60 | vec2 uvOffset = direction * invSize * x; 61 | vec2 uvv = uv + uvOffset; 62 | vec4 sample1 = textureVR( tex, uvv); 63 | float w1 = BLUR_WEIGHT(sample1, uvv); 64 | uvv = uv - uvOffset; 65 | vec4 sample2 = textureVR( tex, uvv); 66 | float w2 = BLUR_WEIGHT(sample1, uvv); 67 | diffuseSum += (sample1 * w1 + sample2 * w2) * w; 68 | weightSum += (w1 + w2) * w; 69 | } 70 | return diffuseSum/weightSum; 71 | } 72 | 73 | vec4 blur(const float fSigma, const in sampler2D tex, const in vec2 uv, const in vec2 direction, const in vec2 resolution) { 74 | vec2 invSize = 1.0 / resolution; 75 | float weightSum = blur_gaussian_pdf(0.0, fSigma); 76 | vec4 diffuseSum = textureVR( tex, uv) * weightSum; 77 | for( int i = 1; i < BLUR_MAX_RADIUS; i ++ ) { 78 | if(float(i) > fSigma) break; 79 | float x = float(i); 80 | float 
w = blur_gaussian_pdf(x, fSigma); 81 | vec2 uvOffset = direction * invSize * x; 82 | vec4 sample1 = textureVR( tex, uv + uvOffset); 83 | vec4 sample2 = textureVR( tex, uv - uvOffset); 84 | diffuseSum += (sample1 + sample2) * w; 85 | weightSum += 2.0 * w; 86 | } 87 | return diffuseSum/weightSum; 88 | } 89 | 90 | vec4 blur5(const in sampler2D tex, const in vec2 uv, const in vec2 direction, const in vec2 resolution) { 91 | vec4 color = vec4(0.0); 92 | vec2 off1 = vec2(1.3333333333333333) * direction; 93 | color += textureVR(tex, uv) * 0.29411764705882354; 94 | color += textureVR(tex, uv + (off1 / resolution)) * 0.35294117647058826; 95 | color += textureVR(tex, uv - (off1 / resolution)) * 0.35294117647058826; 96 | return color; 97 | } 98 | 99 | vec4 blur9(const in sampler2D tex, const in vec2 uv, const in vec2 direction, const in vec2 resolution) { 100 | vec4 color = vec4(0.0); 101 | vec2 off1 = vec2(1.3846153846) * direction; 102 | vec2 off2 = vec2(3.2307692308) * direction; 103 | color += textureVR(tex, uv) * 0.2270270270; 104 | color += textureVR(tex, uv + (off1 / resolution)) * 0.3162162162; 105 | color += textureVR(tex, uv - (off1 / resolution)) * 0.3162162162; 106 | color += textureVR(tex, uv + (off2 / resolution)) * 0.0702702703; 107 | color += textureVR(tex, uv - (off2 / resolution)) * 0.0702702703; 108 | return color; 109 | } 110 | 111 | vec4 blur13(const in sampler2D tex, const in vec2 uv, const in vec2 direction, const in vec2 resolution) { 112 | vec4 color = vec4(0.0); 113 | vec2 off1 = vec2(1.411764705882353) * direction; 114 | vec2 off2 = vec2(3.2941176470588234) * direction; 115 | vec2 off3 = vec2(5.176470588235294) * direction; 116 | color += textureVR(tex, uv) * 0.1964825501511404; 117 | color += textureVR(tex, uv + (off1 / resolution)) * 0.2969069646728344; 118 | color += textureVR(tex, uv - (off1 / resolution)) * 0.2969069646728344; 119 | color += textureVR(tex, uv + (off2 / resolution)) * 0.09447039785044732; 120 | color += textureVR(tex, uv - (off2 / resolution)) * 0.09447039785044732; 121 | color += textureVR(tex, uv + (off3 / resolution)) * 0.010381362401148057; 122 | color += textureVR(tex, uv - (off3 / resolution)) * 0.010381362401148057; 123 | return color; 124 | } 125 | #endif 126 | `; 127 | 128 | export default ShaderChunk; 129 | -------------------------------------------------------------------------------- /src/ecs.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016-2018, Yannis Gravezas 3 | * Copyright (c) 2019 Samsung Internet 4 | * Available under the MIT license. 
5 | */ 6 | 7 | export default function (obj, name, api) { 8 | 9 | var objects = []; 10 | 11 | var listeners = {}; 12 | 13 | function addListener(lname, fn) { 14 | listeners[lname] = fn; 15 | obj.addEventListener(lname, listeners[lname]); 16 | } 17 | 18 | addListener(name + "/register", function(e) { 19 | var index = objects.indexOf(e.entity); 20 | if( index !== -1) { 21 | objects.splice(index, 1); 22 | if(api.remove) api.remove(e, objects, name); 23 | delete e.entity.userData[name]; 24 | } 25 | objects.push(e.entity); 26 | e.entity.userData[name] = api.init(e, objects, name, e.reset); 27 | }); 28 | 29 | addListener(name + "/unregister", function(e) { 30 | var index = objects.indexOf(e.entity); 31 | if(index !== -1) { 32 | objects.splice(index, 1); 33 | if(api.remove) api.remove(e, objects, name); 34 | delete e.entity.userData[name]; 35 | } 36 | }); 37 | 38 | for (var k in api) { 39 | var lname, fn; 40 | switch(k) { 41 | case "init": 42 | case "remove": 43 | case "control": continue; 44 | default: // capture the key so each listener dispatches to its own handler 45 | addListener(k, (function(key) { return function(e) { 46 | api[key](e, objects, name); 47 | }; })(k)); 48 | break; 49 | } 50 | } 51 | 52 | return function (arg) { 53 | if(!arg) { 54 | objects.forEach( function (obj) { 55 | if (api.remove) api.remove({ entity: obj }, objects, name); 56 | delete obj.userData[name]; 57 | }); 58 | for(var k in listeners) { 59 | obj.removeEventListener(k, listeners[k]); 60 | } 61 | } else if (api.control) { 62 | api.control(arg, objects, name); 63 | } 64 | } 65 | } -------------------------------------------------------------------------------- /src/fx.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016-2018, Yannis Gravezas 3 | * Copyright (c) 2019 Samsung Internet 4 | * Available under the MIT license. 5 | */ 6 | 7 | import * as THREE from 'three'; 8 | 9 | export default function (scene, antialias) { 10 | var renderTargets = [new THREE.WebGLRenderTarget(1, 1), new THREE.WebGLRenderTarget(1, 1)]; 11 | var multiTarget = new THREE.WebGLMultisampleRenderTarget(1, 1); 12 | multiTarget.samples = antialias === true ? 
4 : antialias; 13 | var depthTexture = new THREE.DepthTexture(); 14 | depthTexture.format = THREE.DepthStencilFormat; 15 | depthTexture.type = THREE.UnsignedInt248Type; 16 | 17 | renderTargets[0].depthTexture = multiTarget.depthTexture = depthTexture; 18 | 19 | scene.userData.VR = { value: 0 }; 20 | scene.userData.colorTexture = { value: null }; 21 | scene.userData.depthTexture = { value: depthTexture }; 22 | 23 | var passes = []; 24 | 25 | var realTarget; 26 | 27 | var _scene = new THREE.Scene(); 28 | var _ortho = new THREE.OrthographicCamera(1,1,1,1,1,10); 29 | var _quad = new THREE.Mesh(new THREE.PlaneBufferGeometry(2,2), null); 30 | _quad.frustumCulled = false; 31 | _scene.add(_quad); 32 | 33 | var vsize = new THREE.Vector2(); 34 | 35 | scene.userData.resolution = { value: vsize }; 36 | 37 | var event = { type: "beforeRender", scene: null, renderer: null, camera: null, size: vsize, time: 0 }; 38 | 39 | function dispatch(type) { 40 | event.type = type; 41 | scene.dispatchEvent(event); 42 | } 43 | 44 | scene.onBeforeRender = function (renderer, scene, camera, renderTarget) { 45 | 46 | event.time = window.performance.now(); 47 | 48 | 49 | if (renderTarget) { 50 | vsize.set(renderTarget.width, renderTarget.height); 51 | } else { 52 | renderer.getDrawingBufferSize(vsize); 53 | } 54 | 55 | if (!passes.length) { 56 | dispatch("beforeRender"); 57 | return; 58 | } 59 | 60 | if(vsize.x !== renderTargets[0].width || vsize.y !== renderTargets[0].height) { 61 | renderTargets[0].setSize(vsize.x, vsize.y); 62 | renderTargets[1].setSize(vsize.x, vsize.y); 63 | multiTarget.setSize(vsize.x, vsize.y); 64 | dispatch("resizeEffects"); 65 | } 66 | 67 | scene.userData.VR.value = renderer.vr.isPresenting() ? vsize.x * 0.5 : 0; 68 | 69 | event.renderer = renderer; 70 | event.scene = scene; 71 | event.camera = camera; 72 | realTarget = event.outputTarget = renderTarget; 73 | event.renderTarget = renderTargets[0]; 74 | 75 | 76 | dispatch("beforeRender"); 77 | 78 | renderer.setRenderTarget(antialias && renderer.capabilities.isWebGL2 ? multiTarget : renderTargets[0]); 79 | 80 | }; 81 | 82 | scene.onAfterRender = function (renderer, scene, camera) { 83 | if (!passes.length) return; 84 | 85 | var vrEnabled = renderer.vr.enabled; 86 | renderer.vr.enabled = false; 87 | 88 | var u = scene.userData; 89 | event.renderTarget = antialias && renderer.capabilities.isWebGL2 ? multiTarget : renderTargets[0]; 90 | u.colorTexture.value = event.renderTarget.texture; 91 | 92 | dispatch("afterRender"); 93 | 94 | passes.forEach(function (p, i) { 95 | event.passId = p.passId; 96 | dispatch("beforePass"); 97 | 98 | var rt = (i == (passes.length - 1)) ? realTarget : renderTargets[(i + 1) & 1]; 99 | 100 | _quad.material = p; 101 | renderer.setRenderTarget(rt); 102 | //renderer.setViewport(0, 0, vsize.x, vsize.y); 103 | renderer.render(_scene, _ortho); 104 | 105 | u.colorTexture.value = rt ? 
rt.texture : null; 106 | event.renderTarget = rt; 107 | dispatch("afterPass"); 108 | }); 109 | 110 | delete event.passId; 111 | dispatch("afterEffects"); 112 | 113 | renderer.vr.enabled = vrEnabled; 114 | }; 115 | 116 | var fxPattern = /FX_PASS_[0-9]+/gm; 117 | var symPattern = /^\w+$/; 118 | var uPattern = /^\s*uniform\s+/; 119 | 120 | function parsePasses( src ) { 121 | var arr = src.match(fxPattern); 122 | if(!arr) return ["main"]; 123 | var set = new Set(arr); 124 | arr = [...set]; 125 | arr.sort(function(a, b) { 126 | return a.localeCompare(b); 127 | }); 128 | arr.push("main"); 129 | return arr; 130 | } 131 | 132 | return function ( src ) { 133 | passes.forEach(function(m){ m.dispose(); }); 134 | passes = []; 135 | 136 | if(!src) return; 137 | 138 | if (Array.isArray(src)) { 139 | var head = []; 140 | var body = []; 141 | var bc = 0, c = 0; 142 | 143 | src.forEach(function (s, i) { 144 | if(i && s[0] === "!") bc++; 145 | }); 146 | 147 | if(bc) body.push(`#if defined FX_PASS_${c}`); 148 | 149 | src.forEach(function (s, i) { 150 | 151 | if(bc && i && s[0] === "!") { 152 | body.push(c < bc - 1 ? `#elif defined FX_PASS_${++c}` : "#else"); 153 | } 154 | 155 | s = s.replace("!", "").trim(); 156 | 157 | if(!s) return; 158 | 159 | if(s[0] === "#") { 160 | head.push(`#include <${s.replace("#", "")}>`); 161 | } else if(s.match(symPattern)) { 162 | head.push(`#include <${s}_pars>`); 163 | body.push(`\t${s}_apply(color, uv);`) 164 | } else if(s.match(uPattern)){ 165 | head.push(s); 166 | } else { 167 | body.push(s); 168 | } 169 | 170 | }); 171 | 172 | //body.push("fragColor.a = 1.0;") 173 | if(bc) body.push("#endif"); 174 | 175 | src = [ 176 | head.join("\n"), 177 | "", 178 | "void main(void){", 179 | "\tvec2 uv = vUv;", 180 | "\tvec4 color = texture2D(colorTexture, uv);", 181 | body.join("\n"), 182 | "\tgl_FragColor = color;", 183 | "}" 184 | ].join("\n") 185 | } 186 | 187 | var def = parsePasses(src); 188 | 189 | src = [ 190 | "#include ", 191 | "uniform sampler2D colorTexture;", 192 | "uniform sampler2D depthTexture;", 193 | "uniform vec2 resolution;", 194 | "varying vec2 vUv;", 195 | src 196 | ].join("\n"); 197 | 198 | def.forEach(function (d){ 199 | var defines = {}; 200 | if(d !== "main") defines[d] = 1; 201 | var m = new THREE.ShaderMaterial({ 202 | defines: defines, 203 | uniforms: scene.userData, 204 | vertexShader: ` 205 | varying vec2 vUv; 206 | 207 | void main(void) { 208 | vUv = uv; 209 | gl_Position = vec4(position.xy, 0., 1.); 210 | } 211 | `, 212 | fragmentShader: src, 213 | depthWrite: false, 214 | depthTest: false, 215 | extensions: { 216 | derivatives: true, 217 | shaderTextureLOD: true 218 | }, 219 | fog: false, 220 | lights: false 221 | }); 222 | m.passId = d; 223 | passes.push(m); 224 | }); 225 | } 226 | } -------------------------------------------------------------------------------- /src/lib/bloom/index.js: -------------------------------------------------------------------------------- 1 | import * as THREE from "three"; 2 | 3 | THREE.ShaderChunk["bloom_pars"] = ` 4 | uniform sampler2D bloom_texture; 5 | 6 | void bloom_apply(inout vec4 fragColor, in vec2 uv) { 7 | fragColor.rgb += texture2D(bloom_texture, uv).rgb; 8 | } 9 | `; 10 | 11 | export default function (scene, config) { 12 | 13 | config = config || {}; 14 | 15 | var inp = new THREE.WebGLRenderTarget(1,1); 16 | var ping = [ new THREE.WebGLRenderTarget(1,1), new THREE.WebGLRenderTarget(1,1), new THREE.WebGLRenderTarget(1,1) ]; 17 | var pong = [ new THREE.WebGLRenderTarget(1,1), new THREE.WebGLRenderTarget(1,1), new 
THREE.WebGLRenderTarget(1,1) ]; 18 | 19 | var passId = config.before || "main"; 20 | 21 | function getPass(src, uniforms) { 22 | return new THREE.ShaderMaterial({ 23 | uniforms: uniforms, 24 | vertexShader: ` 25 | varying vec2 vUv; 26 | 27 | void main(void) { 28 | vUv = uv; 29 | gl_Position = vec4(position.xy, 0., 1.); 30 | } 31 | `, 32 | fragmentShader: "varying vec2 vUv;\n" + src, 33 | depthWrite: false, 34 | depthTest: false 35 | }); 36 | } 37 | 38 | var controlUniforms = {}; 39 | 40 | var preUniforms = config.inputUniforms || { 41 | colorTexture: { value: null }, 42 | depthTexture: { value: null }, 43 | threshold: { value: config.threshold || 0.9 }, 44 | smooth: { value: config.smooth || 0.01 } 45 | } 46 | 47 | var prePass = getPass(config.inputShader || ` 48 | uniform sampler2D colorTexture; 49 | uniform sampler2D depthTexture; 50 | uniform float threshold; 51 | 52 | void main(void) { 53 | vec4 texel = texture2D( colorTexture, vUv ); 54 | vec3 luma = vec3( 0.299, 0.587, 0.114 ); 55 | float v = dot( texel.xyz, luma ); 56 | vec4 outputColor = vec4( 0., 0., 0., 1. ); 57 | float alpha = smoothstep( threshold, threshold + 0.01, v ); 58 | 59 | gl_FragColor = mix( outputColor, texel, alpha ); 60 | } 61 | `, preUniforms); 62 | 63 | var blurUniforms = { 64 | colorTexture: { value: null }, 65 | direction: { value: new THREE.Vector2(1, 0) }, 66 | resolution: { value: new THREE.Vector2(1, 1) } 67 | } 68 | 69 | var blurPasses = [ 70 | getPass(` 71 | #include <vr_pars> 72 | #include <blur_pars> 73 | 74 | uniform sampler2D colorTexture; 75 | uniform vec2 direction; 76 | uniform vec2 resolution; 77 | 78 | void main(void) { 79 | gl_FragColor = blur5(colorTexture, vUv, direction, resolution); 80 | } 81 | `, blurUniforms), 82 | getPass(` 83 | #include <vr_pars> 84 | #include <blur_pars> 85 | 86 | uniform sampler2D colorTexture; 87 | uniform vec2 direction; 88 | uniform vec2 resolution; 89 | 90 | void main(void) { 91 | gl_FragColor = blur9(colorTexture, vUv, direction, resolution); 92 | } 93 | `, blurUniforms), 94 | getPass(` 95 | #include <vr_pars> 96 | #include <blur_pars> 97 | 98 | uniform sampler2D colorTexture; 99 | uniform vec2 direction; 100 | uniform vec2 resolution; 101 | 102 | void main(void) { 103 | gl_FragColor = blur13(colorTexture, vUv, direction, resolution); 104 | } 105 | `, blurUniforms), 106 | ]; 107 | 108 | var postUniforms = { 109 | strength: { value: 0.5 }, 110 | radius: { value: 1 }, 111 | blurTexture1: { value: pong[0].texture }, 112 | blurTexture2: { value: pong[1].texture }, 113 | blurTexture3: { value: pong[2].texture }, 114 | colorTexture: { value: null } 115 | }; 116 | 117 | controlUniforms.strength = scene.userData.bloom_strength = postUniforms.strength; 118 | controlUniforms.radius = scene.userData.bloom_radius = postUniforms.radius; 119 | if (preUniforms.threshold) controlUniforms.threshold = scene.userData.bloom_threshold = preUniforms.threshold; 120 | scene.userData.bloom_texture = { value: ping[0].texture }; 121 | 122 | var postPass = getPass(` 123 | uniform sampler2D blurTexture1; 124 | uniform sampler2D blurTexture2; 125 | uniform sampler2D blurTexture3; 126 | uniform float strength; 127 | uniform float radius; 128 | 129 | float lerpBloomFactor(const in float factor, const in float mirrorFactor) { 130 | return mix(factor, mirrorFactor, radius); 131 | } 132 | 133 | void main() { 134 | gl_FragColor = strength * ( lerpBloomFactor(1., 0.2) * texture2D(blurTexture1, vUv) + \ 135 | lerpBloomFactor(0.2, 0.8) * texture2D(blurTexture2, vUv) + \ 136 | lerpBloomFactor(0.2, 1.) 
* texture2D(blurTexture3, vUv) );\ 137 | } 138 | `, postUniforms); 139 | 140 | scene.userData.bloom_internal = {prePass, blurPasses, postPass}; 141 | scene.userData.bloom = controlUniforms; 142 | 143 | var _scene = new THREE.Scene(); 144 | var _ortho = new THREE.OrthographicCamera(1,1,1,1,1,10); 145 | var _quad = new THREE.Mesh(new THREE.PlaneBufferGeometry(2,2), null); 146 | _quad.frustumCulled = false; 147 | _scene.add(_quad); 148 | 149 | function performPass(renderer, m, inputTarget, outputTarget) { 150 | _quad.material = m; 151 | if (m.uniforms.colorTexture) 152 | m.uniforms.colorTexture.value = inputTarget ? inputTarget.texture : null; 153 | if (m.uniforms.depthTexture) 154 | m.uniforms.depthTexture.value = inputTarget ? inputTarget.depthTexture: null; 155 | if (m.uniforms.resolution) 156 | m.uniforms.resolution.value.set(outputTarget.width, outputTarget.height); 157 | renderer.setRenderTarget(outputTarget); 158 | renderer.render(_scene, _ortho); 159 | } 160 | 161 | var fn = function (e) { 162 | if(passId !== e.passId) return; 163 | 164 | blurUniforms.VR = { value: 0 }; 165 | 166 | performPass(e.renderer, prePass, e.renderTarget, inp); 167 | 168 | blurUniforms.VR.value = e.scene.userData.VR.value * 0.25; 169 | 170 | for(var i=0; i< 3; i++) { 171 | blurUniforms.direction.value.set(0, 1); 172 | performPass(e.renderer, blurPasses[i], i ? pong[i - 1] : inp, ping[i]); 173 | 174 | blurUniforms.direction.value.set(1, 0); 175 | performPass(e.renderer, blurPasses[i], ping[i], pong[i]); 176 | blurUniforms.VR.value *= 0.5; 177 | } 178 | 179 | performPass(e.renderer, postPass, false, ping[0]); 180 | }; 181 | 182 | scene.addEventListener("beforePass", fn); 183 | 184 | var fr = function (e) { 185 | var w = e.size.x * 0.5, h = e.size.y * 0.5; 186 | inp.setSize(w, h); 187 | for(var i=0; i< 3; i++) { 188 | w = Math.floor(w * 0.5); 189 | h = Math.floor(h * 0.5); 190 | ping[i].setSize(w, h); 191 | pong[i].setSize(w, h); 192 | } 193 | } 194 | 195 | scene.addEventListener("resizeEffects", fr); 196 | 197 | return function (arg) { 198 | if ( arg ) { 199 | if(arg.before) passId = arg.before; 200 | for ( var k in arg) { 201 | if (controlUniforms[k]) { 202 | controlUniforms[k].value = arg[k]; 203 | } 204 | } 205 | } else { 206 | scene.removeEventListener("beforePass", fn); 207 | scene.removeEventListener("resizeEffects", fr); 208 | 209 | inp.dispose(); 210 | for(var i = 0; i < 3; i++) { 211 | ping[i].dispose(); 212 | pong[i].dispose(); 213 | blurPasses[i].dispose(); 214 | } 215 | 216 | prePass.dispose(); 217 | postPass.dispose(); 218 | 219 | delete scene.userData.bloom_internal; 220 | delete scene.userData.bloom_strength; 221 | delete scene.userData.bloom_radius; 222 | delete scene.userData.bloom_threshold; 223 | delete scene.userData.bloom_texture; 224 | } 225 | } 226 | } -------------------------------------------------------------------------------- /src/lib/colors/index.js: -------------------------------------------------------------------------------- 1 | import * as THREE from "three"; 2 | 3 | THREE.ShaderChunk.colors = ` 4 | uniform sampler2D colors_texture; 5 | 6 | void colors_apply(inout vec4 fragColor, vec2 uv) { 7 | #ifndef COLORS_NO_CLAMP 8 | fragColor = clamp(fragColor, 0.0, 1.0); 9 | #endif 10 | 11 | mediump float blueColor = fragColor.b * 63.0; 12 | 13 | mediump vec2 quad1; 14 | quad1.y = floor(floor(blueColor) / 8.0); 15 | quad1.x = floor(blueColor) - (quad1.y * 8.0); 16 | 17 | mediump vec2 quad2; 18 | quad2.y = floor(ceil(blueColor) / 8.0); 19 | quad2.x = ceil(blueColor) - (quad2.y * 8.0); 20 
| 21 | highp vec2 texPos1; 22 | texPos1.x = (quad1.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * fragColor.r); 23 | texPos1.y = (quad1.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * fragColor.g); 24 | 25 | #ifdef COLORS_FLIP_Y 26 | texPos1.y = 1.0-texPos1.y; 27 | #endif 28 | 29 | highp vec2 texPos2; 30 | texPos2.x = (quad2.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * fragColor.r); 31 | texPos2.y = (quad2.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * fragColor.g); 32 | 33 | #ifdef COLORS_FLIP_Y 34 | texPos2.y = 1.0-texPos2.y; 35 | #endif 36 | 37 | lowp vec4 newColor1 = texture2D(colors_texture, texPos1); 38 | lowp vec4 newColor2 = texture2D(colors_texture, texPos2); 39 | 40 | fragColor = mix(newColor1, newColor2, fract(blueColor)); 41 | } 42 | 43 | vec3 colors_rgb2hsv(vec3 c){ 44 | vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0); 45 | vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g)); 46 | vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r)); 47 | 48 | float d = q.x - min(q.w, q.y); 49 | float e = 1.0e-10; 50 | return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x); 51 | } 52 | 53 | vec3 colors_hsv2rgb(vec3 c) 54 | { 55 | vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0); 56 | vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www); 57 | return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y); 58 | } 59 | `; 60 | 61 | export default function (scene) { 62 | var controlUniform = {value: null}; 63 | scene.userData["colors_texture"] = controlUniform; 64 | scene.userData.colors = { 65 | texture: controlUniform 66 | } 67 | 68 | return function(arg) { 69 | if(arg) { 70 | if(arg.texture) controlUniform.value = arg.texture; 71 | } else { 72 | delete scene.userData["colors_texture"]; 73 | } 74 | } 75 | } -------------------------------------------------------------------------------- /src/lib/filmgrain/index.js: -------------------------------------------------------------------------------- 1 | import * as THREE from "three"; 2 | 3 | THREE.ShaderChunk["filmgrain_pars"] = ` 4 | uniform float filmgrain_time; 5 | uniform float filmgrain_sCount; 6 | uniform float filmgrain_sIntensity; 7 | uniform float filmgrain_nIntensity; 8 | 9 | void filmgrain_apply(inout vec4 color, in vec2 uv) { 10 | vec4 cTextureScreen = color; 11 | float dx = rand( uv + mod(filmgrain_time, 3.14) ); 12 | vec3 cResult = cTextureScreen.rgb + cTextureScreen.rgb * clamp( 0.1 + dx, 0.0, 1.0 ); 13 | vec2 sc = vec2( sin( uv.y * filmgrain_sCount ), cos( uv.y * filmgrain_sCount ) ); 14 | cResult += cTextureScreen.rgb * vec3( sc.x, sc.y, sc.x ) * filmgrain_sIntensity; 15 | cResult = cTextureScreen.rgb + clamp( filmgrain_nIntensity, 0.0,1.0 ) * ( cResult - cTextureScreen.rgb ); 16 | color.rgb = cResult; 17 | } 18 | ` 19 | 20 | export default function (scene, config) { 21 | 22 | var controlUniforms = { 23 | "time": { type: "f", value: 0.0 }, 24 | "nIntensity": { type: "f", value: 0.3 }, 25 | "sIntensity": { type: "f", value: 0.03 }, 26 | "sCount": { type: "f", value: 4096 } 27 | }; 28 | 29 | function handleConf(conf) { 30 | for(var k in conf) { 31 | if(k in controlUniforms){ 32 | controlUniforms[k].value = conf[k]; 33 | } 34 | } 35 | } 36 | 37 | if(config) handleConf(config); 38 | 39 | scene.userData["filmgrain_time"] = controlUniforms["time"]; 40 | scene.userData["filmgrain_sCount"] = controlUniforms["sCount"]; 41 | scene.userData["filmgrain_sIntensity"] = controlUniforms["sIntensity"]; 42 | scene.userData["filmgrain_nIntensity"] = controlUniforms["nIntensity"]; 43 | 44 | scene.userData.filmgrain = 
controlUniforms; 45 | 46 | return function (arg) { 47 | if(arg) { 48 | handleConf(arg); 49 | return; 50 | } 51 | delete scene.userData["filmgrain_time"]; 52 | delete scene.userData["filmgrain_sCount"]; 53 | delete scene.userData["filmgrain_sIntensity"]; 54 | delete scene.userData["filmgrain_nIntensity"]; 55 | delete scene.userData.filmgrain; 56 | } 57 | 58 | } -------------------------------------------------------------------------------- /src/lib/fxaa/index.js: -------------------------------------------------------------------------------- 1 | import * as THREE from 'three'; 2 | 3 | THREE.ShaderChunk["fxaa_pars"] = ` 4 | #define FXAA_REDUCE_MIN (1.0/ 128.0) 5 | #define FXAA_REDUCE_MUL (1.0 / 8.0) 6 | #define FXAA_SPAN_MAX 8.0 7 | 8 | void fxaa_apply(inout vec4 color, in vec2 uv) 9 | { 10 | vec2 inverseVP = vec2(1.0 / resolution.x, 1.0 / resolution.y); 11 | vec3 rgbNW = texture2D(colorTexture, uv + vec2(-1.0, -1.0) * inverseVP).xyz; 12 | vec3 rgbNE = texture2D(colorTexture, uv + vec2(1.0, -1.0) * inverseVP).xyz; 13 | vec3 rgbSW = texture2D(colorTexture, uv + vec2(-1.0, 1.0) * inverseVP).xyz; 14 | vec3 rgbSE = texture2D(colorTexture, uv + vec2(1.0, 1.0) * inverseVP).xyz; 15 | vec3 rgbM = color.rgb; 16 | vec3 luma = vec3(0.299, 0.587, 0.114); 17 | float lumaNW = dot(rgbNW, luma); 18 | float lumaNE = dot(rgbNE, luma); 19 | float lumaSW = dot(rgbSW, luma); 20 | float lumaSE = dot(rgbSE, luma); 21 | float lumaM = dot(rgbM, luma); 22 | float lumaMin = min(lumaM, min(min(lumaNW, lumaNE), min(lumaSW, lumaSE))); 23 | float lumaMax = max(lumaM, max(max(lumaNW, lumaNE), max(lumaSW, lumaSE))); 24 | 25 | vec2 dir; 26 | dir.x = -((lumaNW + lumaNE) - (lumaSW + lumaSE)); 27 | dir.y = ((lumaNW + lumaSW) - (lumaNE + lumaSE)); 28 | 29 | float dirReduce = max((lumaNW + lumaNE + lumaSW + lumaSE) * 30 | (0.25 * FXAA_REDUCE_MUL), FXAA_REDUCE_MIN); 31 | 32 | float rcpDirMin = 1.0 / (min(abs(dir.x), abs(dir.y)) + dirReduce); 33 | dir = min(vec2(FXAA_SPAN_MAX, FXAA_SPAN_MAX), 34 | max(vec2(-FXAA_SPAN_MAX, -FXAA_SPAN_MAX), 35 | dir * rcpDirMin)) * inverseVP; 36 | 37 | vec3 rgbA = 0.5 * ( 38 | texture2D(colorTexture, uv + dir * (1.0 / 3.0 - 0.5)).xyz + 39 | texture2D(colorTexture, uv + dir * (2.0 / 3.0 - 0.5)).xyz); 40 | vec3 rgbB = rgbA * 0.5 + 0.25 * ( 41 | texture2D(colorTexture, uv + dir * -0.5).xyz + 42 | texture2D(colorTexture, uv + dir * 0.5).xyz); 43 | 44 | float lumaB = dot(rgbB, luma); 45 | if ((lumaB < lumaMin) || (lumaB > lumaMax)) 46 | color.rgb = rgbA; 47 | else 48 | color.rgb = rgbB; 49 | } 50 | 51 | ` 52 | // FXAA doesn't do any texture generation or need uniforms but we stay consistent with the other effects 53 | export default function(){ 54 | return function () {} 55 | }; 56 | 57 | -------------------------------------------------------------------------------- /src/lib/glitch/index.js: -------------------------------------------------------------------------------- 1 | import * as THREE from "three"; 2 | 3 | THREE.ShaderChunk["glitch_pars"] = ` 4 | uniform sampler2D glitch_tDisp; 5 | uniform float glitch_amount; 6 | uniform float glitch_snow; 7 | uniform float glitch_angle; 8 | uniform float glitch_seed; 9 | uniform float glitch_seed_x; 10 | uniform float glitch_seed_y; 11 | uniform float glitch_distortion_x; 12 | uniform float glitch_distortion_y; 13 | uniform float glitch_col_s; 14 | uniform float glitch_intensity; 15 | 16 | float glitch_rand(vec2 co){ 17 | return fract(sin(dot(co.xy ,vec2(12.9898,78.233))) * 43758.5453); 18 | } 19 | 20 | void glitch_apply(inout vec4 fragColor, vec2 uv) { 21 | 
vec2 p = uv; 22 | vec2 p2 = p; 23 | float xs = floor(gl_FragCoord.x / 0.5); 24 | float ys = floor(gl_FragCoord.y / 0.5); 25 | 26 | //based on staffantans glitch shader for unity https://github.com/staffantan/unityglitch 27 | vec4 normal = texture2D(glitch_tDisp, p2 * glitch_seed * glitch_seed); 28 | if(p2.y < glitch_distortion_x + glitch_col_s && p2.y > glitch_distortion_x - glitch_col_s * glitch_seed) { 29 | if(glitch_seed_x>0.){ 30 | p.y = 1. - (p.y + glitch_distortion_y); 31 | } 32 | else { 33 | p.y = glitch_distortion_y; 34 | } 35 | } 36 | if(p2.x < glitch_distortion_y + glitch_col_s && p2.x > glitch_distortion_y - glitch_col_s * glitch_seed) { 37 | if( glitch_seed_y > 0.){ 38 | p.x = glitch_distortion_x; 39 | } 40 | else { 41 | p.x = 1. - (p.x + glitch_distortion_x); 42 | } 43 | } 44 | p.x+=normal.x* glitch_seed_x * (glitch_seed/5.); 45 | p.y+=normal.y* glitch_seed_y * (glitch_seed/5.); 46 | 47 | //base from RGB shift shader 48 | vec2 offset = glitch_amount * vec2( cos(glitch_angle), sin(glitch_angle)); 49 | vec4 cr = texture2D(colorTexture, p + offset); 50 | vec4 cga = texture2D(colorTexture, p); 51 | vec4 cb = texture2D(colorTexture, p - offset); 52 | vec4 color = vec4(cr.r, cga.g, cb.b, cga.a); 53 | 54 | //add noise 55 | vec4 snow = 200.*glitch_amount*vec4(glitch_rand(vec2(xs * glitch_seed,ys * glitch_seed*50.))*0.2); 56 | color = color + glitch_snow * snow; 57 | 58 | fragColor = mix(fragColor, color, glitch_intensity); 59 | } 60 | `; 61 | 62 | export default function (scene, config) { 63 | var curF = 0; 64 | var randX = 0; 65 | 66 | var generateTrigger = function() { 67 | 68 | randX = THREE.Math.randInt( 120, 240 ); 69 | 70 | }; 71 | 72 | var generateHeightmap = function( dt_size ) { 73 | 74 | var data_arr = new Float32Array( dt_size * dt_size * 3 ); 75 | var length = dt_size * dt_size; 76 | 77 | for ( var i = 0; i < length; i ++ ) { 78 | 79 | var val = THREE.Math.randFloat( 0, 1 ); 80 | data_arr[ i * 3 + 0 ] = val; 81 | data_arr[ i * 3 + 1 ] = val; 82 | data_arr[ i * 3 + 2 ] = val; 83 | 84 | } 85 | 86 | var texture = new THREE.DataTexture( data_arr, dt_size, dt_size, THREE.RGBFormat, THREE.FloatType ); 87 | texture.needsUpdate = true; 88 | return texture; 89 | 90 | } 91 | 92 | var controlUniforms = { 93 | "tDisp": { type: "t", value: generateHeightmap( 64 ) }, 94 | "amount": { type: "f", value: 0.08 }, 95 | "snow": { type: "f", value: 0.5 }, 96 | "angle": { type: "f", value: 0.02 }, 97 | "seed": { type: "f", value: 0.02 }, 98 | "seed_x": { type: "f", value: 0.02 },//-1,1 99 | "seed_y": { type: "f", value: 0.02 },//-1,1 100 | "distortion_x": { type: "f", value: 0.5 }, 101 | "distortion_y": { type: "f", value: 0.6 }, 102 | "col_s": { type: "f", value: 0.05 }, 103 | "intensity": { type: "f", value: 0.33 } 104 | }; 105 | 106 | for(var k in controlUniforms) { 107 | scene.userData["glitch_" + k] = controlUniforms[k]; 108 | } 109 | 110 | scene.glitch = controlUniforms; 111 | 112 | scene.addEventListener("beforeRender", function () { 113 | controlUniforms[ 'seed' ].value = Math.random();//default seeding 114 | if ( curF % randX == 0) { 115 | controlUniforms[ 'amount' ].value = Math.random() / 30; 116 | controlUniforms[ 'angle' ].value = THREE.Math.randFloat( - Math.PI, Math.PI ); 117 | controlUniforms[ 'seed_x' ].value = THREE.Math.randFloat( - 1, 1 ); 118 | controlUniforms[ 'seed_y' ].value = THREE.Math.randFloat( - 1, 1 ); 119 | controlUniforms[ 'distortion_x' ].value = THREE.Math.randFloat( 0, 1 ); 120 | controlUniforms[ 'distortion_y' ].value = THREE.Math.randFloat( 0, 1 ); 121 | 
curF = 0; 122 | generateTrigger(); 123 | } else if ( curF % randX < randX / 5 ) { 124 | controlUniforms[ 'amount' ].value = Math.random() / 90; 125 | controlUniforms[ 'angle' ].value = THREE.Math.randFloat( - Math.PI, Math.PI ); 126 | controlUniforms[ 'distortion_x' ].value = THREE.Math.randFloat( 0, 1 ); 127 | controlUniforms[ 'distortion_y' ].value = THREE.Math.randFloat( 0, 1 ); 128 | controlUniforms[ 'seed_x' ].value = THREE.Math.randFloat( - 0.3, 0.3 ); 129 | controlUniforms[ 'seed_y' ].value = THREE.Math.randFloat( - 0.3, 0.3 ); 130 | } 131 | curF++; 132 | }); 133 | 134 | var fn = function (arg) { 135 | if(arg) { 136 | 137 | for(var k in controlUniforms) { 138 | if(arg[k] !== undefined && k in controlUniforms) controlUniforms[k].value = arg[k]; 139 | } 140 | 141 | curF = 0; 142 | generateTrigger(); 143 | } else { 144 | for(k in controlUniforms) { 145 | delete scene.userData["glitch_" + k]; 146 | } 147 | } 148 | } 149 | 150 | fn(config); 151 | 152 | return fn; 153 | } -------------------------------------------------------------------------------- /src/lib/godrays/index.js: -------------------------------------------------------------------------------- 1 | import * as THREE from "three"; 2 | 3 | THREE.ShaderChunk["godrays_pars"] = ` 4 | uniform sampler2D godrays_texture; 5 | 6 | float godrays_blendScreen(float base, float blend) { 7 | return 1.0-((1.0-base)*(1.0-blend)); 8 | } 9 | 10 | vec3 godrays_blendScreen(vec3 base, vec3 blend) { 11 | return vec3(godrays_blendScreen(base.r,blend.r),godrays_blendScreen(base.g,blend.g),godrays_blendScreen(base.b,blend.b)); 12 | } 13 | 14 | vec3 godrays_blendScreen(vec3 base, vec3 blend, float opacity) { 15 | return (godrays_blendScreen(base, blend) * opacity + base * (1.0 - opacity)); 16 | } 17 | 18 | void godrays_main(inout vec4 color, vec2 uv) { 19 | vec4 texel = texture2D(godrays_texture, uv); 20 | color.rgb = godrays_blendScreen( color.rgb, texel.rgb, godrays_intensity * godrays_attenuation); 21 | } 22 | `; 23 | 24 | export default function (scene, config) { 25 | 26 | config = config || {}; 27 | 28 | var inp = new THREE.WebGLRenderTarget(1,1); 29 | var ping = new THREE.WebGLRenderTarget(1,1); 30 | var pong = new THREE.WebGLRenderTarget(1,1); 31 | 32 | var passId = config.passId; 33 | 34 | function getPass(src, uniforms) { 35 | return new THREE.ShaderMaterial({ 36 | uniforms: uniforms, 37 | vertexShader: ` 38 | varying vec2 vUv; 39 | 40 | void main(void) { 41 | vUv = uv; 42 | gl_Position = vec4(position.xy, 0., 1.); 43 | } 44 | `, 45 | fragmentShader: "varying vec2 vUv;\n" + src, 46 | depthWrite: false, 47 | depthTest: false 48 | }); 49 | } 50 | 51 | var controlUniforms = {}; 52 | 53 | var preUniforms = config.inputUniforms || { 54 | colorTexture: { value: null }, 55 | depthTexture: { value: null }, 56 | threshold: { value: config.threshold || 0.9 }, 57 | smooth: { value: config.smooth || 0.01 } 58 | } 59 | 60 | var prePass = getPass(config.inputShader || ` 61 | 62 | `, preUniforms); 63 | 64 | var blurUniforms = { 65 | colorTexture: { value: null }, 66 | direction: { value: new THREE.Vector2(1, 0) }, 67 | resolution: { value: new THREE.Vector2(1, 1) } 68 | } 69 | 70 | var blurPass = getPass(` 71 | #include 72 | 73 | uniform sampler2D colorTexture; 74 | uniform vec2 direction; 75 | uniform vec2 resolution; 76 | 77 | void main(void) { 78 | vec4 color = vec4(0.0); 79 | vec2 off1 = vec2(1.3333333333333333) * direction; 80 | color += textureVR(colorTexture, vUv) * 0.29411764705882354; 81 | color += textureVR(colorTexture, vUv + (off1 / resolution)) 
* 0.35294117647058826; 82 | color += textureVR(colorTexture, vUv - (off1 / resolution)) * 0.35294117647058826; 83 | gl_FragColor = color; 84 | } 85 | `, blurUniforms) 86 | 87 | 88 | controlUniforms.strength = scene.userData.godrays_strength = postUniforms.strength; 89 | controlUniforms.radius = scene.userData.godrays_radius = postUniforms.radius; 90 | if (preUniforms.threshold) controlUniforms.threshold = scene.userData.godrays_threshold = preUniforms.threshold; 91 | scene.userData.godrays_texture = { value: ping[0].texture }; 92 | 93 | scene.userData.godrays_internal = {prePass, blurPass}; 94 | 95 | var _scene = new THREE.Scene(); 96 | var _ortho = new THREE.OrthographicCamera(1,1,1,1,1,10); 97 | var _quad = new THREE.Mesh(new THREE.PlaneBufferGeometry(2,2), null); 98 | _quad.frustumCulled = false; 99 | _scene.add(_quad); 100 | 101 | function performPass(renderer, m, inputTarget, outputTarget) { 102 | _quad.material = m; 103 | if (m.uniforms.colorTexture) 104 | m.uniforms.colorTexture.value = inputTarget ? inputTarget.texture : null; 105 | if (m.uniforms.depthTexture) 106 | m.uniforms.depthTexture.value = inputTarget ? inputTarget.depthTexture: null; 107 | if (m.uniforms.resolution) 108 | m.uniforms.resolution.value.set(inputTarget.width, inputTarget.height); 109 | renderer.setRenderTarget(outputTarget); 110 | renderer.render(_scene, _ortho); 111 | } 112 | 113 | var fn = function (e) { 114 | if(passId !== e.passId) return; 115 | 116 | blurUniforms.VR = { value: 0 }; 117 | 118 | performPass(e.renderer, prePass, e.renderTarget, inp); 119 | 120 | blurUniforms.VR.value = e.scene.userData.VR.value * 0.25; 121 | 122 | blurUniforms.step.value = Math.pow( 6, -1 ); 123 | performPass(e.renderer, blurPass, inp, ping); 124 | 125 | blurUniforms.step.value = Math.pow( 6, -2 ); 126 | performPass(e.renderer, blurPass, ping, pong); 127 | 128 | blurUniforms.step.value = Math.pow( 6, -3 ); 129 | performPass(e.renderer, blurPass, pong, ping); 130 | }; 131 | 132 | scene.addEventListener("afterPass", fn); 133 | 134 | var fr = function (e) { 135 | var w = e.size.x * 0.5, h = e.size.y * 0.5; 136 | inp.setSize(w, h); 137 | w = Math.floor(w * 0.5); 138 | h = Math.floor(h * 0.5); 139 | ping.setSize(w, h); 140 | pong.setSize(w, h); 141 | } 142 | 143 | scene.addEventListener("resizeEffects", fr); 144 | 145 | return function (arg) { 146 | if ( arg ) { 147 | for ( var k in arg) { 148 | if (controlUniforms[k]) { 149 | controlUniforms[k].value = arg[k]; 150 | } 151 | } 152 | } else { 153 | scene.removeEventListener("afterPass", fn); 154 | scene.removeEventListener("resizeEffects", fr); 155 | 156 | inp.dispose(); 157 | 158 | ping.dispose(); 159 | pong.dispose(); 160 | blurPass; 161 | prePass.dispose(); 162 | 163 | delete scene.userData.godrays_internal; 164 | 165 | //TODO Cleanup scene.userData; 166 | } 167 | } 168 | } -------------------------------------------------------------------------------- /src/lib/index.js: -------------------------------------------------------------------------------- 1 | import bloom from './bloom/index.js'; 2 | import fxaa from './fxaa/index.js'; 3 | import filmgrain from './filmgrain/index.js'; 4 | //import colors from './colors/index.js'; 5 | 6 | import glitch from './glitch/index.js' 7 | 8 | export { bloom, fxaa, filmgrain, glitch } -------------------------------------------------------------------------------- /src/lib/outline/index.js: -------------------------------------------------------------------------------- 1 | import { WebGLRenderTarget } from "three" 2 | 3 | 
THREE.ShaderChunk["outline_pars"] = ` 4 | uniform sampler2D outline_texture; 5 | uniform float outline_blend; 6 | uniform float outline_radius; 7 | uniform float outline_smooth; 8 | uniform vec3 outline_color; 9 | 10 | void outline_apply(inout vec4 color, vec2 uv) { 11 | vec4 texel = sampler2D(outline_texture, uv); 12 | float d = mix(length(texel.rg), texel.b, outline_blend); 13 | d = smoothstep(outline_radius, outline_radius + outline_smooth, d); 14 | color = mix(color, outline_color, d); 15 | } 16 | `; 17 | export default function(scene, config) { 18 | var ping = new WebGLRenderTarget(1,1); 19 | var pong = new WebGLRenderTarget(1,1); 20 | 21 | var controlUniforms = { 22 | normalColor: { value: new THREE.Color(0x000000) }, 23 | depthColor: { value: new THREE.Color(0x000000) }, 24 | intensity: { value: 1 }, 25 | factors: { value: [0.5, 0.5, 0.5] } 26 | } 27 | 28 | function getPass(src, uniforms) { 29 | return new THREE.ShaderMaterial({ 30 | uniforms: uniforms, 31 | vertexShader: ` 32 | varying vec2 vUv; 33 | 34 | void main(void) { 35 | vUv = uv; 36 | gl_Position = vec4(position.xy, 0., 1.); 37 | } 38 | `, 39 | fragmentShader: "varying vec2 vUv;\n" + src, 40 | depthWrite: false, 41 | depthTest: false 42 | }); 43 | } 44 | 45 | var prePass = getPass(` 46 | #include 47 | 48 | uniform mat4 cameraProjectionMatrixLeft; 49 | uniform mat4 cameraProjectionMatrixRight; 50 | 51 | uniform mat4 cameraInverseProjectionMatrixLeft; 52 | uniform mat4 cameraInverseProjectionMatrixRight; 53 | 54 | 55 | void main (void) { 56 | mat4 projectionMatrix = selectVR(cameraProjectionMatrix, cameraProjectionMatrixLeft, cameraProjectionMatrixRight); 57 | mat4 inverseProjectionMatrix = selectVR(cameraInverseProjectionMatrix, cameraInverseProjectionMatrixLeft, cameraInverseProjectionMatrixRight); 58 | 59 | float depth = texture2D(depthTexture, vUv).r; 60 | float clipW = projectionMatrix[2][3] * (perspectiveDepthToViewZ( depth, cameraNear, cameraFar )) + projectionMatrix[3][3]; 61 | vec4 clipPosition = vec4( ( vec3( vUv, depth ) - 0.5 ) * 2.0, 1.0 ); 62 | clipPosition *= clipW; 63 | vec3 viewPosition = ( inverseProjectionMatrix * clipPosition ).xyz; 64 | 65 | vec3 normal = normalize(cross(dFdx(viewPosition), dFdy(viewPosition))); 66 | normal = normal * 0.5 + 0.5; 67 | gl_FragColor = vec4(normal.xy, depth, 1.); 68 | } 69 | 70 | `, {}); 71 | 72 | var sobelPass = getPass(` 73 | uniform sampler2D colorTexture; 74 | uniform vec2 resolution; 75 | 76 | float 77 | void main(void) { 78 | vec2 texel = vec2( 1.0 / resolution.x, 1.0 / resolution.y ); 79 | 80 | // kernel definition (in glsl matrices are filled in column-major order) 81 | 82 | const mat3 Gx = mat3( -1, -2, -1, 0, 0, 0, 1, 2, 1 ); // x direction kernel 83 | const mat3 Gy = mat3( -1, 0, 1, -2, 0, 2, -1, 0, 1 ); // y direction kernel 84 | 85 | // fetch the 3x3 neighbourhood of a fragment 86 | 87 | // first column 88 | 89 | vec4 tx0y0 = texture2D( color, vUv + texel * vec2( -1, -1 ) ); 90 | vec4 tx0y1 = texture2D( tDiffuse, vUv + texel * vec2( -1, 0 ) ); 91 | vec4 tx0y2 = texture2D( tDiffuse, vUv + texel * vec2( -1, 1 ) ); 92 | 93 | // second column 94 | 95 | vec4 tx1y0 = texture2D( tDiffuse, vUv + texel * vec2( 0, -1 ) ); 96 | vec4 tx1y1 = texture2D( tDiffuse, vUv + texel * vec2( 0, 0 ) ); 97 | vec4 tx1y2 = texture2D( tDiffuse, vUv + texel * vec2( 0, 1 ) ); 98 | 99 | // third column 100 | 101 | vec4 tx2y0 = texture2D( tDiffuse, vUv + texel * vec2( 1, -1 ) ); 102 | vec4 tx2y1 = texture2D( tDiffuse, vUv + texel * vec2( 1, 0 ) ); 103 | vec4 tx2y2 = texture2D( tDiffuse, vUv + 
texel * vec2( 1, 1 ) ); 104 | 105 | // gradient value in x direction 106 | 107 | vec4 valueGx = Gx[0][0] * tx0y0 + Gx[1][0] * tx1y0 + Gx[2][0] * tx2y0 + 108 | Gx[0][1] * tx0y1 + Gx[1][1] * tx1y1 + Gx[2][1] * tx2y1 + 109 | Gx[0][2] * tx0y2 + Gx[1][2] * tx1y2 + Gx[2][2] * tx2y2; 110 | 111 | // gradient value in y direction 112 | 113 | vec4 valueGy = Gy[0][0] * tx0y0 + Gy[1][0] * tx1y0 + Gy[2][0] * tx2y0 + 114 | Gy[0][1] * tx0y1 + Gy[1][1] * tx1y1 + Gy[2][1] * tx2y1 + 115 | Gy[0][2] * tx0y2 + Gy[1][2] * tx1y2 + Gy[2][2] * tx2y2; 116 | 117 | // magnitute of the total gradient 118 | 119 | vec4 G = sqrt( ( valueGx * valueGx ) + ( valueGy * valueGy ) ); 120 | 121 | gl_FragColor = vec4( G.x, G.y, G.x + G.z * 1. / 256., 1. ); 122 | 123 | } 124 | 125 | `, lineUniforms); 126 | 127 | var blurPass = getPass(` 128 | uniform sampler2D colorTexture; 129 | uniform vec2 direction; 130 | uniform vec2 resolution; 131 | 132 | void main(void) { 133 | vec4 color = vec4(0.0); 134 | vec2 off1 = vec2(1.3333333333333333) * direction; 135 | color += textureVR(colorTexture, vUv) * 0.29411764705882354; 136 | color += textureVR(colorTexture, vUv + (off1 / resolution)) * 0.35294117647058826; 137 | color += textureVR(colorTexture, vUv - (off1 / resolution)) * 0.35294117647058826; 138 | gl_FragColor = color; 139 | } 140 | `, blurUniforms); 141 | 142 | 143 | var _scene = new THREE.Scene(); 144 | var _ortho = new THREE.OrthographicCamera(1,1,1,1,1,10); 145 | var _quad = new THREE.Mesh(new THREE.PlaneBufferGeometry(2,2), null); 146 | _quad.frustumCulled = false; 147 | _scene.add(_quad); 148 | 149 | function performPass(renderer, m, inputTarget, outputTarget) { 150 | _quad.material = m; 151 | if (m.uniforms.colorTexture) 152 | m.uniforms.colorTexture.value = inputTarget ? inputTarget.texture : null; 153 | if (m.uniforms.depthTexture) 154 | m.uniforms.depthTexture.value = inputTarget ? inputTarget.depthTexture: null; 155 | if (m.uniforms.resolution) 156 | m.uniforms.resolution.value.set(outputTarget.width, outputTarget.height); 157 | renderer.setRenderTarget(outputTarget); 158 | renderer.render(_scene, _ortho); 159 | } 160 | 161 | scene.addEventListener("afterRender", function (e) { 162 | 163 | }); 164 | return function (arg) { 165 | 166 | } 167 | } --------------------------------------------------------------------------------