├── .gitignore ├── INSTRUCTION.md ├── README.md ├── index.html ├── lib └── minimal-gltf-loader.js ├── models └── sponza │ ├── buffer_0.bin │ ├── color.jpeg │ ├── fragmentShader0.glsl │ ├── normal.png │ ├── sponza.gltf │ └── vertexShader0.glsl ├── package.json ├── src ├── init.js ├── main.js ├── renderers │ ├── clustered.js │ ├── clusteredDeferred.js │ ├── clusteredForwardPlus.js │ ├── forward.js │ └── textureBuffer.js ├── scene.js ├── shaders │ ├── clusteredForward.frag.glsl.js │ ├── clusteredForward.vert.glsl │ ├── deferred.frag.glsl.js │ ├── deferredToTexture.frag.glsl │ ├── deferredToTexture.vert.glsl │ ├── forward.frag.glsl.js │ ├── forward.vert.glsl │ └── quad.vert.glsl └── utils.js └── webpack.config.js /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules -------------------------------------------------------------------------------- /INSTRUCTION.md: -------------------------------------------------------------------------------- 1 | WebGL Clustered Deferred and Forward+ Shading - Instructions 2 | ========================================================== 3 | 4 | **This is due Thursday 10/26** 5 | 6 | ## Running the code 7 | 8 | - Clone this repository 9 | - Download and install [Node.js](https://nodejs.org/en/) 10 | - Run `npm install` in the root directory of this project. This will download and install dependencies 11 | - Run `npm start` and navigate to [http://localhost:5650](http://localhost:5650) 12 | 13 | This project requires a WebGL-capable browser with support for several extensions. You can check for support on [WebGL Report](http://webglreport.com/): 14 | - OES_texture_float 15 | - OES_texture_float_linear 16 | - OES_element_index_uint 17 | - EXT_frag_depth 18 | - WEBGL_depth_texture 19 | - WEBGL_draw_buffers 20 | 21 | Google Chrome seems to work best on all platforms. If you have problems running the starter code, use Chrome or Chromium, and make sure you have updated your browser and video drivers. 22 | 23 | ## Requirements 24 | **Ask on the mailing list for any clarifications** 25 | 26 | In this project, you are given code for: 27 | - Loading glTF models 28 | - Camera control 29 | - Simple forward renderer 30 | - Partial implementation and setup for Clustered Deferred and Forward+ shading 31 | - Many helpful utilities 32 | 33 | ## Required Tasks 34 | 35 | **Before doing performance analysis**, you must disable debug mode by changing `DEBUG` to false in `src/init.js`. Keep it enabled when developing - it helps find WebGL errors *much* more easily. 36 | 37 | **Clustered Forward+** 38 | - Build a data structure to keep track of how many lights are in each cluster and what their indices are (see the sketch below) 39 | - Render the scene using only the lights that overlap a given cluster
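As a rough sketch of what that per-cluster bookkeeping can look like (an illustration only, not the required implementation — `lightIdx` and the surrounding overlap test are hypothetical), each cluster's element in the cluster `TextureBuffer` can hold its light count in the first float, followed by the indices of the overlapping lights:

```js
// Sketch: record that light `lightIdx` overlaps cluster `i`, inside updateClusters().
// Assumes the first float of the cluster's element is the light count, followed by
// up to MAX_LIGHTS_PER_CLUSTER light indices (matching src/renderers/clustered.js).
const countIndex = this._clusterTexture.bufferIndex(i, 0);
const count = this._clusterTexture.buffer[countIndex] + 1;
if (count <= MAX_LIGHTS_PER_CLUSTER) {
  this._clusterTexture.buffer[countIndex] = count;
  const row = Math.floor(count / 4);   // which pixel of the cluster's element
  const component = count % 4;         // which float within that pixel
  this._clusterTexture.buffer[this._clusterTexture.bufferIndex(i, row) + component] = lightIdx;
}
```

The hard part of the task — deciding which clusters a given light actually overlaps — is deliberately not shown here.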
40 | 41 | **Clustered Deferred** 42 | - Reuse clustering logic from Clustered Forward+ 43 | - Store vertex attributes in the g-buffer 44 | - Read the g-buffer in a shader to produce the final output 45 | 46 | **Effects** 47 | - Implement deferred Blinn-Phong shading (diffuse + specular) for point lights 48 | - OR 49 | - Implement one of the following effects: 50 | - Bloom using post-process blur (box or Gaussian) 51 | - Toon shading (with ramp shading + simple depth-edge detection for outlines) 52 | 53 | **Optimizations** 54 | - Optimized g-buffer format - reduce the number and size of g-buffers: 55 | - Ideas: 56 | - Pack values together into vec4s 57 | - Use 2-component normals 58 | - Quantize values by using smaller texture types instead of gl.FLOAT 59 | - Reduce the number of properties passed via the g-buffer, e.g. by: 60 | - Reconstructing world space position using the camera matrices and X/Y/depth 61 | - For credit, you must show a good optimization effort and record the performance of each version you test in a simple table. 62 | - You are not expected to need all 4 provided g-buffers for a basic pipeline, so make sure you disable the unused ones. 63 | 64 | ## Performance & Analysis 65 | 66 | Compare your implementations of Clustered Forward+ and Clustered Deferred shading and analyze their differences. 67 | - Is one of them faster? 68 | - Is one of them better at certain types of workloads? 69 | - What are the benefits and tradeoffs of using one over the other? 70 | - For any differences in performance, briefly explain what may be causing the difference. 71 | 72 | **Before doing performance analysis**, you must disable debug mode by changing `DEBUG` to false in `src/init.js`. Keep it enabled when developing - it helps find WebGL errors *much* more easily. 73 | 74 | Optimize your JavaScript and/or GLSL code. Chrome/Firefox's profiling tools (see Resources section) will be useful for this. For each change that improves performance, show the before and after render times. 75 | 76 | For each new effect feature (required or extra), please provide the following analysis: 77 | - Concise overview write-up of the feature. 78 | - Performance change due to adding the feature. 79 | - If applicable, how do parameters (such as number of lights, etc.) affect performance? Show data with simple graphs. 80 | - Show timing in milliseconds, not FPS. 81 | - If you did something to accelerate the feature, what did you do and why? 82 | - How might this feature be optimized beyond your current implementation? 83 | 84 | For each performance feature (required or extra), please provide: 85 | - Concise overview write-up of the feature. 86 | - Detailed performance improvement analysis of adding the feature. 87 | - What is the best case scenario for your performance improvement? What is the worst? Explain briefly. 88 | - Are there tradeoffs to this performance feature? Explain briefly. 89 | - How do parameters (such as number of lights, tile size, etc.) affect performance? Show data with graphs. 90 | - Show timing in milliseconds, not FPS. 91 | - Show debug views when possible. 92 | - If the debug view correlates with performance, explain how. 93 | 94 | ## Starter Code Tour 95 | 96 | Initialization happens in `src/init.js`.
You don't need to worry about this; it is mostly initializing the gl context, debug modes, extensions, etc. 97 | 98 | `src/main.js` is the configuration for the renderers. It sets up the GUI for switching renderers and initializes the scene and render loop. The only important things here are the arguments for `ClusteredForwardPlusRenderer` and `ClusteredDeferredRenderer`. These constructors take the number of x, y, and z slices to split the frustum into. 99 | 100 | `src/scene.js` handles loading a .gltf scene and initializes the lights. Here, you can modify the number of lights, their positions, and how they move around. Also, take a look at the `draw` function. This handles binding the vertex attributes, which are hardcoded to `a_position`, `a_normal`, and `a_uv`, as well as binding the color and normal maps to targets `gl.TEXTURE0` and `gl.TEXTURE1`. 101 | 102 | **Simple Forward Shading Pipeline** 103 | I've written a simple forward shading pipeline as an example of how everything works. Check out `src/renderers/forward.js`. 104 | 105 | The constructor for the renderer initializes a `TextureBuffer` to store the lights. This isn't strictly necessary for a forward renderer, but you'll need it to do clustered shading. What we're trying to do here is upload the positions of all our lights to a shader. Unfortunately, we can't upload arbitrary data to the GPU with WebGL, so we have to pack it into a texture. Figuring out how to do this is terribly painful, so I did it for you. 106 | 107 | The constructor for `TextureBuffer` takes two arguments: the number of elements and the size of each element (in floats). It will allocate a floating point texture of dimension `numElements x ceil(elementSize / 4)`. This is because we pack every 4 adjacent values into a single pixel. 108 | 109 | Go to the `render` function to see how this is used in practice. Here, the buffer backing the light texture is populated with the light positions. Notice that the first four values get stored at locations `this._lightTexture.bufferIndex(i, 0) + 0` to `this._lightTexture.bufferIndex(i, 0) + 3`, and then the next three are at `this._lightTexture.bufferIndex(i, 1) + 0` to `this._lightTexture.bufferIndex(i, 1) + 2`. Keep in mind that the data is stored as a texture, so the 5th element is actually the 1st element of the pixel in the second row. 110 | 111 | Look again at the constructor of `ForwardRenderer`. Also initialized here is the shader program. The shader program takes in a vertex source, a fragment source, and then a map of which uniforms and vertex attributes should be extracted from the shader. In this code, the shader location for `u_viewProjectionMatrix` gets stored as `this._shaderProgram.u_viewProjectionMatrix`. If you look at `fsSource`, there's a strange thing happening there. `fsSource` is actually a function, and it's being called with a configuration object containing the number of lights. What this is doing is creating a shader source string that is parameterized. We can't have dynamic loops in WebGL, but we can dynamically generate static shaders. If you take a look at `src/shaders/forward.frag.glsl.js`, you'll see that `${numLights}` is used throughout. 112 | 113 | Now go look inside `src/shaders/forward.frag.glsl.js`. Here, a simple loop iterates over the lights and applies shading for each one. I've written a helper called `UnpackLight(index)` which unpacks the `index`th light from the texture into a struct. Make sure you fully understand how this works, because you will need to implement something similar for clusters. Inside `UnpackLight` I use another helper called `ExtractFloat(texture, textureWidth, textureHeight, index, component)`. This pulls out the `component`th component of the `index`th value packed inside a `textureWidth x textureHeight` texture. Again, this is meant to be an example implementation. Using this function to pull out four values into a `vec4` will be unnecessarily slow.
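To make this pattern concrete, here is a stripped-down sketch of a parameterized shader module. This is not the actual contents of `src/shaders/forward.frag.glsl.js` — the real shader also unpacks the light's radius and color and applies distance falloff — but it shows how `${numLights}` bakes the loop bound into an otherwise static shader, and how one pixel of the packed light texture can be fetched:

```js
// Sketch of a parameterized fragment-shader source module (simplified, for illustration).
export default function(params) {
  return `
  precision highp float;

  uniform sampler2D u_lightbuffer;  // lights packed as 8 floats each => 2 pixels per light
  varying vec3 v_position;

  void main() {
    vec3 color = vec3(0.0);
    for (int i = 0; i < ${params.numLights}; ++i) {
      // u picks the light (column), v picks the pixel row; row 0 holds xyz = position, w = radius.
      float u = (float(i) + 0.5) / float(${params.numLights});
      vec4 positionAndRadius = texture2D(u_lightbuffer, vec2(u, 0.25));
      // ...accumulate this light's contribution to color using v_position, positionAndRadius, etc...
    }
    gl_FragColor = vec4(color, 1.0);
  }
  `;
}
```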
114 | 115 | **Getting Started** 116 | Here are a few tips to get you started. 117 | 118 | 1. Complete `updateClusters` in `src/renderers/clustered.js`. This should update the cluster `TextureBuffer` with a mapping from cluster index to light count and light list (indices). 119 | 120 | 2. Update `src/shaders/clusteredForward.frag.glsl.js` to: 121 | - Determine the cluster for a fragment 122 | - Read in the lights in that cluster from the populated data 123 | - Do shading for just those lights 124 | - You may find it necessary to bind additional uniforms in `src/renderers/clusteredForwardPlus.js` 125 | 126 | 3. Update `src/shaders/deferredToTexture.frag.glsl` to write the desired data to the g-buffer 127 | 4. Update `src/shaders/deferred.frag.glsl.js` to read values from the g-buffer and perform simple forward rendering. (Right now it just outputs the screen xy coordinate) 128 | 5. Update the deferred shader to use clustered shading. You should be able to reuse a lot from Clustered Forward+ here. You will also likely need to update the shader inputs in `src/renderers/clusteredDeferred.js` 129 | 130 | ## README 131 | 132 | Replace the contents of README.md with a clear write-up covering the following: 133 | - A brief description of the project and the specific features you implemented. 134 | - At least one screenshot of your project running. 135 | - A 30+ second video/gif of your project running showing all features. (Even though your demo can be seen online, using multiple render targets means it won't run on many computers. A video will work everywhere.) 136 | - Performance analysis (described above) 137 | 138 | **GitHub Pages** 139 | Since this assignment is in WebGL, you can make your project easily viewable by taking advantage of GitHub's project pages feature. 140 | 141 | Once you are done with the assignment, create a new branch: 142 | 143 | `git branch gh-pages` 144 | 145 | Run `npm run build` and commit the compiled files. 146 | 147 | Push the branch to GitHub: 148 | 149 | `git push origin gh-pages` 150 | 151 | Now you can go to `<username>.github.io/<repo-name>` to see your renderer online from anywhere. Add this link to your README. 152 | 153 | ## Submit 154 | 155 | Beware of any build issues discussed on the Google Group. 156 | 157 | Open a GitHub pull request so that we can see that you have finished. The title should be "Project 5B: YOUR NAME". A template for the comment section of your pull request is provided below; feel free to copy and paste from it: 158 | 159 | - Repo Link 160 | - A brief mention of the features you've completed, especially any bells and whistles you want to highlight 161 | - Feature 0 162 | - Feature 1 163 | - ... 164 | - Feedback on the project itself, if any. 165 | 166 | ### Third-Party Code Policy 167 | 168 | - Use of any third-party code must be approved by asking on our mailing list. 169 | - If it is approved, all students are welcome to use it. Generally, we approve use of third-party code that is not a core part of the project.
For example, for the path tracer, we would approve using a third-party library for loading models, but would not approve copying and pasting a CUDA function for doing refraction. 170 | - Third-party code **MUST** be credited in README.md. 171 | - Using third-party code without its approval, including using another student's code, is an academic integrity violation, and will, at minimum, result in you receiving an F for the semester. 172 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | WebGL Clustered Deferred and Forward+ Shading 2 | ====================== 3 | 4 | **University of Pennsylvania, CIS 565: GPU Programming and Architecture, Project 5** 5 | 6 | * (TODO) YOUR NAME HERE 7 | * Tested on: (TODO) **Google Chrome 222.2** on 8 | Windows 22, i7-2222 @ 2.22GHz 22GB, GTX 222 222MB (Moore 2222 Lab) 9 | 10 | ### Live Online 11 | 12 | [![](img/thumb.png)](http://TODO.github.io/Project5B-WebGL-Deferred-Shading) 13 | 14 | ### Demo Video/GIF 15 | 16 | [![](img/video.png)](TODO) 17 | 18 | ### (TODO: Your README) 19 | 20 | *DO NOT* leave the README to the last minute! It is a crucial part of the 21 | project, and we will not be able to grade you without a good README. 22 | 23 | This assignment has a considerable amount of performance analysis compared 24 | to implementation work. Complete the implementation early to leave time! 25 | 26 | 27 | ### Credits 28 | 29 | * [Three.js](https://github.com/mrdoob/three.js) by [@mrdoob](https://github.com/mrdoob) and contributors 30 | * [stats.js](https://github.com/mrdoob/stats.js) by [@mrdoob](https://github.com/mrdoob) and contributors 31 | * [webgl-debug](https://github.com/KhronosGroup/WebGLDeveloperTools) by Khronos Group Inc. 
32 | * [glMatrix](https://github.com/toji/gl-matrix) by [@toji](https://github.com/toji) and contributors 33 | * [minimal-gltf-loader](https://github.com/shrekshao/minimal-gltf-loader) by [@shrekshao](https://github.com/shrekshao) 34 | -------------------------------------------------------------------------------- /index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 19 | 20 | 21 | 22 | 23 | 24 | -------------------------------------------------------------------------------- /lib/minimal-gltf-loader.js: -------------------------------------------------------------------------------- 1 | // From https://github.com/shrekshao/minimal-gltf-loader 2 | import {vec3, vec4, quat, mat4} from 'gl-matrix'; 3 | 4 | var MinimalGLTFLoader = MinimalGLTFLoader || {}; 5 | 6 | // Data classes 7 | var Scene = MinimalGLTFLoader.Scene = function () { 8 | // not 1-1 to meshes in json file 9 | // each mesh with a different node hierarchy is a new instance 10 | this.meshes = []; 11 | //this.meshes = {}; 12 | }; 13 | 14 | // Node 15 | 16 | var Mesh = MinimalGLTFLoader.Mesh = function () { 17 | this.meshID = ''; // mesh id name in glTF json meshes 18 | this.primitives = []; 19 | }; 20 | 21 | var Primitive = MinimalGLTFLoader.Primitive = function () { 22 | this.mode = 4; // default: gl.TRIANGLES 23 | 24 | this.matrix = mat4.create(); 25 | 26 | this.indices = null; 27 | this.indicesComponentType = 5123; // default: gl.UNSIGNED_SHORT 28 | 29 | // !!: assume vertex buffer is interleaved 30 | // see discussion https://github.com/KhronosGroup/glTF/issues/21 31 | this.vertexBuffer = null; 32 | 33 | // attribute info (stride, offset, etc) 34 | this.attributes = {}; 35 | 36 | // cur glTF spec supports only one material per primitive 37 | this.material = null; 38 | this.technique = null; 39 | 40 | 41 | 42 | // // Program gl buffer name 43 | // // ?? reconsider if it's suitable to put it here 44 | // this.indicesWebGLBufferName = null; 45 | // this.vertexWebGLBufferName = null; 46 | 47 | }; 48 | 49 | 50 | /** 51 | * 52 | */ 53 | var glTFModel = MinimalGLTFLoader.glTFModel = function () { 54 | this.defaultScene = ''; 55 | this.scenes = {}; 56 | 57 | this.nodeMatrix = {}; 58 | 59 | this.json = null; 60 | 61 | this.shaders = {}; 62 | this.programs = {}; 63 | 64 | this.images = {}; 65 | 66 | }; 67 | 68 | 69 | 70 | var gl; 71 | 72 | var glTFLoader = MinimalGLTFLoader.glTFLoader = function (glContext) { 73 | gl = glContext; 74 | this._init(); 75 | this.glTF = null; 76 | }; 77 | 78 | glTFLoader.prototype._init = function() { 79 | this._parseDone = false; 80 | this._loadDone = false; 81 | 82 | this._bufferRequested = 0; 83 | this._bufferLoaded = 0; 84 | this._buffers = {}; 85 | this._bufferTasks = {}; 86 | 87 | // ?? Move to glTFModel to avoid collected by GC ?? 
88 | this._bufferViews = {}; 89 | 90 | this._shaderRequested = 0; 91 | this._shaderLoaded = 0; 92 | 93 | this._imageRequested = 0; 94 | this._imageLoaded = 0; 95 | 96 | this._pendingTasks = 0; 97 | this._finishedPendingTasks = 0; 98 | 99 | this.onload = null; 100 | 101 | }; 102 | 103 | 104 | glTFLoader.prototype._getBufferViewData = function(json, bufferViewID, callback) { 105 | var bufferViewData = this._bufferViews[bufferViewID]; 106 | if(!bufferViewData) { 107 | // load bufferView for the first time 108 | var bufferView = json.bufferViews[bufferViewID]; 109 | var bufferData = this._buffers[bufferView.buffer]; 110 | if (bufferData) { 111 | // buffer already loaded 112 | //console.log("dependent buffer ready, create bufferView" + bufferViewID); 113 | this._bufferViews[bufferViewID] = bufferData.slice(bufferView.byteOffset, bufferView.byteOffset + bufferView.byteLength); 114 | callback(bufferViewData); 115 | } else { 116 | // buffer not yet loaded 117 | // add pending task to _bufferTasks 118 | //console.log("pending Task: wait for buffer to load bufferView " + bufferViewID); 119 | this._pendingTasks++; 120 | var bufferTask = this._bufferTasks[bufferView.buffer]; 121 | if (!bufferTask) { 122 | this._bufferTasks[bufferView.buffer] = []; 123 | bufferTask = this._bufferTasks[bufferView.buffer]; 124 | } 125 | var loader = this; 126 | bufferTask.push(function(newBufferData) { 127 | // share same bufferView 128 | // hierarchy needs to be post processed in the renderer 129 | var curBufferViewData = loader._bufferViews[bufferViewID]; 130 | if (!curBufferViewData) { 131 | console.log('create new BufferView Data for ' + bufferViewID); 132 | curBufferViewData = loader._bufferViews[bufferViewID] = newBufferData.slice(bufferView.byteOffset, bufferView.byteOffset + bufferView.byteLength); 133 | } 134 | loader._finishedPendingTasks++; 135 | callback(curBufferViewData); 136 | 137 | // // create new bufferView for each mesh access with a different hierarchy 138 | // // hierarchy transformation will be prepared in this way 139 | // console.log('create new BufferView Data for ' + bufferViewID); 140 | // loader._bufferViews[bufferViewID] = newBufferData.slice(bufferView.byteOffset, bufferView.byteOffset + bufferView.byteLength); 141 | // loader._finishedPendingTasks++; 142 | // callback(loader._bufferViews[bufferViewID]); 143 | }); 144 | } 145 | 146 | } else { 147 | // no need to load buffer from file 148 | // use cached ones 149 | //console.log("use cached bufferView " + bufferViewID); 150 | callback(bufferViewData); 151 | } 152 | }; 153 | 154 | // glTFLoader.prototype._doNextLoadTaskInList = function () { 155 | // }; 156 | 157 | glTFLoader.prototype._checkComplete = function () { 158 | if (this._bufferRequested == this._bufferLoaded && 159 | this._shaderRequested == this._shaderLoaded && 160 | this._imageRequested == this._imageLoaded 161 | // && other resources finish loading 162 | ) { 163 | this._loadDone = true; 164 | } 165 | 166 | if (this._loadDone && this._parseDone && this._pendingTasks == this._finishedPendingTasks) { 167 | this.onload(this.glTF); 168 | } 169 | }; 170 | 171 | 172 | glTFLoader.prototype._parseGLTF = function (json) { 173 | 174 | this.glTF.json = json; 175 | this.glTF.defaultScene = json.scene; 176 | 177 | // Iterate through every scene 178 | if (json.scenes) { 179 | for (var sceneID in json.scenes) { 180 | var newScene = new Scene(); 181 | this.glTF.scenes[sceneID] = newScene; 182 | 183 | var scene = json.scenes[sceneID]; 184 | var nodes = scene.nodes; 185 | var nodeLen = 
nodes.length; 186 | 187 | // Iterate through every node within scene 188 | for (var n = 0; n < nodeLen; ++n) { 189 | var nodeID = nodes[n]; 190 | //var node = json.nodes[nodeName]; 191 | 192 | // Traverse node 193 | this._parseNode(json, nodeID, newScene); 194 | } 195 | } 196 | } 197 | 198 | this._parseDone = true; 199 | this._checkComplete(); 200 | }; 201 | 202 | 203 | var translationVec3 = vec3.create(); 204 | var rotationQuat = quat.create(); 205 | var scaleVec3 = vec3.create(); 206 | var TRMatrix = mat4.create(); 207 | 208 | glTFLoader.prototype._parseNode = function(json, nodeID, newScene, matrix) { 209 | var node = json.nodes[nodeID]; 210 | 211 | if (matrix === undefined) { 212 | matrix = mat4.create(); 213 | } 214 | 215 | var curMatrix = mat4.create(); 216 | 217 | if (node.hasOwnProperty('matrix')) { 218 | // matrix 219 | for(var i = 0; i < 16; ++i) { 220 | curMatrix[i] = node.matrix[i]; 221 | } 222 | mat4.multiply(curMatrix, matrix, curMatrix); 223 | //mat4.multiply(curMatrix, curMatrix, matrix); 224 | } else { 225 | // translation, rotation, scale (TRS) 226 | // TODO: these labels are optional 227 | vec3.set(translationVec3, node.translation[0], node.translation[1], node.translation[2]); 228 | quat.set(rotationQuat, node.rotation[0], node.rotation[1], node.rotation[2], node.rotation[3]); 229 | mat4.fromRotationTranslation(TRMatrix, rotationQuat, translationVec3); 230 | mat4.multiply(curMatrix, curMatrix, TRMatrix); 231 | vec3.set(scaleVec3, node.scale[0], node.scale[1], node.scale[2]); 232 | mat4.scale(curMatrix, curMatrix, scaleVec3); 233 | } 234 | 235 | // store node matrix 236 | this.glTF.nodeMatrix[nodeID] = curMatrix; 237 | 238 | 239 | 240 | // Iterate through every mesh within node 241 | var meshes = node.meshes; 242 | if(!!meshes) { 243 | var meshLen = meshes.length; 244 | for (var m = 0; m < meshLen; ++m) { 245 | var newMesh = new Mesh(); 246 | newScene.meshes.push(newMesh); 247 | 248 | var meshName = meshes[m]; 249 | var mesh = json.meshes[meshName]; 250 | 251 | newMesh.meshID = meshName; 252 | 253 | // Iterate through primitives 254 | var primitives = mesh.primitives; 255 | var primitiveLen = primitives.length; 256 | 257 | for (var p = 0; p < primitiveLen; ++p) { 258 | var newPrimitive = new Primitive(); 259 | newMesh.primitives.push(newPrimitive); 260 | 261 | var primitive = primitives[p]; 262 | 263 | if (primitive.indices) { 264 | this._parseIndices(json, primitive, newPrimitive); 265 | } 266 | 267 | this._parseAttributes(json, primitive, newPrimitive, curMatrix); 268 | 269 | // required 270 | newPrimitive.material = json.materials[primitive.material]; 271 | 272 | if (newPrimitive.material.technique) { 273 | newPrimitive.technique = json.techniques[newPrimitive.material.technique]; 274 | } else { 275 | // TODO: use default technique in glTF spec Appendix A 276 | } 277 | 278 | } 279 | } 280 | } 281 | 282 | 283 | // Go through all the children recursively 284 | var children = node.children; 285 | var childreLen = children.length; 286 | for (var c = 0; c < childreLen; ++c) { 287 | var childNodeID = children[c]; 288 | this._parseNode(json, childNodeID, newScene, curMatrix); 289 | } 290 | 291 | }; 292 | 293 | 294 | glTFLoader.prototype._parseIndices = function(json, primitive, newPrimitive) { 295 | 296 | var accessorName = primitive.indices; 297 | var accessor = json.accessors[accessorName]; 298 | 299 | newPrimitive.mode = primitive.mode || 4; 300 | newPrimitive.indicesComponentType = accessor.componentType; 301 | 302 | var loader = this; 303 | this._getBufferViewData(json, 
accessor.bufferView, function(bufferViewData) { 304 | newPrimitive.indices = _getAccessorData(bufferViewData, accessor); 305 | loader._checkComplete(); 306 | }); 307 | }; 308 | 309 | 310 | 311 | 312 | //var tmpVec4 = vec4.create(); 313 | //var inverseTransposeMatrix = mat4.create(); 314 | 315 | glTFLoader.prototype._parseAttributes = function(json, primitive, newPrimitive, matrix) { 316 | // !! Assume interleaved vertex attributes 317 | // i.e., all attributes share one bufferView 318 | 319 | 320 | // vertex buffer processing 321 | var firstSemantic = Object.keys(primitive.attributes)[0]; 322 | var firstAccessor = json.accessors[primitive.attributes[firstSemantic]]; 323 | var vertexBufferViewID = firstAccessor.bufferView; 324 | var bufferView = json.bufferViews[vertexBufferViewID]; 325 | 326 | var loader = this; 327 | 328 | this._getBufferViewData(json, vertexBufferViewID, function(bufferViewData) { 329 | var data = newPrimitive.vertexBuffer = _arrayBuffer2TypedArray( 330 | bufferViewData, 331 | 0, 332 | bufferView.byteLength / ComponentType2ByteSize[firstAccessor.componentType], 333 | firstAccessor.componentType 334 | ); 335 | 336 | for (var attributeName in primitive.attributes) { 337 | var accessorName = primitive.attributes[attributeName]; 338 | var accessor = json.accessors[accessorName]; 339 | 340 | var componentTypeByteSize = ComponentType2ByteSize[accessor.componentType]; 341 | 342 | var stride = accessor.byteStride / componentTypeByteSize; 343 | var offset = accessor.byteOffset / componentTypeByteSize; 344 | var count = accessor.count; 345 | 346 | // // Matrix transformation 347 | // if (attributeName === 'POSITION') { 348 | // for (var i = 0; i < count; ++i) { 349 | // // TODO: add vec2 and other(needed?) support 350 | // vec4.set(tmpVec4, data[stride * i + offset] 351 | // , data[stride * i + offset + 1] 352 | // , data[stride * i + offset + 2] 353 | // , 1); 354 | // vec4.transformMat4(tmpVec4, tmpVec4, matrix); 355 | // vec4.scale(tmpVec4, tmpVec4, 1 / tmpVec4[3]); 356 | // data[stride * i + offset] = tmpVec4[0]; 357 | // data[stride * i + offset + 1] = tmpVec4[1]; 358 | // data[stride * i + offset + 2] = tmpVec4[2]; 359 | // } 360 | // } 361 | // else if (attributeName === 'NORMAL') { 362 | // mat4.invert(inverseTransposeMatrix, matrix); 363 | // mat4.transpose(inverseTransposeMatrix, inverseTransposeMatrix); 364 | 365 | // for (var i = 0; i < count; ++i) { 366 | // // @todo: add vec2 and other(needed?) support 367 | // vec4.set(tmpVec4, data[stride * i + offset] 368 | // , data[stride * i + offset + 1] 369 | // , data[stride * i + offset + 2] 370 | // , 0); 371 | // vec4.transformMat4(tmpVec4, tmpVec4, inverseTransposeMatrix); 372 | // vec4.normalize(tmpVec4, tmpVec4); 373 | // data[stride * i + offset] = tmpVec4[0]; 374 | // data[stride * i + offset + 1] = tmpVec4[1]; 375 | // data[stride * i + offset + 2] = tmpVec4[2]; 376 | // } 377 | // } 378 | 379 | 380 | // local transform matrix 381 | 382 | mat4.copy(newPrimitive.matrix, matrix); 383 | 384 | 385 | 386 | // for vertexAttribPointer 387 | newPrimitive.attributes[attributeName] = { 388 | //GLuint program location, 389 | size: Type2NumOfComponent[accessor.type], 390 | type: accessor.componentType, 391 | //GLboolean normalized 392 | stride: accessor.byteStride, 393 | offset: accessor.byteOffset 394 | }; 395 | 396 | } 397 | 398 | loader._checkComplete(); 399 | }); 400 | 401 | }; 402 | 403 | /** 404 | * load a glTF model 405 | * 406 | * @param {String} uri uri of the .glTF file. 
Other resources (bins, images) are assumed to be in the same base path 407 | * @param {Function} callback the onload callback function 408 | */ 409 | glTFLoader.prototype.loadGLTF = function (uri, callback) { 410 | 411 | this._init(); 412 | 413 | this.onload = callback || function(glTF) { 414 | console.log('glTF model loaded.'); 415 | console.log(glTF); 416 | }; 417 | 418 | 419 | this.glTF = new glTFModel(); 420 | 421 | this.baseUri = _getBaseUri(uri); 422 | 423 | var loader = this; 424 | 425 | _loadJSON(uri, function (response) { 426 | // Parse JSON string into object 427 | var json = JSON.parse(response); 428 | 429 | var bid; 430 | 431 | var loadArrayBufferCallback = function (resource) { 432 | 433 | loader._buffers[bid] = resource; 434 | loader._bufferLoaded++; 435 | if (loader._bufferTasks[bid]) { 436 | var i,len; 437 | for (i = 0, len = loader._bufferTasks[bid].length; i < len; ++i) { 438 | (loader._bufferTasks[bid][i])(resource); 439 | } 440 | } 441 | loader._checkComplete(); 442 | 443 | }; 444 | 445 | // Launch loading resources task: buffers, etc. 446 | if (json.buffers) { 447 | for (bid in json.buffers) { 448 | 449 | loader._bufferRequested++; 450 | 451 | _loadArrayBuffer(loader.baseUri + json.buffers[bid].uri, loadArrayBufferCallback); 452 | 453 | } 454 | } 455 | 456 | // load images 457 | 458 | 459 | var loadImageCallback = function (img, iid) { 460 | loader._imageLoaded++; 461 | loader.glTF.images[iid] = img; 462 | loader._checkComplete(); 463 | }; 464 | 465 | var iid; 466 | 467 | if (json.images) { 468 | for (iid in json.images) { 469 | loader._imageRequested++; 470 | _loadImage(loader.baseUri + json.images[iid].uri, iid, loadImageCallback); 471 | } 472 | } 473 | 474 | 475 | // load shaders 476 | var pid; 477 | var newProgram; 478 | 479 | var loadVertexShaderFileCallback = function (resource) { 480 | loader._shaderLoaded++; 481 | newProgram.vertexShader = resource; 482 | if (newProgram.fragmentShader) { 483 | // create Program 484 | newProgram.program = _createProgram(gl, newProgram.vertexShader, newProgram.fragmentShader); 485 | loader._checkComplete(); 486 | } 487 | }; 488 | var loadFragmentShaderFileCallback = function (resource) { 489 | loader._shaderLoaded++; 490 | newProgram.fragmentShader = resource; 491 | if (newProgram.vertexShader) { 492 | // create Program 493 | newProgram.program = _createProgram(gl, newProgram.vertexShader, newProgram.fragmentShader); 494 | loader._checkComplete(); 495 | } 496 | }; 497 | 498 | if (json.programs) { 499 | for (pid in json.programs) { 500 | newProgram = loader.glTF.programs[pid] = { 501 | vertexShader: null, 502 | fragmentShader: null, 503 | program: null 504 | }; 505 | var program = json.programs[pid]; 506 | loader._shaderRequested += 2; 507 | 508 | _loadShaderFile(loader.baseUri + json.shaders[program.vertexShader].uri, loadVertexShaderFileCallback); 509 | _loadShaderFile(loader.baseUri + json.shaders[program.fragmentShader].uri, loadFragmentShaderFileCallback); 510 | } 511 | } 512 | 513 | 514 | 515 | 516 | // start glTF scene parsing 517 | loader._parseGLTF(json); 518 | }); 519 | }; 520 | 521 | 522 | 523 | 524 | // TODO: get from gl context 525 | var ComponentType2ByteSize = { 526 | 5120: 1, // BYTE 527 | 5121: 1, // UNSIGNED_BYTE 528 | 5122: 2, // SHORT 529 | 5123: 2, // UNSIGNED_SHORT 530 | 5126: 4 // FLOAT 531 | }; 532 | 533 | var Type2NumOfComponent = { 534 | 'SCALAR': 1, 535 | 'VEC2': 2, 536 | 'VEC3': 3, 537 | 'VEC4': 4, 538 | 'MAT2': 4, 539 | 'MAT3': 9, 540 | 'MAT4': 16 541 | }; 542 | 543 | MinimalGLTFLoader.Attributes = [ 
544 | 'POSITION', 545 | 'NORMAL', 546 | 'TEXCOORD', 547 | 'COLOR', 548 | 'JOINT', 549 | 'WEIGHT' 550 | ]; 551 | 552 | // MinimalGLTFLoader.UniformFunctionsBind = { 553 | // 35676: gl.uniformMatrix4fv // FLOAT_MAT4 554 | // }; 555 | 556 | 557 | // ------ Scope limited private util functions--------------- 558 | 559 | function _arrayBuffer2TypedArray(resource, byteOffset, countOfComponentType, componentType) { 560 | switch(componentType) { 561 | // @todo: finish 562 | case 5122: return new Int16Array(resource, byteOffset, countOfComponentType); 563 | case 5123: return new Uint16Array(resource, byteOffset, countOfComponentType); 564 | case 5124: return new Int32Array(resource, byteOffset, countOfComponentType); 565 | case 5125: return new Uint32Array(resource, byteOffset, countOfComponentType); 566 | case 5126: return new Float32Array(resource, byteOffset, countOfComponentType); 567 | default: return null; 568 | } 569 | } 570 | 571 | function _getAccessorData(bufferViewData, accessor) { 572 | return _arrayBuffer2TypedArray( 573 | bufferViewData, 574 | accessor.byteOffset, 575 | accessor.count * Type2NumOfComponent[accessor.type], 576 | accessor.componentType 577 | ); 578 | } 579 | 580 | function _getBaseUri(uri) { 581 | 582 | // https://github.com/AnalyticalGraphicsInc/cesium/blob/master/Source/Core/getBaseUri.js 583 | 584 | var basePath = ''; 585 | var i = uri.lastIndexOf('/'); 586 | if(i !== -1) { 587 | basePath = uri.substring(0, i + 1); 588 | } 589 | 590 | return basePath; 591 | } 592 | 593 | function _loadJSON(src, callback) { 594 | 595 | // native json loading technique from @KryptoniteDove: 596 | // http://codepen.io/KryptoniteDove/post/load-json-file-locally-using-pure-javascript 597 | 598 | var xobj = new XMLHttpRequest(); 599 | xobj.overrideMimeType("application/json"); 600 | xobj.open('GET', src, true); 601 | xobj.onreadystatechange = function () { 602 | if (xobj.readyState == 4 && // Request finished, response ready 603 | xobj.status == "200") { // Status OK 604 | callback(xobj.responseText, this); 605 | } 606 | }; 607 | xobj.send(null); 608 | } 609 | 610 | function _loadArrayBuffer(url, callback) { 611 | var xobj = new XMLHttpRequest(); 612 | xobj.responseType = 'arraybuffer'; 613 | xobj.open('GET', url, true); 614 | xobj.onreadystatechange = function () { 615 | if (xobj.readyState == 4 && // Request finished, response ready 616 | xobj.status == "200") { // Status OK 617 | var arrayBuffer = xobj.response; 618 | if (arrayBuffer && callback) { 619 | callback(arrayBuffer); 620 | } 621 | } 622 | }; 623 | xobj.send(null); 624 | } 625 | 626 | function _loadShaderFile(url, callback) { 627 | var xobj = new XMLHttpRequest(); 628 | xobj.responseType = 'text'; 629 | xobj.open('GET', url, true); 630 | xobj.onreadystatechange = function () { 631 | if (xobj.readyState == 4 && // Request finished, response ready 632 | xobj.status == "200") { // Status OK 633 | var file = xobj.response; 634 | if (file && callback) { 635 | callback(file); 636 | } 637 | } 638 | }; 639 | xobj.send(null); 640 | } 641 | 642 | function _loadImage(url, iid, onload) { 643 | var img = new Image(); 644 | img.src = url; 645 | img.onload = function() { 646 | onload(img, iid); 647 | }; 648 | } 649 | 650 | 651 | function _createShader(gl, source, type) { 652 | var shader = gl.createShader(type); 653 | gl.shaderSource(shader, source); 654 | gl.compileShader(shader); 655 | return shader; 656 | } 657 | 658 | function _createProgram(gl, vertexShaderSource, fragmentShaderSource) { 659 | var program = gl.createProgram(); 660 | var 
vshader = _createShader(gl, vertexShaderSource, gl.VERTEX_SHADER); 661 | var fshader = _createShader(gl, fragmentShaderSource, gl.FRAGMENT_SHADER); 662 | gl.attachShader(program, vshader); 663 | gl.deleteShader(vshader); 664 | gl.attachShader(program, fshader); 665 | gl.deleteShader(fshader); 666 | gl.linkProgram(program); 667 | 668 | var log = gl.getProgramInfoLog(program); 669 | if (log) { 670 | console.log(log); 671 | } 672 | 673 | log = gl.getShaderInfoLog(vshader); 674 | if (log) { 675 | console.log(log); 676 | } 677 | 678 | log = gl.getShaderInfoLog(fshader); 679 | if (log) { 680 | console.log(log); 681 | } 682 | 683 | return program; 684 | } 685 | 686 | export { glTFLoader }; -------------------------------------------------------------------------------- /models/sponza/buffer_0.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CIS565-Fall-2017/Project5-WebGL-Clustered-Deferred-Forward-Plus/e0e02e20d7ea8a421e12274e67cd237de81737c8/models/sponza/buffer_0.bin -------------------------------------------------------------------------------- /models/sponza/color.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CIS565-Fall-2017/Project5-WebGL-Clustered-Deferred-Forward-Plus/e0e02e20d7ea8a421e12274e67cd237de81737c8/models/sponza/color.jpeg -------------------------------------------------------------------------------- /models/sponza/fragmentShader0.glsl: -------------------------------------------------------------------------------- 1 | precision highp float; 2 | uniform vec4 u_ambient; 3 | uniform sampler2D u_diffuse; 4 | uniform sampler2D u_normal; 5 | uniform vec4 u_emission; 6 | uniform vec4 u_specular; 7 | uniform float u_shininess; 8 | uniform float u_transparency; 9 | varying vec3 v_positionEC; 10 | varying vec3 v_normal; 11 | varying vec2 v_texcoord_0; 12 | 13 | vec3 applyNormalMap(vec3 geomnor, vec3 normap) { 14 | normap = normap * 2.0 - 1.0; 15 | vec3 up = normalize(vec3(0.001, 1, 0.001)); 16 | vec3 surftan = normalize(cross(geomnor, up)); 17 | vec3 surfbinor = cross(geomnor, surftan); 18 | return normap.y * surftan + normap.x * surfbinor + normap.z * geomnor; 19 | } 20 | 21 | void main(void) { 22 | vec3 normal = applyNormalMap(normalize(v_normal), texture2D(u_normal, v_texcoord_0).rgb); 23 | vec4 diffuse = texture2D(u_diffuse, v_texcoord_0); 24 | vec3 diffuseLight = vec3(0.0, 0.0, 0.0); 25 | vec3 specular = u_specular.rgb; 26 | vec3 specularLight = vec3(0.0, 0.0, 0.0); 27 | vec3 emission = u_emission.rgb; 28 | vec3 ambient = u_ambient.rgb; 29 | vec3 viewDir = -normalize(v_positionEC); 30 | vec3 ambientLight = vec3(0.0, 0.0, 0.0); 31 | ambientLight += vec3(0.2, 0.2, 0.2); 32 | vec3 l = vec3(0.0, 0.0, 1.0); 33 | diffuseLight += vec3(1.0, 1.0, 1.0) * max(dot(normal,l), 0.); 34 | vec3 h = normalize(l + viewDir); 35 | float specularIntensity = max(0., pow(max(dot(normal, h), 0.), u_shininess)); 36 | specularLight += vec3(1.0, 1.0, 1.0) * specularIntensity; 37 | vec3 color = vec3(0.0, 0.0, 0.0); 38 | color += diffuse.rgb * diffuseLight; 39 | color += specular * specularLight; 40 | color += emission; 41 | color += ambient * ambientLight; 42 | gl_FragColor = vec4(color * diffuse.a, diffuse.a * u_transparency); 43 | } 44 | -------------------------------------------------------------------------------- /models/sponza/normal.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/CIS565-Fall-2017/Project5-WebGL-Clustered-Deferred-Forward-Plus/e0e02e20d7ea8a421e12274e67cd237de81737c8/models/sponza/normal.png -------------------------------------------------------------------------------- /models/sponza/sponza.gltf: -------------------------------------------------------------------------------- 1 | { 2 | "accessors": { 3 | "accessor_index_0": { 4 | "bufferView": "bufferView_1", 5 | "byteOffset": 0, 6 | "byteStride": 0, 7 | "componentType": 5125, 8 | "count": 199269, 9 | "type": "SCALAR", 10 | "min": [ 11 | 0 12 | ], 13 | "max": [ 14 | 199268 15 | ] 16 | }, 17 | "accessor_position": { 18 | "bufferView": "bufferView_0", 19 | "byteOffset": 0, 20 | "byteStride": 0, 21 | "componentType": 5126, 22 | "count": 148975, 23 | "min": [ 24 | -17.268321990966797, 25 | -0.006653999909758568, 26 | -7.7815141677856445 27 | ], 28 | "max": [ 29 | 17.551677703857422, 30 | 15.55334758758545, 31 | 7.818483829498291 32 | ], 33 | "type": "VEC3" 34 | }, 35 | "accessor_normal": { 36 | "bufferView": "bufferView_0", 37 | "byteOffset": 1787700, 38 | "byteStride": 0, 39 | "componentType": 5126, 40 | "count": 148975, 41 | "type": "VEC3", 42 | "min": [ 43 | null, 44 | null, 45 | null 46 | ], 47 | "max": [ 48 | null, 49 | null, 50 | null 51 | ] 52 | }, 53 | "accessor_uv": { 54 | "bufferView": "bufferView_0", 55 | "byteOffset": 3575400, 56 | "byteStride": 0, 57 | "componentType": 5126, 58 | "count": 148975, 59 | "type": "VEC2", 60 | "min": [ 61 | -57.04376983642578, 62 | -61.176544189453125 63 | ], 64 | "max": [ 65 | 57.97621536254883, 66 | 62.176544189453125 67 | ] 68 | } 69 | }, 70 | "asset": { 71 | "generator": "OBJ2GLTF", 72 | "premultipliedAlpha": true, 73 | "profile": { 74 | "api": "WebGL", 75 | "version": "1.0" 76 | }, 77 | "version": "1.0" 78 | }, 79 | "buffers": { 80 | "buffer_0": { 81 | "type": "arraybuffer", 82 | "byteLength": 5564276, 83 | "uri": "buffer_0.bin" 84 | } 85 | }, 86 | "bufferViews": { 87 | "bufferView_0": { 88 | "buffer": "buffer_0", 89 | "byteLength": 4767200, 90 | "byteOffset": 0, 91 | "target": 34962 92 | }, 93 | "bufferView_1": { 94 | "buffer": "buffer_0", 95 | "byteLength": 797076, 96 | "byteOffset": 4767200, 97 | "target": 34963 98 | } 99 | }, 100 | "images": { 101 | "color": { 102 | "uri": "color.jpeg" 103 | }, 104 | "normals": { 105 | "uri": "normal.png" 106 | } 107 | }, 108 | "materials": { 109 | "material_lambert2SG": { 110 | "name": "lambert2SG", 111 | "extensions": {}, 112 | "values": { 113 | "ambient": [ 114 | 0, 115 | 0, 116 | 0, 117 | 1 118 | ], 119 | "diffuse": "texture_color", 120 | "normalMap": "texture_normal", 121 | "emission": [ 122 | 0, 123 | 0, 124 | 0, 125 | 1 126 | ], 127 | "specular": [ 128 | 0, 129 | 0, 130 | 0, 131 | 1 132 | ], 133 | "shininess": 0, 134 | "transparency": 1 135 | }, 136 | "technique": "technique0" 137 | } 138 | }, 139 | "meshes": { 140 | "mesh_sponza": { 141 | "name": "sponza", 142 | "primitives": [ 143 | { 144 | "attributes": { 145 | "POSITION": "accessor_position", 146 | "NORMAL": "accessor_normal", 147 | "TEXCOORD_0": "accessor_uv" 148 | }, 149 | "indices": "accessor_index_0", 150 | "material": "material_lambert2SG", 151 | "mode": 4 152 | } 153 | ] 154 | } 155 | }, 156 | "nodes": { 157 | "rootNode": { 158 | "children": [], 159 | "meshes": [ 160 | "mesh_sponza" 161 | ], 162 | "matrix": [ 163 | 1, 164 | 0, 165 | 0, 166 | 0, 167 | 0, 168 | 1, 169 | 0, 170 | 0, 171 | 0, 172 | 0, 173 | 1, 174 | 0, 175 | 0, 176 | 0, 177 | 0, 178 | 1 179 | ] 180 | } 181 | }, 182 | "samplers": { 183 | "sampler_0": { 184 
| "magFilter": 9729, 185 | "minFilter": 9986, 186 | "wrapS": 10497, 187 | "wrapT": 10497 188 | } 189 | }, 190 | "scene": "scene_sponza", 191 | "scenes": { 192 | "scene_sponza": { 193 | "nodes": [ 194 | "rootNode" 195 | ] 196 | } 197 | }, 198 | "textures": { 199 | "texture_color": { 200 | "format": 6407, 201 | "internalFormat": 6407, 202 | "sampler": "sampler_0", 203 | "source": "color", 204 | "target": 3553, 205 | "type": 5121 206 | }, 207 | "texture_normal": { 208 | "format": 6407, 209 | "internalFormat": 6407, 210 | "sampler": "sampler_0", 211 | "source": "normals", 212 | "target": 3553, 213 | "type": 5121 214 | } 215 | }, 216 | "extensionsUsed": [], 217 | "animations": {}, 218 | "cameras": {}, 219 | "techniques": { 220 | "technique0": { 221 | "attributes": { 222 | "a_position": "position", 223 | "a_normal": "normal", 224 | "a_texcoord_0": "texcoord_0" 225 | }, 226 | "parameters": { 227 | "modelViewMatrix": { 228 | "semantic": "MODELVIEW", 229 | "type": 35676 230 | }, 231 | "projectionMatrix": { 232 | "semantic": "PROJECTION", 233 | "type": 35676 234 | }, 235 | "normalMatrix": { 236 | "semantic": "MODELVIEWINVERSETRANSPOSE", 237 | "type": 35675 238 | }, 239 | "ambient": { 240 | "type": 35666 241 | }, 242 | "diffuse": { 243 | "type": 35678 244 | }, 245 | "normalMap": { 246 | "type": 35678 247 | }, 248 | "emission": { 249 | "type": 35666 250 | }, 251 | "specular": { 252 | "type": 35666 253 | }, 254 | "shininess": { 255 | "type": 5126 256 | }, 257 | "transparency": { 258 | "type": 5126 259 | }, 260 | "position": { 261 | "semantic": "POSITION", 262 | "type": 35665 263 | }, 264 | "normal": { 265 | "semantic": "NORMAL", 266 | "type": 35665 267 | }, 268 | "texcoord_0": { 269 | "semantic": "TEXCOORD_0", 270 | "type": 35664 271 | } 272 | }, 273 | "program": "program0", 274 | "states": { 275 | "enable": [ 276 | 2884, 277 | 2929 278 | ] 279 | }, 280 | "uniforms": { 281 | "u_modelViewMatrix": "modelViewMatrix", 282 | "u_projectionMatrix": "projectionMatrix", 283 | "u_normalMatrix": "normalMatrix", 284 | "u_ambient": "ambient", 285 | "u_diffuse": "diffuse", 286 | "u_normal": "normalMap", 287 | "u_emission": "emission", 288 | "u_specular": "specular", 289 | "u_shininess": "shininess", 290 | "u_transparency": "transparency" 291 | } 292 | } 293 | }, 294 | "programs": { 295 | "program0": { 296 | "attributes": [ 297 | "a_position", 298 | "a_normal", 299 | "a_texcoord_0" 300 | ], 301 | "fragmentShader": "fragmentShader0", 302 | "vertexShader": "vertexShader0" 303 | } 304 | }, 305 | "shaders": { 306 | "vertexShader0": { 307 | "type": 35633, 308 | "uri": "vertexShader0.glsl" 309 | }, 310 | "fragmentShader0": { 311 | "type": 35632, 312 | "uri": "fragmentShader0.glsl" 313 | } 314 | }, 315 | "skins": {}, 316 | "extensions": {} 317 | } 318 | -------------------------------------------------------------------------------- /models/sponza/vertexShader0.glsl: -------------------------------------------------------------------------------- 1 | precision highp float; 2 | uniform mat4 u_modelViewMatrix; 3 | uniform mat4 u_projectionMatrix; 4 | uniform mat3 u_normalMatrix; 5 | attribute vec3 a_position; 6 | varying vec3 v_positionEC; 7 | attribute vec3 a_normal; 8 | varying vec3 v_normal; 9 | attribute vec2 a_texcoord_0; 10 | varying vec2 v_texcoord_0; 11 | void main(void) { 12 | vec4 pos = u_modelViewMatrix * vec4(a_position,1.0); 13 | v_positionEC = pos.xyz; 14 | gl_Position = u_projectionMatrix * pos; 15 | v_normal = u_normalMatrix * a_normal; 16 | v_texcoord_0 = a_texcoord_0; 17 | } 18 | 
-------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "scripts": { 3 | "start": "webpack-dev-server", 4 | "start:production": "webpack-dev-server --env.production", 5 | "build": "webpack --env.production" 6 | }, 7 | "dependencies": { 8 | "dat-gui": "^0.5.0", 9 | "gl-matrix": "^2.4.0", 10 | "spectorjs": "^0.9.0", 11 | "stats-js": "^1.0.0-alpha1", 12 | "three": "^0.87.1", 13 | "three-js": "^79.0.0", 14 | "three-orbitcontrols": "^1.2.1", 15 | "webgl-debug": "^1.0.2" 16 | }, 17 | "devDependencies": { 18 | "babel-core": "^6.26.0", 19 | "babel-loader": "^7.1.2", 20 | "babel-minify-webpack-plugin": "^0.2.0", 21 | "babel-preset-env": "^1.6.0", 22 | "webpack": "^3.7.1", 23 | "webpack-dev-server": "^2.9.2", 24 | "webpack-glsl-loader": "^1.0.1" 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/init.js: -------------------------------------------------------------------------------- 1 | // TODO: Change this to enable / disable debug mode 2 | export const DEBUG = true && process.env.NODE_ENV === 'development'; 3 | 4 | import DAT from 'dat-gui'; 5 | import WebGLDebug from 'webgl-debug'; 6 | import Stats from 'stats-js'; 7 | import { PerspectiveCamera } from 'three'; 8 | import OrbitControls from 'three-orbitcontrols'; 9 | import { Spector } from 'spectorjs'; 10 | 11 | export var ABORTED = false; 12 | export function abort(message) { 13 | ABORTED = true; 14 | throw message; 15 | } 16 | 17 | // Get the canvas element 18 | export const canvas = document.getElementById('canvas'); 19 | 20 | // Initialize the WebGL context 21 | const glContext = canvas.getContext('webgl'); 22 | 23 | // Get a debug context 24 | export const gl = DEBUG ? 
WebGLDebug.makeDebugContext(glContext, (err, funcName, args) => { 25 | abort(WebGLDebug.glEnumToString(err) + ' was caused by call to: ' + funcName); 26 | }) : glContext; 27 | 28 | const supportedExtensions = gl.getSupportedExtensions(); 29 | const requiredExtensions = [ 30 | 'OES_texture_float', 31 | 'OES_texture_float_linear', 32 | 'OES_element_index_uint', 33 | 'WEBGL_depth_texture', 34 | 'WEBGL_draw_buffers', 35 | ]; 36 | 37 | // Check that all required extensions are supported 38 | for (let i = 0; i < requiredExtensions.length; ++i) { 39 | if (supportedExtensions.indexOf(requiredExtensions[i]) < 0) { 40 | throw 'Unable to load extension ' + requiredExtensions[i]; 41 | } 42 | } 43 | 44 | // Get the maximum number of draw buffers 45 | gl.getExtension('OES_texture_float'); 46 | gl.getExtension('OES_texture_float_linear'); 47 | gl.getExtension('OES_element_index_uint'); 48 | gl.getExtension('WEBGL_depth_texture'); 49 | export const WEBGL_draw_buffers = gl.getExtension('WEBGL_draw_buffers'); 50 | export const MAX_DRAW_BUFFERS_WEBGL = gl.getParameter(WEBGL_draw_buffers.MAX_DRAW_BUFFERS_WEBGL); 51 | 52 | export const gui = new DAT.GUI(); 53 | 54 | // initialize statistics widget 55 | const stats = new Stats(); 56 | stats.setMode(1); // 0: fps, 1: ms 57 | stats.domElement.style.position = 'absolute'; 58 | stats.domElement.style.left = '0px'; 59 | stats.domElement.style.top = '0px'; 60 | document.body.appendChild(stats.domElement); 61 | 62 | // Initialize camera 63 | export const camera = new PerspectiveCamera(75, canvas.clientWidth / canvas.clientHeight, 0.1, 1000); 64 | 65 | // Initialize camera controls 66 | export const cameraControls = new OrbitControls(camera, canvas); 67 | cameraControls.enableDamping = true; 68 | cameraControls.enableZoom = true; 69 | cameraControls.rotateSpeed = 0.3; 70 | cameraControls.zoomSpeed = 1.0; 71 | cameraControls.panSpeed = 2.0; 72 | 73 | function setSize(width, height) { 74 | canvas.width = width; 75 | canvas.height = height; 76 | camera.aspect = width / height; 77 | camera.updateProjectionMatrix(); 78 | } 79 | 80 | setSize(canvas.clientWidth, canvas.clientHeight); 81 | window.addEventListener('resize', () => setSize(canvas.clientWidth, canvas.clientHeight)); 82 | 83 | if (DEBUG) { 84 | const spector = new Spector(); 85 | spector.displayUI(); 86 | } 87 | 88 | // Creates a render loop that is wrapped with camera update and stats logging 89 | export function makeRenderLoop(render) { 90 | return function tick() { 91 | cameraControls.update(); 92 | stats.begin(); 93 | render(); 94 | stats.end(); 95 | if (!ABORTED) { 96 | requestAnimationFrame(tick) 97 | } 98 | } 99 | } 100 | 101 | // import the main application 102 | require('./main'); 103 | -------------------------------------------------------------------------------- /src/main.js: -------------------------------------------------------------------------------- 1 | import { makeRenderLoop, camera, cameraControls, gui, gl } from './init'; 2 | import ForwardRenderer from './renderers/forward'; 3 | import ClusteredForwardPlusRenderer from './renderers/clusteredForwardPlus'; 4 | import ClusteredDeferredRenderer from './renderers/clusteredDeferred'; 5 | import Scene from './scene'; 6 | 7 | const FORWARD = 'Forward'; 8 | const CLUSTERED_FORWARD_PLUS = 'Clustered Forward+'; 9 | const CLUSTERED_DEFFERED = 'Clustered Deferred'; 10 | 11 | const params = { 12 | renderer: CLUSTERED_FORWARD_PLUS, 13 | _renderer: null, 14 | }; 15 | 16 | setRenderer(params.renderer); 17 | 18 | function setRenderer(renderer) { 19 | 
switch(renderer) { 20 | case FORWARD: 21 | params._renderer = new ForwardRenderer(); 22 | break; 23 | case CLUSTERED_FORWARD_PLUS: 24 | params._renderer = new ClusteredForwardPlusRenderer(15, 15, 15); 25 | break; 26 | case CLUSTERED_DEFFERED: 27 | params._renderer = new ClusteredDeferredRenderer(15, 15, 15); 28 | break; 29 | } 30 | } 31 | 32 | gui.add(params, 'renderer', [FORWARD, CLUSTERED_FORWARD_PLUS, CLUSTERED_DEFFERED]).onChange(setRenderer); 33 | 34 | const scene = new Scene(); 35 | scene.loadGLTF('models/sponza/sponza.gltf'); 36 | 37 | camera.position.set(-10, 8, 0); 38 | cameraControls.target.set(0, 2, 0); 39 | gl.enable(gl.DEPTH_TEST); 40 | 41 | function render() { 42 | scene.update(); 43 | params._renderer.render(camera, scene); 44 | } 45 | 46 | makeRenderLoop(render)(); -------------------------------------------------------------------------------- /src/renderers/clustered.js: -------------------------------------------------------------------------------- 1 | import { mat4, vec4, vec3 } from 'gl-matrix'; 2 | import { NUM_LIGHTS } from '../scene'; 3 | import TextureBuffer from './textureBuffer'; 4 | 5 | export const MAX_LIGHTS_PER_CLUSTER = 100; 6 | 7 | export default class ClusteredRenderer { 8 | constructor(xSlices, ySlices, zSlices) { 9 | // Create a texture to store cluster data. Each cluster stores the number of lights followed by the light indices 10 | this._clusterTexture = new TextureBuffer(xSlices * ySlices * zSlices, MAX_LIGHTS_PER_CLUSTER + 1); 11 | this._xSlices = xSlices; 12 | this._ySlices = ySlices; 13 | this._zSlices = zSlices; 14 | } 15 | 16 | updateClusters(camera, viewMatrix, scene) { 17 | // TODO: Update the cluster texture with the count and indices of the lights in each cluster 18 | // This will take some time. The math is nontrivial... 
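    // One possible approach (a hint, not the required implementation):
    //   1. Reset every cluster's light count to zero (the loop below does this).
    //   2. For each light, transform its position into view space using `viewMatrix` and use
    //      its radius to find the ranges of x, y, and z slices the light can overlap.
    //   3. For every cluster in those ranges, increment the count stored at
    //      bufferIndex(clusterIndex, 0) and write the light's index into that cluster's
    //      next free slot (see the packing discussion and sketch in INSTRUCTION.md).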
19 | 20 | for (let z = 0; z < this._zSlices; ++z) { 21 | for (let y = 0; y < this._ySlices; ++y) { 22 | for (let x = 0; x < this._xSlices; ++x) { 23 | let i = x + y * this._xSlices + z * this._xSlices * this._ySlices; 24 | // Reset the light count to 0 for every cluster 25 | this._clusterTexture.buffer[this._clusterTexture.bufferIndex(i, 0)] = 0; 26 | } 27 | } 28 | } 29 | 30 | this._clusterTexture.update(); 31 | } 32 | } -------------------------------------------------------------------------------- /src/renderers/clusteredDeferred.js: -------------------------------------------------------------------------------- 1 | import { gl, WEBGL_draw_buffers, canvas } from '../init'; 2 | import { mat4, vec4 } from 'gl-matrix'; 3 | import { loadShaderProgram, renderFullscreenQuad } from '../utils'; 4 | import { NUM_LIGHTS } from '../scene'; 5 | import toTextureVert from '../shaders/deferredToTexture.vert.glsl'; 6 | import toTextureFrag from '../shaders/deferredToTexture.frag.glsl'; 7 | import QuadVertSource from '../shaders/quad.vert.glsl'; 8 | import fsSource from '../shaders/deferred.frag.glsl.js'; 9 | import TextureBuffer from './textureBuffer'; 10 | import ClusteredRenderer from './clustered'; 11 | 12 | export const NUM_GBUFFERS = 4; 13 | 14 | export default class ClusteredDeferredRenderer extends ClusteredRenderer { 15 | constructor(xSlices, ySlices, zSlices) { 16 | super(xSlices, ySlices, zSlices); 17 | 18 | this.setupDrawBuffers(canvas.width, canvas.height); 19 | 20 | // Create a texture to store light data 21 | this._lightTexture = new TextureBuffer(NUM_LIGHTS, 8); 22 | 23 | this._progCopy = loadShaderProgram(toTextureVert, toTextureFrag, { 24 | uniforms: ['u_viewProjectionMatrix', 'u_colmap', 'u_normap'], 25 | attribs: ['a_position', 'a_normal', 'a_uv'], 26 | }); 27 | 28 | this._progShade = loadShaderProgram(QuadVertSource, fsSource({ 29 | numLights: NUM_LIGHTS, 30 | numGBuffers: NUM_GBUFFERS, 31 | }), { 32 | uniforms: ['u_gbuffers[0]', 'u_gbuffers[1]', 'u_gbuffers[2]', 'u_gbuffers[3]'], 33 | attribs: ['a_uv'], 34 | }); 35 | 36 | this._projectionMatrix = mat4.create(); 37 | this._viewMatrix = mat4.create(); 38 | this._viewProjectionMatrix = mat4.create(); 39 | } 40 | 41 | setupDrawBuffers(width, height) { 42 | this._width = width; 43 | this._height = height; 44 | 45 | this._fbo = gl.createFramebuffer(); 46 | 47 | //Create, bind, and store a depth target texture for the FBO 48 | this._depthTex = gl.createTexture(); 49 | gl.bindTexture(gl.TEXTURE_2D, this._depthTex); 50 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST); 51 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST); 52 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); 53 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); 54 | gl.texImage2D(gl.TEXTURE_2D, 0, gl.DEPTH_COMPONENT, width, height, 0, gl.DEPTH_COMPONENT, gl.UNSIGNED_SHORT, null); 55 | gl.bindTexture(gl.TEXTURE_2D, null); 56 | 57 | gl.bindFramebuffer(gl.FRAMEBUFFER, this._fbo); 58 | gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.TEXTURE_2D, this._depthTex, 0); 59 | 60 | // Create, bind, and store "color" target textures for the FBO 61 | this._gbuffers = new Array(NUM_GBUFFERS); 62 | let attachments = new Array(NUM_GBUFFERS); 63 | for (let i = 0; i < NUM_GBUFFERS; i++) { 64 | attachments[i] = WEBGL_draw_buffers[`COLOR_ATTACHMENT${i}_WEBGL`]; 65 | this._gbuffers[i] = gl.createTexture(); 66 | gl.bindTexture(gl.TEXTURE_2D, this._gbuffers[i]); 67 | gl.texParameteri(gl.TEXTURE_2D, 
gl.TEXTURE_MAG_FILTER, gl.NEAREST); 68 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST); 69 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); 70 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); 71 | gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.FLOAT, null); 72 | gl.bindTexture(gl.TEXTURE_2D, null); 73 | 74 | gl.framebufferTexture2D(gl.FRAMEBUFFER, attachments[i], gl.TEXTURE_2D, this._gbuffers[i], 0); 75 | } 76 | 77 | if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) != gl.FRAMEBUFFER_COMPLETE) { 78 | throw "Framebuffer incomplete"; 79 | } 80 | 81 | // Tell the WEBGL_draw_buffers extension which FBO attachments are 82 | // being used. (This extension allows for multiple render targets.) 83 | WEBGL_draw_buffers.drawBuffersWEBGL(attachments); 84 | 85 | gl.bindFramebuffer(gl.FRAMEBUFFER, null); 86 | } 87 | 88 | resize(width, height) { 89 | this._width = width; 90 | this._height = height; 91 | 92 | gl.bindTexture(gl.TEXTURE_2D, this._depthTex); 93 | gl.texImage2D(gl.TEXTURE_2D, 0, gl.DEPTH_COMPONENT, width, height, 0, gl.DEPTH_COMPONENT, gl.UNSIGNED_SHORT, null); 94 | for (let i = 0; i < NUM_GBUFFERS; i++) { 95 | gl.bindTexture(gl.TEXTURE_2D, this._gbuffers[i]); 96 | gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.FLOAT, null); 97 | } 98 | gl.bindTexture(gl.TEXTURE_2D, null); 99 | } 100 | 101 | render(camera, scene) { 102 | if (canvas.width != this._width || canvas.height != this._height) { 103 | this.resize(canvas.width, canvas.height); 104 | } 105 | 106 | // Update the camera matrices 107 | camera.updateMatrixWorld(); 108 | mat4.invert(this._viewMatrix, camera.matrixWorld.elements); 109 | mat4.copy(this._projectionMatrix, camera.projectionMatrix.elements); 110 | mat4.multiply(this._viewProjectionMatrix, this._projectionMatrix, this._viewMatrix); 111 | 112 | // Render to the whole screen 113 | gl.viewport(0, 0, canvas.width, canvas.height); 114 | 115 | // Bind the framebuffer 116 | gl.bindFramebuffer(gl.FRAMEBUFFER, this._fbo); 117 | 118 | // Clear the frame 119 | gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); 120 | 121 | // Use the shader program to copy to the draw buffers 122 | gl.useProgram(this._progCopy.glShaderProgram); 123 | 124 | // Upload the camera matrix 125 | gl.uniformMatrix4fv(this._progCopy.u_viewProjectionMatrix, false, this._viewProjectionMatrix); 126 | 127 | // Draw the scene. 
This function takes the shader program so that the model's textures can be bound to the right inputs 128 | scene.draw(this._progCopy); 129 | 130 | // Update the buffer used to populate the texture packed with light data 131 | for (let i = 0; i < NUM_LIGHTS; ++i) { 132 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 0] = scene.lights[i].position[0]; 133 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 1] = scene.lights[i].position[1]; 134 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 2] = scene.lights[i].position[2]; 135 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 3] = scene.lights[i].radius; 136 | 137 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 0] = scene.lights[i].color[0]; 138 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 1] = scene.lights[i].color[1]; 139 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 2] = scene.lights[i].color[2]; 140 | } 141 | // Update the light texture 142 | this._lightTexture.update(); 143 | 144 | // Update the clusters for the frame 145 | this.updateClusters(camera, this._viewMatrix, scene); 146 | 147 | // Bind the default null framebuffer which is the screen 148 | gl.bindFramebuffer(gl.FRAMEBUFFER, null); 149 | 150 | // Clear the frame 151 | gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); 152 | 153 | // Use this shader program 154 | gl.useProgram(this._progShade.glShaderProgram); 155 | 156 | // TODO: Bind any other shader inputs 157 | 158 | // Bind g-buffers 159 | const firstGBufferBinding = 0; // You may have to change this if you use other texture slots 160 | for (let i = 0; i < NUM_GBUFFERS; i++) { 161 | gl.activeTexture(gl[`TEXTURE${i + firstGBufferBinding}`]); 162 | gl.bindTexture(gl.TEXTURE_2D, this._gbuffers[i]); 163 | gl.uniform1i(this._progShade[`u_gbuffers[${i}]`], i + firstGBufferBinding); 164 | } 165 | 166 | renderFullscreenQuad(this._progShade); 167 | } 168 | }; 169 | -------------------------------------------------------------------------------- /src/renderers/clusteredForwardPlus.js: -------------------------------------------------------------------------------- 1 | import { gl, canvas } from '../init'; 2 | import { mat4, vec4, vec3 } from 'gl-matrix'; 3 | import { loadShaderProgram } from '../utils'; 4 | import { NUM_LIGHTS } from '../scene'; 5 | import vsSource from '../shaders/clusteredForward.vert.glsl'; 6 | import fsSource from '../shaders/clusteredForward.frag.glsl.js'; 7 | import TextureBuffer from './textureBuffer'; 8 | import ClusteredRenderer from './clustered'; 9 | 10 | export default class ClusteredForwardPlusRenderer extends ClusteredRenderer { 11 | constructor(xSlices, ySlices, zSlices) { 12 | super(xSlices, ySlices, zSlices); 13 | 14 | // Create a texture to store light data 15 | this._lightTexture = new TextureBuffer(NUM_LIGHTS, 8); 16 | 17 | this._shaderProgram = loadShaderProgram(vsSource, fsSource({ 18 | numLights: NUM_LIGHTS, 19 | }), { 20 | uniforms: ['u_viewProjectionMatrix', 'u_colmap', 'u_normap', 'u_lightbuffer', 'u_clusterbuffer'], 21 | attribs: ['a_position', 'a_normal', 'a_uv'], 22 | }); 23 | 24 | this._projectionMatrix = mat4.create(); 25 | this._viewMatrix = mat4.create(); 26 | this._viewProjectionMatrix = mat4.create(); 27 | } 28 | 29 | render(camera, scene) { 30 | // Update the camera matrices 31 | camera.updateMatrixWorld(); 32 | mat4.invert(this._viewMatrix, camera.matrixWorld.elements); 33 | mat4.copy(this._projectionMatrix, camera.projectionMatrix.elements); 34 | 
mat4.multiply(this._viewProjectionMatrix, this._projectionMatrix, this._viewMatrix); 35 | 36 | // Update cluster texture which maps from cluster index to light list 37 | this.updateClusters(camera, this._viewMatrix, scene); 38 | 39 | // Update the buffer used to populate the texture packed with light data 40 | for (let i = 0; i < NUM_LIGHTS; ++i) { 41 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 0] = scene.lights[i].position[0]; 42 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 1] = scene.lights[i].position[1]; 43 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 2] = scene.lights[i].position[2]; 44 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 3] = scene.lights[i].radius; 45 | 46 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 0] = scene.lights[i].color[0]; 47 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 1] = scene.lights[i].color[1]; 48 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 2] = scene.lights[i].color[2]; 49 | } 50 | // Update the light texture 51 | this._lightTexture.update(); 52 | 53 | // Bind the default null framebuffer which is the screen 54 | gl.bindFramebuffer(gl.FRAMEBUFFER, null); 55 | 56 | // Render to the whole screen 57 | gl.viewport(0, 0, canvas.width, canvas.height); 58 | 59 | // Clear the frame 60 | gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); 61 | 62 | // Use this shader program 63 | gl.useProgram(this._shaderProgram.glShaderProgram); 64 | 65 | // Upload the camera matrix 66 | gl.uniformMatrix4fv(this._shaderProgram.u_viewProjectionMatrix, false, this._viewProjectionMatrix); 67 | 68 | // Set the light texture as a uniform input to the shader 69 | gl.activeTexture(gl.TEXTURE2); 70 | gl.bindTexture(gl.TEXTURE_2D, this._lightTexture.glTexture); 71 | gl.uniform1i(this._shaderProgram.u_lightbuffer, 2); 72 | 73 | // Set the cluster texture as a uniform input to the shader 74 | gl.activeTexture(gl.TEXTURE3); 75 | gl.bindTexture(gl.TEXTURE_2D, this._clusterTexture.glTexture); 76 | gl.uniform1i(this._shaderProgram.u_clusterbuffer, 3); 77 | 78 | // TODO: Bind any other shader inputs 79 | 80 | // Draw the scene. This function takes the shader program so that the model's textures can be bound to the right inputs 81 | scene.draw(this._shaderProgram); 82 | } 83 | }; -------------------------------------------------------------------------------- /src/renderers/forward.js: -------------------------------------------------------------------------------- 1 | import { gl, canvas } from '../init'; 2 | import { mat4, vec4 } from 'gl-matrix'; 3 | import { loadShaderProgram } from '../utils'; 4 | import { NUM_LIGHTS } from '../scene'; 5 | import vsSource from '../shaders/forward.vert.glsl'; 6 | import fsSource from '../shaders/forward.frag.glsl.js'; 7 | import TextureBuffer from './textureBuffer'; 8 | 9 | export default class ForwardRenderer { 10 | constructor() { 11 | // Create a texture to store light data 12 | this._lightTexture = new TextureBuffer(NUM_LIGHTS, 8); 13 | 14 | // Initialize a shader program. 
The fragment shader source is compiled based on the number of lights 15 | this._shaderProgram = loadShaderProgram(vsSource, fsSource({ 16 | numLights: NUM_LIGHTS, 17 | }), { 18 | uniforms: ['u_viewProjectionMatrix', 'u_colmap', 'u_normap', 'u_lightbuffer'], 19 | attribs: ['a_position', 'a_normal', 'a_uv'], 20 | }); 21 | 22 | this._projectionMatrix = mat4.create(); 23 | this._viewMatrix = mat4.create(); 24 | this._viewProjectionMatrix = mat4.create(); 25 | } 26 | 27 | render(camera, scene) { 28 | // Update the camera matrices 29 | camera.updateMatrixWorld(); 30 | mat4.invert(this._viewMatrix, camera.matrixWorld.elements); 31 | mat4.copy(this._projectionMatrix, camera.projectionMatrix.elements); 32 | mat4.multiply(this._viewProjectionMatrix, this._projectionMatrix, this._viewMatrix); 33 | 34 | // Update the buffer used to populate the texture packed with light data 35 | for (let i = 0; i < NUM_LIGHTS; ++i) { 36 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 0] = scene.lights[i].position[0]; 37 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 1] = scene.lights[i].position[1]; 38 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 2] = scene.lights[i].position[2]; 39 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 3] = scene.lights[i].radius; 40 | 41 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 0] = scene.lights[i].color[0]; 42 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 1] = scene.lights[i].color[1]; 43 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 2] = scene.lights[i].color[2]; 44 | } 45 | // Update the light texture 46 | this._lightTexture.update(); 47 | 48 | // Bind the default null framebuffer which is the screen 49 | gl.bindFramebuffer(gl.FRAMEBUFFER, null); 50 | 51 | // Render to the whole screen 52 | gl.viewport(0, 0, canvas.width, canvas.height); 53 | 54 | // Clear the frame 55 | gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); 56 | 57 | // Use this shader program 58 | gl.useProgram(this._shaderProgram.glShaderProgram); 59 | 60 | // Upload the camera matrix 61 | gl.uniformMatrix4fv(this._shaderProgram.u_viewProjectionMatrix, false, this._viewProjectionMatrix); 62 | 63 | // Set the light texture as a uniform input to the shader 64 | gl.activeTexture(gl.TEXTURE2); 65 | gl.bindTexture(gl.TEXTURE_2D, this._lightTexture.glTexture); 66 | gl.uniform1i(this._shaderProgram.u_lightbuffer, 2); 67 | 68 | // Draw the scene. This function takes the shader program so that the model's textures can be bound to the right inputs 69 | scene.draw(this._shaderProgram); 70 | } 71 | }; 72 | -------------------------------------------------------------------------------- /src/renderers/textureBuffer.js: -------------------------------------------------------------------------------- 1 | import { gl } from '../init'; 2 | 3 | export default class TextureBuffer { 4 | /** 5 | * This class represents a buffer in a shader. Unfortunately we can't bind arbitrary buffers so we need to pack the data as a texture 6 | * @param {Number} elementCount The number of items in the buffer 7 | * @param {Number} elementSize The number of values in each item of the buffer 8 | */ 9 | constructor(elementCount, elementSize) { 10 | // Initialize the texture. We use gl.NEAREST for texture filtering because we don't want to blend between values in the buffer. 
We want the exact value 11 | this._glTexture = gl.createTexture(); 12 | gl.bindTexture(gl.TEXTURE_2D, this._glTexture); 13 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST); 14 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST); 15 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); 16 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); 17 | 18 | // The texture stores 4 values in each "pixel". Thus, the texture we create is elementCount x ceil(elementSize / 4) 19 | this._pixelsPerElement = Math.ceil(elementSize / 4); 20 | this._elementCount = elementCount; 21 | gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, elementCount, this._pixelsPerElement, 0, gl.RGBA, gl.FLOAT, null); 22 | gl.bindTexture(gl.TEXTURE_2D, null); 23 | 24 | // Create a buffer to use to upload to the texture 25 | this._buffer = new Float32Array(elementCount * 4 * this._pixelsPerElement); 26 | } 27 | 28 | get glTexture() { 29 | return this._glTexture; 30 | } 31 | 32 | get buffer() { 33 | return this._buffer; 34 | } 35 | 36 | /** 37 | * Computes the starting buffer index to a particular item. 38 | * @param {*} index The index of the item 39 | * @param {*} component The ith float of an element is located in the (i/4)th pixel 40 | */ 41 | bufferIndex(index, component) { 42 | return 4 * index + 4 * component * this._elementCount; 43 | } 44 | 45 | /** 46 | * Update the texture with the data in the buffer 47 | */ 48 | update() { 49 | gl.bindTexture(gl.TEXTURE_2D, this._glTexture); 50 | gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, this._elementCount, this._pixelsPerElement, gl.RGBA, gl.FLOAT, this._buffer); 51 | gl.bindTexture(gl.TEXTURE_2D, null); 52 | } 53 | }; -------------------------------------------------------------------------------- /src/scene.js: -------------------------------------------------------------------------------- 1 | const MinimalGLTFLoader = require('../lib/minimal-gltf-loader'); 2 | import { gl } from './init'; 3 | 4 | // TODO: Edit if you want to change the light initial positions 5 | export const LIGHT_MIN = [-14, 0, -6]; 6 | export const LIGHT_MAX = [14, 20, 6]; 7 | export const LIGHT_RADIUS = 5.0; 8 | export const LIGHT_DT = -0.03; 9 | 10 | // TODO: This controls the number of lights 11 | export const NUM_LIGHTS = 100; 12 | 13 | class Scene { 14 | constructor() { 15 | this.lights = []; 16 | this.models = []; 17 | 18 | for (let i = 0; i < NUM_LIGHTS; ++i) { 19 | this.lights.push({ 20 | position: new Float32Array([ 21 | Math.random() * (LIGHT_MAX[0] - LIGHT_MIN[0]) + LIGHT_MIN[0], 22 | Math.random() * (LIGHT_MAX[1] - LIGHT_MIN[1]) + LIGHT_MIN[1], 23 | Math.random() * (LIGHT_MAX[2] - LIGHT_MIN[2]) + LIGHT_MIN[2], 24 | ]), 25 | color: new Float32Array([ 26 | 0.5 + 0.5 * Math.random(), 27 | 0.5 + 0.5 * Math.random(), 28 | 0.5 + Math.random(), 29 | ]), 30 | radius: LIGHT_RADIUS, 31 | }); 32 | } 33 | } 34 | 35 | loadGLTF(url) { 36 | var glTFLoader = new MinimalGLTFLoader.glTFLoader(gl); 37 | glTFLoader.loadGLTF(url, glTF => { 38 | var curScene = glTF.scenes[glTF.defaultScene]; 39 | 40 | var webGLTextures = {}; 41 | 42 | // temp var 43 | var i,len; 44 | var primitiveOrderID; 45 | 46 | var mesh; 47 | var primitive; 48 | var vertexBuffer; 49 | var indicesBuffer; 50 | 51 | // textures setting 52 | var textureID = 0; 53 | var textureInfo; 54 | var samplerInfo; 55 | var target, format, internalFormat, type; // texture info 56 | var magFilter, minFilter, wrapS, wrapT; 57 | var image; 58 | var texture; 59 | 60 | // temp for sponza 61 | var 
colorTextureName = 'texture_color'; 62 | var normalTextureName = 'texture_normal'; 63 | 64 | for (var tid in glTF.json.textures) { 65 | textureInfo = glTF.json.textures[tid]; 66 | target = textureInfo.target || gl.TEXTURE_2D; 67 | format = textureInfo.format || gl.RGBA; 68 | internalFormat = textureInfo.format || gl.RGBA; 69 | type = textureInfo.type || gl.UNSIGNED_BYTE; 70 | 71 | image = glTF.images[textureInfo.source]; 72 | 73 | texture = gl.createTexture(); 74 | gl.activeTexture(gl.TEXTURE0 + textureID); 75 | gl.bindTexture(target, texture); 76 | 77 | switch(target) { 78 | case 3553: // gl.TEXTURE_2D 79 | gl.texImage2D(target, 0, internalFormat, format, type, image); 80 | break; 81 | } 82 | 83 | // !! Sampler 84 | // raw WebGL 1, no sampler object, set magfilter, wrapS, etc 85 | samplerInfo = glTF.json.samplers[textureInfo.sampler]; 86 | minFilter = samplerInfo.minFilter || gl.NEAREST_MIPMAP_LINEAR; 87 | magFilter = samplerInfo.magFilter || gl.LINEAR; 88 | wrapS = samplerInfo.wrapS || gl.REPEAT; 89 | wrapT = samplerInfo.wrapT || gl.REPEAT; 90 | gl.texParameteri(target, gl.TEXTURE_MIN_FILTER, minFilter); 91 | gl.texParameteri(target, gl.TEXTURE_MAG_FILTER, magFilter); 92 | gl.texParameteri(target, gl.TEXTURE_WRAP_S, wrapS); 93 | gl.texParameteri(target, gl.TEXTURE_WRAP_T, wrapT); 94 | if (minFilter == gl.NEAREST_MIPMAP_NEAREST || 95 | minFilter == gl.NEAREST_MIPMAP_LINEAR || 96 | minFilter == gl.LINEAR_MIPMAP_NEAREST || 97 | minFilter == gl.LINEAR_MIPMAP_LINEAR ) { 98 | gl.generateMipmap(target); 99 | } 100 | 101 | 102 | gl.bindTexture(target, null); 103 | 104 | webGLTextures[tid] = { 105 | texture: texture, 106 | target: target, 107 | id: textureID 108 | }; 109 | 110 | textureID++; 111 | } 112 | 113 | // vertex attributes 114 | for (var mid in curScene.meshes) { 115 | mesh = curScene.meshes[mid]; 116 | 117 | for (i = 0, len = mesh.primitives.length; i < len; ++i) { 118 | primitive = mesh.primitives[i]; 119 | 120 | vertexBuffer = gl.createBuffer(); 121 | indicesBuffer = gl.createBuffer(); 122 | 123 | // initialize buffer 124 | var vertices = primitive.vertexBuffer; 125 | gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer); 126 | gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW); 127 | gl.bindBuffer(gl.ARRAY_BUFFER, null); 128 | 129 | var indices = primitive.indices; 130 | gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indicesBuffer); 131 | gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, indices, gl.STATIC_DRAW); 132 | gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null); 133 | 134 | var posInfo = primitive.attributes[primitive.technique.parameters['position'].semantic]; 135 | var norInfo = primitive.attributes[primitive.technique.parameters['normal'].semantic]; 136 | var uvInfo = primitive.attributes[primitive.technique.parameters['texcoord_0'].semantic]; 137 | 138 | this.models.push({ 139 | gltf: primitive, 140 | 141 | idx: indicesBuffer, 142 | 143 | attributes: vertexBuffer, 144 | posInfo: {size: posInfo.size, type: posInfo.type, stride: posInfo.stride, offset: posInfo.offset}, 145 | norInfo: {size: norInfo.size, type: norInfo.type, stride: norInfo.stride, offset: norInfo.offset}, 146 | uvInfo: {size: uvInfo.size, type: uvInfo.type, stride: uvInfo.stride, offset: uvInfo.offset}, 147 | 148 | // specific textures temp test 149 | colmap: webGLTextures[colorTextureName].texture, 150 | normap: webGLTextures[normalTextureName].texture 151 | }); 152 | } 153 | } 154 | 155 | }); 156 | } 157 | 158 | update() { 159 | for (let i = 0; i < NUM_LIGHTS; i++) { 160 | // OPTIONAL TODO: Edit if you want to change how lights move 
161 | this.lights[i].position[1] += LIGHT_DT; 162 | // wrap lights from bottom to top 163 | this.lights[i].position[1] = (this.lights[i].position[1] + LIGHT_MAX[1] - LIGHT_MIN[1]) % LIGHT_MAX[1] + LIGHT_MIN[1]; 164 | } 165 | } 166 | 167 | draw(shaderProgram) { 168 | for (let i = 0; i < this.models.length; ++i) { 169 | const model = this.models[i]; 170 | if (model.colmap) { 171 | gl.activeTexture(gl.TEXTURE0); 172 | gl.bindTexture(gl.TEXTURE_2D, model.colmap); 173 | gl.uniform1i(shaderProgram.u_colmap, 0); 174 | } 175 | 176 | if (model.normap) { 177 | gl.activeTexture(gl.TEXTURE1); 178 | gl.bindTexture(gl.TEXTURE_2D, model.normap); 179 | gl.uniform1i(shaderProgram.u_normap, 1); 180 | } 181 | 182 | gl.bindBuffer(gl.ARRAY_BUFFER, model.attributes); 183 | 184 | gl.enableVertexAttribArray(shaderProgram.a_position); 185 | gl.vertexAttribPointer(shaderProgram.a_position, model.posInfo.size, model.posInfo.type, false, model.posInfo.stride, model.posInfo.offset); 186 | 187 | gl.enableVertexAttribArray(shaderProgram.a_normal); 188 | gl.vertexAttribPointer(shaderProgram.a_normal, model.norInfo.size, model.norInfo.type, false, model.norInfo.stride, model.norInfo.offset); 189 | 190 | gl.enableVertexAttribArray(shaderProgram.a_uv); 191 | gl.vertexAttribPointer(shaderProgram.a_uv, model.uvInfo.size, model.uvInfo.type, false, model.uvInfo.stride, model.uvInfo.offset); 192 | 193 | gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, model.idx); 194 | 195 | gl.drawElements(model.gltf.mode, model.gltf.indices.length, model.gltf.indicesComponentType, 0); 196 | } 197 | } 198 | 199 | } 200 | 201 | export default Scene; -------------------------------------------------------------------------------- /src/shaders/clusteredForward.frag.glsl.js: -------------------------------------------------------------------------------- 1 | export default function(params) { 2 | return ` 3 | // TODO: This is pretty much just a clone of forward.frag.glsl.js 4 | 5 | #version 100 6 | precision highp float; 7 | 8 | uniform sampler2D u_colmap; 9 | uniform sampler2D u_normap; 10 | uniform sampler2D u_lightbuffer; 11 | 12 | // TODO: Read this buffer to determine the lights influencing a cluster 13 | uniform sampler2D u_clusterbuffer; 14 | 15 | varying vec3 v_position; 16 | varying vec3 v_normal; 17 | varying vec2 v_uv; 18 | 19 | vec3 applyNormalMap(vec3 geomnor, vec3 normap) { 20 | normap = normap * 2.0 - 1.0; 21 | vec3 up = normalize(vec3(0.001, 1, 0.001)); 22 | vec3 surftan = normalize(cross(geomnor, up)); 23 | vec3 surfbinor = cross(geomnor, surftan); 24 | return normap.y * surftan + normap.x * surfbinor + normap.z * geomnor; 25 | } 26 | 27 | struct Light { 28 | vec3 position; 29 | float radius; 30 | vec3 color; 31 | }; 32 | 33 | float ExtractFloat(sampler2D texture, int textureWidth, int textureHeight, int index, int component) { 34 | float u = float(index + 1) / float(textureWidth + 1); 35 | int pixel = component / 4; 36 | float v = float(pixel + 1) / float(textureHeight + 1); 37 | vec4 texel = texture2D(texture, vec2(u, v)); 38 | int pixelComponent = component - pixel * 4; 39 | if (pixelComponent == 0) { 40 | return texel[0]; 41 | } else if (pixelComponent == 1) { 42 | return texel[1]; 43 | } else if (pixelComponent == 2) { 44 | return texel[2]; 45 | } else if (pixelComponent == 3) { 46 | return texel[3]; 47 | } 48 | } 49 | 50 | Light UnpackLight(int index) { 51 | Light light; 52 | float u = float(index + 1) / float(${params.numLights + 1}); 53 | vec4 v1 = texture2D(u_lightbuffer, vec2(u, 0.3)); 54 | vec4 v2 = texture2D(u_lightbuffer, vec2(u, 
0.6)); 55 | light.position = v1.xyz; 56 | 57 | // LOOK: This extracts the 4th float (radius) of the (index)th light in the buffer 58 | // Note that this is just an example implementation to extract one float. 59 | // There are more efficient ways if you need adjacent values 60 | light.radius = ExtractFloat(u_lightbuffer, ${params.numLights}, 2, index, 3); 61 | 62 | light.color = v2.rgb; 63 | return light; 64 | } 65 | 66 | // Cubic approximation of gaussian curve so we falloff to exactly 0 at the light radius 67 | float cubicGaussian(float h) { 68 | if (h < 1.0) { 69 | return 0.25 * pow(2.0 - h, 3.0) - pow(1.0 - h, 3.0); 70 | } else if (h < 2.0) { 71 | return 0.25 * pow(2.0 - h, 3.0); 72 | } else { 73 | return 0.0; 74 | } 75 | } 76 | 77 | void main() { 78 | vec3 albedo = texture2D(u_colmap, v_uv).rgb; 79 | vec3 normap = texture2D(u_normap, v_uv).xyz; 80 | vec3 normal = applyNormalMap(v_normal, normap); 81 | 82 | vec3 fragColor = vec3(0.0); 83 | 84 | for (int i = 0; i < ${params.numLights}; ++i) { 85 | Light light = UnpackLight(i); 86 | float lightDistance = distance(light.position, v_position); 87 | vec3 L = (light.position - v_position) / lightDistance; 88 | 89 | float lightIntensity = cubicGaussian(2.0 * lightDistance / light.radius); 90 | float lambertTerm = max(dot(L, normal), 0.0); 91 | 92 | fragColor += albedo * lambertTerm * light.color * vec3(lightIntensity); 93 | } 94 | 95 | const vec3 ambientLight = vec3(0.025); 96 | fragColor += albedo * ambientLight; 97 | 98 | gl_FragColor = vec4(fragColor, 1.0); 99 | } 100 | `; 101 | } 102 | -------------------------------------------------------------------------------- /src/shaders/clusteredForward.vert.glsl: -------------------------------------------------------------------------------- 1 | #version 100 2 | precision highp float; 3 | 4 | uniform mat4 u_viewProjectionMatrix; 5 | 6 | attribute vec3 a_position; 7 | attribute vec3 a_normal; 8 | attribute vec2 a_uv; 9 | 10 | varying vec3 v_position; 11 | varying vec3 v_normal; 12 | varying vec2 v_uv; 13 | 14 | void main() { 15 | gl_Position = u_viewProjectionMatrix * vec4(a_position, 1.0); 16 | v_position = a_position; 17 | v_normal = a_normal; 18 | v_uv = a_uv; 19 | } -------------------------------------------------------------------------------- /src/shaders/deferred.frag.glsl.js: -------------------------------------------------------------------------------- 1 | export default function(params) { 2 | return ` 3 | #version 100 4 | precision highp float; 5 | 6 | uniform sampler2D u_gbuffers[${params.numGBuffers}]; 7 | 8 | varying vec2 v_uv; 9 | 10 | void main() { 11 | // TODO: extract data from g buffers and do lighting 12 | // vec4 gb0 = texture2D(u_gbuffers[0], v_uv); 13 | // vec4 gb1 = texture2D(u_gbuffers[1], v_uv); 14 | // vec4 gb2 = texture2D(u_gbuffers[2], v_uv); 15 | // vec4 gb3 = texture2D(u_gbuffers[3], v_uv); 16 | 17 | gl_FragColor = vec4(v_uv, 0.0, 1.0); 18 | } 19 | `; 20 | } -------------------------------------------------------------------------------- /src/shaders/deferredToTexture.frag.glsl: -------------------------------------------------------------------------------- 1 | #version 100 2 | #extension GL_EXT_draw_buffers: enable 3 | precision highp float; 4 | 5 | uniform sampler2D u_colmap; 6 | uniform sampler2D u_normap; 7 | 8 | varying vec3 v_position; 9 | varying vec3 v_normal; 10 | varying vec2 v_uv; 11 | 12 | vec3 applyNormalMap(vec3 geomnor, vec3 normap) { 13 | normap = normap * 2.0 - 1.0; 14 | vec3 up = normalize(vec3(0.001, 1, 0.001)); 15 | vec3 surftan = 
normalize(cross(geomnor, up)); 16 | vec3 surfbinor = cross(geomnor, surftan); 17 | return normap.y * surftan + normap.x * surfbinor + normap.z * geomnor; 18 | } 19 | 20 | void main() { 21 | vec3 norm = applyNormalMap(v_normal, vec3(texture2D(u_normap, v_uv))); 22 | vec3 col = vec3(texture2D(u_colmap, v_uv)); 23 | 24 | // TODO: populate your g buffer 25 | // gl_FragData[0] = ?? 26 | // gl_FragData[1] = ?? 27 | // gl_FragData[2] = ?? 28 | // gl_FragData[3] = ?? 29 | } -------------------------------------------------------------------------------- /src/shaders/deferredToTexture.vert.glsl: -------------------------------------------------------------------------------- 1 | #version 100 2 | precision highp float; 3 | 4 | uniform mat4 u_viewProjectionMatrix; 5 | 6 | attribute vec3 a_position; 7 | attribute vec3 a_normal; 8 | attribute vec2 a_uv; 9 | 10 | varying vec3 v_position; 11 | varying vec3 v_normal; 12 | varying vec2 v_uv; 13 | 14 | void main() { 15 | gl_Position = u_viewProjectionMatrix * vec4(a_position, 1.0); 16 | v_position = a_position; 17 | v_normal = a_normal; 18 | v_uv = a_uv; 19 | } -------------------------------------------------------------------------------- /src/shaders/forward.frag.glsl.js: -------------------------------------------------------------------------------- 1 | export default function(params) { 2 | return ` 3 | #version 100 4 | precision highp float; 5 | 6 | uniform sampler2D u_colmap; 7 | uniform sampler2D u_normap; 8 | uniform sampler2D u_lightbuffer; 9 | 10 | varying vec3 v_position; 11 | varying vec3 v_normal; 12 | varying vec2 v_uv; 13 | 14 | vec3 applyNormalMap(vec3 geomnor, vec3 normap) { 15 | normap = normap * 2.0 - 1.0; 16 | vec3 up = normalize(vec3(0.001, 1, 0.001)); 17 | vec3 surftan = normalize(cross(geomnor, up)); 18 | vec3 surfbinor = cross(geomnor, surftan); 19 | return normap.y * surftan + normap.x * surfbinor + normap.z * geomnor; 20 | } 21 | 22 | struct Light { 23 | vec3 position; 24 | float radius; 25 | vec3 color; 26 | }; 27 | 28 | float ExtractFloat(sampler2D texture, int textureWidth, int textureHeight, int index, int component) { 29 | float u = float(index + 1) / float(textureWidth + 1); 30 | int pixel = component / 4; 31 | float v = float(pixel + 1) / float(textureHeight + 1); 32 | vec4 texel = texture2D(texture, vec2(u, v)); 33 | int pixelComponent = component - pixel * 4; 34 | if (pixelComponent == 0) { 35 | return texel[0]; 36 | } else if (pixelComponent == 1) { 37 | return texel[1]; 38 | } else if (pixelComponent == 2) { 39 | return texel[2]; 40 | } else if (pixelComponent == 3) { 41 | return texel[3]; 42 | } 43 | } 44 | 45 | Light UnpackLight(int index) { 46 | Light light; 47 | float u = float(index + 1) / float(${params.numLights + 1}); 48 | vec4 v1 = texture2D(u_lightbuffer, vec2(u, 0.0)); 49 | vec4 v2 = texture2D(u_lightbuffer, vec2(u, 0.5)); 50 | light.position = v1.xyz; 51 | 52 | // LOOK: This extracts the 4th float (radius) of the (index)th light in the buffer 53 | // Note that this is just an example implementation to extract one float. 
54 | // There are more efficient ways if you need adjacent values 55 | light.radius = ExtractFloat(u_lightbuffer, ${params.numLights}, 2, index, 3); 56 | 57 | light.color = v2.rgb; 58 | return light; 59 | } 60 | 61 | // Cubic approximation of gaussian curve so we falloff to exactly 0 at the light radius 62 | float cubicGaussian(float h) { 63 | if (h < 1.0) { 64 | return 0.25 * pow(2.0 - h, 3.0) - pow(1.0 - h, 3.0); 65 | } else if (h < 2.0) { 66 | return 0.25 * pow(2.0 - h, 3.0); 67 | } else { 68 | return 0.0; 69 | } 70 | } 71 | 72 | void main() { 73 | vec3 albedo = texture2D(u_colmap, v_uv).rgb; 74 | vec3 normap = texture2D(u_normap, v_uv).xyz; 75 | vec3 normal = applyNormalMap(v_normal, normap); 76 | 77 | vec3 fragColor = vec3(0.0); 78 | 79 | for (int i = 0; i < ${params.numLights}; ++i) { 80 | Light light = UnpackLight(i); 81 | float lightDistance = distance(light.position, v_position); 82 | vec3 L = (light.position - v_position) / lightDistance; 83 | 84 | float lightIntensity = cubicGaussian(2.0 * lightDistance / light.radius); 85 | float lambertTerm = max(dot(L, normal), 0.0); 86 | 87 | fragColor += albedo * lambertTerm * light.color * vec3(lightIntensity); 88 | } 89 | 90 | const vec3 ambientLight = vec3(0.025); 91 | fragColor += albedo * ambientLight; 92 | 93 | gl_FragColor = vec4(fragColor, 1.0); 94 | } 95 | `; 96 | } 97 | -------------------------------------------------------------------------------- /src/shaders/forward.vert.glsl: -------------------------------------------------------------------------------- 1 | #version 100 2 | precision highp float; 3 | 4 | uniform mat4 u_viewProjectionMatrix; 5 | 6 | attribute vec3 a_position; 7 | attribute vec3 a_normal; 8 | attribute vec2 a_uv; 9 | 10 | varying vec3 v_position; 11 | varying vec3 v_normal; 12 | varying vec2 v_uv; 13 | 14 | void main() { 15 | gl_Position = u_viewProjectionMatrix * vec4(a_position, 1.0); 16 | v_position = a_position; 17 | v_normal = a_normal; 18 | v_uv = a_uv; 19 | } -------------------------------------------------------------------------------- /src/shaders/quad.vert.glsl: -------------------------------------------------------------------------------- 1 | #version 100 2 | precision highp float; 3 | 4 | attribute vec3 a_position; 5 | 6 | varying vec2 v_uv; 7 | 8 | void main() { 9 | gl_Position = vec4(a_position, 1.0); 10 | v_uv = a_position.xy * 0.5 + 0.5; 11 | } -------------------------------------------------------------------------------- /src/utils.js: -------------------------------------------------------------------------------- 1 | import { gl, canvas, abort } from './init'; 2 | import QuadVertSource from './shaders/quad.vert.glsl'; 3 | 4 | function downloadURI(uri, name) { 5 | var link = document.createElement('a'); 6 | link.download = name; 7 | link.href = uri; 8 | document.body.appendChild(link); 9 | link.click(); 10 | document.body.removeChild(link); 11 | }; 12 | 13 | export function saveCanvas() { 14 | downloadURI(canvas.toDataURL('image/png'), 'webgl-canvas-' + Date.now() + '.png'); 15 | } 16 | 17 | function compileShader(shaderSource, shaderType) { 18 | var shader = gl.createShader(shaderType); 19 | gl.shaderSource(shader, shaderSource); 20 | gl.compileShader(shader); 21 | if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) { 22 | console.error(shaderSource); 23 | abort('shader compiler error:\n' + gl.getShaderInfoLog(shader)); 24 | } 25 | 26 | return shader; 27 | }; 28 | 29 | function linkShader(vs, fs) { 30 | var prog = gl.createProgram(); 31 | gl.attachShader(prog, vs); 32 | 
gl.attachShader(prog, fs); 33 | gl.linkProgram(prog); 34 | if (!gl.getProgramParameter(prog, gl.LINK_STATUS)) { 35 | abort('shader linker error:\n' + gl.getProgramInfoLog(prog)); 36 | } 37 | return prog; 38 | }; 39 | 40 | function addShaderLocations(result, shaderLocations) { 41 | if (shaderLocations && shaderLocations.uniforms && shaderLocations.uniforms.length) { 42 | for (let i = 0; i < shaderLocations.uniforms.length; ++i) { 43 | result = Object.assign(result, { 44 | [shaderLocations.uniforms[i]]: gl.getUniformLocation(result.glShaderProgram, shaderLocations.uniforms[i]), 45 | }); 46 | } 47 | } 48 | if (shaderLocations && shaderLocations.attribs && shaderLocations.attribs.length) { 49 | for (let i = 0; i < shaderLocations.attribs.length; ++i) { 50 | result = Object.assign(result, { 51 | [shaderLocations.attribs[i]]: gl.getAttribLocation(result.glShaderProgram, shaderLocations.attribs[i]), 52 | }); 53 | } 54 | } 55 | return result; 56 | } 57 | 58 | export function loadShaderProgram(vsSource, fsSource, shaderLocations) { 59 | const vs = compileShader(vsSource, gl.VERTEX_SHADER); 60 | const fs = compileShader(fsSource, gl.FRAGMENT_SHADER); 61 | return addShaderLocations({ 62 | glShaderProgram: linkShader(vs, fs), 63 | }, shaderLocations); 64 | } 65 | 66 | const quadPositions = new Float32Array([ 67 | -1.0, -1.0, 0.0, 68 | 1.0, -1.0, 0.0, 69 | -1.0, 1.0, 0.0, 70 | 1.0, 1.0, 0.0 71 | ]); 72 | 73 | const quadBuffer = gl.createBuffer(); 74 | gl.bindBuffer(gl.ARRAY_BUFFER, quadBuffer); 75 | gl.bufferData(gl.ARRAY_BUFFER, quadPositions, gl.STATIC_DRAW); 76 | 77 | export function renderFullscreenQuad(program) { 78 | // Bind the program to use to draw the quad 79 | gl.useProgram(program.glShaderProgram); 80 | 81 | // Bind the VBO as the gl.ARRAY_BUFFER 82 | gl.bindBuffer(gl.ARRAY_BUFFER, quadBuffer); 83 | 84 | // Enable the bound buffer as the vertex attrib array for 85 | // program.a_position, using gl.enableVertexAttribArray 86 | gl.enableVertexAttribArray(program.a_position); 87 | 88 | // Use gl.vertexAttribPointer to tell WebGL the type/layout for 89 | // program.a_position's access pattern. 90 | gl.vertexAttribPointer(program.a_position, 3, gl.FLOAT, gl.FALSE, 0, 0); 91 | 92 | // Use gl.drawArrays to draw the quad 93 | gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); 94 | 95 | // Disable the enabled vertex attrib array 96 | gl.disableVertexAttribArray(program.a_position); 97 | 98 | // Unbind the array buffer. 99 | gl.bindBuffer(gl.ARRAY_BUFFER, null); 100 | } -------------------------------------------------------------------------------- /webpack.config.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | const webpack = require('webpack'); 3 | const MinifyPlugin = require('babel-minify-webpack-plugin'); 4 | 5 | module.exports = function(env) { 6 | const isProduction = env && env.production === true; 7 | 8 | return { 9 | entry: path.join(__dirname, 'src/init'), 10 | output: { 11 | path: path.join(__dirname, 'build'), 12 | filename: 'bundle.js', 13 | }, 14 | module: { 15 | loaders: [ 16 | { 17 | test: /\.js$/, 18 | exclude: /(node_modules|bower_components)/, 19 | loader: 'babel-loader', 20 | query: { 21 | presets: [['env', { 22 | targets: { 23 | browsers: ['> 1%', 'last 2 major versions'], 24 | }, 25 | loose: true, 26 | modules: false, 27 | }]], 28 | }, 29 | }, 30 | { 31 | test: /\.glsl$/, 32 | loader: 'webpack-glsl-loader' 33 | }, 34 | ], 35 | }, 36 | plugins: [ 37 | isProduction ? 
new MinifyPlugin({ 38 | keepFnName: true, 39 | keepClassName: true, 40 | }) : undefined, 41 | new webpack.DefinePlugin({ 42 | 'process.env': { 43 | 'NODE_ENV': (isProduction ? JSON.stringify('production'): JSON.stringify('development')), 44 | } 45 | }), 46 | ].filter(p => p), 47 | devtool: 'source-map', 48 | devServer: { 49 | port: 5650, 50 | publicPath: '/build/' 51 | }, 52 | }; 53 | }; 54 | --------------------------------------------------------------------------------
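
A quick worked example of the TextureBuffer layout used by the renderers above (a hedged usage sketch for illustration only, not an additional file in the repository; the `demoLights` name is made up). With elementSize = 8, _pixelsPerElement is ceil(8 / 4) = 2, so the backing texture is NUM_LIGHTS pixels wide and 2 pixels tall: bufferIndex(i, 0) gives the float offset of light i's first pixel (position.xyz + radius) and bufferIndex(i, 1) the offset of its second pixel (color.rgb), which is why UnpackLight in the fragment shaders samples the light buffer at two different v coordinates.

// Hedged sketch: packing one light into a TextureBuffer, mirroring what forward.js does per frame.
// `demoLights` is hypothetical; TextureBuffer and NUM_LIGHTS come from the files above, and a GL
// context must already have been created by src/init.js before TextureBuffer can be constructed.
import TextureBuffer from './src/renderers/textureBuffer';
import { NUM_LIGHTS } from './src/scene';

const demoLights = new TextureBuffer(NUM_LIGHTS, 8); // 8 floats per light -> 2 RGBA pixels per light

const i = 0;                               // pack light 0
const row0 = demoLights.bufferIndex(i, 0); // floats 0..3: position.xyz + radius (first pixel row)
const row1 = demoLights.bufferIndex(i, 1); // floats 4..7: color.rgb, last float unused (second pixel row)
demoLights.buffer[row0 + 0] = 1.0;         // position.x
demoLights.buffer[row0 + 1] = 2.0;         // position.y
demoLights.buffer[row0 + 2] = 3.0;         // position.z
demoLights.buffer[row0 + 3] = 5.0;         // radius
demoLights.buffer[row1 + 0] = 1.0;         // color.r
demoLights.buffer[row1 + 1] = 0.5;         // color.g
demoLights.buffer[row1 + 2] = 0.25;        // color.b

demoLights.update();                       // re-upload the Float32Array as RGBA/FLOAT texels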