├── .gitignore
├── INSTRUCTION.md
├── README.md
├── build
│   ├── bundle.js
│   └── bundle.js.map
├── img
│   ├── albedo.JPG
│   ├── blinn.JPG
│   ├── clu.png
│   ├── clus.JPG
│   ├── cluster.gif
│   ├── comp1.JPG
│   ├── deferred-v2.png
│   ├── depth.JPG
│   ├── init
│   ├── lambert.gif
│   ├── pa.JPG
│   ├── purecol.JPG
│   ├── sc1.JPG
│   ├── surfacenor.JPG
│   ├── toon.JPG
│   ├── toon.gif
│   ├── xslice.JPG
│   └── yslice.JPG
├── index.html
├── lib
│   └── minimal-gltf-loader.js
├── models
│   └── sponza
│       ├── buffer_0.bin
│       ├── color.jpeg
│       ├── fragmentShader0.glsl
│       ├── normal.png
│       ├── sponza.gltf
│       └── vertexShader0.glsl
├── package.json
├── src
│   ├── init.js
│   ├── main.js
│   ├── renderers
│   │   ├── base.js
│   │   ├── clustered.js
│   │   ├── forward.js
│   │   ├── forwardPlus.js
│   │   └── textureBuffer.js
│   ├── scene.js
│   ├── shaders
│   │   ├── deferred.frag.glsl.js
│   │   ├── deferredToTexture.frag.glsl
│   │   ├── deferredToTexture.vert.glsl
│   │   ├── forward.frag.glsl.js
│   │   ├── forward.vert.glsl
│   │   ├── forwardPlus.frag.glsl.js
│   │   ├── forwardPlus.vert.glsl
│   │   └── quad.vert.glsl
│   └── utils.js
└── webpack.config.js

/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | .idea
--------------------------------------------------------------------------------
/INSTRUCTION.md:
--------------------------------------------------------------------------------
1 | WebGL Clustered and Forward+ Shading - Instructions
2 | ==========================================================
3 | 
4 | **This is due Thursday 10/26**
5 | 
6 | ## Running the code
7 | 
8 | - Clone this repository
9 | - Download and install [Node.js](https://nodejs.org/en/)
10 | - Run `npm install` in the root directory of this project. This will download and install dependencies
11 | - Run `npm start` and navigate to [http://localhost:5650](http://localhost:5650)
12 | 
13 | This project requires a WebGL-capable browser with support for several extensions. You can check for support on [WebGL Report](http://webglreport.com/):
14 | - OES_texture_float
15 | - OES_texture_float_linear
16 | - OES_element_index_uint
17 | - EXT_frag_depth
18 | - WEBGL_depth_texture
19 | - WEBGL_draw_buffers
20 | 
21 | Google Chrome seems to work best on all platforms. If you have problems running the starter code, use Chrome or Chromium, and make sure you have updated your browser and video drivers.
22 | 
23 | ## Requirements
24 | **Ask on the mailing list for any clarifications**
25 | 
26 | In this project, you are given code for:
27 | - Loading glTF models
28 | - Camera control
29 | - Simple forward renderer
30 | - Partial implementation and setup for Clustered and Forward+ shading
31 | - Many helpful helpers
32 | 
33 | ## Required Tasks
34 | 
35 | **Before doing performance analysis**, you must disable debug mode by changing `DEBUG` to false in `src/init.js`. Keep it enabled when developing - it helps find WebGL errors *much* more easily.
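As a quick alternative to WebGL Report for the extension list above, a snippet along these lines (illustrative only, not part of the starter code) can be pasted into the browser console to check support locally:

```js
// Quick local check of the extensions this project needs.
const gl = document.createElement('canvas').getContext('webgl');
const required = [
  'OES_texture_float',
  'OES_texture_float_linear',
  'OES_element_index_uint',
  'EXT_frag_depth',
  'WEBGL_depth_texture',
  'WEBGL_draw_buffers',
];
required.forEach((name) => {
  // getExtension returns null when the extension is unavailable
  console.log(name, gl && gl.getExtension(name) ? 'supported' : 'MISSING');
});
```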
36 | 
37 | **Forward+**
38 | - Build a data structure to keep track of how many lights are in each cluster and what their indices are (one possible layout is sketched below)
39 | - Render the scene using only the lights that overlap a given cluster
40 | 
41 | **Clustered**
42 | - Reuse clustering logic from Forward+
43 | - Store vertex attributes in g-buffer
44 | - Read g-buffer in a shader to produce final output
45 | 
46 | **Effects**
47 | - Implement deferred Blinn-Phong shading (diffuse + specular) for point lights
48 | - OR
49 | - Implement one of the following effects:
50 |   - Bloom using post-process blur (box or Gaussian)
51 |   - Toon shading (with ramp shading + simple depth-edge detection for outlines)
52 | 
53 | **Optimizations**
54 | - Optimized g-buffer format - reduce the number and size of g-buffers:
55 |   - Ideas:
56 |     - Pack values together into vec4s
57 |     - Use 2-component normals
58 |     - Quantize values by using smaller texture types instead of gl.FLOAT
59 |     - Reduce the number of properties passed via the g-buffer, e.g. by:
60 |       - Reconstructing world space position using camera matrices and X/Y/depth
61 | - For credit, you must show a good optimization effort and record the performance of each version you test, in a simple table.
62 | - It is expected that you won't need all 4 provided g-buffers for a basic pipeline; make sure you disable the unused ones.
63 | 
64 | ## Performance & Analysis
65 | 
66 | Compare your implementations of Forward+ and Clustered shading and analyze their differences.
67 | - Is one of them faster?
68 | - Is one of them better at certain types of workloads?
69 | - What are the benefits and tradeoffs of using one over the other?
70 | - For any differences in performance, briefly explain what may be causing the difference.
71 | 
72 | **Before doing performance analysis**, you must disable debug mode by changing `DEBUG` to false in `src/init.js`. Keep it enabled when developing - it helps find WebGL errors *much* more easily.
73 | 
74 | Optimize your JavaScript and/or GLSL code. Chrome/Firefox's profiling tools (see Resources section) will be useful for this. For each change that improves performance, show the before and after render times.
75 | 
76 | For each new effect feature (required or extra), please provide the following analysis:
77 | - Concise overview write-up of the feature.
78 | - Performance change due to adding the feature.
79 | - If applicable, how do parameters (such as number of lights, etc.) affect performance? Show data with simple graphs.
80 |   - Show timing in milliseconds, not FPS.
81 | - If you did something to accelerate the feature, what did you do and why?
82 | - How might this feature be optimized beyond your current implementation?
83 | 
84 | For each performance feature (required or extra), please provide:
85 | - Concise overview write-up of the feature.
86 | - Detailed performance improvement analysis of adding the feature.
87 |   - What is the best case scenario for your performance improvement? What is the worst? Explain briefly.
88 |   - Are there tradeoffs to this performance feature? Explain briefly.
89 |   - How do parameters (such as number of lights, tile size, etc.) affect performance? Show data with graphs.
90 |   - Show timing in milliseconds, not FPS.
91 | - Show debug views when possible.
92 |   - If the debug view correlates with performance, explain how.
93 | 
94 | ## Starter Code Tour
95 | 
96 | Initialization happens in `src/init.js`. You don't need to worry about this; it is mostly initializing the gl context, debug modes, extensions, etc.
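To make the Forward+ requirement above concrete, here is a minimal sketch of one possible per-cluster light list, assuming the cluster data lives in a `TextureBuffer` with one column per cluster, where the first float holds the light count and the following floats hold light indices. `lightOverlapsCluster` and `MAX_LIGHTS_PER_CLUSTER` are illustrative placeholders, not names provided by the starter code:

```js
// Inside updateClusters-style code: try to append light `lightIndex` to cluster (x, y, z).
const clusterIdx = x + y * xSlices + z * xSlices * ySlices;
const countIdx = this._clusterTexture.bufferIndex(clusterIdx, 0);
let count = this._clusterTexture.buffer[countIdx];

if (count < MAX_LIGHTS_PER_CLUSTER && lightOverlapsCluster(light, x, y, z)) {
  count += 1;
  // Light slot `count` lives in pixel floor(count / 4) of this column, component count % 4.
  const pixel = Math.floor(count / 4);
  const component = count % 4;
  this._clusterTexture.buffer[this._clusterTexture.bufferIndex(clusterIdx, pixel) + component] = lightIndex;
  this._clusterTexture.buffer[countIdx] = count;
}
```

The matching shader-side read then fetches the count for the fragment's cluster and loops over exactly that many stored indices.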
97 | 
98 | `src/main.js` is configuration for the renderers. It sets up the gui for switching renderers and initializes the scene and render loop. The only important things here are the arguments for `ForwardPlusRenderer` and `ClusteredRenderer`. These constructors take the number of x, y, and z slices to split the frustum into.
99 | 
100 | `src/scene.js` handles loading a .gltf scene and initializing the lights. Here, you can modify the number of lights, their positions, and how they move around. Also, take a look at the `draw` function. This handles binding the vertex attributes, which are hardcoded to `a_position`, `a_normal`, and `a_uv`, as well as the color and normal maps to targets `gl.TEXTURE0` and `gl.TEXTURE1`.
101 | 
102 | **Simple Forward Shading Pipeline**
103 | There is a simple forward shading pipeline as an example of how everything works. Check out `src/renderers/forward.js`.
104 | 
105 | The constructor for the renderer initializes a `TextureBuffer` to store the lights. This isn't strictly necessary for a forward renderer, but you'll need it to do clustered shading. What we're trying to do here is upload all of our light positions to a shader. However, we unfortunately can't upload arbitrary data to the GPU with WebGL, so we have to pack it as a texture. This is set up for you.
106 | 
107 | The constructor for `TextureBuffer` takes two arguments: the number of elements, and the size of each element (in floats). It will allocate a floating point texture of dimension `numElements x ceil(elementSize / 4)`. This is because we pack every 4 adjacent values into a single pixel.
108 | 
109 | Go to the `render` function to see how this is used in practice. Here, the buffer for the texture storing the lights is populated with the light positions. Notice that the first four values get stored at locations `this._lightTexture.bufferIndex(i, 0) + 0` to `this._lightTexture.bufferIndex(i, 0) + 3`, and then the next three are at `this._lightTexture.bufferIndex(i, 1) + 0` to `this._lightTexture.bufferIndex(i, 1) + 2`. Keep in mind that the data is stored as a texture, so the 5th element is actually the 1st element of the pixel in the second row.
110 | 
111 | Look again at the constructor of `ForwardRenderer`. Also initialized here is the shader program. The shader program takes in a vertex source, a fragment source, and then a map of which uniform and vertex attributes should be extracted from the shader. In this code, the shader location for `u_viewProjectionMatrix` gets stored as `this._shaderProgram.u_viewProjectionMatrix`. If you look at `fsSource`, there's a strange thing happening there: `fsSource` is actually a function, and it's being called with a configuration object containing the number of lights. What this is doing is creating a shader source string that is parameterized. We can't have dynamic loops in WebGL, but we can dynamically generate static shaders. If you take a look at `src/shaders/forward.frag.glsl.js`, you'll see that `${numLights}` is used throughout.
112 | 
113 | Now go look inside `src/shaders/forward.frag.glsl.js`. Here, there is a simple loop over the lights that applies shading for each one. There is a helper `UnpackLight(index)` which unpacks the `index`th light from the texture into a struct. Make sure you fully understand how this is working because you will need to implement something similar for clusters. Inside `UnpackLight` there is another helper called `ExtractFloat(texture, textureWidth, textureHeight, index, component)`. This pulls out the `component`th component of the `index`th value packed inside a `textureWidth x textureHeight` texture. Again, this is meant to be an example implementation; using this function to pull out four values into a `vec4` would be unnecessarily slow.
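For orientation, here is a sketch of how the light data described above might be written into the light texture each frame. The exact fields (radius in the fourth component, color in the second pixel) and the `update()` upload call are assumptions for illustration; check `src/renderers/forward.js` for the real layout:

```js
for (let i = 0; i < NUM_LIGHTS; ++i) {
  const light = scene.lights[i];
  // First pixel of column i: light position (and, presumably, its radius).
  this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 0] = light.position[0];
  this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 1] = light.position[1];
  this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 2] = light.position[2];
  this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 3] = light.radius;
  // Second pixel of column i: light color.
  this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 0] = light.color[0];
  this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 1] = light.color[1];
  this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 2] = light.color[2];
}
this._lightTexture.update(); // upload the CPU-side buffer to the GPU texture
```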
114 | 
115 | **Getting Started**
116 | Here are a few tips to get you started.
117 | 
118 | 1. Complete `updateClusters` in `src/renderers/base.js`. This should update the cluster `TextureBuffer` with a mapping from cluster index to light count and light list (indices).
119 | 
120 | 2. Update `src/shaders/forwardPlus.frag.glsl.js` to
121 |    - Determine the cluster for a fragment
122 |    - Read in the lights in that cluster from the populated data
123 |    - Do shading for just those lights
124 |    - You may find it necessary to bind additional uniforms in `src/renderers/forwardPlus.js`
125 | 
126 | 3. Update `src/shaders/deferredToTexture.frag.glsl` to write desired data to the g-buffer
127 | 4. Update `src/shaders/deferred.frag.glsl.js` to read values from the g-buffer and perform simple forward rendering. (Right now it just outputs the screen xy coordinate)
128 | 5. Update the deferred shader to use clustered shading. You should be able to reuse a lot from Forward+ for this. You will also likely need to update the shader inputs in `src/renderers/clustered.js`
129 | 
130 | ## README
131 | 
132 | Replace the contents of the README.md in a clear manner with the following:
133 | - A brief description of the project and the specific features you implemented.
134 | - At least one screenshot of your project running.
135 | - A 30+ second video/gif of your project running showing all features. (Even though your demo can be seen online, using multiple render targets means it won't run on many computers. A video will work everywhere.)
136 | - Performance analysis (described above)
137 | 
138 | **GitHub Pages**
139 | Since this assignment is in WebGL, you can make your project easily viewable by taking advantage of GitHub's project pages feature.
140 | 
141 | Once you are done with the assignment, create a new branch:
142 | 
143 | `git branch gh-pages`
144 | 
145 | Run `npm run build` and commit the compiled files.
146 | 
147 | Push the branch to GitHub:
148 | 
149 | `git push origin gh-pages`
150 | 
151 | Now, you can go to `<username>.github.io/<repository-name>` to see your renderer online from anywhere. Add this link to your README.
152 | 
153 | ## Submit
154 | 
155 | Beware of any build issues discussed on the Google Group.
156 | 
157 | Open a GitHub pull request so that we can see that you have finished. The title should be "Project 5B: YOUR NAME". A template for the comment section of your pull request is attached below; you can copy and paste from it:
158 | 
159 | - Repo Link
160 | - (Briefly) mention features that you've completed, especially those bells and whistles you want to highlight
161 |   - Feature 0
162 |   - Feature 1
163 |   - ...
164 | - Feedback on the project itself, if any.
165 | 
166 | ### Third-Party Code Policy
167 | 
168 | - Use of any third-party code must be approved by asking on our mailing list.
169 | - If it is approved, all students are welcome to use it. Generally, we approve use of third-party code that is not a core part of the project. For example, for the path tracer, we would approve using a third-party library for loading models, but would not approve copying and pasting a CUDA function for doing refraction.
170 | - Third-party code **MUST** be credited in README.md.
171 | - Using third-party code without its approval, including using another student's code, is an academic integrity violation, and will, at minimum, result in you receiving an F for the semester.
172 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | WebGL Clustered and Forward+ Shading
2 | ======================
3 | 
4 | ## [Live Online](https://lanlou123.github.io/WebGL-Clustered-Deferred-Forward-Plus-Rendering/)
5 | 
6 | Note: I only wrote the shading-selection dropdown menu for clustered rendering.
7 | 
8 | ## Click this gif for video link
9 | 
10 | [![click this gif for video link](img/cluster.gif)](https://www.youtube.com/watch?v=FbcmS2m7nNE)
11 | 
12 | Due to the gif capture software's limitations, the gif above, which represents Blinn-Phong shading, appears to carry some ramp artifacts from the toon shader.
13 | 
14 | ## Sample gifs
15 | 
16 | blinn-phong|lambert|toon
17 | ----|----|----
18 | ![](img/cluster.gif)|![](img/lambert.gif)|![](img/toon.gif)
19 | 
20 | # Introduction:
21 | 
22 | ### forward rendering
23 | Forward shading is the most straightforward shading method. It first loops over each piece of geometry in the scene, and inside that single geometry loop it runs another loop over all the lights in the scene to apply their influence on the current geometry. It is easy to see that once the number of lights gets very large, this method suffers from terrible performance, which makes it impractical for modern games, since most of them make heavy use of lighting.
24 | 
25 | ### forward plus (clustered) rendering
26 | A better solution to the problem with forward rendering is to use a clustered structure for the lighting. In Forward+ the first step is similar: loop over all the geometry. The second step is different: this time we categorize the lights into clusters, and inside the fragment shader we no longer check every light. Instead, we only accumulate the influence of the lights inside the cluster that the shaded point falls in. As a result, we get a big improvement in performance, especially when there are many lights.
27 | 
28 | ### clustered deferred rendering
29 | We can further improve performance by introducing an extra buffer, the g-buffer, which consists of a number of 2D textures (chosen by the user according to need). In these textures we only store the geometry that ends up "on top", in other words the fragments with the smallest depth value among all fragments at the same NDC coordinate. Besides position, we can also store normal, depth, albedo, or whatever else our rendering requires.
30 | 
31 | A graph of a typical deferred rendering pipeline.
32 | 
33 | ![](img/deferred-v2.png)
34 | 
35 | According to the image above, this method is better because the lighting loop only reads the 2D textures instead of touching every piece of geometry, and the textures only store the topmost geometry, which ensures we never evaluate lighting on geometry "behind the walls" that we cannot see.
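At the WebGL level, the clustered deferred renderer described above boils down to two passes. The sketch below is purely illustrative: `_progCopy`, `_progShade`, `NUM_GBUFFERS`, and `renderFullscreenQuad` are placeholder names rather than the exact identifiers used in `src/renderers/clustered.js`:

```js
// Pass 1: rasterize the scene once, writing position / normal / albedo into the g-buffer.
gl.bindFramebuffer(gl.FRAMEBUFFER, this._fbo);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.useProgram(this._progCopy.glShaderProgram);
scene.draw(this._progCopy);

// Pass 2: shade a full-screen quad, reading the g-buffer and looping only over
// the lights stored in each fragment's cluster.
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.useProgram(this._progShade.glShaderProgram);
for (let i = 0; i < NUM_GBUFFERS; ++i) {
  gl.activeTexture(gl.TEXTURE0 + i);
  gl.bindTexture(gl.TEXTURE_2D, this._gbuffers[i]);
  gl.uniform1i(this._progShade[`u_gbuffers[${i}]`], i);
}
renderFullscreenQuad(this._progShade);
```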
36 | 
37 | ### cluster structure
38 | 
39 | ![](img/clu.png)
40 | 
41 | An example of the cluster structure is shown above. Although it is drawn as a 2D frustum for simplicity, it should really be a 3D one; this picture can be read as a simplified view of the 3D structure looking along the y axis. The orange sphere represents the light's area of influence, and the clusters colored red and orange are the ones we consider storing into this specific light's buffer. We store the affected clusters as a start index and an end index in each of the x, y, and z directions.
42 | 
43 | 
44 | # Project Features:
45 | 
46 | ### Forward+
47 | - Build a data structure to keep track of how many lights are in each cluster and what their indices are
48 | - Render the scene using only the lights that overlap a given cluster
49 | 
50 | 
51 | ### Clustered
52 | - Reuse clustering logic from Forward+
53 | - Store vertex attributes in g-buffer
54 | - Read g-buffer in a shader to produce final output
55 | 
56 | ### Effects
57 | - Implemented deferred Blinn-Phong shading (diffuse + specular) for point lights
58 |   - For the Blinn-Phong effect, I used the half-angle method and an extra inverse view matrix to compute the viewpoint position.
59 | 
60 | - Implemented toon shading (with ramp shading + simple depth-edge detection for outlines)
61 |   - Toon shading is simple: interpolate between the ramped value of the Lambert term and the original Lambert term, and do the same for the Blinn-Phong term.
62 | 
63 | ![](img/blinn.JPG)
64 | 
65 | Blinn-Phong shading; you can see the reflection on the floor and pillars quite clearly.
66 | 
67 | ![](img/toon.JPG)
68 | 
69 | Toon shading; the color is ramped based on the distance to the light center.
70 | 
71 | 
72 | ### Optimizations
73 | - Optimized g-buffer format - reduce the number and size of g-buffers:
74 |   - Pack values together into vec4s
75 |     - I stored the position and normal data in vec4s.
76 |   - Use 2-component normals
77 |     - Since the length of a normal is always 1, we don't need all three components. I chose to pack the normal's x and y components into the two existing vec4s. One thing to note is that I have to multiply by the view matrix before packing, and use the inverse view matrix in the shader to do the unpacking.
78 | 
79 | 
80 | 
81 | 
82 | # Performance & Analysis
83 | 
84 | ### Three methods comparison:
85 | 
86 | ![](img/comp1.JPG)
87 | 
88 | This test was done with a light radius of 4, a cluster size of 15x15x15, and a gradually increasing number of lights.
89 | As the diagram shows, when the light count increases, pure forward rendering suffers a drastic performance drop, while Forward+ and clustered deferred hold up much better. The last one, clustered deferred, is really efficient: its performance barely changes even with up to 4k lights.
90 | 
91 | ### Cluster size influence:
92 | ![](img/clus.JPG)
93 | 
94 | This test was done with 3500 light sources, a light radius of 4, and the clustered deferred renderer.
95 | From this graph, we can see that a cluster size of 15x15x15 gives the best performance; I suspect this value can only be found by fine-tuning the renderer.
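For reference, the 2-component normal packing described in the Optimizations section above can be undone in the deferred shader with a few lines of GLSL. The sketch below follows the project's convention of keeping shader source in a JS template string; the `invViewMatrix` argument is an illustrative name, and the reconstruction assumes the view-space normal faces the camera (z >= 0):

```js
export const unpackNormalSnippet = `
  // nxy: the two stored components of the view-space normal read from the g-buffer.
  vec3 unpackNormal(vec2 nxy, mat4 invViewMatrix) {
    float nz = sqrt(max(0.0, 1.0 - dot(nxy, nxy))); // rebuild z from unit length
    vec3 viewNormal = vec3(nxy, nz);
    // Rotate back from view space to world space (w = 0 ignores translation).
    return normalize((invViewMatrix * vec4(viewNormal, 0.0)).xyz);
  }
`;
```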
96 | 
97 | ### compressed g-buffer vs. uncompressed:
98 | 
99 | (The axis labels at the bottom of the image should read "compressed" and "uncompressed"; please ignore them.)
100 | 
101 | ![](img/pa.JPG)
102 | This test was done with 1000 light sources using the clustered deferred renderer.
103 | It clearly shows the benefit of packing the g-buffer from three float textures down to two: fewer g-buffers means fewer g-buffer reads and writes.
104 | 
105 | ### debug images:
106 | 
107 | xslice view|yslice view
108 | ----|-----
109 | ![](img/xslice.JPG)|![](img/yslice.JPG)
110 | 
111 | depth buffer| albedo buffer| surface normal buffer
112 | ---|---|---
113 | ![](img/depth.JPG)|![](img/albedo.JPG)|![](img/surfacenor.JPG)
114 | 
115 | pure light buffer
116 | ![](img/purecol.JPG)
117 | 
118 | ### Credits
119 | 
120 | * [Three.js](https://github.com/mrdoob/three.js) by [@mrdoob](https://github.com/mrdoob) and contributors
121 | * [stats.js](https://github.com/mrdoob/stats.js) by [@mrdoob](https://github.com/mrdoob) and contributors
122 | * [webgl-debug](https://github.com/KhronosGroup/WebGLDeveloperTools) by Khronos Group Inc.
123 | * [glMatrix](https://github.com/toji/gl-matrix) by [@toji](https://github.com/toji) and contributors
124 | * [minimal-gltf-loader](https://github.com/shrekshao/minimal-gltf-loader) by [@shrekshao](https://github.com/shrekshao)
125 | 
--------------------------------------------------------------------------------
/img/albedo.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LanLou123/WebGL-Clustered-Deferred-Forward-Plus-Rendering/9df54468fb5ffa0cc1c2b8b372c6c45f2d6fc0e4/img/albedo.JPG
--------------------------------------------------------------------------------
/img/blinn.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LanLou123/WebGL-Clustered-Deferred-Forward-Plus-Rendering/9df54468fb5ffa0cc1c2b8b372c6c45f2d6fc0e4/img/blinn.JPG
--------------------------------------------------------------------------------
/img/clu.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LanLou123/WebGL-Clustered-Deferred-Forward-Plus-Rendering/9df54468fb5ffa0cc1c2b8b372c6c45f2d6fc0e4/img/clu.png
--------------------------------------------------------------------------------
/img/clus.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LanLou123/WebGL-Clustered-Deferred-Forward-Plus-Rendering/9df54468fb5ffa0cc1c2b8b372c6c45f2d6fc0e4/img/clus.JPG
--------------------------------------------------------------------------------
/img/cluster.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LanLou123/WebGL-Clustered-Deferred-Forward-Plus-Rendering/9df54468fb5ffa0cc1c2b8b372c6c45f2d6fc0e4/img/cluster.gif
--------------------------------------------------------------------------------
/img/comp1.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LanLou123/WebGL-Clustered-Deferred-Forward-Plus-Rendering/9df54468fb5ffa0cc1c2b8b372c6c45f2d6fc0e4/img/comp1.JPG
--------------------------------------------------------------------------------
/img/deferred-v2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LanLou123/WebGL-Clustered-Deferred-Forward-Plus-Rendering/9df54468fb5ffa0cc1c2b8b372c6c45f2d6fc0e4/img/deferred-v2.png -------------------------------------------------------------------------------- /img/depth.JPG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LanLou123/WebGL-Clustered-Deferred-Forward-Plus-Rendering/9df54468fb5ffa0cc1c2b8b372c6c45f2d6fc0e4/img/depth.JPG -------------------------------------------------------------------------------- /img/init: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /img/lambert.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LanLou123/WebGL-Clustered-Deferred-Forward-Plus-Rendering/9df54468fb5ffa0cc1c2b8b372c6c45f2d6fc0e4/img/lambert.gif -------------------------------------------------------------------------------- /img/pa.JPG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LanLou123/WebGL-Clustered-Deferred-Forward-Plus-Rendering/9df54468fb5ffa0cc1c2b8b372c6c45f2d6fc0e4/img/pa.JPG -------------------------------------------------------------------------------- /img/purecol.JPG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LanLou123/WebGL-Clustered-Deferred-Forward-Plus-Rendering/9df54468fb5ffa0cc1c2b8b372c6c45f2d6fc0e4/img/purecol.JPG -------------------------------------------------------------------------------- /img/sc1.JPG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LanLou123/WebGL-Clustered-Deferred-Forward-Plus-Rendering/9df54468fb5ffa0cc1c2b8b372c6c45f2d6fc0e4/img/sc1.JPG -------------------------------------------------------------------------------- /img/surfacenor.JPG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LanLou123/WebGL-Clustered-Deferred-Forward-Plus-Rendering/9df54468fb5ffa0cc1c2b8b372c6c45f2d6fc0e4/img/surfacenor.JPG -------------------------------------------------------------------------------- /img/toon.JPG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LanLou123/WebGL-Clustered-Deferred-Forward-Plus-Rendering/9df54468fb5ffa0cc1c2b8b372c6c45f2d6fc0e4/img/toon.JPG -------------------------------------------------------------------------------- /img/toon.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LanLou123/WebGL-Clustered-Deferred-Forward-Plus-Rendering/9df54468fb5ffa0cc1c2b8b372c6c45f2d6fc0e4/img/toon.gif -------------------------------------------------------------------------------- /img/xslice.JPG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LanLou123/WebGL-Clustered-Deferred-Forward-Plus-Rendering/9df54468fb5ffa0cc1c2b8b372c6c45f2d6fc0e4/img/xslice.JPG -------------------------------------------------------------------------------- /img/yslice.JPG: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/LanLou123/WebGL-Clustered-Deferred-Forward-Plus-Rendering/9df54468fb5ffa0cc1c2b8b372c6c45f2d6fc0e4/img/yslice.JPG -------------------------------------------------------------------------------- /index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 19 | 20 | 21 | 22 | 23 | 24 | -------------------------------------------------------------------------------- /lib/minimal-gltf-loader.js: -------------------------------------------------------------------------------- 1 | // From https://github.com/shrekshao/minimal-gltf-loader 2 | import {vec3, vec4, quat, mat4} from 'gl-matrix'; 3 | 4 | var MinimalGLTFLoader = MinimalGLTFLoader || {}; 5 | 6 | // Data classes 7 | var Scene = MinimalGLTFLoader.Scene = function () { 8 | // not 1-1 to meshes in json file 9 | // each mesh with a different node hierarchy is a new instance 10 | this.meshes = []; 11 | //this.meshes = {}; 12 | }; 13 | 14 | // Node 15 | 16 | var Mesh = MinimalGLTFLoader.Mesh = function () { 17 | this.meshID = ''; // mesh id name in glTF json meshes 18 | this.primitives = []; 19 | }; 20 | 21 | var Primitive = MinimalGLTFLoader.Primitive = function () { 22 | this.mode = 4; // default: gl.TRIANGLES 23 | 24 | this.matrix = mat4.create(); 25 | 26 | this.indices = null; 27 | this.indicesComponentType = 5123; // default: gl.UNSIGNED_SHORT 28 | 29 | // !!: assume vertex buffer is interleaved 30 | // see discussion https://github.com/KhronosGroup/glTF/issues/21 31 | this.vertexBuffer = null; 32 | 33 | // attribute info (stride, offset, etc) 34 | this.attributes = {}; 35 | 36 | // cur glTF spec supports only one material per primitive 37 | this.material = null; 38 | this.technique = null; 39 | 40 | 41 | 42 | // // Program gl buffer name 43 | // // ?? reconsider if it's suitable to put it here 44 | // this.indicesWebGLBufferName = null; 45 | // this.vertexWebGLBufferName = null; 46 | 47 | }; 48 | 49 | 50 | /** 51 | * 52 | */ 53 | var glTFModel = MinimalGLTFLoader.glTFModel = function () { 54 | this.defaultScene = ''; 55 | this.scenes = {}; 56 | 57 | this.nodeMatrix = {}; 58 | 59 | this.json = null; 60 | 61 | this.shaders = {}; 62 | this.programs = {}; 63 | 64 | this.images = {}; 65 | 66 | }; 67 | 68 | 69 | 70 | var gl; 71 | 72 | var glTFLoader = MinimalGLTFLoader.glTFLoader = function (glContext) { 73 | gl = glContext; 74 | this._init(); 75 | this.glTF = null; 76 | }; 77 | 78 | glTFLoader.prototype._init = function() { 79 | this._parseDone = false; 80 | this._loadDone = false; 81 | 82 | this._bufferRequested = 0; 83 | this._bufferLoaded = 0; 84 | this._buffers = {}; 85 | this._bufferTasks = {}; 86 | 87 | // ?? Move to glTFModel to avoid collected by GC ?? 
88 | this._bufferViews = {}; 89 | 90 | this._shaderRequested = 0; 91 | this._shaderLoaded = 0; 92 | 93 | this._imageRequested = 0; 94 | this._imageLoaded = 0; 95 | 96 | this._pendingTasks = 0; 97 | this._finishedPendingTasks = 0; 98 | 99 | this.onload = null; 100 | 101 | }; 102 | 103 | 104 | glTFLoader.prototype._getBufferViewData = function(json, bufferViewID, callback) { 105 | var bufferViewData = this._bufferViews[bufferViewID]; 106 | if(!bufferViewData) { 107 | // load bufferView for the first time 108 | var bufferView = json.bufferViews[bufferViewID]; 109 | var bufferData = this._buffers[bufferView.buffer]; 110 | if (bufferData) { 111 | // buffer already loaded 112 | //console.log("dependent buffer ready, create bufferView" + bufferViewID); 113 | this._bufferViews[bufferViewID] = bufferData.slice(bufferView.byteOffset, bufferView.byteOffset + bufferView.byteLength); 114 | callback(bufferViewData); 115 | } else { 116 | // buffer not yet loaded 117 | // add pending task to _bufferTasks 118 | //console.log("pending Task: wait for buffer to load bufferView " + bufferViewID); 119 | this._pendingTasks++; 120 | var bufferTask = this._bufferTasks[bufferView.buffer]; 121 | if (!bufferTask) { 122 | this._bufferTasks[bufferView.buffer] = []; 123 | bufferTask = this._bufferTasks[bufferView.buffer]; 124 | } 125 | var loader = this; 126 | bufferTask.push(function(newBufferData) { 127 | // share same bufferView 128 | // hierarchy needs to be post processed in the renderer 129 | var curBufferViewData = loader._bufferViews[bufferViewID]; 130 | if (!curBufferViewData) { 131 | console.log('create new BufferView Data for ' + bufferViewID); 132 | curBufferViewData = loader._bufferViews[bufferViewID] = newBufferData.slice(bufferView.byteOffset, bufferView.byteOffset + bufferView.byteLength); 133 | } 134 | loader._finishedPendingTasks++; 135 | callback(curBufferViewData); 136 | 137 | // // create new bufferView for each mesh access with a different hierarchy 138 | // // hierarchy transformation will be prepared in this way 139 | // console.log('create new BufferView Data for ' + bufferViewID); 140 | // loader._bufferViews[bufferViewID] = newBufferData.slice(bufferView.byteOffset, bufferView.byteOffset + bufferView.byteLength); 141 | // loader._finishedPendingTasks++; 142 | // callback(loader._bufferViews[bufferViewID]); 143 | }); 144 | } 145 | 146 | } else { 147 | // no need to load buffer from file 148 | // use cached ones 149 | //console.log("use cached bufferView " + bufferViewID); 150 | callback(bufferViewData); 151 | } 152 | }; 153 | 154 | // glTFLoader.prototype._doNextLoadTaskInList = function () { 155 | // }; 156 | 157 | glTFLoader.prototype._checkComplete = function () { 158 | if (this._bufferRequested == this._bufferLoaded && 159 | this._shaderRequested == this._shaderLoaded && 160 | this._imageRequested == this._imageLoaded 161 | // && other resources finish loading 162 | ) { 163 | this._loadDone = true; 164 | } 165 | 166 | if (this._loadDone && this._parseDone && this._pendingTasks == this._finishedPendingTasks) { 167 | this.onload(this.glTF); 168 | } 169 | }; 170 | 171 | 172 | glTFLoader.prototype._parseGLTF = function (json) { 173 | 174 | this.glTF.json = json; 175 | this.glTF.defaultScene = json.scene; 176 | 177 | // Iterate through every scene 178 | if (json.scenes) { 179 | for (var sceneID in json.scenes) { 180 | var newScene = new Scene(); 181 | this.glTF.scenes[sceneID] = newScene; 182 | 183 | var scene = json.scenes[sceneID]; 184 | var nodes = scene.nodes; 185 | var nodeLen = 
nodes.length; 186 | 187 | // Iterate through every node within scene 188 | for (var n = 0; n < nodeLen; ++n) { 189 | var nodeID = nodes[n]; 190 | //var node = json.nodes[nodeName]; 191 | 192 | // Traverse node 193 | this._parseNode(json, nodeID, newScene); 194 | } 195 | } 196 | } 197 | 198 | this._parseDone = true; 199 | this._checkComplete(); 200 | }; 201 | 202 | 203 | var translationVec3 = vec3.create(); 204 | var rotationQuat = quat.create(); 205 | var scaleVec3 = vec3.create(); 206 | var TRMatrix = mat4.create(); 207 | 208 | glTFLoader.prototype._parseNode = function(json, nodeID, newScene, matrix) { 209 | var node = json.nodes[nodeID]; 210 | 211 | if (matrix === undefined) { 212 | matrix = mat4.create(); 213 | } 214 | 215 | var curMatrix = mat4.create(); 216 | 217 | if (node.hasOwnProperty('matrix')) { 218 | // matrix 219 | for(var i = 0; i < 16; ++i) { 220 | curMatrix[i] = node.matrix[i]; 221 | } 222 | mat4.multiply(curMatrix, matrix, curMatrix); 223 | //mat4.multiply(curMatrix, curMatrix, matrix); 224 | } else { 225 | // translation, rotation, scale (TRS) 226 | // TODO: these labels are optional 227 | vec3.set(translationVec3, node.translation[0], node.translation[1], node.translation[2]); 228 | quat.set(rotationQuat, node.rotation[0], node.rotation[1], node.rotation[2], node.rotation[3]); 229 | mat4.fromRotationTranslation(TRMatrix, rotationQuat, translationVec3); 230 | mat4.multiply(curMatrix, curMatrix, TRMatrix); 231 | vec3.set(scaleVec3, node.scale[0], node.scale[1], node.scale[2]); 232 | mat4.scale(curMatrix, curMatrix, scaleVec3); 233 | } 234 | 235 | // store node matrix 236 | this.glTF.nodeMatrix[nodeID] = curMatrix; 237 | 238 | 239 | 240 | // Iterate through every mesh within node 241 | var meshes = node.meshes; 242 | if(!!meshes) { 243 | var meshLen = meshes.length; 244 | for (var m = 0; m < meshLen; ++m) { 245 | var newMesh = new Mesh(); 246 | newScene.meshes.push(newMesh); 247 | 248 | var meshName = meshes[m]; 249 | var mesh = json.meshes[meshName]; 250 | 251 | newMesh.meshID = meshName; 252 | 253 | // Iterate through primitives 254 | var primitives = mesh.primitives; 255 | var primitiveLen = primitives.length; 256 | 257 | for (var p = 0; p < primitiveLen; ++p) { 258 | var newPrimitive = new Primitive(); 259 | newMesh.primitives.push(newPrimitive); 260 | 261 | var primitive = primitives[p]; 262 | 263 | if (primitive.indices) { 264 | this._parseIndices(json, primitive, newPrimitive); 265 | } 266 | 267 | this._parseAttributes(json, primitive, newPrimitive, curMatrix); 268 | 269 | // required 270 | newPrimitive.material = json.materials[primitive.material]; 271 | 272 | if (newPrimitive.material.technique) { 273 | newPrimitive.technique = json.techniques[newPrimitive.material.technique]; 274 | } else { 275 | // TODO: use default technique in glTF spec Appendix A 276 | } 277 | 278 | } 279 | } 280 | } 281 | 282 | 283 | // Go through all the children recursively 284 | var children = node.children; 285 | var childreLen = children.length; 286 | for (var c = 0; c < childreLen; ++c) { 287 | var childNodeID = children[c]; 288 | this._parseNode(json, childNodeID, newScene, curMatrix); 289 | } 290 | 291 | }; 292 | 293 | 294 | glTFLoader.prototype._parseIndices = function(json, primitive, newPrimitive) { 295 | 296 | var accessorName = primitive.indices; 297 | var accessor = json.accessors[accessorName]; 298 | 299 | newPrimitive.mode = primitive.mode || 4; 300 | newPrimitive.indicesComponentType = accessor.componentType; 301 | 302 | var loader = this; 303 | this._getBufferViewData(json, 
accessor.bufferView, function(bufferViewData) { 304 | newPrimitive.indices = _getAccessorData(bufferViewData, accessor); 305 | loader._checkComplete(); 306 | }); 307 | }; 308 | 309 | 310 | 311 | 312 | //var tmpVec4 = vec4.create(); 313 | //var inverseTransposeMatrix = mat4.create(); 314 | 315 | glTFLoader.prototype._parseAttributes = function(json, primitive, newPrimitive, matrix) { 316 | // !! Assume interleaved vertex attributes 317 | // i.e., all attributes share one bufferView 318 | 319 | 320 | // vertex buffer processing 321 | var firstSemantic = Object.keys(primitive.attributes)[0]; 322 | var firstAccessor = json.accessors[primitive.attributes[firstSemantic]]; 323 | var vertexBufferViewID = firstAccessor.bufferView; 324 | var bufferView = json.bufferViews[vertexBufferViewID]; 325 | 326 | var loader = this; 327 | 328 | this._getBufferViewData(json, vertexBufferViewID, function(bufferViewData) { 329 | var data = newPrimitive.vertexBuffer = _arrayBuffer2TypedArray( 330 | bufferViewData, 331 | 0, 332 | bufferView.byteLength / ComponentType2ByteSize[firstAccessor.componentType], 333 | firstAccessor.componentType 334 | ); 335 | 336 | for (var attributeName in primitive.attributes) { 337 | var accessorName = primitive.attributes[attributeName]; 338 | var accessor = json.accessors[accessorName]; 339 | 340 | var componentTypeByteSize = ComponentType2ByteSize[accessor.componentType]; 341 | 342 | var stride = accessor.byteStride / componentTypeByteSize; 343 | var offset = accessor.byteOffset / componentTypeByteSize; 344 | var count = accessor.count; 345 | 346 | // // Matrix transformation 347 | // if (attributeName === 'POSITION') { 348 | // for (var i = 0; i < count; ++i) { 349 | // // TODO: add vec2 and other(needed?) support 350 | // vec4.set(tmpVec4, data[stride * i + offset] 351 | // , data[stride * i + offset + 1] 352 | // , data[stride * i + offset + 2] 353 | // , 1); 354 | // vec4.transformMat4(tmpVec4, tmpVec4, matrix); 355 | // vec4.scale(tmpVec4, tmpVec4, 1 / tmpVec4[3]); 356 | // data[stride * i + offset] = tmpVec4[0]; 357 | // data[stride * i + offset + 1] = tmpVec4[1]; 358 | // data[stride * i + offset + 2] = tmpVec4[2]; 359 | // } 360 | // } 361 | // else if (attributeName === 'NORMAL') { 362 | // mat4.invert(inverseTransposeMatrix, matrix); 363 | // mat4.transpose(inverseTransposeMatrix, inverseTransposeMatrix); 364 | 365 | // for (var i = 0; i < count; ++i) { 366 | // // @todo: add vec2 and other(needed?) support 367 | // vec4.set(tmpVec4, data[stride * i + offset] 368 | // , data[stride * i + offset + 1] 369 | // , data[stride * i + offset + 2] 370 | // , 0); 371 | // vec4.transformMat4(tmpVec4, tmpVec4, inverseTransposeMatrix); 372 | // vec4.normalize(tmpVec4, tmpVec4); 373 | // data[stride * i + offset] = tmpVec4[0]; 374 | // data[stride * i + offset + 1] = tmpVec4[1]; 375 | // data[stride * i + offset + 2] = tmpVec4[2]; 376 | // } 377 | // } 378 | 379 | 380 | // local transform matrix 381 | 382 | mat4.copy(newPrimitive.matrix, matrix); 383 | 384 | 385 | 386 | // for vertexAttribPointer 387 | newPrimitive.attributes[attributeName] = { 388 | //GLuint program location, 389 | size: Type2NumOfComponent[accessor.type], 390 | type: accessor.componentType, 391 | //GLboolean normalized 392 | stride: accessor.byteStride, 393 | offset: accessor.byteOffset 394 | }; 395 | 396 | } 397 | 398 | loader._checkComplete(); 399 | }); 400 | 401 | }; 402 | 403 | /** 404 | * load a glTF model 405 | * 406 | * @param {String} uri uri of the .glTF file. 
Other resources (bins, images) are assumed to be in the same base path 407 | * @param {Function} callback the onload callback function 408 | */ 409 | glTFLoader.prototype.loadGLTF = function (uri, callback) { 410 | 411 | this._init(); 412 | 413 | this.onload = callback || function(glTF) { 414 | console.log('glTF model loaded.'); 415 | console.log(glTF); 416 | }; 417 | 418 | 419 | this.glTF = new glTFModel(); 420 | 421 | this.baseUri = _getBaseUri(uri); 422 | 423 | var loader = this; 424 | 425 | _loadJSON(uri, function (response) { 426 | // Parse JSON string into object 427 | var json = JSON.parse(response); 428 | 429 | var bid; 430 | 431 | var loadArrayBufferCallback = function (resource) { 432 | 433 | loader._buffers[bid] = resource; 434 | loader._bufferLoaded++; 435 | if (loader._bufferTasks[bid]) { 436 | var i,len; 437 | for (i = 0, len = loader._bufferTasks[bid].length; i < len; ++i) { 438 | (loader._bufferTasks[bid][i])(resource); 439 | } 440 | } 441 | loader._checkComplete(); 442 | 443 | }; 444 | 445 | // Launch loading resources task: buffers, etc. 446 | if (json.buffers) { 447 | for (bid in json.buffers) { 448 | 449 | loader._bufferRequested++; 450 | 451 | _loadArrayBuffer(loader.baseUri + json.buffers[bid].uri, loadArrayBufferCallback); 452 | 453 | } 454 | } 455 | 456 | // load images 457 | 458 | 459 | var loadImageCallback = function (img, iid) { 460 | loader._imageLoaded++; 461 | loader.glTF.images[iid] = img; 462 | loader._checkComplete(); 463 | }; 464 | 465 | var iid; 466 | 467 | if (json.images) { 468 | for (iid in json.images) { 469 | loader._imageRequested++; 470 | _loadImage(loader.baseUri + json.images[iid].uri, iid, loadImageCallback); 471 | } 472 | } 473 | 474 | 475 | // load shaders 476 | var pid; 477 | var newProgram; 478 | 479 | var loadVertexShaderFileCallback = function (resource) { 480 | loader._shaderLoaded++; 481 | newProgram.vertexShader = resource; 482 | if (newProgram.fragmentShader) { 483 | // create Program 484 | newProgram.program = _createProgram(gl, newProgram.vertexShader, newProgram.fragmentShader); 485 | loader._checkComplete(); 486 | } 487 | }; 488 | var loadFragmentShaderFileCallback = function (resource) { 489 | loader._shaderLoaded++; 490 | newProgram.fragmentShader = resource; 491 | if (newProgram.vertexShader) { 492 | // create Program 493 | newProgram.program = _createProgram(gl, newProgram.vertexShader, newProgram.fragmentShader); 494 | loader._checkComplete(); 495 | } 496 | }; 497 | 498 | if (json.programs) { 499 | for (pid in json.programs) { 500 | newProgram = loader.glTF.programs[pid] = { 501 | vertexShader: null, 502 | fragmentShader: null, 503 | program: null 504 | }; 505 | var program = json.programs[pid]; 506 | loader._shaderRequested += 2; 507 | 508 | _loadShaderFile(loader.baseUri + json.shaders[program.vertexShader].uri, loadVertexShaderFileCallback); 509 | _loadShaderFile(loader.baseUri + json.shaders[program.fragmentShader].uri, loadFragmentShaderFileCallback); 510 | } 511 | } 512 | 513 | 514 | 515 | 516 | // start glTF scene parsing 517 | loader._parseGLTF(json); 518 | }); 519 | }; 520 | 521 | 522 | 523 | 524 | // TODO: get from gl context 525 | var ComponentType2ByteSize = { 526 | 5120: 1, // BYTE 527 | 5121: 1, // UNSIGNED_BYTE 528 | 5122: 2, // SHORT 529 | 5123: 2, // UNSIGNED_SHORT 530 | 5126: 4 // FLOAT 531 | }; 532 | 533 | var Type2NumOfComponent = { 534 | 'SCALAR': 1, 535 | 'VEC2': 2, 536 | 'VEC3': 3, 537 | 'VEC4': 4, 538 | 'MAT2': 4, 539 | 'MAT3': 9, 540 | 'MAT4': 16 541 | }; 542 | 543 | MinimalGLTFLoader.Attributes = [ 
544 | 'POSITION', 545 | 'NORMAL', 546 | 'TEXCOORD', 547 | 'COLOR', 548 | 'JOINT', 549 | 'WEIGHT' 550 | ]; 551 | 552 | // MinimalGLTFLoader.UniformFunctionsBind = { 553 | // 35676: gl.uniformMatrix4fv // FLOAT_MAT4 554 | // }; 555 | 556 | 557 | // ------ Scope limited private util functions--------------- 558 | 559 | function _arrayBuffer2TypedArray(resource, byteOffset, countOfComponentType, componentType) { 560 | switch(componentType) { 561 | // @todo: finish 562 | case 5122: return new Int16Array(resource, byteOffset, countOfComponentType); 563 | case 5123: return new Uint16Array(resource, byteOffset, countOfComponentType); 564 | case 5124: return new Int32Array(resource, byteOffset, countOfComponentType); 565 | case 5125: return new Uint32Array(resource, byteOffset, countOfComponentType); 566 | case 5126: return new Float32Array(resource, byteOffset, countOfComponentType); 567 | default: return null; 568 | } 569 | } 570 | 571 | function _getAccessorData(bufferViewData, accessor) { 572 | return _arrayBuffer2TypedArray( 573 | bufferViewData, 574 | accessor.byteOffset, 575 | accessor.count * Type2NumOfComponent[accessor.type], 576 | accessor.componentType 577 | ); 578 | } 579 | 580 | function _getBaseUri(uri) { 581 | 582 | // https://github.com/AnalyticalGraphicsInc/cesium/blob/master/Source/Core/getBaseUri.js 583 | 584 | var basePath = ''; 585 | var i = uri.lastIndexOf('/'); 586 | if(i !== -1) { 587 | basePath = uri.substring(0, i + 1); 588 | } 589 | 590 | return basePath; 591 | } 592 | 593 | function _loadJSON(src, callback) { 594 | 595 | // native json loading technique from @KryptoniteDove: 596 | // http://codepen.io/KryptoniteDove/post/load-json-file-locally-using-pure-javascript 597 | 598 | var xobj = new XMLHttpRequest(); 599 | xobj.overrideMimeType("application/json"); 600 | xobj.open('GET', src, true); 601 | xobj.onreadystatechange = function () { 602 | if (xobj.readyState == 4 && // Request finished, response ready 603 | xobj.status == "200") { // Status OK 604 | callback(xobj.responseText, this); 605 | } 606 | }; 607 | xobj.send(null); 608 | } 609 | 610 | function _loadArrayBuffer(url, callback) { 611 | var xobj = new XMLHttpRequest(); 612 | xobj.responseType = 'arraybuffer'; 613 | xobj.open('GET', url, true); 614 | xobj.onreadystatechange = function () { 615 | if (xobj.readyState == 4 && // Request finished, response ready 616 | xobj.status == "200") { // Status OK 617 | var arrayBuffer = xobj.response; 618 | if (arrayBuffer && callback) { 619 | callback(arrayBuffer); 620 | } 621 | } 622 | }; 623 | xobj.send(null); 624 | } 625 | 626 | function _loadShaderFile(url, callback) { 627 | var xobj = new XMLHttpRequest(); 628 | xobj.responseType = 'text'; 629 | xobj.open('GET', url, true); 630 | xobj.onreadystatechange = function () { 631 | if (xobj.readyState == 4 && // Request finished, response ready 632 | xobj.status == "200") { // Status OK 633 | var file = xobj.response; 634 | if (file && callback) { 635 | callback(file); 636 | } 637 | } 638 | }; 639 | xobj.send(null); 640 | } 641 | 642 | function _loadImage(url, iid, onload) { 643 | var img = new Image(); 644 | img.src = url; 645 | img.onload = function() { 646 | onload(img, iid); 647 | }; 648 | } 649 | 650 | 651 | function _createShader(gl, source, type) { 652 | var shader = gl.createShader(type); 653 | gl.shaderSource(shader, source); 654 | gl.compileShader(shader); 655 | return shader; 656 | } 657 | 658 | function _createProgram(gl, vertexShaderSource, fragmentShaderSource) { 659 | var program = gl.createProgram(); 660 | var 
vshader = _createShader(gl, vertexShaderSource, gl.VERTEX_SHADER); 661 | var fshader = _createShader(gl, fragmentShaderSource, gl.FRAGMENT_SHADER); 662 | gl.attachShader(program, vshader); 663 | gl.deleteShader(vshader); 664 | gl.attachShader(program, fshader); 665 | gl.deleteShader(fshader); 666 | gl.linkProgram(program); 667 | 668 | var log = gl.getProgramInfoLog(program); 669 | if (log) { 670 | console.log(log); 671 | } 672 | 673 | log = gl.getShaderInfoLog(vshader); 674 | if (log) { 675 | console.log(log); 676 | } 677 | 678 | log = gl.getShaderInfoLog(fshader); 679 | if (log) { 680 | console.log(log); 681 | } 682 | 683 | return program; 684 | } 685 | 686 | export { glTFLoader }; -------------------------------------------------------------------------------- /models/sponza/buffer_0.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LanLou123/WebGL-Clustered-Deferred-Forward-Plus-Rendering/9df54468fb5ffa0cc1c2b8b372c6c45f2d6fc0e4/models/sponza/buffer_0.bin -------------------------------------------------------------------------------- /models/sponza/color.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LanLou123/WebGL-Clustered-Deferred-Forward-Plus-Rendering/9df54468fb5ffa0cc1c2b8b372c6c45f2d6fc0e4/models/sponza/color.jpeg -------------------------------------------------------------------------------- /models/sponza/fragmentShader0.glsl: -------------------------------------------------------------------------------- 1 | precision highp float; 2 | uniform vec4 u_ambient; 3 | uniform sampler2D u_diffuse; 4 | uniform sampler2D u_normal; 5 | uniform vec4 u_emission; 6 | uniform vec4 u_specular; 7 | uniform float u_shininess; 8 | uniform float u_transparency; 9 | varying vec3 v_positionEC; 10 | varying vec3 v_normal; 11 | varying vec2 v_texcoord_0; 12 | 13 | vec3 applyNormalMap(vec3 geomnor, vec3 normap) { 14 | normap = normap * 2.0 - 1.0; 15 | vec3 up = normalize(vec3(0.001, 1, 0.001)); 16 | vec3 surftan = normalize(cross(geomnor, up)); 17 | vec3 surfbinor = cross(geomnor, surftan); 18 | return normap.y * surftan + normap.x * surfbinor + normap.z * geomnor; 19 | } 20 | 21 | void main(void) { 22 | vec3 normal = applyNormalMap(normalize(v_normal), texture2D(u_normal, v_texcoord_0).rgb); 23 | vec4 diffuse = texture2D(u_diffuse, v_texcoord_0); 24 | vec3 diffuseLight = vec3(0.0, 0.0, 0.0); 25 | vec3 specular = u_specular.rgb; 26 | vec3 specularLight = vec3(0.0, 0.0, 0.0); 27 | vec3 emission = u_emission.rgb; 28 | vec3 ambient = u_ambient.rgb; 29 | vec3 viewDir = -normalize(v_positionEC); 30 | vec3 ambientLight = vec3(0.0, 0.0, 0.0); 31 | ambientLight += vec3(0.2, 0.2, 0.2); 32 | vec3 l = vec3(0.0, 0.0, 1.0); 33 | diffuseLight += vec3(1.0, 1.0, 1.0) * max(dot(normal,l), 0.); 34 | vec3 h = normalize(l + viewDir); 35 | float specularIntensity = max(0., pow(max(dot(normal, h), 0.), u_shininess)); 36 | specularLight += vec3(1.0, 1.0, 1.0) * specularIntensity; 37 | vec3 color = vec3(0.0, 0.0, 0.0); 38 | color += diffuse.rgb * diffuseLight; 39 | color += specular * specularLight; 40 | color += emission; 41 | color += ambient * ambientLight; 42 | gl_FragColor = vec4(color * diffuse.a, diffuse.a * u_transparency); 43 | } 44 | -------------------------------------------------------------------------------- /models/sponza/normal.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/LanLou123/WebGL-Clustered-Deferred-Forward-Plus-Rendering/9df54468fb5ffa0cc1c2b8b372c6c45f2d6fc0e4/models/sponza/normal.png -------------------------------------------------------------------------------- /models/sponza/sponza.gltf: -------------------------------------------------------------------------------- 1 | { 2 | "accessors": { 3 | "accessor_index_0": { 4 | "bufferView": "bufferView_1", 5 | "byteOffset": 0, 6 | "byteStride": 0, 7 | "componentType": 5125, 8 | "count": 199269, 9 | "type": "SCALAR", 10 | "min": [ 11 | 0 12 | ], 13 | "max": [ 14 | 199268 15 | ] 16 | }, 17 | "accessor_position": { 18 | "bufferView": "bufferView_0", 19 | "byteOffset": 0, 20 | "byteStride": 0, 21 | "componentType": 5126, 22 | "count": 148975, 23 | "min": [ 24 | -17.268321990966797, 25 | -0.006653999909758568, 26 | -7.7815141677856445 27 | ], 28 | "max": [ 29 | 17.551677703857422, 30 | 15.55334758758545, 31 | 7.818483829498291 32 | ], 33 | "type": "VEC3" 34 | }, 35 | "accessor_normal": { 36 | "bufferView": "bufferView_0", 37 | "byteOffset": 1787700, 38 | "byteStride": 0, 39 | "componentType": 5126, 40 | "count": 148975, 41 | "type": "VEC3", 42 | "min": [ 43 | null, 44 | null, 45 | null 46 | ], 47 | "max": [ 48 | null, 49 | null, 50 | null 51 | ] 52 | }, 53 | "accessor_uv": { 54 | "bufferView": "bufferView_0", 55 | "byteOffset": 3575400, 56 | "byteStride": 0, 57 | "componentType": 5126, 58 | "count": 148975, 59 | "type": "VEC2", 60 | "min": [ 61 | -57.04376983642578, 62 | -61.176544189453125 63 | ], 64 | "max": [ 65 | 57.97621536254883, 66 | 62.176544189453125 67 | ] 68 | } 69 | }, 70 | "asset": { 71 | "generator": "OBJ2GLTF", 72 | "premultipliedAlpha": true, 73 | "profile": { 74 | "api": "WebGL", 75 | "version": "1.0" 76 | }, 77 | "version": "1.0" 78 | }, 79 | "buffers": { 80 | "buffer_0": { 81 | "type": "arraybuffer", 82 | "byteLength": 5564276, 83 | "uri": "buffer_0.bin" 84 | } 85 | }, 86 | "bufferViews": { 87 | "bufferView_0": { 88 | "buffer": "buffer_0", 89 | "byteLength": 4767200, 90 | "byteOffset": 0, 91 | "target": 34962 92 | }, 93 | "bufferView_1": { 94 | "buffer": "buffer_0", 95 | "byteLength": 797076, 96 | "byteOffset": 4767200, 97 | "target": 34963 98 | } 99 | }, 100 | "images": { 101 | "color": { 102 | "uri": "color.jpeg" 103 | }, 104 | "normals": { 105 | "uri": "normal.png" 106 | } 107 | }, 108 | "materials": { 109 | "material_lambert2SG": { 110 | "name": "lambert2SG", 111 | "extensions": {}, 112 | "values": { 113 | "ambient": [ 114 | 0, 115 | 0, 116 | 0, 117 | 1 118 | ], 119 | "diffuse": "texture_color", 120 | "normalMap": "texture_normal", 121 | "emission": [ 122 | 0, 123 | 0, 124 | 0, 125 | 1 126 | ], 127 | "specular": [ 128 | 0, 129 | 0, 130 | 0, 131 | 1 132 | ], 133 | "shininess": 0, 134 | "transparency": 1 135 | }, 136 | "technique": "technique0" 137 | } 138 | }, 139 | "meshes": { 140 | "mesh_sponza": { 141 | "name": "sponza", 142 | "primitives": [ 143 | { 144 | "attributes": { 145 | "POSITION": "accessor_position", 146 | "NORMAL": "accessor_normal", 147 | "TEXCOORD_0": "accessor_uv" 148 | }, 149 | "indices": "accessor_index_0", 150 | "material": "material_lambert2SG", 151 | "mode": 4 152 | } 153 | ] 154 | } 155 | }, 156 | "nodes": { 157 | "rootNode": { 158 | "children": [], 159 | "meshes": [ 160 | "mesh_sponza" 161 | ], 162 | "matrix": [ 163 | 1, 164 | 0, 165 | 0, 166 | 0, 167 | 0, 168 | 1, 169 | 0, 170 | 0, 171 | 0, 172 | 0, 173 | 1, 174 | 0, 175 | 0, 176 | 0, 177 | 0, 178 | 1 179 | ] 180 | } 181 | }, 182 | "samplers": { 183 | "sampler_0": { 184 | 
"magFilter": 9729, 185 | "minFilter": 9986, 186 | "wrapS": 10497, 187 | "wrapT": 10497 188 | } 189 | }, 190 | "scene": "scene_sponza", 191 | "scenes": { 192 | "scene_sponza": { 193 | "nodes": [ 194 | "rootNode" 195 | ] 196 | } 197 | }, 198 | "textures": { 199 | "texture_color": { 200 | "format": 6407, 201 | "internalFormat": 6407, 202 | "sampler": "sampler_0", 203 | "source": "color", 204 | "target": 3553, 205 | "type": 5121 206 | }, 207 | "texture_normal": { 208 | "format": 6407, 209 | "internalFormat": 6407, 210 | "sampler": "sampler_0", 211 | "source": "normals", 212 | "target": 3553, 213 | "type": 5121 214 | } 215 | }, 216 | "extensionsUsed": [], 217 | "animations": {}, 218 | "cameras": {}, 219 | "techniques": { 220 | "technique0": { 221 | "attributes": { 222 | "a_position": "position", 223 | "a_normal": "normal", 224 | "a_texcoord_0": "texcoord_0" 225 | }, 226 | "parameters": { 227 | "modelViewMatrix": { 228 | "semantic": "MODELVIEW", 229 | "type": 35676 230 | }, 231 | "projectionMatrix": { 232 | "semantic": "PROJECTION", 233 | "type": 35676 234 | }, 235 | "normalMatrix": { 236 | "semantic": "MODELVIEWINVERSETRANSPOSE", 237 | "type": 35675 238 | }, 239 | "ambient": { 240 | "type": 35666 241 | }, 242 | "diffuse": { 243 | "type": 35678 244 | }, 245 | "normalMap": { 246 | "type": 35678 247 | }, 248 | "emission": { 249 | "type": 35666 250 | }, 251 | "specular": { 252 | "type": 35666 253 | }, 254 | "shininess": { 255 | "type": 5126 256 | }, 257 | "transparency": { 258 | "type": 5126 259 | }, 260 | "position": { 261 | "semantic": "POSITION", 262 | "type": 35665 263 | }, 264 | "normal": { 265 | "semantic": "NORMAL", 266 | "type": 35665 267 | }, 268 | "texcoord_0": { 269 | "semantic": "TEXCOORD_0", 270 | "type": 35664 271 | } 272 | }, 273 | "program": "program0", 274 | "states": { 275 | "enable": [ 276 | 2884, 277 | 2929 278 | ] 279 | }, 280 | "uniforms": { 281 | "u_modelViewMatrix": "modelViewMatrix", 282 | "u_projectionMatrix": "projectionMatrix", 283 | "u_normalMatrix": "normalMatrix", 284 | "u_ambient": "ambient", 285 | "u_diffuse": "diffuse", 286 | "u_normal": "normalMap", 287 | "u_emission": "emission", 288 | "u_specular": "specular", 289 | "u_shininess": "shininess", 290 | "u_transparency": "transparency" 291 | } 292 | } 293 | }, 294 | "programs": { 295 | "program0": { 296 | "attributes": [ 297 | "a_position", 298 | "a_normal", 299 | "a_texcoord_0" 300 | ], 301 | "fragmentShader": "fragmentShader0", 302 | "vertexShader": "vertexShader0" 303 | } 304 | }, 305 | "shaders": { 306 | "vertexShader0": { 307 | "type": 35633, 308 | "uri": "vertexShader0.glsl" 309 | }, 310 | "fragmentShader0": { 311 | "type": 35632, 312 | "uri": "fragmentShader0.glsl" 313 | } 314 | }, 315 | "skins": {}, 316 | "extensions": {} 317 | } 318 | -------------------------------------------------------------------------------- /models/sponza/vertexShader0.glsl: -------------------------------------------------------------------------------- 1 | precision highp float; 2 | uniform mat4 u_modelViewMatrix; 3 | uniform mat4 u_projectionMatrix; 4 | uniform mat3 u_normalMatrix; 5 | attribute vec3 a_position; 6 | varying vec3 v_positionEC; 7 | attribute vec3 a_normal; 8 | varying vec3 v_normal; 9 | attribute vec2 a_texcoord_0; 10 | varying vec2 v_texcoord_0; 11 | void main(void) { 12 | vec4 pos = u_modelViewMatrix * vec4(a_position,1.0); 13 | v_positionEC = pos.xyz; 14 | gl_Position = u_projectionMatrix * pos; 15 | v_normal = u_normalMatrix * a_normal; 16 | v_texcoord_0 = a_texcoord_0; 17 | } 18 | 
-------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "scripts": { 3 | "start": "webpack-dev-server", 4 | "start:production": "webpack-dev-server --env.production", 5 | "build": "webpack --env.production" 6 | }, 7 | "dependencies": { 8 | "dat.gui": "^0.7.3", 9 | "gl-matrix": "^2.4.0", 10 | "spectorjs": "^0.9.0", 11 | "stats-js": "^1.0.0-alpha1", 12 | "three": "^0.87.1", 13 | "three-js": "^79.0.0", 14 | "three-orbitcontrols": "^1.2.1", 15 | "webgl-debug": "^1.0.2" 16 | }, 17 | "devDependencies": { 18 | "babel-core": "^6.26.0", 19 | "babel-loader": "^7.1.2", 20 | "babel-minify-webpack-plugin": "^0.2.0", 21 | "babel-preset-env": "^1.6.0", 22 | "webpack": "^3.7.1", 23 | "webpack-dev-server": "^2.9.2", 24 | "webpack-glsl-loader": "^1.0.1" 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/init.js: -------------------------------------------------------------------------------- 1 | // TODO: Change this to enable / disable debug mode 2 | export const DEBUG = true && process.env.NODE_ENV === 'development'; 3 | 4 | import DAT from 'dat.gui'; 5 | import WebGLDebug from 'webgl-debug'; 6 | import Stats from 'stats-js'; 7 | import { PerspectiveCamera } from 'three'; 8 | import OrbitControls from 'three-orbitcontrols'; 9 | import { Spector } from 'spectorjs'; 10 | 11 | export var ABORTED = false; 12 | export function abort(message) { 13 | ABORTED = true; 14 | throw message; 15 | } 16 | 17 | // Get the canvas element 18 | export const canvas = document.getElementById('canvas'); 19 | 20 | // Initialize the WebGL context 21 | const glContext = canvas.getContext('webgl'); 22 | 23 | // Get a debug context 24 | export const gl = DEBUG ? 
WebGLDebug.makeDebugContext(glContext, (err, funcName, args) => { 25 | abort(WebGLDebug.glEnumToString(err) + ' was caused by call to: ' + funcName); 26 | }) : glContext; 27 | 28 | const supportedExtensions = gl.getSupportedExtensions(); 29 | const requiredExtensions = [ 30 | 'OES_texture_float', 31 | 'OES_texture_float_linear', 32 | 'OES_element_index_uint', 33 | 'WEBGL_depth_texture', 34 | 'WEBGL_draw_buffers', 35 | ]; 36 | 37 | // Check that all required extensions are supported 38 | for (let i = 0; i < requiredExtensions.length; ++i) { 39 | if (supportedExtensions.indexOf(requiredExtensions[i]) < 0) { 40 | throw 'Unable to load extension ' + requiredExtensions[i]; 41 | } 42 | } 43 | 44 | // Get the maximum number of draw buffers 45 | gl.getExtension('OES_texture_float'); 46 | gl.getExtension('OES_texture_float_linear'); 47 | gl.getExtension('OES_element_index_uint'); 48 | gl.getExtension('WEBGL_depth_texture'); 49 | export const WEBGL_draw_buffers = gl.getExtension('WEBGL_draw_buffers'); 50 | export const MAX_DRAW_BUFFERS_WEBGL = gl.getParameter(WEBGL_draw_buffers.MAX_DRAW_BUFFERS_WEBGL); 51 | 52 | export const gui = new DAT.GUI(); 53 | 54 | // initialize statistics widget 55 | const stats = new Stats(); 56 | stats.setMode(1); // 0: fps, 1: ms 57 | stats.domElement.style.position = 'absolute'; 58 | stats.domElement.style.left = '0px'; 59 | stats.domElement.style.top = '0px'; 60 | document.body.appendChild(stats.domElement); 61 | 62 | // Initialize camera 63 | export const camera = new PerspectiveCamera(75, canvas.clientWidth / canvas.clientHeight, 0.1, 1000); 64 | 65 | // Initialize camera controls 66 | export const cameraControls = new OrbitControls(camera, canvas); 67 | cameraControls.enableDamping = true; 68 | cameraControls.enableZoom = true; 69 | cameraControls.rotateSpeed = 0.3; 70 | cameraControls.zoomSpeed = 1.0; 71 | cameraControls.panSpeed = 2.0; 72 | 73 | function setSize(width, height) { 74 | canvas.width = width; 75 | canvas.height = height; 76 | camera.aspect = width / height; 77 | camera.updateProjectionMatrix(); 78 | } 79 | 80 | setSize(canvas.clientWidth, canvas.clientHeight); 81 | window.addEventListener('resize', () => setSize(canvas.clientWidth, canvas.clientHeight)); 82 | 83 | if (DEBUG) { 84 | const spector = new Spector(); 85 | spector.displayUI(); 86 | } 87 | 88 | // Creates a render loop that is wrapped with camera update and stats logging 89 | export function makeRenderLoop(render) { 90 | return function tick() { 91 | cameraControls.update(); 92 | stats.begin(); 93 | render(); 94 | stats.end(); 95 | if (!ABORTED) { 96 | requestAnimationFrame(tick) 97 | } 98 | } 99 | } 100 | 101 | // import the main application 102 | require('./main'); 103 | -------------------------------------------------------------------------------- /src/main.js: -------------------------------------------------------------------------------- 1 | import { makeRenderLoop, camera, cameraControls, gui, gl } from './init'; 2 | import ForwardRenderer from './renderers/forward'; 3 | import ForwardPlusRenderer from './renderers/forwardPlus'; 4 | import ClusteredRenderer from './renderers/clustered'; 5 | import Scene from './scene'; 6 | 7 | const FORWARD = 'Forward'; 8 | const FORWARD_PLUS = 'Forward+'; 9 | const CLUSTERED = 'Clustered'; 10 | const lambert = 'lambert'; 11 | const blinnphong = 'blinnphong'; 12 | const toon = 'toon'; 13 | 14 | var shadingtype = 0; 15 | 16 | const params = { 17 | renderer: FORWARD, 18 | shadingmode:lambert, 19 | _renderer: null, 20 | }; 21 | 22 | 
setRenderer(params.renderer); 23 | 24 | function setRenderer(renderer) { 25 | if(params.shadingmode == 'lambert') 26 | { 27 | shadingtype = 0; 28 | } 29 | else if(params.shadingmode == 'blinnphong') 30 | { 31 | shadingtype = 1; 32 | } 33 | else if(params.shadingmode == 'toon') 34 | { 35 | shadingtype = 2; 36 | } 37 | switch(renderer) { 38 | case FORWARD: 39 | params._renderer = new ForwardRenderer(); 40 | break; 41 | case FORWARD_PLUS: 42 | params._renderer = new ForwardPlusRenderer(15, 15, 15); 43 | break; 44 | case CLUSTERED: 45 | params._renderer = new ClusteredRenderer(15, 15, 15,shadingtype); 46 | break; 47 | } 48 | } 49 | 50 | gui.add(params, 'renderer', [FORWARD, FORWARD_PLUS, CLUSTERED]).onChange(setRenderer); 51 | 52 | gui.add(params, 'shadingmode', [blinnphong, lambert,toon]).onChange(function (x) {setRenderer(params.renderer);}); 53 | 54 | const scene = new Scene(); 55 | scene.loadGLTF('models/sponza/sponza.gltf'); 56 | 57 | camera.position.set(-10, 8, 0); 58 | cameraControls.target.set(0, 2, 0); 59 | gl.enable(gl.DEPTH_TEST); 60 | 61 | function render() { 62 | scene.update(); 63 | params._renderer.render(camera, scene); 64 | } 65 | 66 | makeRenderLoop(render)(); -------------------------------------------------------------------------------- /src/renderers/base.js: -------------------------------------------------------------------------------- 1 | import TextureBuffer from './textureBuffer'; 2 | import { NUM_LIGHTS } from '../scene'; 3 | import { mat4, vec4, vec3, vec2 } from 'gl-matrix'; 4 | 5 | export const MAX_LIGHTS_PER_CLUSTER = 700; 6 | 7 | function getNormalComponents(angle) { 8 | 9 | let bigHypot = Math.sqrt(1 + angle*angle); 10 | let normSide1 = 1 / bigHypot; 11 | let normSide2 = -angle*normSide1; 12 | return vec2.fromValues(normSide1, normSide2); 13 | } 14 | 15 | function findPlanePointDis(planePos, Pt, XY) { 16 | let interval = Math.sqrt(planePos*planePos+1); 17 | let lightp = vec3.fromValues(Pt[0],Pt[1],Pt[2]); 18 | let res = vec3.fromValues(0,0,0); 19 | if(XY == 1) { 20 | let planenor = vec3.fromValues(1.0/interval,0.0,-planePos/interval); 21 | res = vec3.dot(lightp,planenor); 22 | } 23 | if(XY==2) 24 | { 25 | let planenor = vec3.fromValues(0.0,1.0/interval,-planePos/interval); 26 | res = vec3.dot(lightp,planenor); 27 | } 28 | return res; 29 | } 30 | 31 | export default class BaseRenderer { 32 | constructor(xSlices, ySlices, zSlices) { 33 | // Create a texture to store cluster data. Each cluster stores the number of lights followed by the light indices 34 | this._clusterTexture = new TextureBuffer(xSlices * ySlices * zSlices, MAX_LIGHTS_PER_CLUSTER + 1); 35 | this._xSlices = xSlices; 36 | this._ySlices = ySlices; 37 | this._zSlices = zSlices; 38 | } 39 | 40 | updateClusters(camera, viewMatrix, scene) { 41 | // TODO: Update the cluster texture with the count and indices of the lights in each cluster 42 | // This will take some time. The math is nontrivial... 
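    // The cluster texture stores, per cluster, one float holding the light count followed by
    // up to MAX_LIGHTS_PER_CLUSTER light indices, packed 4 floats per texel (see TextureBuffer).
    // The loops below reset every count, then for each light find the range of x/y/z slices its
    // sphere of influence can touch: z slices are uniform in view-space depth between near and far,
    // while each x/y slice boundary is a plane through the camera origin whose slope per unit depth
    // is xstart + xlengthPerCluster * j (or the y equivalent). getNormalComponents() returns that
    // plane's unit normal, so vec3.dot(lightPos, normal) is the light's signed distance to the
    // boundary, compared against the light radius to find the first and last overlapped slice.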
43 | 44 | for (let z = 0; z < this._zSlices; ++z) { 45 | for (let y = 0; y < this._ySlices; ++y) { 46 | for (let x = 0; x < this._xSlices; ++x) { 47 | let i = x + y * this._xSlices + z * this._xSlices * this._ySlices; 48 | // Reset the light count to 0 for every cluster 49 | this._clusterTexture.buffer[this._clusterTexture.bufferIndex(i, 0)] = 0; 50 | } 51 | } 52 | } 53 | 54 | const halfY = Math.tan((camera.fov*0.5) * (Math.PI/180.0)); 55 | const ylengthPerCluster = (halfY * 2.0 / this._ySlices); 56 | const xlengthPerCluster = (halfY * 2.0 / this._xSlices) * camera.aspect; 57 | const zlengthPerCluster = (camera.far - camera.near) / this._zSlices; 58 | const ystart = -halfY; 59 | const xstart = -halfY * camera.aspect; 60 | 61 | for(let i = 0; i < NUM_LIGHTS; ++i) { 62 | let lightRadius = scene.lights[i].radius; 63 | let lightPos = vec4.fromValues(scene.lights[i].position[0], scene.lights[i].position[1], scene.lights[i].position[2], 1.0); 64 | vec4.transformMat4(lightPos, lightPos, viewMatrix); 65 | lightPos[2] *= -1.0; 66 | 67 | 68 | let xminidx = this._xSlices; 69 | let xmaxidx = this._xSlices; 70 | let yminidx = this._ySlices; 71 | let ymaxidx = this._ySlices; 72 | let minposz = lightPos[2] - camera.near - lightRadius; 73 | let maxposz = lightPos[2] - camera.near + lightRadius; 74 | let zminidx = Math.floor(minposz / zlengthPerCluster); 75 | let zmaxidx = Math.floor(maxposz / zlengthPerCluster)+1; 76 | if(zminidx > this._zSlices-1 || zmaxidx < 0) { continue; } 77 | zminidx = Math.max(0, zminidx); 78 | zmaxidx = Math.min(this._zSlices, zmaxidx); 79 | 80 | for(let j = 0; j <= this._xSlices; ++j) { 81 | let norm2 = vec2.clone(getNormalComponents(xstart+xlengthPerCluster*j)); 82 | let norm3 = vec3.fromValues(norm2[0], 0, norm2[1]); 83 | if(vec3.dot(lightPos, norm3) < lightRadius) { 84 | xminidx = Math.max(0, j-1); 85 | break; 86 | } 87 | } 88 | 89 | 90 | for(let j = xminidx+1; j<=this._xSlices; ++j) { 91 | let norm2 = vec2.clone(getNormalComponents(xstart+xlengthPerCluster*j)); 92 | let norm3 = vec3.fromValues(norm2[0], 0, norm2[1]); 93 | if(vec3.dot(lightPos, norm3) < -lightRadius) { 94 | xmaxidx = Math.max(0, j-1); 95 | break; 96 | } 97 | } 98 | 99 | 100 | for(let j = 0; j <= this._ySlices; ++j) { 101 | let norm2 = vec2.clone(getNormalComponents(ystart+ylengthPerCluster*j)); 102 | let norm3 = vec3.fromValues(0, norm2[0], norm2[1]); 103 | if(vec3.dot(lightPos, norm3) < lightRadius) { 104 | yminidx = Math.max(0, j-1); 105 | break; 106 | } 107 | } 108 | 109 | 110 | for(let j = yminidx+1; j<=this._ySlices; ++j) { 111 | let norm2 = vec2.clone(getNormalComponents(ystart+ylengthPerCluster*j)); 112 | let norm3 = vec3.fromValues(0, norm2[0], norm2[1]); 113 | if(vec3.dot(lightPos, norm3) < -lightRadius) { 114 | ymaxidx = Math.max(0, j-1); 115 | break; 116 | } 117 | } 118 | 119 | 120 | 121 | for(let z = zminidx; z < zmaxidx; ++z) { 122 | for(let y = yminidx; y < ymaxidx; ++y) { 123 | for(let x = xminidx; x < xmaxidx; ++x) { 124 | let clusterIdx = x + y*this._xSlices + z*this._xSlices*this._ySlices; 125 | let lightCountIdx = this._clusterTexture.bufferIndex(clusterIdx, 0); 126 | let lightCount = 1 + this._clusterTexture.buffer[lightCountIdx]; 127 | 128 | if(lightCount <= MAX_LIGHTS_PER_CLUSTER) { 129 | this._clusterTexture.buffer[lightCountIdx] = lightCount; 130 | let texel = Math.floor(lightCount*0.25); 131 | let texelIdx = this._clusterTexture.bufferIndex(clusterIdx, texel); 132 | let componentIdx = lightCount - texel*4; 133 | this._clusterTexture.buffer[texelIdx+componentIdx] = i; 134 | } 135 | } 136 
| } 137 | } 138 | 139 | 140 | 141 | }//end light loop 142 | 143 | this._clusterTexture.update(); 144 | } 145 | } -------------------------------------------------------------------------------- /src/renderers/clustered.js: -------------------------------------------------------------------------------- 1 | import { gl, WEBGL_draw_buffers, canvas } from '../init'; 2 | import { mat4, vec4 ,vec3, vec2} from 'gl-matrix'; 3 | import { loadShaderProgram, renderFullscreenQuad } from '../utils'; 4 | import { NUM_LIGHTS } from '../scene'; 5 | import toTextureVert from '../shaders/deferredToTexture.vert.glsl'; 6 | import toTextureFrag from '../shaders/deferredToTexture.frag.glsl'; 7 | import QuadVertSource from '../shaders/quad.vert.glsl'; 8 | import fsSource from '../shaders/deferred.frag.glsl.js'; 9 | import TextureBuffer from './textureBuffer'; 10 | import BaseRenderer from './base'; 11 | import {MAX_LIGHTS_PER_CLUSTER} from "./base"; 12 | 13 | export const NUM_GBUFFERS = 2; 14 | 15 | export default class ClusteredRenderer extends BaseRenderer { 16 | constructor(xSlices, ySlices, zSlices,_shadingtype) { 17 | super(xSlices, ySlices, zSlices); 18 | 19 | this.setupDrawBuffers(canvas.width, canvas.height); 20 | 21 | // Create a texture to store light data 22 | this._lightTexture = new TextureBuffer(NUM_LIGHTS, 8); 23 | 24 | this._progCopy = loadShaderProgram(toTextureVert, toTextureFrag, { 25 | uniforms: ['u_viewProjectionMatrix','u_viewMatrix', 'u_colmap', 'u_normap'], 26 | attribs: ['a_position', 'a_normal', 'a_uv'], 27 | }); 28 | 29 | this._progShade = loadShaderProgram(QuadVertSource, fsSource({ 30 | numLights: NUM_LIGHTS, 31 | numGBuffers: NUM_GBUFFERS, 32 | xSlices: xSlices, ySlices: ySlices, zSlices: zSlices, 33 | maxLightsPerCluster: MAX_LIGHTS_PER_CLUSTER, 34 | 35 | }), { 36 | uniforms: ['u_gbuffers[0]', 'u_gbuffers[1]', 'u_gbuffers[2]', 'u_viewMatrix','u_invviewMatrix','u_shadingtype','u_clusterbuffer','u_lightbuffer','u_screenwidth', 'u_screenheight','u_near','u_far'], 37 | attribs: ['a_uv'], 38 | }); 39 | 40 | this.shadingtype = _shadingtype; 41 | this._projectionMatrix = mat4.create(); 42 | this._viewMatrix = mat4.create(); 43 | this._viewProjectionMatrix = mat4.create(); 44 | } 45 | 46 | setupDrawBuffers(width, height) { 47 | this._width = width; 48 | this._height = height; 49 | 50 | this._fbo = gl.createFramebuffer(); 51 | 52 | //Create, bind, and store a depth target texture for the FBO 53 | this._depthTex = gl.createTexture(); 54 | gl.bindTexture(gl.TEXTURE_2D, this._depthTex); 55 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST); 56 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST); 57 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); 58 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); 59 | gl.texImage2D(gl.TEXTURE_2D, 0, gl.DEPTH_COMPONENT, width, height, 0, gl.DEPTH_COMPONENT, gl.UNSIGNED_SHORT, null); 60 | gl.bindTexture(gl.TEXTURE_2D, null); 61 | 62 | gl.bindFramebuffer(gl.FRAMEBUFFER, this._fbo); 63 | gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.TEXTURE_2D, this._depthTex, 0); 64 | 65 | // Create, bind, and store "color" target textures for the FBO 66 | this._gbuffers = new Array(NUM_GBUFFERS); 67 | let attachments = new Array(NUM_GBUFFERS); 68 | for (let i = 0; i < NUM_GBUFFERS; i++) { 69 | attachments[i] = WEBGL_draw_buffers[`COLOR_ATTACHMENT${i}_WEBGL`]; 70 | this._gbuffers[i] = gl.createTexture(); 71 | gl.bindTexture(gl.TEXTURE_2D, this._gbuffers[i]); 72 | 
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST); 73 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST); 74 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); 75 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); 76 | gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.FLOAT, null); 77 | gl.bindTexture(gl.TEXTURE_2D, null); 78 | 79 | gl.framebufferTexture2D(gl.FRAMEBUFFER, attachments[i], gl.TEXTURE_2D, this._gbuffers[i], 0); 80 | } 81 | 82 | if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) != gl.FRAMEBUFFER_COMPLETE) { 83 | throw "Framebuffer incomplete"; 84 | } 85 | 86 | // Tell the WEBGL_draw_buffers extension which FBO attachments are 87 | // being used. (This extension allows for multiple render targets.) 88 | WEBGL_draw_buffers.drawBuffersWEBGL(attachments); 89 | 90 | gl.bindFramebuffer(gl.FRAMEBUFFER, null); 91 | } 92 | 93 | resize(width, height) { 94 | this._width = width; 95 | this._height = height; 96 | 97 | gl.bindTexture(gl.TEXTURE_2D, this._depthTex); 98 | gl.texImage2D(gl.TEXTURE_2D, 0, gl.DEPTH_COMPONENT, width, height, 0, gl.DEPTH_COMPONENT, gl.UNSIGNED_SHORT, null); 99 | for (let i = 0; i < NUM_GBUFFERS; i++) { 100 | gl.bindTexture(gl.TEXTURE_2D, this._gbuffers[i]); 101 | gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.FLOAT, null); 102 | } 103 | gl.bindTexture(gl.TEXTURE_2D, null); 104 | } 105 | 106 | render(camera, scene) { 107 | if (canvas.width != this._width || canvas.height != this._height) { 108 | this.resize(canvas.width, canvas.height); 109 | } 110 | 111 | // Update the camera matrices 112 | camera.updateMatrixWorld(); 113 | mat4.invert(this._viewMatrix, camera.matrixWorld.elements); 114 | let invviewMatrix = mat4.create(); 115 | mat4.invert(invviewMatrix,this._viewMatrix) 116 | mat4.copy(this._projectionMatrix, camera.projectionMatrix.elements); 117 | mat4.multiply(this._viewProjectionMatrix, this._projectionMatrix, this._viewMatrix); 118 | 119 | // Render to the whole screen 120 | gl.viewport(0, 0, canvas.width, canvas.height); 121 | 122 | // Bind the framebuffer 123 | gl.bindFramebuffer(gl.FRAMEBUFFER, this._fbo); 124 | 125 | // Clear the frame 126 | gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); 127 | 128 | // Use the shader program to copy to the draw buffers 129 | gl.useProgram(this._progCopy.glShaderProgram); 130 | 131 | // Upload the camera matrix 132 | gl.uniformMatrix4fv(this._progCopy.u_viewProjectionMatrix, false, this._viewProjectionMatrix); 133 | gl.uniformMatrix4fv(this._progCopy.u_viewMatrix,false,this._viewMatrix); 134 | // Draw the scene. 
This function takes the shader program so that the model's textures can be bound to the right inputs 135 | scene.draw(this._progCopy); 136 | 137 | // Update the buffer used to populate the texture packed with light data 138 | for (let i = 0; i < NUM_LIGHTS; ++i) { 139 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 0] = scene.lights[i].position[0]; 140 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 1] = scene.lights[i].position[1]; 141 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 2] = scene.lights[i].position[2]; 142 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 3] = scene.lights[i].radius; 143 | 144 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 0] = scene.lights[i].color[0]; 145 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 1] = scene.lights[i].color[1]; 146 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 2] = scene.lights[i].color[2]; 147 | } 148 | // Update the light texture 149 | this._lightTexture.update(); 150 | 151 | // Update the clusters for the frame 152 | this.updateClusters(camera, this._viewMatrix, scene); 153 | 154 | // Bind the default null framebuffer which is the screen 155 | gl.bindFramebuffer(gl.FRAMEBUFFER, null); 156 | 157 | // Clear the frame 158 | gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); 159 | 160 | // Use this shader program 161 | gl.useProgram(this._progShade.glShaderProgram); 162 | gl.uniformMatrix4fv(this._progShade.u_viewMatrix,false,this._viewMatrix); 163 | gl.uniformMatrix4fv(this._progShade.u_invviewMatrix,false,invviewMatrix); 164 | // TODO: Bind any other shader inputs 165 | gl.activeTexture(gl.TEXTURE3); 166 | gl.bindTexture(gl.TEXTURE_2D, this._clusterTexture.glTexture); 167 | gl.uniform1i(this._progShade.u_clusterbuffer, 3); 168 | 169 | gl.activeTexture(gl.TEXTURE4); 170 | gl.bindTexture(gl.TEXTURE_2D,this._lightTexture.glTexture); 171 | gl.uniform1i(this._progShade.u_lightbuffer,4); 172 | 173 | gl.uniform1f(this._progShade.u_screenwidth, canvas.width); 174 | gl.uniform1f(this._progShade.u_screenheight, canvas.height); 175 | 176 | gl.uniform1f(this._progShade.u_far,camera.far); 177 | gl.uniform1f(this._progShade.u_near,camera.near); 178 | 179 | gl.uniform1i(this._progShade.u_shadingtype,this.shadingtype); 180 | // Bind g-buffers 181 | const firstGBufferBinding = 0; // You may have to change this if you use other texture slots 182 | for (let i = 0; i < NUM_GBUFFERS; i++) { 183 | gl.activeTexture(gl[`TEXTURE${i + firstGBufferBinding}`]); 184 | gl.bindTexture(gl.TEXTURE_2D, this._gbuffers[i]); 185 | gl.uniform1i(this._progShade[`u_gbuffers[${i}]`], i + firstGBufferBinding); 186 | } 187 | 188 | renderFullscreenQuad(this._progShade); 189 | } 190 | }; 191 | -------------------------------------------------------------------------------- /src/renderers/forward.js: -------------------------------------------------------------------------------- 1 | import { gl } from '../init'; 2 | import { mat4, vec4 } from 'gl-matrix'; 3 | import { loadShaderProgram } from '../utils'; 4 | import { NUM_LIGHTS } from '../scene'; 5 | import vsSource from '../shaders/forward.vert.glsl'; 6 | import fsSource from '../shaders/forward.frag.glsl.js'; 7 | import TextureBuffer from './textureBuffer'; 8 | 9 | export default class ForwardRenderer { 10 | constructor(_shadingtype) { 11 | // Create a texture to store light data 12 | this._lightTexture = new TextureBuffer(NUM_LIGHTS, 8); 13 | // Initialize a shader program. 
The fragment shader source is compiled based on the number of lights 14 | this._shaderProgram = loadShaderProgram(vsSource, fsSource({ 15 | numLights: NUM_LIGHTS, 16 | 17 | }), { 18 | uniforms: ['u_viewProjectionMatrix', 'u_colmap', 'u_normap', 'u_lightbuffer','u_shadingtype'], 19 | attribs: ['a_position', 'a_normal', 'a_uv'], 20 | }); 21 | this.shadingtype = _shadingtype; 22 | this._projectionMatrix = mat4.create(); 23 | this._viewMatrix = mat4.create(); 24 | this._viewProjectionMatrix = mat4.create(); 25 | } 26 | 27 | render(camera, scene) { 28 | // Update the camera matrices 29 | camera.updateMatrixWorld(); 30 | mat4.invert(this._viewMatrix, camera.matrixWorld.elements); 31 | mat4.copy(this._projectionMatrix, camera.projectionMatrix.elements); 32 | mat4.multiply(this._viewProjectionMatrix, this._projectionMatrix, this._viewMatrix); 33 | 34 | // Update the buffer used to populate the texture packed with light data 35 | for (let i = 0; i < NUM_LIGHTS; ++i) { 36 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 0] = scene.lights[i].position[0]; 37 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 1] = scene.lights[i].position[1]; 38 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 2] = scene.lights[i].position[2]; 39 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 3] = scene.lights[i].radius; 40 | 41 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 0] = scene.lights[i].color[0]; 42 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 1] = scene.lights[i].color[1]; 43 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 2] = scene.lights[i].color[2]; 44 | } 45 | // Update the light texture 46 | this._lightTexture.update(); 47 | 48 | // Bind the default null framebuffer which is the screen 49 | gl.bindFramebuffer(gl.FRAMEBUFFER, null); 50 | 51 | // Render to the whole screen 52 | gl.viewport(0, 0, canvas.width, canvas.height); 53 | 54 | // Clear the frame 55 | gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); 56 | 57 | // Use this shader program 58 | gl.useProgram(this._shaderProgram.glShaderProgram); 59 | 60 | // Upload the camera matrix 61 | gl.uniformMatrix4fv(this._shaderProgram.u_viewProjectionMatrix, false, this._viewProjectionMatrix); 62 | 63 | // Set the light texture as a uniform input to the shader 64 | gl.activeTexture(gl.TEXTURE2); 65 | gl.bindTexture(gl.TEXTURE_2D, this._lightTexture.glTexture); 66 | gl.uniform1i(this._shaderProgram.u_lightbuffer, 2); 67 | gl.uniform1i(this._shaderProgram.u_shadingtype,this.shadingtype); 68 | 69 | // Draw the scene. 
This function takes the shader program so that the model's textures can be bound to the right inputs 70 | scene.draw(this._shaderProgram); 71 | } 72 | }; 73 | -------------------------------------------------------------------------------- /src/renderers/forwardPlus.js: -------------------------------------------------------------------------------- 1 | import { gl } from '../init'; 2 | import { mat4, vec4, vec3 } from 'gl-matrix'; 3 | import { loadShaderProgram } from '../utils'; 4 | import { NUM_LIGHTS } from '../scene'; 5 | import vsSource from '../shaders/forwardPlus.vert.glsl'; 6 | import fsSource from '../shaders/forwardPlus.frag.glsl.js'; 7 | import TextureBuffer from './textureBuffer'; 8 | import BaseRenderer from './base'; 9 | import { MAX_LIGHTS_PER_CLUSTER} from "./base"; 10 | 11 | export default class ForwardPlusRenderer extends BaseRenderer { 12 | constructor(xSlices, ySlices, zSlices) { 13 | super(xSlices, ySlices, zSlices); 14 | 15 | // Create a texture to store light data 16 | this._lightTexture = new TextureBuffer(NUM_LIGHTS, 8); 17 | 18 | this._shaderProgram = loadShaderProgram(vsSource, fsSource({ 19 | numLights: NUM_LIGHTS, 20 | xSlices: xSlices,ySlices: ySlices,zSlices: zSlices,maxLightsPerCluster: MAX_LIGHTS_PER_CLUSTER, 21 | }), { 22 | uniforms: ['u_viewProjectionMatrix', 'u_colmap', 'u_normap', 'u_lightbuffer', 'u_clusterbuffer','u_viewMatrix', 23 | 'u_near','u_far','u_screenwidth', 'u_screenheight'], 24 | attribs: ['a_position', 'a_normal', 'a_uv'], 25 | }); 26 | 27 | this._projectionMatrix = mat4.create(); 28 | this._viewMatrix = mat4.create(); 29 | this._viewProjectionMatrix = mat4.create(); 30 | } 31 | 32 | render(camera, scene) { 33 | // Update the camera matrices 34 | camera.updateMatrixWorld(); 35 | mat4.invert(this._viewMatrix, camera.matrixWorld.elements); 36 | mat4.copy(this._projectionMatrix, camera.projectionMatrix.elements); 37 | mat4.multiply(this._viewProjectionMatrix, this._projectionMatrix, this._viewMatrix); 38 | 39 | // Update cluster texture which maps from cluster index to light list 40 | this.updateClusters(camera, this._viewMatrix, scene); 41 | 42 | // Update the buffer used to populate the texture packed with light data 43 | for (let i = 0; i < NUM_LIGHTS; ++i) { 44 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 0] = scene.lights[i].position[0]; 45 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 1] = scene.lights[i].position[1]; 46 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 2] = scene.lights[i].position[2]; 47 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 3] = scene.lights[i].radius; 48 | 49 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 0] = scene.lights[i].color[0]; 50 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 1] = scene.lights[i].color[1]; 51 | this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 2] = scene.lights[i].color[2]; 52 | } 53 | // Update the light texture 54 | this._lightTexture.update(); 55 | 56 | // Bind the default null framebuffer which is the screen 57 | gl.bindFramebuffer(gl.FRAMEBUFFER, null); 58 | 59 | // Render to the whole screen 60 | gl.viewport(0, 0, canvas.width, canvas.height); 61 | 62 | // Clear the frame 63 | gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); 64 | 65 | // Use this shader program 66 | gl.useProgram(this._shaderProgram.glShaderProgram); 67 | 68 | // Upload the camera matrix 69 | gl.uniformMatrix4fv(this._shaderProgram.u_viewProjectionMatrix, false, 
this._viewProjectionMatrix); 70 | gl.uniformMatrix4fv(this._shaderProgram.u_viewMatrix,false,this._viewMatrix); 71 | // Set the light texture as a uniform input to the shader 72 | gl.activeTexture(gl.TEXTURE2); 73 | gl.bindTexture(gl.TEXTURE_2D, this._lightTexture.glTexture); 74 | gl.uniform1i(this._shaderProgram.u_lightbuffer, 2); 75 | 76 | // Set the cluster texture as a uniform input to the shader 77 | gl.activeTexture(gl.TEXTURE3); 78 | gl.bindTexture(gl.TEXTURE_2D, this._clusterTexture.glTexture); 79 | gl.uniform1i(this._shaderProgram.u_clusterbuffer, 3); 80 | 81 | // TODO: Bind any other shader inputs 82 | gl.uniform1f(this._shaderProgram.u_screenwidth, canvas.width); 83 | gl.uniform1f(this._shaderProgram.u_screenheight,canvas.height); 84 | gl.uniform1f(this._shaderProgram.u_near,camera.near); 85 | gl.uniform1f(this._shaderProgram.u_far,camera.far); 86 | // Draw the scene. This function takes the shader program so that the model's textures can be bound to the right inputs 87 | scene.draw(this._shaderProgram); 88 | } 89 | }; -------------------------------------------------------------------------------- /src/renderers/textureBuffer.js: -------------------------------------------------------------------------------- 1 | import { gl } from '../init'; 2 | 3 | export default class TextureBuffer { 4 | /** 5 | * This class represents a buffer in a shader. Unforunately we can't bind arbitrary buffers so we need to pack the data as a texture 6 | * @param {Number} elementCount The number of items in the buffer 7 | * @param {Number} elementSize The number of values in each item of the buffer 8 | */ 9 | constructor(elementCount, elementSize) { 10 | // Initialize the texture. We use gl.NEAREST for texture filtering because we don't want to blend between values in the buffer. We want the exact value 11 | this._glTexture = gl.createTexture(); 12 | gl.bindTexture(gl.TEXTURE_2D, this._glTexture); 13 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST); 14 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST); 15 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); 16 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); 17 | 18 | // The texture stores 4 values in each "pixel". Thus, the texture we create is elementCount x ceil(elementSize / 4) 19 | this._pixelsPerElement = Math.ceil(elementSize / 4); 20 | this._elementCount = elementCount; 21 | gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, elementCount, this._pixelsPerElement, 0, gl.RGBA, gl.FLOAT, null); 22 | gl.bindTexture(gl.TEXTURE_2D, null); 23 | 24 | // Create a buffer to use to upload to the texture 25 | this._buffer = new Float32Array(elementCount * 4 * this._pixelsPerElement); 26 | } 27 | 28 | get glTexture() { 29 | return this._glTexture; 30 | } 31 | 32 | get buffer() { 33 | return this._buffer; 34 | } 35 | 36 | /** 37 | * Computes the starting buffer index to a particular item. 
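   * The texture is elementCount texels wide and pixelsPerElement texels tall, and the backing
   * Float32Array is laid out row by row: element `index` occupies column `index`, and the
   * second argument selects which 4-float pixel (row) of that element to address.
   * Example: for a light (elementSize = 8), the green channel of its color sits in the second
   * pixel, at bufferIndex(i, 1) + 1 = 4 * i + 4 * elementCount + 1.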
38 | * @param {*} index The index of the item 39 | * @param {*} component The ith float of an element is located in the (i/4)th pixel 40 | */ 41 | bufferIndex(index, component) { 42 | return 4 * index + 4 * component * this._elementCount; 43 | } 44 | 45 | /** 46 | * Update the texture with the data in the buffer 47 | */ 48 | update() { 49 | gl.bindTexture(gl.TEXTURE_2D, this._glTexture); 50 | gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, this._elementCount, this._pixelsPerElement, gl.RGBA, gl.FLOAT, this._buffer); 51 | gl.bindTexture(gl.TEXTURE_2D, null); 52 | } 53 | }; -------------------------------------------------------------------------------- /src/scene.js: -------------------------------------------------------------------------------- 1 | const MinimalGLTFLoader = require('../lib/minimal-gltf-loader'); 2 | import { gl } from './init'; 3 | 4 | // TODO: Edit if you want to change the light initial positions 5 | export const LIGHT_MIN = [-14, 0, -6]; 6 | export const LIGHT_MAX = [14, 20, 6]; 7 | export const LIGHT_RADIUS = 4.0; 8 | export const LIGHT_DT = -0.07; 9 | 10 | // TODO: This controls the number of lights 11 | export const NUM_LIGHTS = 600; 12 | 13 | class Scene { 14 | constructor() { 15 | this.lights = []; 16 | this.models = []; 17 | 18 | for (let i = 0; i < NUM_LIGHTS; ++i) { 19 | this.lights.push({ 20 | position: new Float32Array([ 21 | Math.random() * (LIGHT_MAX[0] - LIGHT_MIN[0]) + LIGHT_MIN[0], 22 | Math.random() * (LIGHT_MAX[1] - LIGHT_MIN[1]) + LIGHT_MIN[1], 23 | Math.random() * (LIGHT_MAX[2] - LIGHT_MIN[2]) + LIGHT_MIN[2], 24 | ]), 25 | color: new Float32Array([ 26 | 0.1 + 0.9 * Math.random(), 27 | 0.1 + 0.9 * Math.random(), 28 | 0.1 + 0.9 * Math.random(), 29 | ]), 30 | radius: LIGHT_RADIUS, 31 | }); 32 | } 33 | } 34 | 35 | loadGLTF(url) { 36 | var glTFLoader = new MinimalGLTFLoader.glTFLoader(gl); 37 | glTFLoader.loadGLTF(url, glTF => { 38 | var curScene = glTF.scenes[glTF.defaultScene]; 39 | 40 | var webGLTextures = {}; 41 | 42 | // temp var 43 | var i,len; 44 | var primitiveOrderID; 45 | 46 | var mesh; 47 | var primitive; 48 | var vertexBuffer; 49 | var indicesBuffer; 50 | 51 | // textures setting 52 | var textureID = 0; 53 | var textureInfo; 54 | var samplerInfo; 55 | var target, format, internalFormat, type; // texture info 56 | var magFilter, minFilter, wrapS, wrapT; 57 | var image; 58 | var texture; 59 | 60 | // temp for sponza 61 | var colorTextureName = 'texture_color'; 62 | var normalTextureName = 'texture_normal'; 63 | 64 | for (var tid in glTF.json.textures) { 65 | textureInfo = glTF.json.textures[tid]; 66 | target = textureInfo.target || gl.TEXTURE_2D; 67 | format = textureInfo.format || gl.RGBA; 68 | internalFormat = textureInfo.format || gl.RGBA; 69 | type = textureInfo.type || gl.UNSIGNED_BYTE; 70 | 71 | image = glTF.images[textureInfo.source]; 72 | 73 | texture = gl.createTexture(); 74 | gl.activeTexture(gl.TEXTURE0 + textureID); 75 | gl.bindTexture(target, texture); 76 | 77 | switch(target) { 78 | case 3553: // gl.TEXTURE_2D 79 | gl.texImage2D(target, 0, internalFormat, format, type, image); 80 | break; 81 | } 82 | 83 | // !! 
Sampler 84 | // raw WebGL 1, no sampler object, set magfilter, wrapS, etc 85 | samplerInfo = glTF.json.samplers[textureInfo.sampler]; 86 | minFilter = samplerInfo.minFilter || gl.NEAREST_MIPMAP_LINEAR; 87 | magFilter = samplerInfo.magFilter || gl.LINEAR; 88 | wrapS = samplerInfo.wrapS || gl.REPEAT; 89 | wrapT = samplerInfo.wrapT || gl.REPEAT; 90 | gl.texParameteri(target, gl.TEXTURE_MIN_FILTER, minFilter); 91 | gl.texParameteri(target, gl.TEXTURE_MAG_FILTER, magFilter); 92 | gl.texParameteri(target, gl.TEXTURE_WRAP_S, wrapS); 93 | gl.texParameteri(target, gl.TEXTURE_WRAP_T, wrapT); 94 | if (minFilter == gl.NEAREST_MIPMAP_NEAREST || 95 | minFilter == gl.NEAREST_MIPMAP_LINEAR || 96 | minFilter == gl.LINEAR_MIPMAP_NEAREST || 97 | minFilter == gl.LINEAR_MIPMAP_LINEAR ) { 98 | gl.generateMipmap(target); 99 | } 100 | 101 | 102 | gl.bindTexture(target, null); 103 | 104 | webGLTextures[tid] = { 105 | texture: texture, 106 | target: target, 107 | id: textureID 108 | }; 109 | 110 | textureID++; 111 | } 112 | 113 | // vertex attributes 114 | for (var mid in curScene.meshes) { 115 | mesh = curScene.meshes[mid]; 116 | 117 | for (i = 0, len = mesh.primitives.length; i < len; ++i) { 118 | primitive = mesh.primitives[i]; 119 | 120 | vertexBuffer = gl.createBuffer(); 121 | indicesBuffer = gl.createBuffer(); 122 | 123 | // initialize buffer 124 | var vertices = primitive.vertexBuffer; 125 | gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer); 126 | gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW); 127 | gl.bindBuffer(gl.ARRAY_BUFFER, null); 128 | 129 | var indices = primitive.indices; 130 | gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indicesBuffer); 131 | gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, indices, gl.STATIC_DRAW); 132 | gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null); 133 | 134 | var posInfo = primitive.attributes[primitive.technique.parameters['position'].semantic]; 135 | var norInfo = primitive.attributes[primitive.technique.parameters['normal'].semantic]; 136 | var uvInfo = primitive.attributes[primitive.technique.parameters['texcoord_0'].semantic]; 137 | 138 | this.models.push({ 139 | gltf: primitive, 140 | 141 | idx: indicesBuffer, 142 | 143 | attributes: vertexBuffer, 144 | posInfo: {size: posInfo.size, type: posInfo.type, stride: posInfo.stride, offset: posInfo.offset}, 145 | norInfo: {size: norInfo.size, type: norInfo.type, stride: norInfo.stride, offset: norInfo.offset}, 146 | uvInfo: {size: uvInfo.size, type: uvInfo.type, stride: uvInfo.stride, offset: uvInfo.offset}, 147 | 148 | // specific textures temp test 149 | colmap: webGLTextures[colorTextureName].texture, 150 | normap: webGLTextures[normalTextureName].texture 151 | }); 152 | } 153 | } 154 | 155 | }); 156 | } 157 | 158 | update() { 159 | for (let i = 0; i < NUM_LIGHTS; i++) { 160 | // OPTIONAL TODO: Edit if you want to change how lights move 161 | this.lights[i].position[1] += LIGHT_DT; 162 | // wrap lights from bottom to top 163 | this.lights[i].position[1] = (this.lights[i].position[1] + LIGHT_MAX[1] - LIGHT_MIN[1]) % LIGHT_MAX[1] + LIGHT_MIN[1]; 164 | } 165 | } 166 | 167 | draw(shaderProgram) { 168 | for (let i = 0; i < this.models.length; ++i) { 169 | const model = this.models[i]; 170 | if (model.colmap) { 171 | gl.activeTexture(gl.TEXTURE0); 172 | gl.bindTexture(gl.TEXTURE_2D, model.colmap); 173 | gl.uniform1i(shaderProgram.u_colmap, 0); 174 | } 175 | 176 | if (model.normap) { 177 | gl.activeTexture(gl.TEXTURE1); 178 | gl.bindTexture(gl.TEXTURE_2D, model.normap); 179 | gl.uniform1i(shaderProgram.u_normap, 1); 180 | } 181 | 182 | 
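      // Each model uses one interleaved vertex buffer: position, normal, and uv are bound from it
      // with the size/type/stride/offset recorded from the glTF accessors, then the index buffer
      // is bound and the primitive is drawn with drawElements.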
gl.bindBuffer(gl.ARRAY_BUFFER, model.attributes); 183 | 184 | gl.enableVertexAttribArray(shaderProgram.a_position); 185 | gl.vertexAttribPointer(shaderProgram.a_position, model.posInfo.size, model.posInfo.type, false, model.posInfo.stride, model.posInfo.offset); 186 | 187 | gl.enableVertexAttribArray(shaderProgram.a_normal); 188 | gl.vertexAttribPointer(shaderProgram.a_normal, model.norInfo.size, model.norInfo.type, false, model.norInfo.stride, model.norInfo.offset); 189 | 190 | gl.enableVertexAttribArray(shaderProgram.a_uv); 191 | gl.vertexAttribPointer(shaderProgram.a_uv, model.uvInfo.size, model.uvInfo.type, false, model.uvInfo.stride, model.uvInfo.offset); 192 | 193 | gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, model.idx); 194 | 195 | gl.drawElements(model.gltf.mode, model.gltf.indices.length, model.gltf.indicesComponentType, 0); 196 | } 197 | } 198 | 199 | } 200 | 201 | export default Scene; -------------------------------------------------------------------------------- /src/shaders/deferred.frag.glsl.js: -------------------------------------------------------------------------------- 1 | export default function(params) { 2 | return ` 3 | #version 100 4 | precision highp float; 5 | 6 | uniform sampler2D u_gbuffers[${params.numGBuffers}]; 7 | uniform sampler2D u_clusterbuffer; 8 | uniform sampler2D u_lightbuffer; 9 | uniform mat4 u_viewMatrix; 10 | uniform mat4 u_invviewMatrix; 11 | uniform float u_near; 12 | uniform float u_far; 13 | uniform float u_screenwidth; 14 | uniform float u_screenheight; 15 | uniform int u_shadingtype; 16 | 17 | varying vec2 v_uv; 18 | 19 | struct Light { 20 | vec3 position; 21 | float radius; 22 | vec3 color; 23 | }; 24 | 25 | float ExtractFloat(sampler2D texture, int textureWidth, int textureHeight, int index, int component) { 26 | float u = float(index + 1) / float(textureWidth + 1); 27 | int pixel = component / 4; 28 | float v = float(pixel + 1) / float(textureHeight + 1); 29 | vec4 texel = texture2D(texture, vec2(u, v)); 30 | int texelComponent = component - pixel * 4; 31 | if (texelComponent == 0) { 32 | return texel[0]; 33 | } else if (texelComponent == 1) { 34 | return texel[1]; 35 | } else if (texelComponent == 2) { 36 | return texel[2]; 37 | } else if (texelComponent == 3) { 38 | return texel[3]; 39 | } 40 | } 41 | 42 | Light UnpackLight(int index) { 43 | Light light; 44 | float u = float(index + 1) / float(${params.numLights + 1}); 45 | vec4 v1 = texture2D(u_lightbuffer, vec2(u, 0.3)); 46 | vec4 v2 = texture2D(u_lightbuffer, vec2(u, 0.6)); 47 | light.position = v1.xyz; 48 | // LOOK: This extracts the 4th float (radius) of the (index)th light in the buffer 49 | // Note that this is just an example implementation to extract one float. 
50 | // There are more efficient ways if you need adjacent values 51 | light.radius = v1.w;//ExtractFloat(u_lightbuffer, ${params.numLights}, 2, index, 3); 52 | light.color = v2.rgb; 53 | return light; 54 | } 55 | 56 | // Cubic approximation of gaussian curve so we falloff to exactly 0 at the light radius 57 | float cubicGaussian(float h) { 58 | if (h < 1.0) { 59 | return 0.25 * pow(2.0 - h, 3.0) - pow(1.0 - h, 3.0); 60 | } else if (h < 2.0) { 61 | return 0.25 * pow(2.0 - h, 3.0); 62 | } else { 63 | return 0.0; 64 | } 65 | } 66 | 67 | void main() { 68 | // TODO: extract data from g buffers and do lighting 69 | vec4 gbuffer0 = texture2D(u_gbuffers[0], v_uv); 70 | vec4 gbuffer1 = texture2D(u_gbuffers[1], v_uv); 71 | //vec4 gbuffer2 = texture2D(u_gbuffers[2], v_uv); 72 | //vec3 normal = gbuffer2.xyz; 73 | vec3 normal; 74 | vec3 v_position = gbuffer1.xyz; 75 | 76 | vec2 snor = vec2(gbuffer0.w,gbuffer1.w); 77 | float ival = sqrt(1.0 - gbuffer0.w *gbuffer0.w-gbuffer1.w*gbuffer1.w); 78 | vec4 normdecompressed = u_invviewMatrix*vec4(snor,ival,0.0); 79 | normal = normalize(normdecompressed.xyz); 80 | 81 | 82 | vec3 albedo = gbuffer0.rgb; 83 | int clusterXidx = int( gl_FragCoord.x / (float(u_screenwidth) / float(${params.xSlices})) ); 84 | int clusterYidx = int( gl_FragCoord.y / (float(u_screenheight) / float(${params.ySlices})) ); 85 | 86 | 87 | vec4 viewPosRaw = u_viewMatrix * vec4(v_position,1.0); 88 | vec4 viewpos = u_invviewMatrix*vec4(0.0,0.0,0.0,1.0); 89 | 90 | 91 | int clusterZidx = int( (-viewPosRaw.z-u_near) / (float(u_far-u_near) / float(${params.zSlices})) ); 92 | int clusterIdx = clusterXidx + clusterYidx*${params.xSlices} + clusterZidx*${params.xSlices}*${params.ySlices}; 93 | int clusterCount = ${params.xSlices}*${params.ySlices}*${params.zSlices}; 94 | 95 | float U = float(clusterIdx+1) / float(clusterCount+1); 96 | int clusterLightCount = int(texture2D(u_clusterbuffer, vec2(U,0)).r); 97 | int texelsPerCol = int(float(${params.maxLightsPerCluster}+1)/4.0) + 1; 98 | 99 | vec3 fragColor = vec3(0.0); 100 | for (int i = 0; i < ${params.numLights}; ++i) { 101 | if(i >= clusterLightCount) { break; } 102 | int texelIdx = int(float(i+1) * 0.25); 103 | float V = float(texelIdx+1) / float(texelsPerCol+1); 104 | vec4 texel = texture2D(u_clusterbuffer, vec2(U,V)); 105 | int lightIdx; 106 | int texelComponent = (i+1) - (texelIdx * 4); 107 | if (texelComponent == 0) { 108 | lightIdx = int(texel[0]); 109 | } else if (texelComponent == 1) { 110 | lightIdx = int(texel[1]); 111 | } else if (texelComponent == 2) { 112 | lightIdx = int(texel[2]); 113 | } else if (texelComponent == 3) { 114 | lightIdx = int(texel[3]); 115 | } 116 | Light light = UnpackLight(lightIdx); 117 | float lightDistance = distance(light.position, v_position); 118 | vec3 L = (light.position - v_position) / lightDistance; 119 | float lightIntensity = 0.6*cubicGaussian(2.0 * lightDistance / light.radius); 120 | 121 | //blinn-phon shading 122 | vec3 viewdir = normalize(vec3(viewpos) - v_position); 123 | vec3 lightdir = normalize(L); 124 | vec3 halfv = normalize(lightdir+viewdir); 125 | float theta = max(0.0,dot(halfv,normal)); 126 | float specterm; 127 | if(u_shadingtype == 1) 128 | { 129 | specterm = pow(theta,1000.0); 130 | } 131 | else 132 | { 133 | specterm = 0.0; 134 | } 135 | 136 | //lambert shading 137 | float lambertTerm = max(dot(L, normal), 0.0); 138 | 139 | //toon shading: 140 | float toonnum = 3.3; 141 | float toonnumspec = 0.3; 142 | float toonmag = 0.7; 143 | float toonlamb = floor(lambertTerm*toonnum)/toonnum; 144 | 
float toonspec = floor(specterm*toonnumspec)/toonnumspec; 145 | if(u_shadingtype==2) 146 | { 147 | lambertTerm = lambertTerm*(1.0-toonmag)+toonlamb*toonmag; 148 | specterm = specterm*(1.0-toonmag)+toonspec*toonmag; 149 | } 150 | 151 | fragColor += albedo * (lambertTerm + 3.0*specterm)* light.color * vec3(lightIntensity); 152 | }//lightloop 153 | float depthval = -viewPosRaw.z; 154 | const vec3 ambientLight = vec3(0.025); 155 | fragColor += albedo * ambientLight; 156 | //fragColor = 0.04*vec3(depthval); 157 | gl_FragColor = vec4(fragColor, 1.0); 158 | } 159 | `; 160 | } -------------------------------------------------------------------------------- /src/shaders/deferredToTexture.frag.glsl: -------------------------------------------------------------------------------- 1 | #version 100 2 | #extension GL_EXT_draw_buffers: enable 3 | precision highp float; 4 | 5 | uniform sampler2D u_colmap; 6 | uniform sampler2D u_normap; 7 | uniform mat4 u_viewMatrix; 8 | 9 | varying vec3 v_position; 10 | varying vec3 v_normal; 11 | varying vec2 v_uv; 12 | 13 | vec3 applyNormalMap(vec3 geomnor, vec3 normap) { 14 | normap = normap * 2.0 - 1.0; 15 | vec3 up = normalize(vec3(0.001, 1, 0.001)); 16 | vec3 surftan = normalize(cross(geomnor, up)); 17 | vec3 surfbinor = cross(geomnor, surftan); 18 | return normap.y * surftan + normap.x * surfbinor + normap.z * geomnor; 19 | } 20 | 21 | void main() { 22 | vec3 norm = applyNormalMap(v_normal, vec3(texture2D(u_normap, v_uv))); 23 | vec3 col = vec3(texture2D(u_colmap, v_uv)); 24 | 25 | vec3 compressednorm = normalize(vec3(u_viewMatrix * vec4(norm,0.0))); 26 | gl_FragData[0] = vec4(col, compressednorm.x); 27 | gl_FragData[1] = vec4(v_position, compressednorm.y); 28 | //gl_FragData[2] = vec4(norm, 0.0); 29 | // TODO: populate your g buffer 30 | // gl_FragData[0] = ?? 31 | // gl_FragData[1] = ?? 32 | // gl_FragData[2] = ?? 33 | // gl_FragData[3] = ?? 
34 | } -------------------------------------------------------------------------------- /src/shaders/deferredToTexture.vert.glsl: -------------------------------------------------------------------------------- 1 | #version 100 2 | precision highp float; 3 | 4 | uniform mat4 u_viewProjectionMatrix; 5 | 6 | attribute vec3 a_position; 7 | attribute vec3 a_normal; 8 | attribute vec2 a_uv; 9 | 10 | varying vec3 v_position; 11 | varying vec3 v_normal; 12 | varying vec2 v_uv; 13 | 14 | void main() { 15 | gl_Position = u_viewProjectionMatrix * vec4(a_position, 1.0); 16 | v_position = a_position; 17 | v_normal = a_normal; 18 | v_uv = a_uv; 19 | } -------------------------------------------------------------------------------- /src/shaders/forward.frag.glsl.js: -------------------------------------------------------------------------------- 1 | export default function(params) { 2 | return ` 3 | #version 100 4 | precision highp float; 5 | 6 | uniform sampler2D u_colmap; 7 | uniform sampler2D u_normap; 8 | uniform sampler2D u_lightbuffer; 9 | uniform int u_shadingtype; 10 | 11 | varying vec3 v_position; 12 | varying vec3 v_normal; 13 | varying vec2 v_uv; 14 | 15 | vec3 applyNormalMap(vec3 geomnor, vec3 normap) { 16 | normap = normap * 2.0 - 1.0; 17 | vec3 up = normalize(vec3(0.001, 1, 0.001)); 18 | vec3 surftan = normalize(cross(geomnor, up)); 19 | vec3 surfbinor = cross(geomnor, surftan); 20 | return normap.y * surftan + normap.x * surfbinor + normap.z * geomnor; 21 | } 22 | 23 | struct Light { 24 | vec3 position; 25 | float radius; 26 | vec3 color; 27 | }; 28 | 29 | float ExtractFloat(sampler2D texture, int textureWidth, int textureHeight, int index, int component) { 30 | float u = float(index + 1) / float(textureWidth + 1); 31 | int pixel = component / 4; 32 | float v = float(pixel + 1) / float(textureHeight + 1); 33 | vec4 texel = texture2D(texture, vec2(u, v)); 34 | int pixelComponent = component - pixel * 4; 35 | if (pixelComponent == 0) { 36 | return texel[0]; 37 | } else if (pixelComponent == 1) { 38 | return texel[1]; 39 | } else if (pixelComponent == 2) { 40 | return texel[2]; 41 | } else if (pixelComponent == 3) { 42 | return texel[3]; 43 | } 44 | } 45 | 46 | Light UnpackLight(int index) { 47 | Light light; 48 | float u = float(index + 1) / float(${params.numLights + 1}); 49 | vec4 v1 = texture2D(u_lightbuffer, vec2(u, 0.0)); 50 | vec4 v2 = texture2D(u_lightbuffer, vec2(u, 0.5)); 51 | light.position = v1.xyz; 52 | 53 | // LOOK: This extracts the 4th float (radius) of the (index)th light in the buffer 54 | // Note that this is just an example implementation to extract one float. 
55 | // There are more efficient ways if you need adjacent values 56 | light.radius = ExtractFloat(u_lightbuffer, ${params.numLights}, 2, index, 3); 57 | 58 | light.color = v2.rgb; 59 | return light; 60 | } 61 | 62 | // Cubic approximation of gaussian curve so we falloff to exactly 0 at the light radius 63 | float cubicGaussian(float h) { 64 | if (h < 1.0) { 65 | return 0.25 * pow(2.0 - h, 3.0) - pow(1.0 - h, 3.0); 66 | } else if (h < 2.0) { 67 | return 0.25 * pow(2.0 - h, 3.0); 68 | } else { 69 | return 0.0; 70 | } 71 | } 72 | 73 | void main() { 74 | vec3 albedo = texture2D(u_colmap, v_uv).rgb; 75 | vec3 normap = texture2D(u_normap, v_uv).xyz; 76 | vec3 normal = applyNormalMap(v_normal, normap); 77 | 78 | vec3 fragColor = vec3(0.0); 79 | 80 | for (int i = 0; i < ${params.numLights}; ++i) { 81 | Light light = UnpackLight(i); 82 | float lightDistance = distance(light.position, v_position); 83 | vec3 L = (light.position - v_position) / lightDistance; 84 | 85 | float lightIntensity = cubicGaussian(2.0 * lightDistance / light.radius); 86 | float lambertTerm = max(dot(L, normal), 0.0); 87 | 88 | fragColor += albedo * lambertTerm * light.color * vec3(lightIntensity); 89 | } 90 | 91 | const vec3 ambientLight = vec3(0.025); 92 | fragColor += albedo * ambientLight; 93 | 94 | gl_FragColor = vec4(fragColor, 1.0); 95 | } 96 | `; 97 | } 98 | -------------------------------------------------------------------------------- /src/shaders/forward.vert.glsl: -------------------------------------------------------------------------------- 1 | #version 100 2 | precision highp float; 3 | 4 | uniform mat4 u_viewProjectionMatrix; 5 | 6 | attribute vec3 a_position; 7 | attribute vec3 a_normal; 8 | attribute vec2 a_uv; 9 | 10 | varying vec3 v_position; 11 | varying vec3 v_normal; 12 | varying vec2 v_uv; 13 | 14 | void main() { 15 | gl_Position = u_viewProjectionMatrix * vec4(a_position, 1.0); 16 | v_position = a_position; 17 | v_normal = a_normal; 18 | v_uv = a_uv; 19 | } -------------------------------------------------------------------------------- /src/shaders/forwardPlus.frag.glsl.js: -------------------------------------------------------------------------------- 1 | export default function(params) { 2 | return ` 3 | // TODO: This is pretty much just a clone of forward.frag.glsl.js 4 | 5 | #version 100 6 | precision highp float; 7 | 8 | uniform sampler2D u_colmap; 9 | uniform sampler2D u_normap; 10 | uniform sampler2D u_lightbuffer; 11 | 12 | // TODO: Read this buffer to determine the lights influencing a cluster 13 | uniform sampler2D u_clusterbuffer; 14 | 15 | uniform mat4 u_viewMatrix; 16 | uniform float u_screenwidth; 17 | uniform float u_screenheight; 18 | uniform float u_near; 19 | uniform float u_far; 20 | 21 | varying vec3 v_position; 22 | varying vec3 v_normal; 23 | varying vec2 v_uv; 24 | 25 | vec3 applyNormalMap(vec3 geomnor, vec3 normap) { 26 | normap = normap * 2.0 - 1.0; 27 | vec3 up = normalize(vec3(0.001, 1, 0.001)); 28 | vec3 surftan = normalize(cross(geomnor, up)); 29 | vec3 surfbinor = cross(geomnor, surftan); 30 | return normap.y * surftan + normap.x * surfbinor + normap.z * geomnor; 31 | } 32 | 33 | struct Light { 34 | vec3 position; 35 | float radius; 36 | vec3 color; 37 | }; 38 | 39 | float ExtractFloat(sampler2D texture, int textureWidth, int textureHeight, int index, int component) { 40 | float u = float(index + 1) / float(textureWidth + 1); 41 | int pixel = component / 4; 42 | float v = float(pixel + 1) / float(textureHeight + 1); 43 | vec4 texel = texture2D(texture, vec2(u, v)); 44 
| int pixelComponent = component - pixel * 4; 45 | if (pixelComponent == 0) { 46 | return texel[0]; 47 | } else if (pixelComponent == 1) { 48 | return texel[1]; 49 | } else if (pixelComponent == 2) { 50 | return texel[2]; 51 | } else if (pixelComponent == 3) { 52 | return texel[3]; 53 | } 54 | } 55 | 56 | Light UnpackLight(int index) { 57 | Light light; 58 | float u = float(index + 1) / float(${params.numLights + 1}); 59 | vec4 v1 = texture2D(u_lightbuffer, vec2(u, 0.3)); 60 | vec4 v2 = texture2D(u_lightbuffer, vec2(u, 0.6)); 61 | light.position = v1.xyz; 62 | 63 | // LOOK: This extracts the 4th float (radius) of the (index)th light in the buffer 64 | // Note that this is just an example implementation to extract one float. 65 | // There are more efficient ways if you need adjacent values 66 | light.radius = ExtractFloat(u_lightbuffer, ${params.numLights}, 2, index, 3); 67 | 68 | light.color = v2.rgb; 69 | return light; 70 | } 71 | 72 | // Cubic approximation of gaussian curve so we falloff to exactly 0 at the light radius 73 | float cubicGaussian(float h) { 74 | if (h < 1.0) { 75 | return 0.25 * pow(2.0 - h, 3.0) - pow(1.0 - h, 3.0); 76 | } else if (h < 2.0) { 77 | return 0.25 * pow(2.0 - h, 3.0); 78 | } else { 79 | return 0.0; 80 | } 81 | } 82 | 83 | void main() { 84 | vec3 albedo = texture2D(u_colmap, v_uv).rgb; 85 | vec3 normap = texture2D(u_normap, v_uv).xyz; 86 | vec3 normal = applyNormalMap(v_normal, normap); 87 | 88 | int clusterXidx = int( gl_FragCoord.x / (float(u_screenwidth) / float(${params.xSlices})) ); 89 | int clusterYidx = int( gl_FragCoord.y / (float(u_screenheight) / float(${params.ySlices})) ); 90 | vec4 fragCamPos = u_viewMatrix * vec4(v_position,1.0); 91 | int clusterZidx = int( (-fragCamPos.z-u_near) / (float(u_far-u_near) / float(${params.zSlices})) ); 92 | 93 | int clusterIdx = clusterXidx + clusterYidx*${params.xSlices} + clusterZidx*${params.xSlices}*${params.ySlices}; 94 | int clusterCount = ${params.xSlices}*${params.ySlices}*${params.zSlices}; 95 | float U = float(clusterIdx+1) / float(clusterCount+1); 96 | int clusterLightCount = int(texture2D(u_clusterbuffer, vec2(U,0)).r); 97 | 98 | int texelsPerCol = int(float(${params.maxLightsPerCluster}+1) * 0.25) + 1; 99 | vec3 fragColor = vec3(0.0); 100 | 101 | 102 | 103 | for (int i = 0; i < ${params.numLights}; ++i) { 104 | if(i >= clusterLightCount) { break; } 105 | int texelIdx = int(float(i+1) * 0.25); 106 | float V = float(texelIdx+1) / float(texelsPerCol+1); 107 | vec4 texel = texture2D(u_clusterbuffer, vec2(U,V)); 108 | 109 | int lightIdx; 110 | int texelComponent = (i+1) - (texelIdx * 4); 111 | 112 | if (texelComponent == 0) { 113 | lightIdx = int(texel[0]); 114 | } else if (texelComponent == 1) { 115 | lightIdx = int(texel[1]); 116 | } else if (texelComponent == 2) { 117 | lightIdx = int(texel[2]); 118 | } else if (texelComponent == 3) { 119 | lightIdx = int(texel[3]); 120 | } 121 | Light light = UnpackLight(lightIdx); 122 | 123 | float lightDistance = distance(light.position, v_position); 124 | vec3 L = (light.position - v_position) / lightDistance; 125 | 126 | float lightIntensity = cubicGaussian(2.0 * lightDistance / light.radius); 127 | float lambertTerm = max(dot(L, normal), 0.0); 128 | 129 | fragColor += albedo * lambertTerm * light.color * vec3(lightIntensity); 130 | } 131 | 132 | const vec3 ambientLight = vec3(0.025); 133 | fragColor += albedo * ambientLight; 134 | 135 | gl_FragColor = vec4(fragColor, 1.0); 136 | } 137 | `; 138 | } 139 | 
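The cluster lookup arithmetic in the fragment shader above can be sanity-checked on the CPU by mirroring it in JavaScript. The sketch below is illustrative only; the helper name is made up, and the default of 700 follows MAX_LIGHTS_PER_CLUSTER in src/renderers/base.js.

// Locate the texel row and channel that the shader reads from u_clusterbuffer for list slot i
// of a given cluster column (channel 0 of texel row 0 holds the light count).
function clusterListSlot(i, maxLightsPerCluster = 700) {
  const texelsPerCol = Math.floor((maxLightsPerCluster + 1) / 4) + 1; // 176 rows for 700 lights
  const texelIdx = Math.floor((i + 1) / 4);      // which texel row holds slot i
  const component = (i + 1) - texelIdx * 4;      // which channel (0..3) within that texel
  const v = (texelIdx + 1) / (texelsPerCol + 1); // normalized v coordinate the shader samples
  return { texelIdx, component, v };
}
// Example: the first light index of a cluster (i = 0) lives in texel row 0, channel 1.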
-------------------------------------------------------------------------------- /src/shaders/forwardPlus.vert.glsl: -------------------------------------------------------------------------------- 1 | #version 100 2 | precision highp float; 3 | 4 | uniform mat4 u_viewProjectionMatrix; 5 | 6 | attribute vec3 a_position; 7 | attribute vec3 a_normal; 8 | attribute vec2 a_uv; 9 | 10 | varying vec3 v_position; 11 | varying vec3 v_normal; 12 | varying vec2 v_uv; 13 | 14 | void main() { 15 | gl_Position = u_viewProjectionMatrix * vec4(a_position, 1.0); 16 | v_position = a_position; 17 | v_normal = a_normal; 18 | v_uv = a_uv; 19 | } -------------------------------------------------------------------------------- /src/shaders/quad.vert.glsl: -------------------------------------------------------------------------------- 1 | #version 100 2 | precision highp float; 3 | 4 | attribute vec3 a_position; 5 | 6 | varying vec2 v_uv; 7 | 8 | void main() { 9 | gl_Position = vec4(a_position, 1.0); 10 | v_uv = a_position.xy * 0.5 + 0.5; 11 | } -------------------------------------------------------------------------------- /src/utils.js: -------------------------------------------------------------------------------- 1 | import { gl, canvas, abort } from './init'; 2 | import QuadVertSource from './shaders/quad.vert.glsl'; 3 | 4 | function downloadURI(uri, name) { 5 | var link = document.createElement('a'); 6 | link.download = name; 7 | link.href = uri; 8 | document.body.appendChild(link); 9 | link.click(); 10 | document.body.removeChild(link); 11 | }; 12 | 13 | export function saveCanvas() { 14 | downloadURI(canvas.toDataURL('image/png'), 'webgl-canvas-' + Date.now() + '.png'); 15 | } 16 | 17 | function compileShader(shaderSource, shaderType) { 18 | var shader = gl.createShader(shaderType); 19 | gl.shaderSource(shader, shaderSource); 20 | gl.compileShader(shader); 21 | if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) { 22 | console.error(shaderSource); 23 | abort('shader compiler error:\n' + gl.getShaderInfoLog(shader)); 24 | } 25 | 26 | return shader; 27 | }; 28 | 29 | function linkShader(vs, fs) { 30 | var prog = gl.createProgram(); 31 | gl.attachShader(prog, vs); 32 | gl.attachShader(prog, fs); 33 | gl.linkProgram(prog); 34 | if (!gl.getProgramParameter(prog, gl.LINK_STATUS)) { 35 | abort('shader linker error:\n' + gl.getProgramInfoLog(prog)); 36 | } 37 | return prog; 38 | }; 39 | 40 | function addShaderLocations(result, shaderLocations) { 41 | if (shaderLocations && shaderLocations.uniforms && shaderLocations.uniforms.length) { 42 | for (let i = 0; i < shaderLocations.uniforms.length; ++i) { 43 | result = Object.assign(result, { 44 | [shaderLocations.uniforms[i]]: gl.getUniformLocation(result.glShaderProgram, shaderLocations.uniforms[i]), 45 | }); 46 | } 47 | } 48 | if (shaderLocations && shaderLocations.attribs && shaderLocations.attribs.length) { 49 | for (let i = 0; i < shaderLocations.attribs.length; ++i) { 50 | result = Object.assign(result, { 51 | [shaderLocations.attribs[i]]: gl.getAttribLocation(result.glShaderProgram, shaderLocations.attribs[i]), 52 | }); 53 | } 54 | } 55 | return result; 56 | } 57 | 58 | export function loadShaderProgram(vsSource, fsSource, shaderLocations) { 59 | const vs = compileShader(vsSource, gl.VERTEX_SHADER); 60 | const fs = compileShader(fsSource, gl.FRAGMENT_SHADER); 61 | return addShaderLocations({ 62 | glShaderProgram: linkShader(vs, fs), 63 | }, shaderLocations); 64 | } 65 | 66 | const quadPositions = new Float32Array([ 67 | -1.0, -1.0, 0.0, 68 | 1.0, -1.0, 
0.0, 69 | -1.0, 1.0, 0.0, 70 | 1.0, 1.0, 0.0 71 | ]); 72 | 73 | const quadBuffer = gl.createBuffer(); 74 | gl.bindBuffer(gl.ARRAY_BUFFER, quadBuffer); 75 | gl.bufferData(gl.ARRAY_BUFFER, quadPositions, gl.STATIC_DRAW); 76 | 77 | export function renderFullscreenQuad(program) { 78 | // Bind the program to use to draw the quad 79 | gl.useProgram(program.glShaderProgram); 80 | 81 | // Bind the VBO as the gl.ARRAY_BUFFER 82 | gl.bindBuffer(gl.ARRAY_BUFFER, quadBuffer); 83 | 84 | // Enable the bound buffer as the vertex attrib array for 85 | // program.a_position, using gl.enableVertexAttribArray 86 | gl.enableVertexAttribArray(program.a_position); 87 | 88 | // Use gl.vertexAttribPointer to tell WebGL the type/layout for 89 | // program.a_position's access pattern. 90 | gl.vertexAttribPointer(program.a_position, 3, gl.FLOAT, gl.FALSE, 0, 0); 91 | 92 | // Use gl.drawArrays to draw the quad 93 | gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); 94 | 95 | // Disable the enabled vertex attrib array 96 | gl.disableVertexAttribArray(program.a_position); 97 | 98 | // Unbind the array buffer. 99 | gl.bindBuffer(gl.ARRAY_BUFFER, null); 100 | } -------------------------------------------------------------------------------- /webpack.config.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | const webpack = require('webpack'); 3 | const MinifyPlugin = require('babel-minify-webpack-plugin'); 4 | 5 | module.exports = function(env) { 6 | const isProduction = env && env.production === true; 7 | 8 | return { 9 | entry: path.join(__dirname, 'src/init'), 10 | output: { 11 | path: path.join(__dirname, 'build'), 12 | filename: 'bundle.js', 13 | }, 14 | module: { 15 | loaders: [ 16 | { 17 | test: /\.js$/, 18 | exclude: /(node_modules|bower_components)/, 19 | loader: 'babel-loader', 20 | query: { 21 | presets: [['env', { 22 | targets: { 23 | browsers: ['> 1%', 'last 2 major versions'], 24 | }, 25 | loose: true, 26 | modules: false, 27 | }]], 28 | }, 29 | }, 30 | { 31 | test: /\.glsl$/, 32 | loader: 'webpack-glsl-loader' 33 | }, 34 | ], 35 | }, 36 | plugins: [ 37 | isProduction ? new MinifyPlugin({ 38 | keepFnName: true, 39 | keepClassName: true, 40 | }) : undefined, 41 | new webpack.DefinePlugin({ 42 | 'process.env': { 43 | 'NODE_ENV': (isProduction ? JSON.stringify('production'): JSON.stringify('development')), 44 | } 45 | }), 46 | ].filter(p => p), 47 | devtool: 'cheap-source-map', 48 | devServer: { 49 | port: 5660, 50 | publicPath: '/build/' 51 | }, 52 | }; 53 | }; 54 | --------------------------------------------------------------------------------
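A closing note on how the shader sources above are loaded: webpack-glsl-loader (configured in webpack.config.js for files matching .glsl) imports plain GLSL files as strings, while the .glsl.js files are ordinary JavaScript modules handled by babel-loader that export a function returning GLSL source, which is how constants such as the light count and slice counts get baked into the shaders at program-creation time. A minimal sketch of that second pattern, using a hypothetical file name and output rather than code from this repository:

// example.frag.glsl.js -- hypothetical module following the repo's .glsl.js pattern
export default function (params) {
  return `
  #version 100
  precision highp float;
  void main() {
    // params.numLights is interpolated into the source before compileShader() ever sees it
    gl_FragColor = vec4(vec3(float(${params.numLights}) / 1000.0), 1.0);
  }
  `;
}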