├── Babylon_Path_Tracing.html
├── Debugging_GLTF_Loading.html
├── GLTF_Model_Path_Tracing.html
├── HDRI_Environment_Path_Tracing.html
├── LICENSE
├── Physical_Sky_Model.html
├── README.md
├── Transformed_Quadric_Geometry.html
├── js
│   ├── BVH_Fast_Builder.js
│   ├── BVH_SAH_Quality_Builder.js
│   ├── BabylonPathTracing_FragmentShader.js
│   ├── Babylon_Path_Tracing.js
│   ├── Debugging_GLTF_Loading.js
│   ├── GLTFModelPathTracing_FragmentShader.js
│   ├── GLTF_Model_Path_Tracing.js
│   ├── HDRIEnvironmentPathTracing_FragmentShader.js
│   ├── HDRI_Environment_Path_Tracing.js
│   ├── PathTracingCommon.js
│   ├── PhysicalSkyModel_FragmentShader.js
│   ├── Physical_Sky_Model.js
│   ├── TransformedQuadricGeometry_FragmentShader.js
│   ├── Transformed_Quadric_Geometry.js
│   ├── babylon.glTFFileLoader.min.js
│   ├── babylon.js
│   ├── dat.gui.min.js
│   └── stats.min.js
├── models
│   ├── DamagedHelmet.bin
│   ├── DamagedHelmet.gltf
│   ├── Duck.bin
│   ├── Duck.gltf
│   ├── StanfordBunny.glb
│   ├── StanfordDragon.glb
│   ├── UtahTeapot.glb
│   ├── materials
│   │   ├── DamagedHelmet
│   │   │   ├── Default_AO.jpg
│   │   │   ├── Default_albedo.jpg
│   │   │   ├── Default_emissive.jpg
│   │   │   ├── Default_metalRoughness.jpg
│   │   │   └── Default_normal.jpg
│   │   └── Duck
│   │       └── DuckCM.png
│   ├── testBookCase.gltf
│   └── twoParts-opaque.gltf
└── textures
    ├── BlueNoise_RGBA256.png
    ├── cloud_layers_2k.hdr
    ├── delta_2_2k.hdr
    ├── kiara_5_noon_2k.hdr
    ├── noon_grass_2k.hdr
    └── symmetrical_garden_2k.hdr
/Babylon_Path_Tracing.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | Babylon PathTracing Renderer 8 | 9 | 10 | 11 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 |
35 | 36 |
37 | Babylon.js PathTracing Renderer - Cornell Box 38 |
39 | 40 |
41 |
42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | -------------------------------------------------------------------------------- /Debugging_GLTF_Loading.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | Babylon PathTracing Renderer - Debugging glTF Loading 8 | 9 | 10 | 11 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 |
35 | 36 |
38 | Babylon.js PathTracing Renderer - Debugging glTF Loading 39 |
40 | 41 |
43 |
44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | -------------------------------------------------------------------------------- /GLTF_Model_Path_Tracing.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | Babylon PathTracing Renderer - glTF Models 8 | 9 | 10 | 11 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 |
35 | 36 |
38 | Babylon.js PathTracing Renderer - glTF Models 39 |
40 | 41 |
43 |
44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | -------------------------------------------------------------------------------- /HDRI_Environment_Path_Tracing.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | Babylon PathTracing Renderer - HDRI Environment 8 | 9 | 10 | 11 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 |
35 | 36 |
38 | Babylon PathTracing Renderer - HDRI Environment 39 |
40 | 41 |
43 |
44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Creative Commons Legal Code 2 | 3 | CC0 1.0 Universal 4 | 5 | CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE 6 | LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN 7 | ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS 8 | INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES 9 | REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS 10 | PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM 11 | THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED 12 | HEREUNDER. 13 | 14 | Statement of Purpose 15 | 16 | The laws of most jurisdictions throughout the world automatically confer 17 | exclusive Copyright and Related Rights (defined below) upon the creator 18 | and subsequent owner(s) (each and all, an "owner") of an original work of 19 | authorship and/or a database (each, a "Work"). 20 | 21 | Certain owners wish to permanently relinquish those rights to a Work for 22 | the purpose of contributing to a commons of creative, cultural and 23 | scientific works ("Commons") that the public can reliably and without fear 24 | of later claims of infringement build upon, modify, incorporate in other 25 | works, reuse and redistribute as freely as possible in any form whatsoever 26 | and for any purposes, including without limitation commercial purposes. 27 | These owners may contribute to the Commons to promote the ideal of a free 28 | culture and the further production of creative, cultural and scientific 29 | works, or to gain reputation or greater distribution for their Work in 30 | part through the use and efforts of others. 31 | 32 | For these and/or other purposes and motivations, and without any 33 | expectation of additional consideration or compensation, the person 34 | associating CC0 with a Work (the "Affirmer"), to the extent that he or she 35 | is an owner of Copyright and Related Rights in the Work, voluntarily 36 | elects to apply CC0 to the Work and publicly distribute the Work under its 37 | terms, with knowledge of his or her Copyright and Related Rights in the 38 | Work and the meaning and intended legal effect of CC0 on those rights. 39 | 40 | 1. Copyright and Related Rights. A Work made available under CC0 may be 41 | protected by copyright and related or neighboring rights ("Copyright and 42 | Related Rights"). Copyright and Related Rights include, but are not 43 | limited to, the following: 44 | 45 | i. the right to reproduce, adapt, distribute, perform, display, 46 | communicate, and translate a Work; 47 | ii. moral rights retained by the original author(s) and/or performer(s); 48 | iii. publicity and privacy rights pertaining to a person's image or 49 | likeness depicted in a Work; 50 | iv. rights protecting against unfair competition in regards to a Work, 51 | subject to the limitations in paragraph 4(a), below; 52 | v. rights protecting the extraction, dissemination, use and reuse of data 53 | in a Work; 54 | vi. 
database rights (such as those arising under Directive 96/9/EC of the 55 | European Parliament and of the Council of 11 March 1996 on the legal 56 | protection of databases, and under any national implementation 57 | thereof, including any amended or successor version of such 58 | directive); and 59 | vii. other similar, equivalent or corresponding rights throughout the 60 | world based on applicable law or treaty, and any national 61 | implementations thereof. 62 | 63 | 2. Waiver. To the greatest extent permitted by, but not in contravention 64 | of, applicable law, Affirmer hereby overtly, fully, permanently, 65 | irrevocably and unconditionally waives, abandons, and surrenders all of 66 | Affirmer's Copyright and Related Rights and associated claims and causes 67 | of action, whether now known or unknown (including existing as well as 68 | future claims and causes of action), in the Work (i) in all territories 69 | worldwide, (ii) for the maximum duration provided by applicable law or 70 | treaty (including future time extensions), (iii) in any current or future 71 | medium and for any number of copies, and (iv) for any purpose whatsoever, 72 | including without limitation commercial, advertising or promotional 73 | purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each 74 | member of the public at large and to the detriment of Affirmer's heirs and 75 | successors, fully intending that such Waiver shall not be subject to 76 | revocation, rescission, cancellation, termination, or any other legal or 77 | equitable action to disrupt the quiet enjoyment of the Work by the public 78 | as contemplated by Affirmer's express Statement of Purpose. 79 | 80 | 3. Public License Fallback. Should any part of the Waiver for any reason 81 | be judged legally invalid or ineffective under applicable law, then the 82 | Waiver shall be preserved to the maximum extent permitted taking into 83 | account Affirmer's express Statement of Purpose. In addition, to the 84 | extent the Waiver is so judged Affirmer hereby grants to each affected 85 | person a royalty-free, non transferable, non sublicensable, non exclusive, 86 | irrevocable and unconditional license to exercise Affirmer's Copyright and 87 | Related Rights in the Work (i) in all territories worldwide, (ii) for the 88 | maximum duration provided by applicable law or treaty (including future 89 | time extensions), (iii) in any current or future medium and for any number 90 | of copies, and (iv) for any purpose whatsoever, including without 91 | limitation commercial, advertising or promotional purposes (the 92 | "License"). The License shall be deemed effective as of the date CC0 was 93 | applied by Affirmer to the Work. Should any part of the License for any 94 | reason be judged legally invalid or ineffective under applicable law, such 95 | partial invalidity or ineffectiveness shall not invalidate the remainder 96 | of the License, and in such case Affirmer hereby affirms that he or she 97 | will not (i) exercise any of his or her remaining Copyright and Related 98 | Rights in the Work or (ii) assert any associated claims and causes of 99 | action with respect to the Work, in either case contrary to Affirmer's 100 | express Statement of Purpose. 101 | 102 | 4. Limitations and Disclaimers. 103 | 104 | a. No trademark or patent rights held by Affirmer are waived, abandoned, 105 | surrendered, licensed or otherwise affected by this document. 106 | b. 
Affirmer offers the Work as-is and makes no representations or 107 | warranties of any kind concerning the Work, express, implied, 108 | statutory or otherwise, including without limitation warranties of 109 | title, merchantability, fitness for a particular purpose, non 110 | infringement, or the absence of latent or other defects, accuracy, or 111 | the present or absence of errors, whether or not discoverable, all to 112 | the greatest extent permissible under applicable law. 113 | c. Affirmer disclaims responsibility for clearing rights of other persons 114 | that may apply to the Work or any use thereof, including without 115 | limitation any person's Copyright and Related Rights in the Work. 116 | Further, Affirmer disclaims responsibility for obtaining any necessary 117 | consents, permissions or other rights required for any use of the 118 | Work. 119 | d. Affirmer understands and acknowledges that Creative Commons is not a 120 | party to this document and has no duty or obligation with respect to 121 | this CC0 or use of the Work. 122 | -------------------------------------------------------------------------------- /Physical_Sky_Model.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | Physical Sky Model 8 | 9 | 10 | 11 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 |
35 | 36 |
38 | Babylon.js PathTracing Renderer - Physical Sky Model 39 |
40 | 41 |
43 |
44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Babylon.js-PathTracing-Renderer 2 | Real-time PathTracing with global illumination and progressive rendering, all on top of the Babylon.js WebGL framework. Click here for Live Demo: https://erichlof.github.io/Babylon.js-PathTracing-Renderer/Babylon_Path_Tracing.html 3 | 4 |
5 | Transformed Quadric Geometry demo: https://erichlof.github.io/Babylon.js-PathTracing-Renderer/Transformed_Quadric_Geometry.html 6 |
7 | 8 |
9 | glTF Model Path Tracing demo: https://erichlof.github.io/Babylon.js-PathTracing-Renderer/GLTF_Model_Path_Tracing.html 10 |
11 | 12 |
13 | Physical Sky Model demo: https://erichlof.github.io/Babylon.js-PathTracing-Renderer/Physical_Sky_Model.html 14 |
15 | 16 |
17 | HDRI Environment demo: https://erichlof.github.io/Babylon.js-PathTracing-Renderer/HDRI_Environment_Path_Tracing.html 18 |
19 | 20 |

Note: by request of Babylon.js users, this is a W.I.P. conversion from three.js to Babylon.js as the host engine behind my custom path tracing shaders.

21 | 22 |
23 | 24 |

TODO

25 | 26 | * Add more robust support for arbitrary PBR materials, especially when they are specified in the glTF files 27 | * Add mobile touch/pointer support so that users can enjoy real-time path tracing on any device with a browser 28 | 29 |
To see how this all got started and to follow future progress, take a look at this Babylon Forum discussion: https://forum.babylonjs.com/t/path-tracing-in-babylonjs/11475/2
33 |
34 | 35 |

Progress Updates

* January 27th, 2023: Big update across all shaders to the rendering of Transparent and ClearCoat objects. When rendering these materials, we have to take into account the reflected portion of light as well as the refracted (or transmitted) portion of the light, at the same time. Before, I was using Blue Noise to randomly select which path a ray should follow - basically like flipping a coin. 'Heads' would send the bounced secondary ray off of the surface as a mirror reflection-type ray. 'Tails' would let the ray pass through the surface as a transmitted or refraction-type ray. Although this randomized Monte Carlo method works, and will eventually converge on the correct visual result (a double image of the reflection as well as the refraction), one of the drawbacks of using any randomized routine like this is that it produces noise, especially when the camera is moving. I recently implemented a different approach, in which both rays are spawned when a camera ray hits an object that has a Transparent or ClearCoat surface. A reflection ray is created and stored until a later bounce in the bounces loop, and the second refracted or transmitted ray is always allowed to go first - it interacts with the scene and reports back whatever it can see through the object. When this transmitted ray is done, we rewind time back to the surface that initially spawned these 2 rays, and the reflected ray now gets its turn to interact with the scene and report back whatever it sees. Although this method produces slightly more shader code and just a little more complexity, it is totally worth it, because in the end you get a smooth, solid double image on these types of objects, just like we see in real life. And the reflection portion (often the bright white highlight of the light source near the top of the object) remains solid and steady without any noise, even while moving the camera and navigating the scene.

* May 5th, 2022: With the major release of Babylon.js 5 today, this date also marks the 1-year anniversary of the Babylon.js PathTracing Renderer! In the last couple of weeks I have been improving the BVH system for glTF models. Now there are 2 BVH builders you can choose between: the previous 'Fast Builder', which builds a fairly good AABB tree in a matter of milliseconds, and the newly added 'SAH Quality Builder', which might take a little longer (still, just a couple of seconds!) but produces a high-quality AABB tree using the industry-standard, tried-and-true SAH (Surface Area Heuristic) building algorithm. I have since changed all the demos to use this higher-quality SAH tree builder instead of the older Fast Builder. You might not even notice the build-time difference: I custom-made our SAH builder to go as fast as I know how! But hopefully, you *will* notice the improvement in frame rate, especially when flying up close to a glTF model, or attempting to render a heavier model with lots of triangles. Lastly, a bug fix: all of the models were appearing mirrored from how they appear in my other renderers. I finally figured out how to account for Babylon's Left-Handed coordinate system (vs. three.js' Right-Handed system). The solution involved flipping the Z axis as well as changing the winding order of the triangle vertices as I load and store each individual triangle vertex on the GPU data texture. Now everything looks as it should - enjoy! :-)
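To make that handedness fix concrete, here is a minimal sketch of the idea - note that `storeTriangle`, `positions`, `indices`, and `triangle_array` are illustrative names for this example, not the repo's actual identifiers:

```javascript
// Minimal sketch of the left-handed fixup described above (illustrative names).
// 'positions' is a flat [x,y,z, x,y,z, ...] array from the loaded mesh;
// 'indices' holds 3 vertex indices per triangle.
function storeTriangle(positions, indices, tri, triangle_array)
{
	// fetch this triangle's 3 vertex indices, swapping the last two
	// so that the winding order (CW vs. CCW) is reversed
	const ids = [indices[3 * tri + 0], indices[3 * tri + 2], indices[3 * tri + 1]];

	for (let v = 0; v < 3; v++)
	{
		const id = ids[v];
		triangle_array[9 * tri + 3 * v + 0] = positions[3 * id + 0];  // X stays
		triangle_array[9 * tri + 3 * v + 1] = positions[3 * id + 1];  // Y stays
		triangle_array[9 * tri + 3 * v + 2] = -positions[3 * id + 2]; // negate Z to flip handedness
	}
}
```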
* March 23rd, 2022: Major refactor to the entire codebase - now all of the procedural JavaScript code hopefully reads and flows more naturally in all of the demos' setup files. Also, I made improvements to the denoiser and updated all the GLSL shaders to reflect these new changes. With these improvements and optimizations, convergence happens almost instantly!

* August 30th, 2021: Physical Sky Model has been successfully added - check out the new demo! I expanded our pathTracingCommon.js library to define and handle various parameters related to realistic sun and sky environment lighting. Users can easily change the direction (azimuth angle) of the Sun as well as the time of day (zenith angle) with the handy dat.gui sliders. The sky model used is the Preetham Model, which is the industry standard. There are more sky models to choose from out there, but most require multiple samples through the gas volume of the sky (via ray marching), which gets expensive. On the other hand, the Preetham Model that we use is an analytic model, and therefore it can give very realistic results with only one sample (ray direction) needed - which is perfect for real-time applications. Another recent update that is repo-wide is the addition of a pixel resolution slider to all demos. The default resolution is now full resolution (1.0), but it can be dialed down if needed to improve framerate, especially on underpowered devices.

* August 18th, 2021: Implemented loading and path tracing of PBR materials that are included with glTF models. I successfully ported most of my loading code and shader PBR material-handling code from my three.js renderer. The js setup file loads in an arbitrary glTF model, and then determines if it has any or all of the following: albedoTexture/a.k.a. diffuseMap (most common), bumpTexture/a.k.a. normalMap (also very common), metallicTexture/a.k.a. metallicRoughnessMap, and emissiveTexture/a.k.a. emissiveMap. Note: ambientTextures/a.k.a. ambientOcclusionMaps are not used, because we get a much more accurate ambient occlusion from the path tracer itself, as a by-product of the ray casting process - essentially for free! If the glTF model has any or all of the previously mentioned textures included, these are sent over to the GPU and the shader handles how the camera rays interact with the various PBR materials. The Damaged Helmet model uses all of the above, and is currently rendering correctly. However, I may need help from more experienced Babylon/glTF users to make the loading code more robust, as there are numerous ways to define materials inside an arbitrary glTF file, i.e. metallicRoughness maps. But the good news is, we are now loading and path tracing glTF models with all of their materials, in real time!

* August 10th, 2021: glTF Models are now loading and being path traced in real time - whoo hoo! I successfully ported my glTF geometry preparation and BVH builder code from the three.js framework to the Babylon.js framework. It took me a while to find the equivalents to some of the routines between the two libraries, but once I did, it was smooth sailing! In fact, Babylon's system was so easy to use that I added a new model picker to the GUI menu, allowing the end user to easily select a model to load from the drop-down list. Then, behind the scenes, the Babylon PathTracing Renderer jumps into action: quickly loading the glTF/glb file, converting it to a BVH-builder-friendly representation, building an efficient BVH, storing that tree as a GPU data texture, and then ray casting against that data inside the GPU path tracer - all in a matter of seconds! ;-)
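To show how those pieces connect, here is a hedged sketch of driving the BVH builder. Only `BVH_Build_Iterative` and its data layout (9 floats per input AABB, 8 floats per output node) come from the builder files included later in this listing (js/BVH_Fast_Builder.js and js/BVH_SAH_Quality_Builder.js); the triangle count and the fill loop are placeholder assumptions:

```javascript
// Hedged sketch of a caller driving BVH_Build_Iterative; 'nTriangles' and
// the fill step are illustrative, not the repo's exact setup code.
let nTriangles = 1000; // assumption: one AABB per model triangle
// input layout read by the builder: 9 floats per AABB = min.xyz, max.xyz, centroid.xyz.
// the buffer is oversized because the builder overwrites it with the flattened tree,
// which needs 8 floats for each of the (up to 2N - 1) nodes
let aabb_array = new Float32Array(16 * nTriangles);
let totalWork = new Uint32Array(nTriangles);

for (let i = 0; i < nTriangles; i++)
{
	totalWork[i] = i; // every triangle AABB starts out in the root node's work list
	// ... fill aabb_array[9 * i + 0 .. 9 * i + 8] with this triangle's min, max, centroid ...
}

BVH_Build_Iterative(totalWork, aabb_array);
// aabb_array now holds the flattened tree, 8 floats per node:
// (idPrimitive, minCorner.xyz) and (idRightChild, maxCorner.xyz),
// ready to be uploaded to the GPU as a data texture for the path tracing shader
```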
* July 15th, 2021: Added camera and FPS stats with stats.js and a more intuitive GUI with the dat.gui.js system. Instead of memorizing numerous hotkeys, the demos feature fully functioning menus and foldable controls in the upper right-hand corner of the webpage.

* May 21st, 2021: Updated and improved the de-noiser for Diffuse and clearCoat Diffuse surfaces. Now scenes containing these surfaces (which is nearly all scenes) converge almost instantly! I figured out how to cast the denoiser's neighbor-pixel net a little wider. The diffuse blur kernel was 3x3, or 9 taps in the screenOutput shader. I kept that one for specular surfaces like transparent glass and the coating on top of clearCoat diffuse surfaces when the camera is active. For the diffuse portions of the scene (Diffuse, and the Diffuse part of clearCoat Diffuse), I increased the sample radius of the blur kernel to 5x5, or 25 taps in the screenOutput shader. Although this is quite a lot of taps, the GPU doesn't appear to mind too much, because all pixels are doing this same task for their neighbors, so there should be no GPU divergence. This new wider radius really makes a big difference and is definitely worth the extra texture taps! If I really wanted to go nuts, I could increase the radius to 7x7, which would mean 49 taps per pixel, ha! But I think the current radius is big enough for now and gives really smooth results. What's neat also is that edges such as normal edges, object silhouette edges, and color boundary edges have remained razor sharp through this whole denoising process. So we can have the best of both worlds: diffuse smoothness and detail sharpness where it counts!

* May 13th, 2021: Implemented edge detection and my 1st attempt at a real-time de-noiser (it's actually more of a 'noise-smoother', but it still makes a big difference!). Path tracing is an inherently noisy affair because we only have a budget for 1 random ray path to follow as it bounces around in the scene on each animation frame for each pixel. A certain pixel's ray might wind up taking a completely different path than its immediate neighbor pixels' rays and returning a very different intersection color/intensity, hence the visual noise - especially on diffuse surfaces. Inspired by recent NVIDIA efforts like Path Traced Quake, Quake II RTX, and Minecraft RTX, all of which feature real-time edge detection and their proprietary A.I. deep-learning denoising technology, I set out to create my own simple edge detector and denoiser that could run in real time in the browser, even on smartphones! If you try the updated demo now, I hope you'll agree that with my 1st attempt, although nowhere near the level of sophistication of NVIDIA's (nor will it ever be, ha), the initial results are promising! As you drag the camera around, the scene smoothly goes along with you, almost noise-free, and when you do let the camera be still, it instantly converges on a photo-realistic result!
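To make the kernel idea above concrete, below is a plain-JavaScript analogue of that wider diffuse blur. The real version lives in the GLSL screenOutput shader; `getPixel` and its fields are made-up stand-ins, and the same-object test is just one simple way to keep edges sharp:

```javascript
// Illustrative JS analogue of the 5x5 (25-tap) diffuse blur described above.
// 'getPixel(x, y)' is assumed to return {r, g, b, isDiffuse, objectId}.
function denoisePixel(x, y, getPixel)
{
	const center = getPixel(x, y);
	if (!center.isDiffuse)
		return center; // specular surfaces keep the sharper 3x3 path instead

	let r = 0, g = 0, b = 0, taps = 0;
	for (let dy = -2; dy <= 2; dy++)
	{
		for (let dx = -2; dx <= 2; dx++)
		{
			const n = getPixel(x + dx, y + dy);
			// simple edge test: only average neighbors on the same object,
			// so silhouette and color-boundary edges stay razor sharp
			if (n.objectId !== center.objectId)
				continue;
			r += n.r; g += n.g; b += n.b; taps++;
		}
	}
	// taps is always >= 1 because the center pixel matches itself
	return { r: r / taps, g: g / taps, b: b / taps, isDiffuse: true, objectId: center.objectId };
}
```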
* May 12th, 2021: Added Blue Noise sampling as an alternate random number generator and to smooth out noise. Each pixel samples a 256x256 RGBA high-quality blue noise texture at a randomly offset uv location and then stores and cycles through 1, 2, 3, or all 4 (user's choice) of the R, G, B, and A channels that it sampled for that animation frame. Since blue noise has higher-frequency, evenly distributed noise (as compared to the usual, more chaotic white noise), the result is a much smoother appearance on diffuse and transparent surfaces that must take random samples to look physically correct. Also, convergence is sped up, which is always welcome! In addition to this blue noise update, I added some controls to the quad area light in the red and blue Cornell Box scene. Now you can select a different plane placement of the quad light by pressing any of the number keys 1-6. Also, you can decrease and increase the quad area light's size by holding down the left or right bracket keys.

* May 9th, 2021: The path tracing camera now has some realistic physical features like FOV, aperture size, and focus distance. The mouse wheel controls the FOV (zooming in and out), the comma and period keys (< >) control the aperture size, and the dash and equals keys (- +) move the focal point forward and back out in the scene. The traditional WASD,QE keys control camera flight forward and backward, strafe left and right, and climb up and down. Have fun with the new camera controls!

* May 7th, 2021: Success! With the awesome help and guidance from Evgeni_Popov on the Babylon.js forum, I now have the pixel history working correctly. This means that when the camera is still, our Babylon.js PathTracing Renderer continually samples the stationary scene over and over, all the while averaging the results. After a couple of seconds, it converges on a noise-free result. And since we are following the laws of physics and optics, this result will be photo-realistic! Many thanks again to Evgeni_Popov on the awesome Babylon.js forums for providing helpful examples and pointing me in the right direction. Now that this project is off the ground, we can fly! :-D

* May 5th, 2021: I usually like figuring stuff out, but at this point I need help! The issue is that if you take a look at my 'Babylon_Path_Tracing.js' setup file in the 'js' folder, you can see that I have tried unsuccessfully to create a Babylon postProcess feedback loop, also known as a ping-pong buffer. The general concept is that you render (ray trace) a full-screen noisy image with my custom fragment pixel shader ('pathTracing.fragment.fx' in the 'shaders' folder) using the usual Babylon PostProcess, then copy/save all of those pixels (which can either be done with Babylon's PassPostProcess or, as I have tried here, with my custom tiny shader called 'screenCopy.fragment.fx' in the 'shaders' folder). Then the trick is that I need the first pathTracing postProcess, on the next animation frame, to 'read' from the output of the screenCopy postProcess, essentially reading its own history (or giving it a short-term pixel 'memory' of what it calculated last frame). When this is correctly implemented, I will be able to blend each current image with the previous history image, which will refine the image over time through sampling and averaging, and therefore the image settles down from an initial noisy state to a smooth, converged state. If you have an idea how to do this correctly with Babylon, please post on the Babylon Forum linked to above, or you can open an issue here on this repo. Thank you!

* May 4th, 2021: The first commit, project repo created.
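Closing the loop on the May 5th and May 7th notes above: the working setup looks roughly like the hedged sketch below. `camera` and `sampleCounter` are assumed to exist in the surrounding setup code, the uniform/sampler names are illustrative, and `setTextureFromPostProcessOutput` is assumed to be available in the Babylon.js version being used - this is the general shape of the fix, not the repo's exact code:

```javascript
// Hedged sketch of the ping-pong buffer: pathTracing renders the new noisy frame,
// screenCopy saves it, and on the next frame pathTracing reads that saved copy back
// in as its pixel 'history' (all names here are illustrative).
const pathTracingPost = new BABYLON.PostProcess("pathTracing", "pathTracing",
	["uSampleCounter"], ["previousBuffer"], 1.0, camera);
const screenCopyPost = new BABYLON.PostProcess("screenCopy", "screenCopy",
	[], [], 1.0, camera); // by default it reads the previous postProcess's output

pathTracingPost.onApply = function (effect)
{
	// bind what screenCopy saved last frame, so the shader can blend:
	// pixelColor = mix(history, newSample, 1.0 / uSampleCounter)
	effect.setTextureFromPostProcessOutput("previousBuffer", screenCopyPost);
	effect.setFloat("uSampleCounter", sampleCounter);
};
```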
64 | -------------------------------------------------------------------------------- /Transformed_Quadric_Geometry.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | Transformed Quadric Geometry 8 | 9 | 10 | 11 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 |
35 | 36 |
38 | Babylon.js PathTracing Renderer - Transformed Quadric Geometry 39 |
40 | 41 |
43 |
44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | -------------------------------------------------------------------------------- /js/BVH_Fast_Builder.js: -------------------------------------------------------------------------------- 1 | /* BVH (Bounding Volume Hierarchy) Iterative Fast Builder */ 2 | /* 3 | Inspired by: Thanassis Tsiodras (ttsiodras on GitHub) 4 | https://github.com/ttsiodras/renderer-cuda/blob/master/src/BVH.cpp 5 | Edited and Ported from C++ to Javascript by: Erich Loftis (erichlof on GitHub) 6 | */ 7 | 8 | 9 | let stackptr = 0; 10 | let buildnodes = []; 11 | let leftWorkLists = []; 12 | let rightWorkLists = []; 13 | let parentList = []; 14 | let currentList, aabb_array_copy; 15 | let k, value, side0, side1, side2; 16 | let bestSplit, goodSplit, okaySplit; 17 | let bestAxis, goodAxis, okayAxis; 18 | let leftWorkCount = 0; 19 | let rightWorkCount = 0; 20 | let currentMinCorner = new BABYLON.Vector3(); 21 | let currentMaxCorner = new BABYLON.Vector3(); 22 | let testMinCorner = new BABYLON.Vector3(); 23 | let testMaxCorner = new BABYLON.Vector3(); 24 | let testCentroid = new BABYLON.Vector3(); 25 | let currentCentroid = new BABYLON.Vector3(); 26 | let spatialAverage = new BABYLON.Vector3(); 27 | 28 | 29 | function BVH_FlatNode() 30 | { 31 | this.idSelf = 0; 32 | this.idPrimitive = -1; // a negative primitive id means that this is another inner node 33 | this.idRightChild = 0; 34 | this.idParent = 0; 35 | this.minCorner = new BABYLON.Vector3(); 36 | this.maxCorner = new BABYLON.Vector3(); 37 | } 38 | 39 | 40 | function BVH_Create_Node(workList, idParent, isRightBranch) 41 | { 42 | 43 | // re-initialize bounding box extents 44 | currentMinCorner.set(Infinity, Infinity, Infinity); 45 | currentMaxCorner.set(-Infinity, -Infinity, -Infinity); 46 | 47 | if (workList.length < 1) 48 | { // should never happen, but just in case... 49 | return; 50 | } 51 | else if (workList.length == 1) 52 | { 53 | // if we're down to 1 primitive aabb, quickly create a leaf node and return. 54 | k = workList[0]; 55 | // create leaf node 56 | let flatLeafNode = new BVH_FlatNode(); 57 | flatLeafNode.idSelf = buildnodes.length; 58 | flatLeafNode.idPrimitive = k; // id of primitive (usually a triangle) that is stored inside this AABB leaf node 59 | flatLeafNode.idRightChild = -1; // leaf nodes do not have children 60 | flatLeafNode.idParent = idParent; 61 | flatLeafNode.minCorner.set(aabb_array_copy[9 * k + 0], aabb_array_copy[9 * k + 1], aabb_array_copy[9 * k + 2]); 62 | flatLeafNode.maxCorner.set(aabb_array_copy[9 * k + 3], aabb_array_copy[9 * k + 4], aabb_array_copy[9 * k + 5]); 63 | buildnodes.push(flatLeafNode); 64 | 65 | // if this is a right branch, fill in parent's missing link to this right child, 66 | // now that we have assigned this right child an ID 67 | if (isRightBranch) 68 | buildnodes[idParent].idRightChild = flatLeafNode.idSelf; 69 | 70 | return; 71 | } // end else if (workList.length == 1) 72 | 73 | else if (workList.length > 1) 74 | { 75 | // this is where the real work happens: we must sort an arbitrary number of primitive aabb's. 76 | // to get a balanced tree, we hope for about half to be placed in left child, half to be placed in right child. 
77 | 78 | workListLength = workList.length; 79 | // construct bounding box around all of the current workList's triangle AABBs 80 | for (let i = 0; i < workListLength; i++) 81 | { 82 | k = workList[i]; 83 | testMinCorner.set(aabb_array[9 * k + 0], aabb_array[9 * k + 1], aabb_array[9 * k + 2]); 84 | testMaxCorner.set(aabb_array[9 * k + 3], aabb_array[9 * k + 4], aabb_array[9 * k + 5]); 85 | currentMinCorner.minimizeInPlace(testMinCorner); 86 | currentMaxCorner.maximizeInPlace(testMaxCorner); 87 | } 88 | 89 | 90 | // create an inner node to represent this newly grown bounding box 91 | let flatnode = new BVH_FlatNode(); 92 | flatnode.idSelf = buildnodes.length; // its own id matches the number of nodes we've created so far 93 | flatnode.idPrimitive = -1; // a negative primitive id means that this is just another inner node (with pointers to children), no triangle 94 | flatnode.idRightChild = 0; // missing RightChild link will be filled in soon; don't know how deep the left branches will go while constructing top-to-bottom 95 | flatnode.idParent = idParent; 96 | flatnode.minCorner.copyFrom(currentMinCorner); 97 | flatnode.maxCorner.copyFrom(currentMaxCorner); 98 | buildnodes.push(flatnode); 99 | 100 | // if this is a right branch, fill in parent's missing link to this right child, 101 | // now that we have assigned this right child an ID 102 | if (isRightBranch) 103 | buildnodes[idParent].idRightChild = flatnode.idSelf; 104 | 105 | 106 | side0 = currentMaxCorner.x - currentMinCorner.x; // length along X-axis 107 | side1 = currentMaxCorner.y - currentMinCorner.y; // length along Y-axis 108 | side2 = currentMaxCorner.z - currentMinCorner.z; // length along Z-axis 109 | 110 | // this simply uses the spatial average of the longest box extent to determine the split plane, 111 | // which is very fast and results in a fair-to-good quality, balanced binary tree structure 112 | 113 | // calculate the middle point of the current box (aka 'spatial median') 114 | spatialAverage.copyFrom(currentMinCorner); 115 | spatialAverage.addInPlace(currentMaxCorner); 116 | spatialAverage.scaleInPlace(0.5); 117 | 118 | // initialize variables 119 | bestAxis = 0; goodAxis = 1; okayAxis = 2; 120 | bestSplit = spatialAverage.x; goodSplit = spatialAverage.y; okaySplit = spatialAverage.z; 121 | 122 | // determine the longest extent of the box, and start with that as splitting dimension 123 | if (side0 >= side1 && side0 >= side2) 124 | { 125 | bestAxis = 0; 126 | bestSplit = spatialAverage.x; 127 | if (side1 >= side2) 128 | { 129 | goodAxis = 1; 130 | goodSplit = spatialAverage.y; 131 | okayAxis = 2; 132 | okaySplit = spatialAverage.z; 133 | } 134 | else 135 | { 136 | goodAxis = 2; 137 | goodSplit = spatialAverage.z; 138 | okayAxis = 1; 139 | okaySplit = spatialAverage.y; 140 | } 141 | } 142 | else if (side1 >= side0 && side1 >= side2) 143 | { 144 | bestAxis = 1; 145 | bestSplit = spatialAverage.y; 146 | if (side0 >= side2) 147 | { 148 | goodAxis = 0; 149 | goodSplit = spatialAverage.x; 150 | okayAxis = 2; 151 | okaySplit = spatialAverage.z; 152 | } 153 | else 154 | { 155 | goodAxis = 2; 156 | goodSplit = spatialAverage.z; 157 | okayAxis = 0; 158 | okaySplit = spatialAverage.x; 159 | } 160 | } 161 | else// if (side2 >= side0 && side2 >= side1) 162 | { 163 | bestAxis = 2; 164 | bestSplit = spatialAverage.z; 165 | if (side0 >= side1) 166 | { 167 | goodAxis = 0; 168 | goodSplit = spatialAverage.x; 169 | okayAxis = 1; 170 | okaySplit = spatialAverage.y; 171 | } 172 | else 173 | { 174 | goodAxis = 1; 175 | goodSplit = 
spatialAverage.y; 176 | okayAxis = 0; 177 | okaySplit = spatialAverage.x; 178 | } 179 | } 180 | 181 | // try best axis first, then try the other two if necessary 182 | for (let axis = 0; axis < 3; axis++) 183 | { 184 | // distribute the triangle AABBs in either the left child or right child 185 | // reset counters for the loop coming up 186 | leftWorkCount = 0; 187 | rightWorkCount = 0; 188 | 189 | // this loop is to count how many elements we will need for the left branch and the right branch 190 | for (let i = 0; i < workList.length; i++) 191 | { 192 | k = workList[i]; 193 | testCentroid.set(aabb_array_copy[9 * k + 6], aabb_array_copy[9 * k + 7], aabb_array_copy[9 * k + 8]); 194 | 195 | // get bbox center 196 | if (bestAxis == 0) value = testCentroid.x; // X-axis 197 | else if (bestAxis == 1) value = testCentroid.y; // Y-axis 198 | else value = testCentroid.z; // Z-axis 199 | 200 | if (value < bestSplit) 201 | { 202 | leftWorkCount++; 203 | } else 204 | { 205 | rightWorkCount++; 206 | } 207 | } 208 | 209 | if (leftWorkCount > 0 && rightWorkCount > 0) 210 | { 211 | break; // success, move on to the next part 212 | } 213 | else// if (leftWorkCount == 0 || rightWorkCount == 0) 214 | { 215 | // try another axis 216 | if (axis == 0) 217 | { 218 | bestAxis = goodAxis; 219 | bestSplit = goodSplit; 220 | } 221 | else if (axis == 1) 222 | { 223 | bestAxis = okayAxis; 224 | bestSplit = okaySplit; 225 | } 226 | 227 | continue; 228 | } 229 | 230 | } // end for (let axis = 0; axis < 3; axis++) 231 | 232 | 233 | // if the below if statement is true, then we have successfully sorted the primitive(triangle) AABBs 234 | if (leftWorkCount > 0 && rightWorkCount > 0) 235 | { 236 | // now that the size of each branch is known, we can initialize the left and right arrays 237 | leftWorkLists[stackptr] = new Uint32Array(leftWorkCount); 238 | rightWorkLists[stackptr] = new Uint32Array(rightWorkCount); 239 | 240 | // reset counters for the loop coming up 241 | leftWorkCount = 0; 242 | rightWorkCount = 0; 243 | 244 | // sort the primitives and populate the current leftWorkLists and rightWorklists 245 | for (let i = 0; i < workList.length; i++) 246 | { 247 | k = workList[i]; 248 | testCentroid.set(aabb_array_copy[9 * k + 6], aabb_array_copy[9 * k + 7], aabb_array_copy[9 * k + 8]); 249 | 250 | // get bbox center 251 | if (bestAxis == 0) value = testCentroid.x; // X-axis 252 | else if (bestAxis == 1) value = testCentroid.y; // Y-axis 253 | else value = testCentroid.z; // Z-axis 254 | 255 | if (value < bestSplit) 256 | { 257 | leftWorkLists[stackptr][leftWorkCount] = k; 258 | leftWorkCount++; 259 | } else 260 | { 261 | rightWorkLists[stackptr][rightWorkCount] = k; 262 | rightWorkCount++; 263 | } 264 | } 265 | 266 | return; // success! 267 | 268 | } // end if (leftWorkCount > 0 && rightWorkCount > 0) 269 | 270 | 271 | // if we reached this point, the builder failed to find a decent splitting plane axis, so 272 | // manually populate the current leftWorkLists and rightWorklists. 
273 | // reset counters to 0 274 | leftWorkCount = 0; 275 | rightWorkCount = 0; 276 | 277 | // this loop is to count how many elements we will need for the left branch and the right branch 278 | for (let i = 0; i < workList.length; i++) 279 | { 280 | if (i % 2 == 0) 281 | { 282 | leftWorkCount++; 283 | } else 284 | { 285 | rightWorkCount++; 286 | } 287 | } 288 | 289 | // now that the size of each branch is known, we can initialize the left and right arrays 290 | leftWorkLists[stackptr] = new Uint32Array(leftWorkCount); 291 | rightWorkLists[stackptr] = new Uint32Array(rightWorkCount); 292 | 293 | // reset counters for the loop coming up 294 | leftWorkCount = 0; 295 | rightWorkCount = 0; 296 | 297 | for (let i = 0; i < workList.length; i++) 298 | { 299 | k = workList[i]; 300 | 301 | if (i % 2 == 0) 302 | { 303 | leftWorkLists[stackptr][leftWorkCount] = k; 304 | leftWorkCount++; 305 | } else 306 | { 307 | rightWorkLists[stackptr][rightWorkCount] = k; 308 | rightWorkCount++; 309 | } 310 | } 311 | 312 | } // end else if (workList.length > 1) 313 | 314 | } // end function BVH_Create_Node(workList, aabb_array, idParent, isLeftBranch) 315 | 316 | 317 | 318 | function BVH_Build_Iterative(workList, aabb_array) 319 | { 320 | 321 | currentList = workList; 322 | // save a global copy of the supplied aabb_array, so that it can be used by the various functions in this file 323 | aabb_array_copy = new Float32Array(aabb_array); 324 | 325 | // reset BVH builder arrays; 326 | buildnodes = []; 327 | leftWorkLists = []; 328 | rightWorkLists = []; 329 | parentList = []; 330 | // initialize variables 331 | stackptr = 0; 332 | 333 | // parent id of -1, meaning this is the root node, which has no parent 334 | parentList.push(-1); 335 | BVH_Create_Node(currentList, -1, false); // build root node 336 | 337 | // build the tree using the "go down left branches until done, then ascend back up right branches" approach 338 | while (stackptr > -1) 339 | { 340 | // pop the next node off of the left-side stack 341 | currentList = leftWorkLists[stackptr]; 342 | 343 | if (currentList != undefined) 344 | { // left side of tree 345 | 346 | leftWorkLists[stackptr] = null; // mark as processed 347 | stackptr++; 348 | 349 | parentList.push(buildnodes.length - 1); 350 | 351 | // build the left node 352 | BVH_Create_Node(currentList, buildnodes.length - 1, false); 353 | } 354 | else 355 | { // right side of tree 356 | // pop the next node off of the right-side stack 357 | currentList = rightWorkLists[stackptr]; 358 | 359 | if (currentList != undefined) 360 | { 361 | rightWorkLists[stackptr] = null; // mark as processed 362 | stackptr++; 363 | 364 | // build the right node 365 | BVH_Create_Node(currentList, parentList.pop(), true); 366 | } 367 | else 368 | { 369 | stackptr--; 370 | } 371 | } 372 | 373 | } // end while (stackptr > -1) 374 | 375 | 376 | // Copy the buildnodes array into the aabb_array 377 | for (let n = 0; n < buildnodes.length; n++) 378 | { 379 | // slot 0 380 | aabb_array[8 * n + 0] = buildnodes[n].idPrimitive; // r or x component 381 | aabb_array[8 * n + 1] = buildnodes[n].minCorner.x; // g or y component 382 | aabb_array[8 * n + 2] = buildnodes[n].minCorner.y; // b or z component 383 | aabb_array[8 * n + 3] = buildnodes[n].minCorner.z; // a or w component 384 | 385 | // slot 1 386 | aabb_array[8 * n + 4] = buildnodes[n].idRightChild; // r or x component 387 | aabb_array[8 * n + 5] = buildnodes[n].maxCorner.x; // g or y component 388 | aabb_array[8 * n + 6] = buildnodes[n].maxCorner.y; // b or z component 389 | 
aabb_array[8 * n + 7] = buildnodes[n].maxCorner.z; // a or w component 390 | } 391 | 392 | } // end function BVH_Build_Iterative(workList, aabb_array) -------------------------------------------------------------------------------- /js/BVH_SAH_Quality_Builder.js: -------------------------------------------------------------------------------- 1 | /* BVH (Bounding Volume Hierarchy) Iterative SAH Quality Builder */ 2 | /* 3 | Inspired by: Thanassis Tsiodras (ttsiodras on GitHub) 4 | https://github.com/ttsiodras/renderer-cuda/blob/master/src/BVH.cpp 5 | Edited and Ported from C++ to Javascript by: Erich Loftis (erichlof on GitHub) 6 | */ 7 | 8 | 9 | 10 | let stackptr = 0; 11 | let buildnodes = []; 12 | let leftWorkLists = []; 13 | let rightWorkLists = []; 14 | let parentList = []; 15 | let currentList, aabb_array_copy; 16 | let bestSplit = null; 17 | let bestAxis = null; 18 | let leftWorkCount = 0; 19 | let rightWorkCount = 0; 20 | let bestSplitHasBeenFound = false; 21 | let currentMinCorner = new BABYLON.Vector3(); 22 | let currentMaxCorner = new BABYLON.Vector3(); 23 | let testMinCorner = new BABYLON.Vector3(); 24 | let testMaxCorner = new BABYLON.Vector3(); 25 | let testCentroid = new BABYLON.Vector3(); 26 | let currentCentroid = new BABYLON.Vector3(); 27 | let minCentroid = new BABYLON.Vector3(); 28 | let maxCentroid = new BABYLON.Vector3(); 29 | let centroidAverage = new BABYLON.Vector3(); 30 | let spatialAverage = new BABYLON.Vector3(); 31 | let LBottomCorner = new BABYLON.Vector3(); 32 | let LTopCorner = new BABYLON.Vector3(); 33 | let RBottomCorner = new BABYLON.Vector3(); 34 | let RTopCorner = new BABYLON.Vector3(); 35 | let k, value, side0, side1, side2, minCost, testSplit, testStep; 36 | let countLeft, countRight; 37 | let currentAxis, longestAxis, mediumAxis, shortestAxis; 38 | let lside0, lside1, lside2, rside0, rside1, rside2; 39 | let surfaceLeft, surfaceRight, totalCost; 40 | let numBins = 4; // must be 2 or higher for the BVH to work properly 41 | 42 | 43 | 44 | function BVH_FlatNode() 45 | { 46 | this.idSelf = 0; 47 | this.idPrimitive = -1; // a negative primitive id means that this is another inner node 48 | this.idRightChild = 0; 49 | this.idParent = 0; 50 | this.minCorner = new BABYLON.Vector3(); 51 | this.maxCorner = new BABYLON.Vector3(); 52 | } 53 | 54 | 55 | function BVH_Create_Node(workList, idParent, isRightBranch) 56 | { 57 | // reset flag 58 | bestSplitHasBeenFound = false; 59 | 60 | // re-initialize bounding box extents 61 | currentMinCorner.set(Infinity, Infinity, Infinity); 62 | currentMaxCorner.set(-Infinity, -Infinity, -Infinity); 63 | 64 | if (workList.length < 1) 65 | { // should never happen, but just in case... 66 | return; 67 | } 68 | else if (workList.length == 1) 69 | { 70 | // if we're down to 1 primitive aabb, quickly create a leaf node and return. 
71 | k = workList[0]; 72 | // create leaf node 73 | let flatLeafNode = new BVH_FlatNode(); 74 | flatLeafNode.idSelf = buildnodes.length; 75 | flatLeafNode.idPrimitive = k; // id of primitive (usually a triangle) that is stored inside this AABB leaf node 76 | flatLeafNode.idRightChild = -1; // leaf nodes do not have children 77 | flatLeafNode.idParent = idParent; 78 | flatLeafNode.minCorner.set(aabb_array_copy[9 * k + 0], aabb_array_copy[9 * k + 1], aabb_array_copy[9 * k + 2]); 79 | flatLeafNode.maxCorner.set(aabb_array_copy[9 * k + 3], aabb_array_copy[9 * k + 4], aabb_array_copy[9 * k + 5]); 80 | buildnodes.push(flatLeafNode); 81 | 82 | // if this is a right branch, fill in parent's missing link to this right child, 83 | // now that we have assigned this right child an ID 84 | if (isRightBranch) 85 | buildnodes[idParent].idRightChild = flatLeafNode.idSelf; 86 | 87 | return; 88 | } // end else if (workList.length == 1) 89 | 90 | else if (workList.length == 2) 91 | { 92 | // if we're down to 2 primitive AABBs, quickly create 1 interior node (that holds both), and 2 leaf nodes, then return. 93 | 94 | // construct bounding box around the current workList's triangle AABBs 95 | for (let i = 0; i < 2; i++) 96 | { 97 | k = workList[i]; 98 | testMinCorner.set(aabb_array_copy[9 * k + 0], aabb_array_copy[9 * k + 1], aabb_array_copy[9 * k + 2]); 99 | testMaxCorner.set(aabb_array_copy[9 * k + 3], aabb_array_copy[9 * k + 4], aabb_array_copy[9 * k + 5]); 100 | currentMinCorner.minimizeInPlace(testMinCorner); 101 | currentMaxCorner.maximizeInPlace(testMaxCorner); 102 | } 103 | 104 | // create inner node 105 | let flatnode0 = new BVH_FlatNode(); 106 | flatnode0.idSelf = buildnodes.length; 107 | flatnode0.idPrimitive = -1; // a negative primitive id means that this is just another inner node (with pointers to children) 108 | flatnode0.idRightChild = buildnodes.length + 2; 109 | flatnode0.idParent = idParent; 110 | flatnode0.minCorner.copyFrom(currentMinCorner); 111 | flatnode0.maxCorner.copyFrom(currentMaxCorner); 112 | buildnodes.push(flatnode0); 113 | 114 | // if this is a right branch, fill in parent's missing link to this right child, 115 | // now that we have assigned this right child an ID 116 | if (isRightBranch) 117 | buildnodes[idParent].idRightChild = flatnode0.idSelf; 118 | 119 | 120 | // create 'left' leaf node 121 | k = workList[0]; 122 | let flatnode1 = new BVH_FlatNode(); 123 | flatnode1.idSelf = buildnodes.length; 124 | flatnode1.idPrimitive = k; // id of primitive (usually a triangle) that is stored inside this AABB leaf node 125 | flatnode1.idRightChild = -1; // leaf nodes do not have children 126 | flatnode1.idParent = flatnode0.idSelf; 127 | flatnode1.minCorner.set(aabb_array_copy[9 * k + 0], aabb_array_copy[9 * k + 1], aabb_array_copy[9 * k + 2]); 128 | flatnode1.maxCorner.set(aabb_array_copy[9 * k + 3], aabb_array_copy[9 * k + 4], aabb_array_copy[9 * k + 5]); 129 | buildnodes.push(flatnode1); 130 | 131 | // create 'right' leaf node 132 | k = workList[1]; 133 | let flatnode2 = new BVH_FlatNode(); 134 | flatnode2.idSelf = buildnodes.length; 135 | flatnode2.idPrimitive = k; // id of primitive (usually a triangle) that is stored inside this AABB leaf node 136 | flatnode2.idRightChild = -1; // leaf nodes do not have children 137 | flatnode2.idParent = flatnode0.idSelf; 138 | flatnode2.minCorner.set(aabb_array_copy[9 * k + 0], aabb_array_copy[9 * k + 1], aabb_array_copy[9 * k + 2]); 139 | flatnode2.maxCorner.set(aabb_array_copy[9 * k + 3], aabb_array_copy[9 * k + 4], aabb_array_copy[9 * k + 
5]); 140 | buildnodes.push(flatnode2); 141 | 142 | return; 143 | 144 | } // end else if (workList.length == 2) 145 | 146 | else if (workList.length > 2) 147 | { 148 | // this is where the real work happens: we must sort an arbitrary number of primitive (usually triangles) AABBs. 149 | // to get a balanced tree, we hope for about half to be placed in left child, half to be placed in right child. 150 | 151 | // re-initialize min/max centroids 152 | minCentroid.set(Infinity, Infinity, Infinity); 153 | maxCentroid.set(-Infinity, -Infinity, -Infinity); 154 | centroidAverage.set(0, 0, 0); 155 | 156 | // construct/grow bounding box around all of the current workList's primitive(triangle) AABBs 157 | // also, calculate the average position of all the aabb's centroids 158 | for (let i = 0; i < workList.length; i++) 159 | { 160 | k = workList[i]; 161 | 162 | testMinCorner.set(aabb_array_copy[9 * k + 0], aabb_array_copy[9 * k + 1], aabb_array_copy[9 * k + 2]); 163 | testMaxCorner.set(aabb_array_copy[9 * k + 3], aabb_array_copy[9 * k + 4], aabb_array_copy[9 * k + 5]); 164 | currentCentroid.set(aabb_array_copy[9 * k + 6], aabb_array_copy[9 * k + 7], aabb_array_copy[9 * k + 8]); 165 | 166 | currentMinCorner.minimizeInPlace(testMinCorner); 167 | currentMaxCorner.maximizeInPlace(testMaxCorner); 168 | 169 | minCentroid.minimizeInPlace(currentCentroid); 170 | maxCentroid.maximizeInPlace(currentCentroid); 171 | 172 | centroidAverage.addInPlace(currentCentroid); // sum up all aabb centroid positions 173 | } 174 | // divide the aabb centroid sum by the number of centroids to get average 175 | centroidAverage.scaleInPlace(1 / workList.length); 176 | 177 | // calculate the middle point of this newly-grown bounding box (aka the 'spatial median') 178 | //spatialAverage.copy(currentMinCorner).addInPlace(currentMaxCorner).multiplyScalar(0.5); 179 | 180 | // create inner node 181 | let flatnode = new BVH_FlatNode(); 182 | flatnode.idSelf = buildnodes.length; // its own id matches the number of nodes we've created so far 183 | flatnode.idPrimitive = -1; // a negative primitive id means that this is just another inner node (with pointers to children) 184 | flatnode.idRightChild = 0; // missing RightChild link will be filled in soon; don't know how deep the left branches will go while constructing top-to-bottom 185 | flatnode.idParent = idParent; 186 | flatnode.minCorner.copyFrom(currentMinCorner); 187 | flatnode.maxCorner.copyFrom(currentMaxCorner); 188 | buildnodes.push(flatnode); 189 | 190 | // if this is a right branch, fill in parent's missing link to this right child, 191 | // now that we have assigned this right child an ID 192 | if (isRightBranch) 193 | buildnodes[idParent].idRightChild = flatnode.idSelf; 194 | 195 | 196 | // Begin split plane determination using the Surface Area Heuristic(SAH) strategy 197 | 198 | side0 = currentMaxCorner.x - currentMinCorner.x; // length along X-axis 199 | side1 = currentMaxCorner.y - currentMinCorner.y; // length along Y-axis 200 | side2 = currentMaxCorner.z - currentMinCorner.z; // length along Z-axis 201 | 202 | minCost = workList.length * ((side0 * side1) + (side1 * side2) + (side2 * side0)); 203 | 204 | // reset bestSplit and bestAxis 205 | bestSplit = null; 206 | bestAxis = null; 207 | 208 | // Try all 3 axes X, Y, Z 209 | for (let axis = 0; axis < 3; axis++) 210 | { // 0 = X, 1 = Y, 2 = Z axis 211 | // we will try dividing the triangle AABBs based on the current axis 212 | 213 | if (axis == 0) 214 | { 215 | testSplit = currentMinCorner.x; 216 | testStep = side0 / 
numBins; 217 | //testSplit = minCentroid.x; 218 | //testStep = (maxCentroid.x - minCentroid.x) / numBins; 219 | } 220 | else if (axis == 1) 221 | { 222 | testSplit = currentMinCorner.y; 223 | testStep = side1 / numBins; 224 | //testSplit = minCentroid.y; 225 | //testStep = (maxCentroid.y - minCentroid.y) / numBins; 226 | } 227 | else // if (axis == 2) 228 | { 229 | testSplit = currentMinCorner.z; 230 | testStep = side2 / numBins; 231 | //testSplit = minCentroid.z; 232 | //testStep = (maxCentroid.z - minCentroid.z) / numBins; 233 | } 234 | 235 | for (let partition = 1; partition < numBins; partition++) 236 | { 237 | testSplit += testStep; 238 | 239 | // Create potential left and right bounding boxes 240 | LBottomCorner.set(Infinity, Infinity, Infinity); 241 | LTopCorner.set(-Infinity, -Infinity, -Infinity); 242 | RBottomCorner.set(Infinity, Infinity, Infinity); 243 | RTopCorner.set(-Infinity, -Infinity, -Infinity); 244 | 245 | // The number of triangle AABBs in the left and right bboxes (needed to calculate SAH cost function) 246 | countLeft = 0; 247 | countRight = 0; 248 | 249 | // allocate triangle AABBs in workList based on their bbox centers 250 | // this is a fast O(N) pass, no triangle AABB sorting needed (yet) 251 | for (let i = 0; i < workList.length; i++) 252 | { 253 | k = workList[i]; 254 | testMinCorner.set(aabb_array_copy[9 * k + 0], aabb_array_copy[9 * k + 1], aabb_array_copy[9 * k + 2]); 255 | testMaxCorner.set(aabb_array_copy[9 * k + 3], aabb_array_copy[9 * k + 4], aabb_array_copy[9 * k + 5]); 256 | testCentroid.set(aabb_array_copy[9 * k + 6], aabb_array_copy[9 * k + 7], aabb_array_copy[9 * k + 8]); 257 | 258 | // get bbox center 259 | if (axis == 0) 260 | { // X-axis 261 | value = testCentroid.x; 262 | } 263 | else if (axis == 1) 264 | { // Y-axis 265 | value = testCentroid.y; 266 | } 267 | else 268 | { // Z-axis 269 | value = testCentroid.z; 270 | } 271 | 272 | if (value < testSplit) 273 | { 274 | // if centroid is smaller then testSplit, put triangle box in Left bbox 275 | LBottomCorner.minimizeInPlace(testMinCorner); 276 | LTopCorner.maximizeInPlace(testMaxCorner); 277 | countLeft++; 278 | } else 279 | { 280 | // else put triangle box in Right bbox 281 | RBottomCorner.minimizeInPlace(testMinCorner); 282 | RTopCorner.maximizeInPlace(testMaxCorner); 283 | countRight++; 284 | } 285 | } // end for (let i = 0; i < workList.length; i++) 286 | 287 | // First, check for bad partitionings, i.e. 
bins with 0 triangle AABBs make no sense 288 | if (countLeft < 1 || countRight < 1) 289 | continue; 290 | 291 | // Now use the Surface Area Heuristic to see if this split has a better "cost" 292 | 293 | // It's a real partitioning, calculate the sides of Left and Right BBox 294 | lside0 = LTopCorner.x - LBottomCorner.x; 295 | lside1 = LTopCorner.y - LBottomCorner.y; 296 | lside2 = LTopCorner.z - LBottomCorner.z; 297 | 298 | rside0 = RTopCorner.x - RBottomCorner.x; 299 | rside1 = RTopCorner.y - RBottomCorner.y; 300 | rside2 = RTopCorner.z - RBottomCorner.z; 301 | 302 | // calculate SurfaceArea of Left and Right BBox 303 | surfaceLeft = (lside0 * lside1) + (lside1 * lside2) + (lside2 * lside0); 304 | surfaceRight = (rside0 * rside1) + (rside1 * rside2) + (rside2 * rside0); 305 | 306 | // calculate total cost by multiplying left and right bbox by number of triangle AABBs in each 307 | totalCost = (surfaceLeft * countLeft) + (surfaceRight * countRight); 308 | 309 | // keep track of cheapest split found so far 310 | if (totalCost < minCost) 311 | { 312 | minCost = totalCost; 313 | bestSplit = testSplit; 314 | bestAxis = axis; 315 | bestSplitHasBeenFound = true; 316 | } 317 | } // end for (let partition = 1; partition < numBins; partition++) 318 | 319 | } // end for (let axis = 0; axis < 3; axis++) 320 | 321 | } // end else if (workList.length > 2) 322 | 323 | 324 | // If the SAH strategy failed, now try to populate the current leftWorkLists and rightWorklists with the Object Median strategy 325 | if (!bestSplitHasBeenFound) 326 | { 327 | //console.log("bestSplit not found, now trying Object Median strategy..."); 328 | //console.log("num of AABBs remaining: " + workList.length); 329 | 330 | // determine the longest extent of the box, and start with that as splitting dimension 331 | if (side0 >= side1 && side0 >= side2) 332 | { 333 | longestAxis = 0; 334 | if (side1 >= side2) 335 | { 336 | mediumAxis = 1; shortestAxis = 2; 337 | } 338 | else 339 | { 340 | mediumAxis = 2; shortestAxis = 1; 341 | } 342 | } 343 | else if (side1 >= side0 && side1 >= side2) 344 | { 345 | longestAxis = 1; 346 | if (side0 >= side2) 347 | { 348 | mediumAxis = 0; shortestAxis = 2; 349 | } 350 | else 351 | { 352 | mediumAxis = 2; shortestAxis = 0; 353 | } 354 | } 355 | else// if (side2 >= side0 && side2 >= side1) 356 | { 357 | longestAxis = 2; 358 | if (side0 >= side1) 359 | { 360 | mediumAxis = 0; shortestAxis = 1; 361 | } 362 | else 363 | { 364 | mediumAxis = 1; shortestAxis = 0; 365 | } 366 | } 367 | 368 | // try longest axis first, then try the other two if necessary 369 | currentAxis = longestAxis; // a split along the longest axis would be optimal, so try this first 370 | // reset counters for the loop coming up 371 | leftWorkCount = 0; 372 | rightWorkCount = 0; 373 | 374 | // this loop is to count how many elements we will need for the left branch and the right branch 375 | for (let i = 0; i < workList.length; i++) 376 | { 377 | k = workList[i]; 378 | testCentroid.set(aabb_array_copy[9 * k + 6], aabb_array_copy[9 * k + 7], aabb_array_copy[9 * k + 8]); 379 | 380 | // get bbox center 381 | if (currentAxis == 0) 382 | { 383 | value = testCentroid.x; // X-axis 384 | testSplit = centroidAverage.x; 385 | //testSplit = spatialAverage.x; 386 | } 387 | else if (currentAxis == 1) 388 | { 389 | value = testCentroid.y; // Y-axis 390 | testSplit = centroidAverage.y; 391 | //testSplit = spatialAverage.y; 392 | } 393 | else 394 | { 395 | value = testCentroid.z; // Z-axis 396 | testSplit = centroidAverage.z; 397 | //testSplit = 
spatialAverage.z; 398 | } 399 | 400 | if (value < testSplit) 401 | { 402 | leftWorkCount++; 403 | } else 404 | { 405 | rightWorkCount++; 406 | } 407 | } 408 | 409 | if (leftWorkCount > 0 && rightWorkCount > 0) 410 | { 411 | bestSplit = testSplit; 412 | bestAxis = currentAxis; 413 | bestSplitHasBeenFound = true; 414 | } 415 | 416 | if (!bestSplitHasBeenFound) // if longest axis failed 417 | { 418 | currentAxis = mediumAxis; // try middle-length axis next 419 | // reset counters for the loop coming up 420 | leftWorkCount = 0; 421 | rightWorkCount = 0; 422 | 423 | // this loop is to count how many elements we will need for the left branch and the right branch 424 | for (let i = 0; i < workList.length; i++) 425 | { 426 | k = workList[i]; 427 | testCentroid.set(aabb_array_copy[9 * k + 6], aabb_array_copy[9 * k + 7], aabb_array_copy[9 * k + 8]); 428 | 429 | // get bbox center 430 | if (currentAxis == 0) 431 | { 432 | value = testCentroid.x; // X-axis 433 | testSplit = centroidAverage.x; 434 | //testSplit = spatialAverage.x; 435 | } 436 | else if (currentAxis == 1) 437 | { 438 | value = testCentroid.y; // Y-axis 439 | testSplit = centroidAverage.y; 440 | //testSplit = spatialAverage.y; 441 | } 442 | else 443 | { 444 | value = testCentroid.z; // Z-axis 445 | testSplit = centroidAverage.z; 446 | //testSplit = spatialAverage.z; 447 | } 448 | 449 | if (value < testSplit) 450 | { 451 | leftWorkCount++; 452 | } else 453 | { 454 | rightWorkCount++; 455 | } 456 | } 457 | 458 | if (leftWorkCount > 0 && rightWorkCount > 0) 459 | { 460 | bestSplit = testSplit; 461 | bestAxis = currentAxis; 462 | bestSplitHasBeenFound = true; 463 | } 464 | } // end if ( !bestSplitHasBeenFound ) // if longest axis failed 465 | 466 | if (!bestSplitHasBeenFound) // if middle-length axis failed 467 | { 468 | currentAxis = shortestAxis; // try shortest axis last 469 | // reset counters for the loop coming up 470 | leftWorkCount = 0; 471 | rightWorkCount = 0; 472 | 473 | // this loop is to count how many elements we will need for the left branch and the right branch 474 | for (let i = 0; i < workList.length; i++) 475 | { 476 | k = workList[i]; 477 | testCentroid.set(aabb_array_copy[9 * k + 6], aabb_array_copy[9 * k + 7], aabb_array_copy[9 * k + 8]); 478 | 479 | // get bbox center 480 | if (currentAxis == 0) 481 | { 482 | value = testCentroid.x; // X-axis 483 | testSplit = centroidAverage.x; 484 | //testSplit = spatialAverage.x; 485 | } 486 | else if (currentAxis == 1) 487 | { 488 | value = testCentroid.y; // Y-axis 489 | testSplit = centroidAverage.y; 490 | //testSplit = spatialAverage.y; 491 | } 492 | else 493 | { 494 | value = testCentroid.z; // Z-axis 495 | testSplit = centroidAverage.z; 496 | //testSplit = spatialAverage.z; 497 | } 498 | 499 | if (value < testSplit) 500 | { 501 | leftWorkCount++; 502 | } else 503 | { 504 | rightWorkCount++; 505 | } 506 | } 507 | 508 | if (leftWorkCount > 0 && rightWorkCount > 0) 509 | { 510 | bestSplit = testSplit; 511 | bestAxis = currentAxis; 512 | bestSplitHasBeenFound = true; 513 | } 514 | } // end if ( !bestSplitHasBeenFound ) // if middle-length axis failed 515 | 516 | } // end if ( !bestSplitHasBeenFound ) // If the SAH strategy failed 517 | 518 | 519 | leftWorkCount = 0; 520 | rightWorkCount = 0; 521 | 522 | // if all strategies have failed, we must manually populate the current leftWorkLists and rightWorklists 523 | if (!bestSplitHasBeenFound) 524 | { 525 | //console.log("bestSplit still not found, resorting to manual placement..."); 526 | //console.log("num of AABBs remaining: " + 
workList.length); 527 | 528 | // this loop is to count how many elements we need for the left branch and the right branch 529 | for (let i = 0; i < workList.length; i++) 530 | { 531 | if (i % 2 == 0) 532 | { 533 | leftWorkCount++; 534 | } else 535 | { 536 | rightWorkCount++; 537 | } 538 | } 539 | 540 | // now that the size of each branch is known, we can initialize the left and right arrays 541 | leftWorkLists[stackptr] = new Uint32Array(leftWorkCount); 542 | rightWorkLists[stackptr] = new Uint32Array(rightWorkCount); 543 | 544 | // reset counters for the loop coming up 545 | leftWorkCount = 0; 546 | rightWorkCount = 0; 547 | 548 | for (let i = 0; i < workList.length; i++) 549 | { 550 | k = workList[i]; 551 | 552 | if (i % 2 == 0) 553 | { 554 | leftWorkLists[stackptr][leftWorkCount] = k; 555 | leftWorkCount++; 556 | } else 557 | { 558 | rightWorkLists[stackptr][rightWorkCount] = k; 559 | rightWorkCount++; 560 | } 561 | } 562 | 563 | return; // return early 564 | } // end if ( !bestSplitHasBeenFound ) 565 | 566 | 567 | // the following code can only be reached if (workList.length > 2) and bestSplit has been successfully found. 568 | // Other unsuccessful conditions will have been handled and will 'return' earlier 569 | 570 | // distribute the triangle AABBs in the left or right child nodes 571 | leftWorkCount = 0; 572 | rightWorkCount = 0; 573 | 574 | // this loop is to count how many elements we need for the left branch and the right branch 575 | for (let i = 0; i < workList.length; i++) 576 | { 577 | k = workList[i]; 578 | testCentroid.set(aabb_array_copy[9 * k + 6], aabb_array_copy[9 * k + 7], aabb_array_copy[9 * k + 8]); 579 | 580 | // get bbox center 581 | if (bestAxis == 0) value = testCentroid.x; // X-axis 582 | else if (bestAxis == 1) value = testCentroid.y; // Y-axis 583 | else value = testCentroid.z; // Z-axis 584 | 585 | if (value < bestSplit) 586 | { 587 | leftWorkCount++; 588 | } else 589 | { 590 | rightWorkCount++; 591 | } 592 | } 593 | 594 | // now that the size of each branch is known, we can initialize the left and right arrays 595 | leftWorkLists[stackptr] = new Uint32Array(leftWorkCount); 596 | rightWorkLists[stackptr] = new Uint32Array(rightWorkCount); 597 | 598 | // reset counters for the loop coming up 599 | leftWorkCount = 0; 600 | rightWorkCount = 0; 601 | 602 | // populate the current leftWorkLists and rightWorklists 603 | for (let i = 0; i < workList.length; i++) 604 | { 605 | k = workList[i]; 606 | testCentroid.set(aabb_array_copy[9 * k + 6], aabb_array_copy[9 * k + 7], aabb_array_copy[9 * k + 8]); 607 | 608 | // get bbox center 609 | if (bestAxis == 0) value = testCentroid.x; // X-axis 610 | else if (bestAxis == 1) value = testCentroid.y; // Y-axis 611 | else value = testCentroid.z; // Z-axis 612 | 613 | if (value < bestSplit) 614 | { 615 | leftWorkLists[stackptr][leftWorkCount] = k; 616 | leftWorkCount++; 617 | } else 618 | { 619 | rightWorkLists[stackptr][rightWorkCount] = k; 620 | rightWorkCount++; 621 | } 622 | } 623 | 624 | } // end function BVH_Create_Node(workList, idParent, isRightBranch) 625 | 626 | 627 | 628 | 629 | function BVH_Build_Iterative(workList, aabb_array) 630 | { 631 | 632 | currentList = workList; 633 | // save a global copy of the supplied aabb_array, so that it can be used by the various functions in this file 634 | aabb_array_copy = new Float32Array(aabb_array); 635 | 636 | // reset BVH builder arrays; 637 | buildnodes = []; 638 | leftWorkLists = []; 639 | rightWorkLists = []; 640 | parentList = []; 641 | 642 | // initialize variables 643 
| stackptr = 0; 644 | 645 | // parent id of -1, meaning this is the root node, which has no parent 646 | parentList.push(-1); 647 | BVH_Create_Node(currentList, -1, false); // build root node 648 | 649 | // build the tree using the "go down left branches until done, then ascend back up right branches" approach 650 | while (stackptr > -1) 651 | { 652 | // pop the next node off of the left-side stack 653 | currentList = leftWorkLists[stackptr]; 654 | 655 | if (currentList != undefined) 656 | { // left side of tree 657 | 658 | leftWorkLists[stackptr] = null; // mark as processed 659 | stackptr++; 660 | 661 | parentList.push(buildnodes.length - 1); 662 | 663 | // build the left node 664 | BVH_Create_Node(currentList, buildnodes.length - 1, false); 665 | } 666 | else 667 | { 668 | currentList = rightWorkLists[stackptr]; 669 | 670 | if (currentList != undefined) 671 | { 672 | rightWorkLists[stackptr] = null; // mark as processed 673 | stackptr++; 674 | 675 | // build the right node 676 | BVH_Create_Node(currentList, parentList.pop(), true); 677 | } 678 | else 679 | { 680 | stackptr--; 681 | } 682 | } 683 | 684 | } // end while (stackptr > -1) 685 | 686 | 687 | // Copy the buildnodes array into the aabb_array 688 | for (let n = 0; n < buildnodes.length; n++) 689 | { 690 | // slot 0 691 | aabb_array[8 * n + 0] = buildnodes[n].idPrimitive; // r or x component 692 | aabb_array[8 * n + 1] = buildnodes[n].minCorner.x; // g or y component 693 | aabb_array[8 * n + 2] = buildnodes[n].minCorner.y; // b or z component 694 | aabb_array[8 * n + 3] = buildnodes[n].minCorner.z; // a or w component 695 | 696 | // slot 1 697 | aabb_array[8 * n + 4] = buildnodes[n].idRightChild; // r or x component 698 | aabb_array[8 * n + 5] = buildnodes[n].maxCorner.x; // g or y component 699 | aabb_array[8 * n + 6] = buildnodes[n].maxCorner.y; // b or z component 700 | aabb_array[8 * n + 7] = buildnodes[n].maxCorner.z; // a or w component 701 | } 702 | 703 | } // end function BVH_Build_Iterative(workList, aabb_array) -------------------------------------------------------------------------------- /js/BabylonPathTracing_FragmentShader.js: -------------------------------------------------------------------------------- 1 | BABYLON.Effect.ShadersStore["pathTracingFragmentShader"] = ` 2 | #version 300 es 3 | 4 | precision highp float; 5 | precision highp int; 6 | precision highp sampler2D; 7 | 8 | // Demo-specific Uniforms 9 | uniform mat4 uLeftSphereInvMatrix; 10 | uniform mat4 uRightSphereInvMatrix; 11 | uniform float uQuadLightPlaneSelectionNumber; 12 | uniform float uQuadLightRadius; 13 | uniform int uRightSphereMatType; 14 | 15 | // demo/scene-specific setup 16 | #define N_QUADS 6 17 | #define N_SPHERES 2 18 | 19 | struct UnitSphere { vec3 color; int type; }; 20 | struct Quad { vec3 normal; vec3 v0; vec3 v1; vec3 v2; vec3 v3; vec3 color; int type; }; 21 | 22 | Quad quads[N_QUADS]; 23 | UnitSphere spheres[N_SPHERES]; 24 | 25 | // the camera ray for this pixel (global variables) 26 | vec3 rayOrigin, rayDirection; 27 | 28 | 29 | // all required includes go here: 30 | 31 | #include // required on all scenes 32 | 33 | #include // required on all scenes 34 | 35 | #include // required on all scenes 36 | 37 | #include // required on scenes with any math-geometry shapes like sphere, cylinder, cone, etc. 
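// Note on the idiom used by SceneIntersect() below for the two transformed spheres: rather than
// intersecting an arbitrarily translated/rotated/scaled ellipsoid directly, the ray is first brought
// into the sphere's object space with the sphere's inverse matrix, tested against a canonical unit
// sphere, and the resulting object-space normal is carried back to world space with the transpose of
// that same inverse matrix (the inverse-transpose rule for normals). A minimal sketch of the pattern,
// with uSphereInvMatrix standing in for uLeftSphereInvMatrix or uRightSphereInvMatrix:
//   rObjOrigin    = vec3( uSphereInvMatrix * vec4(rayOrigin, 1.0) );    // w = 1.0 -> a point, receives translation
//   rObjDirection = vec3( uSphereInvMatrix * vec4(rayDirection, 0.0) ); // w = 0.0 -> a direction, no translation
//   d = UnitSphereIntersect( rObjOrigin, rObjDirection, n );            // intersect in object space
//   hitNormal = transpose(mat3(uSphereInvMatrix)) * n;                  // normal back to world space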
38 | 39 | #include // required on scenes with unit spheres that will be translated, rotated, and scaled by their matrix transform 40 | 41 | #include // required on scenes with quads (actually internally they are made up of 2 triangles) 42 | 43 | #include // required on scenes with axis-aligned quad area lights (quad must reside in either XY, XZ, or YZ planes) 44 | 45 | 46 | //------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ 47 | float SceneIntersect( vec3 rayOrigin, vec3 rayDirection, out vec3 hitNormal, out vec3 hitEmission, out vec3 hitColor, out int hitType, out float hitObjectID ) 48 | //------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ 49 | { 50 | vec3 rObjOrigin, rObjDirection; 51 | vec3 hit, n; 52 | float t, d; 53 | int objectCount = 0; 54 | 55 | // initialize hit record 56 | t = INFINITY; 57 | hitType = -100; 58 | hitObjectID = -INFINITY; 59 | 60 | 61 | // transform ray into Left Sphere's object space 62 | rObjOrigin = vec3( uLeftSphereInvMatrix * vec4(rayOrigin, 1.0) ); 63 | rObjDirection = vec3( uLeftSphereInvMatrix * vec4(rayDirection, 0.0) ); 64 | 65 | d = UnitSphereIntersect( rObjOrigin, rObjDirection, n ); 66 | 67 | if (d < t) 68 | { 69 | t = d; 70 | hitNormal = transpose(mat3(uLeftSphereInvMatrix)) * n; 71 | hitColor = spheres[0].color; 72 | hitType = spheres[0].type; 73 | hitObjectID = float(objectCount); 74 | } 75 | objectCount++; 76 | 77 | // transform ray into Right Sphere's object space 78 | rObjOrigin = vec3( uRightSphereInvMatrix * vec4(rayOrigin, 1.0) ); 79 | rObjDirection = vec3( uRightSphereInvMatrix * vec4(rayDirection, 0.0) ); 80 | 81 | d = UnitSphereIntersect( rObjOrigin, rObjDirection, n ); 82 | 83 | if (d < t) 84 | { 85 | t = d; 86 | hitNormal = transpose(mat3(uRightSphereInvMatrix)) * n; 87 | hitColor = spheres[1].color; 88 | hitType = spheres[1].type; 89 | hitObjectID = float(objectCount); 90 | } 91 | objectCount++; 92 | 93 | 94 | 95 | for (int i = 0; i < N_QUADS; i++) 96 | { 97 | d = QuadIntersect( quads[i].v0, quads[i].v1, quads[i].v2, quads[i].v3, rayOrigin, rayDirection, FALSE ); 98 | 99 | if (d < t) 100 | { 101 | t = d; 102 | hitNormal = quads[i].normal; 103 | hitColor = quads[i].color; 104 | hitType = quads[i].type; 105 | hitObjectID = float(objectCount); 106 | } 107 | 108 | objectCount++; 109 | } 110 | 111 | return t; 112 | 113 | } // end float SceneIntersect( vec3 rayOrigin, vec3 rayDirection, out vec3 hitNormal, out vec3 hitEmission, out vec3 hitColor, out int hitType, out float hitObjectID ) 114 | 115 | 116 | 117 | //----------------------------------------------------------------------------------------------------------------------------- 118 | vec3 CalculateRadiance( out vec3 objectNormal, out vec3 objectColor, out float objectID, out float pixelSharpness ) 119 | //----------------------------------------------------------------------------------------------------------------------------- 120 | { 121 | // a record of ray-surface intersection data 122 | vec3 hitNormal, hitEmission, hitColor; 123 | vec2 hitUV; 124 | float t, hitObjectID; 125 | int hitTextureID; 126 | int hitType; 127 | 128 | Quad light = quads[5]; 129 | 130 | vec3 accumCol = vec3(0); 131 | vec3 mask = vec3(1); 132 | vec3 reflectionMask = vec3(1); 133 | vec3 reflectionRayOrigin = vec3(0); 134 | vec3 reflectionRayDirection = vec3(0); 
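// The reflectionMask / reflectionRayOrigin / reflectionRayDirection variables above implement a small
// deferred-reflection scheme: GLSL fragment shaders cannot recurse, so when a specular surface would
// spawn both a reflected and a refracted ray, the refracted path is followed first while the reflected
// ray and its color throughput are stashed in these variables and flagged with willNeedReflectionRay;
// whenever the active path terminates (ray misses the scene, reaches a light, or a shadow ray is
// blocked), the bounce loop restarts from the stored reflection ray instead of breaking out.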
135 | vec3 dirToLight; 136 | vec3 tdir; 137 | vec3 x, n, nl; 138 | vec3 absorptionCoefficient; 139 | 140 | float nc, nt, ratioIoR, Re, Tr; 141 | //float P, RP, TP; 142 | float weight; 143 | float thickness = 0.05; 144 | float scatteringDistance; 145 | 146 | int diffuseCount = 0; 147 | int previousIntersecType = -100; 148 | hitType = -100; 149 | 150 | int coatTypeIntersected = FALSE; 151 | int bounceIsSpecular = TRUE; 152 | int sampleLight = FALSE; 153 | int willNeedReflectionRay = FALSE; 154 | 155 | 156 | for (int bounces = 0; bounces < 6; bounces++) 157 | { 158 | previousIntersecType = hitType; 159 | 160 | t = SceneIntersect(rayOrigin, rayDirection, hitNormal, hitEmission, hitColor, hitType, hitObjectID); 161 | 162 | 163 | if (t == INFINITY) 164 | { 165 | if (willNeedReflectionRay == TRUE) 166 | { 167 | mask = reflectionMask; 168 | rayOrigin = reflectionRayOrigin; 169 | rayDirection = reflectionRayDirection; 170 | 171 | willNeedReflectionRay = FALSE; 172 | bounceIsSpecular = TRUE; 173 | sampleLight = FALSE; 174 | diffuseCount = 0; 175 | continue; 176 | } 177 | 178 | break; 179 | } 180 | 181 | // useful data 182 | n = normalize(hitNormal); 183 | nl = dot(n, rayDirection) < 0.0 ? n : -n; 184 | x = rayOrigin + rayDirection * t ; 185 | 186 | if (bounces == 0) 187 | { 188 | objectNormal = nl; 189 | objectColor = hitColor; 190 | objectID = hitObjectID; 191 | } 192 | if (bounces == 1 && previousIntersecType == METAL) 193 | { 194 | objectNormal = nl; 195 | } 196 | 197 | 198 | if (hitType == LIGHT) 199 | { 200 | if (bounces == 0 || (bounces == 1 && previousIntersecType == METAL)) 201 | pixelSharpness = 1.01; 202 | 203 | if (diffuseCount == 0) 204 | { 205 | objectNormal = nl; 206 | objectColor = hitColor; 207 | objectID = hitObjectID; 208 | } 209 | 210 | if (bounceIsSpecular == TRUE || sampleLight == TRUE) 211 | accumCol += mask * hitColor; 212 | 213 | if (willNeedReflectionRay == TRUE) 214 | { 215 | mask = reflectionMask; 216 | rayOrigin = reflectionRayOrigin; 217 | rayDirection = reflectionRayDirection; 218 | 219 | willNeedReflectionRay = FALSE; 220 | bounceIsSpecular = TRUE; 221 | sampleLight = FALSE; 222 | diffuseCount = 0; 223 | continue; 224 | } 225 | // reached a light, so we can exit 226 | break; 227 | 228 | } // end if (hitType == LIGHT) 229 | 230 | 231 | // if we get here and sampleLight is still TRUE, shadow ray failed to find the light source 232 | // the ray hit an occluding object along its way to the light 233 | if (sampleLight == TRUE) 234 | { 235 | if (willNeedReflectionRay == TRUE) 236 | { 237 | mask = reflectionMask; 238 | rayOrigin = reflectionRayOrigin; 239 | rayDirection = reflectionRayDirection; 240 | 241 | willNeedReflectionRay = FALSE; 242 | bounceIsSpecular = TRUE; 243 | sampleLight = FALSE; 244 | diffuseCount = 0; 245 | continue; 246 | } 247 | 248 | break; 249 | } 250 | 251 | 252 | 253 | if (hitType == DIFFUSE) // Ideal diffuse reflection 254 | { 255 | diffuseCount++; 256 | 257 | mask *= hitColor; 258 | 259 | bounceIsSpecular = FALSE; 260 | 261 | if (diffuseCount == 1 && blueNoise_rand() < 0.5) 262 | { 263 | mask *= 2.0; 264 | // choose random Diffuse sample vector 265 | rayDirection = randomCosWeightedDirectionInHemisphere(nl); 266 | rayOrigin = x + nl * uEPS_intersect; 267 | continue; 268 | } 269 | 270 | dirToLight = sampleAxisAlignedQuadLight(x, nl, quads[5], weight); 271 | mask *= diffuseCount == 1 ? 
2.0 : 1.0; 272 | mask *= weight; 273 | 274 | rayDirection = dirToLight; 275 | rayOrigin = x + nl * uEPS_intersect; 276 | 277 | sampleLight = TRUE; 278 | continue; 279 | 280 | } // end if (hitType == DIFFUSE) 281 | 282 | 283 | if (hitType == METAL) // Ideal metal specular reflection 284 | { 285 | mask *= hitColor; 286 | 287 | rayDirection = reflect(rayDirection, nl); 288 | rayOrigin = x + nl * uEPS_intersect; 289 | 290 | continue; 291 | } 292 | 293 | 294 | if (hitType == TRANSPARENT) // Ideal dielectric specular reflection/refraction 295 | { 296 | pixelSharpness = diffuseCount == 0 && coatTypeIntersected == FALSE ? -1.0 : pixelSharpness; 297 | 298 | nc = 1.0; // IOR of Air 299 | nt = 1.5; // IOR of common Glass 300 | Re = calcFresnelReflectance(rayDirection, n, nc, nt, ratioIoR); 301 | Tr = 1.0 - Re; 302 | 303 | if (bounces == 0 || (bounces == 1 && hitObjectID != objectID && bounceIsSpecular == TRUE)) 304 | { 305 | reflectionMask = mask * Re; 306 | reflectionRayDirection = reflect(rayDirection, nl); // reflect ray from surface 307 | reflectionRayOrigin = x + nl * uEPS_intersect; 308 | willNeedReflectionRay = TRUE; 309 | } 310 | 311 | if (Re == 1.0) 312 | { 313 | mask = reflectionMask; 314 | rayOrigin = reflectionRayOrigin; 315 | rayDirection = reflectionRayDirection; 316 | 317 | willNeedReflectionRay = FALSE; 318 | bounceIsSpecular = TRUE; 319 | sampleLight = FALSE; 320 | continue; 321 | } 322 | 323 | // transmit ray through surface 324 | 325 | // is ray leaving a solid object from the inside? 326 | // If so, attenuate ray color with object color by how far ray has travelled through the medium 327 | if (distance(n, nl) > 0.1) 328 | { 329 | thickness = 0.01; 330 | mask *= exp( log(clamp(hitColor, 0.01, 0.99)) * thickness * t ); 331 | } 332 | 333 | mask *= Tr; 334 | 335 | tdir = refract(rayDirection, nl, ratioIoR); 336 | rayDirection = tdir; 337 | rayOrigin = x - nl * uEPS_intersect; 338 | 339 | if (diffuseCount == 1) 340 | bounceIsSpecular = TRUE; // turn on refracting caustics 341 | 342 | continue; 343 | 344 | } // end if (hitType == TRANSPARENT) 345 | 346 | 347 | if (hitType == CLEARCOAT_DIFFUSE) // Diffuse object underneath with ClearCoat on top 348 | { 349 | coatTypeIntersected = TRUE; 350 | 351 | nc = 1.0; // IOR of Air 352 | nt = 1.5; // IOR of Clear Coat 353 | Re = calcFresnelReflectance(rayDirection, nl, nc, nt, ratioIoR); 354 | Tr = 1.0 - Re; 355 | 356 | if (bounces == 0 || (bounces == 1 && hitObjectID != objectID && bounceIsSpecular == TRUE)) 357 | { 358 | reflectionMask = mask * Re; 359 | reflectionRayDirection = reflect(rayDirection, nl); // reflect ray from surface 360 | reflectionRayOrigin = x + nl * uEPS_intersect; 361 | willNeedReflectionRay = TRUE; 362 | } 363 | 364 | diffuseCount++; 365 | 366 | if (bounces == 0) 367 | mask *= Tr; 368 | mask *= hitColor; 369 | 370 | bounceIsSpecular = FALSE; 371 | 372 | if (diffuseCount == 1 && blueNoise_rand() < 0.5) 373 | { 374 | mask *= 2.0; 375 | // choose random Diffuse sample vector 376 | rayDirection = randomCosWeightedDirectionInHemisphere(nl); 377 | rayOrigin = x + nl * uEPS_intersect; 378 | continue; 379 | } 380 | 381 | dirToLight = sampleAxisAlignedQuadLight(x, nl, quads[5], weight); 382 | mask *= diffuseCount == 1 ? 
2.0 : 1.0; 383 | mask *= weight; 384 | 385 | rayDirection = dirToLight; 386 | rayOrigin = x + nl * uEPS_intersect; 387 | 388 | // this check helps keep random noisy bright pixels from this clearCoat diffuse surface out of the possible previous refracted glass surface 389 | if (bounces < 3) 390 | sampleLight = TRUE; 391 | continue; 392 | 393 | } //end if (hitType == CLEARCOAT_DIFFUSE) 394 | 395 | } // end for (int bounces = 0; bounces < 6; bounces++) 396 | 397 | 398 | return max(vec3(0), accumCol); 399 | 400 | } // end vec3 CalculateRadiance( out vec3 objectNormal, out vec3 objectColor, out float objectID, out float pixelSharpness ) 401 | 402 | 403 | //------------------- 404 | void SetupScene(void) 405 | //------------------- 406 | { 407 | vec3 light_emissionColor = vec3(1.0, 1.0, 1.0) * 5.0; // Bright white light 408 | 409 | float wallRadius = 50.0; 410 | float lightRadius = uQuadLightRadius * 0.2; 411 | 412 | spheres[0] = UnitSphere( vec3(1.0, 1.0, 0.0), CLEARCOAT_DIFFUSE ); // clearCoat diffuse Sphere Left 413 | spheres[1] = UnitSphere( vec3(1.0, 1.0, 1.0), uRightSphereMatType ); // user-chosen material Sphere Right 414 | 415 | quads[0] = Quad( vec3( 0, 0,-1), vec3(-wallRadius, wallRadius, wallRadius), vec3( wallRadius, wallRadius, wallRadius), vec3( wallRadius,-wallRadius, wallRadius), vec3(-wallRadius,-wallRadius, wallRadius), vec3( 1.0, 1.0, 1.0), DIFFUSE);// Back Wall 416 | quads[1] = Quad( vec3( 1, 0, 0), vec3(-wallRadius,-wallRadius, wallRadius), vec3(-wallRadius,-wallRadius,-wallRadius), vec3(-wallRadius, wallRadius,-wallRadius), vec3(-wallRadius, wallRadius, wallRadius), vec3( 0.7, 0.05, 0.05), DIFFUSE);// Left Wall Red 417 | quads[2] = Quad( vec3(-1, 0, 0), vec3( wallRadius,-wallRadius,-wallRadius), vec3( wallRadius,-wallRadius, wallRadius), vec3( wallRadius, wallRadius, wallRadius), vec3( wallRadius, wallRadius,-wallRadius), vec3(0.05, 0.05, 0.7), DIFFUSE);// Right Wall Blue 418 | quads[3] = Quad( vec3( 0,-1, 0), vec3(-wallRadius, wallRadius,-wallRadius), vec3( wallRadius, wallRadius,-wallRadius), vec3( wallRadius, wallRadius, wallRadius), vec3(-wallRadius, wallRadius, wallRadius), vec3( 1.0, 1.0, 1.0), DIFFUSE);// Ceiling 419 | quads[4] = Quad( vec3( 0, 1, 0), vec3(-wallRadius,-wallRadius, wallRadius), vec3( wallRadius,-wallRadius, wallRadius), vec3( wallRadius,-wallRadius,-wallRadius), vec3(-wallRadius,-wallRadius,-wallRadius), vec3( 1.0, 1.0, 1.0), DIFFUSE);// Floor 420 | 421 | if (uQuadLightPlaneSelectionNumber == 1.0) 422 | quads[5] = Quad( vec3(-1, 0, 0), vec3(wallRadius-1.0,-lightRadius, lightRadius), vec3(wallRadius-1.0, lightRadius, lightRadius), vec3(wallRadius-1.0, lightRadius,-lightRadius), vec3(wallRadius-1.0,-lightRadius,-lightRadius), light_emissionColor, LIGHT);// Quad Area Light on right wall 423 | else if (uQuadLightPlaneSelectionNumber == 2.0) 424 | quads[5] = Quad( vec3( 1, 0, 0), vec3(-wallRadius+1.0,-lightRadius,-lightRadius), vec3(-wallRadius+1.0, lightRadius,-lightRadius), vec3(-wallRadius+1.0, lightRadius, lightRadius), vec3(-wallRadius+1.0,-lightRadius, lightRadius), light_emissionColor, LIGHT);// Quad Area Light on left wall 425 | else if (uQuadLightPlaneSelectionNumber == 3.0) 426 | quads[5] = Quad( vec3( 0, 0, 1), vec3(-lightRadius,-lightRadius, -wallRadius+1.0), vec3(lightRadius,-lightRadius, -wallRadius+1.0), vec3(lightRadius, lightRadius, -wallRadius+1.0), vec3(-lightRadius, lightRadius, -wallRadius+1.0), light_emissionColor, LIGHT);// Quad Area Light on front 'wall'(opening of box) 427 | else if (uQuadLightPlaneSelectionNumber == 4.0) 428 | 
quads[5] = Quad( vec3( 0, 0,-1), vec3(-lightRadius,-lightRadius, wallRadius-1.0), vec3(-lightRadius, lightRadius, wallRadius-1.0), vec3(lightRadius, lightRadius, wallRadius-1.0), vec3(lightRadius,-lightRadius, wallRadius-1.0), light_emissionColor, LIGHT);// Quad Area Light on back wall 429 | else if (uQuadLightPlaneSelectionNumber == 5.0) 430 | quads[5] = Quad( vec3( 0, 1, 0), vec3(-lightRadius, -wallRadius+1.0,-lightRadius), vec3(-lightRadius, -wallRadius+1.0, lightRadius), vec3(lightRadius, -wallRadius+1.0, lightRadius), vec3(lightRadius, -wallRadius+1.0,-lightRadius), light_emissionColor, LIGHT);// Quad Area Light on floor 431 | else if (uQuadLightPlaneSelectionNumber == 6.0) 432 | quads[5] = Quad( vec3( 0,-1, 0), vec3(-lightRadius, wallRadius-1.0,-lightRadius), vec3(lightRadius, wallRadius-1.0,-lightRadius), vec3(lightRadius, wallRadius-1.0, lightRadius), vec3(-lightRadius, wallRadius-1.0, lightRadius), light_emissionColor, LIGHT);// Quad Area Light on ceiling 433 | 434 | } // end void SetupScene(void) 435 | 436 | 437 | // if your scene is static and doesn't have any special requirements, you can use the default main() 438 | #include 439 | 440 | `; 441 | -------------------------------------------------------------------------------- /js/Babylon_Path_Tracing.js: -------------------------------------------------------------------------------- 1 | let canvas, engine, pathTracingScene; 2 | let container, stats; 3 | let gui; 4 | let pixel_ResolutionController, pixel_ResolutionObject; 5 | let needChangePixelResolution = false; 6 | let quadLight_LocationController, quadLight_LocationObject; 7 | let needChangeQuadLightLocation = false; 8 | let quadLight_RadiusController, quadLight_RadiusObject; 9 | let needChangeQuadLightRadius = false; 10 | let rightSphere_MaterialController, rightSphere_MaterialObject; 11 | let needChangeRightSphereMaterial = false; 12 | let isPaused = true; 13 | let camera, oldCameraMatrix, newCameraMatrix; 14 | let camFlightSpeed; // scene specific, depending on scene size dimensions 15 | let cameraRecentlyMoving = false; 16 | let windowIsBeingResized = false; 17 | let beginningFlag = true; 18 | let timeInSeconds = 0.0; 19 | let frameTime = 0.0; 20 | let newWidth, newHeight; 21 | let nm, om; 22 | let increaseFOV = false; 23 | let decreaseFOV = false; 24 | let uApertureSize; // scene specific, depending on scene size dimensions 25 | let apertureChangeAmount; // scene specific, depending on scene size dimensions 26 | let uFocusDistance; // scene specific, depending on scene size dimensions 27 | let focusDistChangeAmount; // scene specific, depending on scene size dimensions 28 | let mouseControl = true; 29 | let cameraDirectionVector = new BABYLON.Vector3(); //for moving where the camera is looking 30 | let cameraRightVector = new BABYLON.Vector3(); //for strafing the camera right and left 31 | let cameraUpVector = new BABYLON.Vector3(); //for moving camera up and down 32 | let blueNoiseTexture; 33 | let infoElement = document.getElementById('info'); 34 | infoElement.style.cursor = "default"; 35 | infoElement.style.userSelect = "none"; 36 | infoElement.style.MozUserSelect = "none"; 37 | 38 | let cameraInfoElement = document.getElementById('cameraInfo'); 39 | cameraInfoElement.style.cursor = "default"; 40 | cameraInfoElement.style.userSelect = "none"; 41 | cameraInfoElement.style.MozUserSelect = "none"; 42 | 43 | // common required uniforms 44 | let uSceneIsDynamic = false; // will any geometry, lights, or models be moving in the scene? 
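// A quick sketch (with illustrative names) of how the counters declared below drive progressive
// refinement: every frame in which the camera is still adds one more full path-traced sample into the
// accumulation buffer, and the screen-output stage averages by multiplying with the reciprocal, roughly:
//   uOneOverSampleCounter = 1.0 / uSampleCounter;              // updated once per frame in the render loop
//   averagedColor = accumulatedColor * uOneOverSampleCounter;  // performed in the screen-output shader
// When the camera moves, uSampleCounter is reset to 1.0 so the image restarts converging, while
// uPreviousSampleCount records the count from the frame just before the move for use on that transition frame.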
45 | let uRandomVec2 = new BABYLON.Vector2(); // used to offset the texture UV when sampling the blueNoiseTexture for smooth randomness - this vec2 is updated/changed every animation frame 46 | let uTime = 0.0; // elapsed time in seconds since the app started 47 | let uFrameCounter = 1.0; // 1 instead of 0 because it is used as a rng() seed in pathtracing shader 48 | let uSampleCounter = 0.0; // will get increased by 1 in animation loop before rendering 49 | let uOneOverSampleCounter = 0.0; // the sample accumulation buffer gets multiplied by this reciprocal of SampleCounter, for averaging final pixel color 50 | let uPreviousSampleCount = 0.0; // records the previous frame's sample count, so that if the camera moves after being still, it can multiply the current frame by the reciprocal (1/SamplesCount) 51 | let uULen = 1.0; // rendering pixel horizontal scale, related to camera's FOV and aspect ratio 52 | let uVLen = 1.0; // rendering pixel vertical scale, related to camera's FOV 53 | let uCameraIsMoving = false; // lets the path tracer know if the camera is being moved 54 | let uToneMappingExposure = 1.0; // exposure amount when applying Reinhard tonemapping in final stages of pixel colors' output 55 | let uPixelEdgeSharpness = 1.0; // for dynamic scenes only - if pixel is found to be lying on a border/boundary edge, how sharp should it be? (range: 0.0-1.0) 56 | let uEdgeSharpenSpeed = 0.05; // applies to edges only - how fast is the blur filter removed from edges? 57 | let uFilterDecaySpeed = 0.0002; // applies to entire image(edges and non-edges alike) - how fast should the blur filter go away for the entire image? 58 | 59 | // scene/demo-specific variables; 60 | let sphereRadius = 16; 61 | let wallRadius = 50; 62 | let leftSphereTransformNode; 63 | let rightSphereTransformNode; 64 | // scene/demo-specific uniforms 65 | let uQuadLightPlaneSelectionNumber; 66 | let uQuadLightRadius; 67 | let uRightSphereMatType; 68 | let uLeftSphereInvMatrix = new BABYLON.Matrix(); 69 | let uRightSphereInvMatrix = new BABYLON.Matrix(); 70 | 71 | 72 | // The following list of keys is not exhaustive, but it should be more than enough to build interactive demos and games 73 | let KeyboardState = { 74 | KeyA: false, KeyB: false, KeyC: false, KeyD: false, KeyE: false, KeyF: false, KeyG: false, KeyH: false, KeyI: false, KeyJ: false, KeyK: false, KeyL: false, KeyM: false, 75 | KeyN: false, KeyO: false, KeyP: false, KeyQ: false, KeyR: false, KeyS: false, KeyT: false, KeyU: false, KeyV: false, KeyW: false, KeyX: false, KeyY: false, KeyZ: false, 76 | ArrowLeft: false, ArrowUp: false, ArrowRight: false, ArrowDown: false, Space: false, Enter: false, PageUp: false, PageDown: false, Tab: false, 77 | Minus: false, Equal: false, BracketLeft: false, BracketRight: false, Semicolon: false, Quote: false, Backquote: false, 78 | Comma: false, Period: false, ShiftLeft: false, ShiftRight: false, Slash: false, Backslash: false, Backspace: false, 79 | Digit1: false, Digit2: false, Digit3: false, Digit4: false, Digit5: false, Digit6: false, Digit7: false, Digit8: false, Digit9: false, Digit0: false 80 | } 81 | 82 | function onKeyDown(event) 83 | { 84 | event.preventDefault(); 85 | 86 | KeyboardState[event.code] = true; 87 | } 88 | 89 | function onKeyUp(event) 90 | { 91 | event.preventDefault(); 92 | 93 | KeyboardState[event.code] = false; 94 | } 95 | 96 | function keyPressed(keyName) 97 | { 98 | return KeyboardState[keyName]; 99 | } 100 | 101 | function onMouseWheel(event) 102 | { 103 | if (isPaused) 104 | return; 105 | 106 | // 
use the following instead, because event.preventDefault() gives errors in console 107 | event.stopPropagation(); 108 | 109 | if (event.deltaY > 0) 110 | { 111 | increaseFOV = true; 112 | } 113 | else if (event.deltaY < 0) 114 | { 115 | decreaseFOV = true; 116 | } 117 | } 118 | 119 | // Watch for browser/canvas resize events 120 | window.addEventListener("resize", function () 121 | { 122 | handleWindowResize(); 123 | }); 124 | 125 | if ('ontouchstart' in window) 126 | { 127 | mouseControl = false; 128 | // TODO: instantiate my custom 'MobileJoystickControls' or similar Babylon solution? 129 | } 130 | 131 | if (mouseControl) 132 | { 133 | window.addEventListener('wheel', onMouseWheel, false); 134 | } 135 | 136 | function handleWindowResize() 137 | { 138 | windowIsBeingResized = true; 139 | 140 | engine.resize(); 141 | 142 | newWidth = engine.getRenderWidth(); 143 | newHeight = engine.getRenderHeight(); 144 | pathTracingRenderTarget.resize({ width: newWidth, height: newHeight }); 145 | screenCopyRenderTarget.resize({ width: newWidth, height: newHeight }); 146 | 147 | width = newWidth; 148 | height = newHeight; 149 | 150 | uVLen = Math.tan(camera.fov * 0.5); 151 | uULen = uVLen * (width / height); 152 | } 153 | 154 | // setup GUI 155 | function init_GUI() 156 | { 157 | pixel_ResolutionObject = { 158 | pixel_Resolution: 0.75 159 | } 160 | 161 | quadLight_LocationObject = { 162 | QuadLight_Location: 'Ceiling' 163 | }; 164 | 165 | quadLight_RadiusObject = { 166 | QuadLight_Radius: 50 167 | } 168 | 169 | rightSphere_MaterialObject = { 170 | RSphere_MaterialPreset: 'Metal' 171 | } 172 | 173 | function handlePixelResolutionChange() 174 | { 175 | needChangePixelResolution = true; 176 | } 177 | 178 | function handleQuadLightLocationChange() 179 | { 180 | needChangeQuadLightLocation = true; 181 | } 182 | 183 | function handleQuadLightRadiusChange() 184 | { 185 | needChangeQuadLightRadius = true; 186 | } 187 | 188 | function handleRightSphereMaterialChange() 189 | { 190 | needChangeRightSphereMaterial = true; 191 | } 192 | 193 | gui = new dat.GUI(); 194 | 195 | pixel_ResolutionController = gui.add(pixel_ResolutionObject, 'pixel_Resolution', 0.5, 1.0, 0.05).onChange(handlePixelResolutionChange); 196 | 197 | quadLight_LocationController = gui.add(quadLight_LocationObject, 'QuadLight_Location', ['Ceiling', 198 | 'Right Wall', 'Left Wall', 'Floor', 'Front Wall', 'Back Wall']).onChange(handleQuadLightLocationChange); 199 | 200 | quadLight_RadiusController = gui.add(quadLight_RadiusObject, 'QuadLight_Radius', 5, 150, 1.0).onChange(handleQuadLightRadiusChange); 201 | 202 | rightSphere_MaterialController = gui.add(rightSphere_MaterialObject, 'RSphere_MaterialPreset', ['Transparent', 203 | 'Diffuse', 'ClearCoat_Diffuse', 'Metal']).onChange(handleRightSphereMaterialChange); 204 | 205 | } //end function init_GUI() 206 | 207 | init_GUI(); 208 | 209 | // setup the frame rate display (FPS) in the top-left corner 210 | container = document.getElementById('container'); 211 | 212 | stats = new Stats(); 213 | stats.domElement.style.position = 'absolute'; 214 | stats.domElement.style.top = '0px'; 215 | stats.domElement.style.cursor = "default"; 216 | stats.domElement.style.webkitUserSelect = "none"; 217 | stats.domElement.style.MozUserSelect = "none"; 218 | container.appendChild(stats.domElement); 219 | 220 | 221 | 222 | canvas = document.getElementById("renderCanvas"); 223 | 224 | engine = new BABYLON.Engine(canvas, true); 225 | 226 | 227 | // Create the scene space 228 | pathTracingScene = new BABYLON.Scene(engine); 229 
| 230 | // enable browser's mouse pointer lock feature, for free-look camera controlled by mouse movement 231 | pathTracingScene.onPointerDown = evt => 232 | { 233 | engine.enterPointerlock(); 234 | } 235 | 236 | // Add a camera to the scene and attach it to the canvas 237 | camera = new BABYLON.UniversalCamera("Camera", new BABYLON.Vector3(), pathTracingScene); 238 | camera.attachControl(canvas, true); 239 | 240 | uVLen = Math.tan(camera.fov * 0.5); 241 | uULen = uVLen * (engine.getRenderWidth() / engine.getRenderHeight()); 242 | 243 | // SCENE/DEMO-SPECIFIC PARAMETERS 244 | camera.position.set(0, -20, -120); 245 | camera.inertia = 0; 246 | camera.angularSensibility = 500; 247 | camFlightSpeed = 100; // scene specific, depending on scene size dimensions 248 | uApertureSize = 0.0; // aperture size at beginning of app 249 | uFocusDistance = 113.0; // initial focus distance from camera in scene - scene specific, depending on scene size dimensions 250 | const uEPS_intersect = 0.01; // value is scene-size dependent 251 | apertureChangeAmount = 1; // scene specific, depending on scene size dimensions 252 | focusDistChangeAmount = 1; // scene specific, depending on scene size dimensions 253 | uQuadLightPlaneSelectionNumber = 6; 254 | uQuadLightRadius = 50; 255 | uRightSphereMatType = 3; // enum number code for METAL material - demo starts off with this setting for right sphere 256 | 257 | oldCameraMatrix = new BABYLON.Matrix(); 258 | newCameraMatrix = new BABYLON.Matrix(); 259 | 260 | // must be instantiated here after scene has been created 261 | leftSphereTransformNode = new BABYLON.TransformNode(); 262 | rightSphereTransformNode = new BABYLON.TransformNode(); 263 | 264 | leftSphereTransformNode.position.set(-wallRadius * 0.45, -wallRadius + sphereRadius + 0.1, -wallRadius * 0.2); 265 | leftSphereTransformNode.scaling.set(sphereRadius, sphereRadius, sphereRadius); 266 | //leftSphereTransformNode.scaling.set(sphereRadius * 0.3, sphereRadius, sphereRadius); 267 | //leftSphereTransformNode.rotation.set(0, 0, Math.PI * 0.2); 268 | uLeftSphereInvMatrix.copyFrom(leftSphereTransformNode.getWorldMatrix()); 269 | uLeftSphereInvMatrix.invert(); 270 | 271 | rightSphereTransformNode.position.set(wallRadius * 0.45, -wallRadius + sphereRadius + 0.1, -wallRadius * 0.2); 272 | rightSphereTransformNode.scaling.set(sphereRadius, sphereRadius, sphereRadius); 273 | uRightSphereInvMatrix.copyFrom(rightSphereTransformNode.getWorldMatrix()); 274 | uRightSphereInvMatrix.invert(); 275 | 276 | let width = engine.getRenderWidth(), height = engine.getRenderHeight(); 277 | 278 | blueNoiseTexture = new BABYLON.Texture("./textures/BlueNoise_RGBA256.png", 279 | pathTracingScene, 280 | true, 281 | false, 282 | BABYLON.Constants.TEXTURE_NEAREST_SAMPLINGMODE, 283 | null, 284 | null, 285 | null, 286 | false, 287 | BABYLON.Constants.TEXTUREFORMAT_RGBA); 288 | 289 | 290 | 291 | const pathTracingRenderTarget = new BABYLON.RenderTargetTexture("pathTracingRenderTarget", { width, height }, pathTracingScene, false, false, 292 | BABYLON.Constants.TEXTURETYPE_FLOAT, false, BABYLON.Constants.TEXTURE_NEAREST_SAMPLINGMODE, false, false, false, 293 | BABYLON.Constants.TEXTUREFORMAT_RGBA); 294 | 295 | const screenCopyRenderTarget = new BABYLON.RenderTargetTexture("screenCopyRenderTarget", { width, height }, pathTracingScene, false, false, 296 | BABYLON.Constants.TEXTURETYPE_FLOAT, false, BABYLON.Constants.TEXTURE_NEAREST_SAMPLINGMODE, false, false, false, 297 | BABYLON.Constants.TEXTUREFORMAT_RGBA); 298 | 299 | const eRenderer = new 
BABYLON.EffectRenderer(engine); 300 | 301 | // SCREEN COPY EFFECT 302 | const screenCopyEffect = new BABYLON.EffectWrapper({ 303 | engine: engine, 304 | fragmentShader: BABYLON.Effect.ShadersStore["screenCopyFragmentShader"], 305 | uniformNames: [], 306 | samplerNames: ["pathTracedImageBuffer"], 307 | name: "screenCopyEffectWrapper" 308 | }); 309 | // add uniforms 310 | screenCopyEffect.onApplyObservable.add(() => 311 | { 312 | screenCopyEffect.effect.setTexture("pathTracedImageBuffer", pathTracingRenderTarget); 313 | }); 314 | 315 | // SCREEN OUTPUT EFFECT 316 | const screenOutputEffect = new BABYLON.EffectWrapper({ 317 | engine: engine, 318 | fragmentShader: BABYLON.Effect.ShadersStore["screenOutputFragmentShader"], 319 | uniformNames: ["uSampleCounter", "uOneOverSampleCounter", "uPixelEdgeSharpness", "uEdgeSharpenSpeed", "uFilterDecaySpeed", 320 | "uToneMappingExposure", "uSceneIsDynamic"], 321 | samplerNames: ["accumulationBuffer"], 322 | name: "screenOutputEffectWrapper" 323 | }); 324 | // add uniforms 325 | screenOutputEffect.onApplyObservable.add(() => 326 | { 327 | screenOutputEffect.effect.setTexture("accumulationBuffer", pathTracingRenderTarget); 328 | screenOutputEffect.effect.setFloat("uSampleCounter", uSampleCounter); 329 | screenOutputEffect.effect.setFloat("uOneOverSampleCounter", uOneOverSampleCounter); 330 | screenOutputEffect.effect.setFloat("uPixelEdgeSharpness", uPixelEdgeSharpness); 331 | screenOutputEffect.effect.setFloat("uEdgeSharpenSpeed", uEdgeSharpenSpeed); 332 | screenOutputEffect.effect.setFloat("uFilterDecaySpeed", uFilterDecaySpeed); 333 | screenOutputEffect.effect.setFloat("uToneMappingExposure", uToneMappingExposure); 334 | screenOutputEffect.effect.setBool("uSceneIsDynamic", uSceneIsDynamic); 335 | }); 336 | 337 | // MAIN PATH TRACING EFFECT 338 | const pathTracingEffect = new BABYLON.EffectWrapper({ 339 | engine: engine, 340 | fragmentShader: BABYLON.Effect.ShadersStore["pathTracingFragmentShader"], 341 | uniformNames: ["uResolution", "uRandomVec2", "uULen", "uVLen", "uTime", "uFrameCounter", "uSampleCounter", "uPreviousSampleCount", "uEPS_intersect", "uCameraMatrix", "uApertureSize", 342 | "uFocusDistance", "uCameraIsMoving", "uLeftSphereInvMatrix", "uRightSphereInvMatrix", "uQuadLightPlaneSelectionNumber", "uQuadLightRadius", "uRightSphereMatType"], 343 | samplerNames: ["previousBuffer", "blueNoiseTexture"], 344 | name: "pathTracingEffectWrapper" 345 | }); 346 | // add uniforms 347 | pathTracingEffect.onApplyObservable.add(() => 348 | { 349 | pathTracingEffect.effect.setTexture("previousBuffer", screenCopyRenderTarget); 350 | pathTracingEffect.effect.setTexture("blueNoiseTexture", blueNoiseTexture); 351 | pathTracingEffect.effect.setFloat2("uResolution", pathTracingRenderTarget.getSize().width, pathTracingRenderTarget.getSize().height); 352 | pathTracingEffect.effect.setFloat2("uRandomVec2", uRandomVec2.x, uRandomVec2.y); 353 | pathTracingEffect.effect.setFloat("uULen", uULen); 354 | pathTracingEffect.effect.setFloat("uVLen", uVLen); 355 | pathTracingEffect.effect.setFloat("uTime", uTime); 356 | pathTracingEffect.effect.setFloat("uFrameCounter", uFrameCounter); 357 | pathTracingEffect.effect.setFloat("uSampleCounter", uSampleCounter); 358 | pathTracingEffect.effect.setFloat("uPreviousSampleCount", uPreviousSampleCount); 359 | pathTracingEffect.effect.setFloat("uEPS_intersect", uEPS_intersect); 360 | pathTracingEffect.effect.setFloat("uApertureSize", uApertureSize); 361 | pathTracingEffect.effect.setFloat("uFocusDistance", uFocusDistance); 362 | 
pathTracingEffect.effect.setFloat("uQuadLightPlaneSelectionNumber", uQuadLightPlaneSelectionNumber); 363 | pathTracingEffect.effect.setFloat("uQuadLightRadius", uQuadLightRadius); 364 | pathTracingEffect.effect.setInt("uRightSphereMatType", uRightSphereMatType); 365 | pathTracingEffect.effect.setBool("uCameraIsMoving", uCameraIsMoving); 366 | pathTracingEffect.effect.setMatrix("uCameraMatrix", camera.getWorldMatrix()); 367 | pathTracingEffect.effect.setMatrix("uLeftSphereInvMatrix", uLeftSphereInvMatrix); 368 | pathTracingEffect.effect.setMatrix("uRightSphereInvMatrix", uRightSphereInvMatrix); 369 | }); 370 | 371 | function getElapsedTimeInSeconds() 372 | { 373 | timeInSeconds += (engine.getDeltaTime() * 0.001); 374 | return timeInSeconds; 375 | } 376 | 377 | 378 | // Register a render loop to repeatedly render the scene 379 | engine.runRenderLoop(function () 380 | { 381 | 382 | // first, reset cameraIsMoving flag 383 | uCameraIsMoving = false; 384 | 385 | if (beginningFlag && uSampleCounter == 1) 386 | { 387 | pixel_ResolutionController.setValue(0.75); 388 | beginningFlag = false; 389 | } 390 | 391 | 392 | // if GUI has been used, update 393 | 394 | if (needChangePixelResolution) 395 | { 396 | engine.setHardwareScalingLevel(1.0 / pixel_ResolutionController.getValue()); 397 | 398 | handleWindowResize(); 399 | 400 | needChangePixelResolution = false; 401 | } 402 | 403 | if (needChangeQuadLightLocation) 404 | { 405 | if (quadLight_LocationController.getValue() == 'Right Wall') 406 | { 407 | uQuadLightPlaneSelectionNumber = 1; 408 | } 409 | else if (quadLight_LocationController.getValue() == 'Left Wall') 410 | { 411 | uQuadLightPlaneSelectionNumber = 2; 412 | } 413 | else if (quadLight_LocationController.getValue() == 'Front Wall') 414 | { 415 | uQuadLightPlaneSelectionNumber = 3; 416 | } 417 | else if (quadLight_LocationController.getValue() == 'Back Wall') 418 | { 419 | uQuadLightPlaneSelectionNumber = 4; 420 | } 421 | else if (quadLight_LocationController.getValue() == 'Floor') 422 | { 423 | uQuadLightPlaneSelectionNumber = 5; 424 | } 425 | else if (quadLight_LocationController.getValue() == 'Ceiling') 426 | { 427 | uQuadLightPlaneSelectionNumber = 6; 428 | } 429 | 430 | uCameraIsMoving = true; 431 | needChangeQuadLightLocation = false; 432 | } 433 | 434 | if (needChangeQuadLightRadius) 435 | { 436 | uQuadLightRadius = quadLight_RadiusController.getValue(); 437 | 438 | uCameraIsMoving = true; 439 | needChangeQuadLightRadius = false; 440 | } 441 | 442 | if (needChangeRightSphereMaterial) 443 | { 444 | if (rightSphere_MaterialController.getValue() == 'Transparent') 445 | { 446 | uRightSphereMatType = 2;// enum number code for TRANSPARENT material 447 | } 448 | else if (rightSphere_MaterialController.getValue() == 'Diffuse') 449 | { 450 | uRightSphereMatType = 1;// enum number code for DIFFUSE material 451 | } 452 | else if (rightSphere_MaterialController.getValue() == 'ClearCoat_Diffuse') 453 | { 454 | uRightSphereMatType = 4;// enum number code for CLEARCOAT_DIFFUSE material 455 | } 456 | else if (rightSphere_MaterialController.getValue() == 'Metal') 457 | { 458 | uRightSphereMatType = 3;// enum number code for METAL material 459 | } 460 | 461 | uCameraIsMoving = true; 462 | needChangeRightSphereMaterial = false; 463 | } 464 | 465 | 466 | // check for pointerLock state and add or remove keyboard listeners 467 | if (isPaused && engine.isPointerLock) 468 | { 469 | document.addEventListener('keydown', onKeyDown, false); 470 | document.addEventListener('keyup', onKeyUp, false); 471 | isPaused = 
false; 472 | } 473 | if (!isPaused && !engine.isPointerLock) 474 | { 475 | document.removeEventListener('keydown', onKeyDown, false); 476 | document.removeEventListener('keyup', onKeyUp, false); 477 | isPaused = true; 478 | } 479 | 480 | 481 | if (windowIsBeingResized) 482 | { 483 | uCameraIsMoving = true; 484 | windowIsBeingResized = false; 485 | } 486 | 487 | uTime = getElapsedTimeInSeconds(); 488 | 489 | frameTime = engine.getDeltaTime() * 0.001; 490 | 491 | uRandomVec2.set(Math.random(), Math.random()); 492 | 493 | // my own optimized way of telling if the camera has moved or not 494 | newCameraMatrix.copyFrom(camera.getWorldMatrix()); 495 | nm = newCameraMatrix.m; 496 | om = oldCameraMatrix.m; 497 | if ( nm[0] != om[0] || nm[1] != om[1] || nm[2] != om[2] || nm[3] != om[3] || 498 | nm[4] != om[4] || nm[5] != om[5] || nm[6] != om[6] || nm[7] != om[7] || 499 | nm[8] != om[8] || nm[9] != om[9] || nm[10] != om[10] || nm[11] != om[11] || 500 | nm[12] != om[12] || nm[13] != om[13] || nm[14] != om[14] || nm[15] != om[15] ) 501 | { 502 | uCameraIsMoving = true; 503 | } 504 | // save camera state for next frame's comparison 505 | oldCameraMatrix.copyFrom(newCameraMatrix); 506 | 507 | // get current camera orientation basis vectors 508 | cameraDirectionVector.set(nm[8], nm[9], nm[10]); 509 | cameraDirectionVector.normalize(); 510 | cameraUpVector.set(nm[4], nm[5], nm[6]); 511 | cameraUpVector.normalize(); 512 | cameraRightVector.set(nm[0], nm[1], nm[2]); 513 | cameraRightVector.normalize(); 514 | 515 | // check for user input 516 | if (keyPressed('KeyW') && !keyPressed('KeyS')) 517 | { 518 | camera.position.addInPlace(cameraDirectionVector.scaleToRef(camFlightSpeed * frameTime, cameraDirectionVector)); 519 | } 520 | if (keyPressed('KeyS') && !keyPressed('KeyW')) 521 | { 522 | camera.position.subtractInPlace(cameraDirectionVector.scaleToRef(camFlightSpeed * frameTime, cameraDirectionVector)); 523 | } 524 | if (keyPressed('KeyA') && !keyPressed('KeyD')) 525 | { 526 | camera.position.subtractInPlace(cameraRightVector.scaleToRef(camFlightSpeed * frameTime, cameraRightVector)); 527 | } 528 | if (keyPressed('KeyD') && !keyPressed('KeyA')) 529 | { 530 | camera.position.addInPlace(cameraRightVector.scaleToRef(camFlightSpeed * frameTime, cameraRightVector)); 531 | } 532 | if (keyPressed('KeyE') && !keyPressed('KeyQ')) 533 | { 534 | camera.position.addInPlace(cameraUpVector.scaleToRef(camFlightSpeed * frameTime, cameraUpVector)); 535 | } 536 | if (keyPressed('KeyQ') && !keyPressed('KeyE')) 537 | { 538 | camera.position.subtractInPlace(cameraUpVector.scaleToRef(camFlightSpeed * frameTime, cameraUpVector)); 539 | } 540 | 541 | if (keyPressed('Equal') && !keyPressed('Minus')) 542 | { 543 | uFocusDistance += focusDistChangeAmount; 544 | uCameraIsMoving = true; 545 | } 546 | if (keyPressed('Minus') && !keyPressed('Equal')) 547 | { 548 | uFocusDistance -= focusDistChangeAmount; 549 | if (uFocusDistance < 1) 550 | uFocusDistance = 1; 551 | uCameraIsMoving = true; 552 | } 553 | if (keyPressed('BracketRight') && !keyPressed('BracketLeft')) 554 | { 555 | uApertureSize += apertureChangeAmount; 556 | if (uApertureSize > 100000.0) 557 | uApertureSize = 100000.0; 558 | uCameraIsMoving = true; 559 | } 560 | if (keyPressed('BracketLeft') && !keyPressed('BracketRight')) 561 | { 562 | uApertureSize -= apertureChangeAmount; 563 | if (uApertureSize < 0.0) 564 | uApertureSize = 0.0; 565 | uCameraIsMoving = true; 566 | } 567 | 568 | 569 | // now update uniforms that are common to all scenes 570 | if (increaseFOV) 571 | { 572 | 
camera.fov += (Math.PI / 180); 573 | if (camera.fov > 150 * (Math.PI / 180)) 574 | camera.fov = 150 * (Math.PI / 180); 575 | 576 | uVLen = Math.tan(camera.fov * 0.5); 577 | uULen = uVLen * (width / height); 578 | 579 | uCameraIsMoving = true; 580 | increaseFOV = false; 581 | } 582 | if (decreaseFOV) 583 | { 584 | camera.fov -= (Math.PI / 180); 585 | if (camera.fov < 1 * (Math.PI / 180)) 586 | camera.fov = 1 * (Math.PI / 180); 587 | 588 | uVLen = Math.tan(camera.fov * 0.5); 589 | uULen = uVLen * (width / height); 590 | 591 | uCameraIsMoving = true; 592 | decreaseFOV = false; 593 | } 594 | 595 | 596 | if (!uCameraIsMoving) 597 | { 598 | if (uSceneIsDynamic) 599 | uSampleCounter = 1.0; // reset for continuous updating of image 600 | else uSampleCounter += 1.0; // for progressive refinement of image 601 | 602 | uFrameCounter += 1.0; 603 | 604 | cameraRecentlyMoving = false; 605 | } 606 | 607 | if (uCameraIsMoving) 608 | { 609 | uFrameCounter += 1.0; 610 | 611 | if (!cameraRecentlyMoving) 612 | { 613 | // record current uSampleCounter value before it gets set to 1.0 below 614 | uPreviousSampleCount = uSampleCounter; 615 | uFrameCounter = 1.0; 616 | cameraRecentlyMoving = true; 617 | } 618 | 619 | uSampleCounter = 1.0; 620 | } 621 | 622 | uOneOverSampleCounter = 1.0 / uSampleCounter; 623 | 624 | 625 | // CAMERA INFO 626 | cameraInfoElement.innerHTML = "FOV( mousewheel ): " + (camera.fov * 180 / Math.PI).toFixed(0) + "
" + "Aperture( [ and ] ): " + uApertureSize.toFixed(1) + 627 | "
" + "FocusDistance( - and + ): " + uFocusDistance.toFixed(0) + "
" + "Samples: " + uSampleCounter; 628 | 629 | 630 | // the following is necessary to update the user's world camera movement - should take no time at all 631 | pathTracingScene.render(); 632 | // now for the heavy lifter, the bulk of the frame time 633 | eRenderer.render(pathTracingEffect, pathTracingRenderTarget); 634 | // then simply copy(store) what the pathTracer just calculated - should take no time at all 635 | eRenderer.render(screenCopyEffect, screenCopyRenderTarget); 636 | // finally take the accumulated pathTracingRenderTarget buffer and average by numberOfSamples taken, then apply Reinhard tonemapping (brings image into friendly 0.0-1.0 rgb color float range), 637 | // and lastly raise to the power of (0.4545), in order to make gamma correction (gives more brightness range where it counts). This last step should also take minimal time 638 | eRenderer.render(screenOutputEffect, null); // null, because we don't feed this non-linear image-processed output back into the pathTracing accumulation buffer as it would 'pollute' the pathtracing unbounded linear color space 639 | 640 | stats.update(); 641 | 642 | }); // end engine.runRenderLoop(function () 643 | -------------------------------------------------------------------------------- /js/HDRIEnvironmentPathTracing_FragmentShader.js: -------------------------------------------------------------------------------- 1 | BABYLON.Effect.ShadersStore["pathTracingFragmentShader"] = ` 2 | #version 300 es 3 | 4 | precision highp float; 5 | precision highp int; 6 | precision highp sampler2D; 7 | 8 | // Demo-specific Uniforms 9 | uniform sampler2D tAABBTexture; 10 | uniform sampler2D tTriangleTexture; 11 | uniform sampler2D tAlbedoTexture; 12 | uniform sampler2D tBumpTexture; 13 | uniform sampler2D tMetallicTexture; 14 | uniform sampler2D tEmissiveTexture; 15 | uniform sampler2D tHDRTexture; 16 | 17 | uniform mat4 uLeftSphereInvMatrix; 18 | uniform mat4 uRightSphereInvMatrix; 19 | uniform mat4 uGLTF_Model_InvMatrix; 20 | uniform vec3 uSunDirection; 21 | uniform float uHDRExposure; 22 | uniform float uSunPower; 23 | uniform int uModelMaterialType; 24 | uniform bool uModelUsesAlbedoTexture; 25 | uniform bool uModelUsesBumpTexture; 26 | uniform bool uModelUsesMetallicTexture; 27 | uniform bool uModelUsesEmissiveTexture; 28 | 29 | 30 | //#define INV_TEXTURE_WIDTH 0.000244140625 // (1 / 4096 texture width) 31 | //#define INV_TEXTURE_WIDTH 0.00048828125 // (1 / 2048 texture width) 32 | //#define INV_TEXTURE_WIDTH 0.0009765625 // (1 / 1024 texture width) 33 | 34 | #define INV_TEXTURE_WIDTH 0.00048828125 // (1 / 2048 texture width) 35 | 36 | // demo/scene-specific setup 37 | #define N_QUADS 4 // ceiling quad and quad area light are removed for this demo 38 | #define N_SPHERES 2 39 | 40 | struct UnitSphere { vec3 color; int type; }; 41 | struct Quad { vec3 normal; vec3 v0; vec3 v1; vec3 v2; vec3 v3; vec3 color; int type; }; 42 | 43 | Quad quads[N_QUADS]; 44 | UnitSphere spheres[N_SPHERES]; 45 | 46 | // the camera ray for this pixel (global variables) 47 | vec3 rayOrigin, rayDirection; 48 | 49 | 50 | // all required includes go here: 51 | 52 | #include // required on all scenes 53 | 54 | #include // required on all scenes 55 | 56 | #include // required on all scenes 57 | 58 | #include // required on scenes with any math-geometry shapes like sphere, cylinder, cone, etc. 
59 | 60 | #include // required on scenes with unit spheres that will be translated, rotated, and scaled by their matrix transform 61 | 62 | #include // required on scenes with quads (actually internally they are made up of 2 triangles) 63 | 64 | #include // required on scenes containing a BVH for models in gltf/glb format 65 | 66 | #include // required on scenes containing triangular models in gltf/glb format 67 | 68 | #include // required on scenes containing triangular models in gltf/glb format, and that need transparency effects 69 | 70 | 71 | 72 | vec3 perturbNormal(vec3 nl, vec2 normalScale, vec2 uv) 73 | { 74 | vec3 S = normalize( cross( abs(nl.y) < 0.9 ? vec3(0, 1, 0) : vec3(0, 0,-1), nl ) ); 75 | vec3 T = cross(nl, S); 76 | vec3 N = normalize( nl ); 77 | // invert S, T when the UV direction is backwards (from mirrored faces), 78 | // otherwise it will do the normal mapping backwards. 79 | vec3 NfromST = cross( S, T ); 80 | if( dot( NfromST, N ) < 0.0 ) 81 | { 82 | S *= -1.0; 83 | T *= -1.0; 84 | } 85 | mat3 tsn = mat3( S, T, N ); 86 | 87 | vec3 mapN = texture(tBumpTexture, uv).xyz * 2.0 - 1.0; 88 | mapN = normalize(mapN); 89 | mapN.xy *= normalScale; 90 | 91 | return normalize( tsn * mapN ); 92 | } 93 | 94 | 95 | vec2 stackLevels[28]; 96 | 97 | //vec4 boxNodeData0 corresponds to .x = idTriangle, .y = aabbMin.x, .z = aabbMin.y, .w = aabbMin.z 98 | //vec4 boxNodeData1 corresponds to .x = idRightChild .y = aabbMax.x, .z = aabbMax.y, .w = aabbMax.z 99 | 100 | void GetBoxNodeData(const in float i, inout vec4 boxNodeData0, inout vec4 boxNodeData1) 101 | { 102 | // each bounding box's data is encoded in 2 rgba(or xyzw) texture slots 103 | float ix2 = i * 2.0; 104 | // (ix2 + 0.0) corresponds to .x = idTriangle, .y = aabbMin.x, .z = aabbMin.y, .w = aabbMin.z 105 | // (ix2 + 1.0) corresponds to .x = idRightChild .y = aabbMax.x, .z = aabbMax.y, .w = aabbMax.z 106 | 107 | ivec2 uv0 = ivec2( mod(ix2 + 0.0, 2048.0), (ix2 + 0.0) * INV_TEXTURE_WIDTH ); // data0 108 | ivec2 uv1 = ivec2( mod(ix2 + 1.0, 2048.0), (ix2 + 1.0) * INV_TEXTURE_WIDTH ); // data1 109 | 110 | boxNodeData0 = texelFetch(tAABBTexture, uv0, 0); 111 | boxNodeData1 = texelFetch(tAABBTexture, uv1, 0); 112 | } 113 | 114 | 115 | //------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- 116 | float SceneIntersect( vec3 rayOrigin, vec3 rayDirection, out vec3 hitNormal, out vec3 hitEmission, out vec3 hitColor, out vec2 hitUV, out int hitType, out float hitObjectID ) 117 | //------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- 118 | { 119 | vec4 currentBoxNodeData0, nodeAData0, nodeBData0, tmpNodeData0; 120 | vec4 currentBoxNodeData1, nodeAData1, nodeBData1, tmpNodeData1; 121 | 122 | vec4 vd0, vd1, vd2, vd3, vd4, vd5, vd6, vd7; 123 | 124 | vec3 inverseDir;// = 1.0 / rayDirection; // will be calculated later, after ray has been transformed to glTF model's object space 125 | vec3 hit, n; 126 | vec3 rObjOrigin, rObjDirection; 127 | 128 | vec2 currentStackData, stackDataA, stackDataB, tmpStackData; 129 | ivec2 uv0, uv1, uv2, uv3, uv4, uv5, uv6, uv7; 130 | 131 | float t, d; 132 | float stackptr = 0.0; 133 | float bc, bd; 134 | float id = 0.0; 135 | float tu, tv; 136 | float triangleID = 0.0; 137 | float triangleU = 0.0; 138 | float triangleV = 0.0; 139 | float triangleW = 0.0; 140 
| 141 | int objectCount = 0; 142 | 143 | int skip = FALSE; 144 | int triangleLookupNeeded = FALSE; 145 | 146 | // initialize hit record 147 | t = INFINITY; 148 | hitType = -100; 149 | hitObjectID = -INFINITY; 150 | 151 | // transform ray into Left Sphere's object space 152 | rObjOrigin = vec3( uLeftSphereInvMatrix * vec4(rayOrigin, 1.0) ); 153 | rObjDirection = vec3( uLeftSphereInvMatrix * vec4(rayDirection, 0.0) ); 154 | 155 | d = UnitSphereIntersect( rObjOrigin, rObjDirection, n ); 156 | 157 | if (d < t) 158 | { 159 | t = d; 160 | hitNormal = transpose(mat3(uLeftSphereInvMatrix)) * n; 161 | hitColor = spheres[0].color; 162 | hitType = spheres[0].type; 163 | hitObjectID = float(objectCount); 164 | } 165 | objectCount++; 166 | 167 | // transform ray into Right Sphere's object space 168 | rObjOrigin = vec3( uRightSphereInvMatrix * vec4(rayOrigin, 1.0) ); 169 | rObjDirection = vec3( uRightSphereInvMatrix * vec4(rayDirection, 0.0) ); 170 | 171 | d = UnitSphereIntersect( rObjOrigin, rObjDirection, n ); 172 | 173 | if (d < t) 174 | { 175 | t = d; 176 | hitNormal = transpose(mat3(uRightSphereInvMatrix)) * n; 177 | hitColor = spheres[1].color; 178 | hitType = spheres[1].type; 179 | hitObjectID = float(objectCount); 180 | } 181 | objectCount++; 182 | 183 | for (int i = 0; i < N_QUADS; i++) 184 | { 185 | d = QuadIntersect( quads[i].v0, quads[i].v1, quads[i].v2, quads[i].v3, rayOrigin, rayDirection, FALSE ); 186 | 187 | if (d < t) 188 | { 189 | t = d; 190 | hitNormal = quads[i].normal; 191 | hitColor = quads[i].color; 192 | hitType = quads[i].type; 193 | hitObjectID = float(objectCount); 194 | } 195 | 196 | objectCount++; 197 | } 198 | 199 | // transform ray into GLTF_Model's object space 200 | rayOrigin = vec3( uGLTF_Model_InvMatrix * vec4(rayOrigin, 1.0) ); 201 | rayDirection = vec3( uGLTF_Model_InvMatrix * vec4(rayDirection, 0.0) ); 202 | inverseDir = 1.0 / rayDirection; // inverseDir must be re-calculated, now that we are in model's object space 203 | 204 | GetBoxNodeData(stackptr, currentBoxNodeData0, currentBoxNodeData1); 205 | currentStackData = vec2(stackptr, BoundingBoxIntersect(currentBoxNodeData0.yzw, currentBoxNodeData1.yzw, rayOrigin, inverseDir)); 206 | stackLevels[0] = currentStackData; 207 | skip = (currentStackData.y < t) ? 
TRUE : FALSE; 208 | 209 | while (true) 210 | { 211 | if (skip == FALSE) 212 | { 213 | // decrease pointer by 1 (0.0 is root level, 27.0 is maximum depth) 214 | if (--stackptr < 0.0) // went past the root level, terminate loop 215 | break; 216 | 217 | currentStackData = stackLevels[int(stackptr)]; 218 | 219 | if (currentStackData.y >= t) 220 | continue; 221 | 222 | GetBoxNodeData(currentStackData.x, currentBoxNodeData0, currentBoxNodeData1); 223 | } 224 | skip = FALSE; // reset skip 225 | 226 | 227 | if (currentBoxNodeData0.x < 0.0) // < 0.0 signifies an inner node 228 | { 229 | GetBoxNodeData(currentStackData.x + 1.0, nodeAData0, nodeAData1); 230 | GetBoxNodeData(currentBoxNodeData1.x, nodeBData0, nodeBData1); 231 | stackDataA = vec2(currentStackData.x + 1.0, BoundingBoxIntersect(nodeAData0.yzw, nodeAData1.yzw, rayOrigin, inverseDir)); 232 | stackDataB = vec2(currentBoxNodeData1.x, BoundingBoxIntersect(nodeBData0.yzw, nodeBData1.yzw, rayOrigin, inverseDir)); 233 | 234 | // first sort the branch node data so that 'a' is the smallest 235 | if (stackDataB.y < stackDataA.y) 236 | { 237 | tmpStackData = stackDataB; 238 | stackDataB = stackDataA; 239 | stackDataA = tmpStackData; 240 | 241 | tmpNodeData0 = nodeBData0; tmpNodeData1 = nodeBData1; 242 | nodeBData0 = nodeAData0; nodeBData1 = nodeAData1; 243 | nodeAData0 = tmpNodeData0; nodeAData1 = tmpNodeData1; 244 | } // branch 'b' now holds the larger rayT value of the two 245 | 246 | if (stackDataB.y < t) // see if branch 'b' (the larger rayT) needs to be processed 247 | { 248 | currentStackData = stackDataB; 249 | currentBoxNodeData0 = nodeBData0; 250 | currentBoxNodeData1 = nodeBData1; 251 | skip = TRUE; // this will prevent the stackptr from decreasing by 1 252 | } 253 | if (stackDataA.y < t) // see if branch 'a' (the smaller rayT) needs to be processed 254 | { 255 | if (skip == TRUE) // if the larger branch 'b' also needs to be processed, 256 | stackLevels[int(stackptr++)] = stackDataB; // queue larger branch 'b' for a future round 257 | // also, increase pointer by 1 258 | 259 | currentStackData = stackDataA; 260 | currentBoxNodeData0 = nodeAData0; 261 | currentBoxNodeData1 = nodeAData1; 262 | skip = TRUE; // this will prevent the stackptr from decreasing by 1 263 | } 264 | 265 | continue; 266 | } // end if (currentBoxNodeData0.x < 0.0) // inner node 267 | 268 | 269 | // else this is a leaf 270 | 271 | // each triangle's data is encoded in 8 rgba(or xyzw) texture slots 272 | id = 8.0 * currentBoxNodeData0.x; 273 | 274 | uv0 = ivec2( mod(id + 0.0, 2048.0), (id + 0.0) * INV_TEXTURE_WIDTH ); 275 | uv1 = ivec2( mod(id + 1.0, 2048.0), (id + 1.0) * INV_TEXTURE_WIDTH ); 276 | uv2 = ivec2( mod(id + 2.0, 2048.0), (id + 2.0) * INV_TEXTURE_WIDTH ); 277 | 278 | vd0 = texelFetch(tTriangleTexture, uv0, 0); 279 | vd1 = texelFetch(tTriangleTexture, uv1, 0); 280 | vd2 = texelFetch(tTriangleTexture, uv2, 0); 281 | 282 | if (!uModelUsesAlbedoTexture && uModelMaterialType == TRANSPARENT) 283 | d = BVH_DoubleSidedTriangleIntersect( vec3(vd0.xyz), vec3(vd0.w, vd1.xy), vec3(vd1.zw, vd2.x), rayOrigin, rayDirection, tu, tv ); 284 | else 285 | d = BVH_TriangleIntersect( vec3(vd0.xyz), vec3(vd0.w, vd1.xy), vec3(vd1.zw, vd2.x), rayOrigin, rayDirection, tu, tv ); 286 | 287 | if (d < t) 288 | { 289 | t = d; 290 | triangleID = id; 291 | triangleU = tu; 292 | triangleV = tv; 293 | triangleLookupNeeded = TRUE; 294 | } 295 | 296 | } // end while (true) 297 | 298 | 299 | 300 | if (triangleLookupNeeded == TRUE) 301 | { 302 | uv0 = ivec2( mod(triangleID + 0.0, 2048.0), (triangleID + 
0.0) * INV_TEXTURE_WIDTH ); 303 | uv1 = ivec2( mod(triangleID + 1.0, 2048.0), (triangleID + 1.0) * INV_TEXTURE_WIDTH ); 304 | uv2 = ivec2( mod(triangleID + 2.0, 2048.0), (triangleID + 2.0) * INV_TEXTURE_WIDTH ); 305 | uv3 = ivec2( mod(triangleID + 3.0, 2048.0), (triangleID + 3.0) * INV_TEXTURE_WIDTH ); 306 | uv4 = ivec2( mod(triangleID + 4.0, 2048.0), (triangleID + 4.0) * INV_TEXTURE_WIDTH ); 307 | uv5 = ivec2( mod(triangleID + 5.0, 2048.0), (triangleID + 5.0) * INV_TEXTURE_WIDTH ); 308 | uv6 = ivec2( mod(triangleID + 6.0, 2048.0), (triangleID + 6.0) * INV_TEXTURE_WIDTH ); 309 | uv7 = ivec2( mod(triangleID + 7.0, 2048.0), (triangleID + 7.0) * INV_TEXTURE_WIDTH ); 310 | 311 | // the complete vertex data for each individual triangle consumes 8 rgba texture slots on the GPU data texture 312 | // also, packing/padding the vertex data into 8 texels ensures 8-boundary alignments (power of 2) which is more memory-access friendly 313 | vd0 = texelFetch(tTriangleTexture, uv0, 0); // rgb: vertex0 position xyz, a: vertex1 position x 314 | vd1 = texelFetch(tTriangleTexture, uv1, 0); // rg: vertex1(cont.) position yz, ba: vertex2 position xy 315 | vd2 = texelFetch(tTriangleTexture, uv2, 0); // r: vertex2(cont.) position z, gba: vertex0 normal xyz 316 | vd3 = texelFetch(tTriangleTexture, uv3, 0); // rgb: vertex1 normal xyz, a: vertex2 normal x 317 | vd4 = texelFetch(tTriangleTexture, uv4, 0); // rg: vertex2(cont.) normal yz, ba: vertex0 uv 318 | vd5 = texelFetch(tTriangleTexture, uv5, 0); // rg: vertex1 uv, ba: vertex2 uv 319 | vd6 = texelFetch(tTriangleTexture, uv6, 0); // rgb: triangle material rgb color, a: triangle material type id (enum) 320 | vd7 = texelFetch(tTriangleTexture, uv7, 0); // rgba: (reserved for future PBR material extra properties) 321 | 322 | // face normal for flat-shaded polygon look 323 | //hitNormal = normalize( cross(vec3(vd0.w, vd1.xy) - vec3(vd0.xyz), vec3(vd1.zw, vd2.x) - vec3(vd0.xyz)) ); 324 | 325 | // interpolated normal using triangle intersection's uv's 326 | triangleW = 1.0 - triangleU - triangleV; 327 | n = normalize(triangleW * vec3(vd2.yzw) + triangleU * vec3(vd3.xyz) + triangleV * vec3(vd3.w, vd4.xy)); 328 | hitUV = triangleW * vec2(vd4.zw) + triangleU * vec2(vd5.xy) + triangleV * vec2(vd5.zw); 329 | n = uModelUsesBumpTexture ? perturbNormal(n, vec2(1.0, 1.0), hitUV) : n; 330 | // transform normal back into world space 331 | hitNormal = transpose(mat3(uGLTF_Model_InvMatrix)) * n; 332 | 333 | //hitType = int(vd6.x); 334 | hitType = uModelUsesAlbedoTexture ? 
PBR_MATERIAL : uModelMaterialType; 335 | 336 | hitColor = vec3(1);//vd6.yzw; 337 | 338 | //hitTextureID = int(vd7.x); 339 | //hitTextureID = -1; 340 | 341 | hitObjectID = float(objectCount); 342 | } // if (triangleLookupNeeded == TRUE) 343 | 344 | return t; 345 | 346 | } // end float SceneIntersect( vec3 rayOrigin, vec3 rayDirection, out vec3 hitNormal, out vec3 hitEmission, out vec3 hitColor, out vec2 hitUV, out int hitType, out float hitObjectID ) 347 | 348 | 349 | 350 | 351 | vec3 Get_HDR_Color(vec3 rayDirection) 352 | { 353 | vec2 sampleUV; 354 | sampleUV.x = atan(rayDirection.x, rayDirection.z) * ONE_OVER_TWO_PI + 0.5; 355 | sampleUV.y = acos(-rayDirection.y) * ONE_OVER_PI; 356 | 357 | vec3 texColor = texture(tHDRTexture, sampleUV).rgb; 358 | 359 | return texColor * uHDRExposure; 360 | } 361 | 362 | //----------------------------------------------------------------------------------------------------------------------------- 363 | vec3 CalculateRadiance( out vec3 objectNormal, out vec3 objectColor, out float objectID, out float pixelSharpness ) 364 | //----------------------------------------------------------------------------------------------------------------------------- 365 | { 366 | // recorded intersection data: 367 | vec3 hitNormal, hitEmission, hitColor; 368 | vec2 hitUV; 369 | float t, hitObjectID; 370 | int hitType, hitTextureID; 371 | 372 | vec3 accumCol = vec3(0); 373 | vec3 mask = vec3(1); 374 | vec3 reflectionMask = vec3(1); 375 | vec3 reflectionRayOrigin = vec3(0); 376 | vec3 reflectionRayDirection = vec3(0); 377 | vec3 dirToLight; 378 | vec3 tdir; 379 | vec3 x, n, nl; 380 | vec3 absorptionCoefficient; 381 | vec3 metallicRoughness = vec3(0); 382 | vec3 emission = vec3(0); 383 | 384 | float nc, nt, ratioIoR, Re, Tr; 385 | //float P, RP, TP; 386 | float weight; 387 | float thickness = 0.05; 388 | float scatteringDistance; 389 | float maxEmission = 0.0; 390 | 391 | int diffuseCount = 0; 392 | int previousIntersecType = -100; 393 | hitType = -100; 394 | 395 | int coatTypeIntersected = FALSE; 396 | int bounceIsSpecular = TRUE; 397 | int sampleLight = FALSE; 398 | int willNeedReflectionRay = FALSE; 399 | 400 | 401 | for (int bounces = 0; bounces < 6; bounces++) 402 | { 403 | previousIntersecType = hitType; 404 | 405 | t = SceneIntersect(rayOrigin, rayDirection, hitNormal, hitEmission, hitColor, hitUV, hitType, hitObjectID); 406 | 407 | 408 | if (t == INFINITY) 409 | { 410 | vec3 environmentColor = Get_HDR_Color(rayDirection); 411 | 412 | if (bounces == 0) 413 | { 414 | pixelSharpness = 1.01; 415 | 416 | accumCol += environmentColor; 417 | break; 418 | } 419 | else if (diffuseCount == 0 && bounceIsSpecular == TRUE) 420 | { 421 | if (coatTypeIntersected == TRUE) 422 | { 423 | if (dot(rayDirection, uSunDirection) > 0.995) 424 | pixelSharpness = 1.01; 425 | } 426 | else 427 | pixelSharpness = 1.01; 428 | 429 | accumCol += mask * environmentColor; 430 | } 431 | else if (sampleLight == TRUE) 432 | { 433 | accumCol += mask * environmentColor; 434 | } 435 | else if (diffuseCount == 1 && previousIntersecType == TRANSPARENT && bounceIsSpecular == TRUE && bounces < 3) 436 | { 437 | if (dot(rayDirection, uSunDirection) > 0.99) 438 | pixelSharpness = 1.01; 439 | accumCol += mask * environmentColor; 440 | } 441 | else if (diffuseCount > 0) 442 | { 443 | weight = dot(rayDirection, uSunDirection) < 0.99 ? 
1.0 : 0.0; 444 | accumCol += mask * environmentColor * weight; 445 | } 446 | 447 | if (willNeedReflectionRay == TRUE) 448 | { 449 | mask = reflectionMask; 450 | rayOrigin = reflectionRayOrigin; 451 | rayDirection = reflectionRayDirection; 452 | 453 | willNeedReflectionRay = FALSE; 454 | bounceIsSpecular = TRUE; 455 | sampleLight = FALSE; 456 | diffuseCount = 0; 457 | continue; 458 | } 459 | 460 | // reached the HDRI sky light, so we can exit 461 | break; 462 | } // end if (t == INFINITY) 463 | 464 | 465 | // useful data 466 | n = normalize(hitNormal); 467 | nl = dot(n, rayDirection) < 0.0 ? n : -n; 468 | x = rayOrigin + rayDirection * t; 469 | 470 | if (bounces == 0) 471 | { 472 | objectNormal = nl; 473 | objectColor = hitColor; 474 | objectID = hitObjectID; 475 | } 476 | if (bounces == 1 && previousIntersecType == METAL) 477 | { 478 | objectNormal = nl; 479 | } 480 | 481 | 482 | // if we get here and sampleLight is still TRUE, shadow ray failed to find the light source 483 | // the ray hit an occluding object along its way to the light 484 | if (sampleLight == TRUE) 485 | { 486 | if (willNeedReflectionRay == TRUE) 487 | { 488 | mask = reflectionMask; 489 | rayOrigin = reflectionRayOrigin; 490 | rayDirection = reflectionRayDirection; 491 | 492 | willNeedReflectionRay = FALSE; 493 | bounceIsSpecular = TRUE; 494 | sampleLight = FALSE; 495 | diffuseCount = 0; 496 | continue; 497 | } 498 | 499 | break; 500 | } 501 | 502 | 503 | if (hitType == PBR_MATERIAL) 504 | { 505 | hitColor = texture(tAlbedoTexture, hitUV).rgb; 506 | hitColor = pow(hitColor, vec3(2.2)); 507 | 508 | emission = uModelUsesEmissiveTexture ? texture(tEmissiveTexture, hitUV).rgb : vec3(0); 509 | emission = pow(emission, vec3(2.2)); 510 | maxEmission = max(emission.r, max(emission.g, emission.b)); 511 | if (bounceIsSpecular == TRUE && maxEmission > 0.01) 512 | { 513 | pixelSharpness = 1.01; 514 | accumCol += mask * emission; 515 | break; 516 | } 517 | 518 | hitType = DIFFUSE; 519 | 520 | metallicRoughness = uModelUsesMetallicTexture ? texture(tMetallicTexture, hitUV).rgb : vec3(0); 521 | metallicRoughness = pow(metallicRoughness, vec3(2.2)); 522 | if (metallicRoughness.g > 0.01) // roughness 523 | { 524 | hitType = CLEARCOAT_DIFFUSE; 525 | } 526 | if (metallicRoughness.b > 0.01) // metalness 527 | { 528 | hitType = METAL; 529 | } 530 | 531 | } 532 | 533 | if (hitType == DIFFUSE) // Ideal diffuse reflection 534 | { 535 | diffuseCount++; 536 | 537 | mask *= hitColor; 538 | 539 | bounceIsSpecular = FALSE; 540 | 541 | if (diffuseCount <= 2 && blueNoise_rand() < 0.5) 542 | { 543 | mask *= 2.0; 544 | // choose random Diffuse sample vector 545 | rayDirection = randomCosWeightedDirectionInHemisphere(nl); 546 | rayOrigin = x + nl * uEPS_intersect; 547 | continue; 548 | } 549 | 550 | rayDirection = randomDirectionInSpecularLobe(uSunDirection, 0.05); // create shadow ray pointed towards light 551 | rayOrigin = x + nl * uEPS_intersect; 552 | 553 | weight = max(0.0, dot(rayDirection, nl)) * (uSunPower * uSunPower * 0.0000001); // down-weight directSunLight contribution 554 | mask *= diffuseCount <= 2 ? 
2.0 : 1.0; 555 | mask *= weight; 556 | 557 | sampleLight = TRUE; 558 | continue; 559 | 560 | } // end if (hitType == DIFFUSE) 561 | 562 | 563 | if (hitType == METAL) // Ideal metal specular reflection 564 | { 565 | mask *= hitColor; 566 | 567 | rayDirection = randomDirectionInSpecularLobe(reflect(rayDirection, nl), metallicRoughness.g); 568 | rayOrigin = x + nl * uEPS_intersect; 569 | 570 | continue; 571 | } 572 | 573 | 574 | if (hitType == TRANSPARENT) // Ideal dielectric specular reflection/refraction 575 | { 576 | pixelSharpness = diffuseCount == 0 && coatTypeIntersected == FALSE ? -1.0 : pixelSharpness; 577 | 578 | nc = 1.0; // IOR of Air 579 | nt = 1.5; // IOR of common Glass 580 | Re = calcFresnelReflectance(rayDirection, n, nc, nt, ratioIoR); 581 | Tr = 1.0 - Re; 582 | 583 | if (bounces == 0 || (bounces == 1 && hitObjectID != objectID && bounceIsSpecular == TRUE)) 584 | { 585 | reflectionMask = mask * Re; 586 | reflectionRayDirection = reflect(rayDirection, nl); // reflect ray from surface 587 | reflectionRayOrigin = x + nl * uEPS_intersect; 588 | willNeedReflectionRay = TRUE; 589 | } 590 | 591 | if (Re == 1.0) 592 | { 593 | mask = reflectionMask; 594 | rayOrigin = reflectionRayOrigin; 595 | rayDirection = reflectionRayDirection; 596 | 597 | willNeedReflectionRay = FALSE; 598 | bounceIsSpecular = TRUE; 599 | sampleLight = FALSE; 600 | continue; 601 | } 602 | 603 | // transmit ray through surface 604 | 605 | // is ray leaving a solid object from the inside? 606 | // If so, attenuate ray color with object color by how far ray has travelled through the medium 607 | if (distance(n, nl) > 0.1) 608 | { 609 | thickness = 0.01; 610 | mask *= exp( log(clamp(hitColor, 0.01, 0.99)) * thickness * t ); 611 | } 612 | 613 | mask *= Tr; 614 | 615 | tdir = refract(rayDirection, nl, ratioIoR); 616 | rayDirection = tdir; 617 | rayOrigin = x - nl * uEPS_intersect; 618 | 619 | if (diffuseCount == 1) 620 | bounceIsSpecular = TRUE; // turn on refracting caustics 621 | 622 | continue; 623 | 624 | } // end if (hitType == TRANSPARENT) 625 | 626 | 627 | if (hitType == CLEARCOAT_DIFFUSE) // Diffuse object underneath with ClearCoat on top 628 | { 629 | coatTypeIntersected = TRUE; 630 | 631 | nc = 1.0; // IOR of Air 632 | nt = 1.5; // IOR of Clear Coat 633 | Re = calcFresnelReflectance(rayDirection, nl, nc, nt, ratioIoR); 634 | Tr = 1.0 - Re; 635 | 636 | if (bounces == 0 || (bounces == 1 && hitObjectID != objectID && bounceIsSpecular == TRUE)) 637 | { 638 | reflectionMask = mask * Re; 639 | reflectionRayDirection = reflect(rayDirection, nl); // reflect ray from surface 640 | reflectionRayOrigin = x + nl * uEPS_intersect; 641 | willNeedReflectionRay = TRUE; 642 | } 643 | 644 | diffuseCount++; 645 | 646 | if (bounces == 0) 647 | mask *= Tr; 648 | mask *= hitColor; 649 | 650 | bounceIsSpecular = FALSE; 651 | 652 | if (diffuseCount <= 2 && blueNoise_rand() < 0.5) 653 | { 654 | mask *= 2.0; 655 | // choose random Diffuse sample vector 656 | rayDirection = randomCosWeightedDirectionInHemisphere(nl); 657 | rayOrigin = x + nl * uEPS_intersect; 658 | continue; 659 | } 660 | 661 | rayDirection = randomDirectionInSpecularLobe(uSunDirection, 0.05); // create shadow ray pointed towards light 662 | rayOrigin = x + nl * uEPS_intersect; 663 | 664 | weight = max(0.0, dot(rayDirection, nl)) * (uSunPower * uSunPower * 0.0000001); // down-weight directSunLight contribution 665 | mask *= diffuseCount <= 2 ? 
2.0 : 1.0; 666 | mask *= weight; 667 | 668 | // this check helps keep random noisy bright pixels from this clearCoat diffuse surface out of the possible previous refracted glass surface 669 | if (bounces < 3) 670 | sampleLight = TRUE; 671 | continue; 672 | 673 | } //end if (hitType == CLEARCOAT_DIFFUSE) 674 | 675 | } // end for (int bounces = 0; bounces < 6; bounces++) 676 | 677 | 678 | return max(vec3(0), accumCol); 679 | 680 | } // end vec3 CalculateRadiance( out vec3 objectNormal, out vec3 objectColor, out float objectID, out float pixelSharpness ) 681 | 682 | 683 | //------------------- 684 | void SetupScene(void) 685 | //------------------- 686 | { 687 | float wallRadius = 50.0; 688 | 689 | spheres[0] = UnitSphere( vec3(1.0, 1.0, 0.0), CLEARCOAT_DIFFUSE ); // clearCoat diffuse Sphere Left 690 | spheres[1] = UnitSphere( vec3(1.0, 1.0, 1.0), METAL ); // metal Sphere Right 691 | 692 | quads[0] = Quad( vec3( 0, 0, 1), vec3(-wallRadius, wallRadius, wallRadius), vec3( wallRadius, wallRadius, wallRadius), vec3( wallRadius,-wallRadius, wallRadius), vec3(-wallRadius,-wallRadius, wallRadius), vec3( 1.0, 1.0, 1.0), DIFFUSE);// Back Wall 693 | quads[1] = Quad( vec3( 1, 0, 0), vec3(-wallRadius,-wallRadius, wallRadius), vec3(-wallRadius,-wallRadius,-wallRadius), vec3(-wallRadius, wallRadius,-wallRadius), vec3(-wallRadius, wallRadius, wallRadius), vec3( 0.7, 0.05, 0.05), DIFFUSE);// Left Wall Red 694 | quads[2] = Quad( vec3(-1, 0, 0), vec3( wallRadius,-wallRadius,-wallRadius), vec3( wallRadius,-wallRadius, wallRadius), vec3( wallRadius, wallRadius, wallRadius), vec3( wallRadius, wallRadius,-wallRadius), vec3(0.05, 0.05, 0.7), DIFFUSE);// Right Wall Blue 695 | //quads[3] = Quad( vec3( 0,-1, 0), vec3(-wallRadius, wallRadius,-wallRadius), vec3( wallRadius, wallRadius,-wallRadius), vec3( wallRadius, wallRadius, wallRadius), vec3(-wallRadius, wallRadius, wallRadius), vec3( 1.0, 1.0, 1.0), DIFFUSE);// Ceiling 696 | quads[3] = Quad( vec3( 0, 1, 0), vec3(-wallRadius,-wallRadius, wallRadius), vec3( wallRadius,-wallRadius, wallRadius), vec3( wallRadius,-wallRadius,-wallRadius), vec3(-wallRadius,-wallRadius,-wallRadius), vec3( 1.0, 1.0, 1.0), DIFFUSE);// Floor 697 | 698 | } // end void SetupScene(void) 699 | 700 | 701 | // if your scene is static and doesn't have any special requirements, you can use the default main() 702 | #include 703 | 704 | `; 705 | -------------------------------------------------------------------------------- /js/PhysicalSkyModel_FragmentShader.js: -------------------------------------------------------------------------------- 1 | BABYLON.Effect.ShadersStore["pathTracingFragmentShader"] = ` 2 | #version 300 es 3 | 4 | precision highp float; 5 | precision highp int; 6 | precision highp sampler2D; 7 | 8 | // Demo-specific Uniforms 9 | uniform mat4 uLeftSphereInvMatrix; 10 | uniform mat4 uRightSphereInvMatrix; 11 | uniform vec3 uSunDirection; 12 | uniform int uRightSphereMatType; 13 | 14 | // demo/scene-specific setup 15 | #define N_QUADS 4 // ceiling quad and quad area light are removed for this demo 16 | #define N_SPHERES 2 17 | 18 | #include // required defines for scenes that use the physical sky model 19 | 20 | struct UnitSphere { vec3 color; int type; }; 21 | struct Quad { vec3 normal; vec3 v0; vec3 v1; vec3 v2; vec3 v3; vec3 color; int type; }; 22 | 23 | Quad quads[N_QUADS]; 24 | UnitSphere spheres[N_SPHERES]; 25 | 26 | // the camera ray for this pixel (global variables) 27 | vec3 rayOrigin, rayDirection; 28 | 29 | 30 | // all required includes go here: 31 | 32 | #include // 
required on all scenes 33 | 34 | #include // required on all scenes 35 | 36 | #include // required on all scenes 37 | 38 | #include // required on scenes with any math-geometry shapes like sphere, cylinder, cone, etc. 39 | 40 | #include // required on scenes with unit spheres that will be translated, rotated, and scaled by their matrix transform 41 | 42 | #include // required on scenes with quads (actually internally they are made up of 2 triangles) 43 | 44 | #include // required on scenes that use the physical sky model for environment lighting 45 | 46 | 47 | //------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ 48 | float SceneIntersect( vec3 rayOrigin, vec3 rayDirection, out vec3 hitNormal, out vec3 hitEmission, out vec3 hitColor, out int hitType, out float hitObjectID ) 49 | //------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ 50 | { 51 | vec3 rObjOrigin, rObjDirection; 52 | vec3 hit, n; 53 | float t, d; 54 | int objectCount = 0; 55 | 56 | // initialize hit record 57 | t = INFINITY; 58 | hitType = -100; 59 | hitObjectID = -INFINITY; 60 | 61 | // transform ray into Left Sphere's object space 62 | rObjOrigin = vec3( uLeftSphereInvMatrix * vec4(rayOrigin, 1.0) ); 63 | rObjDirection = vec3( uLeftSphereInvMatrix * vec4(rayDirection, 0.0) ); 64 | 65 | d = UnitSphereIntersect( rObjOrigin, rObjDirection, n ); 66 | 67 | if (d < t) 68 | { 69 | t = d; 70 | hitNormal = transpose(mat3(uLeftSphereInvMatrix)) * n; 71 | hitColor = spheres[0].color; 72 | hitType = spheres[0].type; 73 | hitObjectID = float(objectCount); 74 | } 75 | objectCount++; 76 | 77 | // transform ray into Right Sphere's object space 78 | rObjOrigin = vec3( uRightSphereInvMatrix * vec4(rayOrigin, 1.0) ); 79 | rObjDirection = vec3( uRightSphereInvMatrix * vec4(rayDirection, 0.0) ); 80 | 81 | d = UnitSphereIntersect( rObjOrigin, rObjDirection, n ); 82 | 83 | if (d < t) 84 | { 85 | t = d; 86 | hitNormal = transpose(mat3(uRightSphereInvMatrix)) * n; 87 | hitColor = spheres[1].color; 88 | hitType = spheres[1].type; 89 | hitObjectID = float(objectCount); 90 | } 91 | objectCount++; 92 | 93 | 94 | 95 | for (int i = 0; i < N_QUADS; i++) 96 | { 97 | d = QuadIntersect( quads[i].v0, quads[i].v1, quads[i].v2, quads[i].v3, rayOrigin, rayDirection, FALSE ); 98 | 99 | if (d < t) 100 | { 101 | t = d; 102 | hitNormal = quads[i].normal; 103 | hitColor = quads[i].color; 104 | hitType = quads[i].type; 105 | hitObjectID = float(objectCount); 106 | } 107 | 108 | objectCount++; 109 | } 110 | 111 | return t; 112 | 113 | } // end float SceneIntersect( vec3 rayOrigin, vec3 rayDirection, out vec3 hitNormal, out vec3 hitEmission, out vec3 hitColor, out int hitType, out float hitObjectID ) 114 | 115 | 116 | 117 | 118 | //----------------------------------------------------------------------------------------------------------------------------- 119 | vec3 CalculateRadiance( out vec3 objectNormal, out vec3 objectColor, out float objectID, out float pixelSharpness ) 120 | //----------------------------------------------------------------------------------------------------------------------------- 121 | { 122 | // a record of ray-surface intersection data 123 | vec3 hitNormal, hitEmission, hitColor; 124 | vec2 hitUV; 125 | float t, hitObjectID; 126 | int hitTextureID; 127 | int hitType; 128 | 129 | vec3 accumCol = 
vec3(0); 130 | vec3 mask = vec3(1); 131 | vec3 reflectionMask = vec3(1); 132 | vec3 reflectionRayOrigin = vec3(0); 133 | vec3 reflectionRayDirection = vec3(0); 134 | vec3 tdir; 135 | vec3 x, n, nl; 136 | vec3 absorptionCoefficient; 137 | 138 | float nc, nt, ratioIoR, Re, Tr; 139 | //float P, RP, TP; 140 | float weight; 141 | float thickness = 0.05; 142 | float scatteringDistance; 143 | 144 | int diffuseCount = 0; 145 | int previousIntersecType = -100; 146 | hitType = -100; 147 | 148 | int coatTypeIntersected = FALSE; 149 | int bounceIsSpecular = TRUE; 150 | int sampleLight = FALSE; 151 | int willNeedReflectionRay = FALSE; 152 | 153 | 154 | for (int bounces = 0; bounces < 6; bounces++) 155 | { 156 | previousIntersecType = hitType; 157 | 158 | t = SceneIntersect(rayOrigin, rayDirection, hitNormal, hitEmission, hitColor, hitType, hitObjectID); 159 | 160 | 161 | if (t == INFINITY) 162 | { 163 | vec3 skyColor = Get_Sky_Color(rayDirection); 164 | 165 | if (bounces == 0) // ray hits sky first 166 | { 167 | pixelSharpness = 1.01; 168 | //skyHit = TRUE; 169 | //firstX = skyPos; 170 | accumCol += skyColor; 171 | break; // exit early 172 | } 173 | else if (diffuseCount == 0 && bounceIsSpecular == TRUE) 174 | { 175 | if (coatTypeIntersected == TRUE) 176 | { 177 | if (dot(rayDirection, uSunDirection) > 0.995) 178 | pixelSharpness = 1.01; 179 | } 180 | else 181 | pixelSharpness = 1.01; 182 | //skyHit = TRUE; 183 | //firstX = skyPos; 184 | accumCol += mask * skyColor; 185 | } 186 | else if (sampleLight == TRUE) 187 | { 188 | accumCol += mask * skyColor; 189 | } 190 | else if (diffuseCount == 1 && previousIntersecType == TRANSPARENT && bounceIsSpecular == TRUE) 191 | { 192 | accumCol += mask * skyColor; 193 | } 194 | else if (diffuseCount > 0) 195 | { 196 | weight = dot(rayDirection, uSunDirection) < 0.99 ? 1.0 : 0.0; 197 | accumCol += mask * skyColor * weight; 198 | } 199 | 200 | if (willNeedReflectionRay == TRUE) 201 | { 202 | mask = reflectionMask; 203 | rayOrigin = reflectionRayOrigin; 204 | rayDirection = reflectionRayDirection; 205 | 206 | willNeedReflectionRay = FALSE; 207 | bounceIsSpecular = TRUE; 208 | sampleLight = FALSE; 209 | diffuseCount = 0; 210 | continue; 211 | } 212 | 213 | // reached the sky light, so we can exit 214 | break; 215 | } // end if (t == INFINITY) 216 | 217 | 218 | // useful data 219 | n = normalize(hitNormal); 220 | nl = dot(n, rayDirection) < 0.0 ? 
n : -n; 221 | x = rayOrigin + rayDirection * t; 222 | 223 | if (bounces == 0) 224 | { 225 | objectNormal = nl; 226 | objectColor = hitColor; 227 | objectID = hitObjectID; 228 | } 229 | if (bounces == 1 && previousIntersecType == METAL) 230 | { 231 | objectNormal = nl; 232 | } 233 | 234 | 235 | /* if (hitType == LIGHT) 236 | { 237 | if (diffuseCount == 0) 238 | pixelSharpness = 1.01; 239 | 240 | if (bounceIsSpecular == TRUE || sampleLight == TRUE) 241 | accumCol = mask * hitColor; 242 | 243 | // reached a light, so we can exit 244 | break; 245 | 246 | } // end if (hitType == LIGHT) */ 247 | 248 | 249 | // if we get here and sampleLight is still TRUE, shadow ray failed to find the light source 250 | // the ray hit an occluding object along its way to the light 251 | if (sampleLight == TRUE) 252 | { 253 | if (willNeedReflectionRay == TRUE) 254 | { 255 | mask = reflectionMask; 256 | rayOrigin = reflectionRayOrigin; 257 | rayDirection = reflectionRayDirection; 258 | 259 | willNeedReflectionRay = FALSE; 260 | bounceIsSpecular = TRUE; 261 | sampleLight = FALSE; 262 | diffuseCount = 0; 263 | continue; 264 | } 265 | 266 | break; 267 | } 268 | 269 | 270 | 271 | if (hitType == DIFFUSE) // Ideal diffuse reflection 272 | { 273 | diffuseCount++; 274 | 275 | mask *= hitColor; 276 | 277 | bounceIsSpecular = FALSE; 278 | 279 | if (diffuseCount == 1 && blueNoise_rand() < 0.5) 280 | { 281 | mask *= 2.0; 282 | // choose random Diffuse sample vector 283 | rayDirection = randomCosWeightedDirectionInHemisphere(nl); 284 | rayOrigin = x + nl * uEPS_intersect; 285 | continue; 286 | } 287 | 288 | rayDirection = randomDirectionInSpecularLobe(uSunDirection, 0.15); // create shadow ray pointed towards light 289 | rayOrigin = x + nl * uEPS_intersect; 290 | 291 | weight = max(0.0, dot(rayDirection, nl)) * 0.05; // down-weight directSunLight contribution 292 | mask *= diffuseCount == 1 ? 2.0 : 1.0; 293 | mask *= weight; 294 | 295 | sampleLight = TRUE; 296 | continue; 297 | 298 | } // end if (hitType == DIFFUSE) 299 | 300 | 301 | if (hitType == METAL) // Ideal metal specular reflection 302 | { 303 | mask *= hitColor; 304 | 305 | rayDirection = reflect(rayDirection, nl); 306 | rayOrigin = x + nl * uEPS_intersect; 307 | 308 | continue; 309 | } 310 | 311 | 312 | if (hitType == TRANSPARENT) // Ideal dielectric specular reflection/refraction 313 | { 314 | pixelSharpness = diffuseCount == 0 && coatTypeIntersected == FALSE ? -1.0 : pixelSharpness; 315 | 316 | nc = 1.0; // IOR of Air 317 | nt = 1.5; // IOR of common Glass 318 | Re = calcFresnelReflectance(rayDirection, n, nc, nt, ratioIoR); 319 | Tr = 1.0 - Re; 320 | 321 | if (bounces == 0 || (bounces == 1 && hitObjectID != objectID && bounceIsSpecular == TRUE)) 322 | { 323 | reflectionMask = mask * Re; 324 | reflectionRayDirection = reflect(rayDirection, nl); // reflect ray from surface 325 | reflectionRayOrigin = x + nl * uEPS_intersect; 326 | willNeedReflectionRay = TRUE; 327 | } 328 | 329 | if (Re == 1.0) 330 | { 331 | mask = reflectionMask; 332 | rayOrigin = reflectionRayOrigin; 333 | rayDirection = reflectionRayDirection; 334 | 335 | willNeedReflectionRay = FALSE; 336 | bounceIsSpecular = TRUE; 337 | sampleLight = FALSE; 338 | continue; 339 | } 340 | 341 | // transmit ray through surface 342 | 343 | // is ray leaving a solid object from the inside? 
344 | // If so, attenuate ray color with object color by how far ray has travelled through the medium 345 | if (distance(n, nl) > 0.1) 346 | { 347 | thickness = 0.01; 348 | mask *= exp( log(clamp(hitColor, 0.01, 0.99)) * thickness * t ); 349 | } 350 | 351 | mask *= Tr; 352 | 353 | tdir = refract(rayDirection, nl, ratioIoR); 354 | rayDirection = tdir; 355 | rayOrigin = x - nl * uEPS_intersect; 356 | 357 | if (diffuseCount == 1) 358 | bounceIsSpecular = TRUE; // turn on refracting caustics 359 | 360 | continue; 361 | 362 | } // end if (hitType == TRANSPARENT) 363 | 364 | 365 | if (hitType == CLEARCOAT_DIFFUSE) // Diffuse object underneath with ClearCoat on top 366 | { 367 | coatTypeIntersected = TRUE; 368 | 369 | nc = 1.0; // IOR of Air 370 | nt = 1.5; // IOR of Clear Coat 371 | Re = calcFresnelReflectance(rayDirection, nl, nc, nt, ratioIoR); 372 | Tr = 1.0 - Re; 373 | 374 | if (bounces == 0 || (bounces == 1 && hitObjectID != objectID && bounceIsSpecular == TRUE)) 375 | { 376 | reflectionMask = mask * Re; 377 | reflectionRayDirection = reflect(rayDirection, nl); // reflect ray from surface 378 | reflectionRayOrigin = x + nl * uEPS_intersect; 379 | willNeedReflectionRay = TRUE; 380 | } 381 | 382 | diffuseCount++; 383 | 384 | if (bounces == 0) 385 | mask *= Tr; 386 | mask *= hitColor; 387 | 388 | bounceIsSpecular = FALSE; 389 | 390 | if (diffuseCount == 1 && blueNoise_rand() < 0.5) 391 | { 392 | mask *= 2.0; 393 | // choose random Diffuse sample vector 394 | rayDirection = randomCosWeightedDirectionInHemisphere(nl); 395 | rayOrigin = x + nl * uEPS_intersect; 396 | continue; 397 | } 398 | 399 | rayDirection = randomDirectionInSpecularLobe(uSunDirection, 0.15); // create shadow ray pointed towards light 400 | rayOrigin = x + nl * uEPS_intersect; 401 | 402 | weight = max(0.0, dot(rayDirection, nl)) * 0.05; // down-weight directSunLight contribution 403 | mask *= diffuseCount == 1 ? 
2.0 : 1.0; 404 | mask *= weight; 405 | 406 | // this check helps keep random noisy bright pixels from this clearCoat diffuse surface out of the possible previous refracted glass surface 407 | if (bounces < 3) 408 | sampleLight = TRUE; 409 | continue; 410 | 411 | } //end if (hitType == CLEARCOAT_DIFFUSE) 412 | 413 | } // end for (int bounces = 0; bounces < 6; bounces++) 414 | 415 | 416 | return max(vec3(0), accumCol); 417 | 418 | } // end vec3 CalculateRadiance( out vec3 objectNormal, out vec3 objectColor, out float objectID, out float pixelSharpness ) 419 | 420 | 421 | //------------------------------------------------------------------------------------------------- 422 | void SetupScene(void) 423 | //------------------------------------------------------------------------------------------------- 424 | { 425 | vec3 light_emissionColor = vec3(1.0, 1.0, 1.0) * 5.0; // Bright white light 426 | 427 | float wallRadius = 50.0; 428 | 429 | spheres[0] = UnitSphere( vec3(1.0, 1.0, 0.0), CLEARCOAT_DIFFUSE ); // clearCoat diffuse Sphere Left 430 | spheres[1] = UnitSphere( vec3(1.0, 1.0, 1.0), uRightSphereMatType ); // user-chosen material Sphere Right 431 | 432 | quads[0] = Quad( vec3( 0, 0, 1), vec3(-wallRadius, wallRadius, wallRadius), vec3( wallRadius, wallRadius, wallRadius), vec3( wallRadius,-wallRadius, wallRadius), vec3(-wallRadius,-wallRadius, wallRadius), vec3( 1.0, 1.0, 1.0), DIFFUSE);// Back Wall 433 | quads[1] = Quad( vec3( 1, 0, 0), vec3(-wallRadius,-wallRadius, wallRadius), vec3(-wallRadius,-wallRadius,-wallRadius), vec3(-wallRadius, wallRadius,-wallRadius), vec3(-wallRadius, wallRadius, wallRadius), vec3( 0.7, 0.05, 0.05), DIFFUSE);// Left Wall Red 434 | quads[2] = Quad( vec3(-1, 0, 0), vec3( wallRadius,-wallRadius,-wallRadius), vec3( wallRadius,-wallRadius, wallRadius), vec3( wallRadius, wallRadius, wallRadius), vec3( wallRadius, wallRadius,-wallRadius), vec3(0.05, 0.05, 0.7), DIFFUSE);// Right Wall Blue 435 | //quads[3] = Quad( vec3( 0,-1, 0), vec3(-wallRadius, wallRadius,-wallRadius), vec3( wallRadius, wallRadius,-wallRadius), vec3( wallRadius, wallRadius, wallRadius), vec3(-wallRadius, wallRadius, wallRadius), vec3( 1.0, 1.0, 1.0), DIFFUSE);// Ceiling 436 | quads[3] = Quad( vec3( 0, 1, 0), vec3(-wallRadius,-wallRadius, wallRadius), vec3( wallRadius,-wallRadius, wallRadius), vec3( wallRadius,-wallRadius,-wallRadius), vec3(-wallRadius,-wallRadius,-wallRadius), vec3( 1.0, 1.0, 1.0), DIFFUSE);// Floor 437 | 438 | } // end void SetupScene(void) 439 | 440 | 441 | // if your scene is static and doesn't have any special requirements, you can use the default main() 442 | #include 443 | 444 | `; 445 | -------------------------------------------------------------------------------- /js/Physical_Sky_Model.js: -------------------------------------------------------------------------------- 1 | let canvas, engine, pathTracingScene; 2 | let container, stats; 3 | let gui; 4 | let pixel_ResolutionController, pixel_ResolutionObject; 5 | let needChangePixelResolution = false; 6 | let sunDirTransform_RotateXController, sunDirTransform_RotateXObject; 7 | let sunDirTransform_RotateYController, sunDirTransform_RotateYObject; 8 | let needChangeSunDirRotation = false; 9 | let rightSphere_MaterialController, rightSphere_MaterialObject; 10 | let needChangeRightSphereMaterial = false; 11 | let isPaused = true; 12 | let camera, oldCameraMatrix, newCameraMatrix; 13 | let camFlightSpeed; // scene specific, depending on scene size dimensions 14 | let cameraRecentlyMoving = false; 15 | let windowIsBeingResized = 
false; 16 | let beginningFlag = true; 17 | let timeInSeconds = 0.0; 18 | let frameTime = 0.0; 19 | let newWidth, newHeight; 20 | let nm, om; 21 | let increaseFOV = false; 22 | let decreaseFOV = false; 23 | let uApertureSize; // scene specific, depending on scene size dimensions 24 | let apertureChangeAmount; // scene specific, depending on scene size dimensions 25 | let uFocusDistance; // scene specific, depending on scene size dimensions 26 | let focusDistChangeAmount; // scene specific, depending on scene size dimensions 27 | let mouseControl = true; 28 | let cameraDirectionVector = new BABYLON.Vector3(); //for moving where the camera is looking 29 | let cameraRightVector = new BABYLON.Vector3(); //for strafing the camera right and left 30 | let cameraUpVector = new BABYLON.Vector3(); //for moving camera up and down 31 | let blueNoiseTexture; 32 | let infoElement = document.getElementById('info'); 33 | infoElement.style.cursor = "default"; 34 | infoElement.style.userSelect = "none"; 35 | infoElement.style.MozUserSelect = "none"; 36 | 37 | let cameraInfoElement = document.getElementById('cameraInfo'); 38 | cameraInfoElement.style.cursor = "default"; 39 | cameraInfoElement.style.userSelect = "none"; 40 | cameraInfoElement.style.MozUserSelect = "none"; 41 | 42 | // common required uniforms 43 | let uSceneIsDynamic = false; // will any geometry, lights, or models be moving in the scene? 44 | let uRandomVec2 = new BABYLON.Vector2(); // used to offset the texture UV when sampling the blueNoiseTexture for smooth randomness - this vec2 is updated/changed every animation frame 45 | let uTime = 0.0; // elapsed time in seconds since the app started 46 | let uFrameCounter = 1.0; // 1 instead of 0 because it is used as a rng() seed in pathtracing shader 47 | let uSampleCounter = 0.0; // will get increased by 1 in animation loop before rendering 48 | let uOneOverSampleCounter = 0.0; // the sample accumulation buffer gets multiplied by this reciprocal of SampleCounter, for averaging final pixel color 49 | let uULen = 1.0; // rendering pixel horizontal scale, related to camera's FOV and aspect ratio 50 | let uVLen = 1.0; // rendering pixel vertical scale, related to camera's FOV 51 | let uCameraIsMoving = false; // lets the path tracer know if the camera is being moved 52 | let uToneMappingExposure = 1.0; // exposure amount when applying Reinhard tonemapping in final stages of pixel colors' output 53 | let uPixelEdgeSharpness = 1.0; // for dynamic scenes only - if pixel is found to be lying on a border/boundary edge, how sharp should it be? (range: 0.0-1.0) 54 | let uEdgeSharpenSpeed = 0.05; // applies to edges only - how fast is the blur filter removed from edges? 55 | let uFilterDecaySpeed = 0.0002; // applies to entire image(edges and non-edges alike) - how fast should the blur filter go away for the entire image? 
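// NOTE: uPreviousSampleCount is referenced by the path tracing effect wrapper and the render loop
// further below, but its declaration does not appear in this listing; a minimal declaration is
// sketched here (the initial value of 1.0 is an assumption - it is overwritten right before its
// first real use, when the camera starts moving):
let uPreviousSampleCount = 1.0; // sample count recorded just before the camera begins moving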
56 | 57 | // scene/demo-specific variables; 58 | let sphereRadius = 16; 59 | let wallRadius = 50; 60 | let leftSphereTransformNode; 61 | let rightSphereTransformNode; 62 | let sunTransformNode; 63 | let sunDirRotationX, sunDirRotationY; 64 | // scene/demo-specific uniforms 65 | let uSunDirection = new BABYLON.Vector3(); 66 | let uRightSphereMatType; 67 | let uLeftSphereInvMatrix = new BABYLON.Matrix(); 68 | let uRightSphereInvMatrix = new BABYLON.Matrix(); 69 | 70 | 71 | // The following list of keys is not exhaustive, but it should be more than enough to build interactive demos and games 72 | let KeyboardState = { 73 | KeyA: false, KeyB: false, KeyC: false, KeyD: false, KeyE: false, KeyF: false, KeyG: false, KeyH: false, KeyI: false, KeyJ: false, KeyK: false, KeyL: false, KeyM: false, 74 | KeyN: false, KeyO: false, KeyP: false, KeyQ: false, KeyR: false, KeyS: false, KeyT: false, KeyU: false, KeyV: false, KeyW: false, KeyX: false, KeyY: false, KeyZ: false, 75 | ArrowLeft: false, ArrowUp: false, ArrowRight: false, ArrowDown: false, Space: false, Enter: false, PageUp: false, PageDown: false, Tab: false, 76 | Minus: false, Equal: false, BracketLeft: false, BracketRight: false, Semicolon: false, Quote: false, Backquote: false, 77 | Comma: false, Period: false, ShiftLeft: false, ShiftRight: false, Slash: false, Backslash: false, Backspace: false, 78 | Digit1: false, Digit2: false, Digit3: false, Digit4: false, Digit5: false, Digit6: false, Digit7: false, Digit8: false, Digit9: false, Digit0: false 79 | } 80 | 81 | function onKeyDown(event) 82 | { 83 | event.preventDefault(); 84 | 85 | KeyboardState[event.code] = true; 86 | } 87 | 88 | function onKeyUp(event) 89 | { 90 | event.preventDefault(); 91 | 92 | KeyboardState[event.code] = false; 93 | } 94 | 95 | function keyPressed(keyName) 96 | { 97 | return KeyboardState[keyName]; 98 | } 99 | 100 | function onMouseWheel(event) 101 | { 102 | if (isPaused) 103 | return; 104 | 105 | // use the following instead, because event.preventDefault() gives errors in console 106 | event.stopPropagation(); 107 | 108 | if (event.deltaY > 0) 109 | { 110 | increaseFOV = true; 111 | } 112 | else if (event.deltaY < 0) 113 | { 114 | decreaseFOV = true; 115 | } 116 | } 117 | 118 | // Watch for browser/canvas resize events 119 | window.addEventListener("resize", function () 120 | { 121 | handleWindowResize(); 122 | }); 123 | 124 | if ('ontouchstart' in window) 125 | { 126 | mouseControl = false; 127 | // TODO: instantiate my custom 'MobileJoystickControls' or similar Babylon solution? 
128 | } 129 | 130 | if (mouseControl) 131 | { 132 | window.addEventListener('wheel', onMouseWheel, false); 133 | } 134 | 135 | function handleWindowResize() 136 | { 137 | windowIsBeingResized = true; 138 | 139 | engine.resize(); 140 | 141 | newWidth = engine.getRenderWidth(); 142 | newHeight = engine.getRenderHeight(); 143 | pathTracingRenderTarget.resize({ width: newWidth, height: newHeight }); 144 | screenCopyRenderTarget.resize({ width: newWidth, height: newHeight }); 145 | 146 | width = newWidth; 147 | height = newHeight; 148 | 149 | uVLen = Math.tan(camera.fov * 0.5); 150 | uULen = uVLen * (width / height); 151 | } 152 | 153 | 154 | // setup GUI 155 | function init_GUI() 156 | { 157 | pixel_ResolutionObject = { 158 | pixel_Resolution: 0.75 159 | } 160 | 161 | sunDirTransform_RotateXObject = { 162 | sunDir_RotateX: 298 163 | } 164 | sunDirTransform_RotateYObject = { 165 | sunDir_RotateY: 318 166 | } 167 | 168 | rightSphere_MaterialObject = { 169 | RSphere_MaterialPreset: 'Metal' 170 | } 171 | 172 | function handlePixelResolutionChange() 173 | { 174 | needChangePixelResolution = true; 175 | } 176 | 177 | function handleSunDirRotationChange() 178 | { 179 | needChangeSunDirRotation = true; 180 | } 181 | 182 | function handleRightSphereMaterialChange() 183 | { 184 | needChangeRightSphereMaterial = true; 185 | } 186 | 187 | gui = new dat.GUI(); 188 | 189 | pixel_ResolutionController = gui.add(pixel_ResolutionObject, 'pixel_Resolution', 0.5, 1.0, 0.05).onChange(handlePixelResolutionChange); 190 | 191 | sunDirTransform_RotateXController = gui.add(sunDirTransform_RotateXObject, 'sunDir_RotateX', 160, 370, 1).onChange(handleSunDirRotationChange); 192 | sunDirTransform_RotateYController = gui.add(sunDirTransform_RotateYObject, 'sunDir_RotateY', 0, 359, 1).onChange(handleSunDirRotationChange); 193 | 194 | rightSphere_MaterialController = gui.add(rightSphere_MaterialObject, 'RSphere_MaterialPreset', ['Transparent', 195 | 'Diffuse', 'ClearCoat_Diffuse', 'Metal']).onChange(handleRightSphereMaterialChange); 196 | 197 | // jumpstart setting of initial sun direction when the demo begins 198 | handleSunDirRotationChange(); 199 | } 200 | 201 | init_GUI(); 202 | 203 | 204 | // setup the frame rate display (FPS) in the top-left corner 205 | container = document.getElementById('container'); 206 | 207 | stats = new Stats(); 208 | stats.domElement.style.position = 'absolute'; 209 | stats.domElement.style.top = '0px'; 210 | stats.domElement.style.cursor = "default"; 211 | stats.domElement.style.webkitUserSelect = "none"; 212 | stats.domElement.style.MozUserSelect = "none"; 213 | container.appendChild(stats.domElement); 214 | 215 | 216 | canvas = document.getElementById("renderCanvas"); 217 | 218 | engine = new BABYLON.Engine(canvas, true); 219 | 220 | 221 | // Create the scene space 222 | pathTracingScene = new BABYLON.Scene(engine); 223 | 224 | // enable browser's mouse pointer lock feature, for free-look camera controlled by mouse movement 225 | pathTracingScene.onPointerDown = evt => 226 | { 227 | engine.enterPointerlock(); 228 | } 229 | 230 | // Add a camera to the scene and attach it to the canvas 231 | camera = new BABYLON.UniversalCamera("Camera", new BABYLON.Vector3(), pathTracingScene); 232 | camera.attachControl(canvas, true); 233 | 234 | uVLen = Math.tan(camera.fov * 0.5); 235 | uULen = uVLen * (engine.getRenderWidth() / engine.getRenderHeight()); 236 | 237 | 238 | 239 | // SCENE/DEMO-SPECIFIC PARAMETERS 240 | camera.position.set(0, -10, -200); 241 | camera.inertia = 0; 242 | 
camera.angularSensibility = 500; 243 | camFlightSpeed = 100; // scene specific, depending on scene size dimensions 244 | uApertureSize = 0.0; // aperture size at beginning of app 245 | uFocusDistance = 113.0; // initial focus distance from camera in scene - scene specific, depending on scene size dimensions 246 | const uEPS_intersect = 0.01; // value is scene-size dependent 247 | apertureChangeAmount = 1; // scene specific, depending on scene size dimensions 248 | focusDistChangeAmount = 1; // scene specific, depending on scene size dimensions 249 | uRightSphereMatType = 3; // enum number code for METAL material - demo starts off with this setting for right sphere 250 | 251 | oldCameraMatrix = new BABYLON.Matrix(); 252 | newCameraMatrix = new BABYLON.Matrix(); 253 | 254 | // must be instantiated here after scene has been created 255 | leftSphereTransformNode = new BABYLON.TransformNode(); 256 | rightSphereTransformNode = new BABYLON.TransformNode(); 257 | sunTransformNode = new BABYLON.TransformNode(); 258 | 259 | leftSphereTransformNode.position.set(-wallRadius * 0.45, -wallRadius + sphereRadius + 0.1, -wallRadius * 0.2); 260 | leftSphereTransformNode.scaling.set(sphereRadius, sphereRadius, sphereRadius); 261 | //leftSphereTransformNode.scaling.set(sphereRadius * 0.3, sphereRadius, sphereRadius); 262 | //leftSphereTransformNode.rotation.set(0, 0, Math.PI * 0.2); 263 | uLeftSphereInvMatrix.copyFrom(leftSphereTransformNode.getWorldMatrix()); 264 | uLeftSphereInvMatrix.invert(); 265 | 266 | rightSphereTransformNode.position.set(wallRadius * 0.45, -wallRadius + sphereRadius + 0.1, -wallRadius * 0.2); 267 | rightSphereTransformNode.scaling.set(sphereRadius, sphereRadius, sphereRadius); 268 | uRightSphereInvMatrix.copyFrom(rightSphereTransformNode.getWorldMatrix()); 269 | uRightSphereInvMatrix.invert(); 270 | 271 | let width = engine.getRenderWidth(), height = engine.getRenderHeight(); 272 | 273 | blueNoiseTexture = new BABYLON.Texture("./textures/BlueNoise_RGBA256.png", 274 | pathTracingScene, 275 | true, 276 | false, 277 | BABYLON.Constants.TEXTURE_NEAREST_SAMPLINGMODE, 278 | null, 279 | null, 280 | null, 281 | false, 282 | BABYLON.Constants.TEXTUREFORMAT_RGBA); 283 | 284 | 285 | 286 | const pathTracingRenderTarget = new BABYLON.RenderTargetTexture("pathTracingRenderTarget", { width, height }, pathTracingScene, false, false, 287 | BABYLON.Constants.TEXTURETYPE_FLOAT, false, BABYLON.Constants.TEXTURE_NEAREST_SAMPLINGMODE, false, false, false, 288 | BABYLON.Constants.TEXTUREFORMAT_RGBA); 289 | 290 | const screenCopyRenderTarget = new BABYLON.RenderTargetTexture("screenCopyRenderTarget", { width, height }, pathTracingScene, false, false, 291 | BABYLON.Constants.TEXTURETYPE_FLOAT, false, BABYLON.Constants.TEXTURE_NEAREST_SAMPLINGMODE, false, false, false, 292 | BABYLON.Constants.TEXTUREFORMAT_RGBA); 293 | 294 | const eRenderer = new BABYLON.EffectRenderer(engine); 295 | 296 | // SCREEN COPY EFFECT 297 | const screenCopyEffect = new BABYLON.EffectWrapper({ 298 | engine: engine, 299 | fragmentShader: BABYLON.Effect.ShadersStore["screenCopyFragmentShader"], 300 | uniformNames: [], 301 | samplerNames: ["pathTracedImageBuffer"], 302 | name: "screenCopyEffectWrapper" 303 | }); 304 | 305 | screenCopyEffect.onApplyObservable.add(() => 306 | { 307 | screenCopyEffect.effect.setTexture("pathTracedImageBuffer", pathTracingRenderTarget); 308 | }); 309 | 310 | // SCREEN OUTPUT EFFECT 311 | const screenOutputEffect = new BABYLON.EffectWrapper({ 312 | engine: engine, 313 | fragmentShader: 
BABYLON.Effect.ShadersStore["screenOutputFragmentShader"], 314 | uniformNames: ["uSampleCounter", "uOneOverSampleCounter", "uPixelEdgeSharpness", "uEdgeSharpenSpeed", "uFilterDecaySpeed", 315 | "uToneMappingExposure", "uSceneIsDynamic"], 316 | samplerNames: ["accumulationBuffer"], 317 | name: "screenOutputEffectWrapper" 318 | }); 319 | 320 | screenOutputEffect.onApplyObservable.add(() => 321 | { 322 | screenOutputEffect.effect.setTexture("accumulationBuffer", pathTracingRenderTarget); 323 | screenOutputEffect.effect.setFloat("uSampleCounter", uSampleCounter); 324 | screenOutputEffect.effect.setFloat("uOneOverSampleCounter", uOneOverSampleCounter); 325 | screenOutputEffect.effect.setFloat("uPixelEdgeSharpness", uPixelEdgeSharpness); 326 | screenOutputEffect.effect.setFloat("uEdgeSharpenSpeed", uEdgeSharpenSpeed); 327 | screenOutputEffect.effect.setFloat("uFilterDecaySpeed", uFilterDecaySpeed); 328 | screenOutputEffect.effect.setFloat("uToneMappingExposure", uToneMappingExposure); 329 | screenOutputEffect.effect.setBool("uSceneIsDynamic", uSceneIsDynamic); 330 | }); 331 | 332 | // MAIN PATH TRACING EFFECT 333 | const pathTracingEffect = new BABYLON.EffectWrapper({ 334 | engine: engine, 335 | fragmentShader: BABYLON.Effect.ShadersStore["pathTracingFragmentShader"], 336 | uniformNames: ["uResolution", "uRandomVec2", "uULen", "uVLen", "uTime", "uFrameCounter", "uSampleCounter", "uPreviousSampleCount", "uEPS_intersect", "uCameraMatrix", "uApertureSize", 337 | "uFocusDistance", "uCameraIsMoving", "uSunDirection", "uLeftSphereInvMatrix", "uRightSphereInvMatrix", "uRightSphereMatType"], 338 | samplerNames: ["previousBuffer", "blueNoiseTexture"], 339 | name: "pathTracingEffectWrapper" 340 | }); 341 | 342 | pathTracingEffect.onApplyObservable.add(() => 343 | { 344 | pathTracingEffect.effect.setTexture("previousBuffer", screenCopyRenderTarget); 345 | pathTracingEffect.effect.setTexture("blueNoiseTexture", blueNoiseTexture); 346 | pathTracingEffect.effect.setVector3("uSunDirection", uSunDirection); 347 | pathTracingEffect.effect.setFloat2("uResolution", pathTracingRenderTarget.getSize().width, pathTracingRenderTarget.getSize().height); 348 | pathTracingEffect.effect.setFloat2("uRandomVec2", uRandomVec2.x, uRandomVec2.y); 349 | pathTracingEffect.effect.setFloat("uULen", uULen); 350 | pathTracingEffect.effect.setFloat("uVLen", uVLen); 351 | pathTracingEffect.effect.setFloat("uTime", uTime); 352 | pathTracingEffect.effect.setFloat("uFrameCounter", uFrameCounter); 353 | pathTracingEffect.effect.setFloat("uSampleCounter", uSampleCounter); 354 | pathTracingEffect.effect.setFloat("uPreviousSampleCount", uPreviousSampleCount); 355 | pathTracingEffect.effect.setFloat("uEPS_intersect", uEPS_intersect); 356 | pathTracingEffect.effect.setFloat("uApertureSize", uApertureSize); 357 | pathTracingEffect.effect.setFloat("uFocusDistance", uFocusDistance); 358 | pathTracingEffect.effect.setInt("uRightSphereMatType", uRightSphereMatType); 359 | pathTracingEffect.effect.setBool("uCameraIsMoving", uCameraIsMoving); 360 | pathTracingEffect.effect.setMatrix("uCameraMatrix", camera.getWorldMatrix()); 361 | pathTracingEffect.effect.setMatrix("uLeftSphereInvMatrix", uLeftSphereInvMatrix); 362 | pathTracingEffect.effect.setMatrix("uRightSphereInvMatrix", uRightSphereInvMatrix); 363 | }); 364 | 365 | 366 | function getElapsedTimeInSeconds() 367 | { 368 | timeInSeconds += (engine.getDeltaTime() * 0.001); 369 | return timeInSeconds; 370 | } 371 | 372 | 373 | // Register a render loop to repeatedly render the scene 374 | 
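// For orientation (summarizing the render calls at the bottom of this loop): each frame performs four passes -
// (1) pathTracingScene.render() updates the user's camera, (2) pathTracingEffect renders into
// pathTracingRenderTarget (the progressive accumulation buffer), (3) screenCopyEffect stores that result
// in screenCopyRenderTarget so the next frame can read it back as 'previousBuffer' (ping-pong buffering),
// and (4) screenOutputEffect averages by sample count, tone-maps, and gamma-corrects for display.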
engine.runRenderLoop(function () 375 | { 376 | 377 | // first, reset cameraIsMoving flag 378 | uCameraIsMoving = false; 379 | 380 | if (beginningFlag && uSampleCounter == 1) 381 | { 382 | pixel_ResolutionController.setValue(0.75); 383 | beginningFlag = false; 384 | } 385 | 386 | // if GUI has been used, update 387 | 388 | if (needChangePixelResolution) 389 | { 390 | engine.setHardwareScalingLevel(1.0 / pixel_ResolutionController.getValue()); 391 | 392 | handleWindowResize(); 393 | 394 | needChangePixelResolution = false; 395 | } 396 | 397 | if (needChangeSunDirRotation) 398 | { 399 | sunDirRotationX = sunDirTransform_RotateXController.getValue(); 400 | sunDirRotationY = sunDirTransform_RotateYController.getValue(); 401 | 402 | sunDirRotationX *= (Math.PI / 180); 403 | sunDirRotationY *= (Math.PI / 180); 404 | 405 | sunTransformNode.rotation.set(sunDirRotationX, sunDirRotationY, 0); 406 | 407 | uCameraIsMoving = true; 408 | needChangeSunDirRotation = false; 409 | } 410 | 411 | if (needChangeRightSphereMaterial) 412 | { 413 | if (rightSphere_MaterialController.getValue() == 'Transparent') 414 | { 415 | uRightSphereMatType = 2;// enum number code for TRANSPARENT material 416 | } 417 | else if (rightSphere_MaterialController.getValue() == 'Diffuse') 418 | { 419 | uRightSphereMatType = 1;// enum number code for DIFFUSE material 420 | } 421 | else if (rightSphere_MaterialController.getValue() == 'ClearCoat_Diffuse') 422 | { 423 | uRightSphereMatType = 4;// enum number code for CLEARCOAT_DIFFUSE material 424 | } 425 | else if (rightSphere_MaterialController.getValue() == 'Metal') 426 | { 427 | uRightSphereMatType = 3;// enum number code for METAL material 428 | } 429 | 430 | uCameraIsMoving = true; 431 | needChangeRightSphereMaterial = false; 432 | } 433 | 434 | 435 | // check for pointerLock state and add or remove keyboard listeners 436 | if (isPaused && engine.isPointerLock) 437 | { 438 | document.addEventListener('keydown', onKeyDown, false); 439 | document.addEventListener('keyup', onKeyUp, false); 440 | isPaused = false; 441 | } 442 | if (!isPaused && !engine.isPointerLock) 443 | { 444 | document.removeEventListener('keydown', onKeyDown, false); 445 | document.removeEventListener('keyup', onKeyUp, false); 446 | isPaused = true; 447 | } 448 | 449 | 450 | if (windowIsBeingResized) 451 | { 452 | uCameraIsMoving = true; 453 | windowIsBeingResized = false; 454 | } 455 | 456 | uTime = getElapsedTimeInSeconds(); 457 | 458 | frameTime = engine.getDeltaTime() * 0.001; 459 | 460 | uRandomVec2.set(Math.random(), Math.random()); 461 | 462 | // my own optimized way of telling if the camera has moved or not 463 | newCameraMatrix.copyFrom(camera.getWorldMatrix()); 464 | nm = newCameraMatrix.m; 465 | om = oldCameraMatrix.m; 466 | if (nm[0] != om[0] || nm[1] != om[1] || nm[2] != om[2] || nm[3] != om[3] || 467 | nm[4] != om[4] || nm[5] != om[5] || nm[6] != om[6] || nm[7] != om[7] || 468 | nm[8] != om[8] || nm[9] != om[9] || nm[10] != om[10] || nm[11] != om[11] || 469 | nm[12] != om[12] || nm[13] != om[13] || nm[14] != om[14] || nm[15] != om[15]) 470 | { 471 | uCameraIsMoving = true; 472 | } 473 | // save camera state for next frame's comparison 474 | oldCameraMatrix.copyFrom(newCameraMatrix); 475 | 476 | // get current camera orientation basis vectors 477 | cameraDirectionVector.set(nm[8], nm[9], nm[10]); 478 | cameraDirectionVector.normalize(); 479 | cameraUpVector.set(nm[4], nm[5], nm[6]); 480 | cameraUpVector.normalize(); 481 | cameraRightVector.set(nm[0], nm[1], nm[2]); 482 | 
cameraRightVector.normalize(); 483 | 484 | // check for user input 485 | if (keyPressed('KeyW') && !keyPressed('KeyS')) 486 | { 487 | camera.position.addInPlace(cameraDirectionVector.scaleToRef(camFlightSpeed * frameTime, cameraDirectionVector)); 488 | } 489 | if (keyPressed('KeyS') && !keyPressed('KeyW')) 490 | { 491 | camera.position.subtractInPlace(cameraDirectionVector.scaleToRef(camFlightSpeed * frameTime, cameraDirectionVector)); 492 | } 493 | if (keyPressed('KeyA') && !keyPressed('KeyD')) 494 | { 495 | camera.position.subtractInPlace(cameraRightVector.scaleToRef(camFlightSpeed * frameTime, cameraRightVector)); 496 | } 497 | if (keyPressed('KeyD') && !keyPressed('KeyA')) 498 | { 499 | camera.position.addInPlace(cameraRightVector.scaleToRef(camFlightSpeed * frameTime, cameraRightVector)); 500 | } 501 | if (keyPressed('KeyE') && !keyPressed('KeyQ')) 502 | { 503 | camera.position.addInPlace(cameraUpVector.scaleToRef(camFlightSpeed * frameTime, cameraUpVector)); 504 | } 505 | if (keyPressed('KeyQ') && !keyPressed('KeyE')) 506 | { 507 | camera.position.subtractInPlace(cameraUpVector.scaleToRef(camFlightSpeed * frameTime, cameraUpVector)); 508 | } 509 | 510 | if (keyPressed('Equal') && !keyPressed('Minus')) 511 | { 512 | uFocusDistance += focusDistChangeAmount; 513 | uCameraIsMoving = true; 514 | } 515 | if (keyPressed('Minus') && !keyPressed('Equal')) 516 | { 517 | uFocusDistance -= focusDistChangeAmount; 518 | if (uFocusDistance < 1) 519 | uFocusDistance = 1; 520 | uCameraIsMoving = true; 521 | } 522 | if (keyPressed('BracketRight') && !keyPressed('BracketLeft')) 523 | { 524 | uApertureSize += apertureChangeAmount; 525 | if (uApertureSize > 100000.0) 526 | uApertureSize = 100000.0; 527 | uCameraIsMoving = true; 528 | } 529 | if (keyPressed('BracketLeft') && !keyPressed('BracketRight')) 530 | { 531 | uApertureSize -= apertureChangeAmount; 532 | if (uApertureSize < 0.0) 533 | uApertureSize = 0.0; 534 | uCameraIsMoving = true; 535 | } 536 | 537 | 538 | // now update uniforms that are common to all scenes 539 | if (increaseFOV) 540 | { 541 | camera.fov += (Math.PI / 180); 542 | if (camera.fov > 150 * (Math.PI / 180)) 543 | camera.fov = 150 * (Math.PI / 180); 544 | 545 | uVLen = Math.tan(camera.fov * 0.5); 546 | uULen = uVLen * (width / height); 547 | 548 | uCameraIsMoving = true; 549 | increaseFOV = false; 550 | } 551 | if (decreaseFOV) 552 | { 553 | camera.fov -= (Math.PI / 180); 554 | if (camera.fov < 1 * (Math.PI / 180)) 555 | camera.fov = 1 * (Math.PI / 180); 556 | 557 | uVLen = Math.tan(camera.fov * 0.5); 558 | uULen = uVLen * (width / height); 559 | 560 | uCameraIsMoving = true; 561 | decreaseFOV = false; 562 | } 563 | 564 | if (!uCameraIsMoving) 565 | { 566 | if (uSceneIsDynamic) 567 | uSampleCounter = 1.0; // reset for continuous updating of image 568 | else uSampleCounter += 1.0; // for progressive refinement of image 569 | 570 | uFrameCounter += 1.0; 571 | 572 | cameraRecentlyMoving = false; 573 | } 574 | 575 | if (uCameraIsMoving) 576 | { 577 | uFrameCounter += 1.0; 578 | 579 | if (!cameraRecentlyMoving) 580 | { 581 | // record current uSampleCounter value before it gets set to 1.0 below 582 | uPreviousSampleCount = uSampleCounter; 583 | uFrameCounter = 1.0; 584 | cameraRecentlyMoving = true; 585 | } 586 | 587 | uSampleCounter = 1.0; 588 | } 589 | 590 | uOneOverSampleCounter = 1.0 / uSampleCounter; 591 | 592 | // update Sun direction uniform 593 | //sunTransformNode.rotation.x += -0.01; 594 | uSunDirection.copyFrom(sunTransformNode.forward); 595 | 596 | 597 | // CAMERA INFO 598 | 
cameraInfoElement.innerHTML = "FOV( mousewheel ): " + (camera.fov * 180 / Math.PI).toFixed(0) + "<br>
" + "Aperture( [ and ] ): " + uApertureSize.toFixed(1) + 599 | "<br>
" + "FocusDistance( - and + ): " + uFocusDistance.toFixed(0) + "<br>
" + "Samples: " + uSampleCounter; 600 | 601 | // the following is necessary to update the user's world camera movement - should take no time at all 602 | pathTracingScene.render(); 603 | // now for the heavy lifter, the bulk of the frame time 604 | eRenderer.render(pathTracingEffect, pathTracingRenderTarget); 605 | // then simply copy(store) what the pathTracer just calculated - should take no time at all 606 | eRenderer.render(screenCopyEffect, screenCopyRenderTarget); 607 | // finally take the accumulated pathTracingRenderTarget buffer and average by numberOfSamples taken, then apply Reinhard tonemapping (brings image into friendly 0.0-1.0 rgb color float range), 608 | // and lastly raise to the power of (0.4545), in order to make gamma correction (gives more brightness range where it counts). This last step should also take minimal time 609 | eRenderer.render(screenOutputEffect, null); // null, because we don't feed this non-linear image-processed output back into the pathTracing accumulation buffer as it would 'pollute' the pathtracing unbounded linear color space 610 | 611 | stats.update(); 612 | }); // end engine.runRenderLoop(function () 613 | -------------------------------------------------------------------------------- /js/TransformedQuadricGeometry_FragmentShader.js: -------------------------------------------------------------------------------- 1 | BABYLON.Effect.ShadersStore["pathTracingFragmentShader"] = ` 2 | #version 300 es 3 | 4 | precision highp float; 5 | precision highp int; 6 | precision highp sampler2D; 7 | 8 | // Demo-specific Uniforms 9 | uniform mat4 uSphereInvMatrix; 10 | uniform mat4 uCylinderInvMatrix; 11 | uniform mat4 uConeInvMatrix; 12 | uniform mat4 uParaboloidInvMatrix; 13 | uniform mat4 uHyperboloidInvMatrix; 14 | uniform mat4 uCapsuleInvMatrix; 15 | uniform mat4 uFlattenedRingInvMatrix; 16 | uniform mat4 uBoxInvMatrix; 17 | uniform mat4 uPyramidFrustumInvMatrix; 18 | uniform mat4 uDiskInvMatrix; 19 | uniform mat4 uRectangleInvMatrix; 20 | uniform mat4 uTorusInvMatrix; 21 | uniform float uQuadLightPlaneSelectionNumber; 22 | uniform float uQuadLightRadius; 23 | uniform float uShapeK; 24 | uniform int uAllShapesMatType; 25 | 26 | // demo/scene-specific setup 27 | #define N_QUADS 6 28 | 29 | struct Quad { vec3 normal; vec3 v0; vec3 v1; vec3 v2; vec3 v3; vec3 color; int type; }; 30 | 31 | Quad quads[N_QUADS]; 32 | 33 | // the camera ray for this pixel (global variables) 34 | vec3 rayOrigin, rayDirection; 35 | 36 | 37 | // all required includes go here: 38 | 39 | #include // required on all scenes 40 | 41 | #include // required on all scenes 42 | 43 | #include // required on all scenes 44 | 45 | #include // required on scenes with any math-geometry shapes like sphere, cylinder, cone, etc. 
46 | 47 | #include // required on scenes with complex shapes (like the torus): a bounding sphere check will quickly eliminate rays that would miss 48 | 49 | #include // required on scenes with unit spheres that will be translated, rotated, and scaled by their matrix transform 50 | 51 | #include // required on scenes with unit cylinders that will be translated, rotated, and scaled by their matrix transform 52 | 53 | #include // required on scenes with unit cones that will be translated, rotated, and scaled by their matrix transform 54 | 55 | #include // required on scenes with unit paraboloids that will be translated, rotated, and scaled by their matrix transform 56 | 57 | #include // required on scenes with unit hyperboloids that will be translated, rotated, and scaled by their matrix transform 58 | 59 | #include // required on scenes with unit capsules that will be translated, rotated, and scaled by their matrix transform 60 | 61 | #include // required on scenes with unit flattened rings that will be translated, rotated, and scaled by their matrix transform 62 | 63 | #include // required on scenes with unit boxes that will be translated, rotated, and scaled by their matrix transform 64 | 65 | #include // required on scenes with pyramids/frustums that will be translated, rotated, and scaled by their matrix transform 66 | 67 | #include // required on scenes with unit disks that will be translated, rotated, and scaled by their matrix transform 68 | 69 | #include // required on scenes with unit rectangles that will be translated, rotated, and scaled by their matrix transform 70 | 71 | #include // required on scenes with unit torii/rings that will be translated, rotated, and scaled by their matrix transform 72 | 73 | #include // required on scenes with quads (actually internally they are made up of 2 triangles) 74 | 75 | #include // required on scenes with axis-aligned quad area lights (quad must reside in either XY, XZ, or YZ planes) 76 | 77 | 78 | //------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ 79 | float SceneIntersect( vec3 rayOrigin, vec3 rayDirection, out vec3 hitNormal, out vec3 hitEmission, out vec3 hitColor, out int hitType, out float hitObjectID ) 80 | //------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ 81 | { 82 | 83 | vec3 rObjOrigin, rObjDirection; 84 | vec3 hit, n, hitPos; 85 | float t, d, dt; 86 | int objectCount = 0; 87 | int insideSphere = FALSE; 88 | 89 | // initialize hit record 90 | t = INFINITY; 91 | hitType = -100; 92 | hitObjectID = -INFINITY; 93 | 94 | for (int i = 0; i < N_QUADS; i++) 95 | { 96 | d = QuadIntersect( quads[i].v0, quads[i].v1, quads[i].v2, quads[i].v3, rayOrigin, rayDirection, FALSE ); 97 | 98 | if (d < t) 99 | { 100 | t = d; 101 | hitNormal = quads[i].normal; 102 | hitColor = quads[i].color; 103 | hitType = quads[i].type; 104 | hitObjectID = float(objectCount); 105 | } 106 | 107 | objectCount++; 108 | } 109 | 110 | 111 | // transform ray into sphere's object space 112 | rObjOrigin = vec3( uSphereInvMatrix * vec4(rayOrigin, 1.0) ); 113 | rObjDirection = vec3( uSphereInvMatrix * vec4(rayDirection, 0.0) ); 114 | 115 | d = UnitSphereIntersect( rObjOrigin, rObjDirection, n ); 116 | // hit = rObjOrigin + rObjDirection * d; 117 | // hit = vec3( inverse(uSphereInvMatrix) * vec4(hit, 1.0) ); 118 | // d = 
distance(rayOrigin, hit); 119 | 120 | if (d < t) 121 | { 122 | t = d; 123 | hitNormal = transpose(mat3(uSphereInvMatrix)) * n; 124 | hitColor = vec3(1.0, 0.0, 0.0); 125 | hitType = uAllShapesMatType; 126 | hitObjectID = float(objectCount); 127 | } 128 | objectCount++; 129 | 130 | // transform ray into cylinder's object space 131 | rObjOrigin = vec3( uCylinderInvMatrix * vec4(rayOrigin, 1.0) ); 132 | rObjDirection = vec3( uCylinderInvMatrix * vec4(rayDirection, 0.0) ); 133 | 134 | d = UnitCylinderIntersect( rObjOrigin, rObjDirection, n ); 135 | 136 | if (d < t) 137 | { 138 | t = d; 139 | hitNormal = transpose(mat3(uCylinderInvMatrix)) * n; 140 | hitColor = vec3(0.0, 1.0, 0.0); 141 | hitType = uAllShapesMatType; 142 | hitObjectID = float(objectCount); 143 | } 144 | objectCount++; 145 | 146 | // transform ray into cone's object space 147 | rObjOrigin = vec3( uConeInvMatrix * vec4(rayOrigin, 1.0) ); 148 | rObjDirection = vec3( uConeInvMatrix * vec4(rayDirection, 0.0) ); 149 | 150 | d = UnitConeIntersect( rObjOrigin, rObjDirection, uShapeK, n ); 151 | 152 | if (d < t) 153 | { 154 | t = d; 155 | hitNormal = transpose(mat3(uConeInvMatrix)) * n; 156 | hitColor = vec3(1.0, 1.0, 0.0); 157 | hitType = uAllShapesMatType; 158 | hitObjectID = float(objectCount); 159 | } 160 | objectCount++; 161 | 162 | // transform ray into paraboloid's object space 163 | rObjOrigin = vec3( uParaboloidInvMatrix * vec4(rayOrigin, 1.0) ); 164 | rObjDirection = vec3( uParaboloidInvMatrix * vec4(rayDirection, 0.0) ); 165 | 166 | d = UnitParaboloidIntersect( rObjOrigin, rObjDirection, n ); 167 | 168 | if (d < t) 169 | { 170 | t = d; 171 | hitNormal = transpose(mat3(uParaboloidInvMatrix)) * n; 172 | hitColor = vec3(1.0, 0.0, 1.0); 173 | hitType = uAllShapesMatType; 174 | hitObjectID = float(objectCount); 175 | } 176 | objectCount++; 177 | 178 | // transform ray into hyperboloid's object space 179 | rObjOrigin = vec3( uHyperboloidInvMatrix * vec4(rayOrigin, 1.0) ); 180 | rObjDirection = vec3( uHyperboloidInvMatrix * vec4(rayDirection, 0.0) ); 181 | 182 | d = UnitHyperboloidIntersect( rObjOrigin, rObjDirection, uShapeK, n ); 183 | 184 | if (d < t) 185 | { 186 | t = d; 187 | hitNormal = transpose(mat3(uHyperboloidInvMatrix)) * n; 188 | hitColor = vec3(1.0, 0.1, 0.0); 189 | hitType = uAllShapesMatType; 190 | hitObjectID = float(objectCount); 191 | } 192 | objectCount++; 193 | 194 | // transform ray into capsule's object space 195 | rObjOrigin = vec3( uCapsuleInvMatrix * vec4(rayOrigin, 1.0) ); 196 | rObjDirection = vec3( uCapsuleInvMatrix * vec4(rayDirection, 0.0) ); 197 | 198 | d = UnitCapsuleIntersect( rObjOrigin, rObjDirection, uShapeK, n ); 199 | 200 | if (d < t) 201 | { 202 | t = d; 203 | hitNormal = transpose(mat3(uCapsuleInvMatrix)) * n; 204 | hitColor = vec3(0.5, 1.0, 0.0); 205 | hitType = uAllShapesMatType; 206 | hitObjectID = float(objectCount); 207 | } 208 | objectCount++; 209 | 210 | // transform ray into flattened ring's object space 211 | rObjOrigin = vec3( uFlattenedRingInvMatrix * vec4(rayOrigin, 1.0) ); 212 | rObjDirection = vec3( uFlattenedRingInvMatrix * vec4(rayDirection, 0.0) ); 213 | 214 | d = UnitFlattenedRingIntersect( rObjOrigin, rObjDirection, uShapeK, n ); 215 | 216 | if (d < t) 217 | { 218 | t = d; 219 | hitNormal = transpose(mat3(uFlattenedRingInvMatrix)) * n; 220 | hitColor = vec3(0.0, 0.4, 1.0); 221 | hitType = uAllShapesMatType; 222 | hitObjectID = float(objectCount); 223 | } 224 | objectCount++; 225 | 226 | // transform ray into box's object space 227 | rObjOrigin = vec3( uBoxInvMatrix * 
vec4(rayOrigin, 1.0) ); 228 | rObjDirection = vec3( uBoxInvMatrix * vec4(rayDirection, 0.0) ); 229 | 230 | d = UnitBoxIntersect( rObjOrigin, rObjDirection, n ); 231 | 232 | if (d < t) 233 | { 234 | t = d; 235 | hitNormal = transpose(mat3(uBoxInvMatrix)) * n; 236 | hitColor = vec3(0.0, 0.0, 1.0); 237 | hitType = uAllShapesMatType; 238 | hitObjectID = float(objectCount); 239 | } 240 | objectCount++; 241 | 242 | // transform ray into pyramid/frustum's object space 243 | rObjOrigin = vec3( uPyramidFrustumInvMatrix * vec4(rayOrigin, 1.0) ); 244 | rObjDirection = vec3( uPyramidFrustumInvMatrix * vec4(rayDirection, 0.0) ); 245 | 246 | d = PyramidFrustumIntersect( rObjOrigin, rObjDirection, uShapeK, n ); 247 | 248 | if (d < t) 249 | { 250 | t = d; 251 | hitNormal = transpose(mat3(uPyramidFrustumInvMatrix)) * n; 252 | hitColor = vec3(0.2, 0.0, 1.0); 253 | hitType = uAllShapesMatType; 254 | hitObjectID = float(objectCount); 255 | } 256 | objectCount++; 257 | 258 | // transform ray into disk's object space 259 | rObjOrigin = vec3( uDiskInvMatrix * vec4(rayOrigin, 1.0) ); 260 | rObjDirection = vec3( uDiskInvMatrix * vec4(rayDirection, 0.0) ); 261 | 262 | d = UnitDiskIntersect( rObjOrigin, rObjDirection ); 263 | 264 | if (d < t) 265 | { 266 | t = d; 267 | hitNormal = vec3(0,-1,0); 268 | hitNormal = transpose(mat3(uDiskInvMatrix)) * hitNormal; 269 | hitColor = vec3(0.0, 1.0, 0.5); 270 | hitType = uAllShapesMatType; 271 | hitObjectID = float(objectCount); 272 | } 273 | objectCount++; 274 | 275 | // transform ray into rectangle's object space 276 | rObjOrigin = vec3( uRectangleInvMatrix * vec4(rayOrigin, 1.0) ); 277 | rObjDirection = vec3( uRectangleInvMatrix * vec4(rayDirection, 0.0) ); 278 | 279 | d = UnitRectangleIntersect( rObjOrigin, rObjDirection ); 280 | 281 | if (d < t) 282 | { 283 | t = d; 284 | hitNormal = vec3(0,-1,0); 285 | hitNormal = transpose(mat3(uRectangleInvMatrix)) * hitNormal; 286 | hitColor = vec3(1.0, 0.3, 0.0); 287 | hitType = uAllShapesMatType; 288 | hitObjectID = float(objectCount); 289 | } 290 | objectCount++; 291 | 292 | // transform ray into torus's object space 293 | rObjOrigin = vec3( uTorusInvMatrix * vec4(rayOrigin, 1.0) ); 294 | rObjDirection = vec3( uTorusInvMatrix * vec4(rayDirection, 0.0) ); 295 | // first check that the ray hits the bounding sphere around the torus 296 | d = UnitBoundingSphereIntersect( rObjOrigin, rObjDirection, insideSphere ); 297 | if (d < INFINITY) 298 | { // if outside the sphere, move the ray up close to the Torus, for numerical stability 299 | d = insideSphere == TRUE ? 
0.0 : d; 300 | rObjOrigin += rObjDirection * d; 301 | 302 | dt = d + UnitTorusIntersect( rObjOrigin, rObjDirection, uShapeK, n ); 303 | if (dt < t) 304 | { 305 | t = dt; 306 | hitNormal = transpose(mat3(uTorusInvMatrix)) * n; 307 | hitColor = vec3(0.5, 0.0, 1.0); 308 | hitType = uAllShapesMatType; 309 | hitObjectID = float(objectCount); 310 | } 311 | } 312 | 313 | return t; 314 | 315 | } // end float SceneIntersect( vec3 rayOrigin, vec3 rayDirection, out vec3 hitNormal, out vec3 hitEmission, out vec3 hitColor, out int hitType, out float hitObjectID ) 316 | 317 | 318 | 319 | //----------------------------------------------------------------------------------------------------------------------------- 320 | vec3 CalculateRadiance( out vec3 objectNormal, out vec3 objectColor, out float objectID, out float pixelSharpness ) 321 | //----------------------------------------------------------------------------------------------------------------------------- 322 | { 323 | // a record of ray-surface intersection data 324 | vec3 hitNormal, hitEmission, hitColor; 325 | vec2 hitUV; 326 | float t, hitObjectID; 327 | int hitTextureID; 328 | int hitType; 329 | 330 | Quad light = quads[5]; 331 | 332 | vec3 accumCol = vec3(0); 333 | vec3 mask = vec3(1); 334 | vec3 reflectionMask = vec3(1); 335 | vec3 reflectionRayOrigin = vec3(0); 336 | vec3 reflectionRayDirection = vec3(0); 337 | vec3 dirToLight; 338 | vec3 tdir; 339 | vec3 x, n, nl; 340 | vec3 absorptionCoefficient; 341 | 342 | float nc, nt, ratioIoR, Re, Tr; 343 | //float P, RP, TP; 344 | float weight; 345 | float thickness = 0.05; 346 | float scatteringDistance; 347 | 348 | int diffuseCount = 0; 349 | int previousIntersecType = -100; 350 | hitType = -100; 351 | 352 | int coatTypeIntersected = FALSE; 353 | int bounceIsSpecular = TRUE; 354 | int sampleLight = FALSE; 355 | int willNeedReflectionRay = FALSE; 356 | 357 | 358 | for (int bounces = 0; bounces < 8; bounces++) 359 | { 360 | previousIntersecType = hitType; 361 | 362 | t = SceneIntersect(rayOrigin, rayDirection, hitNormal, hitEmission, hitColor, hitType, hitObjectID); 363 | 364 | 365 | if (t == INFINITY) 366 | { 367 | if (willNeedReflectionRay == TRUE) 368 | { 369 | mask = reflectionMask; 370 | rayOrigin = reflectionRayOrigin; 371 | rayDirection = reflectionRayDirection; 372 | 373 | willNeedReflectionRay = FALSE; 374 | bounceIsSpecular = TRUE; 375 | sampleLight = FALSE; 376 | diffuseCount = 0; 377 | continue; 378 | } 379 | 380 | break; 381 | } 382 | 383 | // useful data 384 | n = normalize(hitNormal); 385 | nl = dot(n, rayDirection) < 0.0 ? 
n : -n; 386 | x = rayOrigin + rayDirection * t; 387 | 388 | if (bounces == 0) 389 | { 390 | objectNormal = nl; 391 | objectColor = hitColor; 392 | objectID = hitObjectID; 393 | } 394 | if (bounces == 1 && previousIntersecType == METAL) 395 | { 396 | objectNormal = nl; 397 | } 398 | 399 | 400 | if (hitType == LIGHT) 401 | { 402 | if (bounces == 0 || (bounces == 1 && previousIntersecType == METAL)) 403 | pixelSharpness = 1.01; 404 | 405 | if (diffuseCount == 0) 406 | { 407 | objectNormal = nl; 408 | objectColor = hitColor; 409 | objectID = hitObjectID; 410 | } 411 | 412 | if (bounceIsSpecular == TRUE || sampleLight == TRUE) 413 | accumCol += mask * hitColor; 414 | 415 | if (willNeedReflectionRay == TRUE) 416 | { 417 | mask = reflectionMask; 418 | rayOrigin = reflectionRayOrigin; 419 | rayDirection = reflectionRayDirection; 420 | 421 | willNeedReflectionRay = FALSE; 422 | bounceIsSpecular = TRUE; 423 | sampleLight = FALSE; 424 | diffuseCount = 0; 425 | continue; 426 | } 427 | // reached a light, so we can exit 428 | break; 429 | 430 | } // end if (hitType == LIGHT) 431 | 432 | 433 | // if we get here and sampleLight is still TRUE, shadow ray failed to find the light source 434 | // the ray hit an occluding object along its way to the light 435 | if (sampleLight == TRUE) 436 | { 437 | if (willNeedReflectionRay == TRUE) 438 | { 439 | mask = reflectionMask; 440 | rayOrigin = reflectionRayOrigin; 441 | rayDirection = reflectionRayDirection; 442 | 443 | willNeedReflectionRay = FALSE; 444 | bounceIsSpecular = TRUE; 445 | sampleLight = FALSE; 446 | diffuseCount = 0; 447 | continue; 448 | } 449 | 450 | break; 451 | } 452 | 453 | 454 | 455 | if (hitType == DIFFUSE) // Ideal diffuse reflection 456 | { 457 | diffuseCount++; 458 | 459 | mask *= hitColor; 460 | 461 | bounceIsSpecular = FALSE; 462 | 463 | if (diffuseCount == 1 && blueNoise_rand() < 0.5) 464 | { 465 | mask *= 2.0; 466 | // choose random Diffuse sample vector 467 | rayDirection = randomCosWeightedDirectionInHemisphere(nl); 468 | rayOrigin = x + nl * uEPS_intersect; 469 | continue; 470 | } 471 | 472 | dirToLight = sampleAxisAlignedQuadLight(x, nl, quads[5], weight); 473 | mask *= diffuseCount == 1 ? 2.0 : 1.0; 474 | mask *= weight; 475 | 476 | rayDirection = dirToLight; 477 | rayOrigin = x + nl * uEPS_intersect; 478 | 479 | sampleLight = TRUE; 480 | continue; 481 | 482 | } // end if (hitType == DIFFUSE) 483 | 484 | 485 | if (hitType == METAL) // Ideal metal specular reflection 486 | { 487 | mask *= hitColor; 488 | 489 | rayDirection = reflect(rayDirection, nl); 490 | rayOrigin = x + nl * uEPS_intersect; 491 | 492 | continue; 493 | } 494 | 495 | 496 | if (hitType == TRANSPARENT) // Ideal dielectric specular reflection/refraction 497 | { 498 | pixelSharpness = diffuseCount == 0 && coatTypeIntersected == FALSE ? 
-1.0 : pixelSharpness; 499 | 500 | nc = 1.0; // IOR of Air 501 | nt = 1.5; // IOR of common Glass 502 | Re = calcFresnelReflectance(rayDirection, n, nc, nt, ratioIoR); 503 | Tr = 1.0 - Re; 504 | 505 | if (bounces == 0 || (bounces == 1 && hitObjectID != objectID && bounceIsSpecular == TRUE)) 506 | { 507 | reflectionMask = mask * Re; 508 | reflectionRayDirection = reflect(rayDirection, nl); // reflect ray from surface 509 | reflectionRayOrigin = x + nl * uEPS_intersect; 510 | willNeedReflectionRay = TRUE; 511 | } 512 | 513 | if (Re == 1.0) 514 | { 515 | mask = reflectionMask; 516 | rayOrigin = reflectionRayOrigin; 517 | rayDirection = reflectionRayDirection; 518 | 519 | willNeedReflectionRay = FALSE; 520 | bounceIsSpecular = TRUE; 521 | sampleLight = FALSE; 522 | continue; 523 | } 524 | 525 | // transmit ray through surface 526 | 527 | mask *= Tr; 528 | mask *= hitColor; 529 | 530 | tdir = refract(rayDirection, nl, ratioIoR); 531 | rayDirection = tdir; 532 | rayOrigin = x - nl * uEPS_intersect; 533 | 534 | if (diffuseCount == 1) 535 | bounceIsSpecular = TRUE; // turn on refracting caustics 536 | 537 | continue; 538 | 539 | } // end if (hitType == TRANSPARENT) 540 | 541 | 542 | if (hitType == CLEARCOAT_DIFFUSE) // Diffuse object underneath with ClearCoat on top 543 | { 544 | coatTypeIntersected = TRUE; 545 | 546 | nc = 1.0; // IOR of Air 547 | nt = 1.5; // IOR of Clear Coat 548 | Re = calcFresnelReflectance(rayDirection, nl, nc, nt, ratioIoR); 549 | Tr = 1.0 - Re; 550 | 551 | if (bounces == 0 || (bounces == 1 && hitObjectID != objectID && bounceIsSpecular == TRUE)) 552 | { 553 | reflectionMask = mask * Re; 554 | reflectionRayDirection = reflect(rayDirection, nl); // reflect ray from surface 555 | reflectionRayOrigin = x + nl * uEPS_intersect; 556 | willNeedReflectionRay = TRUE; 557 | } 558 | 559 | diffuseCount++; 560 | 561 | if (bounces == 0) 562 | mask *= Tr; 563 | mask *= hitColor; 564 | 565 | bounceIsSpecular = FALSE; 566 | 567 | if (diffuseCount == 1 && blueNoise_rand() < 0.5) 568 | { 569 | mask *= 2.0; 570 | // choose random Diffuse sample vector 571 | rayDirection = randomCosWeightedDirectionInHemisphere(nl); 572 | rayOrigin = x + nl * uEPS_intersect; 573 | continue; 574 | } 575 | 576 | dirToLight = sampleAxisAlignedQuadLight(x, nl, quads[5], weight); 577 | mask *= diffuseCount == 1 ? 
2.0 : 1.0; 578 | mask *= weight; 579 | 580 | rayDirection = dirToLight; 581 | rayOrigin = x + nl * uEPS_intersect; 582 | 583 | // this check helps keep random noisy bright pixels from this clearCoat diffuse surface out of the possible previous refracted glass surface 584 | if (bounces < 3) 585 | sampleLight = TRUE; 586 | continue; 587 | 588 | } //end if (hitType == CLEARCOAT_DIFFUSE) 589 | 590 | } // end for (int bounces = 0; bounces < 8; bounces++) 591 | 592 | 593 | return max(vec3(0), accumCol); 594 | 595 | } // end vec3 CalculateRadiance( out vec3 objectNormal, out vec3 objectColor, out float objectID, out float pixelSharpness ) 596 | 597 | 598 | //----------------------------------------------------------------------------------------------- 599 | void SetupScene(void) 600 | //----------------------------------------------------------------------------------------------- 601 | { 602 | vec3 light_emissionColor = vec3(1.0, 1.0, 1.0) * 5.0; // Bright white light 603 | float wallRadius = 50.0; 604 | float lightRadius = uQuadLightRadius * 0.2; 605 | 606 | quads[0] = Quad( vec3( 0, 0, 1), vec3(-wallRadius, wallRadius, wallRadius), vec3( wallRadius, wallRadius, wallRadius), vec3( wallRadius,-wallRadius, wallRadius), vec3(-wallRadius,-wallRadius, wallRadius), vec3( 1.0, 1.0, 1.0), DIFFUSE);// Back Wall 607 | quads[1] = Quad( vec3( 1, 0, 0), vec3(-wallRadius,-wallRadius, wallRadius), vec3(-wallRadius,-wallRadius,-wallRadius), vec3(-wallRadius, wallRadius,-wallRadius), vec3(-wallRadius, wallRadius, wallRadius), vec3( 0.7, 0.05, 0.05), DIFFUSE);// Left Wall Red 608 | quads[2] = Quad( vec3(-1, 0, 0), vec3( wallRadius,-wallRadius,-wallRadius), vec3( wallRadius,-wallRadius, wallRadius), vec3( wallRadius, wallRadius, wallRadius), vec3( wallRadius, wallRadius,-wallRadius), vec3(0.05, 0.05, 0.7), DIFFUSE);// Right Wall Blue 609 | quads[3] = Quad( vec3( 0,-1, 0), vec3(-wallRadius, wallRadius,-wallRadius), vec3( wallRadius, wallRadius,-wallRadius), vec3( wallRadius, wallRadius, wallRadius), vec3(-wallRadius, wallRadius, wallRadius), vec3( 1.0, 1.0, 1.0), DIFFUSE);// Ceiling 610 | quads[4] = Quad( vec3( 0, 1, 0), vec3(-wallRadius,-wallRadius, wallRadius), vec3( wallRadius,-wallRadius, wallRadius), vec3( wallRadius,-wallRadius,-wallRadius), vec3(-wallRadius,-wallRadius,-wallRadius), vec3( 1.0, 1.0, 1.0), DIFFUSE);// Floor 611 | 612 | if (uQuadLightPlaneSelectionNumber == 1.0) 613 | quads[5] = Quad( vec3(-1, 0, 0), vec3(wallRadius-1.0,-lightRadius, lightRadius), vec3(wallRadius-1.0, lightRadius, lightRadius), vec3(wallRadius-1.0, lightRadius,-lightRadius), vec3(wallRadius-1.0,-lightRadius,-lightRadius), light_emissionColor, LIGHT);// Quad Area Light on right wall 614 | else if (uQuadLightPlaneSelectionNumber == 2.0) 615 | quads[5] = Quad( vec3( 1, 0, 0), vec3(-wallRadius+1.0,-lightRadius,-lightRadius), vec3(-wallRadius+1.0, lightRadius,-lightRadius), vec3(-wallRadius+1.0, lightRadius, lightRadius), vec3(-wallRadius+1.0,-lightRadius, lightRadius), light_emissionColor, LIGHT);// Quad Area Light on left wall 616 | else if (uQuadLightPlaneSelectionNumber == 3.0) 617 | quads[5] = Quad( vec3( 0, 0, 1), vec3(-lightRadius,-lightRadius, -wallRadius+1.0), vec3(lightRadius,-lightRadius, -wallRadius+1.0), vec3(lightRadius, lightRadius, -wallRadius+1.0), vec3(-lightRadius, lightRadius, -wallRadius+1.0), light_emissionColor, LIGHT);// Quad Area Light on front 'wall'(opening of box) 618 | else if (uQuadLightPlaneSelectionNumber == 4.0) 619 | quads[5] = Quad( vec3( 0, 0,-1), vec3(-lightRadius,-lightRadius, wallRadius-1.0), 
vec3(-lightRadius, lightRadius, wallRadius-1.0), vec3(lightRadius, lightRadius, wallRadius-1.0), vec3(lightRadius,-lightRadius, wallRadius-1.0), light_emissionColor, LIGHT);// Quad Area Light on back wall 620 | else if (uQuadLightPlaneSelectionNumber == 5.0) 621 | quads[5] = Quad( vec3( 0, 1, 0), vec3(-lightRadius, -wallRadius+1.0,-lightRadius), vec3(-lightRadius, -wallRadius+1.0, lightRadius), vec3(lightRadius, -wallRadius+1.0, lightRadius), vec3(lightRadius, -wallRadius+1.0,-lightRadius), light_emissionColor, LIGHT);// Quad Area Light on floor 622 | else if (uQuadLightPlaneSelectionNumber == 6.0) 623 | quads[5] = Quad( vec3( 0,-1, 0), vec3(-lightRadius, wallRadius-1.0,-lightRadius), vec3(lightRadius, wallRadius-1.0,-lightRadius), vec3(lightRadius, wallRadius-1.0, lightRadius), vec3(-lightRadius, wallRadius-1.0, lightRadius), light_emissionColor, LIGHT);// Quad Area Light on ceiling 624 | 625 | } // end void SetupScene(void) 626 | 627 | 628 | // if your scene is static and doesn't have any special requirements, you can use the default main() 629 | #include 630 | 631 | `; 632 | -------------------------------------------------------------------------------- /js/stats.min.js: -------------------------------------------------------------------------------- 1 | // stats.js - http://github.com/mrdoob/stats.js 2 | var Stats=function(){var l=Date.now(),m=l,g=0,n=Infinity,o=0,h=0,p=Infinity,q=0,r=0,s=0,f=document.createElement("div");f.id="stats";f.addEventListener("mousedown",function(b){b.preventDefault();t(++s%2)},!1);f.style.cssText="width:80px;opacity:0.9;cursor:pointer";var a=document.createElement("div");a.id="fps";a.style.cssText="padding:0 0 3px 3px;text-align:left;background-color:#002";f.appendChild(a);var i=document.createElement("div");i.id="fpsText";i.style.cssText="color:#0ff;font-family:Helvetica,Arial,sans-serif;font-size:9px;font-weight:bold;line-height:15px"; 3 | i.innerHTML="FPS";a.appendChild(i);var c=document.createElement("div");c.id="fpsGraph";c.style.cssText="position:relative;width:74px;height:30px;background-color:#0ff";for(a.appendChild(c);74>c.children.length;){var j=document.createElement("span");j.style.cssText="width:1px;height:30px;float:left;background-color:#113";c.appendChild(j)}var d=document.createElement("div");d.id="ms";d.style.cssText="padding:0 0 3px 3px;text-align:left;background-color:#020;display:none";f.appendChild(d);var k=document.createElement("div"); 4 | k.id="msText";k.style.cssText="color:#0f0;font-family:Helvetica,Arial,sans-serif;font-size:9px;font-weight:bold;line-height:15px";k.innerHTML="MS";d.appendChild(k);var e=document.createElement("div");e.id="msGraph";e.style.cssText="position:relative;width:74px;height:30px;background-color:#0f0";for(d.appendChild(e);74>e.children.length;)j=document.createElement("span"),j.style.cssText="width:1px;height:30px;float:left;background-color:#131",e.appendChild(j);var t=function(b){s=b;switch(s){case 0:a.style.display= 5 | "block";d.style.display="none";break;case 1:a.style.display="none",d.style.display="block"}};return{REVISION:11,domElement:f,setMode:t,begin:function(){l=Date.now()},end:function(){var b=Date.now();g=b-l;n=Math.min(n,g);o=Math.max(o,g);k.textContent=g+" MS ("+n+"-"+o+")";var a=Math.min(30,30-30*(g/200));e.appendChild(e.firstChild).style.height=a+"px";r++;b>m+1E3&&(h=Math.round(1E3*r/(b-m)),p=Math.min(p,h),q=Math.max(q,h),i.textContent=h+" FPS ("+p+"-"+q+")",a=Math.min(30,30-30*(h/100)),c.appendChild(c.firstChild).style.height= 6 | a+"px",m=b,r=0);return 
b},update:function(){l=this.end()}}}; 7 | -------------------------------------------------------------------------------- /models/DamagedHelmet.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/erichlof/Babylon.js-PathTracing-Renderer/840054c0227cbe3e38ef47359f9b84beee544af9/models/DamagedHelmet.bin -------------------------------------------------------------------------------- /models/DamagedHelmet.gltf: -------------------------------------------------------------------------------- 1 | { 2 | "accessors" : [ 3 | { 4 | "bufferView" : 0, 5 | "componentType" : 5123, 6 | "count" : 46356, 7 | "max" : [ 8 | 14555 9 | ], 10 | "min" : [ 11 | 0 12 | ], 13 | "type" : "SCALAR" 14 | }, 15 | { 16 | "bufferView" : 1, 17 | "componentType" : 5126, 18 | "count" : 14556, 19 | "max" : [ 20 | 0.9424954056739807, 21 | 0.8128451108932495, 22 | 0.900973916053772 23 | ], 24 | "min" : [ 25 | -0.9474585652351379, 26 | -1.18715500831604, 27 | -0.9009949564933777 28 | ], 29 | "type" : "VEC3" 30 | }, 31 | { 32 | "bufferView" : 2, 33 | "componentType" : 5126, 34 | "count" : 14556, 35 | "max" : [ 36 | 1.0, 37 | 1.0, 38 | 1.0 39 | ], 40 | "min" : [ 41 | -1.0, 42 | -1.0, 43 | -1.0 44 | ], 45 | "type" : "VEC3" 46 | }, 47 | { 48 | "bufferView" : 3, 49 | "componentType" : 5126, 50 | "count" : 14556, 51 | "max" : [ 52 | 0.9999759793281555, 53 | 1.998665988445282 54 | ], 55 | "min" : [ 56 | 0.002448640065267682, 57 | 1.0005531199858524 58 | ], 59 | "type" : "VEC2" 60 | } 61 | ], 62 | "asset" : { 63 | "generator" : "Khronos Blender glTF 2.0 exporter", 64 | "version" : "2.0" 65 | }, 66 | "bufferViews" : [ 67 | { 68 | "buffer" : 0, 69 | "byteLength" : 92712, 70 | "byteOffset" : 0, 71 | "target" : 34963 72 | }, 73 | { 74 | "buffer" : 0, 75 | "byteLength" : 174672, 76 | "byteOffset" : 92712, 77 | "target" : 34962 78 | }, 79 | { 80 | "buffer" : 0, 81 | "byteLength" : 174672, 82 | "byteOffset" : 267384, 83 | "target" : 34962 84 | }, 85 | { 86 | "buffer" : 0, 87 | "byteLength" : 116448, 88 | "byteOffset" : 442056, 89 | "target" : 34962 90 | } 91 | ], 92 | "buffers" : [ 93 | { 94 | "byteLength" : 558504, 95 | "uri" : "DamagedHelmet.bin" 96 | } 97 | ], 98 | "images" : [ 99 | { 100 | "uri" : "materials/DamagedHelmet/Default_albedo.jpg" 101 | }, 102 | { 103 | "uri" : "materials/DamagedHelmet/Default_metalRoughness.jpg" 104 | }, 105 | { 106 | "uri" : "materials/DamagedHelmet/Default_emissive.jpg" 107 | }, 108 | { 109 | "uri" : "materials/DamagedHelmet/Default_AO.jpg" 110 | }, 111 | { 112 | "uri" : "materials/DamagedHelmet/Default_normal.jpg" 113 | } 114 | ], 115 | "materials" : [ 116 | { 117 | "emissiveFactor" : [ 118 | 1.0, 119 | 1.0, 120 | 1.0 121 | ], 122 | "emissiveTexture" : { 123 | "index" : 2 124 | }, 125 | "name" : "Material_MR", 126 | "normalTexture" : { 127 | "index" : 4 128 | }, 129 | "occlusionTexture" : { 130 | "index" : 3 131 | }, 132 | "pbrMetallicRoughness" : { 133 | "baseColorTexture" : { 134 | "index" : 0 135 | }, 136 | "metallicRoughnessTexture" : { 137 | "index" : 1 138 | } 139 | } 140 | } 141 | ], 142 | "meshes" : [ 143 | { 144 | "name" : "mesh_helmet_LP_13930damagedHelmet", 145 | "primitives" : [ 146 | { 147 | "attributes" : { 148 | "NORMAL" : 2, 149 | "POSITION" : 1, 150 | "TEXCOORD_0" : 3 151 | }, 152 | "indices" : 0, 153 | "material" : 0 154 | } 155 | ] 156 | } 157 | ], 158 | "nodes" : [ 159 | { 160 | "mesh" : 0, 161 | "name" : "node_damagedHelmet_-6514", 162 | "rotation" : [ 163 | 0.0, 164 | 0.0, 165 | 0.0, 166 | 0.0 167 | ] 168 | } 169 | ], 
170 | "samplers" : [ 171 | {} 172 | ], 173 | "scene" : 0, 174 | "scenes" : [ 175 | { 176 | "name" : "Scene", 177 | "nodes" : [ 178 | 0 179 | ] 180 | } 181 | ], 182 | "textures" : [ 183 | { 184 | "sampler" : 0, 185 | "source" : 0 186 | }, 187 | { 188 | "sampler" : 0, 189 | "source" : 1 190 | }, 191 | { 192 | "sampler" : 0, 193 | "source" : 2 194 | }, 195 | { 196 | "sampler" : 0, 197 | "source" : 3 198 | }, 199 | { 200 | "sampler" : 0, 201 | "source" : 4 202 | } 203 | ] 204 | } -------------------------------------------------------------------------------- /models/Duck.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/erichlof/Babylon.js-PathTracing-Renderer/840054c0227cbe3e38ef47359f9b84beee544af9/models/Duck.bin -------------------------------------------------------------------------------- /models/Duck.gltf: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "generator": "COLLADA2GLTF", 4 | "version": "2.0" 5 | }, 6 | "scene": 0, 7 | "scenes": [ 8 | { 9 | "nodes": [ 10 | 0 11 | ] 12 | } 13 | ], 14 | "nodes": [ 15 | { 16 | "children": [ 17 | 2, 18 | 1 19 | ], 20 | "matrix": [ 21 | 0.009999999776482582, 22 | 0.0, 23 | 0.0, 24 | 0.0, 25 | 0.0, 26 | 0.009999999776482582, 27 | 0.0, 28 | 0.0, 29 | 0.0, 30 | 0.0, 31 | 0.009999999776482582, 32 | 0.0, 33 | 0.0, 34 | 0.0, 35 | 0.0, 36 | 1.0 37 | ] 38 | }, 39 | { 40 | "matrix": [ 41 | -0.7289686799049377, 42 | 0.0, 43 | -0.6845470666885376, 44 | 0.0, 45 | -0.4252049028873444, 46 | 0.7836934328079224, 47 | 0.4527972936630249, 48 | 0.0, 49 | 0.5364750623703003, 50 | 0.6211478114128113, 51 | -0.571287989616394, 52 | 0.0, 53 | 400.1130065917969, 54 | 463.2640075683594, 55 | -431.0780334472656, 56 | 1.0 57 | ], 58 | "camera": 0 59 | }, 60 | { 61 | "mesh": 0 62 | } 63 | ], 64 | "cameras": [ 65 | { 66 | "perspective": { 67 | "aspectRatio": 1.5, 68 | "yfov": 0.6605925559997559, 69 | "zfar": 10000.0, 70 | "znear": 1.0 71 | }, 72 | "type": "perspective" 73 | } 74 | ], 75 | "meshes": [ 76 | { 77 | "primitives": [ 78 | { 79 | "attributes": { 80 | "NORMAL": 1, 81 | "POSITION": 2, 82 | "TEXCOORD_0": 3 83 | }, 84 | "indices": 0, 85 | "mode": 4, 86 | "material": 0 87 | } 88 | ], 89 | "name": "LOD3spShape" 90 | } 91 | ], 92 | "accessors": [ 93 | { 94 | "bufferView": 0, 95 | "byteOffset": 0, 96 | "componentType": 5123, 97 | "count": 12636, 98 | "max": [ 99 | 2398 100 | ], 101 | "min": [ 102 | 0 103 | ], 104 | "type": "SCALAR" 105 | }, 106 | { 107 | "bufferView": 1, 108 | "byteOffset": 0, 109 | "componentType": 5126, 110 | "count": 2399, 111 | "max": [ 112 | 0.9995989799499512, 113 | 0.999580979347229, 114 | 0.9984359741210938 115 | ], 116 | "min": [ 117 | -0.9990839958190918, 118 | -1.0, 119 | -0.9998319745063782 120 | ], 121 | "type": "VEC3" 122 | }, 123 | { 124 | "bufferView": 1, 125 | "byteOffset": 28788, 126 | "componentType": 5126, 127 | "count": 2399, 128 | "max": [ 129 | 96.17990112304688, 130 | 163.97000122070313, 131 | 53.92519760131836 132 | ], 133 | "min": [ 134 | -69.29850006103516, 135 | 9.929369926452637, 136 | -61.32819747924805 137 | ], 138 | "type": "VEC3" 139 | }, 140 | { 141 | "bufferView": 2, 142 | "byteOffset": 0, 143 | "componentType": 5126, 144 | "count": 2399, 145 | "max": [ 146 | 0.9833459854125976, 147 | 0.9800369739532472 148 | ], 149 | "min": [ 150 | 0.026409000158309938, 151 | 0.01996302604675293 152 | ], 153 | "type": "VEC2" 154 | } 155 | ], 156 | "materials": [ 157 | { 158 | "pbrMetallicRoughness": { 159 | 
"baseColorTexture": { 160 | "index": 0 161 | }, 162 | "metallicFactor": 0.0 163 | }, 164 | "emissiveFactor": [ 165 | 0.0, 166 | 0.0, 167 | 0.0 168 | ], 169 | "name": "blinn3-fx" 170 | } 171 | ], 172 | "textures": [ 173 | { 174 | "sampler": 0, 175 | "source": 0 176 | } 177 | ], 178 | "images": [ 179 | { 180 | "uri": "materials/Duck/DuckCM.png" 181 | } 182 | ], 183 | "samplers": [ 184 | { 185 | "magFilter": 9729, 186 | "minFilter": 9986, 187 | "wrapS": 10497, 188 | "wrapT": 10497 189 | } 190 | ], 191 | "bufferViews": [ 192 | { 193 | "buffer": 0, 194 | "byteOffset": 76768, 195 | "byteLength": 25272, 196 | "target": 34963 197 | }, 198 | { 199 | "buffer": 0, 200 | "byteOffset": 0, 201 | "byteLength": 57576, 202 | "byteStride": 12, 203 | "target": 34962 204 | }, 205 | { 206 | "buffer": 0, 207 | "byteOffset": 57576, 208 | "byteLength": 19192, 209 | "byteStride": 8, 210 | "target": 34962 211 | } 212 | ], 213 | "buffers": [ 214 | { 215 | "byteLength": 102040, 216 | "uri": "Duck.bin" 217 | } 218 | ] 219 | } -------------------------------------------------------------------------------- /models/StanfordBunny.glb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/erichlof/Babylon.js-PathTracing-Renderer/840054c0227cbe3e38ef47359f9b84beee544af9/models/StanfordBunny.glb -------------------------------------------------------------------------------- /models/StanfordDragon.glb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/erichlof/Babylon.js-PathTracing-Renderer/840054c0227cbe3e38ef47359f9b84beee544af9/models/StanfordDragon.glb -------------------------------------------------------------------------------- /models/UtahTeapot.glb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/erichlof/Babylon.js-PathTracing-Renderer/840054c0227cbe3e38ef47359f9b84beee544af9/models/UtahTeapot.glb -------------------------------------------------------------------------------- /models/materials/DamagedHelmet/Default_AO.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/erichlof/Babylon.js-PathTracing-Renderer/840054c0227cbe3e38ef47359f9b84beee544af9/models/materials/DamagedHelmet/Default_AO.jpg -------------------------------------------------------------------------------- /models/materials/DamagedHelmet/Default_albedo.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/erichlof/Babylon.js-PathTracing-Renderer/840054c0227cbe3e38ef47359f9b84beee544af9/models/materials/DamagedHelmet/Default_albedo.jpg -------------------------------------------------------------------------------- /models/materials/DamagedHelmet/Default_emissive.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/erichlof/Babylon.js-PathTracing-Renderer/840054c0227cbe3e38ef47359f9b84beee544af9/models/materials/DamagedHelmet/Default_emissive.jpg -------------------------------------------------------------------------------- /models/materials/DamagedHelmet/Default_metalRoughness.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/erichlof/Babylon.js-PathTracing-Renderer/840054c0227cbe3e38ef47359f9b84beee544af9/models/materials/DamagedHelmet/Default_metalRoughness.jpg 
-------------------------------------------------------------------------------- /models/materials/DamagedHelmet/Default_normal.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/erichlof/Babylon.js-PathTracing-Renderer/840054c0227cbe3e38ef47359f9b84beee544af9/models/materials/DamagedHelmet/Default_normal.jpg -------------------------------------------------------------------------------- /models/materials/Duck/DuckCM.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/erichlof/Babylon.js-PathTracing-Renderer/840054c0227cbe3e38ef47359f9b84beee544af9/models/materials/Duck/DuckCM.png -------------------------------------------------------------------------------- /textures/BlueNoise_RGBA256.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/erichlof/Babylon.js-PathTracing-Renderer/840054c0227cbe3e38ef47359f9b84beee544af9/textures/BlueNoise_RGBA256.png -------------------------------------------------------------------------------- /textures/cloud_layers_2k.hdr: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/erichlof/Babylon.js-PathTracing-Renderer/840054c0227cbe3e38ef47359f9b84beee544af9/textures/cloud_layers_2k.hdr -------------------------------------------------------------------------------- /textures/delta_2_2k.hdr: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/erichlof/Babylon.js-PathTracing-Renderer/840054c0227cbe3e38ef47359f9b84beee544af9/textures/delta_2_2k.hdr -------------------------------------------------------------------------------- /textures/kiara_5_noon_2k.hdr: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/erichlof/Babylon.js-PathTracing-Renderer/840054c0227cbe3e38ef47359f9b84beee544af9/textures/kiara_5_noon_2k.hdr -------------------------------------------------------------------------------- /textures/noon_grass_2k.hdr: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/erichlof/Babylon.js-PathTracing-Renderer/840054c0227cbe3e38ef47359f9b84beee544af9/textures/noon_grass_2k.hdr -------------------------------------------------------------------------------- /textures/symmetrical_garden_2k.hdr: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/erichlof/Babylon.js-PathTracing-Renderer/840054c0227cbe3e38ef47359f9b84beee544af9/textures/symmetrical_garden_2k.hdr --------------------------------------------------------------------------------