├── .babelrc ├── .eslintrc ├── .github └── workflows │ └── publish_package.yml ├── .gitignore ├── .npmignore ├── .nvmrc ├── .release-it.json ├── .travis.yml ├── CODEOWNERS ├── LICENSE ├── PULL_REQUEST_TEMPLATE.md ├── README.md ├── _config.yml ├── build ├── RayTracingRenderer.es5.js └── RayTracingRenderer.js ├── jest.config.js ├── package-lock.json ├── package.json ├── preview.jpg ├── rollup.config.js ├── scenes ├── envmaps │ ├── blurry-sunset-with-dirlight.hdr │ ├── gray-background-with-dirlight.hdr │ └── street-by-water.hdr ├── renderer-test │ ├── credits.txt │ ├── diffuse.png │ ├── envmap.hdr │ ├── glass_diffuse.png │ ├── glass_normal.png │ ├── index.html │ ├── main.js │ ├── metalness.png │ ├── metalrough.png │ ├── normal.png │ └── roughness.png ├── sample-models │ ├── dev.html │ ├── index.html │ └── main.js └── webgl-comparison │ ├── dev.html │ ├── envmap.jpg │ ├── index.html │ ├── main.js │ └── scene.gltf ├── scripts ├── env-vars.sh └── release.sh ├── src ├── EnvironmentLight.js ├── LensCamera.js ├── RayTracingMaterial.js ├── RayTracingRenderer.js ├── SoftDirectionalLight.js ├── constants.js ├── main.js └── renderer │ ├── Framebuffer.js │ ├── FullscreenQuad.js │ ├── GBufferPass.js │ ├── MaterialBuffer.js │ ├── RayTracePass.js │ ├── RenderPass.js │ ├── RenderSize.js │ ├── RenderingPipeline.js │ ├── ReprojectPass.js │ ├── StratifiedSampler.js │ ├── StratifiedSamplerCombined.js │ ├── Texture.js │ ├── TileRender.js │ ├── ToneMapPass.js │ ├── UniformBuffer.js │ ├── UniformSetter.js │ ├── bvhAccel.js │ ├── bvhUtil.js │ ├── decomposeScene.js │ ├── envMapCreation.js │ ├── envMapDistribution.js │ ├── glUtil.js │ ├── glsl │ ├── chunks │ │ ├── bsdf.glsl │ │ ├── constants.glsl │ │ ├── envMap.glsl │ │ ├── intersect.glsl │ │ ├── materialBuffer.glsl │ │ ├── random.glsl │ │ ├── rayTraceCore.glsl │ │ ├── sample.glsl │ │ ├── sampleGlassMicrofacet.glsl │ │ ├── sampleGlassSpecular.glsl │ │ ├── sampleMaterial.glsl │ │ ├── sampleShadowCatcher.glsl │ │ ├── 
surfaceInteractionDirect.glsl │ │ └── textureLinear.glsl │ ├── fullscreenQuad.vert │ ├── gBuffer.frag │ ├── gBuffer.vert │ ├── rayTrace.frag │ ├── reproject.frag │ └── toneMap.frag │ ├── glslUtil.js │ ├── mergeMeshesToGeometry.js │ ├── rgbeToFloat.js │ ├── texture │ ├── HDR_L_0.png │ ├── noise.js │ └── readme.txt │ ├── texturesFromMaterials.js │ └── util.js └── test ├── EnvironmentLight.test.js ├── bvhUtil.test.js ├── envMapCreation.test.js └── util.test.js /.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "presets": [ 3 | ["@babel/preset-env"] 4 | ], 5 | } 6 | -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "browser": true, 4 | "es6": true, 5 | "jest": true 6 | }, 7 | "extends": "eslint:recommended", 8 | "parserOptions": { 9 | "ecmaVersion": 9, 10 | "sourceType": "module" 11 | }, 12 | "rules": { 13 | "no-console": 0, 14 | "semi": 1 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /.github/workflows/publish_package.yml: -------------------------------------------------------------------------------- 1 | on: 2 | release: 3 | types: [published] 4 | name: Publish Package 5 | jobs: 6 | publish-npm: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v1 10 | - uses: actions/setup-node@v1 11 | with: 12 | node-version: 10.15.3 13 | registry-url: https://registry.npmjs.org/ 14 | - run: npm install 15 | - run: npm publish --access public 16 | env: 17 | NODE_AUTH_TOKEN: ${{secrets.npm_token}} 18 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # hidden config files 2 | .DS_Store 3 | 4 | # dependencies 5 | node_modules 6 | 7 | # file with environment variables 8 | .env 9 | 10 | # 
artifacts from tests 11 | coverage/ -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | # files to ignore when you install the library as an npm package 2 | 3 | # folder with sample scenes & 3D models 4 | scenes/ 5 | -------------------------------------------------------------------------------- /.nvmrc: -------------------------------------------------------------------------------- 1 | v10.15.3 2 | -------------------------------------------------------------------------------- /.release-it.json: -------------------------------------------------------------------------------- 1 | { 2 | "hooks": { 3 | "before:init": ["npm install", "npm run test"], 4 | "after:bump": "npm run build", 5 | "after:release": "echo Successfully released ${name} v${version} to ${repo.repository}." 6 | }, 7 | "git": { 8 | "changelog": "git log --pretty=format:\"* %s (%h)\" ${latestTag}...HEAD", 9 | "requireCleanWorkingDir": true, 10 | "requireUpstream": true, 11 | "requireCommits": false, 12 | "addUntrackedFiles": false, 13 | "commit": true, 14 | "commitMessage": "Release ${version}", 15 | "commitArgs": "", 16 | "tag": true, 17 | "tagName": "${version}", 18 | "tagAnnotation": "Release ${version}", 19 | "tagArgs": "", 20 | "push": true, 21 | "pushArgs": "--follow-tags", 22 | "pushRepo": "origin" 23 | }, 24 | "npm": { 25 | "publish": false, 26 | "publishPath": ".", 27 | "access": null, 28 | "otp": null 29 | }, 30 | "github": { 31 | "release": true, 32 | "releaseName": "Release v${version}", 33 | "releaseNotes": null, 34 | "preRelease": false, 35 | "draft": false, 36 | "tokenRef": "GITHUB_TOKEN", 37 | "assets": null, 38 | "host": null, 39 | "timeout": 0, 40 | "proxy": null 41 | }, 42 | "gitlab": { 43 | "release": false, 44 | "releaseName": "Release ${version}", 45 | "releaseNotes": null, 46 | "tokenRef": "GITLAB_TOKEN", 47 | "assets": null, 48 | "origin": null 49 | 
} 50 | } 51 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | env: 2 | global: 3 | - CC_TEST_REPORTER_ID=5df0c45e1b9cd012ffcd16ad6a65b5ddb87f36fa3e335c43458de53641cacc45 4 | language: node_js 5 | before_script: 6 | - curl -L https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64 > ./cc-test-reporter 7 | - chmod +x ./cc-test-reporter 8 | - ./cc-test-reporter before-build 9 | after_script: 10 | - ./cc-test-reporter after-build --exit-code $TRAVIS_TEST_RESULT 11 | jobs: 12 | include: 13 | - stage: test 14 | name: "Unit Tests" 15 | script: npm run test 16 | - stage: check 17 | name: "Linting" 18 | script: npm run lint -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @carlos-lopez-garces @elfrank @jaxry @lyonsno @santiagoroca 2 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 HOVER 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ## Brief Description 2 | Please include a **summary** of the change and/or which issue is fixed. Please also include *relevant motivation and context*. 3 | 4 | ## Image(s) or GIFs (if applicable) 5 | TODO or DELETE 6 | 7 | ## Pull Request Guidelines 8 | 9 | - [ ] I have added pull requests labels which describe my contribution. 10 | - [ ] All existing tests passed. 11 | - [ ] I have added tests to cover my changes, of which pass. 12 | - [ ] I have [compared](https://github.com/hoverinc/ray-tracing-renderer/wiki/Contributing#comparing-changes) the render output of my branch to `master`. 13 | - [ ] My change requires modifications to the documentation. 14 | - [ ] I have updated the documentation accordingly. 15 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | # Ray Tracing Renderer 5 | A [Three.js](https://github.com/mrdoob/three.js/) renderer which utilizes path tracing to render a scene with true photorealism. The renderer supports global illumination, reflections, soft shadows, and realistic environment lighting. 
6 |  7 | [Demo](https://hoverinc.github.io/ray-tracing-renderer/scenes/sample-models/) | [User Guide](https://github.com/hoverinc/ray-tracing-renderer/wiki/User-Guide) | [API Reference](https://github.com/hoverinc/ray-tracing-renderer/wiki/RayTracingRenderer) | [Contributing](https://github.com/hoverinc/ray-tracing-renderer#contributing) 8 |  9 |  10 | ## Usage 11 |  12 | RayTracingRenderer is in the *early alpha stage* of development. Features are incomplete and subject to change, and the renderer is unstable on certain hardware. 13 | ### Download 14 |  15 | * [Latest ES6 Build](https://raw.githubusercontent.com/hoverinc/ray-tracing-renderer/master/build/RayTracingRenderer.js) 16 | * [Latest ES5 Build](https://raw.githubusercontent.com/hoverinc/ray-tracing-renderer/master/build/RayTracingRenderer.es5.js) 17 |  18 | Or if you use npm, run 19 | `npm install ray-tracing-renderer` 20 |  21 | Ray Tracing Renderer relies on WebGL2, and any browser supporting WebGL2 also supports ES6. Thus, you should only use the ES5 build if the renderer inside your application is *optional*, and your application must support older browsers. 22 |  23 | ### Installation 24 | #### As an HTML script 25 | Ray Tracing Renderer requires Three.js, so make sure it is included in your html first. Then include, 26 | ```javascript 27 |  28 | ``` 29 |  30 | You can then use the renderer in your app. 31 |  32 | ```javascript 33 | const renderer = new THREE.RayTracingRenderer(); 34 | ``` 35 | #### As a module 36 | If you installed via npm, simply import the renderer as follows. 37 | ```javascript 38 | import { RayTracingRenderer } from 'ray-tracing-renderer' 39 | ``` 40 | Or if you downloaded the renderer as a file, 41 | ```javascript 42 | import { RayTracingRenderer } from './RayTracingRenderer.js' 43 | ``` 44 | The renderer can then be used in your app. 
45 | ```javascript 46 | const renderer = new RayTracingRenderer(); 47 | ``` 48 |  49 | ## Introduction 50 | Ray Tracing Renderer serves as a drop-in replacement for Three.js's [WebGLRenderer](https://threejs.org/docs/#api/en/renderers/WebGLRenderer). By simply swapping renderers, you can get instant photorealistic lighting. 51 |  52 | [![](preview.jpg)](https://hoverinc.github.io/ray-tracing-renderer/scenes/webgl-comparison/) 53 | [(Click to run example)](https://hoverinc.github.io/ray-tracing-renderer/scenes/webgl-comparison/) 54 |  55 | Ray Tracing Renderer runs on WebGL2, and does so by implementing a [path tracing](https://en.wikipedia.org/wiki/Path_tracing) algorithm inside a shader. It supports arbitrary Three.js scenes, with some restrictions. 56 |  57 | ### Features 58 | * **Global illumination.** Surfaces are illuminated with light reflected from every surface, not just manually placed light sources. This results in natural looking renders with realistic light bouncing and propagation. 59 | * **Soft Shadows.** Shadows are computed automatically without the need to configure shadow properties on Three.js's light sources. The resulting shadows are soft and true-to-life without any visual artifacts. 60 | * **Reflections.** Shiny and metallic surfaces reflect their surroundings, greatly contributing to realism. 61 | * **Environment lighting.** A new light type has been added which dynamically illuminates a scene entirely from an HDR environment map! Manually placed light sources are a thing of the past. 62 |  63 | ### Limitations 64 | * **Progressive rendering.** Path tracing is a progressive method. This means that the more computation time that is spent on rendering, the better the resulting image looks. In order to render a high quality image, the camera must stay still for several seconds, as the render gradually improves. This is in stark contrast to WebGLRenderer's method which is able to render a full quality image in one frame. 65 | * **Static geometry**. 
A BVH acceleration structure is computed for the scene to speed up ray intersections. This computation can take several seconds when first initializing the renderer, and it must be recomputed whenever scene geometry moves or changes. Therefore only camera movement is supported in real-time. 66 |  67 | For a more detailed guide on how to use the renderer, please read the [User Guide](https://github.com/hoverinc/ray-tracing-renderer/wiki/User-Guide). 68 |  69 | ## Contributing 70 | We want to increase test coverage and maintainability of the repo. If you would like to contribute, take a look at the following and submit Pull Requests: 71 | * [CodeClimate issues](https://codeclimate.com/github/hoverinc/ray-tracing-renderer/issues) to improve maintainability, and 72 | * Unit tests to [improve our coverage](https://codeclimate.com/github/hoverinc/ray-tracing-renderer/code) 73 |  74 | Take a look at [this page](https://github.com/hoverinc/ray-tracing-renderer/wiki/Submitting-Changes) for more details about submitting changes to the project. 75 |  76 | ### Expectations 77 |  78 | This repository started as a side-project and the time we invest in it is limited. It may take us a few days to get back to you but please bring your ideas forward. We'll do our best to respond promptly. 
79 | -------------------------------------------------------------------------------- /_config.yml: -------------------------------------------------------------------------------- 1 | theme: jekyll-theme-hacker -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | collectCoverage: true, 3 | coverageReporters: ["lcov"], 4 | collectCoverageFrom: ["src/**/*.{js,jsx,ts,tsx}"], 5 | moduleDirectories: ['node_modules', ''] 6 | }; 7 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ray-tracing-renderer", 3 | "version": "0.10.15", 4 | "description": "A [Three.js](https://github.com/mrdoob/three.js/) renderer which utilizes path tracing to render a scene with true photorealism. The renderer supports global illumination, reflections, soft shadows, and realistic environment lighting.", 5 | "main": "build/RayTracingRenderer.js", 6 | "scripts": { 7 | "build": "./node_modules/.bin/rollup -c", 8 | "dev": "./node_modules/.bin/rollup -cw --environment DEV & ./node_modules/.bin/http-server", 9 | "test": "./node_modules/jest/bin/jest.js", 10 | "lint": "./node_modules/.bin/eslint src/ test/", 11 | "release": "./scripts/release.sh" 12 | }, 13 | "author": "HOVER Inc", 14 | "license": "MIT", 15 | "devDependencies": { 16 | "@babel/core": "^7.11.6", 17 | "@babel/preset-env": "^7.11.5", 18 | "babel-jest": "^26.0.1", 19 | "dat.gui": "^0.7.6", 20 | "eslint": "^7.8.1", 21 | "http-server": "^0.12.3", 22 | "jest": "^26.0.1", 23 | "release-it": "^14.0.2", 24 | "rollup": "^2.26.10", 25 | "rollup-plugin-babel": "^4.3.3", 26 | "rollup-plugin-node-resolve": "^5.2.0", 27 | "stats.js": "^0.17.0", 28 | "three": "^0.107.0" 29 | }, 30 | "repository": { 31 | "type": "git", 32 | "url": 
"git+https://github.com/hoverinc/ray-tracing-renderer.git" 33 | }, 34 | "keywords": [ 35 | "path-tracing", 36 | "ray-tracing", 37 | "global-illumination", 38 | "webgl2", 39 | "bvh", 40 | "three.js" 41 | ], 42 | "bugs": { 43 | "url": "https://github.com/hoverinc/ray-tracing-renderer/issues" 44 | }, 45 | "homepage": "https://hoverinc.github.io/ray-tracing-renderer/" 46 | } 47 | -------------------------------------------------------------------------------- /preview.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hoverinc/ray-tracing-renderer/28ecded4324900e8ce3b7dda7831ccd780a67d3c/preview.jpg -------------------------------------------------------------------------------- /rollup.config.js: -------------------------------------------------------------------------------- 1 | import resolve from 'rollup-plugin-node-resolve'; 2 | import babel from 'rollup-plugin-babel'; 3 | 4 | function build() { 5 | return { 6 | input: 'src/main.js', 7 | output: { 8 | file: 'build/RayTracingRenderer.js', 9 | format: 'umd', 10 | globals: { 11 | three: 'THREE' 12 | }, 13 | name: 'RayTracingRenderer' 14 | }, 15 | plugins: [ 16 | resolve() 17 | ], 18 | external: [ 19 | 'three', 20 | ] 21 | }; 22 | } 23 | 24 | function buildEs5() { 25 | const b = build(); 26 | b.output.file = 'build/RayTracingRenderer.es5.js'; 27 | b.plugins.push( 28 | babel({ 29 | exclude: 'node_modules/**', 30 | extensions: ['.js', '.glsl', '.frag', '.vert'] 31 | }) 32 | ); 33 | return b; 34 | } 35 | 36 | const bundle = [ 37 | build() 38 | ]; 39 | 40 | if (!process.env.DEV) { 41 | bundle.push( 42 | buildEs5() 43 | ); 44 | } 45 | 46 | export default bundle; -------------------------------------------------------------------------------- /scenes/envmaps/blurry-sunset-with-dirlight.hdr: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/hoverinc/ray-tracing-renderer/28ecded4324900e8ce3b7dda7831ccd780a67d3c/scenes/envmaps/blurry-sunset-with-dirlight.hdr -------------------------------------------------------------------------------- /scenes/envmaps/gray-background-with-dirlight.hdr: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hoverinc/ray-tracing-renderer/28ecded4324900e8ce3b7dda7831ccd780a67d3c/scenes/envmaps/gray-background-with-dirlight.hdr -------------------------------------------------------------------------------- /scenes/envmaps/street-by-water.hdr: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hoverinc/ray-tracing-renderer/28ecded4324900e8ce3b7dda7831ccd780a67d3c/scenes/envmaps/street-by-water.hdr -------------------------------------------------------------------------------- /scenes/renderer-test/credits.txt: -------------------------------------------------------------------------------- 1 | Glass texture - Bryan Brown - https://skirmish.io/posts/688-seamless-stained-glass-texture/order/popular/author/530 2 | 3 | Wood & Metal texture - textures.com - https://www.textures.com/download/pbr0267/133941 4 | 5 | Environment map - Greg Zaal - https://hdrihaven.com/hdri/?h=leadenhall_market 6 | -------------------------------------------------------------------------------- /scenes/renderer-test/diffuse.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hoverinc/ray-tracing-renderer/28ecded4324900e8ce3b7dda7831ccd780a67d3c/scenes/renderer-test/diffuse.png -------------------------------------------------------------------------------- /scenes/renderer-test/envmap.hdr: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/hoverinc/ray-tracing-renderer/28ecded4324900e8ce3b7dda7831ccd780a67d3c/scenes/renderer-test/envmap.hdr -------------------------------------------------------------------------------- /scenes/renderer-test/glass_diffuse.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hoverinc/ray-tracing-renderer/28ecded4324900e8ce3b7dda7831ccd780a67d3c/scenes/renderer-test/glass_diffuse.png -------------------------------------------------------------------------------- /scenes/renderer-test/glass_normal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hoverinc/ray-tracing-renderer/28ecded4324900e8ce3b7dda7831ccd780a67d3c/scenes/renderer-test/glass_normal.png -------------------------------------------------------------------------------- /scenes/renderer-test/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Path Tracing Renderer - Test Scene 6 | 7 | 8 | 9 | 10 | 11 | 12 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | -------------------------------------------------------------------------------- /scenes/renderer-test/main.js: -------------------------------------------------------------------------------- 1 | const renderer = new THREE.RayTracingRenderer(); 2 | renderer.setPixelRatio(1.0); 3 | renderer.toneMapping = THREE.ACESFilmicToneMapping; 4 | renderer.toneMappingExposure = 1.5; 5 | renderer.toneMappingWhitePoint = 5; 6 | renderer.maxHardwareUsage = true; 7 | renderer.renderWhenOffFocus = false; 8 | 9 | document.body.appendChild(renderer.domElement); 10 | 11 | const stats = new Stats(); 12 | stats.setMode(0); // 0: fps, 1: ms 13 | stats.domElement.style.position = 'absolute'; 14 | stats.domElement.style.left = '0px'; 15 | stats.domElement.style.top = '0px'; 16 | document.body.appendChild(stats.domElement); 17 | 18 | const camera = new 
THREE.LensCamera(); 19 | camera.fov = 70; 20 | camera.aperture = 0.01; 21 | 22 | const controls = new THREE.OrbitControls(camera, renderer.domElement); 23 | controls.screenSpacePanning = true; 24 | 25 | const scene = new THREE.Scene(); 26 | 27 | function resize() { 28 | if (renderer.domElement.parentElement) { 29 | const width = renderer.domElement.parentElement.clientWidth; 30 | const height = renderer.domElement.parentElement.clientHeight; 31 | renderer.setSize(width, height); 32 | 33 | camera.aspect = width / height; 34 | camera.updateProjectionMatrix(); 35 | } 36 | } 37 | 38 | window.addEventListener('resize', resize); 39 | resize(); 40 | 41 | const tick = (time) => { 42 | controls.update(); 43 | camera.focus = controls.target.distanceTo(camera.position); 44 | stats.begin(); 45 | renderer.sync(time); 46 | renderer.render(scene, camera); 47 | stats.end(); 48 | requestAnimationFrame(tick); 49 | }; 50 | 51 | const geo = new THREE.SphereBufferGeometry(1, 24, 24); 52 | 53 | function makeMesh() { 54 | const mat = new THREE.RayTracingMaterial(); 55 | const mesh = new THREE.Mesh(geo, mat); 56 | 57 | // test setting scale and position on mesh 58 | mesh.position.set(0, 4, 0); 59 | mesh.scale.set(4, 4, 4); 60 | return mesh; 61 | } 62 | 63 | function init() { 64 | const envmap = new THREE.RGBELoader().load('envmap.hdr'); 65 | const envLight = new THREE.EnvironmentLight(envmap); 66 | scene.add(envLight); 67 | 68 | const model = new THREE.Object3D(); 69 | model.rotateY(-Math.PI / 2); 70 | 71 | controls.target.set(0, 2, 0); 72 | camera.position.set(31, 21, -1); 73 | 74 | // smooth 75 | { 76 | const mesh = makeMesh(); 77 | mesh.position.setX(-15); 78 | mesh.position.setZ(15); 79 | mesh.material.roughness = 0.0; 80 | mesh.material.metalness = 0.0; 81 | mesh.material.color.set(0xaa3333); 82 | model.add(mesh); 83 | } 84 | 85 | // diffuse 86 | { 87 | const mesh = makeMesh(); 88 | mesh.position.setX(-5); 89 | mesh.position.setZ(15); 90 | mesh.material.roughness = 1.0; 91 | 
mesh.material.metalness = 0.0; 92 | mesh.material.color.set(0x222288); 93 | model.add(mesh); 94 | } 95 | 96 | // smooth metal 97 | { 98 | const mesh = makeMesh(); 99 | mesh.position.setX(5); 100 | mesh.position.setZ(15); 101 | mesh.material.roughness = 0.0; 102 | mesh.material.metalness = 1.0; 103 | mesh.material.color.set(0xaaaa33); 104 | model.add(mesh); 105 | } 106 | 107 | //rough metal 108 | { 109 | const mesh = makeMesh(); 110 | mesh.position.setX(15); 111 | mesh.position.setZ(15); 112 | mesh.material.roughness = 1.0; 113 | mesh.material.metalness = 1.0; 114 | mesh.material.color.set(0x33aa33); 115 | model.add(mesh); 116 | } 117 | 118 | // diffuse mapping 119 | { 120 | const mesh = makeMesh(); 121 | mesh.position.setX(15); 122 | mesh.position.setZ(-15); 123 | mesh.material.roughness = 1.0; 124 | mesh.material.metalness = 0.0; 125 | mesh.material.map = new THREE.TextureLoader().load('diffuse.png'); 126 | model.add(mesh); 127 | } 128 | 129 | // roughness/metalness mapping 130 | { 131 | const mesh = makeMesh(); 132 | mesh.position.setX(5); 133 | mesh.position.setZ(-15); 134 | mesh.material.roughness = 1.0; 135 | mesh.material.metalness = 1.0; 136 | mesh.material.color.set(0x333333); 137 | mesh.material.roughnessMap = new THREE.TextureLoader().load('roughness.png'); 138 | mesh.material.metalnessMap = new THREE.TextureLoader().load('metalness.png'); 139 | model.add(mesh); 140 | } 141 | 142 | // normal mapping 143 | { 144 | const mesh = makeMesh(); 145 | mesh.position.setX(-5); 146 | mesh.position.setZ(-15); 147 | mesh.material.roughness = 0.1; 148 | mesh.material.metalness = 1.0; 149 | mesh.material.color.set(0xcccccc); 150 | mesh.material.normalMap = new THREE.TextureLoader().load('normal.png'); 151 | model.add(mesh); 152 | } 153 | 154 | // combined mapping 155 | { 156 | const mesh = makeMesh(); 157 | mesh.position.setX(-15); 158 | mesh.position.setZ(-15); 159 | mesh.material.roughness = 1.0; 160 | mesh.material.metalness = 1.0; 161 | mesh.material.map = new 
THREE.TextureLoader().load('diffuse.png'); 162 | mesh.material.normalMap = new THREE.TextureLoader().load('normal.png'); 163 | const metalrough = new THREE.TextureLoader().load('metalrough.png'); 164 | mesh.material.roughnessMap = metalrough; 165 | mesh.material.metalnessMap = metalrough; 166 | model.add(mesh); 167 | } 168 | 169 | // hollow glass 170 | { 171 | const mesh = makeMesh(); 172 | mesh.position.setX(-10); 173 | mesh.material.transparent = true; 174 | mesh.material.color.set(0xeeeeee); 175 | model.add(mesh); 176 | } 177 | 178 | // solid glass 179 | { 180 | const mesh = makeMesh(); 181 | mesh.position.setX(10); 182 | mesh.material.transparent = true; 183 | mesh.material.solid = true; 184 | mesh.material.color.set(0x8888ee); 185 | model.add(mesh); 186 | } 187 | 188 | // textured glass 189 | { 190 | const mesh = makeMesh(); 191 | mesh.material.transparent = true; 192 | mesh.material.solid = true; 193 | mesh.material.map = new THREE.TextureLoader().load('glass_diffuse.png'); 194 | mesh.material.normalMap = new THREE.TextureLoader().load('glass_normal.png'); 195 | mesh.material.normalScale.set(1.0, -1.0); 196 | model.add(mesh); 197 | } 198 | 199 | let unreadyMat; 200 | { 201 | // Create a test (non-buffer) Geometry 202 | const geo = new THREE.BoxGeometry(20, 6, 6); 203 | const mat = new THREE.MeshStandardMaterial(); 204 | mat.roughness = 0.2; 205 | mat.metalness = 0.0; 206 | mat.color.set(0x993311); 207 | unreadyMat = mat; 208 | const mesh = new THREE.Mesh(geo, mat); 209 | mesh.position.set(0, 3, 30); 210 | model.add(mesh); 211 | } 212 | 213 | // background mirror 214 | // verifies BVH used in reflections 215 | { 216 | const geo = new THREE.PlaneBufferGeometry(40, 16); 217 | const mat = new THREE.MeshStandardMaterial(); 218 | mat.roughness = 0.0; 219 | mat.metalness = 1.0; 220 | const mesh = new THREE.Mesh(geo, mat); 221 | mesh.position.set(0, 8, 40); 222 | model.add(mesh); 223 | } 224 | 225 | // ground plane 226 | { 227 | const geo = new 
THREE.PlaneBufferGeometry(1000, 1000); 228 | const mat = new THREE.MeshStandardMaterial(); 229 | mat.shadowCatcher = true; 230 | mat.roughness = 0.5; 231 | mat.metalness = 0.0; 232 | const mesh = new THREE.Mesh(geo, mat); 233 | mesh.rotateX(Math.PI / 2); 234 | model.add(mesh); 235 | } 236 | 237 | // test box with .visible set to false 238 | // should not be visible in the scene 239 | { 240 | const geo = new THREE.BoxBufferGeometry(5, 5, 5); 241 | const mat = new THREE.MeshStandardMaterial(); 242 | const mesh = new THREE.Mesh(geo, mat); 243 | mesh.position.set(0, 10, 0); 244 | mesh.visible = false; 245 | model.add(mesh); 246 | } 247 | 248 | scene.add(model); 249 | 250 | THREE.DefaultLoadingManager.onLoad = () => { 251 | 252 | // give material an unloaded async texture. the renderer should handle this 253 | unreadyMat.map = new THREE.TextureLoader().load('diffuse.png'); 254 | unreadyMat.normalMap = new THREE.TextureLoader().load('normal.png'); 255 | const metalrough = new THREE.TextureLoader().load('metalrough.png'); 256 | unreadyMat.roughnessMap = metalrough; 257 | unreadyMat.metalnessMap = metalrough; 258 | 259 | THREE.DefaultLoadingManager.onLoad = undefined; 260 | tick(); 261 | }; 262 | } 263 | 264 | init(); 265 | -------------------------------------------------------------------------------- /scenes/renderer-test/metalness.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hoverinc/ray-tracing-renderer/28ecded4324900e8ce3b7dda7831ccd780a67d3c/scenes/renderer-test/metalness.png -------------------------------------------------------------------------------- /scenes/renderer-test/metalrough.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hoverinc/ray-tracing-renderer/28ecded4324900e8ce3b7dda7831ccd780a67d3c/scenes/renderer-test/metalrough.png -------------------------------------------------------------------------------- 
/scenes/renderer-test/normal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hoverinc/ray-tracing-renderer/28ecded4324900e8ce3b7dda7831ccd780a67d3c/scenes/renderer-test/normal.png -------------------------------------------------------------------------------- /scenes/renderer-test/roughness.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hoverinc/ray-tracing-renderer/28ecded4324900e8ce3b7dda7831ccd780a67d3c/scenes/renderer-test/roughness.png -------------------------------------------------------------------------------- /scenes/sample-models/dev.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Path Tracing - Sample Models 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /scenes/sample-models/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Path Tracing - Sample Models 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | -------------------------------------------------------------------------------- /scenes/sample-models/main.js: -------------------------------------------------------------------------------- 1 | // Envinronment maps 2 | const ENV_MAPS_SAMPLES = [ 3 | { 4 | path: '../envmaps/gray-background-with-dirlight.hdr', 5 | name: 'Gray + Dir Light', 6 | }, 7 | { 8 | path: '../envmaps/blurry-sunset-with-dirlight.hdr', 9 | name: 'Sunset + Dir Light', 10 | } 11 | ]; 12 | 13 | // Sample models from BabylonJS: http://models.babylonjs.com/ 14 | const BABYLON_JS_SAMPLE_MODELS = [ 15 | { 16 | path: 'https://models.babylonjs.com/CornellBox/cornellBox.glb', 17 | name: 'Cornell Box', 18 | license: 'https://creativecommons.org/licenses/by/4.0/', 19 | }, 20 | { 21 
| path: 'https://models.babylonjs.com/PBR_Spheres.glb', 22 | name: 'PBR Spheres', 23 | license: 'https://creativecommons.org/licenses/by/4.0/', 24 | }, 25 | { 26 | path: 'https://models.babylonjs.com/Lee-Perry-Smith-Head/head.glb', 27 | name: 'Lee Perry Smith Head', 28 | license: 'https://creativecommons.org/licenses/by/4.0/', 29 | }, 30 | { 31 | path: 'https://models.babylonjs.com/Georgia-Tech-Dragon/dragon.glb', 32 | name: 'Georgia Tech Dragon', 33 | license: 'https://creativecommons.org/licenses/by/4.0/', 34 | }, 35 | ]; 36 | 37 | const MODEL_DATA = [...BABYLON_JS_SAMPLE_MODELS]; 38 | const INITIAL_MODEL_DATA = MODEL_DATA[0]; 39 | const INITIAL_ENV_MAP = ENV_MAPS_SAMPLES[0]; 40 | 41 | let currentModelLoaded = null; 42 | let groundMesh = null; 43 | let currentEnvLight = null; 44 | 45 | const renderer = new THREE.RayTracingRenderer(); 46 | 47 | renderer.gammaOutput = true; 48 | renderer.gammaFactor = 2.2; 49 | renderer.setPixelRatio(1.0); 50 | renderer.toneMapping = THREE.ACESFilmicToneMapping; 51 | renderer.toneMappingExposure = 1.0; 52 | renderer.toneMappingWhitePoint = 5; 53 | 54 | renderer.renderWhenOffFocus = false; 55 | renderer.renderToScreen = true; 56 | 57 | document.body.appendChild(renderer.domElement); 58 | 59 | const stats = new Stats(); 60 | stats.setMode(0); // 0: fps, 1: ms 61 | stats.domElement.style.position = 'absolute'; 62 | stats.domElement.style.left = '0px'; 63 | stats.domElement.style.top = '0px'; 64 | document.body.appendChild(stats.domElement); 65 | 66 | const camera = new THREE.LensCamera(); 67 | camera.fov = 35; 68 | camera.aperture = 0.01; 69 | 70 | const controls = new THREE.OrbitControls(camera, renderer.domElement); 71 | controls.screenSpacePanning = true; 72 | 73 | const scene = new THREE.Scene(); 74 | 75 | function resize() { 76 | if (renderer.domElement.parentElement) { 77 | const width = renderer.domElement.parentElement.clientWidth; 78 | const height = renderer.domElement.parentElement.clientHeight; 79 | renderer.setSize(width, 
height); 80 | 81 | camera.aspect = width / height; 82 | camera.updateProjectionMatrix(); 83 | } 84 | } 85 | 86 | let animationFrameId; 87 | 88 | const tick = (time) => { 89 | controls.update(); 90 | camera.focus = controls.target.distanceTo(camera.position); 91 | stats.begin(); 92 | renderer.sync(time); 93 | renderer.render(scene, camera); 94 | stats.end(); 95 | animationFrameId = requestAnimationFrame(tick); 96 | }; 97 | 98 | function load(loader, url) { 99 | return new Promise(resolve => { 100 | const l = new loader(); 101 | l.load(url, resolve, undefined, exception => { throw exception; }); 102 | }); 103 | } 104 | 105 | function createGroundMesh() { 106 | const geo = new THREE.PlaneBufferGeometry(100, 100); 107 | const mat = new THREE.MeshStandardMaterial(); 108 | mat.color.set(0xffffff); 109 | mat.roughness = 0.5; 110 | mat.metalness = 0.0; 111 | mat.shadowCatcher = true; 112 | const mesh = new THREE.Mesh(geo, mat); 113 | mesh.rotateX(Math.PI / 2); 114 | 115 | return mesh; 116 | } 117 | 118 | async function createModelFromData(data) { 119 | const gltfData = await load(THREE.GLTFLoader, data.path); 120 | const gltfScene = gltfData.scene; 121 | 122 | return gltfScene; 123 | } 124 | 125 | function computeBoundingBoxFromModel(model) { 126 | const bounds = new THREE.Box3(); 127 | bounds.setFromObject(model); 128 | return bounds; 129 | } 130 | 131 | function updateCameraFromModel(camera, model) { 132 | const bounds = computeBoundingBoxFromModel(model); 133 | const centroid = new THREE.Vector3(); 134 | bounds.getCenter(centroid); 135 | 136 | const distance = bounds.min.distanceTo(bounds.max); 137 | 138 | // TODO: Why do we need this? 
139 | // controls.target.set(centroid); 140 | camera.position.set(0, (bounds.max.y - bounds.min.y) * 0.75, distance * 2.0); 141 | camera.aperture = 0.01 * distance; 142 | 143 | controls.target.copy(centroid); 144 | controls.update(); 145 | 146 | console.log(`Camera at ${camera.position.toArray()}`); 147 | } 148 | 149 | function updateGroundMeshFromModel(groundMesh, model) { 150 | const bounds = computeBoundingBoxFromModel(model); 151 | 152 | const x = currentModelLoaded.position.x; 153 | const y = bounds.min.y - 0.005 * (bounds.max.y - bounds.min.y); // move slightly below bounds to prevent z-fighting 154 | const z = currentModelLoaded.position.z; 155 | 156 | groundMesh.position.set(x, y, z); 157 | } 158 | 159 | function updateSceneWithModel(model) { 160 | if (currentModelLoaded) { 161 | currentModelLoaded.parent.remove(currentModelLoaded); 162 | } 163 | 164 | scene.add(model); 165 | renderer.needsUpdate = true; 166 | currentModelLoaded = model; 167 | updateCameraFromModel(camera, model); 168 | updateGroundMeshFromModel(groundMesh, model); 169 | } 170 | 171 | async function selectModelFromName(name) { 172 | const modelEntry = MODEL_DATA.find(item => item.name === name); 173 | const model = await createModelFromData(modelEntry); 174 | updateSceneWithModel(model); 175 | 176 | console.log(`Switch to Model '${name}'`); 177 | } 178 | 179 | async function loadEnvironmentMap(path) { 180 | const loadPromise = new Promise((resolve) => 181 | new THREE.RGBELoader().load(path, (environmentMapTexture) => 182 | resolve(environmentMapTexture), 183 | ), 184 | ); 185 | 186 | const environmentMap = await loadPromise; 187 | environmentMap.encoding = THREE.LinearEncoding; 188 | 189 | return environmentMap; 190 | } 191 | 192 | async function selectEnvMapFromName(name) { 193 | const envMapEntry = ENV_MAPS_SAMPLES.find(item => item.name === name); 194 | const envMap = await loadEnvironmentMap(envMapEntry.path); 195 | const envLight = new THREE.EnvironmentLight(envMap); 196 | 197 | if 
(currentEnvLight) scene.remove(currentEnvLight); 198 | scene.add(envLight); 199 | currentEnvLight = envLight; 200 | 201 | renderer.needsUpdate = true; 202 | 203 | console.log(`Switch to Env Map '${name}'`); 204 | } 205 | 206 | async function init() { 207 | window.addEventListener('resize', resize); 208 | resize(); 209 | 210 | selectEnvMapFromName(INITIAL_ENV_MAP.name); 211 | 212 | groundMesh = createGroundMesh(); 213 | selectModelFromName(INITIAL_MODEL_DATA.name); 214 | 215 | scene.add(groundMesh); 216 | scene.add(camera); 217 | 218 | const gui = new dat.GUI(); 219 | const uiOptions = { 220 | selectedModelName: INITIAL_MODEL_DATA.name, 221 | selectedEnvMap: INITIAL_ENV_MAP.name, 222 | modelOptions: MODEL_DATA.map(item => item.name), 223 | envMapOptions: ENV_MAPS_SAMPLES.map(item => item.name), 224 | }; 225 | 226 | const modelController = gui.add(uiOptions, 'selectedModelName', uiOptions.modelOptions) 227 | .name('model'); 228 | 229 | const envMapController = gui.add(uiOptions, 'selectedEnvMap', uiOptions.envMapOptions) 230 | .name('env map'); 231 | 232 | modelController.onChange(async (value) => { 233 | cancelAnimationFrame(animationFrameId); 234 | selectModelFromName(value); 235 | }); 236 | 237 | envMapController.onChange(async (value) => { 238 | cancelAnimationFrame(animationFrameId); 239 | selectEnvMapFromName(value); 240 | }); 241 | 242 | THREE.DefaultLoadingManager.onLoad = tick; 243 | } 244 | 245 | init(); 246 | -------------------------------------------------------------------------------- /scenes/webgl-comparison/dev.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Ray Tracing Example 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 33 | 34 | 35 | 36 |

Loading...

37 | 38 | 39 | 40 | -------------------------------------------------------------------------------- /scenes/webgl-comparison/envmap.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hoverinc/ray-tracing-renderer/28ecded4324900e8ce3b7dda7831ccd780a67d3c/scenes/webgl-comparison/envmap.jpg -------------------------------------------------------------------------------- /scenes/webgl-comparison/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Ray Tracing Example 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 34 | 35 | 36 | 37 |

Loading...

38 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /scenes/webgl-comparison/main.js: -------------------------------------------------------------------------------- 1 | let renderer; 2 | let controls; 3 | let scene; 4 | 5 | const camera = new THREE.PerspectiveCamera(); 6 | camera.position.set(64, 32, 16); 7 | camera.fov = 65; 8 | 9 | const stats = new Stats(); 10 | stats.setMode(0); // 0: fps, 1: ms 11 | stats.domElement.style.position = 'absolute'; 12 | stats.domElement.style.left = '0px'; 13 | stats.domElement.style.top = '0px'; 14 | document.body.appendChild(stats.domElement); 15 | 16 | init(); 17 | 18 | window.addEventListener('resize', resize); 19 | 20 | async function init() { 21 | const [envMap, envMapLDR, gltf] = await Promise.all([ 22 | load(THREE.RGBELoader, '../envmaps/street-by-water.hdr'), 23 | load(THREE.TextureLoader, 'envmap.jpg'), 24 | load(THREE.GLTFLoader, 'scene.gltf'), 25 | ]); 26 | 27 | scene = new THREE.Scene(); 28 | 29 | const model = gltf.scene; 30 | 31 | model.scale.set(0.5, 0.5, 0.5); 32 | model.rotateY(Math.PI / 2); 33 | 34 | model.traverse(child => { 35 | if (child instanceof THREE.Mesh) { 36 | // only necessary for WebGLRenderer 37 | child.castShadow = true; 38 | child.receiveShadow = true; 39 | } 40 | if (child.material && child.material.name == 'LensesMat') { 41 | child.material.transparent = true; 42 | } 43 | }); 44 | 45 | const uiCallbacks = { 46 | WebGL: () => initWebGL(envMapLDR, model), 47 | RayTracing: () => initRayTracing(envMap, model) 48 | }; 49 | 50 | const gui = new dat.GUI(); 51 | gui.add(uiCallbacks, 'WebGL'); 52 | gui.add(uiCallbacks, 'RayTracing'); 53 | 54 | uiCallbacks.RayTracing(); 55 | 56 | resize(); 57 | 58 | // THREE.DefaultLoadingManager.onLoad = tick; 59 | tick(); 60 | 61 | document.querySelector('#loading').remove(); 62 | } 63 | 64 | function resize() { 65 | if (renderer.domElement.parentElement) { 66 | const width = 
renderer.domElement.parentElement.clientWidth; 67 | const height = renderer.domElement.parentElement.clientHeight; 68 | renderer.setSize(width, height); 69 | 70 | camera.aspect = width / height; 71 | camera.updateProjectionMatrix(); 72 | } 73 | } 74 | 75 | function tick(time) { 76 | controls.update(); 77 | camera.focus = controls.target.distanceTo(camera.position); 78 | stats.begin(); 79 | 80 | if (renderer.sync) { 81 | renderer.sync(time); 82 | } 83 | 84 | renderer.render(scene, camera); 85 | stats.end(); 86 | 87 | requestAnimationFrame(tick); 88 | } 89 | 90 | function initWebGL(envMapLDR, model) { 91 | unloadRenderer(renderer); 92 | renderer = new THREE.WebGLRenderer({ 93 | antialias: true 94 | }); 95 | initRenderer(renderer); 96 | 97 | renderer.shadowMap.enabled = true; 98 | renderer.shadowMap.type = THREE.PCFSoftShadowMap; 99 | 100 | scene = new THREE.Scene(); 101 | scene.add(model); 102 | 103 | const dirLight = new THREE.DirectionalLight(0xff3300, 0.3); 104 | dirLight.target.position = controls.target; 105 | scene.add(dirLight.target); 106 | dirLight.target.position.set(0, 20, 0); 107 | dirLight.castShadow = true; 108 | dirLight.position.setFromSphericalCoords(100, -1.31, 4.08); 109 | dirLight.shadow.mapSize.width = 1024; 110 | dirLight.shadow.mapSize.height = 1024; 111 | dirLight.shadow.camera.left = -50; 112 | dirLight.shadow.camera.right = 50; 113 | dirLight.shadow.camera.top = 50; 114 | dirLight.shadow.camera.bottom = -50; 115 | scene.add(dirLight); 116 | 117 | const ambLight = new THREE.AmbientLight(0xffffff, 0.2); 118 | scene.add(ambLight); 119 | 120 | // const helper = new THREE.CameraHelper(dirLight.shadow.camera); 121 | // scene.add(helper); 122 | 123 | const equiToCube = new THREE.EquirectangularToCubeGenerator(envMapLDR); 124 | const cubeMap = equiToCube.renderTarget; 125 | const cubeMapTexture = equiToCube.update(renderer); 126 | 127 | scene.traverse(child => { 128 | if (child.material) { 129 | child.material.envMap = cubeMapTexture; 130 | } 131 | 
}); 132 | 133 | scene.background = cubeMap; 134 | } 135 | 136 | function initRayTracing(envMap, model) { 137 | unloadRenderer(renderer); 138 | renderer = new THREE.RayTracingRenderer(); 139 | initRenderer(renderer); 140 | 141 | scene = new THREE.Scene(); 142 | 143 | scene.add(model); 144 | 145 | const envLight = new THREE.EnvironmentLight(envMap); 146 | scene.add(envLight); 147 | } 148 | 149 | function initRenderer(renderer) { 150 | document.body.appendChild(renderer.domElement); 151 | resize(); 152 | 153 | controls = new THREE.OrbitControls(camera, renderer.domElement); 154 | controls.screenSpacePanning = true; 155 | controls.target.set(0, 20, 0); 156 | 157 | renderer.gammaOutput = true; 158 | renderer.gammaFactor = 2.2; 159 | renderer.setPixelRatio(1.0); 160 | renderer.toneMapping = THREE.ACESFilmicToneMapping; 161 | renderer.toneMappingExposure = 1.5; 162 | renderer.renderWhenOffFocus = false; 163 | renderer.bounces = 3; 164 | } 165 | 166 | function unloadRenderer(renderer) { 167 | if (renderer) { 168 | renderer.dispose(); 169 | renderer.domElement.remove(); 170 | } 171 | if (controls) { 172 | controls.dispose(); 173 | } 174 | } 175 | 176 | function load(loader, url) { 177 | return new Promise(resolve => { 178 | const l = new loader(); 179 | l.load(url, resolve, undefined, exception => { throw exception; }); 180 | }); 181 | } 182 | -------------------------------------------------------------------------------- /scripts/env-vars.sh: -------------------------------------------------------------------------------- 1 | # Load env vars for .env 2 | export $(grep -v '^#' .env | xargs -0) > /dev/null 3 | -------------------------------------------------------------------------------- /scripts/release.sh: -------------------------------------------------------------------------------- 1 | cd "$(dirname "$0")/.." 
> /dev/null 2 | 3 | set -e 4 | 5 | # load env variables 6 | source scripts/env-vars.sh 7 | 8 | if [ -z "$GITHUB_TOKEN" ]; then 9 | echo 'GITHUB_TOKEN is not set. Canceling release process.' 10 | else 11 | echo 'GITHUB_TOKEN is set, proceed with release.' 12 | 13 | # run "release-it" in interactive mode (CLI) 14 | node_modules/release-it/bin/release-it.js -VV 15 | fi 16 | -------------------------------------------------------------------------------- /src/EnvironmentLight.js: -------------------------------------------------------------------------------- 1 | import { Light } from 'three'; 2 | 3 | export class EnvironmentLight extends Light { 4 | constructor(map, ...args) { 5 | super(...args); 6 | this.map = map; 7 | this.isEnvironmentLight = true; 8 | } 9 | 10 | copy(source) { 11 | super.copy(source); 12 | this.map = source.map; 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/LensCamera.js: -------------------------------------------------------------------------------- 1 | import { PerspectiveCamera } from 'three'; 2 | 3 | export class LensCamera extends PerspectiveCamera { 4 | constructor(...args) { 5 | super(...args); 6 | this.aperture = 0.01; 7 | } 8 | 9 | copy(source, recursive) { 10 | super.copy(source, recursive); 11 | this.aperture = source.aperture; 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /src/RayTracingMaterial.js: -------------------------------------------------------------------------------- 1 | import { MeshStandardMaterial } from 'three'; 2 | 3 | export class RayTracingMaterial extends MeshStandardMaterial { 4 | constructor(...args) { 5 | super(...args); 6 | this.solid = false; 7 | this.shadowCatcher = false; 8 | } 9 | 10 | copy(source) { 11 | super.copy(source); 12 | this.solid = source.solid; 13 | this.shadowCatcher = source.shadowCatcher; 14 | } 15 | } 16 | 
-------------------------------------------------------------------------------- /src/RayTracingRenderer.js: -------------------------------------------------------------------------------- 1 | import { loadExtensions } from './renderer/glUtil'; 2 | import { makeRenderingPipeline } from './renderer/RenderingPipeline'; 3 | import * as THREE from 'three'; 4 | 5 | const glRequiredExtensions = [ 6 | 'EXT_color_buffer_float', // enables rendering to float buffers 7 | 'EXT_float_blend', 8 | ]; 9 | 10 | const glOptionalExtensions = [ 11 | 'OES_texture_float_linear', // enables gl.LINEAR texture filtering for float textures, 12 | ]; 13 | 14 | export function RayTracingRenderer(params = {}) { 15 | const canvas = params.canvas || document.createElement('canvas'); 16 | 17 | const gl = canvas.getContext('webgl2', { 18 | alpha: false, 19 | depth: true, 20 | stencil: false, 21 | antialias: false, 22 | powerPreference: 'high-performance', 23 | failIfMajorPerformanceCaveat: true 24 | }); 25 | 26 | loadExtensions(gl, glRequiredExtensions); 27 | const optionalExtensions = loadExtensions(gl, glOptionalExtensions); 28 | 29 | let pipeline = null; 30 | const size = new THREE.Vector2(); 31 | let pixelRatio = 1; 32 | 33 | const module = { 34 | bounces: 2, 35 | domElement: canvas, 36 | maxHardwareUsage: false, 37 | needsUpdate: true, 38 | onSampleRendered: null, 39 | renderWhenOffFocus: true, 40 | toneMapping: THREE.LinearToneMapping, 41 | toneMappingExposure: 1, 42 | toneMappingWhitePoint: 1, 43 | }; 44 | 45 | function initScene(scene) { 46 | scene.updateMatrixWorld(); 47 | 48 | const toneMappingParams = { 49 | exposure: module.toneMappingExposure, 50 | whitePoint: module.toneMappingWhitePoint, 51 | toneMapping: module.toneMapping 52 | }; 53 | 54 | const bounces = module.bounces; 55 | 56 | pipeline = makeRenderingPipeline({gl, optionalExtensions, scene, toneMappingParams, bounces}); 57 | 58 | pipeline.onSampleRendered = (...args) => { 59 | if (module.onSampleRendered) { 60 | 
module.onSampleRendered(...args); 61 | } 62 | }; 63 | 64 | module.setSize(size.width, size.height); 65 | module.needsUpdate = false; 66 | } 67 | 68 | module.setSize = (width, height, updateStyle = true) => { 69 | size.set(width, height); 70 | canvas.width = size.width * pixelRatio; 71 | canvas.height = size.height * pixelRatio; 72 | 73 | if (updateStyle) { 74 | canvas.style.width = `${ size.width }px`; 75 | canvas.style.height = `${ size.height }px`; 76 | } 77 | 78 | if (pipeline) { 79 | pipeline.setSize(size.width * pixelRatio, size.height * pixelRatio); 80 | } 81 | }; 82 | 83 | module.getSize = (target) => { 84 | if (!target) { 85 | target = new THREE.Vector2(); 86 | } 87 | 88 | return target.copy(size); 89 | }; 90 | 91 | module.setPixelRatio = (x) => { 92 | if (!x) { 93 | return; 94 | } 95 | pixelRatio = x; 96 | module.setSize(size.width, size.height, false); 97 | }; 98 | 99 | module.getPixelRatio = () => pixelRatio; 100 | 101 | module.getTotalSamplesRendered = () => { 102 | if (pipeline) { 103 | return pipeline.getTotalSamplesRendered(); 104 | } 105 | }; 106 | 107 | let isValidTime = 1; 108 | let currentTime = NaN; 109 | let syncWarning = false; 110 | 111 | function restartTimer() { 112 | isValidTime = NaN; 113 | } 114 | 115 | module.sync = (t) => { 116 | // the first call to the callback of requestAnimationFrame does not have a time parameter 117 | // use performance.now() in this case 118 | currentTime = t || performance.now(); 119 | }; 120 | 121 | let lastFocus = false; 122 | 123 | module.render = (scene, camera) => { 124 | if (!module.renderWhenOffFocus) { 125 | const hasFocus = document.hasFocus(); 126 | if (!hasFocus) { 127 | lastFocus = hasFocus; 128 | return; 129 | } else if (hasFocus && !lastFocus) { 130 | lastFocus = hasFocus; 131 | restartTimer(); 132 | } 133 | } 134 | 135 | if (module.needsUpdate) { 136 | initScene(scene); 137 | } 138 | 139 | if (isNaN(currentTime)) { 140 | if (!syncWarning) { 141 | console.warn('Ray Tracing Renderer warning: For 
improved performance, please call renderer.sync(time) before render.render(scene, camera), with the time parameter equalling the parameter passed to the callback of requestAnimationFrame'); 142 | syncWarning = true; 143 | } 144 | 145 | currentTime = performance.now(); // less accurate than requestAnimationFrame's time parameter 146 | } 147 | 148 | pipeline.time(isValidTime * currentTime); 149 | 150 | isValidTime = 1; 151 | currentTime = NaN; 152 | 153 | camera.updateMatrixWorld(); 154 | 155 | if(module.maxHardwareUsage) { 156 | // render new sample for the entire screen 157 | pipeline.drawFull(camera); 158 | } else { 159 | // render new sample for a tiled subset of the screen 160 | pipeline.draw(camera); 161 | } 162 | }; 163 | 164 | // Assume module.render is called using requestAnimationFrame. 165 | // This means that when the user is on a different browser tab, module.render won't be called. 166 | // Since the timer should not measure time when module.render is inactive, 167 | // the timer should be reset when the user switches browser tabs 168 | document.addEventListener('visibilitychange', restartTimer); 169 | 170 | module.dispose = () => { 171 | document.removeEventListener('visibilitychange', restartTimer); 172 | pipeline = null; 173 | }; 174 | 175 | return module; 176 | } 177 | 178 | RayTracingRenderer.isSupported = () => { 179 | const gl = document.createElement('canvas') 180 | .getContext('webgl2', { 181 | failIfMajorPerformanceCaveat: true 182 | }); 183 | 184 | if (!gl) { 185 | return false; 186 | } 187 | 188 | const extensions = loadExtensions(gl, glRequiredExtensions); 189 | for (let e in extensions) { 190 | if (!extensions[e]) { 191 | return false; 192 | } 193 | } 194 | 195 | return true; 196 | }; 197 | -------------------------------------------------------------------------------- /src/SoftDirectionalLight.js: -------------------------------------------------------------------------------- 1 | import { DirectionalLight } from 'three'; 2 | 3 | export 
class SoftDirectionalLight extends DirectionalLight { 4 | constructor(color, intensity, softness = 0) { 5 | super(color, intensity); 6 | this.softness = softness; 7 | } 8 | 9 | copy(source) { 10 | super.copy(source); 11 | this.softness = source.softness; 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /src/constants.js: -------------------------------------------------------------------------------- 1 | export const ThinMaterial = 1; 2 | export const ThickMaterial = 2; 3 | export const ShadowCatcherMaterial = 3; 4 | -------------------------------------------------------------------------------- /src/main.js: -------------------------------------------------------------------------------- 1 | import * as constants from './constants'; 2 | import { LensCamera } from './LensCamera'; 3 | import { SoftDirectionalLight } from './SoftDirectionalLight'; 4 | import { EnvironmentLight } from './EnvironmentLight'; 5 | import { RayTracingMaterial } from './RayTracingMaterial'; 6 | import { RayTracingRenderer } from './RayTracingRenderer'; 7 | 8 | if (window.THREE) { 9 | /* global THREE */ 10 | THREE.LensCamera = LensCamera; 11 | THREE.SoftDirectionalLight = SoftDirectionalLight; 12 | THREE.EnvironmentLight = EnvironmentLight; 13 | THREE.RayTracingMaterial = RayTracingMaterial; 14 | THREE.RayTracingRenderer = RayTracingRenderer; 15 | THREE.ThickMaterial = constants.ThickMaterial; 16 | THREE.ThinMaterial = constants.ThinMaterial; 17 | } 18 | 19 | export { 20 | constants, 21 | LensCamera, 22 | SoftDirectionalLight, 23 | EnvironmentLight, 24 | RayTracingMaterial, 25 | RayTracingRenderer, 26 | }; 27 | -------------------------------------------------------------------------------- /src/renderer/Framebuffer.js: -------------------------------------------------------------------------------- 1 | export function makeFramebuffer(gl, { color, depth }) { 2 | 3 | const framebuffer = gl.createFramebuffer(); 4 | 5 | function bind() { 6 | 
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer); 7 | } 8 | 9 | function unbind() { 10 | gl.bindFramebuffer(gl.FRAMEBUFFER, null); 11 | } 12 | 13 | function init() { 14 | bind(); 15 | 16 | const drawBuffers = []; 17 | 18 | for (let location in color) { 19 | location = Number(location); 20 | 21 | if (location === undefined) { 22 | console.error('invalid location'); 23 | } 24 | 25 | const tex = color[location]; 26 | gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0 + location, tex.target, tex.texture, 0); 27 | drawBuffers.push(gl.COLOR_ATTACHMENT0 + location); 28 | } 29 | 30 | gl.drawBuffers(drawBuffers); 31 | 32 | if (depth) { 33 | gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, depth.target, depth.texture); 34 | } 35 | 36 | unbind(); 37 | } 38 | 39 | init(); 40 | 41 | return { 42 | color, 43 | bind, 44 | unbind 45 | }; 46 | } 47 | -------------------------------------------------------------------------------- /src/renderer/FullscreenQuad.js: -------------------------------------------------------------------------------- 1 | import vertex from './glsl/fullscreenQuad.vert'; 2 | import { makeVertexShader } from './RenderPass'; 3 | 4 | export function makeFullscreenQuad(gl) { 5 | const vao = gl.createVertexArray(); 6 | 7 | gl.bindVertexArray(vao); 8 | 9 | gl.bindBuffer(gl.ARRAY_BUFFER, gl.createBuffer()); 10 | gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1]), gl.STATIC_DRAW); 11 | 12 | // vertex shader should set layout(location = 0) on position attribute 13 | const posLoc = 0; 14 | 15 | gl.enableVertexAttribArray(posLoc); 16 | gl.vertexAttribPointer(posLoc, 2, gl.FLOAT, false, 0, 0); 17 | 18 | gl.bindVertexArray(null); 19 | 20 | const vertexShader = makeVertexShader(gl, { vertex }); 21 | 22 | function draw() { 23 | gl.bindVertexArray(vao); 24 | gl.drawArrays(gl.TRIANGLES, 0, 6); 25 | } 26 | 27 | return { 28 | draw, 29 | vertexShader 30 | }; 31 | } 32 | 
-------------------------------------------------------------------------------- /src/renderer/GBufferPass.js: -------------------------------------------------------------------------------- 1 | import { makeRenderPass } from './RenderPass'; 2 | import vertex from './glsl/gBuffer.vert'; 3 | import fragment from './glsl/gBuffer.frag'; 4 | import { Matrix4 } from 'three'; 5 | 6 | export function makeGBufferPass(gl, { materialBuffer, mergedMesh }) { 7 | const renderPass = makeRenderPass(gl, { 8 | defines: materialBuffer.defines, 9 | vertex, 10 | fragment 11 | }); 12 | 13 | renderPass.setTexture('diffuseMap', materialBuffer.textures.diffuseMap); 14 | renderPass.setTexture('normalMap', materialBuffer.textures.normalMap); 15 | renderPass.setTexture('pbrMap', materialBuffer.textures.pbrMap); 16 | 17 | const geometry = mergedMesh.geometry; 18 | 19 | const elementCount = geometry.getIndex().count; 20 | 21 | const vao = gl.createVertexArray(); 22 | 23 | gl.bindVertexArray(vao); 24 | uploadAttributes(gl, renderPass, geometry); 25 | gl.bindVertexArray(null); 26 | 27 | let jitterX = 0; 28 | let jitterY = 0; 29 | function setJitter(x, y) { 30 | jitterX = x; 31 | jitterY = y; 32 | } 33 | 34 | let currentCamera; 35 | function setCamera(camera) { 36 | currentCamera = camera; 37 | } 38 | 39 | function calcCamera() { 40 | projView.copy(currentCamera.projectionMatrix); 41 | 42 | projView.elements[8] += 2 * jitterX; 43 | projView.elements[9] += 2 * jitterY; 44 | 45 | projView.multiply(currentCamera.matrixWorldInverse); 46 | renderPass.setUniform('projView', projView.elements); 47 | } 48 | 49 | let projView = new Matrix4(); 50 | 51 | function draw() { 52 | calcCamera(); 53 | gl.bindVertexArray(vao); 54 | renderPass.useProgram(); 55 | gl.enable(gl.DEPTH_TEST); 56 | gl.drawElements(gl.TRIANGLES, elementCount, gl.UNSIGNED_INT, 0); 57 | gl.disable(gl.DEPTH_TEST); 58 | } 59 | 60 | return { 61 | draw, 62 | outputLocs: renderPass.outputLocs, 63 | setCamera, 64 | setJitter 65 | }; 66 | } 67 | 
68 | function uploadAttributes(gl, renderPass, geometry) { 69 | setAttribute(gl, renderPass.attribLocs.aPosition, geometry.getAttribute('position')); 70 | setAttribute(gl, renderPass.attribLocs.aNormal, geometry.getAttribute('normal')); 71 | setAttribute(gl, renderPass.attribLocs.aUv, geometry.getAttribute('uv')); 72 | setAttribute(gl, renderPass.attribLocs.aMaterialMeshIndex, geometry.getAttribute('materialMeshIndex')); 73 | 74 | gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, gl.createBuffer()); 75 | gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, geometry.getIndex().array, gl.STATIC_DRAW); 76 | } 77 | 78 | function setAttribute(gl, location, bufferAttribute) { 79 | if (location === undefined) { 80 | return; 81 | } 82 | 83 | const { itemSize, array } = bufferAttribute; 84 | 85 | gl.enableVertexAttribArray(location); 86 | gl.bindBuffer(gl.ARRAY_BUFFER, gl.createBuffer()); 87 | gl.bufferData(gl.ARRAY_BUFFER, array, gl.STATIC_DRAW); 88 | 89 | if (array instanceof Float32Array) { 90 | gl.vertexAttribPointer(location, itemSize, gl.FLOAT, false, 0, 0); 91 | } else if (array instanceof Int32Array) { 92 | gl.vertexAttribIPointer(location, itemSize, gl.INT, 0, 0); 93 | } else { 94 | throw 'Unsupported buffer type'; 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /src/renderer/MaterialBuffer.js: -------------------------------------------------------------------------------- 1 | import { ThinMaterial, ThickMaterial, ShadowCatcherMaterial } from '../constants'; 2 | import materialBufferChunk from './glsl/chunks/materialBuffer.glsl'; 3 | import { makeUniformBuffer } from './UniformBuffer'; 4 | import { makeRenderPass } from "./RenderPass"; 5 | import { makeTexture } from './Texture'; 6 | import { getTexturesFromMaterials, mergeTexturesFromMaterials } from './texturesFromMaterials'; 7 | 8 | export function makeMaterialBuffer(gl, materials) { 9 | const maps = getTexturesFromMaterials(materials, ['map', 'normalMap']); 10 | const pbrMap = 
mergeTexturesFromMaterials(materials, ['roughnessMap', 'metalnessMap']); 11 | 12 | const textures = {}; 13 | 14 | const bufferData = {}; 15 | 16 | bufferData.color = materials.map(m => m.color); 17 | bufferData.roughness = materials.map(m => m.roughness); 18 | bufferData.metalness = materials.map(m => m.metalness); 19 | bufferData.normalScale = materials.map(m => m.normalScale); 20 | 21 | bufferData.type = materials.map(m => { 22 | if (m.shadowCatcher) { 23 | return ShadowCatcherMaterial; 24 | } 25 | if (m.transparent) { 26 | return m.solid ? ThickMaterial : ThinMaterial; 27 | } 28 | }); 29 | 30 | if (maps.map.textures.length > 0) { 31 | const { relativeSizes, texture } = makeTextureArray(gl, maps.map.textures, true); 32 | textures.diffuseMap = texture; 33 | bufferData.diffuseMapSize = relativeSizes; 34 | bufferData.diffuseMapIndex = maps.map.indices; 35 | } 36 | 37 | if (maps.normalMap.textures.length > 0) { 38 | const { relativeSizes, texture } = makeTextureArray(gl, maps.normalMap.textures, false); 39 | textures.normalMap = texture; 40 | bufferData.normalMapSize = relativeSizes; 41 | bufferData.normalMapIndex = maps.normalMap.indices; 42 | } 43 | 44 | if (pbrMap.textures.length > 0) { 45 | const { relativeSizes, texture } = makeTextureArray(gl, pbrMap.textures, false); 46 | textures.pbrMap = texture; 47 | bufferData.pbrMapSize = relativeSizes; 48 | bufferData.roughnessMapIndex = pbrMap.indices.roughnessMap; 49 | bufferData.metalnessMapIndex = pbrMap.indices.metalnessMap; 50 | } 51 | 52 | const defines = { 53 | NUM_MATERIALS: materials.length, 54 | NUM_DIFFUSE_MAPS: maps.map.textures.length, 55 | NUM_NORMAL_MAPS: maps.normalMap.textures.length, 56 | NUM_DIFFUSE_NORMAL_MAPS: Math.max(maps.map.textures.length, maps.normalMap.textures.length), 57 | NUM_PBR_MAPS: pbrMap.textures.length, 58 | }; 59 | 60 | // create temporary shader program including the Material uniform buffer 61 | // used to query the compiled structure of the uniform buffer 62 | const renderPass = 
makeRenderPass(gl, { 63 | vertex: { 64 | source: `void main() {}` 65 | }, 66 | fragment: { 67 | includes: [ materialBufferChunk ], 68 | source: `void main() {}` 69 | }, 70 | defines 71 | }); 72 | 73 | uploadToUniformBuffer(gl, renderPass.program, bufferData); 74 | 75 | return { defines, textures }; 76 | } 77 | 78 | function makeTextureArray(gl, textures, gammaCorrection = false) { 79 | const images = textures.map(t => t.image); 80 | const flipY = textures.map(t => t.flipY); 81 | const { maxSize, relativeSizes } = maxImageSize(images); 82 | 83 | // create GL Array Texture from individual textures 84 | const texture = makeTexture(gl, { 85 | width: maxSize.width, 86 | height: maxSize.height, 87 | gammaCorrection, 88 | data: images, 89 | flipY, 90 | channels: 3, 91 | minFilter: gl.LINEAR, 92 | magFilter: gl.LINEAR, 93 | }); 94 | 95 | return { 96 | texture, 97 | relativeSizes 98 | }; 99 | } 100 | 101 | function maxImageSize(images) { 102 | const maxSize = { 103 | width: 0, 104 | height: 0 105 | }; 106 | 107 | for (const image of images) { 108 | maxSize.width = Math.max(maxSize.width, image.width); 109 | maxSize.height = Math.max(maxSize.height, image.height); 110 | } 111 | 112 | const relativeSizes = []; 113 | for (const image of images) { 114 | relativeSizes.push(image.width / maxSize.width); 115 | relativeSizes.push(image.height / maxSize.height); 116 | } 117 | 118 | return { maxSize, relativeSizes }; 119 | } 120 | 121 | 122 | // Upload arrays to uniform buffer objects 123 | // Packs different arrays into vec4's to take advantage of GLSL's std140 memory layout 124 | 125 | function uploadToUniformBuffer(gl, program, bufferData) { 126 | const materialBuffer = makeUniformBuffer(gl, program, 'Materials'); 127 | 128 | materialBuffer.set('Materials.colorAndMaterialType[0]', interleave( 129 | { data: [].concat(...bufferData.color.map(d => d.toArray())), channels: 3 }, 130 | { data: bufferData.type, channels: 1} 131 | )); 132 | 133 | 
materialBuffer.set('Materials.roughnessMetalnessNormalScale[0]', interleave( 134 | { data: bufferData.roughness, channels: 1 }, 135 | { data: bufferData.metalness, channels: 1 }, 136 | { data: [].concat(...bufferData.normalScale.map(d => d.toArray())), channels: 2 } 137 | )); 138 | 139 | materialBuffer.set('Materials.diffuseNormalRoughnessMetalnessMapIndex[0]', interleave( 140 | { data: bufferData.diffuseMapIndex, channels: 1 }, 141 | { data: bufferData.normalMapIndex, channels: 1 }, 142 | { data: bufferData.roughnessMapIndex, channels: 1 }, 143 | { data: bufferData.metalnessMapIndex, channels: 1 } 144 | )); 145 | 146 | materialBuffer.set('Materials.diffuseNormalMapSize[0]', interleave( 147 | { data: bufferData.diffuseMapSize, channels: 2 }, 148 | { data: bufferData.normalMapSize, channels: 2 } 149 | )); 150 | 151 | materialBuffer.set('Materials.pbrMapSize[0]', bufferData.pbrMapSize); 152 | 153 | materialBuffer.bind(0); 154 | } 155 | 156 | function interleave(...arrays) { 157 | let maxLength = 0; 158 | for (let i = 0; i < arrays.length; i++) { 159 | const a = arrays[i]; 160 | const l = a.data ? 
a.data.length / a.channels : 0; 161 | maxLength = Math.max(maxLength, l); 162 | } 163 | 164 | const interleaved = []; 165 | for (let i = 0; i < maxLength; i++) { 166 | for (let j = 0; j < arrays.length; j++) { 167 | const { data = [], channels } = arrays[j]; 168 | for (let c = 0; c < channels; c++) { 169 | interleaved.push(data[i * channels + c]); 170 | } 171 | } 172 | } 173 | 174 | return interleaved; 175 | } 176 | -------------------------------------------------------------------------------- /src/renderer/RayTracePass.js: -------------------------------------------------------------------------------- 1 | import { bvhAccel, flattenBvh } from './bvhAccel'; 2 | import { generateEnvMapFromSceneComponents, generateBackgroundMapFromSceneBackground } from './envMapCreation'; 3 | import { envMapDistribution } from './envMapDistribution'; 4 | import fragment from './glsl/rayTrace.frag'; 5 | import { makeRenderPass } from './RenderPass'; 6 | import { makeStratifiedSamplerCombined } from './StratifiedSamplerCombined'; 7 | import { makeTexture } from './Texture'; 8 | import { clamp } from './util'; 9 | 10 | export function makeRayTracePass(gl, { 11 | bounces, // number of global illumination bounces 12 | decomposedScene, 13 | fullscreenQuad, 14 | materialBuffer, 15 | mergedMesh, 16 | optionalExtensions, 17 | }) { 18 | 19 | bounces = clamp(bounces, 1, 6); 20 | 21 | const samplingDimensions = []; 22 | 23 | for (let i = 1; i <= bounces; i++) { 24 | // specular or diffuse reflection, light importance sampling, next path direction 25 | samplingDimensions.push(2, 2, 2); 26 | if (i >= 2) { 27 | // russian roulette sampling 28 | // this step is skipped on the first bounce 29 | samplingDimensions.push(1); 30 | } 31 | } 32 | 33 | let samples; 34 | 35 | const renderPass = makeRenderPassFromScene({ 36 | bounces, decomposedScene, fullscreenQuad, gl, materialBuffer, mergedMesh, optionalExtensions, samplingDimensions, 37 | }); 38 | 39 | function setSize(width, height) { 40 | 
renderPass.setUniform('pixelSize', 1 / width, 1 / height); 41 | } 42 | 43 | // noiseImage is a 32-bit PNG image 44 | function setNoise(noiseImage) { 45 | renderPass.setTexture('noiseTex', makeTexture(gl, { 46 | data: noiseImage, 47 | wrapS: gl.REPEAT, 48 | wrapT: gl.REPEAT, 49 | storage: 'halfFloat', 50 | })); 51 | } 52 | 53 | function setCamera(camera) { 54 | renderPass.setUniform('camera.transform', camera.matrixWorld.elements); 55 | renderPass.setUniform('camera.aspect', camera.aspect); 56 | renderPass.setUniform('camera.fov', 0.5 / Math.tan(0.5 * Math.PI * camera.fov / 180)); 57 | } 58 | 59 | function setJitter(x, y) { 60 | renderPass.setUniform('jitter', x, y); 61 | } 62 | 63 | function setGBuffers({ position, normal, faceNormal, color, matProps }) { 64 | renderPass.setTexture('gPosition', position); 65 | renderPass.setTexture('gNormal', normal); 66 | renderPass.setTexture('gFaceNormal', faceNormal); 67 | renderPass.setTexture('gColor', color); 68 | renderPass.setTexture('gMatProps', matProps); 69 | } 70 | 71 | function nextSeed() { 72 | renderPass.setUniform('stratifiedSamples[0]', samples.next()); 73 | } 74 | 75 | function setStrataCount(strataCount) { 76 | if (strataCount > 1 && strataCount !== samples.strataCount) { 77 | // reinitailizing random has a performance cost. 
we can skip it if 78 | // * strataCount is 1, since a strataCount of 1 works with any sized StratifiedRandomCombined 79 | // * random already has the same strata count as desired 80 | samples = makeStratifiedSamplerCombined(strataCount, samplingDimensions); 81 | } else { 82 | samples.restart(); 83 | } 84 | 85 | renderPass.setUniform('strataSize', 1.0 / strataCount); 86 | nextSeed(); 87 | } 88 | 89 | function bindTextures() { 90 | renderPass.bindTextures(); 91 | } 92 | 93 | function draw() { 94 | renderPass.useProgram(false); 95 | fullscreenQuad.draw(); 96 | } 97 | 98 | samples = makeStratifiedSamplerCombined(1, samplingDimensions); 99 | 100 | return { 101 | bindTextures, 102 | draw, 103 | nextSeed, 104 | outputLocs: renderPass.outputLocs, 105 | setCamera, 106 | setJitter, 107 | setGBuffers, 108 | setNoise, 109 | setSize, 110 | setStrataCount, 111 | }; 112 | } 113 | function makeRenderPassFromScene({ 114 | bounces, 115 | decomposedScene, 116 | fullscreenQuad, 117 | gl, 118 | materialBuffer, 119 | mergedMesh, 120 | optionalExtensions, 121 | samplingDimensions, 122 | }) { 123 | const { OES_texture_float_linear } = optionalExtensions; 124 | 125 | const { background, directionalLights, ambientLights, environmentLights } = decomposedScene; 126 | 127 | const { geometry, materials, materialIndices } = mergedMesh; 128 | 129 | // create bounding volume hierarchy from a static scene 130 | const bvh = bvhAccel(geometry, materialIndices); 131 | const flattenedBvh = flattenBvh(bvh); 132 | const numTris = geometry.index.count / 3; 133 | 134 | const renderPass = makeRenderPass(gl, { 135 | defines: { 136 | OES_texture_float_linear, 137 | BVH_COLUMNS: textureDimensionsFromArray(flattenedBvh.count).columnsLog, 138 | INDEX_COLUMNS: textureDimensionsFromArray(numTris).columnsLog, 139 | VERTEX_COLUMNS: textureDimensionsFromArray(geometry.attributes.position.count).columnsLog, 140 | STACK_SIZE: flattenedBvh.maxDepth, 141 | BOUNCES: bounces, 142 | USE_GLASS: materials.some(m => 
m.transparent), 143 | USE_SHADOW_CATCHER: materials.some(m => m.shadowCatcher), 144 | SAMPLING_DIMENSIONS: samplingDimensions.reduce((a, b) => a + b), 145 | ...materialBuffer.defines 146 | }, 147 | fragment, 148 | vertex: fullscreenQuad.vertexShader 149 | }); 150 | 151 | renderPass.setTexture('diffuseMap', materialBuffer.textures.diffuseMap); 152 | renderPass.setTexture('normalMap', materialBuffer.textures.normalMap); 153 | renderPass.setTexture('pbrMap', materialBuffer.textures.pbrMap); 154 | 155 | renderPass.setTexture('positionBuffer', makeDataTexture(gl, geometry.getAttribute('position').array, 3)); 156 | 157 | renderPass.setTexture('normalBuffer', makeDataTexture(gl, geometry.getAttribute('normal').array, 3)); 158 | 159 | renderPass.setTexture('uvBuffer', makeDataTexture(gl, geometry.getAttribute('uv').array, 2)); 160 | 161 | renderPass.setTexture('bvhBuffer', makeDataTexture(gl, flattenedBvh.buffer, 4)); 162 | 163 | const envImage = generateEnvMapFromSceneComponents(directionalLights, ambientLights, environmentLights); 164 | const envImageTextureObject = makeTexture(gl, { 165 | data: envImage.data, 166 | storage: 'halfFloat', 167 | minFilter: OES_texture_float_linear ? gl.LINEAR : gl.NEAREST, 168 | magFilter: OES_texture_float_linear ? gl.LINEAR : gl.NEAREST, 169 | width: envImage.width, 170 | height: envImage.height, 171 | }); 172 | 173 | renderPass.setTexture('envMap', envImageTextureObject); 174 | 175 | let backgroundImageTextureObject; 176 | if (background) { 177 | const backgroundImage = generateBackgroundMapFromSceneBackground(background); 178 | backgroundImageTextureObject = makeTexture(gl, { 179 | data: backgroundImage.data, 180 | storage: 'halfFloat', 181 | minFilter: OES_texture_float_linear ? gl.LINEAR : gl.NEAREST, 182 | magFilter: OES_texture_float_linear ? 
gl.LINEAR : gl.NEAREST, 183 | width: backgroundImage.width, 184 | height: backgroundImage.height, 185 | }); 186 | } else { 187 | backgroundImageTextureObject = envImageTextureObject; 188 | } 189 | 190 | renderPass.setTexture('backgroundMap', backgroundImageTextureObject); 191 | 192 | const distribution = envMapDistribution(envImage); 193 | 194 | renderPass.setTexture('envMapDistribution', makeTexture(gl, { 195 | data: distribution.data, 196 | storage: 'halfFloat', 197 | width: distribution.width, 198 | height: distribution.height, 199 | })); 200 | 201 | return renderPass; 202 | } 203 | 204 | function textureDimensionsFromArray(count) { 205 | const columnsLog = Math.round(Math.log2(Math.sqrt(count))); 206 | const columns = 2 ** columnsLog; 207 | const rows = Math.ceil(count / columns); 208 | return { 209 | columnsLog, 210 | columns, 211 | rows, 212 | size: rows * columns, 213 | }; 214 | } 215 | 216 | function makeDataTexture(gl, dataArray, channels) { 217 | const textureDim = textureDimensionsFromArray(dataArray.length / channels); 218 | return makeTexture(gl, { 219 | data: padArray(dataArray, channels * textureDim.size), 220 | width: textureDim.columns, 221 | height: textureDim.rows, 222 | }); 223 | } 224 | 225 | // expand array to the given length 226 | function padArray(typedArray, length) { 227 | const newArray = new typedArray.constructor(length); 228 | newArray.set(typedArray); 229 | return newArray; 230 | } 231 | -------------------------------------------------------------------------------- /src/renderer/RenderPass.js: -------------------------------------------------------------------------------- 1 | import { compileShader, createProgram, getAttributes } from './glUtil'; 2 | import { makeUniformSetter } from './UniformSetter'; 3 | 4 | export function makeRenderPass(gl, params) { 5 | const { 6 | fragment, 7 | vertex, 8 | } = params; 9 | 10 | const vertexCompiled = vertex instanceof WebGLShader ? 
vertex : makeVertexShader(gl, params); 11 | 12 | const fragmentCompiled = fragment instanceof WebGLShader ? fragment : makeFragmentShader(gl, params); 13 | 14 | const program = createProgram(gl, vertexCompiled, fragmentCompiled); 15 | 16 | return { 17 | ...makeRenderPassFromProgram(gl, program), 18 | outputLocs: fragment.outputs ? getOutputLocations(fragment.outputs) : {} 19 | }; 20 | } 21 | 22 | export function makeVertexShader(gl, { defines, vertex }) { 23 | return makeShaderStage(gl, gl.VERTEX_SHADER, vertex, defines); 24 | } 25 | 26 | export function makeFragmentShader(gl, { defines, fragment }) { 27 | return makeShaderStage(gl, gl.FRAGMENT_SHADER, fragment, defines); 28 | } 29 | 30 | function makeRenderPassFromProgram(gl, program) { 31 | 32 | const uniformSetter = makeUniformSetter(gl, program); 33 | 34 | const textures = {}; 35 | 36 | let nextTexUnit = 1; 37 | 38 | function setTexture(name, texture) { 39 | if (!texture) { 40 | return; 41 | } 42 | 43 | if (!textures[name]) { 44 | const unit = nextTexUnit++; 45 | 46 | uniformSetter.setUniform(name, unit); 47 | 48 | textures[name] = { 49 | unit, 50 | tex: texture 51 | }; 52 | } else { 53 | textures[name].tex = texture; 54 | } 55 | } 56 | 57 | function bindTextures() { 58 | for (let name in textures) { 59 | const { tex, unit } = textures[name]; 60 | gl.activeTexture(gl.TEXTURE0 + unit); 61 | gl.bindTexture(tex.target, tex.texture); 62 | } 63 | } 64 | 65 | function useProgram(autoBindTextures = true) { 66 | gl.useProgram(program); 67 | uniformSetter.upload(); 68 | if (autoBindTextures) { 69 | bindTextures(); 70 | } 71 | } 72 | 73 | return { 74 | attribLocs: getAttributes(gl, program), 75 | bindTextures, 76 | program, 77 | setTexture, 78 | setUniform: uniformSetter.setUniform, 79 | textures, 80 | useProgram, 81 | }; 82 | } 83 | 84 | function makeShaderStage(gl, type, shader, defines) { 85 | let str = '#version 300 es\nprecision mediump float;\nprecision mediump int;\n'; 86 | 87 | if (defines) { 88 | str += 
addDefines(defines); 89 | } 90 | 91 | if (type === gl.FRAGMENT_SHADER && shader.outputs) { 92 | str += addOutputs(shader.outputs); 93 | } 94 | 95 | if (shader.includes) { 96 | str += addIncludes(shader.includes, defines); 97 | } 98 | 99 | if (typeof shader.source === 'function') { 100 | str += shader.source(defines); 101 | } else { 102 | str += shader.source; 103 | } 104 | 105 | return compileShader(gl, type, str); 106 | } 107 | 108 | function addDefines(defines) { 109 | let str = ''; 110 | 111 | for (const name in defines) { 112 | const value = defines[name]; 113 | 114 | // don't define falsy values such as false, 0, and ''. 115 | // this adds support for #ifdef on falsy values 116 | if (value) { 117 | str += `#define ${name} ${value}\n`; 118 | } 119 | } 120 | 121 | return str; 122 | } 123 | 124 | function addOutputs(outputs) { 125 | let str = ''; 126 | 127 | const locations = getOutputLocations(outputs); 128 | 129 | for (let name in locations) { 130 | const location = locations[name]; 131 | str += `layout(location = ${location}) out vec4 out_${name};\n`; 132 | } 133 | 134 | return str; 135 | } 136 | 137 | function addIncludes(includes, defines) { 138 | let str = ''; 139 | 140 | for (let include of includes) { 141 | if (typeof include === 'function') { 142 | str += include(defines); 143 | } else { 144 | str += include; 145 | } 146 | } 147 | 148 | return str; 149 | } 150 | 151 | function getOutputLocations(outputs) { 152 | let locations = {}; 153 | 154 | for (let i = 0; i < outputs.length; i++) { 155 | locations[outputs[i]] = i; 156 | } 157 | 158 | return locations; 159 | } 160 | -------------------------------------------------------------------------------- /src/renderer/RenderSize.js: -------------------------------------------------------------------------------- 1 | import { clamp } from './util'; 2 | import { Vector2 } from 'three'; 3 | 4 | export function makeRenderSize(gl) { 5 | const desiredMsPerFrame = 20; 6 | 7 | let fullWidth; 8 | let fullHeight; 9 | 10 
| let renderWidth; 11 | let renderHeight; 12 | let scale = new Vector2(1, 1); 13 | 14 | let pixelsPerFrame = pixelsPerFrameEstimate(gl); 15 | 16 | function setSize(w, h) { 17 | fullWidth = w; 18 | fullHeight = h; 19 | calcDimensions(); 20 | } 21 | 22 | function calcDimensions() { 23 | const aspectRatio = fullWidth / fullHeight; 24 | renderWidth = Math.round(clamp(Math.sqrt(pixelsPerFrame * aspectRatio), 1, fullWidth)); 25 | renderHeight = Math.round(clamp(renderWidth / aspectRatio, 1, fullHeight)); 26 | scale.set(renderWidth / fullWidth, renderHeight / fullHeight); 27 | } 28 | 29 | function adjustSize(elapsedFrameMs) { 30 | if (!elapsedFrameMs) { 31 | return; 32 | } 33 | 34 | // tweak to find balance. higher = faster convergence, lower = less fluctuations to microstutters 35 | const strength = 600; 36 | 37 | const error = desiredMsPerFrame - elapsedFrameMs; 38 | 39 | pixelsPerFrame += strength * error; 40 | pixelsPerFrame = clamp(pixelsPerFrame, 8192, fullWidth * fullHeight); 41 | calcDimensions(); 42 | } 43 | 44 | return { 45 | adjustSize, 46 | setSize, 47 | scale, 48 | get width() { 49 | return renderWidth; 50 | }, 51 | get height() { 52 | return renderHeight; 53 | } 54 | }; 55 | } 56 | 57 | function pixelsPerFrameEstimate(gl) { 58 | const maxRenderbufferSize = gl.getParameter(gl.MAX_RENDERBUFFER_SIZE); 59 | 60 | if (maxRenderbufferSize <= 8192) { 61 | return 80000; 62 | } else if (maxRenderbufferSize === 16384) { 63 | return 150000; 64 | } else if (maxRenderbufferSize >= 32768) { 65 | return 400000; 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /src/renderer/ReprojectPass.js: -------------------------------------------------------------------------------- 1 | import fragment from './glsl/reproject.frag'; 2 | import { makeRenderPass } from './RenderPass'; 3 | import * as THREE from 'three'; 4 | 5 | export function makeReprojectPass(gl, params) { 6 | const { 7 | fullscreenQuad, 8 | maxReprojectedSamples, 9 | } 
= params; 10 | 11 | const renderPass = makeRenderPass(gl, { 12 | defines: { 13 | MAX_SAMPLES: maxReprojectedSamples.toFixed(1) 14 | }, 15 | vertex: fullscreenQuad.vertexShader, 16 | fragment 17 | }); 18 | 19 | const historyCamera = new THREE.Matrix4(); 20 | 21 | function setPreviousCamera(camera) { 22 | historyCamera.multiplyMatrices(camera.projectionMatrix, camera.matrixWorldInverse); 23 | 24 | renderPass.setUniform('historyCamera', historyCamera.elements); 25 | } 26 | 27 | function setJitter(x, y) { 28 | renderPass.setUniform('jitter', x, y); 29 | } 30 | 31 | function draw(params) { 32 | const { 33 | blendAmount, 34 | light, 35 | lightScale, 36 | position, 37 | previousLight, 38 | previousLightScale, 39 | previousPosition, 40 | } = params; 41 | 42 | renderPass.setUniform('blendAmount', blendAmount); 43 | renderPass.setUniform('lightScale', lightScale.x, lightScale.y); 44 | renderPass.setUniform('previousLightScale', previousLightScale.x, previousLightScale.y); 45 | 46 | renderPass.setTexture('lightTex', light); 47 | renderPass.setTexture('positionTex', position); 48 | renderPass.setTexture('previousLightTex', previousLight); 49 | renderPass.setTexture('previousPositionTex', previousPosition); 50 | 51 | renderPass.useProgram(); 52 | fullscreenQuad.draw(); 53 | } 54 | 55 | return { 56 | draw, 57 | setJitter, 58 | setPreviousCamera, 59 | }; 60 | } 61 | -------------------------------------------------------------------------------- /src/renderer/StratifiedSampler.js: -------------------------------------------------------------------------------- 1 | /* 2 | Stratified Sampling 3 | http://www.pbr-book.org/3ed-2018/Sampling_and_Reconstruction/Stratified_Sampling.html 4 | 5 | Repeatedly sampling random numbers between [0, 1) has the effect of producing numbers that are coincidentally clustered together, 6 | instead of being evenly spaced across the domain. 
7 | This produces low quality results for the path tracer since clustered samples send too many rays in similar directions. 8 | 9 | We can reduce the amount of clustering of random numbers by using stratified sampling. 10 | Stratification divides the [0, 1) range into partitions, or stratum, of equal size. 11 | Each invocation of the stratified sampler draws one uniform random number from one stratum from a shuffled sequence of stratums. 12 | When every stratum has been sampled once, this sequence is shuffled again and the process repeats. 13 | 14 | The returned sample ranges between [0, numberOfStratum). 15 | The integer part ideintifies the stratum (the first stratum being 0). 16 | The fractional part is the random number. 17 | 18 | To obtain the stratified sample between [0, 1), divide the returned sample by the stratum count. 19 | */ 20 | 21 | import { shuffle } from "./util"; 22 | 23 | export function makeStratifiedSampler(strataCount, dimensions) { 24 | const strata = []; 25 | const l = strataCount ** dimensions; 26 | for (let i = 0; i < l; i++) { 27 | strata[i] = i; 28 | } 29 | 30 | let index = strata.length; 31 | 32 | const sample = []; 33 | 34 | function restart() { 35 | index = 0; 36 | } 37 | 38 | function next() { 39 | if (index >= strata.length) { 40 | shuffle(strata); 41 | restart(); 42 | } 43 | let stratum = strata[index++]; 44 | 45 | for (let i = 0; i < dimensions; i++) { 46 | sample[i] = stratum % strataCount + Math.random(); 47 | stratum = Math.floor(stratum / strataCount); 48 | } 49 | 50 | return sample; 51 | } 52 | 53 | return { 54 | next, 55 | restart, 56 | strataCount 57 | }; 58 | } 59 | -------------------------------------------------------------------------------- /src/renderer/StratifiedSamplerCombined.js: -------------------------------------------------------------------------------- 1 | /* 2 | Stratified Sampling 3 | http://www.pbr-book.org/3ed-2018/Sampling_and_Reconstruction/Stratified_Sampling.html 4 | 5 | It is computationally 
unfeasible to compute stratified sampling for large dimensions (>2) 6 | Instead, we can compute stratified sampling for lower dimensional patterns that sum to the high dimension 7 | e.g. instead of sampling a 6D domain, we sample a 2D + 2D + 2D domain. 8 | This reaps many benefits of stratification while still allowing for small strata sizes. 9 | */ 10 | 11 | import { makeStratifiedSampler } from "./StratifiedSampler"; 12 | 13 | export function makeStratifiedSamplerCombined(strataCount, listOfDimensions) { 14 | const strataObjs = []; 15 | 16 | for (const dim of listOfDimensions) { 17 | strataObjs.push(makeStratifiedSampler(strataCount, dim)); 18 | } 19 | 20 | const combined = []; 21 | 22 | function next() { 23 | let i = 0; 24 | 25 | for (const strata of strataObjs) { 26 | const nums = strata.next(); 27 | 28 | for (const num of nums) { 29 | combined[i++] = num; 30 | } 31 | } 32 | 33 | return combined; 34 | } 35 | 36 | function restart() { 37 | for (const strata of strataObjs) { 38 | strata.restart(); 39 | } 40 | } 41 | 42 | return { 43 | next, 44 | restart, 45 | strataCount 46 | }; 47 | } 48 | -------------------------------------------------------------------------------- /src/renderer/Texture.js: -------------------------------------------------------------------------------- 1 | import { clamp } from './util'; 2 | 3 | export function makeTexture(gl, params) { 4 | let { 5 | width = null, 6 | height = null, 7 | 8 | // A single HTMLImageElement, ImageData, or TypedArray, 9 | // Or an array of any of these objects. In this case an Array Texture will be created 10 | data = null, 11 | 12 | // If greater than 1, create an Array Texture of this length 13 | length = 1, 14 | 15 | // Number of channels, [1-4]. 
If left blank, the the function will decide the number of channels automatically from the data 16 | channels = null, 17 | 18 | // Either 'byte' or 'float' 19 | // If left empty, the function will decide the format automatically from the data 20 | storage = null, 21 | 22 | // Reverse the texture across the y-axis. 23 | flipY = false, 24 | 25 | // sampling properties 26 | gammaCorrection = false, 27 | wrapS = gl.CLAMP_TO_EDGE, 28 | wrapT = gl.CLAMP_TO_EDGE, 29 | minFilter = gl.NEAREST, 30 | magFilter = gl.NEAREST, 31 | } = params; 32 | 33 | width = width || data.width || 0; 34 | height = height || data.height || 0; 35 | 36 | const texture = gl.createTexture(); 37 | 38 | let target; 39 | let dataArray; 40 | 41 | // if data is a JS array but not a TypedArray, assume data is an array of images and create a GL Array Texture 42 | if (Array.isArray(data)) { 43 | dataArray = data; 44 | data = dataArray[0]; 45 | } 46 | 47 | target = dataArray || length > 1 ? gl.TEXTURE_2D_ARRAY : gl.TEXTURE_2D; 48 | 49 | gl.activeTexture(gl.TEXTURE0); 50 | gl.bindTexture(target, texture); 51 | 52 | gl.texParameteri(target, gl.TEXTURE_WRAP_S, wrapS); 53 | gl.texParameteri(target, gl.TEXTURE_WRAP_T, wrapT); 54 | gl.texParameteri(target, gl.TEXTURE_MIN_FILTER, minFilter); 55 | gl.texParameteri(target, gl.TEXTURE_MAG_FILTER, magFilter); 56 | 57 | if (!channels) { 58 | if (data && data.length) { 59 | channels = data.length / (width * height); // infer number of channels from data size 60 | } else { 61 | channels = 4; 62 | } 63 | } 64 | 65 | channels = clamp(channels, 1, 4); 66 | 67 | const { type, format, internalFormat } = getTextureFormat(gl, channels, storage, data, gammaCorrection); 68 | 69 | if (dataArray) { 70 | gl.texStorage3D(target, 1, internalFormat, width, height, dataArray.length); 71 | for (let i = 0; i < dataArray.length; i++) { 72 | // if layer is an HTMLImageElement, use the .width and .height properties of each layer 73 | // otherwise use the max size of the array texture 74 | 
const layerWidth = dataArray[i].width || width; 75 | const layerHeight = dataArray[i].height || height; 76 | 77 | gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, Array.isArray(flipY) ? flipY[i] : flipY); 78 | 79 | gl.texSubImage3D(target, 0, 0, 0, i, layerWidth, layerHeight, 1, format, type, dataArray[i]); 80 | } 81 | } else if (length > 1) { 82 | // create empty array texture 83 | gl.texStorage3D(target, 1, internalFormat, width, height, length); 84 | } else { 85 | gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, flipY); 86 | gl.texStorage2D(target, 1, internalFormat, width, height); 87 | if (data) { 88 | gl.texSubImage2D(target, 0, 0, 0, width, height, format, type, data); 89 | } 90 | } 91 | 92 | // return state to default 93 | gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false); 94 | 95 | return { 96 | target, 97 | texture 98 | }; 99 | } 100 | 101 | export function makeDepthTarget(gl, width, height) { 102 | const texture = gl.createRenderbuffer(); 103 | const target = gl.RENDERBUFFER; 104 | 105 | gl.bindRenderbuffer(target, texture); 106 | gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT24, width, height); 107 | gl.bindRenderbuffer(target, null); 108 | 109 | return { 110 | target, 111 | texture 112 | }; 113 | } 114 | 115 | function getFormat(gl, channels) { 116 | const map = { 117 | 1: gl.RED, 118 | 2: gl.RG, 119 | 3: gl.RGB, 120 | 4: gl.RGBA 121 | }; 122 | return map[channels]; 123 | } 124 | 125 | function getTextureFormat(gl, channels, storage, data, gammaCorrection) { 126 | let type; 127 | let internalFormat; 128 | 129 | const isByteArray = 130 | data instanceof Uint8Array || 131 | data instanceof HTMLImageElement || 132 | data instanceof HTMLCanvasElement || 133 | data instanceof ImageData; 134 | 135 | const isFloatArray = data instanceof Float32Array; 136 | 137 | if (storage === 'byte' || (!storage && isByteArray)) { 138 | internalFormat = { 139 | 1: gl.R8, 140 | 2: gl.RG8, 141 | 3: gammaCorrection ? gl.SRGB8 : gl.RGB8, 142 | 4: gammaCorrection ? 
gl.SRGB8_ALPHA8 : gl.RGBA8 143 | }[channels]; 144 | 145 | type = gl.UNSIGNED_BYTE; 146 | } else if (storage === 'float' || (!storage && isFloatArray)) { 147 | internalFormat = { 148 | 1: gl.R32F, 149 | 2: gl.RG32F, 150 | 3: gl.RGB32F, 151 | 4: gl.RGBA32F 152 | }[channels]; 153 | 154 | type = gl.FLOAT; 155 | } else if (storage === 'halfFloat') { 156 | internalFormat = { 157 | 1: gl.R16F, 158 | 2: gl.RG16F, 159 | 3: gl.RGB16F, 160 | 4: gl.RGBA16F 161 | }[channels]; 162 | 163 | type = gl.FLOAT; 164 | } else if (storage === 'snorm') { 165 | internalFormat = { 166 | 1: gl.R8_SNORM, 167 | 2: gl.RG8_SNORM, 168 | 3: gl.RGB8_SNORM, 169 | 4: gl.RGBA8_SNORM, 170 | }[channels]; 171 | 172 | type = gl.UNSIGNED_BYTE; 173 | } 174 | 175 | const format = getFormat(gl, channels); 176 | 177 | return { 178 | format, 179 | internalFormat, 180 | type 181 | }; 182 | } 183 | -------------------------------------------------------------------------------- /src/renderer/TileRender.js: -------------------------------------------------------------------------------- 1 | import { clamp } from './util'; 2 | 3 | // TileRender is based on the concept of a compute shader's work group. 4 | 5 | // Sampling the scene with the RayTracingRenderer can be very slow (<1 fps). 6 | // This overworks the GPU and tends to lock up the OS, making it unresponsive. 7 | 8 | // To fix this, we can split the screen into smaller tiles, and sample the scene one tile at a time 9 | // The tile size is set such that each tile takes approximatly a constant amount of time to render. 10 | 11 | // Since the render time of a tile is dependent on the device, we find the desired tile dimensions by measuring 12 | // the time it takes to render an arbitrarily-set tile size and adjusting the size according to the benchmark. 
13 | 14 | export function makeTileRender(gl) { 15 | const desiredMsPerTile = 21; 16 | 17 | let currentTile = -1; 18 | let numTiles = 1; 19 | 20 | let tileWidth; 21 | let tileHeight; 22 | 23 | let columns; 24 | let rows; 25 | 26 | let width = 0; 27 | let height = 0; 28 | 29 | let totalElapsedMs; 30 | 31 | // initial number of pixels per rendered tile 32 | // based on correlation between system performance and max supported render buffer size 33 | // adjusted dynamically according to system performance 34 | let pixelsPerTile = pixelsPerTileEstimate(gl); 35 | 36 | function reset() { 37 | currentTile = -1; 38 | totalElapsedMs = NaN; 39 | } 40 | 41 | function setSize(w, h) { 42 | width = w; 43 | height = h; 44 | reset(); 45 | calcTileDimensions(); 46 | } 47 | 48 | function calcTileDimensions() { 49 | const aspectRatio = width / height; 50 | 51 | // quantize the width of the tile so that it evenly divides the entire window 52 | tileWidth = Math.ceil(width / Math.round(width / Math.sqrt(pixelsPerTile * aspectRatio))); 53 | tileHeight = Math.ceil(tileWidth / aspectRatio); 54 | 55 | columns = Math.ceil(width / tileWidth); 56 | rows = Math.ceil(height / tileHeight); 57 | numTiles = columns * rows; 58 | } 59 | 60 | function updatePixelsPerTile() { 61 | const msPerTile = totalElapsedMs / numTiles; 62 | 63 | const error = desiredMsPerTile - msPerTile; 64 | 65 | // tweak to find balance. 
higher = faster convergence, lower = less fluctuations to microstutters 66 | const strength = 5000; 67 | 68 | // sqrt prevents massive fluctuations in pixelsPerTile for the occasional stutter 69 | pixelsPerTile += strength * Math.sign(error) * Math.sqrt(Math.abs(error)); 70 | pixelsPerTile = clamp(pixelsPerTile, 8192, width * height); 71 | } 72 | 73 | function nextTile(elapsedFrameMs) { 74 | currentTile++; 75 | totalElapsedMs += elapsedFrameMs; 76 | 77 | if (currentTile % numTiles === 0) { 78 | if (totalElapsedMs) { 79 | updatePixelsPerTile(); 80 | calcTileDimensions(); 81 | } 82 | 83 | totalElapsedMs = 0; 84 | currentTile = 0; 85 | } 86 | 87 | const isLastTile = currentTile === numTiles - 1; 88 | 89 | const x = currentTile % columns; 90 | const y = Math.floor(currentTile / columns) % rows; 91 | 92 | return { 93 | x: x * tileWidth, 94 | y: y * tileHeight, 95 | tileWidth, 96 | tileHeight, 97 | isFirstTile: currentTile === 0, 98 | isLastTile, 99 | }; 100 | } 101 | 102 | return { 103 | nextTile, 104 | reset, 105 | setSize, 106 | }; 107 | } 108 | 109 | function pixelsPerTileEstimate(gl) { 110 | const maxRenderbufferSize = gl.getParameter(gl.MAX_RENDERBUFFER_SIZE); 111 | 112 | if (maxRenderbufferSize <= 8192) { 113 | return 200000; 114 | } else if (maxRenderbufferSize === 16384) { 115 | return 400000; 116 | } else if (maxRenderbufferSize >= 32768) { 117 | return 600000; 118 | } 119 | } 120 | -------------------------------------------------------------------------------- /src/renderer/ToneMapPass.js: -------------------------------------------------------------------------------- 1 | import fragment from './glsl/toneMap.frag'; 2 | import { makeRenderPass } from './RenderPass'; 3 | import * as THREE from 'three'; 4 | 5 | const toneMapFunctions = { 6 | [THREE.LinearToneMapping]: 'linear', 7 | [THREE.ReinhardToneMapping]: 'reinhard', 8 | [THREE.Uncharted2ToneMapping]: 'uncharted2', 9 | [THREE.CineonToneMapping]: 'cineon', 10 | [THREE.ACESFilmicToneMapping]: 'acesFilmic' 11 
| }; 12 | 13 | export function makeToneMapPass(gl, params) { 14 | const { 15 | fullscreenQuad, 16 | toneMappingParams 17 | } = params; 18 | 19 | const renderPassConfig = { 20 | gl, 21 | defines: { 22 | TONE_MAPPING: toneMapFunctions[toneMappingParams.toneMapping] || 'linear', 23 | WHITE_POINT: toneMappingParams.whitePoint.toExponential(), // toExponential allows integers to be represented as GLSL floats 24 | EXPOSURE: toneMappingParams.exposure.toExponential() 25 | }, 26 | vertex: fullscreenQuad.vertexShader, 27 | fragment, 28 | }; 29 | 30 | renderPassConfig.defines.EDGE_PRESERVING_UPSCALE = true; 31 | const renderPassUpscale = makeRenderPass(gl, renderPassConfig); 32 | 33 | renderPassConfig.defines.EDGE_PRESERVING_UPSCALE = false; 34 | const renderPassNative = makeRenderPass(gl, renderPassConfig); 35 | 36 | function draw(params) { 37 | const { 38 | light, 39 | lightScale, 40 | position 41 | } = params; 42 | 43 | const renderPass = 44 | lightScale.x !== 1 && lightScale.y !== 1 ? 45 | renderPassUpscale : 46 | renderPassNative; 47 | 48 | renderPass.setUniform('lightScale', lightScale.x, lightScale.y); 49 | renderPass.setTexture('lightTex', light); 50 | renderPass.setTexture('positionTex', position); 51 | 52 | renderPass.useProgram(); 53 | fullscreenQuad.draw(); 54 | } 55 | 56 | return { 57 | draw 58 | }; 59 | } 60 | -------------------------------------------------------------------------------- /src/renderer/UniformBuffer.js: -------------------------------------------------------------------------------- 1 | export function makeUniformBuffer(gl, program, blockName) { 2 | const blockIndex = gl.getUniformBlockIndex(program, blockName); 3 | const blockSize = gl.getActiveUniformBlockParameter(program, blockIndex, gl.UNIFORM_BLOCK_DATA_SIZE); 4 | 5 | const uniforms = getUniformBlockInfo(gl, program, blockIndex); 6 | 7 | const buffer = gl.createBuffer(); 8 | gl.bindBuffer(gl.UNIFORM_BUFFER, buffer); 9 | gl.bufferData(gl.UNIFORM_BUFFER, blockSize, gl.STATIC_DRAW); 10 | 11 
| const data = new DataView(new ArrayBuffer(blockSize)); 12 | 13 | function set(name, value) { 14 | if (!uniforms[name]) { 15 | // console.warn('No uniform property with name ', name); 16 | return; 17 | } 18 | 19 | const { type, size, offset, stride } = uniforms[name]; 20 | 21 | switch(type) { 22 | case gl.FLOAT: 23 | setData(data, 'setFloat32', size, offset, stride, 1, value); 24 | break; 25 | case gl.FLOAT_VEC2: 26 | setData(data, 'setFloat32', size, offset, stride, 2, value); 27 | break; 28 | case gl.FLOAT_VEC3: 29 | setData(data, 'setFloat32', size, offset, stride, 3, value); 30 | break; 31 | case gl.FLOAT_VEC4: 32 | setData(data, 'setFloat32', size, offset, stride, 4, value); 33 | break; 34 | case gl.INT: 35 | setData(data, 'setInt32', size, offset, stride, 1, value); 36 | break; 37 | case gl.INT_VEC2: 38 | setData(data, 'setInt32', size, offset, stride, 2, value); 39 | break; 40 | case gl.INT_VEC3: 41 | setData(data, 'setInt32', size, offset, stride, 3, value); 42 | break; 43 | case gl.INT_VEC4: 44 | setData(data, 'setInt32', size, offset, stride, 4, value); 45 | break; 46 | case gl.BOOL: 47 | setData(data, 'setUint32', size, offset, stride, 1, value); 48 | break; 49 | default: 50 | console.warn('UniformBuffer: Unsupported type'); 51 | } 52 | } 53 | 54 | function bind(index) { 55 | gl.bindBuffer(gl.UNIFORM_BUFFER, buffer); 56 | gl.bufferSubData(gl.UNIFORM_BUFFER, 0, data); 57 | gl.bindBufferBase(gl.UNIFORM_BUFFER, index, buffer); 58 | } 59 | 60 | return { 61 | set, 62 | bind 63 | }; 64 | } 65 | 66 | function getUniformBlockInfo(gl, program, blockIndex) { 67 | const indices = gl.getActiveUniformBlockParameter(program, blockIndex, gl.UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES); 68 | const offset = gl.getActiveUniforms(program, indices, gl.UNIFORM_OFFSET); 69 | const stride = gl.getActiveUniforms(program, indices, gl.UNIFORM_ARRAY_STRIDE); 70 | 71 | const uniforms = {}; 72 | for (let i = 0; i < indices.length; i++) { 73 | const { name, type, size } = 
gl.getActiveUniform(program, indices[i]); 74 | uniforms[name] = { 75 | type, 76 | size, 77 | offset: offset[i], 78 | stride: stride[i] 79 | }; 80 | } 81 | 82 | return uniforms; 83 | } 84 | 85 | function setData(dataView, setter, size, offset, stride, components, value) { 86 | const l = Math.min(value.length / components, size); 87 | for (let i = 0; i < l; i++) { 88 | for (let k = 0; k < components; k++) { 89 | dataView[setter](offset + i * stride + k * 4, value[components * i + k], true); 90 | } 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /src/renderer/UniformSetter.js: -------------------------------------------------------------------------------- 1 | import { getUniforms } from './glUtil'; 2 | 3 | let typeMap; 4 | 5 | export function makeUniformSetter(gl, program) { 6 | const uniformInfo = getUniforms(gl, program); 7 | const uniforms = {}; 8 | const needsUpload = []; 9 | 10 | for (let name in uniformInfo) { 11 | const { type, location } = uniformInfo[name]; 12 | 13 | const uniform = { 14 | type, 15 | location, 16 | v0: 0, 17 | v1: 0, 18 | v2: 0, 19 | v3: 0 20 | }; 21 | 22 | uniforms[name] = uniform; 23 | } 24 | 25 | const failedUnis = new Set(); 26 | 27 | function setUniform(name, v0, v1, v2, v3) { 28 | // v0 - v4 are the values to be passed to the uniform 29 | // v0 can either be a number or an array, and v1-v3 are optional 30 | const uni = uniforms[name]; 31 | 32 | if (!uni) { 33 | if (!failedUnis.has(name)) { 34 | console.warn(`Uniform "${name}" does not exist in shader`); 35 | failedUnis.add(name); 36 | } 37 | 38 | return; 39 | } 40 | 41 | uni.v0 = v0; 42 | uni.v1 = v1; 43 | uni.v2 = v2; 44 | uni.v3 = v3; 45 | needsUpload.push(uni); 46 | } 47 | 48 | typeMap = typeMap || initTypeMap(gl); 49 | 50 | function upload() { 51 | while (needsUpload.length > 0) { 52 | 53 | const { type, location, v0, v1, v2, v3 } = needsUpload.pop(); 54 | const glMethod = typeMap[type]; 55 | 56 | if (v0.length) { 57 | if 
(glMethod.matrix) { 58 | const array = v0; 59 | const transpose = v1 || false; 60 | gl[glMethod.matrix](location, transpose, array); 61 | } else { 62 | gl[glMethod.array](location, v0); 63 | } 64 | } else { 65 | gl[glMethod.values](location, v0, v1, v2, v3); 66 | } 67 | } 68 | } 69 | 70 | return { 71 | setUniform, 72 | upload, 73 | }; 74 | } 75 | 76 | function initTypeMap(gl) { 77 | return { 78 | [gl.FLOAT]: glName(1, 'f'), 79 | [gl.FLOAT_VEC2]: glName(2, 'f'), 80 | [gl.FLOAT_VEC3]: glName(3, 'f'), 81 | [gl.FLOAT_VEC4]: glName(4, 'f'), 82 | [gl.INT]: glName(1, 'i'), 83 | [gl.INT_VEC2]: glName(2, 'i'), 84 | [gl.INT_VEC3]: glName(3, 'i'), 85 | [gl.INT_VEC4]: glName(4, 'i'), 86 | [gl.SAMPLER_2D]: glName(1, 'i'), 87 | [gl.SAMPLER_2D_ARRAY]: glName(1, 'i'), 88 | [gl.FLOAT_MAT2]: glNameMatrix(2, 2), 89 | [gl.FLOAT_MAT3]: glNameMatrix(3, 3), 90 | [gl.FLOAT_MAT4]: glNameMatrix(4, 4) 91 | }; 92 | } 93 | 94 | function glName(numComponents, type) { 95 | return { 96 | values: `uniform${numComponents}${type}`, 97 | array: `uniform${numComponents}${type}v` 98 | }; 99 | } 100 | 101 | function glNameMatrix(rows, columns) { 102 | return { 103 | matrix: rows === columns ? 
104 | `uniformMatrix${rows}fv` : 105 | `uniformMatrix${rows}x${columns}fv` 106 | }; 107 | } 108 | -------------------------------------------------------------------------------- /src/renderer/bvhAccel.js: -------------------------------------------------------------------------------- 1 | // Create a bounding volume hierarchy of scene geometry 2 | // Uses the surface area heuristic (SAH) algorithm for efficient partitioning 3 | // http://www.pbr-book.org/3ed-2018/Primitives_and_Intersection_Acceleration/Bounding_Volume_Hierarchies.html 4 | 5 | import { Box3, Vector3 } from 'three'; 6 | import { partition, nthElement } from './bvhUtil'; 7 | 8 | const size = new Vector3(); 9 | 10 | export function bvhAccel(geometry) { 11 | const primitiveInfo = makePrimitiveInfo(geometry); 12 | const node = recursiveBuild(primitiveInfo, 0, primitiveInfo.length); 13 | 14 | return node; 15 | } 16 | 17 | export function flattenBvh(bvh) { 18 | const flat = []; 19 | const isBounds = []; 20 | 21 | const splitAxisMap = { 22 | x: 0, 23 | y: 1, 24 | z: 2 25 | }; 26 | 27 | let maxDepth = 1; 28 | const traverse = (node, depth = 1) => { 29 | 30 | maxDepth = Math.max(depth, maxDepth); 31 | 32 | if (node.primitives) { 33 | for (let i = 0; i < node.primitives.length; i++) { 34 | const p = node.primitives[i]; 35 | flat.push( 36 | p.indices[0], p.indices[1], p.indices[2], node.primitives.length, 37 | p.faceNormal.x, p.faceNormal.y, p.faceNormal.z, p.materialIndex 38 | ); 39 | isBounds.push(false); 40 | } 41 | } else { 42 | const bounds = node.bounds; 43 | 44 | flat.push( 45 | bounds.min.x, bounds.min.y, bounds.min.z, splitAxisMap[node.splitAxis], 46 | bounds.max.x, bounds.max.y, bounds.max.z, null // pointer to second shild 47 | ); 48 | 49 | const i = flat.length - 1; 50 | isBounds.push(true); 51 | 52 | traverse(node.child0, depth + 1); 53 | flat[i] = flat.length / 4; // pointer to second child 54 | traverse(node.child1, depth + 1); 55 | } 56 | }; 57 | 58 | traverse(bvh); 59 | 60 | const buffer = 
new ArrayBuffer(4 * flat.length); 61 | const floatView = new Float32Array(buffer); 62 | const intView = new Int32Array(buffer); 63 | 64 | for (let i = 0; i < isBounds.length; i++) { 65 | let k = 8 * i; 66 | 67 | if (isBounds[i]) { 68 | floatView[k] = flat[k]; 69 | floatView[k + 1] = flat[k + 1]; 70 | floatView[k + 2] = flat[k + 2]; 71 | intView[k + 3] = flat[k + 3]; 72 | } else { 73 | intView[k] = flat[k]; 74 | intView[k + 1] = flat[k + 1]; 75 | intView[k + 2] = flat[k + 2]; 76 | intView[k + 3] = -flat[k + 3]; // negative signals to shader that this node is a triangle 77 | } 78 | 79 | floatView[k + 4] = flat[k + 4]; 80 | floatView[k + 5] = flat[k + 5]; 81 | floatView[k + 6] = flat[k + 6]; 82 | intView[k + 7] = flat[k + 7]; 83 | } 84 | 85 | return { 86 | maxDepth, 87 | count: flat.length / 4, 88 | buffer: floatView 89 | }; 90 | } 91 | 92 | function makePrimitiveInfo(geometry) { 93 | const primitiveInfo = []; 94 | const indices = geometry.getIndex().array; 95 | const position = geometry.getAttribute('position'); 96 | const materialMeshIndex = geometry.getAttribute('materialMeshIndex'); 97 | 98 | const v0 = new Vector3(); 99 | const v1 = new Vector3(); 100 | const v2 = new Vector3(); 101 | const e0 = new Vector3(); 102 | const e1 = new Vector3(); 103 | 104 | for (let i = 0; i < indices.length; i += 3) { 105 | const i0 = indices[i]; 106 | const i1 = indices[i + 1]; 107 | const i2 = indices[i + 2]; 108 | 109 | const bounds = new Box3(); 110 | 111 | v0.fromBufferAttribute(position, i0); 112 | v1.fromBufferAttribute(position, i1); 113 | v2.fromBufferAttribute(position, i2); 114 | e0.subVectors(v2, v0); 115 | e1.subVectors(v1, v0); 116 | 117 | bounds.expandByPoint(v0); 118 | bounds.expandByPoint(v1); 119 | bounds.expandByPoint(v2); 120 | 121 | const info = { 122 | bounds: bounds, 123 | center: bounds.getCenter(new Vector3()), 124 | indices: [i0, i1, i2], 125 | faceNormal: new Vector3().crossVectors(e1, e0).normalize(), 126 | materialIndex: materialMeshIndex.getX(i0) 127 | 
}; 128 | 129 | primitiveInfo.push(info); 130 | } 131 | 132 | return primitiveInfo; 133 | } 134 | 135 | function recursiveBuild(primitiveInfo, start, end) { 136 | const bounds = new Box3(); 137 | for (let i = start; i < end; i++) { 138 | bounds.union(primitiveInfo[i].bounds); 139 | } 140 | 141 | const nPrimitives = end - start; 142 | 143 | if (nPrimitives === 1) { 144 | return makeLeafNode(primitiveInfo.slice(start, end), bounds); 145 | } else { 146 | const centroidBounds = new Box3(); 147 | for (let i = start; i < end; i++) { 148 | centroidBounds.expandByPoint(primitiveInfo[i].center); 149 | } 150 | const dim = maximumExtent(centroidBounds); 151 | 152 | 153 | let mid = Math.floor((start + end) / 2); 154 | 155 | // middle split method 156 | // const dimMid = (centroidBounds.max[dim] + centroidBounds.min[dim]) / 2; 157 | // mid = partition(primitiveInfo, p => p.center[dim] < dimMid, start, end); 158 | 159 | // if (mid === start || mid === end) { 160 | // mid = Math.floor((start + end) / 2); 161 | // nthElement(primitiveInfo, (a, b) => a.center[dim] < b.center[dim], start, end, mid); 162 | // } 163 | 164 | // surface area heuristic method 165 | if (nPrimitives <= 4) { 166 | nthElement(primitiveInfo, (a, b) => a.center[dim] < b.center[dim], start, end, mid); 167 | } else if (centroidBounds.max[dim] === centroidBounds.min[dim]) { 168 | // can't split primitives based on centroid bounds. terminate. 
169 | return makeLeafNode(primitiveInfo.slice(start, end), bounds); 170 | } else { 171 | 172 | const buckets = []; 173 | for (let i = 0; i < 12; i++) { 174 | buckets.push({ 175 | bounds: new Box3(), 176 | count: 0, 177 | }); 178 | } 179 | 180 | for (let i = start; i < end; i++) { 181 | let b = Math.floor(buckets.length * boxOffset(centroidBounds, dim, primitiveInfo[i].center)); 182 | if (b === buckets.length) { 183 | b = buckets.length - 1; 184 | } 185 | buckets[b].count++; 186 | buckets[b].bounds.union(primitiveInfo[i].bounds); 187 | } 188 | 189 | const cost = []; 190 | 191 | for (let i = 0; i < buckets.length - 1; i++) { 192 | const b0 = new Box3(); 193 | const b1 = new Box3(); 194 | let count0 = 0; 195 | let count1 = 0; 196 | for (let j = 0; j <= i; j++) { 197 | b0.union(buckets[j].bounds); 198 | count0 += buckets[j].count; 199 | } 200 | for (let j = i + 1; j < buckets.length; j++) { 201 | b1.union(buckets[j].bounds); 202 | count1 += buckets[j].count; 203 | } 204 | cost.push(0.1 + (count0 * surfaceArea(b0) + count1 * surfaceArea(b1)) / surfaceArea(bounds)); 205 | } 206 | 207 | let minCost = cost[0]; 208 | let minCostSplitBucket = 0; 209 | for (let i = 1; i < cost.length; i++) { 210 | if (cost[i] < minCost) { 211 | minCost = cost[i]; 212 | minCostSplitBucket = i; 213 | } 214 | } 215 | 216 | mid = partition(primitiveInfo, p => { 217 | let b = Math.floor(buckets.length * boxOffset(centroidBounds, dim, p.center)); 218 | if (b === buckets.length) { 219 | b = buckets.length - 1; 220 | } 221 | return b <= minCostSplitBucket; 222 | }, start, end); 223 | } 224 | 225 | return makeInteriorNode( 226 | dim, 227 | recursiveBuild(primitiveInfo, start, mid), 228 | recursiveBuild(primitiveInfo, mid, end), 229 | ); 230 | } 231 | } 232 | 233 | function makeLeafNode(primitives, bounds) { 234 | return { 235 | primitives, 236 | bounds 237 | }; 238 | } 239 | 240 | function makeInteriorNode(splitAxis, child0, child1) { 241 | return { 242 | child0, 243 | child1, 244 | bounds: new 
Box3().union(child0.bounds).union(child1.bounds), 245 | splitAxis, 246 | }; 247 | } 248 | 249 | function maximumExtent(box3) { 250 | box3.getSize(size); 251 | if (size.x > size.z) { 252 | return size.x > size.y ? 'x' : 'y'; 253 | } else { 254 | return size.z > size.y ? 'z' : 'y'; 255 | } 256 | } 257 | 258 | function boxOffset(box3, dim, v) { 259 | let offset = v[dim] - box3.min[dim]; 260 | 261 | if (box3.max[dim] > box3.min[dim]){ 262 | offset /= box3.max[dim] - box3.min[dim]; 263 | } 264 | 265 | return offset; 266 | } 267 | 268 | function surfaceArea(box3) { 269 | box3.getSize(size); 270 | return 2 * (size.x * size.z + size.x * size.y + size.z * size.y); 271 | } 272 | -------------------------------------------------------------------------------- /src/renderer/bvhUtil.js: -------------------------------------------------------------------------------- 1 | // Reorders the elements in the range [first, last) in such a way that 2 | // all elements for which the comparator c returns true 3 | // precede the elements for which comparator c returns false. 4 | export function partition(array, compare, left = 0, right = array.length) { 5 | while (left !== right) { 6 | while (compare(array[left])) { 7 | left++; 8 | if (left === right) { 9 | return left; 10 | } 11 | } 12 | do { 13 | right--; 14 | if (left === right) { 15 | return left; 16 | } 17 | } while (!compare(array[right])); 18 | 19 | swap(array, left, right); 20 | left++; 21 | } 22 | 23 | return left; 24 | } 25 | 26 | // nth_element is a partial sorting algorithm that rearranges elements in [first, last) such that: 27 | // The element pointed at by nth is changed to whatever element would occur in that position if [first, last) were sorted. 
28 | // All of the elements before this new nth element compare to true with elements after the nth element 29 | export function nthElement(array, compare, left = 0, right = array.length, k = Math.floor((left + right) / 2)) { 30 | for (let i = left; i <= k; i++) { 31 | let minIndex = i; 32 | let minValue = array[i]; 33 | for (let j = i + 1; j < right; j++) { 34 | if (!compare(minValue, array[j])) { 35 | minIndex = j; 36 | minValue = array[j]; 37 | swap(array, i, minIndex); 38 | } 39 | } 40 | } 41 | } 42 | 43 | function swap(array, a, b) { 44 | const x = array[b]; 45 | array[b] = array[a]; 46 | array[a] = x; 47 | } 48 | -------------------------------------------------------------------------------- /src/renderer/decomposeScene.js: -------------------------------------------------------------------------------- 1 | import * as THREE from 'three'; 2 | 3 | export function decomposeScene(scene) { 4 | const meshes = []; 5 | const directionalLights = []; 6 | const ambientLights = []; 7 | const environmentLights = []; 8 | 9 | scene.traverse(child => { 10 | if (child.isMesh) { 11 | if (!child.geometry) { 12 | console.warn(child, 'must have a geometry property'); 13 | } 14 | else if (!(child.material.isMeshStandardMaterial)) { 15 | console.warn(child, 'must use MeshStandardMaterial in order to be rendered.'); 16 | } else { 17 | meshes.push(child); 18 | } 19 | } 20 | else if (child.isDirectionalLight) { 21 | directionalLights.push(child); 22 | } 23 | else if (child.isAmbientLight) { 24 | ambientLights.push(child); 25 | } 26 | else if (child.isEnvironmentLight) { 27 | if (environmentLights.length > 1) { 28 | console.warn(environmentLights, 'only one environment light can be used per scene'); 29 | } 30 | // Valid lights have HDR texture map in RGBEEncoding 31 | if (isHDRTexture(child)) { 32 | environmentLights.push(child); 33 | } else { 34 | console.warn(child, 'environment light does not use color value or map with THREE.RGBEEncoding'); 35 | } 36 | } 37 | }); 38 | 39 | const 
background = scene.background; 40 | 41 | return { 42 | background, meshes, directionalLights, ambientLights, environmentLights 43 | }; 44 | } 45 | 46 | function isHDRTexture(texture) { 47 | return texture.map 48 | && texture.map.image 49 | && (texture.map.encoding === THREE.RGBEEncoding || texture.map.encoding === THREE.LinearEncoding); 50 | } 51 | -------------------------------------------------------------------------------- /src/renderer/envMapCreation.js: -------------------------------------------------------------------------------- 1 | // Convert image data from the RGBE format to a 32-bit floating point format 2 | // See https://www.cg.tuwien.ac.at/research/theses/matkovic/node84.html for a description of the RGBE format 3 | 4 | import { rgbeToFloat } from './rgbeToFloat'; 5 | import { clamp } from './util'; 6 | import * as THREE from 'three'; 7 | 8 | const DEFAULT_MAP_RESOLUTION = { 9 | width: 2048, 10 | height: 1024, 11 | }; 12 | 13 | // Tools for generating and modify env maps for lighting from scene component data 14 | 15 | export function generateBackgroundMapFromSceneBackground(background) { 16 | let backgroundImage; 17 | 18 | if (background.isColor) { 19 | backgroundImage = generateSolidMap(1, 1, background); 20 | } else if (background.encoding === THREE.RGBEEncoding) { 21 | backgroundImage = { 22 | width: background.image.width, 23 | height: background.image.height, 24 | data: background.image.data, 25 | }; 26 | backgroundImage.data = rgbeToFloat(backgroundImage.data); 27 | } 28 | return backgroundImage; 29 | } 30 | 31 | export function generateEnvMapFromSceneComponents(directionalLights, ambientLights, environmentLights) { 32 | let envImage = initializeEnvMap(environmentLights); 33 | ambientLights.forEach( light => { addAmbientLightToEnvMap(light, envImage); }); 34 | directionalLights.forEach( light => { envImage.data = addDirectionalLightToEnvMap(light, envImage); }); 35 | 36 | return envImage; 37 | } 38 | 39 | export function 
initializeEnvMap(environmentLights) { 40 | let envImage; 41 | 42 | // Initialize map from environment light if present 43 | if (environmentLights.length > 0) { 44 | // TODO: support multiple environment lights (what if they have different resolutions?) 45 | const environmentLight = environmentLights[0]; 46 | envImage = { 47 | width: environmentLight.map.image.width, 48 | height: environmentLight.map.image.height, 49 | data: environmentLight.map.image.data, 50 | }; 51 | envImage.data = rgbeToFloat(envImage.data, environmentLight.intensity); 52 | } else { 53 | // initialize blank map 54 | envImage = generateSolidMap(DEFAULT_MAP_RESOLUTION.width, DEFAULT_MAP_RESOLUTION.height); 55 | } 56 | 57 | return envImage; 58 | } 59 | 60 | export function generateSolidMap(width, height, color, intensity) { 61 | const texels = width * height; 62 | const floatBuffer = new Float32Array(texels * 3); 63 | if (color && color.isColor) { 64 | setBufferToColor(floatBuffer, color, intensity); 65 | } 66 | return { 67 | width: width, 68 | height: height, 69 | data: floatBuffer, 70 | }; 71 | } 72 | 73 | function setBufferToColor(buffer, color, intensity = 1) { 74 | buffer.forEach(function(part, index) { 75 | const component = index % 3; 76 | if (component === 0) { 77 | buffer[index] = color.r * intensity; 78 | } 79 | else if (component === 1) { 80 | buffer[index] = color.g * intensity; 81 | } 82 | else if (component === 2) { 83 | buffer[index] = color.b * intensity; 84 | } 85 | }); 86 | return buffer; 87 | } 88 | 89 | export function addAmbientLightToEnvMap(light, image) { 90 | const color = light.color; 91 | image.data.forEach(function(part, index) { 92 | const component = index % 3; 93 | if (component === 0) { 94 | image.data[index] += color.r * light.intensity; 95 | } 96 | else if (component === 1) { 97 | image.data[index] += color.g * light.intensity; 98 | } 99 | else if (component === 2) { 100 | image.data[index] += color.b * light.intensity; 101 | } 102 | }); 103 | } 104 | 105 | export 
function addDirectionalLightToEnvMap(light, image) { 106 | const sphericalCoords = new THREE.Spherical(); 107 | const lightDirection = light.position.clone().sub(light.target.position); 108 | 109 | sphericalCoords.setFromVector3(lightDirection); 110 | sphericalCoords.theta = (Math.PI * 3 / 2) - sphericalCoords.theta; 111 | sphericalCoords.makeSafe(); 112 | 113 | return addLightAtCoordinates(light, image, sphericalCoords); 114 | } 115 | 116 | // Perform modifications on env map to match input scene 117 | function addLightAtCoordinates(light, image, originCoords) { 118 | const floatBuffer = image.data; 119 | const width = image.width; 120 | const height = image.height; 121 | const xTexels = floatBuffer.length / (3 * height); 122 | const yTexels = floatBuffer.length / (3 * width); 123 | 124 | // default softness for standard directional lights is 0.01, i.e. a hard shadow 125 | const softness = light.softness || 0.01; 126 | 127 | // angle from center of light at which no more contributions are projected 128 | const threshold = findThreshold(softness); 129 | 130 | // if too few texels are rejected by the threshold then the time to evaluate it is no longer worth it 131 | const useThreshold = threshold < Math.PI / 5; 132 | 133 | // functional trick to keep the conditional check out of the main loop 134 | const intensityFromAngleFunction = useThreshold ? 
getIntensityFromAngleDifferentialThresholded : getIntensityFromAngleDifferential; 135 | 136 | let begunAddingContributions = false; 137 | let currentCoords = new THREE.Spherical(); 138 | 139 | // Iterates over each row from top to bottom 140 | for (let i = 0; i < xTexels; i++) { 141 | 142 | let encounteredInThisRow = false; 143 | 144 | // Iterates over each texel in row 145 | for (let j = 0; j < yTexels; j++) { 146 | const bufferIndex = j * width + i; 147 | currentCoords = equirectangularToSpherical(i, j, width, height, currentCoords); 148 | const falloff = intensityFromAngleFunction(originCoords, currentCoords, softness, threshold); 149 | 150 | if(falloff > 0) { 151 | encounteredInThisRow = true; 152 | begunAddingContributions = true; 153 | } 154 | 155 | const intensity = light.intensity * falloff; 156 | 157 | floatBuffer[bufferIndex * 3] += intensity * light.color.r; 158 | floatBuffer[bufferIndex * 3 + 1] += intensity * light.color.g; 159 | floatBuffer[bufferIndex * 3 + 2] += intensity * light.color.b; 160 | } 161 | 162 | // First row to not add a contribution since adding began 163 | // This means the entire light has been added and we can exit early 164 | if(!encounteredInThisRow && begunAddingContributions) { 165 | return floatBuffer; 166 | } 167 | } 168 | 169 | return floatBuffer; 170 | } 171 | 172 | function findThreshold(softness) { 173 | const step = Math.PI / 128; 174 | const maxSteps = (2.0 * Math.PI) / step; 175 | 176 | for (let i = 0; i < maxSteps; i++) { 177 | const angle = i * step; 178 | const falloff = getFalloffAtAngle(angle, softness); 179 | if (falloff <= 0.0001) { 180 | return angle; 181 | } 182 | } 183 | } 184 | 185 | function getIntensityFromAngleDifferentialThresholded(originCoords, currentCoords, softness, threshold) { 186 | const deltaPhi = getAngleDelta(originCoords.phi, currentCoords.phi); 187 | const deltaTheta = getAngleDelta(originCoords.theta, currentCoords.theta); 188 | 189 | if(deltaTheta > threshold && deltaPhi > threshold) { 190 
| return 0; 191 | } 192 | 193 | const angle = angleBetweenSphericals(originCoords, currentCoords); 194 | return getFalloffAtAngle(angle, softness); 195 | } 196 | 197 | function getIntensityFromAngleDifferential(originCoords, currentCoords, softness) { 198 | const angle = angleBetweenSphericals(originCoords, currentCoords); 199 | return getFalloffAtAngle(angle, softness); 200 | } 201 | 202 | export function getAngleDelta(angleA, angleB) { 203 | const diff = Math.abs(angleA - angleB) % (2 * Math.PI); 204 | return diff > Math.PI ? (2 * Math.PI - diff) : diff; 205 | } 206 | 207 | const angleBetweenSphericals = function() { 208 | const originVector = new THREE.Vector3(); 209 | const currentVector = new THREE.Vector3(); 210 | 211 | return (originCoords, currentCoords) => { 212 | originVector.setFromSpherical(originCoords); 213 | currentVector.setFromSpherical(currentCoords); 214 | return originVector.angleTo(currentVector); 215 | }; 216 | }(); 217 | 218 | // TODO: possibly clean this up and optimize it 219 | // 220 | // This function was arrived at through experimentation, it provides good 221 | // looking results with percieved softness that scale relatively linearly with 222 | // the softness value in the 0 - 1 range 223 | // 224 | // For now it doesn't incur too much of a performance penalty because for most of our use cases (lights without too much softness) 225 | // the threshold cutoff in getIntensityFromAngleDifferential stops us from running it too many times 226 | function getFalloffAtAngle(angle, softness) { 227 | const softnessCoefficient = Math.pow(2, 14.5 * Math.max(0.001, 1.0 - clamp(softness, 0.0, 1.0))); 228 | const falloff = Math.pow(softnessCoefficient, 1.1) * Math.pow(8, -softnessCoefficient * Math.pow(angle, 1.8)); 229 | return falloff; 230 | } 231 | 232 | export function equirectangularToSpherical(x, y, width, height, target) { 233 | target.phi = (Math.PI * y) / height; 234 | target.theta = (2.0 * Math.PI * x) / width; 235 | return target; 236 | } 
237 | -------------------------------------------------------------------------------- /src/renderer/envMapDistribution.js: -------------------------------------------------------------------------------- 1 | // Create a piecewise 2D cumulative distribution function of light intensity from an env map 2 | // http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#Piecewise-Constant2DDistributions 3 | 4 | export function envMapDistribution(image) { 5 | const data = image.data; 6 | 7 | const cdfImage = { 8 | width: image.width + 2, 9 | height: image.height + 1 10 | }; 11 | 12 | const cdf = makeTextureArray(cdfImage.width, cdfImage.height, 2); 13 | 14 | for (let y = 0; y < image.height; y++) { 15 | const sinTheta = Math.sin(Math.PI * (y + 0.5) / image.height); 16 | for (let x = 0; x < image.width; x++) { 17 | const i = 3 * (y * image.width + x); 18 | let r = data[i]; 19 | let g = data[i + 1]; 20 | let b = data[i + 2]; 21 | let luminance = 0.2126 * r + 0.7152 * g + 0.0722 * b; 22 | luminance *= sinTheta; 23 | cdf.set(x + 2, y, 0, cdf.get(x + 1, y, 0) + luminance / image.width); 24 | cdf.set(x + 1, y, 1, luminance); 25 | } 26 | 27 | const rowIntegral = cdf.get(cdfImage.width - 1, y, 0); 28 | 29 | for (let x = 1; x < cdf.width; x++) { 30 | cdf.set(x, y, 0, cdf.get(x, y, 0) / rowIntegral); 31 | cdf.set(x, y, 1, cdf.get(x, y, 1) / rowIntegral); 32 | } 33 | 34 | cdf.set(0, y + 1, 0, cdf.get(0, y, 0) + rowIntegral / image.height); 35 | cdf.set(0, y, 1, rowIntegral); 36 | } 37 | 38 | const integral = cdf.get(0, cdf.height - 1, 0); 39 | 40 | for (let y = 0; y < cdf.height; y++) { 41 | cdf.set(0, y, 0, cdf.get(0, y, 0) / integral); 42 | cdf.set(0, y, 1, cdf.get(0, y, 1) / integral); 43 | } 44 | cdfImage.data = cdf.array; 45 | 46 | return cdfImage; 47 | } 48 | 49 | 50 | function makeTextureArray(width, height, channels) { 51 | const array = new Float32Array(channels * width * height); 52 | 53 | return { 54 | set(x, y, 
channel, val) { 55 | array[channels * (y * width + x) + channel] = val; 56 | }, 57 | get(x, y, channel) { 58 | return array[channels * (y * width + x) + channel]; 59 | }, 60 | width, 61 | height, 62 | channels, 63 | array 64 | }; 65 | } 66 | -------------------------------------------------------------------------------- /src/renderer/glUtil.js: -------------------------------------------------------------------------------- 1 | export function loadExtensions(gl, extensions) { 2 | const supported = {}; 3 | for (const name of extensions) { 4 | supported[name] = gl.getExtension(name); 5 | } 6 | return supported; 7 | } 8 | 9 | export function compileShader(gl, type, source) { 10 | const shader = gl.createShader(type); 11 | gl.shaderSource(shader, source); 12 | gl.compileShader(shader); 13 | const success = gl.getShaderParameter(shader, gl.COMPILE_STATUS); 14 | 15 | if (success) { 16 | return shader; 17 | } 18 | 19 | const output = source.split('\n').map((x, i) => `${i + 1}: ${x}`).join('\n'); 20 | console.log(output); 21 | 22 | throw gl.getShaderInfoLog(shader); 23 | } 24 | 25 | export function createProgram(gl, vertexShader, fragmentShader, transformVaryings, transformBufferMode) { 26 | const program = gl.createProgram(); 27 | gl.attachShader(program, vertexShader); 28 | gl.attachShader(program, fragmentShader); 29 | 30 | if (transformVaryings) { 31 | gl.transformFeedbackVaryings(program, transformVaryings, transformBufferMode); 32 | } 33 | 34 | gl.linkProgram(program); 35 | 36 | gl.detachShader(program, vertexShader); 37 | gl.detachShader(program, fragmentShader); 38 | 39 | const success = gl.getProgramParameter(program, gl.LINK_STATUS); 40 | 41 | if (success) { 42 | return program; 43 | } 44 | 45 | throw gl.getProgramInfoLog(program); 46 | } 47 | 48 | export function getUniforms(gl, program) { 49 | const uniforms = {}; 50 | 51 | const count = gl.getProgramParameter(program, gl.ACTIVE_UNIFORMS); 52 | for (let i = 0; i < count; i++) { 53 | const { name, type } = 
gl.getActiveUniform(program, i);
    const location = gl.getUniformLocation(program, name);
    if (location) {
      uniforms[name] = {
        type, location
      };
    }
  }

  return uniforms;
}

/**
 * Queries every active vertex attribute of a linked WebGL program.
 * @param {WebGL2RenderingContext} gl
 * @param {WebGLProgram} program - a linked shader program
 * @returns {Object<string, number>} map from attribute name to its location index
 */
export function getAttributes(gl, program) {
  const attributes = {};

  const count = gl.getProgramParameter(program, gl.ACTIVE_ATTRIBUTES);
  for (let i = 0; i < count; i++) {
    const { name } = gl.getActiveAttrib(program, i);
    if (name) {
      attributes[name] = gl.getAttribLocation(program, name);
    }
  }

  return attributes;
}
--------------------------------------------------------------------------------
/src/renderer/glsl/chunks/bsdf.glsl:
--------------------------------------------------------------------------------
export default `

// Computes the exact value of the Fresnel factor
// https://seblagarde.wordpress.com/2013/04/29/memo-on-fresnel-equations/
float fresnel(float cosTheta, float eta, float invEta) {
  // pick eta or 1/eta depending on which side of the surface the ray is on
  eta = cosTheta > 0.0 ? eta : invEta;
  cosTheta = abs(cosTheta);

  float gSquared = eta * eta + cosTheta * cosTheta - 1.0;

  if (gSquared < 0.0) {
    // no real solution: total internal reflection
    return 1.0;
  }

  float g = sqrt(gSquared);

  float a = (g - cosTheta) / (g + cosTheta);
  float b = (cosTheta * (g + cosTheta) - 1.0) / (cosTheta * (g - cosTheta) + 1.0);

  return 0.5 * a * a * (1.0 + b * b);
}

// (1 - cosTheta)^5 weight shared by the Schlick approximations below
float fresnelSchlickWeight(float cosTheta) {
  float w = 1.0 - cosTheta;
  return (w * w) * (w * w) * w;
}

// Computes Schlick's approximation of the Fresnel factor
// Assumes ray is moving from a less dense to a more dense medium
float fresnelSchlick(float cosTheta, float r0) {
  return mix(fresnelSchlickWeight(cosTheta), 1.0, r0);
}

// Computes Schlick's approximation of Fresnel factor
// Accounts for total internal reflection if ray is moving from a more dense to a less dense medium
float fresnelSchlickTIR(float cosTheta, float r0, float ni) {

  // moving from a more dense to a less dense medium
  if (cosTheta < 0.0) {
    float inv_eta = ni;
    float SinT2 = inv_eta * inv_eta * (1.0f - cosTheta * cosTheta);
    if (SinT2 > 1.0) {
      return 1.0; // total internal reflection
    }
    cosTheta = sqrt(1.0f - SinT2);
  }

  return mix(fresnelSchlickWeight(cosTheta), 1.0, r0);
}

// GGX (Trowbridge-Reitz) normal distribution function
float trowbridgeReitzD(float cosTheta, float alpha2) {
  float e = cosTheta * cosTheta * (alpha2 - 1.0) + 1.0;
  return alpha2 / (PI * e * e);
}

// Lambda term of the Smith masking-shadowing function for GGX
float trowbridgeReitzLambda(float cosTheta, float alpha2) {
  float cos2Theta = cosTheta * cosTheta;
  float tan2Theta = (1.0 - cos2Theta) / cos2Theta;
  return 0.5 * (-1.0 + sqrt(1.0 + alpha2 * tan2Theta));
}

// An implementation of Disney's principled BRDF
// https://disney-animation.s3.amazonaws.com/library/s2012_pbs_disney_brdf_notes_v2.pdf
// Returns the BRDF value for the given light direction and writes the
// corresponding sampling pdf to the 'pdf' out parameter.
vec3 materialBrdf(SurfaceInteraction si, vec3 viewDir, vec3 lightDir, float cosThetaL, float diffuseWeight, out float pdf) {
  vec3 halfVector = normalize(viewDir + lightDir);

  cosThetaL = abs(cosThetaL);
  float cosThetaV = abs(dot(si.normal, viewDir));
  float cosThetaH = abs(dot(si.normal, halfVector));
  float cosThetaD = abs(dot(lightDir, halfVector));

  float alpha2 = (si.roughness * si.roughness) * (si.roughness * si.roughness);

  float F = fresnelSchlick(cosThetaD, mix(R0, 0.6, si.metalness));
  float D = trowbridgeReitzD(cosThetaH, alpha2);

  // remapped (widened) roughness used only for the geometry term
  float roughnessRemapped = 0.5 + 0.5 * si.roughness;
  float alpha2Remapped = (roughnessRemapped * roughnessRemapped) * (roughnessRemapped * roughnessRemapped);

  float G = 1.0 / (1.0 + trowbridgeReitzLambda(cosThetaV, alpha2Remapped) + trowbridgeReitzLambda(cosThetaL, alpha2Remapped));

  float specular = F * D * G / (4.0 * cosThetaV * cosThetaL);
  float specularPdf = D * cosThetaH / (4.0 * cosThetaD);

  float f = -0.5 + 2.0 * cosThetaD * cosThetaD * si.roughness;
  float diffuse = diffuseWeight * INVPI * (1.0 + f * fresnelSchlickWeight(cosThetaL)) * (1.0 + f * fresnelSchlickWeight(cosThetaV));
  float diffusePdf = cosThetaL * INVPI;

  // metals have no diffuse lobe, so the pdf blends toward the specular pdf
  pdf = mix(0.5 * (specularPdf + diffusePdf), specularPdf, si.metalness);

  return mix(si.color * diffuse + specular, si.color * specular, si.metalness);
}

`;
--------------------------------------------------------------------------------
/src/renderer/glsl/chunks/constants.glsl:
--------------------------------------------------------------------------------
export default `
#define PI 3.14159265359
#define TWOPI 6.28318530718
#define INVPI 0.31830988618
#define INVPI2 0.10132118364
#define EPS 0.0005
#define INF 1.0e999

#define ROUGHNESS_MIN 0.03
`
--------------------------------------------------------------------------------
/src/renderer/glsl/chunks/envMap.glsl:
--------------------------------------------------------------------------------
// Sample the environment map using a cumulative distribution function as described in
// http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Sampling_Light_Sources.html#InfiniteAreaLights

export default `

uniform sampler2D envMap;
uniform sampler2D envMapDistribution;
uniform sampler2D backgroundMap;

// Convert a unit direction vector to equirectangular (u, v) coordinates in [0, 1]^2
vec2 cartesianToEquirect(vec3 pointOnSphere) {
  float phi = mod(atan(-pointOnSphere.z, -pointOnSphere.x), TWOPI);
  float theta = acos(pointOnSphere.y);
  return vec2(phi * 0.5 * INVPI, theta * INVPI);
}

// Binary-search the per-row (marginal) CDF stored in column 0 of envMapDistribution.
// Writes the chosen row index to 'vOffset' and its partial pdf to 'pdf';
// returns the continuous v coordinate in [0, 1].
float getEnvmapV(float u, out int vOffset, out float pdf) {
  ivec2 size = textureSize(envMap, 0);

  int left = 0;
  int right = size.y + 1; // cdf length is the length of the env map + 1
  while (left < right) {
    int mid = (left + right) >> 1;
    float s = texelFetch(envMapDistribution, ivec2(0, mid), 0).x;
    if (s <= u) {
      left = mid + 1;
    } else {
      right = mid;
    }
  }
  vOffset = left - 1;

  // x channel is cumulative distribution of env map luminance
  // y channel is partial probability density of env map luminance
  vec2 s0 = texelFetch(envMapDistribution, ivec2(0, vOffset), 0).xy;
  vec2 s1 = texelFetch(envMapDistribution, ivec2(0, vOffset + 1), 0).xy;

  pdf = s0.y;

  // interpolate between the two bracketing cdf entries
  return (float(vOffset) + (u - s0.x) / (s1.x - s0.x)) / float(size.y);
}

// Binary-search the conditional CDF of row 'vOffset', stored from column 1 onwards.
// Writes the partial pdf to 'pdf'; returns the continuous u coordinate in [0, 1].
float getEnvmapU(float u, int vOffset, out float pdf) {
  ivec2 size = textureSize(envMap, 0);

  int left = 0;
  int right = size.x + 1; // cdf length is the length of the env map + 1
  while (left < right) {
    int mid = (left + right) >> 1;
    float s = texelFetch(envMapDistribution, ivec2(1 + mid, vOffset), 0).x;
    if (s <= u) {
      left = mid + 1;
    } else {
      right = mid;
    }
  }
  int uOffset = left - 1;

  // x channel is cumulative distribution of env map luminance
  // y channel is partial probability density of env map luminance
  vec2 s0 = texelFetch(envMapDistribution, ivec2(1 + uOffset, vOffset), 0).xy;
  vec2 s1 = texelFetch(envMapDistribution, ivec2(1 + uOffset + 1, vOffset), 0).xy;

  pdf = s0.y;

  return (float(uOffset) + (u - s0.x) / (s1.x - s0.x)) / float(size.x);
}

// Perform two binary searches to find light direction.
// Writes the equirect coordinates to 'uv' and the solid-angle pdf to 'pdf'.
vec3 sampleEnvmap(vec2 random, out vec2 uv, out float pdf) {
  vec2 partialPdf;
  int vOffset;

  uv.y = getEnvmapV(random.x, vOffset, partialPdf.y);
  uv.x = getEnvmapU(random.y, vOffset, partialPdf.x);

  float phi = uv.x * TWOPI;
  float theta = uv.y * PI;
  float cosTheta = cos(theta);
  float sinTheta = sin(theta);
  float cosPhi = cos(phi);
  float sinPhi = sin(phi);

  vec3 dir = vec3(-sinTheta * cosPhi, cosTheta, -sinTheta * sinPhi);

  // convert the image-space pdf to a pdf over solid angle (1 / (2 pi^2 sinTheta) factor)
  pdf = partialPdf.x * partialPdf.y * INVPI2 / (2.0 * sinTheta);

  return dir;
}

// Pdf (over solid angle) of sampling the given equirect coordinate via sampleEnvmap
float envMapPdf(vec2 uv) {
  vec2 size = vec2(textureSize(envMap, 0));

  float sinTheta = sin(uv.y * PI);

  uv *= size;

  float partialX = texelFetch(envMapDistribution, ivec2(1.0 + uv.x, uv.y), 0).y;
  float partialY = texelFetch(envMapDistribution, ivec2(0, uv.y), 0).y;

  return partialX * partialY * INVPI2 / (2.0 * sinTheta);
}

vec3 sampleEnvmapFromDirection(vec3 d) {
  vec2 uv = cartesianToEquirect(d);
  return textureLinear(envMap, uv).rgb;
}

vec3 sampleBackgroundFromDirection(vec3 d) {
  vec2 uv = cartesianToEquirect(d);
  return textureLinear(backgroundMap, uv).rgb;
}

`;
--------------------------------------------------------------------------------
/src/renderer/glsl/chunks/intersect.glsl:
--------------------------------------------------------------------------------
export default
` 2 | 3 | uniform sampler2D positionBuffer; 4 | uniform sampler2D normalBuffer; 5 | uniform sampler2D uvBuffer; 6 | uniform sampler2D bvhBuffer; 7 | 8 | struct Triangle { 9 | vec3 p0; 10 | vec3 p1; 11 | vec3 p2; 12 | }; 13 | 14 | void surfaceInteractionFromBVH(inout SurfaceInteraction si, Triangle tri, vec3 barycentric, ivec3 index, vec3 faceNormal, int materialIndex) { 15 | si.hit = true; 16 | si.faceNormal = faceNormal; 17 | si.position = barycentric.x * tri.p0 + barycentric.y * tri.p1 + barycentric.z * tri.p2; 18 | ivec2 i0 = unpackTexel(index.x, VERTEX_COLUMNS); 19 | ivec2 i1 = unpackTexel(index.y, VERTEX_COLUMNS); 20 | ivec2 i2 = unpackTexel(index.z, VERTEX_COLUMNS); 21 | 22 | vec3 n0 = texelFetch(normalBuffer, i0, 0).xyz; 23 | vec3 n1 = texelFetch(normalBuffer, i1, 0).xyz; 24 | vec3 n2 = texelFetch(normalBuffer, i2, 0).xyz; 25 | vec3 normal = normalize(barycentric.x * n0 + barycentric.y * n1 + barycentric.z * n2); 26 | 27 | #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS) || defined(NUM_PBR_MAPS) 28 | vec2 uv0 = texelFetch(uvBuffer, i0, 0).xy; 29 | vec2 uv1 = texelFetch(uvBuffer, i1, 0).xy; 30 | vec2 uv2 = texelFetch(uvBuffer, i2, 0).xy; 31 | vec2 uv = fract(barycentric.x * uv0 + barycentric.y * uv1 + barycentric.z * uv2); 32 | #else 33 | vec2 uv = vec2(0.0); 34 | #endif 35 | 36 | si.materialType = int(getMatType(materialIndex)); 37 | si.color = getMatColor(materialIndex, uv); 38 | si.roughness = getMatRoughness(materialIndex, uv); 39 | si.metalness = getMatMetalness(materialIndex, uv); 40 | 41 | #ifdef NUM_NORMAL_MAPS 42 | vec3 dp1 = tri.p0 - tri.p2; 43 | vec3 dp2 = tri.p1 - tri.p2; 44 | vec2 duv1 = uv0 - uv2; 45 | vec2 duv2 = uv1 - uv2; 46 | si.normal = getMatNormal(materialIndex, uv, normal, dp1, dp2, duv1, duv2); 47 | #else 48 | si.normal = normal; 49 | #endif 50 | } 51 | 52 | struct TriangleIntersect { 53 | float t; 54 | vec3 barycentric; 55 | }; 56 | 57 | // Triangle-ray intersection 58 | // Faster than the classic Möller–Trumbore intersection 
algorithm 59 | // http://www.pbr-book.org/3ed-2018/Shapes/Triangle_Meshes.html#TriangleIntersection 60 | TriangleIntersect intersectTriangle(Ray r, Triangle tri, int maxDim, vec3 shear) { 61 | TriangleIntersect ti; 62 | vec3 d = r.d; 63 | 64 | // translate vertices based on ray origin 65 | vec3 p0t = tri.p0 - r.o; 66 | vec3 p1t = tri.p1 - r.o; 67 | vec3 p2t = tri.p2 - r.o; 68 | 69 | // permute components of triangle vertices 70 | if (maxDim == 0) { 71 | p0t = p0t.yzx; 72 | p1t = p1t.yzx; 73 | p2t = p2t.yzx; 74 | } else if (maxDim == 1) { 75 | p0t = p0t.zxy; 76 | p1t = p1t.zxy; 77 | p2t = p2t.zxy; 78 | } 79 | 80 | // apply shear transformation to translated vertex positions 81 | p0t.xy += shear.xy * p0t.z; 82 | p1t.xy += shear.xy * p1t.z; 83 | p2t.xy += shear.xy * p2t.z; 84 | 85 | // compute edge function coefficients 86 | vec3 e = vec3( 87 | p1t.x * p2t.y - p1t.y * p2t.x, 88 | p2t.x * p0t.y - p2t.y * p0t.x, 89 | p0t.x * p1t.y - p0t.y * p1t.x 90 | ); 91 | 92 | // check if intersection is inside triangle 93 | if (any(lessThan(e, vec3(0))) && any(greaterThan(e, vec3(0)))) { 94 | return ti; 95 | } 96 | 97 | float det = e.x + e.y + e.z; 98 | 99 | // not needed? 100 | // if (det == 0.) { 101 | // return ti; 102 | // } 103 | 104 | p0t.z *= shear.z; 105 | p1t.z *= shear.z; 106 | p2t.z *= shear.z; 107 | float tScaled = (e.x * p0t.z + e.y * p1t.z + e.z * p2t.z); 108 | 109 | // not needed? 110 | // if (sign(det) != sign(tScaled)) { 111 | // return ti; 112 | // } 113 | 114 | // check if closer intersection already exists 115 | if (abs(tScaled) > abs(r.tMax * det)) { 116 | return ti; 117 | } 118 | 119 | float invDet = 1. 
/ det; 120 | ti.t = tScaled * invDet; 121 | ti.barycentric = e * invDet; 122 | 123 | return ti; 124 | } 125 | 126 | struct Box { 127 | vec3 min; 128 | vec3 max; 129 | }; 130 | 131 | // Branchless ray/box intersection 132 | // https://tavianator.com/fast-branchless-raybounding-box-intersections/ 133 | float intersectBox(Ray r, Box b) { 134 | vec3 tBot = (b.min - r.o) * r.invD; 135 | vec3 tTop = (b.max - r.o) * r.invD; 136 | vec3 tNear = min(tBot, tTop); 137 | vec3 tFar = max(tBot, tTop); 138 | float t0 = max(tNear.x, max(tNear.y, tNear.z)); 139 | float t1 = min(tFar.x, min(tFar.y, tFar.z)); 140 | 141 | return (t0 > t1 || t0 > r.tMax) ? -1.0 : (t0 > 0.0 ? t0 : t1); 142 | } 143 | 144 | int maxDimension(vec3 v) { 145 | return v.x > v.y ? (v.x > v.z ? 0 : 2) : (v.y > v.z ? 1 : 2); 146 | } 147 | 148 | // Traverse BVH, find closest triangle intersection, and return surface information 149 | void intersectScene(inout Ray ray, inout SurfaceInteraction si) { 150 | si.hit = false; 151 | 152 | int maxDim = maxDimension(abs(ray.d)); 153 | 154 | // Permute space so that the z dimension is the one where the absolute value of the ray's direction is largest. 155 | // Then create a shear transformation that aligns ray direction with the +z axis 156 | vec3 shear; 157 | if (maxDim == 0) { 158 | shear = vec3(-ray.d.y, -ray.d.z, 1.0) * ray.invD.x; 159 | } else if (maxDim == 1) { 160 | shear = vec3(-ray.d.z, -ray.d.x, 1.0) * ray.invD.y; 161 | } else { 162 | shear = vec3(-ray.d.x, -ray.d.y, 1.0) * ray.invD.z; 163 | } 164 | 165 | int nodesToVisit[STACK_SIZE]; 166 | int stack = 0; 167 | 168 | nodesToVisit[0] = 0; 169 | 170 | while(stack >= 0) { 171 | int i = nodesToVisit[stack--]; 172 | 173 | vec4 r1 = fetchData(bvhBuffer, i, BVH_COLUMNS); 174 | vec4 r2 = fetchData(bvhBuffer, i + 1, BVH_COLUMNS); 175 | 176 | int splitAxisOrNumPrimitives = floatBitsToInt(r1.w); 177 | 178 | if (splitAxisOrNumPrimitives >= 0) { 179 | // Intersection is a bounding box. 
Test for box intersection and keep traversing BVH 180 | int splitAxis = splitAxisOrNumPrimitives; 181 | 182 | Box bbox = Box(r1.xyz, r2.xyz); 183 | 184 | if (intersectBox(ray, bbox) > 0.0) { 185 | // traverse near node to ray first, and far node to ray last 186 | if (ray.d[splitAxis] > 0.0) { 187 | nodesToVisit[++stack] = floatBitsToInt(r2.w); 188 | nodesToVisit[++stack] = i + 2; 189 | } else { 190 | nodesToVisit[++stack] = i + 2; 191 | nodesToVisit[++stack] = floatBitsToInt(r2.w); 192 | } 193 | } 194 | } else { 195 | ivec3 index = floatBitsToInt(r1.xyz); 196 | Triangle tri = Triangle( 197 | fetchData(positionBuffer, index.x, VERTEX_COLUMNS).xyz, 198 | fetchData(positionBuffer, index.y, VERTEX_COLUMNS).xyz, 199 | fetchData(positionBuffer, index.z, VERTEX_COLUMNS).xyz 200 | ); 201 | TriangleIntersect hit = intersectTriangle(ray, tri, maxDim, shear); 202 | 203 | if (hit.t > 0.0) { 204 | ray.tMax = hit.t; 205 | int materialIndex = floatBitsToInt(r2.w); 206 | vec3 faceNormal = r2.xyz; 207 | surfaceInteractionFromBVH(si, tri, hit.barycentric, index, faceNormal, materialIndex); 208 | } 209 | } 210 | } 211 | 212 | // Values must be clamped outside of intersection loop. Clamping inside the loop produces incorrect numbers on some devices. 213 | si.roughness = clamp(si.roughness, ROUGHNESS_MIN, 1.0); 214 | si.metalness = clamp(si.metalness, 0.0, 1.0); 215 | } 216 | 217 | bool intersectSceneShadow(inout Ray ray) { 218 | int maxDim = maxDimension(abs(ray.d)); 219 | 220 | // Permute space so that the z dimension is the one where the absolute value of the ray's direction is largest. 
221 | // Then create a shear transformation that aligns ray direction with the +z axis 222 | vec3 shear; 223 | if (maxDim == 0) { 224 | shear = vec3(-ray.d.y, -ray.d.z, 1.0) * ray.invD.x; 225 | } else if (maxDim == 1) { 226 | shear = vec3(-ray.d.z, -ray.d.x, 1.0) * ray.invD.y; 227 | } else { 228 | shear = vec3(-ray.d.x, -ray.d.y, 1.0) * ray.invD.z; 229 | } 230 | 231 | int nodesToVisit[STACK_SIZE]; 232 | int stack = 0; 233 | 234 | nodesToVisit[0] = 0; 235 | 236 | while(stack >= 0) { 237 | int i = nodesToVisit[stack--]; 238 | 239 | vec4 r1 = fetchData(bvhBuffer, i, BVH_COLUMNS); 240 | vec4 r2 = fetchData(bvhBuffer, i + 1, BVH_COLUMNS); 241 | 242 | int splitAxisOrNumPrimitives = floatBitsToInt(r1.w); 243 | 244 | if (splitAxisOrNumPrimitives >= 0) { 245 | int splitAxis = splitAxisOrNumPrimitives; 246 | 247 | Box bbox = Box(r1.xyz, r2.xyz); 248 | 249 | if (intersectBox(ray, bbox) > 0.0) { 250 | if (ray.d[splitAxis] > 0.0) { 251 | nodesToVisit[++stack] = floatBitsToInt(r2.w); 252 | nodesToVisit[++stack] = i + 2; 253 | } else { 254 | nodesToVisit[++stack] = i + 2; 255 | nodesToVisit[++stack] = floatBitsToInt(r2.w); 256 | } 257 | } 258 | } else { 259 | ivec3 index = floatBitsToInt(r1.xyz); 260 | Triangle tri = Triangle( 261 | fetchData(positionBuffer, index.x, VERTEX_COLUMNS).xyz, 262 | fetchData(positionBuffer, index.y, VERTEX_COLUMNS).xyz, 263 | fetchData(positionBuffer, index.z, VERTEX_COLUMNS).xyz 264 | ); 265 | 266 | if (intersectTriangle(ray, tri, maxDim, shear).t > 0.0) { 267 | return true; 268 | } 269 | } 270 | } 271 | 272 | return false; 273 | } 274 | 275 | `; 276 | -------------------------------------------------------------------------------- /src/renderer/glsl/chunks/materialBuffer.glsl: -------------------------------------------------------------------------------- 1 | export default ` 2 | 3 | uniform Materials { 4 | vec4 colorAndMaterialType[NUM_MATERIALS]; 5 | vec4 roughnessMetalnessNormalScale[NUM_MATERIALS]; 6 | 7 | #if defined(NUM_DIFFUSE_MAPS) || 
defined(NUM_NORMAL_MAPS) || defined(NUM_PBR_MAPS) 8 | ivec4 diffuseNormalRoughnessMetalnessMapIndex[NUM_MATERIALS]; 9 | #endif 10 | 11 | #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS) 12 | vec4 diffuseNormalMapSize[NUM_DIFFUSE_NORMAL_MAPS]; 13 | #endif 14 | 15 | #if defined(NUM_PBR_MAPS) 16 | vec2 pbrMapSize[NUM_PBR_MAPS]; 17 | #endif 18 | } materials; 19 | 20 | #ifdef NUM_DIFFUSE_MAPS 21 | uniform mediump sampler2DArray diffuseMap; 22 | #endif 23 | 24 | #ifdef NUM_NORMAL_MAPS 25 | uniform mediump sampler2DArray normalMap; 26 | #endif 27 | 28 | #ifdef NUM_PBR_MAPS 29 | uniform mediump sampler2DArray pbrMap; 30 | #endif 31 | 32 | float getMatType(int materialIndex) { 33 | return materials.colorAndMaterialType[materialIndex].w; 34 | } 35 | 36 | vec3 getMatColor(int materialIndex, vec2 uv) { 37 | vec3 color = materials.colorAndMaterialType[materialIndex].rgb; 38 | 39 | #ifdef NUM_DIFFUSE_MAPS 40 | int diffuseMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].x; 41 | if (diffuseMapIndex >= 0) { 42 | color *= texture(diffuseMap, vec3(uv * materials.diffuseNormalMapSize[diffuseMapIndex].xy, diffuseMapIndex)).rgb; 43 | } 44 | #endif 45 | 46 | return color; 47 | } 48 | 49 | float getMatRoughness(int materialIndex, vec2 uv) { 50 | float roughness = materials.roughnessMetalnessNormalScale[materialIndex].x; 51 | 52 | #ifdef NUM_PBR_MAPS 53 | int roughnessMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].z; 54 | if (roughnessMapIndex >= 0) { 55 | roughness *= texture(pbrMap, vec3(uv * materials.pbrMapSize[roughnessMapIndex].xy, roughnessMapIndex)).g; 56 | } 57 | #endif 58 | 59 | return roughness; 60 | } 61 | 62 | float getMatMetalness(int materialIndex, vec2 uv) { 63 | float metalness = materials.roughnessMetalnessNormalScale[materialIndex].y; 64 | 65 | #ifdef NUM_PBR_MAPS 66 | int metalnessMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].w; 67 | if (metalnessMapIndex >= 0) { 68 | 
metalness *= texture(pbrMap, vec3(uv * materials.pbrMapSize[metalnessMapIndex].xy, metalnessMapIndex)).b; 69 | } 70 | #endif 71 | 72 | return metalness; 73 | } 74 | 75 | #ifdef NUM_NORMAL_MAPS 76 | vec3 getMatNormal(int materialIndex, vec2 uv, vec3 normal, vec3 dp1, vec3 dp2, vec2 duv1, vec2 duv2) { 77 | int normalMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].y; 78 | if (normalMapIndex >= 0) { 79 | // http://www.thetenthplanet.de/archives/1180 80 | // Compute co-tangent and co-bitangent vectors 81 | vec3 dp2perp = cross(dp2, normal); 82 | vec3 dp1perp = cross(normal, dp1); 83 | vec3 dpdu = dp2perp * duv1.x + dp1perp * duv2.x; 84 | vec3 dpdv = dp2perp * duv1.y + dp1perp * duv2.y; 85 | float invmax = inversesqrt(max(dot(dpdu, dpdu), dot(dpdv, dpdv))); 86 | dpdu *= invmax; 87 | dpdv *= invmax; 88 | 89 | vec3 n = 2.0 * texture(normalMap, vec3(uv * materials.diffuseNormalMapSize[normalMapIndex].zw, normalMapIndex)).rgb - 1.0; 90 | n.xy *= materials.roughnessMetalnessNormalScale[materialIndex].zw; 91 | 92 | mat3 tbn = mat3(dpdu, dpdv, normal); 93 | 94 | return normalize(tbn * n); 95 | } else { 96 | return normal; 97 | } 98 | } 99 | #endif 100 | `; 101 | -------------------------------------------------------------------------------- /src/renderer/glsl/chunks/random.glsl: -------------------------------------------------------------------------------- 1 | export default ` 2 | 3 | // Noise texture used to generate a different random number for each pixel. 4 | // We use blue noise in particular, but any type of noise will work. 
uniform sampler2D noiseTex;

uniform float stratifiedSamples[SAMPLING_DIMENSIONS];
uniform float strataSize;

// Every time we call randomSample() in the shader, and for every call to render,
// we want that specific bit of the shader to fetch a sample from the same position in stratifiedSamples
// This allows us to use stratified sampling for each random variable in our path tracing
int sampleIndex = 0;

// per-pixel random offset, set once from the noise texture in initRandom()
float pixelSeed;

void initRandom() {
  vec2 noiseSize = vec2(textureSize(noiseTex, 0));

  // tile the small noise texture across the entire screen
  pixelSeed = texture(noiseTex, vCoord / (pixelSize * noiseSize)).r;
}

// Returns the next stratified random number, decorrelated per pixel by pixelSeed
float randomSample() {
  float stratifiedSample = stratifiedSamples[sampleIndex++];

  float random = fract((stratifiedSample + pixelSeed) * strataSize); // blue noise + stratified samples

  // transform random number between [0, 1] to (0, 1)
  return EPS + (1.0 - 2.0 * EPS) * random;
}

vec2 randomSampleVec2() {
  return vec2(randomSample(), randomSample());
}

// Three 2D samples: enough random numbers for one material scattering event
struct MaterialSamples {
  vec2 s1;
  vec2 s2;
  vec2 s3;
};

MaterialSamples getRandomMaterialSamples() {
  MaterialSamples samples;

  samples.s1 = randomSampleVec2();
  samples.s2 = randomSampleVec2();
  samples.s3 = randomSampleVec2();

  return samples;
}
`;
--------------------------------------------------------------------------------
/src/renderer/glsl/chunks/rayTraceCore.glsl:
--------------------------------------------------------------------------------
export default `
// material type ids (must match RayTracingMaterial on the JS side -- TODO confirm)
#define STANDARD 0
#define THIN_GLASS 1
#define THICK_GLASS 2
#define SHADOW_CATCHER 3

const float IOR = 1.5;
const float INV_IOR = 1.0 / IOR;

const float IOR_THIN = 1.015;
const float INV_IOR_THIN = 1.0 / IOR_THIN;

// reflectance at normal incidence for the IOR above
const float R0 = (1.0 - IOR) * (1.0 - IOR) /
((1.0 + IOR) * (1.0 + IOR)); 14 | 15 | // https://www.w3.org/WAI/GL/wiki/Relative_luminance 16 | const vec3 luminance = vec3(0.2126, 0.7152, 0.0722); 17 | 18 | #define RAY_MAX_DISTANCE 9999.0 19 | 20 | struct Ray { 21 | vec3 o; 22 | vec3 d; 23 | vec3 invD; 24 | float tMax; 25 | }; 26 | 27 | struct SurfaceInteraction { 28 | bool hit; 29 | vec3 position; 30 | vec3 normal; // smoothed normal from the three triangle vertices 31 | vec3 faceNormal; // normal of the triangle 32 | vec3 color; 33 | float roughness; 34 | float metalness; 35 | int materialType; 36 | }; 37 | 38 | struct Camera { 39 | mat4 transform; 40 | float aspect; 41 | float fov; 42 | float focus; 43 | float aperture; 44 | }; 45 | 46 | void initRay(inout Ray ray, vec3 origin, vec3 direction) { 47 | ray.o = origin; 48 | ray.d = direction; 49 | ray.invD = 1.0 / ray.d; 50 | ray.tMax = RAY_MAX_DISTANCE; 51 | } 52 | 53 | // given the index from a 1D array, retrieve corresponding position from packed 2D texture 54 | ivec2 unpackTexel(int i, int columnsLog2) { 55 | ivec2 u; 56 | u.y = i >> columnsLog2; // equivalent to (i / 2^columnsLog2) 57 | u.x = i - (u.y << columnsLog2); // equivalent to (i % 2^columnsLog2) 58 | return u; 59 | } 60 | 61 | vec4 fetchData(sampler2D s, int i, int columnsLog2) { 62 | return texelFetch(s, unpackTexel(i, columnsLog2), 0); 63 | } 64 | 65 | ivec4 fetchData(isampler2D s, int i, int columnsLog2) { 66 | return texelFetch(s, unpackTexel(i, columnsLog2), 0); 67 | } 68 | 69 | struct Path { 70 | Ray ray; 71 | vec3 li; 72 | float alpha; 73 | vec3 beta; 74 | bool specularBounce; 75 | bool abort; 76 | float misWeight; 77 | }; 78 | 79 | uniform Camera camera; 80 | uniform vec2 pixelSize; // 1 / screenResolution 81 | uniform vec2 jitter; 82 | 83 | in vec2 vCoord; 84 | `; 85 | -------------------------------------------------------------------------------- /src/renderer/glsl/chunks/sample.glsl: -------------------------------------------------------------------------------- 1 | export default ` 2 
| 3 | // https://graphics.pixar.com/library/OrthonormalB/paper.pdf 4 | mat3 orthonormalBasis(vec3 n) { 5 | float zsign = n.z >= 0.0 ? 1.0 : -1.0; 6 | float a = -1.0 / (zsign + n.z); 7 | float b = n.x * n.y * a; 8 | vec3 s = vec3(1.0 + zsign * n.x * n.x * a, zsign * b, -zsign * n.x); 9 | vec3 t = vec3(b, zsign + n.y * n.y * a, -n.y); 10 | return mat3(s, t, n); 11 | } 12 | 13 | // http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#SamplingaUnitDisk 14 | vec2 sampleCircle(vec2 p) { 15 | p = 2.0 * p - 1.0; 16 | 17 | bool greater = abs(p.x) > abs(p.y); 18 | 19 | float r = greater ? p.x : p.y; 20 | float theta = greater ? 0.25 * PI * p.y / p.x : PI * (0.5 - 0.25 * p.x / p.y); 21 | 22 | return r * vec2(cos(theta), sin(theta)); 23 | } 24 | 25 | // http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#Cosine-WeightedHemisphereSampling 26 | vec3 cosineSampleHemisphere(vec2 p) { 27 | vec2 h = sampleCircle(p); 28 | float z = sqrt(max(0.0, 1.0 - h.x * h.x - h.y * h.y)); 29 | return vec3(h, z); 30 | } 31 | 32 | 33 | // http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Sampling_Reflection_Functions.html#MicrofacetBxDFs 34 | // Instead of Beckmann distrubtion, we use the GTR2 (GGX) distrubtion as covered in Disney's Principled BRDF paper 35 | vec3 lightDirSpecular(vec3 faceNormal, vec3 viewDir, mat3 basis, float roughness, vec2 random) { 36 | float phi = TWOPI * random.y; 37 | float alpha = roughness * roughness; 38 | float cosTheta = sqrt((1.0 - random.x) / (1.0 + (alpha * alpha - 1.0) * random.x)); 39 | float sinTheta = sqrt(1.0 - cosTheta * cosTheta); 40 | 41 | vec3 halfVector = basis * sign(dot(faceNormal, viewDir)) * vec3(sinTheta * cos(phi), sinTheta * sin(phi), cosTheta); 42 | 43 | vec3 lightDir = reflect(-viewDir, halfVector); 44 | 45 | return lightDir; 46 | } 47 | 48 | vec3 lightDirDiffuse(vec3 faceNormal, vec3 viewDir, mat3 basis, 
vec2 random) { 49 | return basis * sign(dot(faceNormal, viewDir)) * cosineSampleHemisphere(random); 50 | } 51 | 52 | float powerHeuristic(float f, float g) { 53 | return (f * f) / (f * f + g * g); 54 | } 55 | 56 | `; 57 | -------------------------------------------------------------------------------- /src/renderer/glsl/chunks/sampleGlassMicrofacet.glsl: -------------------------------------------------------------------------------- 1 | // Extends from concepts found in sampleMaterial.glsl 2 | // Combines multiple importance sampling with 3 | // Walter Et al. (2007) - Microfacet Models for Refraction through Rough Surfaces 4 | // https://www.cs.cornell.edu/~srm/publications/EGSR07-btdf.html 5 | 6 | export default ` 7 | #ifdef USE_GLASS 8 | 9 | // Computes Cook-Torrance specular reflection 10 | vec3 glassReflection(SurfaceInteraction si, vec3 viewDir, vec3 lightDir, float cosThetaL, out float pdf) { 11 | vec3 halfVector = normalize(viewDir + lightDir); 12 | 13 | float cosThetaV = dot(si.normal, viewDir); 14 | float cosThetaH = dot(si.normal, halfVector); 15 | float cosThetaD = dot(lightDir, halfVector); 16 | 17 | float alpha2 = (si.roughness * si.roughness) * (si.roughness * si.roughness); 18 | 19 | float F = fresnelSchlickTIR(sign(cosThetaL) * cosThetaD, R0, IOR); 20 | float D = trowbridgeReitzD(cosThetaH, alpha2); 21 | float G = 1.0 / (1.0 + trowbridgeReitzLambda(cosThetaV, alpha2) + trowbridgeReitzLambda(cosThetaL, alpha2)); 22 | 23 | pdf = abs(D * cosThetaH / (4.0 * cosThetaD)); 24 | 25 | return vec3(1.0) * abs(F * D * G / (4.0 * cosThetaV * cosThetaL)); 26 | } 27 | 28 | // An implementation of Walter Et al. (2007) - Microfacet Models for Refraction through Rough Surfaces 29 | // https://www.cs.cornell.edu/~srm/publications/EGSR07-btdf.html 30 | vec3 glassRefraction(SurfaceInteraction si, vec3 viewDir, vec3 lightDir, float cosThetaL, out float pdf) { 31 | float dir = sign(dot(viewDir, si.normal)); 32 | float eta = dir > 0.0 ? 
IOR : INV_IOR; 33 | // float eta = dir > 0.0 ? IOR_THIN : INV_IOR_THIN; 34 | 35 | vec3 halfVector = normalize(viewDir + lightDir * eta); 36 | 37 | float cosThetaV = dot(si.normal, viewDir); 38 | float cosThetaH = dot(si.normal, halfVector); 39 | float cosThetaVH = dot(viewDir, halfVector); 40 | float cosThetaLH = dot(lightDir, halfVector); 41 | 42 | float alpha2 = (si.roughness * si.roughness) * (si.roughness * si.roughness); 43 | 44 | float F = fresnelSchlickTIR(-cosThetaVH, R0, IOR); 45 | float D = trowbridgeReitzD(cosThetaH, alpha2); 46 | float G = 1.0 / (1.0 + trowbridgeReitzLambda(cosThetaV, alpha2) + trowbridgeReitzLambda(cosThetaL, alpha2)); 47 | 48 | float sqrtDenom = cosThetaVH + eta * cosThetaLH; 49 | 50 | pdf = abs(D * cosThetaH * eta * eta * cosThetaLH / (sqrtDenom * sqrtDenom)); 51 | 52 | return si.color * (1.0 - F) * abs(D * G * eta * eta * cosThetaLH * cosThetaVH / (cosThetaL * cosThetaV * sqrtDenom * sqrtDenom)); 53 | } 54 | 55 | vec3 lightDirRefraction(vec3 normal, vec3 viewDir, mat3 basis, float roughness, vec2 random) { 56 | float phi = TWOPI * random.y; 57 | float alpha = roughness * roughness; 58 | float cosTheta = sqrt((1.0 - random.x) / (1.0 + (alpha * alpha - 1.0) * random.x)); 59 | float sinTheta = sqrt(1.0 - cosTheta * cosTheta); 60 | 61 | float dir = sign(dot(normal, viewDir)); 62 | 63 | vec3 halfVector = basis * dir * vec3(sinTheta * cos(phi), sinTheta * sin(phi), cosTheta); 64 | 65 | vec3 lightDir = refract(-viewDir, halfVector, dir < 0.0 ? 
IOR : INV_IOR);
  // vec3 lightDir = refract(-viewDir, halfVector, INV_IOR_THIN);

  return lightDir;
}

// Estimate direct lighting by importance sampling the environment map.
// Returns black when the sampled direction lies on the wrong side of the
// surface for the chosen lobe, or is shadowed.
vec3 glassImportanceSampleLight(SurfaceInteraction si, vec3 viewDir, bool lightRefract, bool lastBounce, vec2 random) {
  // initialize to black: the early returns below would otherwise return an
  // uninitialized local, which is undefined in GLSL
  vec3 li = vec3(0.0);

  float lightPdf;
  vec2 uv;
  vec3 lightDir = sampleEnvmap(random, uv, lightPdf);

  float cosThetaL = dot(si.normal, lightDir);

  // reject reflection-side directions when refracting, and vice versa
  float orientation = dot(si.faceNormal, viewDir) * cosThetaL;
  if ((!lightRefract && orientation < 0.0) || (lightRefract && orientation > 0.0)) {
    return li;
  }

  Ray ray;
  initRay(ray, si.position + EPS * lightDir, lightDir);
  if (!lastBounce && intersectSceneShadow(ray)) {
    return li;
  }

  vec3 irr = textureLinear(envMap, uv).xyz;

  float scatteringPdf;
  vec3 brdf = lightRefract ?
    glassRefraction(si, viewDir, lightDir, cosThetaL, scatteringPdf) :
    glassReflection(si, viewDir, lightDir, cosThetaL, scatteringPdf);

  float weight = powerHeuristic(lightPdf, scatteringPdf);

  li = brdf * irr * abs(cosThetaL) * weight / lightPdf;

  return li;
}

// Estimate direct lighting by importance sampling the glass BSDF with the
// already-chosen lightDir; the MIS counterpart of glassImportanceSampleLight.
vec3 glassImportanceSampleMaterial(SurfaceInteraction si, vec3 viewDir, bool lightRefract, bool lastBounce, vec3 lightDir) {
  // initialize to black (early returns below; uninitialized reads are undefined in GLSL)
  vec3 li = vec3(0.0);

  float cosThetaL = dot(si.normal, lightDir);

  float orientation = dot(si.faceNormal, viewDir) * cosThetaL;
  if ((!lightRefract && orientation < 0.0) || (lightRefract && orientation > 0.0)) {
    return li;
  }

  Ray ray;
  initRay(ray, si.position + EPS * lightDir, lightDir);
  if (!lastBounce && intersectSceneShadow(ray)) {
    return li;
  }

  vec2 uv = cartesianToEquirect(lightDir);
  float lightPdf = envMapPdf(uv);

  // Environment radiance in the sampled direction.
  // FIX: was textureLinear(envMap, vec2(phi, theta)) -- 'phi' and 'theta' are
  // not declared in this function, which fails shader compilation. 'uv' is the
  // equirect coordinate, matching glassImportanceSampleLight above.
  vec3 irr = textureLinear(envMap, uv).rgb;

  float scatteringPdf;
  vec3 brdf = lightRefract ?
    glassRefraction(si, viewDir, lightDir, cosThetaL, scatteringPdf) :
    glassReflection(si, viewDir, lightDir, cosThetaL, scatteringPdf);

  float weight = powerHeuristic(scatteringPdf, lightPdf);

  li += brdf * irr * abs(cosThetaL) * weight / scatteringPdf;

  return li;
}

// Sample the rough-glass BSDF: choose reflection or refraction with probability
// given by the Fresnel factor, accumulate multiple-importance-sampled direct
// light, and set up the continuing bounce ray in 'ray'/'beta'.
vec3 sampleGlassMicrofacet(SurfaceInteraction si, int bounce, inout Ray ray, inout vec3 beta, out bool abort) {
  vec3 viewDir = -ray.d;

  float cosThetaV = dot(si.normal, viewDir);

  // thin glass
  // si.normal *= sign(cosThetaV);
  // si.faceNormal *= sign(cosThetaV);
  // cosThetaV = abs(cosThetaV);

  mat3 basis = orthonormalBasis(si.normal);

  float F = fresnelSchlickTIR(cosThetaV, R0, IOR); // thick glass

  vec2 reflectionOrRefraction = randomSampleVec2();

  vec3 lightDir;
  bool lightRefract;
  float pdf;

  if (reflectionOrRefraction.x < F) {
    lightDir = lightDirSpecular(si.normal, viewDir, basis, si.roughness, randomSampleVec2());
    lightRefract = false;
    pdf = F;
  } else {
    lightDir = lightDirRefraction(si.normal, viewDir, basis, si.roughness, randomSampleVec2());
    lightRefract = true;
    pdf = 1.0 - F;
  }

  bool lastBounce = bounce == BOUNCES;

  vec3 li = beta * (
    glassImportanceSampleLight(si, viewDir, lightRefract, lastBounce, randomSampleVec2()) +
    glassImportanceSampleMaterial(si, viewDir, lightRefract, lastBounce, lightDir)
  );

  li /= pdf;

  float scatteringPdf;
  float cosThetaL;
  vec3 brdf;

  // choose the continuing bounce direction with an independent random number
  if (reflectionOrRefraction.y < F) {
    lightDir = lightDirSpecular(si.normal, viewDir, basis, si.roughness, randomSampleVec2());
    cosThetaL = dot(si.normal, lightDir);
    brdf = glassReflection(si, viewDir, lightDir, cosThetaL, scatteringPdf);
    scatteringPdf *= F;
    lightRefract = false;
  } else {
lightDir = lightDirRefraction(si.normal, viewDir, basis, si.roughness, randomSampleVec2()); 189 | cosThetaL = dot(si.normal, lightDir); 190 | brdf = glassRefraction(si, viewDir, lightDir, cosThetaL, scatteringPdf); 191 | scatteringPdf *= 1.0 - F; 192 | lightRefract = true; 193 | } 194 | 195 | beta *= abs(cosThetaL) * brdf / scatteringPdf; 196 | 197 | initRay(ray, si.position + EPS * lightDir, lightDir); 198 | 199 | float orientation = dot(si.faceNormal, viewDir) * cosThetaL; 200 | abort = (!lightRefract && orientation < 0.0) || (lightRefract && orientation > 0.0); 201 | 202 | return li; 203 | } 204 | 205 | #endif 206 | 207 | `; 208 | -------------------------------------------------------------------------------- /src/renderer/glsl/chunks/sampleGlassSpecular.glsl: -------------------------------------------------------------------------------- 1 | export default ` 2 | 3 | #ifdef USE_GLASS 4 | 5 | void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { 6 | bool lastBounce = bounce == BOUNCES; 7 | vec3 viewDir = -path.ray.d; 8 | float cosTheta = dot(si.normal, viewDir); 9 | 10 | MaterialSamples samples = getRandomMaterialSamples(); 11 | 12 | float reflectionOrRefraction = samples.s1.x; 13 | 14 | float F = si.materialType == THIN_GLASS ? 15 | fresnelSchlick(abs(cosTheta), R0) : // thin glass 16 | fresnelSchlickTIR(cosTheta, R0, IOR); // thick glass 17 | 18 | vec3 lightDir; 19 | 20 | if (reflectionOrRefraction < F) { 21 | lightDir = reflect(-viewDir, si.normal); 22 | } else { 23 | lightDir = si.materialType == THIN_GLASS ? 24 | refract(-viewDir, sign(cosTheta) * si.normal, INV_IOR_THIN) : // thin glass 25 | refract(-viewDir, sign(cosTheta) * si.normal, cosTheta < 0.0 ? IOR : INV_IOR); // thick glass 26 | path.beta *= si.color; 27 | } 28 | 29 | path.misWeight = 1.0; 30 | 31 | initRay(path.ray, si.position + EPS * lightDir, lightDir); 32 | 33 | path.li += lastBounce ? 
path.beta * sampleBackgroundFromDirection(lightDir) : vec3(0.0); 34 | 35 | path.specularBounce = true; 36 | } 37 | 38 | #endif 39 | 40 | `; 41 | -------------------------------------------------------------------------------- /src/renderer/glsl/chunks/sampleMaterial.glsl: -------------------------------------------------------------------------------- 1 | // Estimate the direct lighting integral using multiple importance sampling 2 | // http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Direct_Lighting.html#EstimatingtheDirectLightingIntegral 3 | 4 | export default ` 5 | 6 | void sampleMaterial(SurfaceInteraction si, int bounce, inout Path path) { 7 | bool lastBounce = bounce == BOUNCES; 8 | mat3 basis = orthonormalBasis(si.normal); 9 | vec3 viewDir = -path.ray.d; 10 | 11 | MaterialSamples samples = getRandomMaterialSamples(); 12 | 13 | vec2 diffuseOrSpecular = samples.s1; 14 | vec2 lightDirSample = samples.s2; 15 | vec2 bounceDirSample = samples.s3; 16 | 17 | // Step 1: Add direct illumination of the light source (the hdr map) 18 | // On every bounce but the last, importance sample the light source 19 | // On the last bounce, multiple importance sample the brdf AND the light source, determined by random var 20 | 21 | vec3 lightDir; 22 | vec2 uv; 23 | float lightPdf; 24 | bool brdfSample = false; 25 | 26 | if (lastBounce && diffuseOrSpecular.x < 0.5) { 27 | // reuse this sample by multiplying by 2 to bring sample from [0, 0.5), to [0, 1) 28 | lightDir = 2.0 * diffuseOrSpecular.x < mix(0.5, 0.0, si.metalness) ? 
29 | lightDirDiffuse(si.faceNormal, viewDir, basis, lightDirSample) : 30 | lightDirSpecular(si.faceNormal, viewDir, basis, si.roughness, lightDirSample); 31 | 32 | uv = cartesianToEquirect(lightDir); 33 | lightPdf = envMapPdf(uv); 34 | brdfSample = true; 35 | } else { 36 | lightDir = sampleEnvmap(lightDirSample, uv, lightPdf); 37 | } 38 | 39 | float cosThetaL = dot(si.normal, lightDir); 40 | 41 | float occluded = 1.0; 42 | 43 | float orientation = dot(si.faceNormal, viewDir) * cosThetaL; 44 | if (orientation < 0.0) { 45 | // light dir points towards surface. invalid dir. 46 | occluded = 0.0; 47 | } 48 | 49 | float diffuseWeight = 1.0; 50 | 51 | initRay(path.ray, si.position + EPS * lightDir, lightDir); 52 | if (intersectSceneShadow(path.ray)) { 53 | if (lastBounce) { 54 | diffuseWeight = 0.0; 55 | } else { 56 | occluded = 0.0; 57 | } 58 | } 59 | 60 | vec3 irr = textureLinear(envMap, uv).rgb; 61 | 62 | float scatteringPdf; 63 | vec3 brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, diffuseWeight, scatteringPdf); 64 | 65 | float weight; 66 | if (lastBounce) { 67 | weight = brdfSample ? 68 | 2.0 * powerHeuristic(scatteringPdf, lightPdf) / scatteringPdf : 69 | 2.0 * powerHeuristic(lightPdf, scatteringPdf) / lightPdf; 70 | } else { 71 | weight = powerHeuristic(lightPdf, scatteringPdf) / lightPdf; 72 | } 73 | 74 | path.li += path.beta * occluded * brdf * irr * abs(cosThetaL) * weight; 75 | 76 | // Step 2: Setup ray direction for next bounce by importance sampling the BRDF 77 | 78 | if (lastBounce) { 79 | return; 80 | } 81 | 82 | lightDir = diffuseOrSpecular.y < mix(0.5, 0.0, si.metalness) ?
83 | lightDirDiffuse(si.faceNormal, viewDir, basis, bounceDirSample) : 84 | lightDirSpecular(si.faceNormal, viewDir, basis, si.roughness, bounceDirSample); 85 | 86 | cosThetaL = dot(si.normal, lightDir); 87 | 88 | orientation = dot(si.faceNormal, viewDir) * cosThetaL; 89 | path.abort = orientation < 0.0; 90 | 91 | if (path.abort) { 92 | return; 93 | } 94 | 95 | brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, 1.0, scatteringPdf); 96 | 97 | uv = cartesianToEquirect(lightDir); 98 | lightPdf = envMapPdf(uv); 99 | 100 | path.misWeight = powerHeuristic(scatteringPdf, lightPdf); 101 | 102 | path.beta *= abs(cosThetaL) * brdf / scatteringPdf; 103 | 104 | path.specularBounce = false; 105 | 106 | initRay(path.ray, si.position + EPS * lightDir, lightDir); 107 | } 108 | `; 109 | -------------------------------------------------------------------------------- /src/renderer/glsl/chunks/sampleShadowCatcher.glsl: -------------------------------------------------------------------------------- 1 | export default ` 2 | 3 | #ifdef USE_SHADOW_CATCHER 4 | 5 | void sampleShadowCatcher(SurfaceInteraction si, int bounce, inout Path path) { 6 | bool lastBounce = bounce == BOUNCES; 7 | mat3 basis = orthonormalBasis(si.normal); 8 | vec3 viewDir = -path.ray.d; 9 | vec3 color = bounce == 1 || path.specularBounce ? sampleBackgroundFromDirection(-viewDir) : sampleEnvmapFromDirection(-viewDir); 10 | 11 | si.color = vec3(1, 1, 1); 12 | 13 | MaterialSamples samples = getRandomMaterialSamples(); 14 | 15 | vec2 diffuseOrSpecular = samples.s1; 16 | vec2 lightDirSample = samples.s2; 17 | vec2 bounceDirSample = samples.s3; 18 | 19 | vec3 lightDir; 20 | vec2 uv; 21 | float lightPdf; 22 | bool brdfSample = false; 23 | 24 | if (diffuseOrSpecular.x < 0.5) { 25 | lightDir = 2.0 * diffuseOrSpecular.x < mix(0.5, 0.0, si.metalness) ? 
26 | lightDirDiffuse(si.faceNormal, viewDir, basis, lightDirSample) : 27 | lightDirSpecular(si.faceNormal, viewDir, basis, si.roughness, lightDirSample); 28 | uv = cartesianToEquirect(lightDir); 29 | lightPdf = envMapPdf(uv); 30 | brdfSample = true; 31 | } else { 32 | lightDir = sampleEnvmap(lightDirSample, uv, lightPdf); 33 | } 34 | 35 | float cosThetaL = dot(si.normal, lightDir); 36 | 37 | float liContrib = 1.0; 38 | 39 | float orientation = dot(si.faceNormal, viewDir) * cosThetaL; 40 | if (orientation < 0.0) { 41 | liContrib = 0.0; 42 | } 43 | 44 | float occluded = 1.0; 45 | initRay(path.ray, si.position + EPS * lightDir, lightDir); 46 | if (intersectSceneShadow(path.ray)) { 47 | occluded = 0.0; 48 | } 49 | 50 | float irr = dot(luminance, textureLinear(envMap, uv).rgb); 51 | 52 | float scatteringPdf; 53 | vec3 brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, 1.0, scatteringPdf); 54 | 55 | float weight = brdfSample ? 56 | 2.0 * powerHeuristic(scatteringPdf, lightPdf) / scatteringPdf : 57 | 2.0 * powerHeuristic(lightPdf, scatteringPdf) / lightPdf; 58 | 59 | float liEq = liContrib * brdf.r * irr * abs(cosThetaL) * weight; 60 | 61 | float alpha = liEq; 62 | path.alpha *= alpha; 63 | path.li *= alpha; 64 | 65 | path.li += occluded * path.beta * color * liEq; 66 | 67 | if (lastBounce) { 68 | return; 69 | } 70 | 71 | lightDir = diffuseOrSpecular.y < mix(0.5, 0.0, si.metalness) ? 
72 | lightDirDiffuse(si.faceNormal, viewDir, basis, bounceDirSample) : 73 | lightDirSpecular(si.faceNormal, viewDir, basis, si.roughness, bounceDirSample); 74 | 75 | cosThetaL = dot(si.normal, lightDir); 76 | 77 | orientation = dot(si.faceNormal, viewDir) * cosThetaL; 78 | path.abort = orientation < 0.0; 79 | 80 | if (path.abort) { 81 | return; 82 | } 83 | 84 | brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, 1.0, scatteringPdf); 85 | 86 | uv = cartesianToEquirect(lightDir); 87 | lightPdf = envMapPdf(uv); 88 | 89 | path.misWeight = 0.0; 90 | 91 | path.beta = color * abs(cosThetaL) * brdf.r / scatteringPdf; 92 | 93 | path.specularBounce = false; 94 | 95 | initRay(path.ray, si.position + EPS * lightDir, lightDir); 96 | } 97 | 98 | #endif 99 | 100 | `; 101 | -------------------------------------------------------------------------------- /src/renderer/glsl/chunks/surfaceInteractionDirect.glsl: -------------------------------------------------------------------------------- 1 | export default ` 2 | 3 | uniform sampler2D gPosition; 4 | uniform sampler2D gNormal; 5 | uniform sampler2D gFaceNormal; 6 | uniform sampler2D gColor; 7 | uniform sampler2D gMatProps; 8 | 9 | void surfaceInteractionDirect(vec2 coord, inout SurfaceInteraction si) { 10 | vec4 positionAndMeshIndex = texture(gPosition, coord); 11 | 12 | si.position = positionAndMeshIndex.xyz; 13 | 14 | float meshIndex = positionAndMeshIndex.w; 15 | 16 | vec4 normalMaterialType = texture(gNormal, coord); 17 | 18 | si.normal = normalize(normalMaterialType.xyz); 19 | si.materialType = int(normalMaterialType.w); 20 | 21 | si.faceNormal = normalize(texture(gFaceNormal, coord).xyz); 22 | 23 | si.color = texture(gColor, coord).rgb; 24 | 25 | vec4 matProps = texture(gMatProps, coord); 26 | si.roughness = matProps.x; 27 | si.metalness = matProps.y; 28 | 29 | si.hit = meshIndex > 0.0 ? 
true : false; 30 | } 31 | `; 32 | -------------------------------------------------------------------------------- /src/renderer/glsl/chunks/textureLinear.glsl: -------------------------------------------------------------------------------- 1 | // Manually performs linear filtering if the extension OES_texture_float_linear is not supported 2 | 3 | export default ` 4 | vec4 textureLinear(sampler2D map, vec2 uv) { 5 | #ifdef OES_texture_float_linear 6 | return texture(map, uv); 7 | #else 8 | vec2 size = vec2(textureSize(map, 0)); 9 | vec2 texelSize = 1.0 / size; 10 | 11 | uv = uv * size - 0.5; 12 | vec2 f = fract(uv); 13 | uv = floor(uv) + 0.5; 14 | 15 | vec4 s1 = texture(map, (uv + vec2(0, 0)) * texelSize); 16 | vec4 s2 = texture(map, (uv + vec2(1, 0)) * texelSize); 17 | vec4 s3 = texture(map, (uv + vec2(0, 1)) * texelSize); 18 | vec4 s4 = texture(map, (uv + vec2(1, 1)) * texelSize); 19 | 20 | return mix(mix(s1, s2, f.x), mix(s3, s4, f.x), f.y); 21 | #endif 22 | } 23 | `; 24 | -------------------------------------------------------------------------------- /src/renderer/glsl/fullscreenQuad.vert: -------------------------------------------------------------------------------- 1 | export default { 2 | source: ` 3 | layout(location = 0) in vec2 a_position; 4 | 5 | out vec2 vCoord; 6 | 7 | void main() { 8 | vCoord = a_position; 9 | gl_Position = vec4(2. 
* a_position - 1., 0, 1); 10 | } 11 | ` 12 | } 13 | -------------------------------------------------------------------------------- /src/renderer/glsl/gBuffer.frag: -------------------------------------------------------------------------------- 1 | import constants from './chunks/constants.glsl'; 2 | import materialBuffer from './chunks/materialBuffer.glsl'; 3 | 4 | export default { 5 | 6 | outputs: ['position', 'normal', 'faceNormal', 'color', 'matProps'], 7 | includes: [ 8 | constants, 9 | materialBuffer, 10 | ], 11 | source: ` 12 | in vec3 vPosition; 13 | in vec3 vNormal; 14 | in vec2 vUv; 15 | flat in ivec2 vMaterialMeshIndex; 16 | 17 | vec3 faceNormals(vec3 pos) { 18 | vec3 fdx = dFdx(pos); 19 | vec3 fdy = dFdy(pos); 20 | return cross(fdx, fdy); 21 | } 22 | 23 | void main() { 24 | int materialIndex = vMaterialMeshIndex.x; 25 | int meshIndex = vMaterialMeshIndex.y; 26 | 27 | vec2 uv = fract(vUv); 28 | 29 | vec3 color = getMatColor(materialIndex, uv); 30 | float roughness = getMatRoughness(materialIndex, uv); 31 | float metalness = getMatMetalness(materialIndex, uv); 32 | float materialType = getMatType(materialIndex); 33 | 34 | roughness = clamp(roughness, ROUGHNESS_MIN, 1.0); 35 | metalness = clamp(metalness, 0.0, 1.0); 36 | 37 | vec3 normal = normalize(vNormal); 38 | vec3 faceNormal = normalize(faceNormals(vPosition)); 39 | normal *= sign(dot(normal, faceNormal)); 40 | 41 | #ifdef NUM_NORMAL_MAPS 42 | vec3 dp1 = dFdx(vPosition); 43 | vec3 dp2 = dFdy(vPosition); 44 | vec2 duv1 = dFdx(vUv); 45 | vec2 duv2 = dFdy(vUv); 46 | normal = getMatNormal(materialIndex, uv, normal, dp1, dp2, duv1, duv2); 47 | #endif 48 | 49 | out_position = vec4(vPosition, float(meshIndex) + EPS); 50 | out_normal = vec4(normal, materialType); 51 | out_faceNormal = vec4(faceNormal, 0); 52 | out_color = vec4(color, 0); 53 | out_matProps = vec4(roughness, metalness, 0, 0); 54 | } 55 | ` 56 | 57 | } 58 | -------------------------------------------------------------------------------- 
/src/renderer/glsl/gBuffer.vert: -------------------------------------------------------------------------------- 1 | export default { 2 | 3 | source: ` 4 | in vec3 aPosition; 5 | in vec3 aNormal; 6 | in vec2 aUv; 7 | in ivec2 aMaterialMeshIndex; 8 | 9 | uniform mat4 projView; 10 | 11 | out vec3 vPosition; 12 | out vec3 vNormal; 13 | out vec2 vUv; 14 | flat out ivec2 vMaterialMeshIndex; 15 | 16 | void main() { 17 | vPosition = aPosition; 18 | vNormal = aNormal; 19 | vUv = aUv; 20 | vMaterialMeshIndex = aMaterialMeshIndex; 21 | gl_Position = projView * vec4(aPosition, 1); 22 | } 23 | ` 24 | } 25 | -------------------------------------------------------------------------------- /src/renderer/glsl/rayTrace.frag: -------------------------------------------------------------------------------- 1 | import { unrollLoop } from '../glslUtil'; 2 | import constants from './chunks/constants.glsl'; 3 | import rayTraceCore from './chunks/rayTraceCore.glsl'; 4 | import textureLinear from './chunks/textureLinear.glsl'; 5 | import materialBuffer from './chunks/materialBuffer.glsl'; 6 | import intersect from './chunks/intersect.glsl'; 7 | import surfaceInteractionDirect from './chunks/surfaceInteractionDirect.glsl'; 8 | import random from './chunks/random.glsl'; 9 | import envMap from './chunks/envMap.glsl'; 10 | import bsdf from './chunks/bsdf.glsl'; 11 | import sample from './chunks/sample.glsl'; 12 | import sampleMaterial from './chunks/sampleMaterial.glsl'; 13 | import sampleShadowCatcher from './chunks/sampleShadowCatcher.glsl'; 14 | import sampleGlass from './chunks/sampleGlassSpecular.glsl'; 15 | 16 | export default { 17 | includes: [ 18 | constants, 19 | rayTraceCore, 20 | textureLinear, 21 | materialBuffer, 22 | intersect, 23 | surfaceInteractionDirect, 24 | random, 25 | envMap, 26 | bsdf, 27 | sample, 28 | sampleMaterial, 29 | sampleGlass, 30 | sampleShadowCatcher, 31 | ], 32 | outputs: ['light'], 33 | source: (defines) => ` 34 | void bounce(inout Path path, int i, inout 
SurfaceInteraction si) { 35 | 36 | if (!si.hit) { 37 | vec3 irr = path.specularBounce ? sampleBackgroundFromDirection(path.ray.d) : sampleEnvmapFromDirection(path.ray.d); 38 | 39 | // hit a light source (the hdr map) 40 | // add contribution from light source 41 | // path.misWeight is the multiple importance sampled weight of this light source 42 | path.li += path.misWeight * path.beta * irr; 43 | path.abort = true; 44 | return; 45 | } 46 | 47 | #ifdef USE_GLASS 48 | if (si.materialType == THIN_GLASS || si.materialType == THICK_GLASS) { 49 | sampleGlassSpecular(si, i, path); 50 | } 51 | #endif 52 | #ifdef USE_SHADOW_CATCHER 53 | if (si.materialType == SHADOW_CATCHER) { 54 | sampleShadowCatcher(si, i, path); 55 | } 56 | #endif 57 | if (si.materialType == STANDARD) { 58 | sampleMaterial(si, i, path); 59 | } 60 | 61 | // Russian Roulette sampling 62 | if (i >= 2) { 63 | float q = 1.0 - dot(path.beta, luminance); 64 | if (randomSample() < q) { 65 | path.abort = true; 66 | } 67 | path.beta /= 1.0 - q; 68 | } 69 | 70 | } 71 | 72 | // Path tracing integrator as described in 73 | // http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Path_Tracing.html# 74 | vec4 integrator(inout Ray ray) { 75 | Path path; 76 | path.ray = ray; 77 | path.li = vec3(0); 78 | path.alpha = 1.0; 79 | path.beta = vec3(1.0); 80 | path.specularBounce = true; 81 | path.abort = false; 82 | path.misWeight = 1.0; 83 | 84 | SurfaceInteraction si; 85 | 86 | // first surface interaction from g-buffer 87 | surfaceInteractionDirect(vCoord, si); 88 | 89 | // first surface interaction from ray interesction 90 | // intersectScene(path.ray, si); 91 | 92 | bounce(path, 1, si); 93 | 94 | // Manually unroll for loop. 
95 | // Some hardware fails to iterate over a GLSL loop, so we provide this workaround 96 | // for (int i = 1; i < defines.bounces + 1, i += 1) 97 | // equivelant to 98 | ${unrollLoop('i', 2, defines.BOUNCES + 1, 1, ` 99 | if (path.abort) { 100 | return vec4(path.li, path.alpha); 101 | } 102 | intersectScene(path.ray, si); 103 | bounce(path, i, si); 104 | `)} 105 | 106 | return vec4(path.li, path.alpha); 107 | } 108 | 109 | void main() { 110 | initRandom(); 111 | 112 | vec2 vCoordAntiAlias = vCoord + jitter; 113 | 114 | vec3 direction = normalize(vec3(vCoordAntiAlias - 0.5, -1.0) * vec3(camera.aspect, 1.0, camera.fov)); 115 | 116 | // Thin lens model with depth-of-field 117 | // http://www.pbr-book.org/3ed-2018/Camera_Models/Projective_Camera_Models.html#TheThinLensModelandDepthofField 118 | // vec2 lensPoint = camera.aperture * sampleCircle(randomSampleVec2()); 119 | // vec3 focusPoint = -direction * camera.focus / direction.z; // intersect ray direction with focus plane 120 | 121 | // vec3 origin = vec3(lensPoint, 0.0); 122 | // direction = normalize(focusPoint - origin); 123 | 124 | // origin = vec3(camera.transform * vec4(origin, 1.0)); 125 | // direction = mat3(camera.transform) * direction; 126 | 127 | vec3 origin = camera.transform[3].xyz; 128 | direction = mat3(camera.transform) * direction; 129 | 130 | Ray cam; 131 | initRay(cam, origin, direction); 132 | 133 | vec4 liAndAlpha = integrator(cam); 134 | 135 | if (!(liAndAlpha.x < INF && liAndAlpha.x > -EPS)) { 136 | liAndAlpha = vec4(0, 0, 0, 1); 137 | } 138 | 139 | out_light = liAndAlpha; 140 | 141 | // Stratified Sampling Sample Count Test 142 | // --------------- 143 | // Uncomment the following code 144 | // Then observe the colors of the image 145 | // If: 146 | // * The resulting image is pure black 147 | // Extra samples are being passed to the shader that aren't being used. 
148 | // * The resulting image contains red 149 | // Not enough samples are being passed to the shader 150 | // * The resulting image contains only white with some black 151 | // All samples are used by the shader. Correct result! 152 | 153 | // out_light = vec4(0, 0, 0, 1); 154 | // if (sampleIndex == SAMPLING_DIMENSIONS) { 155 | // out_light = vec4(1, 1, 1, 1); 156 | // } else if (sampleIndex > SAMPLING_DIMENSIONS) { 157 | // out_light = vec4(1, 0, 0, 1); 158 | // } 159 | } 160 | ` 161 | } 162 | -------------------------------------------------------------------------------- /src/renderer/glsl/reproject.frag: -------------------------------------------------------------------------------- 1 | import textureLinear from './chunks/textureLinear.glsl'; 2 | 3 | export default { 4 | outputs: ['light'], 5 | includes: [textureLinear], 6 | source: ` 7 | in vec2 vCoord; 8 | 9 | uniform mediump sampler2D lightTex; 10 | uniform mediump sampler2D positionTex; 11 | uniform vec2 lightScale; 12 | uniform vec2 previousLightScale; 13 | 14 | uniform mediump sampler2D previousLightTex; 15 | uniform mediump sampler2D previousPositionTex; 16 | 17 | uniform mat4 historyCamera; 18 | uniform float blendAmount; 19 | uniform vec2 jitter; 20 | 21 | vec2 reproject(vec3 position) { 22 | vec4 historyCoord = historyCamera * vec4(position, 1.0); 23 | return 0.5 * historyCoord.xy / historyCoord.w + 0.5; 24 | } 25 | 26 | float getMeshId(sampler2D meshIdTex, vec2 vCoord) { 27 | return floor(texture(meshIdTex, vCoord).w); 28 | } 29 | 30 | void main() { 31 | vec3 currentPosition = textureLinear(positionTex, vCoord).xyz; 32 | float currentMeshId = getMeshId(positionTex, vCoord); 33 | 34 | vec4 currentLight = texture(lightTex, lightScale * vCoord); 35 | 36 | if (currentMeshId == 0.0) { 37 | out_light = currentLight; 38 | return; 39 | } 40 | 41 | vec2 hCoord = reproject(currentPosition) - jitter; 42 | 43 | vec2 hSizef = previousLightScale * vec2(textureSize(previousLightTex, 0)); 44 | vec2 hSizeInv = 
1.0 / hSizef; 45 | ivec2 hSize = ivec2(hSizef); 46 | 47 | vec2 hTexelf = hCoord * hSizef - 0.5; 48 | ivec2 hTexel = ivec2(hTexelf); 49 | vec2 f = fract(hTexelf); 50 | 51 | ivec2 texel[] = ivec2[]( 52 | hTexel + ivec2(0, 0), 53 | hTexel + ivec2(1, 0), 54 | hTexel + ivec2(0, 1), 55 | hTexel + ivec2(1, 1) 56 | ); 57 | 58 | float weights[] = float[]( 59 | (1.0 - f.x) * (1.0 - f.y), 60 | f.x * (1.0 - f.y), 61 | (1.0 - f.x) * f.y, 62 | f.x * f.y 63 | ); 64 | 65 | vec4 history; 66 | float sum; 67 | 68 | // bilinear sampling, rejecting samples that don't have a matching mesh id 69 | for (int i = 0; i < 4; i++) { 70 | vec2 gCoord = (vec2(texel[i]) + 0.5) * hSizeInv; 71 | 72 | float histMeshId = getMeshId(previousPositionTex, gCoord); 73 | 74 | float isValid = histMeshId != currentMeshId || any(greaterThanEqual(texel[i], hSize)) ? 0.0 : 1.0; 75 | 76 | float weight = isValid * weights[i]; 77 | history += weight * texelFetch(previousLightTex, texel[i], 0); 78 | sum += weight; 79 | } 80 | 81 | if (sum > 0.0) { 82 | history /= sum; 83 | } else { 84 | // If all samples of bilinear fail, try a 3x3 box filter 85 | hTexel = ivec2(hTexelf + 0.5); 86 | 87 | for (int x = -1; x <= 1; x++) { 88 | for (int y = -1; y <= 1; y++) { 89 | ivec2 texel = hTexel + ivec2(x, y); 90 | vec2 gCoord = (vec2(texel) + 0.5) * hSizeInv; 91 | 92 | float histMeshId = getMeshId(previousPositionTex, gCoord); 93 | 94 | float isValid = histMeshId != currentMeshId || any(greaterThanEqual(texel, hSize)) ? 0.0 : 1.0; 95 | 96 | float weight = isValid; 97 | vec4 h = texelFetch(previousLightTex, texel, 0); 98 | history += weight * h; 99 | sum += weight; 100 | } 101 | } 102 | history = sum > 0.0 ? 
history / sum : history; 103 | } 104 | 105 | if (history.w > MAX_SAMPLES) { 106 | history.xyz *= MAX_SAMPLES / history.w; 107 | history.w = MAX_SAMPLES; 108 | } 109 | 110 | out_light = blendAmount * history + currentLight; 111 | } 112 | ` 113 | } 114 | -------------------------------------------------------------------------------- /src/renderer/glsl/toneMap.frag: -------------------------------------------------------------------------------- 1 | import textureLinear from './chunks/textureLinear.glsl'; 2 | 3 | export default { 4 | includes: [textureLinear], 5 | outputs: ['color'], 6 | source: ` 7 | in vec2 vCoord; 8 | 9 | uniform sampler2D lightTex; 10 | uniform sampler2D positionTex; 11 | 12 | uniform vec2 lightScale; 13 | 14 | // Tonemapping functions from THREE.js 15 | 16 | vec3 linear(vec3 color) { 17 | return color; 18 | } 19 | // https://www.cs.utah.edu/~reinhard/cdrom/ 20 | vec3 reinhard(vec3 color) { 21 | return clamp(color / (vec3(1.0) + color), vec3(0.0), vec3(1.0)); 22 | } 23 | // http://filmicworlds.com/blog/filmic-tonemapping-operators/ 24 | #define uncharted2Helper(x) max(((x * (0.15 * x + 0.10 * 0.50) + 0.20 * 0.02) / (x * (0.15 * x + 0.50) + 0.20 * 0.30)) - 0.02 / 0.30, vec3(0.0)) 25 | const vec3 uncharted2WhitePoint = 1.0 / uncharted2Helper(vec3(WHITE_POINT)); 26 | vec3 uncharted2( vec3 color ) { 27 | // John Hable's filmic operator from Uncharted 2 video game 28 | return clamp(uncharted2Helper(color) * uncharted2WhitePoint, vec3(0.0), vec3(1.0)); 29 | } 30 | // http://filmicworlds.com/blog/filmic-tonemapping-operators/ 31 | vec3 cineon( vec3 color ) { 32 | // optimized filmic operator by Jim Hejl and Richard Burgess-Dawson 33 | color = max(vec3( 0.0 ), color - 0.004); 34 | return pow((color * (6.2 * color + 0.5)) / (color * (6.2 * color + 1.7) + 0.06), vec3(2.2)); 35 | } 36 | // https://knarkowicz.wordpress.com/2016/01/06/aces-filmic-tone-mapping-curve/ 37 | vec3 acesFilmic( vec3 color ) { 38 | return clamp((color * (2.51 * color + 0.03)) / 
(color * (2.43 * color + 0.59) + 0.14), vec3(0.0), vec3(1.0)); 39 | } 40 | 41 | #ifdef EDGE_PRESERVING_UPSCALE 42 | 43 | float getMeshId(sampler2D meshIdTex, vec2 vCoord) { 44 | return floor(texture(meshIdTex, vCoord).w); 45 | } 46 | 47 | vec4 getUpscaledLight(vec2 coord) { 48 | float meshId = getMeshId(positionTex, coord); 49 | 50 | vec2 sizef = lightScale * vec2(textureSize(positionTex, 0)); 51 | vec2 texelf = coord * sizef - 0.5; 52 | ivec2 texel = ivec2(texelf); 53 | vec2 f = fract(texelf); 54 | 55 | ivec2 texels[] = ivec2[]( 56 | texel + ivec2(0, 0), 57 | texel + ivec2(1, 0), 58 | texel + ivec2(0, 1), 59 | texel + ivec2(1, 1) 60 | ); 61 | 62 | float weights[] = float[]( 63 | (1.0 - f.x) * (1.0 - f.y), 64 | f.x * (1.0 - f.y), 65 | (1.0 - f.x) * f.y, 66 | f.x * f.y 67 | ); 68 | 69 | vec4 upscaledLight; 70 | float sum; 71 | for (int i = 0; i < 4; i++) { 72 | vec2 pCoord = (vec2(texels[i]) + 0.5) / sizef; 73 | float isValid = getMeshId(positionTex, pCoord) == meshId ? 1.0 : 0.0; 74 | float weight = isValid * weights[i]; 75 | upscaledLight += weight * texelFetch(lightTex, texels[i], 0); 76 | sum += weight; 77 | } 78 | 79 | if (sum > 0.0) { 80 | upscaledLight /= sum; 81 | } else { 82 | upscaledLight = texture(lightTex, lightScale * coord); 83 | } 84 | 85 | return upscaledLight; 86 | } 87 | #endif 88 | 89 | void main() { 90 | #ifdef EDGE_PRESERVING_UPSCALE 91 | vec4 upscaledLight = getUpscaledLight(vCoord); 92 | #else 93 | vec4 upscaledLight = texture(lightTex, lightScale * vCoord); 94 | #endif 95 | 96 | // alpha channel stores the number of samples progressively rendered 97 | // divide the sum of light by alpha to obtain average contribution of light 98 | 99 | // in addition, alpha contains a scale factor for the shadow catcher material 100 | // dividing by alpha normalizes the brightness of the shadow catcher to match the background env map. 
101 | vec3 light = upscaledLight.rgb / upscaledLight.a; 102 | 103 | light *= EXPOSURE; 104 | 105 | light = TONE_MAPPING(light); 106 | 107 | light = pow(light, vec3(1.0 / 2.2)); // gamma correction 108 | 109 | out_color = vec4(light, 1.0); 110 | } 111 | ` 112 | } 113 | -------------------------------------------------------------------------------- /src/renderer/glslUtil.js: -------------------------------------------------------------------------------- 1 | export function unrollLoop(indexName, start, limit, step, code) { 2 | let unrolled = `int ${indexName};\n`; 3 | 4 | for (let i = start; (step > 0 && i < limit) || (step < 0 && i > limit); i += step) { 5 | unrolled += `${indexName} = ${i};\n`; 6 | unrolled += code; 7 | } 8 | 9 | return unrolled; 10 | } 11 | -------------------------------------------------------------------------------- /src/renderer/mergeMeshesToGeometry.js: -------------------------------------------------------------------------------- 1 | import { BufferGeometry, BufferAttribute } from 'three'; 2 | 3 | export function mergeMeshesToGeometry(meshes) { 4 | 5 | let vertexCount = 0; 6 | let indexCount = 0; 7 | 8 | const geometryAndMaterialIndex = []; 9 | const materialIndexMap = new Map(); 10 | 11 | for (const mesh of meshes) { 12 | if (!mesh.visible) { 13 | continue; 14 | } 15 | 16 | const geometry = mesh.geometry.isBufferGeometry ? 
17 | cloneBufferGeometry(mesh.geometry, ['position', 'normal', 'uv']) : // BufferGeometry object 18 | new BufferGeometry().fromGeometry(mesh.geometry); // Geometry object 19 | 20 | const index = geometry.getIndex(); 21 | if (!index) { 22 | addFlatGeometryIndices(geometry); 23 | } 24 | 25 | geometry.applyMatrix(mesh.matrixWorld); 26 | 27 | if (!geometry.getAttribute('normal')) { 28 | geometry.computeVertexNormals(); 29 | } else { 30 | geometry.normalizeNormals(); 31 | } 32 | 33 | vertexCount += geometry.getAttribute('position').count; 34 | indexCount += geometry.getIndex().count; 35 | 36 | const material = mesh.material; 37 | let materialIndex = materialIndexMap.get(material); 38 | if (materialIndex === undefined) { 39 | materialIndex = materialIndexMap.size; 40 | materialIndexMap.set(material, materialIndex); 41 | } 42 | 43 | geometryAndMaterialIndex.push({ 44 | geometry, 45 | materialIndex 46 | }); 47 | } 48 | 49 | const geometry = mergeGeometry(geometryAndMaterialIndex, vertexCount, indexCount); 50 | 51 | return { 52 | geometry, 53 | materials: Array.from(materialIndexMap.keys()) 54 | }; 55 | } 56 | 57 | function mergeGeometry(geometryAndMaterialIndex, vertexCount, indexCount) { 58 | const positionAttrib = new BufferAttribute(new Float32Array(3 * vertexCount), 3, false); 59 | const normalAttrib = new BufferAttribute(new Float32Array(3 * vertexCount), 3, false); 60 | const uvAttrib = new BufferAttribute(new Float32Array(2 * vertexCount), 2, false); 61 | const materialMeshIndexAttrib = new BufferAttribute(new Int32Array(2 * vertexCount), 2, false); 62 | const indexAttrib = new BufferAttribute(new Uint32Array(indexCount), 1, false); 63 | 64 | const mergedGeometry = new BufferGeometry(); 65 | mergedGeometry.addAttribute('position', positionAttrib); 66 | mergedGeometry.addAttribute('normal', normalAttrib); 67 | mergedGeometry.addAttribute('uv', uvAttrib); 68 | mergedGeometry.addAttribute('materialMeshIndex', materialMeshIndexAttrib); 69 | 
mergedGeometry.setIndex(indexAttrib); 70 | 71 | let currentVertex = 0; 72 | let currentIndex = 0; 73 | let currentMesh = 1; 74 | 75 | for (const { geometry, materialIndex } of geometryAndMaterialIndex) { 76 | const vertexCount = geometry.getAttribute('position').count; 77 | mergedGeometry.merge(geometry, currentVertex); 78 | 79 | const meshIndex = geometry.getIndex(); 80 | for (let i = 0; i < meshIndex.count; i++) { 81 | indexAttrib.setX(currentIndex + i, currentVertex + meshIndex.getX(i)); 82 | } 83 | 84 | for (let i = 0; i < vertexCount; i++) { 85 | materialMeshIndexAttrib.setXY(currentVertex + i, materialIndex, currentMesh); 86 | } 87 | 88 | currentVertex += vertexCount; 89 | currentIndex += meshIndex.count; 90 | currentMesh++; 91 | } 92 | 93 | return mergedGeometry; 94 | } 95 | 96 | // Similar to buffergeometry.clone(), except we only copy 97 | // specific attributes instead of everything 98 | function cloneBufferGeometry(bufferGeometry, attributes) { 99 | const newGeometry = new BufferGeometry(); 100 | 101 | for (const name of attributes) { 102 | const attrib = bufferGeometry.getAttribute(name); 103 | if (attrib) { 104 | newGeometry.addAttribute(name, attrib.clone()); 105 | } 106 | } 107 | 108 | const index = bufferGeometry.getIndex(); 109 | if (index) { 110 | newGeometry.setIndex(index); 111 | } 112 | 113 | return newGeometry; 114 | } 115 | 116 | function addFlatGeometryIndices(geometry) { 117 | const position = geometry.getAttribute('position'); 118 | 119 | if (!position) { 120 | console.warn('No position attribute'); 121 | return; 122 | } 123 | 124 | const index = new Uint32Array(position.count); 125 | 126 | for (let i = 0; i < index.length; i++) { 127 | index[i] = i; 128 | } 129 | 130 | geometry.setIndex(new BufferAttribute(index, 1, false)); 131 | 132 | return geometry; 133 | } 134 | -------------------------------------------------------------------------------- /src/renderer/rgbeToFloat.js: 
-------------------------------------------------------------------------------- 1 | // Convert image data from the RGBE format to a 32-bit floating point format 2 | // See https://www.cg.tuwien.ac.at/research/theses/matkovic/node84.html for a description of the RGBE format 3 | // Optional multiplier argument for performance optimization 4 | export function rgbeToFloat(buffer, intensity = 1) { 5 | const texels = buffer.length / 4; 6 | const floatBuffer = new Float32Array(texels * 3); 7 | 8 | const expTable = []; 9 | for (let i = 0; i < 255; i++) { 10 | expTable[i] = intensity * Math.pow(2, i - 128) / 255; 11 | } 12 | 13 | for (let i = 0; i < texels; i++) { 14 | 15 | const r = buffer[4 * i]; 16 | const g = buffer[4 * i + 1]; 17 | const b = buffer[4 * i + 2]; 18 | const a = buffer[4 * i + 3]; 19 | const e = expTable[a]; 20 | 21 | floatBuffer[3 * i] = r * e; 22 | floatBuffer[3 * i + 1] = g * e; 23 | floatBuffer[3 * i + 2] = b * e; 24 | } 25 | 26 | return floatBuffer; 27 | } 28 | -------------------------------------------------------------------------------- /src/renderer/texture/HDR_L_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hoverinc/ray-tracing-renderer/28ecded4324900e8ce3b7dda7831ccd780a67d3c/src/renderer/texture/HDR_L_0.png -------------------------------------------------------------------------------- /src/renderer/texture/noise.js: -------------------------------------------------------------------------------- 1 | export default 
'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEAAAABAEAAAAADfkvJBAAAbsklEQVR4nA3UhQIIvBoA0E830810M91MN9PNdDPd/ulmupluppvpZrqZbqabe89DHCiDv5GzaossZGYBp2PFIFqKdmMXIKW85edCB/RT11SD3JMQidRlL7n2ufRH1jVkFUNVc3NaZ7DP0T7/112kM1Qc3RDG0K/4uN7CPC7OmtFRZK3Jy3fhSSySKIZXopTsnIhN69JjLHJYYnfpZu44hnV+UkhG/lPd/D+fIVwWtdhhupVPJmtsLFIhjHA7UUqY4fPIQ2qdKxviqH2sugJ2nC+1ZdV0vEF3RGNcMd4KdvIXaJnujdPrKj4ifkeX2f04avjEbqO0ogI/rD7zhmy6GKG/2w32IetIX5vE9DbrS+CNy4sbmgXoiaug48lV4bVKZgluwPujd+Ioa+KjuntypepEEvl/YYCYTq6w4aaReGMShwLkC4nvq7jFKJmLpoepHJTag/h2aMklShou+tyip5wm67P2/CnvH7K6zuq+KGvy2rkkrR4mc4dpUNTEFHDId9TXQiST3RxHO0lHNgNFIA/Ub1kC0pOlNBf77EtyZ0ejxvikzySL8C8hNWyyc1GvcBCusv/otvBO3YSj+KvvRlKgoNaF/GEB64prsx8qFRwVJcRmMk8l5E5swfHMPuhlr9DmtrLeqs7KOrCMQSpeGW/zH5F2dc0AXZhcp9IthLZyuxpHrkNnp0JfnsY+55XkAtgSOvsWzps8uoJ5GtpAXRWZ5TK9cEM1WVRWC81ZUstPZHHkC7GDjZfl7BJ+VcXkI8RfVIMW0Jq95oxE0R+MDQnMX97DPhYjEXzHM0LvUNyODhdDCvJdNmXlfFp0RsbBNclTj8hpXofsCgVYsAnwPRTNTiTLxZkQW43BmK6wHk7Y0iSdXIfyK8/aQULdx1/hJc0JkRE/UgNDc/dGZWanTCs2WQ0W6Xh7PZGuDMXEaLtIRMZcZAM4ieOwO661Qf4xVyhLOOA2mLe0JyvIDrBhUA42ioUiMmrHJ9te6jwtbQ6xWrKf/ED3qKJ0qvzO2of57KkcyMBvNZndbLTX/iWNaWTezm9E8cleKOSEXK1B3LDfeGk4yx/b7L5+uAvp6UVC/UYAhvPLvSwTWm+qqO5saYjh79LadBJaAR90ct9S/GGZ7Q1zhKyTOUJ9MzT85IldVjLLduUOqovEaASJbXeZ37oFv0w/sOGhvMzpVrL/2MeQx8+ldfQU/QBXIqn8NtHAHjCzaTJk+CDS0e6Wk8N7GEDgoR4rG5M/Zig/LD6hEr6VHmxzmijoKu/oZ+p84oEeiwegquE7pBZPYXEoyLeQ66wRicLXmOzWoib6mq6KUoWxuriq62OQh647TUmn0RuuIjtPfuEkcMQtwJ/IaJabRRe9fRX2Q8Z1L2UNlMclpfMFdKYr+XkVEeb6vChZuOBfhNl+l/hly9L0/mzYIxPhBq4oimlnB273mkgwnr+S7Vnp8Fff8/3VC7IJCtqZ9AxZRnujo3wjmQ9n7WtayxwgvUhUNtJ0UjlEU9vPFhePxDLfkl6z43hhdQSW+xbyKooJEEwqTOkL1VHWc1vReFaVxbcnTGM2Uq1XNXRPos0bdtI8VBKXcZdCV1dNpLcL3DE7Cqfmi2w5JGhGFqATTUhzy7sG2+a0II4ZtupikC488mt9abdTvpYXVALXBU6wNzYLXUTPQwTxH/nNttjKDA7pQT47mopOQmxzW/f3GVhXWoguEUl5EHcUoKm8LdpiMoZV9JONpzZa7wa7hG4XzxvquHj2s5lsIrFbtrbew3+SKbiK6Ry+whAyXrTBC0kgDfwZHNOMNRnwOjHVVICdOGVo6LuFsn6GTKN6u4IeZqtN7B6vzlegD7ioW8i/u430kbtO2pABrgTPwb+xchSZ7jK/V6KxPEWK+K+oBXFmeuikt+HzrIU66KQsI9bRaGqQfKqSkMNumbnN4
/ljkFsPxqnDElSF32L17D8UhxbUI8xnuwk/0znwXXcGGmD4QpPo5n6kTod70Zb2oI8Y6pFJKiuLoab7bXBEj+CXFTOH4A4kV/1JNjNRLrexaEX5Ht0xQ1RRskzmhCd+rmnFi9hLeqHe7svy7Lq+/+Mq6am+A/X8e+iptvqcbIjzqCOfbW6SpKQ22gPt8HgTFUMPd9kWgKd2O45Pr0EuOlK8waXFfriga7sXrLlKZZbrgeaPnmsrurd+n2H8hugjc+i1OCpJj2vYPyQ27+lT6/f4JM0c6sJIHwm/8AJS4tXuuo6g9qOCjvOZIrI9ZpaaauQAjwb9eTG0RMYPr2y5AHv8YhZLHvZl+DdQqrI5Z1L4QawT/FOLoQCOLR+EyTIrjcqb6YtiA4mg0/L27reYYg7JpvSVOM7G+p2uIb1iJ0hE+/DvvLW+qqfL034nLU5GQh02j8aHi/aDLS2b4ncYk/OcE+V+hhNqmF2rs1j4a1qziXYgaaDWQRetSbOwC60J8VhFSIf62k2osy7FXqpdrDAdZbuQxf5ZOCGLy6Reago9xBydmN9HBdUqX9VtUYdIKZOGbGAFxEDXjLxDmeVXsd5WIOmlhN0kqe2r84o1upy+z9KLRjY/ui5qGkhNiqoL5iXN6hPbeyGa+ckKwRM6l51Ao+EG/yKruXNsrWvHkuDPKKctS4bYRnq7eIQX+at4s8lD2ovy+D/xlXUWuf2jsNiNQx9xDRwjLAgJUSd5AvfTD80U0Qk91fP8DTkBfaXx1Qhv7FMXifZRMw0MlxtxVFVNzoOTrnjoK9ObCZy5HOwjbWgTib1kFo3BJa9t7oojdJK5RpGcifO66LQ2xuIHBvxcnMcLdEoUWc0QjVhs0k3f4dnoXvREODRB5KWJ2UFTX60WcXERxFQ7uo9mDz1YVbzQddDBHQ3QxD0MPfBnsdX+p9+xg+Sybmtum4hKoJW+CG0NGSQxP/TC0AulZ1tozfATr9Ld/QfURp1kg2FqaOQ2QBZ9JNyCoeQfO0eS+SOCa0lLshW6hnulWqHi/qrMTj6Z03gzB/LMzuaXmZXJSUm7nSKACjQDVzafbiNTqUayYpjDNpqhqIzf4SfRU/KF6S+vo0MhAS/v36BoolU4JbKQO3S3nmAL88puH0GoN6tF3vg2rCzscLVcUbmKzHS/dFroBdGk8bP4Hx8DRotKtJdMa4YZKhvR2OgbnULv+lzYUfjhFusD6KaLR8aHFSSPjYmT2MP6tU1L76u4uqJYrqawEqqpW+Onm4G6KIw2CU0Z29/EIc9gKVwjH3wxNV5v8fmxVunIGB94PxYBV+I3RRM4IO8x7Ab6ZXi3aoEeoUXmtzqHVrGCsrUYpOvIFXSMgX4YQp1Qmp6xf/Ae8gR1U19NUzEdSOjApK9nPuoItqt5HE7TXPIm3sff2fm+SbioN9GcPLltyTLKeeGBjGr668sYsfuymdjM8uHjYqL5BLn4SFqRdjbnZJKgyFHIA51lEjEebtEMfqN7LlORlgreiM3B26G2g82iqssbZBQq6k+rGn5J+MMvsVRus95vMpFR9K9K4errLmJFSMO/iepoBu6CfptR4QzqxpOYH6ERP4xmqS4uKzz3V2RS0SnMNwnYKvdW5Bd16FdS0kWlDeQ2VIMEJtgeVJ7GZIdDYQldWQ6UVK2mM1l000/MRyn5GpGZDkRbQ1RUCs/HLcMDV4hV1/OkEZFpRX+f5zfSHGQR7W2obdeiMnK3qQarTK7wEiq5vTqWXayqhyF4By5l6+HDPKK4AZtVRnoHjVBv8Syd1VocyY2UP9g8c15PpXBNVIET8MnVd8/oNlaGcnZJBZoQ7uAe4SjJAWNdX3AkNrQTQ+ClmMxO23i4nXseStC+4agkPDYeChdcOzLRJ2f/2S+ukJqsW/tvKoN4bP5/sOpHxuN5qC3p5VbaizIefWBKkKWkCc+DO5paPAHAP7wQj+VFRVp/zhPy3Ufw+8I4VsE1QVPtS1ZLf6eJ5Qr3Se3Gx
fURld71EhvEHJXVbLdJzUL/2nk6nX1mGcxdXUpvIg2gt7rADrkoYq0ogKbYXyK1pOwljuEO0rykAh5k2pMp6hR7rVO7h3IY2Y6gOYpsBqhWfp/sQcbbZa6m7uge0dx8pUgjd9GY5CyUldNEXX3L5JRLaHP2G5UhDtfnn8Qk3sak8Y1dUR5BatyTnyTR2PWwnCVCZe09NdwLG8tpvl3nJCd8dfzPNFMp1Wb4YuuihKIPWkP2k5I0o4OVJB96wDby2Oy2TAwv9VAxh8dFJ9EvU1S390Pdekx8d0jrxgik35GaLDoeZR7ZhH4IqyzO+/WiNzkkGNrOm8MvN4dmom9kbtuCzgy14K097SrhJuoeDEMJ7CI5Tjwn+3AmfjkUQpXUTR+DzdDPKVRgh23w1c0MUoI1EYchky6st4hefmS4bhZhr5vJ9/QYfUpbywukv9iib4S8msMqOE6iqH86px6L3oubJike6fJBB1ODDTZb6V+fAvapLL6DTGQ+2hm2k1svL8litoeKxZaRIXq2/U3HsDb6ghQBJqP4OB29iP4Lv/FaVZlctV9QM5tC1UGRbCWRBSfQs/UOFAGtlhX8VJJMLTD7VQY6HRU23ehdXAYlJHN5FlkRvXQHdDzx2I8Lx1A3sxTd8MXdOjVKH4BCOp2pIx6zrHwar6qO6uYB3FaXXdYNycNXCUNlY9TFLwq5SFuemg60UdhieVa8hml4v/2sHOsDNV1JGM5zmx/U2qKhk/lq+7jXaCuuYxaTPba1OuMHhY16GiuJVonzKBUtjEDVtwPxJP+cXUaRfD/1w5zS0Ulr9DXcQPnIK39Xdgkn+WJahGzGkI1cda/xFhfNn6KP1R7c2Y4JZSBnWK26kkJhs51E/tGk8m5oInvSjOI5risjuorqlI8X0oZh+JmKQeuhn7KLjKmvmd6iCVnIKtMH5KOM6zGu5nP5hmixMLo8Ge0P6jWyD0ukR7F0lqIPEMc/gv0OIsqZvCSug8eZ964gnYXr+LsqPmojHrG0apiIzg6TtkyHc7BHIDzTXuL/yQ38Dhsnm5OPfCorYK/LFTKPOU4xr+m/6WzydVCmPWwM5+UuN9e1Ce/8TRbfdJVzbCrWQJTUO+R8V5Ouh6m6T2jpqllYDfew5Ylcb1teraRxUFb8xxp6zFWH+eqtbIhzomc+DRunqvv3doVoKfOEJGoRKilzmAt4B69k+0FyN0m2ED5ss6NkNLTbn1LDAmHU/QDBj5oU8j9cxLxi2dUd+z5E8RfNT9NUHvApzRU/Bv1R0MEPlER9Nzuhpb/lhmsLxUJfP8EkYWdUCbyW3QzlbTco4AfhKEDNUfeY7pLt8U/a063mUaGD+4wtofwtmo0L2WWqlSxHErH0aDltYsbwqHqNq2CnuJ3qdKjJh/hlYYrsKLKwwTy2eOnzyrIMB1A0rmhiNc3Iz9tkvJt44ZqhJQ70F+jhW8CIgNQuO49/Q8bcJ5NxWlaVj6Yx/VVIZWeY2uK+zuw3hSEhIu2hE5NLfiC9p//I7vq6i6+fioJwF2Uyf2lzHoGt521FPlUJrH+AioQzvJtcJnaGEwHewSXxGFExyX7y81hVsQGng6shr9lG74TM5KdX/LyLIevpKyin6sz/Qj/0MjTQh2g594Yct6NVPL5QNUC3QlX/RR3hOXE9th5Nhf2hBswWfdVZVJsvMQNoGnOVfvNx6Qudgo9Ra/hMVJV8wdF1XQwFSYqwzgxjkVQ9kS+cZjHEhzAK6qMKYlZIjg+ZGqIvykCWBy4T0dlkBykCq33WsIAOAoJaQjH/V5w1uekes5plQOPRfBuTFmGvWRueVX9VW2V7GcccoE90CTSW7cXzaU+9hdflUeUTkk001/PDCAnbTRXb2h4jPeCZ2O0Gh1JuOu2M97PnZjBd6QrJDuqBL60+kuH4BK+Fo8uzLjmaoO4Z4DvsCpZM9DJtlWKvUEnVmTVVj/SOUFmOxBHCZV7CJJETIKA8rIuZKavxzKaxvQSlxD/exg9g1
30ifoH20pBJPKAz2F+bwyVUq2Qrd98mshdVNhVTtjJXSFx4wzegSfhAKECfcY1u4Wamu3pPqogO+Fu4bifDU1MZRfepxAh8EeLYn0i4Ey6NWwYD4Yhp6hfK8uiGimFPubcsYXiI/nO58QmN5V4+zm1kpdl3AtoeFLF0MT0Wbqk5KJ37rmqFTWYR+4vLsGN4BM3uGoYUJgLv5irINGiw+upKhA3qOIxkiQjVGfR+uo7dRAv4B1WLbqApcD472903Hz2T6/0jmR6G0xWmEWz2g3U7uYZF1FNgKX7PK5p85lXoGMBAMzzA17Kb+EnZmFfk/eghNI4W9r1pGjGZ14YvbIHcHQbYy/Cbb0FTcW61x83ySGRGjc0SOC/qqKE+p28MfV0hfJhNV0P4VdGQdICcYrKPz/Lb306IfSKl+66z83LiKPokGeuq4pI5oqFMzY6FSQC50RXxgifnnckXEUfkZS9kFNJCn0b38Q4aWXRRt2Rl/pLMkll4fdwuPNaRXW11xT1lBdE2KfBblwAdDz/dNhIJtSZZzFtdWq+BqHZPKB8ukbZwCkf0Ne19X1hMFAvsLZIWFyPGnTe36TC9Ej8U5Tkk8J/0Ai9JpnCJ7iLz+VWzFqqEdyaXGqSWk8I4vYovWonifKW2Iok7p8boFaozGsinis86MpknWoeJoazD4OW5UEXvcxNoUvdDdDdP5Ag7V2xypbHy/eGcjY56yF2qGQwUz1xSaE2jit++h9mpYZpqYwuYyrAGT+QlXDsjVSrUXcwiiaCxfsYOm2lmszyrh4tY/LbrY9+GQqK8+SdSyYO2qsmqbvEi+old7nrCaL1Ed7Gx8B05gJ82C1FGFds3FM9tDvUJa9E4vNJVZTLzy89i2dg4sLQmFMGZ8TkH61lUf4Q94D1xRPTYMZst/IK9vjhskJdJeTdKfXNMdOfvVR5eDS3STUlGczIYHEvdhxZ2LR1ud/NYpqYIMqEs7P6yTbIpz8eru61QjH4mg1AybF17mgESqAN4PRnl8uvTsBpT9SlsJ4tgBKtjIZXua36TRmirSIo+iqX8FIol7pKx5CNEox1EdpGC3WWR5C4/Qf+wm3Rc9Z+fhdraPGi8KsWdT0Y7idMylzVwldSXGf1MeGZSiFGe+1tin67kr6ixag26TYYaSi771i5ueEjr+U4+neqPY6H37KaEFzBGFqfpuZIXUEsyIJST01xd2walDwvtGd0Xr7al/ALSXKbRNHSh1/xe9cHVDs+1hv7ul6xPX5ppZAjlZm446vuIsuiiW+rf8Yhmil+Bc0N3Ej3UxAXcTzWdZxEhaN3HRJaX5VMyyR3jLXxZDTnkbrsM3cA1eD52UGL2imx3xA7FB2wN+c9Opo3UG3rZDeIn9Wz2kCfTRVwEesH2oCn0MRHFzZWZcHm4y8GmVp/4BBzd7pXZbBd+3Kehjfw/N0duh2e4hTmuouCuvjrbo4uZaX5DqOyT+PxsJXTBMIOfstFd2/BF/8fnyximG1rFk/Bb6AWOywqHHSYhPhjy0zjuOWSndcUAMwVVtGtDZrFT1FCF+Bboxaz+wYujXVBNPSRt3TBel3xHhVk/9xASyFLqjEhr+/FFxMh7YiKktkftn5CDNDW7xTd7kcU1MJRWMm9Vb55YbVIl5D36BxqFk6osFmqjl8GTjLp7qCnHWMPa24NoufkdWuo7+j/zxUx0N+hbaBqQW6VGia52kcsnkb1p1/I5vgo26CIertrZgMfT8jqxrkeJfAMtwmAWX95Uo/g814vXll5BStHMzzG50EN8RE4g1WgWNNwtUpG10jl8S1zZvvfT7Urzi5eCKOEtweoMJWKejoFKoTY0TliqpCCU+WsqI7ywhpzipVFyeKKikfE+o63t11qguWAP/Wau6OEQE52l5dkq3BGeqwimFMnktyn4J4uoS3aNakAj8XbqStjpC/nXpL354q/zo3SxATjjuEtpr7H5uiodjVHoivbLhvoxnCDdMdZn/RMz0x/k0UIz3lv/E
dN0K3pYdrO72VeeH24La2aqJ7wjWeFLhjlus/jC89FaKC05oN6biWqpgGjYshGQTpdTP8ggEQ9mkuTmgqglsFkrE4UBUNreIbnEMHcE9xRN8P2wlZTjr0xKv1HOEvn531ApJFLt1WdXRk/UKSyjmdxIkke903Ftc7EEC1PVDiaNfToRT/c2j0km6I6mKqcW44GqobuOOyp4goU26hWewpfxE/QZaoo2+L50vx5N8rmG/IefiDeJeuqDiAUFwjqeWX3VU11fdoFn04N9PVhNJoSdZoDMztbZ42YhfaMvueW4Irkmp+sS+hlJLmL5y6aI2KYvhGr6kG1kopid1vuiNlY4aXO5KhJmmTo8AWmF8/qUugcq5rLxb7gCiunu2jnQhZ2C2CGD6gw71CMzw13kQ0xEVogsZdVtHHjLD4j7LiIvxpxswLwYRguoCG6H7isSi/qwwQ0Rp8U4/IeuNq/oSDsDfto8dJx9ExJJyVqwX3S9Hi2TazjLCsNtu1984NXMdnbPLbaTdCv1Xpf02+UTqMZe8QWquBlDKoeEtp3e6+qTa7gV+SnG+VIhOeWop/0g56o0EFf+QC1wOdwRPyJH1U/AvgPJYffZMqEtzo4jhfoiKdOyrT7uqqA1NIvricqK3ei1gBW8DwE5zM8Jl3CCUC8MRpH0EbscEoihOptLBntDP+/CH5RWLkfvQhn1TCahR/w201XcYEvUGZbJbnajXRWyh/Xgt/TqkIBOcEXkPBsZHtiaaKlMbWbDSdGf7ab3aSl51fe3qf3nMM3e9vF5W5/BwQT/21ZQ611W2YGPtb8hHbuuiBP+nG6Op6HVqJUlEMUexs1YH5qbTBILRCY2nORVUeh0V1X/hwrwJuy5u2KWupx0Bj1NXtBsuKkezra58+Ez9NGN1R3x0VRindg7mRGZMA8XNOd4jXCIL+IfXYMAN3RSbVUT+oTFdmfMOl1R72SvPQtpwl95zZUxn+g9MtnVMOvDbXVcRnOd+Hr6iDcWH0g6/xRvD99FYtwJR/YlbD05AmFUneyl71x3W17k8xNRMrnJR1djaUGxlsThY6ARjgBPUSc7kkeH/GQIKilgG+8KRCv8mVLcW+Z300I7NBzNJ0XZZhSR1OPSLmHdMOJF8Wf5HzD9K5zFFXG/sFIewu1RPFSOrULH1JTwUR1UMdUvNQAv5jHwTb3KxuWt8StXkuz3mfklNIcc0z3DPyhn9opkrClsVI/xqRBbwytYQq7gQTYNXi4bmGPyjk+CYuiHfj8fp3vDMZ+QZSRvzW6Yq7OilGQHFMfx3GyZXBa2DMa7S2YeuWeHyMy6p3lo29LNtDR3rq5Ljf+RI2guPkcHy9rkF2mJEvvqNI+4jRUs50FfgWy+u5uDaynIAq15dF4tPIB9KIp8L7PDUv1NVoWWJht6iQrIdfgcLu05vsbHBkGc5mECeyC2spv8F4rG++C80ICkoNXwOlIwXEOJzSyX23UIU0h/mklVoY9lfNdVL/E36VD20u4QbVxm6GeKyfGkEvrFUqPR/H9s/XjiBWp1EAAAAABJRU5ErkJggg=='; -------------------------------------------------------------------------------- /src/renderer/texture/readme.txt: -------------------------------------------------------------------------------- 1 | noise.js is a 64x64 pixel precomputed blue noise texture. 
// Retrieve textures used by meshes, grouping textures from meshes shared by *the same* mesh property.
// Each texture name gets its own deduplicated texture list plus per-material indices into it.
export function getTexturesFromMaterials(meshes, textureNames) {
  const textureMap = {};

  for (const name of textureNames) {
    const textures = [];
    const indices = texturesFromMaterials(meshes, name, textures);
    textureMap[name] = { indices, textures };
  }

  return textureMap;
}

// Retrieve textures used by meshes, grouping textures from meshes shared *across all* mesh properties.
// All texture names share one deduplicated texture list; indices are kept per property.
export function mergeTexturesFromMaterials(meshes, textureNames) {
  const textures = [];
  const indices = {};

  for (const name of textureNames) {
    indices[name] = texturesFromMaterials(meshes, name, textures);
  }

  return { textures, indices };
}

// For each material, append the index of its texture (under textureName) within
// the shared `textures` array, adding the texture when first encountered.
// Materials without a loaded texture (no image yet) are recorded as -1.
function texturesFromMaterials(materials, textureName, textures) {
  const indices = [];

  for (const material of materials) {
    const texture = material[textureName];

    if (!(texture && texture.image)) {
      // Texture missing or not yet loaded.
      indices.push(-1);
      continue;
    }

    let index = textures.indexOf(texture);
    if (index === -1) {
      // New texture: add it to the shared list.
      index = textures.length;
      textures.push(texture);
    }

    indices.push(index);
  }

  return indices;
}
// Clamp x to the inclusive range [min, max].
export function clamp(x, min, max) {
  return Math.min(Math.max(x, min), max);
}

// Fisher-Yates shuffle. Mutates and returns the same array.
export function shuffle(arr) {
  for (let i = arr.length - 1; i > 0; i--) {
    const j = Math.floor(Math.random() * (i + 1));
    const x = arr[i];
    arr[i] = arr[j];
    arr[j] = x;
  }
  return arr;
}

// Element-wise approximate equality of two numeric arrays within tolerance eps.
// Previously a length mismatch could be reported as equal, because comparing
// against undefined yields NaN and `NaN > eps` is false; guard against that.
export function numberArraysEqual(a, b, eps = 1e-4) {
  if (a.length !== b.length) {
    return false;
  }

  for (let i = 0; i < a.length; i++) {
    if (Math.abs(a[i] - b[i]) > eps) {
      return false;
    }
  }

  return true;
}
import { partition, nthElement } from 'src/renderer/bvhUtil';

describe('partition', () => {

  const compare = x => x < 5;

  test(
    'reorders the elements in the range [start, end) in such a way that\n' +
    'all elements for which the comparator f returns true\n' +
    'precede the elements for which comparator f returns false', () => {

    const arr = [6, 2, 7, 7, 2, 3, 6, 8, 1, 2, 1, 5, 2];

    const start = 1;
    const end = arr.length - 1;

    // index of the first element of the "false" section of the partition
    const splitIndex = partition(arr, compare, start, end);

    arr.slice(start, splitIndex).forEach((value) => {
      expect(compare(value)).toBe(true);
    });
    arr.slice(splitIndex, end).forEach((value) => {
      expect(compare(value)).toBe(false);
    });
  });

  test('reorders entire array by default', () => {
    const arr = [5, 1, 2, 3, 8, 1];

    const splitIndex = partition(arr, compare);

    arr.forEach((value, i) => {
      expect(compare(value)).toBe(i < splitIndex);
    });
  });
});

describe('nthElement', () => {

  const compare = (a, b) => a <= b;
  const sortCompare = (a, b) => a <= b ? -1 : 1;

  test(
    'partially sorts between [first, last) such that\n' +
    'the element pointed at by nth is changed to whatever element would occur in that position if [first, last) were sorted\n' +
    'and all of the elements before this new nth element are less than or equal to the elements after the new nth element', () => {

    const arr = [6, 2, 5, 7, 2, 3, 6, 8, 1, 2, 1, 5, 2];

    const start = 1;
    const end = arr.length - 1;
    const nth = 5;

    const sortedArr = arr.slice(start, end).sort(sortCompare);

    nthElement(arr, compare, start, end, nth);

    expect(arr[nth]).toBe(sortedArr[nth - start]);

    for (const before of arr.slice(start, nth)) {
      for (const after of arr.slice(nth, end)) {
        expect(compare(before, after)).toBe(true);
      }
    }
  });

  test('partially sorts entire array with nth = floor(start + end) / 2 by default', () => {
    const arr = [10, 2, 7, 8, 3, 1, 3];

    const nth = Math.floor(arr.length / 2);

    const sortedArr = arr.slice().sort(sortCompare);

    nthElement(arr, compare);

    expect(arr[nth]).toBe(sortedArr[nth]);

    for (const before of arr.slice(0, nth)) {
      for (const after of arr.slice(nth)) {
        expect(compare(before, after)).toBe(true);
      }
    }
  });
});
import { clamp, shuffle } from 'src/renderer/util';

describe('clamp', () => {

  test('returns a number between [min, max]', () => {
    expect(clamp(10, 0, 5)).toBe(5);
    expect(clamp(-10, 0, 5)).toBe(0);
    expect(clamp(3, 0, 5)).toBe(3);
  });

  test('is able to use infinity as bounds', () => {
    expect(clamp(Infinity, 0, 5)).toBe(5);
    expect(clamp(-Infinity, 0, 5)).toBe(0);
    expect(clamp(100, -Infinity, Infinity)).toBe(100);
  });
});

describe('shuffle', () => {
  let arr;

  beforeEach(() => {
    arr = [1, 2, 3, 4, 5];
  });

  test('shuffles array without adding or removing elements', () => {
    const shuffledArr = arr.slice();
    shuffle(shuffledArr);
    // Same length and same multiset of elements in both directions.
    expect(shuffledArr).toHaveLength(arr.length);
    expect(shuffledArr).toEqual(expect.arrayContaining(arr));
    expect(arr).toEqual(expect.arrayContaining(shuffledArr));
  });

  test('shuffles array in-place', () => {
    expect(shuffle(arr)).toBe(arr);
  });
});