├── .editorconfig ├── .eslintrc.json ├── .github ├── FUNDING.yml ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── config.yml │ └── feature_request.md └── workflows │ ├── build-examples.yml │ ├── codeql.yml │ └── node.js.yml ├── .gitignore ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── docs ├── crab.jpg ├── double-threedscans-envmap.png ├── double-threedscans.png ├── interior-scene-cropped.png ├── lego-death-star-white.png ├── lego-egyptian-white.png ├── lego-x-wing-black.png ├── neko-stop.png ├── rover-black.png ├── rover-blue.png ├── rover-orange.png └── statue.jpg ├── example ├── aoRender.html ├── aoRender.js ├── areaLight.html ├── areaLight.js ├── basic.html ├── basic.js ├── depthOfField.html ├── depthOfField.js ├── fog.html ├── fog.js ├── graphing.html ├── graphing.js ├── hdr.html ├── hdr.js ├── index.html ├── index.js ├── interior.html ├── interior.js ├── lego.html ├── libs │ └── libultrahdr.js ├── lkg.html ├── lkg.js ├── materialBall.html ├── materialBall.js ├── materialDatabase.html ├── materialDatabase.js ├── materials │ └── QuiltPreviewMaterial.js ├── overlay.html ├── overlay.js ├── package.json ├── primitives.html ├── primitives.js ├── renderVideo.html ├── renderVideo.js ├── screenshotList.html ├── screenshotList.js ├── skinnedMesh.html ├── skinnedMesh.js ├── spotLights.html ├── spotLights.js ├── utils │ ├── HDRImageGenerator.js │ ├── LoaderElement.js │ ├── MaterialOrbSceneLoader.js │ ├── generateRadialFloorTexture.js │ └── getScaledSettings.js ├── viewerTest.html └── viewerTest.js ├── package-lock.json ├── package.json ├── rollup.config.js ├── scripts ├── push-screenshots.js ├── regression-test.js ├── update-screenshots.js └── utils.js ├── src ├── core │ ├── PathTracingRenderer.js │ ├── PathTracingSceneGenerator.js │ ├── QuiltPathTracingRenderer.js │ ├── WebGLPathTracer.js │ └── utils │ │ ├── BakedGeometry.js │ │ ├── BufferAttributeUtils.js │ │ ├── GeometryPreparationUtils.js │ │ ├── MeshDiff.js │ │ ├── StaticGeometryGenerator.js │ │ ├── 
convertToStaticGeometry.js │ │ ├── mergeGeometries.js │ │ └── sceneUpdateUtils.js ├── detectors │ ├── CompatibilityDetector.js │ ├── MaterialCompileDetector.js │ ├── PrecisionDetector.js │ └── PrecisionMaterial.js ├── index.d.ts ├── index.js ├── materials │ ├── MaterialBase.js │ ├── debug │ │ └── GraphMaterial.js │ ├── fullscreen │ │ ├── AlphaDisplayMaterial.js │ │ ├── BlendMaterial.js │ │ ├── ClampedInterpolationMaterial.js │ │ ├── DenoiseMaterial.js │ │ └── GradientMapMaterial.js │ ├── pathtracing │ │ ├── PhysicalPathTracingMaterial.js │ │ └── glsl │ │ │ ├── attenuate_hit_function.glsl.js │ │ │ ├── camera_util_functions.glsl.js │ │ │ ├── direct_light_contribution_function.glsl.js │ │ │ ├── get_surface_record_function.glsl.js │ │ │ ├── index.js │ │ │ ├── render_structs.glsl.js │ │ │ └── trace_scene_function.glsl.js │ └── surface │ │ ├── AmbientOcclusionMaterial.js │ │ └── FogVolumeMaterial.js ├── objects │ ├── EquirectCamera.js │ ├── PhysicalCamera.js │ ├── PhysicalSpotLight.js │ └── ShapedAreaLight.js ├── shader │ ├── bsdf │ │ ├── bsdf_functions.glsl.js │ │ ├── fog_functions.glsl.js │ │ ├── ggx_functions.glsl.js │ │ ├── index.js │ │ ├── iridescence_functions.glsl.js │ │ └── sheen_functions.glsl.js │ ├── bvh │ │ ├── index.js │ │ ├── inside_fog_volume_function.glsl.js │ │ └── ray_any_hit_function.glsl.js │ ├── common │ │ ├── fresnel_functions.glsl.js │ │ ├── index.js │ │ ├── math_functions.glsl.js │ │ ├── shape_intersection_functions.glsl.js │ │ ├── texture_sample_functions.glsl.js │ │ └── util_functions.glsl.js │ ├── rand │ │ ├── index.js │ │ ├── pcg.glsl.js │ │ ├── sobol.glsl.js │ │ └── stratified.glsl.js │ ├── sampling │ │ ├── equirect_sampling_functions.glsl.js │ │ ├── index.js │ │ ├── light_sampling_functions.glsl.js │ │ └── shape_sampling_functions.glsl.js │ └── structs │ │ ├── camera_struct.glsl.js │ │ ├── equirect_struct.glsl.js │ │ ├── index.js │ │ ├── lights_struct.glsl.js │ │ ├── material_struct.glsl.js │ │ └── surface_record_struct.glsl.js ├── textures 
│ ├── BlueNoiseTexture.js │ ├── GradientEquirectTexture.js │ ├── ProceduralEquirectTexture.js │ └── blueNoise │ │ ├── BlueNoiseGenerator.js │ │ ├── BlueNoiseSamples.js │ │ └── utils.js ├── uniforms │ ├── AttributesTextureArray.js │ ├── EquirectHdrInfoUniform.js │ ├── FloatAttributeTextureArray.js │ ├── LightsInfoUniformStruct.js │ ├── MaterialsTexture.js │ ├── PhysicalCameraUniform.js │ ├── RenderTarget2DArray.js │ ├── StratifiedSamplesTexture.js │ └── stratified │ │ ├── StratifiedSampler.js │ │ └── StratifiedSamplerCombined.js └── utils │ ├── BlurredEnvMapGenerator.js │ ├── CubeToEquirectGenerator.js │ ├── SobolNumberMapGenerator.js │ ├── TextureUtils.js │ ├── UVUnwrapper.js │ ├── bufferToHash.js │ └── macroify.js └── tsconfig.json /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | end_of_line = lf 5 | charset = utf-8 6 | trim_trailing_whitespace = true 7 | insert_final_newline = true 8 | indent_style = tab 9 | insert_final_newline = true 10 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "root": true, 3 | "extends": "./node_modules/eslint-config-mdcs/index.js", 4 | "rules": { 5 | "no-throw-literal": [ 6 | "error" 7 | ], 8 | "prefer-const": [ 9 | "error", 10 | { 11 | "destructuring": "any", 12 | "ignoreReadBeforeAssign": false 13 | } 14 | ], 15 | "quotes": [ "error", "single" ], 16 | "indent": [ "error", "tab" ], 17 | "no-var": [ "error" ] 18 | }, 19 | "overrides": [ 20 | { 21 | "files": ["**/*.d.ts"], 22 | "parser": "@typescript-eslint/parser", 23 | "rules": { 24 | "no-unused-vars": ["error", { "args": "none" }] 25 | } 26 | } 27 | ] 28 | } 29 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are 
supported funding model platforms 2 | 3 | github: gkjohnson 4 | patreon: # Replace with a single Patreon username 5 | open_collective: # Replace with a single Open Collective username 6 | ko_fi: # Replace with a single Ko-fi username 7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel 8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry 9 | liberapay: # Replace with a single Liberapay username 10 | issuehunt: # Replace with a single IssueHunt username 11 | otechie: # Replace with a single Otechie username 12 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] 13 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Report a reproducible bug or regression. 4 | title: '' 5 | labels: 'bug' 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | 12 | **Describe the bug** 13 | 14 | A clear and concise description of what the bug is. Before submitting, please remove unnecessary sections. 15 | 16 | **Expected behavior** 17 | 18 | A clear and concise description of what you expected to happen. 19 | 20 | **Screenshots and Repro Model** 21 | 22 | Add screenshots and a gltf model to help explain your problem (drag and drop). 23 | 24 | **Platform:** 25 | 26 | - Device: [Desktop, Mobile, ...] 27 | - OS: [Windows, MacOS, Linux, Android, iOS, ...] 28 | - GPU: [NVidia ??, Radeon ??, ...] 29 | - Browser: [Chrome, Firefox, Safari, Edge, ...] 30 | - Browser Version: [xx.xx.xx] 31 | - Three.js version: [r???] 32 | - Library version: [v???] 
33 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: true 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for the project. 4 | title: '' 5 | labels: 'enhancement' 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | 12 | **Is your feature request related to a problem? Please describe.** 13 | 14 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 15 | 16 | **Describe the solution you'd like** 17 | 18 | A clear and concise description of what you want to happen. 19 | -------------------------------------------------------------------------------- /.github/workflows/build-examples.yml: -------------------------------------------------------------------------------- 1 | # This workflow will do a clean install of node dependencies, cache/restore them, build the source code and run tests across different versions of node 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions 3 | 4 | name: Build Examples 5 | 6 | on: 7 | push: 8 | branches: [ main ] 9 | 10 | jobs: 11 | build: 12 | 13 | runs-on: ubuntu-latest 14 | 15 | strategy: 16 | matrix: 17 | node-version: [16.x] 18 | 19 | steps: 20 | - uses: actions/checkout@v2 21 | 22 | - name: Use Node.js ${{ matrix.node-version }} 23 | uses: actions/setup-node@v4 24 | with: 25 | node-version: ${{ matrix.node-version }} 26 | cache: 'npm' 27 | - run: npm ci 28 | - run: npm run build-examples 29 | 30 | - name: Commit Examples 31 | uses: EndBug/add-and-commit@v7 32 | with: 33 | add: 'example/bundle' 34 | message: 'update builds' 35 | push: 
'origin HEAD:examples --force' 36 | -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: -------------------------------------------------------------------------------- 1 | name: "CodeQL" 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | pull_request: 7 | branches: [ "main" ] 8 | schedule: 9 | - cron: "19 6 * * 3" 10 | 11 | jobs: 12 | analyze: 13 | name: Analyze 14 | runs-on: ubuntu-latest 15 | permissions: 16 | actions: read 17 | contents: read 18 | security-events: write 19 | 20 | strategy: 21 | fail-fast: false 22 | matrix: 23 | language: [ javascript ] 24 | 25 | steps: 26 | - name: Checkout 27 | uses: actions/checkout@v3 28 | 29 | - name: Initialize CodeQL 30 | uses: github/codeql-action/init@v2 31 | with: 32 | languages: ${{ matrix.language }} 33 | queries: +security-and-quality 34 | 35 | - name: Autobuild 36 | uses: github/codeql-action/autobuild@v2 37 | 38 | - name: Perform CodeQL Analysis 39 | uses: github/codeql-action/analyze@v2 40 | with: 41 | category: "/language:${{ matrix.language }}" 42 | -------------------------------------------------------------------------------- /.github/workflows/node.js.yml: -------------------------------------------------------------------------------- 1 | # This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions 3 | 4 | name: Node.js CI 5 | 6 | on: 7 | push: 8 | branches: [ main ] 9 | pull_request: 10 | branches: [ main ] 11 | 12 | jobs: 13 | build: 14 | 15 | runs-on: ubuntu-latest 16 | 17 | strategy: 18 | matrix: 19 | node-version: [16.x] 20 | 21 | steps: 22 | - uses: actions/checkout@v2 23 | - name: Use Node.js ${{ matrix.node-version }} 24 | uses: actions/setup-node@v4 25 | with: 26 | node-version: ${{ matrix.node-version }} 27 | cache: 'npm' 28 | - run: npm ci 
29 | - run: npm run build 30 | - run: npm run lint 31 | # - run: npm test 32 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | 9 | # Diagnostic reports (https://nodejs.org/api/report.html) 10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 11 | 12 | # Runtime data 13 | pids 14 | *.pid 15 | *.seed 16 | *.pid.lock 17 | 18 | # Directory for instrumented libs generated by jscoverage/JSCover 19 | lib-cov 20 | 21 | # Coverage directory used by tools like istanbul 22 | coverage 23 | *.lcov 24 | 25 | # nyc test coverage 26 | .nyc_output 27 | 28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 29 | .grunt 30 | 31 | # Bower dependency directory (https://bower.io/) 32 | bower_components 33 | 34 | # node-waf configuration 35 | .lock-wscript 36 | 37 | # Compiled binary addons (https://nodejs.org/api/addons.html) 38 | build/Release 39 | 40 | # Dependency directories 41 | node_modules/ 42 | jspm_packages/ 43 | 44 | # TypeScript v1 declaration files 45 | typings/ 46 | 47 | # TypeScript cache 48 | *.tsbuildinfo 49 | 50 | # Optional npm cache directory 51 | .npm 52 | 53 | # Optional eslint cache 54 | .eslintcache 55 | 56 | # Microbundle cache 57 | .rpt2_cache/ 58 | .rts2_cache_cjs/ 59 | .rts2_cache_es/ 60 | .rts2_cache_umd/ 61 | 62 | # Optional REPL history 63 | .node_repl_history 64 | 65 | # Output of 'npm pack' 66 | *.tgz 67 | 68 | # Yarn Integrity file 69 | .yarn-integrity 70 | 71 | # dotenv environment variables file 72 | .env 73 | .env.test 74 | 75 | # parcel-bundler cache (https://parceljs.org/) 76 | .cache 77 | 78 | # Next.js build output 79 | .next 80 | 81 | # Nuxt.js build / generate output 82 | .nuxt 83 | dist 84 | 85 | # Gatsby files 86 | .cache/ 87 | # Comment in the public line in if your project 
uses Gatsby and *not* Next.js 88 | # https://nextjs.org/blog/next-9-1#public-directory-support 89 | # public 90 | 91 | # vuepress build output 92 | .vuepress/dist 93 | 94 | # Serverless directories 95 | .serverless/ 96 | 97 | # FuseBox cache 98 | .fusebox/ 99 | 100 | # DynamoDB Local files 101 | .dynamodb/ 102 | 103 | # TernJS port file 104 | .tern-port 105 | 106 | .parcel-cache 107 | example/dev-bundle 108 | .vscode/ 109 | screenshots/current/ 110 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Thank you for your interest in contributing to the project! 4 | 5 | Contributions of all kinds are welcome including pull requests, issues, and reports of or links to repos using the project! 6 | 7 | ## Filing Issues 8 | 9 | When submitting a bug report try to include a clear, minimal repro case along with the issue. More information means the problem can be fixed faster and better! 10 | 11 | When submitting a feature request please include a well-defined use case and even better if you include code modeling how the new feature could be used with a proposed API! 12 | 13 | Promote discussion! Let's talk about the change and discuss what the best, most flexible option might be. 14 | 15 | ## Developer setup 16 | 17 | To develop and test changes to this library, make sure you have Node and NPM installed. 18 | Check the supported versions in [the test configuration](./.github/workflows/node.js.yml). 19 | 20 | In order to install dependencies, you will need `make` and a C++ compiler available. 21 | On Debian or Ubuntu, run `sudo apt install build-essential`. 22 | 23 | - To install dependencies, run `npm install` 24 | - To start the demos run `npm start` 25 | - Visit "http://localhost:1234/" 26 | 27 | ## Pull Requests 28 | 29 | Keep it simple! 
Code clean up and linting changes should be submitted as separate PRS from logic changes so the impact to the codebase is clear. 30 | 31 | Keep PRs with logic changes to the essential modifications if possible -- people have to read it! 32 | 33 | Open an issue for discussion first so we can have consensus on the change and be sure to reference the issue that the PR is addressing. 34 | 35 | Keep commit messages descriptive. "Update" and "oops" doesn't tell anyone what happened there! 36 | 37 | Don't modify existing commits when responding to PR comments. New commits make it easier to follow what changed. 38 | 39 | ## Code Style 40 | 41 | Follow the `.editorconfig`, `.babelrc`, `.stylelintrc`, and `.htmlhintrc` style configurations included in the repo to keep the code looking consistent. 42 | 43 | Try to keep code as clear as possible! Code for readability! For example longer, descriptive variable names are preferred to short ones. If a line of code includes a lot of nested statements (even just one or two) consider breaking the line up into multiple variables to improve the clarity of what's happening. 44 | 45 | Include comments describing _why_ a change was made. If code was moved from one part of a function to another then tell what happened and why the change got made so it doesn't get moved back. Comments aren't just for others, they're for your future self, too! 
46 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Garrett Johnson 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /docs/crab.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gkjohnson/three-gpu-pathtracer/cf751479cd668dbc69380396b2adc795b8d55ead/docs/crab.jpg -------------------------------------------------------------------------------- /docs/double-threedscans-envmap.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gkjohnson/three-gpu-pathtracer/cf751479cd668dbc69380396b2adc795b8d55ead/docs/double-threedscans-envmap.png -------------------------------------------------------------------------------- /docs/double-threedscans.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gkjohnson/three-gpu-pathtracer/cf751479cd668dbc69380396b2adc795b8d55ead/docs/double-threedscans.png -------------------------------------------------------------------------------- /docs/interior-scene-cropped.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gkjohnson/three-gpu-pathtracer/cf751479cd668dbc69380396b2adc795b8d55ead/docs/interior-scene-cropped.png -------------------------------------------------------------------------------- /docs/lego-death-star-white.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gkjohnson/three-gpu-pathtracer/cf751479cd668dbc69380396b2adc795b8d55ead/docs/lego-death-star-white.png -------------------------------------------------------------------------------- /docs/lego-egyptian-white.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gkjohnson/three-gpu-pathtracer/cf751479cd668dbc69380396b2adc795b8d55ead/docs/lego-egyptian-white.png 
-------------------------------------------------------------------------------- /docs/lego-x-wing-black.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gkjohnson/three-gpu-pathtracer/cf751479cd668dbc69380396b2adc795b8d55ead/docs/lego-x-wing-black.png -------------------------------------------------------------------------------- /docs/neko-stop.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gkjohnson/three-gpu-pathtracer/cf751479cd668dbc69380396b2adc795b8d55ead/docs/neko-stop.png -------------------------------------------------------------------------------- /docs/rover-black.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gkjohnson/three-gpu-pathtracer/cf751479cd668dbc69380396b2adc795b8d55ead/docs/rover-black.png -------------------------------------------------------------------------------- /docs/rover-blue.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gkjohnson/three-gpu-pathtracer/cf751479cd668dbc69380396b2adc795b8d55ead/docs/rover-blue.png -------------------------------------------------------------------------------- /docs/rover-orange.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gkjohnson/three-gpu-pathtracer/cf751479cd668dbc69380396b2adc795b8d55ead/docs/rover-orange.png -------------------------------------------------------------------------------- /docs/statue.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gkjohnson/three-gpu-pathtracer/cf751479cd668dbc69380396b2adc795b8d55ead/docs/statue.jpg -------------------------------------------------------------------------------- /example/aoRender.html: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | Path Traced Ambient Occlusion 4 | 5 | 6 | 40 | 41 | 42 | 43 |
LOADING
44 |
45 |
46 |
--
47 |
48 | 51 |
52 | 53 | 54 | 55 | -------------------------------------------------------------------------------- /example/areaLight.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Area Light Path Tracing 5 | 6 | 7 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /example/basic.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Basic Path Tracing Example 5 | 6 | 7 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /example/basic.js: -------------------------------------------------------------------------------- 1 | import { 2 | ACESFilmicToneMapping, 3 | Scene, 4 | EquirectangularReflectionMapping, 5 | WebGLRenderer, 6 | PerspectiveCamera, 7 | CubeTextureLoader, 8 | } from 'three'; 9 | import { GLTFLoader } from 'three/examples/jsm/loaders/GLTFLoader.js'; 10 | import { OrbitControls } from 'three/examples/jsm/controls/OrbitControls.js'; 11 | import { RGBELoader } from 'three/examples/jsm/loaders/RGBELoader.js'; 12 | import { ParallelMeshBVHWorker } from 'three-mesh-bvh/src/workers/ParallelMeshBVHWorker.js'; 13 | import { getScaledSettings } from './utils/getScaledSettings.js'; 14 | import { LoaderElement } from './utils/LoaderElement.js'; 15 | import { WebGLPathTracer } from '..'; 16 | 17 | const ENV_URL = 'https://raw.githubusercontent.com/gkjohnson/3d-demo-data/master/hdri/chinese_garden_1k.hdr'; 18 | const MODEL_URL = 'https://raw.githubusercontent.com/gkjohnson/3d-demo-data/main/models/terrarium-robots/scene.gltf'; 19 | const CREDITS = 'Model by "nyancube" on Sketchfab'; 20 | const DESCRIPTION = 'Simple path tracing example scene setup with background blur.'; 21 | 22 | let pathTracer, renderer, controls; 23 | let camera, scene; 24 | let loader; 25 | 26 | init(); 27 | 28 | async function init() { 29 | 30 | const { tiles, renderScale } 
= getScaledSettings(); 31 | 32 | loader = new LoaderElement(); 33 | loader.attach( document.body ); 34 | 35 | // renderer 36 | renderer = new WebGLRenderer( { antialias: true } ); 37 | renderer.toneMapping = ACESFilmicToneMapping; 38 | document.body.appendChild( renderer.domElement ); 39 | 40 | // path tracer 41 | pathTracer = new WebGLPathTracer( renderer ); 42 | pathTracer.filterGlossyFactor = 0.5; 43 | pathTracer.renderScale = renderScale; 44 | pathTracer.tiles.set( tiles, tiles ); 45 | pathTracer.setBVHWorker( new ParallelMeshBVHWorker() ); 46 | 47 | // camera 48 | camera = new PerspectiveCamera( 75, 1, 0.025, 500 ); 49 | camera.position.set( 8, 9, 24 ); 50 | 51 | // scene 52 | scene = new Scene(); 53 | scene.backgroundBlurriness = 0.05; 54 | 55 | // controls 56 | controls = new OrbitControls( camera, renderer.domElement ); 57 | controls.target.y = 10; 58 | controls.addEventListener( 'change', () => pathTracer.updateCamera() ); 59 | controls.update(); 60 | 61 | // load the appropriate env 62 | let envPromise; 63 | if ( window.location.hash.includes( 'cube' ) ) { 64 | 65 | const path = 'https://raw.githubusercontent.com/mrdoob/three.js/master/examples/textures/cube/SwedishRoyalCastle/'; 66 | const format = '.jpg'; 67 | const envUrls = [ 68 | path + 'px' + format, path + 'nx' + format, 69 | path + 'py' + format, path + 'ny' + format, 70 | path + 'pz' + format, path + 'nz' + format 71 | ]; 72 | envPromise = new CubeTextureLoader().loadAsync( envUrls ); 73 | 74 | scene.environmentIntensity = 5; 75 | scene.backgroundIntensity = 5; 76 | 77 | } else { 78 | 79 | envPromise = new RGBELoader().loadAsync( ENV_URL ).then( tex => { 80 | 81 | tex.mapping = EquirectangularReflectionMapping; 82 | return tex; 83 | 84 | } ); 85 | 86 | } 87 | 88 | // load the environment map and model 89 | const [ gltf, envTexture ] = await Promise.all( [ 90 | new GLTFLoader().loadAsync( MODEL_URL ), 91 | envPromise, 92 | ] ); 93 | 94 | scene.background = envTexture; 95 | scene.environment = 
envTexture; 96 | scene.add( gltf.scene ); 97 | 98 | // initialize the path tracer 99 | await pathTracer.setSceneAsync( scene, camera, { 100 | onProgress: v => loader.setPercentage( v ), 101 | } ); 102 | 103 | loader.setCredits( CREDITS ); 104 | loader.setDescription( DESCRIPTION ); 105 | 106 | window.addEventListener( 'resize', onResize ); 107 | 108 | onResize(); 109 | animate(); 110 | 111 | } 112 | 113 | function onResize() { 114 | 115 | // update resolution 116 | renderer.setSize( window.innerWidth, window.innerHeight ); 117 | renderer.setPixelRatio( window.devicePixelRatio ); 118 | 119 | camera.aspect = window.innerWidth / window.innerHeight; 120 | camera.updateProjectionMatrix(); 121 | 122 | // update camera 123 | pathTracer.updateCamera(); 124 | 125 | } 126 | 127 | function animate() { 128 | 129 | requestAnimationFrame( animate ); 130 | 131 | pathTracer.renderSample(); 132 | 133 | loader.setSamples( pathTracer.samples, pathTracer.isCompiling ); 134 | 135 | } 136 | -------------------------------------------------------------------------------- /example/depthOfField.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | Camera Depth of Field 4 | 5 | 6 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | -------------------------------------------------------------------------------- /example/fog.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Path Tracing Fog 5 | 6 | 7 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /example/fog.js: -------------------------------------------------------------------------------- 1 | import { 2 | ACESFilmicToneMapping, 3 | Scene, 4 | BoxGeometry, 5 | CylinderGeometry, 6 | Group, 7 | Mesh, 8 | MeshStandardMaterial, 9 | WebGLRenderer, 10 | Color, 11 | } from 'three'; 12 | import { OrbitControls } from 'three/examples/jsm/controls/OrbitControls.js'; 13 | import { PhysicalCamera, 
PhysicalSpotLight, FogVolumeMaterial, WebGLPathTracer } from '../src/index.js'; 14 | import { GUI } from 'three/examples/jsm/libs/lil-gui.module.min.js'; 15 | import { getScaledSettings } from './utils/getScaledSettings.js'; 16 | import { LoaderElement } from './utils/LoaderElement.js'; 17 | 18 | let pathTracer, renderer, controls; 19 | let camera, scene, fogMaterial, spotLight; 20 | let loader; 21 | 22 | const params = { 23 | 24 | multipleImportanceSampling: true, 25 | tiles: 2, 26 | renderScale: 1 / window.devicePixelRatio, 27 | 28 | color: '#eeeeee', 29 | fog: true, 30 | density: 0.01, 31 | lightIntensity: 500, 32 | lightColor: '#ffffff', 33 | 34 | bounces: 10, 35 | 36 | ...getScaledSettings(), 37 | 38 | }; 39 | 40 | init(); 41 | 42 | async function init() { 43 | 44 | loader = new LoaderElement(); 45 | loader.attach( document.body ); 46 | 47 | // renderer 48 | renderer = new WebGLRenderer( { antialias: true } ); 49 | renderer.toneMapping = ACESFilmicToneMapping; 50 | document.body.appendChild( renderer.domElement ); 51 | 52 | // path tracer 53 | pathTracer = new WebGLPathTracer( renderer ); 54 | pathTracer.tiles.set( params.tiles, params.tiles ); 55 | 56 | // camera 57 | const aspect = window.innerWidth / window.innerHeight; 58 | camera = new PhysicalCamera( 75, aspect, 0.025, 500 ); 59 | camera.position.set( 0, 1, 6 ); 60 | 61 | // controls 62 | controls = new OrbitControls( camera, renderer.domElement ); 63 | controls.addEventListener( 'change', () => { 64 | 65 | pathTracer.updateCamera(); 66 | 67 | } ); 68 | 69 | // scene 70 | scene = new Scene(); 71 | scene.background = new Color( 0 ); 72 | 73 | fogMaterial = new FogVolumeMaterial(); 74 | 75 | const material = new MeshStandardMaterial( { color: 0x999999, roughness: 1, metalness: 0 } ); 76 | const fogMesh = new Mesh( new BoxGeometry( 8, 4.05, 8 ), fogMaterial ); 77 | const floor = new Mesh( new CylinderGeometry( 5, 5, 0.1, 40 ), material ); 78 | floor.position.y = - 1.1; 79 | 80 | // prepare light 81 | 
spotLight = new PhysicalSpotLight(); 82 | spotLight.position.set( 0, 1, 0 ).multiplyScalar( 3 ); 83 | spotLight.angle = Math.PI / 4.5; 84 | spotLight.decay = 2; 85 | spotLight.penumbra = 0.15; 86 | spotLight.distance = 0.0; 87 | spotLight.intensity = 50.0; 88 | spotLight.radius = 0.05; 89 | 90 | // prepare slats 91 | const group = new Group(); 92 | group.add( spotLight ); 93 | 94 | const TOTAL_SLATS = 10; 95 | const WIDTH = 2.0; 96 | const slat = new Mesh( new BoxGeometry( 0.1, 0.1, 2 ), material ); 97 | for ( let i = 0; i < TOTAL_SLATS; i ++ ) { 98 | 99 | const s = slat.clone(); 100 | s.position.x = - WIDTH * 0.5 + WIDTH * i / ( TOTAL_SLATS - 1 ); 101 | s.position.y = 2; 102 | group.add( s ); 103 | 104 | } 105 | 106 | scene.add( fogMesh, floor, group ); 107 | pathTracer.setScene( scene, camera ); 108 | 109 | loader.setPercentage( 1 ); 110 | onParamsChange(); 111 | onResize(); 112 | 113 | window.addEventListener( 'resize', onResize ); 114 | 115 | // gui 116 | const gui = new GUI(); 117 | const ptFolder = gui.addFolder( 'Path Tracer' ); 118 | ptFolder.add( params, 'bounces', 1, 20, 1 ).onChange( onParamsChange ); 119 | ptFolder.add( params, 'multipleImportanceSampling' ).onChange( onParamsChange ); 120 | ptFolder.add( params, 'tiles', 1, 4, 1 ).onChange( value => { 121 | 122 | pathTracer.tiles.set( value, value ); 123 | 124 | } ); 125 | ptFolder.add( params, 'renderScale', 0.1, 1 ).onChange( onParamsChange ); 126 | 127 | const fogFolder = gui.addFolder( 'Fog' ); 128 | fogFolder.addColor( params, 'color' ).onChange( onParamsChange ); 129 | fogFolder.add( params, 'density', 0, 1 ).onChange( onParamsChange ); 130 | 131 | const lightFolder = gui.addFolder( 'Spot Light' ); 132 | lightFolder.add( params, 'lightIntensity', 0, 1000 ).onChange( onParamsChange ); 133 | lightFolder.addColor( params, 'lightColor' ).onChange( onParamsChange ); 134 | 135 | animate(); 136 | 137 | } 138 | 139 | function onParamsChange() { 140 | 141 | fogMaterial.color.set( params.color 
).convertSRGBToLinear(); 142 | fogMaterial.density = params.density; 143 | 144 | spotLight.intensity = params.lightIntensity; 145 | spotLight.color.set( params.lightColor ); 146 | 147 | pathTracer.multipleImportanceSampling = params.multipleImportanceSampling; 148 | pathTracer.bounces = params.bounces; 149 | pathTracer.renderScale = params.renderScale; 150 | 151 | pathTracer.updateLights(); 152 | pathTracer.updateMaterials(); 153 | 154 | } 155 | 156 | function onResize() { 157 | 158 | renderer.setSize( window.innerWidth, window.innerHeight ); 159 | renderer.setPixelRatio( window.devicePixelRatio ); 160 | 161 | camera.aspect = window.innerWidth / window.innerHeight; 162 | camera.updateProjectionMatrix(); 163 | 164 | pathTracer.updateCamera(); 165 | 166 | } 167 | 168 | function animate() { 169 | 170 | requestAnimationFrame( animate ); 171 | 172 | pathTracer.renderSample(); 173 | 174 | loader.setSamples( pathTracer.samples, pathTracer.isCompiling ); 175 | 176 | } 177 | 178 | 179 | 180 | -------------------------------------------------------------------------------- /example/graphing.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Shader Graphing Utility 6 | 7 | 8 | 65 | 66 | 67 | 68 |
69 |
TEST
70 |
71 |
Utility for graphing shader function outputs.
72 | 73 | 74 | 75 | 76 | -------------------------------------------------------------------------------- /example/hdr.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | High Dynamic Range Path Tracing Example 5 | 6 | 7 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | -------------------------------------------------------------------------------- /example/interior.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Interior Scene Path Tracing 5 | 6 | 7 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /example/lkg.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | PBR Path Tracing - Looking Glass Portrait Quilt Generation 4 | 5 | 6 | 56 | 57 | 58 | 59 |
LOADING
60 |
61 | Quilt rendering using WebXR to support hologram display on the Looking Glass Portrait display. 62 |
63 |
64 | 65 | See blocks.glass profile for prerendered images. 66 |
67 |
68 |
69 |
--
70 |
71 |
72 |
--
73 |
74 |
75 | 76 | 77 | 78 | -------------------------------------------------------------------------------- /example/materialBall.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Material Orb Path Tracing 5 | 6 | 7 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /example/materialDatabase.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Physically Based Material Database 5 | 6 | 7 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | -------------------------------------------------------------------------------- /example/materials/QuiltPreviewMaterial.js: -------------------------------------------------------------------------------- 1 | import { Vector2, CustomBlending } from 'three'; 2 | import { MaterialBase } from '../../src/materials/MaterialBase.js'; 3 | 4 | export class QuiltPreviewMaterial extends MaterialBase { 5 | 6 | constructor( parameters ) { 7 | 8 | super( { 9 | 10 | depthWrite: false, 11 | blending: CustomBlending, 12 | 13 | uniforms: { 14 | 15 | quiltMap: { value: null }, 16 | quiltDimensions: { value: new Vector2() }, 17 | displayIndex: { value: 0 }, 18 | aspectRatio: { value: 1 }, 19 | heightScale: { value: 0.75 }, 20 | 21 | }, 22 | 23 | vertexShader: /* glsl */` 24 | 25 | varying vec2 vUv; 26 | void main() { 27 | 28 | vec4 mvPosition = vec4( position, 1.0 ); 29 | mvPosition = modelViewMatrix * mvPosition; 30 | gl_Position = projectionMatrix * mvPosition; 31 | 32 | vUv = uv; 33 | 34 | } 35 | 36 | `, 37 | 38 | fragmentShader: /* glsl */` 39 | 40 | varying vec2 vUv; 41 | uniform sampler2D quiltMap; 42 | uniform ivec2 quiltDimensions; 43 | uniform int displayIndex; 44 | uniform float aspectRatio; 45 | uniform float heightScale; 46 | 47 | void main() { 48 | 49 | vec2 tileUv = vUv; 50 | tileUv.x -= ( 1.0 - aspectRatio * heightScale ) * 0.5; 51 | tileUv.x /= aspectRatio; 52 | 53 | tileUv.y -= ( 
1.0 - heightScale ) * 0.5; 54 | tileUv /= heightScale; 55 | 56 | if ( tileUv.x < 0.0 || tileUv.x > 1.0 || tileUv.y < 0.0 || tileUv.y > 1.0 ) { 57 | 58 | gl_FragColor = vec4( 0.05, 0.05, 0.05, 1.0 ); 59 | return; 60 | 61 | } 62 | 63 | ivec2 size = textureSize( quiltMap, 0 ); 64 | vec2 texelWidth = 1.0 / vec2( size ); 65 | vec2 tileTexelHalfWidth = 0.5 * vec2( quiltDimensions ) * texelWidth; 66 | tileUv = max( tileTexelHalfWidth, min( 1.0 - tileTexelHalfWidth, tileUv ) ); 67 | 68 | ivec2 tileIndex = ivec2( 0 ); 69 | tileIndex.x = displayIndex % quiltDimensions.x; 70 | tileIndex.y = ( displayIndex - tileIndex.x ) / quiltDimensions.x; 71 | 72 | vec2 tileWidth = 1.0 / vec2( quiltDimensions ); 73 | vec2 quiltUv = tileWidth * ( vec2( tileIndex ) + tileUv ); 74 | 75 | gl_FragColor = texture( quiltMap, quiltUv ); 76 | 77 | #include 78 | #include 79 | 80 | } 81 | 82 | `, 83 | 84 | } ); 85 | 86 | this.setValues( parameters ); 87 | 88 | } 89 | 90 | } 91 | 92 | -------------------------------------------------------------------------------- /example/overlay.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Overlay Path Tracing 5 | 6 | 7 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /example/package.json: -------------------------------------------------------------------------------- 1 | {} 2 | -------------------------------------------------------------------------------- /example/primitives.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | Basic Primitives Path Tracing Example 4 | 5 | 6 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | -------------------------------------------------------------------------------- /example/primitives.js: -------------------------------------------------------------------------------- 1 | import { Scene, SphereGeometry, MeshStandardMaterial, Mesh, BoxGeometry, PerspectiveCamera, 
ACESFilmicToneMapping, WebGLRenderer } from 'three'; 2 | import { WebGLPathTracer, GradientEquirectTexture } from '..'; 3 | import { getScaledSettings } from './utils/getScaledSettings.js'; 4 | 5 | // init scene, renderer, camera, controls, etc 6 | const scene = new Scene(); 7 | const sphereGeom = new SphereGeometry( 0.49, 64, 32 ); 8 | const ball1 = new Mesh( 9 | sphereGeom, 10 | new MeshStandardMaterial( { 11 | color: '#e91e63', 12 | roughness: 0.25, 13 | metalness: 1, 14 | } ) 15 | ); 16 | const ball2 = new Mesh( 17 | sphereGeom, 18 | new MeshStandardMaterial( { 19 | color: '#ff9800', 20 | roughness: 0.1, 21 | metalness: 1, 22 | } ) 23 | ); 24 | const ball3 = new Mesh( 25 | sphereGeom, 26 | new MeshStandardMaterial( { 27 | color: '#2196f3', 28 | roughness: 0.2, 29 | metalness: 1, 30 | } ) 31 | ); 32 | const ground = new Mesh( 33 | new BoxGeometry( 3.5, 0.1, 1.5 ), 34 | new MeshStandardMaterial(), 35 | ); 36 | 37 | ball1.position.x = - 1; 38 | ball3.position.x = 1; 39 | ground.position.y = - 0.54; 40 | scene.add( ball1, ball2, ball3, ground ); 41 | 42 | // set the environment map 43 | const texture = new GradientEquirectTexture(); 44 | texture.bottomColor.set( 0xffffff ); 45 | texture.bottomColor.set( 0x666666 ); 46 | texture.update(); 47 | scene.environment = texture; 48 | scene.background = texture; 49 | 50 | const camera = new PerspectiveCamera(); 51 | camera.position.set( 0, 1, - 5 ); 52 | camera.lookAt( 0, 0, 0 ); 53 | 54 | const renderer = new WebGLRenderer( { antialias: true } ); 55 | renderer.toneMapping = ACESFilmicToneMapping; 56 | document.body.appendChild( renderer.domElement ); 57 | 58 | const settings = getScaledSettings(); 59 | const pathTracer = new WebGLPathTracer( renderer ); 60 | pathTracer.renderScale = settings.renderScale; 61 | pathTracer.tiles.setScalar( settings.tiles ); 62 | pathTracer.setScene( scene, camera ); 63 | 64 | onResize(); 65 | 66 | animate(); 67 | 68 | window.addEventListener( 'resize', onResize ); 69 | 70 | function animate() 
{ 71 | 72 | // if the camera position changes call "ptRenderer.reset()" 73 | requestAnimationFrame( animate ); 74 | 75 | // update the camera and render one sample 76 | pathTracer.renderSample(); 77 | 78 | } 79 | 80 | function onResize() { 81 | 82 | // update rendering resolution 83 | const w = window.innerWidth; 84 | const h = window.innerHeight; 85 | 86 | renderer.setSize( w, h ); 87 | renderer.setPixelRatio( window.devicePixelRatio ); 88 | 89 | const aspect = w / h; 90 | camera.aspect = aspect; 91 | camera.updateProjectionMatrix(); 92 | 93 | pathTracer.setScene( scene, camera ); 94 | 95 | } 96 | -------------------------------------------------------------------------------- /example/renderVideo.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Rendering a Path Traced Video 5 | 6 | 7 | 40 | 41 | 42 | 43 |
44 | Rendering and recording multiple path traced frames to an animation. 45 |
46 | Press "record" to begin rendering and recording a video to a webm. 47 |
48 | 49 | 50 | 51 | 52 | -------------------------------------------------------------------------------- /example/screenshotList.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | Model Viewer Fidelity Tests 4 | 5 | 6 | 71 | 72 | 73 | 74 |
75 | 88 | 92 |
93 |
94 | 95 | 96 | 97 | -------------------------------------------------------------------------------- /example/screenshotList.js: -------------------------------------------------------------------------------- 1 | const CONFIG_URL = 'https://raw.githubusercontent.com/google/model-viewer/master/packages/render-fidelity-tools/test/config.json'; 2 | const COMMITS_URL = 'https://api.github.com/repos/gkjohnson/three-gpu-pathtracer/commits?sha=screenshots'; 3 | 4 | ( async () => { 5 | 6 | const containerEl = document.getElementById( 'container' ); 7 | const { scenarios } = await ( await fetch( CONFIG_URL ) ).json(); 8 | const commits = await ( await fetch( COMMITS_URL ) ).json(); 9 | const latestSha = commits[ 0 ].sha; 10 | let imageType = window.location.hash.replace( /^#/, '' ) || 'model-viewer'; 11 | 12 | const selectionBox = document.querySelector( 'select' ); 13 | selectionBox.value = imageType; 14 | selectionBox.addEventListener( 'change', () => { 15 | 16 | window.location.hash = selectionBox.value; 17 | imageType = selectionBox.value; 18 | rebuildList(); 19 | 20 | } ); 21 | document.body.style.visibility = 'visible'; 22 | 23 | const largeImageBox = document.querySelector( 'input[type="checkbox"]' ); 24 | largeImageBox.addEventListener( 'change', () => { 25 | 26 | if ( largeImageBox.checked ) { 27 | 28 | containerEl.classList.add( 'large-images' ); 29 | 30 | } else { 31 | 32 | containerEl.classList.remove( 'large-images' ); 33 | 34 | } 35 | 36 | } ); 37 | 38 | rebuildList(); 39 | 40 | function rebuildList() { 41 | 42 | containerEl.innerHTML = ''; 43 | scenarios.forEach( s => { 44 | 45 | const name = s.name; 46 | const url1 = `https://raw.githubusercontent.com/gkjohnson/three-gpu-pathtracer/${ latestSha }/screenshots/golden/${ name }.png`; 47 | let url2; 48 | if ( imageType === 'prior-commit' ) { 49 | 50 | url2 = `https://raw.githubusercontent.com/gkjohnson/three-gpu-pathtracer/${ commits[ 1 ].sha }/screenshots/golden/${ name }.png`; 51 | 52 | } else { 53 | 54 | 
url2 = `https://raw.githubusercontent.com/google/model-viewer/master/packages/render-fidelity-tools/test/goldens/${ name }/${ imageType }-golden.png`; 55 | 56 | } 57 | 58 | containerEl.innerHTML += ` 59 |
60 |

${ s.name }

61 |
62 | 63 | 64 |
65 |
66 | `; 67 | 68 | } ); 69 | 70 | } 71 | 72 | } )(); 73 | -------------------------------------------------------------------------------- /example/skinnedMesh.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Skinned Mesh Rendering 5 | 6 | 7 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /example/spotLights.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Path Tracing with Spot Lights 5 | 6 | 7 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /example/utils/HDRImageGenerator.js: -------------------------------------------------------------------------------- 1 | import { compress, encode, findTextureMinMax } from '@monogrid/gainmap-js/dist/encode.js'; 2 | import { encodeJPEGMetadata } from '../libs/libultrahdr.js'; 3 | import { FloatType, LinearSRGBColorSpace, RGBAFormat } from 'three'; 4 | 5 | export class HDRImageGenerator { 6 | 7 | get completeImage() { 8 | 9 | return this._lastUrl !== null; 10 | 11 | } 12 | 13 | constructor( renderer, imageElement = new Image() ) { 14 | 15 | this.renderer = renderer; 16 | this.image = imageElement; 17 | this.encoding = false; 18 | this._lastUrl = null; 19 | this._encodingId = - 1; 20 | 21 | } 22 | 23 | async updateFrom( renderTarget ) { 24 | 25 | if ( this.encoding ) { 26 | 27 | throw new Error( 'HDRImageGenerator: HDR image already being encoded.' 
); 28 | 29 | } 30 | 31 | const renderer = this.renderer; 32 | const buffer = new Float32Array( renderTarget.width * renderTarget.height * 4 ); 33 | renderer.readRenderTargetPixels( renderTarget, 0, 0, renderTarget.width, renderTarget.height, buffer ); 34 | 35 | const imageInformation = { 36 | header: {}, 37 | width: renderTarget.width, 38 | height: renderTarget.height, 39 | data: buffer, 40 | format: RGBAFormat, 41 | colorSpace: LinearSRGBColorSpace, 42 | type: FloatType, 43 | 44 | }; 45 | 46 | 47 | this._encodingId ++; 48 | this.encoding = true; 49 | 50 | const currentId = this._encodingId; 51 | const jpegData = await encodeHDR( imageInformation ); 52 | 53 | if ( this._encodingId === currentId ) { 54 | 55 | if ( this._lastUrl ) { 56 | 57 | URL.revokeObjectURL( this._lastUrl ); 58 | 59 | } 60 | 61 | const blob = new Blob( [ jpegData ], { type: 'octet/stream' } ); 62 | this._lastUrl = URL.createObjectURL( blob ); 63 | this.image.src = this._lastUrl; 64 | this.encoding = false; 65 | 66 | } 67 | 68 | } 69 | 70 | reset() { 71 | 72 | if ( this.encoding ) { 73 | 74 | this.encoding = false; 75 | this._encodingId ++; 76 | 77 | } 78 | 79 | if ( this._lastUrl ) { 80 | 81 | URL.revokeObjectURL( this._lastUrl ); 82 | this.image.src = ''; 83 | this._lastUrl = null; 84 | 85 | } 86 | 87 | } 88 | 89 | } 90 | 91 | 92 | 93 | async function encodeHDR( image ) { 94 | 95 | // find RAW RGB Max value of a texture 96 | const textureMax = await findTextureMinMax( image ); 97 | 98 | // Encode the gainmap 99 | const encodingResult = encode( { 100 | image, 101 | // this will encode the full HDR range 102 | maxContentBoost: Math.max.apply( this, textureMax ) || 1 103 | } ); 104 | 105 | // obtain the RAW RGBA SDR buffer and create an ImageData 106 | const sdrImageData = new ImageData( 107 | encodingResult.sdr.toArray(), 108 | encodingResult.sdr.width, 109 | encodingResult.sdr.height 110 | ); 111 | // obtain the RAW RGBA Gain map buffer and create an ImageData 112 | const gainMapImageData = new 
ImageData( 113 | encodingResult.gainMap.toArray(), 114 | encodingResult.gainMap.width, 115 | encodingResult.gainMap.height 116 | ); 117 | 118 | // parallel compress the RAW buffers into the specified mimeType 119 | const mimeType = 'image/jpeg'; 120 | const quality = 0.9; 121 | 122 | const [ sdr, gainMap ] = await Promise.all( [ 123 | compress( { 124 | source: sdrImageData, 125 | mimeType, 126 | quality, 127 | flipY: true // output needs to be flipped 128 | } ), 129 | compress( { 130 | source: gainMapImageData, 131 | mimeType, 132 | quality, 133 | flipY: true // output needs to be flipped 134 | } ) 135 | ] ); 136 | 137 | // obtain the metadata which will be embedded into 138 | // and XMP tag inside the final JPEG file 139 | const metadata = encodingResult.getMetadata(); 140 | 141 | // embed the compressed images + metadata into a single 142 | // JPEG file 143 | const jpegBuffer = await encodeJPEGMetadata( { 144 | ...encodingResult, 145 | ...metadata, 146 | sdr, 147 | gainMap 148 | } ); 149 | 150 | return jpegBuffer; 151 | 152 | } 153 | -------------------------------------------------------------------------------- /example/utils/LoaderElement.js: -------------------------------------------------------------------------------- 1 | let _styleElement; 2 | function initializeStyles() { 3 | 4 | if ( _styleElement ) { 5 | 6 | return; 7 | 8 | } 9 | 10 | _styleElement = document.createElement( 'style' ); 11 | _styleElement.textContent = /* css */` 12 | 13 | .loader-container, .description { 14 | position: absolute; 15 | width: 100%; 16 | font-family: 'Courier New', Courier, monospace; 17 | color: white; 18 | font-weight: light; 19 | align-items: flex-start; 20 | font-size: 14px; 21 | pointer-events: none; 22 | user-select: none; 23 | } 24 | 25 | .loader-container { 26 | display: flex; 27 | flex-direction: column; 28 | bottom: 0; 29 | } 30 | 31 | .description { 32 | top: 0; 33 | width: 100%; 34 | text-align: center; 35 | padding: 5px 0; 36 | } 37 | 38 | .loader-container 
.bar { 39 | height: 2px; 40 | background: white; 41 | width: 100%; 42 | } 43 | 44 | .loader-container .credits, 45 | .loader-container .samples, 46 | .loader-container .percentage { 47 | padding: 5px; 48 | margin: 0 0 1px 1px; 49 | background: rgba( 0, 0, 0, 0.2 ); 50 | border-radius: 2px; 51 | display: inline-block; 52 | } 53 | 54 | .loader-container:not(.loading) .bar, 55 | .loader-container:not(.loading) .percentage, 56 | .loader-container.loading .credits, 57 | .loader-container.loading .samples, 58 | .loader-container .credits:empty { 59 | display: none; 60 | } 61 | 62 | .loader-container .credits a, 63 | .loader-container .credits, 64 | .loader-container .samples { 65 | color: rgba( 255, 255, 255, 0.75 ); 66 | } 67 | `; 68 | document.head.appendChild( _styleElement ); 69 | 70 | } 71 | 72 | export class LoaderElement { 73 | 74 | constructor() { 75 | 76 | initializeStyles(); 77 | 78 | const container = document.createElement( 'div' ); 79 | container.classList.add( 'loader-container' ); 80 | 81 | const percentageEl = document.createElement( 'div' ); 82 | percentageEl.classList.add( 'percentage' ); 83 | container.appendChild( percentageEl ); 84 | 85 | const samplesEl = document.createElement( 'div' ); 86 | samplesEl.classList.add( 'samples' ); 87 | container.appendChild( samplesEl ); 88 | 89 | const creditsEl = document.createElement( 'div' ); 90 | creditsEl.classList.add( 'credits' ); 91 | container.appendChild( creditsEl ); 92 | 93 | const loaderBarEl = document.createElement( 'div' ); 94 | loaderBarEl.classList.add( 'bar' ); 95 | container.appendChild( loaderBarEl ); 96 | 97 | const descriptionEl = document.createElement( 'div' ); 98 | descriptionEl.classList.add( 'description' ); 99 | container.appendChild( descriptionEl ); 100 | 101 | this._description = descriptionEl; 102 | this._loaderBar = loaderBarEl; 103 | this._percentage = percentageEl; 104 | this._credits = creditsEl; 105 | this._samples = samplesEl; 106 | this._container = container; 107 | 108 | 
this.setPercentage( 0 ); 109 | 110 | } 111 | 112 | attach( container ) { 113 | 114 | container.appendChild( this._container ); 115 | container.appendChild( this._description ); 116 | 117 | } 118 | 119 | setPercentage( perc ) { 120 | 121 | this._loaderBar.style.width = `${ perc * 100 }%`; 122 | 123 | if ( perc === 0 ) { 124 | 125 | this._percentage.innerText = 'Loading...'; 126 | 127 | } else { 128 | 129 | this._percentage.innerText = `${ ( perc * 100 ).toFixed( 0 ) }%`; 130 | 131 | } 132 | 133 | if ( perc >= 1 ) { 134 | 135 | this._container.classList.remove( 'loading' ); 136 | 137 | } else { 138 | 139 | this._container.classList.add( 'loading' ); 140 | 141 | } 142 | 143 | } 144 | 145 | setSamples( count, compiling = false ) { 146 | 147 | if ( compiling ) { 148 | 149 | this._samples.innerText = 'compiling shader...'; 150 | 151 | } else { 152 | 153 | this._samples.innerText = `${ Math.floor( count ) } samples`; 154 | 155 | } 156 | 157 | } 158 | 159 | setCredits( credits ) { 160 | 161 | this._credits.innerHTML = credits; 162 | 163 | } 164 | 165 | setDescription( description ) { 166 | 167 | this._description.innerHTML = description; 168 | 169 | } 170 | 171 | } 172 | -------------------------------------------------------------------------------- /example/utils/MaterialOrbSceneLoader.js: -------------------------------------------------------------------------------- 1 | import { MeshPhysicalMaterial, RectAreaLight } from 'three'; 2 | import { GLTFLoader } from 'three/examples/jsm/loaders/GLTFLoader.js'; 3 | 4 | // TODO: this scene should technically be rendered at a 1000x smaller scale 5 | 6 | const ORB_SCENE_URL = 'https://raw.githubusercontent.com/gkjohnson/3d-demo-data/main/models/usd-shader-ball/usd-shaderball-scene.glb'; 7 | function assignWatts( light, watts ) { 8 | 9 | // https://github.com/will-ca/glTF-Blender-IO/blob/af9e7f06508a95425b05e485fa83681b268bbdfc/addons/io_scene_gltf2/blender/exp/gltf2_blender_gather_lights.py#L92-L97 10 | const PBR_WATTS_TO_LUMENS 
= 683; 11 | const area = light.width * light.height; 12 | const lumens = PBR_WATTS_TO_LUMENS * watts; 13 | light.intensity = lumens / ( area * 4 * Math.PI ); 14 | 15 | } 16 | 17 | export class MaterialOrbSceneLoader { 18 | 19 | constructor( manager ) { 20 | 21 | this.manager = manager; 22 | 23 | } 24 | 25 | loadAsync( url = ORB_SCENE_URL, ...rest ) { 26 | 27 | return new GLTFLoader( this.manager ) 28 | .loadAsync( url, ...rest ) 29 | .then( gltf => { 30 | 31 | const { 32 | scene, 33 | cameras, 34 | } = gltf; 35 | 36 | const leftLight = new RectAreaLight( 0xffffff, 1, 15, 15 ); 37 | assignWatts( leftLight, 6327.84 ); 38 | scene.getObjectByName( 'light' ).add( leftLight ); 39 | 40 | for ( let i = 0; i < 4; i ++ ) { 41 | 42 | const light = new RectAreaLight( 0xffffff, 1, 24.36, 24.36 ); 43 | assignWatts( light, 11185.5 ); 44 | scene.getObjectByName( 'light' + i ).add( light ); 45 | 46 | } 47 | 48 | // TODO: why is this necessary? 49 | const camera = cameras[ 0 ]; 50 | camera.fov *= 2.0; 51 | camera.updateProjectionMatrix(); 52 | 53 | // some objects in the scene use 16 bit float vertex colors so we disable them here 54 | scene.traverse( c => { 55 | 56 | if ( c.material ) { 57 | 58 | c.material.vertexColors = false; 59 | 60 | } 61 | 62 | } ); 63 | 64 | const material = new MeshPhysicalMaterial(); 65 | scene.getObjectByName( 'material_surface' ).material = material; 66 | 67 | return { 68 | 69 | material, 70 | camera, 71 | scene, 72 | 73 | }; 74 | 75 | } ); 76 | 77 | } 78 | 79 | } 80 | -------------------------------------------------------------------------------- /example/utils/generateRadialFloorTexture.js: -------------------------------------------------------------------------------- 1 | import * as THREE from 'three'; 2 | 3 | export function generateRadialFloorTexture( dim ) { 4 | 5 | const data = new Uint8Array( dim * dim * 4 ); 6 | 7 | for ( let x = 0; x < dim; x ++ ) { 8 | 9 | for ( let y = 0; y < dim; y ++ ) { 10 | 11 | const xNorm = x / ( dim - 1 ); 12 | 
const yNorm = y / ( dim - 1 ); 13 | 14 | const xCent = 2.0 * ( xNorm - 0.5 ); 15 | const yCent = 2.0 * ( yNorm - 0.5 ); 16 | let a = Math.max( Math.min( 1.0 - Math.sqrt( xCent ** 2 + yCent ** 2 ), 1.0 ), 0.0 ); 17 | a = a ** 2; 18 | a = a * 1.5; 19 | a = Math.min( a, 1.0 ); 20 | 21 | const i = y * dim + x; 22 | data[ i * 4 + 0 ] = 255; 23 | data[ i * 4 + 1 ] = 255; 24 | data[ i * 4 + 2 ] = 255; 25 | data[ i * 4 + 3 ] = a * 255; 26 | 27 | } 28 | 29 | } 30 | 31 | const tex = new THREE.DataTexture( data, dim, dim ); 32 | tex.format = THREE.RGBAFormat; 33 | tex.type = THREE.UnsignedByteType; 34 | tex.minFilter = THREE.LinearFilter; 35 | tex.magFilter = THREE.LinearFilter; 36 | tex.wrapS = THREE.RepeatWrapping; 37 | tex.wrapT = THREE.RepeatWrapping; 38 | tex.needsUpdate = true; 39 | return tex; 40 | 41 | } 42 | -------------------------------------------------------------------------------- /example/utils/getScaledSettings.js: -------------------------------------------------------------------------------- 1 | export function getScaledSettings() { 2 | 3 | let tiles = 3; 4 | let renderScale = Math.max( 1 / window.devicePixelRatio, 0.5 ); 5 | 6 | // adjust performance parameters for mobile 7 | const aspectRatio = window.innerWidth / window.innerHeight; 8 | if ( aspectRatio < 0.65 ) { 9 | 10 | tiles = 4; 11 | renderScale = 0.5 / window.devicePixelRatio; 12 | 13 | } 14 | 15 | return { tiles, renderScale }; 16 | 17 | } 18 | -------------------------------------------------------------------------------- /example/viewerTest.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | Model Viewer Fidelity Tests 4 | 5 | 6 | 41 | 42 | 43 | 44 |
45 | 46 |
47 | 48 | 49 | 50 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "three-gpu-pathtracer", 3 | "version": "0.0.23", 4 | "description": "Path tracing renderer and utilities for three.js built on top of three-mesh-bvh.", 5 | "module": "src/index.js", 6 | "main": "build/index.umd.cjs", 7 | "type": "module", 8 | "types": "src/index.d.ts", 9 | "sideEffects": false, 10 | "files": [ 11 | "src/*", 12 | "build/*" 13 | ], 14 | "keywords": [ 15 | "webgl", 16 | "threejs", 17 | "performance", 18 | "geometry", 19 | "graphics", 20 | "mesh", 21 | "renderer", 22 | "raytracing", 23 | "bvh", 24 | "three-js", 25 | "path-tracing", 26 | "three-mesh-bvh", 27 | "rtx", 28 | "path-tracer", 29 | "path", 30 | "tracer" 31 | ], 32 | "devDependencies": { 33 | "@lookingglass/webxr": "^0.3.1", 34 | "@monogrid/gainmap-js": "^3.0.5", 35 | "@types/node": "^20.12.7", 36 | "@types/three": "^0.163.0", 37 | "@typescript-eslint/parser": "^6.21.0", 38 | "canvas-capture": "^2.0.5", 39 | "eslint": "^7.32.0", 40 | "eslint-config-mdcs": "^5.0.0", 41 | "node-fetch": "^3.2.9", 42 | "parcel": "^2.12.0", 43 | "pixelmatch": "^5.3.0", 44 | "pngjs": "^6.0.0", 45 | "process": "^0.11.10", 46 | "puppeteer": "^15.4.0", 47 | "rollup": "^2.70.0", 48 | "simple-git": "^3.10.0", 49 | "three": "^0.163.0", 50 | "three-mesh-bvh": "^0.7.4", 51 | "typescript": "5.3.3", 52 | "yargs": "^17.5.1" 53 | }, 54 | "peerDependencies": { 55 | "three": ">=0.151.0", 56 | "three-mesh-bvh": ">=0.7.4", 57 | "xatlas-web": "^0.1.0" 58 | }, 59 | "scripts": { 60 | "start": "cd example && parcel serve ./*.html --dist-dir ./dev-bundle/ --no-cache --no-hmr", 61 | "build-examples": "cd example && parcel build ./*.html --dist-dir ./bundle/ --public-url . 
--no-cache --no-content-hash", 62 | "update-screenshots": "node ./scripts/push-screenshots.js", 63 | "screenshot-diff": "node ./scripts/regression-test.js", 64 | "build": "rollup -c", 65 | "lint": "eslint \"./src/**/*.{js,ts}\" \"./example/*.js\" && tsc --noEmit", 66 | "prepublishOnly": "npm run build" 67 | }, 68 | "repository": { 69 | "type": "git", 70 | "url": "git+https://github.com/gkjohnson/three-gpu-pathtracer.git" 71 | }, 72 | "author": "Garrett Johnson ", 73 | "license": "MIT", 74 | "bugs": { 75 | "url": "https://github.com/gkjohnson/three-gpu-pathtracer/issues" 76 | }, 77 | "homepage": "https://github.com/gkjohnson/three-gpu-pathtracer#readme" 78 | } 79 | -------------------------------------------------------------------------------- /rollup.config.js: -------------------------------------------------------------------------------- 1 | export default [ 2 | { 3 | input: './src/index.js', 4 | treeshake: false, 5 | external: p => /^three/.test( p ), 6 | output: { 7 | 8 | name: 'ThreePathTracer', 9 | extend: true, 10 | format: 'umd', 11 | file: './build/index.umd.cjs', 12 | sourcemap: true, 13 | 14 | globals: p => { 15 | 16 | if ( /^three-mesh-bvh/.test( p ) ) { 17 | 18 | return 'MeshBVHLib'; 19 | 20 | } else if ( /^three/.test( p ) ) { 21 | 22 | return 'THREE'; 23 | 24 | } 25 | 26 | return null; 27 | 28 | }, 29 | }, 30 | }, 31 | { 32 | input: './src/index.js', 33 | treeshake: false, 34 | external: p => /^three/.test( p ), 35 | output: { 36 | format: 'esm', 37 | file: './build/index.module.js', 38 | sourcemap: true, 39 | }, 40 | } 41 | ]; 42 | -------------------------------------------------------------------------------- /scripts/push-screenshots.js: -------------------------------------------------------------------------------- 1 | import simpleGit from 'simple-git'; 2 | import { exec } from 'child_process'; 3 | import { runScript } from './utils.js'; 4 | 5 | ( async() => { 6 | 7 | const git = simpleGit(); 8 | const status = await git.status(); 9 | const 
currentBranch = status.current; 10 | 11 | if ( currentBranch !== 'main' ) { 12 | 13 | console.error( 'Current branch is not set to main' ); 14 | process.exit( 1 ); 15 | 16 | } 17 | 18 | const modified = status.modified.length + status.created.length + status.renamed.length + status.deleted.length; 19 | if ( modified !== 0 ) { 20 | 21 | console.error( 'Current branch is not clean' ); 22 | process.exit( 1 ); 23 | 24 | } 25 | 26 | // switch and rebase branches 27 | console.log( 'Switching to "screenshots" branch' ); 28 | await git.checkout( 'screenshots' ); 29 | 30 | console.log( 'Merging in "main"' ); 31 | await git.merge( [ 'main' ] ); 32 | 33 | await exec( 'rm -rf ./screenshots/golden' ); 34 | 35 | // rebuild the screenshots 36 | await runScript( 'node ./scripts/update-screenshots.js ' + process.argv.slice( 2 ).join( ' ' ) ); 37 | 38 | // commit and push the screenshots 39 | console.log( 'Committing all screenshots.' ); 40 | try { 41 | 42 | await git 43 | .add( './screenshots/golden/' ) 44 | .commit( 'update screenshots' ); 45 | 46 | console.log( 'Pushing commit.' 
/**
 * Regression test runner: renders the screenshot set on the current branch,
 * switches to the "screenshots" branch where the golden images live, and
 * compares the fresh renders against the golden set.
 * Exits non-zero when any comparison fails.
 */
import simpleGit from 'simple-git';
import { compareImages, compareImageDirectories, runScript } from './utils.js';
import yargs from 'yargs';
import path from 'path';

// Maximum allowed fraction of differing pixels per image pair.
const DIFF_THRESHOLD = 0.01;
// Per-pixel color difference threshold passed through to pixelmatch.
const PIXEL_THRESHOLD = 0.01;
const argv = yargs( process.argv.slice( 2 ) )
	.usage( 'Usage: $0 [options]' )
	.option( 'diff-path', {
		describe: 'Output file for saving out an image diff.',
		alias: 'd',
		type: 'string',
	} )
	.option( 'scenario', {
		describe: 'The name of one scenario to run.',
		alias: 's',
		type: 'string'
	} )
	.argv;

( async () => {

	// a diff image can only be written when a single scenario is compared
	if ( argv[ 'diff-path' ] && ! argv.scenario ) {

		console.error( 'Cannot save diff of multiple scenarios.' );
		process.exit( 1 );

	}

	const git = simpleGit();
	const status = await git.status();
	const currentBranch = status.current;

	// refuse to run with local modifications since we check out another branch below
	const modified = status.modified.length + status.created.length + status.renamed.length + status.deleted.length;
	if ( modified !== 0 ) {

		console.error( 'Current branch is not clean' );
		process.exit( 1 );

	}

	// rebuild the screenshots
	console.log( 'Building screenshots' );
	const { scenario } = argv;
	let options = '-o ./screenshots/current/';
	if ( scenario ) {

		options += ` -s ${ scenario }`;

	}

	try {

		await runScript( `node ./scripts/update-screenshots.js ${ options }` );

	} catch {

		process.exit( 1 );

	}

	// switch branches so the golden screenshots are available on disk
	console.log( 'Switching to "screenshots" branch' );
	await git.checkout( 'screenshots' );

	const rootPath = path.resolve( process.cwd(), './screenshots/' );
	let failed = false;
	if ( scenario ) {

		// if there's only one scenario then diff the one file
		console.log( `Comparing "${ scenario }" screenshots.` );

		const diffPath = argv[ 'diff-path' ] ? path.resolve( process.cwd(), argv[ 'diff-path' ] ) : null;
		const diff = compareImages(
			path.resolve( rootPath, `./golden/${ scenario }.png` ),
			path.resolve( rootPath, `./current/${ scenario }.png` ),
			PIXEL_THRESHOLD,
			diffPath
		);

		// "diff" is the ratio of differing pixels, so it is compared against the
		// image-level threshold, matching compareImageDirectories' diffThreshold.
		if ( diff > DIFF_THRESHOLD ) {

			failed = true;

		}

		console.log( `\t${ failed ? 'Fail' : 'Pass' }: Images are ${ ( 100 * diff ).toFixed( 2 ) }% different` );

	} else {

		// Diff the set of images in both folders. NOTE: this must use
		// "compareImageDirectories" - "compareImages" expects two png FILE paths
		// and would treat the fourth argument as a diff output path.
		failed = compareImageDirectories(
			path.resolve( rootPath, './golden/' ),
			path.resolve( rootPath, './current/' ),
			PIXEL_THRESHOLD,
			DIFF_THRESHOLD,
		);

	}

	// reset git
	console.log( `Switching back to "${ currentBranch }" branch` );
	await git.checkout( currentBranch );

	process.exit( failed ? 1 : 0 );

} )();
console.log( 'Running test page service' ); 44 | exec( 'npm run start' ); 45 | 46 | fs.mkdirSync( folderPath, { recursive: true } ); 47 | 48 | try { 49 | 50 | if ( argv.scenario ) { 51 | 52 | const scenario = scenarios.find( s => s.name === argv.scenario ); 53 | if ( ! scenario ) { 54 | 55 | console.error( `Scenario "${ argv.scenario }" does not exist.` ); 56 | process.exit( 1 ); 57 | 58 | } else { 59 | 60 | await saveScreenshot( scenario, folderPath ); 61 | 62 | } 63 | 64 | } else { 65 | 66 | for ( const key in scenarios ) { 67 | 68 | const scenario = scenarios[ key ]; 69 | if ( excludeList.includes( scenario.name ) ) { 70 | 71 | console.log( `Skipping ${ scenario.name }` ); 72 | 73 | } else { 74 | 75 | console.log( `Rendering ${ scenario.name }` ); 76 | await saveScreenshot( scenario, folderPath ); 77 | 78 | } 79 | 80 | } 81 | 82 | } 83 | 84 | console.log( `\nTotal Time: ${ ( 1e-3 * totalTime ).toFixed( 2 ) }s` ); 85 | process.exit( 0 ); 86 | 87 | } catch ( e ) { 88 | 89 | console.error( e ); 90 | process.exit( 1 ); 91 | 92 | } 93 | 94 | } )(); 95 | 96 | async function saveScreenshot( scenario, targetFolder ) { 97 | 98 | const name = scenario.name; 99 | const args = argv.headless ? 
[ '--use-gl=egl', '--headless' ] : []; 100 | const browser = await puppeteer.launch( { 101 | 102 | defaultViewport: null, 103 | args, 104 | headless: argv.headless, 105 | 106 | } ); 107 | 108 | const page = await browser.newPage(); 109 | 110 | await page.goto( `http://localhost:1234/viewerTest.html?hideUI=true&scale=1&tiles=4&samples=${ SAMPLES }#${ name }` ); 111 | 112 | try { 113 | 114 | const startTime = performance.now(); 115 | await page.evaluate( () => { 116 | 117 | return new Promise( ( resolve, reject ) => { 118 | 119 | const TIMEOUT = 240000; 120 | const handle = setTimeout( () => { 121 | 122 | reject( new Error( `Failed to render in ${ ( 1e-3 * TIMEOUT ).toFixed( 2 ) }s.` ) ); 123 | 124 | }, TIMEOUT ); 125 | 126 | self.addEventListener( 'render-complete', () => { 127 | 128 | clearTimeout( handle ); 129 | resolve(); 130 | 131 | }, { once: true } ); 132 | 133 | } ); 134 | 135 | } ); 136 | 137 | const deltaTime = performance.now() - startTime; 138 | console.log( `\tin ${ ( 1e-3 * deltaTime ).toFixed( 2 ) }s` ); 139 | totalTime += deltaTime; 140 | 141 | } catch ( e ) { 142 | 143 | console.error( e.message ); 144 | await browser.close(); 145 | return; 146 | 147 | } 148 | 149 | // https://stackoverflow.com/questions/11335460/how-do-i-parse-a-data-url-in-node 150 | // https://stackoverflow.com/questions/65914988/how-to-save-a-canvas-as-an-image-using-puppeteer 151 | const dataUrl = await page.evaluate( () => { 152 | 153 | const canvas = document.querySelector( 'canvas' ); 154 | return canvas.toDataURL(); 155 | 156 | } ); 157 | 158 | const [ info, data ] = dataUrl.split( ',' ); 159 | const [ , ext ] = info.match( /^data:.+\/(.+);base64/ ); 160 | const buffer = Buffer.from( data, 'base64' ); 161 | fs.writeFileSync( `${ targetFolder }/${ name }.${ ext }`, buffer ); 162 | 163 | await browser.close(); 164 | 165 | } 166 | -------------------------------------------------------------------------------- /scripts/utils.js: 
// Runs a given command with stdio piped through to this process.
// Resolves on exit code 0, rejects with an Error (carrying the exit code)
// otherwise so callers can safely read e.message.
export function runScript( command ) {

	return new Promise( ( resolve, reject ) => {

		const proc = exec( command );
		proc.stderr.pipe( process.stderr );
		proc.stdout.pipe( process.stdout );
		proc.on( 'exit', code => {

			if ( code === 0 ) resolve();
			else reject( new Error( `Command "${ command }" exited with code ${ code }` ) );

		} );

	} );

}

// Compares every same-named image in two directories, logging a pass / fail
// line per pair. Returns true when at least one comparison failed.
export function compareImageDirectories( path1, path2, pixelThreshold = 0.1, diffThreshold = 0.1 ) {

	let failures = 0;
	let total = 0;
	const files = fs.readdirSync( path1 );
	for ( const f of files ) {

		const fileName = path.basename( f );
		total ++;

		if ( fs.existsSync( path.resolve( path2, fileName ) ) ) {

			console.log( `Comparing "${ fileName }" screenshots.` );
			const diff = compareImages(
				path.resolve( path1, fileName ),
				path.resolve( path2, fileName ),
				pixelThreshold,
			);

			let status = 'Pass';
			if ( diff > diffThreshold ) {

				status = 'Fail';
				failures ++;

			}

			console.log( `\t${ status }: Images are ${ ( 100 * diff ).toFixed( 2 ) }% different` );

		} else {

			console.error( `File "${ fileName }" does not exist in both directories.` );

		}

	}

	console.log( `${ failures } out of ${ total } comparisons failed.` );

	return failures !== 0;

}

// Compares the PNGs at the given paths and returns the ratio of differing
// pixels. Optionally writes a visual diff image to "diffPath".
export function compareImages( path1, path2, threshold = 0.1, diffPath = null ) {

	// Checks if two files exist before diffing
	if ( ! fs.existsSync( path1 ) || ! fs.existsSync( path2 ) ) {

		throw new Error( `File "${ path.basename( path1 ) }" does not exist in both directories.` );

	}

	// Reads the two files
	const img1 = PNG.sync.read( fs.readFileSync( path1 ) );
	const img2 = PNG.sync.read( fs.readFileSync( path2 ) );

	const { width, height } = img1;
	const diff = new PNG( { width, height } );

	// checks the diff
	const diffPixels = pixelmatch( img1.data, img2.data, diff.data, width, height, { threshold } );

	// writes the pixels out if path is provided.
	if ( diffPath ) {

		const buffer = PNG.sync.write( diff, { colorType: 6 } );
		fs.writeFileSync( diffPath, buffer );

	}

	// returns the ratio of different pixels
	return diffPixels / ( width * height );

}
// Helper for extracting the camera projection, offset, and quilt subframe needed
// for rendering a quilt with the provided parameters.
class QuiltViewUtility {

	constructor() {

		// quilt layout
		this.viewCount = 48;
		this.quiltDimensions = new Vector2( 8, 6 );

		// display and camera parameters
		this.viewCone = 35 * MathUtils.DEG2RAD;
		this.viewFoV = 14 * MathUtils.DEG2RAD;
		this.displayDistance = 1;
		this.displayAspect = 0.75;
		this.near = 0.01;
		this.far = 10;

	}

	// Computes the camera offset, off-axis projection matrix, and quilt subframe
	// for view index "i", writing the results into "target" and returning it.
	getCameraViewInfo( i, target = {} ) {

		const {
			quiltDimensions,
			viewCone,
			displayDistance,
			viewCount,
			viewFoV,
			displayAspect,
			near,
			far,
		} = this;

		// initialize defaults
		if ( ! target.subframe ) target.subframe = new Vector4();
		if ( ! target.offsetDirection ) target.offsetDirection = new Vector3();
		if ( ! target.projectionMatrix ) target.projectionMatrix = new Matrix4();

		// camera offset: views are distributed evenly across the view cone
		const coneHalfWidth = Math.tan( 0.5 * viewCone ) * displayDistance;
		const viewStride = ( coneHalfWidth * 2.0 ) / ( viewCount - 1 );
		const viewOffset = viewCount === 1 ? 0 : viewStride * i - coneHalfWidth;
		target.offsetDirection.set( 1.0, 0, 0 );
		target.offset = viewOffset;

		// off-axis projection matrix sheared back toward the display plane
		const halfHeight = Math.tan( viewFoV * 0.5 ) * displayDistance;
		const halfWidth = displayAspect * halfHeight;
		const nearScale = near / displayDistance;
		target.projectionMatrix.makePerspective(
			nearScale * ( - halfWidth - viewOffset ), nearScale * ( halfWidth - viewOffset ),
			nearScale * halfHeight, nearScale * - halfHeight,
			near, far,
		);

		// subframe of this view within the quilt atlas
		const column = i % quiltDimensions.x;
		const row = Math.floor( i / quiltDimensions.x );
		const frameWidth = 1 / quiltDimensions.x;
		const frameHeight = 1 / quiltDimensions.y;
		target.subframe.set( column * frameWidth, row * frameHeight, frameWidth, frameHeight );

		return target;

	}

	// Derives aspect, distance, and field of view from a physical display description.
	setFromDisplayView( viewerDistance, displayWidth, displayHeight ) {

		this.displayAspect = displayWidth / displayHeight;
		this.displayDistance = viewerDistance;
		this.viewFoV = 2.0 * Math.atan( 0.5 * displayHeight / viewerDistance );

	}

}
// Geometry that is regenerated ("baked") from a source mesh only when the mesh
// actually changed, tracked via a MeshDiff snapshot.
export class BakedGeometry extends BufferGeometry {

	constructor() {

		super();

		// bumped every time the geometry contents are regenerated
		this.version = 0;

		// unique id for the current contents; null until the first update
		this.hash = null;

		// snapshot of the last mesh state this geometry was baked from
		this._diff = new MeshDiff();

	}

	// returns whether the passed mesh is compatible with this baked geometry
	// such that it can be updated without resizing attributes
	isCompatible( mesh, attributes ) {

		const meshAttributes = mesh.geometry.attributes;
		for ( const name of attributes ) {

			const source = meshAttributes[ name ];
			const baked = this.attributes[ name ];

			// only attributes present on the source mesh need to line up
			if ( source && ! validateAttributes( source, baked ) ) {

				return false;

			}

		}

		return true;

	}

	// Rebakes from the given mesh if it changed since the last call.
	// Returns whether an update was actually performed.
	updateFrom( mesh, options ) {

		const diff = this._diff;
		if ( ! diff.didChange( mesh ) ) {

			return false;

		}

		convertToStaticGeometry( mesh, options, this );
		diff.updateFrom( mesh );
		this.version ++;
		this.hash = `${ this.uuid }_${ this.version }`;
		return true;

	}

}
// Fills the "materialIndex" vertex attribute on the geometry so that every
// vertex stores the index of its material within "allMaterials".
// "materials" may be a single material or the mesh's material array indexed
// by each group's materialIndex.
export function updateMaterialIndexAttribute( geometry, materials, allMaterials ) {

	const indexAttr = geometry.index;
	const vertCount = geometry.attributes.position.count;
	const totalCount = indexAttr ? indexAttr.count : vertCount;

	// treat an ungrouped geometry as a single group spanning everything
	let groups = geometry.groups;
	if ( groups.length === 0 ) {

		groups = [ { count: totalCount, start: 0, materialIndex: 0 } ];

	}

	// (re)allocate the attribute when missing or wrongly sized, using the
	// smallest integer type able to reference every material id
	let materialIndexAttribute = geometry.getAttribute( 'materialIndex' );
	if ( ! materialIndexAttribute || materialIndexAttribute.count !== vertCount ) {

		const ArrayType = allMaterials.length <= 255 ? Uint8Array : Uint16Array;
		materialIndexAttribute = new BufferAttribute( new ArrayType( vertCount ), 1, false );
		geometry.deleteAttribute( 'materialIndex' );
		geometry.setAttribute( 'materialIndex', materialIndexAttribute );

	}

	// write each group's material index into all vertices the group covers
	const materialArray = materialIndexAttribute.array;
	for ( const group of groups ) {

		const start = group.start;

		// clamp in case a group runs past the end of the index buffer
		const endCount = Math.min( group.count, totalCount - start );

		const mat = Array.isArray( materials ) ? materials[ group.materialIndex ] : materials;
		const materialIndex = allMaterials.indexOf( mat );

		for ( let j = 0; j < endCount; j ++ ) {

			// resolve through the index buffer when present
			const vertexIndex = indexAttr ? indexAttr.getX( start + j ) : start + j;
			materialArray[ vertexIndex ] = materialIndex;

		}

	}

}
// Returns a string hash that changes whenever the geometry's attribute set,
// attribute versions, or index version change. Attribute NAMES are iterated
// in sorted order so the hash is stable regardless of insertion order and so
// replacing an attribute with a differently-named one of the same version
// still changes the hash (the previous implementation hashed by array
// position via Object.values and lost the names entirely).
function getGeometryHash( geometry ) {

	let hash = geometry.uuid;
	if ( geometry.index ) {

		hash += `index|${ geometry.index.version }`;

	}

	const names = Object.keys( geometry.attributes ).sort();
	for ( const name of names ) {

		hash += `${ name }_${ geometry.attributes[ name ].version }|`;

	}

	return hash;

}

// Returns a hash of the skeleton's current bone pose, or null when the mesh
// is not skinned.
function getSkeletonHash( mesh ) {

	const skeleton = mesh.skeleton;
	if ( skeleton ) {

		// make sure the bone texture holding the pose data exists
		if ( ! skeleton.boneTexture ) {

			skeleton.computeBoneTexture();

		}

		// we can't use the texture version here because it will change even
		// when the bones haven't
		const dataHash = bufferToHash( skeleton.boneTexture.image.data.buffer );
		return `${ dataHash }_${ skeleton.boneTexture.uuid }`;

	} else {

		return null;

	}

}
// Comparator producing a deterministic (descending-by-uuid) ordering.
function uuidSort( a, b ) {

	return a.uuid < b.uuid ? 1 : a.uuid > b.uuid ? - 1 : 0;

}

// we must hash the texture to determine uniqueness using the encoding, as well, because the
// when rendering each texture to the texture array they must have a consistent color space.
export function getTextureHash( t ) {

	return `${ t.source.uuid }:${ t.colorSpace }`;

}

// reduce the set of textures to just those with a unique source while retaining
// the order of the textures.
function reduceTexturesToUniqueSources( textures ) {

	const seenHashes = new Set();
	return textures.filter( tex => {

		const hash = getTextureHash( tex );
		if ( seenHashes.has( hash ) ) {

			return false;

		}

		seenHashes.add( hash );
		return true;

	} );

}

// Collects the unique set of IES profile textures used by the given lights.
export function getIesTextures( lights ) {

	const unique = new Set();
	for ( const light of lights ) {

		if ( light.iesMap ) {

			unique.add( light.iesMap );

		}

	}

	return Array.from( unique ).sort( uuidSort );

}

// Collects every texture referenced by any property of the given materials,
// deduplicated by source + color space.
export function getTextures( materials ) {

	const textureSet = new Set();
	for ( const material of materials ) {

		for ( const key in material ) {

			const value = material[ key ];
			if ( value && value.isTexture ) {

				textureSet.add( value );

			}

		}

	}

	return reduceTexturesToUniqueSources( Array.from( textureSet ) ).sort( uuidSort );

}

// Collects all visible punctual and area lights in the scene.
export function getLights( scene ) {

	const lights = [];
	scene.traverse( object => {

		if ( ! object.visible ) {

			return;

		}

		if (
			object.isRectAreaLight ||
			object.isSpotLight ||
			object.isPointLight ||
			object.isDirectionalLight
		) {

			lights.push( object );

		}

	} );

	return lights.sort( uuidSort );

}
result.pass ) { 17 | 18 | return result; 19 | 20 | } 21 | 22 | detector = new MaterialCompileDetector( this._renderer ); 23 | result = detector.detect( this._material ); 24 | if ( ! result.pass ) { 25 | 26 | return result; 27 | 28 | } 29 | 30 | return { 31 | detail: {}, 32 | pass: true, 33 | message: '', 34 | }; 35 | 36 | } 37 | 38 | } 39 | -------------------------------------------------------------------------------- /src/detectors/MaterialCompileDetector.js: -------------------------------------------------------------------------------- 1 | import { Mesh, BoxGeometry, PerspectiveCamera } from 'three'; 2 | 3 | // Returns whether a material can be compiled and run on the current device or not 4 | export class MaterialCompileDetector { 5 | 6 | constructor( renderer ) { 7 | 8 | this._renderer = renderer; 9 | 10 | } 11 | 12 | detect( material ) { 13 | 14 | const renderer = this._renderer; 15 | const mesh = new Mesh( new BoxGeometry(), material ); 16 | const camera = new PerspectiveCamera(); 17 | const ogShaderErrors = renderer.debug.checkShaderErrors; 18 | renderer.debug.checkShaderErrors = true; 19 | 20 | const programs = renderer.info.programs; 21 | const progLength = programs.length; 22 | renderer.compile( mesh, camera ); 23 | 24 | renderer.debug.checkShaderErrors = ogShaderErrors; 25 | mesh.geometry.dispose(); 26 | 27 | if ( programs.length === progLength ) { 28 | 29 | return { 30 | detail: null, 31 | pass: true, 32 | message: 'Cannot determine shader compilation status if material has already been used.', 33 | }; 34 | 35 | } else { 36 | 37 | const program = programs[ programs.length - 1 ]; 38 | const pass = program.diagnostics ? program.diagnostics.runnable : true; 39 | const message = pass ? 
// see https://github.com/gkjohnson/webgl-precision
// Returns whether the platform can use highp precision consistently in structs
export class PrecisionDetector {

	constructor( renderer ) {

		this._renderer = renderer;

		// cached detection result - the probe only needs to run once per renderer
		this._result = null;

	}

	// Renders a 1x1 probe once per precision mode ("int", "uint", "float") and
	// compares the precision measured inside and outside of GLSL structs.
	// Returns { detail, message, pass } where "pass" is false when any type
	// loses highp precision inside a struct.
	detect() {

		// return the cached result on repeat calls
		if ( this._result ) {

			return this._result;

		}

		const renderer = this._renderer;
		const material = new PrecisionMaterial();
		const quad = new FullScreenQuad( material );
		const target = new WebGLRenderTarget( 1, 1 );

		// remember the bound target so the renderer state can be restored below
		const ogTarget = renderer.getRenderTarget();

		// "extractResult" and "doesPass" are function declarations and therefore
		// hoisted - they can be used here even though they are defined after the
		// return statement.
		const detail = {
			'int': extractResult( 'int' ),
			'uint': extractResult( 'uint' ),
			'float': extractResult( 'float' ),
		};

		// "doesPass" returns '' on success, so this picks the first failure
		// message (or '' when every type passes)
		const message = doesPass( 'int', detail.int ) || doesPass( 'uint', detail.uint ) || doesPass( 'float', detail.float );
		this._result = {
			detail,
			message,
			pass: ! Boolean( message ),
		};

		// restore renderer state before disposing of the probe resources
		renderer.setRenderTarget( ogTarget );
		quad.dispose();
		target.dispose();
		material.dispose();
		return this._result;

		// returns '' when the struct and non-struct precision measurements agree,
		// otherwise a human readable failure message for the given type
		function doesPass( type, info ) {

			if ( info.vertex === info.vertexStruct && info.fragment === info.fragmentStruct ) {

				return '';

			} else {

				return `Type "${ type }" cannot correctly provide highp precision in structs.`;

			}

		}

		// renders the probe material in the given mode and reads the four
		// precision measurements back out of the single output pixel
		function extractResult( mode ) {

			material.mode = mode;
			renderer.setRenderTarget( target );
			quad.render( renderer );

			const readBuffer = new Uint8Array( 4 );
			renderer.readRenderTargetPixels( target, 0, 0, 1, 1, readBuffer );

			// channel layout matches the shader's gl_FragColor packing
			return {

				vertex: readBuffer[ 0 ],
				vertexStruct: readBuffer[ 1 ],
				fragment: readBuffer[ 2 ],
				fragmentStruct: readBuffer[ 3 ],

			};

		}

	}

}
| 44 | #elif MODE == 1 // int 45 | 46 | int bits = 0; 47 | int value = 1; 48 | while ( value > 0 ) { 49 | 50 | value = value << 1; 51 | value = value | 1; 52 | bits ++; 53 | 54 | } 55 | 56 | int structBits = 0; 57 | IntStruct str; 58 | str.value = 1; 59 | while ( str.value > 0 ) { 60 | 61 | str.value = str.value << 1; 62 | str.value = str.value | 1; 63 | structBits ++; 64 | 65 | } 66 | 67 | return vec2( bits, structBits ); 68 | 69 | #else // uint 70 | 71 | int bits = 0; 72 | uint value = 1u; 73 | while ( value > 0u ) { 74 | 75 | value = value << 1u; 76 | bits ++; 77 | 78 | } 79 | 80 | int structBits = 0; 81 | UintStruct str; 82 | str.value = 1u; 83 | while( str.value > 0u ) { 84 | 85 | str.value = str.value << 1u; 86 | structBits ++; 87 | 88 | } 89 | 90 | return vec2( bits, structBits ); 91 | 92 | #endif 93 | 94 | } 95 | 96 | 97 | `; 98 | 99 | export class PrecisionMaterial extends MaterialBase { 100 | 101 | set mode( v ) { 102 | 103 | this._mode = v; 104 | 105 | switch ( v.toLowerCase() ) { 106 | 107 | case 'float': 108 | this.setDefine( 'MODE', 0 ); 109 | break; 110 | case 'int': 111 | this.setDefine( 'MODE', 1 ); 112 | break; 113 | case 'uint': 114 | this.setDefine( 'MODE', 2 ); 115 | break; 116 | 117 | } 118 | 119 | } 120 | 121 | constructor() { 122 | 123 | super( { 124 | 125 | vertexShader: /* glsl */` 126 | 127 | ${ computePrecisionFunction } 128 | 129 | varying vec2 vPrecision; 130 | void main() { 131 | 132 | vec4 mvPosition = vec4( position, 1.0 ); 133 | mvPosition = modelViewMatrix * mvPosition; 134 | gl_Position = projectionMatrix * mvPosition; 135 | 136 | vPrecision = computePrecision(); 137 | 138 | } 139 | 140 | `, 141 | 142 | fragmentShader: /* glsl */` 143 | 144 | ${ computePrecisionFunction } 145 | 146 | varying vec2 vPrecision; 147 | void main( void ) { 148 | 149 | vec2 fPrecision = computePrecision(); 150 | gl_FragColor = vec4( vPrecision, fPrecision ) / 255.0; 151 | 152 | } 153 | 154 | `, 155 | 156 | } ); 157 | 158 | } 159 | 160 | } 161 | 
import { ShaderMaterial } from 'three';

// Shared base class for the project's shader materials. Mirrors each uniform
// as a direct property on the material and broadcasts a "recompilation" event
// whenever a recompile is requested.
export class MaterialBase extends ShaderMaterial {

	// Any assignment (regardless of the value passed) forces a recompile and
	// notifies listeners so dependent state can be refreshed.
	set needsUpdate( v ) {

		super.needsUpdate = true;
		this.dispatchEvent( {

			type: 'recompilation',

		} );

	}

	constructor( shader ) {

		super( shader );

		// Expose every uniform as a top-level accessor so callers can write
		// `material.foo = x` instead of `material.uniforms.foo.value = x`.
		Object.keys( this.uniforms ).forEach( key => {

			Object.defineProperty( this, key, {

				get: () => this.uniforms[ key ].value,
				set: v => {

					this.uniforms[ key ].value = v;

				},

			} );

		} );

	}

	// Sets the named define, or removes it when value is null / undefined.
	// Returns true and flags a recompile only when something actually changed.
	setDefine( name, value = undefined ) {

		if ( value === undefined || value === null ) {

			if ( ! ( name in this.defines ) ) {

				return false;

			}

			delete this.defines[ name ];
			this.needsUpdate = true;
			return true;

		}

		if ( this.defines[ name ] === value ) {

			return false;

		}

		this.defines[ name ] = value;
		this.needsUpdate = true;
		return true;

	}

}
import { NoBlending } from 'three';
import { MaterialBase } from '../MaterialBase.js';

// Fullscreen debug material that visualizes the alpha channel of "map" as a
// grayscale image (output alpha is forced to 1 so the result is opaque).
export class AlphaDisplayMaterial extends MaterialBase {

	constructor( parameters ) {

		super( {

			blending: NoBlending,

			uniforms: {

				map: { value: null },

			},

			vertexShader: /* glsl */`

				varying vec2 vUv;

				void main() {

					vUv = uv;
					gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );

				}`,

			// NOTE(review): the trailing #include lost its chunk name in
			// extraction — verify against upstream (likely <colorspace_fragment>).
			fragmentShader: /* glsl */`

				uniform sampler2D map;

				varying vec2 vUv;

				void main() {

					gl_FragColor = vec4( texture( map, vUv ).a );
					gl_FragColor.a = 1.0;

					#include

				}`

		} );

		this.setValues( parameters );

	}

}
import { NoBlending } from 'three';
import { MaterialBase } from '../MaterialBase.js';

// Fullscreen material that alpha-aware blends target1 toward target2 by
// "opacity" (0 = target1 only, 1 = target2 only). RGB is weighted by each
// source's alpha so transparent texels do not bleed color into the result.
export class BlendMaterial extends MaterialBase {

	constructor( parameters ) {

		super( {

			blending: NoBlending,

			uniforms: {

				target1: { value: null },
				target2: { value: null },
				opacity: { value: 1.0 },

			},

			vertexShader: /* glsl */`

				varying vec2 vUv;

				void main() {

					vUv = uv;
					gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );

				}`,

			fragmentShader: /* glsl */`

				uniform float opacity;

				uniform sampler2D target1;
				uniform sampler2D target2;

				varying vec2 vUv;

				void main() {

					vec4 color1 = texture2D( target1, vUv );
					vec4 color2 = texture2D( target2, vUv );

					float invOpacity = 1.0 - opacity;
					float totalAlpha = color1.a * invOpacity + color2.a * opacity;

					// FIX: guard on the blended alpha itself. The previous check
					// ( color1.a != 0.0 || color2.a != 0.0 ) still allowed
					// totalAlpha == 0.0 (e.g. opacity == 1.0 with color2.a == 0.0),
					// which divided by zero below and produced NaN output.
					if ( totalAlpha != 0.0 ) {

						gl_FragColor.rgb = color1.rgb * ( invOpacity * color1.a / totalAlpha ) + color2.rgb * ( opacity * color2.a / totalAlpha );
						gl_FragColor.a = totalAlpha;

					} else {

						gl_FragColor = vec4( 0.0 );

					}

				}`

		} );

		this.setValues( parameters );

	}

}
import { ShaderMaterial } from 'three';

// Material that tone maps a texture before performing interpolation to prevent
// unexpected high values during texture stretching interpolation.
// Emulates browser image stretching by doing the bilinear filter manually on
// already tone-mapped, output-encoded texels.
export class ClampedInterpolationMaterial extends ShaderMaterial {

	get map() {

		return this.uniforms.map.value;

	}

	set map( v ) {

		this.uniforms.map.value = v;

	}

	get opacity() {

		return this.uniforms.opacity.value;

	}

	set opacity( v ) {

		// guarded because three assigns opacity during super() before uniforms exist
		if ( this.uniforms ) {

			this.uniforms.opacity.value = v;

		}

	}

	constructor( params ) {

		super( {
			uniforms: {

				map: { value: null },
				opacity: { value: 1 },

			},

			vertexShader: /* glsl */`
				varying vec2 vUv;
				void main() {

					vUv = uv;
					gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );

				}
			`,

			// NOTE(review): the trailing #include lost its chunk name in
			// extraction — verify against upstream before shipping.
			fragmentShader: /* glsl */`
				uniform sampler2D map;
				uniform float opacity;
				varying vec2 vUv;

				// FIX: renamed from the misspelled "clampedTexelFatch" and the
				// previously ignored "lod" argument is now forwarded (all call
				// sites pass 0, so sampled results are unchanged).
				vec4 clampedTexelFetch( sampler2D map, ivec2 px, int lod ) {

					vec4 res = texelFetch( map, px, lod );

					#if defined( TONE_MAPPING )

					res.xyz = toneMapping( res.xyz );

					#endif

					return linearToOutputTexel( res );

				}

				void main() {

					// locate the four nearest texels around this uv and blend them
					// manually so tone mapping happens BEFORE interpolation
					vec2 size = vec2( textureSize( map, 0 ) );
					vec2 pxUv = vUv * size;
					vec2 pxCurr = floor( pxUv );
					vec2 pxFrac = fract( pxUv ) - 0.5;
					vec2 pxOffset;
					pxOffset.x = pxFrac.x > 0.0 ? 1.0 : - 1.0;
					pxOffset.y = pxFrac.y > 0.0 ? 1.0 : - 1.0;

					vec2 pxNext = clamp( pxOffset + pxCurr, vec2( 0.0 ), size - 1.0 );
					vec2 alpha = abs( pxFrac );

					vec4 p1 = mix(
						clampedTexelFetch( map, ivec2( pxCurr.x, pxCurr.y ), 0 ),
						clampedTexelFetch( map, ivec2( pxNext.x, pxCurr.y ), 0 ),
						alpha.x
					);

					vec4 p2 = mix(
						clampedTexelFetch( map, ivec2( pxCurr.x, pxNext.y ), 0 ),
						clampedTexelFetch( map, ivec2( pxNext.x, pxNext.y ), 0 ),
						alpha.x
					);

					gl_FragColor = mix( p1, p2, alpha.y );
					gl_FragColor.a *= opacity;
					#include

				}
			`
		} );

		this.setValues( params );

	}

}
import { NoBlending } from 'three';
import { MaterialBase } from '../MaterialBase.js';

// Fullscreen denoise pass wrapping the third-party "glslSmartDeNoise" kernel.
// The GLSL body is reproduced verbatim (BSD 2-Clause, see header inside).
export class DenoiseMaterial extends MaterialBase {

	constructor( parameters ) {

		super( {

			blending: NoBlending,

			transparent: false,

			depthWrite: false,

			depthTest: false,

			defines: {

				USE_SLIDER: 0,

			},

			uniforms: {

				sigma: { value: 5.0 },
				threshold: { value: 0.03 },
				kSigma: { value: 1.0 },

				map: { value: null },
				opacity: { value: 1 },

			},

			vertexShader: /* glsl */`

				varying vec2 vUv;

				void main() {

					vUv = uv;
					gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );

				}

			`,

			// NOTE(review): the trailing #include chunk names were lost in
			// extraction — verify against upstream before shipping.
			fragmentShader: /* glsl */`

				//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
				// Copyright (c) 2018-2019 Michele Morrone
				// All rights reserved.
				//
				// https://michelemorrone.eu - https://BrutPitt.com
				//
				// me@michelemorrone.eu - brutpitt@gmail.com
				// twitter: @BrutPitt - github: BrutPitt
				//
				// https://github.com/BrutPitt/glslSmartDeNoise/
				//
				// This software is distributed under the terms of the BSD 2-Clause license
				//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

				uniform sampler2D map;

				uniform float sigma;
				uniform float threshold;
				uniform float kSigma;
				uniform float opacity;

				varying vec2 vUv;

				#define INV_SQRT_OF_2PI 0.39894228040143267793994605993439
				#define INV_PI 0.31830988618379067153776752674503

				// Parameters:
				// sampler2D tex - sampler image / texture
				// vec2 uv - actual fragment coord
				// float sigma > 0 - sigma Standard Deviation
				// float kSigma >= 0 - sigma coefficient
				// kSigma * sigma --> radius of the circular kernel
				// float threshold - edge sharpening threshold
				vec4 smartDeNoise( sampler2D tex, vec2 uv, float sigma, float kSigma, float threshold ) {

					float radius = round( kSigma * sigma );
					float radQ = radius * radius;

					float invSigmaQx2 = 0.5 / ( sigma * sigma );
					float invSigmaQx2PI = INV_PI * invSigmaQx2;

					float invThresholdSqx2 = 0.5 / ( threshold * threshold );
					float invThresholdSqrt2PI = INV_SQRT_OF_2PI / threshold;

					vec4 centrPx = texture2D( tex, uv );
					centrPx.rgb *= centrPx.a;

					float zBuff = 0.0;
					vec4 aBuff = vec4( 0.0 );
					vec2 size = vec2( textureSize( tex, 0 ) );

					vec2 d;
					for ( d.x = - radius; d.x <= radius; d.x ++ ) {

						float pt = sqrt( radQ - d.x * d.x );

						for ( d.y = - pt; d.y <= pt; d.y ++ ) {

							float blurFactor = exp( - dot( d, d ) * invSigmaQx2 ) * invSigmaQx2PI;

							vec4 walkPx = texture2D( tex, uv + d / size );
							walkPx.rgb *= walkPx.a;

							vec4 dC = walkPx - centrPx;
							float deltaFactor = exp( - dot( dC.rgba, dC.rgba ) * invThresholdSqx2 ) * invThresholdSqrt2PI * blurFactor;

							zBuff += deltaFactor;
							aBuff += deltaFactor * walkPx;

						}

					}

					return aBuff / zBuff;

				}

				void main() {

					gl_FragColor = smartDeNoise( map, vec2( vUv.x, vUv.y ), sigma, kSigma, threshold );
					#include
					#include
					#include

					gl_FragColor.a *= opacity;

				}

			`

		} );

		this.setValues( parameters );

	}

}
import { Color, NoBlending } from 'three';
import { MaterialBase } from '../MaterialBase.js';

// Debug material that maps one channel ("field") of a texture onto a color
// gradient between minColor / maxColor over [ minValue, maxValue ], with an
// optional pow() response curve and an optional ceil() binning mode.
export class GradientMapMaterial extends MaterialBase {

	constructor( parameters ) {

		super( {

			blending: NoBlending,

			defines: {

				FEATURE_BIN: 0,

			},

			uniforms: {

				map: { value: null },

				minColor: { value: new Color( 0 ) },
				minValue: { value: 0 },

				maxColor: { value: new Color( 0xffffff ) },
				maxValue: { value: 10 },

				field: { value: 0 },
				power: { value: 1 },

			},

			vertexShader: /* glsl */`

				varying vec2 vUv;

				void main() {

					vUv = uv;
					gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );

				}`,

			// NOTE(review): the trailing #include lost its chunk name in
			// extraction — verify against upstream before shipping.
			fragmentShader: /* glsl */`

				uniform sampler2D map;
				uniform vec3 minColor;
				uniform float minValue;
				uniform vec3 maxColor;
				uniform float maxValue;
				uniform int field;
				uniform float power;

				varying vec2 vUv;

				void main() {

					float value = texture( map, vUv )[ field ];

					#if FEATURE_BIN

					value = ceil( value );

					#endif

					float t = smoothstep( minValue, maxValue, value );
					t = pow( t, power );

					gl_FragColor.rgb = vec3( mix( minColor, maxColor, t ) );
					gl_FragColor.a = 1.0;

					#include

				}`,

		} );

		this.setValues( parameters );

	}

}
// GLSL: walks a ray through up to state.traversals surface hits, accumulating
// color attenuation through transmissive surfaces. Returns true when a solid
// (opaque) surface or fog particle blocks the ray, false when it escapes or
// exceeds rayDist; "color" receives the accumulated transmission tint.
export const attenuate_hit_function = /* glsl */`

	// step through multiple surface hits and accumulate color attenuation based on transmissive surfaces
	// returns true if a solid surface was hit
	bool attenuateHit(
		RenderState state,
		Ray ray, float rayDist,
		out vec3 color
	) {

		// store the original bounce index so we can reset it after
		uint originalBounceIndex = sobolBounceIndex;

		int traversals = state.traversals;
		int transmissiveTraversals = state.transmissiveTraversals;
		bool isShadowRay = state.isShadowRay;
		Material fogMaterial = state.fogMaterial;

		vec3 startPoint = ray.origin;

		// hit results
		SurfaceHit surfaceHit;

		color = vec3( 1.0 );

		bool result = true;
		for ( int i = 0; i < traversals; i ++ ) {

			sobolBounceIndex ++;

			int hitType = traceScene( ray, fogMaterial, surfaceHit );

			if ( hitType == FOG_HIT ) {

				result = true;
				break;

			} else if ( hitType == SURFACE_HIT ) {

				float totalDist = distance( startPoint, ray.origin + ray.direction * surfaceHit.dist );
				if ( totalDist > rayDist ) {

					result = false;
					break;

				}

				// TODO: attenuate the contribution based on the PDF of the resulting ray including refraction values
				// Should be able to work using the material BSDF functions which will take into account specularity, etc.
				// TODO: should we account for emissive surfaces here?

				uint materialIndex = uTexelFetch1D( materialIndexAttribute, surfaceHit.faceIndices.x ).r;
				Material material = readMaterialInfo( materials, materialIndex );

				// adjust the ray to the new surface
				bool isEntering = surfaceHit.side == 1.0;
				ray.origin = stepRayOrigin( ray.origin, ray.direction, - surfaceHit.faceNormal, surfaceHit.dist );

				#if FEATURE_FOG

				if ( material.fogVolume ) {

					fogMaterial = material;
					fogMaterial.fogVolume = surfaceHit.side == 1.0;
					i -= sign( transmissiveTraversals );
					transmissiveTraversals --;
					continue;

				}

				#endif

				if ( ! material.castShadow && isShadowRay ) {

					continue;

				}

				vec2 uv = textureSampleBarycoord( attributesArray, ATTR_UV, surfaceHit.barycoord, surfaceHit.faceIndices.xyz ).xy;
				vec4 vertexColor = textureSampleBarycoord( attributesArray, ATTR_COLOR, surfaceHit.barycoord, surfaceHit.faceIndices.xyz );

				// albedo
				vec4 albedo = vec4( material.color, material.opacity );
				if ( material.map != - 1 ) {

					vec3 uvPrime = material.mapTransform * vec3( uv, 1 );
					albedo *= texture2D( textures, vec3( uvPrime.xy, material.map ) );

				}

				if ( material.vertexColors ) {

					albedo *= vertexColor;

				}

				// alphaMap
				if ( material.alphaMap != - 1 ) {

					vec3 uvPrime = material.alphaMapTransform * vec3( uv, 1 );
					albedo.a *= texture2D( textures, vec3( uvPrime.xy, material.alphaMap ) ).x;

				}

				// transmission
				float transmission = material.transmission;
				if ( material.transmissionMap != - 1 ) {

					vec3 uvPrime = material.transmissionMapTransform * vec3( uv, 1 );
					transmission *= texture2D( textures, vec3( uvPrime.xy, material.transmissionMap ) ).r;

				}

				// metalness
				float metalness = material.metalness;
				if ( material.metalnessMap != - 1 ) {

					vec3 uvPrime = material.metalnessMapTransform * vec3( uv, 1 );
					metalness *= texture2D( textures, vec3( uvPrime.xy, material.metalnessMap ) ).b;

				}

				float alphaTest = material.alphaTest;
				bool useAlphaTest = alphaTest != 0.0;
				float transmissionFactor = ( 1.0 - metalness ) * transmission;
				if (
					transmissionFactor < rand( 9 ) && ! (
						// material sidedness
						material.side != 0.0 && surfaceHit.side == material.side

						// alpha test
						|| useAlphaTest && albedo.a < alphaTest

						// opacity
						|| material.transparent && ! useAlphaTest && albedo.a < rand( 10 )
					)
				) {

					result = true;
					break;

				}

				if ( surfaceHit.side == 1.0 && isEntering ) {

					// only attenuate by surface color on the way in
					color *= mix( vec3( 1.0 ), albedo.rgb, transmissionFactor );

				} else if ( surfaceHit.side == - 1.0 ) {

					// attenuate by medium once we hit the opposite side of the model
					color *= transmissionAttenuation( surfaceHit.dist, material.attenuationColor, material.attenuationDistance );

				}

				bool isTransmissiveRay = dot( ray.direction, surfaceHit.faceNormal * surfaceHit.side ) < 0.0;
				if ( ( isTransmissiveRay || isEntering ) && transmissiveTraversals > 0 ) {

					i -= sign( transmissiveTraversals );
					transmissiveTraversals --;

				}

			} else {

				result = false;
				break;

			}

		}

		// reset the bounce index
		sobolBounceIndex = originalBounceIndex;
		return result;

	}

`;
// GLSL: primary-ray generation. Supports perspective (default), orthographic
// (CAMERA_TYPE == 1) and equirectangular (CAMERA_TYPE == 2) projections, with
// optional depth-of-field aperture sampling behind FEATURE_DOF.
export const camera_util_functions = /* glsl */`

	vec3 ndcToRayOrigin( vec2 coord ) {

		vec4 rayOrigin4 = cameraWorldMatrix * invProjectionMatrix * vec4( coord, - 1.0, 1.0 );
		return rayOrigin4.xyz / rayOrigin4.w;
	}

	Ray getCameraRay() {

		vec2 ssd = vec2( 1.0 ) / resolution;

		// Jitter the camera ray by finding a uv coordinate at a random sample
		// around this pixel's UV coordinate for AA
		vec2 ruv = rand2( 0 );
		vec2 jitteredUv = vUv + vec2( tentFilter( ruv.x ) * ssd.x, tentFilter( ruv.y ) * ssd.y );
		Ray ray;

		#if CAMERA_TYPE == 2

			// Equirectangular projection
			vec4 rayDirection4 = vec4( equirectUvToDirection( jitteredUv ), 0.0 );
			vec4 rayOrigin4 = vec4( 0.0, 0.0, 0.0, 1.0 );

			rayDirection4 = cameraWorldMatrix * rayDirection4;
			rayOrigin4 = cameraWorldMatrix * rayOrigin4;

			ray.direction = normalize( rayDirection4.xyz );
			ray.origin = rayOrigin4.xyz / rayOrigin4.w;

		#else

			// get [- 1, 1] normalized device coordinates
			vec2 ndc = 2.0 * jitteredUv - vec2( 1.0 );
			ray.origin = ndcToRayOrigin( ndc );

			#if CAMERA_TYPE == 1

				// Orthographic projection
				ray.direction = ( cameraWorldMatrix * vec4( 0.0, 0.0, - 1.0, 0.0 ) ).xyz;
				ray.direction = normalize( ray.direction );

			#else

				// Perspective projection
				ray.direction = normalize( mat3( cameraWorldMatrix ) * ( invProjectionMatrix * vec4( ndc, 0.0, 1.0 ) ).xyz );

			#endif

		#endif

		#if FEATURE_DOF
		{

			// depth of field
			vec3 focalPoint = ray.origin + normalize( ray.direction ) * physicalCamera.focusDistance;

			// get the aperture sample
			// if blades === 0 then we assume a circle
			vec3 shapeUVW= rand3( 1 );
			int blades = physicalCamera.apertureBlades;
			float anamorphicRatio = physicalCamera.anamorphicRatio;
			vec2 apertureSample = blades == 0 ? sampleCircle( shapeUVW.xy ) : sampleRegularPolygon( blades, shapeUVW );
			apertureSample *= physicalCamera.bokehSize * 0.5 * 1e-3;

			// rotate the aperture shape
			apertureSample =
				rotateVector( apertureSample, physicalCamera.apertureRotation ) *
				saturate( vec2( anamorphicRatio, 1.0 / anamorphicRatio ) );

			// create the new ray
			ray.origin += ( cameraWorldMatrix * vec4( apertureSample, 0.0, 0.0 ) ).xyz;
			ray.direction = focalPoint - ray.origin;

		}
		#endif

		ray.direction = normalize( ray.direction );

		return ray;

	}

`;
// GLSL: next-event estimation. Uniformly picks either an explicit light or the
// environment map, traces a shadow ray via attenuateHit, and returns the
// MIS-weighted direct lighting contribution for the current surface.
export const direct_light_contribution_function = /*glsl*/`

	vec3 directLightContribution( vec3 worldWo, SurfaceRecord surf, RenderState state, vec3 rayOrigin ) {

		vec3 result = vec3( 0.0 );

		// uniformly pick a light or environment map
		if( lightsDenom != 0.0 && rand( 5 ) < float( lights.count ) / lightsDenom ) {

			// sample a light or environment
			LightRecord lightRec = randomLightSample( lights.tex, iesProfiles, lights.count, rayOrigin, rand3( 6 ) );

			bool isSampleBelowSurface = ! surf.volumeParticle && dot( surf.faceNormal, lightRec.direction ) < 0.0;
			if ( isSampleBelowSurface ) {

				lightRec.pdf = 0.0;

			}

			// check if a ray could even reach the light area
			Ray lightRay;
			lightRay.origin = rayOrigin;
			lightRay.direction = lightRec.direction;
			vec3 attenuatedColor;
			if (
				lightRec.pdf > 0.0 &&
				isDirectionValid( lightRec.direction, surf.normal, surf.faceNormal ) &&
				! attenuateHit( state, lightRay, lightRec.dist, attenuatedColor )
			) {

				// get the material pdf
				vec3 sampleColor;
				float lightMaterialPdf = bsdfResult( worldWo, lightRec.direction, surf, sampleColor );
				bool isValidSampleColor = all( greaterThanEqual( sampleColor, vec3( 0.0 ) ) );
				if ( lightMaterialPdf > 0.0 && isValidSampleColor ) {

					// weight the direct light contribution
					float lightPdf = lightRec.pdf / lightsDenom;
					float misWeight = lightRec.type == SPOT_LIGHT_TYPE || lightRec.type == DIR_LIGHT_TYPE || lightRec.type == POINT_LIGHT_TYPE ? 1.0 : misHeuristic( lightPdf, lightMaterialPdf );
					result = attenuatedColor * lightRec.emission * state.throughputColor * sampleColor * misWeight / lightPdf;

				}

			}

		} else if ( envMapInfo.totalSum != 0.0 && environmentIntensity != 0.0 ) {

			// find a sample in the environment map to include in the contribution
			vec3 envColor, envDirection;
			float envPdf = sampleEquirectProbability( rand2( 7 ), envColor, envDirection );
			envDirection = invEnvRotation3x3 * envDirection;

			// this env sampling is not set up for transmissive sampling and yields overly bright
			// results so we ignore the sample in this case.
			// TODO: this should be improved but how? The env samples could traverse a few layers?
			bool isSampleBelowSurface = ! surf.volumeParticle && dot( surf.faceNormal, envDirection ) < 0.0;
			if ( isSampleBelowSurface ) {

				envPdf = 0.0;

			}

			// check if a ray could even reach the surface
			Ray envRay;
			envRay.origin = rayOrigin;
			envRay.direction = envDirection;
			vec3 attenuatedColor;
			if (
				envPdf > 0.0 &&
				isDirectionValid( envDirection, surf.normal, surf.faceNormal ) &&
				! attenuateHit( state, envRay, INFINITY, attenuatedColor )
			) {

				// get the material pdf
				vec3 sampleColor;
				float envMaterialPdf = bsdfResult( worldWo, envDirection, surf, sampleColor );
				bool isValidSampleColor = all( greaterThanEqual( sampleColor, vec3( 0.0 ) ) );
				if ( envMaterialPdf > 0.0 && isValidSampleColor ) {

					// weight the direct light contribution
					envPdf /= lightsDenom;
					float misWeight = misHeuristic( envPdf, envMaterialPdf );
					result = attenuatedColor * environmentIntensity * envColor * state.throughputColor * sampleColor * misWeight / envPdf;

				}

			}

		}

		// Function changed to have a single return statement to potentially help with crashes on Mac OS.
		// See issue #470
		return result;

	}

`;
// GLSL: shared data structures for the path tracer — Ray, SurfaceHit and the
// per-path RenderState accumulator — plus the RenderState initializer.
export const render_structs = /* glsl */`

	struct Ray {

		vec3 origin;
		vec3 direction;

	};

	struct SurfaceHit {

		uvec4 faceIndices;
		vec3 barycoord;
		vec3 faceNormal;
		float side;
		float dist;

	};

	struct RenderState {

		bool firstRay;
		bool transmissiveRay;
		bool isShadowRay;
		float accumulatedRoughness;
		int transmissiveTraversals;
		int traversals;
		uint depth;
		vec3 throughputColor;
		Material fogMaterial;

	};

	RenderState initRenderState() {

		RenderState result;
		result.firstRay = true;
		result.transmissiveRay = true;
		result.isShadowRay = false;
		result.accumulatedRoughness = 0.0;
		result.transmissiveTraversals = 0;
		result.traversals = 0;
		result.throughputColor = vec3( 1.0 );
		result.depth = 0u;
		result.fogMaterial.fogVolume = false;
		return result;

	}

`;
int transmissiveTraversals; 27 | int traversals; 28 | uint depth; 29 | vec3 throughputColor; 30 | Material fogMaterial; 31 | 32 | }; 33 | 34 | RenderState initRenderState() { 35 | 36 | RenderState result; 37 | result.firstRay = true; 38 | result.transmissiveRay = true; 39 | result.isShadowRay = false; 40 | result.accumulatedRoughness = 0.0; 41 | result.transmissiveTraversals = 0; 42 | result.traversals = 0; 43 | result.throughputColor = vec3( 1.0 ); 44 | result.depth = 0u; 45 | result.fogMaterial.fogVolume = false; 46 | return result; 47 | 48 | } 49 | 50 | `; 51 | -------------------------------------------------------------------------------- /src/materials/pathtracing/glsl/trace_scene_function.glsl.js: -------------------------------------------------------------------------------- 1 | export const trace_scene_function = /* glsl */` 2 | 3 | #define NO_HIT 0 4 | #define SURFACE_HIT 1 5 | #define LIGHT_HIT 2 6 | #define FOG_HIT 3 7 | 8 | // Passing the global variable 'lights' into this function caused shader program errors. 9 | // So global variables like 'lights' and 'bvh' were moved out of the function parameters. 
10 | // For more information, refer to: https://github.com/gkjohnson/three-gpu-pathtracer/pull/457 11 | int traceScene( 12 | Ray ray, Material fogMaterial, inout SurfaceHit surfaceHit 13 | ) { 14 | 15 | int result = NO_HIT; 16 | bool hit = bvhIntersectFirstHit( bvh, ray.origin, ray.direction, surfaceHit.faceIndices, surfaceHit.faceNormal, surfaceHit.barycoord, surfaceHit.side, surfaceHit.dist ); 17 | 18 | #if FEATURE_FOG 19 | 20 | if ( fogMaterial.fogVolume ) { 21 | 22 | // offset the distance so we don't run into issues with particles on the same surface 23 | // as other objects 24 | float particleDist = intersectFogVolume( fogMaterial, rand( 1 ) ); 25 | if ( particleDist + RAY_OFFSET < surfaceHit.dist ) { 26 | 27 | surfaceHit.side = 1.0; 28 | surfaceHit.faceNormal = normalize( - ray.direction ); 29 | surfaceHit.dist = particleDist; 30 | return FOG_HIT; 31 | 32 | } 33 | 34 | } 35 | 36 | #endif 37 | 38 | if ( hit ) { 39 | 40 | result = SURFACE_HIT; 41 | 42 | } 43 | 44 | return result; 45 | 46 | } 47 | 48 | `; 49 | -------------------------------------------------------------------------------- /src/materials/surface/FogVolumeMaterial.js: -------------------------------------------------------------------------------- 1 | import { Color, MeshStandardMaterial } from 'three'; 2 | 3 | export class FogVolumeMaterial extends MeshStandardMaterial { 4 | 5 | constructor( params ) { 6 | 7 | super( params ); 8 | 9 | this.isFogVolumeMaterial = true; 10 | 11 | this.density = 0.015; 12 | this.emissive = new Color(); 13 | this.emissiveIntensity = 0.0; 14 | this.opacity = 0.15; 15 | this.transparent = true; 16 | this.roughness = 1.0; 17 | this.metalness = 0.0; 18 | 19 | this.setValues( params ); 20 | 21 | } 22 | 23 | } 24 | -------------------------------------------------------------------------------- /src/objects/EquirectCamera.js: -------------------------------------------------------------------------------- 1 | import { Camera } from 'three'; 2 | 3 | export class 
EquirectCamera extends Camera { 4 | 5 | constructor() { 6 | 7 | super(); 8 | 9 | this.isEquirectCamera = true; 10 | 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /src/objects/PhysicalCamera.js: -------------------------------------------------------------------------------- 1 | import { PerspectiveCamera } from 'three'; 2 | 3 | export class PhysicalCamera extends PerspectiveCamera { 4 | 5 | set bokehSize( size ) { 6 | 7 | this.fStop = this.getFocalLength() / size; 8 | 9 | } 10 | 11 | get bokehSize() { 12 | 13 | return this.getFocalLength() / this.fStop; 14 | 15 | } 16 | 17 | constructor( ...args ) { 18 | 19 | super( ...args ); 20 | this.fStop = 1.4; 21 | this.apertureBlades = 0; 22 | this.apertureRotation = 0; 23 | this.focusDistance = 25; 24 | this.anamorphicRatio = 1; 25 | 26 | } 27 | 28 | copy( source, recursive ) { 29 | 30 | super.copy( source, recursive ); 31 | 32 | this.fStop = source.fStop; 33 | this.apertureBlades = source.apertureBlades; 34 | this.apertureRotation = source.apertureRotation; 35 | this.focusDistance = source.focusDistance; 36 | this.anamorphicRatio = source.anamorphicRatio; 37 | 38 | return this; 39 | 40 | } 41 | 42 | } 43 | -------------------------------------------------------------------------------- /src/objects/PhysicalSpotLight.js: -------------------------------------------------------------------------------- 1 | import { SpotLight } from 'three'; 2 | 3 | export class PhysicalSpotLight extends SpotLight { 4 | 5 | constructor( ...args ) { 6 | 7 | super( ...args ); 8 | 9 | this.iesMap = null; 10 | this.radius = 0; 11 | 12 | } 13 | 14 | copy( source, recursive ) { 15 | 16 | super.copy( source, recursive ); 17 | 18 | this.iesMap = source.iesMap; 19 | this.radius = source.radius; 20 | 21 | return this; 22 | 23 | } 24 | 25 | } 26 | -------------------------------------------------------------------------------- /src/objects/ShapedAreaLight.js: 
-------------------------------------------------------------------------------- 1 | import { RectAreaLight } from 'three'; 2 | 3 | export class ShapedAreaLight extends RectAreaLight { 4 | 5 | constructor( ...args ) { 6 | 7 | super( ...args ); 8 | this.isCircular = false; 9 | 10 | } 11 | 12 | copy( source, recursive ) { 13 | 14 | super.copy( source, recursive ); 15 | 16 | this.isCircular = source.isCircular; 17 | 18 | return this; 19 | 20 | } 21 | 22 | } 23 | -------------------------------------------------------------------------------- /src/shader/bsdf/fog_functions.glsl.js: -------------------------------------------------------------------------------- 1 | export const fog_functions = /* glsl */` 2 | 3 | // returns the hit distance given the material density 4 | float intersectFogVolume( Material material, float u ) { 5 | 6 | // https://raytracing.github.io/books/RayTracingTheNextWeek.html#volumes/constantdensitymediums 7 | return material.opacity == 0.0 ? INFINITY : ( - 1.0 / material.opacity ) * log( u ); 8 | 9 | } 10 | 11 | ScatterRecord sampleFogVolume( SurfaceRecord surf, vec2 uv ) { 12 | 13 | ScatterRecord sampleRec; 14 | sampleRec.specularPdf = 0.0; 15 | sampleRec.pdf = 1.0 / ( 2.0 * PI ); 16 | sampleRec.direction = sampleSphere( uv ); 17 | sampleRec.color = surf.color; 18 | return sampleRec; 19 | 20 | } 21 | 22 | `; 23 | -------------------------------------------------------------------------------- /src/shader/bsdf/ggx_functions.glsl.js: -------------------------------------------------------------------------------- 1 | export const ggx_functions = /* glsl */` 2 | 3 | // The GGX functions provide sampling and distribution information for normals as output so 4 | // in order to get probability of scatter direction the half vector must be computed and provided. 
5 | // [0] https://www.cs.cornell.edu/~srm/publications/EGSR07-btdf.pdf 6 | // [1] https://hal.archives-ouvertes.fr/hal-01509746/document 7 | // [2] http://jcgt.org/published/0007/04/01/ 8 | // [4] http://jcgt.org/published/0003/02/03/ 9 | 10 | // trowbridge-reitz === GGX === GTR 11 | 12 | vec3 ggxDirection( vec3 incidentDir, vec2 roughness, vec2 uv ) { 13 | 14 | // TODO: try GGXVNDF implementation from reference [2], here. Needs to update ggxDistribution 15 | // function below, as well 16 | 17 | // Implementation from reference [1] 18 | // stretch view 19 | vec3 V = normalize( vec3( roughness * incidentDir.xy, incidentDir.z ) ); 20 | 21 | // orthonormal basis 22 | vec3 T1 = ( V.z < 0.9999 ) ? normalize( cross( V, vec3( 0.0, 0.0, 1.0 ) ) ) : vec3( 1.0, 0.0, 0.0 ); 23 | vec3 T2 = cross( T1, V ); 24 | 25 | // sample point with polar coordinates (r, phi) 26 | float a = 1.0 / ( 1.0 + V.z ); 27 | float r = sqrt( uv.x ); 28 | float phi = ( uv.y < a ) ? uv.y / a * PI : PI + ( uv.y - a ) / ( 1.0 - a ) * PI; 29 | float P1 = r * cos( phi ); 30 | float P2 = r * sin( phi ) * ( ( uv.y < a ) ? 
1.0 : V.z ); 31 | 32 | // compute normal 33 | vec3 N = P1 * T1 + P2 * T2 + V * sqrt( max( 0.0, 1.0 - P1 * P1 - P2 * P2 ) ); 34 | 35 | // unstretch 36 | N = normalize( vec3( roughness * N.xy, max( 0.0, N.z ) ) ); 37 | 38 | return N; 39 | 40 | } 41 | 42 | // Below are PDF and related functions for use in a Monte Carlo path tracer 43 | // as specified in Appendix B of the following paper 44 | // See equation (34) from reference [0] 45 | float ggxLamda( float theta, float roughness ) { 46 | 47 | float tanTheta = tan( theta ); 48 | float tanTheta2 = tanTheta * tanTheta; 49 | float alpha2 = roughness * roughness; 50 | 51 | float numerator = - 1.0 + sqrt( 1.0 + alpha2 * tanTheta2 ); 52 | return numerator / 2.0; 53 | 54 | } 55 | 56 | // See equation (34) from reference [0] 57 | float ggxShadowMaskG1( float theta, float roughness ) { 58 | 59 | return 1.0 / ( 1.0 + ggxLamda( theta, roughness ) ); 60 | 61 | } 62 | 63 | // See equation (125) from reference [4] 64 | float ggxShadowMaskG2( vec3 wi, vec3 wo, float roughness ) { 65 | 66 | float incidentTheta = acos( wi.z ); 67 | float scatterTheta = acos( wo.z ); 68 | return 1.0 / ( 1.0 + ggxLamda( incidentTheta, roughness ) + ggxLamda( scatterTheta, roughness ) ); 69 | 70 | } 71 | 72 | // See equation (33) from reference [0] 73 | float ggxDistribution( vec3 halfVector, float roughness ) { 74 | 75 | float a2 = roughness * roughness; 76 | a2 = max( EPSILON, a2 ); 77 | float cosTheta = halfVector.z; 78 | float cosTheta4 = pow( cosTheta, 4.0 ); 79 | 80 | if ( cosTheta == 0.0 ) return 0.0; 81 | 82 | float theta = acosSafe( halfVector.z ); 83 | float tanTheta = tan( theta ); 84 | float tanTheta2 = pow( tanTheta, 2.0 ); 85 | 86 | float denom = PI * cosTheta4 * pow( a2 + tanTheta2, 2.0 ); 87 | return ( a2 / denom ); 88 | 89 | } 90 | 91 | // See equation (3) from reference [2] 92 | float ggxPDF( vec3 wi, vec3 halfVector, float roughness ) { 93 | 94 | float incidentTheta = acos( wi.z ); 95 | float D = ggxDistribution( halfVector, roughness 
); 96 | float G1 = ggxShadowMaskG1( incidentTheta, roughness ); 97 | 98 | return D * G1 * max( 0.0, dot( wi, halfVector ) ) / wi.z; 99 | 100 | } 101 | 102 | `; 103 | -------------------------------------------------------------------------------- /src/shader/bsdf/index.js: -------------------------------------------------------------------------------- 1 | export * from './bsdf_functions.glsl.js'; 2 | export * from './fog_functions.glsl.js'; 3 | export * from './ggx_functions.glsl.js'; 4 | export * from './iridescence_functions.glsl.js'; 5 | export * from './sheen_functions.glsl.js'; 6 | -------------------------------------------------------------------------------- /src/shader/bsdf/iridescence_functions.glsl.js: -------------------------------------------------------------------------------- 1 | export const iridescence_functions = /* glsl */` 2 | 3 | // XYZ to sRGB color space 4 | const mat3 XYZ_TO_REC709 = mat3( 5 | 3.2404542, -0.9692660, 0.0556434, 6 | -1.5371385, 1.8760108, -0.2040259, 7 | -0.4985314, 0.0415560, 1.0572252 8 | ); 9 | 10 | vec3 fresnel0ToIor( vec3 fresnel0 ) { 11 | 12 | vec3 sqrtF0 = sqrt( fresnel0 ); 13 | return ( vec3( 1.0 ) + sqrtF0 ) / ( vec3( 1.0 ) - sqrtF0 ); 14 | 15 | } 16 | 17 | // Conversion FO/IOR 18 | vec3 iorToFresnel0( vec3 transmittedIor, float incidentIor ) { 19 | 20 | return square( ( transmittedIor - vec3( incidentIor ) ) / ( transmittedIor + vec3( incidentIor ) ) ); 21 | 22 | } 23 | 24 | // ior is a value between 1.0 and 3.0. 1.0 is air interface 25 | float iorToFresnel0( float transmittedIor, float incidentIor ) { 26 | 27 | return square( ( transmittedIor - incidentIor ) / ( transmittedIor + incidentIor ) ); 28 | 29 | } 30 | 31 | // Fresnel equations for dielectric/dielectric interfaces. 
See https://belcour.github.io/blog/research/2017/05/01/brdf-thin-film.html 32 | vec3 evalSensitivity( float OPD, vec3 shift ) { 33 | 34 | float phase = 2.0 * PI * OPD * 1.0e-9; 35 | 36 | vec3 val = vec3( 5.4856e-13, 4.4201e-13, 5.2481e-13 ); 37 | vec3 pos = vec3( 1.6810e+06, 1.7953e+06, 2.2084e+06 ); 38 | vec3 var = vec3( 4.3278e+09, 9.3046e+09, 6.6121e+09 ); 39 | 40 | vec3 xyz = val * sqrt( 2.0 * PI * var ) * cos( pos * phase + shift ) * exp( - square( phase ) * var ); 41 | xyz.x += 9.7470e-14 * sqrt( 2.0 * PI * 4.5282e+09 ) * cos( 2.2399e+06 * phase + shift[ 0 ] ) * exp( - 4.5282e+09 * square( phase ) ); 42 | xyz /= 1.0685e-7; 43 | 44 | vec3 srgb = XYZ_TO_REC709 * xyz; 45 | return srgb; 46 | 47 | } 48 | 49 | // See Section 4. Analytic Spectral Integration, A Practical Extension to Microfacet Theory for the Modeling of Varying Iridescence, https://hal.archives-ouvertes.fr/hal-01518344/document 50 | vec3 evalIridescence( float outsideIOR, float eta2, float cosTheta1, float thinFilmThickness, vec3 baseF0 ) { 51 | 52 | vec3 I; 53 | 54 | // Force iridescenceIor -> outsideIOR when thinFilmThickness -> 0.0 55 | float iridescenceIor = mix( outsideIOR, eta2, smoothstep( 0.0, 0.03, thinFilmThickness ) ); 56 | 57 | // Evaluate the cosTheta on the base layer (Snell law) 58 | float sinTheta2Sq = square( outsideIOR / iridescenceIor ) * ( 1.0 - square( cosTheta1 ) ); 59 | 60 | // Handle TIR: 61 | float cosTheta2Sq = 1.0 - sinTheta2Sq; 62 | if ( cosTheta2Sq < 0.0 ) { 63 | 64 | return vec3( 1.0 ); 65 | 66 | } 67 | 68 | float cosTheta2 = sqrt( cosTheta2Sq ); 69 | 70 | // First interface 71 | float R0 = iorToFresnel0( iridescenceIor, outsideIOR ); 72 | float R12 = schlickFresnel( cosTheta1, R0 ); 73 | float R21 = R12; 74 | float T121 = 1.0 - R12; 75 | float phi12 = 0.0; 76 | if ( iridescenceIor < outsideIOR ) { 77 | 78 | phi12 = PI; 79 | 80 | } 81 | 82 | float phi21 = PI - phi12; 83 | 84 | // Second interface 85 | vec3 baseIOR = fresnel0ToIor( clamp( baseF0, 0.0, 0.9999 ) ); // 
guard against 1.0 86 | vec3 R1 = iorToFresnel0( baseIOR, iridescenceIor ); 87 | vec3 R23 = schlickFresnel( cosTheta2, R1 ); 88 | vec3 phi23 = vec3( 0.0 ); 89 | if ( baseIOR[0] < iridescenceIor ) { 90 | 91 | phi23[ 0 ] = PI; 92 | 93 | } 94 | 95 | if ( baseIOR[1] < iridescenceIor ) { 96 | 97 | phi23[ 1 ] = PI; 98 | 99 | } 100 | 101 | if ( baseIOR[2] < iridescenceIor ) { 102 | 103 | phi23[ 2 ] = PI; 104 | 105 | } 106 | 107 | // Phase shift 108 | float OPD = 2.0 * iridescenceIor * thinFilmThickness * cosTheta2; 109 | vec3 phi = vec3( phi21 ) + phi23; 110 | 111 | // Compound terms 112 | vec3 R123 = clamp( R12 * R23, 1e-5, 0.9999 ); 113 | vec3 r123 = sqrt( R123 ); 114 | vec3 Rs = square( T121 ) * R23 / ( vec3( 1.0 ) - R123 ); 115 | 116 | // Reflectance term for m = 0 (DC term amplitude) 117 | vec3 C0 = R12 + Rs; 118 | I = C0; 119 | 120 | // Reflectance term for m > 0 (pairs of diracs) 121 | vec3 Cm = Rs - T121; 122 | for ( int m = 1; m <= 2; ++ m ) { 123 | 124 | Cm *= r123; 125 | vec3 Sm = 2.0 * evalSensitivity( float( m ) * OPD, float( m ) * phi ); 126 | I += Cm * Sm; 127 | 128 | } 129 | 130 | // Since out of gamut colors might be produced, negative color values are clamped to 0. 
131 | return max( I, vec3( 0.0 ) ); 132 | 133 | } 134 | 135 | `; 136 | -------------------------------------------------------------------------------- /src/shader/bsdf/sheen_functions.glsl.js: -------------------------------------------------------------------------------- 1 | export const sheen_functions = /* glsl */` 2 | 3 | // See equation (2) in http://www.aconty.com/pdf/s2017_pbs_imageworks_sheen.pdf 4 | float velvetD( float cosThetaH, float roughness ) { 5 | 6 | float alpha = max( roughness, 0.07 ); 7 | alpha = alpha * alpha; 8 | 9 | float invAlpha = 1.0 / alpha; 10 | 11 | float sqrCosThetaH = cosThetaH * cosThetaH; 12 | float sinThetaH = max( 1.0 - sqrCosThetaH, 0.001 ); 13 | 14 | return ( 2.0 + invAlpha ) * pow( sinThetaH, 0.5 * invAlpha ) / ( 2.0 * PI ); 15 | 16 | } 17 | 18 | float velvetParamsInterpolate( int i, float oneMinusAlphaSquared ) { 19 | 20 | const float p0[5] = float[5]( 25.3245, 3.32435, 0.16801, -1.27393, -4.85967 ); 21 | const float p1[5] = float[5]( 21.5473, 3.82987, 0.19823, -1.97760, -4.32054 ); 22 | 23 | return mix( p1[i], p0[i], oneMinusAlphaSquared ); 24 | 25 | } 26 | 27 | float velvetL( float x, float alpha ) { 28 | 29 | float oneMinusAlpha = 1.0 - alpha; 30 | float oneMinusAlphaSquared = oneMinusAlpha * oneMinusAlpha; 31 | 32 | float a = velvetParamsInterpolate( 0, oneMinusAlphaSquared ); 33 | float b = velvetParamsInterpolate( 1, oneMinusAlphaSquared ); 34 | float c = velvetParamsInterpolate( 2, oneMinusAlphaSquared ); 35 | float d = velvetParamsInterpolate( 3, oneMinusAlphaSquared ); 36 | float e = velvetParamsInterpolate( 4, oneMinusAlphaSquared ); 37 | 38 | return a / ( 1.0 + b * pow( abs( x ), c ) ) + d * x + e; 39 | 40 | } 41 | 42 | // See equation (3) in http://www.aconty.com/pdf/s2017_pbs_imageworks_sheen.pdf 43 | float velvetLambda( float cosTheta, float alpha ) { 44 | 45 | return abs( cosTheta ) < 0.5 ? 
exp( velvetL( cosTheta, alpha ) ) : exp( 2.0 * velvetL( 0.5, alpha ) - velvetL( 1.0 - cosTheta, alpha ) ); 46 | 47 | } 48 | 49 | // See Section 3, Shadowing Term, in http://www.aconty.com/pdf/s2017_pbs_imageworks_sheen.pdf 50 | float velvetG( float cosThetaO, float cosThetaI, float roughness ) { 51 | 52 | float alpha = max( roughness, 0.07 ); 53 | alpha = alpha * alpha; 54 | 55 | return 1.0 / ( 1.0 + velvetLambda( cosThetaO, alpha ) + velvetLambda( cosThetaI, alpha ) ); 56 | 57 | } 58 | 59 | float directionalAlbedoSheen( float cosTheta, float alpha ) { 60 | 61 | cosTheta = saturate( cosTheta ); 62 | 63 | float c = 1.0 - cosTheta; 64 | float c3 = c * c * c; 65 | 66 | return 0.65584461 * c3 + 1.0 / ( 4.16526551 + exp( -7.97291361 * sqrt( alpha ) + 6.33516894 ) ); 67 | 68 | } 69 | 70 | float sheenAlbedoScaling( vec3 wo, vec3 wi, SurfaceRecord surf ) { 71 | 72 | float alpha = max( surf.sheenRoughness, 0.07 ); 73 | alpha = alpha * alpha; 74 | 75 | float maxSheenColor = max( max( surf.sheenColor.r, surf.sheenColor.g ), surf.sheenColor.b ); 76 | 77 | float eWo = directionalAlbedoSheen( saturateCos( wo.z ), alpha ); 78 | float eWi = directionalAlbedoSheen( saturateCos( wi.z ), alpha ); 79 | 80 | return min( 1.0 - maxSheenColor * eWo, 1.0 - maxSheenColor * eWi ); 81 | 82 | } 83 | 84 | // See Section 5, Layering, in http://www.aconty.com/pdf/s2017_pbs_imageworks_sheen.pdf 85 | float sheenAlbedoScaling( vec3 wo, SurfaceRecord surf ) { 86 | 87 | float alpha = max( surf.sheenRoughness, 0.07 ); 88 | alpha = alpha * alpha; 89 | 90 | float maxSheenColor = max( max( surf.sheenColor.r, surf.sheenColor.g ), surf.sheenColor.b ); 91 | 92 | float eWo = directionalAlbedoSheen( saturateCos( wo.z ), alpha ); 93 | 94 | return 1.0 - maxSheenColor * eWo; 95 | 96 | } 97 | 98 | `; 99 | -------------------------------------------------------------------------------- /src/shader/bvh/index.js: -------------------------------------------------------------------------------- 1 | export * from 
'./inside_fog_volume_function.glsl.js'; 2 | export * from './ray_any_hit_function.glsl.js'; 3 | -------------------------------------------------------------------------------- /src/shader/bvh/inside_fog_volume_function.glsl.js: -------------------------------------------------------------------------------- 1 | export const inside_fog_volume_function = /* glsl */` 2 | 3 | #ifndef FOG_CHECK_ITERATIONS 4 | #define FOG_CHECK_ITERATIONS 30 5 | #endif 6 | 7 | // returns whether the given material is a fog material or not 8 | bool isMaterialFogVolume( sampler2D materials, uint materialIndex ) { 9 | 10 | uint i = materialIndex * uint( MATERIAL_PIXELS ); 11 | vec4 s14 = texelFetch1D( materials, i + 14u ); 12 | return bool( int( s14.b ) & 4 ); 13 | 14 | } 15 | 16 | // returns true if we're within the first fog volume we hit 17 | bool bvhIntersectFogVolumeHit( 18 | vec3 rayOrigin, vec3 rayDirection, 19 | usampler2D materialIndexAttribute, sampler2D materials, 20 | inout Material material 21 | ) { 22 | 23 | material.fogVolume = false; 24 | 25 | for ( int i = 0; i < FOG_CHECK_ITERATIONS; i ++ ) { 26 | 27 | // find nearest hit 28 | uvec4 faceIndices = uvec4( 0u ); 29 | vec3 faceNormal = vec3( 0.0, 0.0, 1.0 ); 30 | vec3 barycoord = vec3( 0.0 ); 31 | float side = 1.0; 32 | float dist = 0.0; 33 | bool hit = bvhIntersectFirstHit( bvh, rayOrigin, rayDirection, faceIndices, faceNormal, barycoord, side, dist ); 34 | if ( hit ) { 35 | 36 | // if it's a fog volume return whether we hit the front or back face 37 | uint materialIndex = uTexelFetch1D( materialIndexAttribute, faceIndices.x ).r; 38 | if ( isMaterialFogVolume( materials, materialIndex ) ) { 39 | 40 | material = readMaterialInfo( materials, materialIndex ); 41 | return side == - 1.0; 42 | 43 | } else { 44 | 45 | // move the ray forward 46 | rayOrigin = stepRayOrigin( rayOrigin, rayDirection, - faceNormal, dist ); 47 | 48 | } 49 | 50 | } else { 51 | 52 | return false; 53 | 54 | } 55 | 56 | } 57 | 58 | return false; 59 | 60 | } 
61 | 62 | `; 63 | -------------------------------------------------------------------------------- /src/shader/bvh/ray_any_hit_function.glsl.js: -------------------------------------------------------------------------------- 1 | export const ray_any_hit_function = /* glsl */` 2 | 3 | bool bvhIntersectAnyHit( 4 | vec3 rayOrigin, vec3 rayDirection, 5 | 6 | // output variables 7 | inout float side, inout float dist 8 | ) { 9 | 10 | uvec4 faceIndices; 11 | vec3 faceNormal; 12 | vec3 barycoord; 13 | 14 | // stack needs to be twice as long as the deepest tree we expect because 15 | // we push both the left and right child onto the stack every traversal 16 | int ptr = 0; 17 | uint stack[ 60 ]; 18 | stack[ 0 ] = 0u; 19 | 20 | float triangleDistance = 1e20; 21 | while ( ptr > - 1 && ptr < 60 ) { 22 | 23 | uint currNodeIndex = stack[ ptr ]; 24 | ptr --; 25 | 26 | // check if we intersect the current bounds 27 | float boundsHitDistance = intersectsBVHNodeBounds( rayOrigin, rayDirection, bvh, currNodeIndex ); 28 | if ( boundsHitDistance == INFINITY ) { 29 | 30 | continue; 31 | 32 | } 33 | 34 | uvec2 boundsInfo = uTexelFetch1D( bvh.bvhContents, currNodeIndex ).xy; 35 | bool isLeaf = bool( boundsInfo.x & 0xffff0000u ); 36 | 37 | if ( isLeaf ) { 38 | 39 | uint count = boundsInfo.x & 0x0000ffffu; 40 | uint offset = boundsInfo.y; 41 | 42 | bool found = intersectTriangles( 43 | bvh, rayOrigin, rayDirection, offset, count, triangleDistance, 44 | faceIndices, faceNormal, barycoord, side, dist 45 | ); 46 | 47 | if ( found ) { 48 | 49 | return true; 50 | 51 | } 52 | 53 | } else { 54 | 55 | uint leftIndex = currNodeIndex + 1u; 56 | uint splitAxis = boundsInfo.x & 0x0000ffffu; 57 | uint rightIndex = boundsInfo.y; 58 | 59 | // set c2 in the stack so we traverse it later. We need to keep track of a pointer in 60 | // the stack while we traverse. 
The second pointer added is the one that will be 61 | // traversed first 62 | ptr ++; 63 | stack[ ptr ] = leftIndex; 64 | 65 | ptr ++; 66 | stack[ ptr ] = rightIndex; 67 | 68 | } 69 | 70 | } 71 | 72 | return false; 73 | 74 | } 75 | 76 | `; 77 | -------------------------------------------------------------------------------- /src/shader/common/fresnel_functions.glsl.js: -------------------------------------------------------------------------------- 1 | export const fresnel_functions = /* glsl */` 2 | 3 | bool totalInternalReflection( float cosTheta, float eta ) { 4 | 5 | float sinTheta = sqrt( 1.0 - cosTheta * cosTheta ); 6 | return eta * sinTheta > 1.0; 7 | 8 | } 9 | 10 | // https://google.github.io/filament/Filament.md.html#materialsystem/diffusebrdf 11 | float schlickFresnel( float cosine, float f0 ) { 12 | 13 | return f0 + ( 1.0 - f0 ) * pow( 1.0 - cosine, 5.0 ); 14 | 15 | } 16 | 17 | vec3 schlickFresnel( float cosine, vec3 f0 ) { 18 | 19 | return f0 + ( 1.0 - f0 ) * pow( 1.0 - cosine, 5.0 ); 20 | 21 | } 22 | 23 | vec3 schlickFresnel( float cosine, vec3 f0, vec3 f90 ) { 24 | 25 | return f0 + ( f90 - f0 ) * pow( 1.0 - cosine, 5.0 ); 26 | 27 | } 28 | 29 | float dielectricFresnel( float cosThetaI, float eta ) { 30 | 31 | // https://schuttejoe.github.io/post/disneybsdf/ 32 | float ni = eta; 33 | float nt = 1.0; 34 | 35 | // Check for total internal reflection 36 | float sinThetaISq = 1.0f - cosThetaI * cosThetaI; 37 | float sinThetaTSq = eta * eta * sinThetaISq; 38 | if( sinThetaTSq >= 1.0 ) { 39 | 40 | return 1.0; 41 | 42 | } 43 | 44 | float sinThetaT = sqrt( sinThetaTSq ); 45 | 46 | float cosThetaT = sqrt( max( 0.0, 1.0f - sinThetaT * sinThetaT ) ); 47 | float rParallel = ( ( nt * cosThetaI ) - ( ni * cosThetaT ) ) / ( ( nt * cosThetaI ) + ( ni * cosThetaT ) ); 48 | float rPerpendicular = ( ( ni * cosThetaI ) - ( nt * cosThetaT ) ) / ( ( ni * cosThetaI ) + ( nt * cosThetaT ) ); 49 | return ( rParallel * rParallel + rPerpendicular * rPerpendicular ) / 2.0; 50 | 51 
| } 52 | 53 | // https://raytracing.github.io/books/RayTracingInOneWeekend.html#dielectrics/schlickapproximation 54 | float iorRatioToF0( float eta ) { 55 | 56 | return pow( ( 1.0 - eta ) / ( 1.0 + eta ), 2.0 ); 57 | 58 | } 59 | 60 | vec3 evaluateFresnel( float cosTheta, float eta, vec3 f0, vec3 f90 ) { 61 | 62 | if ( totalInternalReflection( cosTheta, eta ) ) { 63 | 64 | return f90; 65 | 66 | } 67 | 68 | return schlickFresnel( cosTheta, f0, f90 ); 69 | 70 | } 71 | 72 | // TODO: disney fresnel was removed and replaced with this fresnel function to better align with 73 | // the glTF but is causing blown out pixels. Should be revisited 74 | // float evaluateFresnelWeight( float cosTheta, float eta, float f0 ) { 75 | 76 | // if ( totalInternalReflection( cosTheta, eta ) ) { 77 | 78 | // return 1.0; 79 | 80 | // } 81 | 82 | // return schlickFresnel( cosTheta, f0 ); 83 | 84 | // } 85 | 86 | // https://schuttejoe.github.io/post/disneybsdf/ 87 | float disneyFresnel( vec3 wo, vec3 wi, vec3 wh, float f0, float eta, float metalness ) { 88 | 89 | float dotHV = dot( wo, wh ); 90 | if ( totalInternalReflection( dotHV, eta ) ) { 91 | 92 | return 1.0; 93 | 94 | } 95 | 96 | float dotHL = dot( wi, wh ); 97 | float dielectricFresnel = dielectricFresnel( abs( dotHV ), eta ); 98 | float metallicFresnel = schlickFresnel( dotHL, f0 ); 99 | 100 | return mix( dielectricFresnel, metallicFresnel, metalness ); 101 | 102 | } 103 | 104 | `; 105 | -------------------------------------------------------------------------------- /src/shader/common/index.js: -------------------------------------------------------------------------------- 1 | export * from './fresnel_functions.glsl.js'; 2 | export * from './math_functions.glsl.js'; 3 | export * from './shape_intersection_functions.glsl.js'; 4 | export * from './texture_sample_functions.glsl.js'; 5 | export * from './util_functions.glsl.js'; 6 | -------------------------------------------------------------------------------- 
/src/shader/common/math_functions.glsl.js: -------------------------------------------------------------------------------- 1 | export const math_functions = /* glsl */` 2 | 3 | // Fast arccos approximation used to remove banding artifacts caused by numerical errors in acos. 4 | // This is a cubic Lagrange interpolating polynomial for x = [-1, -1/2, 0, 1/2, 1]. 5 | // For more information see: https://github.com/gkjohnson/three-gpu-pathtracer/pull/171#issuecomment-1152275248 6 | float acosApprox( float x ) { 7 | 8 | x = clamp( x, -1.0, 1.0 ); 9 | return ( - 0.69813170079773212 * x * x - 0.87266462599716477 ) * x + 1.5707963267948966; 10 | 11 | } 12 | 13 | // An acos with input values bound to the range [-1, 1]. 14 | float acosSafe( float x ) { 15 | 16 | return acos( clamp( x, -1.0, 1.0 ) ); 17 | 18 | } 19 | 20 | float saturateCos( float val ) { 21 | 22 | return clamp( val, 0.001, 1.0 ); 23 | 24 | } 25 | 26 | float square( float t ) { 27 | 28 | return t * t; 29 | 30 | } 31 | 32 | vec2 square( vec2 t ) { 33 | 34 | return t * t; 35 | 36 | } 37 | 38 | vec3 square( vec3 t ) { 39 | 40 | return t * t; 41 | 42 | } 43 | 44 | vec4 square( vec4 t ) { 45 | 46 | return t * t; 47 | 48 | } 49 | 50 | vec2 rotateVector( vec2 v, float t ) { 51 | 52 | float ac = cos( t ); 53 | float as = sin( t ); 54 | return vec2( 55 | v.x * ac - v.y * as, 56 | v.x * as + v.y * ac 57 | ); 58 | 59 | } 60 | 61 | // forms a basis with the normal vector as Z 62 | mat3 getBasisFromNormal( vec3 normal ) { 63 | 64 | vec3 other; 65 | if ( abs( normal.x ) > 0.5 ) { 66 | 67 | other = vec3( 0.0, 1.0, 0.0 ); 68 | 69 | } else { 70 | 71 | other = vec3( 1.0, 0.0, 0.0 ); 72 | 73 | } 74 | 75 | vec3 ortho = normalize( cross( normal, other ) ); 76 | vec3 ortho2 = normalize( cross( normal, ortho ) ); 77 | return mat3( ortho2, ortho, normal ); 78 | 79 | } 80 | 81 | `; 82 | -------------------------------------------------------------------------------- /src/shader/common/shape_intersection_functions.glsl.js: 
-------------------------------------------------------------------------------- 1 | export const shape_intersection_functions = /* glsl */` 2 | 3 | // Finds the point where the ray intersects the plane defined by u and v and checks if this point 4 | // falls in the bounds of the rectangle on that same plane. 5 | // Plane intersection: https://lousodrome.net/blog/light/2020/07/03/intersection-of-a-ray-and-a-plane/ 6 | bool intersectsRectangle( vec3 center, vec3 normal, vec3 u, vec3 v, vec3 rayOrigin, vec3 rayDirection, inout float dist ) { 7 | 8 | float t = dot( center - rayOrigin, normal ) / dot( rayDirection, normal ); 9 | 10 | if ( t > EPSILON ) { 11 | 12 | vec3 p = rayOrigin + rayDirection * t; 13 | vec3 vi = p - center; 14 | 15 | // check if p falls inside the rectangle 16 | float a1 = dot( u, vi ); 17 | if ( abs( a1 ) <= 0.5 ) { 18 | 19 | float a2 = dot( v, vi ); 20 | if ( abs( a2 ) <= 0.5 ) { 21 | 22 | dist = t; 23 | return true; 24 | 25 | } 26 | 27 | } 28 | 29 | } 30 | 31 | return false; 32 | 33 | } 34 | 35 | // Finds the point where the ray intersects the plane defined by u and v and checks if this point 36 | // falls in the bounds of the circle on that same plane. See above URL for a description of the plane intersection algorithm. 
37 | bool intersectsCircle( vec3 position, vec3 normal, vec3 u, vec3 v, vec3 rayOrigin, vec3 rayDirection, inout float dist ) { 38 | 39 | float t = dot( position - rayOrigin, normal ) / dot( rayDirection, normal ); 40 | 41 | if ( t > EPSILON ) { 42 | 43 | vec3 hit = rayOrigin + rayDirection * t; 44 | vec3 vi = hit - position; 45 | 46 | float a1 = dot( u, vi ); 47 | float a2 = dot( v, vi ); 48 | 49 | if( length( vec2( a1, a2 ) ) <= 0.5 ) { 50 | 51 | dist = t; 52 | return true; 53 | 54 | } 55 | 56 | } 57 | 58 | return false; 59 | 60 | } 61 | 62 | `; 63 | -------------------------------------------------------------------------------- /src/shader/common/texture_sample_functions.glsl.js: -------------------------------------------------------------------------------- 1 | 2 | export const texture_sample_functions = /*glsl */` 3 | 4 | // add texel fetch functions for texture arrays 5 | vec4 texelFetch1D( sampler2DArray tex, int layer, uint index ) { 6 | 7 | uint width = uint( textureSize( tex, 0 ).x ); 8 | uvec2 uv; 9 | uv.x = index % width; 10 | uv.y = index / width; 11 | 12 | return texelFetch( tex, ivec3( uv, layer ), 0 ); 13 | 14 | } 15 | 16 | vec4 textureSampleBarycoord( sampler2DArray tex, int layer, vec3 barycoord, uvec3 faceIndices ) { 17 | 18 | return 19 | barycoord.x * texelFetch1D( tex, layer, faceIndices.x ) + 20 | barycoord.y * texelFetch1D( tex, layer, faceIndices.y ) + 21 | barycoord.z * texelFetch1D( tex, layer, faceIndices.z ); 22 | 23 | } 24 | 25 | `; 26 | -------------------------------------------------------------------------------- /src/shader/common/util_functions.glsl.js: -------------------------------------------------------------------------------- 1 | export const util_functions = /* glsl */` 2 | 3 | // TODO: possibly this should be renamed something related to material or path tracing logic 4 | 5 | #ifndef RAY_OFFSET 6 | #define RAY_OFFSET 1e-4 7 | #endif 8 | 9 | // adjust the hit point by the surface normal by a factor of some offset and the 
// Miscellaneous GLSL math helpers shared by the path tracing shaders:
// self-intersection-safe ray offsetting, Beer-Lambert volume attenuation,
// reflection/transmission half vectors, normal-plane validity checks,
// equirect uv <-> direction mapping, the MIS power heuristic, and a tent
// filter for anti-aliasing jitter.
export const util_functions = /* glsl */`

// TODO: possibly this should be renamed something related to material or path tracing logic

#ifndef RAY_OFFSET
#define RAY_OFFSET 1e-4
#endif

// adjust the hit point by the surface normal by a factor of some offset and the
// maximum component-wise value of the current point to accommodate floating point
// error as values increase.
vec3 stepRayOrigin( vec3 rayOrigin, vec3 rayDirection, vec3 offset, float dist ) {

	vec3 point = rayOrigin + rayDirection * dist;
	vec3 absPoint = abs( point );
	float maxPoint = max( absPoint.x, max( absPoint.y, absPoint.z ) );
	return point + offset * ( maxPoint + 1.0 ) * RAY_OFFSET;

}

// https://github.com/KhronosGroup/glTF/blob/main/extensions/2.0/Khronos/KHR_materials_volume/README.md#attenuation
vec3 transmissionAttenuation( float dist, vec3 attColor, float attDist ) {

	vec3 ot = - log( attColor ) / attDist;
	return exp( - ot * dist );

}

vec3 getHalfVector( vec3 wi, vec3 wo, float eta ) {

	// get the half vector - assuming if the light incident vector is on the other side
	// of the that it's transmissive.
	vec3 h;
	if ( wi.z > 0.0 ) {

		h = normalize( wi + wo );

	} else {

		// Scale by the ior ratio to retrieve the appropriate half vector
		// From Section 2.2 on computing the transmission half vector:
		// https://blog.selfshadow.com/publications/s2015-shading-course/burley/s2015_pbs_disney_bsdf_notes.pdf
		h = normalize( wi + wo * eta );

	}

	h *= sign( h.z );
	return h;

}

vec3 getHalfVector( vec3 a, vec3 b ) {

	return normalize( a + b );

}

// The discrepancy between interpolated surface normal and geometry normal can cause issues when a ray
// is cast that is on the top side of the geometry normal plane but below the surface normal plane. If
// we find a ray like that we ignore it to avoid artifacts.
// This function returns if the direction is on the same side of both planes.
bool isDirectionValid( vec3 direction, vec3 surfaceNormal, vec3 geometryNormal ) {

	bool aboveSurfaceNormal = dot( direction, surfaceNormal ) > 0.0;
	bool aboveGeometryNormal = dot( direction, geometryNormal ) > 0.0;
	return aboveSurfaceNormal == aboveGeometryNormal;

}

// ray sampling x and z are swapped to align with expected background view
vec2 equirectDirectionToUv( vec3 direction ) {

	// from Spherical.setFromCartesianCoords
	vec2 uv = vec2( atan( direction.z, direction.x ), acos( direction.y ) );
	uv /= vec2( 2.0 * PI, PI );

	// apply adjustments to get values in range [0, 1] and y right side up
	uv.x += 0.5;
	uv.y = 1.0 - uv.y;
	return uv;

}

vec3 equirectUvToDirection( vec2 uv ) {

	// undo above adjustments
	uv.x -= 0.5;
	uv.y = 1.0 - uv.y;

	// from Vector3.setFromSphericalCoords
	float theta = uv.x * 2.0 * PI;
	float phi = uv.y * PI;

	float sinPhi = sin( phi );

	return vec3( sinPhi * cos( theta ), cos( phi ), sinPhi * sin( theta ) );

}

// power heuristic for multiple importance sampling
float misHeuristic( float a, float b ) {

	float aa = a * a;
	float bb = b * b;
	return aa / ( aa + bb );

}

// tentFilter from Peter Shirley's 'Realistic Ray Tracing (2nd Edition)' book, pg. 60
// erichlof/THREE.js-PathTracing-Renderer/
float tentFilter( float x ) {

	return x < 0.5 ?
		sqrt( 2.0 * x ) - 1.0 :
		1.0 - sqrt( 2.0 - ( 2.0 * x ) );

}
`;
// PCG4D-based white noise for the shaders. rng_initialize seeds a global
// uvec4 from the pixel coordinate and frame index; the pcgRand* functions
// then advance the state and return uniform values in [0, 1].
export const pcg_functions = /* glsl */`

// https://www.shadertoy.com/view/wltcRS
uvec4 WHITE_NOISE_SEED;

void rng_initialize( vec2 p, int frame ) {

	// white noise seed
	WHITE_NOISE_SEED = uvec4( p, uint( frame ), uint( p.x ) + uint( p.y ) );

}

// https://www.pcg-random.org/
void pcg4d( inout uvec4 v ) {

	v = v * 1664525u + 1013904223u;
	v.x += v.y * v.w;
	v.y += v.z * v.x;
	v.z += v.x * v.y;
	v.w += v.y * v.z;
	v = v ^ ( v >> 16u );
	v.x += v.y*v.w;
	v.y += v.z*v.x;
	v.z += v.x*v.y;
	v.w += v.y*v.z;

}

// returns [ 0, 1 ]
float pcgRand() {

	pcg4d( WHITE_NOISE_SEED );
	return float( WHITE_NOISE_SEED.x ) / float( 0xffffffffu );

}

vec2 pcgRand2() {

	pcg4d( WHITE_NOISE_SEED );
	return vec2( WHITE_NOISE_SEED.xy ) / float(0xffffffffu);

}

vec3 pcgRand3() {

	pcg4d( WHITE_NOISE_SEED );
	return vec3( WHITE_NOISE_SEED.xyz ) / float( 0xffffffffu );

}

vec4 pcgRand4() {

	pcg4d( WHITE_NOISE_SEED );
	return vec4( WHITE_NOISE_SEED ) / float( 0xffffffffu );

}
`;
// Stratified sampling helpers: rand*( v ) reads precomputed stratified
// sample sets from stratifiedTexture (indexed by sample dimension and bounce)
// and decorrelates neighboring pixels by adding a per-pixel blue noise offset
// fetched in rng_initialize from stratifiedOffsetTexture.
export const stratified_functions = /* glsl */`

uniform sampler2D stratifiedTexture;
uniform sampler2D stratifiedOffsetTexture;

uint sobolPixelIndex = 0u;
uint sobolPathIndex = 0u;
uint sobolBounceIndex = 0u;
vec4 pixelSeed = vec4( 0 );

vec4 rand4( int v ) {

	ivec2 uv = ivec2( v, sobolBounceIndex );
	vec4 stratifiedSample = texelFetch( stratifiedTexture, uv, 0 );
	return fract( stratifiedSample + pixelSeed.r ); // blue noise + stratified samples

}

vec3 rand3( int v ) {

	return rand4( v ).xyz;

}

vec2 rand2( int v ) {

	return rand4( v ).xy;

}

float rand( int v ) {

	return rand4( v ).x;

}

void rng_initialize( vec2 screenCoord, int frame ) {

	// tile the small noise texture across the entire screen
	ivec2 noiseSize = ivec2( textureSize( stratifiedOffsetTexture, 0 ) );
	ivec2 pixel = ivec2( screenCoord.xy ) % noiseSize;
	vec2 pixelWidth = 1.0 / vec2( noiseSize );
	vec2 uv = vec2( pixel ) * pixelWidth + pixelWidth * 0.5;

	// note that using "texelFetch" here seems to break Android for some reason
	pixelSeed = texture( stratifiedOffsetTexture, uv );

}

`;
// Equirectangular environment map importance sampling: direct color lookup,
// the solid-angle pdf of a direction, pdf evaluation for a known direction,
// and CDF-based sampling of a new direction via the marginal/conditional
// weight textures stored on envMapInfo.
export const equirect_functions = /* glsl */`

// samples the the given environment map in the given direction
vec3 sampleEquirectColor( sampler2D envMap, vec3 direction ) {

	return texture2D( envMap, equirectDirectionToUv( direction ) ).rgb;

}

// gets the pdf of the given direction to sample
float equirectDirectionPdf( vec3 direction ) {

	vec2 uv = equirectDirectionToUv( direction );
	float theta = uv.y * PI;
	float sinTheta = sin( theta );
	if ( sinTheta == 0.0 ) {

		return 0.0;

	}

	return 1.0 / ( 2.0 * PI * PI * sinTheta );

}

// samples the color given env map with CDF and returns the pdf of the direction
float sampleEquirect( vec3 direction, inout vec3 color ) {

	float totalSum = envMapInfo.totalSum;
	if ( totalSum == 0.0 ) {

		color = vec3( 0.0 );
		return 1.0;

	}

	vec2 uv = equirectDirectionToUv( direction );
	color = texture2D( envMapInfo.map, uv ).rgb;

	float lum = luminance( color );
	ivec2 resolution = textureSize( envMapInfo.map, 0 );
	float pdf = lum / totalSum;

	return float( resolution.x * resolution.y ) * pdf * equirectDirectionPdf( direction );

}

// samples a direction of the envmap with color and retrieves pdf
float sampleEquirectProbability( vec2 r, inout vec3 color, inout vec3 direction ) {

	// sample env map cdf
	float v = texture2D( envMapInfo.marginalWeights, vec2( r.x, 0.0 ) ).x;
	float u = texture2D( envMapInfo.conditionalWeights, vec2( r.y, v ) ).x;
	vec2 uv = vec2( u, v );

	vec3 derivedDirection = equirectUvToDirection( uv );
	direction = derivedDirection;
	color = texture2D( envMapInfo.map, uv ).rgb;

	float totalSum = envMapInfo.totalSum;
	float lum = luminance( color );
	ivec2 resolution = textureSize( envMapInfo.map, 0 );
	float pdf = lum / totalSum;

	return float( resolution.x * resolution.y ) * pdf * equirectDirectionPdf( direction );

}
`;
// Geometric shape sampling used for light and aperture sampling: cosine
// weighted hemisphere directions, uniform points on triangles / circles /
// spheres, regular polygons (via triangle fans), and camera aperture shapes.
export const shape_sampling_functions = /* glsl */`

vec3 sampleHemisphere( vec3 n, vec2 uv ) {

	// https://www.rorydriscoll.com/2009/01/07/better-sampling/
	// https://graphics.pixar.com/library/OrthonormalB/paper.pdf
	float sign = n.z == 0.0 ? 1.0 : sign( n.z );
	float a = - 1.0 / ( sign + n.z );
	float b = n.x * n.y * a;
	vec3 b1 = vec3( 1.0 + sign * n.x * n.x * a, sign * b, - sign * n.x );
	vec3 b2 = vec3( b, sign + n.y * n.y * a, - n.y );

	float r = sqrt( uv.x );
	float theta = 2.0 * PI * uv.y;
	float x = r * cos( theta );
	float y = r * sin( theta );
	return x * b1 + y * b2 + sqrt( 1.0 - uv.x ) * n;

}

vec2 sampleTriangle( vec2 a, vec2 b, vec2 c, vec2 r ) {

	// get the edges of the triangle and the diagonal across the
	// center of the parallelogram
	vec2 e1 = a - b;
	vec2 e2 = c - b;
	vec2 diag = normalize( e1 + e2 );

	// pick the point in the parallelogram
	if ( r.x + r.y > 1.0 ) {

		r = vec2( 1.0 ) - r;

	}

	return e1 * r.x + e2 * r.y;

}

vec2 sampleCircle( vec2 uv ) {

	float angle = 2.0 * PI * uv.x;
	float radius = sqrt( uv.y );
	return vec2( cos( angle ), sin( angle ) ) * radius;

}

vec3 sampleSphere( vec2 uv ) {

	float u = ( uv.x - 0.5 ) * 2.0;
	float t = uv.y * PI * 2.0;
	float f = sqrt( 1.0 - u * u );

	return vec3( f * cos( t ), f * sin( t ), u );

}

vec2 sampleRegularPolygon( int sides, vec3 uvw ) {

	sides = max( sides, 3 );

	vec3 r = uvw;
	float anglePerSegment = 2.0 * PI / float( sides );
	float segment = floor( float( sides ) * r.x );

	float angle1 = anglePerSegment * segment;
	float angle2 = angle1 + anglePerSegment;
	vec2 a = vec2( sin( angle1 ), cos( angle1 ) );
	vec2 b = vec2( 0.0, 0.0 );
	vec2 c = vec2( sin( angle2 ), cos( angle2 ) );

	return sampleTriangle( a, b, c, r.yz );

}

// samples an aperture shape with the given number of sides. 0 means circle
vec2 sampleAperture( int blades, vec3 uvw ) {

	return blades == 0 ?
		sampleCircle( uvw.xy ) :
		sampleRegularPolygon( blades, uvw );

}


`;
// Light type constants, the Light / LightsInfo structs, and the unpacking
// routine that reads one light's fields from 6 consecutive RGBA texels of the
// packed light data texture. Spot/point-only fields (texels 4-5) are zeroed
// for the other light types.
// NOTE(review): Light.near is declared but never populated by readLightInfo -
// presumably written elsewhere or unused; verify against the light uniform
// packing code.
export const lights_struct = /* glsl */`

#define RECT_AREA_LIGHT_TYPE 0
#define CIRC_AREA_LIGHT_TYPE 1
#define SPOT_LIGHT_TYPE 2
#define DIR_LIGHT_TYPE 3
#define POINT_LIGHT_TYPE 4

struct LightsInfo {

	sampler2D tex;
	uint count;

};

struct Light {

	vec3 position;
	int type;

	vec3 color;
	float intensity;

	vec3 u;
	vec3 v;
	float area;

	// spot light fields
	float radius;
	float near;
	float decay;
	float distance;
	float coneCos;
	float penumbraCos;
	int iesProfile;

};

Light readLightInfo( sampler2D tex, uint index ) {

	uint i = index * 6u;

	vec4 s0 = texelFetch1D( tex, i + 0u );
	vec4 s1 = texelFetch1D( tex, i + 1u );
	vec4 s2 = texelFetch1D( tex, i + 2u );
	vec4 s3 = texelFetch1D( tex, i + 3u );

	Light l;
	l.position = s0.rgb;
	l.type = int( round( s0.a ) );

	l.color = s1.rgb;
	l.intensity = s1.a;

	l.u = s2.rgb;
	l.v = s3.rgb;
	l.area = s3.a;

	if ( l.type == SPOT_LIGHT_TYPE || l.type == POINT_LIGHT_TYPE ) {

		vec4 s4 = texelFetch1D( tex, i + 4u );
		vec4 s5 = texelFetch1D( tex, i + 5u );
		l.radius = s4.r;
		l.decay = s4.g;
		l.distance = s4.b;
		l.coneCos = s4.a;

		l.penumbraCos = s5.r;
		l.iesProfile = int( round( s5.g ) );

	} else {

		l.radius = 0.0;
		l.decay = 0.0;
		l.distance = 0.0;

		l.coneCos = 0.0;
		l.penumbraCos = 0.0;
		l.iesProfile = - 1;

	}

	return l;

}

`;
import { DataTexture, FloatType, NearestFilter, RGBAFormat, RGFormat, RedFormat } from 'three';
import { BlueNoiseGenerator } from './blueNoise/BlueNoiseGenerator.js';

// Number of float components stored per texel for the requested channel
// count; 3-channel data is padded out to 4 components.
function getStride( channels ) {

	if ( channels >= 3 ) {

		return 4;

	} else {

		return channels;

	}

}

// Texture format for a given component count. Note the caller passes the
// padded stride (not the raw channel count), so 3 channels resolve to RGBA.
function getFormat( channels ) {

	switch ( channels ) {

		case 1:
			return RedFormat;
		case 2:
			return RGFormat;
		default:
			return RGBAFormat;

	}

}

// A square float DataTexture filled with "channels" independently generated
// blue noise masks, one per color component, normalized to [0, 1].
export class BlueNoiseTexture extends DataTexture {

	constructor( size = 64, channels = 1 ) {

		// start from a placeholder 1x1 texture - update() reallocates it below
		super( new Float32Array( 4 ), 1, 1, RGBAFormat, FloatType );
		this.minFilter = NearestFilter;
		this.magFilter = NearestFilter;

		this.size = size;
		this.channels = channels;
		this.update();

	}

	// Regenerates the noise, reallocating the backing buffer when the size or
	// format changed since the last update.
	update() {

		const channels = this.channels;
		const size = this.size;
		const generator = new BlueNoiseGenerator();
		generator.channels = channels;
		generator.size = size;

		const stride = getStride( channels );
		const format = getFormat( stride );
		if ( this.image.width !== size || format !== this.format ) {

			this.image.width = size;
			this.image.height = size;
			this.image.data = new Float32Array( ( size ** 2 ) * stride );
			this.format = format;
			this.dispose();

		}

		const data = this.image.data;
		for ( let i = 0, l = channels; i < l; i ++ ) {

			// each generate() call produces one full rank mask; normalize the
			// integer ranks into [0, 1] floats for this channel
			const result = generator.generate();
			const bin = result.data;
			const maxValue = result.maxValue;

			for ( let j = 0, l2 = bin.length; j < l2; j ++ ) {

				const value = bin[ j ] / maxValue;
				data[ j * stride + i ] = value;

			}

		}

		this.needsUpdate = true;

	}

}
import {
	ClampToEdgeWrapping,
	Color,
	DataTexture,
	EquirectangularReflectionMapping,
	LinearFilter,
	RepeatWrapping,
	RGBAFormat,
	Spherical,
	Vector2,
	FloatType
} from 'three';

// scratch objects reused for every pixel to avoid per-pixel allocation
const _uv = new Vector2();
const _coord = new Vector2();
const _polar = new Spherical();
const _color = new Color();

// A float RGBA equirectangular DataTexture whose texels are produced by a
// user-supplied "generationCallback( polar, uv, coord, color )" invoked once
// per pixel. The callback writes the output color for the given spherical
// direction, uv, and pixel coordinate.
export class ProceduralEquirectTexture extends DataTexture {

	constructor( width = 512, height = 512 ) {

		super(
			new Float32Array( width * height * 4 ),
			width, height, RGBAFormat, FloatType, EquirectangularReflectionMapping,
			RepeatWrapping, ClampToEdgeWrapping, LinearFilter, LinearFilter,
		);

		// callback invoked per pixel by update(); must be assigned before use
		this.generationCallback = null;

	}

	// Fills the texture by evaluating generationCallback for every pixel.
	update() {

		this.dispose();
		this.needsUpdate = true;

		const { data, width, height } = this.image;
		for ( let x = 0; x < width; x ++ ) {

			for ( let y = 0; y < height; y ++ ) {

				// FIX: pass the current pixel coordinate to the callback.
				// Previously this set the constant texture dimensions
				// ( width, height ) for every pixel, making the "coord"
				// argument useless to callbacks.
				_coord.set( x, y );

				// uv adjusted so the image is right side up and centered
				_uv.set( x / width, y / height );
				_uv.x -= 0.5;
				_uv.y = 1.0 - _uv.y;

				// spherical direction corresponding to this equirect uv
				_polar.theta = _uv.x * 2.0 * Math.PI;
				_polar.phi = _uv.y * Math.PI;
				_polar.radius = 1.0;

				this.generationCallback( _polar, _uv, _coord, _color );

				// write the resulting color with alpha fixed at 1
				const i = y * width + x;
				const i4 = 4 * i;
				data[ i4 + 0 ] = ( _color.r );
				data[ i4 + 1 ] = ( _color.g );
				data[ i4 + 2 ] = ( _color.b );
				data[ i4 + 3 ] = ( 1.0 );

			}

		}

	}

	copy( other ) {

		super.copy( other );
		this.generationCallback = other.generationCallback;
		return this;

	}

}
import { shuffleArray, fillWithOnes } from './utils.js';
import { BlueNoiseSamples } from './BlueNoiseSamples.js';

// Generates a square blue noise dither mask with the void-and-cluster
// method. generate() returns { data, maxValue } where data holds each
// pixel's rank in [0, size * size) and maxValue is size * size.
export class BlueNoiseGenerator {

	constructor() {

		// injectable randomness source (swap in a seeded RNG for determinism)
		this.random = Math.random;
		// gaussian falloff used when scoring clusters and voids
		this.sigma = 1.5;
		// width / height of the generated square mask
		this.size = 64;
		// fraction of pixels seeded as initial minority points
		this.majorityPointsRatio = 0.1;

		this.samples = new BlueNoiseSamples( 1 );
		this.savedSamples = new BlueNoiseSamples( 1 );

	}

	generate() {

		// http://cv.ulichney.com/papers/1993-void-cluster.pdf

		const {
			samples,
			savedSamples,
			sigma,
			majorityPointsRatio,
			size,
		} = this;

		samples.resize( size );
		samples.setSigma( sigma );

		// 1. Randomly place the minority points.
		const pointCount = Math.floor( size * size * majorityPointsRatio );
		const initialSamples = samples.binaryPattern;

		fillWithOnes( initialSamples, pointCount );
		shuffleArray( initialSamples, this.random );

		for ( let i = 0, l = initialSamples.length; i < l; i ++ ) {

			if ( initialSamples[ i ] === 1 ) {

				samples.addPointIndex( i );

			}

		}

		// 2. Remove minority point that is in densest cluster and place it in the largest void.
		// Loop terminates when the tightest cluster and largest void coincide.
		while ( true ) {

			const clusterIndex = samples.findCluster();
			samples.removePointIndex( clusterIndex );

			const voidIndex = samples.findVoid();
			if ( clusterIndex === voidIndex ) {

				samples.addPointIndex( clusterIndex );
				break;

			}

			samples.addPointIndex( voidIndex );

		}

		// 3. PHASE I: Assign a rank to each progressively less dense cluster point and put it
		// in the dither array.
		const ditherArray = new Uint32Array( size * size );
		savedSamples.copy( samples );

		let rank;
		rank = samples.count - 1;
		while ( rank >= 0 ) {

			const clusterIndex = samples.findCluster();
			samples.removePointIndex( clusterIndex );

			ditherArray[ clusterIndex ] = rank;
			rank --;

		}

		// 4. PHASE II: Do the same thing for the largest voids up to half of the total pixels using
		// the initial binary pattern.
		const totalSize = size * size;
		rank = savedSamples.count;
		while ( rank < totalSize / 2 ) {

			const voidIndex = savedSamples.findVoid();
			savedSamples.addPointIndex( voidIndex );
			ditherArray[ voidIndex ] = rank;
			rank ++;

		}

		// 5. PHASE III: Invert the pattern and finish out by assigning a rank to the remaining
		// and iteratively removing them.
		savedSamples.invert();

		while ( rank < totalSize ) {

			const clusterIndex = savedSamples.findCluster();
			savedSamples.removePointIndex( clusterIndex );
			ditherArray[ clusterIndex ] = rank;
			rank ++;

		}

		return { data: ditherArray, maxValue: totalSize };

	}

}
// Helper state for the void-and-cluster algorithm: a binary pattern of
// minority points plus a gaussian-blurred "score" field used to locate the
// tightest cluster (highest score among set pixels) and the largest void
// (lowest score among empty pixels). Score updates wrap toroidally so the
// resulting noise tiles seamlessly.
export class BlueNoiseSamples {

	constructor( size ) {

		this.count = 0;
		this.size = - 1;
		this.sigma = - 1;
		this.radius = - 1;
		this.lookupTable = null;
		this.score = null;
		this.binaryPattern = null;

		this.resize( size );
		this.setSigma( 1.5 );

	}

	// Index of the empty pixel with the lowest score (the largest void).
	findVoid() {

		const { score, binaryPattern } = this;

		let currValue = Infinity;
		let currIndex = - 1;
		for ( let i = 0, l = binaryPattern.length; i < l; i ++ ) {

			if ( binaryPattern[ i ] !== 0 ) {

				continue;

			}

			const pScore = score[ i ];
			if ( pScore < currValue ) {

				currValue = pScore;
				currIndex = i;

			}

		}

		return currIndex;

	}

	// Index of the set pixel with the highest score (the densest cluster).
	findCluster() {

		const { score, binaryPattern } = this;

		let currValue = - Infinity;
		let currIndex = - 1;
		for ( let i = 0, l = binaryPattern.length; i < l; i ++ ) {

			if ( binaryPattern[ i ] !== 1 ) {

				continue;

			}

			const pScore = score[ i ];
			if ( pScore > currValue ) {

				currValue = pScore;
				currIndex = i;

			}

		}

		return currIndex;

	}

	// Rebuilds the gaussian falloff lookup table for the given sigma.
	setSigma( sigma ) {

		if ( sigma === this.sigma ) {

			return;

		}

		// generate a radius in which the score will be updated under the
		// assumption that e^-10 is insignificant enough to be the border at
		// which we drop off.
		const radius = ~ ~ ( Math.sqrt( 10 * 2 * ( sigma ** 2 ) ) + 1 );
		const lookupWidth = 2 * radius + 1;
		const lookupTable = new Float32Array( lookupWidth * lookupWidth );
		const sigma2 = sigma * sigma;
		for ( let x = - radius; x <= radius; x ++ ) {

			for ( let y = - radius; y <= radius; y ++ ) {

				const index = ( radius + y ) * lookupWidth + x + radius;
				const dist2 = x * x + y * y;
				lookupTable[ index ] = Math.E ** ( - dist2 / ( 2 * sigma2 ) );

			}

		}

		this.lookupTable = lookupTable;
		this.sigma = sigma;
		this.radius = radius;

	}

	// Reallocates the score and pattern buffers for a new edge length.
	// NOTE: buffers are not cleared when the size is unchanged.
	resize( size ) {

		if ( this.size !== size ) {

			this.size = size;
			this.score = new Float32Array( size * size );
			this.binaryPattern = new Uint8Array( size * size );

		}


	}

	// Flips every pixel of the binary pattern and rebuilds the score field
	// from scratch for the newly set pixels.
	invert() {

		const { binaryPattern, score, size } = this;

		score.fill( 0 );

		for ( let i = 0, l = binaryPattern.length; i < l; i ++ ) {

			if ( binaryPattern[ i ] === 0 ) {

				const y = ~ ~ ( i / size );
				const x = i - y * size;
				this.updateScore( x, y, 1 );
				binaryPattern[ i ] = 1;

			} else {

				binaryPattern[ i ] = 0;

			}

		}

	}

	// Adds ( multiplier * gaussian falloff ) to the score of every pixel in a
	// wrapping window around ( x, y ). multiplier is +1 when adding a point
	// and -1 when removing one.
	updateScore( x, y, multiplier ) {

		// TODO: Is there a way to keep track of the highest and lowest scores here to avoid have to search over
		// everything in the buffer?
		const { size, score, lookupTable } = this;

		// const sigma2 = sigma * sigma;
		// const radius = Math.floor( size / 2 );
		const radius = this.radius;
		const lookupWidth = 2 * radius + 1;
		for ( let px = - radius; px <= radius; px ++ ) {

			for ( let py = - radius; py <= radius; py ++ ) {

				// const dist2 = px * px + py * py;
				// const value = Math.E ** ( - dist2 / ( 2 * sigma2 ) );

				const lookupIndex = ( radius + py ) * lookupWidth + px + radius;
				const value = lookupTable[ lookupIndex ];

				// wrap sample coordinates toroidally
				let sx = ( x + px );
				sx = sx < 0 ? size + sx : sx % size;

				let sy = ( y + py );
				sy = sy < 0 ? size + sy : sy % size;

				const sindex = sy * size + sx;
				score[ sindex ] += multiplier * value;

			}

		}

	}

	// Sets the pixel at the flat index and folds it into the score field.
	addPointIndex( index ) {

		this.binaryPattern[ index ] = 1;

		const size = this.size;
		const y = ~ ~ ( index / size );
		const x = index - y * size;
		this.updateScore( x, y, 1 );
		this.count ++;

	}

	// Clears the pixel at the flat index and removes it from the score field.
	removePointIndex( index ) {

		this.binaryPattern[ index ] = 0;

		const size = this.size;
		const y = ~ ~ ( index / size );
		const x = index - y * size;
		this.updateScore( x, y, - 1 );
		this.count --;

	}

	// Deep-copies another sample set's state into this one.
	copy( source ) {

		this.resize( source.size );
		this.score.set( source.score );
		this.binaryPattern.set( source.binaryPattern );
		this.setSigma( source.sigma );
		this.count = source.count;

	}

}
// Shuffles "array" in place. "random" is an injectable source of uniform
// values in [0, 1) so callers can supply a seeded generator for
// deterministic output.
//
// FIX: the previous implementation drew the swap index from [0, i - 1],
// never allowing an element to remain in place (Sattolo's variant), which
// only produces cyclic permutations and is therefore a biased shuffle. The
// index is now drawn from the inclusive range [0, i] (unbiased
// Fisher-Yates).
export function shuffleArray( array, random = Math.random ) {

	for ( let i = array.length - 1; i > 0; i -- ) {

		// clamp to "i" in case a custom "random" returns exactly 1.0
		const replaceIndex = Math.min( i, Math.floor( random() * ( i + 1 ) ) );
		const tmp = array[ i ];
		array[ i ] = array[ replaceIndex ];
		array[ replaceIndex ] = tmp;

	}

}

// Resets "array" to zeros and sets its first "count" entries to one.
export function fillWithOnes( array, count ) {

	array.fill( 0 );

	for ( let i = 0; i < count; i ++ ) {

		array[ i ] = 1;

	}

}
// Copies vertex data from "fromArray" (with "fromStride" components per
// element) into "toArray" starting at element offset "offset", writing
// "toStride" components per element and zero-filling any destination
// components the source does not provide. Integer typed arrays are treated
// as normalized and scaled into [0, 1] (unsigned) or [-1, 1] (signed).
//
// FIX: the destination index previously hard-coded a stride of 4
// ("const i4 = 4 * i") while the inner loop honored "toStride", producing an
// incorrect layout for any toStride other than 4. The destination index is
// now derived from "toStride". A message was also added to the
// stride-mismatch error.
function copyArrayToArray( fromArray, fromStride, toArray, toStride, offset ) {

	if ( fromStride > toStride ) {

		throw new Error( 'copyArrayToArray: fromStride must be less than or equal to toStride.' );

	}

	// scale non-float values to their normalized range
	const count = fromArray.length / fromStride;
	const bpe = fromArray.constructor.BYTES_PER_ELEMENT * 8;
	let maxValue = 1.0;
	switch ( fromArray.constructor ) {

		case Uint8Array:
		case Uint16Array:
		case Uint32Array:
			maxValue = 2 ** bpe - 1;
			break;

		case Int8Array:
		case Int16Array:
		case Int32Array:
			maxValue = 2 ** ( bpe - 1 ) - 1;
			break;

	}

	for ( let i = 0; i < count; i ++ ) {

		const it = toStride * i;
		const is = fromStride * i;
		for ( let j = 0; j < toStride; j ++ ) {

			// zero-fill components beyond the source stride
			toArray[ offset + it + j ] = fromStride >= j + 1 ? fromArray[ is + j ] / maxValue : 0;

		}

	}

}
// A float RGBA DataArrayTexture that stores one vertex attribute per layer,
// built from FloatVertexAttributeTexture instances. Vec3 attributes are
// padded to 4 components; integer attributes are normalized on copy.
export class FloatAttributeTextureArray extends DataArrayTexture {

	constructor() {

		super();
		this._textures = [];
		this.type = FloatType;
		this.format = RGBAFormat;
		this.internalFormat = 'RGBA32F';

	}

	// Re-uploads a single layer from the given attribute. The attribute must
	// produce the same texture dimensions as the existing array.
	updateAttribute( index, attr ) {

		// update the texture
		const tex = this._textures[ index ];
		tex.updateFrom( attr );

		// ensure compatibility
		const baseImage = tex.image;
		const image = this.image;
		if ( baseImage.width !== image.width || baseImage.height !== image.height ) {

			throw new Error( 'FloatAttributeTextureArray: Attribute must be the same dimensions when updating single layer.' );

		}

		// update the image
		const { width, height, data } = image;
		const length = width * height * 4;
		const offset = length * index;
		let itemSize = attr.itemSize;
		if ( itemSize === 3 ) {

			// vec3 data is stored padded out to vec4 texels
			itemSize = 4;

		}

		// copy the data
		copyArrayToArray( tex.image.data, itemSize, data, 4, offset );

		this.dispose();
		this.needsUpdate = true;

	}

	// Rebuilds the whole array texture from a list of attributes, one layer
	// per attribute. All attributes must share the same item count.
	setAttributes( attrs ) {

		// ensure the attribute count
		const itemCount = attrs[ 0 ].count;
		const attrsLength = attrs.length;
		for ( let i = 0, l = attrsLength; i < l; i ++ ) {

			if ( attrs[ i ].count !== itemCount ) {

				throw new Error( 'FloatAttributeTextureArray: All attributes must have the same item count.' );

			}

		}

		// initialize all textures
		const textures = this._textures;
		while ( textures.length < attrsLength ) {

			const tex = new FloatVertexAttributeTexture();
			textures.push( tex );

		}

		while ( textures.length > attrsLength ) {

			textures.pop();

		}

		// update all textures
		for ( let i = 0, l = attrsLength; i < l; i ++ ) {

			textures[ i ].updateFrom( attrs[ i ] );

		}

		// determine if we need to create a new array
		const baseTexture = textures[ 0 ];
		const baseImage = baseTexture.image;
		const image = this.image;

		if ( baseImage.width !== image.width || baseImage.height !== image.height || baseImage.depth !== attrsLength ) {

			image.width = baseImage.width;
			image.height = baseImage.height;
			image.depth = attrsLength;
			image.data = new Float32Array( image.width * image.height * image.depth * 4 );

		}

		// copy the other texture data into the data array texture
		const { data, width, height } = image;
		for ( let i = 0, l = attrsLength; i < l; i ++ ) {

			const tex = textures[ i ];
			const length = width * height * 4;
			const offset = length * i;

			let itemSize = attrs[ i ].itemSize;
			if ( itemSize === 3 ) {

				itemSize = 4;

			}

			copyArrayToArray( tex.image.data, itemSize, data, 4, offset );

		}

		// reset the texture
		this.dispose();
		this.needsUpdate = true;

	}


}
import { PhysicalCamera } from '../objects/PhysicalCamera.js';

// Plain uniform-value mirror of the depth-of-field parameters exposed by
// PhysicalCamera. Any non-physical camera resets the fields to the defaults,
// which effectively disable the bokeh effect.
export class PhysicalCameraUniform {

	constructor() {

		// initialize with the non-PhysicalCamera defaults
		this.updateFrom( null );

	}

	// Copies the DOF fields from "camera" when it is a PhysicalCamera and
	// falls back to the disabled-bokeh defaults for anything else.
	updateFrom( camera ) {

		const isPhysical = camera instanceof PhysicalCamera;
		this.bokehSize = isPhysical ? camera.bokehSize : 0;
		this.apertureBlades = isPhysical ? camera.apertureBlades : 0;
		this.apertureRotation = isPhysical ? camera.apertureRotation : 0;
		this.focusDistance = isPhysical ? camera.focusDistance : 10;
		this.anamorphicRatio = isPhysical ? camera.anamorphicRatio : 1;

	}

}
	// Copies each entry of `textures` into a layer of this array render target,
	// resizing the target if the requested dimensions or texture count changed.
	// Layers are only re-rendered when a texture's uuid:version hash has changed.
	// Returns true if any layer was re-rendered.
	setTextures( renderer, textures, width = this.width, height = this.height ) {

		// save previous renderer state
		const prevRenderTarget = renderer.getRenderTarget();
		const prevToneMapping = renderer.toneMapping;
		const prevAlpha = renderer.getClearAlpha();
		renderer.getClearColor( prevColor );

		// resize the render target and ensure we don't have an empty texture
		// render target depth must be >= 1 to avoid unbound texture error on android devices
		const depth = textures.length || 1;
		if ( width !== this.width || height !== this.height || this.depth !== depth ) {

			this.setSize( width, height, depth );
			// invalidate all cached hashes so every layer gets re-rendered
			this.hashes = new Array( depth ).fill( null );

		}

		renderer.setClearColor( 0, 0 );
		renderer.toneMapping = NoToneMapping;

		// render each texture into each layer of the target
		const fsQuad = this.fsQuad;
		const hashes = this.hashes;
		let updated = false;
		for ( let i = 0, l = depth; i < l; i ++ ) {

			const texture = textures[ i ];
			const hash = getTextureHash( texture );
			// NOTE(review): the isWebGLRenderTarget check forces render-target-backed
			// entries to always be copied — presumably because their contents can change
			// without bumping the texture version; confirm against callers.
			if ( texture && ( hashes[ i ] !== hash || texture.isWebGLRenderTarget ) ) {

				// revert to default texture transform before rendering
				texture.matrixAutoUpdate = false;
				texture.matrix.identity();

				fsQuad.material.map = texture;

				renderer.setRenderTarget( this, i );
				fsQuad.render( renderer );

				// restore custom texture transform
				texture.updateMatrix();
				texture.matrixAutoUpdate = true;

				// ensure textures are not updated unnecessarily
				hashes[ i ] = hash;
				updated = true;

			}

		}

		// reset the renderer
		fsQuad.material.map = null;
		renderer.setClearColor( prevColor, prevAlpha );
		renderer.setRenderTarget( prevRenderTarget );
		renderer.toneMapping = prevToneMapping;

		return updated;

	}
// Seedable pseudo-random number generator — a linear congruential generator
// using GCC's constants, so noise sequences are reproducible for a given seed.
// https://stackoverflow.com/questions/424292/seedable-javascript-random-number-generator
class RandomGenerator {

	constructor( seed = 0 ) {

		// LCG parameters: modulus (2**31), multiplier, and increment
		this.m = 2 ** 31;
		this.a = 1103515245;
		this.c = 12345;

		this.seed = seed;

	}

	// Advances the generator and returns the next integer in [ 0, m ).
	nextInt() {

		const { a, c, m, seed } = this;
		this.seed = ( a * seed + c ) % m;
		return this.seed;

	}

	// Returns the next value in the inclusive range [ 0, 1 ].
	nextFloat() {

		return this.nextInt() / ( this.m - 1 );

	}

}
// In-place Fisher–Yates shuffle of `arr`, using `random` as the source of
// values in [ 0, 1 ). Returns the same (mutated) array.
//
// Fix: the default parameter was `random = Math.random()`, which *invoked*
// Math.random and bound a number — calling `random()` with the default then
// threw a TypeError. The default must be the function itself, matching the
// `random = Math.random` default used by StratifiedSampler below.
export function shuffle( arr, random = Math.random ) {

	// walk from the end, swapping each element with a randomly chosen
	// element at or before it
	for ( let i = arr.length - 1; i > 0; i -- ) {

		const j = Math.floor( random() * ( i + 1 ) );
		const x = arr[ i ];
		arr[ i ] = arr[ j ];
		arr[ j ] = x;

	}

	return arr;

}
61 | const { samples } = this; 62 | 63 | if ( index >= strata.length ) { 64 | 65 | shuffle( strata, random ); 66 | this.reshuffle(); 67 | 68 | } 69 | 70 | let stratum = strata[ index ++ ]; 71 | 72 | for ( let i = 0; i < dimensions; i ++ ) { 73 | 74 | samples[ i ] = ( stratum % strataCount + random() ) / strataCount; 75 | stratum = Math.floor( stratum / strataCount ); 76 | 77 | } 78 | 79 | return samples; 80 | 81 | }; 82 | 83 | } 84 | 85 | } 86 | -------------------------------------------------------------------------------- /src/uniforms/stratified/StratifiedSamplerCombined.js: -------------------------------------------------------------------------------- 1 | // Stratified Sampling based on implementation from hoverinc pathtracer 2 | // - https://github.com/hoverinc/ray-tracing-renderer 3 | // - http://www.pbr-book.org/3ed-2018/Sampling_and_Reconstruction/Stratified_Sampling.html 4 | 5 | import { StratifiedSampler } from './StratifiedSampler.js'; 6 | 7 | // Stratified set of data with each tuple stratified separately and combined 8 | export class StratifiedSamplerCombined { 9 | 10 | constructor( strataCount, listOfDimensions, random = Math.random ) { 11 | 12 | let totalDim = 0; 13 | for ( const dim of listOfDimensions ) { 14 | 15 | totalDim += dim; 16 | 17 | } 18 | 19 | const combined = new Float32Array( totalDim ); 20 | const strataObjs = []; 21 | let offset = 0; 22 | for ( const dim of listOfDimensions ) { 23 | 24 | const sampler = new StratifiedSampler( strataCount, dim, random ); 25 | sampler.samples = new Float32Array( combined.buffer, offset, sampler.samples.length ); 26 | offset += sampler.samples.length * 4; 27 | strataObjs.push( sampler ); 28 | 29 | } 30 | 31 | this.samples = combined; 32 | 33 | this.strataCount = strataCount; 34 | 35 | this.next = function () { 36 | 37 | for ( const strata of strataObjs ) { 38 | 39 | strata.next(); 40 | 41 | } 42 | 43 | return combined; 44 | 45 | }; 46 | 47 | this.reshuffle = function () { 48 | 49 | for ( const strata 
of strataObjs ) { 50 | 51 | strata.reshuffle(); 52 | 53 | } 54 | 55 | }; 56 | 57 | this.reset = function () { 58 | 59 | for ( const strata of strataObjs ) { 60 | 61 | strata.reset(); 62 | 63 | } 64 | 65 | }; 66 | 67 | } 68 | 69 | } 70 | -------------------------------------------------------------------------------- /src/utils/BlurredEnvMapGenerator.js: -------------------------------------------------------------------------------- 1 | import { WebGLRenderTarget, RGBAFormat, HalfFloatType, PMREMGenerator, DataTexture, EquirectangularReflectionMapping, FloatType, DataUtils } from 'three'; 2 | import { FullScreenQuad } from 'three/examples/jsm/postprocessing/Pass.js'; 3 | import { MaterialBase } from '../materials/MaterialBase.js'; 4 | import * as CommonGLSL from '../shader/common/index.js'; 5 | 6 | class PMREMCopyMaterial extends MaterialBase { 7 | 8 | constructor() { 9 | 10 | super( { 11 | 12 | uniforms: { 13 | 14 | envMap: { value: null }, 15 | blur: { value: 0 }, 16 | 17 | }, 18 | 19 | vertexShader: /* glsl */` 20 | 21 | varying vec2 vUv; 22 | void main() { 23 | vUv = uv; 24 | gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 ); 25 | } 26 | 27 | `, 28 | 29 | fragmentShader: /* glsl */` 30 | 31 | #include 32 | #include 33 | 34 | ${ CommonGLSL.util_functions } 35 | 36 | uniform sampler2D envMap; 37 | uniform float blur; 38 | varying vec2 vUv; 39 | void main() { 40 | 41 | vec3 rayDirection = equirectUvToDirection( vUv ); 42 | gl_FragColor = textureCubeUV( envMap, rayDirection, blur ); 43 | 44 | } 45 | 46 | `, 47 | 48 | } ); 49 | 50 | } 51 | 52 | } 53 | 54 | export class BlurredEnvMapGenerator { 55 | 56 | constructor( renderer ) { 57 | 58 | this.renderer = renderer; 59 | this.pmremGenerator = new PMREMGenerator( renderer ); 60 | this.copyQuad = new FullScreenQuad( new PMREMCopyMaterial() ); 61 | this.renderTarget = new WebGLRenderTarget( 1, 1, { type: FloatType, format: RGBAFormat } ); 62 | 63 | } 64 | 65 | dispose() { 66 | 67 | 
	// Renders a blurred copy of the given equirectangular environment texture by
	// sampling the PMREM chain at the requested `blur` roughness level, then reads
	// the result back into a half-float DataTexture sized to match the input.
	generate( texture, blur ) {

		const { pmremGenerator, renderTarget, copyQuad, renderer } = this;

		// get the pmrem target
		const pmremTarget = pmremGenerator.fromEquirectangular( texture );

		// set up the material
		const { width, height } = texture.image;
		renderTarget.setSize( width, height );
		copyQuad.material.envMap = pmremTarget.texture;
		copyQuad.material.blur = blur;

		// render
		const prevRenderTarget = renderer.getRenderTarget();
		const prevClear = renderer.autoClear;

		renderer.setRenderTarget( renderTarget );
		renderer.autoClear = true;
		copyQuad.render( renderer );

		renderer.setRenderTarget( prevRenderTarget );
		renderer.autoClear = prevClear;

		// read the data back — the target is float, so read floats and convert
		// each component to half precision for the final texture
		const buffer = new Uint16Array( width * height * 4 );
		const readBuffer = new Float32Array( width * height * 4 );
		renderer.readRenderTargetPixels( renderTarget, 0, 0, width, height, readBuffer );

		for ( let i = 0, l = readBuffer.length; i < l; i ++ ) {

			buffer[ i ] = DataUtils.toHalfFloat( readBuffer[ i ] );

		}

		// build the result texture, preserving the source's filtering and wrap modes
		const result = new DataTexture( buffer, width, height, RGBAFormat, HalfFloatType );
		result.minFilter = texture.minFilter;
		result.magFilter = texture.magFilter;
		result.wrapS = texture.wrapS;
		result.wrapT = texture.wrapT;
		result.mapping = EquirectangularReflectionMapping;
		result.needsUpdate = true;

		// dispose of the now unneeded target
		pmremTarget.dispose();

		return result;

	}
// Converts a cube texture into an equirectangular half-float DataTexture by
// rendering it through CubeToEquirectMaterial and reading the pixels back.
export class CubeToEquirectGenerator {

	constructor( renderer ) {

		this._renderer = renderer;
		this._quad = new FullScreenQuad( new CubeToEquirectMaterial() );

	}

	// Renders `source` (a CubeTexture) to an equirectangular DataTexture.
	// When width/height are not provided they default to 4x and 2x the cube
	// face height, respectively. Throws if `source` is not a cube texture.
	generate( source, width = null, height = null ) {

		if ( ! source.isCubeTexture ) {

			throw new Error( 'CubeToEquirectMaterial: Source can only be cube textures.' );

		}

		const image = source.images[ 0 ];
		const renderer = this._renderer;
		const quad = this._quad;

		// determine the dimensions if not provided
		if ( width === null ) {

			width = 4 * image.height;

		}

		if ( height === null ) {

			height = 2 * image.height;

		}

		const target = new WebGLRenderTarget( width, height, {
			type: FloatType,
			colorSpace: image.colorSpace,
		} );

		// prep the cube map data
		// NOTE(review): these defines mirror three.js' internal cube UV layout
		// constants (see cube_uv_reflection_fragment) — confirm if updating three
		const imageHeight = image.height;
		const maxMip = Math.log2( imageHeight ) - 2;
		const texelHeight = 1.0 / imageHeight;
		const texelWidth = 1.0 / ( 3 * Math.max( Math.pow( 2, maxMip ), 7 * 16 ) );

		quad.material.defines.CUBEUV_MAX_MIP = `${ maxMip }.0`;
		quad.material.defines.CUBEUV_TEXEL_WIDTH = texelWidth;
		quad.material.defines.CUBEUV_TEXEL_HEIGHT = texelHeight;
		quad.material.uniforms.envMap.value = source;
		quad.material.uniforms.flipEnvMap.value = source.isRenderTargetTexture ? 1 : - 1;
		quad.material.needsUpdate = true;

		// save state and render the contents
		const currentTarget = renderer.getRenderTarget();
		const currentAutoClear = renderer.autoClear;
		renderer.autoClear = true;
		renderer.setRenderTarget( target );
		quad.render( renderer );
		renderer.setRenderTarget( currentTarget );
		renderer.autoClear = currentAutoClear;

		// read the data back — floats from the target, converted to half floats
		const buffer = new Uint16Array( width * height * 4 );
		const readBuffer = new Float32Array( width * height * 4 );
		renderer.readRenderTargetPixels( target, 0, 0, width, height, readBuffer );
		target.dispose();

		for ( let i = 0, l = readBuffer.length; i < l; i ++ ) {

			buffer[ i ] = DataUtils.toHalfFloat( readBuffer[ i ] );

		}

		// produce the data texture
		const result = new DataTexture( buffer, width, height, RGBAFormat, HalfFloatType );
		result.minFilter = LinearMipMapLinearFilter;
		result.magFilter = LinearFilter;
		result.wrapS = RepeatWrapping;
		result.wrapT = RepeatWrapping;
		result.mapping = EquirectangularReflectionMapping;
		result.needsUpdate = true;

		return result;

	}

	dispose() {

		this._quad.dispose();

	}

}
// Converts an array of float values into a Uint16Array of half-float encoded
// values with the same length.
export function toHalfFloatArray( f32Array ) {

	return Uint16Array.from( f32Array, value => DataUtils.toHalfFloat( value ) );

}
// Generates a UV-unwrapped copy of a geometry using the xatlas wasm module.
// https://github.com/mozilla/Spoke/commit/9701d647020e09d584885bd457eb225e9995c12f
export class UVUnwrapper {

	constructor() {

		// lazily-initialized xatlas wasm module — populated by load()
		this._module = null;

	}

	// Loads and initializes the xatlas wasm module. Must resolve before
	// generate() is called.
	async load() {

		const wasmurl = new URL( '../../node_modules/xatlas-web/dist/xatlas-web.wasm', import.meta.url );
		this._module = XAtlas( {

			locateFile( path ) {

				if ( path.endsWith( '.wasm' ) ) {

					return wasmurl.toString();

				}

				return path;

			}

		} );
		return this._module.ready;

	}

	// Unwraps the given indexed BufferGeometry (position, normal, uv attributes
	// required) and returns a NEW BufferGeometry with a generated "uv2" channel.
	// Fixes: the original assigned the result to an undefined `mesh` variable
	// (guaranteed ReferenceError), referenced an undefined `AddMeshStatus`
	// global, and used BufferGeometry.addAttribute which was removed from three.js.
	generate( geometry ) {

		const xatlas = this._module;
		const originalVertexCount = geometry.attributes.position.count;
		const originalIndexCount = geometry.index.count;

		xatlas.createAtlas();

		// copy the index and vertex attributes into the wasm heap
		const meshInfo = xatlas.createMesh( originalVertexCount, originalIndexCount, true, true );
		xatlas.HEAPU16.set( geometry.index.array, meshInfo.indexOffset / Uint16Array.BYTES_PER_ELEMENT );
		xatlas.HEAPF32.set( geometry.attributes.position.array, meshInfo.positionOffset / Float32Array.BYTES_PER_ELEMENT );
		xatlas.HEAPF32.set( geometry.attributes.normal.array, meshInfo.normalOffset / Float32Array.BYTES_PER_ELEMENT );
		xatlas.HEAPF32.set( geometry.attributes.uv.array, meshInfo.uvOffset / Float32Array.BYTES_PER_ELEMENT );

		const statusCode = xatlas.addMesh();

		// NOTE(review): "AddMeshStatus" was previously referenced as a bare global
		// that is never defined in this file — read it from the xatlas module
		// instead; confirm against the xatlas-web API.
		const { AddMeshStatus } = xatlas;
		if ( statusCode !== AddMeshStatus.Success ) {

			throw new Error( `UVUnwrapper: Error adding mesh. Status code ${ statusCode }` );

		}

		xatlas.generateAtlas();

		// views into the wasm heap for the newly generated mesh data
		const meshData = xatlas.getMeshData( meshInfo.meshId );
		const oldPositionArray = geometry.attributes.position.array;
		const oldNormalArray = geometry.attributes.normal.array;
		const oldUvArray = geometry.attributes.uv.array;

		const newPositionArray = new Float32Array( meshData.newVertexCount * 3 );
		const newNormalArray = new Float32Array( meshData.newVertexCount * 3 );
		const newUvArray = new Float32Array( meshData.newVertexCount * 2 );
		const newUv2Array = new Float32Array( xatlas.HEAPF32.buffer, meshData.uvOffset, meshData.newVertexCount * 2 );
		const newIndexArray = new Uint32Array( xatlas.HEAPU32.buffer, meshData.indexOffset, meshData.newIndexCount );
		const originalIndexArray = new Uint32Array(
			xatlas.HEAPU32.buffer,
			meshData.originalIndexOffset,
			meshData.newVertexCount
		);

		// map every new vertex back to its source vertex attributes
		for ( let i = 0; i < meshData.newVertexCount; i ++ ) {

			const originalIndex = originalIndexArray[ i ];
			newPositionArray[ i * 3 ] = oldPositionArray[ originalIndex * 3 ];
			newPositionArray[ i * 3 + 1 ] = oldPositionArray[ originalIndex * 3 + 1 ];
			newPositionArray[ i * 3 + 2 ] = oldPositionArray[ originalIndex * 3 + 2 ];
			newNormalArray[ i * 3 ] = oldNormalArray[ originalIndex * 3 ];
			newNormalArray[ i * 3 + 1 ] = oldNormalArray[ originalIndex * 3 + 1 ];
			newNormalArray[ i * 3 + 2 ] = oldNormalArray[ originalIndex * 3 + 2 ];
			newUvArray[ i * 2 ] = oldUvArray[ originalIndex * 2 ];
			newUvArray[ i * 2 + 1 ] = oldUvArray[ originalIndex * 2 + 1 ];

		}

		// "addAttribute" was removed from BufferGeometry in three.js r125 — use
		// "setAttribute". The Float32/Uint32BufferAttribute constructors copy the
		// passed arrays, so the wasm heap views are safe to release afterwards.
		const newGeometry = new BufferGeometry();
		newGeometry.setAttribute( 'position', new Float32BufferAttribute( newPositionArray, 3 ) );
		newGeometry.setAttribute( 'normal', new Float32BufferAttribute( newNormalArray, 3 ) );
		newGeometry.setAttribute( 'uv', new Float32BufferAttribute( newUvArray, 2 ) );
		newGeometry.setAttribute( 'uv2', new Float32BufferAttribute( newUv2Array, 2 ) );
		newGeometry.setIndex( new Uint32BufferAttribute( newIndexArray, 1 ) );

		xatlas.destroyAtlas();

		// previously this was assigned to an undefined "mesh" variable — return
		// the generated geometry to the caller instead
		return newGeometry;

	}

}