├── .gitattributes
├── package.json
├── shaders
│   ├── blit
│   │   ├── screen.vert
│   │   └── screen.frag
│   ├── shading
│   │   ├── shadow-ray-miss.rmiss
│   │   ├── shadow-ray-closest-hit.rchit
│   │   ├── ray-miss.rmiss
│   │   ├── structs.glsl
│   │   ├── disney.glsl
│   │   ├── ray-generation.rgen
│   │   ├── utils.glsl
│   │   └── ray-closest-hit.rchit
│   └── picking
│       ├── ray-closest-hit.rchit
│       └── ray-generation.rgen
├── Settings.mjs
├── README.md
├── LICENSE
├── .gitignore
├── Scene.mjs
├── buffers
│   ├── TextureArrayBuffer.mjs
│   ├── GeometryBuffer.mjs
│   └── InstanceBuffer.mjs
├── passes
│   ├── RayPickingPass.mjs
│   └── RayTracingPass.mjs
├── Camera.mjs
├── utils.mjs
└── index.mjs
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "scripts": {
3 | "start": "node index.mjs"
4 | },
5 | "dependencies": {
6 | "@cwasm/jpeg-turbo": "0.1.0",
7 | "@cwasm/lodepng": "0.1.1",
8 | "gl-matrix": "^3.1.0",
9 | "tolw": "^0.1.8",
10 | "webgpu": "latest"
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/shaders/blit/screen.vert:
--------------------------------------------------------------------------------
1 | #version 450
2 | #pragma shader_stage(vertex)
3 |
4 | layout (location = 0) out vec2 uv;
5 |
6 | void main() {
7 | vec2 pos = vec2((gl_VertexIndex << 1) & 2, gl_VertexIndex & 2);
8 | gl_Position = vec4(pos * 2.0 - 1.0, 0.0, 1.0);
9 | uv = pos;
10 | }
11 |
--------------------------------------------------------------------------------
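Note on shaders/blit/screen.vert: it draws a single oversized triangle that covers the whole screen without any vertex buffer, deriving both the position and the UV from gl_VertexIndex alone. The following JavaScript snippet is only an illustration of that bit trick (it is not part of the project); it evaluates the shader math for the three vertex indices:

// Evaluate the fullscreen-triangle math from screen.vert on the CPU.
// gl_VertexIndex 0, 1, 2 yields clip-space positions (-1,-1), (3,-1), (-1,3)
// and UVs (0,0), (2,0), (0,2), so one triangle covers the entire viewport.
for (let vertexIndex = 0; vertexIndex < 3; ++vertexIndex) {
  const pos = [(vertexIndex << 1) & 2, vertexIndex & 2];
  const clip = [pos[0] * 2 - 1, pos[1] * 2 - 1];
  console.log({ vertexIndex, clip, uv: pos });
}
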
/shaders/shading/shadow-ray-miss.rmiss:
--------------------------------------------------------------------------------
1 | #version 460
2 | #extension GL_EXT_ray_tracing : enable
3 | #extension GL_GOOGLE_include_directive : enable
4 | #pragma shader_stage(miss)
5 |
6 | #include "utils.glsl"
7 |
8 | layout (location = 1) rayPayloadInEXT ShadowRayPayload ShadowRay;
9 |
10 | void main() {
11 | ShadowRay.hit = vec3(0);
12 | ShadowRay.shadowed = false;
13 | }
14 |
--------------------------------------------------------------------------------
/shaders/shading/shadow-ray-closest-hit.rchit:
--------------------------------------------------------------------------------
1 | #version 460
2 | #extension GL_EXT_ray_tracing : enable
3 | #extension GL_GOOGLE_include_directive : enable
4 | #pragma shader_stage(closest)
5 |
6 | #include "utils.glsl"
7 |
8 | layout (location = 1) rayPayloadInEXT ShadowRayPayload ShadowRay;
9 |
10 | hitAttributeEXT vec3 Hit;
11 |
12 | void main() {
13 | ShadowRay.hit = gl_WorldRayOriginEXT + gl_WorldRayDirectionEXT * gl_RayTmaxEXT;
14 | ShadowRay.shadowed = true;
15 | }
16 |
--------------------------------------------------------------------------------
/shaders/picking/ray-closest-hit.rchit:
--------------------------------------------------------------------------------
1 | #version 460
2 | #extension GL_EXT_ray_tracing : enable
3 | #pragma shader_stage(closest)
4 |
5 | struct RayPayload {
6 | vec4 position;
7 | vec4 normal;
8 | uint instanceId;
9 | };
10 |
11 | layout(location = 0) rayPayloadInEXT RayPayload Ray;
12 |
13 | hitAttributeEXT vec4 Hit;
14 |
15 | void main() {
16 | Ray.position.xyz = gl_WorldRayOriginEXT + gl_WorldRayDirectionEXT * gl_RayTmaxEXT;
17 | Ray.instanceId = gl_InstanceCustomIndexEXT + 1;
18 | }
19 |
--------------------------------------------------------------------------------
/shaders/shading/ray-miss.rmiss:
--------------------------------------------------------------------------------
1 | #version 460
2 | #extension GL_EXT_ray_tracing : enable
3 | #extension GL_EXT_nonuniform_qualifier : enable
4 | #extension GL_GOOGLE_include_directive : enable
5 | #pragma shader_stage(miss)
6 |
7 | #include "utils.glsl"
8 |
9 | layout (location = 0) rayPayloadInEXT RayPayload Ray;
10 |
11 | void main() {
12 | // gradient based env
13 | const float t = 0.75 * (normalize(gl_WorldRayDirectionEXT).y + 1.0);
14 | vec3 color = mix(vec3(0.005), vec3(0.0075), t);
15 |
16 | Ray.throughput = vec4(0);
17 | Ray.radianceAndDistance = vec4(pow(color, vec3(2.2)), -1.0);
18 | }
19 |
--------------------------------------------------------------------------------
/Settings.mjs:
--------------------------------------------------------------------------------
1 | import {
2 | fixateToZero
3 | } from "./utils.mjs";
4 |
5 | export default class Settings {
6 | constructor({ device } = _) {
7 | this.device = device || null;
8 | this.buffer = null;
9 | this.init();
10 | }
11 | };
12 |
13 | Settings.prototype.getBuffer = function() {
14 | return this.buffer || null;
15 | };
16 |
17 | Settings.prototype.init = function() {
18 | let {device} = this;
19 | let settingsBufferByteLength = 8 * Float32Array.BYTES_PER_ELEMENT;
20 | let settingsBuffer = device.createBuffer({
21 | size: settingsBufferByteLength,
22 | usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.UNIFORM
23 | });
24 | settingsBuffer.byteLength = settingsBufferByteLength;
25 | this.buffer = settingsBuffer;
26 | };
27 |
--------------------------------------------------------------------------------
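Note on Settings.mjs: the 8 * 4 = 32 byte uniform buffer it allocates mirrors the eight-uint SettingsBuffer block declared in the shaders (sampleCount, totalSampleCount, lightCount, screenWidth, screenHeight plus three pad fields). A hypothetical helper, sketched here only to show the packing and assuming the same setSubData API used elsewhere in this repository, could fill it like this:

// Hypothetical sketch (not part of Settings.mjs): pack the shader-side
// SettingsBuffer fields into 8 uints and upload them in one call.
function writeSettings(settingsBuffer, { sampleCount, totalSampleCount, lightCount, screenWidth, screenHeight }) {
  const data = new Uint32Array(8); // the last 3 uints stay 0 (pad_0..pad_2)
  data.set([sampleCount, totalSampleCount, lightCount, screenWidth, screenHeight], 0);
  settingsBuffer.setSubData(0, data);
}
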
/shaders/blit/screen.frag:
--------------------------------------------------------------------------------
1 | #version 450
2 | #pragma shader_stage(fragment)
3 |
4 | layout (location = 0) in vec2 uv;
5 | layout (location = 0) out vec4 outColor;
6 |
7 | layout (binding = 0, std140) buffer PixelBuffer {
8 | vec4 pixels[];
9 | } pixelBuffer;
10 |
11 | layout (binding = 1) uniform SettingsBuffer {
12 | uint sampleCount;
13 | uint totalSampleCount;
14 | uint lightCount;
15 | uint screenWidth;
16 | uint screenHeight;
17 | uint pad_0;
18 | uint pad_1;
19 | uint pad_2;
20 | } Settings;
21 |
22 | void main() {
23 | const vec2 resolution = vec2(Settings.screenWidth, Settings.screenHeight);
24 | const ivec2 bufferCoord = ivec2(floor(uv * resolution));
25 | const vec2 fragCoord = (uv * resolution);
26 | const uint pixelIndex = bufferCoord.y * uint(resolution.x) + bufferCoord.x;
27 |
28 | vec4 pixelColor = pixelBuffer.pixels[pixelIndex];
29 | outColor = pixelColor;
30 | }
31 |
--------------------------------------------------------------------------------
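Note on shaders/blit/screen.frag: instead of sampling a texture it reads the path-traced image from a flat storage buffer, one vec4 per pixel, indexed row by row. A small JavaScript illustration of the index math, using a hypothetical 640x480 resolution:

// Index math from screen.frag for an assumed 640x480 target (illustration only).
const width = 640, height = 480;
const x = 10, y = 20;                            // bufferCoord = floor(uv * resolution)
const pixelIndex = y * width + x;                // 12810: which vec4 holds pixel (10, 20)
const bufferByteLength = width * height * 4 * 4; // one vec4 of 32-bit floats per pixel
console.log(pixelIndex, bufferByteLength);

The same width * height * 4 * Float32Array.BYTES_PER_ELEMENT sizing shows up later when RayTracingPass.mjs allocates the pixel and accumulation buffers.
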
/README.md:
--------------------------------------------------------------------------------
1 | # WebGPU Path Tracer
2 |
3 | Using [node-webgpu](https://github.com/maierfelix/webgpu) and [dawn-ray-tracing](https://github.com/maierfelix/dawn-ray-tracing)
4 |
5 | # Running the demo
6 | - Clone this repository
7 | - Download the [assets](https://github.com/maierfelix/WebGPU-Path-Tracer/releases/download/0.0.1/assets.zip) and extract them into the project's root directory
8 | - Install dependencies with `npm install`
9 | - Run the path tracer with `npm run start`
10 |
11 | # Controls
12 |
13 | - Mouse (Left-Click): Look around
14 | - Mouse (Right-Click): Pick object
15 |
16 | - WASD: Move around
17 | - Space: Move upwards
18 | - C: Move downwards
19 |
20 | # Screenshots
21 |
22 | The following scenes were rendered in just a few seconds on an RTX 2070
23 |
24 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 Felix Maier
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/shaders/shading/structs.glsl:
--------------------------------------------------------------------------------
1 | #ifndef STRUCTS_H
2 | #define STRUCTS_H
3 |
4 | struct LightSource {
5 | vec4 emissionAndGeometryId;
6 | vec4 directionAndPdf;
7 | };
8 |
9 | struct RayPayload {
10 | vec4 radianceAndDistance;
11 | vec4 scatterDirection;
12 | vec4 throughput;
13 | uint seed;
14 | LightSource lightSource;
15 | bool shadowed;
16 | };
17 |
18 | struct ShadowRayPayload {
19 | vec3 hit;
20 | bool shadowed;
21 | };
22 |
23 | struct ShadingData {
24 | vec3 base_color;
25 | float metallic;
26 | float specular;
27 | float roughness;
28 | float csw;
29 | };
30 |
31 | struct Vertex {
32 | vec4 position;
33 | vec4 normal;
34 | vec4 tangent;
35 | vec2 uv;
36 | vec2 pad_0;
37 | };
38 |
39 | struct Offset {
40 | uint face;
41 | uint vertex;
42 | uint material;
43 | uint pad_0;
44 | };
45 |
46 | struct Material {
47 | vec4 color;
48 | vec4 emission;
49 | float metalness;
50 | float roughness;
51 | float specular;
52 | float textureScaling;
53 | uint albedoIndex;
54 | uint normalIndex;
55 | uint emissionIndex;
56 | uint metalRoughnessIndex;
57 | float emissionIntensity;
58 | float metalnessIntensity;
59 | float roughnessIntensity;
60 | float pad_0;
61 | };
62 |
63 | struct Light {
64 | uint instanceIndex;
65 | float pad_0;
66 | float pad_1;
67 | float pad_2;
68 | };
69 |
70 | struct Instance {
71 | mat4x3 transformMatrix;
72 | vec4 padding_0;
73 | mat4x4 normalMatrix;
74 | uint vertexIndex;
75 | uint faceIndex;
76 | uint faceCount;
77 | uint materialIndex;
78 | };
79 |
80 | #endif // STRUCTS_H
81 |
--------------------------------------------------------------------------------
/shaders/picking/ray-generation.rgen:
--------------------------------------------------------------------------------
1 | #version 460
2 | #extension GL_EXT_ray_tracing : require
3 | #extension GL_EXT_nonuniform_qualifier : enable
4 | #pragma shader_stage(raygen)
5 |
6 | struct RayPayload {
7 | vec4 position;
8 | vec4 normal;
9 | uint instanceId;
10 | };
11 |
12 | layout(location = 0) rayPayloadEXT RayPayload Ray;
13 |
14 | layout(binding = 0, set = 0) uniform accelerationStructureEXT topLevelAS;
15 |
16 | layout(binding = 1, std140) buffer PickingBuffer {
17 | vec4 inputData;
18 | vec4 outputData;
19 | } Picking;
20 |
21 | layout(binding = 2) uniform CameraBuffer {
22 | vec4 forward;
23 | mat4 viewInverse;
24 | mat4 projectionInverse;
25 | mat4 viewProjection;
26 | mat4 previousViewInverse;
27 | mat4 previousProjectionInverse;
28 | float aperture;
29 | float focusDistance;
30 | float zNear;
31 | float zFar;
32 | } Camera;
33 |
34 | layout (binding = 3) uniform SettingsBuffer {
35 | uint sampleCount;
36 | uint totalSampleCount;
37 | uint lightCount;
38 | uint screenWidth;
39 | uint screenHeight;
40 | uint pad_0;
41 | uint pad_1;
42 | uint pad_2;
43 | } Settings;
44 |
45 | void main() {
46 | const vec2 res = vec2(Settings.screenWidth, Settings.screenHeight);
47 | const vec2 pixel = vec2(Picking.inputData.x, res.y - Picking.inputData.y);
48 | const vec2 uv = (pixel / res) * 2.0 - 1.0;
49 |
50 | vec4 origin = Camera.viewInverse * vec4(0, 0, 0, 1);
51 | vec4 target = Camera.projectionInverse * (vec4(uv.x, uv.y, 1, 1));
52 | vec4 direction = Camera.viewInverse * vec4(normalize(target.xyz * Camera.focusDistance), 0);
53 |
54 | traceRayEXT(topLevelAS, gl_RayFlagsOpaqueEXT, 0xFF, 0, 0, 0, origin.xyz, Camera.zNear, direction.xyz, Camera.zFar, 0);
55 |
56 | Picking.outputData = vec4(
57 | Ray.position.xyz,
58 | Ray.instanceId
59 | );
60 | }
61 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | lerna-debug.log*
8 |
9 | # Diagnostic reports (https://nodejs.org/api/report.html)
10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
11 |
12 | # Runtime data
13 | pids
14 | *.pid
15 | *.seed
16 | *.pid.lock
17 |
18 | # Directory for instrumented libs generated by jscoverage/JSCover
19 | lib-cov
20 |
21 | # Coverage directory used by tools like istanbul
22 | coverage
23 | *.lcov
24 |
25 | # nyc test coverage
26 | .nyc_output
27 |
28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
29 | .grunt
30 |
31 | # Bower dependency directory (https://bower.io/)
32 | bower_components
33 |
34 | # node-waf configuration
35 | .lock-wscript
36 |
37 | # Compiled binary addons (https://nodejs.org/api/addons.html)
38 | build/Release
39 |
40 | # Dependency directories
41 | node_modules/
42 | jspm_packages/
43 |
44 | # TypeScript v1 declaration files
45 | typings/
46 |
47 | # TypeScript cache
48 | *.tsbuildinfo
49 |
50 | # Optional npm cache directory
51 | .npm
52 |
53 | # Optional eslint cache
54 | .eslintcache
55 |
56 | # Microbundle cache
57 | .rpt2_cache/
58 | .rts2_cache_cjs/
59 | .rts2_cache_es/
60 | .rts2_cache_umd/
61 |
62 | # Optional REPL history
63 | .node_repl_history
64 |
65 | # Output of 'npm pack'
66 | *.tgz
67 |
68 | # Yarn Integrity file
69 | .yarn-integrity
70 |
71 | # dotenv environment variables file
72 | .env
73 | .env.test
74 |
75 | # parcel-bundler cache (https://parceljs.org/)
76 | .cache
77 |
78 | # Next.js build output
79 | .next
80 |
81 | # Nuxt.js build / generate output
82 | .nuxt
83 | dist
84 |
85 | # Gatsby files
86 | .cache/
87 | # Comment in the public line in if your project uses Gatsby and *not* Next.js
88 | # https://nextjs.org/blog/next-9-1#public-directory-support
89 | # public
90 |
91 | # vuepress build output
92 | .vuepress/dist
93 |
94 | # Serverless directories
95 | .serverless/
96 |
97 | # FuseBox cache
98 | .fusebox/
99 |
100 | # DynamoDB Local files
101 | .dynamodb/
102 |
103 | # TernJS port file
104 | .tern-port
105 |
106 | assets
107 |
108 | package-lock.json
109 |
--------------------------------------------------------------------------------
/shaders/shading/disney.glsl:
--------------------------------------------------------------------------------
1 | #ifndef DISNEY_H
2 | #define DISNEY_H
3 |
4 | #include "utils.glsl"
5 |
6 | // based on AMD baikal's disney implementation with some edits:
7 | // https://github.com/GPUOpen-LibrariesAndSDKs/RadeonProRender-Baikal/blob/master/Baikal/Kernels/CL/disney.cl
8 | float DisneyPdf(in const float NdotH, in const float NdotL, in const float HdotL) {
9 | const float d_pdf = NdotL * (1.0 / PI);
10 | const float r_pdf = GTR2(NdotH, shading.roughness) * NdotH / (4.0 * HdotL);
11 | const float c_pdf = GTR1(NdotH, 0.001) * NdotH / (4.0 * HdotL);
12 | return c_pdf * 0.001 + (shading.csw * r_pdf + (1.0 - shading.csw) * d_pdf);
13 | }
14 |
15 | vec3 DisneyEval(in float NdotL, in const float NdotV, in const float NdotH, in const float HdotL) {
16 | if (NdotL <= 0.0 || NdotV <= 0.0) return vec3(0);
17 |
18 | const vec3 cd_lin = shading.base_color;
19 | const vec3 c_spec0 = mix(shading.specular * vec3(0.3), cd_lin, shading.metallic);
20 |
21 | // Diffuse fresnel - go from 1 at normal incidence to 0.5 at grazing
22 | // and mix in diffuse retro-reflection based on roughness
23 | const float f_wo = SchlickFresnelReflectance(NdotV);
24 | const float f_wi = SchlickFresnelReflectance(NdotL);
25 |
26 | const float fd90 = 0.5 + 2.0 * HdotL * HdotL * shading.roughness;
27 | const float fd = mix(1.0, fd90, f_wo) * mix(1.0, fd90, f_wi);
28 |
29 | // Specular
30 | const float ds = GTR2(NdotH, shading.roughness);
31 | const float fh = SchlickFresnelReflectance(HdotL);
32 | const vec3 fs = mix(c_spec0, vec3(1), fh);
33 |
34 | float gs = 0.0;
35 | const float ro2 = sqr(shading.roughness * 0.5 + 0.5);
36 | gs = SmithGGX_G(NdotV, ro2);
37 | gs *= SmithGGX_G(NdotL, ro2);
38 |
39 | // clearcoat (ior = 1.5 -> F0 = 0.04)
40 | const float dr = GTR1(NdotH, 0.04);
41 | const float fr = mix(0.04, 1.0, fh);
42 | const float gr = SmithGGX_G(NdotV, 0.25) * SmithGGX_G(NdotL, 0.25);
43 |
44 | const vec3 f = ((1.0 / PI) * fd * cd_lin) * (1.0 - shading.metallic) + gs * fs * ds + 0.001 * gr * fr * dr;
45 | return f * NdotL;
46 | }
47 |
48 | vec3 DisneySample(inout uint seed, in const vec3 V, in const vec3 N) {
49 | float r1 = Randf01(seed);
50 | float r2 = Randf01(seed);
51 |
52 | const vec3 U = abs(N.z) < (1.0 - EPSILON) ? vec3(0, 0, 1) : vec3(1, 0, 0);
53 | const vec3 T = normalize(cross(U, N));
54 | const vec3 B = cross(N, T);
55 |
56 | // specular
57 | if (r2 < shading.csw) {
58 | r2 /= shading.csw;
59 | const float a = shading.roughness;
60 | const float cosTheta = sqrt((1.0 - r2) / (1.0 + (a*a-1.0) * r2));
61 | const float sinTheta = sqrt(max(0.0, 1.0 - (cosTheta * cosTheta)));
62 | const float phi = r1 * TWO_PI;
63 | vec3 H = normalize(vec3(
64 | cos(phi) * sinTheta,
65 | sin(phi) * sinTheta,
66 | cosTheta
67 | ));
68 | H = H.x * T + H.y * B + H.z * N;
69 | H = dot(H, V) <= 0.0 ? H * -1.0 : H;
70 | return reflect(-V, H);
71 | }
72 | // diffuse
73 | r2 -= shading.csw;
74 | r2 /= (1.0 - shading.csw);
75 | const vec3 H = CosineSampleHemisphere(r1, r2);
76 | return T * H.x + B * H.y + N * H.z;
77 | }
78 |
79 | #endif // DISNEY_H
80 |
--------------------------------------------------------------------------------
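Note on shaders/shading/disney.glsl: DisneySample uses shading.csw as a lobe-selection probability, sampling the GGX specular lobe with probability csw and the cosine-weighted diffuse lobe otherwise, rescaling r2 in each branch so it remains a uniform sample. A minimal JavaScript sketch of just that selection step (illustrative only):

// Lobe selection as in DisneySample(): specular with probability csw,
// diffuse otherwise; r2 is rescaled back to a uniform [0, 1) value.
function selectLobe(csw, r2) {
  if (r2 < csw) return { lobe: "specular", r2: r2 / csw };
  return { lobe: "diffuse", r2: (r2 - csw) / (1.0 - csw) };
}
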
/Scene.mjs:
--------------------------------------------------------------------------------
1 | import {
2 | readImageFile,
3 | readObjectFile
4 | } from "./utils.mjs"
5 |
6 | class Texture {
7 | constructor(parent, opts = {}) {
8 | this.parent = parent || null;
9 | this.data = null;
10 | }
11 | };
12 |
13 | Texture.prototype.fromPath = function(path) {
14 | this.data = readImageFile(path);
15 | return this;
16 | };
17 |
18 | class Material {
19 | constructor(parent, opts = {}) {
20 | this.parent = parent || null;
21 | this.data = opts;
22 | }
23 | };
24 |
25 | class Geometry {
26 | constructor(parent, opts = {}) {
27 | this.parent = parent || null;
28 | this.data = null;
29 | this.instances = [];
30 | this.accelerationContainer = null;
31 | }
32 | };
33 |
34 | Geometry.prototype.fromPath = function(path) {
35 | this.data = readObjectFile(path);
36 | return this;
37 | };
38 |
39 | Geometry.prototype.setAccelerationContainer = function(container) {
40 | this.accelerationContainer = container;
41 | };
42 |
43 | Geometry.prototype.addMeshInstance = function(opts = {}) {
44 | let {instances} = this;
45 | let object = new GeometryInstance(this, opts);
46 | instances.push(object);
47 | return object;
48 | };
49 |
50 | Geometry.prototype.addEmitterInstance = function(opts = {}) {
51 | let {instances} = this;
52 | let object = new GeometryInstance(this, opts);
53 | object.isLight = true;
54 | instances.push(object);
55 | return object;
56 | };
57 |
58 | class GeometryInstance {
59 | constructor(parent, opts = {}) {
60 | this.parent = parent || null;
61 | this.data = opts;
62 | this.isLight = false;
63 | this.instanceBufferByteOffset = 0x0;
64 | }
65 | get geometry() {
66 | return this.parent;
67 | }
68 | };
69 |
70 | class Scene {
71 | constructor(opts = {}) {
72 | this.objects = {
73 | textures: [],
74 | materials: [],
75 | geometries: []
76 | };
77 | }
78 | };
79 |
80 | Scene.prototype.createTexture = function(opts = {}) {
81 | let {objects} = this;
82 | let object = new Texture(this, opts);
83 | objects.textures.push(object);
84 | return object;
85 | };
86 |
87 | Scene.prototype.createMaterial = function(opts = {}) {
88 | let {objects} = this;
89 | let object = new Material(this, opts);
90 | objects.materials.push(object);
91 | return object;
92 | };
93 |
94 | Scene.prototype.createGeometry = function(opts = {}) {
95 | let {objects} = this;
96 | let object = new Geometry(this, opts);
97 | objects.geometries.push(object);
98 | return object;
99 | };
100 |
101 | Scene.prototype.getInstancesFlattened = function() {
102 | let {geometries} = this.objects;
103 | let out = [];
104 | for (let ii = 0; ii < geometries.length; ++ii) {
105 | let {instances} = geometries[ii];
106 | out.push(...instances);
107 | };
108 | return out;
109 | };
110 |
111 | Scene.prototype.getLightsFlattened = function() {
112 | let {geometries} = this.objects;
113 | let out = [];
114 | for (let ii = 0; ii < geometries.length; ++ii) {
115 | let {instances} = geometries[ii];
116 | for (let jj = 0; jj < instances.length; ++jj) {
117 | let instance = instances[jj];
118 | if (instance.isLight) out.push(instance);
119 | };
120 | };
121 | return out;
122 | };
123 |
124 | Scene.prototype.getInstanceById = function(id) {
125 | let instances = this.getInstancesFlattened();
126 | return instances[id] || null;
127 | };
128 |
129 | export default Scene;
130 |
--------------------------------------------------------------------------------
/shaders/shading/ray-generation.rgen:
--------------------------------------------------------------------------------
1 | #version 460
2 | #extension GL_EXT_ray_tracing : enable
3 | #extension GL_EXT_nonuniform_qualifier : enable
4 | #extension GL_GOOGLE_include_directive : enable
5 | #pragma shader_stage(raygen)
6 |
7 | #include "utils.glsl"
8 |
9 | layout (location = 0) rayPayloadEXT RayPayload Ray;
10 | layout (location = 1) rayPayloadEXT ShadowRayPayload ShadowRay;
11 |
12 | layout (binding = 0, set = 0) uniform accelerationStructureEXT topLevelAS;
13 |
14 | layout (binding = 1, std140) buffer PixelBuffer {
15 | vec4 pixels[];
16 | } pixelBuffer;
17 |
18 | layout (binding = 2, std140) buffer AccumulationBuffer {
19 | vec4 pixels[];
20 | } accumulationBuffer;
21 |
22 | layout (binding = 3) uniform CameraBuffer {
23 | vec4 forward;
24 | mat4 viewInverse;
25 | mat4 projectionInverse;
26 | mat4 viewProjection;
27 | mat4 previousViewInverse;
28 | mat4 previousProjectionInverse;
29 | float aperture;
30 | float focusDistance;
31 | float zNear;
32 | float zFar;
33 | } Camera;
34 |
35 | layout (binding = 4) uniform SettingsBuffer {
36 | uint sampleCount;
37 | uint totalSampleCount;
38 | uint lightCount;
39 | uint screenWidth;
40 | uint screenHeight;
41 | uint pad_0;
42 | uint pad_1;
43 | uint pad_2;
44 | } Settings;
45 |
46 | void main() {
47 | const ivec2 ipos = ivec2(gl_LaunchIDEXT.xy);
48 | const ivec2 resolution = ivec2(gl_LaunchSizeEXT.xy);
49 | const uint pixelIndex = ipos.y * resolution.x + ipos.x;
50 |
51 | const uint sampleCount = Settings.sampleCount;
52 | const uint bounceCount = 3;
53 | const uint totalSampleCount = Settings.totalSampleCount;
54 |
55 | Ray.seed = Tea(Tea(ipos.x, ipos.y), Settings.totalSampleCount);
56 |
57 | const uint cullMask = 0x80;
58 |
59 | vec3 pixelColor = vec3(0);
60 | for (uint ss = 0; ss < sampleCount; ++ss) {
61 | const vec2 offset = Camera.aperture / 2.0 * RandInUnitDisk(Ray.seed);
62 | const vec2 pixel = vec2(ipos.x + Randf01(Ray.seed), ipos.y + Randf01(Ray.seed));
63 | const vec2 uv = (pixel / gl_LaunchSizeEXT.xy) * 2.0 - 1.0;
64 |
65 | vec4 origin = Camera.viewInverse * vec4(offset, 0, 1);
66 | vec4 target = Camera.projectionInverse * (vec4(uv.x, uv.y, 1, 1));
67 | vec4 direction = Camera.viewInverse * vec4(normalize(target.xyz * Camera.focusDistance - vec3(offset, 0)), 0);
68 |
69 | vec3 radiance = vec3(0);
70 | Ray.throughput = vec4(1);
71 | for (uint bb = 0; bb < bounceCount; ++bb) {
72 | traceRayEXT(topLevelAS, gl_RayFlagsOpaqueEXT, cullMask, 0, 0, 0, origin.xyz, Camera.zNear, direction.xyz, Camera.zFar, 0);
73 |
74 | radiance += Ray.radianceAndDistance.rgb;
75 |
76 | // abort if the ray is either invalid or didn't hit anything
77 | const float t = Ray.radianceAndDistance.w;
78 | if (t < 0.0 || Ray.scatterDirection.w <= 0.0) break;
79 |
80 | // move the ray based on the bsdf direction
81 | origin = origin + t * direction;
82 | direction = vec4(Ray.scatterDirection.xyz, 0);
83 | };
84 | pixelColor += radiance;
85 | };
86 | pixelColor = max(vec3(0), pixelColor);
87 |
88 | const bool accumulate = Settings.sampleCount != Settings.totalSampleCount;
89 | const vec3 accumulatedColor = accumulationBuffer.pixels[pixelIndex].rgb * float(accumulate) + pixelColor;
90 | pixelColor = accumulatedColor * (1.0 / Settings.totalSampleCount);
91 | accumulationBuffer.pixels[pixelIndex].rgb = accumulatedColor;
92 |
93 | pixelColor = Uncharted2ToneMapping(pixelColor);
94 |
95 | pixelBuffer.pixels[pixelIndex] = vec4(pow(pixelColor, vec3(1.0 / 2.2)), 1);
96 | }
97 |
--------------------------------------------------------------------------------
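Note on shaders/shading/ray-generation.rgen: the accumulation step keeps a per-pixel running sum in accumulationBuffer and displays that sum divided by Settings.totalSampleCount; when the camera moves, Settings.sampleCount equals Settings.totalSampleCount again and float(accumulate) becomes 0, which discards the old sum. A rough JavaScript sketch of the same progressive averaging (a simplification, for illustration only):

// Progressive accumulation, simplified: keep a running radiance sum per pixel
// and divide by the number of samples taken so far; reset when the camera moves.
let accumulated = 0;   // accumulationBuffer.pixels[pixelIndex]
let totalSamples = 0;  // Settings.totalSampleCount
function accumulate(frameRadiance, samplesPerFrame, cameraMoved) {
  if (cameraMoved) { accumulated = 0; totalSamples = 0; }
  accumulated += frameRadiance;
  totalSamples += samplesPerFrame;
  return accumulated / totalSamples; // this average is then tone mapped and gamma corrected in the shader
}
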
/buffers/TextureArrayBuffer.mjs:
--------------------------------------------------------------------------------
1 | export default class TextureArrayBuffer {
2 | constructor({ device, textures } = _) {
3 | this.device = device || null;
4 | this.sampler = null;
5 | this.texture = null;
6 | this.textureView = null;
7 | this.init(textures);
8 | }
9 | };
10 |
11 | TextureArrayBuffer.prototype.getTextureSampler = function() {
12 | return this.sampler || null;
13 | };
14 |
15 | TextureArrayBuffer.prototype.getTextureView = function() {
16 | return this.textureView || null;
17 | };
18 |
19 | TextureArrayBuffer.prototype.init = function(textures) {
20 | let {device} = this;
21 |
22 | let queue = device.getQueue();
23 |
24 | let initialWidth = textures[0] ? textures[0].data.width : 16;
25 | let initialHeight = textures[0] ? textures[0].data.height : 16;
26 | for (let ii = 1; ii < textures.length; ++ii) {
27 | let {data, width, height} = textures[ii].data;
28 | if (width !== initialWidth) {
29 | throw new Error(`Expected image width of '${initialWidth}' but got '${width}'`);
30 | }
31 | else if (height !== initialHeight) {
32 | throw new Error(`Expected image height of '${initialHeight}' but got '${height}'`);
33 | }
34 | else if (width !== height) {
35 | throw new Error(`Expected a square image but width '${width}' does not match height '${height}'`);
36 | }
37 | };
38 |
39 | // create copy and insert placeholder image
40 | let placeHolderTexture = {
41 | data: {
42 | data: new Uint8ClampedArray(initialWidth * initialHeight * 4),
43 | width: initialWidth,
44 | height: initialHeight
45 | }
46 | };
47 | textures = [placeHolderTexture, ...textures];
48 |
49 | let sampler = device.createSampler({
50 | magFilter: "linear",
51 | minFilter: "linear",
52 | addressModeU: "repeat",
53 | addressModeV: "repeat",
54 | addressModeW: "repeat"
55 | });
56 |
57 | let texture = device.createTexture({
58 | size: {
59 | width: initialWidth,
60 | height: initialHeight,
61 | depth: 1
62 | },
63 | arrayLayerCount: textures.length,
64 | mipLevelCount: 1,
65 | sampleCount: 1,
66 | dimension: "2d",
67 | format: "rgba8unorm-srgb",
68 | usage: GPUTextureUsage.COPY_DST | GPUTextureUsage.SAMPLED
69 | });
70 |
71 | let textureView = texture.createView({
72 | dimension: "2d-array",
73 | baseArrayLayer: 0,
74 | arrayLayerCount: textures.length,
75 | format: "rgba8unorm-srgb"
76 | });
77 |
78 | let bytesPerRow = Math.ceil(initialWidth * 4 / 256) * 256;
79 | let textureData = new Uint8Array(bytesPerRow * initialHeight);
80 | let textureCopyBuffer = device.createBuffer({
81 | size: textureData.byteLength,
82 | usage: GPUBufferUsage.COPY_SRC | GPUBufferUsage.COPY_DST
83 | });
84 | for (let ii = 0; ii < textures.length; ++ii) {
85 | let {data, width, height} = textures[ii].data;
86 |
87 | // copy image data into buffer
88 | textureCopyBuffer.setSubData(0, data);
89 |
90 | let commandEncoder = device.createCommandEncoder({});
91 | commandEncoder.copyBufferToTexture(
92 | {
93 | buffer: textureCopyBuffer,
94 | bytesPerRow: bytesPerRow,
95 | arrayLayer: 0,
96 | mipLevel: 0,
97 | imageHeight: 0
98 | },
99 | {
100 | texture: texture,
101 | mipLevel: 0,
102 | arrayLayer: ii,
103 | origin: { x: 0, y: 0, z: 0 }
104 | },
105 | {
106 | width: width,
107 | height: height,
108 | depth: 1
109 | }
110 | );
111 | queue.submit([ commandEncoder.finish() ]);
112 | };
113 |
114 | this.sampler = sampler;
115 | this.texture = texture;
116 | this.textureView = textureView;
117 | };
118 |
--------------------------------------------------------------------------------
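Note on buffers/TextureArrayBuffer.mjs: rows copied from a buffer into a texture must use a bytesPerRow that is a multiple of 256 bytes, which is why the code rounds width * 4 up with Math.ceil(width * 4 / 256) * 256. A quick worked example in JavaScript:

// Row pitch for buffer-to-texture copies: each row is padded to a 256-byte multiple.
const bytesPerRow = width => Math.ceil(width * 4 / 256) * 256;
console.log(bytesPerRow(256)); // 1024 (256 px * 4 bytes, already aligned)
console.log(bytesPerRow(100)); // 512  (400 bytes rounded up to the next multiple of 256)
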
/passes/RayPickingPass.mjs:
--------------------------------------------------------------------------------
1 | import {
2 | loadShaderFile
3 | } from "../utils.mjs"
4 |
5 | export default class RayPickingPass {
6 | constructor({ device, instanceContainer } = _) {
7 | this.device = device || null;
8 | this.pipeline = null;
9 | this.bindGroup = null;
10 | this.pickingBuffer = null;
11 | this.pickingReadBackBuffer = null;
12 | this.settingsBuffer = null;
13 | this.commandBuffer = null;
14 | this.init(instanceContainer);
15 | }
16 | };
17 |
18 | RayPickingPass.prototype.getPipeline = function() {
19 | return this.pipeline || null;
20 | };
21 |
22 | RayPickingPass.prototype.getBindGroup = function() {
23 | return this.bindGroup || null;
24 | };
25 |
26 | RayPickingPass.prototype.getCommandBuffer = function() {
27 | let {device} = this;
28 | let {pipeline, bindGroup} = this;
29 | // record once, then reuse
30 | if (this.commandBuffer === null) {
31 | let commandEncoder = device.createCommandEncoder({});
32 | let passEncoder = commandEncoder.beginRayTracingPass({});
33 | passEncoder.setPipeline(pipeline);
34 | passEncoder.setBindGroup(0, bindGroup);
35 | passEncoder.traceRays(
36 | 0, 1, 2,
37 | 1, 1, 1
38 | );
39 | passEncoder.endPass();
40 | this.commandBuffer = commandEncoder.finish();
41 | }
42 | return this.commandBuffer;
43 | };
44 |
45 | RayPickingPass.prototype.setMousePickingPosition = function(x, y) {
46 | let {pickingBuffer} = this;
47 | pickingBuffer.setSubData(0, new Float32Array([x | 0, y | 0]));
48 | };
49 |
50 | RayPickingPass.prototype.getPickingResult = async function() {
51 | let {device} = this;
52 | let {pickingBuffer, pickingReadBackBuffer} = this;
53 |
54 | let queue = device.getQueue();
55 |
56 | let commandEncoder = device.createCommandEncoder({});
57 | commandEncoder.copyBufferToBuffer(
58 | pickingBuffer, 0,
59 | pickingReadBackBuffer, 0,
60 | pickingBuffer.byteLength
61 | );
62 | queue.submit([ commandEncoder.finish() ]);
63 |
64 | let result = await pickingReadBackBuffer.mapReadAsync();
65 | let resultF32 = new Float32Array(result);
66 |
67 | let x = resultF32[4];
68 | let y = resultF32[5];
69 | let z = resultF32[6];
70 | let instanceId = resultF32[7];
71 |
72 | pickingReadBackBuffer.unmap();
73 |
74 | return {
75 | instanceId,
76 | x, y, z
77 | };
78 | };
79 |
80 | RayPickingPass.prototype.init = function(instanceContainer) {
81 | let {device} = this;
82 |
83 | let pickingBufferStride = 6;
84 | let pickingBufferByteLength = pickingBufferStride * 4 * Float32Array.BYTES_PER_ELEMENT;
85 | let pickingBuffer = device.createBuffer({ usage: GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_SRC | GPUBufferUsage.COPY_DST, size: pickingBufferByteLength });
86 | pickingBuffer.byteLength = pickingBufferByteLength;
87 |
88 | let pickingReadBackBuffer = device.createBuffer({ usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ, size: pickingBuffer.byteLength });
89 | pickingReadBackBuffer.byteLength = pickingBuffer.byteLength;
90 |
91 | let rayGenShaderModule = device.createShaderModule({ code: loadShaderFile(`shaders/picking/ray-generation.rgen`) });
92 | let rayCHitModule = device.createShaderModule({ code: loadShaderFile(`shaders/picking/ray-closest-hit.rchit`) });
93 |
94 | let shaderBindingTable = device.createRayTracingShaderBindingTable({
95 | stages: [
96 | { module: rayGenShaderModule, stage: GPUShaderStage.RAY_GENERATION },
97 | { module: rayCHitModule, stage: GPUShaderStage.RAY_CLOSEST_HIT },
98 | ],
99 | groups: [
100 | { type: "general", generalIndex: 0 },
101 | { type: "triangles-hit-group", closestHitIndex: 1 },
102 | ]
103 | });
104 |
105 | let bindGroupLayout = device.createBindGroupLayout({
106 | entries: [
107 | { binding: 0, type: "acceleration-container", visibility: GPUShaderStage.RAY_GENERATION | GPUShaderStage.RAY_CLOSEST_HIT },
108 | { binding: 1, type: "storage-buffer", visibility: GPUShaderStage.RAY_GENERATION },
109 | { binding: 2, type: "uniform-buffer", visibility: GPUShaderStage.RAY_GENERATION },
110 | { binding: 3, type: "uniform-buffer", visibility: GPUShaderStage.RAY_GENERATION },
111 | ]
112 | });
113 |
114 | let bindGroup = device.createBindGroup({
115 | layout: bindGroupLayout,
116 | entries: [
117 | { binding: 0, size: 0, accelerationContainer: instanceContainer },
118 | { binding: 1, size: pickingBuffer.byteLength, buffer: pickingBuffer },
119 | { binding: 2, size: camera.getBuffer().byteLength, buffer: camera.getBuffer() },
120 | { binding: 3, size: settings.getBuffer().byteLength, buffer: settings.getBuffer() },
121 | ]
122 | });
123 |
124 | let pipeline = device.createRayTracingPipeline({
125 | layout: device.createPipelineLayout({
126 | bindGroupLayouts: [bindGroupLayout]
127 | }),
128 | rayTracingState: {
129 | shaderBindingTable,
130 | maxRecursionDepth: 1
131 | }
132 | });
133 |
134 | this.pipeline = pipeline;
135 | this.bindGroup = bindGroup;
136 | this.pickingBuffer = pickingBuffer;
137 | this.pickingReadBackBuffer = pickingReadBackBuffer;
138 | };
139 |
--------------------------------------------------------------------------------
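Note on passes/RayPickingPass.mjs: the picking closest-hit shader writes gl_InstanceCustomIndexEXT + 1 into the payload, so an instanceId of 0 means the ray hit nothing and any other value maps back to a scene instance via Scene.getInstanceById(instanceId - 1). The glue code below is a hypothetical usage sketch (the actual wiring presumably lives in index.mjs, which is not included in this excerpt):

// Hypothetical usage sketch: trace one picking ray at the mouse cursor and
// resolve the result to a scene instance.
async function pick(queue, rayPickingPass, scene, mouseX, mouseY) {
  rayPickingPass.setMousePickingPosition(mouseX, mouseY);
  queue.submit([ rayPickingPass.getCommandBuffer() ]);
  const { x, y, z, instanceId } = await rayPickingPass.getPickingResult();
  if (instanceId === 0) return null; // the closest-hit shader adds 1, so 0 means "miss"
  return { position: { x, y, z }, instance: scene.getInstanceById(instanceId - 1) };
}
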
/shaders/shading/utils.glsl:
--------------------------------------------------------------------------------
1 | #ifndef UTILS_H
2 | #define UTILS_H
3 |
4 | #pragma optionNV(fastmath on)
5 | #pragma optionNV(ifcvt none)
6 | #pragma optionNV(inline all)
7 | #pragma optionNV(strict on)
8 | #pragma optionNV(unroll all)
9 |
10 | #define PI 3.141592653589793
11 | #define HALF_PI 1.5707963267948966
12 | #define TWO_PI 6.283185307179586
13 | #define INV_PI 0.3183098861837907
14 |
15 | #define GAMMA 2.2
16 | #define INV_GAMMA 0.45454545454545453
17 |
18 | #define EPSILON 0.001
19 |
20 | #define LUMINANCE vec3(0.2126, 0.7152, 0.0722)
21 |
22 | #include "structs.glsl"
23 |
24 | vec2 blerp(vec2 b, vec2 p1, vec2 p2, vec2 p3) {
25 | return (1.0 - b.x - b.y) * p1 + b.x * p2 + b.y * p3;
26 | }
27 |
28 | vec3 blerp(vec2 b, vec3 p1, vec3 p2, vec3 p3) {
29 | return (1.0 - b.x - b.y) * p1 + b.x * p2 + b.y * p3;
30 | }
31 |
32 | vec2 SampleTriangle(vec2 u) {
33 | float uxsqrt = sqrt(u.x);
34 | return vec2(1.0 - uxsqrt, u.y * uxsqrt);
35 | }
36 |
37 | // rand functions taken from neo java lib and
38 | // https://github.com/nvpro-samples/optix_advanced_samples
39 | const uint LCG_A = 1664525u;
40 | const uint LCG_C = 1013904223u;
41 | const int MAX_RAND = 0x7fff;
42 | const int IEEE_ONE = 0x3f800000;
43 | const int IEEE_MASK = 0x007fffff;
44 |
45 | uint Tea(uint val0, uint val1) {
46 | uint v0 = val0;
47 | uint v1 = val1;
48 | uint s0 = 0;
49 | for (uint n = 0; n < 16; n++) {
50 | s0 += 0x9e3779b9;
51 | v0 += ((v1<<4)+0xa341316c)^(v1+s0)^((v1>>5)+0xc8013ea4);
52 | v1 += ((v0<<4)+0xad90777d)^(v0+s0)^((v0>>5)+0x7e95761e);
53 | }
54 | return v0;
55 | }
56 |
57 | uint Rand(inout uint seed) { // random integer in the range [0, MAX_RAND]
58 | seed = 69069 * seed + 1;
59 | return ((seed = 69069 * seed + 1) & MAX_RAND);
60 | }
61 |
62 | float Randf01(inout uint seed) { // random number in the range [0.0f, 1.0f]
63 | seed = (LCG_A * seed + LCG_C);
64 | return float(seed & 0x00FFFFFF) / float(0x01000000u);
65 | }
66 |
67 | float Randf11(inout uint seed) { // random number in the range [-1.0f, 1.0f]
68 | uint i = 0;
69 | seed = LCG_A * seed + LCG_C;
70 | i = IEEE_ONE | (((Rand(seed)) & IEEE_MASK) >> 9);
71 | return uintBitsToFloat(i) - 1.0;
72 | }
73 |
74 | vec2 RandInUnitDisk(inout uint seed) {
75 | vec2 p = vec2(0);
76 | do {
77 | p = 2 * vec2(Randf01(seed), Randf01(seed)) - 1;
78 | } while (dot(p, p) >= 1);
79 | return p;
80 | }
81 |
82 | vec3 RandInUnitSphere(inout uint seed) {
83 | vec3 p = vec3(0);
84 | do {
85 | p = 2 * vec3(Randf01(seed), Randf01(seed), Randf01(seed)) - 1;
86 | } while (dot(p, p) >= 1);
87 | return p;
88 | }
89 |
90 | // source: internetz
91 | vec3 Hash32(vec2 p){
92 | vec3 p3 = fract(vec3(p.xyx) * vec3(443.8975,397.2973, 491.1871));
93 | p3 += dot(p3, p3.yxz + 19.19);
94 | return fract(vec3((p3.x + p3.y) * p3.z, (p3.x + p3.z) * p3.y, (p3.y + p3.z) * p3.x));
95 | }
96 |
97 | vec3 DitherRGB(vec3 c, vec2 seed){
98 | return c + Hash32(seed) / 255.0;
99 | }
100 |
101 | float Luminance(vec3 color) {
102 | const vec3 luminance = { 0.30, 0.59, 0.11 };
103 | return dot(color, luminance);
104 | }
105 |
106 | vec3 SRGBToLinear(vec3 color) {
107 | return pow(color, vec3(INV_GAMMA));
108 | }
109 |
110 | vec3 Uncharted2ToneMapping(vec3 color) {
111 | float A = 0.15;
112 | float B = 0.50;
113 | float C = 0.10;
114 | float D = 0.20;
115 | float E = 0.02;
116 | float F = 0.30;
117 | float W = 11.2;
118 | float exposure = 2.0;
119 | color *= exposure;
120 | color = ((color * (A * color + C * B) + D * E) / (color * (A * color + B) + D * F)) - E / F;
121 | float white = ((W * (A * W + C * B) + D * E) / (W * (A * W + B) + D * F)) - E / F;
122 | return SRGBToLinear(color / white);
123 | }
124 |
125 | vec3 FilmicToneMapping(vec3 color) {
126 | vec3 x = max(vec3(0.0), color - 0.004);
127 | color = (x * (6.2 * x + 0.5)) / (x * (6.2 * x + 1.7) + 0.06);
128 | return SRGBToLinear(color);
129 | }
130 |
131 | float sqr(float f) {
132 | return f * f;
133 | }
134 |
135 | const float saturation = 0.12;
136 | vec3 ColorGrading(vec3 color) {
137 | vec3 gray = vec3(dot(LUMINANCE, color));
138 | color = vec3(mix(color, gray, -saturation)) * 1.0;
139 | return color;
140 | }
141 |
142 | vec3 CosineSampleHemisphere(float u1, float u2) {
143 | vec3 dir;
144 | float r = sqrt(u1);
145 | float phi = TWO_PI * u2;
146 | dir.x = r * cos(phi);
147 | dir.y = r * sin(phi);
148 | dir.z = sqrt(max(0.0, 1.0 - dir.x*dir.x - dir.y*dir.y));
149 | return dir;
150 | }
151 |
152 | float powerHeuristic(float a, float b) {
153 | float t = a * a;
154 | return t / (b * b + t);
155 | }
156 |
157 | float GTR1(float NdotH, float a) {
158 | if (a >= 1.0) return INV_PI;
159 | float a2 = a * a;
160 | float t = 1.0 + (a2 - 1.0) * NdotH * NdotH;
161 | return (a2 - 1.0) / (PI * log(a2) * t);
162 | }
163 |
164 | float GTR2(float NdotH, float a) {
165 | float a2 = a * a;
166 | float t = 1.0 + (a2 - 1.0) * NdotH * NdotH;
167 | return a2 / (PI * t * t);
168 | }
169 |
170 | float SmithGGX_G(float NdotV, float a) {
171 | float a2 = a * a;
172 | float b = NdotV * NdotV;
173 | return 1.0 / (NdotV + sqrt(a2 + b - a2 * b));
174 | }
175 |
176 | float SchlickFresnelReflectance(float u) {
177 | float m = clamp(1.0 - u, 0.0, 1.0);
178 | float m2 = m * m;
179 | return m2 * m2 * m;
180 | }
181 |
182 | #endif // UTILS_H
183 |
--------------------------------------------------------------------------------
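Note on shaders/shading/utils.glsl: ray-generation.rgen seeds the RNG with Tea(Tea(ipos.x, ipos.y), Settings.totalSampleCount), so every pixel and every accumulation frame starts from a decorrelated seed. A JavaScript port of Tea() with explicit 32-bit unsigned arithmetic, for illustration (not part of the project):

// Port of the Tea() hash from utils.glsl; all arithmetic is forced to wrap
// modulo 2^32 with ">>> 0" to match the GLSL uint behaviour.
function tea(val0, val1) {
  let v0 = val0 >>> 0, v1 = val1 >>> 0, s0 = 0;
  for (let n = 0; n < 16; ++n) {
    s0 = (s0 + 0x9e3779b9) >>> 0;
    v0 = (v0 + ((((v1 << 4) + 0xa341316c) >>> 0) ^ ((v1 + s0) >>> 0) ^ (((v1 >>> 5) + 0xc8013ea4) >>> 0))) >>> 0;
    v1 = (v1 + ((((v0 << 4) + 0xad90777d) >>> 0) ^ ((v0 + s0) >>> 0) ^ (((v0 >>> 5) + 0x7e95761e) >>> 0))) >>> 0;
  }
  return v0;
}
console.log(tea(tea(0, 0), 1)); // example: seed for pixel (0, 0) on the first frame
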
/buffers/GeometryBuffer.mjs:
--------------------------------------------------------------------------------
1 | import {
2 | calculateTangentsAndBitangents
3 | } from "../utils.mjs";
4 |
5 | export default class GeometryBuffer {
6 | constructor({ device, geometries } = _) {
7 | this.device = device || null;
8 | this.buffers = {
9 | face: null,
10 | attribute: null
11 | };
12 | this.init(geometries);
13 | }
14 | };
15 |
16 | GeometryBuffer.prototype.getFaceBuffer = function() {
17 | return this.buffers.face || null;
18 | };
19 |
20 | GeometryBuffer.prototype.getAttributeBuffer = function() {
21 | return this.buffers.attribute || null;
22 | };
23 |
24 | GeometryBuffer.prototype.init = function(geometries) {
25 | let {device} = this;
26 | let {buffers} = this;
27 |
28 | let faceBufferStride = 3;
29 | let attributeBufferStride = 16;
30 |
31 | // find total geometry buffer sizes
32 | let faceBufferTotalLength = 0;
33 | let attributeBufferTotalLength = 0;
34 | for (let geometry of geometries) {
35 | let {indices, vertices, normals, uvs} = geometry.data;
36 | faceBufferTotalLength += indices.length / 3 * faceBufferStride;
37 | attributeBufferTotalLength += indices.length * attributeBufferStride;
38 | };
39 |
40 | let faceBuffer = device.createBuffer({
41 | usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.STORAGE | GPUBufferUsage.RAY_TRACING,
42 | size: faceBufferTotalLength * 4
43 | });
44 | faceBuffer.byteLength = faceBufferTotalLength * 4;
45 | buffers.face = faceBuffer;
46 |
47 | let attributeBuffer = device.createBuffer({
48 | usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.STORAGE | GPUBufferUsage.RAY_TRACING,
49 | size: attributeBufferTotalLength * 4
50 | });
51 | attributeBuffer.byteLength = attributeBufferTotalLength * 4;
52 | buffers.attribute = attributeBuffer;
53 |
54 | let faceBufferData = new Uint32Array(faceBufferTotalLength);
55 | let attributeBufferData = new Float32Array(attributeBufferTotalLength);
56 | let faceBufferOffset = 0;
57 | let attributeBufferOffset = 0;
58 | for (let geometry of geometries) {
59 | let {indices, vertices, normals, uvs} = geometry.data;
60 |
61 | let {tangents, bitangents} = calculateTangentsAndBitangents(geometry.data);
62 |
63 | // copy each face into the continuous face buffer
64 | for (let ii = 0; ii < indices.length / 3; ++ii) {
65 | let index = ii * 3;
66 | let offset = faceBufferOffset + ii * faceBufferStride;
67 | faceBufferData[offset++] = index + 0;
68 | faceBufferData[offset++] = index + 1;
69 | faceBufferData[offset++] = index + 2;
70 | };
71 |
72 | // copy each attribute into the continuous attribute buffer
73 | for (let ii = 0; ii < indices.length; ++ii) {
74 | let index = indices[ii];
75 | let offset = attributeBufferOffset + ii * attributeBufferStride;
76 | attributeBufferData[offset++] = vertices[index * 3 + 0];
77 | attributeBufferData[offset++] = vertices[index * 3 + 1];
78 | attributeBufferData[offset++] = vertices[index * 3 + 2];
79 | attributeBufferData[offset++] = 0.0; // padding
80 | attributeBufferData[offset++] = normals[index * 3 + 0];
81 | attributeBufferData[offset++] = normals[index * 3 + 1];
82 | attributeBufferData[offset++] = normals[index * 3 + 2];
83 | attributeBufferData[offset++] = 0.0; // padding
84 | attributeBufferData[offset++] = tangents[index * 3 + 0];
85 | attributeBufferData[offset++] = tangents[index * 3 + 1];
86 | attributeBufferData[offset++] = tangents[index * 3 + 2];
87 | attributeBufferData[offset++] = 0.0; // padding
88 | attributeBufferData[offset++] = 0.0 + uvs[index * 2 + 0];
89 | attributeBufferData[offset++] = 1.0 - uvs[index * 2 + 1]; // flip vertical
90 | attributeBufferData[offset++] = 0.0; // padding
91 | attributeBufferData[offset++] = 0.0; // padding
92 | };
93 |
94 | // create acceleration container
95 | // we can already link the face and attribute buffers
96 | // even though their data hasn't been uploaded yet
97 | let container = device.createRayTracingAccelerationContainer({
98 | level: "bottom",
99 | flags: GPURayTracingAccelerationContainerFlag.PREFER_FAST_TRACE,
100 | geometries: [
101 | {
102 | flags: GPURayTracingAccelerationGeometryFlag.OPAQUE,
103 | type: "triangles",
104 | index: {
105 | buffer: faceBuffer,
106 | format: "uint32",
107 | offset: faceBufferOffset * Uint32Array.BYTES_PER_ELEMENT,
108 | count: indices.length
109 | },
110 | vertex: {
111 | buffer: attributeBuffer,
112 | format: "float3",
113 | stride: attributeBufferStride * Float32Array.BYTES_PER_ELEMENT,
114 | offset: attributeBufferOffset * Float32Array.BYTES_PER_ELEMENT,
115 | count: vertices.length
116 | }
117 | }
118 | ]
119 | });
120 |
121 | geometry.setAccelerationContainer({
122 | instance: container,
123 | faceOffset: faceBufferOffset,
124 | faceCount: indices.length,
125 | attributeOffset: attributeBufferOffset / attributeBufferStride
126 | });
127 |
128 | faceBufferOffset += indices.length / 3 * faceBufferStride;
129 | attributeBufferOffset += indices.length * attributeBufferStride;
130 | };
131 |
132 | // upload
133 | faceBuffer.setSubData(0, faceBufferData);
134 | attributeBuffer.setSubData(0, attributeBufferData);
135 |
136 | // build bottom-level containers
137 | let commandEncoder = device.createCommandEncoder({});
138 | for (let geometry of geometries) {
139 | let {accelerationContainer} = geometry;
140 | commandEncoder.buildRayTracingAccelerationContainer(accelerationContainer.instance);
141 | };
142 | device.getQueue().submit([ commandEncoder.finish() ]);
143 | };
144 |
--------------------------------------------------------------------------------
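Note on buffers/GeometryBuffer.mjs: the loader de-indexes every mesh, so the attribute buffer stores indices.length vertices of 16 floats each (matching the Vertex struct in structs.glsl) and the face buffer stores indices.length / 3 triangles of 3 uints each. A worked sizing example in JavaScript for a hypothetical mesh with 36 indices (a cube made of 12 triangles):

// Buffer sizing as computed in GeometryBuffer.init(), for an assumed 36-index mesh.
const indexCount = 36;
const faceEntries = indexCount / 3 * 3;   // 36 uints   -> 144 bytes
const attributeEntries = indexCount * 16; // 576 floats -> 2304 bytes
console.log(faceEntries * 4, attributeEntries * 4);
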
/Camera.mjs:
--------------------------------------------------------------------------------
1 | import {
2 | fixateToZero
3 | } from "./utils.mjs";
4 |
5 | export default class Camera {
6 | constructor({ device } = _) {
7 | this.device = device || null;
8 | this.buffer = null;
9 | this.hasMoved = false;
10 | this.deltaMovement = { x: 0, y: 0 };
11 | this.viewMatrix = mat4.create();
12 | this.viewInverseMatrix = mat4.create();
13 | this.projectionMatrix = mat4.create();
14 | this.projectionInverseMatrix = mat4.create();
15 | // previous projections
16 | this.previousViewInverseMatrix = mat4.create();
17 | this.previousProjectionInverseMatrix = mat4.create();
18 | this.viewProjectionMatrix = mat4.create();
19 | this.transforms = {
20 | translation: vec3.create(),
21 | rotation: vec3.create(),
22 | forward: vec3.create(),
23 | up: vec3.create()
24 | };
25 | this.transforms.translation = vec3.fromValues(
26 | 96, 68, 96
27 | );
28 | this.settings = {
29 | sampleCount: 1,
30 | totalSampleCount: 0,
31 | aperture: 0.125,
32 | focusDistance: 32.0,
33 | fieldOfView: Math.tan(70 * Math.PI / 360),
34 | zNear: 0.01,
35 | zFar: 8192.0
36 | };
37 | this.init();
38 | }
39 | };
40 |
41 | Camera.prototype.getBuffer = function() {
42 | return this.buffer || null;
43 | };
44 |
45 | Camera.prototype.init = function() {
46 | let {device} = this;
47 | let cameraBufferByteLength = 90 * Float32Array.BYTES_PER_ELEMENT;
48 | let cameraBuffer = device.createBuffer({
49 | size: cameraBufferByteLength,
50 | usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.UNIFORM
51 | });
52 | cameraBuffer.byteLength = cameraBufferByteLength;
53 | this.buffer = cameraBuffer;
54 | this.resetAccumulation();
55 | };
56 |
57 | Camera.prototype.resetAccumulation = function() {
58 | let {settings} = this;
59 | settings.totalSampleCount = settings.sampleCount;
60 | };
61 |
62 | Camera.prototype.increaseAccumulation = function() {
63 | let {settings} = this;
64 | settings.totalSampleCount += settings.sampleCount;
65 | };
66 |
67 | Camera.prototype.update = function(delta) {
68 | let {settings, buffer, deltaMovement} = this;
69 |
70 | let mView = this.viewMatrix;
71 | let mViewInverse = this.viewInverseMatrix;
72 | let mProjection = this.projectionMatrix;
73 | let mProjectionInverse = this.projectionInverseMatrix;
74 | let mViewProjection = this.viewProjectionMatrix;
75 |
76 | let mPreviousViewInverse = this.previousViewInverseMatrix;
77 | let mPreviousProjectionInverse = this.previousProjectionInverseMatrix;
78 |
79 | let {translation, rotation, forward, up} = this.transforms;
80 |
81 | this.control(
82 | [
83 | isKeyPressed("W") | 0,
84 | isKeyPressed("S") | 0,
85 | isKeyPressed("A") | 0,
86 | isKeyPressed("D") | 0,
87 | isKeyPressed(" ") | 0,
88 | isKeyPressed("C") | 0,
89 | isKeyPressed("Shift") | 0
90 | ],
91 | delta
92 | );
93 |
94 | this.hasMoved = (
95 | !mat4.exactEquals(mViewInverse, mPreviousViewInverse) ||
96 | !mat4.exactEquals(mProjectionInverse, mPreviousProjectionInverse)
97 | );
98 |
99 | // projection matrix
100 | {
101 | let aspect = window.width / window.height;
102 | mat4.identity(mProjection);
103 | mat4.perspective(
104 | mProjection,
105 | settings.fieldOfView,
106 | aspect,
107 | settings.zNear,
108 | settings.zFar
109 | );
110 | }
111 | // projection-inverse matrix
112 | {
113 | mat4.copy(mPreviousProjectionInverse, mProjectionInverse);
114 | mat4.invert(mProjectionInverse, mProjection);
115 | }
116 | // view matrix
117 | {
118 | mat4.identity(mView);
119 | mat4.rotateX(mView, mView, rotation[0]);
120 | mat4.rotateY(mView, mView, rotation[1]);
121 | mat4.rotateZ(mView, mView, rotation[2]);
122 | mat4.translate(mView, mView, vec3.negate(vec3.create(), translation));
123 | }
124 | // view-inverse matrix
125 | {
126 | mat4.copy(mPreviousViewInverse, mViewInverse);
127 | mat4.invert(mViewInverse, mView);
128 | }
129 | // up, forward vector
130 | {
131 | vec3.set(up, mView[0], mView[4], mView[8]);
132 | vec3.set(forward, mView[2], mView[6], mView[10]);
133 | vec3.normalize(up, up);
134 | vec3.normalize(forward, forward);
135 | }
136 | // view-projection matrix
137 | {
138 | mat4.multiply(mViewProjection, mProjection, mView);
139 | }
140 |
141 | deltaMovement.x = fixateToZero(deltaMovement.x * 0.125, 0.01);
142 | deltaMovement.y = fixateToZero(deltaMovement.y * 0.125, 0.01);
143 |
144 | let dataBuf = new ArrayBuffer(buffer.byteLength);
145 | let dataF32 = new Float32Array(dataBuf);
146 | let dataU32 = new Uint32Array(dataBuf);
147 |
148 | let offset = 0;
149 | dataF32.set(forward, offset); offset += 4;
150 | dataF32.set(mViewInverse, offset); offset += 16;
151 | dataF32.set(mProjectionInverse, offset); offset += 16;
152 | dataF32.set(mViewProjection, offset); offset += 16;
153 | dataF32.set(mPreviousViewInverse, offset); offset += 16;
154 | dataF32.set(mPreviousProjectionInverse, offset); offset += 16;
155 | dataF32.set(new Float32Array([settings.aperture]), offset); offset += 1;
156 | dataF32.set(new Float32Array([settings.focusDistance]), offset); offset += 1;
157 | dataF32.set(new Float32Array([settings.zNear]), offset); offset += 1;
158 | dataF32.set(new Float32Array([settings.zFar]), offset); offset += 1;
159 |
160 | buffer.setSubData(0, dataF32);
161 | };
162 |
163 | Camera.prototype.control = function(move, delta) {
164 | let {deltaMovement} = this;
165 | let dir = vec3.create();
166 | let speed = 64.0 * delta;
167 | if (move[6]) speed *= 2.75;
168 | if (move[0]) dir[2] += speed;
169 | else if (move[1]) dir[2] -= speed;
170 | if (move[2]) dir[0] += speed * 1.0;
171 | else if (move[3]) dir[0] -= speed * 1.0;
172 | if (move[4]) dir[1] -= speed;
173 | else if (move[5]) dir[1] += speed;
174 | this.move(dir, delta);
175 | this.look([deltaMovement.x, deltaMovement.y], delta);
176 | };
177 |
178 | Camera.prototype.move = function(direction, delta) {
179 | let {rotation, translation} = this.transforms;
180 | let dir = vec3.clone(direction);
181 | let rotX = vec3.fromValues(1.0, 0.0, 0.0);
182 | let rotY = vec3.fromValues(0.0, 1.0, 0.0);
183 | vec3.rotateX(dir, dir, rotX, -rotation[0]);
184 | vec3.rotateY(dir, dir, rotY, -rotation[1]);
185 | vec3.add(translation, translation, vec3.negate(vec3.create(), dir));
186 | };
187 |
188 | Camera.prototype.look = function(direction, delta) {
189 | let {rotation} = this.transforms;
190 | rotation[0] -= direction[1] * delta;
191 | rotation[1] -= direction[0] * delta;
192 | if (rotation[0] < -Math.PI * 0.5) rotation[0] = -Math.PI * 0.5;
193 | if (rotation[0] > Math.PI * 0.5) rotation[0] = Math.PI * 0.5;
194 | };
195 |
--------------------------------------------------------------------------------
/passes/RayTracingPass.mjs:
--------------------------------------------------------------------------------
1 | import {
2 | loadShaderFile
3 | } from "../utils.mjs"
4 |
5 | import GeometryBuffer from "../buffers/GeometryBuffer.mjs";
6 | import InstanceBuffer from "../buffers/InstanceBuffer.mjs";
7 | import TextureArrayBuffer from "../buffers/TextureArrayBuffer.mjs";
8 |
9 | export default class RayTracingPass {
10 | constructor({ device, scene } = _) {
11 | this.device = device || null;
12 | this.pipeline = null;
13 | this.bindGroup = null;
14 | this.pixelBuffer = null;
15 | this.commandBuffer = null;
16 | this.init(scene);
17 | }
18 | };
19 |
20 | RayTracingPass.prototype.getPipeline = function() {
21 | return this.pipeline || null;
22 | };
23 |
24 | RayTracingPass.prototype.getBindGroup = function() {
25 | return this.bindGroup || null;
26 | };
27 |
28 | RayTracingPass.prototype.getPixelBuffer = function() {
29 | return this.pixelBuffer || null;
30 | };
31 |
32 | RayTracingPass.prototype.getInstanceBuffer = function() {
33 | return this.instanceBuffer || null;
34 | };
35 |
36 | RayTracingPass.prototype.getCommandBuffer = function() {
37 | let {device} = this;
38 | let {pipeline, bindGroup} = this;
39 | // record once, then reuse
40 | if (this.commandBuffer === null) {
41 | let commandEncoder = device.createCommandEncoder({});
42 | let passEncoder = commandEncoder.beginRayTracingPass({});
43 | passEncoder.setPipeline(pipeline);
44 | passEncoder.setBindGroup(0, bindGroup);
45 | passEncoder.traceRays(
46 | 0, 1, 3,
47 | window.width, window.height, 1
48 | );
49 | passEncoder.endPass();
50 | this.commandBuffer = commandEncoder.finish();
51 | }
52 | return this.commandBuffer;
53 | };
54 |
55 | RayTracingPass.prototype.init = function(scene) {
56 | let {device} = this;
57 |
58 | let {textures, materials, geometries} = scene.objects;
59 |
60 | let instances = scene.getInstancesFlattened();
61 | let lights = scene.getLightsFlattened();
62 |
63 | let geometryBuffer = new GeometryBuffer({ device, geometries });
64 |
65 | let faceBuffer = geometryBuffer.getFaceBuffer();
66 | let attributeBuffer = geometryBuffer.getAttributeBuffer();
67 |
68 | let instanceBuffer = new InstanceBuffer({ device, instances, geometries, materials, textures, lights });
69 |
70 | let materialBuffer = instanceBuffer.getMaterialBuffer();
71 | let instancesBuffer = instanceBuffer.getInstanceBuffer();
72 | let lightsBuffer = instanceBuffer.getLightBuffer();
73 |
74 | let instanceContainer = instanceBuffer.getAccelerationContainer();
75 |
76 | let textureArray = new TextureArrayBuffer({ device, textures });
77 | let textureView = textureArray.getTextureView();
78 | let textureSampler = textureArray.getTextureSampler();
79 |
80 | let pixelBufferByteLength = window.width * window.height * 4 * Float32Array.BYTES_PER_ELEMENT;
81 | let pixelBuffer = device.createBuffer({ usage: GPUBufferUsage.STORAGE, size: pixelBufferByteLength });
82 | pixelBuffer.byteLength = pixelBufferByteLength;
83 |
84 | let accumulationBufferByteLength = window.width * window.height * 4 * Float32Array.BYTES_PER_ELEMENT;
85 | let accumulationBuffer = device.createBuffer({ usage: GPUBufferUsage.STORAGE, size: accumulationBufferByteLength });
86 | accumulationBuffer.byteLength = accumulationBufferByteLength;
87 |
88 | let rayGenShaderModule = device.createShaderModule({ code: loadShaderFile(`shaders/shading/ray-generation.rgen`) });
89 | let rayCHitModule = device.createShaderModule({ code: loadShaderFile(`shaders/shading/ray-closest-hit.rchit`) });
90 | let rayMissShaderModule = device.createShaderModule({ code: loadShaderFile(`shaders/shading/ray-miss.rmiss`) });
91 | let rayShadowCHitShaderModule = device.createShaderModule({ code: loadShaderFile(`shaders/shading/shadow-ray-closest-hit.rchit`) });
92 | let rayShadowMissShaderModule = device.createShaderModule({ code: loadShaderFile(`shaders/shading/shadow-ray-miss.rmiss`) });
93 |
94 | let shaderBindingTable = device.createRayTracingShaderBindingTable({
95 | stages: [
96 | { module: rayGenShaderModule, stage: GPUShaderStage.RAY_GENERATION },
97 | { module: rayCHitModule, stage: GPUShaderStage.RAY_CLOSEST_HIT },
98 | { module: rayShadowCHitShaderModule, stage: GPUShaderStage.RAY_CLOSEST_HIT },
99 | { module: rayMissShaderModule, stage: GPUShaderStage.RAY_MISS },
100 | { module: rayShadowMissShaderModule, stage: GPUShaderStage.RAY_MISS },
101 | ],
102 | groups: [
103 | { type: "general", generalIndex: 0 },
104 | { type: "triangles-hit-group", closestHitIndex: 1 },
105 | { type: "triangles-hit-group", closestHitIndex: 2 },
106 | { type: "general", generalIndex: 3 },
107 | { type: "general", generalIndex: 4 },
108 | ]
109 | });
110 |
111 | let bindGroupLayout = device.createBindGroupLayout({
112 | entries: [
113 | { binding: 0, type: "acceleration-container", visibility: GPUShaderStage.RAY_GENERATION | GPUShaderStage.RAY_CLOSEST_HIT },
114 | { binding: 1, type: "storage-buffer", visibility: GPUShaderStage.RAY_GENERATION },
115 | { binding: 2, type: "storage-buffer", visibility: GPUShaderStage.RAY_GENERATION },
116 | { binding: 3, type: "uniform-buffer", visibility: GPUShaderStage.RAY_GENERATION | GPUShaderStage.RAY_CLOSEST_HIT },
117 | { binding: 4, type: "uniform-buffer", visibility: GPUShaderStage.RAY_GENERATION | GPUShaderStage.RAY_CLOSEST_HIT },
118 | { binding: 5, type: "storage-buffer", visibility: GPUShaderStage.RAY_CLOSEST_HIT },
119 | { binding: 6, type: "storage-buffer", visibility: GPUShaderStage.RAY_CLOSEST_HIT },
120 | { binding: 7, type: "storage-buffer", visibility: GPUShaderStage.RAY_CLOSEST_HIT },
121 | { binding: 8, type: "storage-buffer", visibility: GPUShaderStage.RAY_CLOSEST_HIT },
122 | { binding: 9, type: "storage-buffer", visibility: GPUShaderStage.RAY_CLOSEST_HIT },
123 | { binding: 10, type: "sampler", visibility: GPUShaderStage.RAY_CLOSEST_HIT },
124 | { binding: 11, type: "sampled-texture", visibility: GPUShaderStage.RAY_CLOSEST_HIT, viewDimension: "2d-array" },
125 | ]
126 | });
127 |
128 | let bindGroup = device.createBindGroup({
129 | layout: bindGroupLayout,
130 | entries: [
131 | { binding: 0, size: 0, accelerationContainer: instanceContainer },
132 | { binding: 1, size: pixelBuffer.byteLength, buffer: pixelBuffer },
133 | { binding: 2, size: accumulationBuffer.byteLength, buffer: accumulationBuffer },
134 | { binding: 3, size: camera.getBuffer().byteLength, buffer: camera.getBuffer() },
135 | { binding: 4, size: settings.getBuffer().byteLength, buffer: settings.getBuffer() },
136 | { binding: 5, size: attributeBuffer.byteLength, buffer: attributeBuffer },
137 | { binding: 6, size: faceBuffer.byteLength, buffer: faceBuffer },
138 | { binding: 7, size: instancesBuffer.byteLength, buffer: instancesBuffer },
139 | { binding: 8, size: materialBuffer.byteLength, buffer: materialBuffer },
140 | { binding: 9, size: lightsBuffer.byteLength, buffer: lightsBuffer },
141 | { binding: 10, size: 0, sampler: textureSampler },
142 | { binding: 11, size: 0, textureView: textureView },
143 | ]
144 | });
145 |
146 | let pipeline = device.createRayTracingPipeline({
147 | layout: device.createPipelineLayout({
148 | bindGroupLayouts: [bindGroupLayout]
149 | }),
150 | rayTracingState: {
151 | shaderBindingTable,
152 | maxRecursionDepth: 2
153 | }
154 | });
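    | // maxRecursionDepth 2: the ray-generation shader traces primary rays (depth 1),
    | // and each closest-hit invocation may trace one nested shadow ray (depth 2)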
155 |
156 | this.pipeline = pipeline;
157 | this.bindGroup = bindGroup;
158 | this.pixelBuffer = pixelBuffer;
159 | this.instanceBuffer = instanceBuffer;
160 | };
161 |
--------------------------------------------------------------------------------
/shaders/shading/ray-closest-hit.rchit:
--------------------------------------------------------------------------------
1 | #version 460
2 | #extension GL_EXT_ray_tracing : require
3 | #extension GL_GOOGLE_include_directive : enable
4 | #extension GL_EXT_nonuniform_qualifier : enable
5 | #pragma shader_stage(closest)
6 |
7 | #include "utils.glsl"
8 |
9 | ShadingData shading;
10 |
11 | #include "disney.glsl"
12 |
13 | hitAttributeEXT vec3 Hit;
14 |
15 | layout (location = 0) rayPayloadInEXT RayPayload Ray;
16 | layout (location = 1) rayPayloadEXT ShadowRayPayload ShadowRay;
17 |
18 | layout (binding = 0, set = 0) uniform accelerationStructureEXT topLevelAS;
19 |
20 | layout (binding = 3) uniform CameraBuffer {
21 | vec4 forward;
22 | mat4 viewInverse;
23 | mat4 projectionInverse;
24 | mat4 viewProjection;
25 | mat4 previousViewInverse;
26 | mat4 previousProjectionInverse;
27 | float aperture;
28 | float focusDistance;
29 | float zNear;
30 | float zFar;
31 | } Camera;
32 |
33 | layout (binding = 4) uniform SettingsBuffer {
34 | uint sampleCount;
35 | uint totalSampleCount;
36 | uint lightCount;
37 | uint screenWidth;
38 | uint screenHeight;
39 | uint pad_0;
40 | uint pad_1;
41 | uint pad_2;
42 | } Settings;
43 |
44 | layout (binding = 5, std430) readonly buffer AttributeBuffer {
45 | Vertex Vertices[];
46 | };
47 |
48 | layout (binding = 6, std430) readonly buffer FaceBuffer {
49 | uint Faces[];
50 | };
51 |
52 | layout (binding = 7, std140, row_major) readonly buffer InstanceBuffer {
53 | Instance Instances[];
54 | };
55 |
56 | layout (binding = 8, std430) readonly buffer MaterialBuffer {
57 | Material Materials[];
58 | };
59 |
60 | layout (binding = 9, std430) readonly buffer LightBuffer {
61 | Light Lights[];
62 | };
63 |
64 | layout (binding = 10) uniform sampler TextureSampler;
65 | layout (binding = 11) uniform texture2DArray TextureArray;
66 |
67 | LightSource PickRandomLightSource(inout uint seed, in vec3 surfacePos, out vec3 lightDirection, out float lightDistance) {
68 | const uint lightIndex = 1 + uint(Randf01(seed) * Settings.lightCount);
69 | const uint geometryInstanceId = Lights[nonuniformEXT(lightIndex)].instanceIndex;
70 | const Instance instance = Instances[nonuniformEXT(geometryInstanceId)];
71 |
72 | const uint faceIndex = instance.faceIndex + uint(Randf01(seed) * instance.faceCount);
73 |
74 | const vec2 attribs = SampleTriangle(vec2(Randf01(seed), Randf01(seed)));
75 |
76 | const Vertex v0 = Vertices[instance.vertexIndex + Faces[faceIndex + 0]];
77 | const Vertex v1 = Vertices[instance.vertexIndex + Faces[faceIndex + 1]];
78 | const Vertex v2 = Vertices[instance.vertexIndex + Faces[faceIndex + 2]];
79 |
80 | const vec3 p0 = (instance.transformMatrix * vec4(v0.position.xyz, 1.0)).xyz;
81 | const vec3 p1 = (instance.transformMatrix * vec4(v1.position.xyz, 1.0)).xyz;
82 | const vec3 p2 = (instance.transformMatrix * vec4(v2.position.xyz, 1.0)).xyz;
83 | const vec3 pw = blerp(attribs, p0, p1, p2);
84 |
85 | const vec3 n0 = v0.normal.xyz;
86 | const vec3 n1 = v1.normal.xyz;
87 | const vec3 n2 = v2.normal.xyz;
88 | const vec3 nw = normalize(mat3x3(instance.normalMatrix) * blerp(attribs, n0.xyz, n1.xyz, n2.xyz));
89 |
90 | const float triangleArea = 0.5 * length(cross(p1 - p0, p2 - p0));
91 |
92 | const vec3 lightSurfacePos = pw;
93 | const vec3 lightEmission = Materials[instance.materialIndex].color.rgb;
94 |   const vec3 lightNormal = nw; // surface normal of the sampled light triangle (for the area-to-solid-angle pdf below)
95 |
96 | const vec3 lightPos = lightSurfacePos - surfacePos;
97 | const float lightDist = length(lightPos);
98 | const float lightDistSq = lightDist * lightDist;
99 | const vec3 lightDir = lightPos / lightDist;
100 |
101 | const float lightPdf = lightDistSq / (triangleArea * abs(dot(lightNormal, lightDir)));
102 |
103 | const vec4 emissionAndGeometryId = vec4(
104 | lightEmission, geometryInstanceId
105 | );
106 | const vec4 directionAndPdf = vec4(
107 | lightDir, lightPdf
108 | );
109 |
110 | // backface
111 | /*float cosTheta = dot(nw, normalize(lightPos));
112 | if (max(cosTheta, 0.0) >= EPSILON) {
113 | return LightSource(
114 | emissionAndGeometryId,
115 | vec4(lightDir, 0)
116 | );
117 | }*/
118 |
119 | lightDirection = lightDir;
120 | lightDistance = lightDist;
121 |
122 | return LightSource(
123 | emissionAndGeometryId,
124 | directionAndPdf
125 | );
126 | }
127 |
128 | vec3 DirectLight(const uint instanceId, in vec3 normal) {
129 | vec3 Lo = vec3(0.0);
130 |
131 | const LightSource lightSource = Ray.lightSource;
132 |
133 | const vec4 directionAndPdf = lightSource.directionAndPdf;
134 | const vec4 emissionAndGeometryId = lightSource.emissionAndGeometryId;
135 |
136 | const vec3 lightEmission = emissionAndGeometryId.xyz;
137 | const uint lightGeometryInstanceId = uint(emissionAndGeometryId.w);
138 |
139 |   // if we hit a light source, just return its emission directly
140 | if (instanceId == lightGeometryInstanceId) return lightEmission;
141 |
142 | // abort if we are occluded
143 | if (Ray.shadowed) return Lo;
144 |
145 | const vec3 lightDir = directionAndPdf.xyz;
146 | const float lightPdf = directionAndPdf.w;
147 | const vec3 powerPdf = lightEmission * Settings.lightCount;
148 |
149 | const vec3 N = normal;
150 | const vec3 V = -gl_WorldRayDirectionEXT;
151 | const vec3 L = lightDir;
152 | const vec3 H = normalize(V + L);
153 |
154 | const float NdotH = max(0.0, dot(N, H));
155 | const float NdotL = max(0.0, dot(L, N));
156 | const float HdotL = max(0.0, dot(H, L));
157 | const float NdotV = max(0.0, dot(N, V));
158 |
159 | const float bsdfPdf = DisneyPdf(NdotH, NdotL, HdotL);
160 |
161 | const vec3 f = DisneyEval(NdotL, NdotV, NdotH, HdotL);
162 |
163 | Lo += powerHeuristic(lightPdf, bsdfPdf) * f * powerPdf / max(0.001, lightPdf);
164 |
165 | return max(vec3(0), Lo);
166 | }
167 |
168 | void main() {
169 | const vec3 surfacePosition = gl_WorldRayOriginEXT + gl_WorldRayDirectionEXT * gl_RayTmaxEXT;
170 | const uint instanceId = gl_InstanceCustomIndexEXT;
171 |
172 | const Instance instance = Instances[nonuniformEXT(instanceId)];
173 |
174 | const Vertex v0 = Vertices[instance.vertexIndex + Faces[instance.faceIndex + gl_PrimitiveID * 3 + 0]];
175 | const Vertex v1 = Vertices[instance.vertexIndex + Faces[instance.faceIndex + gl_PrimitiveID * 3 + 1]];
176 | const Vertex v2 = Vertices[instance.vertexIndex + Faces[instance.faceIndex + gl_PrimitiveID * 3 + 2]];
177 |
178 | const vec2 u0 = v0.uv.xy, u1 = v1.uv.xy, u2 = v2.uv.xy;
179 | const vec3 n0 = v0.normal.xyz, n1 = v1.normal.xyz, n2 = v2.normal.xyz;
180 | const vec3 t0 = v0.tangent.xyz, t1 = v1.tangent.xyz, t2 = v2.tangent.xyz;
181 |
182 | const Material material = Materials[instance.materialIndex];
183 |
184 | const vec2 uv = blerp(Hit.xy, u0.xy, u1.xy, u2.xy) * material.textureScaling;
185 | const vec3 no = blerp(Hit.xy, n0.xyz, n1.xyz, n2.xyz);
186 | const vec3 ta = blerp(Hit.xy, t0.xyz, t1.xyz, t2.xyz);
187 |
188 | const vec3 nw = normalize(mat3x3(instance.normalMatrix) * no);
189 | const vec3 tw = normalize(mat3x3(instance.normalMatrix) * ta);
190 | const vec3 bw = cross(nw, tw);
191 |
192 | const vec3 tex0 = texture(sampler2DArray(TextureArray, TextureSampler), vec3(uv, material.albedoIndex)).rgb;
193 | const vec3 tex1 = texture(sampler2DArray(TextureArray, TextureSampler), vec3(uv, material.normalIndex)).rgb;
194 | const vec3 tex2 = texture(sampler2DArray(TextureArray, TextureSampler), vec3(uv, material.metalRoughnessIndex)).rgb;
195 | const vec3 tex3 = texture(sampler2DArray(TextureArray, TextureSampler), vec3(uv, material.emissionIndex)).rgb;
196 |
197 | // material color
198 | vec3 color = tex0 + material.color.rgb;
199 | // material normal
200 | const vec3 normal = normalize(
201 | material.normalIndex > 0 ?
202 | mat3(tw, bw, nw) * normalize((pow(tex1, vec3(INV_GAMMA))) * 2.0 - 1.0).xyz :
203 | nw
204 | );
205 | // material metalness/roughness
206 | const vec2 metalRoughness = pow(vec2(tex2.r, tex2.g), vec2(INV_GAMMA));
207 | // material emission
208 | const vec3 emission = pow(tex3, vec3(GAMMA)) * material.emissionIntensity;
209 |
210 | uint seed = Ray.seed;
211 | float t = gl_HitTEXT;
212 |
213 | vec3 radiance = vec3(0);
214 | vec3 throughput = Ray.throughput.rgb;
215 |
216 | radiance += emission * throughput;
217 |
218 | shading.base_color = color;
219 | shading.metallic = clamp(metalRoughness.r + material.metalness, 0.001, 0.999) * material.metalnessIntensity;
220 | shading.specular = material.specular;
221 | shading.roughness = clamp(metalRoughness.g + material.roughness, 0.001, 0.999) * material.roughnessIntensity;
222 | {
223 | const vec3 cd_lin = shading.base_color;
224 | const float cd_lum = dot(cd_lin, vec3(0.3, 0.6, 0.1));
225 | const vec3 c_spec0 = mix(shading.specular * vec3(0.3), cd_lin, shading.metallic);
226 | const float cs_lum = dot(c_spec0, vec3(0.3, 0.6, 0.1));
227 | const float cs_w = cs_lum / (cs_lum + (1.0 - shading.metallic) * cd_lum);
228 | shading.csw = cs_w;
229 | }
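    | // csw weights specular vs. diffuse lobe selection by relative luminance
    | // (consumed by the Disney sampling/pdf routines in disney.glsl)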
230 |
231 | // pick a random light source
232 | // also returns a direction which we will shoot our shadow ray to
233 | vec3 lightDirection = vec3(0);
234 | float lightDistance = 0.0;
235 | LightSource lightSource = PickRandomLightSource(Ray.seed, surfacePosition, lightDirection, lightDistance);
236 | Ray.lightSource = lightSource;
237 |
238 |   // shoot the shadow ray (sbt hit group 1, miss shader 1, payload location 1 -> ShadowRay)
239 | traceRayEXT(topLevelAS, gl_RayFlagsTerminateOnFirstHitEXT, 0xFF, 1, 0, 1, surfacePosition, EPSILON, lightDirection, lightDistance - EPSILON, 1);
240 | Ray.shadowed = ShadowRay.shadowed;
241 |
242 | radiance += DirectLight(instanceId, normal) * throughput;
243 |
244 | const vec3 bsdfDir = DisneySample(seed, -gl_WorldRayDirectionEXT, normal);
245 |
246 | const vec3 N = normal;
247 | const vec3 V = -gl_WorldRayDirectionEXT;
248 | const vec3 L = bsdfDir;
249 | const vec3 H = normalize(V + L);
250 |
251 | const float NdotH = abs(dot(N, H));
252 | const float NdotL = abs(dot(L, N));
253 | const float HdotL = abs(dot(H, L));
254 | const float NdotV = abs(dot(N, V));
255 |
256 | const float pdf = DisneyPdf(NdotH, NdotL, HdotL);
257 | if (pdf > 0.0) {
258 | throughput *= DisneyEval(NdotL, NdotV, NdotH, HdotL) / pdf;
259 | } else {
260 | t = -1.0;
261 | }
262 |
263 | Ray.radianceAndDistance = vec4(radiance, t);
264 | Ray.scatterDirection = vec4(bsdfDir, t);
265 | Ray.throughput = vec4(throughput, 1);
266 | Ray.seed = seed;
267 | }
268 |
--------------------------------------------------------------------------------
/utils.mjs:
--------------------------------------------------------------------------------
1 | import fs from "fs";
2 | import path from "path";
3 | import tolw from "tolw";
4 | import lodepng from "@cwasm/lodepng";
5 | import jpegturbo from "@cwasm/jpeg-turbo";
6 |
7 | export function fixateToZero(value, range) {
8 | if (value > 0 && value <= range) return 0.0;
9 | if (value < 0 && value >= -range) return 0.0;
10 | return value;
11 | };
12 |
13 | export function clamp(value, min, max) {
14 | return Math.max(Math.min(max, value), min);
15 | };
16 |
17 | export function keyCodeToChar(code) {
18 | return String.fromCharCode((96 <= code && code <= 105) ? code - 48 : code);
19 | };
20 |
21 | export function readBinaryFile(path) {
22 | let buffer = fs.readFileSync(path);
23 | let {byteOffset, byteLength} = buffer;
24 | return new Uint8Array(buffer.buffer).subarray(byteOffset, byteOffset + byteLength);
25 | };
26 |
27 | export function readObjectFile(path) {
28 | let buf = fs.readFileSync(path);
29 | return tolw.loadObj(buf);
30 | };
31 |
32 | export function readImageFile(path) {
33 | let buf = fs.readFileSync(path);
34 | if (isPNGFile(buf)) return readPNGFile(buf);
35 | if (isJPEGFile(buf)) return readJPEGFile(buf);
36 | throw new Error(`Cannot process image file '${path}'`);
37 | };
38 |
39 | export function readPNGFile(buf) {
40 | return lodepng.decode(buf);
41 | };
42 |
43 | export function readJPEGFile(buf) {
44 | return jpegturbo.decode(buf);
45 | };
46 |
47 | export function isPNGFile(buffer) {
48 | let viewU8 = new Uint8Array(buffer);
49 | let offset = 0x0;
50 | return (
51 | viewU8[offset++] === 0x89 &&
52 | viewU8[offset++] === 0x50 &&
53 | viewU8[offset++] === 0x4E &&
54 | viewU8[offset++] === 0x47
55 | );
56 | };
57 |
58 | export function isJPEGFile(buffer) {
59 | let viewU8 = new Uint8Array(buffer);
60 | let offset = 0x0;
61 | return (
62 | viewU8[offset++] === 0xFF &&
63 | viewU8[offset++] === 0xD8 &&
64 | viewU8[offset++] === 0xFF &&
65 | viewU8[offset++] === 0xE0
66 | );
67 | };
68 |
69 | let mModel = null;
70 | let mNormal = null;
71 | let mTransform = null;
72 | export function getTransformMatrix(transform) {
73 | let {scale, rotation, translation} = transform;
74 |
75 | if (mModel === null) mModel = mat4.create();
76 | if (mNormal === null) mNormal = mat4.create();
77 | if (mTransform === null) mTransform = mat4.create().subarray(0, 12);
78 |
79 | mat4.identity(mModel);
80 | mat4.identity(mTransform);
81 |
82 | // translation
83 | mat4.translate(mModel, mModel, vec3.fromValues(translation.x, translation.y, translation.z));
84 | // rotation
85 | mat4.rotateX(mModel, mModel, rotation.x * (Math.PI / 180));
86 | mat4.rotateY(mModel, mModel, rotation.y * (Math.PI / 180));
87 | mat4.rotateZ(mModel, mModel, rotation.z * (Math.PI / 180));
88 | // scaling
89 | mat4.scale(mModel, mModel, vec3.fromValues(scale.x, scale.y, scale.z));
90 |
91 | // build normal matrix
92 | mat4.identity(mNormal);
93 | mat4.invert(mNormal, mModel);
94 | mat4.transpose(mNormal, mNormal);
95 |
96 | // build transform matrix
97 | mTransform.set(mModel.subarray(0x0, 12), 0x0);
98 | mTransform[3] = mModel[12];
99 | mTransform[7] = mModel[13];
100 | mTransform[11] = mModel[14];
101 |
102 | return {
103 | transform: mTransform,
104 | normal: mNormal
105 | };
106 | };
107 |
108 | function findIncludedFile(filePath, includes) {
109 | let matches = [];
110 | for (let ii = 0; ii < includes.length; ++ii) {
111 | let incl = includes[ii];
112 | let stats = fs.lstatSync(incl);
113 | if (!stats.isDirectory()) {
114 | throw new SyntaxError(`Include path '${incl}' is not a directory`);
115 | }
116 | let includeFilePath = path.join(incl, filePath);
117 | if (fs.existsSync(includeFilePath) && fs.lstatSync(includeFilePath).isFile()) {
118 | try {
119 | matches.push(fs.readFileSync(includeFilePath, "utf-8"));
120 | } catch (e) {
121 | throw new ReferenceError(`Cannot read included file from '${includeFilePath}'`);
122 | }
123 |     } else {
124 |       continue; // not found in this include directory, keep searching the remaining ones
125 |     }
126 | };
127 | if (matches.length <= 0) {
128 | throw new ReferenceError(`Cannot inline included file '${filePath}'`);
129 | }
130 | if (matches.length > 1) {
131 |     throw new ReferenceError(`Ambiguous include directive for '${filePath}'. More than one match was found`);
132 | }
133 | return matches[0];
134 | };
135 |
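    | // recursively inlines #include "..." and #include <...> directives so each shader
    | // can be handed to createShaderModule as a single flattened source string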
136 | function flattenShaderIncludes(source, includeDirectories) {
137 | let rx = /#include ((<[^>]+>)|("[^"]+"))/g;
138 | let match = null;
139 | while (match = rx.exec(source)) {
140 | let filePath = match[1].slice(1, -1);
141 | let start = match.index;
142 | let length = match[0].length;
143 | let includedFile = flattenShaderIncludes(
144 | findIncludedFile(filePath, includeDirectories),
145 | includeDirectories
146 | );
147 | source = source.substr(0, start) + includedFile + source.substr(start + length);
148 | };
149 | return source;
150 | };
151 |
152 | export function loadShaderFile(srcPath) {
153 | let src = fs.readFileSync(srcPath, "utf-8");
154 | let flattened = flattenShaderIncludes(src, [path.dirname(srcPath)]);
155 | return flattened;
156 | };
157 |
158 | export function calculateTangentsAndBitangents(object) {
159 | let {vertices, normals, uvs, indices} = object;
160 |
161 | let tangents = new Float32Array(vertices.length);
162 | let bitangents = new Float32Array(vertices.length);
163 | for (let ii = 0; ii < indices.length; ii += 3) {
164 | let i0 = indices[ii + 0];
165 | let i1 = indices[ii + 1];
166 | let i2 = indices[ii + 2];
167 |
168 | let xv0 = vertices[i0 * 3 + 0];
169 | let yv0 = vertices[i0 * 3 + 1];
170 | let zv0 = vertices[i0 * 3 + 2];
171 |
172 | let xuv0 = uvs[i0 * 2 + 0];
173 | let yuv0 = uvs[i0 * 2 + 1];
174 |
175 | let xv1 = vertices[i1 * 3 + 0];
176 | let yv1 = vertices[i1 * 3 + 1];
177 | let zv1 = vertices[i1 * 3 + 2];
178 |
179 | let xuv1 = uvs[i1 * 2 + 0];
180 | let yuv1 = uvs[i1 * 2 + 1];
181 |
182 | let xv2 = vertices[i2 * 3 + 0];
183 | let yv2 = vertices[i2 * 3 + 1];
184 | let zv2 = vertices[i2 * 3 + 2];
185 |
186 | let xuv2 = uvs[i2 * 2 + 0];
187 | let yuv2 = uvs[i2 * 2 + 1];
188 |
189 | let deltaPosX1 = xv1 - xv0;
190 | let deltaPosY1 = yv1 - yv0;
191 | let deltaPosZ1 = zv1 - zv0;
192 |
193 | let deltaPosX2 = xv2 - xv0;
194 | let deltaPosY2 = yv2 - yv0;
195 | let deltaPosZ2 = zv2 - zv0;
196 |
197 | let uvDeltaPosX1 = xuv1 - xuv0;
198 | let uvDeltaPosY1 = yuv1 - yuv0;
199 |
200 | let uvDeltaPosX2 = xuv2 - xuv0;
201 | let uvDeltaPosY2 = yuv2 - yuv0;
202 |
203 | let rInv = uvDeltaPosX1 * uvDeltaPosY2 - uvDeltaPosY1 * uvDeltaPosX2;
204 |     let r = 1.0 / (Math.abs(rInv) < 0.0001 ? 1.0 : rInv); // guard against degenerate/collinear UVs
205 |
206 | // tangent
207 | let xt = (deltaPosX1 * uvDeltaPosY2 - deltaPosX2 * uvDeltaPosY1) * r;
208 | let yt = (deltaPosY1 * uvDeltaPosY2 - deltaPosY2 * uvDeltaPosY1) * r;
209 | let zt = (deltaPosZ1 * uvDeltaPosY2 - deltaPosZ2 * uvDeltaPosY1) * r;
210 |
211 | // bitangent
212 | let xb = (deltaPosX2 * uvDeltaPosX1 - deltaPosX1 * uvDeltaPosX2) * r;
213 | let yb = (deltaPosY2 * uvDeltaPosX1 - deltaPosY1 * uvDeltaPosX2) * r;
214 | let zb = (deltaPosZ2 * uvDeltaPosX1 - deltaPosZ1 * uvDeltaPosX2) * r;
215 |
216 | // orthogonalize
217 | let xn0 = normals[i0 * 3 + 0];
218 | let yn0 = normals[i0 * 3 + 1];
219 | let zn0 = normals[i0 * 3 + 2];
220 |
221 | let xn1 = normals[i1 * 3 + 0];
222 | let yn1 = normals[i1 * 3 + 1];
223 | let zn1 = normals[i1 * 3 + 2];
224 |
225 | let xn2 = normals[i2 * 3 + 0];
226 | let yn2 = normals[i2 * 3 + 1];
227 | let zn2 = normals[i2 * 3 + 2];
228 |
229 | // tangent
230 | let xTangent0 = xt - xn0 * (xt * xn0 + yt * yn0 + zt * zn0);
231 | let yTangent0 = yt - yn0 * (xt * xn0 + yt * yn0 + zt * zn0);
232 | let zTangent0 = zt - zn0 * (xt * xn0 + yt * yn0 + zt * zn0);
233 |
234 | let xTangent1 = xt - xn1 * (xt * xn1 + yt * yn1 + zt * zn1);
235 | let yTangent1 = yt - yn1 * (xt * xn1 + yt * yn1 + zt * zn1);
236 | let zTangent1 = zt - zn1 * (xt * xn1 + yt * yn1 + zt * zn1);
237 |
238 | let xTangent2 = xt - xn2 * (xt * xn2 + yt * yn2 + zt * zn2);
239 | let yTangent2 = yt - yn2 * (xt * xn2 + yt * yn2 + zt * zn2);
240 | let zTangent2 = zt - zn2 * (xt * xn2 + yt * yn2 + zt * zn2);
241 |
242 | let magTangent0 = Math.sqrt(xTangent0 * xTangent0 + yTangent0 * yTangent0 + zTangent0 * zTangent0);
243 | let magTangent1 = Math.sqrt(xTangent1 * xTangent1 + yTangent1 * yTangent1 + zTangent1 * zTangent1);
244 | let magTangent2 = Math.sqrt(xTangent2 * xTangent2 + yTangent2 * yTangent2 + zTangent2 * zTangent2);
245 |
246 | // bitangent
247 | let N0oBt = xb * xn0 + yb * yn0 + zb * zn0;
248 | let N1oBt = xb * xn1 + yb * yn1 + zb * zn1;
249 | let N2oBt = xb * xn2 + yb * yn2 + zb * zn2;
250 |
251 |     let magBitangent0 = Math.sqrt(
252 |       (xb - xn0 * N0oBt) ** 2 +
253 |       (yb - yn0 * N0oBt) ** 2 +
254 |       (zb - zn0 * N0oBt) ** 2
255 |     );
256 |     let magBitangent1 = Math.sqrt(
257 |       (xb - xn1 * N1oBt) ** 2 +
258 |       (yb - yn1 * N1oBt) ** 2 +
259 |       (zb - zn1 * N1oBt) ** 2
260 |     );
261 |     let magBitangent2 = Math.sqrt(
262 |       (xb - xn2 * N2oBt) ** 2 +
263 |       (yb - yn2 * N2oBt) ** 2 +
264 |       (zb - zn2 * N2oBt) ** 2
265 |     );
266 |
267 | tangents[i0 * 3 + 0] += xTangent0 / magTangent0;
268 | tangents[i0 * 3 + 1] += yTangent0 / magTangent0;
269 | tangents[i0 * 3 + 2] += zTangent0 / magTangent0;
270 |
271 | tangents[i1 * 3 + 0] += xTangent1 / magTangent1;
272 | tangents[i1 * 3 + 1] += yTangent1 / magTangent1;
273 | tangents[i1 * 3 + 2] += zTangent1 / magTangent1;
274 |
275 | tangents[i2 * 3 + 0] += xTangent2 / magTangent2;
276 | tangents[i2 * 3 + 1] += yTangent2 / magTangent2;
277 | tangents[i2 * 3 + 2] += zTangent2 / magTangent2;
278 |
279 | bitangents[i0 * 3 + 0] += (xb - xn0 * N0oBt) / magBitangent0;
280 | bitangents[i0 * 3 + 1] += (yb - yn0 * N0oBt) / magBitangent0;
281 | bitangents[i0 * 3 + 2] += (zb - zn0 * N0oBt) / magBitangent0;
282 |
283 | bitangents[i1 * 3 + 0] += (xb - xn1 * N1oBt) / magBitangent1;
284 | bitangents[i1 * 3 + 1] += (yb - yn1 * N1oBt) / magBitangent1;
285 | bitangents[i1 * 3 + 2] += (zb - zn1 * N1oBt) / magBitangent1;
286 |
287 | bitangents[i2 * 3 + 0] += (xb - xn2 * N2oBt) / magBitangent2;
288 | bitangents[i2 * 3 + 1] += (yb - yn2 * N2oBt) / magBitangent2;
289 | bitangents[i2 * 3 + 2] += (zb - zn2 * N2oBt) / magBitangent2;
290 | };
291 |
292 | return { tangents, bitangents };
293 | };
294 |
--------------------------------------------------------------------------------
/buffers/InstanceBuffer.mjs:
--------------------------------------------------------------------------------
1 | import {
2 | clamp,
3 | getTransformMatrix
4 | } from "../utils.mjs";
5 |
6 | export default class InstanceBuffer {
7 | constructor({ device, instances, materials, textures, lights } = _) {
8 | this.device = device || null;
9 | this.buffers = {
10 | instance: null,
11 | material: null,
12 | light: null
13 | };
14 | this.accelerationContainer = null;
15 | this.init(instances, materials, textures, lights);
16 | }
17 | };
18 |
19 | InstanceBuffer.prototype.getInstanceBuffer = function() {
20 | return this.buffers.instance || null;
21 | };
22 |
23 | InstanceBuffer.prototype.getMaterialBuffer = function() {
24 | return this.buffers.material || null;
25 | };
26 |
27 | InstanceBuffer.prototype.getLightBuffer = function() {
28 | return this.buffers.light || null;
29 | };
30 |
31 | InstanceBuffer.prototype.getAccelerationContainer = function() {
32 | return this.accelerationContainer || null;
33 | };
34 |
35 | InstanceBuffer.prototype.updateInstance = function(instanceId, instance) {
36 | let {device} = this;
37 | let {buffers, accelerationContainer} = this;
38 | let {transform} = instance.data;
39 |
40 | let matrices = getTransformMatrix(transform);
41 |
42 | // transform matrix
43 | // padding
44 | // normal matrix
45 | let buffer = new ArrayBuffer(
46 | (3 * 4) * 4 +
47 | (4) * 4 +
48 | (4 * 4) * 4
49 | );
50 | let viewF32 = new Float32Array(buffer);
51 |
52 | viewF32.set(matrices.transform, 0x0);
53 | viewF32.set(matrices.normal, 3 * 4 + 4);
54 |
55 | buffers.instance.setSubData(
56 | instance.instanceBufferByteOffset,
57 | viewF32
58 | );
59 |
60 | accelerationContainer.updateInstance(instanceId, {
61 | flags: GPURayTracingAccelerationInstanceFlag.NONE,
62 | mask: 0xFF,
63 | instanceId: instanceId,
64 | instanceOffset: 0x0,
65 | geometryContainer: instance.parent.accelerationContainer.instance,
66 | transform: transform
67 | });
68 |
69 | };
70 |
71 | InstanceBuffer.prototype.init = function(instances, materials, textures, lights) {
72 | let {device} = this;
73 | let {buffers} = this;
74 |
75 | // create copy and insert placeholder material
76 | let placeHolderMaterial = { data: {} };
77 | materials = [placeHolderMaterial, ...materials];
78 |
79 | // create material buffer
80 | let materialBufferStride = 20;
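   | // 20 fields per material: color (4), emission (4), metalness, roughness, specular,
   | // textureScaling, 4 texture indices, 3 intensity factors, 1 padding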
81 | let materialBufferTotalLength = materials.length * materialBufferStride;
82 | let materialBuffer = device.createBuffer({
83 | usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.STORAGE,
84 | size: materialBufferTotalLength * 4
85 | });
86 | materialBuffer.byteLength = materialBufferTotalLength * 4;
87 | buffers.material = materialBuffer;
88 |
89 | let materialBufferDataBase = new ArrayBuffer(materialBufferTotalLength * 4);
90 | let materialBufferDataF32 = new Float32Array(materialBufferDataBase);
91 | let materialBufferDataU32 = new Uint32Array(materialBufferDataBase);
92 | for (let ii = 0; ii < materials.length; ++ii) {
93 | let material = materials[ii].data;
94 | let {color, emission} = material;
95 | let {metalness, roughness, specular} = material;
96 | let {textureScaling} = material;
97 | let {albedoMap, normalMap, emissionMap, metalRoughnessMap} = material;
98 | let {emissionIntensity, metalnessIntensity, roughnessIntensity} = material;
99 | let offset = ii * materialBufferStride;
100 | materialBufferDataF32[offset++] = color !== void 0 ? Math.pow(color[0] / 255.0, 1.0 / 2.2) : 0.0;
101 | materialBufferDataF32[offset++] = color !== void 0 ? Math.pow(color[1] / 255.0, 1.0 / 2.2) : 0.0;
102 | materialBufferDataF32[offset++] = color !== void 0 ? Math.pow(color[2] / 255.0, 1.0 / 2.2) : 0.0;
103 | materialBufferDataF32[offset++] = color !== void 0 && color.length === 4 ? Math.pow(color[3] / 255.0, 1.0 / 2.2) : 0.0;
104 | materialBufferDataF32[offset++] = emission !== void 0 ? Math.pow(emission[0] / 255.0, 1.0 / 2.2) : 0.0;
105 | materialBufferDataF32[offset++] = emission !== void 0 ? Math.pow(emission[1] / 255.0, 1.0 / 2.2) : 0.0;
106 | materialBufferDataF32[offset++] = emission !== void 0 ? Math.pow(emission[2] / 255.0, 1.0 / 2.2) : 0.0;
107 | materialBufferDataF32[offset++] = 0.0; // alpha
108 |     materialBufferDataF32[offset++] = clamp(metalness !== void 0 ? parseFloat(metalness) : 0.0, 0.001, 0.999); // fall back to 0.0 so materials that omit these fields don't write NaN
109 |     materialBufferDataF32[offset++] = clamp(roughness !== void 0 ? parseFloat(roughness) : 0.0, 0.001, 0.999);
110 |     materialBufferDataF32[offset++] = clamp(specular !== void 0 ? parseFloat(specular) : 0.0, 0.001, 0.999);
111 | materialBufferDataF32[offset++] = textureScaling !== void 0 ? parseFloat(textureScaling) : 1.0;
112 | materialBufferDataU32[offset++] = albedoMap ? textures.indexOf(albedoMap) + 1 : 0;
113 | materialBufferDataU32[offset++] = normalMap ? textures.indexOf(normalMap) + 1 : 0;
114 | materialBufferDataU32[offset++] = emissionMap ? textures.indexOf(emissionMap) + 1 : 0;
115 | materialBufferDataU32[offset++] = metalRoughnessMap ? textures.indexOf(metalRoughnessMap) + 1 : 0;
116 | materialBufferDataF32[offset++] = emissionIntensity !== void 0 ? parseFloat(emissionIntensity) : 1.0;
117 | materialBufferDataF32[offset++] = metalnessIntensity !== void 0 ? parseFloat(metalnessIntensity) : 1.0;
118 | materialBufferDataF32[offset++] = roughnessIntensity !== void 0 ? parseFloat(roughnessIntensity) : 1.0;
119 | materialBufferDataF32[offset++] = 0.0; // padding
120 | };
121 | materialBuffer.setSubData(0, materialBufferDataU32);
122 |
123 | // create instance buffer
124 | let instanceBufferStride = 36;
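    | // 36 fields per instance: 3x4 transform (12) + padding (4) + 4x4 normal matrix (16)
    | // + vertex offset, face offset, face count and material index (4)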
125 | let instanceBufferTotalLength = instances.length * instanceBufferStride;
126 | let instanceBuffer = device.createBuffer({
127 | usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.STORAGE,
128 | size: instanceBufferTotalLength * 4
129 | });
130 | instanceBuffer.byteLength = instanceBufferTotalLength * 4;
131 | buffers.instance = instanceBuffer;
132 |
133 | let instanceBufferDataBase = new ArrayBuffer(instanceBufferTotalLength * 4);
134 | let instanceBufferDataF32 = new Float32Array(instanceBufferDataBase);
135 | let instanceBufferDataU32 = new Uint32Array(instanceBufferDataBase);
136 | for (let ii = 0; ii < instances.length; ++ii) {
137 | let instance = instances[ii];
138 | let geometry = instance.parent;
139 | let {accelerationContainer} = geometry;
140 | let {material, transform} = instance.data;
141 | let matrices = getTransformMatrix(transform);
142 | let offset = ii * instanceBufferStride;
143 | instance.instanceBufferByteOffset = offset * 4;
144 | // transform matrix
145 | instanceBufferDataF32[offset++] = matrices.transform[0];
146 | instanceBufferDataF32[offset++] = matrices.transform[1];
147 | instanceBufferDataF32[offset++] = matrices.transform[2];
148 | instanceBufferDataF32[offset++] = matrices.transform[3];
149 | instanceBufferDataF32[offset++] = matrices.transform[4];
150 | instanceBufferDataF32[offset++] = matrices.transform[5];
151 | instanceBufferDataF32[offset++] = matrices.transform[6];
152 | instanceBufferDataF32[offset++] = matrices.transform[7];
153 | instanceBufferDataF32[offset++] = matrices.transform[8];
154 | instanceBufferDataF32[offset++] = matrices.transform[9];
155 | instanceBufferDataF32[offset++] = matrices.transform[10];
156 | instanceBufferDataF32[offset++] = matrices.transform[11];
157 | instanceBufferDataF32[offset++] = 0.0; // padding
158 | instanceBufferDataF32[offset++] = 0.0; // padding
159 | instanceBufferDataF32[offset++] = 0.0; // padding
160 | instanceBufferDataF32[offset++] = 0.0; // padding
161 | // normal matrix
162 | instanceBufferDataF32[offset++] = matrices.normal[0];
163 | instanceBufferDataF32[offset++] = matrices.normal[1];
164 | instanceBufferDataF32[offset++] = matrices.normal[2];
165 | instanceBufferDataF32[offset++] = matrices.normal[3];
166 | instanceBufferDataF32[offset++] = matrices.normal[4];
167 | instanceBufferDataF32[offset++] = matrices.normal[5];
168 | instanceBufferDataF32[offset++] = matrices.normal[6];
169 | instanceBufferDataF32[offset++] = matrices.normal[7];
170 | instanceBufferDataF32[offset++] = matrices.normal[8];
171 | instanceBufferDataF32[offset++] = matrices.normal[9];
172 | instanceBufferDataF32[offset++] = matrices.normal[10];
173 | instanceBufferDataF32[offset++] = matrices.normal[11];
174 | instanceBufferDataF32[offset++] = matrices.normal[12];
175 | instanceBufferDataF32[offset++] = matrices.normal[13];
176 | instanceBufferDataF32[offset++] = matrices.normal[14];
177 | instanceBufferDataF32[offset++] = matrices.normal[15];
178 | // offsets
179 | instanceBufferDataU32[offset++] = accelerationContainer.attributeOffset;
180 | instanceBufferDataU32[offset++] = accelerationContainer.faceOffset;
181 | instanceBufferDataU32[offset++] = accelerationContainer.faceCount;
182 | instanceBufferDataU32[offset++] = materials.indexOf(material);
183 | };
184 | instanceBuffer.setSubData(0, instanceBufferDataU32);
185 |
186 | // create light buffer
187 | let lightBufferStride = 4;
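    | // 4 words per light: the emitter's instance index plus three words of padding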
188 | let lightBufferTotalLength = lights.length * lightBufferStride;
189 | let lightBuffer = device.createBuffer({
190 | usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.STORAGE,
191 | size: lightBufferTotalLength * 4
192 | });
193 | lightBuffer.byteLength = lightBufferTotalLength * 4;
194 | buffers.light = lightBuffer;
195 |
196 | let lightBufferDataBase = new ArrayBuffer(lightBufferTotalLength * 4);
197 | let lightBufferDataF32 = new Float32Array(lightBufferDataBase);
198 | let lightBufferDataU32 = new Uint32Array(lightBufferDataBase);
199 | for (let ii = 0; ii < lights.length; ++ii) {
200 | let light = lights[ii];
201 | let {instance} = light;
202 | let offset = ii * lightBufferStride;
203 | lightBufferDataU32[offset++] = instances.indexOf(light);
204 | lightBufferDataF32[offset++] = 0.0; // padding
205 | lightBufferDataF32[offset++] = 0.0; // padding
206 | lightBufferDataF32[offset++] = 0.0; // padding
207 | };
208 | lightBuffer.setSubData(0, lightBufferDataU32);
209 |
210 | // create acceleration container
211 | let geometryInstances = [];
212 | for (let ii = 0; ii < instances.length; ++ii) {
213 | let instance = instances[ii];
214 | let geometry = instance.parent;
215 | let {accelerationContainer} = geometry;
216 | let {material, transform} = instance.data;
217 | let instanceEntry = {};
218 | instanceEntry.flags = GPURayTracingAccelerationInstanceFlag.FORCE_OPAQUE;
219 | instanceEntry.mask = 0xFF;
220 | instanceEntry.instanceId = ii;
221 | instanceEntry.instanceOffset = 0x0;
222 | instanceEntry.geometryContainer = accelerationContainer.instance;
223 | if (transform) instanceEntry.transform = transform;
224 | geometryInstances.push(instanceEntry);
225 | };
226 |
227 | let accelerationContainer = device.createRayTracingAccelerationContainer({
228 | level: "top",
229 | flags: GPURayTracingAccelerationContainerFlag.ALLOW_UPDATE | GPURayTracingAccelerationContainerFlag.PREFER_FAST_TRACE,
230 | instances: geometryInstances
231 | });
232 |
233 | // build top-level containers
234 | let commandEncoder = device.createCommandEncoder({});
235 | commandEncoder.buildRayTracingAccelerationContainer(accelerationContainer);
236 | device.getQueue().submit([ commandEncoder.finish() ]);
237 |
238 | this.accelerationContainer = accelerationContainer;
239 | };
240 |
--------------------------------------------------------------------------------
/index.mjs:
--------------------------------------------------------------------------------
1 | import WebGPU from "webgpu";
2 |
3 | import fs from "fs";
4 | import tolw from "tolw";
5 | import glMatrix from "gl-matrix";
6 | import { performance } from "perf_hooks";
7 |
8 | import {
9 | keyCodeToChar,
10 | loadShaderFile
11 | } from "./utils.mjs"
12 |
13 | import Camera from "./Camera.mjs";
14 | import Settings from "./Settings.mjs";
15 |
16 | import RayTracingPass from "./passes/RayTracingPass.mjs";
17 | import RayPickingPass from "./passes/RayPickingPass.mjs";
18 |
19 | import Scene from "./Scene.mjs";
20 |
21 | Object.assign(global, WebGPU);
22 | Object.assign(global, glMatrix);
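   | // expose the WebGPU bindings (GPU*, WebGPUWindow, ...) and gl-matrix (mat4, vec3, ...)
   | // as globals; the buffer and utility modules use them without importing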
23 |
24 | (async function main() {
25 |
26 | await tolw.init();
27 |
28 | let window = new WebGPUWindow({
29 | width: 1280,
30 | height: 768,
31 | title: "WebGPU RT",
32 | resizable: false
33 | });
34 | global["window"] = window;
35 |
36 | let adapter = await GPU.requestAdapter({
37 | window,
38 | preferredBackend: "Vulkan"
39 | });
40 |
41 | let device = await adapter.requestDevice({
42 | extensions: ["ray_tracing"]
43 | });
44 |
45 | let camera = new Camera({ device });
46 | global["camera"] = camera;
47 |
48 | let settings = new Settings({ device });
49 | global["settings"] = settings;
50 |
51 | let queue = device.getQueue();
52 |
53 | let context = window.getContext("webgpu");
54 |
55 | let swapChainFormat = await context.getSwapChainPreferredFormat(device);
56 |
57 | let swapChain = context.configureSwapChain({
58 | device: device,
59 | format: swapChainFormat
60 | });
61 |
62 | let scene = new Scene();
63 |
64 | let MeetMatBodyAlbedo = scene.createTexture().fromPath(`assets/textures/meetmat/02_Body_Base_Color.jpg`);
65 | let MeetMatBodyNormal = scene.createTexture().fromPath(`assets/textures/meetmat/02_Body_Normal_DirectX.jpg`);
66 | let MeetMatBodyMetallicRoughness = scene.createTexture().fromPath(`assets/textures/meetmat/02_Body_MetallicRoughness.jpg`);
67 |
68 | let MeetMatHeadAlbedo = scene.createTexture().fromPath(`assets/textures/meetmat/01_Head_Base_Color.jpg`);
69 | let MeetMatHeadNormal = scene.createTexture().fromPath(`assets/textures/meetmat/01_Head_Normal_DirectX.jpg`);
70 | let MeetMatHeadMetallicRoughness = scene.createTexture().fromPath(`assets/textures/meetmat/01_Head_MetallicRoughness.jpg`);
71 |
72 | let Fabric19Albedo = scene.createTexture().fromPath(`assets/textures/Fabric19/Fabric19_col.jpg`);
73 | let Fabric19Normal = scene.createTexture().fromPath(`assets/textures/Fabric19/Fabric19_nrm.jpg`);
74 | let Fabric19MetallicRoughness = scene.createTexture().fromPath(`assets/textures/Fabric19/Fabric19_met_rgh.jpg`);
75 |
76 | let Plane = scene.createGeometry().fromPath(`assets/models/plane.obj`);
77 | let MeetMatBody = scene.createGeometry().fromPath(`assets/models/meetmat/body.obj`);
78 | let MeetMatHead = scene.createGeometry().fromPath(`assets/models/meetmat/head.obj`);
79 | let Box = scene.createGeometry().fromPath(`assets/models/box.obj`);
80 |
81 | let FloorMaterial = scene.createMaterial({
82 | color: [0, 0, 0],
83 | metalness: 0.001,
84 | roughness: 0.068,
85 | specular: 0.0117,
86 | albedoMap: Fabric19Albedo,
87 | normalMap: Fabric19Normal,
88 | metalRoughnessMap: Fabric19MetallicRoughness,
89 | textureScaling: 5.5
90 | });
91 |
92 | let MeetMatBodyMaterial = scene.createMaterial({
93 | color: [0, 0, 0],
94 | metalness: 0.0,
95 | roughness: 0.0,
96 | specular: 0.95,
97 | albedoMap: MeetMatBodyAlbedo,
98 | normalMap: MeetMatBodyNormal,
99 | metalRoughnessMap: MeetMatBodyMetallicRoughness,
100 | metalnessIntensity: 1.0,
101 | roughnessIntensity: 0.1125,
102 | });
103 |
104 | let MeetMatHeadMaterial = scene.createMaterial({
105 | color: [0, 0, 0],
106 | metalness: 0.0,
107 | roughness: 0.0,
108 | specular: 0.95,
109 | albedoMap: MeetMatHeadAlbedo,
110 | normalMap: MeetMatHeadNormal,
111 | metalRoughnessMap: MeetMatHeadMetallicRoughness,
112 | metalnessIntensity: 1.0,
113 | roughnessIntensity: 0.1125,
114 | });
115 |
116 | let LightMaterial0 = scene.createMaterial({
117 | color: [14600, 14600, 14600]
118 | });
119 |
120 | let LightMaterial1 = scene.createMaterial({
121 | color: [12900, 12990, 12800]
122 | });
123 |
124 | MeetMatBody.addMeshInstance({
125 | material: MeetMatBodyMaterial,
126 | transform: {
127 | translation: { x: -32, y: 0, z: 128 },
128 | rotation: { x: 0, y: 100, z: 0 },
129 | scale: { x: 512, y: 512, z: 512 }
130 | }
131 | });
132 | MeetMatHead.addMeshInstance({
133 | material: MeetMatHeadMaterial,
134 | transform: {
135 | translation: { x: -32, y: 0, z: 128 },
136 | rotation: { x: 0, y: 100, z: 0 },
137 | scale: { x: 512, y: 512, z: 512 }
138 | }
139 | });
140 |
141 | MeetMatBody.addMeshInstance({
142 | material: MeetMatBodyMaterial,
143 | transform: {
144 | translation: { x: 64, y: 0, z: 128 },
145 | rotation: { x: 0, y: 180, z: 0 },
146 | scale: { x: 512, y: 512, z: 512 }
147 | }
148 | });
149 | MeetMatHead.addMeshInstance({
150 | material: MeetMatHeadMaterial,
151 | transform: {
152 | translation: { x: 64, y: 0, z: 128 },
153 | rotation: { x: 0, y: 180, z: 0 },
154 | scale: { x: 512, y: 512, z: 512 }
155 | }
156 | });
157 |
158 | MeetMatBody.addMeshInstance({
159 | material: MeetMatBodyMaterial,
160 | transform: {
161 | translation: { x: 32, y: 0, z: 256 - 32 },
162 | rotation: { x: 0, y: 180 + 70, z: 0 },
163 | scale: { x: 512, y: 512, z: 512 }
164 | }
165 | });
166 | MeetMatHead.addMeshInstance({
167 | material: MeetMatHeadMaterial,
168 | transform: {
169 | translation: { x: 32, y: 0, z: 256 - 32 },
170 | rotation: { x: 0, y: 180 + 70, z: 0 },
171 | scale: { x: 512, y: 512, z: 512 }
172 | }
173 | });
174 |
175 | Box.addMeshInstance({
176 | material: FloorMaterial,
177 | transform: {
178 | translation: { x: 0, y: 384, z: 0 },
179 | rotation: { x: 0, y: 0, z: 0 },
180 | scale: { x: 384, y: 384, z: 384 }
181 | }
182 | });
183 |
184 | Plane.addEmitterInstance({
185 | material: LightMaterial0,
186 | transform: {
187 | translation: { x: 0, y: 768 - 1, z: 0 },
188 | rotation: { x: 0, y: 0, z: 0 },
189 | scale: { x: 32, y: 32, z: 32 }
190 | }
191 | });
192 |
193 | Plane.addEmitterInstance({
194 | material: LightMaterial1,
195 | transform: {
196 | translation: { x: 0, y: 128, z: 256 + 48 },
197 | rotation: { x: 116, y: 0, z: 0 },
198 | scale: { x: 18, y: 12, z: 12 }
199 | }
200 | });
201 |
202 | Plane.addEmitterInstance({
203 | material: LightMaterial1,
204 | transform: {
205 | translation: { x: 0, y: 128, z: -128 },
206 | rotation: { x: -116, y: 0, z: 0 },
207 | scale: { x: 18, y: 12, z: 12 }
208 | }
209 | });
210 |
211 | let rtPass = new RayTracingPass({ device, scene });
212 |
213 | let pixelBuffer = rtPass.getPixelBuffer();
214 | let instanceContainer = rtPass.getInstanceBuffer().getAccelerationContainer();
215 |
216 | let rpPass = new RayPickingPass({ device, instanceContainer });
217 |
218 | let blitBindGroupLayout = device.createBindGroupLayout({
219 | entries: [
220 | { binding: 0, type: "storage-buffer", visibility: GPUShaderStage.FRAGMENT },
221 | { binding: 1, type: "uniform-buffer", visibility: GPUShaderStage.FRAGMENT },
222 | ]
223 | });
224 |
225 | let blitBindGroup = device.createBindGroup({
226 | layout: blitBindGroupLayout,
227 | entries: [
228 | { binding: 0, size: pixelBuffer.byteLength, buffer: pixelBuffer },
229 | { binding: 1, size: settings.getBuffer().byteLength, buffer: settings.getBuffer() },
230 | ]
231 | });
232 |
233 | let blitPipeline = device.createRenderPipeline({
234 | layout: device.createPipelineLayout({
235 | bindGroupLayouts: [blitBindGroupLayout]
236 | }),
237 | sampleCount: 1,
238 | vertexStage: {
239 | module: device.createShaderModule({ code: loadShaderFile(`shaders/blit/screen.vert`) }),
240 | entryPoint: "main"
241 | },
242 | fragmentStage: {
243 | module: device.createShaderModule({ code: loadShaderFile(`shaders/blit/screen.frag`) }),
244 | entryPoint: "main"
245 | },
246 | primitiveTopology: "triangle-list",
247 | vertexState: {
248 | indexFormat: "uint32",
249 | vertexBuffers: []
250 | },
251 | rasterizationState: {
252 | frontFace: "CCW",
253 | cullMode: "none"
254 | },
255 | colorStates: [{
256 | format: swapChainFormat,
257 | alphaBlend: {},
258 | colorBlend: {}
259 | }]
260 | });
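    | // the blit pass draws a single fullscreen triangle and resolves the ray-traced
    | // pixel buffer (binding 0) to the swapchain each frame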
261 |
262 | let pickedInstanceId = 0;
263 | let pickedInstance = null;
264 |
265 | let isLeftMousePressed = false;
266 | window.onmousedown = e => {
267 | isLeftMousePressed = e.button === 0;
268 | // execute ray picking
269 | if (e.button === 1) {
270 | rpPass.setMousePickingPosition(e.x, e.y);
271 | queue.submit([ rpPass.getCommandBuffer() ]);
272 | rpPass.getPickingResult().then(({ x, y, z, instanceId } = _) => {
273 | pickedInstanceId = instanceId - 1;
274 | if (pickedInstanceId >= 0) {
275 | let instance = scene.getInstanceById(pickedInstanceId);
276 | pickedInstance = instance;
277 | }
278 | });
279 | }
280 | };
281 | window.onmouseup = e => {
282 | isLeftMousePressed = false;
283 | };
284 | window.onmousemove = e => {
285 | if (!isLeftMousePressed) return;
286 | camera.deltaMovement.x = e.movementX * 0.25;
287 | camera.deltaMovement.y = e.movementY * 0.25;
288 | };
289 | let resetAccumulation = false;
290 | window.onmousewheel = e => {
291 | // aperture
292 | if (isKeyPressed("Ŕ")) { // shift key
293 | camera.settings.aperture += e.deltaY * 0.01;
294 | resetAccumulation = true;
295 | console.log(`Camera: Aperture: '${camera.settings.aperture}'`);
296 | }
297 | // focus distance
298 | else {
299 | camera.settings.focusDistance += e.deltaY * 0.125;
300 | camera.settings.focusDistance = Math.max(0.1, camera.settings.focusDistance);
301 | resetAccumulation = true;
302 | console.log(`Camera: Focus-Distance: '${camera.settings.focusDistance}'`);
303 | }
304 | };
305 |
306 | let keys = {};
307 | window.onkeydown = function(e) {
308 | let {keyCode} = e;
309 | let key = keyCodeToChar(keyCode);
310 | keys[key] = 1;
311 | };
312 | window.onkeyup = function(e) {
313 | let {keyCode} = e;
314 | let key = keyCodeToChar(keyCode);
315 | keys[key] = 0;
316 | };
317 | global.isKeyPressed = function isKeyPressed(key) {
318 | return keys[key] === 1;
319 | };
320 |
321 | let frames = 0;
322 | let then = performance.now();
323 | (function drawLoop() {
324 | let now = performance.now();
325 | let delta = (now - then);
326 | if (delta > 1.0 || frames === 0) {
327 | let fps = Math.floor((frames / delta) * 1e3);
328 | window.title = `WebGPU RT - FPS: ${fps} - SPP: ${camera.settings.sampleCount}`;
329 | frames = 0;
330 | }
331 | frames++;
332 | then = now;
333 |
334 | camera.update(delta / 1e3);
335 |
336 | // update settings buffer
337 | settings.getBuffer().setSubData(0, new Uint32Array([
338 | camera.settings.sampleCount,
339 | camera.settings.totalSampleCount,
340 | scene.getLightsFlattened().length,
341 | window.width,
342 | window.height
343 | ]));
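    | // keep this layout in sync with the SettingsBuffer uniform consumed by the shaders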
344 |
345 | // accumulation
346 | if (camera.hasMoved) camera.resetAccumulation();
347 | else camera.increaseAccumulation();
348 |
349 | if (resetAccumulation) {
350 | camera.resetAccumulation();
351 | resetAccumulation = false;
352 | }
353 |
354 | let backBufferView = swapChain.getCurrentTextureView();
355 |
356 | let commands = [];
357 |
358 | // ray tracing pass
359 | commands.push(rtPass.getCommandBuffer());
360 |
361 | // blit pass
362 | {
363 | let commandEncoder = device.createCommandEncoder({});
364 | let passEncoder = commandEncoder.beginRenderPass({
365 | colorAttachments: [{
366 | clearColor: { r: 0.0, g: 0.0, b: 0.0, a: 1.0 },
367 | loadOp: "clear",
368 | storeOp: "store",
369 | attachment: backBufferView
370 | }]
371 | });
372 | passEncoder.setPipeline(blitPipeline);
373 | passEncoder.setBindGroup(0, blitBindGroup);
374 | passEncoder.draw(3, 1, 0, 0);
375 | passEncoder.endPass();
376 | commands.push(commandEncoder.finish());
377 | }
378 |
379 | queue.submit(commands);
380 |
381 | swapChain.present();
382 |
383 | if (pickedInstanceId >= 0 && pickedInstance) {
384 | // update transform (random for now)
385 | pickedInstance.data.transform.rotation.x += Math.random();
386 | pickedInstance.data.transform.rotation.y += Math.random();
387 | pickedInstance.data.transform.rotation.z += Math.random();
388 | // update instance buffer
389 | rtPass.getInstanceBuffer().updateInstance(pickedInstanceId, pickedInstance);
390 | // update instance container
391 | let commandEncoder = device.createCommandEncoder({});
392 | commandEncoder.updateRayTracingAccelerationContainer(instanceContainer);
393 | queue.submit([ commandEncoder.finish() ]);
394 | resetAccumulation = true;
395 | }
396 |
397 | window.pollEvents();
398 | if (window.shouldClose()) return;
399 |
400 | setImmediate(drawLoop);
401 | })();
402 |
403 | })();
--------------------------------------------------------------------------------