├── .eslintrc.cjs
├── .gitignore
├── .prettierrc
├── LICENSE
├── README.md
├── css
│   └── styles.css
├── index.html
├── js
│   ├── Demo.js
│   ├── DemoScene.js
│   ├── gltf-scene
│   │   └── GLTFScene.js
│   ├── intro-scene
│   │   └── IntroScene.js
│   ├── main.js
│   ├── planes-scene
│   │   └── PlanesScene.js
│   ├── shaders
│   │   ├── chunks
│   │   │   ├── curl-noise.wgsl.js
│   │   │   ├── discard-particle-fragment.wgsl.js
│   │   │   ├── get-particle-size.wgsl.js
│   │   │   ├── get-pcf-soft-shadows.wgsl.js
│   │   │   ├── get-shadow-position.wgsl.js
│   │   │   └── gltf-contributions.wgsl.js
│   │   ├── compute-particles.wgsl.js
│   │   ├── gallery-planes.wgsl.js
│   │   ├── gallery-shader-pass.wgsl.js
│   │   ├── intro-meshes.wgsl.js
│   │   ├── shadowed-particles.wgsl.js
│   │   └── shadowed-wrapping-box.wgsl.js
│   └── shadowed-particles-scene
│       ├── ShadowMap.js
│       └── ShadowedParticlesScene.js
├── package.json
├── public
│   └── assets
│       ├── fallbacks
│       │   ├── gltf-scene-bg.jpg
│       │   ├── intro-scene-bg.jpg
│       │   └── shadowed-particles-scene-bg.jpg
│       ├── fonts
│       │   ├── Excon-Bold.ttf
│       │   ├── Excon-Bold.woff
│       │   └── Excon-Bold.woff2
│       └── gltf
│           └── metal_credit_card.glb
└── vite.config.js
/.eslintrc.cjs:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | root: true,
3 | parserOptions: {
4 | ecmaVersion: 11,
5 | sourceType: 'module',
6 | },
7 | // parser: '@typescript-eslint/parser',
8 | // plugins: ['@typescript-eslint'],
9 | // extends: ['prettier', 'plugin:@typescript-eslint/recommended'],
10 | extends: ['prettier'],
11 | // add your custom rules here
12 | rules: {},
13 | }
14 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /.idea
2 | /node_modules
3 | /dist
4 |
5 | .DS_Store
6 |
7 | yarn.lock
8 | package-lock.json
9 |
--------------------------------------------------------------------------------
/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "singleQuote": true,
3 | "semi": false,
4 | "printWidth": 120
5 | }
6 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Okay Dev
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Dive into WebGPU with gpu-curtains
2 |
3 | 
4 |
5 | ### About the Tutorial
6 |
7 | We are going to build a landing page containing 4 independent WebGPU scenes. Each scene will have its own HTML Canvas Element that we will use for rendering our objects.
8 |
9 | Here is a brief overview of the various scenes and what you’ll learn:
10 |
11 | - Draw multiple meshes, position them so they always fit in the viewport, and add a basic Lambert shader.
12 | - Create a DOM-synced planes gallery and add a post-processing pass to distort them on scroll.
13 | - Load a glTF object, sync its size and position with an HTML element, add the ability to rotate it on drag, and change its base color when clicking on a set of buttons.
14 | - Create a particle system using instanced billboarded quads, use compute shaders to set and update the particles’ positions and velocities, and finally add shadows.
15 |
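Each scene above follows the same wiring as `js/Demo.js`: a single `GPUCurtains` instance owns the WebGPU device, and every canvas gets its own renderer that reuses that device. Here is a minimal sketch of that pattern (assuming the `#intro-scene-canvas` container from `index.html`; see `js/Demo.js` for the full version with Lenis scrolling and GSAP ticking):

```js
import { GPUCurtains, GPUCameraRenderer } from 'gpu-curtains'

// one shared GPUCurtains instance owns the WebGPU device
const gpuCurtains = new GPUCurtains({ autoRender: false, watchScroll: false })
await gpuCurtains.setDevice() // rejects if WebGPU is unavailable

// each scene canvas gets its own renderer sharing that device
const introRenderer = new GPUCameraRenderer({
  deviceManager: gpuCurtains.deviceManager,
  container: '#intro-scene-canvas',
})

// drive all renderers from a single render loop
const tick = () => {
  gpuCurtains.render()
  requestAnimationFrame(tick)
}
tick()
```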
16 | [Read the article on Okay Dev](https://okaydev.co/articles/dive-into-webgpu-part-1)
17 |
18 | ## Demo
19 |
20 | [Dive into WebGPU](https://okaydev.co/dist/tutorials/dive-into-webgpu/index.html)
21 |
22 | ## Installation
23 |
24 | - Install dependencies with `npm install`
25 | - Run the demo with `npm run dev`
26 | - Build it with `npm run build`
27 |
28 | ## Credits
29 |
30 | - Demo and tutorials by [Martin Laxenaire](https://okaydev.co/u/martinlaxenaire)
31 | - Edited and published by [Eric Van Holtz](https://okaydev.co/u/eric) of [Okay Dev](https://okaydev.co)
32 | - Metal Credit Card model from [Sketchfab](https://sketchfab.com/3d-models/metal-credit-card-b6cff2460421408f84c9af7a85ce906e) by [Maxitaxx](https://sketchfab.com/maxitaxx)
33 |
34 | ## License
35 |
36 | MIT License
37 |
38 | Copyright (c) 2024 Martin Laxenaire
39 |
40 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
41 |
42 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
43 |
44 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
--------------------------------------------------------------------------------
/css/styles.css:
--------------------------------------------------------------------------------
1 | @import url('https://fonts.googleapis.com/css2?family=Montserrat:wght@300&display=swap');
2 |
3 | @font-face {
4 | font-family: 'Excon-Bold';
5 | src: url('/assets/fonts/Excon-Bold.woff2') format('woff2'), url('/assets/fonts/Excon-Bold.woff') format('woff'),
6 | url('/assets/fonts/Excon-Bold.ttf') format('truetype');
7 | font-weight: 700;
8 | font-display: swap;
9 | font-style: normal;
10 | }
11 |
12 | @media screen {
13 | :root {
14 | --font-size: clamp(14px, max(1.75vw, 1.75vh), 22px);
15 | --display-font: 'Excon-Bold', sans-serif;
16 | --text-font: 'Montserrat', sans-serif;
17 | --basic-spacing: 2rem;
18 | }
19 |
20 | html,
21 | body {
22 | height: 100%;
23 | }
24 |
25 | html {
26 | font-size: var(--font-size);
27 | line-height: 1.4;
28 | }
29 |
30 | body {
31 | margin: 0;
32 | font-family: var(--text-font);
33 | }
34 |
35 | .no-webgpu .gsap-auto-alpha {
36 | visibility: visible;
37 | }
38 |
39 | .container {
40 | padding: var(--basic-spacing);
41 | }
42 |
43 | .centered-text-content {
44 | max-width: 40rem;
45 | margin: 0 auto;
46 | }
47 |
48 | .tiny {
49 | font-size: 0.66em;
50 | }
51 |
52 | .small {
53 | font-size: 0.825em;
54 | }
55 |
56 | .gsap-auto-alpha {
57 | visibility: hidden;
58 | }
59 |
60 | h1,
61 | h2,
62 | h3,
63 | h4 {
64 | font-family: var(--display-font);
65 | line-height: 1.1;
66 | }
67 |
68 | h1 {
69 | font-size: 4rem;
70 | }
71 |
72 | h2 {
73 | font-size: 2.5rem;
74 | }
75 |
76 | button {
77 | cursor: pointer;
78 | font: inherit;
79 | background: transparent;
80 | color: inherit;
81 | }
82 |
83 | a {
84 | color: inherit;
85 | }
86 |
87 | canvas {
88 | position: absolute;
89 | inset: 0;
90 | }
91 |
92 | /* NO WEBGPU DISCLAIMER */
93 |
94 | #no-webgpu-disclaimer {
95 | position: fixed;
96 | right: calc(var(--basic-spacing) * 0.5);
97 | bottom: calc(var(--basic-spacing) * 0.5);
98 | padding: calc(var(--basic-spacing) * 0.5);
99 | color: #ffffff;
100 | background: rgba(0, 0, 0, 0.7);
101 | border: 1px solid #ffffff;
102 | border-radius: calc(var(--basic-spacing) * 0.25);
103 | z-index: 1;
104 | max-width: 25em;
105 | }
106 |
107 | /* GSAP */
108 |
109 | .gsap-auto-alpha {
110 | visibility: hidden;
111 | }
112 |
113 | /* LENIS */
114 |
115 | html.lenis,
116 | html.lenis body {
117 | height: auto;
118 | }
119 |
120 | .lenis.lenis-smooth {
121 | scroll-behavior: auto !important;
122 | }
123 |
124 | .lenis.lenis-smooth [data-lenis-prevent] {
125 | overscroll-behavior: contain;
126 | }
127 |
128 | .lenis.lenis-stopped {
129 | overflow: hidden;
130 | }
131 |
132 | .lenis.lenis-smooth iframe {
133 | pointer-events: none;
134 | }
135 |
136 | /* INTRO SCENE */
137 |
138 | #intro-scene {
139 | position: relative;
140 | height: 100svh;
141 | overflow: hidden;
142 | display: flex;
143 | justify-content: center;
144 | align-items: center;
145 | background: #191919;
146 | color: #ffffff;
147 | }
148 |
149 | .no-webgpu #intro-scene:after {
150 | content: '';
151 | position: absolute;
152 | inset: 0;
153 | z-index: 0;
154 | background-image: url('/assets/fallbacks/intro-scene-bg.jpg');
155 | background-repeat: no-repeat;
156 | background-size: cover;
157 | background-position: center center;
158 | /* opacity: 0;
159 | transition: opacity 0.25s; */
160 | }
161 |
162 | /* .no-webgpu #intro-scene.is-visible:after {
163 | opacity: 1;
164 | transition: opacity 1s 0.5s;
165 | } */
166 |
167 | #intro-scene-canvas {
168 | position: absolute;
169 | inset: 0;
170 | }
171 |
172 | #intro-scene-content {
173 | position: relative;
174 | z-index: 1;
175 | text-align: center;
176 | }
177 |
178 | #intro-scene-content h1 {
179 | margin: 0;
180 | }
181 |
182 | #intro-scene-content h1 span {
183 | display: block;
184 | }
185 |
186 | #intro-scene-content h2 {
187 | margin: 0 0 1rem 0;
188 | }
189 |
190 | #intro-scene p {
191 | margin: var(--basic-spacing) auto;
192 | text-shadow: #111111 0 0 0.75rem, #000000 0 0 0.15rem;
193 | }
194 |
195 | #intro-scene p button {
196 | background: #111111;
197 | border: 1px solid white;
198 | padding: 0.5em 1em;
199 | border-radius: 2.4em;
200 | text-transform: uppercase;
201 | transition: background 0.35s, color 0.35s;
202 | }
203 |
204 | #intro-scene p button:hover {
205 | color: #111111;
206 | background: white;
207 | }
208 |
209 | /* PLANES SCENE */
210 |
211 | #planes-scene {
212 | position: relative;
213 | min-height: 100lvh;
214 | }
215 |
216 | #planes-scene-canvas {
217 | position: fixed;
218 | top: 0;
219 | width: 100%;
220 | height: 100lvh;
221 | }
222 |
223 | #planes-scene-content {
224 | position: relative;
225 | z-index: 1;
226 | margin: 6rem auto;
227 | }
228 |
229 | #planes-scene-content h2 {
230 | text-align: center;
231 | margin: 0 0 4rem 0;
232 | }
233 |
234 | #planes-scene-content p {
235 | margin: 4rem auto;
236 | }
237 |
238 | #planes-grid {
239 | display: grid;
240 | grid-template-columns: repeat(12, 1fr);
241 | grid-gap: 1rem;
242 | justify-content: space-between;
243 | }
244 |
245 | #planes-grid .plane {
246 | display: flex;
247 | overflow: hidden;
248 | }
249 |
250 | #planes-grid .plane:nth-child(4n + 1) {
251 | grid-column: 4 / span 3;
252 | aspect-ratio: 10 / 15;
253 | }
254 |
255 | #planes-grid .plane:nth-child(4n + 4) {
256 | grid-column: 7 / span 3;
257 | aspect-ratio: 10 / 15;
258 | }
259 |
260 | #planes-grid .plane:nth-child(4n + 2) {
261 | grid-column: 7 / span 4;
262 | aspect-ratio: 15 / 10;
263 | align-self: flex-end;
264 | }
265 |
266 | #planes-grid .plane:nth-child(4n + 3) {
267 | grid-column: 3 / span 4;
268 | aspect-ratio: 15 / 10;
269 | }
270 |
271 | #planes-grid .plane:nth-child(4n + 5) {
272 | margin-top: -60%;
273 | }
274 |
275 | #planes-grid .plane img {
276 | object-fit: cover;
277 | min-width: 100%;
278 | min-height: 100%;
279 | width: auto;
280 | height: auto;
281 | visibility: hidden;
282 | }
283 |
284 | .no-webgpu #planes-grid .plane img {
285 | visibility: visible;
286 | /* opacity: 0;
287 | transition: opacity 0.25s; */
288 | }
289 |
290 | /* .no-webgpu #planes-scene.is-visible #planes-grid .plane img {
291 | opacity: 1;
292 | transition: opacity 0.5s 0.5s;
293 | } */
294 |
295 | /* GLTF SCENE */
296 |
297 | #gltf-scene {
298 | position: relative;
299 | min-height: 120svh;
300 | display: flex;
301 | align-items: center;
302 | overflow: hidden;
303 | transition: color 0.5s ease-in-out, background 0.5s ease-in-out;
304 | }
305 |
306 | #gltf-scene,
307 | #gltf-scene.silver {
308 | background: #333333;
309 | color: #ffffff;
310 | }
311 |
312 | #gltf-scene.gold {
313 | background: #ffe364;
314 | color: #000000;
315 | }
316 |
317 | #gltf-scene.black {
318 | background: #dddddd;
319 | color: #000000;
320 | }
321 |
322 | #gltf-scene-grid {
323 | position: relative;
324 | z-index: 1;
325 | box-sizing: border-box;
326 | margin: var(--basic-spacing) auto;
327 | display: grid;
328 | grid-template-columns: repeat(12, 1fr);
329 | grid-gap: 1rem;
330 | align-items: center;
331 | }
332 |
333 | #gltf-scene-content {
334 | grid-column: 2 / span 4;
335 | }
336 |
337 | #gltf-scene-object-container {
338 | grid-column: 6 / span 7;
339 | display: grid;
340 | grid-template-columns: repeat(7, 1fr);
341 | grid-gap: 1rem;
342 | }
343 |
344 | #gltf-scene-object {
345 | position: relative;
346 | grid-column: 2 / -3;
347 | aspect-ratio: 16 / 10;
348 | }
349 |
350 | #gltf-scene-canvas {
351 | position: absolute;
352 | top: 50%;
353 | right: -25%;
354 | left: -25%;
355 | aspect-ratio: 1.1;
356 | transform: translate3d(0, -50%, 0);
357 | pointer-events: none;
358 | }
359 |
360 | .no-webgpu #gltf-scene-object {
361 | background-image: url('/assets/fallbacks/gltf-scene-bg.jpg');
362 | background-repeat: no-repeat;
363 | background-size: cover;
364 | background-position: center center;
365 | /* opacity: 0;
366 | transition: opacity 0.25s; */
367 | }
368 |
369 | /* .no-webgpu #gltf-scene.is-visible #gltf-scene-object {
370 | opacity: 1;
371 | transition: opacity 1s;
372 | } */
373 |
374 | #gltf-scene-controls {
375 | grid-column: 2 / -3;
376 | display: flex;
377 | justify-content: flex-end;
378 | margin-top: 3rem;
379 | }
380 |
381 | .no-webgpu #gltf-scene-controls {
382 | display: none;
383 | }
384 |
385 | #gltf-scene-controls-buttons {
386 | display: flex;
387 | align-items: center;
388 | }
389 |
390 | #gltf-scene-controls-buttons span {
391 | margin-right: 2em;
392 | }
393 |
394 | #gltf-scene-controls button {
395 | background: #aaaaaa;
396 | font: inherit;
397 | display: block;
398 | width: 2em;
399 | height: 2em;
400 | margin-left: 1em;
401 | border-radius: 50%;
402 | cursor: pointer;
403 | }
404 |
405 | #gltf-scene-controls button.gold {
406 | background: #b8a209;
407 | }
408 |
409 | #gltf-scene-controls button.black {
410 | background: #444444;
411 | }
412 |
413 | #gltf-scene-controls button span {
414 | pointer-events: none;
415 | display: inline-flex;
416 | text-indent: -9999rem;
417 | }
418 |
419 | /* SHADOWED PARTICLES */
420 |
421 | #shadowed-particles-scene {
422 | position: relative;
423 | height: 100lvh;
424 | overflow: hidden;
425 | background: #4c4c4c;
426 | color: #ffffff;
427 | }
428 |
429 | .no-webgpu #shadowed-particles-scene:after {
430 | content: '';
431 | position: absolute;
432 | inset: 0;
433 | z-index: 0;
434 | background-image: url('/assets/fallbacks/shadowed-particles-scene-bg.jpg');
435 | background-repeat: no-repeat;
436 | background-size: cover;
437 | background-position: center center;
438 | /* opacity: 0;
439 | transition: opacity 0.25s; */
440 | }
441 |
442 | /* .no-webgpu #shadowed-particles-scene.is-visible:after {
443 | opacity: 1;
444 | transition: opacity 1s;
445 | } */
446 |
447 | #shadowed-particles-scene-canvas {
448 | position: absolute;
449 | inset: 0;
450 | }
451 |
452 | #shadowed-particles-scene-content {
453 | position: absolute;
454 | inset: 0;
455 | box-sizing: border-box;
456 | display: flex;
457 | flex-direction: column;
458 | justify-content: center;
459 | align-items: center;
460 | }
461 |
462 | #shadowed-particles-scene-content h2 {
463 | position: relative;
464 | z-index: 1;
465 | line-height: 1;
466 | margin: 0 auto 4rem auto;
467 | text-align: center;
468 | width: 5em;
469 | color: #cdcdcd;
470 | font-size: 4rem;
471 | mix-blend-mode: color-burn;
472 | }
473 |
474 | #shadowed-particles-scene-content p {
475 | text-align: center;
476 | text-shadow: #111111 0 0 0.75rem, #000000 0 0 0.15rem;
477 | }
478 |
479 | .no-webgpu #shadowed-particles-scene-content p {
480 | position: relative;
481 | z-index: 1;
482 | }
483 | }
484 |
485 | @media screen and (max-width: 1024px) {
486 | .tiny {
487 | font-size: 0.75em;
488 | }
489 |
490 | .small {
491 | font-size: 0.9em;
492 | }
493 |
494 | /* GLTF SCENE */
495 |
496 | #gltf-scene-content {
497 | grid-column: 2 / -2;
498 | }
499 |
500 | #gltf-scene-object-container {
501 | grid-column: 1 / -1;
502 | grid-template-columns: repeat(12, 1fr);
503 | padding: 5rem 0;
504 | }
505 |
506 | #gltf-scene-object {
507 | grid-column: 4 / -4;
508 | }
509 |
510 | #gltf-scene-controls {
511 | grid-column: 2 / -2;
512 | justify-content: center;
513 | }
514 | }
515 |
516 | @media screen and (max-width: 720px) {
517 | :root {
518 | --basic-spacing: 1.5rem;
519 | }
520 |
521 | .tiny {
522 | font-size: 0.9em;
523 | }
524 |
525 | .small {
526 | font-size: 1em;
527 | }
528 |
529 | /* PLANE SCENE */
530 |
531 | #planes-grid .plane {
532 | grid-column: 1 / -1 !important;
533 | aspect-ratio: 16 / 9 !important;
534 | margin: 0 !important;
535 | }
536 |
537 | /* GLTF SCENE */
538 |
539 | #gltf-scene-content {
540 | grid-column: 1 / -1;
541 | }
542 |
543 | #gltf-scene-object-container {
544 | grid-column: 1 / -1;
545 | grid-template-columns: repeat(12, 1fr);
546 | padding: 3rem 0;
547 | }
548 |
549 | #gltf-scene-object {
550 | grid-column: 2 / -2;
551 | }
552 |
553 | #gltf-scene-controls {
554 | margin-top: 0;
555 | }
556 |
557 | #gltf-scene-controls-buttons {
558 | flex-wrap: wrap;
559 | }
560 |
561 | #gltf-scene-controls-buttons span {
562 | flex-basis: 100%;
563 | margin: 0 0 1rem 0;
564 | }
565 |
566 | #gltf-scene-controls-buttons button {
567 | margin: 0 1.5rem 0 0;
568 | }
569 |
570 | /* SHADOWED PARTICLES */
571 |
572 | #shadowed-particles-scene-content h2 {
573 | font-size: 3rem;
574 | }
575 | }
576 |
--------------------------------------------------------------------------------
/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Dive into WebGPU with gpu-curtains
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 | Dive into WebGPU
17 | with gpu-curtains
18 |
19 |
20 | Learn how you could leverage the power of WebGPU to enhance your website's user experience with various usage
21 | examples of the library in a 4-step tutorial.
22 |
23 |
24 | Start with a basic scene displaying a few hundred meshes contained in the viewport. Then, learn how to apply
25 | a Lambert shader with a directional light from scratch.
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 | Gallery delight
37 |
38 |
39 | Add a bunch of DOM-synced planes. Easily add an animation every time a plane enters the viewport.
40 | Add an additional post-processing effect based on the scroll velocity.
41 |
42 |
43 |
44 |
45 |
51 |
52 |
53 |
54 |
60 |
61 |
62 |
63 |
69 |
70 |
71 |
72 |
78 |
79 |
80 |
86 |
87 |
88 |
89 |
95 |
96 |
97 |
98 |
104 |
105 |
106 |
107 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 | A 3D product configurator
122 |
123 | Load a glTF and sync its size and position with a DOM element. Apply a physically based rendering shader
124 | and customize it to update the mesh base color.
125 |
126 |
127 | Model by Maxitaxx.
128 |
129 |
130 |
131 |
136 |
137 |
138 |
139 | Pick a color
140 |
141 | Silver
142 |
143 |
144 | Gold
145 |
146 |
147 | Black
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 | For particle lovers
158 |
159 | Animate 100,000 particles using instancing and compute shaders.
160 | Learn how meshes can cast and receive shadows using shadow mapping.
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
--------------------------------------------------------------------------------
/js/Demo.js:
--------------------------------------------------------------------------------
1 | import { GPUCameraRenderer, GPUCurtains, GPUCurtainsRenderer } from 'gpu-curtains'
2 | import Lenis from 'lenis'
3 | import { gsap } from 'gsap'
4 | import { ScrollTrigger } from 'gsap/ScrollTrigger'
5 | import { IntroScene } from './intro-scene/IntroScene'
6 | import { PlanesScene } from './planes-scene/PlanesScene'
7 | import { GLTFScene } from './gltf-scene/GLTFScene'
8 | import { ShadowedParticlesScene } from './shadowed-particles-scene/ShadowedParticlesScene'
9 |
10 | export class Demo {
11 | constructor() {
12 | gsap.registerPlugin(ScrollTrigger)
13 |
14 | // cap pixel ratio to improve performance
15 | this.pixelRatio = Math.min(1.5, window.devicePixelRatio)
16 |
17 | this.initLenis()
18 |
19 | this.gpuCurtains = new GPUCurtains({
20 | label: 'gpu-curtains demo',
21 | autoRender: false,
22 | watchScroll: false, // we'll use lenis instead
23 | pixelRatio: this.pixelRatio,
24 | })
25 |
26 | // needed to create the renderers
27 | this.deviceManager = this.gpuCurtains.deviceManager
28 |
29 | this.scenes = []
30 |
31 | this.createScenes()
32 |
33 | this.initWebGPU()
34 |
35 | gsap.ticker.add(this.render.bind(this))
36 | gsap.ticker.lagSmoothing(0)
37 | }
38 |
39 | initLenis() {
40 | this.lenis = new Lenis()
41 |
42 | this.lenis.on('scroll', ScrollTrigger.update)
43 | }
44 |
45 | async initWebGPU() {
46 | try {
47 | await this.gpuCurtains.setDevice()
48 | } catch (e) {
49 | //console.warn('WebGPU is not supported.')
50 | const disclaimer = document.createElement('div')
51 | disclaimer.setAttribute('id', 'no-webgpu-disclaimer')
52 | disclaimer.classList.add('tiny')
53 | disclaimer.innerText = 'Unfortunately, it looks like WebGPU is not (yet) supported by your browser or OS.'
54 | document.body.appendChild(disclaimer)
55 | document.body.classList.add('no-webgpu')
56 | }
57 |
58 | // init webgpu
59 | this.scenes.forEach((scene) => scene.initWebGPU())
60 |
61 | this.gpuCurtains.onRender(() => {
62 | this.scenes.forEach((scene) => scene.shouldRender && scene.onRender())
63 | })
64 | }
65 |
66 | render(time) {
67 | this.lenis.raf(time * 1000)
68 | this.gpuCurtains.render()
69 | }
70 |
71 | createScenes() {
72 | this.createIntroScene()
73 | this.createPlanesScene()
74 | this.createGLTFScene()
75 | this.createShadowedParticlesScene()
76 |
77 | this.lenis.on('scroll', (e) => {
78 | this.gpuCurtains.updateScrollValues({ x: 0, y: e.scroll })
79 |
80 | this.scenes.forEach((scene) => scene.onScroll(e.velocity))
81 | })
82 | }
83 |
84 | createIntroScene() {
85 | const introScene = new IntroScene({
86 | renderer: new GPUCameraRenderer({
87 | deviceManager: this.deviceManager,
88 | label: 'Intro scene renderer',
89 | container: '#intro-scene-canvas',
90 | pixelRatio: this.pixelRatio,
91 | }),
92 | })
93 |
94 | this.scenes.push(introScene)
95 | }
96 |
97 | createPlanesScene() {
98 | const planesScene = new PlanesScene({
99 | renderer: new GPUCurtainsRenderer({
100 | deviceManager: this.deviceManager,
101 | label: 'Planes scene renderer',
102 | container: '#planes-scene-canvas',
103 | pixelRatio: this.pixelRatio,
104 | }),
105 | })
106 |
107 | this.scenes.push(planesScene)
108 | }
109 |
110 | createGLTFScene() {
111 | const gltfScene = new GLTFScene({
112 | renderer: new GPUCurtainsRenderer({
113 | deviceManager: this.deviceManager,
114 | label: 'glTF scene renderer',
115 | container: '#gltf-scene-canvas',
116 | pixelRatio: this.pixelRatio,
117 | }),
118 | })
119 |
120 | this.scenes.push(gltfScene)
121 | }
122 |
123 | createShadowedParticlesScene() {
124 | const shadowedParticlesScene = new ShadowedParticlesScene({
125 | renderer: new GPUCameraRenderer({
126 | deviceManager: this.deviceManager,
127 | label: 'Shadowed particles scene renderer',
128 | container: '#shadowed-particles-scene-canvas',
129 | pixelRatio: this.pixelRatio,
130 | }),
131 | })
132 |
133 | this.scenes.push(shadowedParticlesScene)
134 | }
135 |
136 | destroyScenes() {
137 | this.scenes.forEach((scene) => scene.destroy())
138 | }
139 |
140 | destroy() {
141 | this.destroyScenes()
142 | this.gpuCurtains.destroy()
143 | }
144 | }
145 |
--------------------------------------------------------------------------------
/js/DemoScene.js:
--------------------------------------------------------------------------------
1 | export class DemoScene {
2 | constructor({ renderer }) {
3 | if (!renderer) {
4 | throw new Error('DemoScene: the renderer parameter is mandatory!')
5 | }
6 |
7 | this.renderer = renderer
8 |
9 | this.init()
10 | }
11 |
12 | init() {
13 | this.addEvents()
14 | this.addScrollTrigger()
15 | this.addEnteringAnimation()
16 | }
17 |
18 | get isWebGPUActive() {
19 | return !!this.renderer.deviceManager.device
20 | }
21 |
22 | get shouldRender() {
23 | return this.renderer.shouldRenderScene && this.renderer.shouldRender
24 | }
25 |
26 | initWebGPU() {
27 | if (this.isWebGPUActive) {
28 | this.setupWebGPU()
29 | }
30 | }
31 |
32 | setupWebGPU() {}
33 |
34 | removeWebGPU() {
35 | if (this.isWebGPUActive) {
36 | this.destroyWebGPU()
37 | }
38 | }
39 |
40 | destroyWebGPU() {}
41 |
42 | addEvents() {}
43 | removeEvents() {}
44 |
45 | addScrollTrigger() {}
46 | removeScrollTrigger() {}
47 |
48 | onSceneVisibilityChanged(isVisible) {}
49 |
50 | addEnteringAnimation() {}
51 | removeEnteringAnimation() {}
52 |
53 | onRender() {}
54 | onScroll(scrollDelta) {}
55 |
56 | destroy() {
57 | this.removeEvents()
58 | this.removeScrollTrigger()
59 | this.removeEnteringAnimation()
60 | this.removeWebGPU()
61 |
62 | this.renderer.destroy()
63 | }
64 | }
65 |
--------------------------------------------------------------------------------
/js/gltf-scene/GLTFScene.js:
--------------------------------------------------------------------------------
1 | import { buildPBRShaders, DOMObject3D, GLTFLoader, GLTFScenesManager, Sampler, Vec2, Vec3 } from 'gpu-curtains'
2 | import { ScrollTrigger } from 'gsap/ScrollTrigger'
3 | import { DemoScene } from '../DemoScene'
4 | import {
5 | additionalFragmentHead,
6 | ambientContribution,
7 | lightContribution,
8 | preliminaryColorContribution,
9 | } from '../shaders/chunks/gltf-contributions.wgsl'
10 | import { gsap } from 'gsap'
11 |
12 | export class GLTFScene extends DemoScene {
13 | constructor({ renderer }) {
14 | super({ renderer })
15 | }
16 |
17 | init() {
18 | this.section = document.querySelector('#gltf-scene')
19 | this.gltfElement = document.querySelector('#gltf-scene-object')
20 |
21 | this.parentNode = new DOMObject3D(this.renderer, this.gltfElement, {
22 | watchScroll: false, // no need to watch the scroll
23 | })
24 |
25 | // add it to the scene graph
26 | this.parentNode.parent = this.renderer.scene
27 |
28 | super.init()
29 | }
30 |
31 | setupWebGPU() {
32 | this.loadGLTF()
33 |
34 | this.addButtonInteractions()
35 | }
36 |
37 | destroyWebGPU() {
38 | this.gltfScenesManager?.destroy()
39 | this.removeButtonInteractions()
40 | }
41 |
42 | addButtonInteractions() {
43 | this.buttons = this.section.querySelectorAll('#gltf-scene-controls button')
44 |
45 | // update card color
46 | this.cards = [
47 | { name: 'silver', baseColorFactor: new Vec3(1) },
48 | { name: 'gold', baseColorFactor: new Vec3(240 / 255, 140 / 255, 15 / 255) },
49 | { name: 'black', baseColorFactor: new Vec3(0.55) },
50 | ]
51 |
52 | // init with first color
53 | this.section.classList.add(this.cards[0].name)
54 |
55 | this._buttonClickHandler = this.onButtonClicked.bind(this)
56 |
57 | this.buttons.forEach((button) => {
58 | button.addEventListener('click', this._buttonClickHandler)
59 | })
60 | }
61 |
62 | removeButtonInteractions() {
63 | this.updateColorTween?.kill()
64 |
65 | this.buttons.forEach((button) => {
66 | button.removeEventListener('click', this._buttonClickHandler)
67 | })
68 | }
69 |
70 | onButtonClicked(e) {
71 | const { target } = e
72 | const cardName = target.hasAttribute('data-card-name') ? target.getAttribute('data-card-name') : this.cards[0].name
73 |
74 | const cardIndex = this.cards.findIndex((c) => c.name === cardName)
75 |
76 | // remove all previous card name classes
77 | this.cards.forEach((card) => {
78 | this.section.classList.remove(card.name)
79 | })
80 |
81 | // add active card class name
82 | this.section.classList.add(cardName)
83 |
84 | const changeProgress = {
85 | value: 0,
86 | }
87 |
88 | this.updateColorTween?.kill()
89 |
90 | this.updateColorTween = gsap.to(changeProgress, {
91 | value: 1,
92 | duration: 1.25,
93 | ease: 'expo.inOut',
94 | onStart: () => {
95 | this.gltfMeshes.forEach((mesh) => {
96 | mesh.uniforms.interaction.nextBaseColorBlendIndex.value = cardIndex
97 | })
98 | },
99 | onUpdate: () => {
100 | this.gltfMeshes.forEach((mesh) => {
101 | mesh.uniforms.interaction.colorChangeProgress.value = changeProgress.value
102 | })
103 | },
104 | onComplete: () => {
105 | this.gltfMeshes.forEach((mesh) => {
106 | mesh.uniforms.interaction.currentBaseColorBlendIndex.value = cardIndex
107 | })
108 | },
109 | })
110 | }
111 |
112 | addScrollTrigger() {
113 | this.scrollTrigger = ScrollTrigger.create({
114 | trigger: this.section,
115 | onToggle: ({ isActive }) => {
116 | this.onSceneVisibilityChanged(isActive)
117 | },
118 | })
119 |
120 | this.onSceneVisibilityChanged(this.scrollTrigger.isActive)
121 | }
122 |
123 | removeScrollTrigger() {
124 | this.scrollTrigger.kill()
125 | }
126 |
127 | onSceneVisibilityChanged(isVisible) {
128 | if (isVisible) {
129 | this.section.classList.add('is-visible')
130 | this.renderer.shouldRenderScene = true
131 | this.timeline?.restart(true)
132 | } else {
133 | this.section.classList.remove('is-visible')
134 | this.renderer.shouldRenderScene = false
135 | this.timeline?.pause()
136 | }
137 | }
138 |
139 | addEnteringAnimation() {
140 | this.autoAlphaElements = this.section.querySelectorAll('.gsap-auto-alpha')
141 |
142 | // animation
143 | this.animations = {
144 | meshesProgress: 0,
145 | }
146 |
147 | this.timeline = gsap
148 | .timeline({
149 | paused: true,
150 | })
151 | .call(() => {
152 | // reset mouse interaction and parent node scale on start
153 | this.mouse.currentInteraction.set(0)
154 | this.parentNode.scale.set(0)
155 | })
156 | .set(this.autoAlphaElements, { autoAlpha: 0 })
157 | .to(this.animations, {
158 | meshesProgress: 1,
159 | ease: 'expo.out',
160 | duration: 3,
161 | delay: 0.25,
162 | onUpdate: () => {
163 | this.parentNode.scale.set(this.animations.meshesProgress)
164 | },
165 | })
166 | .to(
167 | this.autoAlphaElements,
168 | {
169 | autoAlpha: 1,
170 | duration: 1,
171 | stagger: 0.125,
172 | ease: 'power2.inOut',
173 | },
174 | 0.5
175 | )
176 | }
177 |
178 | removeEnteringAnimation() {
179 | this.timeline.kill()
180 | }
181 |
182 | async loadGLTF() {
183 | this.gltfLoader = new GLTFLoader()
184 | this.gltf = await this.gltfLoader.loadFromUrl('./assets/gltf/metal_credit_card.glb')
185 |
186 | this.gltfScenesManager = new GLTFScenesManager({
187 | renderer: this.renderer,
188 | gltf: this.gltf,
189 | })
190 |
191 | const { scenesManager } = this.gltfScenesManager
192 | const { node, boundingBox } = scenesManager
193 | const { center, radius } = boundingBox
194 |
195 | // center the scenes manager parent node
196 | node.position.sub(center)
197 | // add parent DOMObject3D as the scenes manager node parent
198 | node.parent = this.parentNode
199 |
200 | // copy new scenes bounding box into DOMObject3D own bounding box
201 | this.parentNode.boundingBox.copy(boundingBox)
202 |
203 | const updateParentNodeDepthPosition = () => {
204 | // move our parent node along the Z axis so the glTF front face lies at (0, 0, 0) instead of the glTF’s center
205 | this.parentNode.position.z = -0.5 * this.parentNode.boundingBox.size.z * this.parentNode.DOMObjectWorldScale.z
206 | }
207 |
208 | updateParentNodeDepthPosition()
209 | this.parentNode.onAfterDOMElementResize(() => updateParentNodeDepthPosition())
210 |
211 | // create a new sampler to address anisotropic issue
212 | this.anisotropicSampler = new Sampler(this.renderer, {
213 | label: 'Anisotropic sampler',
214 | name: 'anisotropicSampler',
215 | maxAnisotropy: 16,
216 | })
217 |
218 | this.gltfMeshes = this.gltfScenesManager.addMeshes((meshDescriptor) => {
219 | const { parameters } = meshDescriptor
220 |
221 | // disable frustum culling
222 | parameters.frustumCulling = false
223 |
224 | // add anisotropic sampler to the parameters
225 | parameters.samplers.push(this.anisotropicSampler)
226 |
227 | // assign our anisotropic sampler
228 | // to every textureSample calls used inside our buildPBRShaders function
229 | meshDescriptor.textures.forEach((texture) => {
230 | texture.sampler = this.anisotropicSampler.name
231 | })
232 |
233 | // add lights
234 | const lightPosition = new Vec3(-radius * 1.25, radius * 0.5, radius * 1.5)
235 | const lightPositionLength = lightPosition.length()
236 |
237 | // put all base color factors into a single array
238 | const baseColorFactorsArray = this.cards
239 | .map((card) => {
240 | return [card.baseColorFactor.x, card.baseColorFactor.y, card.baseColorFactor.z]
241 | })
242 | .flat()
243 |
244 | parameters.uniforms = {
245 | ...parameters.uniforms,
246 | ...{
247 | interaction: {
248 | struct: {
249 | baseColorFactorsArray: {
250 | type: 'array', // we can pass an array of values!
251 | value: baseColorFactorsArray,
252 | },
253 | currentBaseColorBlendIndex: {
254 | type: 'i32',
255 | value: 0,
256 | },
257 | nextBaseColorBlendIndex: {
258 | type: 'i32',
259 | value: 0,
260 | },
261 | colorChangeProgress: {
262 | type: 'f32',
263 | value: 0,
264 | },
265 | },
266 | },
267 | ambientLight: {
268 | struct: {
269 | intensity: {
270 | type: 'f32',
271 | value: 0.35,
272 | },
273 | color: {
274 | type: 'vec3f',
275 | value: new Vec3(1),
276 | },
277 | },
278 | },
279 | pointLight: {
280 | struct: {
281 | position: {
282 | type: 'vec3f',
283 | value: lightPosition,
284 | },
285 | intensity: {
286 | type: 'f32',
287 | value: lightPositionLength * 0.75,
288 | },
289 | color: {
290 | type: 'vec3f',
291 | value: new Vec3(1),
292 | },
293 | range: {
294 | type: 'f32',
295 | value: lightPositionLength * 2.5,
296 | },
297 | },
298 | },
299 | },
300 | }
301 |
302 | parameters.shaders = buildPBRShaders(meshDescriptor, {
303 | chunks: {
304 | additionalFragmentHead,
305 | preliminaryColorContribution,
306 | ambientContribution,
307 | lightContribution,
308 | },
309 | })
310 | })
311 | }
312 |
313 | addEvents() {
314 | this.gltfContainer = document.querySelector('#gltf-scene-object-container')
315 |
316 | this.mouse = {
317 | lerpedInteraction: new Vec2(),
318 | currentInteraction: new Vec2(),
319 | last: new Vec2(),
320 | multiplier: 0.015,
321 | isDown: false,
322 | }
323 |
324 | this._onPointerDownHandler = this.onPointerDown.bind(this)
325 | this._onPointerUpHandler = this.onPointerUp.bind(this)
326 | this._onPointerMoveHandler = this.onPointerMove.bind(this)
327 |
328 | this.section.addEventListener('mousedown', this._onPointerDownHandler)
329 | this.section.addEventListener('mouseup', this._onPointerUpHandler)
330 | this.gltfContainer.addEventListener('mousemove', this._onPointerMoveHandler)
331 |
332 | this.section.addEventListener('touchstart', this._onPointerDownHandler, {
333 | passive: true,
334 | })
335 | this.section.addEventListener('touchend', this._onPointerUpHandler)
336 | this.gltfContainer.addEventListener('touchmove', this._onPointerMoveHandler, {
337 | passive: true,
338 | })
339 | }
340 |
341 | removeEvents() {
342 | this.section.removeEventListener('mousedown', this._onPointerDownHandler)
343 | this.section.removeEventListener('mouseup', this._onPointerUpHandler)
344 | this.gltfContainer.removeEventListener('mousemove', this._onPointerMoveHandler)
345 |
346 | this.section.removeEventListener('touchstart', this._onPointerDownHandler, {
347 | passive: true,
348 | })
349 | this.section.removeEventListener('touchend', this._onPointerUpHandler)
350 | this.gltfContainer.removeEventListener('touchmove', this._onPointerMoveHandler, {
351 | passive: true,
352 | })
353 | }
354 |
355 | onPointerDown(e) {
356 | if (e.which === 1 || (e.targetTouches && e.targetTouches.length)) {
357 | this.mouse.isDown = true
358 | }
359 |
360 | const { clientX, clientY } = e.targetTouches && e.targetTouches.length ? e.targetTouches[0] : e
361 | this.mouse.last.set(clientX, clientY)
362 | }
363 |
364 | onPointerUp() {
365 | this.mouse.isDown = false
366 | }
367 |
368 | onPointerMove(e) {
369 | if (this.mouse.isDown) {
370 | const { clientX, clientY } = e.targetTouches && e.targetTouches.length ? e.targetTouches[0] : e
371 |
372 | const xDelta = clientX - this.mouse.last.x
373 | const yDelta = clientY - this.mouse.last.y
374 |
375 | this.mouse.currentInteraction.x += xDelta * this.mouse.multiplier
376 | this.mouse.currentInteraction.y += yDelta * this.mouse.multiplier
377 |
378 | // clamp X rotation
379 | this.mouse.currentInteraction.y = Math.max(-Math.PI / 4, Math.min(Math.PI / 4, this.mouse.currentInteraction.y))
380 |
381 | this.mouse.last.set(clientX, clientY)
382 | }
383 | }
384 |
385 | onRender() {
386 | this.mouse.lerpedInteraction.lerp(this.mouse.currentInteraction, 0.2)
387 |
388 | this.parentNode.rotation.x = this.mouse.lerpedInteraction.y
389 | this.parentNode.rotation.y = this.animations.meshesProgress * Math.PI * 4 + this.mouse.lerpedInteraction.x
390 | }
391 | }
392 |
--------------------------------------------------------------------------------
/js/intro-scene/IntroScene.js:
--------------------------------------------------------------------------------
1 | import { BoxGeometry, Mesh, SphereGeometry, Vec2, Vec3 } from 'gpu-curtains'
2 | import { ScrollTrigger } from 'gsap/ScrollTrigger'
3 | import { DemoScene } from '../DemoScene'
4 | import { gsap } from 'gsap'
5 | import { introMeshFs, introMeshVs } from '../shaders/intro-meshes.wgsl'
6 |
7 | export class IntroScene extends DemoScene {
8 | constructor({ renderer, nbMeshes = 500 }) {
9 | super({ renderer })
10 |
11 | this.nbMeshes = nbMeshes
12 | }
13 |
14 | init() {
15 | this.section = document.querySelector('#intro-scene')
16 |
17 | // default camera position is (0, 0, 10)
18 | this.renderer.camera.position.z = 80
19 |
20 | // feel free to tweak the light position and see how it goes
21 | // this.lightPosition = new Vec3(50, 20, 100)
22 |
23 | this.lightPosition = this.renderer.camera.position.clone().multiplyScalar(2)
24 | this.currentLightPosition = this.lightPosition.clone()
25 |
26 | this.meshes = []
27 |
28 | super.init()
29 | }
30 |
31 | setupWebGPU() {
32 | this.createMeshes()
33 | }
34 |
35 | destroyWebGPU() {
36 | this.meshes.forEach((mesh) => mesh.remove())
37 | }
38 |
39 | addEvents() {
40 | this._onPointerMoveHandler = this.onPointerMove.bind(this)
41 | window.addEventListener('mousemove', this._onPointerMoveHandler)
42 | window.addEventListener('touchmove', this._onPointerMoveHandler)
43 | }
44 |
45 | removeEvents() {
46 | window.removeEventListener('mousemove', this._onPointerMoveHandler)
47 | window.removeEventListener('touchmove', this._onPointerMoveHandler)
48 | }
49 |
50 | addScrollTrigger() {
51 | this.scrollTrigger = ScrollTrigger.create({
52 | trigger: this.renderer.domElement.element,
53 | onToggle: ({ isActive }) => {
54 | this.onSceneVisibilityChanged(isActive)
55 | },
56 | })
57 |
58 | this.onSceneVisibilityChanged(this.scrollTrigger.isActive)
59 | }
60 |
61 | removeScrollTrigger() {
62 | this.scrollTrigger.kill()
63 | }
64 |
65 | onSceneVisibilityChanged(isVisible) {
66 | if (isVisible) {
67 | this.section.classList.add('is-visible')
68 | this.renderer.shouldRenderScene = true
69 | this.timeline?.restart(true)
70 | } else {
71 | this.section.classList.remove('is-visible')
72 | this.renderer.shouldRenderScene = false
73 | this.timeline?.pause()
74 | }
75 | }
76 |
77 | addEnteringAnimation() {
78 | this.animations = {
79 | meshesPositionProgress: 0,
80 | lightIntensity: 1,
81 | }
82 |
83 | this.autoAlphaElements = this.section.querySelectorAll('.gsap-auto-alpha')
84 |
85 | this.timeline = gsap
86 | .timeline({
87 | paused: true,
88 | delay: 0.5,
89 | })
90 | .to(this.animations, {
91 | meshesPositionProgress: 1,
92 | ease: 'expo.out',
93 | duration: 2,
94 | })
95 | .fromTo(
96 | this.animations,
97 | {
98 | lightIntensity: 1,
99 | },
100 | {
101 | lightIntensity: 0.6,
102 | duration: 0.5,
103 | onUpdate: () => {
104 | this.meshes.forEach((mesh) => {
105 | mesh.uniforms.directionalLight.intensity.value = this.animations.lightIntensity
106 | })
107 | },
108 | },
109 | 1
110 | )
111 | .fromTo(
112 | this.autoAlphaElements,
113 | {
114 | autoAlpha: 0,
115 | },
116 | {
117 | autoAlpha: 1,
118 | duration: 1,
119 | stagger: 0.125,
120 | ease: 'power2.inOut',
121 | },
122 | 0.75
123 | )
124 | }
125 |
126 | removeEnteringAnimation() {
127 | this.timeline.kill()
128 | }
129 |
130 | createMeshes() {
131 | // now add meshes to our scene
132 | const boxGeometry = new BoxGeometry()
133 | const sphereGeometry = new SphereGeometry()
134 |
135 | const grey = new Vec3(0.35)
136 | const gold = new Vec3(184 / 255, 162 / 255, 9 / 255)
137 | const dark = new Vec3(0.05)
138 |
139 | for (let i = 0; i < this.nbMeshes; i++) {
140 | const random = Math.random()
141 | const meshColor = random < 0.5 ? grey : random > 0.85 ? dark : gold
142 |
143 | const mesh = new Mesh(this.renderer, {
144 | label: `Cube ${i}`,
145 | geometry: Math.random() > 0.33 ? boxGeometry : sphereGeometry,
146 | shaders: {
147 | vertex: {
148 | code: introMeshVs,
149 | },
150 | fragment: {
151 | code: introMeshFs,
152 | },
153 | },
154 | uniforms: {
155 | ambientLight: {
156 | visibility: ['fragment'],
157 | struct: {
158 | color: {
159 | type: 'vec3f',
160 | value: new Vec3(1),
161 | },
162 | intensity: {
163 | type: 'f32',
164 | value: 0.05,
165 | },
166 | },
167 | },
168 | directionalLight: {
169 | visibility: ['fragment'],
170 | struct: {
171 | position: {
172 | type: 'vec3f',
173 | value: this.lightPosition,
174 | },
175 | intensity: {
176 | type: 'f32',
177 | value: 1,
178 | },
179 | color: {
180 | type: 'vec3f',
181 | value: new Vec3(1),
182 | },
183 | },
184 | },
185 | shading: {
186 | visibility: ['fragment'],
187 | struct: {
188 | color: {
189 | type: 'vec3f',
190 | value: meshColor,
191 | },
192 | opacity: {
193 | type: 'f32',
194 | value: 1,
195 | },
196 | },
197 | },
198 | },
199 | })
200 |
201 | // set a random initial rotation
202 | mesh.rotation.set(Math.random(), Math.random(), Math.random())
203 |
204 | // a random depth position based on the camera position along Z axis
205 | const zPosition = (Math.random() - 0.5) * this.renderer.camera.position.z
206 |
207 | // store current and end positions into two Vec3
208 | mesh.userData.currentPosition = new Vec3()
209 | mesh.userData.endPosition = new Vec3()
210 |
211 | const setMeshEndPosition = (zPosition) => {
212 | // get the visible width and height in world unit at given depth
213 | const visibleSize = this.renderer.camera.getVisibleSizeAtDepth(zPosition)
214 |
215 | mesh.userData.endPosition.set(
216 | visibleSize.width * (Math.random() * 0.5) * Math.sign(Math.random() - 0.5),
217 | visibleSize.height * (Math.random() * 0.5) * Math.sign(Math.random() - 0.5),
218 | zPosition
219 | )
220 | }
221 |
222 | // updates the positions right away AND after resize!
223 | setMeshEndPosition(zPosition)
224 |
225 | mesh.onAfterResize(() => {
226 | setMeshEndPosition(zPosition)
227 | })
228 |
229 | this.meshes.push(mesh)
230 | }
231 | }
232 |
233 | onPointerMove(e) {
234 | const { clientX, clientY } = e.targetTouches && e.targetTouches.length ? e.targetTouches[0] : e
235 | const { width, height } = this.renderer.boundingRect
236 | const worldPosition = this.renderer.camera.getVisibleSizeAtDepth(this.currentLightPosition.z)
237 |
238 | const normalizedScreenCords = new Vec2((clientX - width * 0.5) / width, (clientY - height * 0.5) / height)
239 |
240 | this.currentLightPosition.set(
241 | normalizedScreenCords.x * worldPosition.width * 0.5,
242 | normalizedScreenCords.y * worldPosition.height * -0.5,
243 | this.currentLightPosition.z
244 | )
245 | }
246 |
247 | onRender() {
248 | // lerp light position for a more pleasant result
249 | this.lightPosition.lerp(this.currentLightPosition, 0.05)
250 |
251 | this.meshes.forEach((mesh) => {
252 | mesh.userData.currentPosition
253 | .copy(mesh.userData.endPosition)
254 | .multiplyScalar(this.animations.meshesPositionProgress)
255 |
256 | mesh.position.copy(mesh.userData.currentPosition)
257 |
258 | mesh.rotation.add(
259 | mesh.userData.currentPosition.normalize().multiplyScalar((1.025 - this.animations.meshesPositionProgress) * 0.2)
260 | )
261 |
262 | mesh.uniforms.shading.opacity.value = this.animations.meshesPositionProgress
263 | })
264 | }
265 | }
266 |
--------------------------------------------------------------------------------
/js/main.js:
--------------------------------------------------------------------------------
1 | // Dynamically import Demo depending on WebGPU support
2 | async function checkWebGPUSupport() {
3 | if (!navigator.gpu) {
4 | console.error("WebGPU is not supported in this browser.");
5 | const disclaimer = document.createElement('div');
6 | disclaimer.setAttribute('id', 'no-webgpu-disclaimer');
7 | disclaimer.classList.add('tiny');
8 | disclaimer.innerHTML = 'Unfortunately, it looks like WebGPU is not (yet) supported by your browser or OS. For more information, visit the mdn web docs.';
9 | document.body.appendChild(disclaimer);
10 | document.body.classList.add('no-webgpu');
11 |
12 | return false; // No WebGPU support
13 | }
14 |
15 | return true; // WebGPU is supported
16 | }
17 |
18 | (async () => {
19 | const isWebGPUSupported = await checkWebGPUSupport();
20 |
21 | if (isWebGPUSupported) {
22 | import('./Demo')
23 | .then((module) => {
24 | const Demo = module.default || module.Demo;
25 |
26 | if (document.readyState === 'complete') {
27 | const demo = new Demo();
28 | } else {
29 | window.addEventListener('load', () => {
30 | const demo = new Demo();
31 | });
32 | }
33 | })
34 | .catch(err => {
35 | console.error("Failed to load WebGPU-dependent module:", err);
36 | });
37 | }
38 | })();
39 |
--------------------------------------------------------------------------------
/js/planes-scene/PlanesScene.js:
--------------------------------------------------------------------------------
1 | import { Plane, Sampler, ShaderPass } from 'gpu-curtains'
2 | import { ScrollTrigger } from 'gsap/ScrollTrigger'
3 | import { DemoScene } from '../DemoScene'
4 | import { planesFs, planesVs } from '../shaders/gallery-planes.wgsl'
5 | import { galleryShaderPassFs } from '../shaders/gallery-shader-pass.wgsl'
6 | import { gsap } from 'gsap'
7 |
8 | export class PlanesScene extends DemoScene {
9 | constructor({ renderer }) {
10 | super({ renderer })
11 | }
12 |
13 | init() {
14 | this.section = document.querySelector('#planes-scene')
15 |
16 | this.planes = []
17 | this.planesElements = document.querySelectorAll('#planes-scene .plane')
18 |
19 | this.velocity = {
20 | weightRatio: 0.75, // the smaller, the closer to the original velocity value
21 | weighted: 0,
22 | }
23 |
24 | super.init()
25 | }
26 |
27 | setupWebGPU() {
28 | // keep track of the number of planes currently animated
29 | let nbAnimatedPlanes = 0
30 |
31 | this.planesElements.forEach((planeEl, index) => {
32 | const plane = new Plane(this.renderer, planeEl, {
33 | label: `Plane ${index}`,
34 | shaders: {
35 | vertex: {
36 | code: planesVs,
37 | },
38 | fragment: {
39 | code: planesFs,
40 | },
41 | },
42 | texturesOptions: {
43 | generateMips: true,
44 | },
45 | transparent: true,
46 | uniforms: {
47 | params: {
48 | struct: {
49 | opacity: {
50 | type: 'f32',
51 | value: 1,
52 | },
53 | },
54 | },
55 | },
56 | })
57 |
58 | plane.userData.animationTimeline = gsap
59 | .timeline({
60 | paused: true,
61 | })
62 | .fromTo(
63 | plane.uniforms.params.opacity,
64 | { value: 0 },
65 | {
66 | value: 1,
67 | duration: 1.5,
68 | ease: 'expo.out',
69 | onStart: () => {
70 | nbAnimatedPlanes--
71 | },
72 | onUpdate: () => {
73 | const textureScale = 1.5 - plane.uniforms.params.opacity.value * 0.5
74 | plane.domTextures[0]?.scale.set(textureScale, textureScale, 1)
75 | },
76 | }
77 | )
78 |
79 | plane.onReEnterView(() => {
80 | nbAnimatedPlanes++
81 | plane.userData.animationTimeline.delay(nbAnimatedPlanes * 0.1)
82 | plane.userData.animationTimeline.restart(true)
83 | })
84 |
85 | this.planes.push(plane)
86 | })
87 |
88 | this.shaderPass = new ShaderPass(this.renderer, {
89 | label: 'Distortion shader pass',
90 | shaders: {
91 | fragment: {
92 | code: galleryShaderPassFs,
93 | },
94 | },
95 | samplers: [
96 | new Sampler(this.renderer, {
97 | label: 'Clamp sampler',
98 | name: 'clampSampler',
99 | addressModeU: 'clamp-to-edge',
100 | addressModeV: 'clamp-to-edge',
101 | }),
102 | ],
103 | uniforms: {
104 | deformation: {
105 | struct: {
106 | maxStrength: {
107 | type: 'f32',
108 | value: 0.1,
109 | },
110 | scrollStrength: {
111 | type: 'f32',
112 | value: 0,
113 | },
114 | },
115 | },
116 | },
117 | })
118 | }
119 |
120 | destroyWebGPU() {
121 | this.planes.forEach((plane) => {
122 | plane.userData.animationTimeline.kill()
123 | plane.remove()
124 | })
125 |
126 | this.shaderPass.remove()
127 | }
128 |
129 | addScrollTrigger() {
130 | this.scrollTrigger = ScrollTrigger.create({
131 | trigger: '#planes-scene',
132 | onToggle: ({ isActive }) => {
133 | this.onSceneVisibilityChanged(isActive)
134 | },
135 | })
136 |
137 | this.onSceneVisibilityChanged(this.scrollTrigger.isActive)
138 | }
139 |
140 | removeScrollTrigger() {
141 | this.scrollTrigger.kill()
142 | }
143 |
144 | onSceneVisibilityChanged(isVisible) {
145 | if (isVisible) {
146 | this.section.classList.add('is-visible')
147 | this.renderer.shouldRenderScene = true
148 | this.timeline?.restart(true)
149 | } else {
150 | this.section.classList.remove('is-visible')
151 | this.renderer.shouldRenderScene = false
152 | this.timeline?.pause()
153 | }
154 | }
155 |
156 | addEnteringAnimation() {
157 | this.autoAlphaElements = this.section.querySelectorAll('.gsap-auto-alpha')
158 |
159 | this.timeline = gsap
160 | .timeline({
161 | paused: true,
162 | })
163 | .fromTo(
164 | this.autoAlphaElements,
165 | {
166 | autoAlpha: 0,
167 | },
168 | {
169 | autoAlpha: 1,
170 | duration: 1,
171 | stagger: 0.2,
172 | ease: 'power2.inOut',
173 | },
174 | 0.25
175 | )
176 | }
177 |
178 | removeEnteringAnimation() {
179 | this.timeline.kill()
180 | }
181 |
182 | onScroll(velocity = 0) {
183 | // no weight if current velocity is null
184 | const weight = velocity ? Math.abs(velocity - this.velocity.weighted) * this.velocity.weightRatio : 0
185 |
186 | // apply weight
187 | this.velocity.weighted = (this.velocity.weighted * weight + Math.abs(velocity)) / (weight + 1)
188 |
189 | if (this.shaderPass) {
190 | this.shaderPass.uniforms.deformation.scrollStrength.value = this.velocity.weighted * 0.05
191 | }
192 | }
193 | }
194 |
--------------------------------------------------------------------------------
/js/shaders/chunks/curl-noise.wgsl.js:
--------------------------------------------------------------------------------
1 | export const curlNoise = /* wgsl */ `
2 | // some of the utility functions here were taken from
3 | // https://gist.github.com/munrocket/236ed5ba7e409b8bdf1ff6eca5dcdc39
4 |
5 | // snoise4 and curlNoise have been ported from a previous WebGL experiment
6 | // can't remember where I found them in the first place
7 | // if you know it, please feel free to contact me to add due credit
8 |
9 | fn mod289_4(x: vec4f) -> vec4f {
10 | return x - floor(x * (1.0 / 289.0)) * 289.0;
11 | }
12 |
13 | fn mod289_3(x: vec3f) -> vec3f {
14 | return x - floor(x * (1.0 / 289.0)) * 289.0;
15 | }
16 |
17 | fn mod289_2(x: vec2f) -> vec2f {
18 | return x - floor(x * (1.0 / 289.0)) * 289.0;
19 | }
20 |
21 | fn mod289(x: f32) -> f32 {
22 | return x - floor(x * (1.0 / 289.0)) * 289.0;
23 | }
24 |
25 | fn permute4(x: vec4f) -> vec4f {
26 | return mod289_4(((x*34.0)+1.0)*x);
27 | }
28 |
29 | fn permute3(x: vec3f) -> vec3f {
30 | return mod289_3(((x*34.0)+1.0)*x);
31 | }
32 |
33 | fn permute(x: f32) -> f32 {
34 | return mod289(((x*34.0)+1.0)*x);
35 | }
36 |
37 | fn taylorInvSqrt4(r: vec4f) -> vec4f {
38 | return 1.79284291400159 - 0.85373472095314 * r;
39 | }
40 |
41 | fn taylorInvSqrt(r: f32) -> f32 {
42 | return 1.79284291400159 - 0.85373472095314 * r;
43 | }
44 |
45 | fn lessThan4(a: vec4f, b: vec4f) -> vec4<bool> {
46 | return vec4(a.x < b.x, a.y < b.y, a.z < b.z, a.w < b.w);
47 | }
48 |
49 | fn grad4(j: f32, ip: vec4f) -> vec4f {
50 | let ones: vec4f = vec4(1.0, 1.0, 1.0, -1.0);
51 | var p: vec4f;
52 | var s: vec4f;
53 |
54 | p = vec4(floor( fract (vec3(j) * ip.xyz) * 7.0) * ip.z - 1.0, p.w);
55 | p.w = 1.5 - dot(abs(p.xyz), ones.xyz);
56 | s = vec4<f32>(lessThan4(p, vec4(0.0)));
57 |
58 | p = vec4(p.xyz + (s.xyz*2.0 - 1.0) * s.www, p.w);
59 |
60 | return p;
61 | }
62 |
63 | const F4: f32 = 0.309016994374947451;
64 |
65 | fn snoise4(v: vec4f) -> vec4f {
66 | let C: vec4f = vec4( 0.138196601125011,0.276393202250021,0.414589803375032,-0.447213595499958);
67 |
68 | var i: vec4f = floor(v + dot(v, vec4(F4)) );
69 | let x0: vec4f = v - i + dot(i, C.xxxx);
70 |
71 | var i0: vec4f;
72 | var isX: vec3f = step( x0.yzw, x0.xxx );
73 | var isYZ: vec3f = step( x0.zww, x0.yyz );
74 | i0.x = isX.x + isX.y + isX.z;
75 |
76 | i0 = vec4(i0.x, 1.0 - isX);
77 | //i0.yzw = 1.0 - isX;
78 | i0.y += isYZ.x + isYZ.y;
79 |
80 | i0 = vec4(i0.x, i0.y, i0.zw + 1.0 - isYZ.xy);
81 | //i0.zw += 1.0 - isYZ.xy;
82 | i0.z += isYZ.z;
83 | i0.w += 1.0 - isYZ.z;
84 |
85 | var i3: vec4f = clamp( i0, vec4(0.0), vec4(1.0) );
86 | var i2: vec4f = clamp( i0-1.0, vec4(0.0), vec4(1.0) );
87 | var i1: vec4f = clamp( i0-2.0, vec4(0.0), vec4(1.0) );
88 |
89 | var x1: vec4f = x0 - i1 + C.xxxx;
90 | var x2: vec4f = x0 - i2 + C.yyyy;
91 | var x3: vec4f = x0 - i3 + C.zzzz;
92 | var x4: vec4f = x0 + C.wwww;
93 |
94 | i = mod289_4(i);
95 | var j0: f32 = permute( permute( permute( permute(i.w) + i.z) + i.y) + i.x);
96 | var j1: vec4f = permute4( permute4( permute4( permute4 (
97 | i.w + vec4(i1.w, i2.w, i3.w, 1.0 ))
98 | + i.z + vec4(i1.z, i2.z, i3.z, 1.0 ))
99 | + i.y + vec4(i1.y, i2.y, i3.y, 1.0 ))
100 | + i.x + vec4(i1.x, i2.x, i3.x, 1.0 ));
101 |
102 |
103 | var ip: vec4f = vec4(1.0/294.0, 1.0/49.0, 1.0/7.0, 0.0) ;
104 |
105 | var p0: vec4f = grad4(j0, ip);
106 | var p1: vec4f = grad4(j1.x, ip);
107 | var p2: vec4f = grad4(j1.y, ip);
108 | var p3: vec4f = grad4(j1.z, ip);
109 | var p4: vec4f = grad4(j1.w, ip);
110 |
111 | var norm: vec4f = taylorInvSqrt4(vec4(dot(p0,p0), dot(p1,p1), dot(p2, p2), dot(p3,p3)));
112 | p0 *= norm.x;
113 | p1 *= norm.y;
114 | p2 *= norm.z;
115 | p3 *= norm.w;
116 | p4 *= taylorInvSqrt(dot(p4,p4));
117 |
118 | var values0: vec3f = vec3(dot(p0, x0), dot(p1, x1), dot(p2, x2)); //value of contributions from each corner at point
119 | var values1: vec2f = vec2(dot(p3, x3), dot(p4, x4));
120 |
121 | var m0: vec3f = max(0.5 - vec3(dot(x0,x0), dot(x1,x1), dot(x2,x2)), vec3(0.0)); //(0.5 - x^2) where x is the distance
122 | var m1: vec2f = max(0.5 - vec2(dot(x3,x3), dot(x4,x4)), vec2(0.0));
123 |
124 | var temp0: vec3f = -6.0 * m0 * m0 * values0;
125 | var temp1: vec2f = -6.0 * m1 * m1 * values1;
126 |
127 | var mmm0: vec3f = m0 * m0 * m0;
128 | var mmm1: vec2f = m1 * m1 * m1;
129 |
130 | let dx: f32 = temp0[0] * x0.x + temp0[1] * x1.x + temp0[2] * x2.x + temp1[0] * x3.x + temp1[1] * x4.x + mmm0[0] * p0.x + mmm0[1] * p1.x + mmm0[2] * p2.x + mmm1[0] * p3.x + mmm1[1] * p4.x;
131 | let dy: f32 = temp0[0] * x0.y + temp0[1] * x1.y + temp0[2] * x2.y + temp1[0] * x3.y + temp1[1] * x4.y + mmm0[0] * p0.y + mmm0[1] * p1.y + mmm0[2] * p2.y + mmm1[0] * p3.y + mmm1[1] * p4.y;
132 | let dz: f32 = temp0[0] * x0.z + temp0[1] * x1.z + temp0[2] * x2.z + temp1[0] * x3.z + temp1[1] * x4.z + mmm0[0] * p0.z + mmm0[1] * p1.z + mmm0[2] * p2.z + mmm1[0] * p3.z + mmm1[1] * p4.z;
133 | let dw: f32 = temp0[0] * x0.w + temp0[1] * x1.w + temp0[2] * x2.w + temp1[0] * x3.w + temp1[1] * x4.w + mmm0[0] * p0.w + mmm0[1] * p1.w + mmm0[2] * p2.w + mmm1[0] * p3.w + mmm1[1] * p4.w;
134 |
135 | return vec4(dx, dy, dz, dw) * 49.0;
136 | }
137 |
138 | fn curlNoise( p: vec3f, noiseTime: f32, persistence: f32 ) -> vec3f {
139 |
140 | var xNoisePotentialDerivatives: vec4f = vec4(0.0);
141 | var yNoisePotentialDerivatives: vec4f = vec4(0.0);
142 | var zNoisePotentialDerivatives: vec4f = vec4(0.0);
143 |
144 | for (var i: i32 = 0; i < 3; i++) {
145 |
146 | let twoPowI: f32 = pow(2.0, f32(i));
147 | let scale: f32 = 0.5 * twoPowI * pow(persistence, f32(i));
148 |
149 | xNoisePotentialDerivatives += snoise4(vec4(p * twoPowI, noiseTime)) * scale;
150 | yNoisePotentialDerivatives += snoise4(vec4((p + vec3(123.4, 129845.6, -1239.1)) * twoPowI, noiseTime)) * scale;
151 | zNoisePotentialDerivatives += snoise4(vec4((p + vec3(-9519.0, 9051.0, -123.0)) * twoPowI, noiseTime)) * scale;
152 | }
153 |
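  // note: the velocity returned below is the curl of the three potential fields,
  // v = curl(Psi) = (dPsiZ/dy - dPsiY/dz, dPsiX/dz - dPsiZ/dx, dPsiY/dx - dPsiX/dy),
  // using the analytic derivatives returned by snoise4 (indices 0, 1, 2 = x, y, z partials),
  // which is what makes the resulting velocity field divergence-free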
154 | return vec3(
155 | zNoisePotentialDerivatives[1] - yNoisePotentialDerivatives[2],
156 | xNoisePotentialDerivatives[2] - zNoisePotentialDerivatives[0],
157 | yNoisePotentialDerivatives[0] - xNoisePotentialDerivatives[1]
158 | );
159 | }
160 | `
161 |
--------------------------------------------------------------------------------
/js/shaders/chunks/discard-particle-fragment.wgsl.js:
--------------------------------------------------------------------------------
1 | export const discardParticleFragment = /* wgsl */ `
2 | if(distance(fsInput.uv, vec2(0.5)) > 0.5) {
3 | discard;
4 | }
5 | `
6 |
--------------------------------------------------------------------------------
/js/shaders/chunks/get-particle-size.wgsl.js:
--------------------------------------------------------------------------------
1 | export const getParticleSize = /* wgsl */ `
2 | fn getParticleSize(currentLife: f32, initialLife: f32) -> f32 {
3 | // scale from 0 -> 1 when life begins
4 | let startSize = smoothstep(0.0, 0.25, 1.0 - currentLife / initialLife);
5 | // scale from 1 -> 0 when life ends
6 | let endSize = smoothstep(0.0, 0.25, currentLife / initialLife);
7 |
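  // note: currentLife counts down every frame, so the particle scales up during the
  // first ~25% of its life and back down during the last ~25%
  // (e.g. with an initial life of 60 frames it reaches full size after ~15 frames)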
8 | return startSize * endSize * params.size;
9 | }
10 | `
11 |
--------------------------------------------------------------------------------
/js/shaders/chunks/get-pcf-soft-shadows.wgsl.js:
--------------------------------------------------------------------------------
1 | export const getPCFSoftShadows = /* wgsl */ `
2 | fn getPCFSoftShadows(shadowPosition: vec3f) -> f32 {
3 | // Percentage-closer filtering. Sample texels in the region
4 | // to smooth the result.
5 | var visibility: f32 = 0.0;
6 | let bias: f32 = 0.001;
7 |
8 | let size: f32 = f32(textureDimensions(shadowMapDepthTexture).y);
9 |
10 | let oneOverShadowDepthTextureSize = 1.0 / size;
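  // sample a 3x3 texel neighborhood around the shadow position: the 9 depth comparisons,
  // averaged below, soften the shadow edge instead of giving a hard 0/1 cutoff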
11 | for (var y = -1; y <= 1; y++) {
12 | for (var x = -1; x <= 1; x++) {
13 |         let offset = vec2f(vec2(x, y)) * oneOverShadowDepthTextureSize;
14 |
15 | visibility += textureSampleCompare(
16 | shadowMapDepthTexture,
17 | depthComparisonSampler,
18 | shadowPosition.xy + offset,
19 | shadowPosition.z - bias
20 | );
21 | }
22 | }
23 |
24 | visibility /= 9.0;
25 |
26 | return visibility;
27 | }
28 | `
29 |
--------------------------------------------------------------------------------
/js/shaders/chunks/get-shadow-position.wgsl.js:
--------------------------------------------------------------------------------
1 | export const getShadowPosition = /* wgsl */ `
2 | fn getShadowPosition(lightProjectionMatrix: mat4x4f, modelViewPosition: vec4f) -> vec3f {
3 | // XY is in (-1, 1) space, Z is in (0, 1) space
4 | let posFromLight = lightProjectionMatrix * modelViewPosition;
5 |
6 | // Convert XY to (0, 1)
7 | // Y is flipped because texture coords are Y-down.
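  // e.g. NDC (-1.0, 1.0) (top left) maps to uv (0.0, 0.0), and NDC (1.0, -1.0) maps to uv (1.0, 1.0)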
8 | return vec3(
9 | posFromLight.xy * vec2(0.5, -0.5) + vec2(0.5),
10 | posFromLight.z,
11 | );
12 | }
13 | `
14 |
--------------------------------------------------------------------------------
/js/shaders/chunks/gltf-contributions.wgsl.js:
--------------------------------------------------------------------------------
1 | export const additionalFragmentHead = /* wgsl */ `
2 | fn rangeAttenuation(range: f32, distance: f32) -> f32 {
3 | if (range <= 0.0) {
4 |       // a range of 0 or less means no cutoff, just plain inverse-square falloff
5 | return 1.0 / pow(distance, 2.0);
6 | }
7 | return clamp(1.0 - pow(distance / range, 4.0), 0.0, 1.0) / pow(distance, 2.0);
8 | }
9 |
10 | // photoshop like blending
11 | // port of https://gist.github.com/floz/53ad2765cc846187cdd3
12 | fn rgbToHSL(color: vec3f) -> vec3f {
13 | var hsl: vec3f;
14 |
15 | let fmin: f32 = min(min(color.r, color.g), color.b); //Min. value of RGB
16 | let fmax: f32 = max(max(color.r, color.g), color.b); //Max. value of RGB
17 | let delta: f32 = fmax - fmin; //Delta RGB value
18 |
19 | hsl.z = (fmax + fmin) / 2.0; // Luminance
20 |
21 | //This is a gray, no chroma...
22 | if (delta == 0.0) {
23 | hsl.x = 0.0; // Hue
24 | hsl.y = 0.0; // Saturation
25 | }
26 | else {
27 | //Chromatic data...
28 | if (hsl.z < 0.5) {
29 | hsl.y = delta / (fmax + fmin); // Saturation
30 | }
31 | else {
32 | hsl.y = delta / (2.0 - fmax - fmin); // Saturation
33 | }
34 |
35 | let deltaR: f32 = (((fmax - color.r) / 6.0) + (delta / 2.0)) / delta;
36 | let deltaG: f32 = (((fmax - color.g) / 6.0) + (delta / 2.0)) / delta;
37 | let deltaB: f32 = (((fmax - color.b) / 6.0) + (delta / 2.0)) / delta;
38 |
39 | if (color.r == fmax ) {
40 | hsl.x = deltaB - deltaG; // Hue
41 | }
42 | else if (color.g == fmax) {
43 | hsl.x = (1.0 / 3.0) + deltaR - deltaB; // Hue
44 | }
45 | else if (color.b == fmax) {
46 | hsl.x = (2.0 / 3.0) + deltaG - deltaR; // Hue
47 | }
48 |
49 | if (hsl.x < 0.0) {
50 | hsl.x += 1.0; // Hue
51 | }
52 | else if (hsl.x > 1.0) {
53 | hsl.x -= 1.0; // Hue
54 | }
55 | }
56 |
57 | return hsl;
58 | }
59 |
60 | fn hueToRGB(f1: f32, f2: f32, hue: f32) -> f32 {
61 | var h = hue;
62 |
63 | if (h < 0.0) {
64 | h += 1.0;
65 | }
66 | else if (h > 1.0) {
67 | h -= 1.0;
68 | }
69 |
70 | var res: f32;
71 |
72 | if ((6.0 * h) < 1.0) {
73 | res = f1 + (f2 - f1) * 6.0 * h;
74 | }
75 | else if ((2.0 * h) < 1.0) {
76 | res = f2;
77 | }
78 | else if ((3.0 * h) < 2.0) {
79 | res = f1 + (f2 - f1) * ((2.0 / 3.0) - h) * 6.0;
80 | }
81 | else {
82 | res = f1;
83 | }
84 |
85 | return res;
86 | }
87 |
88 | fn hslToRGB(hsl: vec3f) -> vec3f {
89 | var rgb: vec3f;
90 |
91 | if (hsl.y == 0.0) {
92 | rgb = vec3(hsl.z); // Luminance
93 | }
94 | else {
95 | var f2: f32;
96 |
97 | if (hsl.z < 0.5) {
98 | f2 = hsl.z * (1.0 + hsl.y);
99 | }
100 | else {
101 | f2 = (hsl.z + hsl.y) - (hsl.y * hsl.z);
102 | }
103 |
104 | let f1: f32 = 2.0 * hsl.z - f2;
105 |
106 | rgb.r = hueToRGB(f1, f2, hsl.x + (1.0/3.0));
107 | rgb.g = hueToRGB(f1, f2, hsl.x);
108 |       rgb.b = hueToRGB(f1, f2, hsl.x - (1.0/3.0));
109 | }
110 |
111 | return rgb;
112 | }
113 |
114 | // Saturation Blend mode creates the result color by combining the luminance and hue of the base color with the saturation of the blend color.
115 | fn blendSaturation(base: vec3f, blend: vec3f) -> vec3f {
116 | let baseHSL: vec3f = rgbToHSL(base);
117 | return hslToRGB(vec3(baseHSL.r, rgbToHSL(blend).g, baseHSL.b));
118 | }
119 |
120 | // Luminosity Blend mode creates the result color by combining the hue and saturation of the base color with the luminance of the blend color.
121 | fn blendLuminosity(base: vec3f, blend: vec3f) -> vec3f {
122 | let baseHSL: vec3f = rgbToHSL(base);
123 | return hslToRGB(vec3(baseHSL.r, baseHSL.g, rgbToHSL(blend).b));
124 | }
125 |
126 | // use the correct blend equation based on the blendIndex to use
127 | // and add small adjustments for a more visually pleasing result
128 | fn getBlendedColor(baseColor: vec4f, blendIndex: i32) -> vec4f {
129 | var blendedColor: vec4f;
130 | let blendColor: vec3f = interaction.baseColorFactorsArray[blendIndex];
131 |
132 | if(blendIndex == 1) {
133 | // gold
134 | blendedColor = vec4(blendLuminosity(blendColor, baseColor.rgb), baseColor.a);
135 | } else if(blendIndex == 2) {
136 | // different blending for black card
137 | blendedColor = vec4(blendColor * blendSaturation(baseColor.rgb, blendColor), baseColor.a);
138 | } else {
139 | // default to silver
140 | blendedColor = vec4(blendLuminosity(blendColor, baseColor.rgb), baseColor.a);
141 |
142 | // brighten silver card
143 | blendedColor = vec4(blendedColor.rgb * vec3(1.25), blendedColor.a);
144 | }
145 |
146 | return blendedColor;
147 | }
148 | `
149 |
150 | export const ambientContribution = /* wgsl */ `
151 | lightContribution.ambient = ambientLight.intensity * ambientLight.color;
152 | `
153 |
154 | export const preliminaryColorContribution = /* wgsl */ `
155 | // get blended colors
156 | // based on our currentBaseColorBlendIndex and nextBaseColorBlendIndex uniforms
157 | let currentColor: vec4f = getBlendedColor(color, interaction.currentBaseColorBlendIndex);
158 | let nextColor: vec4f = getBlendedColor(color, interaction.nextBaseColorBlendIndex);
159 |
160 | var uv: vec2f = fsInput.uv;
161 | let progress: f32 = interaction.colorChangeProgress;
162 |
163 | // convert to [-1, 1]
164 | uv = uv * 2.0 - 1.0;
165 |
166 | // apply deformation
167 | let uvDeformation: f32 = sin(abs(fsInput.uv.y * 2.0) * 3.141592) * 3.0;
168 |
169 | // 0 -> 0.5 -> 0
170 | let mappedProgress: f32 = 0.5 - (abs(progress * 2.0 - 1.0) * 0.5);
171 |
172 | // apply to X
173 | uv.x *= 1.0 - mappedProgress * uvDeformation;
174 |
175 | // convert back to [0, 1]
176 | uv = uv * 0.5 + 0.5;
177 |
178 | // mix between a simple slide change (from https://gl-transitions.com/editor/wipeRight)
179 | // and our custom animation based on progress
180 | let p: vec2f = mix(uv, fsInput.uv, smoothstep(0.0, 1.0, progress)) / vec2(1.0);
181 |
182 | // add a little white border on the edge of the animation
183 | // use vec4(3.0) to oversaturate the result
184 | color = mix(currentColor, vec4(3.0), step(p.x, progress + 0.1 * pow(mappedProgress, 0.5)));
185 |
186 | color = mix(color, nextColor, step(p.x, progress));
187 | `
188 |
189 | export const lightContribution = /* wgsl */ `
190 | // here N, V and NdotV are already available
191 | // they are defined as follows
192 | // let N: vec3f = normalize(normal);
193 |   // let viewDirection: vec3f = fsInput.viewDirection;
194 |   // let V: vec3f = normalize(viewDirection);
195 | // let NdotV: f32 = clamp(dot(N, V), 0.0, 1.0);
196 | let L = normalize(pointLight.position - worldPosition);
197 | let H = normalize(V + L);
198 |
199 | let NdotL: f32 = clamp(dot(N, L), 0.0, 1.0);
200 | let NdotH: f32 = clamp(dot(N, H), 0.0, 1.0);
201 | let VdotH: f32 = clamp(dot(V, H), 0.0, 1.0);
202 |
203 | // cook-torrance brdf
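  // as a reminder of the microfacet model used here:
  // specular = D * G * F / (4.0 * NdotV * NdotL), with kD = (1.0 - F) * (1.0 - metallic)
  // as the energy left for the diffuse term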
204 | let NDF = DistributionGGX(NdotH, roughness);
205 | let G = GeometrySmith(NdotL, NdotV, roughness);
206 | let F = FresnelSchlick(VdotH, f0);
207 |
208 | let kD = (vec3(1.0) - F) * (1.0 - metallic);
209 |
210 | let numerator = NDF * G * F;
211 | let denominator = max(4.0 * NdotV * NdotL, 0.001);
212 |
213 | let specular = numerator / vec3(denominator);
214 |
215 | let distance = length(pointLight.position - worldPosition);
216 | let attenuation = rangeAttenuation(pointLight.range, distance);
217 |
218 | let radiance = pointLight.color * pointLight.intensity * attenuation;
219 |
220 | lightContribution.diffuse += (kD / vec3(PI)) * radiance * NdotL;
221 | lightContribution.specular += specular * radiance * NdotL;
222 | `
223 |
--------------------------------------------------------------------------------
/js/shaders/compute-particles.wgsl.js:
--------------------------------------------------------------------------------
1 | import { curlNoise } from './chunks/curl-noise.wgsl'
2 |
3 | export const computeParticles = /* wgsl */ `
4 | ${curlNoise}
5 |
6 | // https://gist.github.com/munrocket/236ed5ba7e409b8bdf1ff6eca5dcdc39
7 | // "On generating random numbers, with help of y = [(a+x) sin(bx)] mod 1", W.J.J. Rey, 22nd European Meeting of Statisticians 1998
8 | fn rand11(n: f32) -> f32 { return fract(sin(n) * 43758.5453123); }
9 |
10 | fn getInitLife(index: f32) -> f32 {
11 | return round(rand11(cos(index)) * params.maxLife * 0.95) + params.maxLife * 0.05;
12 | }
13 |
14 | const PI: f32 = 3.14159265359;
15 |
16 | // set initial positions and data
17 | @compute @workgroup_size(256) fn setInitData(
18 |     @builtin(global_invocation_id) GlobalInvocationID: vec3u
19 | ) {
20 | let index = GlobalInvocationID.x;
21 |
22 | if(index < arrayLength(&particles)) {
23 | let fIndex: f32 = f32(index);
24 |
25 | // calculate a random particle init life, in number of frames
26 | var initLife: f32 = getInitLife(fIndex);
27 |
28 | initParticles[index].position.w = initLife;
29 | particles[index].position.w = initLife;
30 |
31 |       // subtract from the init life so the first frames don't look too much like a loop
32 | particles[index].position.w -= rand11(cos(sin(fIndex) * PI)) * params.maxLife;
33 |
34 | // now the positions
35 | // calculate an initial random position inside a sphere of a defined radius
36 | var position: vec3f;
37 |
38 | // random radius in the [0.5 * params.radius, params.radius] range
39 | let radius: f32 = (0.5 + rand11(cos(fIndex)) * 0.5) * params.radius;
40 | let phi: f32 = (rand11(sin(fIndex)) - 0.5) * PI;
41 | let theta: f32 = rand11(sin(cos(fIndex) * PI)) * PI * 2;
42 |
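      // phi acts as a latitude in [-PI/2, PI/2] and theta as a longitude in [0, 2*PI],
      // so the spherical-to-cartesian conversion below lands on a sphere of that radius (y up)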
43 | position.x = radius * cos(theta) * cos(phi);
44 | position.y = radius * sin(phi);
45 | position.z = radius * sin(theta) * cos(phi);
46 |
47 | // calculate initial velocity
48 | var velocity: vec3f = curlNoise(position * 0.02, 0.0, 0.05);
49 |
50 | particles[index].velocity = vec4(velocity, initLife);
51 |
52 | // apply to position
53 | position += velocity;
54 |
55 | // write positions
56 | particles[index].position.x = position.x;
57 | particles[index].position.y = position.y;
58 | particles[index].position.z = position.z;
59 |
60 | initParticles[index].position.x = position.x;
61 | initParticles[index].position.y = position.y;
62 | initParticles[index].position.z = position.z;
63 | }
64 | }
65 |
66 | @compute @workgroup_size(256) fn updateData(
67 |     @builtin(global_invocation_id) GlobalInvocationID: vec3u
68 | ) {
69 | let index = GlobalInvocationID.x;
70 |
71 | if(index < arrayLength(&particles)) {
72 | let fIndex: f32 = f32(index);
73 |
74 | var vPos: vec3f = particles[index].position.xyz;
75 | var life: f32 = particles[index].position.w;
76 | life -= 1.0;
77 |
78 | var vVel: vec3f = particles[index].velocity.xyz;
79 |
80 | vVel += curlNoise(vPos * 0.02, 0.0, 0.05);
81 | vVel *= 0.4;
82 |
83 | particles[index].velocity = vec4(vVel, particles[index].velocity.w);
84 |
85 | let mouse = vec3(params.mouse, 0);
86 |
87 | if (life <= 0.0) {
88 | // respawn particle to original position + mouse position
89 | let newPosition = initParticles[index].position.xyz + mouse;
90 |
91 | // reset init life to random value
92 | initParticles[index].position.w = getInitLife(fIndex * cos(fIndex));
93 |
94 | particles[index].position = vec4(
95 | newPosition,
96 | initParticles[index].position.w
97 | );
98 |
99 | particles[index].velocity.w = initParticles[index].position.w;
100 | } else {
101 | // apply new curl noise position and life
102 | // accounting for mouse position
103 | let delta: vec3f = mouse - vPos;
104 | let friction: f32 = 1000.0;
105 |
106 | vPos += delta * 1.0 / friction;
107 | vPos += vVel;
108 |
109 | particles[index].position = vec4(vPos, life);
110 | }
111 | }
112 | }
113 | `
114 |
--------------------------------------------------------------------------------
/js/shaders/gallery-planes.wgsl.js:
--------------------------------------------------------------------------------
1 | export const planesVs = /* wgsl */ `
2 | struct VSOutput {
3 | @builtin(position) position: vec4f,
4 | @location(0) uv: vec2f,
5 | };
6 |
7 | @vertex fn main(
8 | attributes: Attributes,
9 | ) -> VSOutput {
10 | var vsOutput: VSOutput;
11 |
12 | vsOutput.position = getOutputPosition(attributes.position);
13 |
14 | // get correctly scaled UV coordinates
15 | vsOutput.uv = getUVCover(attributes.uv, planeTextureMatrix);
16 |
17 | return vsOutput;
18 | }
19 | `
20 |
21 | export const planesFs = /* wgsl */ `
22 | struct VSOutput {
23 | @builtin(position) position: vec4f,
24 | @location(0) uv: vec2f,
25 | };
26 |
27 | @fragment fn main(fsInput: VSOutput) -> @location(0) vec4f {
28 | var color: vec4f = textureSample(planeTexture, defaultSampler, fsInput.uv);
29 |
30 | color.a *= params.opacity;
31 | return color;
32 | }
33 | `
34 |
--------------------------------------------------------------------------------
/js/shaders/gallery-shader-pass.wgsl.js:
--------------------------------------------------------------------------------
1 | export const galleryShaderPassFs = /* wgsl */ `
2 | struct VSOutput {
3 | @builtin(position) position: vec4f,
4 | @location(0) uv: vec2f,
5 | };
6 |
7 | @fragment fn main(fsInput: VSOutput) -> @location(0) vec4f {
8 | var uv: vec2f = fsInput.uv;
9 |
10 | // convert to [-1, 1]
11 | uv = uv * 2.0 - 1.0;
12 |
13 | // apply deformation
14 | let uvDeformation: f32 = cos(abs(uv.y) * 3.141592 * 0.5);
15 |
16 | // apply deformation uniforms
17 | uv.x *= 1.0 + deformation.maxStrength * deformation.scrollStrength * uvDeformation;
18 |
19 | // convert back to [0, 1]
20 | uv = uv * 0.5 + 0.5;
21 |
22 | return textureSample(renderTexture, clampSampler, uv);
23 | }
24 | `
25 |
--------------------------------------------------------------------------------
/js/shaders/intro-meshes.wgsl.js:
--------------------------------------------------------------------------------
1 | export const introMeshVs = /* wgsl */ `
2 | struct VSOutput {
3 | @builtin(position) position: vec4f,
4 | @location(0) normal: vec3f,
5 | @location(1) worldPosition: vec3f,
6 | };
7 |
8 | @vertex fn main(
9 | attributes: Attributes,
10 | ) -> VSOutput {
11 | var vsOutput: VSOutput;
12 |
13 | // position in world space
14 | let worldPosition: vec4f = matrices.model * vec4(attributes.position, 1.0);
15 |
16 | // outputted position
17 | vsOutput.position = camera.projection * camera.view * worldPosition;
18 |
19 | // normals in world space
20 | vsOutput.normal = getWorldNormal(attributes.normal);
21 |
22 |   // will be used in our fragment shader to calculate lighting in world space
23 | vsOutput.worldPosition = worldPosition.xyz;
24 |
25 | return vsOutput;
26 | }
27 | `
28 |
29 | export const introMeshFs = /* wgsl */ `
30 | struct VSOutput {
31 | @builtin(position) position: vec4f,
32 | @location(0) normal: vec3f,
33 | @location(1) worldPosition: vec3f,
34 | };
35 |
36 | // main fragment shader function
37 | @fragment fn main(fsInput: VSOutput) -> @location(0) vec4f {
38 | // color and opacity from our uniforms
39 | var color: vec4f = vec4(shading.color, shading.opacity);
40 |
41 | // ambient light
42 | let ambient: vec3f = ambientLight.intensity * ambientLight.color;
43 |
44 | // diffuse lambert shading
45 | let N = normalize(fsInput.normal);
46 | let L = normalize(directionalLight.position - fsInput.worldPosition);
47 | let NDotL = max(dot(N, L), 0.0);
48 |
49 | let diffuse: vec3f = NDotL * directionalLight.color * directionalLight.intensity;
50 |
51 | color = vec4(
52 | color.rgb * (diffuse + ambient) * color.a, // apply ambient + diffuse and simulate alpha blending
53 | color.a
54 | );
55 |
56 | // display our color
57 | return color;
58 | }
59 | `
60 |
--------------------------------------------------------------------------------
/js/shaders/shadowed-particles.wgsl.js:
--------------------------------------------------------------------------------
1 | import { discardParticleFragment } from './chunks/discard-particle-fragment.wgsl'
2 | import { getParticleSize } from './chunks/get-particle-size.wgsl'
3 | import { getShadowPosition } from './chunks/get-shadow-position.wgsl'
4 | import { getPCFSoftShadows } from './chunks/get-pcf-soft-shadows.wgsl'
5 |
6 | export const shadowedParticlesVs = /* wgsl */ `
7 | struct VSOutput {
8 | @builtin(position) position: vec4f,
9 | @location(0) uv: vec2f,
10 | @location(1) normal: vec3f,
11 | @location(2) velocity: vec4f,
12 | @location(3) shadowPosition: vec3f,
13 | };
14 |
15 | ${getParticleSize}
16 |
17 | ${getShadowPosition}
18 |
19 | @vertex fn main(
20 | attributes: Attributes,
21 | ) -> VSOutput {
22 | var vsOutput : VSOutput;
23 |
24 | let size: f32 = getParticleSize(attributes.particlePosition.w, attributes.particleVelocity.w);
25 |
26 | // billboarding
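  // (the particle center is transformed to view space first, then the quad corner offset,
  // scaled by the particle size, is added in view space so the quad always faces the camera)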
27 | var mvPosition: vec4f = matrices.modelView * vec4(attributes.particlePosition.xyz, 1.0);
28 | mvPosition += vec4(attributes.position, 0.0) * size;
29 | vsOutput.position = camera.projection * mvPosition;
30 |
31 | vsOutput.uv = attributes.uv;
32 |
33 | // normals in view space to follow billboarding
34 | vsOutput.normal = getViewNormal(attributes.normal);
35 |
36 | vsOutput.velocity = attributes.particleVelocity;
37 |
38 | // the shadow position must account for billboarding as well!
39 | var mvShadowPosition: vec4f = light.viewMatrix * matrices.model * vec4(attributes.particlePosition.xyz, 1.0);
40 | mvShadowPosition += vec4(attributes.position, 0.0) * size;
41 |
42 | vsOutput.shadowPosition = getShadowPosition(
43 | light.projectionMatrix,
44 | mvShadowPosition
45 | );
46 |
47 | return vsOutput;
48 | }
49 | `
50 |
51 | export const shadowedParticlesFs = /* wgsl */ `
52 | struct VSOutput {
53 | @builtin(position) position: vec4f,
54 | @location(0) uv: vec2f,
55 | @location(1) normal: vec3f,
56 | @location(2) velocity: vec4f,
57 | @location(3) shadowPosition: vec3f,
58 | };
59 |
60 | ${getPCFSoftShadows}
61 |
62 | @fragment fn main(fsInput: VSOutput) -> @location(0) vec4f {
63 | ${discardParticleFragment}
64 |
65 | // clamp velocity
66 | let velocity = clamp(length(fsInput.velocity.xyz), 0.0, 1.0);
67 |
68 | // use it to mix between our 2 colors
69 | var color: vec3f = mix(shading.darkColor, shading.lightColor, vec3(velocity));
70 |
71 | var visibility = getPCFSoftShadows(fsInput.shadowPosition);
72 | visibility = clamp(visibility, 1.0 - clamp(shading.shadowIntensity, 0.0, 1.0), 1.0);
73 |
74 | color *= visibility;
75 |
76 | return vec4(color, 1.0);
77 | }
78 | `
79 |
80 | export const particlesDepthPassShaders = /* wgsl */ `
81 | struct DepthVSOutput {
82 | @builtin(position) position: vec4f,
83 | @location(0) uv: vec2f,
84 | };
85 |
86 | ${getParticleSize}
87 |
88 | @vertex fn shadowMapVertex(
89 | attributes: Attributes,
90 | ) -> DepthVSOutput {
91 | var depthVsOutput: DepthVSOutput;
92 |
93 | let size: f32 = getParticleSize(attributes.particlePosition.w, attributes.particleVelocity.w);
94 |
95 | // billboarding
96 | var mvPosition: vec4f = light.viewMatrix * matrices.model * vec4(attributes.particlePosition.xyz, 1.0);
97 | mvPosition += vec4(attributes.position, 0.0) * size;
98 | depthVsOutput.position = light.projectionMatrix * mvPosition;
99 |
100 | depthVsOutput.uv = attributes.uv;
101 |
102 | return depthVsOutput;
103 | }
104 |
105 | @fragment fn shadowMapFragment(fsInput: DepthVSOutput) -> @location(0) vec4f {
106 | ${discardParticleFragment}
107 |
108 | // we could return anything here actually
109 | return vec4f(1.0);
110 | }
111 | `
--------------------------------------------------------------------------------
/js/shaders/shadowed-wrapping-box.wgsl.js:
--------------------------------------------------------------------------------
1 | import { getShadowPosition } from './chunks/get-shadow-position.wgsl'
2 | import { getPCFSoftShadows } from './chunks/get-pcf-soft-shadows.wgsl'
3 |
4 | export const wrappingBoxVs = /* wgsl */ `
5 | struct VSOutput {
6 | @builtin(position) position: vec4f,
7 | @location(0) uv: vec2f,
8 | @location(1) normal: vec3f,
9 | @location(2) shadowPosition: vec3f,
10 | @location(3) worldPosition: vec3f,
11 | };
12 |
13 | ${getShadowPosition}
14 |
15 | @vertex fn main(
16 | attributes: Attributes,
17 | ) -> VSOutput {
18 | var vsOutput : VSOutput;
19 |
20 | let worldPosition: vec4f = matrices.model * vec4(attributes.position, 1.0);
21 | vsOutput.position = camera.projection * camera.view * worldPosition;
22 |
23 | vsOutput.uv = attributes.uv;
24 |
25 | vsOutput.normal = getWorldNormal(attributes.normal);
26 |
27 | vsOutput.shadowPosition = getShadowPosition(
28 | light.projectionMatrix,
29 | light.viewMatrix * matrices.model * vec4(attributes.position, 1.0)
30 | );
31 |
32 | vsOutput.worldPosition = worldPosition.xyz;
33 |
34 | return vsOutput;
35 | }
36 | `
37 |
38 | export const wrappingBoxFs = /* wgsl */ `
39 | struct VSOutput {
40 | @builtin(position) position: vec4f,
41 | @builtin(front_facing) frontFacing: bool,
42 | @location(0) uv: vec2f,
43 | @location(1) normal: vec3f,
44 | @location(2) shadowPosition: vec3f,
45 | @location(3) worldPosition: vec3f,
46 | };
47 |
48 | ${getPCFSoftShadows}
49 |
50 | fn applyDithering(color: vec3f, fragCoord: vec2f) -> vec3f {
51 | // Simple random noise based on fragment coordinates
52 | let scale = 1.0 / 255.0; // Adjust this value to control the strength of the dithering
53 | let noise = fract(sin(dot(fragCoord, vec2(12.9898, 78.233))) * 43758.5453);
54 |
55 | // Apply the noise to the color
56 | return color + vec3(noise * scale);
57 | }
58 |
59 | @fragment fn main(fsInput: VSOutput) -> @location(0) vec4f {
60 | var visibility = getPCFSoftShadows(fsInput.shadowPosition);
61 |
62 | visibility = clamp(visibility, 1.0 - clamp(shading.shadowIntensity, 0.0, 1.0), 1.0);
63 |
64 | // ambient light
65 | let ambient: vec3f = ambientLight.intensity * ambientLight.color;
66 |
67 |   // invert the normals since we're using front-face culling (we shade the back faces)
68 | let faceDirection = select(-1.0, 1.0, fsInput.frontFacing);
69 |
70 | // diffuse lambert shading
71 | let N = normalize(faceDirection * fsInput.normal);
72 | let L = normalize(light.position - fsInput.worldPosition);
73 | let NDotL = max(dot(N, L), 0.0);
74 |
75 | let diffuse: vec3f = NDotL * directionalLight.color * directionalLight.intensity;
76 |
77 | // apply shadow to diffuse
78 | let lightAndShadow: vec3f = ambient + visibility * diffuse;
79 |
80 | // apply dithering to reduce color banding
81 | let color = applyDithering(shading.color * lightAndShadow, fsInput.position.xy);
82 |
83 | return vec4(color, 1.0);
84 | }
85 | `
86 |
--------------------------------------------------------------------------------
/js/shadowed-particles-scene/ShadowMap.js:
--------------------------------------------------------------------------------
1 | import { BufferBinding, Mat4, RenderMaterial, RenderTarget, Sampler, Texture, Vec3 } from 'gpu-curtains'
2 |
3 | export class ShadowMap {
4 | constructor({
5 | renderer,
6 | depthTextureSize = 1024,
7 | depthTextureFormat = 'depth24plus',
8 | light = {
9 | position: new Vec3(renderer?.camera.position.z || 1),
10 | target: new Vec3(),
11 | up: new Vec3(0, 1, 0),
12 | orthographicCamera: {
13 | left: renderer?.camera.position.z * -0.5,
14 | right: renderer?.camera.position.z * 0.5,
15 | top: renderer?.camera.position.z * 0.5,
16 | bottom: renderer?.camera.position.z * -0.5,
17 | near: 0.1,
18 | far: renderer?.camera.position.z * 5,
19 | },
20 | },
21 | }) {
22 | this.renderer = renderer
23 |
24 | this.depthTextureSize = depthTextureSize
25 | this.depthTextureFormat = depthTextureFormat
26 |
27 |     // mandatory so we can use textureSampleCompare()
28 | // if we'd like to use MSAA, we would have to use an additional pass
29 | // to manually resolve the depth texture before using it
30 | this.sampleCount = 1
31 |
32 | this.light = light
33 |
34 | // keep track of the meshes that will cast shadows
35 | this.meshes = []
36 |
37 | this.createLightSource()
38 | this.createShadowMap()
39 | this.setDepthPass()
40 | }
41 |
42 | createLightSource() {
43 | // create the light view matrix
44 | // equivalent to Mat4().lookAt(this.light.position, this.light.target, this.light.up).invert() but faster
45 | this.light.viewMatrix = new Mat4().makeView(this.light.position, this.light.target, this.light.up)
46 |
47 | // create the light projection matrix
48 | this.light.projectionMatrix = new Mat4().makeOrthographic(this.light.orthographicCamera)
49 |
50 | // create one uniform buffer that will be used by all the shadow casting meshes
51 | this.lightProjectionBinding = new BufferBinding({
52 | label: 'Light',
53 | name: 'light',
54 | bindingType: 'uniform',
55 | struct: {
56 | viewMatrix: {
57 | type: 'mat4x4f',
58 | value: this.light.viewMatrix,
59 | },
60 | projectionMatrix: {
61 | type: 'mat4x4f',
62 | value: this.light.projectionMatrix,
63 | },
64 | position: {
65 | type: 'vec3f',
66 | value: this.light.position,
67 | },
68 | },
69 | })
70 | }
71 |
72 | createShadowMap() {
73 | // create the depth texture
74 | this.depthTexture = new Texture(this.renderer, {
75 | label: 'Shadow map depth texture',
76 | name: 'shadowMapDepthTexture',
77 | type: 'depth',
78 | format: this.depthTextureFormat,
79 | sampleCount: this.sampleCount,
80 | fixedSize: {
81 | width: this.depthTextureSize,
82 | height: this.depthTextureSize,
83 | },
84 | })
85 |
86 | // create the render target
87 | this.depthPassTarget = new RenderTarget(this.renderer, {
88 | label: 'Depth pass render target',
89 | useColorAttachments: false,
90 | depthTexture: this.depthTexture,
91 | sampleCount: this.sampleCount,
92 | })
93 |
94 | // create depth comparison sampler
95 |     // used to compute the visibility of shadow-receiving objects
96 | this.depthComparisonSampler = new Sampler(this.renderer, {
97 | label: 'Depth comparison sampler',
98 | name: 'depthComparisonSampler',
99 | // we do not want to repeat the shadows
100 | addressModeU: 'clamp-to-edge',
101 | addressModeV: 'clamp-to-edge',
102 | compare: 'less',
103 | type: 'comparison',
104 | })
105 | }
106 |
107 | setDepthPass() {
108 | // add the depth pass (rendered each tick before our main scene)
109 | this.depthPassTaskID = this.renderer.onBeforeRenderScene.add((commandEncoder) => {
110 | if (!this.meshes.length) return
111 |
112 | // assign depth material to meshes
113 | this.meshes.forEach((mesh) => {
114 | mesh.useMaterial(mesh.userData.depthMaterial)
115 | })
116 |
117 | // reset renderer current pipeline
118 | this.renderer.pipelineManager.resetCurrentPipeline()
119 |
120 | // begin depth pass
121 | const depthPass = commandEncoder.beginRenderPass(this.depthPassTarget.renderPass.descriptor)
122 |
123 | // render meshes with their depth material
124 | this.meshes.forEach((mesh) => {
125 | if (mesh.ready) mesh.render(depthPass)
126 | })
127 |
128 | depthPass.end()
129 |
130 |       // reset the meshes to their original material
131 |       // so the scene renders them normally
132 | this.meshes.forEach((mesh) => {
133 | mesh.useMaterial(mesh.userData.originalMaterial)
134 | })
135 |
136 | // reset renderer current pipeline again
137 | this.renderer.pipelineManager.resetCurrentPipeline()
138 | })
139 | }
140 |
141 | addShadowCastingMesh(mesh, parameters = {}) {
142 | if (!parameters.shaders) {
143 | const defaultDepthVs = /* wgsl */ `
144 | @vertex fn main(
145 | attributes: Attributes,
146 | ) -> @builtin(position) vec4f {
147 | return light.projectionMatrix * light.viewMatrix * matrices.model * vec4(attributes.position, 1.0);
148 | }
149 | `
150 |
151 | parameters.shaders = {
152 | vertex: {
153 | code: defaultDepthVs,
154 | },
155 | fragment: false, // we do not need to output to a fragment shader unless we do late Z writing
156 | }
157 | }
158 |
159 | parameters = { ...mesh.material.options.rendering, ...parameters }
160 |
161 | // explicitly set empty output targets
162 | // we just want to write to the depth texture
163 | parameters.targets = []
164 |
165 | parameters.sampleCount = this.sampleCount
166 | parameters.depthFormat = this.depthTextureFormat
167 |
168 | if (parameters.bindings) {
169 | parameters.bindings = [
170 | this.lightProjectionBinding,
171 | mesh.material.getBufferBindingByName('matrices'),
172 | ...parameters.bindings,
173 | ]
174 | } else {
175 | parameters.bindings = [this.lightProjectionBinding, mesh.material.getBufferBindingByName('matrices')]
176 | }
177 |
178 | mesh.userData.depthMaterial = new RenderMaterial(this.renderer, {
179 | label: mesh.options.label + ' Depth render material',
180 | ...parameters,
181 | })
182 |
183 | // keep track of original material as well
184 | mesh.userData.originalMaterial = mesh.material
185 |
186 | this.meshes.push(mesh)
187 | }
188 |
189 | patchShadowReceivingParameters(params = {}) {
190 | if (params.textures) {
191 | params.textures = [...params.textures, this.depthTexture]
192 | } else {
193 | params.textures = [this.depthTexture]
194 | }
195 |
196 | if (params.samplers) {
197 | params.samplers = [...params.samplers, this.depthComparisonSampler]
198 | } else {
199 | params.samplers = [this.depthComparisonSampler]
200 | }
201 |
202 | if (params.bindings) {
203 | params.bindings = [...params.bindings, this.lightProjectionBinding]
204 | } else {
205 | params.bindings = [this.lightProjectionBinding]
206 | }
207 |
208 | return params
209 | }
210 |
211 | destroy() {
212 | this.renderer.onBeforeRenderScene.remove(this.depthPassTaskID)
213 |
214 | this.meshes.forEach((mesh) => {
215 | mesh.userData.depthMaterial.destroy()
216 | mesh.userData.depthMaterial = null
217 | })
218 |
219 | this.depthPassTarget.destroy()
220 | this.depthTexture.destroy()
221 | }
222 | }
223 |
--------------------------------------------------------------------------------
/js/shadowed-particles-scene/ShadowedParticlesScene.js:
--------------------------------------------------------------------------------
1 | import { ScrollTrigger } from 'gsap/ScrollTrigger'
2 | import { DemoScene } from '../DemoScene'
3 | import { BindGroup, BoxGeometry, BufferBinding, ComputePass, Mesh, PlaneGeometry, Vec2, Vec3 } from 'gpu-curtains'
4 | import { particlesDepthPassShaders, shadowedParticlesFs, shadowedParticlesVs } from '../shaders/shadowed-particles.wgsl'
5 | import { computeParticles } from '../shaders/compute-particles.wgsl'
6 | import { ShadowMap } from './ShadowMap'
7 | import { wrappingBoxFs, wrappingBoxVs } from '../shaders/shadowed-wrapping-box.wgsl'
8 | import { gsap } from 'gsap'
9 |
10 | export class ShadowedParticlesScene extends DemoScene {
11 | constructor({ renderer, nbInstances = 100_000 }) {
12 | super({ renderer })
13 | this.nbInstances = nbInstances
14 | }
15 |
16 | init() {
17 | this.section = document.querySelector('#shadowed-particles-scene')
18 |
19 | // particle system radius
20 | this.radius = 50
21 |
22 | this.renderer.camera.position.z = 375
23 |
24 | this.setSizeDependentValues()
25 | this.renderer.onResize(this.setSizeDependentValues.bind(this))
26 |
27 | super.init()
28 | }
29 |
30 | setSizeDependentValues() {
31 | // account for scroll on mouse move
32 | this.offsetTop = this.renderer.boundingRect.top + window.pageYOffset
33 | this.visibleSize = this.renderer.camera.getVisibleSizeAtDepth()
34 | }
35 |
36 | addScrollTrigger() {
37 | this.scrollTrigger = ScrollTrigger.create({
38 | trigger: this.section,
39 | onToggle: ({ isActive }) => {
40 | this.onSceneVisibilityChanged(isActive)
41 | },
42 | })
43 |
44 | this.onSceneVisibilityChanged(this.scrollTrigger.isActive)
45 | }
46 |
47 | removeScrollTrigger() {
48 | this.scrollTrigger.kill()
49 | }
50 |
51 | onSceneVisibilityChanged(isVisible) {
52 | if (isVisible) {
53 | this.section.classList.add('is-visible')
54 | this.renderer.shouldRender = true
55 | this.timeline?.restart(true)
56 | } else {
57 | this.section.classList.remove('is-visible')
58 | this.renderer.shouldRender = false
59 | this.timeline?.pause()
60 | }
61 | }
62 |
63 | addEnteringAnimation() {
64 | this.animation = {
65 | progress: 0,
66 | }
67 |
68 | this.autoAlphaElements = this.section.querySelectorAll('.gsap-auto-alpha')
69 |
70 | // animation
71 | this.timeline = gsap
72 | .timeline({
73 | paused: true,
74 | })
75 | .set(this.animation, { progress: 0 })
76 | .fromTo(
77 | this.autoAlphaElements,
78 | {
79 | autoAlpha: 0,
80 | },
81 | {
82 | autoAlpha: 1,
83 | duration: 1,
84 | stagger: 0.2,
85 | ease: 'power2.inOut',
86 | },
87 | 0
88 | )
89 | .to(
90 | this.animation,
91 | {
92 | progress: 0.7, // final particle size
93 | duration: 1,
94 | ease: 'expo.in',
95 | },
96 | 0
97 | )
98 | }
99 |
100 | removeEnteringAnimation() {
101 | this.timeline.kill()
102 | }
103 |
104 | setupWebGPU() {
105 | const distance = this.renderer.camera.position.z
106 |
107 | this.shadowMap = new ShadowMap({
108 | renderer: this.renderer,
109 | depthTextureSize: 1024,
110 | light: {
111 | position: new Vec3(distance * 0.5, distance * 0.325, distance * 0.5),
112 | // add a bit of spacing on every side
113 |         // so out-of-view particles don't get culled
114 |         // by the shadow map orthographic projection
115 | orthographicCamera: {
116 | left: distance * -1.05,
117 | right: distance * 1.05,
118 | top: distance * 1.05,
119 | bottom: distance * -1.05,
120 | near: 0.1,
121 | far: distance * 5,
122 | },
123 | },
124 | })
125 |
126 | this.createComputePasses()
127 | this.createParticles()
128 | this.createWrappingBox()
129 | }
130 |
131 | destroyWebGPU() {
132 | this.shadowMap.destroy()
133 |
134 | // destroy both compute pass and compute bind group
135 | this.computePass?.destroy()
136 | this.computeBindGroup?.destroy()
137 |
138 | this.particlesSystem?.remove()
139 | this.wrappingBox?.remove()
140 | }
141 |
142 | async createComputePasses() {
143 | this.initComputeBuffer = new BufferBinding({
144 | label: 'Compute particles init buffer',
145 | name: 'initParticles',
146 | bindingType: 'storage',
147 | access: 'read_write', // we want a readable AND writable buffer!
148 |       usage: ['vertex'], // we're also going to use this buffer as a vertex buffer, along with the default usages
149 | visibility: ['compute'],
150 | struct: {
151 | position: {
152 | type: 'array',
153 | value: new Float32Array(this.nbInstances * 4),
154 | },
155 | velocity: {
156 | type: 'array',
157 | value: new Float32Array(this.nbInstances * 4),
158 | },
159 | },
160 | })
161 |
162 | // update buffer, cloned from init one
163 | this.updateComputeBuffer = this.initComputeBuffer.clone({
164 | ...this.initComputeBuffer.options,
165 | label: 'Compute particles update buffer',
166 | name: 'particles',
167 | })
168 |
169 | this.computeBindGroup = new BindGroup(this.renderer, {
170 | label: 'Compute instances bind group',
171 | bindings: [this.initComputeBuffer, this.updateComputeBuffer],
172 | uniforms: {
173 | params: {
174 | visibility: ['compute'],
175 | struct: {
176 | radius: {
177 | type: 'f32',
178 | value: this.radius,
179 | },
180 | maxLife: {
181 | type: 'f32',
182 | value: 60, // in frames
183 | },
184 | mouse: {
185 | type: 'vec2f',
186 | value: this.mouse.lerped,
187 | },
188 | },
189 | },
190 | },
191 | })
192 |
193 | const computeInitDataPass = new ComputePass(this.renderer, {
194 | label: 'Compute initial data',
195 | shaders: {
196 | compute: {
197 | code: computeParticles,
198 | entryPoint: 'setInitData',
199 | },
200 | },
201 | dispatchSize: Math.ceil(this.nbInstances / 256),
202 | bindGroups: [this.computeBindGroup],
203 | autoRender: false, // we don't want to run this pass each frame
204 | })
205 |
206 | // we should wait for pipeline compilation!
207 | await computeInitDataPass.material.compileMaterial()
208 |
209 | // now run the compute pass just once
210 | this.renderer.renderOnce([computeInitDataPass])
211 |
212 | this.computePass = new ComputePass(this.renderer, {
213 | label: 'Compute particles pass',
214 | shaders: {
215 | compute: {
216 | code: computeParticles,
217 | entryPoint: 'updateData',
218 | },
219 | },
220 | dispatchSize: Math.ceil(this.nbInstances / 256),
221 | bindGroups: [this.computeBindGroup],
222 | })
223 |
224 | // we're done with our first compute pass, remove it
225 | computeInitDataPass.remove()
226 | }
227 |
228 | createParticles() {
229 | const geometry = new PlaneGeometry({
230 | instancesCount: this.nbInstances,
231 | vertexBuffers: [
232 | {
233 | // use instancing
234 | stepMode: 'instance',
235 | name: 'instanceAttributes',
236 | buffer: this.updateComputeBuffer.buffer, // pass the compute buffer right away
237 | attributes: [
238 | {
239 | name: 'particlePosition',
240 | type: 'vec4f',
241 | bufferFormat: 'float32x4',
242 | size: 4,
243 | },
244 | {
245 | name: 'particleVelocity',
246 | type: 'vec4f',
247 | bufferFormat: 'float32x4',
248 | size: 4,
249 | },
250 | ],
251 | },
252 | ],
253 | })
254 |
255 | // since we need this uniform in both the depth pass and regular pass
256 | // create a new buffer binding that will be shared by both materials
257 | const particlesParamsBindings = new BufferBinding({
258 | label: 'Params',
259 | name: 'params',
260 | bindingType: 'uniform',
261 | visibility: ['vertex'],
262 | struct: {
263 | size: {
264 | type: 'f32',
265 | value: 0.7,
266 | },
267 | },
268 | })
269 |
270 | this.particlesSystem = new Mesh(
271 | this.renderer,
272 | this.shadowMap.patchShadowReceivingParameters({
273 | label: 'Shadowed particles system',
274 | geometry,
275 | frustumCulling: false,
276 | shaders: {
277 | vertex: {
278 | code: shadowedParticlesVs,
279 | },
280 | fragment: {
281 | code: shadowedParticlesFs,
282 | },
283 | },
284 | uniforms: {
285 | shading: {
286 | struct: {
287 | lightColor: {
288 | type: 'vec3f',
289 | value: new Vec3(255 / 255, 240 / 255, 97 / 255),
290 | },
291 | darkColor: {
292 | type: 'vec3f',
293 | value: new Vec3(184 / 255, 162 / 255, 9 / 255),
294 | },
295 | shadowIntensity: {
296 | type: 'f32',
297 | value: 0.75,
298 | },
299 | },
300 | },
301 | },
302 | bindings: [particlesParamsBindings],
303 | })
304 | )
305 |
306 | this.shadowMap.addShadowCastingMesh(this.particlesSystem, {
307 | shaders: {
308 | vertex: {
309 | code: particlesDepthPassShaders,
310 | entryPoint: 'shadowMapVertex',
311 | },
312 | fragment: {
313 | code: particlesDepthPassShaders,
314 | entryPoint: 'shadowMapFragment',
315 | },
316 | },
317 | bindings: [particlesParamsBindings],
318 | })
319 | }
320 |
321 | createWrappingBox() {
322 | this.wrappingBox = new Mesh(
323 | this.renderer,
324 | this.shadowMap.patchShadowReceivingParameters({
325 | label: 'Shadowed wrapping box',
326 | geometry: new BoxGeometry(),
327 | frustumCulling: false,
328 | cullMode: 'front',
329 | shaders: {
330 | vertex: {
331 | code: wrappingBoxVs,
332 | },
333 | fragment: {
334 | code: wrappingBoxFs,
335 | },
336 | },
337 | uniforms: {
338 | shading: {
339 | struct: {
340 | color: {
341 | type: 'vec3f',
342 | value: new Vec3(0.3),
343 | },
344 | shadowIntensity: {
345 | type: 'f32',
346 | value: 0.5,
347 | },
348 | },
349 | },
350 | ambientLight: {
351 | struct: {
352 | color: {
353 | type: 'vec3f',
354 | value: new Vec3(1),
355 | },
356 | intensity: {
357 | type: 'f32',
358 | value: 0.35,
359 | },
360 | },
361 | },
362 | directionalLight: {
363 | struct: {
364 | intensity: {
365 | type: 'f32',
366 | value: 1.25,
367 | },
368 | color: {
369 | type: 'vec3f',
370 | value: new Vec3(1),
371 | },
372 | },
373 | },
374 | },
375 | })
376 | )
377 |
378 | const setWrappingBoxScale = () => {
379 | this.wrappingBox.scale.x = this.visibleSize.width * 0.5
380 | this.wrappingBox.scale.y = this.visibleSize.height * 0.5
381 | }
382 |
383 | this.wrappingBox.scale.z = this.radius * 1.5
384 | this.wrappingBox.position.z = -this.wrappingBox.scale.z
385 |
386 | setWrappingBoxScale()
387 |
388 | this.wrappingBox.onAfterResize(setWrappingBoxScale)
389 | }
390 |
391 | addEvents() {
392 | this.mouse = {
393 | current: new Vec2(),
394 | lerped: new Vec2(),
395 | clamp: {
396 | min: new Vec2(-0.5),
397 | max: new Vec2(0.5),
398 | },
399 | }
400 |
401 | this._onPointerMoveHandler = this.onPointerMove.bind(this)
402 | window.addEventListener('mousemove', this._onPointerMoveHandler)
403 | window.addEventListener('touchmove', this._onPointerMoveHandler)
404 | }
405 |
406 | removeEvents() {
407 | window.removeEventListener('mousemove', this._onPointerMoveHandler)
408 | window.removeEventListener('touchmove', this._onPointerMoveHandler)
409 | }
410 |
411 | onPointerMove(e) {
412 | const { clientX, clientY } = e.targetTouches && e.targetTouches.length ? e.targetTouches[0] : e
413 | const { width, height } = this.renderer.boundingRect
414 | const scroll = window.pageYOffset
415 |
416 | // normalized between -0.5 and 0.5
417 | this.mouse.current.set(
418 | (clientX - width * 0.5) / width,
419 | -(clientY - (this.offsetTop - scroll) - height * 0.5) / height
420 | )
421 |
422 | // clamp
423 | this.mouse.current.clamp(this.mouse.clamp.min, this.mouse.clamp.max)
424 |
425 | // multiply by camera visible size
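    // (assuming getVisibleSizeAtDepth() defaults to depth 0, this expresses the pointer
    // position in world units around the origin, the same space in which the compute
    // shader adds params.mouse to the particle positions)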
426 | this.mouse.current.x *= this.visibleSize.width
427 | this.mouse.current.y *= this.visibleSize.height
428 | }
429 |
430 | onRender() {
431 | this.mouse.lerped.lerp(this.mouse.current, 0.5)
432 |
433 | if (this.particlesSystem) {
434 | this.particlesSystem.uniforms.params.size.value = this.animation.progress
435 | }
436 | }
437 | }
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "dependencies": {
3 | "gpu-curtains": "~0.7.10",
4 | "gsap": "^3.12.5",
5 | "lenis": "1.1.3",
6 | "vite": "4.5.5"
7 | },
8 | "scripts": {
9 | "dev": "vite --port 3333",
10 | "build": "vite build",
11 | "preview": "vite preview"
12 | },
13 | "name": "basic-scene",
14 | "version": "1.0.0",
15 | "main": "index.js",
16 | "license": "MIT",
17 | "devDependencies": {
18 | "eslint": "^8.56.0",
19 | "eslint-config-prettier": "^9.1.0",
20 | "prettier": "2.8.8"
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/public/assets/fallbacks/gltf-scene-bg.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/okaydevco/dive-into-webgpu/70efb006a70ce8e02479e4ef79bef212aa1a55bd/public/assets/fallbacks/gltf-scene-bg.jpg
--------------------------------------------------------------------------------
/public/assets/fallbacks/intro-scene-bg.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/okaydevco/dive-into-webgpu/70efb006a70ce8e02479e4ef79bef212aa1a55bd/public/assets/fallbacks/intro-scene-bg.jpg
--------------------------------------------------------------------------------
/public/assets/fallbacks/shadowed-particles-scene-bg.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/okaydevco/dive-into-webgpu/70efb006a70ce8e02479e4ef79bef212aa1a55bd/public/assets/fallbacks/shadowed-particles-scene-bg.jpg
--------------------------------------------------------------------------------
/public/assets/fonts/Excon-Bold.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/okaydevco/dive-into-webgpu/70efb006a70ce8e02479e4ef79bef212aa1a55bd/public/assets/fonts/Excon-Bold.ttf
--------------------------------------------------------------------------------
/public/assets/fonts/Excon-Bold.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/okaydevco/dive-into-webgpu/70efb006a70ce8e02479e4ef79bef212aa1a55bd/public/assets/fonts/Excon-Bold.woff
--------------------------------------------------------------------------------
/public/assets/fonts/Excon-Bold.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/okaydevco/dive-into-webgpu/70efb006a70ce8e02479e4ef79bef212aa1a55bd/public/assets/fonts/Excon-Bold.woff2
--------------------------------------------------------------------------------
/public/assets/gltf/metal_credit_card.glb:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/okaydevco/dive-into-webgpu/70efb006a70ce8e02479e4ef79bef212aa1a55bd/public/assets/gltf/metal_credit_card.glb
--------------------------------------------------------------------------------
/vite.config.js:
--------------------------------------------------------------------------------
1 | import { defineConfig } from 'vite'
2 |
3 | export default defineConfig({
4 | base: '',
5 | })
6 |
--------------------------------------------------------------------------------