├── .gitignore
├── LICENSE
├── README.md
├── index.html
├── package-lock.json
├── package.json
├── scripts
│   └── convert_to_ply.py
├── src
│   ├── lib
│   │   ├── Camera.ts
│   │   ├── DistanceSorter.ts
│   │   ├── LoadPLY.ts
│   │   ├── MortonSorter.ts
│   │   └── Viewer.ts
│   ├── main.ts
│   ├── style.css
│   ├── vite-env.d.ts
│   └── workers
│       └── SortWorker.ts
└── tsconfig.json
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | pnpm-debug.log*
8 | lerna-debug.log*
9 |
10 | node_modules
11 | dist
12 | dist-ssr
13 | *.local
14 |
15 | # Editor directories and files
16 | .vscode/*
17 | !.vscode/extensions.json
18 | .idea
19 | .DS_Store
20 | *.suo
21 | *.ntvs*
22 | *.njsproj
23 | *.sln
24 | *.sw?
25 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2025 SamuSynth, Inc.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # SVRaster WebGL Viewer
2 |
3 | A WebGL-based viewer for visualizing sparse voxel scenes from the Nvidia [Sparse Voxels Rasterization paper](https://svraster.github.io/). This viewer provides an interactive way to explore and visualize voxel radiance fields in the browser. You can try the viewer at [vid2scene.com/voxel](https://vid2scene.com/voxel).
4 |
5 | The rendering isn't exactly the same as the reference CUDA implementation, but it's pretty similar.
6 |
7 | ## Features
8 |
9 | - Interactive camera controls:
10 | - Left-click + drag: Orbit camera
11 | - Right-click + drag: Pan camera
12 | - Mouse wheel: Zoom
13 | - WASD/Arrow keys: Move camera
14 | - Q/E: Rotate scene around view direction
15 | - Space/Shift: Move up/down
16 | - Touch controls for mobile devices:
17 | - 1 finger drag: Orbit
18 | - 2 finger drag: Pan/zoom
19 | - Performance metrics display (FPS counter)
20 |
21 | ## How to Run
22 |
23 | ### Prerequisites
24 |
25 | Before running the project, you need to have Node.js and NPM (Node Package Manager) installed:
26 |
27 | 1. Install Node.js and NPM:
28 | - Download and install from [nodejs.org](https://nodejs.org/)
29 |
30 | 2. Verify installation:
31 | ```bash
32 | node --version
33 | npm --version
34 | ```
35 |
36 | ### Running the Project
37 |
38 | To run this project locally:
39 |
40 | 1. Clone the repository:
41 | ```bash
42 | git clone https://github.com/samuelm2/svraster-webgl.git
43 | cd svraster-webgl
44 | ```
45 |
46 | 2. Install dependencies:
47 | ```bash
48 | npm install
49 | ```
50 |
51 | 3. Start the development server:
52 | ```bash
53 | npm run dev
54 | ```
55 | This will start the Vite development server, typically at http://localhost:5173
56 |
57 |
58 | ## Implementation and Performance Notes
59 |
60 | - This viewer uses a distance-based sorting approach rather than the ray direction-dependent Morton ordering described in the paper (see the usage sketch after this list)
61 | - The current implementation has only basic optimizations applied, so there's significant room for performance improvements. Right now the fragment shader is the bottleneck, and memory usage could also be lowered because nothing is quantized yet. If you have a perf improvement suggestion, please feel free to submit a PR!
62 | - It runs at ~60-80 FPS on my laptop with an RTX 3080 Laptop GPU
63 | - It runs at ~12-20 FPS on my iPhone 13 Pro Max
64 | - Right now, only scenes trained with spherical harmonics degree 1 are supported (4 SH basis functions × 3 color channels = 12 coefficients per voxel, stored as 3 `f_dc` and 9 `f_rest` values). See the command below to train your SVRaster scene with SH degree 1.
65 |
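For reference, the back-to-front ordering is implemented by `DistanceSorter.sortVoxels` in `src/lib/DistanceSorter.ts`. Below is a minimal, self-contained sketch of calling it from somewhere like `src/main.ts`; the voxel data is made up for illustration and is not the viewer's actual buffers:

```typescript
import { DistanceSorter } from './lib/DistanceSorter';

// Three illustrative voxel centers along the -z axis.
const positions = new Float32Array([
  0, 0, -1,   // voxel 0 (nearest)
  0, 0, -5,   // voxel 1 (farthest)
  0, 0, -3,   // voxel 2
]);
const cameraPos: [number, number, number] = [0, 0, 0];

// Optional preallocated output buffer that can be reused between frames.
const reusable = new Uint32Array(positions.length / 3);

// Voxel indices ordered back-to-front (farthest first), as alpha blending requires.
const sorted = DistanceSorter.sortVoxels(positions, cameraPos, reusable);
console.log(sorted); // -> indices [1, 2, 0]
```

Internally, `sortVoxels` does a counting sort over 65,536 buckets (256 × 256) keyed on the logarithm of each voxel's squared distance to the camera, so the pass stays linear in the number of voxels.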
66 |
67 | You can pass `?samples=X` as a URL parameter to adjust the number of density samples per ray in the fragment shader. The default is 3, but you can get a pretty good performance increase by decreasing this value, at the cost of slightly less accurate rendering.
68 |
69 | ## URL Parameters
70 |
71 | The viewer supports a few URL parameters to customize its behavior (a short parsing sketch follows the list):
72 |
73 | - `?samples=1` - Adjusts the number of density samples per ray in the fragment shader (default: 3). Lower values increase performance at a slight cost in rendering accuracy.
74 | - `?url=https://example.com/myply.ply` - Loads a custom PLY file from the specified URL (default: [pumpkin_600k.ply](https://huggingface.co/samuelm2/voxel-data/blob/main/pumpkin_600k.ply) from HuggingFace). For example, I also have a 1.2-million-voxel pumpkin PLY, which you can load with this URL: https://vid2scene.com/voxel/?url=https://huggingface.co/samuelm2/voxel-data/resolve/main/pumpkin_1200k.ply
75 | - `?showLoadingUI=true` - Shows the PLY file upload UI, allowing users to load their own files
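
These parameters can be read with the standard `URLSearchParams` API. A minimal sketch follows; the variable names and the fallback PLY URL are illustrative assumptions, not necessarily what the viewer does internally:

```typescript
// Read viewer options from the page URL, e.g. /?samples=2&showLoadingUI=true
const params = new URLSearchParams(window.location.search);

// Density samples per ray in the fragment shader (default 3).
const samples = parseInt(params.get('samples') ?? '3', 10);

// PLY file to load; the fallback URL here is only an assumed default for illustration.
const plyUrl =
  params.get('url') ??
  'https://huggingface.co/samuelm2/voxel-data/resolve/main/pumpkin_600k.ply';

// Whether to show the PLY file upload UI.
const showLoadingUI = params.get('showLoadingUI') === 'true';
```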
76 |
77 | ## How to Generate Your Own Scenes
78 |
79 | If you have your own SVRaster scenes that you'd like to visualize in this WebGL viewer:
80 |
81 | 1. Train your SVRaster scene with SH degree 1 to create a model compatible with this viewer:
82 | ```bash
83 | python train.py --source_path /PATH/TO/COLMAP/SFM/DIR --model_path outputs/pumpkin/ --sh_degree 1 --sh_degree_init 1 --subdivide_max_num 600000
84 | ```
85 | Note: The PLY files generated by this process are unoptimized and uncompressed, so they can get very large quickly. I usually keep the number of voxels in the 600k-1M range using the `subdivide_max_num` flag above.
86 |
87 | 2. For converting SVRaster models to PLY format, use the standalone script included in this repository:
88 | ```bash
89 | python scripts/convert_to_ply.py outputs/pumpkin/model.pt outputs/pumpkin/pumpkin.ply
90 | ```
91 |
92 | 3. Open the WebGL viewer and use the URL parameter or file upload UI to load your custom PLY file
93 |
94 | ## Other note
95 |
96 | This project was made with heavy use of AI assistance ("vibe coded"). I wanted to see how it would go for something graphics related. My brief thoughts: it is super good for the boilerplate (defining/binding buffers, uniforms, etc). I was able to get simple rendering within hours. But when it comes to solving the harder graphics bugs, the benefits are a lot lower. There were multiple times where it would go in the complete wrong direction and I would have to rewrite portions manually. But overall, I think it is definitely a net positive for smaller projects like this one. In a more complex graphics engine / production environment, the benefits might be less clear for now. I'm interested in what others think.
--------------------------------------------------------------------------------
/index.html:
--------------------------------------------------------------------------------
1 | <!doctype html>
2 | <html lang="en">
3 |   <head>
4 |     <meta charset="UTF-8" />
5 |     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
6 |     <title>WebGL SVRaster</title>
7 |   </head>
8 |   <body>
9 |     <div id="app"></div>
10 |     <script type="module" src="/src/main.ts"></script>
11 |   </body>
12 | </html>
13 |
--------------------------------------------------------------------------------
/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "svraster-webgl",
3 | "version": "0.0.0",
4 | "lockfileVersion": 3,
5 | "requires": true,
6 | "packages": {
7 | "": {
8 | "name": "svraster-webgl",
9 | "version": "0.0.0",
10 | "dependencies": {
11 | "gl-matrix": "^3.4.3"
12 | },
13 | "devDependencies": {
14 | "typescript": "~5.7.2",
15 | "vite": "^6.2.0"
16 | }
17 | },
18 | "node_modules/@esbuild/aix-ppc64": {
19 | "version": "0.25.1",
20 | "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.1.tgz",
21 | "integrity": "sha512-kfYGy8IdzTGy+z0vFGvExZtxkFlA4zAxgKEahG9KE1ScBjpQnFsNOX8KTU5ojNru5ed5CVoJYXFtoxaq5nFbjQ==",
22 | "cpu": [
23 | "ppc64"
24 | ],
25 | "dev": true,
26 | "license": "MIT",
27 | "optional": true,
28 | "os": [
29 | "aix"
30 | ],
31 | "engines": {
32 | "node": ">=18"
33 | }
34 | },
35 | "node_modules/@esbuild/android-arm": {
36 | "version": "0.25.1",
37 | "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.1.tgz",
38 | "integrity": "sha512-dp+MshLYux6j/JjdqVLnMglQlFu+MuVeNrmT5nk6q07wNhCdSnB7QZj+7G8VMUGh1q+vj2Bq8kRsuyA00I/k+Q==",
39 | "cpu": [
40 | "arm"
41 | ],
42 | "dev": true,
43 | "license": "MIT",
44 | "optional": true,
45 | "os": [
46 | "android"
47 | ],
48 | "engines": {
49 | "node": ">=18"
50 | }
51 | },
52 | "node_modules/@esbuild/android-arm64": {
53 | "version": "0.25.1",
54 | "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.1.tgz",
55 | "integrity": "sha512-50tM0zCJW5kGqgG7fQ7IHvQOcAn9TKiVRuQ/lN0xR+T2lzEFvAi1ZcS8DiksFcEpf1t/GYOeOfCAgDHFpkiSmA==",
56 | "cpu": [
57 | "arm64"
58 | ],
59 | "dev": true,
60 | "license": "MIT",
61 | "optional": true,
62 | "os": [
63 | "android"
64 | ],
65 | "engines": {
66 | "node": ">=18"
67 | }
68 | },
69 | "node_modules/@esbuild/android-x64": {
70 | "version": "0.25.1",
71 | "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.1.tgz",
72 | "integrity": "sha512-GCj6WfUtNldqUzYkN/ITtlhwQqGWu9S45vUXs7EIYf+7rCiiqH9bCloatO9VhxsL0Pji+PF4Lz2XXCES+Q8hDw==",
73 | "cpu": [
74 | "x64"
75 | ],
76 | "dev": true,
77 | "license": "MIT",
78 | "optional": true,
79 | "os": [
80 | "android"
81 | ],
82 | "engines": {
83 | "node": ">=18"
84 | }
85 | },
86 | "node_modules/@esbuild/darwin-arm64": {
87 | "version": "0.25.1",
88 | "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.1.tgz",
89 | "integrity": "sha512-5hEZKPf+nQjYoSr/elb62U19/l1mZDdqidGfmFutVUjjUZrOazAtwK+Kr+3y0C/oeJfLlxo9fXb1w7L+P7E4FQ==",
90 | "cpu": [
91 | "arm64"
92 | ],
93 | "dev": true,
94 | "license": "MIT",
95 | "optional": true,
96 | "os": [
97 | "darwin"
98 | ],
99 | "engines": {
100 | "node": ">=18"
101 | }
102 | },
103 | "node_modules/@esbuild/darwin-x64": {
104 | "version": "0.25.1",
105 | "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.1.tgz",
106 | "integrity": "sha512-hxVnwL2Dqs3fM1IWq8Iezh0cX7ZGdVhbTfnOy5uURtao5OIVCEyj9xIzemDi7sRvKsuSdtCAhMKarxqtlyVyfA==",
107 | "cpu": [
108 | "x64"
109 | ],
110 | "dev": true,
111 | "license": "MIT",
112 | "optional": true,
113 | "os": [
114 | "darwin"
115 | ],
116 | "engines": {
117 | "node": ">=18"
118 | }
119 | },
120 | "node_modules/@esbuild/freebsd-arm64": {
121 | "version": "0.25.1",
122 | "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.1.tgz",
123 | "integrity": "sha512-1MrCZs0fZa2g8E+FUo2ipw6jw5qqQiH+tERoS5fAfKnRx6NXH31tXBKI3VpmLijLH6yriMZsxJtaXUyFt/8Y4A==",
124 | "cpu": [
125 | "arm64"
126 | ],
127 | "dev": true,
128 | "license": "MIT",
129 | "optional": true,
130 | "os": [
131 | "freebsd"
132 | ],
133 | "engines": {
134 | "node": ">=18"
135 | }
136 | },
137 | "node_modules/@esbuild/freebsd-x64": {
138 | "version": "0.25.1",
139 | "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.1.tgz",
140 | "integrity": "sha512-0IZWLiTyz7nm0xuIs0q1Y3QWJC52R8aSXxe40VUxm6BB1RNmkODtW6LHvWRrGiICulcX7ZvyH6h5fqdLu4gkww==",
141 | "cpu": [
142 | "x64"
143 | ],
144 | "dev": true,
145 | "license": "MIT",
146 | "optional": true,
147 | "os": [
148 | "freebsd"
149 | ],
150 | "engines": {
151 | "node": ">=18"
152 | }
153 | },
154 | "node_modules/@esbuild/linux-arm": {
155 | "version": "0.25.1",
156 | "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.1.tgz",
157 | "integrity": "sha512-NdKOhS4u7JhDKw9G3cY6sWqFcnLITn6SqivVArbzIaf3cemShqfLGHYMx8Xlm/lBit3/5d7kXvriTUGa5YViuQ==",
158 | "cpu": [
159 | "arm"
160 | ],
161 | "dev": true,
162 | "license": "MIT",
163 | "optional": true,
164 | "os": [
165 | "linux"
166 | ],
167 | "engines": {
168 | "node": ">=18"
169 | }
170 | },
171 | "node_modules/@esbuild/linux-arm64": {
172 | "version": "0.25.1",
173 | "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.1.tgz",
174 | "integrity": "sha512-jaN3dHi0/DDPelk0nLcXRm1q7DNJpjXy7yWaWvbfkPvI+7XNSc/lDOnCLN7gzsyzgu6qSAmgSvP9oXAhP973uQ==",
175 | "cpu": [
176 | "arm64"
177 | ],
178 | "dev": true,
179 | "license": "MIT",
180 | "optional": true,
181 | "os": [
182 | "linux"
183 | ],
184 | "engines": {
185 | "node": ">=18"
186 | }
187 | },
188 | "node_modules/@esbuild/linux-ia32": {
189 | "version": "0.25.1",
190 | "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.1.tgz",
191 | "integrity": "sha512-OJykPaF4v8JidKNGz8c/q1lBO44sQNUQtq1KktJXdBLn1hPod5rE/Hko5ugKKZd+D2+o1a9MFGUEIUwO2YfgkQ==",
192 | "cpu": [
193 | "ia32"
194 | ],
195 | "dev": true,
196 | "license": "MIT",
197 | "optional": true,
198 | "os": [
199 | "linux"
200 | ],
201 | "engines": {
202 | "node": ">=18"
203 | }
204 | },
205 | "node_modules/@esbuild/linux-loong64": {
206 | "version": "0.25.1",
207 | "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.1.tgz",
208 | "integrity": "sha512-nGfornQj4dzcq5Vp835oM/o21UMlXzn79KobKlcs3Wz9smwiifknLy4xDCLUU0BWp7b/houtdrgUz7nOGnfIYg==",
209 | "cpu": [
210 | "loong64"
211 | ],
212 | "dev": true,
213 | "license": "MIT",
214 | "optional": true,
215 | "os": [
216 | "linux"
217 | ],
218 | "engines": {
219 | "node": ">=18"
220 | }
221 | },
222 | "node_modules/@esbuild/linux-mips64el": {
223 | "version": "0.25.1",
224 | "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.1.tgz",
225 | "integrity": "sha512-1osBbPEFYwIE5IVB/0g2X6i1qInZa1aIoj1TdL4AaAb55xIIgbg8Doq6a5BzYWgr+tEcDzYH67XVnTmUzL+nXg==",
226 | "cpu": [
227 | "mips64el"
228 | ],
229 | "dev": true,
230 | "license": "MIT",
231 | "optional": true,
232 | "os": [
233 | "linux"
234 | ],
235 | "engines": {
236 | "node": ">=18"
237 | }
238 | },
239 | "node_modules/@esbuild/linux-ppc64": {
240 | "version": "0.25.1",
241 | "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.1.tgz",
242 | "integrity": "sha512-/6VBJOwUf3TdTvJZ82qF3tbLuWsscd7/1w+D9LH0W/SqUgM5/JJD0lrJ1fVIfZsqB6RFmLCe0Xz3fmZc3WtyVg==",
243 | "cpu": [
244 | "ppc64"
245 | ],
246 | "dev": true,
247 | "license": "MIT",
248 | "optional": true,
249 | "os": [
250 | "linux"
251 | ],
252 | "engines": {
253 | "node": ">=18"
254 | }
255 | },
256 | "node_modules/@esbuild/linux-riscv64": {
257 | "version": "0.25.1",
258 | "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.1.tgz",
259 | "integrity": "sha512-nSut/Mx5gnilhcq2yIMLMe3Wl4FK5wx/o0QuuCLMtmJn+WeWYoEGDN1ipcN72g1WHsnIbxGXd4i/MF0gTcuAjQ==",
260 | "cpu": [
261 | "riscv64"
262 | ],
263 | "dev": true,
264 | "license": "MIT",
265 | "optional": true,
266 | "os": [
267 | "linux"
268 | ],
269 | "engines": {
270 | "node": ">=18"
271 | }
272 | },
273 | "node_modules/@esbuild/linux-s390x": {
274 | "version": "0.25.1",
275 | "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.1.tgz",
276 | "integrity": "sha512-cEECeLlJNfT8kZHqLarDBQso9a27o2Zd2AQ8USAEoGtejOrCYHNtKP8XQhMDJMtthdF4GBmjR2au3x1udADQQQ==",
277 | "cpu": [
278 | "s390x"
279 | ],
280 | "dev": true,
281 | "license": "MIT",
282 | "optional": true,
283 | "os": [
284 | "linux"
285 | ],
286 | "engines": {
287 | "node": ">=18"
288 | }
289 | },
290 | "node_modules/@esbuild/linux-x64": {
291 | "version": "0.25.1",
292 | "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.1.tgz",
293 | "integrity": "sha512-xbfUhu/gnvSEg+EGovRc+kjBAkrvtk38RlerAzQxvMzlB4fXpCFCeUAYzJvrnhFtdeyVCDANSjJvOvGYoeKzFA==",
294 | "cpu": [
295 | "x64"
296 | ],
297 | "dev": true,
298 | "license": "MIT",
299 | "optional": true,
300 | "os": [
301 | "linux"
302 | ],
303 | "engines": {
304 | "node": ">=18"
305 | }
306 | },
307 | "node_modules/@esbuild/netbsd-arm64": {
308 | "version": "0.25.1",
309 | "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.1.tgz",
310 | "integrity": "sha512-O96poM2XGhLtpTh+s4+nP7YCCAfb4tJNRVZHfIE7dgmax+yMP2WgMd2OecBuaATHKTHsLWHQeuaxMRnCsH8+5g==",
311 | "cpu": [
312 | "arm64"
313 | ],
314 | "dev": true,
315 | "license": "MIT",
316 | "optional": true,
317 | "os": [
318 | "netbsd"
319 | ],
320 | "engines": {
321 | "node": ">=18"
322 | }
323 | },
324 | "node_modules/@esbuild/netbsd-x64": {
325 | "version": "0.25.1",
326 | "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.1.tgz",
327 | "integrity": "sha512-X53z6uXip6KFXBQ+Krbx25XHV/NCbzryM6ehOAeAil7X7oa4XIq+394PWGnwaSQ2WRA0KI6PUO6hTO5zeF5ijA==",
328 | "cpu": [
329 | "x64"
330 | ],
331 | "dev": true,
332 | "license": "MIT",
333 | "optional": true,
334 | "os": [
335 | "netbsd"
336 | ],
337 | "engines": {
338 | "node": ">=18"
339 | }
340 | },
341 | "node_modules/@esbuild/openbsd-arm64": {
342 | "version": "0.25.1",
343 | "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.1.tgz",
344 | "integrity": "sha512-Na9T3szbXezdzM/Kfs3GcRQNjHzM6GzFBeU1/6IV/npKP5ORtp9zbQjvkDJ47s6BCgaAZnnnu/cY1x342+MvZg==",
345 | "cpu": [
346 | "arm64"
347 | ],
348 | "dev": true,
349 | "license": "MIT",
350 | "optional": true,
351 | "os": [
352 | "openbsd"
353 | ],
354 | "engines": {
355 | "node": ">=18"
356 | }
357 | },
358 | "node_modules/@esbuild/openbsd-x64": {
359 | "version": "0.25.1",
360 | "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.1.tgz",
361 | "integrity": "sha512-T3H78X2h1tszfRSf+txbt5aOp/e7TAz3ptVKu9Oyir3IAOFPGV6O9c2naym5TOriy1l0nNf6a4X5UXRZSGX/dw==",
362 | "cpu": [
363 | "x64"
364 | ],
365 | "dev": true,
366 | "license": "MIT",
367 | "optional": true,
368 | "os": [
369 | "openbsd"
370 | ],
371 | "engines": {
372 | "node": ">=18"
373 | }
374 | },
375 | "node_modules/@esbuild/sunos-x64": {
376 | "version": "0.25.1",
377 | "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.1.tgz",
378 | "integrity": "sha512-2H3RUvcmULO7dIE5EWJH8eubZAI4xw54H1ilJnRNZdeo8dTADEZ21w6J22XBkXqGJbe0+wnNJtw3UXRoLJnFEg==",
379 | "cpu": [
380 | "x64"
381 | ],
382 | "dev": true,
383 | "license": "MIT",
384 | "optional": true,
385 | "os": [
386 | "sunos"
387 | ],
388 | "engines": {
389 | "node": ">=18"
390 | }
391 | },
392 | "node_modules/@esbuild/win32-arm64": {
393 | "version": "0.25.1",
394 | "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.1.tgz",
395 | "integrity": "sha512-GE7XvrdOzrb+yVKB9KsRMq+7a2U/K5Cf/8grVFRAGJmfADr/e/ODQ134RK2/eeHqYV5eQRFxb1hY7Nr15fv1NQ==",
396 | "cpu": [
397 | "arm64"
398 | ],
399 | "dev": true,
400 | "license": "MIT",
401 | "optional": true,
402 | "os": [
403 | "win32"
404 | ],
405 | "engines": {
406 | "node": ">=18"
407 | }
408 | },
409 | "node_modules/@esbuild/win32-ia32": {
410 | "version": "0.25.1",
411 | "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.1.tgz",
412 | "integrity": "sha512-uOxSJCIcavSiT6UnBhBzE8wy3n0hOkJsBOzy7HDAuTDE++1DJMRRVCPGisULScHL+a/ZwdXPpXD3IyFKjA7K8A==",
413 | "cpu": [
414 | "ia32"
415 | ],
416 | "dev": true,
417 | "license": "MIT",
418 | "optional": true,
419 | "os": [
420 | "win32"
421 | ],
422 | "engines": {
423 | "node": ">=18"
424 | }
425 | },
426 | "node_modules/@esbuild/win32-x64": {
427 | "version": "0.25.1",
428 | "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.1.tgz",
429 | "integrity": "sha512-Y1EQdcfwMSeQN/ujR5VayLOJ1BHaK+ssyk0AEzPjC+t1lITgsnccPqFjb6V+LsTp/9Iov4ysfjxLaGJ9RPtkVg==",
430 | "cpu": [
431 | "x64"
432 | ],
433 | "dev": true,
434 | "license": "MIT",
435 | "optional": true,
436 | "os": [
437 | "win32"
438 | ],
439 | "engines": {
440 | "node": ">=18"
441 | }
442 | },
443 | "node_modules/@rollup/rollup-android-arm-eabi": {
444 | "version": "4.37.0",
445 | "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.37.0.tgz",
446 | "integrity": "sha512-l7StVw6WAa8l3vA1ov80jyetOAEo1FtHvZDbzXDO/02Sq/QVvqlHkYoFwDJPIMj0GKiistsBudfx5tGFnwYWDQ==",
447 | "cpu": [
448 | "arm"
449 | ],
450 | "dev": true,
451 | "license": "MIT",
452 | "optional": true,
453 | "os": [
454 | "android"
455 | ]
456 | },
457 | "node_modules/@rollup/rollup-android-arm64": {
458 | "version": "4.37.0",
459 | "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.37.0.tgz",
460 | "integrity": "sha512-6U3SlVyMxezt8Y+/iEBcbp945uZjJwjZimu76xoG7tO1av9VO691z8PkhzQ85ith2I8R2RddEPeSfcbyPfD4hA==",
461 | "cpu": [
462 | "arm64"
463 | ],
464 | "dev": true,
465 | "license": "MIT",
466 | "optional": true,
467 | "os": [
468 | "android"
469 | ]
470 | },
471 | "node_modules/@rollup/rollup-darwin-arm64": {
472 | "version": "4.37.0",
473 | "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.37.0.tgz",
474 | "integrity": "sha512-+iTQ5YHuGmPt10NTzEyMPbayiNTcOZDWsbxZYR1ZnmLnZxG17ivrPSWFO9j6GalY0+gV3Jtwrrs12DBscxnlYA==",
475 | "cpu": [
476 | "arm64"
477 | ],
478 | "dev": true,
479 | "license": "MIT",
480 | "optional": true,
481 | "os": [
482 | "darwin"
483 | ]
484 | },
485 | "node_modules/@rollup/rollup-darwin-x64": {
486 | "version": "4.37.0",
487 | "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.37.0.tgz",
488 | "integrity": "sha512-m8W2UbxLDcmRKVjgl5J/k4B8d7qX2EcJve3Sut7YGrQoPtCIQGPH5AMzuFvYRWZi0FVS0zEY4c8uttPfX6bwYQ==",
489 | "cpu": [
490 | "x64"
491 | ],
492 | "dev": true,
493 | "license": "MIT",
494 | "optional": true,
495 | "os": [
496 | "darwin"
497 | ]
498 | },
499 | "node_modules/@rollup/rollup-freebsd-arm64": {
500 | "version": "4.37.0",
501 | "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.37.0.tgz",
502 | "integrity": "sha512-FOMXGmH15OmtQWEt174v9P1JqqhlgYge/bUjIbiVD1nI1NeJ30HYT9SJlZMqdo1uQFyt9cz748F1BHghWaDnVA==",
503 | "cpu": [
504 | "arm64"
505 | ],
506 | "dev": true,
507 | "license": "MIT",
508 | "optional": true,
509 | "os": [
510 | "freebsd"
511 | ]
512 | },
513 | "node_modules/@rollup/rollup-freebsd-x64": {
514 | "version": "4.37.0",
515 | "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.37.0.tgz",
516 | "integrity": "sha512-SZMxNttjPKvV14Hjck5t70xS3l63sbVwl98g3FlVVx2YIDmfUIy29jQrsw06ewEYQ8lQSuY9mpAPlmgRD2iSsA==",
517 | "cpu": [
518 | "x64"
519 | ],
520 | "dev": true,
521 | "license": "MIT",
522 | "optional": true,
523 | "os": [
524 | "freebsd"
525 | ]
526 | },
527 | "node_modules/@rollup/rollup-linux-arm-gnueabihf": {
528 | "version": "4.37.0",
529 | "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.37.0.tgz",
530 | "integrity": "sha512-hhAALKJPidCwZcj+g+iN+38SIOkhK2a9bqtJR+EtyxrKKSt1ynCBeqrQy31z0oWU6thRZzdx53hVgEbRkuI19w==",
531 | "cpu": [
532 | "arm"
533 | ],
534 | "dev": true,
535 | "license": "MIT",
536 | "optional": true,
537 | "os": [
538 | "linux"
539 | ]
540 | },
541 | "node_modules/@rollup/rollup-linux-arm-musleabihf": {
542 | "version": "4.37.0",
543 | "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.37.0.tgz",
544 | "integrity": "sha512-jUb/kmn/Gd8epbHKEqkRAxq5c2EwRt0DqhSGWjPFxLeFvldFdHQs/n8lQ9x85oAeVb6bHcS8irhTJX2FCOd8Ag==",
545 | "cpu": [
546 | "arm"
547 | ],
548 | "dev": true,
549 | "license": "MIT",
550 | "optional": true,
551 | "os": [
552 | "linux"
553 | ]
554 | },
555 | "node_modules/@rollup/rollup-linux-arm64-gnu": {
556 | "version": "4.37.0",
557 | "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.37.0.tgz",
558 | "integrity": "sha512-oNrJxcQT9IcbcmKlkF+Yz2tmOxZgG9D9GRq+1OE6XCQwCVwxixYAa38Z8qqPzQvzt1FCfmrHX03E0pWoXm1DqA==",
559 | "cpu": [
560 | "arm64"
561 | ],
562 | "dev": true,
563 | "license": "MIT",
564 | "optional": true,
565 | "os": [
566 | "linux"
567 | ]
568 | },
569 | "node_modules/@rollup/rollup-linux-arm64-musl": {
570 | "version": "4.37.0",
571 | "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.37.0.tgz",
572 | "integrity": "sha512-pfxLBMls+28Ey2enpX3JvjEjaJMBX5XlPCZNGxj4kdJyHduPBXtxYeb8alo0a7bqOoWZW2uKynhHxF/MWoHaGQ==",
573 | "cpu": [
574 | "arm64"
575 | ],
576 | "dev": true,
577 | "license": "MIT",
578 | "optional": true,
579 | "os": [
580 | "linux"
581 | ]
582 | },
583 | "node_modules/@rollup/rollup-linux-loongarch64-gnu": {
584 | "version": "4.37.0",
585 | "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.37.0.tgz",
586 | "integrity": "sha512-yCE0NnutTC/7IGUq/PUHmoeZbIwq3KRh02e9SfFh7Vmc1Z7atuJRYWhRME5fKgT8aS20mwi1RyChA23qSyRGpA==",
587 | "cpu": [
588 | "loong64"
589 | ],
590 | "dev": true,
591 | "license": "MIT",
592 | "optional": true,
593 | "os": [
594 | "linux"
595 | ]
596 | },
597 | "node_modules/@rollup/rollup-linux-powerpc64le-gnu": {
598 | "version": "4.37.0",
599 | "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.37.0.tgz",
600 | "integrity": "sha512-NxcICptHk06E2Lh3a4Pu+2PEdZ6ahNHuK7o6Np9zcWkrBMuv21j10SQDJW3C9Yf/A/P7cutWoC/DptNLVsZ0VQ==",
601 | "cpu": [
602 | "ppc64"
603 | ],
604 | "dev": true,
605 | "license": "MIT",
606 | "optional": true,
607 | "os": [
608 | "linux"
609 | ]
610 | },
611 | "node_modules/@rollup/rollup-linux-riscv64-gnu": {
612 | "version": "4.37.0",
613 | "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.37.0.tgz",
614 | "integrity": "sha512-PpWwHMPCVpFZLTfLq7EWJWvrmEuLdGn1GMYcm5MV7PaRgwCEYJAwiN94uBuZev0/J/hFIIJCsYw4nLmXA9J7Pw==",
615 | "cpu": [
616 | "riscv64"
617 | ],
618 | "dev": true,
619 | "license": "MIT",
620 | "optional": true,
621 | "os": [
622 | "linux"
623 | ]
624 | },
625 | "node_modules/@rollup/rollup-linux-riscv64-musl": {
626 | "version": "4.37.0",
627 | "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.37.0.tgz",
628 | "integrity": "sha512-DTNwl6a3CfhGTAOYZ4KtYbdS8b+275LSLqJVJIrPa5/JuIufWWZ/QFvkxp52gpmguN95eujrM68ZG+zVxa8zHA==",
629 | "cpu": [
630 | "riscv64"
631 | ],
632 | "dev": true,
633 | "license": "MIT",
634 | "optional": true,
635 | "os": [
636 | "linux"
637 | ]
638 | },
639 | "node_modules/@rollup/rollup-linux-s390x-gnu": {
640 | "version": "4.37.0",
641 | "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.37.0.tgz",
642 | "integrity": "sha512-hZDDU5fgWvDdHFuExN1gBOhCuzo/8TMpidfOR+1cPZJflcEzXdCy1LjnklQdW8/Et9sryOPJAKAQRw8Jq7Tg+A==",
643 | "cpu": [
644 | "s390x"
645 | ],
646 | "dev": true,
647 | "license": "MIT",
648 | "optional": true,
649 | "os": [
650 | "linux"
651 | ]
652 | },
653 | "node_modules/@rollup/rollup-linux-x64-gnu": {
654 | "version": "4.37.0",
655 | "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.37.0.tgz",
656 | "integrity": "sha512-pKivGpgJM5g8dwj0ywBwe/HeVAUSuVVJhUTa/URXjxvoyTT/AxsLTAbkHkDHG7qQxLoW2s3apEIl26uUe08LVQ==",
657 | "cpu": [
658 | "x64"
659 | ],
660 | "dev": true,
661 | "license": "MIT",
662 | "optional": true,
663 | "os": [
664 | "linux"
665 | ]
666 | },
667 | "node_modules/@rollup/rollup-linux-x64-musl": {
668 | "version": "4.37.0",
669 | "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.37.0.tgz",
670 | "integrity": "sha512-E2lPrLKE8sQbY/2bEkVTGDEk4/49UYRVWgj90MY8yPjpnGBQ+Xi1Qnr7b7UIWw1NOggdFQFOLZ8+5CzCiz143w==",
671 | "cpu": [
672 | "x64"
673 | ],
674 | "dev": true,
675 | "license": "MIT",
676 | "optional": true,
677 | "os": [
678 | "linux"
679 | ]
680 | },
681 | "node_modules/@rollup/rollup-win32-arm64-msvc": {
682 | "version": "4.37.0",
683 | "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.37.0.tgz",
684 | "integrity": "sha512-Jm7biMazjNzTU4PrQtr7VS8ibeys9Pn29/1bm4ph7CP2kf21950LgN+BaE2mJ1QujnvOc6p54eWWiVvn05SOBg==",
685 | "cpu": [
686 | "arm64"
687 | ],
688 | "dev": true,
689 | "license": "MIT",
690 | "optional": true,
691 | "os": [
692 | "win32"
693 | ]
694 | },
695 | "node_modules/@rollup/rollup-win32-ia32-msvc": {
696 | "version": "4.37.0",
697 | "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.37.0.tgz",
698 | "integrity": "sha512-e3/1SFm1OjefWICB2Ucstg2dxYDkDTZGDYgwufcbsxTHyqQps1UQf33dFEChBNmeSsTOyrjw2JJq0zbG5GF6RA==",
699 | "cpu": [
700 | "ia32"
701 | ],
702 | "dev": true,
703 | "license": "MIT",
704 | "optional": true,
705 | "os": [
706 | "win32"
707 | ]
708 | },
709 | "node_modules/@rollup/rollup-win32-x64-msvc": {
710 | "version": "4.37.0",
711 | "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.37.0.tgz",
712 | "integrity": "sha512-LWbXUBwn/bcLx2sSsqy7pK5o+Nr+VCoRoAohfJ5C/aBio9nfJmGQqHAhU6pwxV/RmyTk5AqdySma7uwWGlmeuA==",
713 | "cpu": [
714 | "x64"
715 | ],
716 | "dev": true,
717 | "license": "MIT",
718 | "optional": true,
719 | "os": [
720 | "win32"
721 | ]
722 | },
723 | "node_modules/@types/estree": {
724 | "version": "1.0.6",
725 | "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz",
726 | "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==",
727 | "dev": true,
728 | "license": "MIT"
729 | },
730 | "node_modules/esbuild": {
731 | "version": "0.25.1",
732 | "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.1.tgz",
733 | "integrity": "sha512-BGO5LtrGC7vxnqucAe/rmvKdJllfGaYWdyABvyMoXQlfYMb2bbRuReWR5tEGE//4LcNJj9XrkovTqNYRFZHAMQ==",
734 | "dev": true,
735 | "hasInstallScript": true,
736 | "license": "MIT",
737 | "bin": {
738 | "esbuild": "bin/esbuild"
739 | },
740 | "engines": {
741 | "node": ">=18"
742 | },
743 | "optionalDependencies": {
744 | "@esbuild/aix-ppc64": "0.25.1",
745 | "@esbuild/android-arm": "0.25.1",
746 | "@esbuild/android-arm64": "0.25.1",
747 | "@esbuild/android-x64": "0.25.1",
748 | "@esbuild/darwin-arm64": "0.25.1",
749 | "@esbuild/darwin-x64": "0.25.1",
750 | "@esbuild/freebsd-arm64": "0.25.1",
751 | "@esbuild/freebsd-x64": "0.25.1",
752 | "@esbuild/linux-arm": "0.25.1",
753 | "@esbuild/linux-arm64": "0.25.1",
754 | "@esbuild/linux-ia32": "0.25.1",
755 | "@esbuild/linux-loong64": "0.25.1",
756 | "@esbuild/linux-mips64el": "0.25.1",
757 | "@esbuild/linux-ppc64": "0.25.1",
758 | "@esbuild/linux-riscv64": "0.25.1",
759 | "@esbuild/linux-s390x": "0.25.1",
760 | "@esbuild/linux-x64": "0.25.1",
761 | "@esbuild/netbsd-arm64": "0.25.1",
762 | "@esbuild/netbsd-x64": "0.25.1",
763 | "@esbuild/openbsd-arm64": "0.25.1",
764 | "@esbuild/openbsd-x64": "0.25.1",
765 | "@esbuild/sunos-x64": "0.25.1",
766 | "@esbuild/win32-arm64": "0.25.1",
767 | "@esbuild/win32-ia32": "0.25.1",
768 | "@esbuild/win32-x64": "0.25.1"
769 | }
770 | },
771 | "node_modules/fsevents": {
772 | "version": "2.3.3",
773 | "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
774 | "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
775 | "dev": true,
776 | "hasInstallScript": true,
777 | "license": "MIT",
778 | "optional": true,
779 | "os": [
780 | "darwin"
781 | ],
782 | "engines": {
783 | "node": "^8.16.0 || ^10.6.0 || >=11.0.0"
784 | }
785 | },
786 | "node_modules/gl-matrix": {
787 | "version": "3.4.3",
788 | "resolved": "https://registry.npmjs.org/gl-matrix/-/gl-matrix-3.4.3.tgz",
789 | "integrity": "sha512-wcCp8vu8FT22BnvKVPjXa/ICBWRq/zjFfdofZy1WSpQZpphblv12/bOQLBC1rMM7SGOFS9ltVmKOHil5+Ml7gA==",
790 | "license": "MIT"
791 | },
792 | "node_modules/nanoid": {
793 | "version": "3.3.11",
794 | "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz",
795 | "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==",
796 | "dev": true,
797 | "funding": [
798 | {
799 | "type": "github",
800 | "url": "https://github.com/sponsors/ai"
801 | }
802 | ],
803 | "license": "MIT",
804 | "bin": {
805 | "nanoid": "bin/nanoid.cjs"
806 | },
807 | "engines": {
808 | "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
809 | }
810 | },
811 | "node_modules/picocolors": {
812 | "version": "1.1.1",
813 | "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
814 | "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
815 | "dev": true,
816 | "license": "ISC"
817 | },
818 | "node_modules/postcss": {
819 | "version": "8.5.3",
820 | "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.3.tgz",
821 | "integrity": "sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==",
822 | "dev": true,
823 | "funding": [
824 | {
825 | "type": "opencollective",
826 | "url": "https://opencollective.com/postcss/"
827 | },
828 | {
829 | "type": "tidelift",
830 | "url": "https://tidelift.com/funding/github/npm/postcss"
831 | },
832 | {
833 | "type": "github",
834 | "url": "https://github.com/sponsors/ai"
835 | }
836 | ],
837 | "license": "MIT",
838 | "dependencies": {
839 | "nanoid": "^3.3.8",
840 | "picocolors": "^1.1.1",
841 | "source-map-js": "^1.2.1"
842 | },
843 | "engines": {
844 | "node": "^10 || ^12 || >=14"
845 | }
846 | },
847 | "node_modules/rollup": {
848 | "version": "4.37.0",
849 | "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.37.0.tgz",
850 | "integrity": "sha512-iAtQy/L4QFU+rTJ1YUjXqJOJzuwEghqWzCEYD2FEghT7Gsy1VdABntrO4CLopA5IkflTyqNiLNwPcOJ3S7UKLg==",
851 | "dev": true,
852 | "license": "MIT",
853 | "dependencies": {
854 | "@types/estree": "1.0.6"
855 | },
856 | "bin": {
857 | "rollup": "dist/bin/rollup"
858 | },
859 | "engines": {
860 | "node": ">=18.0.0",
861 | "npm": ">=8.0.0"
862 | },
863 | "optionalDependencies": {
864 | "@rollup/rollup-android-arm-eabi": "4.37.0",
865 | "@rollup/rollup-android-arm64": "4.37.0",
866 | "@rollup/rollup-darwin-arm64": "4.37.0",
867 | "@rollup/rollup-darwin-x64": "4.37.0",
868 | "@rollup/rollup-freebsd-arm64": "4.37.0",
869 | "@rollup/rollup-freebsd-x64": "4.37.0",
870 | "@rollup/rollup-linux-arm-gnueabihf": "4.37.0",
871 | "@rollup/rollup-linux-arm-musleabihf": "4.37.0",
872 | "@rollup/rollup-linux-arm64-gnu": "4.37.0",
873 | "@rollup/rollup-linux-arm64-musl": "4.37.0",
874 | "@rollup/rollup-linux-loongarch64-gnu": "4.37.0",
875 | "@rollup/rollup-linux-powerpc64le-gnu": "4.37.0",
876 | "@rollup/rollup-linux-riscv64-gnu": "4.37.0",
877 | "@rollup/rollup-linux-riscv64-musl": "4.37.0",
878 | "@rollup/rollup-linux-s390x-gnu": "4.37.0",
879 | "@rollup/rollup-linux-x64-gnu": "4.37.0",
880 | "@rollup/rollup-linux-x64-musl": "4.37.0",
881 | "@rollup/rollup-win32-arm64-msvc": "4.37.0",
882 | "@rollup/rollup-win32-ia32-msvc": "4.37.0",
883 | "@rollup/rollup-win32-x64-msvc": "4.37.0",
884 | "fsevents": "~2.3.2"
885 | }
886 | },
887 | "node_modules/source-map-js": {
888 | "version": "1.2.1",
889 | "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
890 | "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==",
891 | "dev": true,
892 | "license": "BSD-3-Clause",
893 | "engines": {
894 | "node": ">=0.10.0"
895 | }
896 | },
897 | "node_modules/typescript": {
898 | "version": "5.7.3",
899 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.3.tgz",
900 | "integrity": "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==",
901 | "dev": true,
902 | "license": "Apache-2.0",
903 | "bin": {
904 | "tsc": "bin/tsc",
905 | "tsserver": "bin/tsserver"
906 | },
907 | "engines": {
908 | "node": ">=14.17"
909 | }
910 | },
911 | "node_modules/vite": {
912 | "version": "6.2.3",
913 | "resolved": "https://registry.npmjs.org/vite/-/vite-6.2.3.tgz",
914 | "integrity": "sha512-IzwM54g4y9JA/xAeBPNaDXiBF8Jsgl3VBQ2YQ/wOY6fyW3xMdSoltIV3Bo59DErdqdE6RxUfv8W69DvUorE4Eg==",
915 | "dev": true,
916 | "license": "MIT",
917 | "dependencies": {
918 | "esbuild": "^0.25.0",
919 | "postcss": "^8.5.3",
920 | "rollup": "^4.30.1"
921 | },
922 | "bin": {
923 | "vite": "bin/vite.js"
924 | },
925 | "engines": {
926 | "node": "^18.0.0 || ^20.0.0 || >=22.0.0"
927 | },
928 | "funding": {
929 | "url": "https://github.com/vitejs/vite?sponsor=1"
930 | },
931 | "optionalDependencies": {
932 | "fsevents": "~2.3.3"
933 | },
934 | "peerDependencies": {
935 | "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0",
936 | "jiti": ">=1.21.0",
937 | "less": "*",
938 | "lightningcss": "^1.21.0",
939 | "sass": "*",
940 | "sass-embedded": "*",
941 | "stylus": "*",
942 | "sugarss": "*",
943 | "terser": "^5.16.0",
944 | "tsx": "^4.8.1",
945 | "yaml": "^2.4.2"
946 | },
947 | "peerDependenciesMeta": {
948 | "@types/node": {
949 | "optional": true
950 | },
951 | "jiti": {
952 | "optional": true
953 | },
954 | "less": {
955 | "optional": true
956 | },
957 | "lightningcss": {
958 | "optional": true
959 | },
960 | "sass": {
961 | "optional": true
962 | },
963 | "sass-embedded": {
964 | "optional": true
965 | },
966 | "stylus": {
967 | "optional": true
968 | },
969 | "sugarss": {
970 | "optional": true
971 | },
972 | "terser": {
973 | "optional": true
974 | },
975 | "tsx": {
976 | "optional": true
977 | },
978 | "yaml": {
979 | "optional": true
980 | }
981 | }
982 | }
983 | }
984 | }
985 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "svraster-webgl",
3 | "private": true,
4 | "version": "0.0.0",
5 | "type": "module",
6 | "scripts": {
7 | "dev": "vite",
8 | "build": "tsc && vite build",
9 | "preview": "vite preview"
10 | },
11 | "devDependencies": {
12 | "typescript": "~5.7.2",
13 | "vite": "^6.2.0"
14 | },
15 | "dependencies": {
16 | "gl-matrix": "^3.4.3"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/scripts/convert_to_ply.py:
--------------------------------------------------------------------------------
1 | import os
2 | import torch
3 | import argparse
4 | import numpy as np
5 | from plyfile import PlyData, PlyElement
6 | from tqdm import tqdm
7 | import multiprocessing
8 | from multiprocessing import Pool
9 | from functools import partial
10 |
11 | # Define constants
12 | MAX_NUM_LEVELS = 16
13 |
14 | def get_voxel_size(scene_extent, octlevel):
15 | '''The voxel size at the given levels.'''
16 | return np.ldexp(scene_extent, -octlevel)
17 |
18 | # Process a single octree path
19 | def process_single_path(params):
20 | """Process a single octree path to coordinates"""
21 | idx, path, lv = params
22 |
23 | # Shift right to remove irrelevant bits based on octlevel
24 | path >>= (3 * (MAX_NUM_LEVELS - lv))
25 |
26 | i, j, k = 0, 0, 0
27 | # Process each 3-bit group
28 | for l in range(lv):
29 | bits = path & 0b111
30 | i |= ((bits & 0b100) >> 2) << l
31 | j |= ((bits & 0b010) >> 1) << l
32 | k |= (bits & 0b001) << l
33 |
34 | path >>= 3
35 |
36 | return (i, j, k)
37 |
38 | def octpath_to_coords(octpath, octlevel, desc="Processing octree paths"):
39 | """Convert octree path to coordinates using parallel processing"""
40 | P = len(octpath)
41 |
42 | # Prepare parameters for each path
43 | params = [(idx, int(octpath[idx]), int(octlevel[idx])) for idx in range(P)]
44 |
45 | # Get number of CPU cores
46 | num_cores = multiprocessing.cpu_count()
47 |
48 | # Process in parallel
49 | with Pool(processes=num_cores) as pool:
50 | # Use imap to process in chunks for better progress bar
51 | results = list(tqdm(pool.imap(process_single_path, params, chunksize=max(1, P//num_cores)),
52 | total=P, desc=desc))
53 |
54 | # Convert results to numpy array
55 | coords = np.array(results, dtype=np.int64)
56 | return coords
57 |
58 | def octpath_decoding(octpath, octlevel, scene_center, scene_extent):
59 | '''Compute world-space voxel center positions using numpy'''
60 | octpath = octpath.reshape(-1)
61 | octlevel = octlevel.reshape(-1)
62 |
63 | scene_min_xyz = scene_center - 0.5 * scene_extent
64 | vox_size = get_voxel_size(scene_extent, octlevel.reshape(-1, 1))
65 | vox_ijk = octpath_to_coords(octpath, octlevel, desc="Computing voxel centers")
66 | vox_center = scene_min_xyz + (vox_ijk + 0.5) * vox_size
67 |
68 | return vox_center
69 |
70 | # Process a batch of grid points
71 | def process_grid_batch(batch_idx, vox_ijk, lv2max, subtree_shift):
72 | """Process a batch of grid points"""
73 | i = batch_idx
74 | shift = lv2max[i, 0]
75 | base = np.expand_dims(vox_ijk[i] << shift, axis=0)
76 | return base + (subtree_shift * (1 << shift))
77 |
78 | def link_grid_pts(octpath, octlevel):
79 | '''Build link between voxel and grid_pts using numpy unique'''
80 | # Binary encoding of the eight octants
81 | subtree_shift = np.array([
82 | [0, 0, 0], [0, 0, 1], [0, 1, 0], [0, 1, 1],
83 | [1, 0, 0], [1, 0, 1], [1, 1, 0], [1, 1, 1],
84 | ], dtype=np.int64)
85 |
86 | # Get all voxel coordinates
87 | vox_ijk = octpath_to_coords(octpath, octlevel, desc="Generating voxel coordinates")
88 |
89 | # Calculate shift for each voxel based on octlevel
90 | lv2max = (MAX_NUM_LEVELS - octlevel).reshape(-1, 1)
91 |
92 | # Create grid points array
93 | N = len(octpath)
94 |
95 | num_cores = multiprocessing.cpu_count()
96 | # Prepare the worker function with fixed arguments
97 | process_func = partial(process_grid_batch, vox_ijk=vox_ijk, lv2max=lv2max, subtree_shift=subtree_shift)
98 |
99 | # Process in parallel
100 | with Pool(processes=num_cores) as pool:
101 | results = list(tqdm(pool.imap(process_func, range(N), chunksize=max(1, N//num_cores)),
102 | total=N, desc="Generating grid point coordinates"))
103 |
104 | gridpts = np.array(results)
105 |
106 | # print("Processing grid points...")
107 | # gridpts = np.zeros((N, 8, 3), dtype=np.int64)
108 | # for i in tqdm(range(N), desc="Building grid points"):
109 | # shift = lv2max[i, 0]
110 | # base = np.expand_dims(vox_ijk[i] << shift, axis=0)
111 | # gridpts[i] = base + (subtree_shift * (1 << shift))
112 |
113 | # Reshape and find unique grid points
114 | gridpts_flat = gridpts.reshape(-1, 3)
115 | print("Mapping grid points to voxels ...")
116 |
117 | unique_coords, vox_key = np.unique(gridpts_flat, axis=0, return_inverse=True)
118 | vox_key = vox_key.reshape(N, 8)
119 |
120 | print(f"Total unique grid points: {len(unique_coords)}")
121 | return vox_key
122 |
123 | # Process grid values for a specific corner and range of voxels
124 | def process_grid_values_batch(params):
125 | """Process a batch of grid values"""
126 | i, start_idx, end_idx, vox_key, geo_grid_pts_len = params
127 |
128 | result = np.zeros(end_idx - start_idx, dtype=np.float32)
129 |
130 | for idx, vox_idx in enumerate(range(start_idx, end_idx)):
131 | grid_idx = vox_key[vox_idx, i]
132 | if grid_idx < geo_grid_pts_len:
133 | result[idx] = geo_grid_pts[grid_idx]
134 | else:
135 | result[idx] = 0.0
136 |
137 | return (start_idx, result)
138 |
139 | def convert_to_ply(input_path, output_path, use_cpu=False):
140 | """Convert a model.pt file to PLY format using numpy for memory efficiency"""
141 | print(f"Loading model from {input_path}...")
142 |
143 | # Load the state dictionary
144 | device = 'cpu'
145 | state_dict = torch.load(input_path, map_location=device)
146 |
147 | # Convert to numpy immediately
148 | if state_dict.get('quantized', False):
149 | raise NotImplementedError("Quantized models are not supported yet")
150 | else:
151 | _geo_grid_pts = state_dict['_geo_grid_pts']
152 | _sh0 = state_dict['_sh0']
153 | _shs = state_dict['_shs']
154 |
155 | # Convert everything to numpy
156 | scene_center = state_dict['scene_center'].cpu().numpy()
157 | scene_extent = float(state_dict['scene_extent'].item())
158 | active_sh_degree = state_dict['active_sh_degree']
159 |
160 | octpath = state_dict['octpath'].cpu().numpy().squeeze()
161 | octlevel = state_dict['octlevel'].cpu().numpy().squeeze()
162 |
163 | _geo_grid_pts = _geo_grid_pts.cpu().numpy().squeeze()
164 | sh0_np = _sh0.cpu().numpy()
165 | shs_np = _shs.reshape(_shs.shape[0], -1).cpu().numpy()
166 |
167 | # Calculate derived values
168 | vox_center = octpath_decoding(octpath, octlevel, scene_center, scene_extent)
169 | vox_key = link_grid_pts(octpath, octlevel)
170 |
171 | # Define attribute list
172 | attributes = [
173 | ('x', 'f4'), ('y', 'f4'), ('z', 'f4'), # positions
174 | ('octpath', 'u4'), ('octlevel', 'u1'), # octree data
175 | ('f_dc_0', 'f4'), ('f_dc_1', 'f4'), ('f_dc_2', 'f4') # DC components
176 | ]
177 |
178 | # Add rest of SH coefficients
179 | for i in range(shs_np.shape[1]):
180 | attributes.append((f'f_rest_{i}', 'f4'))
181 |
182 | # Add grid point values for each corner
183 | for i in range(8):
184 | attributes.append((f'grid{i}_value', 'f4'))
185 |
186 | # Create elements array
187 | elements = np.empty(len(vox_center), dtype=attributes)
188 |
189 | # Fill position and octree data
190 | elements['x'] = vox_center[:, 0]
191 | elements['y'] = vox_center[:, 1]
192 | elements['z'] = vox_center[:, 2]
193 | elements['octpath'] = octpath.astype(np.uint32)
194 | elements['octlevel'] = octlevel.astype(np.uint8)
195 |
196 | # Fill DC components
197 | elements['f_dc_0'] = sh0_np[:, 0]
198 | elements['f_dc_1'] = sh0_np[:, 1]
199 | elements['f_dc_2'] = sh0_np[:, 2]
200 |
201 | # Fill higher-order SH coefficients
202 | for i in range(shs_np.shape[1]):
203 | elements[f'f_rest_{i}'] = shs_np[:, i]
204 |
205 | # Shared variable for multiprocessing
206 | global geo_grid_pts
207 | geo_grid_pts = _geo_grid_pts
208 |
209 | # Fill the per-corner grid point values (simple serial loop over all voxels)
210 | N = len(vox_center)
211 | for i in tqdm(range(8), desc="Writing grid points to PLY data structure"):
212 | for vox_idx in range(len(vox_center)):
213 | grid_idx = vox_key[vox_idx, i]
214 | if grid_idx < len(_geo_grid_pts):
215 | elements[f'grid{i}_value'][vox_idx] = _geo_grid_pts[grid_idx]
216 | else:
217 | elements[f'grid{i}_value'][vox_idx] = 0.0
218 |
219 | # Add comments to the PLY file
220 | header_comments = []
221 | header_comments.append(f"scene_center {scene_center[0]} {scene_center[1]} {scene_center[2]}")
222 | header_comments.append(f"scene_extent {scene_extent}")
223 | header_comments.append(f"active_sh_degree {active_sh_degree}")
224 |
225 | # Write PLY file
226 | print(f"Writing PLY file to {output_path}...")
227 | el = PlyElement.describe(elements, 'vertex')
228 | PlyData([el], comments=header_comments).write(output_path)
229 | print(f"PLY file saved to: {output_path} with {len(vox_center)} points")
230 |
231 | def main():
232 | parser = argparse.ArgumentParser(description='Convert sparse voxel model PT file to PLY format')
233 | parser.add_argument('input_path', type=str, help='Path to the model.pt file')
234 | parser.add_argument('output_path', type=str, help='Path where to save the PLY file')
235 |
236 | args = parser.parse_args()
237 |
238 | convert_to_ply(args.input_path, args.output_path)
239 |
240 | if __name__ == "__main__":
241 | # This is important for multiprocessing on Windows
242 | multiprocessing.freeze_support()
243 | main()
--------------------------------------------------------------------------------
/src/lib/Camera.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * Camera class for WebGL rendering
3 | * Handles view and projection matrices
4 | */
5 | import { mat4, vec3 } from 'gl-matrix';
6 |
7 | export class Camera {
8 | // Camera matrices
9 | private viewMatrix: mat4 = mat4.create();
10 | private projectionMatrix: mat4 = mat4.create();
11 |
12 | // Camera properties
13 | private position: vec3 = vec3.fromValues(0, 0, 0);
14 | private target: vec3 = vec3.fromValues(0, 0, -1);
15 | private up: vec3 = vec3.fromValues(0, 1, 0);
16 |
17 | // Projection parameters
18 | private fieldOfView: number = 45 * Math.PI / 180; // in radians
19 | private aspectRatio: number = 1.0;
20 | private nearClip: number = 0.1;
21 | private farClip: number = 1000.0;
22 |
23 | constructor() {
24 | this.updateViewMatrix();
25 | this.updateProjectionMatrix();
26 | }
27 |
28 | /**
29 | * Updates the view matrix based on current camera properties
30 | */
31 | private updateViewMatrix(): void {
32 | mat4.lookAt(this.viewMatrix, this.position, this.target, this.up);
33 | }
34 |
35 | /**
36 | * Updates the projection matrix based on current camera properties
37 | */
38 | private updateProjectionMatrix(): void {
39 | mat4.perspective(
40 | this.projectionMatrix,
41 | this.fieldOfView,
42 | this.aspectRatio,
43 | this.nearClip,
44 | this.farClip
45 | );
46 | }
47 |
48 | /**
49 | * Sets the camera position
50 | */
51 | public setPosition(x: number, y: number, z: number): void {
52 | vec3.set(this.position, x, y, z);
53 | this.updateViewMatrix();
54 | }
55 |
56 | /**
57 | * Sets the camera target/look-at point
58 | */
59 | public setTarget(x: number, y: number, z: number): void {
60 | vec3.set(this.target, x, y, z);
61 | this.updateViewMatrix();
62 | }
63 |
64 | /**
65 | * Sets the camera's up vector
66 | */
67 | public setUp(x: number, y: number, z: number): void {
68 | vec3.set(this.up, x, y, z);
69 | this.updateViewMatrix();
70 | }
71 |
72 | /**
73 | * Sets the camera's field of view
74 | * @param fovInRadians Field of view in radians
75 | */
76 | public setFieldOfView(fovInRadians: number): void {
77 | this.fieldOfView = fovInRadians;
78 | this.updateProjectionMatrix();
79 | }
80 |
81 | /**
82 | * Sets the aspect ratio (width/height)
83 | */
84 | public setAspectRatio(aspect: number): void {
85 | this.aspectRatio = aspect;
86 | this.updateProjectionMatrix();
87 | }
88 |
89 | /**
90 | * Sets the near and far clipping planes
91 | */
92 | public setClippingPlanes(near: number, far: number): void {
93 | this.nearClip = near;
94 | this.farClip = far;
95 | this.updateProjectionMatrix();
96 | }
97 |
98 | /**
99 | * Get the view matrix
100 | */
101 | public getViewMatrix(): mat4 {
102 | return this.viewMatrix;
103 | }
104 |
105 | /**
106 | * Get the projection matrix
107 | */
108 | public getProjectionMatrix(): mat4 {
109 | return this.projectionMatrix;
110 | }
111 |
112 | public getPosition(): vec3 {
113 | return this.position;
114 | }
115 |
116 | public getTarget(): vec3 {
117 | return this.target;
118 | }
119 |
120 | /**
121 | * Orbit the camera around the target
122 | * @param angleY Angle in radians to rotate around Y axis
123 | */
124 | public orbit(angleY: number): void {
125 | // Calculate direction vector from target to position
126 | const direction = vec3.create();
127 | vec3.subtract(direction, this.position, this.target);
128 |
129 | // Rotate around Y axis
130 | const rotatedDirection = vec3.create();
131 | vec3.rotateY(rotatedDirection, direction, [0, 0, 0], angleY);
132 |
133 | // Update position based on rotated direction
134 | vec3.add(this.position, this.target, rotatedDirection);
135 |
136 | this.updateViewMatrix();
137 | }
138 | }
139 |
--------------------------------------------------------------------------------
/src/lib/DistanceSorter.ts:
--------------------------------------------------------------------------------
1 | export class DistanceSorter {
2 | // Static arrays that will be reused between calls
3 | private static BUCKET_COUNT = 256 * 256;
4 | private static counts: Uint32Array | null = null;
5 | private static starts: Uint32Array | null = null;
6 | private static lastCameraPosition: [number, number, number] | null = null;
7 |
8 | /**
9 | * Sorts voxels based on their distance from the camera position
10 | * @param positions Float32Array of voxel positions [x1,y1,z1,x2,y2,z2,...]
11 | * @param cameraPosition [x,y,z] position of the camera
12 | * @param outIndices Optional pre-allocated array to store the result
13 | * @returns Uint32Array of sorted indices
14 | */
15 | static sortVoxels(
16 | positions: Float32Array,
17 | cameraPosition: [number, number, number],
18 | outIndices?: Uint32Array
19 | ): Uint32Array {
20 | const numVoxels = positions.length / 3;
21 |
22 | // Early exit if camera hasn't moved significantly and number of voxels hasn't changed
23 | if (this.lastCameraPosition && outIndices && outIndices.length === numVoxels) {
24 | const [lastX, lastY, lastZ] = this.lastCameraPosition;
25 | const [x, y, z] = cameraPosition;
26 | const dot = lastX * x + lastY * y + lastZ * z;
27 | const lenSq1 = lastX * lastX + lastY * lastY + lastZ * lastZ;
28 | const lenSq2 = x * x + y * y + z * z;
29 |
30 | // If camera direction hasn't changed significantly, reuse last sort
31 | if (Math.abs(dot / Math.sqrt(lenSq1 * lenSq2) - 1) < 0.01) {
32 | return outIndices;
33 | }
34 | }
35 |
36 | // Initialize or reuse output array
37 | const indices = (outIndices && outIndices.length === numVoxels)
38 | ? outIndices
39 | : new Uint32Array(numVoxels);
40 |
41 | // Initialize static arrays if needed
42 | if (!this.counts || !this.starts || this.counts.length !== this.BUCKET_COUNT) {
43 | this.counts = new Uint32Array(this.BUCKET_COUNT);
44 | this.starts = new Uint32Array(this.BUCKET_COUNT);
45 | } else {
46 | // Clear counts array
47 | this.counts.fill(0);
48 | }
49 |
50 | // Find min/max distances
51 | let minDist = Infinity;
52 | let maxDist = -Infinity;
53 |
54 | // Calculate and store log distances directly instead of squared distances
55 | const logDistances = new Float32Array(numVoxels);
56 |
57 | // Compute squared distances and their logarithms
58 | for (let i = 0; i < numVoxels; i++) {
59 | const x = positions[i * 3];
60 | const y = positions[i * 3 + 1];
61 | const z = positions[i * 3 + 2];
62 |
63 | const dx = x - cameraPosition[0];
64 | const dy = y - cameraPosition[1];
65 | const dz = z - cameraPosition[2];
66 | const distanceSquared = dx * dx + dy * dy + dz * dz;
67 |
68 | // Store the log of the distance directly
69 | const logDist = Math.log(distanceSquared);
70 | logDistances[i] = logDist;
71 |
72 | if (logDist < minDist) minDist = logDist;
73 | if (logDist > maxDist) maxDist = logDist;
74 | }
75 |
76 | const logRange = maxDist - minDist;
77 |
78 | // Scale factor for logarithmic mapping
79 | const distInv = (this.BUCKET_COUNT - 1) / (logRange || 1);
80 |
81 | // Count occurrences of each bucket using log scale (first pass)
82 | for (let i = 0; i < numVoxels; i++) {
83 | // Reuse the pre-calculated log distance
84 | const bucketIndex = Math.min(
85 | this.BUCKET_COUNT - 1,
86 | ((logDistances[i] - minDist) * distInv) | 0
87 | );
88 | this.counts![bucketIndex]++;
89 | }
90 |
91 | // Calculate bucket positions with farthest buckets first
92 | let position = 0;
93 | const bucketStarts = this.starts!; // reuse the preallocated static array instead of reallocating each call
94 | for (let i = this.BUCKET_COUNT - 1; i >= 0; i--) {
95 | bucketStarts[i] = position;
96 | position += this.counts![i];
97 | }
98 |
99 | // Reset counts for the second pass
100 | this.counts!.fill(0);
101 |
102 | // Distribute indices in back-to-front order
103 | for (let i = 0; i < numVoxels; i++) {
104 | // Reuse the pre-calculated log distance again
105 | const bucketIndex = Math.min(
106 | this.BUCKET_COUNT - 1,
107 | ((logDistances[i] - minDist) * distInv) | 0
108 | );
109 |
110 | // Calculate the position for this voxel
111 | const pos = bucketStarts[bucketIndex] + this.counts![bucketIndex];
112 | indices[pos] = i;
113 | this.counts![bucketIndex]++;
114 | }
115 |
116 | // Store camera position for next call
117 | this.lastCameraPosition = [...cameraPosition];
118 |
119 | return indices;
120 | }
121 | }
--------------------------------------------------------------------------------
/src/lib/LoadPLY.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * LoadPLY utility class for loading specialized binary PLY files with f_dc color data
3 | */
4 | export interface PLYData {
5 | vertices: Float32Array; // Position data (x, y, z)
6 | sh0Values: Float32Array; // Color data from f_dc fields
7 | octlevels: Uint8Array; // Octlevel data for scaling
8 | octpaths: Uint32Array; // Octpath data
9 | shRestValues: Float32Array | undefined; // f_rest values (0-8)
10 | gridValues: Float32Array; // grid point density values (0-7)
11 | vertexCount: number;
12 | sceneCenter: [number, number, number]; // scene center
13 | sceneExtent: number; // scene extent as a single value
14 | }
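// Usage sketch (illustrative only; the URL and progress handling below are hypothetical):
//
//   const ply = await LoadPLY.loadFromUrl('/scene.ply', p => console.log(`${(p * 100) | 0}%`));
//   console.log(ply.vertexCount, ply.sceneCenter, ply.sceneExtent);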
15 |
16 | export class LoadPLY {
17 | /**
18 | * Load a PLY file from a URL
19 | */
20 |   public static async loadFromUrl(url: string, onProgress?: (progress: number) => void): Promise<PLYData> {
21 | const response = await fetch(url);
22 | if (!response.ok) {
23 | throw new Error(`Failed to load PLY file: ${response.statusText}`);
24 | }
25 |
26 | const contentLength = parseInt(response.headers.get('Content-Length') || '0');
27 | const reader = response.body?.getReader();
28 | if (!reader) {
29 | throw new Error('Failed to get response reader');
30 | }
31 |
32 | // Read the response as chunks
33 | const chunks: Uint8Array[] = [];
34 | let receivedLength = 0;
35 |
36 | while (true) {
37 | const { done, value } = await reader.read();
38 | if (done) break;
39 |
40 | chunks.push(value);
41 | receivedLength += value.length;
42 |
43 | if (contentLength && onProgress) {
44 | onProgress(receivedLength / contentLength);
45 | }
46 | }
47 |
48 | // Combine all chunks into a single array buffer
49 | const arrayBuffer = new ArrayBuffer(receivedLength);
50 | const view = new Uint8Array(arrayBuffer);
51 | let position = 0;
52 | for (const chunk of chunks) {
53 | view.set(chunk, position);
54 | position += chunk.length;
55 | }
56 |
57 | return LoadPLY.parse(arrayBuffer);
58 | }
59 |
60 | /**
61 | * Load a PLY file from a File object
62 | */
63 |   public static async loadFromFile(file: File): Promise<PLYData> {
64 |     return new Promise<PLYData>((resolve, reject) => {
65 | const reader = new FileReader();
66 |
67 | reader.onload = (event) => {
68 | if (!event.target || !event.target.result) {
69 | reject(new Error('Failed to read file'));
70 | return;
71 | }
72 |
73 | try {
74 | const arrayBuffer = event.target.result as ArrayBuffer;
75 | const plyData = LoadPLY.parse(arrayBuffer);
76 | resolve(plyData);
77 | } catch (error: any) {
78 | reject(new Error(`Failed to parse PLY: ${error.message}`));
79 | }
80 | };
81 |
82 | reader.onerror = () => {
83 | reject(new Error('Error reading file'));
84 | };
85 |
86 | reader.readAsArrayBuffer(file);
87 | });
88 | }
89 |
90 | /**
91 | * Parse a binary PLY file with the specified format
92 | */
93 | private static parse(arrayBuffer: ArrayBuffer): PLYData {
94 | // First, get the header as text
95 | const textDecoder = new TextDecoder();
96 |
97 | // Read the first chunk of data to get the header
98 | const headerView = new Uint8Array(arrayBuffer, 0, Math.min(10000, arrayBuffer.byteLength));
99 | const headerText = textDecoder.decode(headerView);
100 |
101 | // Find the end of the header
102 | const headerEndIndex = headerText.indexOf('end_header');
103 | if (headerEndIndex === -1) {
104 | throw new Error('Invalid PLY file: Missing end_header');
105 | }
106 |
107 | const header = headerText.substring(0, headerEndIndex);
108 | const lines = header.split('\n');
109 |
110 | // Extract scene center and extent from comments
111 | let sceneCenter: [number, number, number] = [0, 0, 0];
112 | let sceneExtent: number = 3.0;
113 |
114 | for (const line of lines) {
115 | const trimmed = line.trim();
116 |
117 | // Look for scene center in comments
118 | if (trimmed.startsWith('comment scene_center ')) {
119 | const parts = trimmed.substring('comment scene_center '.length).trim().split(/\s+/);
120 | if (parts.length >= 3) {
121 | sceneCenter = [
122 | parseFloat(parts[0]),
123 | parseFloat(parts[1]),
124 | parseFloat(parts[2])
125 | ];
126 | console.log(`Found scene center: [${sceneCenter}]`);
127 | }
128 | }
129 | // Look for scene extent in comments (single value)
130 | if (trimmed.startsWith('comment scene_extent ')) {
131 | const value = parseFloat(trimmed.substring('comment scene_extent '.length).trim());
132 | if (!isNaN(value)) {
133 | sceneExtent = value;
134 | console.log(`Found scene extent: ${sceneExtent}`);
135 | }
136 | }
137 | }
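    // Example header comments this loop recognizes (values are illustrative):
    //   comment scene_center 0.0 1.5 -2.0
    //   comment scene_extent 4.0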
138 |
139 | // Check that this is a valid PLY file
140 | if (lines[0].trim() !== 'ply') {
141 | throw new Error('Invalid PLY file: Does not start with "ply"');
142 | }
143 |
144 | // Verify binary format
145 | const formatLine = lines.find(line => line.trim().startsWith('format'));
146 | if (!formatLine || !formatLine.includes('binary_little_endian')) {
147 | throw new Error('Only binary_little_endian format is supported');
148 | }
149 |
150 | // Get vertex count
151 | const vertexLine = lines.find(line => line.trim().startsWith('element vertex'));
152 | if (!vertexLine) {
153 | throw new Error('Invalid PLY file: Missing element vertex');
154 | }
155 |
156 | const vertexCount = parseInt(vertexLine.split(/\s+/)[2], 10);
157 | console.log(`Vertex count: ${vertexCount}`);
158 |
159 | // Collect property information
160 | const propertyLines = lines.filter(line => line.trim().startsWith('property') && !line.includes('list'));
161 | const properties = propertyLines.map(line => {
162 | const parts = line.trim().split(/\s+/);
163 | return {
164 | type: parts[1],
165 | name: parts[2]
166 | };
167 | });
168 |
169 | // Check for required properties
170 | if (!properties.some(p => p.name === 'x') ||
171 | !properties.some(p => p.name === 'y') ||
172 | !properties.some(p => p.name === 'z')) {
173 | throw new Error('PLY file missing required position properties (x, y, z)');
174 | }
175 |
176 | if (!properties.some(p => p.name === 'f_dc_0') ||
177 | !properties.some(p => p.name === 'f_dc_1') ||
178 | !properties.some(p => p.name === 'f_dc_2')) {
179 | throw new Error('PLY file missing required color properties (f_dc_0, f_dc_1, f_dc_2)');
180 | }
181 |
182 | // Check if the file has octlevel property
183 | const hasOctlevel = properties.some(p => p.name === 'octlevel');
184 | let octlevels: Uint8Array;
185 |
186 | if (hasOctlevel) {
187 | octlevels = new Uint8Array(vertexCount);
188 | console.log('File has octlevel property');
189 | } else {
190 | throw new Error('PLY file missing required octlevel property');
191 | }
192 |
193 | // Check if the file has octpath property
194 | const hasOctpath = properties.some(p => p.name === 'octpath');
195 | let octpaths: Uint32Array;
196 |
197 | if (hasOctpath) {
198 | octpaths = new Uint32Array(vertexCount);
199 | console.log('File has octpath property');
200 | } else {
201 | throw new Error('PLY file missing required octpath property');
202 | }
203 |
204 | // Check if the file has f_rest properties
205 | const hasRestValues = properties.some(p => p.name.startsWith('f_rest_'));
206 | let restValues: Float32Array | undefined;
207 |
208 | if (hasRestValues) {
209 | // Count how many f_rest properties are present (should be 9 from f_rest_0 to f_rest_8)
210 | const restCount = properties.filter(p => p.name.startsWith('f_rest_')).length;
211 | restValues = new Float32Array(vertexCount * restCount);
212 | console.log(`File has ${restCount} f_rest properties`);
213 | } else {
214 | console.log('PLY file missing f_rest values. No directional lighting will be visible.');
215 | restValues = undefined;
216 | }
217 |
218 | // Check if the file has grid value properties
219 | const hasGridValues = properties.some(p => p.name.includes('grid') && p.name.includes('_value'));
220 | let gridValues: Float32Array;
221 |
222 | if (hasGridValues) {
223 | // Count grid properties (should be 8: grid0_value to grid7_value)
224 | const gridCount = properties.filter(p => p.name.includes('grid') && p.name.includes('_value')).length;
225 | gridValues = new Float32Array(vertexCount * gridCount);
226 | console.log(`File has ${gridCount} grid value properties`);
227 | } else {
228 | throw new Error('PLY file missing required grid value properties');
229 | }
230 |
231 | // Calculate data offsets for binary reading
232 | const propertyOffsets: { [key: string]: number } = {};
233 | const propertySizes: { [key: string]: number } = {};
234 | let currentOffset = 0;
235 |
236 | for (const prop of properties) {
237 | propertyOffsets[prop.name] = currentOffset;
238 |
239 | switch (prop.type) {
240 | case 'char':
241 | case 'uchar':
242 | propertySizes[prop.name] = 1;
243 | break;
244 | case 'short':
245 | case 'ushort':
246 | propertySizes[prop.name] = 2;
247 | break;
248 | case 'int':
249 | case 'uint':
250 | case 'float':
251 | propertySizes[prop.name] = 4;
252 | break;
253 | case 'double':
254 | propertySizes[prop.name] = 8;
255 | break;
256 | default:
257 | propertySizes[prop.name] = 4; // Default to 4 bytes
258 | }
259 |
260 | currentOffset += propertySizes[prop.name];
261 | }
262 |
263 | const vertexSize = currentOffset;
264 | console.log(`Vertex size: ${vertexSize} bytes`);
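    // Worked example (hypothetical layout): for a header declaring
    //   float x, float y, float z, float f_dc_0, float f_dc_1, float f_dc_2, uchar octlevel, uint octpath
    // the computed offsets are x:0, y:4, z:8, f_dc_0:12, f_dc_1:16, f_dc_2:20, octlevel:24, octpath:25,
    // for a vertex size of 29 bytes.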
265 |
266 | // Calculate the start of the data section
267 | const dataOffset = headerEndIndex + 'end_header'.length + 1; // +1 for the newline
268 |
269 | // Prepare arrays for the data
270 | const vertices = new Float32Array(vertexCount * 3); // x, y, z for each vertex
271 | const sh0s = new Float32Array(vertexCount * 3); // r, g, b for each vertex
272 |
273 | // Create a DataView for binary reading
274 | const dataView = new DataView(arrayBuffer);
275 |
276 | // Process each vertex
277 | for (let i = 0; i < vertexCount; i++) {
278 | const vertexOffset = dataOffset + (i * vertexSize);
279 | const vertexIndex = i * 3;
280 | const colorIndex = i * 3;
281 |
282 | // Read position (x, y, z)
283 | vertices[vertexIndex] = dataView.getFloat32(vertexOffset + propertyOffsets['x'], true);
284 | vertices[vertexIndex + 1] = dataView.getFloat32(vertexOffset + propertyOffsets['y'], true);
285 | vertices[vertexIndex + 2] = dataView.getFloat32(vertexOffset + propertyOffsets['z'], true);
286 |
287 | // Read sh0s (f_dc_0, f_dc_1, f_dc_2)
288 | sh0s[colorIndex] = dataView.getFloat32(vertexOffset + propertyOffsets['f_dc_0'], true);
289 | sh0s[colorIndex + 1] = dataView.getFloat32(vertexOffset + propertyOffsets['f_dc_1'], true);
290 | sh0s[colorIndex + 2] = dataView.getFloat32(vertexOffset + propertyOffsets['f_dc_2'], true);
291 |
292 | // Read octlevel if present
293 | if (hasOctlevel && octlevels && propertyOffsets['octlevel'] !== undefined) {
294 | octlevels[i] = dataView.getUint8(vertexOffset + propertyOffsets['octlevel']);
295 | }
296 |
297 | // Read octpath if present
298 | if (hasOctpath && octpaths && propertyOffsets['octpath'] !== undefined) {
299 | octpaths[i] = dataView.getUint32(vertexOffset + propertyOffsets['octpath'], true);
300 | }
301 |
302 | // Read grid values if present
303 | if (hasGridValues && gridValues) {
304 | const gridCount = properties.filter(p => p.name.includes('grid') && p.name.includes('_value')).length;
305 | for (let g = 0; g < gridCount; g++) {
306 | const propName = `grid${g}_value`;
307 | if (propertyOffsets[propName] !== undefined) {
308 | gridValues[i * gridCount + g] = dataView.getFloat32(vertexOffset + propertyOffsets[propName], true);
309 | }
310 | }
311 | }
312 |
313 | // Read f_rest values if present
314 | if (hasRestValues && restValues) {
315 | const restCount = properties.filter(p => p.name.startsWith('f_rest_')).length;
316 | for (let r = 0; r < restCount; r++) {
317 | const propName = `f_rest_${r}`;
318 | if (propertyOffsets[propName] !== undefined) {
319 | restValues[i * restCount + r] = dataView.getFloat32(vertexOffset + propertyOffsets[propName], true);
320 | }
321 | }
322 | }
323 |
324 |
325 | // For debugging, log a few vertices
326 | if (i < 5) {
327 | console.log(`Vertex ${i}: (${vertices[vertexIndex]}, ${vertices[vertexIndex + 1]}, ${vertices[vertexIndex + 2]})`);
328 | console.log(`SH0 ${i}: (${sh0s[colorIndex]}, ${sh0s[colorIndex + 1]}, ${sh0s[colorIndex + 2]})`);
329 | }
330 | }
331 |
332 | return {
333 | vertices,
334 | sh0Values: sh0s,
335 | octlevels,
336 | octpaths,
337 | shRestValues: restValues,
338 | gridValues,
339 | vertexCount,
340 | sceneCenter,
341 | sceneExtent
342 | };
343 | }
344 | }
--------------------------------------------------------------------------------
/src/lib/MortonSorter.ts:
--------------------------------------------------------------------------------
1 | /**
2 | *
3 | * NOTE: This file is complete AI SLOP right now. It doesn't work and doesn't follow the paper.
4 | *
5 | * MortonSorter class for sorting voxels in correct back-to-front order
6 | * Based on direction-dependent Morton order sorting as described in the paper
7 | */
8 | export class MortonSorter {
9 | // Maps ray sign bits to the appropriate permutation mapping
10 | // The 8 permutations of Morton order based on ray direction
11 | private static readonly PERMUTATION_MAPS = [
12 | [0, 1, 2, 3, 4, 5, 6, 7], // [+x, +y, +z] -> [0b000]
13 | [1, 0, 3, 2, 5, 4, 7, 6], // [+x, +y, -z] -> [0b001]
14 | [2, 3, 0, 1, 6, 7, 4, 5], // [+x, -y, +z] -> [0b010]
15 | [3, 2, 1, 0, 7, 6, 5, 4], // [+x, -y, -z] -> [0b011]
16 | [4, 5, 6, 7, 0, 1, 2, 3], // [-x, +y, +z] -> [0b100]
17 | [5, 4, 7, 6, 1, 0, 3, 2], // [-x, +y, -z] -> [0b101]
18 | [6, 7, 4, 5, 2, 3, 0, 1], // [-x, -y, +z] -> [0b110]
19 | [7, 6, 5, 4, 3, 2, 1, 0] // [-x, -y, -z] -> [0b111]
20 | ];
21 |
22 | /**
23 | * Compute the ray sign bits (3 bits) from the ray direction
24 | * @param rayDirection The normalized ray direction vector [x, y, z]
25 | * @returns A number 0-7 representing the ray sign bits
26 | */
27 | public static getRaySignBits(rayDirection: [number, number, number]): number {
28 | const [x, y, z] = rayDirection;
29 | let signBits = 0;
30 |
31 | // Create a 3-bit code where each bit represents the sign of x, y, z
32 | // Negative: 1, Positive: 0
33 | if (x < 0) signBits |= 0b100;
34 | if (y < 0) signBits |= 0b010;
35 | if (z < 0) signBits |= 0b001;
36 |
37 | return signBits;
38 | }
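  // Example: a ray direction of (0.3, -0.5, 0.2) has only y negative,
  // so getRaySignBits returns 0b010 = 2.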
39 |
40 | /**
41 | * Extracts a morton code from an octpath value and level
42 | * @param octpath The octpath value storing the path through the octree
43 | * @param octlevel The octree level (depth)
44 | * @returns The morton code as a BigInt
45 | */
46 | public static octpathToMorton(octpath: number, octlevel: number): bigint {
47 | let result = 0n;
48 |
49 | // Each level is represented by 3 bits in the octpath
50 | // We extract these bits from the octpath for each level
51 | for (let level = 0; level < octlevel; level++) {
52 | // Shift bits to get the 3 bits for this level (from most to least significant)
53 | // i.e., first level is top 3 bits, second level is next 3 bits, etc.
54 | const shift = 3 * (octlevel - 1 - level);
55 | const levelBits = (octpath >> shift) & 0b111;
56 |
57 | // Add these bits to our morton code
58 | result |= BigInt(levelBits) << BigInt(level * 3);
59 | }
60 |
61 | return result;
62 | }
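  // Worked example: octpath = 0b011101 with octlevel = 2 splits into the root octant
  // 0b011 (top 3 bits) and the child octant 0b101, so the loop produces
  // 0b011 | (0b101 << 3) = 0b101011 = 43n, i.e. the root-level octant ends up in the
  // lowest 3 bits of the Morton code.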
63 |
64 | /**
65 | * Computes a Morton code from 3D coordinates
66 | * @param x X coordinate (normalized 0-1)
67 | * @param y Y coordinate (normalized 0-1)
68 | * @param z Z coordinate (normalized 0-1)
69 | * @param bits Number of bits per dimension
70 | * @returns A Morton code as a BigInt
71 | */
72 | public static mortonCode(x: number, y: number, z: number, bits: number = 16): bigint {
73 | // Normalize coordinates to integers (0 to 2^bits-1)
74 | const scale = (1 << bits) - 1;
75 | const ix = Math.min(Math.max(Math.floor(x * scale), 0), scale);
76 | const iy = Math.min(Math.max(Math.floor(y * scale), 0), scale);
77 | const iz = Math.min(Math.max(Math.floor(z * scale), 0), scale);
78 |
79 | // Convert to BigInt for operations
80 | let bx = BigInt(ix);
81 | let by = BigInt(iy);
82 | let bz = BigInt(iz);
83 |
84 | // Spread the bits
85 | bx = this.spreadBits(bx, bits);
86 | by = this.spreadBits(by, bits);
87 | bz = this.spreadBits(bz, bits);
88 |
89 | // Interleave bits (x in bit positions 2, 5, 8...)
90 | // y in positions 1, 4, 7..., and z in 0, 3, 6...
91 | return (bx << 2n) | (by << 1n) | bz;
92 | }
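  // Small worked example with bits = 2: (x, y, z) = (2/3, 1/3, 1.0) quantizes to
  // (ix, iy, iz) = (2, 1, 3); interleaving places the x bits at positions 2 and 5,
  // y at 1 and 4, z at 0 and 3, giving 0b101011 = 43n.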
93 |
94 | /**
95 | * Spreads the bits of a value apart to make room for interleaving
96 | * @param val The value to spread
97 | * @param bits Number of bits in the original value
98 | * @returns The value with bits spread apart
99 | */
100 | private static spreadBits(val: bigint, bits: number): bigint {
101 |     // Process the value in groups of 7 bits (e.g. a 21-bit value is handled as 3 groups)
102 | const groupBits = 7;
103 | const groups = Math.ceil(bits / groupBits);
104 |
105 | let result = 0n;
106 | for (let i = 0; i < groups; i++) {
107 | // Extract a group of bits
108 | const mask = (1n << BigInt(groupBits)) - 1n;
109 | const group = (val >> BigInt(i * groupBits)) & mask;
110 |
111 | // Spread the bits - putting each bit 3 positions apart
112 | let spreadGroup = 0n;
113 | for (let b = 0; b < groupBits; b++) {
114 | if ((group & (1n << BigInt(b))) !== 0n) {
115 | spreadGroup |= (1n << BigInt(b * 3));
116 | }
117 | }
118 |
119 | // Add to result at the appropriate position
120 | result |= (spreadGroup << BigInt(i * groupBits * 3));
121 | }
122 |
123 | return result;
124 | }
125 |
126 | /**
127 | * Apply the direction-dependent permutation to a Morton code
128 | * @param mortonCode The original Morton code
129 | * @param raySignBits The ray sign bits (0-7)
130 | * @param maxLevel The maximum octree level
131 | * @returns The direction-adjusted Morton code
132 | */
133 | public static applyDirectionPermutation(
134 | mortonCode: bigint,
135 | raySignBits: number,
136 | maxLevel: number = 16
137 | ): bigint {
138 | // Get the permutation mapping for these ray sign bits
139 | const permutation = this.PERMUTATION_MAPS[raySignBits];
140 |
141 | // For each level, extract the 3-bit code and remap it
142 | let result = 0n;
143 | for (let level = 0; level < maxLevel; level++) {
144 | // Get the 3 bits at this level (each level is 3 bits in the Morton code)
145 | const shift = BigInt(level * 3);
146 | const levelBits = Number((mortonCode >> shift) & 0b111n);
147 |
148 | // Apply the permutation
149 | const remappedBits = permutation[levelBits];
150 |
151 | // Add back to the result
152 | result |= BigInt(remappedBits) << shift;
153 | }
154 |
155 | return result;
156 | }
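  // Example: applyDirectionPermutation(43n, 0b001, 2) remaps the two octant digits
  // (5, 3) through PERMUTATION_MAPS[1] = [1, 0, 3, 2, 5, 4, 7, 6] to (4, 2),
  // giving 0b100010 = 34n.  Note that with the default maxLevel of 16, unused levels
  // (digit 0) are also remapped to permutation[0], which is non-zero for any negative
  // axis direction - one of the issues hinted at by the warning at the top of this file.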
157 |
158 | /**
159 | * Sort voxels based on direction-dependent Morton codes using octree information
160 | * @param positions Array of voxel positions [x1,y1,z1, x2,y2,z2, ...]
161 | * @param cameraPosition Camera position [x,y,z]
162 | * @param cameraTarget Camera target [x,y,z]
163 | * @param octlevels Optional octree levels per voxel
164 | * @param octpaths Optional octree paths per voxel
165 | * @returns Indices array with sorted order
166 | */
167 | public static sortVoxels(
168 | positions: Float32Array,
169 | cameraPosition: [number, number, number],
170 | cameraTarget: [number, number, number],
171 | octlevels?: Uint8Array,
172 | octpaths?: Uint32Array
173 | ): Uint32Array {
174 | // Create a ray direction from camera to target
175 | const rayDirection: [number, number, number] = [
176 | cameraTarget[0] - cameraPosition[0],
177 | cameraTarget[1] - cameraPosition[1],
178 | cameraTarget[2] - cameraPosition[2]
179 | ];
180 |
181 | // Normalize the ray direction
182 | const length = Math.sqrt(
183 | rayDirection[0] * rayDirection[0] +
184 | rayDirection[1] * rayDirection[1] +
185 | rayDirection[2] * rayDirection[2]
186 | );
187 |
188 | rayDirection[0] /= length;
189 | rayDirection[1] /= length;
190 | rayDirection[2] /= length;
191 |
192 | // Get ray sign bits
193 | const raySignBits = this.getRaySignBits(rayDirection);
194 |
195 | // Find min/max for normalization (only needed if we don't have octpath info)
196 | let minX = Infinity, minY = Infinity, minZ = Infinity;
197 | let maxX = -Infinity, maxY = -Infinity, maxZ = -Infinity;
198 |
199 | if (!octpaths || !octlevels) {
200 | for (let i = 0; i < positions.length; i += 3) {
201 | minX = Math.min(minX, positions[i]);
202 | minY = Math.min(minY, positions[i+1]);
203 | minZ = Math.min(minZ, positions[i+2]);
204 |
205 | maxX = Math.max(maxX, positions[i]);
206 | maxY = Math.max(maxY, positions[i+1]);
207 | maxZ = Math.max(maxZ, positions[i+2]);
208 | }
209 | }
210 |
211 | // Range for normalization
212 | const rangeX = maxX - minX || 1;
213 | const rangeY = maxY - minY || 1;
214 | const rangeZ = maxZ - minZ || 1;
215 |
216 | // Create an array of indices and their Morton codes
217 | const voxelCount = positions.length / 3;
218 | const indexMortonPairs: { index: number; mortonCode: bigint }[] = [];
219 |
220 | for (let i = 0; i < voxelCount; i++) {
221 | let mortonCode: bigint;
222 |
223 | // Use octpath if available
224 | if (octpaths && octlevels && i < octpaths.length && i < octlevels.length) {
225 | mortonCode = this.octpathToMorton(octpaths[i], octlevels[i]);
226 | } else {
227 | // Fall back to computing from position
228 | const x = (positions[i*3] - minX) / rangeX;
229 | const y = (positions[i*3+1] - minY) / rangeY;
230 | const z = (positions[i*3+2] - minZ) / rangeZ;
231 |
232 | mortonCode = this.mortonCode(x, y, z);
233 | }
234 |
235 | // Apply direction-dependent permutation
236 | mortonCode = this.applyDirectionPermutation(mortonCode, raySignBits);
237 |
238 | indexMortonPairs.push({ index: i, mortonCode });
239 | }
240 |
241 | // Sort by Morton code
242 | indexMortonPairs.sort((a, b) => {
243 | // Compare with the correct ordering based on ray direction
244 | // For back-to-front rendering, we want furthest first
245 | if (a.mortonCode < b.mortonCode) return -1;
246 | if (a.mortonCode > b.mortonCode) return 1;
247 | return 0;
248 | });
249 |
250 | // Return sorted indices
251 | const sortedIndices = new Uint32Array(voxelCount);
252 | for (let i = 0; i < voxelCount; i++) {
253 | sortedIndices[i] = indexMortonPairs[i].index;
254 | }
255 |
256 | return sortedIndices;
257 | }
258 | }
--------------------------------------------------------------------------------
/src/lib/Viewer.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * Viewer class for WebGL2 rendering with instanced cubes
3 | * Specialized for binary PLY point cloud visualization
4 | */
5 | import { Camera } from './Camera';
6 | import { mat4, vec3 } from 'gl-matrix';
7 |
8 | enum TextureType {
9 | MainAttributes,
10 | GridValues,
11 | ShCoefficients
12 | }
13 |
14 |
15 | export class Viewer {
16 | private canvas: HTMLCanvasElement;
17 | private gl: WebGL2RenderingContext | null;
18 | private program: WebGLProgram | null;
19 | private positionBuffer: WebGLBuffer | null = null;
20 | private indexBuffer: WebGLBuffer | null = null;
21 | private instanceBuffer: WebGLBuffer | null = null;
22 | private instanceCount: number = 10; // Number of instances to render
23 | private indexCount: number = 0; // Number of indices in the cube geometry
24 | private vao: WebGLVertexArrayObject | null = null;
25 | private container: HTMLElement;
26 | private resizeObserver: ResizeObserver;
27 |
28 | // Camera
29 | private camera: Camera;
30 |
31 | // Scene properties for scaling calculation
32 |
33 | private baseVoxelSize: number = 0.01;
34 |
35 | private lastCameraPosition: vec3 = vec3.create();
36 | private resortThreshold: number = 0.1; // Threshold for camera movement to trigger resort
37 |
38 |   // Depth-sorting state: worker, pending flag, and copies of the original per-voxel attributes
39 | private sortWorker: Worker | null = null;
40 | private pendingSortRequest: boolean = false;
41 | private originalPositions: Float32Array | null = null;
42 | private originalSH0Values: Float32Array | null = null;
43 | private originalScales: Float32Array | null = null;
44 | private originalGridValues1: Float32Array | null = null;
45 | private originalGridValues2: Float32Array | null = null;
46 | private sortedIndices: Uint32Array | null = null;
47 |
48 |   // Original octree data (levels and paths)
49 | private originalOctlevels: Uint8Array | null = null;
50 | private originalOctpaths: Uint32Array | null = null;
51 |
52 |
53 |   // Mouse interaction state for orbit / pan / zoom controls
54 | private isDragging: boolean = false;
55 | private isPanning: boolean = false;
56 | private lastMouseX: number = 0;
57 | private lastMouseY: number = 0;
58 | private orbitSpeed: number = 0.005;
59 | private panSpeed: number = 0.01;
60 | private zoomSpeed: number = 0.1;
61 |
62 |   // Scene transform: flips the Y and Z axes to match the viewer's coordinate convention
63 | private sceneTransformMatrix: mat4 = mat4.fromValues(
64 | 1, 0, 0, 0, // First row
65 | 0, -1, 0, 0, // Second row
66 | 0, 0, -1, 0, // Third row
67 | 0, 0, 0, 1 // Fourth row
68 | );
69 |
70 |   // Higher-order SH (SH1) coefficients and per-instance index buffer state
71 |
72 | private originalSH1Values: Float32Array | null = null;
73 |
74 | private instanceIndexBuffer: WebGLBuffer | null = null;
75 | private sortedIndicesArray: Uint32Array | null = null;
76 |
77 | private textureWidth: number = 0;
78 | private textureHeight: number = 0;
79 |
80 |   // Data textures: position + scale, grid densities, and SH coefficients
81 | private posScaleTexture: WebGLTexture | null = null; // pos + scale (4 values)
82 | private gridValuesTexture: WebGLTexture | null = null; // grid values (8 values)
83 | private shTexture: WebGLTexture | null = null; // sh0 + sh1 (4+8 = 12 values)
84 |
85 |   // Dimensions of each data texture
86 | private posScaleWidth: number = 0;
87 | private posScaleHeight: number = 0;
88 | private gridValuesWidth: number = 0;
89 | private gridValuesHeight: number = 0;
90 | private shWidth: number = 0;
91 | private shHeight: number = 0;
92 |
93 | private fpsUpdateInterval: number = 500; // Update FPS display every 500ms
94 | private lastFpsUpdateTime: number = 0;
95 | private fpsElement: HTMLElement | null = null;
96 | private currentFps: number = 0;
97 |
98 |   // Frame timing
99 | private lastRafTime: number = 0;
100 | private currentFrameTime: number = 0; // in milliseconds
101 |
102 |   // Rolling window of recent frame times used to smooth the FPS display
103 | private frameTimeHistory: number[] = [];
104 | private frameTimeHistoryMaxLength: number = 10;
105 |
106 |   // Sort timing display
107 | private sortTimeElement: HTMLElement | null = null;
108 | private lastSortTime: number = 0;
109 |
110 | private isIntelGPU: boolean = false;
111 | private customPixelRatio: number = 1.0;
112 |
113 |   // Touch interaction state
114 | private touchStartPositions: { [key: number]: { x: number; y: number } } = {};
115 | private lastTouchDistance: number = 0;
116 | private isTouchOrbit: boolean = false;
117 |
118 | constructor(containerId: string) {
119 | // Create canvas element
120 | this.canvas = document.createElement('canvas');
121 |
122 | // Get container
123 | this.container = document.getElementById(containerId)!;
124 | if (!this.container) {
125 | throw new Error(`Container element with id "${containerId}" not found`);
126 | }
127 |
128 | // Set canvas to fill container completely
129 | this.canvas.style.width = '100vw';
130 | this.canvas.style.height = '100vh';
131 | this.canvas.style.display = 'block';
132 |
133 | // Append to container
134 | this.container.appendChild(this.canvas);
135 |
136 | // Initialize WebGL2 context
137 | this.gl = this.canvas.getContext('webgl2');
138 | if (!this.gl) {
139 | throw new Error('WebGL2 not supported in this browser');
140 | }
141 |
142 | if (!this.gl.getExtension('EXT_color_buffer_float')) {
143 | console.error('EXT_color_buffer_float extension not supported');
144 | }
145 |
146 | // Initialize camera - revert to original positive Z position
147 | this.camera = new Camera();
148 | this.camera.setPosition(0, 0, 1);
149 | this.camera.setTarget(0, 0, 0);
150 |
151 | // Get camera position and copy it to lastCameraPosition
152 | const pos = this.camera.getPosition();
153 | vec3.set(this.lastCameraPosition, pos[0], pos[1], pos[2]);
154 |
155 | // Detect GPU vendor and set appropriate pixel ratio
156 | this.detectGPUVendor();
157 |
158 | // Set initial size
159 | this.updateCanvasSize();
160 |
161 | // Create a resize observer to handle container size changes
162 | this.resizeObserver = new ResizeObserver(() => {
163 | this.updateCanvasSize();
164 | });
165 | this.resizeObserver.observe(this.container);
166 |
167 | // Also handle window resize
168 | window.addEventListener('resize', () => {
169 | this.updateCanvasSize();
170 | });
171 |
172 | // Setup program and buffers
173 | this.program = null;
174 | this.initShaders();
175 | this.initBuffers();
176 |
177 | // Initialize the sort worker
178 | this.initSortWorker();
179 |
180 | // Add mouse event listeners for orbital controls
181 | this.initOrbitControls();
182 |
183 | // Initialize the FPS counter
184 | this.initFpsCounter();
185 |
186 | this.initWebGLConstants();
187 |
188 | // Add keyboard controls for scene rotation
189 | this.initKeyboardControls();
190 |
191 | this.render(0);
192 |
193 | }
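  // Typical wiring (sketch; the container id and PLY path are hypothetical, but the
  // signatures match this class and LoadPLY):
  //
  //   const viewer = new Viewer('app');
  //   const ply = await LoadPLY.loadFromUrl('/scene.ply');
  //   viewer.loadPointCloud(ply.vertices, ply.sh0Values, ply.octlevels, ply.octpaths,
  //                         ply.gridValues, ply.shRestValues);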
194 |
195 | private initWebGLConstants(): void {
196 | const gl = this.gl!;
197 | // Ensure blending is properly set up
198 | gl.disable(gl.DEPTH_TEST);
199 | gl.enable(gl.BLEND);
200 | gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA);
201 |
202 | // Enable backface culling
203 | gl.enable(gl.CULL_FACE);
204 | gl.cullFace(gl.BACK);
205 | }
206 |
207 | private detectGPUVendor(): void {
208 | const gl = this.gl!;
209 | const debugInfo = gl.getExtension('WEBGL_debug_renderer_info');
210 |
211 | if (debugInfo) {
212 | const renderer = gl.getParameter(debugInfo.UNMASKED_RENDERER_WEBGL);
213 | console.log('GPU detected:', renderer);
214 |
215 | // Check if this is an Intel GPU
216 | this.isIntelGPU = renderer.toLowerCase().includes('intel');
217 |
218 | if (this.isIntelGPU) {
219 | console.log('Intel GPU detected - reducing resolution for better performance');
220 | // Use a lower pixel ratio for Intel GPUs (0.75 = 75% of normal resolution)
221 | this.customPixelRatio = 0.75;
222 | } else {
223 | // Use device pixel ratio for non-Intel GPUs, but cap it for high-DPI displays
224 | this.customPixelRatio = Math.min(window.devicePixelRatio, 2.0);
225 | }
226 | }
227 | }
228 |
229 | /**
230 | * Updates canvas size to match container dimensions
231 | */
232 | private updateCanvasSize(): void {
233 | // Get container dimensions (using getBoundingClientRect for true pixel dimensions)
234 | const rect = this.container.getBoundingClientRect();
235 |
236 | // Apply custom pixel ratio
237 | const width = Math.floor(rect.width * this.customPixelRatio);
238 | const height = Math.floor(rect.height * this.customPixelRatio);
239 |
240 | // Set canvas dimensions while keeping display size
241 | this.canvas.width = width;
242 | this.canvas.height = height;
243 |
244 | // Make sure canvas still appears at the browser-reported size
245 | this.canvas.style.width = `${rect.width}px`;
246 | this.canvas.style.height = `${rect.height}px`;
247 |
248 | // Update camera aspect ratio
249 | this.camera.setAspectRatio(width / height);
250 |
251 | // Update WebGL viewport
252 | if (this.gl) {
253 | this.gl.viewport(0, 0, width, height);
254 | }
255 | }
256 |
257 | private initShaders(): void {
258 | const gl = this.gl!;
259 |
260 | // Get sample count from URL
261 | const sampleCount = this.getSampleCountFromURL();
262 | console.log(`Using ${sampleCount} samples for rendering`);
263 |
264 | // Updated vertex shader to use texture fetches
265 | const vsSource = `#version 300 es
266 | precision mediump float;
267 | precision mediump sampler2D;
268 |
269 | // Core attributes
270 | in vec4 aVertexPosition;
271 | in uint aInstanceIndex;
272 |
273 | // Uniforms for matrices and camera
274 | uniform mat4 uProjectionMatrix;
275 | uniform mat4 uViewMatrix;
276 | uniform mat4 uSceneTransformMatrix;
277 | uniform mat4 uInverseTransformMatrix;
278 | uniform vec3 uCameraPosition;
279 |
280 | // Uniforms for textures
281 | uniform sampler2D uPosScaleTexture;
282 | uniform sampler2D uGridValuesTexture;
283 | uniform sampler2D uShTexture;
284 |
285 | // Texture dimensions
286 | uniform ivec2 uPosScaleDims;
287 | uniform ivec2 uGridValuesDims;
288 | uniform ivec2 uShDims;
289 |
290 | // Outputs to fragment shader
291 | out vec3 vWorldPos;
292 | out float vScale;
293 | out vec3 vVoxelCenter;
294 | out vec4 vDensity0;
295 | out vec4 vDensity1;
296 | out vec3 vColor;
297 |
298 | // Helper function to calculate texture coordinate
299 | vec2 getTexCoord(int instanceIdx, int offsetIdx, ivec2 dims, int vec4sPerInstance) {
300 | int texelIdx = instanceIdx * vec4sPerInstance + offsetIdx;
301 | int x = texelIdx % dims.x;
302 | int y = texelIdx / dims.x;
303 | return (vec2(x, y) + 0.5) / vec2(dims);
304 | }
305 |
306 | // Helper functions to fetch data
307 | vec4 fetch4(sampler2D tex, int idx, int offsetIdx, ivec2 dims, int vec4sPerInstance) {
308 | vec2 coord = getTexCoord(idx, offsetIdx, dims, vec4sPerInstance);
309 | return texture(tex, coord);
310 | }
311 |
312 | // Modified SH evaluation with proper transform handling
313 | vec3 evaluateSH(vec3 sh0, vec3 sh1_0, vec3 sh1_1, vec3 sh1_2, vec3 direction) {
314 | // Transform the direction vector using the inverse transform matrix
315 | // This handles rotations correctly in the shader space
316 | vec4 transformedDir = uInverseTransformMatrix * vec4(direction, 0.0);
317 |
318 | // Normalize the transformed direction
319 | vec3 dir = normalize(transformedDir.xyz);
320 |
321 | // Rest of the SH evaluation remains the same
322 | // SH0
323 | vec3 color = sh0 * 0.28209479177387814;
324 |
325 | // Calculate basis functions for SH1 (first order terms only)
326 | // Y_1,-1 = 0.488603 * y
327 | // Y_1,0 = 0.488603 * z
328 | // Y_1,1 = 0.488603 * x
329 | float basis_y = 0.488603 * dir.y;
330 | float basis_z = 0.488603 * dir.z;
331 | float basis_x = 0.488603 * dir.x;
332 |
333 | vec3 sh1_contrib = vec3(0);
334 | // Apply SH1 coefficients per color channel
335 | sh1_contrib += sh1_0 * basis_x;
336 | sh1_contrib += sh1_1 * basis_y;
337 | sh1_contrib += sh1_2 * basis_z;
338 |
339 | color += sh1_contrib;
340 | color += 0.5;
341 |
342 | return max(color, 0.0);
343 | }
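      // The constants above are the real spherical-harmonic basis factors:
      // 0.28209479... = 1 / (2 * sqrt(pi)) for the DC term Y_0^0, and
      // 0.488603 = sqrt(3 / (4 * pi)) for the three first-order terms.
      // The final +0.5 offset appears to follow the usual splatting convention of
      // centering SH-decoded colors around 0.5 before clamping.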
344 |
345 |
346 | void main() {
347 | // Get the index for this instance
348 | int idx = int(aInstanceIndex);
349 |
350 | // Fetch position and scale (1 vec4 per instance)
351 | vec4 posAndScale = fetch4(uPosScaleTexture, idx, 0, uPosScaleDims, 1);
352 | vec3 instancePosition = posAndScale.xyz;
353 | float instanceScale = posAndScale.w;
354 |
355 | // Fetch grid values (2 vec4s per instance)
356 | vec4 gridValues1 = fetch4(uGridValuesTexture, idx, 0, uGridValuesDims, 2);
357 | vec4 gridValues2 = fetch4(uGridValuesTexture, idx, 1, uGridValuesDims, 2);
358 |
359 | // Fetch SH values (3 vec4s per instance)
360 | vec4 sh0_vec4 = fetch4(uShTexture, idx, 0, uShDims, 3);
361 | vec4 sh1_part1 = fetch4(uShTexture, idx, 1, uShDims, 3);
362 | vec4 sh1_part2 = fetch4(uShTexture, idx, 2, uShDims, 3);
363 |
364 | // Extract SH0 (rgb only - first 3 components)
365 | vec3 sh0 = sh0_vec4.rgb;
366 |
367 | // Extract SH1 values (all 9 components)
368 | // The 4th value of sh0_vec4 is the first SH1 value
369 | vec3 sh1_0 = vec3(sh0_vec4.a, sh1_part1.xy);
370 | vec3 sh1_1 = vec3(sh1_part1.zw, sh1_part2.x);
371 | vec3 sh1_2 = sh1_part2.yzw;
372 |
373 | // Scale the vertex position for this instance
374 | vec3 scaledVertexPos = aVertexPosition.xyz * instanceScale;
375 |
376 | // Position vertex relative to instance position
377 | vec3 worldVertexPos = scaledVertexPos + instancePosition;
378 |
379 | // Apply scene transform to the entire positioned vertex
380 | vec4 transformedPos = uSceneTransformMatrix * vec4(worldVertexPos, 1.0);
381 |
382 | // Calculate final position
383 | gl_Position = uProjectionMatrix * uViewMatrix * transformedPos;
384 |
385 | // Pass transformed world position of the vertex to fragment shader
386 | vWorldPos = transformedPos.xyz;
387 |
388 | // Calculate voxel center in transformed space
389 | vVoxelCenter = (uSceneTransformMatrix * vec4(instancePosition, 1.0)).xyz;
390 |
391 | // Pass scale to fragment shader
392 | vScale = instanceScale;
393 |
394 | // Calculate viewing direction from voxel center to camera in world space
395 | vec3 viewDir = normalize(vVoxelCenter - uCameraPosition);
396 |
397 | // Calculate color using SH and pass to fragment shader
398 | vColor = evaluateSH(sh0, sh1_0, sh1_1, sh1_2, viewDir);
399 |
400 | // Pass density values to fragment shader
401 | vDensity0 = gridValues1;
402 | vDensity1 = gridValues2;
403 | }
404 | `;
405 |
406 | // Updated fragment shader with sampling loop
407 | const fsSource = `#version 300 es
408 | precision mediump float;
409 |
410 | // Define the sample count as a constant
411 | const int SAMPLE_COUNT = ${sampleCount};
412 |
413 | in vec3 vWorldPos;
414 | in float vScale;
415 | in vec3 vVoxelCenter;
416 | in vec4 vDensity0; // Density values for corners 0-3
417 | in vec4 vDensity1; // Density values for corners 4-7
418 | in vec3 vColor; // Pre-calculated color from vertex shader
419 |
420 | uniform vec3 uCameraPosition;
421 | uniform mat4 uViewMatrix;
422 | uniform mat4 uInverseTransformMatrix;
423 | uniform vec3 uTransformFlips; // x, y, z components will be -1 for flipped axes, 1 for unchanged
424 |
425 | out vec4 fragColor;
426 |
427 | // Ray-box intersection function - returns entry and exit t values
428 | vec2 rayBoxIntersection(vec3 rayOrigin, vec3 rayDir, vec3 boxCenter, float boxScale) {
429 | const float EPSILON = 1e-3;
430 |
431 | // Get box dimensions
432 | vec3 halfExtent = vec3(boxScale * 0.5);
433 |
434 | // For non-axis aligned boxes, we should transform the ray into box space
435 | // rather than transforming the box into world space
436 |
437 | // 1. Create a coordinate system for the box (this is the inverse transform)
438 | // This moves ray into the box's local space where it's axis-aligned
439 | vec3 localRayOrigin = rayOrigin - boxCenter;
440 |
441 | // Apply inverse rotation (would be done by multiplying by inverse matrix)
442 | // Since we're in the fragment shader, we can use the uniform
443 | vec4 transformedOrigin = uInverseTransformMatrix * vec4(localRayOrigin, 0.0);
444 | vec4 transformedDir = uInverseTransformMatrix * vec4(rayDir, 0.0);
445 |
446 | // Now perform standard AABB intersection in this space
447 | vec3 invDir = 1.0 / transformedDir.xyz;
448 | vec3 boxMin = -halfExtent;
449 | vec3 boxMax = halfExtent;
450 |
451 | vec3 tMin = (boxMin - transformedOrigin.xyz) * invDir;
452 | vec3 tMax = (boxMax - transformedOrigin.xyz) * invDir;
453 | vec3 t1 = min(tMin, tMax);
454 | vec3 t2 = max(tMin, tMax);
455 | float tNear = max(max(t1.x, t1.y), t1.z);
456 | float tFar = min(min(t2.x, t2.y), t2.z);
457 |
458 | // If camera is inside the box, tNear will be negative
459 | tNear = max(0.0, tNear);
460 |
461 | return vec2(tNear, tFar);
462 | }
463 |
464 | // Modified trilinear interpolation for arbitrary transforms
465 | float trilinearInterpolation(vec3 pos, vec3 boxMin, vec3 boxMax, vec4 density0, vec4 density1) {
466 |
467 | // TODO: This is very unoptimized. Need to optimize this so we don't have to transform the position
468 | // back and forth between different spaces.
469 |
470 | // Calculate the size of the box
471 | vec3 boxSize = boxMax - boxMin;
472 |
473 | // First, transform the sample position back to the original data space
474 | // 1. Convert the position to a normalized position in the box [0,1]
475 | vec3 normalizedPos = (pos - boxMin) / boxSize;
476 |
477 | // 2. Convert to box-local coordinates [-0.5, 0.5]
478 | vec3 localPos = normalizedPos - 0.5;
479 |
480 | // 3. Transform this position back to the original data space using inverse transform
481 | vec4 originalLocalPos = uInverseTransformMatrix * vec4(localPos, 0.0);
482 |
483 | // 4. Convert back to normalized [0,1] range
484 | vec3 originalNormalizedPos = originalLocalPos.xyz + 0.5;
485 |
486 | // 5. Clamp to ensure we're in the valid range [0,1]
487 | originalNormalizedPos = clamp(originalNormalizedPos, 0.0, 1.0);
488 |
489 | // Now use these coordinates to sample the grid values in their original orientation
490 | float fx = originalNormalizedPos.x;
491 | float fy = originalNormalizedPos.y;
492 | float fz = originalNormalizedPos.z;
493 | float fx1 = 1.0 - fx;
494 | float fy1 = 1.0 - fy;
495 | float fz1 = 1.0 - fz;
496 |
497 | // Standard grid corner ordering
498 | float c000 = density0.x; // Corner [0,0,0]
499 | float c001 = density0.y; // Corner [0,0,1]
500 | float c010 = density0.z; // Corner [0,1,0]
501 | float c011 = density0.w; // Corner [0,1,1]
502 | float c100 = density1.x; // Corner [1,0,0]
503 | float c101 = density1.y; // Corner [1,0,1]
504 | float c110 = density1.z; // Corner [1,1,0]
505 | float c111 = density1.w; // Corner [1,1,1]
506 |
507 | // Trilinear interpolation using original-space coordinates
508 | float c00 = fx1 * c000 + fx * c100;
509 | float c01 = fx1 * c001 + fx * c101;
510 | float c10 = fx1 * c010 + fx * c110;
511 | float c11 = fx1 * c011 + fx * c111;
512 |
513 | float c0 = fy1 * c00 + fy * c10;
514 | float c1 = fy1 * c01 + fy * c11;
515 |
516 | return fz1 * c0 + fz * c1;
517 | }
518 |
519 | float explin(float x) {
520 | float threshold = 1.1;
521 | if (x > threshold) {
522 | return x;
523 | } else {
524 | float ln1_1 = 0.0953101798043; // pre-computed ln(1.1)
525 | return exp((x / threshold) - 1.0 + ln1_1);
526 | }
527 | }
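      // explin is an exp-linear activation: identity above the threshold, exponential below it.
      // At x = 1.1 both branches give 1.1 with slope 1, so the function is C1-continuous;
      // at x = 0 it returns exp(ln(1.1) - 1) ~= 0.405 rather than 0, so raw densities are
      // never mapped exactly to zero.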
528 |
529 | void main() {
530 | // Calculate ray from camera to this fragment
531 | vec3 rayOrigin = uCameraPosition;
532 | vec3 rayDir = normalize(vWorldPos - uCameraPosition);
533 |
534 | // Get ray-box intersection
535 | vec2 tIntersect = rayBoxIntersection(rayOrigin, rayDir, vVoxelCenter, vScale);
536 | float tNear = max(0.0, tIntersect.x);
537 | float tFar = min(tIntersect.y, 1000.0);
538 |
539 | if (tNear < tFar) {
540 | // Calculate box min and max
541 | vec3 boxMin = vVoxelCenter - vec3(vScale * 0.5);
542 | vec3 boxMax = vVoxelCenter + vec3(vScale * 0.5);
543 |
544 | // Calculate entry and exit points in world space
545 | vec3 entryPoint = rayOrigin + rayDir * tNear;
546 | vec3 exitPoint = rayOrigin + rayDir * tFar;
547 |
548 | // Transform to view space
549 | vec4 entryPointView = uViewMatrix * vec4(entryPoint, 1.0);
550 | vec4 exitPointView = uViewMatrix * vec4(exitPoint, 1.0);
551 |
552 | // Calculate ray length in view space
553 | float viewSpaceRayLength = distance(entryPointView.xyz, exitPointView.xyz);
554 | float stepLength = viewSpaceRayLength / float(SAMPLE_COUNT);
555 |
556 | // Use a loop to calculate total density
557 | float totalDensity = 0.0;
558 |
559 | // Apply explin after interpolation
560 | // The CUDA reference has a 100x scale factor
561 | const float STEP_SCALE = 100.0;
562 |
563 | for (int i = 0; i < SAMPLE_COUNT; i++) {
564 | // Calculate sample position - evenly distribute samples
565 | float t = tNear + (tFar - tNear) * (float(i) + 0.5) / float(SAMPLE_COUNT);
566 | vec3 samplePoint = rayOrigin + rayDir * t;
567 |
568 | // Get density at sample point
569 | float rawDensity = trilinearInterpolation(samplePoint, boxMin, boxMax, vDensity0, vDensity1);
570 |
571 | // Apply explin and accumulate
572 | float density = STEP_SCALE * stepLength * explin(rawDensity);
573 | totalDensity += density;
574 | }
575 |
576 | // Use view space ray length for Beer-Lambert law
577 | float alpha = 1.0 - exp(-totalDensity);
578 |
579 | // Premultiply the color by alpha
580 | vec3 premultipliedColor = vColor * alpha;
581 | fragColor = vec4(premultipliedColor, alpha);
582 | } else {
583 | discard;
584 | }
585 | }
586 | `;
587 |
588 | // Create shaders with better error handling
589 | const vertexShader = gl.createShader(gl.VERTEX_SHADER);
590 | if (!vertexShader) {
591 | console.error('Failed to create vertex shader');
592 | return;
593 | }
594 |
595 | gl.shaderSource(vertexShader, vsSource);
596 | gl.compileShader(vertexShader);
597 |
598 | if (!gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS)) {
599 | const info = gl.getShaderInfoLog(vertexShader);
600 | console.error('Vertex shader compilation failed:', info);
601 | gl.deleteShader(vertexShader);
602 | return;
603 | } else {
604 | console.log('Vertex shader compiled successfully');
605 | }
606 |
607 | const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
608 | if (!fragmentShader) {
609 | console.error('Failed to create fragment shader');
610 | return;
611 | }
612 |
613 | gl.shaderSource(fragmentShader, fsSource);
614 | gl.compileShader(fragmentShader);
615 |
616 | if (!gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS)) {
617 | const info = gl.getShaderInfoLog(fragmentShader);
618 | console.error('Fragment shader compilation failed:', info);
619 | gl.deleteShader(fragmentShader);
620 | return;
621 | } else {
622 | console.log('Fragment shader compiled successfully');
623 | }
624 |
625 | // Create and link program
626 | this.program = gl.createProgram();
627 | if (!this.program) {
628 | console.error('Failed to create shader program');
629 | return;
630 | }
631 |
632 | gl.attachShader(this.program, vertexShader);
633 | gl.attachShader(this.program, fragmentShader);
634 | gl.linkProgram(this.program);
635 |
636 | if (!gl.getProgramParameter(this.program, gl.LINK_STATUS)) {
637 | const info = gl.getProgramInfoLog(this.program);
638 | console.error('Shader program linking failed:', info);
639 | return;
640 | } else {
641 | console.log('Shader program linked successfully');
642 | }
643 | }
644 |
645 |
646 | private initBuffers(): void {
647 | const gl = this.gl!;
648 |
649 | // Create a vertex array object (VAO)
650 | this.vao = gl.createVertexArray();
651 | gl.bindVertexArray(this.vao);
652 |
653 | // Initialize cube geometry for instancing
654 | this.initCubeGeometry(1);
655 |
656 | // Create and initialize the instance index buffer (this is all we need now for instancing)
657 | const instanceIndices = new Uint32Array(this.instanceCount);
658 | for (let i = 0; i < this.instanceCount; i++) {
659 | instanceIndices[i] = i;
660 | }
661 |
662 | // Create and fill the instance index buffer
663 | this.instanceIndexBuffer = gl.createBuffer();
664 | gl.bindBuffer(gl.ARRAY_BUFFER, this.instanceIndexBuffer);
665 | gl.bufferData(gl.ARRAY_BUFFER, instanceIndices, gl.DYNAMIC_DRAW);
666 |
667 | // Get attribute location for instance index
668 | const instanceIndexLocation = gl.getAttribLocation(this.program!, 'aInstanceIndex');
669 | console.log('Instance index attribute location:', instanceIndexLocation);
670 |
671 | // Only set up instance index attribute if it exists in the shader
672 | if (instanceIndexLocation !== -1) {
673 | gl.enableVertexAttribArray(instanceIndexLocation);
674 | gl.vertexAttribIPointer(
675 | instanceIndexLocation,
676 | 1, // 1 component per instance index
677 | gl.UNSIGNED_INT, // data type
678 | 0, // stride
679 | 0 // offset
680 | );
681 | gl.vertexAttribDivisor(instanceIndexLocation, 1);
682 | } else {
683 | console.error('Could not find aInstanceIndex attribute in shader');
684 | }
685 |
686 | // Unbind the VAO
687 | gl.bindVertexArray(null);
688 | }
689 |
690 | /**
691 | * Initialize cube geometry for instance rendering
692 | */
693 | private initCubeGeometry(size: number): void {
694 | const gl = this.gl!;
695 |
696 | // Create cube geometry
697 | const halfSize = size / 2;
698 | const positions = [
699 | // Front face
700 | -halfSize, -halfSize, halfSize,
701 | halfSize, -halfSize, halfSize,
702 | halfSize, halfSize, halfSize,
703 | -halfSize, halfSize, halfSize,
704 |
705 | // Back face
706 | -halfSize, -halfSize, -halfSize,
707 | -halfSize, halfSize, -halfSize,
708 | halfSize, halfSize, -halfSize,
709 | halfSize, -halfSize, -halfSize,
710 | ];
711 |
712 | if (this.positionBuffer) {
713 | gl.deleteBuffer(this.positionBuffer);
714 | }
715 |
716 | // Create position buffer
717 | this.positionBuffer = gl.createBuffer();
718 | gl.bindBuffer(gl.ARRAY_BUFFER, this.positionBuffer);
719 | gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);
720 |
721 | // Get attribute location for vertex position
722 | const positionAttributeLocation = gl.getAttribLocation(this.program!, 'aVertexPosition');
723 |
724 | if (positionAttributeLocation !== -1) {
725 | gl.enableVertexAttribArray(positionAttributeLocation);
726 | gl.vertexAttribPointer(
727 | positionAttributeLocation,
728 | 3, // 3 components per vertex
729 | gl.FLOAT, // data type
730 | false, // no normalization
731 | 0, // stride
732 | 0 // offset
733 | );
734 | } else {
735 | console.error('Could not find aVertexPosition attribute in shader');
736 | }
737 |
738 | // Indices as before
739 | const indices = [
740 | 0, 1, 2, 0, 2, 3, // Front face
741 | 4, 5, 6, 4, 6, 7, // Back face
742 | 0, 3, 5, 0, 5, 4, // Left face
743 | 1, 7, 6, 1, 6, 2, // Right face
744 | 3, 2, 6, 3, 6, 5, // Top face
745 | 0, 4, 7, 0, 7, 1 // Bottom face
746 | ];
747 |
748 |
749 | if (this.indexBuffer) {
750 | gl.deleteBuffer(this.indexBuffer);
751 | }
752 |
753 | // Create index buffer
754 | this.indexBuffer = gl.createBuffer();
755 | gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
756 | gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(indices), gl.STATIC_DRAW);
757 |
758 | // Store the number of indices
759 | this.indexCount = indices.length;
760 |
761 | console.log(`Initialized cube geometry with ${this.indexCount} indices`);
762 | }
763 | /**
764 | * Main render function with time-based animation
765 | */
766 | private render(timestamp: number): void {
767 | const gl = this.gl!;
768 |
769 | // Calculate actual frame time in milliseconds
770 | if (this.lastRafTime > 0) {
771 | const frameTime = timestamp - this.lastRafTime;
772 |
773 | // Add to frame time history
774 | this.frameTimeHistory.push(frameTime);
775 |
776 | // Keep only last N frames
777 | if (this.frameTimeHistory.length > this.frameTimeHistoryMaxLength) {
778 | this.frameTimeHistory.shift(); // Remove oldest frame time
779 | }
780 |
781 | // Calculate average frame time
782 | const avgFrameTime = this.frameTimeHistory.reduce((sum, time) => sum + time, 0) /
783 | this.frameTimeHistory.length;
784 |
785 | this.currentFrameTime = avgFrameTime;
786 | this.currentFps = 1000 / avgFrameTime; // FPS = 1000ms / frame time
787 | }
788 | this.lastRafTime = timestamp;
789 |
790 | // Update display at specified intervals
791 | if (timestamp - this.lastFpsUpdateTime > this.fpsUpdateInterval) {
792 | if (this.fpsElement) {
793 | const fps = Math.round(this.currentFps);
794 | const frameTime = this.currentFrameTime.toFixed(1);
795 | this.fpsElement.textContent = `FPS: ${fps} | Frame: ${frameTime}ms`;
796 | }
797 |
798 | // Update sort time display
799 | if (this.sortTimeElement) {
800 | this.sortTimeElement.textContent = `Sort: ${this.lastSortTime.toFixed(1)}ms`;
801 | }
802 |
803 | this.lastFpsUpdateTime = timestamp;
804 | }
805 |
806 | // Check if camera has moved enough to trigger a resort
807 | const cameraPos = this.camera.getPosition();
808 |
809 | // Use vec3.distance for a cleaner distance calculation
810 | const cameraMoveDistance = vec3.distance(
811 | this.lastCameraPosition,
812 | vec3.fromValues(cameraPos[0], cameraPos[1], cameraPos[2])
813 | );
814 |
815 | if (cameraMoveDistance > this.resortThreshold && !this.pendingSortRequest) {
816 | // Update lastCameraPosition with current position
817 | vec3.set(this.lastCameraPosition, cameraPos[0], cameraPos[1], cameraPos[2]);
818 | this.requestSort();
819 | }
820 |
821 | // Debug: Ensure we have valid data to render
822 | if (this.instanceCount === 0) {
823 | console.warn('No instances to render');
824 | }
825 |
826 |     // Clear the canvas to black (a faintly colored debug clear is left commented out below)
827 | // gl.clearColor(1.0 / 255.0, 121.0 / 255.0, 51.0 / 255.0, 1.0);
828 | gl.clearColor(0.0, 0.0, 0.0, 1.0);
829 | gl.clear(gl.COLOR_BUFFER_BIT);
830 |
831 | // Use our shader program
832 | gl.useProgram(this.program);
833 |
834 | // Debug: Check if program is valid
835 | if (!this.program) {
836 | console.error('Shader program is null');
837 | requestAnimationFrame((time) => this.render(time));
838 | return;
839 | }
840 |
841 | // Bind the VAO
842 | gl.bindVertexArray(this.vao);
843 |
844 | // Debug: Check if VAO is valid
845 | if (!this.vao) {
846 | console.error('VAO is null');
847 | requestAnimationFrame((time) => this.render(time));
848 | return;
849 | }
850 |
851 | // Calculate the inverse transform matrix
852 | const inverseTransformMatrix = this.getInverseTransformMatrix();
853 |
854 | // Set uniforms with camera matrices
855 | const projectionMatrixLocation = gl.getUniformLocation(this.program, 'uProjectionMatrix');
856 | const viewMatrixLocation = gl.getUniformLocation(this.program, 'uViewMatrix');
857 | const sceneTransformMatrixLocation = gl.getUniformLocation(this.program, 'uSceneTransformMatrix');
858 | const inverseTransformMatrixLocation = gl.getUniformLocation(this.program, 'uInverseTransformMatrix');
859 | const cameraPositionLocation = gl.getUniformLocation(this.program, 'uCameraPosition');
860 |
861 | // Pass matrices to shader
862 | gl.uniformMatrix4fv(projectionMatrixLocation, false, this.camera.getProjectionMatrix());
863 | gl.uniformMatrix4fv(viewMatrixLocation, false, this.camera.getViewMatrix());
864 | gl.uniformMatrix4fv(sceneTransformMatrixLocation, false, this.sceneTransformMatrix);
865 | gl.uniformMatrix4fv(inverseTransformMatrixLocation, false, inverseTransformMatrix);
866 |
867 | // Pass camera position to the shader
868 | gl.uniform3f(cameraPositionLocation, cameraPos[0], cameraPos[1], cameraPos[2]);
869 |
870 |     // Set legacy texture-size uniforms (uTextureWidth/uTextureHeight are not declared in the current shaders, so getUniformLocation returns null and these calls are no-ops)
871 | const textureWidthLocation = gl.getUniformLocation(this.program, 'uTextureWidth');
872 | const textureHeightLocation = gl.getUniformLocation(this.program, 'uTextureHeight');
873 | gl.uniform1i(textureWidthLocation, this.textureWidth);
874 | gl.uniform1i(textureHeightLocation, this.textureHeight);
875 |
876 | // Bind textures and set uniforms
877 | gl.activeTexture(gl.TEXTURE0);
878 | gl.bindTexture(gl.TEXTURE_2D, this.posScaleTexture);
879 | gl.uniform1i(gl.getUniformLocation(this.program!, 'uPosScaleTexture'), 0);
880 | gl.uniform2i(gl.getUniformLocation(this.program!, 'uPosScaleDims'),
881 | this.posScaleWidth, this.posScaleHeight);
882 |
883 | gl.activeTexture(gl.TEXTURE1);
884 | gl.bindTexture(gl.TEXTURE_2D, this.gridValuesTexture);
885 | gl.uniform1i(gl.getUniformLocation(this.program!, 'uGridValuesTexture'), 1);
886 | gl.uniform2i(gl.getUniformLocation(this.program!, 'uGridValuesDims'),
887 | this.gridValuesWidth, this.gridValuesHeight);
888 |
889 | gl.activeTexture(gl.TEXTURE2);
890 | gl.bindTexture(gl.TEXTURE_2D, this.shTexture);
891 | gl.uniform1i(gl.getUniformLocation(this.program!, 'uShTexture'), 2);
892 | gl.uniform2i(gl.getUniformLocation(this.program!, 'uShDims'),
893 | this.shWidth, this.shHeight);
894 |
895 | // Add a uniform to pass transformation information to the fragment shader
896 | const flipsX = this.sceneTransformMatrix[0] < 0 ? -1 : 1;
897 | const flipsY = this.sceneTransformMatrix[5] < 0 ? -1 : 1;
898 | const flipsZ = this.sceneTransformMatrix[10] < 0 ? -1 : 1;
899 |
900 | const transformFlipsLocation = gl.getUniformLocation(this.program, 'uTransformFlips');
901 | gl.uniform3f(transformFlipsLocation, flipsX, flipsY, flipsZ);
902 |
903 | // Draw instanced geometry
904 | if (this.instanceCount <= 0) {
905 | console.warn('No instances to draw');
906 | requestAnimationFrame((time) => this.render(time));
907 | return;
908 | }
909 |
910 | gl.drawElementsInstanced(gl.TRIANGLES, this.indexCount, gl.UNSIGNED_SHORT, 0, this.instanceCount);
911 |
912 | // Check for GL errors
913 | // const error = gl.getError();
914 | // if (error !== gl.NO_ERROR) {
915 | // console.error(`WebGL error: ${error}`);
916 | // }
917 |
918 | // Unbind the VAO
919 | gl.bindVertexArray(null);
920 |
921 | // Request animation frame for continuous rendering
922 | requestAnimationFrame((time) => this.render(time));
923 | }
924 |
925 | /**
926 | * Load a point cloud from positions and colors
927 | */
928 | public loadPointCloud(
929 | positions: Float32Array,
930 | sh0Values: Float32Array,
931 | octlevels: Uint8Array,
932 | octpaths: Uint32Array,
933 | gridValues: Float32Array,
934 | shRestValues?: Float32Array
935 | ): void {
936 | console.log(`Loading point cloud with ${positions.length / 3} points`);
937 |
938 | // Save original data (we still need it for the sort worker)
939 | this.originalPositions = new Float32Array(positions);
940 |
941 | // Save SH0 (base colors)
942 | this.originalSH0Values = new Float32Array(sh0Values);
943 |
946 | // Extract SH1 coefficients from shRestValues if provided
947 | if (shRestValues && shRestValues.length > 0) {
948 | // Each vertex has multiple rest values, we need to extract 9 values for SH1
949 |       const restPerVertex = shRestValues.length / (positions.length / 3);
950 | console.log(`Found ${restPerVertex} rest values per vertex, extracting SH1 (9 values per vertex)`);
951 |
952 | // We need space for 9 values per vertex
953 | this.originalSH1Values = new Float32Array(positions.length / 3 * 9);
954 |
955 | for (let i = 0; i < positions.length / 3; i++) {
956 | // Extract 9 values from shRestValues for each vertex
957 | for (let j = 0; j < 9; j++) {
958 | // Only extract if we have enough values
959 | if (j < restPerVertex) {
960 | this.originalSH1Values[i * 9 + j] = shRestValues[i * restPerVertex + j];
961 | } else {
962 | // If not enough rest values, set to 0
963 | this.originalSH1Values[i * 9 + j] = 0.0;
964 | }
965 | }
966 | }
967 |
968 | console.log(`Extracted ${this.originalSH1Values.length / 9} SH1 sets with 9 values each`);
969 | console.log('SH1 sample values (first vertex):',
970 | this.originalSH1Values.slice(0, 9));
971 | } else {
972 | // If no rest values provided, use zeros (no directional lighting)
973 | this.originalSH1Values = new Float32Array(positions.length / 3 * 9);
974 | console.log('No SH1 values provided, using default (no directional lighting)');
975 | }
976 |
977 | // Save octree data
978 | this.originalOctlevels = new Uint8Array(octlevels);
979 |
980 | this.originalScales = new Float32Array(octlevels.length);
981 | for (let i = 0; i < octlevels.length; i++) {
982 | this.originalScales[i] = this.baseVoxelSize * Math.pow(2, -octlevels[i]);
983 | }
984 |
985 | this.originalOctpaths = new Uint32Array(octpaths);
986 |
987 | // Save grid values
988 | console.log(`Saving ${gridValues.length} grid values`);
989 | this.originalGridValues1 = new Float32Array(positions.length / 3 * 4);
990 | this.originalGridValues2 = new Float32Array(positions.length / 3 * 4);
991 |
992 |     // Sanity check: scan the first 100 grid values for entries that differ from 1.0
993 | let hasNonOneValues = false;
994 | let minVal = 1.0, maxVal = 1.0;
995 |
996 | for (let i = 0; i < Math.min(gridValues.length, 100); i++) {
997 | if (gridValues[i] !== 1.0) {
998 | hasNonOneValues = true;
999 | minVal = Math.min(minVal, gridValues[i]);
1000 | maxVal = Math.max(maxVal, gridValues[i]);
1001 | }
1002 | }
1003 |
1004 | console.log(`Grid values check: Has values != 1.0: ${hasNonOneValues}, Min: ${minVal}, Max: ${maxVal}`);
1005 | console.log(`First few grid values:`, gridValues.slice(0, 24));
1006 |
1007 | for (let i = 0; i < positions.length / 3; i++) {
1008 | // First 4 corners in density0 (following the specified ordering)
1009 | this.originalGridValues1[i * 4 + 0] = gridValues[i * 8 + 0]; // Corner [0,0,0]
1010 | this.originalGridValues1[i * 4 + 1] = gridValues[i * 8 + 1]; // Corner [0,0,1]
1011 | this.originalGridValues1[i * 4 + 2] = gridValues[i * 8 + 2]; // Corner [0,1,0]
1012 | this.originalGridValues1[i * 4 + 3] = gridValues[i * 8 + 3]; // Corner [0,1,1]
1013 |
1014 | // Next 4 corners in density1 (following the specified ordering)
1015 | this.originalGridValues2[i * 4 + 0] = gridValues[i * 8 + 4]; // Corner [1,0,0]
1016 | this.originalGridValues2[i * 4 + 1] = gridValues[i * 8 + 5]; // Corner [1,0,1]
1017 | this.originalGridValues2[i * 4 + 2] = gridValues[i * 8 + 6]; // Corner [1,1,0]
1018 | this.originalGridValues2[i * 4 + 3] = gridValues[i * 8 + 7]; // Corner [1,1,1]
1019 | }
1020 |
1021 | // Set the instance count
1022 | this.instanceCount = positions.length / 3;
1023 |
1024 |     // Initialize the instance indices with sequential ordering
1025 | this.sortedIndicesArray = new Uint32Array(this.instanceCount);
1026 | for (let i = 0; i < this.instanceCount; i++) {
1027 | this.sortedIndicesArray[i] = i;
1028 | }
1029 |
1030 | // Initialize WebGL resources
1031 | const gl = this.gl!;
1032 |
1033 | // Create VAO
1034 | this.vao = gl.createVertexArray();
1035 | gl.bindVertexArray(this.vao);
1036 |
1037 | // Initialize cube geometry
1038 | this.initCubeGeometry(1.0);
1039 |
1040 |     // Create and upload the per-instance index buffer
1041 | this.instanceIndexBuffer = gl.createBuffer();
1042 | gl.bindBuffer(gl.ARRAY_BUFFER, this.instanceIndexBuffer);
1043 | gl.bufferData(gl.ARRAY_BUFFER, this.sortedIndicesArray, gl.DYNAMIC_DRAW);
1044 |
1045 | const instanceIndexLocation = gl.getAttribLocation(this.program!, 'aInstanceIndex');
1046 | gl.enableVertexAttribArray(instanceIndexLocation);
1047 | gl.vertexAttribIPointer(
1048 | instanceIndexLocation,
1049 | 1,
1050 | gl.UNSIGNED_INT,
1051 | 0,
1052 | 0
1053 | );
1054 | gl.vertexAttribDivisor(instanceIndexLocation, 1);
1055 |
1056 |     // Create the packed data textures (position/scale, grid values, SH coefficients)
1057 | this.createOptimizedTextures();
1058 |
1059 | // Unbind the VAO
1060 | gl.bindVertexArray(null);
1061 |
1062 | // Request initial sorting
1063 | this.requestSort();
1064 | }
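  // Expected per-voxel layout of the loadPointCloud() arguments, as implied by the loops
  // above (N = positions.length / 3 voxels; counts are values per voxel):
  //   positions     Float32Array, 3 * N   - xyz voxel centers
  //   sh0Values     Float32Array, 3 * N   - base RGB color (SH degree 0)
  //   octlevels     Uint8Array,   1 * N   - octree level; scale = baseVoxelSize * 2^-level
  //   octpaths      Uint32Array,  1 * N   - octree path codes (used only for sorting)
  //   gridValues    Float32Array, 8 * N   - density at the 8 cube corners
  //   shRestValues  Float32Array, >= 9*N  - optional; only the first 9 per voxel (SH1) are used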
1065 |
1066 | /**
1067 | * Set camera position
1068 | */
1069 | public setCameraPosition(x: number, y: number, z: number): void {
1070 | this.camera.setPosition(x, y, z);
1071 | }
1072 |
1073 | /**
1074 | * Set camera target
1075 | */
1076 | public setCameraTarget(x: number, y: number, z: number): void {
1077 | this.camera.setTarget(x, y, z);
1078 | }
1079 |
1080 | /**
1081 | * Resize the canvas to specified dimensions
1082 | */
1083 | public resize(width: number, height: number): void {
1084 | if (this.canvas) {
1085 | this.canvas.width = width;
1086 | this.canvas.height = height;
1087 | this.gl!.viewport(0, 0, width, height);
1088 | this.camera.setAspectRatio(width / height);
1089 | }
1090 | }
1091 |
1092 | /**
1093 | * Get direct access to the camera
1094 | */
1095 | public getCamera(): Camera {
1096 | return this.camera;
1097 | }
1098 |
1099 | /**
1100 | * Clean up resources when the viewer is no longer needed
1101 | */
1102 | public dispose(): void {
1103 | // Stop observing resize events
1104 | if (this.resizeObserver) {
1105 | this.resizeObserver.disconnect();
1106 | }
1107 |
1108 | // Remove event listeners
1109 | window.removeEventListener('resize', this.updateCanvasSize);
1110 |
1111 | // Clean up WebGL resources
1112 | const gl = this.gl;
1113 | if (gl) {
1114 | // Delete buffers
1115 | if (this.positionBuffer) gl.deleteBuffer(this.positionBuffer);
1116 | if (this.indexBuffer) gl.deleteBuffer(this.indexBuffer);
1117 | if (this.instanceBuffer) gl.deleteBuffer(this.instanceBuffer);
1118 |
1119 | if (this.instanceIndexBuffer) gl.deleteBuffer(this.instanceIndexBuffer);
1120 |
1121 | // Delete VAO
1122 | if (this.vao) gl.deleteVertexArray(this.vao);
1123 |
1124 | // Delete program and shaders
1125 | if (this.program) gl.deleteProgram(this.program);
1126 | }
1127 |
1128 | // Clear reference data
1129 | this.originalPositions = null;
1130 | this.originalSH0Values = null;
1131 | this.originalScales = null;
1132 | this.originalGridValues1 = null;
1133 | this.originalGridValues2 = null;
1134 | this.originalOctlevels = null;
1135 | this.originalOctpaths = null;
1136 | this.originalSH1Values = null;
1137 | this.sortedIndices = null;
1138 |
1139 | // Terminate the sort worker
1140 | if (this.sortWorker) {
1141 | this.sortWorker.terminate();
1142 | this.sortWorker = null;
1143 | }
1144 | }
1145 |
1146 | /**
1147 | * Set scene parameters from PLY file header
1148 | */
1149 | public setSceneParameters(center: [number, number, number], extent: number): void {
1150 | this.baseVoxelSize = extent; // Use the extent as the base voxel size
1151 |
1152 | console.log(`Scene center: [${center}], extent: ${extent}, base voxel size: ${this.baseVoxelSize}`);
1153 | }
1154 |
1155 | /**
1156 | * Initialize the worker for sorting voxels
1157 | */
1158 | private initSortWorker(): void {
1159 | try {
1160 | console.log('Initializing sort worker...');
1161 |
1162 | // Create worker
1163 | this.sortWorker = new Worker(new URL('../workers/SortWorker.ts', import.meta.url), { type: 'module' });
1164 |
1165 | // Log initialization
1166 | console.log('Sort worker created, setting up event handlers');
1167 |
1168 | // Set up message handler
1169 | this.sortWorker.onmessage = (event) => {
1170 | console.log('Received message from sort worker:', event.data.type);
1171 |
1172 | const data = event.data;
1173 |
1174 | if (data.type === 'ready') {
1175 | console.log('Sort worker initialized and ready');
1176 | } else if (data.type === 'sorted') {
1177 | console.log('Received sorted indices from worker');
1178 |
1179 | // Store the sorted indices for rendering
1180 | this.sortedIndices = data.indices;
1181 | this.pendingSortRequest = false;
1182 |
1183 | // Store the sort time
1184 | this.lastSortTime = data.sortTime || 0;
1185 |
1186 | if (this.sortedIndices) {
1187 | console.log(`Received ${this.sortedIndices.length} sorted indices from worker`);
1188 |
1189 | // Apply the sorted order to the buffers
1190 | this.applySortedOrder();
1191 | } else {
1192 | console.error('Received null indices from worker');
1193 | }
1194 | }
1195 | };
1196 |
1197 | this.sortWorker.onerror = (error) => {
1198 | console.error('Sort worker error:', error);
1199 | this.pendingSortRequest = false;
1200 | };
1201 |
1202 | console.log('Sort worker event handlers configured');
1203 | } catch (error) {
1204 | console.error('Failed to initialize sort worker:', error);
1205 | }
1206 | }
1207 |
1208 | /**
1209 | * Apply sorted indices to reorder instance data
1210 | */
1211 | private applySortedOrder(): void {
1212 | if (!this.sortedIndices || this.sortedIndices.length === 0) {
1213 | console.warn('Missing indices for sorting');
1214 | return;
1215 | }
1216 |
1217 | console.log('Applying sort order with indices');
1218 |
1219 | const gl = this.gl!;
1220 |
1221 | // Simply update the index buffer with the new sorted indices
1222 | this.sortedIndicesArray = new Uint32Array(this.sortedIndices);
1223 |
1224 | gl.bindVertexArray(this.vao);
1225 | gl.bindBuffer(gl.ARRAY_BUFFER, this.instanceIndexBuffer);
1226 | gl.bufferData(gl.ARRAY_BUFFER, this.sortedIndicesArray, gl.DYNAMIC_DRAW);
1227 | gl.bindVertexArray(null);
1228 |
1229 | // Check for GL errors
1230 | // const error = gl.getError();
1231 | // if (error !== gl.NO_ERROR) {
1232 | // console.error('WebGL error during index buffer update:', error);
1233 | // }
1234 | }
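  // Design note: a resort never rewrites the packed data textures. Only this per-instance
  // index buffer (4 bytes per voxel) is re-uploaded; the aInstanceIndex attribute set up in
  // loadPointCloud() then maps each drawn instance to its voxel data, so applying a new
  // draw order stays cheap regardless of scene size.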
1235 |
1236 | /**
1237 | * Request voxel sorting based on current camera position
1238 | */
1239 | private requestSort(): void {
1240 | if (!this.sortWorker || this.pendingSortRequest || this.instanceCount === 0 || !this.originalPositions) {
1241 | return;
1242 | }
1243 |
1244 | // Get camera position and target
1245 | const cameraPos = this.camera.getPosition();
1246 | const cameraTarget = this.camera.getTarget();
1247 |
1248 | // Send data to worker for sorting
1249 | this.pendingSortRequest = true;
1250 |
1251 | // Clone positions to send to worker
1252 | const positions = new Float32Array(this.originalPositions);
1253 |
1254 | // Create copies of octree data to send to worker
1255 | let octlevels: Uint8Array | undefined = undefined;
1256 | let octpaths: Uint32Array | undefined = undefined;
1257 |
1258 | // Use the original octree data if available
1259 | if (this.originalOctlevels) {
1260 | octlevels = new Uint8Array(this.originalOctlevels);
1261 | }
1262 |
1263 | if (this.originalOctpaths) {
1264 | octpaths = new Uint32Array(this.originalOctpaths);
1265 | }
1266 |
1267 | // Create a copy of the scene transform matrix
1268 | const transformMatrix = new Float32Array(this.sceneTransformMatrix);
1269 |
1270 |     // Send the data to the worker in a single message, transferring the underlying
1271 |     // buffers so the large arrays are moved rather than copied (the this.original*
1272 |     // fields still hold the authoritative copies for future sort requests)
1273 |     const transferList: Transferable[] = [positions.buffer, transformMatrix.buffer];
1274 |     if (octlevels) {
1275 |       transferList.push(octlevels.buffer);
1276 |     }
1277 |     if (octpaths) {
1278 |       transferList.push(octpaths.buffer);
1279 |     }
1280 |
1281 |     this.sortWorker.postMessage({
1282 |       type: 'sort',
1283 |       positions: positions,
1284 |       cameraPosition: cameraPos,
1285 |       cameraTarget: cameraTarget,
1286 |       sceneTransformMatrix: transformMatrix,
1287 |       octlevels: octlevels,
1288 |       octpaths: octpaths
1289 |     }, transferList);
1290 |
1291 | }
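  // Message protocol implied by initSortWorker() and requestSort() (the worker itself lives
  // in ../workers/SortWorker.ts and is not shown here):
  //   main -> worker: { type: 'sort', positions, cameraPosition, cameraTarget,
  //                     sceneTransformMatrix, octlevels?, octpaths? }
  //   worker -> main: { type: 'ready' } when the worker has initialized, then
  //                   { type: 'sorted', indices, sortTime? } for each sort request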
1292 |
1293 |
1294 | /**
1295 | * Initialize orbital camera controls
1296 | */
1297 | private initOrbitControls(): void {
1298 | // Mouse down event
1299 | this.canvas.addEventListener('mousedown', (event: MouseEvent) => {
1300 | if (event.button === 0) { // Left click
1301 | this.isDragging = true;
1302 | this.isPanning = false;
1303 | } else if (event.button === 2) { // Right click
1304 | this.isPanning = true;
1305 | this.isDragging = false;
1306 | // Prevent context menu on right click
1307 | event.preventDefault();
1308 | }
1309 | this.lastMouseX = event.clientX;
1310 | this.lastMouseY = event.clientY;
1311 | });
1312 |
1313 | // Mouse move event
1314 | this.canvas.addEventListener('mousemove', (event: MouseEvent) => {
1315 | if (!this.isDragging && !this.isPanning) return;
1316 |
1317 | const deltaX = event.clientX - this.lastMouseX;
1318 | const deltaY = event.clientY - this.lastMouseY;
1319 |
1320 | if (this.isDragging) {
1321 | // Orbit the camera
1322 | this.orbit(deltaX, -deltaY);
1323 | } else if (this.isPanning) {
1324 | // Pan the camera
1325 | this.pan(deltaX, deltaY);
1326 | }
1327 |
1328 | this.lastMouseX = event.clientX;
1329 | this.lastMouseY = event.clientY;
1330 | });
1331 |
1332 | // Mouse up event
1333 | window.addEventListener('mouseup', () => {
1334 | this.isDragging = false;
1335 | this.isPanning = false;
1336 | });
1337 |
1338 | // Mouse wheel event for zooming
1339 | this.canvas.addEventListener('wheel', (event: WheelEvent) => {
1340 | event.preventDefault();
1341 | // Zoom in or out
1342 | const zoomAmount = event.deltaY * this.zoomSpeed * 0.01;
1343 | this.zoom(zoomAmount);
1344 | });
1345 |
1346 | // Prevent context menu on right click
1347 | this.canvas.addEventListener('contextmenu', (event) => {
1348 | event.preventDefault();
1349 | });
1350 |
1351 | // Add touch event listeners
1352 | this.canvas.addEventListener('touchstart', (event: TouchEvent) => {
1353 | event.preventDefault();
1354 |
1355 | // Reset touch state
1356 | this.isTouchOrbit = false;
1357 |
1358 | // Store initial touch positions
1359 | for (let i = 0; i < event.touches.length; i++) {
1360 | const touch = event.touches[i];
1361 | this.touchStartPositions[touch.identifier] = {
1362 | x: touch.clientX,
1363 | y: touch.clientY
1364 | };
1365 | }
1366 |
1367 | if (event.touches.length === 1) {
1368 | // Single touch = orbit
1369 | this.isTouchOrbit = true;
1370 | this.lastMouseX = event.touches[0].clientX;
1371 | this.lastMouseY = event.touches[0].clientY;
1372 | } else if (event.touches.length === 2) {
1373 | // Two finger touch - initialize for both zoom and pan
1374 | const touch1 = event.touches[0];
1375 | const touch2 = event.touches[1];
1376 | this.lastTouchDistance = Math.hypot(
1377 | touch2.clientX - touch1.clientX,
1378 | touch2.clientY - touch1.clientY
1379 | );
1380 | this.lastMouseX = (touch1.clientX + touch2.clientX) / 2;
1381 | this.lastMouseY = (touch1.clientY + touch2.clientY) / 2;
1382 | }
1383 | });
1384 |
1385 | this.canvas.addEventListener('touchmove', (event: TouchEvent) => {
1386 | event.preventDefault();
1387 |
1388 | if (event.touches.length === 1 && this.isTouchOrbit) {
1389 | // Single touch orbit
1390 | const touch = event.touches[0];
1391 | const deltaX = touch.clientX - this.lastMouseX;
1392 | const deltaY = touch.clientY - this.lastMouseY;
1393 |
1394 | this.orbit(deltaX, -deltaY);
1395 |
1396 | this.lastMouseX = touch.clientX;
1397 | this.lastMouseY = touch.clientY;
1398 | } else if (event.touches.length === 2) {
1399 | const touch1 = event.touches[0];
1400 | const touch2 = event.touches[1];
1401 |
1402 | // Calculate new touch distance for zoom
1403 | const newTouchDistance = Math.hypot(
1404 | touch2.clientX - touch1.clientX,
1405 | touch2.clientY - touch1.clientY
1406 | );
1407 |
1408 | // Calculate center point of the two touches
1409 | const centerX = (touch1.clientX + touch2.clientX) / 2;
1410 | const centerY = (touch1.clientY + touch2.clientY) / 2;
1411 |
1412 | // Handle both zoom and pan simultaneously
1413 | // Handle zoom
1414 | const zoomDelta = (this.lastTouchDistance - newTouchDistance) * 0.01;
1415 | this.zoom(zoomDelta);
1416 |
1417 | // Handle pan
1418 | const deltaX = centerX - this.lastMouseX;
1419 | const deltaY = centerY - this.lastMouseY;
1420 | this.pan(deltaX, deltaY);
1421 |
1422 | this.lastTouchDistance = newTouchDistance;
1423 | this.lastMouseX = centerX;
1424 | this.lastMouseY = centerY;
1425 | }
1426 | });
1427 |
1428 | this.canvas.addEventListener('touchend', (event: TouchEvent) => {
1429 | event.preventDefault();
1430 |
1431 | // Remove ended touches from tracking
1432 | for (let i = 0; i < event.changedTouches.length; i++) {
1433 | delete this.touchStartPositions[event.changedTouches[i].identifier];
1434 | }
1435 |
1436 | // Reset state if no touches remain
1437 | if (event.touches.length === 0) {
1438 | this.isTouchOrbit = false;
1439 | }
1440 | });
1441 | }
1442 |
1443 | /**
1444 | * Orbit the camera around the target
1445 | */
1446 | private orbit(deltaX: number, deltaY: number): void {
1447 | const pos = this.camera.getPosition();
1448 | const target = this.camera.getTarget();
1449 |
1450 | // Create vec3 objects from camera position and target
1451 | const position = vec3.fromValues(pos[0], pos[1], pos[2]);
1452 | const targetVec = vec3.fromValues(target[0], target[1], target[2]);
1453 |
1454 | // Calculate the camera's current position relative to the target
1455 | const eyeDir = vec3.create();
1456 | vec3.subtract(eyeDir, position, targetVec);
1457 |
1458 | // Calculate distance from target
1459 | const distance = vec3.length(eyeDir);
1460 |
1461 | // Calculate current spherical coordinates
1462 | let theta = Math.atan2(eyeDir[0], eyeDir[2]);
1463 | let phi = Math.acos(eyeDir[1] / distance);
1464 |
1465 | // Update angles based on mouse movement
1466 | theta -= deltaX * this.orbitSpeed;
1467 | phi = Math.max(0.1, Math.min(Math.PI - 0.1, phi + deltaY * this.orbitSpeed));
1468 |
1469 | // Convert back to Cartesian coordinates
1470 | const newRelX = distance * Math.sin(phi) * Math.sin(theta);
1471 | const newRelY = distance * Math.cos(phi);
1472 | const newRelZ = distance * Math.sin(phi) * Math.cos(theta);
1473 |
1474 | // Update camera position
1475 | this.camera.setPosition(
1476 | target[0] + newRelX,
1477 | target[1] + newRelY,
1478 | target[2] + newRelZ
1479 | );
1480 | }
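  // The orbit math above parameterizes the eye offset in Y-up spherical coordinates:
  //   theta = atan2(x, z),  phi = acos(y / r)
  //   x = r * sin(phi) * sin(theta),  y = r * cos(phi),  z = r * sin(phi) * cos(theta)
  // with phi clamped to (0.1, PI - 0.1) so the camera never passes through the poles.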
1481 |
1482 | /**
1483 | * Pan the camera (move target and camera together)
1484 | */
1485 | private pan(deltaX: number, deltaY: number): void {
1486 | const pos = this.camera.getPosition();
1487 | const target = this.camera.getTarget();
1488 |
1489 | // Calculate forward vector (from camera to target)
1490 | const forwardX = target[0] - pos[0];
1491 | const forwardY = target[1] - pos[1];
1492 | const forwardZ = target[2] - pos[2];
1493 | const forwardLength = Math.sqrt(forwardX * forwardX + forwardY * forwardY + forwardZ * forwardZ);
1494 |
1495 | // Normalize forward vector
1496 | const forwardNormX = forwardX / forwardLength;
1497 | const forwardNormY = forwardY / forwardLength;
1498 | const forwardNormZ = forwardZ / forwardLength;
1499 |
1500 |     // Calculate right vector (cross product of forward and up)
1501 |     // using the standard world up vector
1502 |     const upX = 0, upY = 1, upZ = 0;
1503 | const rightX = forwardNormZ * upY - forwardNormY * upZ;
1504 | const rightY = forwardNormX * upZ - forwardNormZ * upX;
1505 | const rightZ = forwardNormY * upX - forwardNormX * upY;
1506 |
1507 |     // Calculate the pan-plane up vector (cross product of right and forward)
1508 | const upNormX = rightY * forwardNormZ - rightZ * forwardNormY;
1509 | const upNormY = rightZ * forwardNormX - rightX * forwardNormZ;
1510 | const upNormZ = rightX * forwardNormY - rightY * forwardNormX;
1511 |
1512 | // Calculate pan amounts based on delta
1513 | const panAmount = this.panSpeed * Math.max(1, forwardLength / 10);
1514 | const panX = -(rightX * -deltaX + upNormX * deltaY) * panAmount;
1515 | const panY = -(rightY * -deltaX + upNormY * deltaY) * panAmount;
1516 | const panZ = -(rightZ * -deltaX + upNormZ * deltaY) * panAmount;
1517 |
1518 | // Move both camera and target
1519 | this.camera.setPosition(pos[0] + panX, pos[1] + panY, pos[2] + panZ);
1520 | this.camera.setTarget(target[0] + panX, target[1] + panY, target[2] + panZ);
1521 | }
1522 |
1523 | /**
1524 | * Zoom the camera by adjusting distance to target
1525 | */
1526 | private zoom(zoomAmount: number): void {
1527 | const pos = this.camera.getPosition();
1528 | const target = this.camera.getTarget();
1529 |
1530 | // Calculate direction vector from target to camera
1531 | const dirX = pos[0] - target[0];
1532 | const dirY = pos[1] - target[1];
1533 | const dirZ = pos[2] - target[2];
1534 |
1535 | // Get current distance
1536 | const distance = Math.sqrt(dirX * dirX + dirY * dirY + dirZ * dirZ);
1537 |
1538 | // Calculate new distance with zoom factor
1539 | const newDistance = Math.max(0.1, distance * (1 + zoomAmount));
1540 |
1541 | // Calculate zoom ratio
1542 | const ratio = newDistance / distance;
1543 |
1544 | // Update camera position
1545 | this.camera.setPosition(
1546 | target[0] + dirX * ratio,
1547 | target[1] + dirY * ratio,
1548 | target[2] + dirZ * ratio
1549 | );
1550 | }
1551 |
1552 | /**
1553 | * Creates a texture from float data
1554 | */
1555 | private createDataTexture(
1556 | data: Float32Array,
1557 | componentsPerElement: number,
1558 | textureType: TextureType
1559 | ): WebGLTexture | null {
1560 | const gl = this.gl!;
1561 |
1562 | // Calculate dimensions that won't exceed hardware limits
1563 | const numElements = data.length / componentsPerElement;
1564 | const maxTextureSize = Math.min(4096, gl.getParameter(gl.MAX_TEXTURE_SIZE));
1565 | const width = Math.min(maxTextureSize, Math.ceil(Math.sqrt(numElements)));
1566 | const height = Math.ceil(numElements / width);
1567 |
1568 |     // Pack the floats into RGBA texels (4 per texel) and record the dimensions for this texture type
1569 | const paddedWidth = Math.ceil(width * componentsPerElement / 4) * 4;
1570 | const finalWidth = paddedWidth / 4;
1571 |
1572 | switch (textureType) {
1573 | case TextureType.MainAttributes:
1574 | this.posScaleWidth = finalWidth;
1575 | this.posScaleHeight = height;
1576 | console.log(`Creating main attributes texture: ${finalWidth}x${height}`);
1577 | break;
1578 | case TextureType.GridValues:
1579 | this.gridValuesWidth = finalWidth;
1580 | this.gridValuesHeight = height;
1581 | console.log(`Creating grid values texture: ${finalWidth}x${height}`);
1582 | break;
1583 | case TextureType.ShCoefficients:
1584 | this.shWidth = finalWidth;
1585 | this.shHeight = height;
1586 | console.log(`Creating SH coefficients texture: ${finalWidth}x${height}`);
1587 | break;
1588 | }
1589 |
1590 | // Create and set up texture
1591 | const texture = gl.createTexture();
1592 | gl.bindTexture(gl.TEXTURE_2D, texture);
1593 |
1594 | // Create padded data array
1595 | const paddedData = new Float32Array(paddedWidth * height);
1596 | paddedData.set(data);
1597 |
1598 | // Upload data to texture
1599 | gl.texImage2D(
1600 | gl.TEXTURE_2D,
1601 | 0,
1602 | gl.RGBA32F,
1603 | finalWidth,
1604 | height,
1605 | 0,
1606 | gl.RGBA,
1607 | gl.FLOAT,
1608 | paddedData
1609 | );
1610 |
1611 | // Set texture parameters
1612 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
1613 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
1614 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
1615 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
1616 |
1617 | return texture;
1618 | }
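  // Worked example of the sizing above with hypothetical numbers: for 1,000,000 voxels and
  // componentsPerElement = 12 (the SH texture), numElements = 1,000,000, width = 1000,
  // height = 1000, paddedWidth = 12,000 floats per row, and finalWidth = 3000, giving a
  // 3000x1000 RGBA32F texture in which each voxel's 12 floats occupy 3 consecutive texels.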
1619 |
1620 | private createOptimizedTextures(): void {
1621 | // First, check if all the data is available
1622 | if (!this.originalPositions || !this.originalSH0Values || !this.originalScales ||
1623 | !this.originalGridValues1 || !this.originalGridValues2 || !this.originalSH1Values) {
1624 | console.error('Missing required data for texture creation');
1625 | return;
1626 | }
1627 |
1628 | // Get the WebGL context
1629 | const gl = this.gl!;
1630 | if (!gl) {
1631 | console.error('WebGL context is null');
1632 | return;
1633 | }
1634 |
1635 | // 1. Create position + scale texture (4 values per instance)
1636 | const posScaleData = new Float32Array(this.instanceCount * 4);
1637 | for (let i = 0; i < this.instanceCount; i++) {
1638 | // xyz position
1639 | posScaleData[i * 4 + 0] = this.originalPositions[i * 3 + 0];
1640 | posScaleData[i * 4 + 1] = this.originalPositions[i * 3 + 1];
1641 | posScaleData[i * 4 + 2] = this.originalPositions[i * 3 + 2];
1642 |
1643 | // scale
1644 | posScaleData[i * 4 + 3] = this.originalScales ?
1645 | this.originalScales[i] : this.baseVoxelSize;
1646 | }
1647 |
1648 | // 2. Create grid values texture (8 values per instance = 2 vec4s)
1649 | const gridValuesData = new Float32Array(this.instanceCount * 8);
1650 | for (let i = 0; i < this.instanceCount; i++) {
1651 | // First 4 corners
1652 | gridValuesData[i * 8 + 0] = this.originalGridValues1[i * 4 + 0];
1653 | gridValuesData[i * 8 + 1] = this.originalGridValues1[i * 4 + 1];
1654 | gridValuesData[i * 8 + 2] = this.originalGridValues1[i * 4 + 2];
1655 | gridValuesData[i * 8 + 3] = this.originalGridValues1[i * 4 + 3];
1656 |
1657 | // Second 4 corners
1658 | gridValuesData[i * 8 + 4] = this.originalGridValues2[i * 4 + 0];
1659 | gridValuesData[i * 8 + 5] = this.originalGridValues2[i * 4 + 1];
1660 | gridValuesData[i * 8 + 6] = this.originalGridValues2[i * 4 + 2];
1661 | gridValuesData[i * 8 + 7] = this.originalGridValues2[i * 4 + 3];
1662 | }
1663 |
1664 | // 3. Create SH0 + SH1 texture (3+9=12 values per instance = 3 vec4s)
1665 | const shData = new Float32Array(this.instanceCount * 12);
1666 | for (let i = 0; i < this.instanceCount; i++) {
1667 | // SH0 (rgb) - first 3 values
1668 | shData[i * 12 + 0] = this.originalSH0Values[i * 3 + 0]; // R
1669 | shData[i * 12 + 1] = this.originalSH0Values[i * 3 + 1]; // G
1670 | shData[i * 12 + 2] = this.originalSH0Values[i * 3 + 2]; // B
1671 |
1672 | // SH1 (all 9 values) - starting from the 4th position
1673 | for (let j = 0; j < 9; j++) {
1674 | if (j < this.originalSH1Values.length / this.instanceCount) {
1675 | shData[i * 12 + 3 + j] = this.originalSH1Values[i * 9 + j];
1676 | } else {
1677 | shData[i * 12 + 3 + j] = 0.0;
1678 | }
1679 | }
1680 | }
1681 |
1682 | // Create textures using the existing createDataTexture method with the correct parameters
1683 | this.posScaleTexture = this.createDataTexture(
1684 | posScaleData,
1685 | 4, // 4 components per element (exactly one vec4)
1686 | TextureType.MainAttributes
1687 | );
1688 |
1689 | this.gridValuesTexture = this.createDataTexture(
1690 | gridValuesData,
1691 | 8, // 8 components per element (2 vec4s)
1692 | TextureType.GridValues
1693 | );
1694 |
1695 | this.shTexture = this.createDataTexture(
1696 | shData,
1697 | 12, // 12 components per element (3 vec4s)
1698 | TextureType.ShCoefficients
1699 | );
1700 | }
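  // Packing summary from the loops above: the position/scale texture stores one vec4 per
  // voxel (xyz, scale), the grid-values texture stores two vec4s per voxel (the 8 corner
  // densities), and the SH texture stores three vec4s per voxel (SH0 rgb in the first three
  // floats, followed by the 9 SH1 coefficients).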
1701 |
1702 | private initFpsCounter(): void {
1703 | // Create container for performance metrics
1704 | const perfContainer = document.createElement('div');
1705 | perfContainer.style.position = 'absolute';
1706 | perfContainer.style.bottom = '10px';
1707 | perfContainer.style.right = '10px';
1708 | perfContainer.style.backgroundColor = 'rgba(0, 0, 0, 0.5)';
1709 | perfContainer.style.padding = '5px';
1710 | perfContainer.style.borderRadius = '3px';
1711 | perfContainer.style.fontFamily = 'monospace';
1712 | perfContainer.style.fontSize = '14px';
1713 | perfContainer.style.color = 'white';
1714 |
1715 | // Create FPS counter element
1716 | this.fpsElement = document.createElement('div');
1717 | this.fpsElement.textContent = 'FPS: --';
1718 | perfContainer.appendChild(this.fpsElement);
1719 |
1720 | // Create sort time element
1721 | this.sortTimeElement = document.createElement('div');
1722 | this.sortTimeElement.textContent = 'Sort: -- ms';
1723 | perfContainer.appendChild(this.sortTimeElement);
1724 |
1725 | // Append container to document
1726 | document.body.appendChild(perfContainer);
1727 | }
1728 |
1729 |   /** Set the 4x4 scene transform matrix applied to the voxel scene */
1730 | public setSceneTransformMatrix(matrix: Float32Array | number[]): void {
1731 | if (matrix.length !== 16) {
1732 | throw new Error('Transform matrix must be a 4x4 matrix with 16 elements');
1733 | }
1734 |
1735 | // Create a new mat4 from the input
1736 | mat4.copy(this.sceneTransformMatrix, matrix as Float32Array);
1737 |
1738 | // Request a resort to update the view with the new transform
1739 | this.requestSort();
1740 | }
1741 |
1742 |   /** Get the inverse of the scene transform matrix, for use in direction calculations */
1743 | private getInverseTransformMatrix(): Float32Array {
1744 | // Create a new matrix to store the inverse
1745 | const inverse = mat4.create();
1746 |
1747 | // Calculate the inverse of the scene transform matrix
1748 | mat4.invert(inverse, this.sceneTransformMatrix);
1749 |
1750 | // Return as Float32Array for WebGL
1751 | return inverse as Float32Array;
1752 | }
1753 |
1754 | /**
1755 | * Rotates the scene around the camera's forward axis (view direction)
1756 | * @param angleInRadians Angle to rotate in radians (positive = clockwise, negative = counterclockwise)
1757 | */
1758 | public rotateSceneAroundViewDirection(angleInRadians: number): void {
1759 | // Get camera position and target to determine view direction
1760 | const pos = this.camera.getPosition();
1761 | const target = this.camera.getTarget();
1762 |
1763 | // Calculate the forward vector
1764 | const forward = vec3.create();
1765 | vec3.subtract(forward,
1766 | vec3.fromValues(target[0], target[1], target[2]),
1767 | vec3.fromValues(pos[0], pos[1], pos[2])
1768 | );
1769 | vec3.normalize(forward, forward);
1770 |
1771 | // Create rotation matrix around the forward vector
1772 | const rotationMatrix = mat4.create();
1773 | mat4.fromRotation(rotationMatrix, angleInRadians, forward);
1774 |
1775 | // Create a new matrix for the result
1776 | const newTransform = mat4.create();
1777 | mat4.multiply(newTransform, rotationMatrix, this.sceneTransformMatrix);
1778 |
1779 | // Update the scene transform
1780 | this.setSceneTransformMatrix(newTransform as Float32Array);
1781 | }
1782 |
1783 | /**
1784 | * Initialize keyboard controls
1785 | */
1786 | private initKeyboardControls(): void {
1787 | // Rotation amount per keypress in radians
1788 | const rotationAmount = 0.1; // About 5.7 degrees
1789 |
1790 | // Movement speed (adjust this value to change movement sensitivity)
1791 | const moveSpeed = 0.1;
1792 |
1793 | // Track which keys are currently pressed
1794 |     const pressedKeys = new Set<string>();
1795 |
1796 | // Add event listener for keydown
1797 | window.addEventListener('keydown', (event) => {
1798 | pressedKeys.add(event.key.toLowerCase());
1799 |
1800 | switch (event.key.toLowerCase()) {
1801 | case 'q':
1802 | this.rotateSceneAroundViewDirection(-rotationAmount);
1803 | break;
1804 | case 'e':
1805 | this.rotateSceneAroundViewDirection(rotationAmount);
1806 | break;
1807 | }
1808 | });
1809 |
1810 | // Add event listener for keyup
1811 | window.addEventListener('keyup', (event) => {
1812 | pressedKeys.delete(event.key.toLowerCase());
1813 | });
1814 |
1815 | // Add continuous movement update
1816 | const updateMovement = () => {
1817 | const pos = this.camera.getPosition();
1818 | const target = this.camera.getTarget();
1819 |
1820 | // Calculate forward vector (from camera to target)
1821 | const forward = vec3.create();
1822 | vec3.subtract(forward,
1823 | vec3.fromValues(target[0], target[1], target[2]),
1824 | vec3.fromValues(pos[0], pos[1], pos[2])
1825 | );
1826 | vec3.normalize(forward, forward);
1827 |
1828 | // Calculate right vector (cross product of forward and up)
1829 | const up = vec3.fromValues(0, 1, 0);
1830 | const right = vec3.create();
1831 | vec3.cross(right, forward, up);
1832 | vec3.normalize(right, right);
1833 |
1834 | let moveX = 0;
1835 | let moveY = 0;
1836 | let moveZ = 0;
1837 |
1838 | // Check for WASD and arrow keys
1839 | if (pressedKeys.has('w') || pressedKeys.has('arrowup')) {
1840 | // Move in forward direction
1841 | moveX += forward[0] * moveSpeed;
1842 | moveY += forward[1] * moveSpeed;
1843 | moveZ += forward[2] * moveSpeed;
1844 | }
1845 | if (pressedKeys.has('s') || pressedKeys.has('arrowdown')) {
1846 | // Move in backward direction
1847 | moveX -= forward[0] * moveSpeed;
1848 | moveY -= forward[1] * moveSpeed;
1849 | moveZ -= forward[2] * moveSpeed;
1850 | }
1851 | if (pressedKeys.has('a') || pressedKeys.has('arrowleft')) {
1852 | // Move left
1853 | moveX -= right[0] * moveSpeed;
1854 | moveY -= right[1] * moveSpeed;
1855 | moveZ -= right[2] * moveSpeed;
1856 | }
1857 | if (pressedKeys.has('d') || pressedKeys.has('arrowright')) {
1858 | // Move right
1859 | moveX += right[0] * moveSpeed;
1860 | moveY += right[1] * moveSpeed;
1861 | moveZ += right[2] * moveSpeed;
1862 | }
1863 |
1864 | // Space and Shift for up/down movement
1865 | if (pressedKeys.has(' ')) {
1866 | moveY += moveSpeed;
1867 | }
1868 | if (pressedKeys.has('shift')) {
1869 | moveY -= moveSpeed;
1870 | }
1871 |
1872 | // Apply movement if any keys were pressed
1873 | if (moveX !== 0 || moveY !== 0 || moveZ !== 0) {
1874 | this.camera.setPosition(pos[0] + moveX, pos[1] + moveY, pos[2] + moveZ);
1875 | this.camera.setTarget(target[0] + moveX, target[1] + moveY, target[2] + moveZ);
1876 | }
1877 |
1878 | // Continue the update loop
1879 | requestAnimationFrame(updateMovement);
1880 | };
1881 |
1882 | // Start the movement update loop
1883 | updateMovement();
1884 | }
1885 |
1886 | private getSampleCountFromURL(): number {
1887 | const urlParams = new URLSearchParams(window.location.search);
1888 |     const sampleCount = parseInt(urlParams.get('samples') || '3', 10);
1889 |     // Fall back to 3 for non-numeric values, and clamp to 1..64 for performance
1890 |     return Number.isNaN(sampleCount) ? 3 : Math.max(1, Math.min(sampleCount, 64));
1891 | }
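  // Example: opening index.html?samples=8 requests 8 samples, while omitting the parameter
  // falls back to the default of 3; out-of-range values are clamped to 1..64 above.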
1892 | }
1893 |
--------------------------------------------------------------------------------
/src/main.ts:
--------------------------------------------------------------------------------
1 | import './style.css';
2 | import { Viewer } from './lib/Viewer';
3 | import { Camera } from './lib/Camera';
4 | import { LoadPLY } from './lib/LoadPLY';
5 |
6 | // Progress bar elements (created in createProgressBar below)
7 | let progressContainer: HTMLDivElement;
8 | let progressBarInner: HTMLDivElement;
9 | let progressText: HTMLDivElement;
10 |
11 | // Global references
12 | let currentViewer: Viewer;
13 | let currentCamera: Camera;
14 | let mainInfoDisplay: HTMLElement;
15 |
16 | // Create a function to initialize the progress bar
17 | function createProgressBar() {
18 | progressContainer = document.createElement('div');
19 | progressContainer.style.position = 'absolute';
20 | progressContainer.style.top = '50%';
21 | progressContainer.style.left = '50%';
22 | progressContainer.style.transform = 'translate(-50%, -50%)';
23 | progressContainer.style.padding = '20px';
24 | progressContainer.style.backgroundColor = 'rgba(0, 0, 0, 0.7)';
25 | progressContainer.style.color = 'white';
26 | progressContainer.style.fontFamily = 'sans-serif';
27 | progressContainer.style.borderRadius = '10px';
28 | progressContainer.style.textAlign = 'center';
29 | progressContainer.style.display = 'none';
30 |
31 | progressText = document.createElement('div');
32 | progressText.style.marginBottom = '10px';
33 | progressText.textContent = 'Loading PLY file...';
34 |
35 | const progressBarOuter = document.createElement('div');
36 | progressBarOuter.style.width = '200px';
37 | progressBarOuter.style.height = '20px';
38 | progressBarOuter.style.backgroundColor = 'rgba(255, 255, 255, 0.2)';
39 | progressBarOuter.style.borderRadius = '10px';
40 | progressBarOuter.style.overflow = 'hidden';
41 |
42 | progressBarInner = document.createElement('div');
43 | progressBarInner.style.width = '0%';
44 | progressBarInner.style.height = '100%';
45 | progressBarInner.style.backgroundColor = '#4CAF50';
46 |
47 | progressBarOuter.appendChild(progressBarInner);
48 | progressContainer.appendChild(progressText);
49 | progressContainer.appendChild(progressBarOuter);
50 | document.body.appendChild(progressContainer);
51 | }
52 |
53 | // Helper function to update progress
54 | function updateProgress(progress: number) {
55 | const percentage = Math.round(progress * 100);
56 | // Remove transition for 100% to ensure it completes
57 | if (percentage === 100) {
58 | progressBarInner.style.transition = 'none';
59 | }
60 |
61 | // Update both the width and text in the same frame
62 | const width = `${percentage}%`;
63 | const text = `Loading PLY file... ${percentage}%`;
64 |
65 | // Ensure both updates happen in the same frame
66 | requestAnimationFrame(() => {
67 | progressBarInner.style.width = width;
68 | progressText.textContent = text;
69 | });
70 | }
71 |
72 | // Reset the progress bar when starting a new load
73 | function resetProgress() {
74 | progressBarInner.style.width = '0%';
75 | progressText.textContent = 'Loading PLY file... 0%';
76 | }
77 |
78 | // Process PLY data after loading
79 | function processPLYData(plyData: any, fileName: string, loadTime: string, fileSize?: number, infoElement?: HTMLElement) {
80 | if (plyData.sceneCenter && plyData.sceneExtent) {
81 | currentViewer.setSceneParameters(plyData.sceneCenter, plyData.sceneExtent);
82 | }
83 |
84 | currentViewer.loadPointCloud(
85 | plyData.vertices,
86 | plyData.sh0Values,
87 | plyData.octlevels,
88 | plyData.octpaths,
89 | plyData.gridValues,
90 | plyData.shRestValues
91 | );
92 |
93 | let octlevelInfo = '';
94 | if (plyData.octlevels && plyData.octlevels.length > 0) {
95 | const minOct = plyData.octlevels.reduce((min: number, val: number) => val < min ? val : min, plyData.octlevels[0]);
96 | const maxOct = plyData.octlevels.reduce((max: number, val: number) => val > max ? val : max, plyData.octlevels[0]);
97 | octlevelInfo = `\nOctlevels: ${minOct} to ${maxOct}`;
98 | }
99 |
100 | const sizeInfo = fileSize ? `\nSize: ${(fileSize / (1024 * 1024)).toFixed(2)} MB` : '';
101 | const infoText = `Loaded: ${fileName}
102 | Voxels: ${plyData.vertexCount.toLocaleString()}${sizeInfo}
103 | Load time: ${loadTime}s${octlevelInfo}`;
104 |
105 | // Update the main info display
106 | mainInfoDisplay.textContent = infoText;
107 |
108 | // Also update the upload info element if provided
109 | if (infoElement && infoElement !== mainInfoDisplay) {
110 | infoElement.textContent = infoText;
111 | }
112 |
113 | currentViewer.setSceneTransformMatrix([0.9964059591293335,0.07686585187911987,0.03559183329343796,0,0.06180455908179283,-0.9470552206039429,0.3150659501552582,0,0.05792524665594101,-0.3117338716983795,-0.9484022259712219,0,0,0,0,1]);
114 |
115 | if (plyData.sceneCenter && plyData.sceneExtent) {
116 | currentCamera.setPosition(-5.3627543449401855,-0.40146273374557495,3.546692371368408);
117 | currentCamera.setTarget(
118 | plyData.sceneCenter[0],
119 | plyData.sceneCenter[1],
120 | plyData.sceneCenter[2]
121 | );
122 | }
123 | }
124 |
125 | // Load PLY from URL
126 | async function loadPLYFromUrl(url: string, infoElement: HTMLElement) {
127 | try {
128 | progressContainer.style.display = 'block';
129 | resetProgress();
130 |
131 | const startTime = performance.now();
132 | const plyData = await LoadPLY.loadFromUrl(url, (progress) => {
133 | updateProgress(progress);
134 | });
135 | const loadTime = ((performance.now() - startTime) / 1000).toFixed(2);
136 |
137 | progressContainer.style.display = 'none';
138 |
139 | const fileName = url.split('/').pop() || 'remote-ply';
140 | processPLYData(plyData, fileName, loadTime, undefined, infoElement);
141 |
142 | return true;
143 | } catch (error: any) {
144 | progressContainer.style.display = 'none';
145 | console.error('Error loading PLY from URL:', error);
146 | infoElement.textContent = `Error loading PLY: ${error.message}`;
147 |
148 | // Update the main info display too if different from infoElement
149 | if (infoElement !== mainInfoDisplay) {
150 | mainInfoDisplay.textContent = `Error loading PLY: ${error.message}`;
151 | }
152 |
153 | return false;
154 | }
155 | }
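// Example usage (hypothetical URL): loadPLYFromUrl('https://example.com/scene.ply', mainInfoDisplay)
// shows the progress overlay while downloading, then hands the parsed data to processPLYData().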
156 |
157 | // Load PLY from File
158 | async function loadPLYFromFile(file: File, infoElement: HTMLElement) {
159 | try {
160 | const startTime = performance.now();
161 | const plyData = await LoadPLY.loadFromFile(file);
162 | const loadTime = ((performance.now() - startTime) / 1000).toFixed(2);
163 |
164 | processPLYData(plyData, file.name, loadTime, file.size, infoElement);
165 |
166 | return true;
167 | } catch (error: any) {
168 | console.error('Error loading PLY from file:', error);
169 | infoElement.textContent = `Error loading PLY: ${error.message}`;
170 |
171 | // Update the main info display too if different from infoElement
172 | if (infoElement !== mainInfoDisplay) {
173 | mainInfoDisplay.textContent = `Error loading PLY: ${error.message}`;
174 | }
175 |
176 | return false;
177 | }
178 | }
179 |
180 | /**
181 | * Add some basic UI controls for the demo
182 | */
183 | function addControls() {
184 | // Create a simple control panel
185 | const controls = document.createElement('div');
186 | controls.style.position = 'absolute';
187 | controls.style.top = '10px';
188 | controls.style.left = '10px';
189 | controls.style.padding = '10px';
190 | controls.style.backgroundColor = 'rgba(0, 0, 0, 0.5)';
191 | controls.style.color = 'white';
192 | controls.style.fontFamily = 'sans-serif';
193 | controls.style.borderRadius = '5px';
194 | controls.style.fontSize = '14px';
195 |
196 | // Check if device is mobile
197 | const isMobile = window.matchMedia('(max-width: 768px)').matches;
198 |
199 | // Different instructions based on device type
200 | const controlInstructions = isMobile ? `
201 |