├── LICENSE ├── README.md ├── compress.ipynb ├── index.html ├── main.js └── styles.css /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Kevin Kwok 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # SwinGS viewer 2 | WebGL viewer for SwinGS. Originally developed by [antimatter15](https://github.com/antimatter15/splat). Commit history removed for Double-Blind review process. 
3 | 4 | dataset by: 5 | ``` 6 | @article{isik2023humanrf, 7 | title = {HumanRF: High-Fidelity Neural Radiance Fields for Humans in Motion}, 8 | author = {I\c{s}{\i}k, Mustafa and Rünz, Martin and Georgopoulos, Markos and Khakhulin, Taras 9 | and Starck, Jonathan and Agapito, Lourdes and Nießner, Matthias}, 10 | journal = {ACM Transactions on Graphics (TOG)}, 11 | volume = {42}, 12 | number = {4}, 13 | pages = {1--12}, 14 | year = {2023}, 15 | publisher = {ACM New York, NY, USA}, 16 | doi = {10.1145/3592415}, 17 | url = {https://doi.org/10.1145/3592415}, 18 | } 19 | @inproceedings{li2022neural, 20 | title={Neural 3d video synthesis from multi-view video}, 21 | author={Li, Tianye and Slavcheva, Mira and Zollhoefer, Michael and Green, Simon and Lassner, Christoph and Kim, Changil and Schmidt, Tanner and Lovegrove, Steven and Goesele, Michael and Newcombe, Richard and others}, 22 | booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition}, 23 | pages={5521--5531}, 24 | year={2022} 25 | } 26 | ``` 27 | 28 | # splat 29 | 30 | This is a WebGL implementation of a real-time renderer for [3D Gaussian Splatting for Real-Time Radiance Field Rendering](https://repo-sam.inria.fr/fungraph/3d-gaussian-splatting/), a recently developed technique for taking a set of pictures and generating a photorealistic navigable 3D scene out of it. As it is essentially an extension of rendering point clouds, rendering scenes generated with this technique can be done very efficiently on ordinary graphics hardware- unlike prior comparable techniques such as NeRFs. 31 | 32 | You can [try it out here](https://antimatter15.com/splat/). 
33 | 34 | 35 | 36 | https://github.com/antimatter15/splat/assets/30054/878d5d34-e0a7-4336-85df-111ff22daf4b 37 | 38 | 39 | 40 | ## controls 41 | 42 | movement (arrow keys) 43 | 44 | - left/right arrow keys to strafe side to side 45 | - up/down arrow keys to move forward/back 46 | - `space` to jump 47 | 48 | camera angle (wasd) 49 | 50 | - `a`/`d` to turn camera left/right 51 | - `w`/`s` to tilt camera up/down 52 | - `q`/`e` to roll camera counterclockwise/clockwise 53 | - `i`/`k` and `j`/`l` to orbit 54 | 55 | trackpad 56 | - scroll up/down to orbit down 57 | - scroll left/right to orbit left/right 58 | - pinch to move forward/back 59 | - ctrl key + scroll up/down to move forward/back 60 | - shift + scroll up/down to move up/down 61 | - shift + scroll left/right to strafe side to side 62 | 63 | mouse 64 | - click and drag to orbit 65 | - right click (or ctrl/cmd key) and drag up/down to move forward/back 66 | - right click (or ctrl/cmd key) and drag left/right to strafe side to side 67 | 68 | touch (mobile) 69 | - one finger to orbit 70 | - two finger pinch to move forward/back 71 | - two finger rotate to rotate camera clockwise/counterclockwise 72 | - two finger pan to move side-to-side and up-down 73 | 74 | other 75 | - press 0-9 to switch to one of the pre-loaded camera views 76 | - press '-' or '+'key to cycle loaded cameras 77 | - press `p` to resume default animation 78 | - drag and drop .ply file to convert to .splat 79 | - drag and drop cameras.json to load cameras 80 | 81 | ## other features 82 | 83 | - press `v` to save the current view coordinates to the url 84 | - open custom `.splat` files by adding a `url` param to a CORS-enabled URL 85 | - drag and drop a `.ply` file which has been processed with the 3d gaussian splatting software onto the page and it will automatically convert the file to the `.splat` format 86 | 87 | ## examples 88 | 89 | note that as long as your `.splat` file is hosted in a CORS-accessible way, you can open it with the `url` 
field. 90 | 91 | - https://antimatter15.com/splat/?url=plush.splat#[0.95,0.19,-0.23,0,-0.16,0.98,0.12,0,0.24,-0.08,0.97,0,-0.33,-1.52,1.53,1] 92 | - https://antimatter15.com/splat/?url=truck.splat 93 | - https://antimatter15.com/splat/?url=garden.splat 94 | - https://antimatter15.com/splat/?url=treehill.splat 95 | - https://antimatter15.com/splat/?url=stump.splat#[-0.86,-0.23,0.45,0,0.27,0.54,0.8,0,-0.43,0.81,-0.4,0,0.92,-2.02,4.1,1] 96 | - https://antimatter15.com/splat/?url=bicycle.splat 97 | - https://antimatter15.com/splat/?url=https://media.reshot.ai/models/nike_next/model.splat#[0.95,0.16,-0.26,0,-0.16,0.99,0.01,0,0.26,0.03,0.97,0,0.01,-1.96,2.82,1] 98 | 99 | ## notes 100 | 101 | - written in javascript with webgl 1.0 with no external dependencies, you can just hit view source and read the unminified code. webgl 2.0 doesn't really add any new features that aren't possible with webgl 1.0 with extensions. webgpu is apparently nice but still not very well supported outside of chromium. 102 | - we sort splats by a combination of size and opacity and support progressive loading so you can see and interact with the model without having all the splats loaded. 103 | - does not currently support view dependent shading effects with spherical harmonics, this is primarily done to reduce the file size of the splat format so it can be loaded easily into web browsers. For third-order spherical harmonics we need 48 coefficients which is nearly 200 bytes per splat! 104 | - splat sorting is done asynchronously on the cpu in a webworker. it might be interesting to investigate performing the sort on the gpu with an implementation of bitonic or radix sorting, but it seems plausible to me that it'd be better to let the gpu focus rather than splitting its time between rendering and sorting. 
105 | - earlier experiments used [stochastic transparency](https://research.nvidia.com/publication/2011-08_stochastic-transparency) which looked grainy, and [weighted blended order independent transparency](https://learnopengl.com/Guest-Articles/2020/OIT/Weighted-Blended) which didn't seem to work. 106 | 107 | 108 | ## words 109 | 110 | gaussian splats are very efficient to render because they work in a way which is very similar to point clouds— in fact they use the same file format (`.ply`) and open them up with the same tools (though to see colors in meshlab, you should convert the spherical harmonic zeroth order terms into rgb colors first). you can think of them as essentially generalizing individual points into translucent 3D blobs (the eponymous splats). 111 | 112 | that said, even though the inference process is very similar to a traditional 3d rendering, the reference implementation doesn't leverage any of that because for training it needs the entire render pipeline to be differentiable (i.e. you need to be able to run the rendering process "backwards" to figure out how to wiggle the location, size and color of each blob to make a particular camera's view incrementally closer to that of a reference photograph). whether or not this gradient based optimization counts as neural is i guess a somewhat debated question online. 113 | 114 | since this implementation is just a viewer we don't need to do any differentiable rendering. our general approach is to take each splat and feed it into a vertex shader. we take the xyz position of the splat and project it to the screen coordinates with a projection matrix, and we take the scale and quaternion rotation parameters of the splat and figure out the projected eigenvectors so we can draw a bounding quadrilateral. these quadrilaterals are then individually shaded with a fragment shader. 115 | 116 | the fragment shader is a program which essentially runs for each pixel on each fragment (i.e. 
quadrilateral that was generated by the vertex shader) and outputs a color. It takes its position, calculates the distance from the center of the splat and uses it to determine the opacity channel of the splat's color. right now this implementation only stores 3 (red, blue, green) channels of color for a splat, but the full implementation uses essentially 48 channels to encode arbitrary view-dependent lighting. 117 | 118 | the most annoying problem comes with how these fragments come together and create an actual image. it turns out that rendering translucent objects in general is a somewhat unsolved problem in computer graphics which ultimately stems from the fact that compositing translucent things is not commutative, i.e. a stack of translucent objects looks different based on the order in which they are drawn. 119 | 120 | one easy solution is called stochastic transparency, where basically you pretend that you actually have no translucency at all- objects are just different levels of randomized swiss cheese. the graphics card keeps track of a z-buffer and discards all the pixels which are not the top-most, and we generate a random number at each pixel and then discard it 90% of the time if it is 90% transparent. this works but it gives everything a noisy, dithered look. 121 | 122 | another easy approach is to use the painter's algorithm, which basically involves pre-sorting all your objects before rendering them. doing this on the CPU can be rather expensive, with the ~1M splats on the demo page, it takes about 150ms to sort through them all on my computer. 123 | 124 | the approach that the reference implementation, and most other implementations of gaussian splatting take is to do the sorting on the GPU. one common algorithm for doing sorts on the gpu is called the [bitonic sort](https://en.wikipedia.org/wiki/Bitonic_sorter) as it is very parallelizable. 
a normal cpu comparison sorting algorithm like quicksort/mergesort can run in O(n log n) time, the bitonic sort is a bit slower at O(n log^2 n), but the n factor can be done in parallel, so the overall latency is O(log^2 n) which is faster than O(n log n). the reference implementation uses a radix sort based on [onesweep](https://arxiv.org/abs/2206.01784), which can happen in O(n) time because you can leverage the fact that you're sorting numbers to get more information at each cycle than a single comparison. 125 | 126 | chrome has recently shipped webgpu, which is a new very clean api that apparently makes it possible to write things like compute shaders similar to CUDA that work in the browser. however, webgpu is not yet supported by firefox and safari. this means that if we want to build something that is broadly usable, we have to stick with the older webgl (and maybe even webgl 1.0, since there are reports that webgl 2.0 is buggy or slow on safari with the new M1 chips). It's still probably possible to implement a bitonic sort on top of webgl, but it would take about 200 iterations to sort 1M numbers, so it might still be too slow. 127 | 128 | another approach to rendering translucent objects is called depth peeling, where you enable the z-buffer and only render the translucent objects that are on the top, and then feed that z-buffer back into the render process to "peel" off the top and render only the layer beneath, before stacking those translucent layers together to a final image. I didn't manage to get this to work, but it's likely that it would be slow anyway. 129 | 130 | another interesting approach is something called [weighted blended order independent transparency](https://learnopengl.com/Guest-Articles/2020/OIT/Weighted-Blended) which adds an additional number saved to a different render buffer which is used as a weight for an approximation of translucency which is commutative. 
it didn't work in my experiments, which is somewhat expected in situations where you have certain splats with high opacity on top of each other. 131 | 132 | the final approach that i settled on is to run the sorting process on the CPU in a webworker, which happens a bit more slowly (at roughly 4fps whereas the main render is at 60fps), but that's fine because most of the time when you are moving around the z order doesn't actually change very fast (this results in momentary artifacts when jumping directly between different camera orientations on opposite sides). 133 | 134 | 135 | ## acknowledgements 136 | 137 | Thanks to Otavio Good for discussions on different approaches for [order independent transparency](https://en.wikipedia.org/wiki/Order-independent_transparency), Mikola Lysenko for [regl](http://regl.party/) and also for helpful advice about webgl and webgpu, Ethan Weber for discussions about how NeRFs work and letting me know that sorting is hard, Gray Crawford for identifying issues with color rendering and camera controls, Anna Brewer for help with implementing animations, and GPT-4 for writing all the WebGL boilerplate. 
138 | -------------------------------------------------------------------------------- /compress.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 6, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import struct\n", 10 | "import time\n", 11 | "import json\n", 12 | "import os\n", 13 | "import math\n", 14 | "\n", 15 | "SHS_DEGREE = 0\n", 16 | "ENDIAN = \"!\"\n", 17 | "\n", 18 | "READ_FILE = \"cs.dat\"\n", 19 | "\n", 20 | "READ_FORMAT = {\n", 21 | " 'start_frame': 'I',\n", 22 | " 'end_frame': 'I',\n", 23 | " 'xyz': 'fff',\n", 24 | " 'f_dc': 'fff',\n", 25 | " 'f_rest': 'fff' * ((SHS_DEGREE + 1) ** 2 -1),\n", 26 | " 'scaling': 'fff',\n", 27 | " 'rotation': 'ffff',\n", 28 | " 'opacity': 'f'\n", 29 | "}\n", 30 | "\n", 31 | "WRITE_FORMAT = {\n", 32 | " 'start_frame': 'H',\n", 33 | " 'end_frame': 'H',\n", 34 | " 'xyz': 'fff',\n", 35 | " 'color': 'BBB',\n", 36 | " 'opacity': 'B',\n", 37 | " 'scaling': 'fff',\n", 38 | " 'rotation': 'BBBB'\n", 39 | "}\n" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": 7, 45 | "metadata": {}, 46 | "outputs": [], 47 | "source": [ 48 | "\n", 49 | "def stream_dump(params, filename, format):\n", 50 | " '''\n", 51 | " name: fromF, toF, xyz, feature, s, r, o\n", 52 | " data: I , I, fff, fffx4, fff, ffff, f\n", 53 | " '''\n", 54 | " FORMAT = format\n", 55 | " N = len(params)\n", 56 | "\n", 57 | " fmt = f\"{ENDIAN}{''.join(FORMAT.values())}\"\n", 58 | " print(f\"Format: {fmt}, total bytes: {struct.calcsize(fmt)}\")\n", 59 | "\n", 60 | " dir = os.path.dirname(filename)\n", 61 | " with open(os.path.join(dir, 'format.json'), 'w') as f:\n", 62 | " json.dump(FORMAT, f, indent=4)\n", 63 | "\n", 64 | " time_start = time.time()\n", 65 | " values = []\n", 66 | " for v in params:\n", 67 | " values.append(struct.pack(fmt, *v))\n", 68 | " with open(filename, 'ab') as f:\n", 69 | " f.writelines(values)\n", 70 | " 
time_end = time.time()\n", 71 | "\n", 72 | " print(f\"Dumped {N} gaussians in {time_end - time_start} seconds\")\n", 73 | "\n", 74 | "def stream_load(filename, format):\n", 75 | " FORMAT = format\n", 76 | " fmt = f\"{ENDIAN}{''.join(FORMAT.values())}\"\n", 77 | " print(f\"Format: {fmt} total bytes: {struct.calcsize(fmt)}\")\n", 78 | " with open(filename, 'rb') as f:\n", 79 | " data = f.read()\n", 80 | " N = len(data) // struct.calcsize(fmt)\n", 81 | " print(f\"Loading {N} gaussians\")\n", 82 | " unpacked = []\n", 83 | " for i in range(N):\n", 84 | " unpacked.append(\n", 85 | " struct.unpack(fmt,\n", 86 | " data[i * struct.calcsize(fmt): (i+1) * struct.calcsize(fmt)]))\n", 87 | " return unpacked" 88 | ] 89 | }, 90 | { 91 | "cell_type": "code", 92 | "execution_count": 8, 93 | "metadata": {}, 94 | "outputs": [], 95 | "source": [ 96 | "def rgba(f_dc, opacity):\n", 97 | " clamp = lambda x: min(255, max(0, x))\n", 98 | " SH_C0 = 0.28209479177387814\n", 99 | " color = [0, 0, 0]\n", 100 | " color[0] = clamp(int((0.5 + SH_C0 * f_dc[0]) * 255))\n", 101 | " color[1] = clamp(int((0.5 + SH_C0 * f_dc[1]) * 255))\n", 102 | " color[2] = clamp(int((0.5 + SH_C0 * f_dc[2]) * 255))\n", 103 | " alpha = int((1 / (1 + math.exp(-opacity))) * 255)\n", 104 | " return (*color, alpha)\n", 105 | "def rot(rotation):\n", 106 | " qlen = math.sqrt(\n", 107 | " rotation[0] ** 2 +\n", 108 | " rotation[1] ** 2 +\n", 109 | " rotation[2] ** 2 +\n", 110 | " rotation[3] ** 2,\n", 111 | " )\n", 112 | " rot = [0, 0, 0, 0]\n", 113 | " rot[0] = int((rotation[0] / qlen) * 128 + 128)\n", 114 | " rot[1] = int((rotation[1] / qlen) * 128 + 128)\n", 115 | " rot[2] = int((rotation[2] / qlen) * 128 + 128)\n", 116 | " rot[3] = int((rotation[3] / qlen) * 128 + 128)\n", 117 | " return rot\n", 118 | "\n", 119 | "def scale(scaling):\n", 120 | " scales = [0, 0, 0]\n", 121 | " scales[0] = math.exp(scaling[0])\n", 122 | " scales[1] = math.exp(scaling[1])\n", 123 | " scales[2] = math.exp(scaling[2])\n", 124 | " return 
scales" 125 | ] 126 | }, 127 | { 128 | "cell_type": "code", 129 | "execution_count": null, 130 | "metadata": {}, 131 | "outputs": [], 132 | "source": [ 133 | "\n", 134 | "ret = stream_load(READ_FILE, READ_FORMAT)\n", 135 | "print(ret[0])\n", 136 | "\n", 137 | "shrank = []\n", 138 | "for r in ret:\n", 139 | " start_frame = r[0]\n", 140 | " end_frame = r[1]\n", 141 | " xyz = r[2:5]\n", 142 | " f_dc = r[5:8]\n", 143 | " shs_length = 3 * ((SHS_DEGREE + 1) ** 2 - 1)\n", 144 | " f_rest = r[8:8 + shs_length]\n", 145 | " scaling = r[8 + shs_length: 11 + shs_length]\n", 146 | " rotation = r[11 + shs_length: 15 + shs_length]\n", 147 | " opacity = r[-1]\n", 148 | " shrank.append(\n", 149 | " (\n", 150 | " start_frame,\n", 151 | " end_frame,\n", 152 | " *xyz,\n", 153 | " *(rgba(f_dc, opacity)),\n", 154 | " *(scale(scaling)),\n", 155 | " *(rot(rotation)),\n", 156 | " )\n", 157 | " )\n", 158 | "\n" 159 | ] 160 | }, 161 | { 162 | "cell_type": "code", 163 | "execution_count": null, 164 | "metadata": {}, 165 | "outputs": [], 166 | "source": [ 167 | "\n", 168 | "\n", 169 | "new_name = f\"{READ_FILE[:-4]}_36B.dat\"\n", 170 | "stream_dump(shrank, new_name, WRITE_FORMAT)\n", 171 | "# test = stream_load(new_name, WRITE_FORMAT)\n", 172 | "# print(test[0])" 173 | ] 174 | } 175 | ], 176 | "metadata": { 177 | "kernelspec": { 178 | "display_name": "Python 3", 179 | "language": "python", 180 | "name": "python3" 181 | }, 182 | "language_info": { 183 | "codemirror_mode": { 184 | "name": "ipython", 185 | "version": 3 186 | }, 187 | "file_extension": ".py", 188 | "mimetype": "text/x-python", 189 | "name": "python", 190 | "nbconvert_exporter": "python", 191 | "pygments_lexer": "ipython3", 192 | "version": "3.8.18" 193 | } 194 | }, 195 | "nbformat": 4, 196 | "nbformat_minor": 2 197 | } 198 | -------------------------------------------------------------------------------- /index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | WebGL SwinGS Viewer 
5 | 6 | 10 | 11 | 15 | 16 | 17 | 18 | 21 |
22 |

WebGL SwinGS Viewer

23 |

24 | 25 | By SwinGS group, code at SwinGSplat 26 |
Modified from 27 | antimatter15/splat. 28 |
Dataset from ActorsHQ. 29 |
Minimum 22MB/s bandwidth expected. 30 |
31 |

32 | 33 |
34 | Use mouse or arrow keys to navigate. 35 | 36 |
movement (arrow keys) 37 | - left/right arrow keys to strafe side to side 38 | - up/down arrow keys to move forward/back 39 | - space to jump 40 | 41 | camera angle (wasd) 42 | - a/d to turn camera left/right 43 | - w/s to tilt camera up/down 44 | - q/e to roll camera counterclockwise/clockwise 45 | - i/k and j/l to orbit 46 | 47 | trackpad 48 | - scroll up/down/left/right to orbit 49 | - pinch to move forward/back 50 | - ctrl key + scroll to move forward/back 51 | - shift + scroll to move up/down or strafe 52 | 53 | mouse 54 | - click and drag to orbit 55 | - right click (or ctrl/cmd key) and drag up/down to move 56 | 57 | touch (mobile) 58 | - one finger to orbit 59 | - two finger pinch to move forward/back 60 | - two finger rotate to rotate camera clockwise/counterclockwise 61 | - two finger pan to move side-to-side and up-down 62 | 63 | gamepad 64 | - if you have a game controller connected it should work 65 | 66 | other 67 | - press 0-9 to switch to one of the pre-loaded camera views 68 | - press '-' or '+'key to cycle loaded cameras 69 |
70 | 71 |
72 | 73 |
74 | 75 |
76 | 77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 | 93 | 94 |
95 |
96 |
97 | 103 |

104 |
105 | ⏸️ Pause
106 | 🔄 Reset Cam
107 |
108 | frame: 0
109 | stream fps: 0
110 | render fps: 111 |
112 |
113 | 114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 | 122 | 123 | 124 | 125 | 126 | -------------------------------------------------------------------------------- /main.js: -------------------------------------------------------------------------------- 1 | var global_log_1 = {"parse":[], "vertex":[]}; 2 | // parse_raw_bytes, gs_to_vertex, generate_texture, run_sort 3 | let cameras = [ 4 | { 5 | "position": [ 6 | 0.007232260564530732, 7 | 0.809987791156739, 8 | 5.33903731414556 9 | ], 10 | "rotation": [ 11 | [ 12 | 0.9999727319388764, 13 | -0.0007335337134094191, 14 | 0.007348285990037004 15 | ], 16 | [ 17 | -0.0008200432778977561, 18 | -0.9999303163911514, 19 | 0.011776667224411872 20 | ], 21 | [ 22 | 0.007339135352509657, 23 | -0.011782372010060363, 24 | -0.9999036517595554 25 | ] 26 | ], 27 | "fy": 1834.5526065144063, 28 | "fx": 1834.8835144895522, 29 | } 30 | ]; 31 | 32 | 33 | 34 | let camera = cameras[0]; 35 | 36 | function getProjectionMatrix(fx, fy, width, height) { 37 | const znear = 0.2; 38 | const zfar = 200; 39 | return [ 40 | [(2 * fx) / width, 0, 0, 0], 41 | [0, -(2 * fy) / height, 0, 0], 42 | [0, 0, zfar / (zfar - znear), 1], 43 | [0, 0, -(zfar * znear) / (zfar - znear), 0], 44 | ].flat(); 45 | } 46 | 47 | function getViewMatrix(camera) { 48 | const R = camera.rotation.flat(); 49 | const t = [ 50 | camera.position[0], 51 | camera.position[1], 52 | camera.position[2]]; 53 | // const camToWorld = [ 54 | // [R[0], R[1], R[2], 0], 55 | // [R[3], R[4], R[5], 0], 56 | // [R[6], R[7], R[8], 0], 57 | // [ 58 | // -t[0] * R[0] - t[1] * R[3] - t[2] * R[6], 59 | // -t[0] * R[1] - t[1] * R[4] - t[2] * R[7], 60 | // -t[0] * R[2] - t[1] * R[5] - t[2] * R[8], 61 | // 1, 62 | // ], 63 | // ].flat(); 64 | 65 | const camToWorld = [ 66 | [R[0], R[1], R[2], 0], 67 | [R[3], R[4], R[5], 0], 68 | [R[6], R[7], R[8], 0], 69 | [t[0], t[1], t[2], 1], 70 | ].flat(); 71 | return camToWorld; 72 | } 73 | function getViewMatrixDefault(camera) { 74 | return defaultViewMatrix; 75 | } 76 | 77 | // function translate4(a, x, y, z) { 
78 | // return [ 79 | // ...a.slice(0, 12), 80 | // a[0] * x + a[4] * y + a[8] * z + a[12], 81 | // a[1] * x + a[5] * y + a[9] * z + a[13], 82 | // a[2] * x + a[6] * y + a[10] * z + a[14], 83 | // a[3] * x + a[7] * y + a[11] * z + a[15], 84 | // ]; 85 | // } 86 | 87 | function multiply4(a, b) { 88 | return [ 89 | b[0] * a[0] + b[1] * a[4] + b[2] * a[8] + b[3] * a[12], 90 | b[0] * a[1] + b[1] * a[5] + b[2] * a[9] + b[3] * a[13], 91 | b[0] * a[2] + b[1] * a[6] + b[2] * a[10] + b[3] * a[14], 92 | b[0] * a[3] + b[1] * a[7] + b[2] * a[11] + b[3] * a[15], 93 | b[4] * a[0] + b[5] * a[4] + b[6] * a[8] + b[7] * a[12], 94 | b[4] * a[1] + b[5] * a[5] + b[6] * a[9] + b[7] * a[13], 95 | b[4] * a[2] + b[5] * a[6] + b[6] * a[10] + b[7] * a[14], 96 | b[4] * a[3] + b[5] * a[7] + b[6] * a[11] + b[7] * a[15], 97 | b[8] * a[0] + b[9] * a[4] + b[10] * a[8] + b[11] * a[12], 98 | b[8] * a[1] + b[9] * a[5] + b[10] * a[9] + b[11] * a[13], 99 | b[8] * a[2] + b[9] * a[6] + b[10] * a[10] + b[11] * a[14], 100 | b[8] * a[3] + b[9] * a[7] + b[10] * a[11] + b[11] * a[15], 101 | b[12] * a[0] + b[13] * a[4] + b[14] * a[8] + b[15] * a[12], 102 | b[12] * a[1] + b[13] * a[5] + b[14] * a[9] + b[15] * a[13], 103 | b[12] * a[2] + b[13] * a[6] + b[14] * a[10] + b[15] * a[14], 104 | b[12] * a[3] + b[13] * a[7] + b[14] * a[11] + b[15] * a[15], 105 | ]; 106 | } 107 | 108 | function invert4(a) { 109 | let b00 = a[0] * a[5] - a[1] * a[4]; 110 | let b01 = a[0] * a[6] - a[2] * a[4]; 111 | let b02 = a[0] * a[7] - a[3] * a[4]; 112 | let b03 = a[1] * a[6] - a[2] * a[5]; 113 | let b04 = a[1] * a[7] - a[3] * a[5]; 114 | let b05 = a[2] * a[7] - a[3] * a[6]; 115 | let b06 = a[8] * a[13] - a[9] * a[12]; 116 | let b07 = a[8] * a[14] - a[10] * a[12]; 117 | let b08 = a[8] * a[15] - a[11] * a[12]; 118 | let b09 = a[9] * a[14] - a[10] * a[13]; 119 | let b10 = a[9] * a[15] - a[11] * a[13]; 120 | let b11 = a[10] * a[15] - a[11] * a[14]; 121 | let det = 122 | b00 * b11 - b01 * b10 + b02 * b09 + b03 * b08 - b04 * b07 + b05 * 
b06; 123 | if (!det) return null; 124 | return [ 125 | (a[5] * b11 - a[6] * b10 + a[7] * b09) / det, 126 | (a[2] * b10 - a[1] * b11 - a[3] * b09) / det, 127 | (a[13] * b05 - a[14] * b04 + a[15] * b03) / det, 128 | (a[10] * b04 - a[9] * b05 - a[11] * b03) / det, 129 | (a[6] * b08 - a[4] * b11 - a[7] * b07) / det, 130 | (a[0] * b11 - a[2] * b08 + a[3] * b07) / det, 131 | (a[14] * b02 - a[12] * b05 - a[15] * b01) / det, 132 | (a[8] * b05 - a[10] * b02 + a[11] * b01) / det, 133 | (a[4] * b10 - a[5] * b08 + a[7] * b06) / det, 134 | (a[1] * b08 - a[0] * b10 - a[3] * b06) / det, 135 | (a[12] * b04 - a[13] * b02 + a[15] * b00) / det, 136 | (a[9] * b02 - a[8] * b04 - a[11] * b00) / det, 137 | (a[5] * b07 - a[4] * b09 - a[6] * b06) / det, 138 | (a[0] * b09 - a[1] * b07 + a[2] * b06) / det, 139 | (a[13] * b01 - a[12] * b03 - a[14] * b00) / det, 140 | (a[8] * b03 - a[9] * b01 + a[10] * b00) / det, 141 | ]; 142 | } 143 | 144 | function rotate4(a, rad, x, y, z) { 145 | let len = Math.hypot(x, y, z); 146 | x /= len; 147 | y /= len; 148 | z /= len; 149 | let s = Math.sin(rad); 150 | let c = Math.cos(rad); 151 | let t = 1 - c; 152 | let b00 = x * x * t + c; 153 | let b01 = y * x * t + z * s; 154 | let b02 = z * x * t - y * s; 155 | let b10 = x * y * t - z * s; 156 | let b11 = y * y * t + c; 157 | let b12 = z * y * t + x * s; 158 | let b20 = x * z * t + y * s; 159 | let b21 = y * z * t - x * s; 160 | let b22 = z * z * t + c; 161 | return [ 162 | a[0] * b00 + a[4] * b01 + a[8] * b02, 163 | a[1] * b00 + a[5] * b01 + a[9] * b02, 164 | a[2] * b00 + a[6] * b01 + a[10] * b02, 165 | a[3] * b00 + a[7] * b01 + a[11] * b02, 166 | a[0] * b10 + a[4] * b11 + a[8] * b12, 167 | a[1] * b10 + a[5] * b11 + a[9] * b12, 168 | a[2] * b10 + a[6] * b11 + a[10] * b12, 169 | a[3] * b10 + a[7] * b11 + a[11] * b12, 170 | a[0] * b20 + a[4] * b21 + a[8] * b22, 171 | a[1] * b20 + a[5] * b21 + a[9] * b22, 172 | a[2] * b20 + a[6] * b21 + a[10] * b22, 173 | a[3] * b20 + a[7] * b21 + a[11] * b22, 174 | 
...a.slice(12, 16), 175 | ]; 176 | } 177 | 178 | function translate4(a, x, y, z) { 179 | return [ 180 | ...a.slice(0, 12), 181 | a[0] * x + a[4] * y + a[8] * z + a[12], 182 | a[1] * x + a[5] * y + a[9] * z + a[13], 183 | a[2] * x + a[6] * y + a[10] * z + a[14], 184 | a[3] * x + a[7] * y + a[11] * z + a[15], 185 | ]; 186 | } 187 | 188 | var SLICE_NUM 189 | var TOTAL_CAP 190 | var SLICE_CAP 191 | var SH_DEGREE 192 | var STREAM_ROW_LENGTH 193 | var VERTEX_ROW_LENGTH 194 | var defaultViewMatrix 195 | var MAX_FRAME 196 | var MINIMAL_BW = 22 197 | function setup_consts(config) { 198 | MAX_FRAME = config.MAX_FRAME; 199 | SLICE_NUM = config.SLICE_NUM; 200 | TOTAL_CAP = config.TOTAL_CAP; 201 | SLICE_CAP = Math.ceil(TOTAL_CAP / SLICE_NUM); 202 | STREAM_ROW_LENGTH = config.STREAM_ROW_LENGTH; 203 | SH_DEGREE = config.SH_DEGREE; 204 | console.log("STREAM_ROW_LENGTH", STREAM_ROW_LENGTH); 205 | VERTEX_ROW_LENGTH = config.VERTEX_ROW_LENGTH; 206 | defaultViewMatrix = config.INIT_VIEW; 207 | let R = [ 208 | [defaultViewMatrix[0], defaultViewMatrix[1], defaultViewMatrix[2]], 209 | [defaultViewMatrix[4], defaultViewMatrix[5], defaultViewMatrix[6]], 210 | [defaultViewMatrix[8], defaultViewMatrix[9], defaultViewMatrix[10]], 211 | ] 212 | let t = [ 213 | defaultViewMatrix[12], 214 | defaultViewMatrix[13], 215 | defaultViewMatrix[14], 216 | ] 217 | cameras[0].rotation = R; 218 | cameras[0].position = t; 219 | cameras[0].fx = config.fx; 220 | cameras[0].fy = config.fy; 221 | MINIMAL_BW = Math.ceil(config.STREAM_ROW_LENGTH * config.TOTAL_CAP * config.FPS / 1e6 / SLICE_NUM); 222 | console.log("MINIMAL_BW", MINIMAL_BW); 223 | const min_bw_ele = document.getElementById("min_bw"); 224 | min_bw_ele.innerHTML = MINIMAL_BW; 225 | } 226 | function GS_TO_VERTEX_COMPACT(gs, full_gs=false) { 227 | // input list of gs objects 228 | // output buffer of binary data 229 | let start = Date.now(); 230 | const buffer = new ArrayBuffer(gs.length * VERTEX_ROW_LENGTH); 231 | const vertexCount = gs.length; 232 | 
console.time("build buffer"); 233 | if (full_gs) { 234 | // for the full gs, sort by end frame 235 | gs.sort((a, b) => a.end_frame - b.end_frame); 236 | } else { 237 | // for the slice gs, sort by start frame 238 | gs.sort((a, b) => a.start_frame - b.start_frame); 239 | } 240 | let curFrame = gs[0].start_frame; 241 | let curSliceStart = 0; 242 | let frame_spans = []; 243 | for (let j = 0; j < vertexCount; j++) { 244 | let attrs = gs[j]; 245 | if (! full_gs) { // for slice 246 | if (attrs.start_frame != curFrame || j == vertexCount - 1) { 247 | frame_spans.push( 248 | { 249 | frame: curFrame, 250 | from: curSliceStart, 251 | to: j, 252 | total: j - curSliceStart -1 253 | } 254 | ) 255 | curFrame = gs[j].start_frame; 256 | curSliceStart = j; 257 | } 258 | } else { 259 | frame_spans.push(attrs.end_frame) 260 | } 261 | // memory pointer we need to fill 262 | const position = new Float32Array(buffer, j * VERTEX_ROW_LENGTH, 3); 263 | const scales = new Float32Array(buffer, j * VERTEX_ROW_LENGTH + 4 * 3, 3); 264 | const rgba = new Uint8ClampedArray( 265 | buffer, 266 | j * VERTEX_ROW_LENGTH + 4 * 3 + 4 * 3, 267 | 4, 268 | ); 269 | const rot = new Uint8ClampedArray( 270 | buffer, 271 | j * VERTEX_ROW_LENGTH + 4 * 3 + 4 * 3 + 4, 272 | 4, 273 | ); 274 | 275 | rot[0] = attrs.rotation[0]; 276 | rot[1] = attrs.rotation[1]; 277 | rot[2] = attrs.rotation[2]; 278 | rot[3] = attrs.rotation[3]; 279 | scales[0] = attrs.scaling[0]; 280 | scales[1] = attrs.scaling[1]; 281 | scales[2] = attrs.scaling[2]; 282 | position[0] = attrs.xyz[0]; 283 | position[1] = attrs.xyz[1]; 284 | position[2] = attrs.xyz[2]; 285 | rgba[0] = attrs.color[0]; 286 | rgba[1] = attrs.color[1]; 287 | rgba[2] = attrs.color[2]; 288 | rgba[3] = attrs.opacity; 289 | 290 | } 291 | console.timeEnd("build buffer"); 292 | let end = Date.now(); 293 | if (!full_gs) global_log_1["vertex"].push(end - start); 294 | return {all:new Uint8Array(buffer), spans:frame_spans}; 295 | } 296 | 297 | function 
PARSE_RAW_BYTES_COMPACT(arrayLike) { 298 | let start = Date.now(); 299 | const view = new DataView(arrayLike.buffer, arrayLike.byteOffset, arrayLike.byteLength); 300 | const jsonObjects = []; 301 | const sizeOfObject = STREAM_ROW_LENGTH; // total bytes for one object 302 | 303 | for (let offset = 0; offset < arrayLike.byteLength; offset += sizeOfObject) { 304 | const start_frame = view.getUint16(offset, false); // true for little-endian 305 | const end_frame = view.getUint16(offset + 2, false); 306 | 307 | const xyz = [ 308 | view.getFloat32(offset + 4, false), 309 | view.getFloat32(offset + 8, false), 310 | view.getFloat32(offset + 12, false), 311 | ]; 312 | 313 | const color = [ 314 | view.getUint8(offset + 16, false), 315 | view.getUint8(offset + 17, false), 316 | view.getUint8(offset + 18, false), 317 | ]; 318 | const opacity = view.getUint8(offset + 19, false); 319 | 320 | // we dont really need f_rest 321 | 322 | const scaling = [ 323 | view.getFloat32(offset + 20, false), 324 | view.getFloat32(offset + 24, false), 325 | view.getFloat32(offset + 28, false), 326 | ]; 327 | 328 | const rotation = [ 329 | view.getUint8(offset + 32, false), 330 | view.getUint8(offset + 33, false), 331 | view.getUint8(offset + 34, false), 332 | view.getUint8(offset + 35, false), 333 | ]; 334 | 335 | 336 | jsonObjects.push({ 337 | start_frame, 338 | end_frame, 339 | xyz, 340 | color, 341 | opacity, 342 | scaling, 343 | rotation, 344 | }); 345 | } 346 | let end = Date.now(); 347 | global_log_1["parse"].push(end - start); 348 | return jsonObjects; 349 | } 350 | 351 | function createWorker(self, SLICE_CAP, SLICE_NUM) { 352 | var global_log_2 = {"sort":[], "texture":[]}; 353 | function average(arr){ 354 | return arr.reduce((a,b) => a+b, 0) / arr.length; 355 | } 356 | let buffer; 357 | let vertexCount = 0; 358 | let viewProj; 359 | // 6*4 + 4 + 4 = 8*4 360 | // XYZ - Position (Float32) 361 | // XYZ - Scale (Float32) 362 | // RGBA - colors (uint8) 363 | // IJKL - quaternion/rot (uint8) 
// Bytes per splat row in the packed buffer:
// 3 f32 position + 3 f32 scale + 4 u8 RGBA + 4 u8 quaternion = 32 bytes.
const rowLength = 3 * 4 + 3 * 4 + 4 + 4;
let lastProj = [];                  // view-projection used for the last sort
let depthIndex = new Uint32Array(); // splat indices in depth order
let lastVertexCount = 0;            // vertex count at the time of the last sort

// Scratch views sharing one 4-byte buffer, used to reinterpret a float's
// bit pattern as an int32 (see floatToHalf).
var _floatView = new Float32Array(1);
var _int32View = new Int32Array(_floatView.buffer);

// Convert a JS number (via float32) to its IEEE 754 half-precision bit
// pattern. Values below half range become subnormals/zero; values above
// half range saturate to infinity (exponent 31, fraction 0).
function floatToHalf(float) {
    _floatView[0] = float;
    var f = _int32View[0];

    var sign = (f >> 31) & 0x0001;
    var exp = (f >> 23) & 0x00ff;
    var frac = f & 0x007fffff;

    var newExp;
    if (exp == 0) {
        newExp = 0;
    } else if (exp < 113) {
        // Subnormal in half precision: restore the implicit leading 1 and
        // shift the fraction down.
        newExp = 0;
        frac |= 0x00800000;
        frac = frac >> (113 - exp);
        if (frac & 0x01000000) {
            newExp = 1;
            frac = 0;
        }
    } else if (exp < 142) {
        newExp = exp - 112;
    } else {
        // Magnitude too large for half precision: saturate to infinity.
        newExp = 31;
        frac = 0;
    }

    return (sign << 15) | (newExp << 10) | (frac >> 13);
}

// Pack two floats as two half-precision values in a single uint32
// (x in the low 16 bits, y in the high 16), mirroring GLSL packHalf2x16.
function packHalf2x16(x, y) {
    return (floatToHalf(x) | (floatToHalf(y) << 16)) >>> 0;
}

// Build the RGBA32UI data texture the vertex shader samples, from the packed
// splat buffer, and post it to the main thread (transferring its buffer).
// Appends the build duration (ms) to global_log_2["texture"].
function generateTexture() {
    let start = Date.now();
    if (!buffer) return;
    const f_buffer = new Float32Array(buffer);
    const u_buffer = new Uint8Array(buffer);

    var texwidth = 1024 * 2; // Set to your desired width
    var texheight = Math.ceil((2 * vertexCount) / texwidth); // Set to your desired height
    var texdata = new Uint32Array(texwidth * texheight * 4); // 4 components per pixel (RGBA)
    var texdata_c = new Uint8Array(texdata.buffer);
    var texdata_f = new Float32Array(texdata.buffer);

    // Here we convert from a .splat file buffer into a texture
    // With a little bit more foresight perhaps this texture file
    // should have been the native format as it'd be very easy to
    // load it into webgl.
    // Each splat spans 8 uint32 words (two RGBA32UI texels):
    // 0-2: XYZ position, one float32 each           : 12B
    // 4-6: covariance, six packHalf2x16 halves      : 12B
    // 7:   RGBA color, one byte per channel         : 4B
    // 3:   currently unused                         : 4B

    for (let i = 0; i < vertexCount; i++) {
        // x, y, z
        texdata_f[8 * i + 0] = f_buffer[8 * i + 0];
        texdata_f[8 * i + 1] = f_buffer[8 * i + 1];
        texdata_f[8 * i + 2] = f_buffer[8 * i + 2];

        // r, g, b, a
        texdata_c[4 * (8 * i + 7) + 0] = u_buffer[32 * i + 24 + 0];
        texdata_c[4 * (8 * i + 7) + 1] = u_buffer[32 * i + 24 + 1];
        texdata_c[4 * (8 * i + 7) + 2] = u_buffer[32 * i + 24 + 2];
        texdata_c[4 * (8 * i + 7) + 3] = u_buffer[32 * i + 24 + 3];

        // quaternions
        let scale = [
            f_buffer[8 * i + 3 + 0],
            f_buffer[8 * i + 3 + 1],
            f_buffer[8 * i + 3 + 2],
        ];
        // Dequantize rotation bytes back to roughly [-1, 1].
        let rot = [
            (u_buffer[32 * i + 28 + 0] - 128) / 128,
            (u_buffer[32 * i + 28 + 1] - 128) / 128,
            (u_buffer[32 * i + 28 + 2] - 128) / 128,
            (u_buffer[32 * i + 28 + 3] - 128) / 128,
        ];

        // Compute the matrix product of S and R (M = S * R)
        const M = [
            1.0 - 2.0 * (rot[2] * rot[2] + rot[3] * rot[3]),
            2.0 * (rot[1] * rot[2] + rot[0] * rot[3]),
            2.0 * (rot[1] * rot[3] - rot[0] * rot[2]),

            2.0 * (rot[1] * rot[2] - rot[0] * rot[3]),
            1.0 - 2.0 * (rot[1] * rot[1] + rot[3] * rot[3]),
            2.0 * (rot[2] * rot[3] + rot[0] * rot[1]),

            2.0 * (rot[1] * rot[3] + rot[0] * rot[2]),
            2.0 * (rot[2] * rot[3] - rot[0] * rot[1]),
            1.0 - 2.0 * (rot[1] * rot[1] + rot[2] * rot[2]),
        ].map((k, i) => k * scale[Math.floor(i / 3)]);

        // Upper triangle of the symmetric 3D covariance (M^T * M terms).
        const sigma = [
            M[0] * M[0] + M[3] * M[3] + M[6] * M[6],
            M[0] * M[1] + M[3] * M[4] + M[6] * M[7],
            M[0] * M[2] + M[3] * M[5] + M[6] * M[8],
            M[1] * M[1] + M[4] * M[4] + M[7] * M[7],
            M[1] * M[2] + M[4] * M[5] + M[7] * M[8],
            M[2] * M[2] + M[5] * M[5] + M[8] * M[8],
        ];

        // Scale by 4 and pack half pairs into words 4-6.
        texdata[8 * i + 4] = packHalf2x16(4 * sigma[0], 4 * sigma[1]);
        texdata[8 * i + 5] = packHalf2x16(4 * sigma[2], 4 * sigma[3]);
        texdata[8 * i + 6] = packHalf2x16(4 * sigma[4], 4 * sigma[5]);
    }
    let end = Date.now();
    global_log_2["texture"].push(end - start);
    console.log(`avg time cost of each step:
    sort: ${average(global_log_2["sort"])}ms
    texture: ${average(global_log_2["texture"])}ms
    `);
    self.postMessage({ texdata, texwidth, texheight}, [texdata.buffer]);
}

// Depth-sort all splats for the given view-projection matrix with a 16-bit
// single-pass counting sort, rebuild the texture, and post both results.
// Skipped when the vertex count is unchanged AND the view direction has
// barely moved, unless enforce_update is true.
function runSort(viewProj, enforce_update=false) {
    if (!buffer) return;
    const f_buffer = new Float32Array(buffer);
    if (lastVertexCount == vertexCount && !enforce_update) {
        // Dot product of the previous and current view-space depth axes;
        // near 1 means the camera barely rotated, so keep the old order.
        let dot =
            lastProj[2] * viewProj[2] +
            lastProj[6] * viewProj[6] +
            lastProj[10] * viewProj[10];
        if (Math.abs(dot - 1) < 0.01) {
            return;
        }
    } else {
        lastVertexCount = vertexCount;
    }
    let start = Date.now();
    console.time("sort");
    let maxDepth = -Infinity;
    let minDepth = Infinity;
    let sizeList = new Int32Array(vertexCount);
    for (let i = 0; i < vertexCount; i++) {
        // Fixed-point view-space depth (x4096), truncated to int via |0.
        let depth =
            ((viewProj[2] * f_buffer[8 * i + 0] +
                viewProj[6] * f_buffer[8 * i + 1] +
                viewProj[10] * f_buffer[8 * i + 2]) *
                4096) |
            0;
        sizeList[i] = depth;
        if (depth > maxDepth) maxDepth = depth;
        if (depth < minDepth) minDepth = depth;
    }

    // This is a 16 bit single-pass counting sort
    // NOTE: if every depth is identical, depthInv is Infinity and
    // (0 * Infinity) | 0 yields 0, so all splats fall into bucket 0.
    let depthInv = (256 * 256) / (maxDepth - minDepth);
    let counts0 = new Uint32Array(256 * 256);
    for (let i = 0; i < vertexCount; i++) {
        sizeList[i] = ((sizeList[i] - minDepth) * depthInv) | 0;
        counts0[sizeList[i]]++;
    }
    // Prefix sums give each bucket's starting slot.
    let starts0 = new Uint32Array(256 * 256);
    for (let i = 1; i < 256 * 256; i++)
        starts0[i] = starts0[i - 1] + counts0[i - 1];
    depthIndex = new Uint32Array(vertexCount);
    for (let i = 0; i < vertexCount; i++)
        depthIndex[starts0[sizeList[i]]++] = i;

    console.timeEnd("sort");
    let end = Date.now();
    global_log_2["sort"].push(end - start);
    lastProj = viewProj;
    // put texture update and depth update together
    generateTexture();
    self.postMessage({ depthIndex, viewProj, vertexCount }, [
        depthIndex.buffer,
    ]);
}

// Parse a .ply file into the packed 32-byte-per-splat buffer, ordering
// splats by importance (volume * opacity, descending).
function processPlyBuffer(inputBuffer) {
    const ubuf = new Uint8Array(inputBuffer);
    // 10KB ought to be enough for a header...
    const header = new TextDecoder().decode(ubuf.slice(0, 1024 * 10));
    const header_end = "end_header\n";
    const header_end_index = header.indexOf(header_end);
    if (header_end_index < 0)
        throw new Error("Unable to read .ply file header");
    const vertexCount = parseInt(/element vertex (\d+)\n/.exec(header)[1]);
    console.log("Vertex Count", vertexCount);
    let row_offset = 0,
        offsets = {},
        types = {};
    const TYPE_MAP = {
        double: "getFloat64",
        int: "getInt32",
        uint: "getUint32",
        float: "getFloat32",
        short: "getInt16",
        ushort: "getUint16",
        uchar: "getUint8",
    };
    // Map each "property <type> <name>" header line to a DataView getter
    // name and a byte offset within one vertex row.
    for (let prop of header
        .slice(0, header_end_index)
        .split("\n")
        .filter((k) => k.startsWith("property "))) {
        const [p, type, name] = prop.split(" ");
        const arrayType = TYPE_MAP[type] || "getInt8";
        types[name] = arrayType;
        offsets[name] = row_offset;
        // The getter name encodes its width in bits (e.g. getFloat32 -> 4B).
        row_offset += parseInt(arrayType.replace(/[^\d]/g, "")) / 8;
    }
    console.log("Bytes per row", row_offset, types, offsets);

    let dataView = new DataView(
        inputBuffer,
        header_end_index + header_end.length,
    );
    let row = 0;
    // Proxy that lazily reads property `prop` of the current `row`;
    // throws if the .ply header did not declare that property.
    const attrs = new Proxy(
        {},
        {
            get(target, prop) {
                if (!types[prop]) throw new Error(prop + " not found");
                return dataView[types[prop]](
row * row_offset + offsets[prop],
                    true,
                );
            },
        },
    );

    console.time("calculate importance");
    let sizeList = new Float32Array(vertexCount);
    let sizeIndex = new Uint32Array(vertexCount);
    for (row = 0; row < vertexCount; row++) {
        sizeIndex[row] = row;
        if (!types["scale_0"]) continue;
        // Importance = volume proxy (product of exponentiated scales)
        // weighted by sigmoid opacity.
        const size =
            Math.exp(attrs.scale_0) *
            Math.exp(attrs.scale_1) *
            Math.exp(attrs.scale_2);
        const opacity = 1 / (1 + Math.exp(-attrs.opacity));
        sizeList[row] = size * opacity;
    }
    console.timeEnd("calculate importance");

    console.time("sort");
    // Comparator args deliberately swapped (b, a): descending importance.
    sizeIndex.sort((b, a) => sizeList[a] - sizeList[b]);
    console.timeEnd("sort");

    // 6*4 + 4 + 4 = 8*4
    // XYZ - Position (Float32)
    // XYZ - Scale (Float32)
    // RGBA - colors (uint8)
    // IJKL - quaternion/rot (uint8)
    const rowLength = 3 * 4 + 3 * 4 + 4 + 4;
    const buffer = new ArrayBuffer(rowLength * vertexCount);

    console.time("build buffer");
    for (let j = 0; j < vertexCount; j++) {
        row = sizeIndex[j];

        const position = new Float32Array(buffer, j * rowLength, 3);
        const scales = new Float32Array(buffer, j * rowLength + 4 * 3, 3);
        const rgba = new Uint8ClampedArray(
            buffer,
            j * rowLength + 4 * 3 + 4 * 3,
            4,
        );
        const rot = new Uint8ClampedArray(
            buffer,
            j * rowLength + 4 * 3 + 4 * 3 + 4,
            4,
        );

        if (types["scale_0"]) {
            // Normalize the quaternion, then quantize each component to a
            // byte centered at 128 (Uint8ClampedArray rounds and clamps).
            const qlen = Math.sqrt(
                attrs.rot_0 ** 2 +
                    attrs.rot_1 ** 2 +
                    attrs.rot_2 ** 2 +
                    attrs.rot_3 ** 2,
            );

            rot[0] = (attrs.rot_0 / qlen) * 128 + 128;
            rot[1] = (attrs.rot_1 / qlen) * 128 + 128;
            rot[2] = (attrs.rot_2 / qlen) * 128 + 128;
            rot[3] = (attrs.rot_3 / qlen) * 128 + 128;

            scales[0] = Math.exp(attrs.scale_0);
            scales[1] = Math.exp(attrs.scale_1);
            scales[2] = Math.exp(attrs.scale_2);
        } else {
            // No scale properties: small isotropic splat, identity rotation.
            scales[0] = 0.01;
            scales[1] = 0.01;
            scales[2] = 0.01;

            rot[0] = 255;
            rot[1] = 0;
            rot[2] = 0;
            rot[3] = 0;
        }

        position[0] = attrs.x;
        position[1] = attrs.y;
        position[2] = attrs.z;

        if (types["f_dc_0"]) {
            // Degree-0 spherical-harmonics coefficients to RGB bytes.
            const SH_C0 = 0.28209479177387814;
            rgba[0] = (0.5 + SH_C0 * attrs.f_dc_0) * 255;
            rgba[1] = (0.5 + SH_C0 * attrs.f_dc_1) * 255;
            rgba[2] = (0.5 + SH_C0 * attrs.f_dc_2) * 255;
        } else {
            rgba[0] = attrs.red;
            rgba[1] = attrs.green;
            rgba[2] = attrs.blue;
        }
        if (types["opacity"]) {
            // Sigmoid maps the raw opacity logit into [0, 1].
            rgba[3] = (1 / (1 + Math.exp(-attrs.opacity))) * 255;
        } else {
            rgba[3] = 255;
        }
    }
    console.timeEnd("build buffer");
    return buffer;
}

// Sort for the most recent view; if a newer view arrived while this sort
// was in flight, immediately schedule another pass.
const throttledSort = () => {
    if (!sortRunning) {
        sortRunning = true;
        let lastView = viewProj;
        runSort(lastView);
        setTimeout(() => {
            sortRunning = false;
            if (lastView !== viewProj) {
                throttledSort();
            }
        }, 0);
    }
};

let sortRunning;
let slicePtr = new Array(SLICE_NUM).fill(0); // indicates the number of gaussians in the slice
function getSlice(sId){ // returns the slice with the given id
    return new Uint8Array(buffer, sId * SLICE_CAP * rowLength, SLICE_CAP * rowLength);
}


// Worker message protocol — exactly one key per message selects the action:
//   ply         : parse a .ply buffer, adopt it, echo the packed buffer back
//   buffer      : adopt a pre-packed splat buffer (with its vertexCount)
//   vertexCount : update the visible splat count only
//   view        : new view-projection matrix; triggers a throttled sort
//   resetSlice  : overwrite one slice with new gaussians, zero-fill the rest
//   appendSlice : append gaussians to a slice, capped at SLICE_CAP
//   reSort      : force a full sort + texture rebuild
self.onmessage = (e) => {
    if (e.data.ply) {
        vertexCount = 0;
        runSort(viewProj);
        buffer = processPlyBuffer(e.data.ply);
        vertexCount = Math.floor(buffer.byteLength / rowLength);
        postMessage({ buffer: buffer });
    } else if (e.data.buffer) {
        buffer = e.data.buffer;
        vertexCount = e.data.vertexCount;
    } else if (e.data.vertexCount) {
        vertexCount = e.data.vertexCount;
    } else if (e.data.view) {
        viewProj = e.data.view;
        throttledSort();
    } else if (e.data.resetSlice) {
        // vertexCount should not change
        let sId = e.data.resetSlice.sliceId;
        let data = e.data.resetSlice.data;
        let num_of_gs = Math.floor(data.length / rowLength);
        let num_of_gs_capped = Math.min(num_of_gs, SLICE_CAP);
        let bufferSlice = getSlice(sId);
        // fill in the slice with data
        bufferSlice.set(data.slice(0, num_of_gs_capped * rowLength));
        if (num_of_gs_capped < SLICE_CAP) {
            // fill the rest with zeros
            bufferSlice.fill(0, num_of_gs_capped * rowLength);
        }
        slicePtr[sId] = num_of_gs_capped;
        console.log("slice #", e.data.resetSlice.sliceId, `: reset with ${slicePtr[sId]} gs`);
    } else if (e.data.appendSlice){
        let sId = e.data.appendSlice.sliceId;
        let data = e.data.appendSlice.data; // data is bufferView not buffer, no need to create a view
        if (slicePtr[sId] >= SLICE_CAP) return;
        let num_of_gs = Math.floor(data.length / rowLength);
        let num_of_gs_capped = Math.min(num_of_gs, SLICE_CAP - slicePtr[sId]);
        if (num_of_gs > num_of_gs_capped) {
            console.warn("slice #", e.data.appendSlice.sliceId, `overflow from frame #`, e.data.appendSlice.frame);
        }
        let bufferSlice = getSlice(sId);
        // fill in the slice with data
        bufferSlice.set(data.slice(0, num_of_gs_capped * rowLength), slicePtr[sId] * rowLength);
        console.log("slice #", e.data.appendSlice.sliceId, `: increase from ${slicePtr[sId]} gs to ${slicePtr[sId] + num_of_gs_capped} gs, with ${num_of_gs} gs`);
        slicePtr[sId] += num_of_gs_capped;
        // do not runSort here
    } else if (e.data.reSort) {
        runSort(viewProj, true);
    }
};
}

// GLSL ES 3.00 vertex shader: fetches each splat's position and packed
// covariance/color from the RGBA32UI data texture (two texels per splat),
// projects the 3D covariance to a screen-space ellipse, and emits an
// instanced quad aligned to its major/minor axes. (Runtime string — kept
// verbatim; GLSL comments would be shipped to the GPU compiler.)
const vertexShaderSource = `
#version 300 es
precision highp float;
precision highp int;

uniform highp usampler2D u_texture;
uniform mat4 projection, view;
uniform vec2 focal;
uniform vec2 viewport;

in vec2 position;
in int index;

out vec4 vColor;
out vec2 vPosition;

void main () {
    uvec4 cen = texelFetch(u_texture, ivec2((uint(index) & 0x3ffu) << 1, uint(index) >> 10), 0);
    vec4 cam = view * vec4(uintBitsToFloat(cen.xyz), 1);
    vec4 pos2d = projection * cam;

    float clip = 1.2 * pos2d.w;
    if (pos2d.z < -clip || pos2d.x < -clip || pos2d.x > clip || pos2d.y < -clip || pos2d.y > clip) {
        gl_Position = vec4(0.0, 0.0, 2.0, 1.0);
        return;
    }

    uvec4 cov = texelFetch(u_texture, ivec2(((uint(index) & 0x3ffu) << 1) | 1u, uint(index) >> 10), 0);
    vec2 u1 = unpackHalf2x16(cov.x), u2 = unpackHalf2x16(cov.y), u3 = unpackHalf2x16(cov.z);
    mat3 Vrk = mat3(u1.x, u1.y, u2.x, u1.y, u2.y, u3.x, u2.x, u3.x, u3.y);

    mat3 J = mat3(
        focal.x / cam.z, 0., -(focal.x * cam.x) / (cam.z * cam.z),
        0., -focal.y / cam.z, (focal.y * cam.y) / (cam.z * cam.z),
        0., 0., 0.
    );

    mat3 T = transpose(mat3(view)) * J;
    mat3 cov2d = transpose(T) * Vrk * T;

    float mid = (cov2d[0][0] + cov2d[1][1]) / 2.0;
    float radius = length(vec2((cov2d[0][0] - cov2d[1][1]) / 2.0, cov2d[0][1]));
    float lambda1 = mid + radius, lambda2 = mid - radius;

    if(lambda2 < 0.0) return;
    vec2 diagonalVector = normalize(vec2(cov2d[0][1], lambda1 - cov2d[0][0]));
    vec2 majorAxis = min(sqrt(2.0 * lambda1), 1024.0) * diagonalVector;
    vec2 minorAxis = min(sqrt(2.0 * lambda2), 1024.0) * vec2(diagonalVector.y, -diagonalVector.x);

    vColor = clamp(pos2d.z/pos2d.w+1.0, 0.0, 1.0) * vec4((cov.w) & 0xffu, (cov.w >> 8) & 0xffu, (cov.w >> 16) & 0xffu, (cov.w >> 24) & 0xffu) / 255.0;
    vPosition = position;

    vec2 vCenter = vec2(pos2d) / pos2d.w;
    gl_Position = vec4(
        vCenter
            + position.x * majorAxis / viewport
            + position.y * minorAxis / viewport, 0.0, 1.0);

}
`.trim();

const fragmentShaderSource = `
827 | #version 300 es 828 | precision highp float; 829 | 830 | in vec4 vColor; 831 | in vec2 vPosition; 832 | 833 | out vec4 fragColor; 834 | 835 | void main () { 836 | float A = -dot(vPosition, vPosition); 837 | if (A < -4.0) discard; 838 | float B = exp(A) * vColor.a; 839 | fragColor = vec4(B * vColor.rgb, B); 840 | } 841 | 842 | `.trim(); 843 | 844 | 845 | let viewMatrix; 846 | async function main(config) { 847 | viewMatrix = defaultViewMatrix; 848 | let carousel = false; 849 | try { 850 | viewMatrix = JSON.parse(decodeURIComponent(location.hash.slice(1))); 851 | carousel = false; 852 | } catch (err) {} 853 | const url = config.MODEL_URL; 854 | const req = await fetch(url, { 855 | mode: "cors", // no-cors, *cors, same-origin 856 | credentials: "omit", // include, *same-origin, omit 857 | }); 858 | console.log(req); 859 | if (req.status != 200) 860 | throw new Error(req.status + " Unable to load " + req.url); 861 | 862 | const rowLength = 3 * 4 + 3 * 4 + 4 + 4; 863 | const reader = req.body.getReader(); 864 | 865 | // TODO reduce the size of the buffer, we dont need a full size buffer 866 | // let splatData = new Uint8Array(req.headers.get("content-length")); 867 | 868 | // TODO we dont need it 869 | const downsample = 1; 870 | // splatData.length / rowLength > 500000 ? 
1 : 1 / devicePixelRatio; 871 | // console.log(splatData.length / rowLength, downsample); 872 | 873 | const worker = new Worker( 874 | URL.createObjectURL( 875 | new Blob(["(", createWorker.toString(), `)(self, ${SLICE_CAP}, ${SLICE_NUM})`], { 876 | type: "application/javascript", 877 | }), 878 | ), 879 | ); 880 | 881 | const canvas = document.getElementById("canvas"); 882 | const fps = document.getElementById("fps"); 883 | const camid = document.getElementById("camid"); 884 | 885 | let projectionMatrix; 886 | 887 | const gl = canvas.getContext("webgl2", { 888 | antialias: false, 889 | }); 890 | 891 | const vertexShader = gl.createShader(gl.VERTEX_SHADER); 892 | gl.shaderSource(vertexShader, vertexShaderSource); 893 | gl.compileShader(vertexShader); 894 | if (!gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS)) 895 | console.error(gl.getShaderInfoLog(vertexShader)); 896 | 897 | const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER); 898 | gl.shaderSource(fragmentShader, fragmentShaderSource); 899 | gl.compileShader(fragmentShader); 900 | if (!gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS)) 901 | console.error(gl.getShaderInfoLog(fragmentShader)); 902 | 903 | const program = gl.createProgram(); 904 | gl.attachShader(program, vertexShader); 905 | gl.attachShader(program, fragmentShader); 906 | gl.linkProgram(program); 907 | gl.useProgram(program); 908 | 909 | if (!gl.getProgramParameter(program, gl.LINK_STATUS)) 910 | console.error(gl.getProgramInfoLog(program)); 911 | 912 | gl.disable(gl.DEPTH_TEST); // Disable depth testing 913 | 914 | // Enable blending 915 | gl.enable(gl.BLEND); 916 | gl.blendFuncSeparate( 917 | gl.ONE_MINUS_DST_ALPHA, 918 | gl.ONE, 919 | gl.ONE_MINUS_DST_ALPHA, 920 | gl.ONE, 921 | ); 922 | gl.blendEquationSeparate(gl.FUNC_ADD, gl.FUNC_ADD); 923 | 924 | const u_projection = gl.getUniformLocation(program, "projection"); 925 | const u_viewport = gl.getUniformLocation(program, "viewport"); 926 | const u_focal = 
gl.getUniformLocation(program, "focal"); 927 | const u_view = gl.getUniformLocation(program, "view"); 928 | 929 | // positions 930 | const triangleVertices = new Float32Array([-2, -2, 2, -2, 2, 2, -2, 2]); 931 | const vertexBuffer = gl.createBuffer(); 932 | gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer); 933 | gl.bufferData(gl.ARRAY_BUFFER, triangleVertices, gl.STATIC_DRAW); 934 | const a_position = gl.getAttribLocation(program, "position"); 935 | gl.enableVertexAttribArray(a_position); 936 | gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer); 937 | gl.vertexAttribPointer(a_position, 2, gl.FLOAT, false, 0, 0); 938 | 939 | var texture = gl.createTexture(); 940 | gl.bindTexture(gl.TEXTURE_2D, texture); 941 | 942 | var u_textureLocation = gl.getUniformLocation(program, "u_texture"); 943 | gl.uniform1i(u_textureLocation, 0); 944 | 945 | const indexBuffer = gl.createBuffer(); 946 | const a_index = gl.getAttribLocation(program, "index"); 947 | gl.enableVertexAttribArray(a_index); 948 | gl.bindBuffer(gl.ARRAY_BUFFER, indexBuffer); 949 | gl.vertexAttribIPointer(a_index, 1, gl.INT, false, 0, 0); 950 | gl.vertexAttribDivisor(a_index, 1); 951 | 952 | const resize = () => { 953 | gl.uniform2fv(u_focal, new Float32Array([camera.fx, camera.fy])); 954 | 955 | let w = config.W; 956 | let h = config.H; 957 | let ratio = Math.min(innerWidth / w, innerHeight / h); 958 | w = innerWidth / ratio; 959 | h = innerHeight / ratio; 960 | projectionMatrix = getProjectionMatrix( 961 | camera.fx, 962 | camera.fy, 963 | w, 964 | h, 965 | ); 966 | 967 | gl.uniform2fv(u_viewport, new Float32Array([w, h])); 968 | 969 | gl.canvas.width = Math.round(innerWidth / downsample); 970 | gl.canvas.height = Math.round(innerHeight / downsample); 971 | gl.viewport(0, 0, gl.canvas.width, gl.canvas.height); 972 | 973 | gl.uniformMatrix4fv(u_projection, false, projectionMatrix); 974 | }; 975 | 976 | window.addEventListener("resize", resize); 977 | resize(); 978 | 979 | worker.onmessage = (e) => { 980 | if (e.data.buffer) 
{ 981 | splatData = new Uint8Array(e.data.buffer); 982 | const blob = new Blob([splatData.buffer], { 983 | type: "application/octet-stream", 984 | }); 985 | const link = document.createElement("a"); 986 | link.download = "model.splat"; 987 | link.href = URL.createObjectURL(blob); 988 | document.body.appendChild(link); 989 | link.click(); 990 | } else if (e.data.texdata) { 991 | const { texdata, texwidth, texheight, subset} = e.data; 992 | // console.log(texdata) 993 | gl.bindTexture(gl.TEXTURE_2D, texture); 994 | gl.texParameteri( 995 | gl.TEXTURE_2D, 996 | gl.TEXTURE_WRAP_S, 997 | gl.CLAMP_TO_EDGE, 998 | ); 999 | gl.texParameteri( 1000 | gl.TEXTURE_2D, 1001 | gl.TEXTURE_WRAP_T, 1002 | gl.CLAMP_TO_EDGE, 1003 | ); 1004 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST); 1005 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST); 1006 | 1007 | gl.texImage2D( 1008 | gl.TEXTURE_2D, 1009 | 0, 1010 | gl.RGBA32UI, 1011 | texwidth, 1012 | texheight, 1013 | 0, 1014 | gl.RGBA_INTEGER, 1015 | gl.UNSIGNED_INT, 1016 | texdata, 1017 | ); 1018 | 1019 | gl.activeTexture(gl.TEXTURE0); 1020 | gl.bindTexture(gl.TEXTURE_2D, texture); 1021 | } else if (e.data.depthIndex) { 1022 | const { depthIndex, viewProj } = e.data; 1023 | gl.bindBuffer(gl.ARRAY_BUFFER, indexBuffer); 1024 | gl.bufferData(gl.ARRAY_BUFFER, depthIndex, gl.DYNAMIC_DRAW); 1025 | vertexCount = e.data.vertexCount; 1026 | } 1027 | }; 1028 | 1029 | let activeKeys = []; 1030 | let currentCameraIndex = 0; 1031 | 1032 | window.addEventListener("keydown", (e) => { 1033 | // if (document.activeElement != document.body) return; 1034 | carousel = false; 1035 | if (!activeKeys.includes(e.code)) activeKeys.push(e.code); 1036 | if (/\d/.test(e.key)) { 1037 | currentCameraIndex = parseInt(e.key) 1038 | camera = cameras[currentCameraIndex]; 1039 | viewMatrix = getViewMatrix(camera); 1040 | } 1041 | if (['-', '_'].includes(e.key)){ 1042 | currentCameraIndex = (currentCameraIndex + cameras.length - 1) % 
cameras.length; 1043 | viewMatrix = getViewMatrix(cameras[currentCameraIndex]); 1044 | } 1045 | if (['+', '='].includes(e.key)){ 1046 | currentCameraIndex = (currentCameraIndex + 1) % cameras.length; 1047 | viewMatrix = getViewMatrix(cameras[currentCameraIndex]); 1048 | } 1049 | camid.innerText = "cam " + currentCameraIndex; 1050 | if (e.code == "KeyV") { 1051 | location.hash = 1052 | "#" + 1053 | JSON.stringify( 1054 | viewMatrix.map((k) => Math.round(k * 100) / 100), 1055 | ); 1056 | camid.innerText ="" 1057 | } else if (e.code === "KeyP") { 1058 | carousel = true; 1059 | camid.innerText ="" 1060 | } 1061 | }); 1062 | window.addEventListener("keyup", (e) => { 1063 | activeKeys = activeKeys.filter((k) => k !== e.code); 1064 | }); 1065 | window.addEventListener("blur", () => { 1066 | activeKeys = []; 1067 | }); 1068 | 1069 | window.addEventListener( 1070 | "wheel", 1071 | (e) => { 1072 | carousel = false; 1073 | e.preventDefault(); 1074 | const lineHeight = 10; 1075 | const scale = 1076 | e.deltaMode == 1 1077 | ? lineHeight 1078 | : e.deltaMode == 2 1079 | ? 
innerHeight 1080 | : 1; 1081 | let inv = invert4(viewMatrix); 1082 | if (e.shiftKey) { 1083 | inv = translate4( 1084 | inv, 1085 | (e.deltaX * scale) / innerWidth, 1086 | (e.deltaY * scale) / innerHeight, 1087 | 0, 1088 | ); 1089 | } else if (e.ctrlKey || e.metaKey) { 1090 | // inv = rotate4(inv, (e.deltaX * scale) / innerWidth, 0, 0, 1); 1091 | // inv = translate4(inv, 0, (e.deltaY * scale) / innerHeight, 0); 1092 | // let preY = inv[13]; 1093 | inv = translate4( 1094 | inv, 1095 | 0, 1096 | 0, 1097 | (-10 * (e.deltaY * scale)) / innerHeight, 1098 | ); 1099 | // inv[13] = preY; 1100 | } else { 1101 | let d = 4; 1102 | inv = translate4(inv, 0, 0, d); 1103 | inv = rotate4(inv, -(e.deltaX * scale) / innerWidth, 0, 1, 0); 1104 | inv = rotate4(inv, (e.deltaY * scale) / innerHeight, 1, 0, 0); 1105 | inv = translate4(inv, 0, 0, -d); 1106 | } 1107 | 1108 | viewMatrix = invert4(inv); 1109 | }, 1110 | { passive: false }, 1111 | ); 1112 | 1113 | let startX, startY, down; 1114 | canvas.addEventListener("mousedown", (e) => { 1115 | carousel = false; 1116 | e.preventDefault(); 1117 | startX = e.clientX; 1118 | startY = e.clientY; 1119 | down = e.ctrlKey || e.metaKey ? 
2 : 1; 1120 | }); 1121 | canvas.addEventListener("contextmenu", (e) => { 1122 | carousel = false; 1123 | e.preventDefault(); 1124 | startX = e.clientX; 1125 | startY = e.clientY; 1126 | down = 2; 1127 | }); 1128 | 1129 | canvas.addEventListener("mousemove", (e) => { 1130 | e.preventDefault(); 1131 | if (down == 1) { 1132 | let inv = invert4(viewMatrix); 1133 | let dx = (5 * (e.clientX - startX)) / innerWidth; 1134 | let dy = (5 * (e.clientY - startY)) / innerHeight; 1135 | let d = 4; 1136 | 1137 | inv = translate4(inv, 0, 0, d); 1138 | inv = rotate4(inv, dx, 0, 1, 0); 1139 | inv = rotate4(inv, -dy, 1, 0, 0); 1140 | inv = translate4(inv, 0, 0, -d); 1141 | // let postAngle = Math.atan2(inv[0], inv[10]) 1142 | // inv = rotate4(inv, postAngle - preAngle, 0, 0, 1) 1143 | // console.log(postAngle) 1144 | viewMatrix = invert4(inv); 1145 | 1146 | startX = e.clientX; 1147 | startY = e.clientY; 1148 | } else if (down == 2) { 1149 | let inv = invert4(viewMatrix); 1150 | // inv = rotateY(inv, ); 1151 | // let preY = inv[13]; 1152 | inv = translate4( 1153 | inv, 1154 | (-10 * (e.clientX - startX)) / innerWidth, 1155 | 0, 1156 | (10 * (e.clientY - startY)) / innerHeight, 1157 | ); 1158 | // inv[13] = preY; 1159 | viewMatrix = invert4(inv); 1160 | 1161 | startX = e.clientX; 1162 | startY = e.clientY; 1163 | } 1164 | }); 1165 | canvas.addEventListener("mouseup", (e) => { 1166 | e.preventDefault(); 1167 | down = false; 1168 | startX = 0; 1169 | startY = 0; 1170 | }); 1171 | 1172 | let altX = 0, 1173 | altY = 0; 1174 | canvas.addEventListener( 1175 | "touchstart", 1176 | (e) => { 1177 | e.preventDefault(); 1178 | if (e.touches.length === 1) { 1179 | carousel = false; 1180 | startX = e.touches[0].clientX; 1181 | startY = e.touches[0].clientY; 1182 | down = 1; 1183 | } else if (e.touches.length === 2) { 1184 | // console.log('beep') 1185 | carousel = false; 1186 | startX = e.touches[0].clientX; 1187 | altX = e.touches[1].clientX; 1188 | startY = e.touches[0].clientY; 1189 | altY = 
e.touches[1].clientY; 1190 | down = 1; 1191 | } 1192 | }, 1193 | { passive: false }, 1194 | ); 1195 | canvas.addEventListener( 1196 | "touchmove", 1197 | (e) => { 1198 | e.preventDefault(); 1199 | if (e.touches.length === 1 && down) { 1200 | let inv = invert4(viewMatrix); 1201 | let dx = (4 * (e.touches[0].clientX - startX)) / innerWidth; 1202 | let dy = (4 * (e.touches[0].clientY - startY)) / innerHeight; 1203 | 1204 | let d = 4; 1205 | inv = translate4(inv, 0, 0, d); 1206 | // inv = translate4(inv, -x, -y, -z); 1207 | // inv = translate4(inv, x, y, z); 1208 | inv = rotate4(inv, dx, 0, 1, 0); 1209 | inv = rotate4(inv, -dy, 1, 0, 0); 1210 | inv = translate4(inv, 0, 0, -d); 1211 | 1212 | viewMatrix = invert4(inv); 1213 | 1214 | startX = e.touches[0].clientX; 1215 | startY = e.touches[0].clientY; 1216 | } else if (e.touches.length === 2) { 1217 | // alert('beep') 1218 | const dtheta = 1219 | Math.atan2(startY - altY, startX - altX) - 1220 | Math.atan2( 1221 | e.touches[0].clientY - e.touches[1].clientY, 1222 | e.touches[0].clientX - e.touches[1].clientX, 1223 | ); 1224 | const dscale = 1225 | Math.hypot(startX - altX, startY - altY) / 1226 | Math.hypot( 1227 | e.touches[0].clientX - e.touches[1].clientX, 1228 | e.touches[0].clientY - e.touches[1].clientY, 1229 | ); 1230 | const dx = 1231 | (e.touches[0].clientX + 1232 | e.touches[1].clientX - 1233 | (startX + altX)) / 1234 | 2; 1235 | const dy = 1236 | (e.touches[0].clientY + 1237 | e.touches[1].clientY - 1238 | (startY + altY)) / 1239 | 2; 1240 | let inv = invert4(viewMatrix); 1241 | // inv = translate4(inv, 0, 0, d); 1242 | inv = rotate4(inv, dtheta, 0, 0, 1); 1243 | 1244 | inv = translate4(inv, -dx / innerWidth, -dy / innerHeight, 0); 1245 | 1246 | // let preY = inv[13]; 1247 | inv = translate4(inv, 0, 0, 3 * (1 - dscale)); 1248 | // inv[13] = preY; 1249 | 1250 | viewMatrix = invert4(inv); 1251 | 1252 | startX = e.touches[0].clientX; 1253 | altX = e.touches[1].clientX; 1254 | startY = e.touches[0].clientY; 1255 | 
altY = e.touches[1].clientY; 1256 | } 1257 | }, 1258 | { passive: false }, 1259 | ); 1260 | canvas.addEventListener( 1261 | "touchend", 1262 | (e) => { 1263 | e.preventDefault(); 1264 | down = false; 1265 | startX = 0; 1266 | startY = 0; 1267 | }, 1268 | { passive: false }, 1269 | ); 1270 | 1271 | let jumpDelta = 0; 1272 | let vertexCount = 0; 1273 | 1274 | let lastFrame = 0; 1275 | let avgFps = 0; 1276 | let start = 0; 1277 | 1278 | window.addEventListener("gamepadconnected", (e) => { 1279 | const gp = navigator.getGamepads()[e.gamepad.index]; 1280 | console.log( 1281 | `Gamepad connected at index ${gp.index}: ${gp.id}. It has ${gp.buttons.length} buttons and ${gp.axes.length} axes.`, 1282 | ); 1283 | }); 1284 | window.addEventListener("gamepaddisconnected", (e) => { 1285 | console.log("Gamepad disconnected"); 1286 | }); 1287 | 1288 | let leftGamepadTrigger, rightGamepadTrigger; 1289 | let curFrame = 0; 1290 | const frame = (now) => { 1291 | let inv = invert4(viewMatrix); 1292 | let shiftKey = activeKeys.includes("Shift") || activeKeys.includes("ShiftLeft") || activeKeys.includes("ShiftRight") 1293 | 1294 | if (activeKeys.includes("ArrowUp")) { 1295 | if (shiftKey) { 1296 | inv = translate4(inv, 0, -0.03, 0); 1297 | } else { 1298 | inv = translate4(inv, 0, 0, 0.1); 1299 | } 1300 | } 1301 | if (activeKeys.includes("ArrowDown")) { 1302 | if (shiftKey) { 1303 | inv = translate4(inv, 0, 0.03, 0); 1304 | } else { 1305 | inv = translate4(inv, 0, 0, -0.1); 1306 | } 1307 | } 1308 | if (activeKeys.includes("ArrowLeft")) 1309 | inv = translate4(inv, -0.03, 0, 0); 1310 | // 1311 | if (activeKeys.includes("ArrowRight")) 1312 | inv = translate4(inv, 0.03, 0, 0); 1313 | // inv = rotate4(inv, 0.01, 0, 1, 0); 1314 | if (activeKeys.includes("KeyA")) inv = rotate4(inv, -0.01, 0, 1, 0); 1315 | if (activeKeys.includes("KeyD")) inv = rotate4(inv, 0.01, 0, 1, 0); 1316 | if (activeKeys.includes("KeyQ")) inv = rotate4(inv, 0.01, 0, 0, 1); 1317 | if (activeKeys.includes("KeyE")) inv = 
rotate4(inv, -0.01, 0, 0, 1); 1318 | if (activeKeys.includes("KeyW")) inv = rotate4(inv, 0.005, 1, 0, 0); 1319 | if (activeKeys.includes("KeyS")) inv = rotate4(inv, -0.005, 1, 0, 0); 1320 | 1321 | const gamepads = navigator.getGamepads ? navigator.getGamepads() : []; 1322 | let isJumping = activeKeys.includes("Space"); 1323 | for (let gamepad of gamepads) { 1324 | if (!gamepad) continue; 1325 | 1326 | const axisThreshold = 0.1; // Threshold to detect when the axis is intentionally moved 1327 | const moveSpeed = 0.06; 1328 | const rotateSpeed = 0.02; 1329 | 1330 | // Assuming the left stick controls translation (axes 0 and 1) 1331 | if (Math.abs(gamepad.axes[0]) > axisThreshold) { 1332 | inv = translate4(inv, moveSpeed * gamepad.axes[0], 0, 0); 1333 | carousel = false; 1334 | } 1335 | if (Math.abs(gamepad.axes[1]) > axisThreshold) { 1336 | inv = translate4(inv, 0, 0, -moveSpeed * gamepad.axes[1]); 1337 | carousel = false; 1338 | } 1339 | if(gamepad.buttons[12].pressed || gamepad.buttons[13].pressed){ 1340 | inv = translate4(inv, 0, -moveSpeed*(gamepad.buttons[12].pressed - gamepad.buttons[13].pressed), 0); 1341 | carousel = false; 1342 | } 1343 | 1344 | if(gamepad.buttons[14].pressed || gamepad.buttons[15].pressed){ 1345 | inv = translate4(inv, -moveSpeed*(gamepad.buttons[14].pressed - gamepad.buttons[15].pressed), 0, 0); 1346 | carousel = false; 1347 | } 1348 | 1349 | // Assuming the right stick controls rotation (axes 2 and 3) 1350 | if (Math.abs(gamepad.axes[2]) > axisThreshold) { 1351 | inv = rotate4(inv, rotateSpeed * gamepad.axes[2], 0, 1, 0); 1352 | carousel = false; 1353 | } 1354 | if (Math.abs(gamepad.axes[3]) > axisThreshold) { 1355 | inv = rotate4(inv, -rotateSpeed * gamepad.axes[3], 1, 0, 0); 1356 | carousel = false; 1357 | } 1358 | 1359 | let tiltAxis = gamepad.buttons[6].value - gamepad.buttons[7].value; 1360 | if (Math.abs(tiltAxis) > axisThreshold) { 1361 | inv = rotate4(inv, rotateSpeed * tiltAxis, 0, 0, 1); 1362 | carousel = false; 1363 | } 1364 | 
if (gamepad.buttons[4].pressed && !leftGamepadTrigger) { 1365 | camera = cameras[(cameras.indexOf(camera)+1)%cameras.length] 1366 | inv = invert4(getViewMatrix(camera)); 1367 | carousel = false; 1368 | } 1369 | if (gamepad.buttons[5].pressed && !rightGamepadTrigger) { 1370 | camera = cameras[(cameras.indexOf(camera)+cameras.length-1)%cameras.length] 1371 | inv = invert4(getViewMatrix(camera)); 1372 | carousel = false; 1373 | } 1374 | leftGamepadTrigger = gamepad.buttons[4].pressed; 1375 | rightGamepadTrigger = gamepad.buttons[5].pressed; 1376 | if (gamepad.buttons[0].pressed) { 1377 | isJumping = true; 1378 | carousel = false; 1379 | } 1380 | if(gamepad.buttons[3].pressed){ 1381 | carousel = true; 1382 | } 1383 | } 1384 | 1385 | if ( 1386 | ["KeyJ", "KeyK", "KeyL", "KeyI"].some((k) => activeKeys.includes(k)) 1387 | ) { 1388 | let d = 4; 1389 | inv = translate4(inv, 0, 0, d); 1390 | inv = rotate4( 1391 | inv, 1392 | activeKeys.includes("KeyJ") 1393 | ? -0.05 1394 | : activeKeys.includes("KeyL") 1395 | ? 0.05 1396 | : 0, 1397 | 0, 1398 | 1, 1399 | 0, 1400 | ); 1401 | inv = rotate4( 1402 | inv, 1403 | activeKeys.includes("KeyI") 1404 | ? 0.05 1405 | : activeKeys.includes("KeyK") 1406 | ? 
-0.05 1407 | : 0, 1408 | 1, 1409 | 0, 1410 | 0, 1411 | ); 1412 | inv = translate4(inv, 0, 0, -d); 1413 | } 1414 | 1415 | viewMatrix = invert4(inv); 1416 | 1417 | if (carousel) { 1418 | let inv = invert4(defaultViewMatrix); 1419 | 1420 | // const t = Math.sin((Date.now() - start) / 1500); 1421 | // const t = (Date.now() - start) / 1500; 1422 | const t = 0; 1423 | // inv = translate4(inv, .1*Math.sin(t*Math.PI), 0, .1*Math.cos(t*Math.PI)); 1424 | // inv = rotate4(inv, -0.1 * t, 1, 0, 0); 1425 | 1426 | viewMatrix = invert4(inv); 1427 | } 1428 | 1429 | if (isJumping) { 1430 | jumpDelta = Math.min(1, jumpDelta + 0.05); 1431 | } else { 1432 | jumpDelta = Math.max(0, jumpDelta - 0.05); 1433 | } 1434 | 1435 | let inv2 = invert4(viewMatrix); 1436 | inv2 = translate4(inv2, 0, -jumpDelta, 0); 1437 | inv2 = rotate4(inv2, -0.1 * jumpDelta, 1, 0, 0); 1438 | let actualViewMatrix = invert4(inv2); 1439 | 1440 | const viewProj = multiply4(projectionMatrix, actualViewMatrix); 1441 | worker.postMessage({ view: viewProj }); 1442 | 1443 | const currentFps = 1000 / (now - lastFrame) || 0; 1444 | avgFps = avgFps * 0.9 + currentFps * 0.1; 1445 | 1446 | if (vertexCount > 0) { 1447 | document.getElementById("spinner").style.display = "none"; 1448 | gl.uniformMatrix4fv(u_view, false, actualViewMatrix); 1449 | gl.clear(gl.COLOR_BUFFER_BIT); 1450 | gl.drawArraysInstanced(gl.TRIANGLE_FAN, 0, 4, vertexCount); 1451 | } else { 1452 | gl.clear(gl.COLOR_BUFFER_BIT); 1453 | document.getElementById("spinner").style.display = ""; 1454 | start = Date.now() + 2000; 1455 | } 1456 | // const progress = (100 * vertexCount) / (splatData.length / rowLength); 1457 | // if (progress < 100) { 1458 | // document.getElementById("progress").style.width = progress + "%"; 1459 | // } else { 1460 | // document.getElementById("progress").style.display = "none"; 1461 | // } 1462 | fps.innerText = Math.round(avgFps); 1463 | const curFrameDOM = document.getElementById("frame"); 1464 | curFrameDOM.innerText = curFrame; 1465 
| if (isNaN(currentCameraIndex)){ 1466 | camid.innerText = ""; 1467 | } 1468 | lastFrame = now; 1469 | requestAnimationFrame(frame); 1470 | }; 1471 | frame(); 1472 | 1473 | 1474 | // const max_fps = 100; 1475 | // function animate() { 1476 | // // perform some animation task here 1477 | // setTimeout(() => { 1478 | // frame(Date.now()); 1479 | // requestAnimationFrame(animate); 1480 | // }, 1000.0 / max_fps); 1481 | // } 1482 | // animate(); 1483 | 1484 | 1485 | 1486 | 1487 | 1488 | window.addEventListener("hashchange", (e) => { 1489 | try { 1490 | viewMatrix = JSON.parse(decodeURIComponent(location.hash.slice(1))); 1491 | carousel = false; 1492 | } catch (err) {} 1493 | }); 1494 | 1495 | const preventDefault = (e) => { 1496 | e.preventDefault(); 1497 | e.stopPropagation(); 1498 | }; 1499 | document.addEventListener("dragenter", preventDefault); 1500 | document.addEventListener("dragover", preventDefault); 1501 | document.addEventListener("dragleave", preventDefault); 1502 | 1503 | let bytesRead = 0; 1504 | let gaussians = []; 1505 | let rowBuffer = new Uint8Array(STREAM_ROW_LENGTH); 1506 | let rowBufferOffset = 0; 1507 | 1508 | console.log("start reading"); 1509 | /* --------------------------------- step 0 --------------------------------- */ 1510 | // read frame 0, with sizing MAX_CAP 1511 | while (bytesRead < TOTAL_CAP * STREAM_ROW_LENGTH) { 1512 | let { done, value } = await reader.read(); 1513 | // if there is any reminding fro previous read 1514 | let value_offset = 0; 1515 | if (rowBufferOffset > 0){ 1516 | if (value.length + rowBufferOffset < STREAM_ROW_LENGTH){ 1517 | rowBuffer.set(value, rowBufferOffset); 1518 | rowBufferOffset += value.length; 1519 | continue; 1520 | } 1521 | value_offset = STREAM_ROW_LENGTH - rowBufferOffset; 1522 | rowBuffer.set(value.slice(0, STREAM_ROW_LENGTH - rowBufferOffset), rowBufferOffset); 1523 | gaussians.push(PARSE_RAW_BYTES_COMPACT(rowBuffer)[0]); 1524 | // console.log("load single gaussian #", gaussians.length); 1525 | 
bytesRead += STREAM_ROW_LENGTH; 1526 | rowBuffer.fill(0); 1527 | rowBufferOffset = 0 1528 | } 1529 | // batch parse this read 1530 | value = value.slice(value_offset); 1531 | const num_of_gs = Math.floor(value.length / STREAM_ROW_LENGTH); 1532 | let parsed = PARSE_RAW_BYTES_COMPACT(value.slice(0, num_of_gs * STREAM_ROW_LENGTH)); 1533 | gaussians = gaussians.concat(parsed); 1534 | let value_rest = value.slice(num_of_gs * STREAM_ROW_LENGTH); 1535 | rowBuffer.set(value_rest); 1536 | rowBufferOffset = value_rest.length; 1537 | bytesRead += num_of_gs * STREAM_ROW_LENGTH; 1538 | } 1539 | console.log("read " + gaussians.length + " gaussians"); 1540 | console.log("finished reading"); 1541 | /* --------------------------------- step 1 --------------------------------- */ 1542 | // post frame 0 to worker 1543 | let ret = GS_TO_VERTEX_COMPACT(gaussians.slice(0, TOTAL_CAP), full_gs = true); 1544 | worker.postMessage({ 1545 | buffer: ret.all.buffer, 1546 | vertexCount: TOTAL_CAP, 1547 | }); 1548 | console.log(ret.spans) 1549 | 1550 | /* --------------------------------- step 2 --------------------------------- */ 1551 | 1552 | 1553 | 1554 | // setup frame ticker 1555 | curFrame ++; 1556 | const FPS = config.FPS; 1557 | 1558 | 1559 | const pauseBtn = document.getElementById("pause"); 1560 | var paused = 0; 1561 | pauseBtn.addEventListener("click", () => { 1562 | paused = !paused; 1563 | if (paused){ 1564 | pauseBtn.innerText = "⏩ Resume"; 1565 | } else { 1566 | pauseBtn.innerText = "⏸️ Pause"; 1567 | } 1568 | }) 1569 | pauseBtn.style.cursor = "pointer"; 1570 | 1571 | 1572 | let frameEvents = []; 1573 | 1574 | const frame_ticker = setInterval(() => { 1575 | if ((loadedFrame < Math.min(curFrame, MAX_FRAME-2))) { 1576 | console.log("loaded:", loadedFrame, " / cur:", curFrame, " / max:", MAX_FRAME); 1577 | return; 1578 | } 1579 | if (paused) return; 1580 | let updated = false; 1581 | let reset_slices = []; 1582 | for (let i = 0; i < frameEvents.length; i++){ 1583 | if 
(frameEvents[i].frame == curFrame && frameEvents[i].type == "reset"){ 1584 | reset_slices.push(frameEvents[i].sliceId); 1585 | worker.postMessage({ 1586 | resetSlice: { 1587 | sliceId: frameEvents[i].sliceId, 1588 | data: frameEvents[i].data, 1589 | frame: frameEvents[i].frame, 1590 | } 1591 | }) 1592 | updated = true; 1593 | } else if (frameEvents[i].frame == curFrame && frameEvents[i].type == "append"){ 1594 | worker.postMessage({ 1595 | appendSlice: { 1596 | sliceId: frameEvents[i].sliceId, 1597 | data: frameEvents[i].data, 1598 | frame: frameEvents[i].frame, 1599 | } 1600 | }); 1601 | updated = true; 1602 | } 1603 | } 1604 | 1605 | if (updated) { 1606 | console.log("frame #", curFrame, " : starts"); 1607 | worker.postMessage( 1608 | {reSort: { 1609 | reset_slices: reset_slices, 1610 | }}); 1611 | curFrame++; 1612 | update_FPS(); 1613 | console.log(`avg time cost of each step: 1614 | parse: ${average(global_log_1["parse"])} ms 1615 | vertex: ${average(global_log_1["vertex"])} ms 1616 | `); 1617 | } 1618 | 1619 | if (curFrame >= MAX_FRAME){ 1620 | // clearInterval(frame_ticker); 1621 | // return; 1622 | console.log("restart ticker"); 1623 | let ret = GS_TO_VERTEX_COMPACT(init_gs.slice(0, TOTAL_CAP), full_gs = true) 1624 | worker.postMessage({ 1625 | buffer: ret.all.buffer, 1626 | vertexCount: TOTAL_CAP, 1627 | }); 1628 | curFrame = 1; 1629 | } 1630 | update_curframe(curFrame); 1631 | }, Math.ceil(1000 / FPS)); 1632 | 1633 | /* --------------------------------- step 3 --------------------------------- */ 1634 | // append per frame events based on received data 1635 | 1636 | bytesRead = 0; 1637 | let init_gs = gaussians.splice(0, TOTAL_CAP); 1638 | loadedFrame = 0; 1639 | // should not touch rowBuffer and rowBufferOffset 1640 | const reset_cam_btn = document.getElementById("reset_cam"); 1641 | reset_cam_btn.addEventListener("click", () => { 1642 | viewMatrix = getViewMatrix(cameras[0]); 1643 | }); 1644 | while (bytesRead < SLICE_CAP * STREAM_ROW_LENGTH && 
loadedFrame < MAX_FRAME) { 1645 | let { done, value } = await reader.read(); 1646 | // if there is any reminding fro previous read 1647 | let value_offset = 0; 1648 | if (rowBufferOffset > 0){ 1649 | if (value.length + rowBufferOffset < STREAM_ROW_LENGTH){ 1650 | rowBuffer.set(value, rowBufferOffset); 1651 | rowBufferOffset += value.length; 1652 | continue; 1653 | } 1654 | value_offset = STREAM_ROW_LENGTH - rowBufferOffset; 1655 | rowBuffer.set(value.slice(0, STREAM_ROW_LENGTH - rowBufferOffset), rowBufferOffset); 1656 | gaussians.push(PARSE_RAW_BYTES_COMPACT(rowBuffer)[0]); 1657 | // console.log("load single gaussian #", gaussians.length); 1658 | bytesRead += STREAM_ROW_LENGTH; 1659 | rowBuffer.fill(0); 1660 | rowBufferOffset = 0 1661 | } 1662 | if (done && isNaN(value)) { 1663 | break; 1664 | } 1665 | // batch parse this read 1666 | value = value.slice(value_offset); 1667 | const num_of_gs = Math.floor(value.length / STREAM_ROW_LENGTH); 1668 | let parsed = PARSE_RAW_BYTES_COMPACT(value.slice(0, num_of_gs * STREAM_ROW_LENGTH)); 1669 | gaussians = gaussians.concat(parsed); 1670 | let value_rest = value.slice(num_of_gs * STREAM_ROW_LENGTH); 1671 | rowBuffer.set(value_rest); 1672 | rowBufferOffset = value_rest.length; 1673 | bytesRead += num_of_gs * STREAM_ROW_LENGTH; 1674 | 1675 | const PREFETCH_FRAME = FPS * config.PREFETCH_SEC; 1676 | while (bytesRead >= SLICE_CAP * STREAM_ROW_LENGTH){ 1677 | loadedFrame++; 1678 | update_buffered(loadedFrame); 1679 | let ret = GS_TO_VERTEX_COMPACT(gaussians.splice(0, SLICE_CAP)); 1680 | console.log("frame #", loadedFrame, " : spans", ret.spans); 1681 | // frameEvents.push({ 1682 | // frame: loadedFrame, 1683 | // data: ret.all, 1684 | // type: "reset" 1685 | // }); 1686 | let curFrameOverwrite = false; 1687 | for (let span of ret.spans){ 1688 | frameEvents.push({ 1689 | frame: span.frame, 1690 | sliceId: (loadedFrame-1) % SLICE_NUM, 1691 | data: ret.all.slice(span.from * VERTEX_ROW_LENGTH, span.to * VERTEX_ROW_LENGTH), 1692 | 
type: (span.frame == loadedFrame) ? "reset": "append", 1693 | }); 1694 | if (span.frame == loadedFrame) curFrameOverwrite = true; 1695 | } 1696 | if (!curFrameOverwrite){ 1697 | console.warn("frame #", loadedFrame, " has no starting gaussians") 1698 | frameEvents.push({ 1699 | frame: loadedFrame, 1700 | sliceId: (loadedFrame-1) % SLICE_NUM, 1701 | data: ret.all.slice(0, 0), 1702 | type: "reset", 1703 | }); 1704 | } 1705 | 1706 | bytesRead -= SLICE_CAP * STREAM_ROW_LENGTH; 1707 | } 1708 | if (done) { 1709 | break; 1710 | } 1711 | } 1712 | 1713 | 1714 | } 1715 | 1716 | function average(arr){ 1717 | return arr.reduce((a,b) => a+b, 0) / arr.length; 1718 | } 1719 | 1720 | async function entry_point() { 1721 | const params = new URLSearchParams(location.search); 1722 | 1723 | const target = params.get("target") || "default"; 1724 | setup_dropdown(target); 1725 | let target_config = new URL( 1726 | `config_${target}.json`, 1727 | atob('aHR0cHM6Ly9odWdnaW5nZmFjZS5jby9OZXV0cmlub0xpdS90ZXN0R1MvcmF3L21haW4v'), 1728 | ); 1729 | // target_config = "config_act.json"; 1730 | 1731 | const resp = await fetch(target_config); 1732 | const config = await resp.json(); 1733 | console.log("config loaded: ", config); 1734 | setup_consts(config); 1735 | main(config); 1736 | } 1737 | 1738 | function setup_dropdown(target) { 1739 | const dropdown = document.getElementById("scene_name"); 1740 | for (let i = 0; i < dropdown.options.length; i++) { 1741 | if (dropdown.options[i].value == target) { 1742 | dropdown.selectedIndex = i; 1743 | break; 1744 | } 1745 | } 1746 | dropdown.addEventListener("change", (e) => { 1747 | location.search = `?target=${e.target.value}`; 1748 | }); 1749 | } 1750 | 1751 | function update_curframe(cur_frame) { 1752 | let curFrameElem = document.getElementById("progress-current"); 1753 | 1754 | // Calculate percentage for the current frame 1755 | let curFramePercent = (cur_frame / (MAX_FRAME-1)) * 100; 1756 | curFrameElem.style.width = curFramePercent + "%"; 1757 | } 
1758 | 1759 | // Function to update the buffered (loaded) frame position 1760 | function update_buffered(loaded_frame) { 1761 | let bufferedElem = document.getElementById("progress-buffered"); 1762 | 1763 | // Calculate percentage for the buffered frame 1764 | let bufferedPercent = ((loaded_frame+1) / (MAX_FRAME-1)) * 100; 1765 | bufferedElem.style.width = bufferedPercent + "%"; 1766 | } 1767 | 1768 | var lastFrame = NaN; 1769 | var videoAvgFps = 30; 1770 | function update_FPS() { 1771 | if (isNaN(lastFrame)){ 1772 | lastFrame = Date.now(); 1773 | return; 1774 | } 1775 | let now = Date.now(); 1776 | let currentFps = 1000 / (now - lastFrame) || 0; 1777 | videoAvgFps = videoAvgFps * 0.9 + currentFps * 0.1; 1778 | 1779 | const curFPS= document.getElementById("FPS"); 1780 | curFPS.innerText = Math.ceil(videoAvgFps); 1781 | lastFrame = now; 1782 | } 1783 | 1784 | entry_point().catch((err) => { 1785 | document.getElementById("spinner").style.display = "none"; 1786 | document.getElementById("message").innerText = err.toString(); 1787 | }); 1788 | -------------------------------------------------------------------------------- /styles.css: -------------------------------------------------------------------------------- 1 | body { 2 | overflow: hidden; 3 | margin: 0; 4 | height: 100vh; 5 | width: 100vw; 6 | font-family: sans-serif; 7 | background: black; 8 | text-shadow: 0 0 3px black; 9 | } 10 | a, body { 11 | color: white; 12 | } 13 | #info { 14 | z-index: 100; 15 | position: absolute; 16 | top: 10px; 17 | left: 15px; 18 | } 19 | h3 { 20 | margin: 5px 0; 21 | } 22 | p { 23 | margin: 5px 0; 24 | font-size: small; 25 | } 26 | 27 | .cube-wrapper { 28 | transform-style: preserve-3d; 29 | } 30 | 31 | .cube { 32 | transform-style: preserve-3d; 33 | transform: rotateX(45deg) rotateZ(45deg); 34 | animation: rotation 2s infinite; 35 | } 36 | 37 | .cube-faces { 38 | transform-style: preserve-3d; 39 | height: 80px; 40 | width: 80px; 41 | position: relative; 42 | transform-origin: 0 
0;
    transform: translateX(0) translateY(0) translateZ(-40px);
}

/* One face of the loading cube; positional variants follow. */
.cube-face {
    position: absolute;
    inset: 0;
    background: #0017ff;
    border: solid 1px #ffffff;
}
.cube-face.top {
    transform: translateZ(80px);
}
.cube-face.front {
    transform-origin: 0 50%;
    transform: rotateY(-90deg);
}
.cube-face.back {
    transform-origin: 0 50%;
    transform: rotateY(-90deg) translateZ(-80px);
}
.cube-face.right {
    transform-origin: 50% 0;
    transform: rotateX(-90deg) translateY(-80px);
}
.cube-face.left {
    transform-origin: 50% 0;
    transform: rotateX(-90deg) translateY(-80px) translateZ(80px);
}

/* Tumbling animation driving .cube (ease-out, ease-in, ease-out). */
@keyframes rotation {
    0% {
        transform: rotateX(45deg) rotateY(0) rotateZ(45deg);
        animation-timing-function: cubic-bezier(0.17, 0.84, 0.44, 1);
    }
    50% {
        transform: rotateX(45deg) rotateY(0) rotateZ(225deg);
        animation-timing-function: cubic-bezier(0.76, 0.05, 0.86, 0.06);
    }
    100% {
        transform: rotateX(45deg) rotateY(0) rotateZ(405deg);
        animation-timing-function: cubic-bezier(0.17, 0.84, 0.44, 1);
    }
}

/* Fullscreen centered overlays (loader scene and error message). */
.scene,
#message {
    position: absolute;
    display: flex;
    top: 0;
    right: 0;
    left: 0;
    bottom: 0;
    z-index: 2;
    height: 100%;
    width: 100%;
    align-items: center;
    justify-content: center;
}
#message {
    font-weight: bold;
    font-size: large;
    color: red;
    pointer-events: none;
}

details {
    font-size: small;
}

/* Thin top loading bar. */
#progress {
    position: absolute;
    top: 0;
    height: 5px;
    background: blue;
    z-index: 99;
    transition: width 0.1s ease-in-out;
}

#quality {
    position: absolute;
    bottom: 10px;
    z-index: 999;
    right: 50px;
}

#caminfo {
    position: absolute;
    top: 10px;
    z-index: 999;
    right: 10px;
}
/* The WebGL render surface; touch-action: none lets JS own all gestures. */
#canvas {
    display: block;
    position: absolute;
    top: 0;
    left: 0;
    width: 100%;
    height: 100%;
    touch-action: none;
}

#instructions {
    background: rgba(0,0,0,0.6);
    white-space: pre-wrap;
    padding: 10px;
    border-radius: 10px;
    font-size: x-small;
}
/* "nohf" (no header/footer) embed mode: hide chrome, recolor accents. */
body.nohf .nohf {
    display: none;
}
body.nohf #progress, body.nohf .cube-face {
    background: #ff9d0d;
}

/* Video-style timeline: container track plus two overlaid bars. */
.progress-container {
    width: 40%;
    margin: 0 auto;
    background-color: #f3f3f3;
    border-radius: 10px;
    position: relative;
    padding: 2px 2px 2px 2px;
    top: 90%;
    height: 10px;
    box-shadow: #adacac 0 0 10px;
}
.progress-container-inner {
    /* margin: 0 2px 0 2px; */
    position: relative;
}
/* Buffered progress (loaded frames). */
.progress-buffered {
    position: absolute;
    height: 10px;
    background-color: #a5a5a5;
    border-radius: 10px;
}

/* Current frame progress (playhead). */
.progress-current {
    position: absolute;
    height: 10px;
    background-color: #3a3a3a;
    border-radius: 10px;
}

.control-btn {
    cursor: pointer;
    text-decoration: underline;
    font-weight: 800;
}
--------------------------------------------------------------------------------