`<div>` and adding an `onMouseMove` event handler to it:
129 |
130 | ```jsx
131 | const onMouseMove = useCallback(({ clientX, clientY, currentTarget }) => {
132 | const { left, top, width, height } = currentTarget.getBoundingClientRect()
133 | setLightPosition({
134 | x: ((clientX - left) / width) * 2 - 1,
135 | y: ((clientY - top) / height) * -2 + 1
136 | })
137 | }, [])
138 |
139 | // ...
140 |
141 | <div onMouseMove={onMouseMove}>
142 | <MagicCanvas renderProps={{ lightPosition }} /> {/* other props omitted for brevity */}
143 | </div>
144 | ```
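The `setLightPosition` call above presumably comes from ordinary React state defined in the same component. A minimal sketch of what that might look like (the initial `{ x: 0, y: 0 }` value is an assumption):

```jsx
import { useState } from "react"

// Light position in normalized [-1, 1] coordinates, updated by the onMouseMove handler above.
const [lightPosition, setLightPosition] = useState({ x: 0, y: 0 })
```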
145 |
146 | That's it! We now have a MagicCanvas component that forwards every update to its `renderProps` prop along to the Renderer on each frame.
147 |
148 | When rendering locally (with `remote={false}`), these prop updates are sent as messages to the Web Worker where the Renderer is running. When rendering remotely (with `remote={true}`), they are sent over a WebSocket to the Renderer running in the backend.
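You can see the remote path in `RemoteBackend.ts` below: every `setRenderProps` call becomes a `{ renderProps }` message on the WebSocket. `LocalBackend.ts` isn't included in this excerpt, but the worker path is conceptually the same; here is only a rough sketch of what it could look like (the class name, message shape, and worker setup are assumptions, not the actual implementation):

```ts
// Rough sketch of a Web Worker-backed render backend (not the actual LocalBackend).
class WorkerBackendSketch {
  private worker: Worker

  constructor(rendererUrl: string) {
    // Assumes the renderer runs inside a module worker.
    this.worker = new Worker(rendererUrl, { type: "module" })
  }

  setRenderProps(props: Record<string, any>) {
    // Each prop update becomes a postMessage; the worker applies it on its next frame.
    this.worker.postMessage({ renderProps: props })
  }

  destroy() {
    this.worker.terminate()
  }
}
```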
149 |
--------------------------------------------------------------------------------
/client/dis-lib/MagicCanvas.tsx:
--------------------------------------------------------------------------------
1 | import React, { useCallback, useEffect, useRef } from "react"
2 | import { LocalBackend } from "./LocalBackend"
3 | import { RemoteBackend } from "./RemoteBackend"
4 |
5 | // Types
6 |
7 | export type RenderProps = Record<string, any>
8 | export type RenderState = Record<string, any>
9 |
10 | /** Backends generically represent the methods we can use to render:
11 | * local or remote.
12 | */
13 | export interface RenderBackend {
14 | /** Set the container to render into. */
15 | setContainer: (container: HTMLElement) => void
16 |
17 | /** Set the props passed into the renderer on every frame. */
18 | setRenderProps: (props: RenderProps) => void
19 |
20 | setRenderState: (state: RenderState) => void
21 |
22 | /** Get the current render state. */
23 | getRenderState: () => Promise<RenderState>
24 |
25 | /** Clean up. */
26 | destroy: () => void
27 | }
28 |
29 | class MCanvas {
30 | remote: boolean = false
31 | container: HTMLElement | null = null
32 | localContainer: HTMLElement | null = null
33 | remoteContainer: HTMLElement | null = null
34 |
35 | localBackend: RenderBackend
36 | remoteBackend: RenderBackend
37 |
38 | constructor(renderUrl: string, renderProps: RenderProps = {}, initialRenderState: RenderState = {}) {
39 | this.localBackend = new LocalBackend(renderUrl, renderProps, initialRenderState)
40 | this.remoteBackend = new RemoteBackend(renderUrl, renderProps, initialRenderState)
41 | }
42 |
43 | setContainer(container: HTMLElement) {
44 | const innerContainer = document.createElement('div')
45 | innerContainer.classList.add('magiccanvas-root')
46 | innerContainer.style.position = 'relative'
47 | innerContainer.style.height = `${container.offsetHeight}px`
48 | innerContainer.style.width = `${container.offsetWidth}px`
49 | container.appendChild(innerContainer)
50 |
51 | const localContainer = document.createElement('div')
52 | localContainer.classList.add('magiccanvas-local')
53 | localContainer.style.position = 'absolute'
54 | localContainer.style.top = '0'
55 | localContainer.style.left = '0'
56 | localContainer.style.bottom = '0'
57 | localContainer.style.right = '0'
58 | localContainer.style.transition = 'all 150ms linear'
59 | localContainer.style.opacity = this.remote ? '0' : '1'
60 | innerContainer.appendChild(localContainer)
61 |
62 | const remoteContainer = document.createElement('div')
63 | remoteContainer.classList.add('magiccanvas-remote')
64 | remoteContainer.style.position = 'absolute'
65 | remoteContainer.style.top = '0'
66 | remoteContainer.style.left = '0'
67 | remoteContainer.style.bottom = '0'
68 | remoteContainer.style.right = '0'
69 | remoteContainer.style.transition = 'all 150ms linear'
70 | remoteContainer.style.opacity = this.remote ? '1' : '0'
71 | innerContainer.appendChild(remoteContainer)
72 |
73 | this.container = innerContainer
74 | this.localContainer = localContainer
75 | this.remoteContainer = remoteContainer
76 |
77 | this.localBackend.setContainer(localContainer)
78 | this.remoteBackend.setContainer(remoteContainer)
79 | }
80 |
81 | destroy() {
82 | this.localBackend.destroy()
83 | this.remoteBackend.destroy()
84 | }
85 |
86 | async setRemote(remote: boolean) {
87 | if (this.remote === remote) {
88 | return
89 | }
90 |
91 | this.remote = remote
92 |
93 | if (this.remote) {
94 | console.log('getting local state')
95 | let renderState = await this.localBackend.getRenderState()
96 | console.log('got local state', renderState)
97 | this.remoteBackend.setRenderState(renderState)
98 | } else {
99 | console.log('getting remote state')
100 | let renderState = await this.remoteBackend.getRenderState()
101 | console.log('got remote state', renderState)
102 | this.localBackend.setRenderState(renderState)
103 | }
104 |
105 | if (this.localContainer !== null && this.remoteContainer !== null) {
106 | this.localContainer.style.opacity = this.remote ? '0' : '1'
107 | this.remoteContainer.style.opacity = this.remote ? '1' : '0'
108 | }
109 | }
110 |
111 | setRenderProps(props: RenderProps) {
112 | this.localBackend.setRenderProps(props)
113 | this.remoteBackend.setRenderProps(props)
114 | }
115 | }
116 |
117 | type MagicCanvasProps = {
118 | /** Height of the canvas. */
119 | height: number
120 |
121 | /** Width of the canvas. */
122 | width: number
123 |
124 | /** Props passed into the renderer. */
125 | renderProps?: RenderProps,
126 |
127 | /** Initial state passed to the renderer.
128 | * Changes to this after initialization are ignored,
129 | * because the renderer is stateful. */
130 | initialRenderState?: RenderState,
131 |
132 | /** The URL of the JavaScript file to be loaded as the renderer.
133 | * Note: changes to this after initial construction currently have no effect.
134 | */
135 | rendererUrl: string
136 |
137 | /** Whether to run the renderer remotely. */
138 | remote?: boolean
139 | }
140 |
141 | /** Hook to create an MCanvas instance. */
142 | function useMagicCanvas(rendererUrl: string, renderProps: RenderProps = {}, renderState: RenderState = {}): MCanvas {
143 | const mcRef = useRef<MCanvas | null>(null)
144 |
145 | if (typeof window === 'undefined') {
146 | // If we are in SSR, return a dummy object.
147 | return {} as MCanvas
148 | }
149 |
150 | if (mcRef.current === null) {
151 | mcRef.current = new MCanvas(rendererUrl, renderProps, renderState)
152 | }
153 |
154 | return mcRef.current
155 | }
156 |
157 | export function MagicCanvas(props: MagicCanvasProps): React.ReactElement {
158 | const mcRef = useMagicCanvas(props.rendererUrl, props.renderProps, props.initialRenderState)
159 |
160 | useEffect(() => {
161 | mcRef.setRenderProps(props.renderProps || {})
162 | }, [props.renderProps])
163 |
164 | useEffect(() => {
165 | mcRef.setRemote(props.remote || false)
166 | }, [props.remote])
167 |
168 | const setContainer = useCallback((container: HTMLElement | null) => {
169 | if (container === null) {
170 | mcRef.destroy()
171 | } else {
172 | mcRef.setContainer(container)
173 | }
174 | }, [])
175 |
176 | return (
177 | // NOTE: the exact JSX here is an assumption: a bare container div sized by the width/height props
178 | <div
179 | ref={setContainer}
180 | style={{ width: props.width, height: props.height }}
181 | />
182 | )
183 |
184 | }
185 |
--------------------------------------------------------------------------------
/client/dis-lib/RemoteBackend.ts:
--------------------------------------------------------------------------------
1 | import { RenderBackend, RenderProps, RenderState } from "./MagicCanvas";
2 | import { PromiseBox } from "./util";
3 |
4 | /** Copy of IncomingMessage from the handler; TODO: make this an import from a common package. */
5 | interface MessageToRemote {
6 | /** Update the local render props. */
7 | renderProps?: RenderProps
8 |
9 | /** Register the active websocket connection as belonging to the client or streamer. */
10 | register?: 'client'
11 |
12 | init?: {
13 | /** Renderer JavaScript module as a string. */
14 | renderer: string
15 | }
16 |
17 | /** Initial render state. */
18 | renderState?: RenderState
19 |
20 | toStreamer?: {
21 | /** Send the RTC SDP to the other party. */
22 | rtc?: RTCSessionDescriptionInit
23 |
24 | /** Send the ICE candidate to the other party. */
25 | ice?: { candidate: string }
26 | }
27 |
28 | requestState?: boolean
29 | }
30 |
31 | interface MessageFromRemote {
32 | renderState?: RenderState
33 |
34 | rtc?: RTCSessionDescriptionInit
35 | }
36 |
37 | /** Represents a WebSocket connection to a remote handler. */
38 | class HandlerConnection {
39 | ws: WebSocket
40 | private _remoteSdp: PromiseBox<RTCSessionDescriptionInit>
41 | private _ready: PromiseBox<void>
42 | private statePromise: PromiseBox<RenderState> | null = null
43 |
44 | constructor(url: string) {
45 | this._remoteSdp = new PromiseBox()
46 | this._ready = new PromiseBox()
47 | this.ws = new WebSocket(url)
48 |
49 | this.ws.onopen = () => {
50 | console.log("Connected to handler.")
51 | this._ready.set()
52 | }
53 |
54 | this.ws.onmessage = (e) => {
55 | const msg = JSON.parse(e.data)
56 | this.dispatch(msg)
57 | }
58 |
59 | this.ws.onerror = (e) => {
60 | console.log('error', e)
61 | }
62 | }
63 |
64 | async init(rendererJavascript: string, initialRenderProps: RenderProps, initialRenderState: RenderState) {
65 | await this._ready.get()
66 | this.send({
67 | register: "client",
68 | init: {
69 | renderer: rendererJavascript,
70 | },
71 | renderState: initialRenderState,
72 | renderProps: initialRenderProps
73 | })
74 | }
75 |
76 | updateProps(props: RenderProps) {
77 | this.send({ renderProps: props })
78 | }
79 |
80 | updateState(state: RenderState) {
81 | this.send({ renderState: state })
82 | }
83 |
84 | remoteSdp(): Promise<RTCSessionDescriptionInit> {
85 | return this._remoteSdp.get()
86 | }
87 |
88 | sendSdp(sdp: RTCSessionDescriptionInit) {
89 | this.send({ toStreamer: { rtc: sdp } })
90 | }
91 |
92 | destroy() {
93 | this.ws.close()
94 | }
95 |
96 | private dispatch(msg: MessageFromRemote) {
97 | console.log('Got message', msg)
98 | if (msg.rtc !== undefined) {
99 | this._remoteSdp.set(msg.rtc)
100 | }
101 | if (msg.renderState !== undefined) {
102 | if (this.statePromise !== null) {
103 | this.statePromise.set(msg.renderState)
104 | }
105 | }
106 | }
107 |
108 | private send(msg: MessageToRemote) {
109 | if (this.ws.readyState === WebSocket.OPEN) {
110 | this.ws.send(JSON.stringify(msg))
111 | }
112 | }
113 |
114 | getRenderState(): Promise<RenderState> {
115 | this.statePromise = new PromiseBox()
116 | this.send({ requestState: true })
117 | return this.statePromise.get()
118 | }
119 |
120 | ready(): Promise<void> {
121 | return this._ready.get()
122 | }
123 | }
124 |
125 | class RTCConnection {
126 | conn: RTCPeerConnection
127 | iceGatheringComplete: PromiseBox<void> = new PromiseBox()
128 |
129 | constructor(
130 | private handlerConnection: HandlerConnection,
131 | private rendererUrl: string,
132 | private initialRenderProps: RenderProps,
133 | private initialRenderState: RenderState,
134 | private videoEl: HTMLVideoElement
135 | ) {
136 |
137 | // for the client, this URL will end up being a jamsocket backend
138 | this.conn = new RTCPeerConnection({
139 | bundlePolicy: "max-bundle",
140 | iceTransportPolicy: "relay",
141 | iceServers: [
142 | {
143 | urls: "stun:relay.metered.ca:80",
144 | },
145 | {
146 | urls: "turn:relay.metered.ca:80",
147 | username: "c7f21a3a3a5f693e365dbc55",
148 | credential: "FyMQ1pmIIaUiFeCQ",
149 | },
150 | {
151 | urls: "turn:relay.metered.ca:443",
152 | username: "c7f21a3a3a5f693e365dbc55",
153 | credential: "FyMQ1pmIIaUiFeCQ",
154 | },
155 | {
156 | urls: "turn:relay.metered.ca:443?transport=tcp",
157 | username: "c7f21a3a3a5f693e365dbc55",
158 | credential: "FyMQ1pmIIaUiFeCQ",
159 | },
160 | ],
161 | });
162 |
163 | this.conn.onicecandidate = (e) => {
164 | console.log('Candidate', e)
165 | console.log('status', this.conn?.iceGatheringState)
166 | }
167 |
168 | this.conn.ontrack = (t) => {
169 | console.log('Got track')
170 | this.videoEl.srcObject = t.streams[0]
171 | console.log('paused', this.videoEl.paused)
172 | }
173 |
174 | this.conn.onicegatheringstatechange = () => {
175 | console.log("Ice gathering state", this.conn.iceGatheringState)
176 | if (this.conn.iceGatheringState === "complete") {
177 | this.iceGatheringComplete.set()
178 | }
179 | }
180 | }
181 |
182 | async connect() {
183 | console.log("Fetching renderer")
184 | let rendererJavascript = await fetch(this.rendererUrl).then(r => r.text())
185 |
186 | console.log("Calling init on handler")
187 | this.handlerConnection.init(rendererJavascript, this.initialRenderProps, this.initialRenderState)
188 |
189 | console.log('Waiting for server SDP')
190 | const sdp = await this.handlerConnection.remoteSdp()
191 |
192 | console.log('Gathering ICE candidates.')
193 | await this.conn.setRemoteDescription(sdp)
194 |
195 | console.log('Creating an answer.')
196 | const answer = await this.conn.createAnswer()
197 |
198 | console.log('Setting local description.')
199 | await this.conn.setLocalDescription(answer)
200 | await this.iceGatheringComplete.get()
201 |
202 | if (this.conn.localDescription === null) {
203 | throw new Error('conn.localDescription should not be null')
204 | }
205 |
206 | console.log('Sending client description.', this.conn.localDescription)
207 | this.handlerConnection.sendSdp(this.conn.localDescription)
208 | }
209 | }
210 |
211 | export class RemoteBackend implements RenderBackend {
212 | container: PromiseBox<HTMLElement> = new PromiseBox()
213 | connection: HandlerConnection
214 | rtcConnection: RTCConnection | null = null
215 | videoElement: HTMLVideoElement | null = null
216 |
217 | getWsUrl(): string {
218 | const url = new URL(window.location.href)
219 | if (url.protocol === 'https:') {
220 | url.protocol = 'wss:'
221 | } else {
222 | url.protocol = 'ws:'
223 | }
224 |
225 | url.pathname = '/ws'
226 | url.port = '8080'
227 |
228 | return url.href
229 | }
230 |
231 | constructor(private renderUrl: string, private renderProps: RenderProps = {}, private initialRenderState: RenderState = {}) {
232 | this.connection = new HandlerConnection(this.getWsUrl())
233 | this.initConnection()
234 | }
235 |
236 | private async initConnection() {
237 | await this.connection.init(this.renderUrl, this.renderProps, this.initialRenderState)
238 | this.rtcConnection = new RTCConnection(this.connection, this.renderUrl, this.renderProps, this.initialRenderState, await this.getVideoElement())
239 | await this.rtcConnection.connect()
240 | }
241 |
242 | /** If we already have a video element, return it; otherwise, create one (waiting for the container if necessary) */
243 | private async getVideoElement(): Promise<HTMLVideoElement> {
244 | if (this.videoElement !== null) {
245 | return this.videoElement
246 | }
247 |
248 | const container = await this.container.get()
249 | const video = document.createElement("video")
250 |
251 | video.height = container.clientHeight
252 | video.width = container.clientWidth
253 |
254 | video.autoplay = true
255 | video.muted = true
256 | video.playsInline = true
257 | // video.style.width = "100%"
258 | // video.style.height = "100%"
259 |
260 | video.style.transition = 'all 150ms linear'
261 | video.style.border = '1px solid #ddd'
262 | video.style.width = '100%'
263 | video.style.height = '100%'
264 | video.style.opacity = '1'
265 | video.style.pointerEvents = 'initial'
266 |
267 | container.appendChild(video)
268 | this.videoElement = video
269 | return video
270 | }
271 |
272 | /** Set the container. No-op if the container is already set. */
273 | setContainer(container: HTMLElement) {
274 | this.container.set(container)
275 | }
276 |
277 | setRenderProps(props: RenderProps) {
278 | this.connection.updateProps(props)
279 | }
280 |
281 | setRenderState(state: RenderState) {
282 | this.connection.updateState(state)
283 | }
284 |
285 | getRenderState(): Promise<RenderState> {
286 | return this.connection.getRenderState()
287 | }
288 |
289 | destroy() {
290 | this.connection.destroy()
291 | }
292 | }
293 |
--------------------------------------------------------------------------------
/client/renderers/lidar.render.ts:
--------------------------------------------------------------------------------
1 | import { mat4 } from 'gl-matrix'
2 |
3 | export default async function createRenderer(gl: WebGLRenderingContext) {
4 | const ext = gl.getExtension('OES_texture_float')
5 | if (!ext) {
6 | throw new Error('Unable to get OES_texture_float extension')
7 | }
8 |
9 | // const config = { dataset: '987210.bin', fadeHeightOffsetRange: [350, 1000] }
10 | // const config = { dataset: 'midtown-sampled-sm.bin', fadeHeightOffsetRange: [450, 1200] }
11 | const config = { dataset: 'midtown-sampled-md.bin', fadeHeightOffsetRange: [450, 1200] }
12 | // const config = { dataset: 'midtown-sampled-lg.bin', fadeHeightOffsetRange: [450, 1200] }
13 | // const config = { dataset: 'midtown-sampled-xl.bin', fadeHeightOffsetRange: [450, 1200] }
14 | // const config = { dataset: 'manhattan-sampled-sm.bin', fadeHeightOffsetRange: [900, 2400] }
15 | // const config = { dataset: 'manhattan-sampled-md.bin', fadeHeightOffsetRange: [900, 2400] }
16 | // const config = { dataset: 'manhattan-sampled-lg.bin', fadeHeightOffsetRange: [900, 2400] }
17 |
18 |
19 | const result = await getLidarStreamer(gl, `https://nyc-lidar-demo.s3.amazonaws.com/${config.dataset}`)
20 |
21 | const { getCurrentPointCount, offset, buffer, batchIds, animationTextureSize, animationTexture } = result
22 |
23 | const minZ = offset[2]
24 |
25 | const vs = `
26 | precision highp float;
27 |
28 | attribute vec3 position;
29 | attribute float intensity;
30 | attribute float batchId;
31 | uniform mat4 projection;
32 | uniform mat4 view;
33 | uniform float fadeHeightStart;
34 | uniform float fadeHeightEnd;
35 | uniform sampler2D animationStartTexture;
36 | uniform float textureSize;
37 | uniform float time;
38 | varying vec4 color;
39 |
40 | #define C1 vec3(0.22745, 0.06667, 0.10980)
41 | #define C2 vec3(0.34118, 0.28627, 0.31765)
42 | #define C3 vec3(0.51373, 0.59608, 0.55686)
43 | #define C4 vec3(0.73725, 0.87059, 0.64706)
44 | #define C5 vec3(0.90196, 0.97647, 0.73725)
45 |
46 | vec3 getColorFromPalette(float t) {
47 | if (t < 0.25) return mix(C1, C2, smoothstep(0.0, 0.25, t));
48 | if (t < 0.5) return mix(C2, C3, smoothstep(0.25, 0.5, t));
49 | if (t < 0.75) return mix(C3, C4, smoothstep(0.5, 0.75, t));
50 | return mix(C4, C5, smoothstep(0.75, 1.0, t));
51 | }
52 |
53 | void main() {
54 | vec3 p = position;
55 | float colorPow = 2.0;
56 | float colorOffset = 0.5;
57 | float t = intensity;
58 | float texIdx = floor(batchId / 4.0);
59 | int texComponent = int(mod(batchId, 4.0));
60 | vec2 texCoord = vec2(
61 | mod(texIdx, textureSize) / (textureSize - 1.0),
62 | floor(texIdx / textureSize) / (textureSize - 1.0)
63 | );
64 |
65 | vec4 animationDataPx = texture2D(animationStartTexture, texCoord);
66 |
67 | // an annoying limitation of GLSL 1 (WebGL1) is that you cannot index into a vector
68 | // with a variable - only a constant
69 | float animationStart;
70 | if (texComponent == 0) {
71 | animationStart = animationDataPx.x;
72 | } else if (texComponent == 1) {
73 | animationStart = animationDataPx.y;
74 | } else if (texComponent == 2) {
75 | animationStart = animationDataPx.z;
76 | } else {
77 | animationStart = animationDataPx.w;
78 | }
79 |
80 | float animationDurationMs = 3000.0;
81 | float animationT = clamp((time - animationStart) / animationDurationMs, 0.0, 1.0);
82 | // apply easing
83 | animationT = 1.0 - pow(1.0 - animationT, 4.0);
84 | // if animationStart is 0.0, then zero out animationT
85 | animationT *= float(bool(animationStart));
86 | // have the points animate up into position slightly
87 | p.z -= 50.0 * (1.0 - animationT);
88 |
89 | vec3 c = getColorFromPalette(pow(t + colorOffset, colorPow));
90 | // points that are closer to the ground should be darker
91 | float colorMult = 0.05 + smoothstep(fadeHeightEnd, fadeHeightStart, p.z);
92 | c *= colorMult;
93 | color = vec4(c, animationT);
94 |
95 | // get the position of the point with respect to the camera
96 | vec4 translatedPosition = view * vec4(p, 1);
97 | float distToCamera = length(translatedPosition);
98 | float sizeT = 1.0 - pow(smoothstep(20.0, 2200.0, distToCamera), 0.5);
99 | float size = mix(1.0, 7.0, sizeT);
100 | float hide = step(fadeHeightEnd + 1.0, p.z) * (animationT * 0.5 + 0.5);
101 | gl_PointSize = size * hide;
102 | gl_Position = projection * translatedPosition * hide;
103 | }
104 | `
105 |
106 | const fs = `
107 | precision highp float;
108 | varying vec4 color;
109 | void main() {
110 | gl_FragColor = color;
111 | }
112 | `
113 |
114 | gl.clearColor(0.11, 0.12, 0.13, 1)
115 | // this isn't perfectly correct since we have blending turned on, but once all the data is
116 | // loaded, there are no transparent pixels anymore, and the performance win from depth testing
117 | // is too good to turn off
118 | gl.enable(gl.DEPTH_TEST)
119 | gl.enable(gl.BLEND)
120 | gl.blendEquation(gl.FUNC_ADD)
121 | gl.blendFuncSeparate(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA, gl.ONE, gl.ONE)
122 |
123 | const program = linkProgram(gl, vs, fs)
124 | gl.useProgram(program)
125 |
126 | const batchIdsBuffer = gl.createBuffer()
127 | gl.bindBuffer(gl.ARRAY_BUFFER, batchIdsBuffer)
128 | gl.bufferData(gl.ARRAY_BUFFER, batchIds, gl.STATIC_DRAW)
129 | const batchIdsAttributeLocation = gl.getAttribLocation(program, 'batchId')
130 | gl.enableVertexAttribArray(batchIdsAttributeLocation)
131 | gl.vertexAttribPointer(batchIdsAttributeLocation, 1, gl.UNSIGNED_BYTE, false, 1, 0)
132 |
133 | gl.bindBuffer(gl.ARRAY_BUFFER, buffer)
134 | const positionAttributeLocation = gl.getAttribLocation(program, 'position')
135 | gl.enableVertexAttribArray(positionAttributeLocation)
136 | gl.vertexAttribPointer(positionAttributeLocation, 3, gl.UNSIGNED_SHORT, false, 8, 0)
137 |
138 | const intensityAttributeLocation = gl.getAttribLocation(program, 'intensity')
139 | gl.enableVertexAttribArray(intensityAttributeLocation)
140 | gl.vertexAttribPointer(intensityAttributeLocation, 1, gl.UNSIGNED_SHORT, true, 8, 6)
141 |
142 | const viewUniform = gl.getUniformLocation(program, 'view')
143 | const projectionUniform = gl.getUniformLocation(program, 'projection')
144 | const fadeHeightStartUniform = gl.getUniformLocation(program, 'fadeHeightStart')
145 | const fadeHeightEndUniform = gl.getUniformLocation(program, 'fadeHeightEnd')
146 | const textureSizeUniform = gl.getUniformLocation(program, 'textureSize')
147 | const timeUniform = gl.getUniformLocation(program, 'time')
148 | const animationStartTextureUniform = gl.getUniformLocation(program, 'animationStartTexture')
149 |
150 | return function render(renderState: { matrix: number[] }) {
151 | const { matrix } = renderState
152 | const time = Math.floor(performance.now())
153 |
154 | gl.clear(gl.DEPTH_BUFFER_BIT | gl.COLOR_BUFFER_BIT)
155 | // TODO: pass in width and height here to allow for dynamic resizing
156 | const width = gl.drawingBufferWidth
157 | const height = gl.drawingBufferHeight
158 | gl.viewport(0, 0, width, height)
159 |
160 | const projection = mat4.perspective(new Float32Array(16), Math.PI / 4, width / height, 1, 1000000)
161 |
162 | gl.bindTexture(gl.TEXTURE_2D, animationTexture)
163 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST)
164 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST)
165 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE)
166 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE)
167 |
168 | gl.uniformMatrix4fv(viewUniform, false, matrix)
169 | gl.uniformMatrix4fv(projectionUniform, false, projection)
170 | // TODO: have these controlled by UI sliders?
171 | gl.uniform1f(fadeHeightStartUniform, minZ + config.fadeHeightOffsetRange[1])
172 | gl.uniform1f(fadeHeightEndUniform, minZ + config.fadeHeightOffsetRange[0])
173 | gl.uniform1f(textureSizeUniform, animationTextureSize)
174 | gl.uniform1f(timeUniform, time)
175 | gl.uniform1i(animationStartTextureUniform, 0)
176 |
177 | gl.drawArrays(gl.POINTS, 0, getCurrentPointCount())
178 | }
179 | }
180 |
181 | async function getLidarStreamer(gl: WebGLRenderingContext, url: string) {
182 | const startTime = performance.now()
183 | const response = await fetch(url)
184 |
185 | if (!response.body) {
186 | throw new Error('Unable to fetch lidar data. No response.body.')
187 | }
188 |
189 | const littleEndian = isLittleEndian()
190 |
191 | /*
192 | Binary Data format:
193 | pointCount - uint32
194 | xOffset, yOffset, zOffset - int32s
195 | pt1 xDelta, yDelta, zDelta - uint16s
196 | pt1 intensity - uint16
197 | pt2...
198 | */
199 | const reader = response.body.getReader()
200 |
201 | const result = await reader.read()
202 | if (result.done || !result.value) throw new Error('Unable to fetch lidar data. Stream completed before any data was received.')
203 | const dataview = new DataView(result.value.buffer)
204 | const pointCount = dataview.getUint32(0, littleEndian)
205 | const offset = [
206 | dataview.getInt32(4, littleEndian),
207 | dataview.getInt32(8, littleEndian),
208 | dataview.getInt32(12, littleEndian)
209 | ]
210 |
211 | console.log({ pointCount, offset })
212 |
213 | const pointSizeInBytes = 4 * 2 // each point has 4 uint16 values
214 | const lidarData = new Uint8Array(pointCount * pointSizeInBytes)
215 | const initialData = new Uint8Array(result.value.buffer, 16)
216 | lidarData.set(initialData)
217 |
218 | let i = initialData.length
219 | let currentPointCount = Math.floor(i / pointSizeInBytes)
220 |
221 | const buffer = gl.createBuffer()
222 | if (!buffer) throw new Error('Could not create WebGL buffer')
223 | gl.bindBuffer(gl.ARRAY_BUFFER, buffer)
224 | gl.bufferData(gl.ARRAY_BUFFER, lidarData, gl.DYNAMIC_DRAW)
225 |
226 | const textureSize = 8 // an 8x8 texture gives 64 pixels x 4 slots = 256 batches, so batch ids fit in uint8s
227 | const texturePxCount = textureSize * textureSize
228 | const batchCount = 4 * texturePxCount // 4 slots per pixel
229 | const pointBatchSize = Math.ceil(pointCount / batchCount)
230 | const animationData = new Float32Array(texturePxCount * 4)
231 | const batchIds = new Uint8Array(pointCount)
232 | for (let j = 0; j < batchIds.length; j++) {
233 | const batchId = Math.floor(j / pointBatchSize)
234 | batchIds[j] = batchId
235 | }
236 |
237 | const animationTexture = gl.createTexture()
238 | gl.bindTexture(gl.TEXTURE_2D, animationTexture)
239 | gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, textureSize, textureSize, 0, gl.RGBA, gl.FLOAT, animationData)
240 |
241 | setTimeout(async function loadChunk() {
242 | let chunks = 1
243 | while (true) {
244 | const result = await reader.read()
245 | if (result.done) {
246 | console.log(`finished loading data in ${chunks} chunks. time(ms):`, performance.now() - startTime)
247 | return
248 | }
249 | chunks += 1
250 | // this should always have a value, but this check satisfies TypeScript
251 | if (result.value) {
252 | const prevCompletedBatches = Math.floor(currentPointCount / pointBatchSize)
253 | gl.bufferSubData(gl.ARRAY_BUFFER, i, result.value)
254 | i += result.value.length
255 | currentPointCount = Math.floor(i / pointSizeInBytes)
256 | const curCompletedBatches = Math.floor(currentPointCount / pointBatchSize)
257 | const curTime = performance.now()
258 | for (let k = prevCompletedBatches; k < curCompletedBatches; k++) {
259 | animationData[k] = curTime
260 | }
261 | gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, textureSize, textureSize, 0, gl.RGBA, gl.FLOAT, animationData)
262 | }
263 | }
264 | }, 0)
265 |
266 | return {
267 | offset,
268 | pointCount,
269 | getCurrentPointCount: () => currentPointCount,
270 | buffer,
271 | animationTextureSize: textureSize,
272 | batchIds,
273 | animationTexture
274 | }
275 | }
276 |
277 | function isLittleEndian () {
278 | const buffer = new ArrayBuffer(2)
279 | new DataView(buffer).setInt16(0, 256, true /* littleEndian */)
280 | // Int16Array uses the platform's endianness.
281 | return new Int16Array(buffer)[0] === 256
282 | }
283 |
284 | function createShader(gl: WebGLRenderingContext, type: number, source: string) {
285 | const shader = gl.createShader(type)
286 |
287 | if (!shader) {
288 | throw new Error('Could not create shader.')
289 | }
290 |
291 | gl.shaderSource(shader, source)
292 | gl.compileShader(shader)
293 | const success = gl.getShaderParameter(shader, gl.COMPILE_STATUS)
294 | if (!success) {
295 | const err = gl.getShaderInfoLog(shader)
296 | throw new Error(`Shader error: ${err}`)
297 | }
298 |
299 | return shader
300 | }
301 |
302 | function linkProgram(gl: WebGLRenderingContext, vertexSource: string, fragmentSource: string) {
303 | const program = gl.createProgram()
304 | if (!program) {
305 | throw new Error('Could not create program')
306 | }
307 |
308 | const vertexShader = createShader(gl, gl.VERTEX_SHADER, vertexSource)
309 | const fragmentShader = createShader(gl, gl.FRAGMENT_SHADER, fragmentSource)
310 |
311 | gl.attachShader(program, vertexShader)
312 | gl.attachShader(program, fragmentShader)
313 | gl.linkProgram(program)
314 |
315 | const success = gl.getProgramParameter(program, gl.LINK_STATUS)
316 | if (!success) {
317 | const err = gl.getProgramInfoLog(program)
318 | throw new Error(`Link error: ${err}`)
319 | }
320 |
321 | return program
322 | }
323 |
--------------------------------------------------------------------------------