├── .gitattributes ├── .gitignore ├── README.md ├── client ├── index.html └── main.js └── server ├── main.js ├── package.json └── static ├── game.js ├── index.html └── index.js /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | 4 | # Custom for Visual Studio 5 | *.cs diff=csharp 6 | 7 | # Standard to msysgit 8 | *.doc diff=astextplain 9 | *.DOC diff=astextplain 10 | *.docx diff=astextplain 11 | *.DOCX diff=astextplain 12 | *.dot diff=astextplain 13 | *.DOT diff=astextplain 14 | *.pdf diff=astextplain 15 | *.PDF diff=astextplain 16 | *.rtf diff=astextplain 17 | *.RTF diff=astextplain 18 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | 6 | # Runtime data 7 | pids 8 | *.pid 9 | *.seed 10 | 11 | # Directory for instrumented libs generated by jscoverage/JSCover 12 | lib-cov 13 | 14 | # Coverage directory used by tools like istanbul 15 | coverage 16 | 17 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 18 | .grunt 19 | 20 | # node-waf configuration 21 | .lock-wscript 22 | 23 | # Compiled binary addons (http://nodejs.org/api/addons.html) 24 | build/Release 25 | 26 | # Dependency directories 27 | node_modules 28 | jspm_packages 29 | 30 | # Optional npm cache directory 31 | .npm 32 | 33 | # Optional REPL history 34 | .node_repl_history 35 | 36 | # ========================= 37 | # Operating System Files 38 | # ========================= 39 | 40 | # OSX 41 | # ========================= 42 | 43 | .DS_Store 44 | .AppleDouble 45 | .LSOverride 46 | 47 | # Thumbnails 48 | ._* 49 | 50 | # Files that might appear in the root of a volume 51 | .DocumentRevisions-V100 52 | .fseventsd 53 | .Spotlight-V100 54 | .TemporaryItems 55 | .Trashes 56 | .VolumeIcon.icns 57 | 58 | # Directories potentially created on remote AFP share 59 | .AppleDB 60 | .AppleDesktop 61 | Network Trash Folder 62 | Temporary Items 63 | .apdisk 64 | 65 | # Windows 66 | # ========================= 67 | 68 | # Windows image file caches 69 | Thumbs.db 70 | ehthumbs.db 71 | 72 | # Folder config file 73 | Desktop.ini 74 | 75 | # Recycle Bin used on file shares 76 | $RECYCLE.BIN/ 77 | 78 | # Windows Installer files 79 | *.cab 80 | *.msi 81 | *.msm 82 | *.msp 83 | 84 | # Windows shortcuts 85 | *.lnk 86 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Canvas Live Stream 2 | 3 | ### Screenshot: 4 | 5 | 6 | **Left** The live stream inside the browser
7 | **Right** The server and the stream preview (it can't be disabled due to a Chromium bug)
8 |
9 | ### Description
10 |
11 | This is an experiment with the new ``canvas.captureStream`` API, to see whether it is already possible to build a canvas game stream server with it. I'm abusing [electron](https://github.com/electron/electron) to get direct access to the canvas API plus a super easy setup, and then simply broadcasting its stream into the client's video element.
12 |
13 | The HTML media API has many streaming-related bugs right now; for example, it is not possible (without slow hacks) to preserve the alpha channel of the streamed canvas. The video timing offsets are also clunky and don't always line up. Canvas frames only get captured while the canvas is attached to the DOM and visible - this really makes it impossible to create a realistic server. I recommend using the exact Chromium version proposed here, because many things change quickly; e.g. in the latest Chromium build you have to explicitly specify which video codec to use, otherwise it will silently fail and send badly encoded stream chunks.
14 |
15 | **Conclusion**: This is still **very** experimental and has some quirky bugs here and there. Also, the ``MediaRecorder`` hands back each chunk as a Blob, which is very slow to turn into an ArrayBuffer for sending over WebSockets. I'm not sure if this will ever change (or whether I'm doing it wrong). So all of this may only become practical in the near future, but it has really great potential!
16 |
17 | ### Setup
18 |
19 | The server uses the electron beta build, which can be found [here](https://github.com/electron/electron/tree/v1.7.2). The actual streamed game lives in server/static/game.js and gets passed the connected user's canvas and 2d context, which it can draw onto.
20 | The client just connects to the server, shares some initial information (such as which video compression format to use) and then receives the stream data.
21 |
22 | You can run the server with ``npm run start``.
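For reference, here is a minimal sketch of what a drop-in ``server/static/game.js`` could look like. It assumes only the ``init(canvas, ctx)`` contract used by ``server/static/index.js`` below; the drawing code itself is purely illustrative and is not the particle demo shipped in this repository.

```js
// server/static/game.js - illustrative sketch, not the bundled demo.
// index.js calls the global init() once per connected user and hands it
// that user's shadow canvas plus its 2d context.
const init = function(canvas, ctx) {
  let frame = 0;
  requestAnimationFrame(function draw() {
    requestAnimationFrame(draw);
    // Everything drawn here ends up in canvas.captureStream() and therefore
    // in the WebM chunks that are sent to the connected client.
    ctx.fillStyle = "black";
    ctx.fillRect(0, 0, canvas.width, canvas.height);
    ctx.fillStyle = "white";
    ctx.font = "24px monospace";
    ctx.fillText("frame " + (frame++), 16, 32);
  });
};
```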
23 | -------------------------------------------------------------------------------- /client/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /client/main.js: -------------------------------------------------------------------------------- 1 | let ws = null; 2 | let ready = false; 3 | 4 | let canvas = document.createElement("canvas"); 5 | let ctx = canvas.getContext("2d"); 6 | 7 | document.body.appendChild(canvas); 8 | 9 | function applyHDScaling(canvas, ctx) { 10 | const width = canvas.width; 11 | const height = canvas.height; 12 | const scale = window.devicePixelRatio; 13 | canvas.width = width * scale; 14 | canvas.height = height * scale; 15 | canvas.style.width = width + "px"; 16 | canvas.style.height = height + "px"; 17 | ctx.setTransform(scale, 0, 0, scale, 0, 0); 18 | }; 19 | 20 | let width = window.innerWidth; 21 | let height = window.innerHeight; 22 | 23 | let queue = []; 24 | 25 | const PACKET_CANVAS_SIZE = 1; 26 | const PACKET_SYNC_PIXELS = 2; 27 | const PACKET_VPX_CODEC = 3; 28 | 29 | const connect = () => { 30 | const Socket = window.MozWebSocket || window.WebSocket; 31 | const url = "ws://127.0.0.1:8080"; 32 | ws = new Socket(url); 33 | ws.binaryType = "arraybuffer"; 34 | addEventListeners(); 35 | }; 36 | 37 | const addEventListeners = () => { 38 | ws.addEventListener("open", onOpen); 39 | ws.addEventListener("close", onClose); 40 | ws.addEventListener("error", onError); 41 | ws.addEventListener("message", onMessage); 42 | }; 43 | 44 | const errorMsg = `Outdated or unsupported browser!`; 45 | 46 | if ( 47 | typeof MediaSource === "undefined" || 48 | !MediaSource.isTypeSupported("video/webm; codecs=vp8") 49 | ) { 50 | throw new Error(errorMsg); 51 | } 52 | 53 | const getVpxCodec = () => { 54 | if (MediaSource.isTypeSupported("video/webm; codecs=vp9")) { 55 | return (9); 56 | } 57 | if (MediaSource.isTypeSupported("video/webm; codecs=vp8")) { 58 | return (8); 59 | } 60 | if (MediaSource.isTypeSupported("video/webm; codecs=vp7")) { 61 | return (7); 62 | } 63 | return (8); 64 | }; 65 | 66 | const onOpen = (e) => { 67 | ready = true; 68 | ws.send(new Uint16Array([PACKET_CANVAS_SIZE, width | 0, height | 0])); 69 | ws.send(new Uint8Array([PACKET_VPX_CODEC, getVpxCodec()])); 70 | }; 71 | 72 | const onClose = (e) => { 73 | 74 | }; 75 | 76 | const onError = (e) => { 77 | 78 | }; 79 | 80 | /*requestAnimationFrame(function draw() { 81 | requestAnimationFrame(draw); 82 | ctx.drawImage( 83 | video, 84 | 0, 0, 85 | video.width, video.height 86 | ); 87 | });*/ 88 | 89 | requestAnimationFrame(function draw() { 90 | requestAnimationFrame(draw, 1e3 / 60); 91 | if (queue.length && !sb.updating) { 92 | sb.appendBuffer(queue.shift()); 93 | } 94 | }); 95 | 96 | let ms = new MediaSource(); 97 | let sb = null; 98 | video.src = window.URL.createObjectURL(ms); 99 | video.crossOrigin = "anonymous"; 100 | video.style.backgroundColor = "transparent"; 101 | //video.style.display = "none"; 102 | video.play(); 103 | 104 | ms.addEventListener('sourceopen', function(e) { 105 | 106 | sb = ms.addSourceBuffer(`video/webm; codecs=vp${getVpxCodec()}`); 107 | 108 | sb.addEventListener("error", (e) => { console.log("error: " + ms.readyState); }); 109 | sb.addEventListener("abort", (e) => { console.log("abort: " + ms.readyState); }); 110 | sb.addEventListener("update", (e) => { 111 | if (queue.length && !sb.updating) { 112 | sb.appendBuffer(queue.shift()); 113 | } 114 | }); 115 
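// Chunks queued while the SourceBuffer was busy are drained one at a time, either here on "update" or in the draw loop above; onMessage only appends a chunk directly when the buffer is idle and the queue is empty.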
| 116 | }, false); 117 | 118 | ms.addEventListener("sourceopen", (e) => { console.log(e); }); 119 | ms.addEventListener("sourceended", (e) => { console.log(e); }); 120 | ms.addEventListener("sourceclose", (e) => { console.log(e); }); 121 | ms.addEventListener("error", (e) => { console.log(e); }); 122 | 123 | const onMessage = (e) => { 124 | if (sb === null) return; 125 | let data = e.data; 126 | if (sb.updating || queue.length) { 127 | queue.push(data); 128 | } else { 129 | sb.appendBuffer(data); 130 | } 131 | }; 132 | 133 | const resize = () => { 134 | canvas.width = window.innerWidth; 135 | canvas.height = window.innerHeight; 136 | video.width = width; 137 | video.height = height; 138 | applyHDScaling(canvas, ctx); 139 | }; 140 | window.addEventListener("resize", resize); 141 | 142 | resize(); 143 | connect(); 144 | -------------------------------------------------------------------------------- /server/main.js: -------------------------------------------------------------------------------- 1 | const url = require("url"); 2 | const path = require("path"); 3 | const electron = require("electron"); 4 | 5 | // now open up electron with our fresh rolluped bundle 6 | const initElectron = () => { 7 | return new Promise((resolve) => { 8 | 9 | const app = electron.app; 10 | const BrowserWindow = electron.BrowserWindow; 11 | 12 | let win = null; 13 | const createWindow = () => { 14 | win = new BrowserWindow({ 15 | width: 650, 16 | height: 420, 17 | titleBarStyle: "hidden" 18 | }); 19 | 20 | win.loadURL(url.format({ 21 | pathname: path.join(__dirname, "/static/index.html"), 22 | protocol: "file:", 23 | slashes: true 24 | })); 25 | win.setMenu(null); 26 | 27 | //win.setFullScreen(true); 28 | win.webContents.openDevTools(); 29 | win.on("closed", () => { 30 | win = null; 31 | }); 32 | resolve({ win, app }); 33 | }; 34 | 35 | app.on("ready", createWindow); 36 | app.on("window-all-closed", () => { 37 | if (process.platform !== "darwin") app.quit(); 38 | }); 39 | app.on("activate", () => { 40 | if (win === null) createWindow(); 41 | }); 42 | 43 | }); 44 | }; 45 | 46 | // simple live reload system 47 | initElectron().then((old) => { 48 | old.win.reload(); 49 | old.win.webContents.reloadIgnoringCache(); 50 | old.win.webContents.openDevTools(); 51 | }); 52 | -------------------------------------------------------------------------------- /server/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "main": "main.js", 3 | "scripts": { 4 | "start": "electron ." 
5 | }, 6 | "engines": { 7 | "node": ">= 7.x" 8 | }, 9 | "dependencies": { 10 | "electron": "^1.7.2" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /server/static/game.js: -------------------------------------------------------------------------------- 1 | const init = function(canvas, ctx) { 2 | 3 | let width = canvas.width; 4 | let height = canvas.height; 5 | 6 | let bg = "black"; 7 | let counter = 0; 8 | 9 | document.body.style.background = bg; 10 | /* 11 | ctx.font = "30px Verdana"; 12 | let gradient = ctx.createLinearGradient(0, 0, width, 0); 13 | gradient.addColorStop("0", "magenta"); 14 | gradient.addColorStop("0.5", "blue"); 15 | gradient.addColorStop("1.0", "red"); 16 | 17 | function rndColor() { 18 | const r = (Math.random() * 256) | 0; 19 | const g = (Math.random() * 256) | 0; 20 | const b = (Math.random() * 256) | 0; 21 | const a = (Math.random() * 256) | 0; 22 | return (`rgba(${r},${g},${b},${a})`); 23 | }; 24 | 25 | requestAnimationFrame(function draw() { 26 | requestAnimationFrame(draw); 27 | let date = new Date(); 28 | let str = date.getHours() + ":" + date.getMinutes() + ":" + String(date.getMilliseconds())[0]; 29 | ctx.fillStyle = rndColor(); 30 | ctx.fillRect(0, 0, width, height); 31 | ctx.fillStyle = gradient; 32 | ctx.fillText(counter++, 10, 90); 33 | }); 34 | 35 | return; 36 | */ 37 | 38 | // demo code taken from codepen/ihavenofuckingidea 39 | 40 | var NUM_PARTICLES = ( ( ROWS = 100 ) * ( COLS = 300 ) ), 41 | THICKNESS = Math.pow( 80, 2 ), 42 | SPACING = 3, 43 | MARGIN = 100, 44 | COLOR = 220, 45 | DRAG = 0.95, 46 | EASE = 0.25, 47 | 48 | /* 49 | 50 | used for sine approximation, but Math.sin in Chrome is still fast enough :)http://jsperf.com/math-sin-vs-sine-approximation 51 | 52 | B = 4 / Math.PI, 53 | C = -4 / Math.pow( Math.PI, 2 ), 54 | P = 0.225, 55 | 56 | */ 57 | 58 | container, 59 | particle, 60 | mouse, 61 | stats, 62 | list, 63 | ctx, 64 | tog, 65 | man, 66 | dx, dy, 67 | mx, my, 68 | d, t, f, 69 | a, b, 70 | i, n, 71 | w, h, 72 | p, s, 73 | r, c 74 | ; 75 | 76 | particle = { 77 | vx: 0, 78 | vy: 0, 79 | x: 0, 80 | y: 0 81 | }; 82 | 83 | function init() { 84 | 85 | man = false; 86 | tog = true; 87 | 88 | list = []; 89 | 90 | w = width = COLS * SPACING + MARGIN * 2; 91 | h = height = ROWS * SPACING + MARGIN * 2; 92 | 93 | for ( i = 0; i < NUM_PARTICLES; i++ ) { 94 | 95 | p = Object.create( particle ); 96 | p.x = p.ox = MARGIN + SPACING * ( i % COLS ); 97 | p.y = p.oy = MARGIN + SPACING * Math.floor( i / COLS ); 98 | 99 | list[i] = p; 100 | } 101 | 102 | } 103 | 104 | function step() { 105 | 106 | if ( tog = !tog ) { 107 | 108 | if ( !man ) { 109 | 110 | t = +new Date() * 0.001; 111 | mx = w * 0.5 + ( Math.cos( t * 2.1 ) * Math.cos( t * 0.9 ) * w * 0.45 ); 112 | my = h * 0.5 + ( Math.sin( t * 3.2 ) * Math.tan( Math.sin( t * 0.8 ) ) * h * 0.45 ); 113 | } 114 | 115 | for ( i = 0; i < NUM_PARTICLES; i++ ) { 116 | 117 | p = list[i]; 118 | 119 | d = ( dx = mx - p.x ) * dx + ( dy = my - p.y ) * dy; 120 | f = -THICKNESS / d; 121 | 122 | if ( d < THICKNESS ) { 123 | t = Math.atan2( dy, dx ); 124 | p.vx += f * Math.cos(t); 125 | p.vy += f * Math.sin(t); 126 | } 127 | 128 | p.x += ( p.vx *= DRAG ) + (p.ox - p.x) * EASE; 129 | p.y += ( p.vy *= DRAG ) + (p.oy - p.y) * EASE; 130 | 131 | } 132 | 133 | } else { 134 | 135 | b = ( a = ctx.createImageData( w, h ) ).data; 136 | 137 | for ( i = 0; i < NUM_PARTICLES; i++ ) { 138 | 139 | p = list[i]; 140 | b[n = ( ~~p.x + ( ~~p.y * w ) ) * 4] = b[n+1] = b[n+2] = COLOR, b[n+3] = 
255; 141 | } 142 | 143 | ctx.putImageData( a, 0, 0 ); 144 | } 145 | 146 | if ( stats ) stats.end(); 147 | 148 | requestAnimationFrame( step ); 149 | } 150 | 151 | init(); 152 | step(); 153 | 154 | }; 155 | -------------------------------------------------------------------------------- /server/static/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 🦄🦄🦄 6 | 7 | 8 | 9 | 10 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /server/static/index.js: -------------------------------------------------------------------------------- 1 | const WebSocket = require("ws").Server; 2 | 3 | const port = 8080; 4 | const ws = new WebSocket({ port: port, maxReceivedFrameSize: 0x10000 }); 5 | 6 | const MAX_CANVAS_WIDTH = 2000; 7 | const MAX_CANVAS_HEIGHT = 2000; 8 | 9 | const PACKET_CANVAS_SIZE = 1; 10 | const PACKET_SYNC_PIXELS = 2; 11 | const PACKET_VPX_CODEC = 3; 12 | 13 | let users = []; 14 | 15 | class User { 16 | constructor(socket) { 17 | this.socket = socket; 18 | this.reader = new FileReader(); 19 | this.vpx = 0; 20 | this.width = 0; 21 | this.height = 0; 22 | this.chunks = []; 23 | this.ctx = null; 24 | this.view = null; 25 | this.stream = null; 26 | this.recorder = null; 27 | } 28 | allocateShadowCanvas(width, height) { 29 | let canvas = document.createElement("canvas"); 30 | let ctx = canvas.getContext("2d"); 31 | canvas.width = width; 32 | canvas.height = height; 33 | this.view = canvas; 34 | this.ctx = ctx; 35 | } 36 | createCanvasStream() { 37 | const canvas = this.view; 38 | const ctx = this.ctx; 39 | applyHDScaling(canvas, ctx); 40 | document.body.appendChild(canvas); 41 | init(canvas, ctx); 42 | const video = canvas.captureStream(60); 43 | const stream = new MediaStream(); 44 | const track = video.getTracks()[0]; 45 | const recorder = new MediaRecorder(stream, { mimeType: `video/webm; codecs=vp${this.vpx}` }); 46 | this.stream = stream; 47 | this.recorder = recorder; 48 | stream.addTrack(track); 49 | const onData = (e) => { 50 | //time = Date.now(); 51 | if (e.data.size > 0) { 52 | this.chunks.push(e.data); 53 | //console.log("Added chunk!"); 54 | this.processChunks(); 55 | } 56 | }; 57 | const onError = (e) => { 58 | console.log("error:", e); 59 | }; 60 | recorder.ondataavailable = onData; 61 | recorder.onerror = onError; 62 | console.log(`Recording using MIME ${recorder.mimeType}`); 63 | recorder.start(0); 64 | } 65 | processChunks(e) { 66 | let chunks = this.chunks; 67 | if (chunks.length <= 0) return; 68 | const blob = new Blob(chunks, { "type": "video/webm" }); 69 | const length = chunks.length; 70 | const reader = new FileReader(); 71 | reader.addEventListener("loadend", (e) => { 72 | if (!this.closed) this.send(new Uint8Array(reader.result)); 73 | // release processed chunks 74 | }, false); 75 | reader.readAsArrayBuffer(blob); 76 | this.chunks = []; 77 | return; 78 | } 79 | close() { 80 | this.closed = true; 81 | this.socket.close(); 82 | this.recorder.stop(); 83 | this.stream.getVideoTracks()[0].stop(); 84 | this.socket = null; 85 | this.reader = null; 86 | this.chunks = []; 87 | this.stream = null; 88 | this.recorder = null; 89 | this.removeUiReference(); 90 | this.ctx = null; 91 | this.view = null; 92 | removeUserFromUsers(this); 93 | } 94 | // chrome < 58 bug 95 | // canvas.captureStream only fires 96 | // if the canvas is attached to the body 97 | removeUiReference() { 98 | this.view.parentNode.removeChild(this.view); 99 | } 100 | send(msg) { 101 | const socket = 
this.socket; 102 | if (socket.readyState === socket.CLOSE) { 103 | this.close(); 104 | } 105 | else if (socket.readyState === socket.OPEN) { 106 | socket.send(msg); 107 | } 108 | } 109 | onMessage(msg) { 110 | const kind = msg[0] << 0; 111 | switch (kind) { 112 | case PACKET_CANVAS_SIZE: 113 | // canvas size not defined yet 114 | if (this.width <= 0 && this.height <= 0) { 115 | this.applyCanvasBoundings(msg); 116 | } 117 | break; 118 | case PACKET_VPX_CODEC: 119 | if (this.vpx === 0) { 120 | this.applyVbxCodec(msg); 121 | } 122 | break; 123 | }; 124 | // auto initialize canvas stream 125 | if (this.isReady() && this.stream === null) { 126 | this.createCanvasStream(); 127 | } 128 | } 129 | isReady() { 130 | return ( 131 | this.vpx !== 0 && 132 | this.width > 0 && this.height > 0 133 | ); 134 | } 135 | applyCanvasBoundings(msg) { 136 | const data = decode16ByteBuffer(msg); 137 | if (data.length === 3) { 138 | const width = Math.min(data[1] | 0, MAX_CANVAS_WIDTH); 139 | const height = Math.min(data[2] | 0, MAX_CANVAS_HEIGHT); 140 | this.width = width | 0; 141 | this.height = height | 0; 142 | console.log("User boundings:", width, height); 143 | this.allocateShadowCanvas(width, height); 144 | } 145 | } 146 | applyVbxCodec(msg) { 147 | const data = decode8ByteBuffer(msg); 148 | if (data.length === 2) { 149 | const code = data[1]; 150 | if (code === 8 || code === 9) { 151 | this.vpx = code | 0; 152 | console.log("User vpx:", code); 153 | } 154 | } 155 | } 156 | }; 157 | 158 | function rndColor() { 159 | const r = (Math.random() * 256) | 0; 160 | const g = (Math.random() * 256) | 0; 161 | const b = (Math.random() * 256) | 0; 162 | const a = (Math.random() * 256) | 0; 163 | return (`rgba(${r},${g},${b},${a})`); 164 | }; 165 | 166 | ws.on("connection", (socket) => { 167 | let user = new User(socket); 168 | users.push(user); 169 | socket.on("message", (msg) => { 170 | if (Buffer.isBuffer(msg) && msg.length > 0) { 171 | user.onMessage(msg); 172 | } 173 | }); 174 | socket.on("close", () => { 175 | user.close(); 176 | user = null; 177 | socket = null; 178 | }); 179 | }); 180 | 181 | setTimeout(function update() { 182 | setTimeout(update, 0); 183 | users.map((user) => user.processChunks()); 184 | }, 0); 185 | 186 | function removeUserFromUsers(user) { 187 | for (let ii = 0; ii < users.length; ++ii) { 188 | if (users[ii] === user) { 189 | users.splice(ii, 1); 190 | return (true); 191 | } 192 | }; 193 | console.log("Failed to remove user", user); 194 | return (false); 195 | }; 196 | 197 | function applyHDScaling(canvas, ctx) { 198 | const width = canvas.width; 199 | const height = canvas.height; 200 | const scale = window.devicePixelRatio; 201 | canvas.width = width * scale; 202 | canvas.height = height * scale; 203 | canvas.style.width = width + "px"; 204 | canvas.style.height = height + "px"; 205 | ctx.setTransform(scale, 0, 0, scale, 0, 0); 206 | }; 207 | 208 | function decode8ByteBuffer(buffer) { 209 | const bytes = Uint8Array.BYTES_PER_ELEMENT; 210 | const data = new Uint8Array(buffer.length | 0); 211 | for (let ii = 0; ii < buffer.length; ++ii) { 212 | data[ii | 0] = buffer[ii] & 0xff; 213 | }; 214 | return (data); 215 | }; 216 | 217 | function decode16ByteBuffer(buffer) { 218 | const bytes = Uint16Array.BYTES_PER_ELEMENT; 219 | const data = new Uint16Array((buffer.length / bytes) | 0); 220 | for (let ii = 0; ii < buffer.length; ii += bytes) { 221 | data[(ii / bytes) | 0] = get16BufferByteAt(buffer, ii); 222 | }; 223 | return (data); 224 | }; 225 | 226 | function decode32ByteBuffer(buffer) { 227 | 
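// As with decode16ByteBuffer above: ws hands us a Node Buffer, so the 32-bit values are reassembled little-endian, byte by byte.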
const bytes = Uint32Array.BYTES_PER_ELEMENT; 228 | const data = new Uint32Array((buffer.length / bytes) | 0); 229 | for (let ii = 0; ii < buffer.length; ii += bytes) { 230 | data[(ii / bytes) | 0] = get32BufferByteAt(buffer, ii); 231 | }; 232 | return (data); 233 | }; 234 | 235 | function get16BufferByteAt(buffer, index) { 236 | return (( 237 | (buffer[index + 0]) << 0 | 238 | (buffer[index + 1]) << 8 239 | ) | 0); 240 | }; 241 | 242 | function get32BufferByteAt(buffer, index) { 243 | return (( 244 | (buffer[index + 0]) << 0 | 245 | (buffer[index + 1]) << 8 | 246 | (buffer[index + 2]) << 16 | 247 | (buffer[index + 3]) << 24 248 | ) | 0); 249 | }; 250 | 251 | console.log("Listening on", port); 252 | --------------------------------------------------------------------------------
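Putting the two ends together: the wire protocol between ``client/main.js`` and ``server/static/index.js`` is just two tiny handshake packets followed by raw WebM chunks. Below is a hedged sketch of the client side of that handshake; the packet constants and layouts are taken from the code above, while the standalone framing (hard-coded codec choice, stubbed message handler) is only illustrative.

```js
// Client -> server handshake, as implemented in client/main.js.
const PACKET_CANVAS_SIZE = 1; // sent as Uint16Array: [type, width, height]
const PACKET_VPX_CODEC = 3;   // sent as Uint8Array:  [type, 8 or 9]

const ws = new WebSocket("ws://127.0.0.1:8080");
ws.binaryType = "arraybuffer";

ws.addEventListener("open", () => {
  // Tell the server how large the per-user shadow canvas should be...
  ws.send(new Uint16Array([PACKET_CANVAS_SIZE, window.innerWidth | 0, window.innerHeight | 0]));
  // ...and which VP codec this browser can decode (vp9 preferred, vp8 as fallback).
  ws.send(new Uint8Array([PACKET_VPX_CODEC, 9]));
});

// Everything that arrives after the handshake is a WebM chunk meant to be
// appended to a MediaSource SourceBuffer ("video/webm; codecs=vpX").
ws.addEventListener("message", (e) => {
  // queue e.data for the SourceBuffer, as client/main.js does
});
```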