├── .dockerignore ├── .gitignore ├── Decoder.js ├── Dockerfile ├── LICENSE ├── Player.js ├── README.md ├── YUVCanvas.js ├── client.css ├── client.js ├── index.html ├── index.js ├── package.json ├── raspi.sh └── visio.service /.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | npm-debug.log 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | 6 | # Runtime data 7 | pids 8 | *.pid 9 | *.seed 10 | 11 | # Directory for instrumented libs generated by jscoverage/JSCover 12 | lib-cov 13 | 14 | # Coverage directory used by tools like istanbul 15 | coverage 16 | 17 | # nyc test coverage 18 | .nyc_output 19 | 20 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 21 | .grunt 22 | 23 | # node-waf configuration 24 | .lock-wscript 25 | 26 | # Compiled binary addons (http://nodejs.org/api/addons.html) 27 | build/Release 28 | 29 | # Dependency directories 30 | node_modules 31 | jspm_packages 32 | 33 | # Optional npm cache directory 34 | .npm 35 | 36 | # Optional REPL history 37 | .node_repl_history 38 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:boron 2 | 3 | RUN mkdir -p /usr/src/app 4 | WORKDIR /usr/src/app 5 | 6 | COPY package.json /usr/src/app/ 7 | RUN npm install 8 | 9 | COPY . /usr/src/app 10 | 11 | EXPOSE 8000 12 | EXPOSE 8000/udp 13 | EXPOSE 8080 14 | EXPOSE 8081 15 | 16 | CMD ["node", "index.js"] 17 | 18 | # This is for swarm mode load balancing, since this thing doesn't work fast 19 | # enough on a single xeon core with over 150 viewers. 20 | 21 | # Set up 4 workers on one quad core machine: 22 | 23 | # docker build -t dregu/visio . 24 | # docker swarm init 25 | # docker network create --driver overlay --subnet 10.0.9.0/24 visionet 26 | # docker service create --replicas 4 --name visio --network visionet --publish 8081:8081 dregu/visio 27 | 28 | # ws://127.0.0.1:8081/ should be balanced between the 4 workers. 29 | # Now just duplicate the video stream to all the workers. 30 | # Check raspi.sh for examples... 31 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Dregu 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /Player.js: -------------------------------------------------------------------------------- 1 | /* 2 | 3 | 4 | usage: 5 | 6 | p = new Player({ 7 | useWorker: true | false, 8 | workerFile: "Decoder.js", // give path to Decoder.js 9 | webgl: true | false | "auto" // defaults to "auto" 10 | }); 11 | 12 | // canvas property represents the canvas node 13 | // put it somewhere in the dom 14 | p.canvas; 15 | 16 | p.webgl; // contains the used rendering mode. if you pass auto to webgl you can see what auto detection resulted in 17 | 18 | p.decode(<binary data>); 19 | 20 | 21 | */ 22 | 23 | 24 | 25 | // universal module definition 26 | (function (root, factory) { 27 | if (typeof define === 'function' && define.amd) { 28 | // AMD. Register as an anonymous module. 29 | define(["./Decoder", "./YUVCanvas"], factory); 30 | } else if (typeof exports === 'object') { 31 | // Node. Does not work with strict CommonJS, but 32 | // only CommonJS-like environments that support module.exports, 33 | // like Node. 34 | module.exports = factory(require("./Decoder"), require("./YUVCanvas")); 35 | } else { 36 | // Browser globals (root is window) 37 | root.Player = factory(root.Decoder, root.YUVCanvas); 38 | } 39 | }(this, function (Decoder, WebGLCanvas) { 40 | "use strict"; 41 | 42 | 43 | var nowValue = Decoder.nowValue; 44 | 45 | 46 | var Player = function(parOptions){ 47 | var self = this; 48 | this._config = parOptions || {}; 49 | 50 | this.render = true; 51 | if (this._config.render === false){ 52 | this.render = false; 53 | }; 54 | 55 | this.nowValue = nowValue; 56 | 57 | this._config.workerFile = this._config.workerFile || "Decoder.js"; 58 | if (this._config.preserveDrawingBuffer){ 59 | this._config.contextOptions = this._config.contextOptions || {}; 60 | this._config.contextOptions.preserveDrawingBuffer = true; 61 | }; 62 | 63 | var webgl = "auto"; 64 | if (this._config.webgl === true){ 65 | webgl = true; 66 | }else if (this._config.webgl === false){ 67 | webgl = false; 68 | }; 69 | 70 | if (webgl == "auto"){ 71 | webgl = true; 72 | try{ 73 | if (!window.WebGLRenderingContext) { 74 | // the browser doesn't even know what WebGL is 75 | webgl = false; 76 | } else { 77 | var canvas = document.createElement('canvas'); 78 | var ctx = canvas.getContext("webgl"); 79 | if (!ctx) { 80 | // browser supports WebGL but initialization failed.
81 | webgl = false; 82 | }; 83 | }; 84 | }catch(e){ 85 | webgl = false; 86 | }; 87 | }; 88 | 89 | this.webgl = webgl; 90 | 91 | // choose functions 92 | if (this.webgl){ 93 | this.createCanvasObj = this.createCanvasWebGL; 94 | this.renderFrame = this.renderFrameWebGL; 95 | }else{ 96 | this.createCanvasObj = this.createCanvasRGB; 97 | this.renderFrame = this.renderFrameRGB; 98 | }; 99 | 100 | 101 | var lastWidth; 102 | var lastHeight; 103 | var onPictureDecoded = function(buffer, width, height, infos) { 104 | self.onPictureDecoded(buffer, width, height, infos); 105 | 106 | var startTime = nowValue(); 107 | 108 | if (!buffer || !self.render) { 109 | return; 110 | }; 111 | 112 | self.renderFrame({ 113 | canvasObj: self.canvasObj, 114 | data: buffer, 115 | width: width, 116 | height: height 117 | }); 118 | 119 | if (self.onRenderFrameComplete){ 120 | self.onRenderFrameComplete({ 121 | data: buffer, 122 | width: width, 123 | height: height, 124 | infos: infos, 125 | canvasObj: self.canvasObj 126 | }); 127 | }; 128 | 129 | }; 130 | 131 | // provide size 132 | 133 | if (!this._config.size){ 134 | this._config.size = {}; 135 | }; 136 | this._config.size.width = this._config.size.width || 200; 137 | this._config.size.height = this._config.size.height || 200; 138 | 139 | if (this._config.useWorker){ 140 | var worker = new Worker(this._config.workerFile); 141 | this.worker = worker; 142 | worker.addEventListener('message', function(e) { 143 | var data = e.data; 144 | if (data.consoleLog){ 145 | console.log(data.consoleLog); 146 | return; 147 | }; 148 | 149 | onPictureDecoded.call(self, new Uint8Array(data.buf, 0, data.length), data.width, data.height, data.infos); 150 | 151 | }, false); 152 | 153 | worker.postMessage({type: "Broadway.js - Worker init", options: { 154 | rgb: !webgl, 155 | memsize: this.memsize, 156 | reuseMemory: this._config.reuseMemory ? true : false 157 | }}); 158 | 159 | if (this._config.transferMemory){ 160 | this.decode = function(parData, parInfo){ 161 | // no copy 162 | // instead we are transferring the ownership of the buffer 163 | // dangerous!!! 164 | 165 | worker.postMessage({buf: parData.buffer, offset: parData.byteOffset, length: parData.length, info: parInfo}, [parData.buffer]); // Send data to our worker. 166 | }; 167 | 168 | }else{ 169 | this.decode = function(parData, parInfo){ 170 | // Copy the sample so that we only do a structured clone of the 171 | // region of interest 172 | var copyU8 = new Uint8Array(parData.length); 173 | copyU8.set(parData, 0); 174 | worker.postMessage({buf: copyU8.buffer, offset: 0, length: parData.length, info: parInfo}, [copyU8.buffer]); // Send data to our worker. 175 | }; 176 | 177 | }; 178 | 179 | if (this._config.reuseMemory){ 180 | this.recycleMemory = function(parArray){ 181 | //this.beforeRecycle(); 182 | worker.postMessage({reuse: parArray.buffer}, [parArray.buffer]); // Send data to our worker.
183 | //this.afterRecycle(); 184 | }; 185 | } 186 | 187 | }else{ 188 | 189 | this.decoder = new Decoder({ 190 | rgb: !webgl 191 | }); 192 | this.decoder.onPictureDecoded = onPictureDecoded; 193 | 194 | this.decode = function(parData, parInfo){ 195 | self.decoder.decode(parData, parInfo); 196 | }; 197 | 198 | }; 199 | 200 | 201 | 202 | if (this.render){ 203 | this.canvasObj = this.createCanvasObj({ 204 | contextOptions: this._config.contextOptions 205 | }); 206 | this.canvas = this.canvasObj.canvas; 207 | }; 208 | 209 | this.domNode = this.canvas; 210 | 211 | lastWidth = this._config.size.width; 212 | lastHeight = this._config.size.height; 213 | 214 | }; 215 | 216 | Player.prototype = { 217 | 218 | onPictureDecoded: function(buffer, width, height, infos){}, 219 | 220 | // call when memory of decoded frames is not used anymore 221 | recycleMemory: function(buf){ 222 | }, 223 | /*beforeRecycle: function(){}, 224 | afterRecycle: function(){},*/ 225 | 226 | // for both functions options is: 227 | // 228 | // width 229 | // height 230 | // enableScreenshot 231 | // 232 | // returns an object that has a property canvas which is an html5 canvas 233 | createCanvasWebGL: function(options){ 234 | var canvasObj = this._createBasicCanvasObj(options); 235 | canvasObj.contextOptions = options.contextOptions; 236 | return canvasObj; 237 | }, 238 | 239 | createCanvasRGB: function(options){ 240 | var canvasObj = this._createBasicCanvasObj(options); 241 | return canvasObj; 242 | }, 243 | 244 | // part that is the same for webGL and RGB 245 | _createBasicCanvasObj: function(options){ 246 | options = options || {}; 247 | 248 | var obj = {}; 249 | var width = options.width; 250 | if (!width){ 251 | width = this._config.size.width; 252 | }; 253 | var height = options.height; 254 | if (!height){ 255 | height = this._config.size.height; 256 | }; 257 | obj.canvas = document.createElement('canvas'); 258 | obj.canvas.width = width; 259 | obj.canvas.height = height; 260 | obj.canvas.style.backgroundColor = "#0D0E1B"; 261 | 262 | 263 | return obj; 264 | }, 265 | 266 | // options: 267 | // 268 | // canvas 269 | // data 270 | renderFrameWebGL: function(options){ 271 | 272 | var canvasObj = options.canvasObj; 273 | 274 | var width = options.width || canvasObj.canvas.width; 275 | var height = options.height || canvasObj.canvas.height; 276 | 277 | if (canvasObj.canvas.width !== width || canvasObj.canvas.height !== height || !canvasObj.webGLCanvas){ 278 | canvasObj.canvas.width = width; 279 | canvasObj.canvas.height = height; 280 | canvasObj.webGLCanvas = new WebGLCanvas({ 281 | canvas: canvasObj.canvas, 282 | contextOptions: canvasObj.contextOptions, 283 | width: width, 284 | height: height 285 | }); 286 | }; 287 | 288 | var ylen = width * height; 289 | var uvlen = (width / 2) * (height / 2); 290 | 291 | canvasObj.webGLCanvas.drawNextOutputPicture({ 292 | yData: options.data.subarray(0, ylen), 293 | uData: options.data.subarray(ylen, ylen + uvlen), 294 | vData: options.data.subarray(ylen + uvlen, ylen + uvlen + uvlen) 295 | }); 296 | 297 | var self = this; 298 | self.recycleMemory(options.data); 299 | 300 | }, 301 | renderFrameRGB: function(options){ 302 | var canvasObj = options.canvasObj; 303 | 304 | var width = options.width || canvasObj.canvas.width; 305 | var height = options.height || canvasObj.canvas.height; 306 | 307 | if (canvasObj.canvas.width !== width || canvasObj.canvas.height !== height){ 308 | canvasObj.canvas.width = width; 309 | canvasObj.canvas.height = height; 310 | }; 311 | 312 | var ctx = canvasObj.ctx;
313 | var imgData = canvasObj.imgData; 314 | 315 | if (!ctx){ 316 | canvasObj.ctx = canvasObj.canvas.getContext('2d'); 317 | ctx = canvasObj.ctx; 318 | 319 | canvasObj.imgData = ctx.createImageData(width, height); 320 | imgData = canvasObj.imgData; 321 | }; 322 | 323 | imgData.data.set(options.data); 324 | ctx.putImageData(imgData, 0, 0); 325 | var self = this; 326 | self.recycleMemory(options.data); 327 | 328 | } 329 | 330 | }; 331 | 332 | return Player; 333 | 334 | })); 335 | 336 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # visio 2 | Ultra-fast live streaming of raw h264 from a Raspberry Pi to multiple browser 3 | clients with Node, WebSockets and 4 | [Broadway](https://github.com/mbebenita/Broadway). The latency on a LAN at 25 fps 5 | is about 4 frames, or 160 ms. 6 | 7 | ## Installation 8 | ``` 9 | npm install 10 | ``` 11 | If you are going to use UDP, also install socat. 12 | ``` 13 | apt install socat 14 | ``` 15 | Tested on a Raspberry Pi 1 with Raspbian Jessie and Node v7.4.0. 16 | 17 | ## Server 18 | Receives the h264 stream from raspivid and serves it to WebSocket clients. 19 | Start with ```node index.js --udpport 8000 --wsport 8081``` for UDP mode 20 | or ```node index.js --tcpport 8000 --wsport 8081``` for TCP mode. 21 | 22 | ## Streamer 23 | Streams live h264 from raspivid (or GStreamer) to the server. Check raspi.sh 24 | and start with ```./raspi.sh```. You can use something like 25 | ```ffmpeg -re -i foo.mp4 -c:v copy -f h264 udp://localhost:8000``` 26 | to stream anything; just remember that Broadway supports only baseline h264 27 | and no audio. 28 | 29 | ## HTTP-server 30 | You need one to serve the client files. Tested with ```http-server``` from npm, 31 | which serves the current directory on port 8080 by default. 32 | 33 | ## Client 34 | The minimal client is then available at ```http://server-ip:8080/```. 35 | Works in most browsers that support canvas and WebSockets. -------------------------------------------------------------------------------- /YUVCanvas.js: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2015 Paperspace Co. All rights reserved. 3 | // 4 | // Permission is hereby granted, free of charge, to any person obtaining a copy 5 | // of this software and associated documentation files (the "Software"), to 6 | // deal in the Software without restriction, including without limitation the 7 | // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or 8 | // sell copies of the Software, and to permit persons to whom the Software is 9 | // furnished to do so, subject to the following conditions: 10 | // 11 | // The above copyright notice and this permission notice shall be included in 12 | // all copies or substantial portions of the Software. 13 | // 14 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 15 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 16 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 17 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 18 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 19 | // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 20 | // IN THE SOFTWARE. 21 | // 22 | 23 | 24 | // universal module definition 25 | (function (root, factory) { 26 | if (typeof define === 'function' && define.amd) { 27 | // AMD. Register as an anonymous module.
28 | define([], factory); 29 | } else if (typeof exports === 'object') { 30 | // Node. Does not work with strict CommonJS, but 31 | // only CommonJS-like environments that support module.exports, 32 | // like Node. 33 | module.exports = factory(); 34 | } else { 35 | // Browser globals (root is window) 36 | root.YUVCanvas = factory(); 37 | } 38 | }(this, function () { 39 | 40 | 41 | /** 42 | * This class can be used to render output pictures from an H264bsdDecoder to a canvas element. 43 | * If available the content is rendered using WebGL. 44 | */ 45 | function YUVCanvas(parOptions) { 46 | 47 | parOptions = parOptions || {}; 48 | 49 | this.canvasElement = parOptions.canvas || document.createElement("canvas"); 50 | this.contextOptions = parOptions.contextOptions; 51 | 52 | this.type = parOptions.type || "yuv420"; 53 | 54 | this.customYUV444 = parOptions.customYUV444; 55 | 56 | this.conversionType = parOptions.conversionType || "rec601"; 57 | 58 | this.width = parOptions.width || 640; 59 | this.height = parOptions.height || 320; 60 | 61 | this.animationTime = parOptions.animationTime || 0; 62 | 63 | this.canvasElement.width = this.width; 64 | this.canvasElement.height = this.height; 65 | 66 | this.initContextGL(); 67 | 68 | if(this.contextGL) { 69 | this.initProgram(); 70 | this.initBuffers(); 71 | this.initTextures(); 72 | }; 73 | 74 | 75 | /** 76 | * Draw the next output picture using WebGL 77 | */ 78 | if (this.type === "yuv420"){ 79 | this.drawNextOuptutPictureGL = function(par) { 80 | var gl = this.contextGL; 81 | var texturePosBuffer = this.texturePosBuffer; 82 | var uTexturePosBuffer = this.uTexturePosBuffer; 83 | var vTexturePosBuffer = this.vTexturePosBuffer; 84 | 85 | var yTextureRef = this.yTextureRef; 86 | var uTextureRef = this.uTextureRef; 87 | var vTextureRef = this.vTextureRef; 88 | 89 | var yData = par.yData; 90 | var uData = par.uData; 91 | var vData = par.vData; 92 | 93 | var width = this.width; 94 | var height = this.height; 95 | 96 | var yDataPerRow = par.yDataPerRow || width; 97 | var yRowCnt = par.yRowCnt || height; 98 | 99 | var uDataPerRow = par.uDataPerRow || (width / 2); 100 | var uRowCnt = par.uRowCnt || (height / 2); 101 | 102 | var vDataPerRow = par.vDataPerRow || uDataPerRow; 103 | var vRowCnt = par.vRowCnt || uRowCnt; 104 | 105 | gl.viewport(0, 0, width, height); 106 | 107 | var tTop = 0; 108 | var tLeft = 0; 109 | var tBottom = height / yRowCnt; 110 | var tRight = width / yDataPerRow; 111 | var texturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]); 112 | 113 | gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer); 114 | gl.bufferData(gl.ARRAY_BUFFER, texturePosValues, gl.DYNAMIC_DRAW); 115 | 116 | if (this.customYUV444){ 117 | tBottom = height / uRowCnt; 118 | tRight = width / uDataPerRow; 119 | }else{ 120 | tBottom = (height / 2) / uRowCnt; 121 | tRight = (width / 2) / uDataPerRow; 122 | }; 123 | var uTexturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]); 124 | 125 | gl.bindBuffer(gl.ARRAY_BUFFER, uTexturePosBuffer); 126 | gl.bufferData(gl.ARRAY_BUFFER, uTexturePosValues, gl.DYNAMIC_DRAW); 127 | 128 | 129 | if (this.customYUV444){ 130 | tBottom = height / vRowCnt; 131 | tRight = width / vDataPerRow; 132 | }else{ 133 | tBottom = (height / 2) / vRowCnt; 134 | tRight = (width / 2) / vDataPerRow; 135 | }; 136 | var vTexturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]); 137 | 138 | gl.bindBuffer(gl.ARRAY_BUFFER, vTexturePosBuffer); 139 | 
gl.bufferData(gl.ARRAY_BUFFER, vTexturePosValues, gl.DYNAMIC_DRAW); 140 | 141 | 142 | gl.activeTexture(gl.TEXTURE0); 143 | gl.bindTexture(gl.TEXTURE_2D, yTextureRef); 144 | gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, yDataPerRow, yRowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, yData); 145 | 146 | gl.activeTexture(gl.TEXTURE1); 147 | gl.bindTexture(gl.TEXTURE_2D, uTextureRef); 148 | gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, uDataPerRow, uRowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, uData); 149 | 150 | gl.activeTexture(gl.TEXTURE2); 151 | gl.bindTexture(gl.TEXTURE_2D, vTextureRef); 152 | gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, vDataPerRow, vRowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, vData); 153 | 154 | gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); 155 | }; 156 | 157 | }else if (this.type === "yuv422"){ 158 | this.drawNextOuptutPictureGL = function(par) { 159 | var gl = this.contextGL; 160 | var texturePosBuffer = this.texturePosBuffer; 161 | 162 | var textureRef = this.textureRef; 163 | 164 | var data = par.data; 165 | 166 | var width = this.width; 167 | var height = this.height; 168 | 169 | var dataPerRow = par.dataPerRow || (width * 2); 170 | var rowCnt = par.rowCnt || height; 171 | 172 | gl.viewport(0, 0, width, height); 173 | 174 | var tTop = 0; 175 | var tLeft = 0; 176 | var tBottom = height / rowCnt; 177 | var tRight = width / (dataPerRow / 2); 178 | var texturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]); 179 | 180 | gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer); 181 | gl.bufferData(gl.ARRAY_BUFFER, texturePosValues, gl.DYNAMIC_DRAW); 182 | 183 | gl.uniform2f(gl.getUniformLocation(this.shaderProgram, 'resolution'), dataPerRow, height); 184 | 185 | gl.activeTexture(gl.TEXTURE0); 186 | gl.bindTexture(gl.TEXTURE_2D, textureRef); 187 | gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, dataPerRow, rowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, data); 188 | 189 | gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); 190 | }; 191 | }; 192 | 193 | }; 194 | 195 | /** 196 | * Returns true if the canvas supports WebGL 197 | */ 198 | YUVCanvas.prototype.isWebGL = function() { 199 | return this.contextGL; 200 | }; 201 | 202 | /** 203 | * Create the GL context from the canvas element 204 | */ 205 | YUVCanvas.prototype.initContextGL = function() { 206 | var canvas = this.canvasElement; 207 | var gl = null; 208 | 209 | var validContextNames = ["webgl", "experimental-webgl", "moz-webgl", "webkit-3d"]; 210 | var nameIndex = 0; 211 | 212 | while(!gl && nameIndex < validContextNames.length) { 213 | var contextName = validContextNames[nameIndex]; 214 | 215 | try { 216 | if (this.contextOptions){ 217 | gl = canvas.getContext(contextName, this.contextOptions); 218 | }else{ 219 | gl = canvas.getContext(contextName); 220 | }; 221 | } catch (e) { 222 | gl = null; 223 | } 224 | 225 | if(!gl || typeof gl.getParameter !== "function") { 226 | gl = null; 227 | } 228 | 229 | ++nameIndex; 230 | }; 231 | 232 | this.contextGL = gl; 233 | }; 234 | 235 | /** 236 | * Initialize GL shader program 237 | */ 238 | YUVCanvas.prototype.initProgram = function() { 239 | var gl = this.contextGL; 240 | 241 | // vertex shader is the same for all types 242 | var vertexShaderScript; 243 | var fragmentShaderScript; 244 | 245 | if (this.type === "yuv420"){ 246 | 247 | vertexShaderScript = [ 248 | 'attribute vec4 vertexPos;', 249 | 'attribute vec4 texturePos;', 250 | 'attribute vec4 uTexturePos;', 251 | 'attribute vec4 vTexturePos;', 252 | 'varying vec2 textureCoord;', 253 | 'varying vec2 uTextureCoord;', 254 | 
'varying vec2 vTextureCoord;', 255 | 256 | 'void main()', 257 | '{', 258 | ' gl_Position = vertexPos;', 259 | ' textureCoord = texturePos.xy;', 260 | ' uTextureCoord = uTexturePos.xy;', 261 | ' vTextureCoord = vTexturePos.xy;', 262 | '}' 263 | ].join('\n'); 264 | 265 | fragmentShaderScript = [ 266 | 'precision highp float;', 267 | 'varying highp vec2 textureCoord;', 268 | 'varying highp vec2 uTextureCoord;', 269 | 'varying highp vec2 vTextureCoord;', 270 | 'uniform sampler2D ySampler;', 271 | 'uniform sampler2D uSampler;', 272 | 'uniform sampler2D vSampler;', 273 | 'uniform mat4 YUV2RGB;', 274 | 275 | 'void main(void) {', 276 | ' highp float y = texture2D(ySampler, textureCoord).r;', 277 | ' highp float u = texture2D(uSampler, uTextureCoord).r;', 278 | ' highp float v = texture2D(vSampler, vTextureCoord).r;', 279 | ' gl_FragColor = vec4(y, u, v, 1) * YUV2RGB;', 280 | '}' 281 | ].join('\n'); 282 | 283 | }else if (this.type === "yuv422"){ 284 | vertexShaderScript = [ 285 | 'attribute vec4 vertexPos;', 286 | 'attribute vec4 texturePos;', 287 | 'varying vec2 textureCoord;', 288 | 289 | 'void main()', 290 | '{', 291 | ' gl_Position = vertexPos;', 292 | ' textureCoord = texturePos.xy;', 293 | '}' 294 | ].join('\n'); 295 | 296 | fragmentShaderScript = [ 297 | 'precision highp float;', 298 | 'varying highp vec2 textureCoord;', 299 | 'uniform sampler2D sampler;', 300 | 'uniform highp vec2 resolution;', 301 | 'uniform mat4 YUV2RGB;', 302 | 303 | 'void main(void) {', 304 | 305 | ' highp float texPixX = 1.0 / resolution.x;', 306 | ' highp float logPixX = 2.0 / resolution.x;', // half the resolution of the texture 307 | ' highp float logHalfPixX = 4.0 / resolution.x;', // half of the logical resolution so every 4th pixel 308 | ' highp float steps = floor(textureCoord.x / logPixX);', 309 | ' highp float uvSteps = floor(textureCoord.x / logHalfPixX);', 310 | ' highp float y = texture2D(sampler, vec2((logPixX * steps) + texPixX, textureCoord.y)).r;', 311 | ' highp float u = texture2D(sampler, vec2((logHalfPixX * uvSteps), textureCoord.y)).r;', 312 | ' highp float v = texture2D(sampler, vec2((logHalfPixX * uvSteps) + texPixX + texPixX, textureCoord.y)).r;', 313 | 314 | //' highp float y = texture2D(sampler, textureCoord).r;', 315 | //' gl_FragColor = vec4(y, u, v, 1) * YUV2RGB;', 316 | ' gl_FragColor = vec4(y, u, v, 1.0) * YUV2RGB;', 317 | '}' 318 | ].join('\n'); 319 | }; 320 | 321 | var YUV2RGB = []; 322 | 323 | if (this.conversionType == "rec709") { 324 | // ITU-T Rec. 709 325 | YUV2RGB = [ 326 | 1.16438, 0.00000, 1.79274, -0.97295, 327 | 1.16438, -0.21325, -0.53291, 0.30148, 328 | 1.16438, 2.11240, 0.00000, -1.13340, 329 | 0, 0, 0, 1, 330 | ]; 331 | } else { 332 | // assume ITU-T Rec. 
601 333 | YUV2RGB = [ 334 | 1.16438, 0.00000, 1.59603, -0.87079, 335 | 1.16438, -0.39176, -0.81297, 0.52959, 336 | 1.16438, 2.01723, 0.00000, -1.08139, 337 | 0, 0, 0, 1 338 | ]; 339 | }; 340 | 341 | var vertexShader = gl.createShader(gl.VERTEX_SHADER); 342 | gl.shaderSource(vertexShader, vertexShaderScript); 343 | gl.compileShader(vertexShader); 344 | if(!gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS)) { 345 | console.log('Vertex shader failed to compile: ' + gl.getShaderInfoLog(vertexShader)); 346 | } 347 | 348 | var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER); 349 | gl.shaderSource(fragmentShader, fragmentShaderScript); 350 | gl.compileShader(fragmentShader); 351 | if(!gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS)) { 352 | console.log('Fragment shader failed to compile: ' + gl.getShaderInfoLog(fragmentShader)); 353 | } 354 | 355 | var program = gl.createProgram(); 356 | gl.attachShader(program, vertexShader); 357 | gl.attachShader(program, fragmentShader); 358 | gl.linkProgram(program); 359 | if(!gl.getProgramParameter(program, gl.LINK_STATUS)) { 360 | console.log('Program failed to link: ' + gl.getProgramInfoLog(program)); 361 | } 362 | 363 | gl.useProgram(program); 364 | 365 | var YUV2RGBRef = gl.getUniformLocation(program, 'YUV2RGB'); 366 | gl.uniformMatrix4fv(YUV2RGBRef, false, YUV2RGB); 367 | 368 | this.shaderProgram = program; 369 | }; 370 | 371 | /** 372 | * Initialize vertex buffers and attach to shader program 373 | */ 374 | YUVCanvas.prototype.initBuffers = function() { 375 | var gl = this.contextGL; 376 | var program = this.shaderProgram; 377 | 378 | var vertexPosBuffer = gl.createBuffer(); 379 | gl.bindBuffer(gl.ARRAY_BUFFER, vertexPosBuffer); 380 | gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 1, -1, 1, 1, -1, -1, -1]), gl.STATIC_DRAW); 381 | 382 | var vertexPosRef = gl.getAttribLocation(program, 'vertexPos'); 383 | gl.enableVertexAttribArray(vertexPosRef); 384 | gl.vertexAttribPointer(vertexPosRef, 2, gl.FLOAT, false, 0, 0); 385 | 386 | if (this.animationTime){ 387 | 388 | var animationTime = this.animationTime; 389 | var timePassed = 0; 390 | var stepTime = 15; 391 | 392 | var aniFun = function(){ 393 | 394 | timePassed += stepTime; 395 | var mul = ( 1 * timePassed ) / animationTime; 396 | 397 | if (timePassed >= animationTime){ 398 | mul = 1; 399 | }else{ 400 | setTimeout(aniFun, stepTime); 401 | }; 402 | 403 | var neg = -1 * mul; 404 | var pos = 1 * mul; 405 | 406 | var vertexPosBuffer = gl.createBuffer(); 407 | gl.bindBuffer(gl.ARRAY_BUFFER, vertexPosBuffer); 408 | gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([pos, pos, neg, pos, pos, neg, neg, neg]), gl.STATIC_DRAW); 409 | 410 | var vertexPosRef = gl.getAttribLocation(program, 'vertexPos'); 411 | gl.enableVertexAttribArray(vertexPosRef); 412 | gl.vertexAttribPointer(vertexPosRef, 2, gl.FLOAT, false, 0, 0); 413 | 414 | try{ 415 | gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); 416 | }catch(e){}; 417 | 418 | }; 419 | aniFun(); 420 | 421 | }; 422 | 423 | 424 | 425 | var texturePosBuffer = gl.createBuffer(); 426 | gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer); 427 | gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW); 428 | 429 | var texturePosRef = gl.getAttribLocation(program, 'texturePos'); 430 | gl.enableVertexAttribArray(texturePosRef); 431 | gl.vertexAttribPointer(texturePosRef, 2, gl.FLOAT, false, 0, 0); 432 | 433 | this.texturePosBuffer = texturePosBuffer; 434 | 435 | if (this.type === "yuv420"){ 436 | var uTexturePosBuffer =
gl.createBuffer(); 437 | gl.bindBuffer(gl.ARRAY_BUFFER, uTexturePosBuffer); 438 | gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW); 439 | 440 | var uTexturePosRef = gl.getAttribLocation(program, 'uTexturePos'); 441 | gl.enableVertexAttribArray(uTexturePosRef); 442 | gl.vertexAttribPointer(uTexturePosRef, 2, gl.FLOAT, false, 0, 0); 443 | 444 | this.uTexturePosBuffer = uTexturePosBuffer; 445 | 446 | 447 | var vTexturePosBuffer = gl.createBuffer(); 448 | gl.bindBuffer(gl.ARRAY_BUFFER, vTexturePosBuffer); 449 | gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW); 450 | 451 | var vTexturePosRef = gl.getAttribLocation(program, 'vTexturePos'); 452 | gl.enableVertexAttribArray(vTexturePosRef); 453 | gl.vertexAttribPointer(vTexturePosRef, 2, gl.FLOAT, false, 0, 0); 454 | 455 | this.vTexturePosBuffer = vTexturePosBuffer; 456 | }; 457 | 458 | }; 459 | 460 | /** 461 | * Initialize GL textures and attach to shader program 462 | */ 463 | YUVCanvas.prototype.initTextures = function() { 464 | var gl = this.contextGL; 465 | var program = this.shaderProgram; 466 | 467 | if (this.type === "yuv420"){ 468 | 469 | var yTextureRef = this.initTexture(); 470 | var ySamplerRef = gl.getUniformLocation(program, 'ySampler'); 471 | gl.uniform1i(ySamplerRef, 0); 472 | this.yTextureRef = yTextureRef; 473 | 474 | var uTextureRef = this.initTexture(); 475 | var uSamplerRef = gl.getUniformLocation(program, 'uSampler'); 476 | gl.uniform1i(uSamplerRef, 1); 477 | this.uTextureRef = uTextureRef; 478 | 479 | var vTextureRef = this.initTexture(); 480 | var vSamplerRef = gl.getUniformLocation(program, 'vSampler'); 481 | gl.uniform1i(vSamplerRef, 2); 482 | this.vTextureRef = vTextureRef; 483 | 484 | }else if (this.type === "yuv422"){ 485 | // only one texture for 422 486 | var textureRef = this.initTexture(); 487 | var samplerRef = gl.getUniformLocation(program, 'sampler'); 488 | gl.uniform1i(samplerRef, 0); 489 | this.textureRef = textureRef; 490 | 491 | }; 492 | }; 493 | 494 | /** 495 | * Create and configure a single texture 496 | */ 497 | YUVCanvas.prototype.initTexture = function() { 498 | var gl = this.contextGL; 499 | 500 | var textureRef = gl.createTexture(); 501 | gl.bindTexture(gl.TEXTURE_2D, textureRef); 502 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST); 503 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST); 504 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); 505 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); 506 | gl.bindTexture(gl.TEXTURE_2D, null); 507 | 508 | return textureRef; 509 | }; 510 | 511 | /** 512 | * Draw picture data to the canvas. 513 | * If this object is using WebGL, the data must be an I420 formatted ArrayBuffer, 514 | * Otherwise, data must be an RGBA formatted ArrayBuffer. 515 | */ 516 | YUVCanvas.prototype.drawNextOutputPicture = function(width, height, croppingParams, data) { 517 | var gl = this.contextGL; 518 | 519 | if(gl) { 520 | this.drawNextOuptutPictureGL(width, height, croppingParams, data); 521 | } else { 522 | this.drawNextOuptutPictureRGBA(width, height, croppingParams, data); 523 | } 524 | }; 525 | 526 | 527 | 528 | /** 529 | * Draw next output picture using ARGB data on a 2d canvas. 
530 | */ 531 | YUVCanvas.prototype.drawNextOuptutPictureRGBA = function(width, height, croppingParams, data) { 532 | var canvas = this.canvasElement; 533 | 534 | // croppingParams comes in as a parameter; null means draw the full frame 535 | 536 | var argbData = data; 537 | 538 | var ctx = canvas.getContext('2d'); 539 | var imageData = ctx.getImageData(0, 0, width, height); 540 | imageData.data.set(argbData); 541 | 542 | if(croppingParams === null) { 543 | ctx.putImageData(imageData, 0, 0); 544 | } else { 545 | ctx.putImageData(imageData, -croppingParams.left, -croppingParams.top, 0, 0, croppingParams.width, croppingParams.height); 546 | } 547 | }; 548 | 549 | return YUVCanvas; 550 | 551 | })); 552 | -------------------------------------------------------------------------------- /client.css: -------------------------------------------------------------------------------- 1 | @import url('https://fonts.googleapis.com/css?family=Ubuntu:300'); 2 | 3 | body { 4 | background: #050505; 5 | text-align: center; 6 | font-family: 'Ubuntu', sans-serif; 7 | font-weight: 300; 8 | letter-spacing: .15em; 9 | color: #fff; 10 | } 11 | canvas { 12 | border: 1px solid #eee; 13 | margin-bottom: 20px; 14 | } 15 | -------------------------------------------------------------------------------- /client.js: -------------------------------------------------------------------------------- 1 | startStream('container', window.location.protocol.replace(/http/, 'ws')+'//'+window.location.hostname+':8081', true, 'auto', 2000) 2 | 3 | function startStream(playerId, wsUri, useWorker, webgl, reconnectMs) { 4 | if (!window.player) { 5 | window.player = new Player({ useWorker: useWorker, webgl: webgl, size: { width: 848, height: 480 } }) 6 | var playerElement = document.getElementById(playerId) 7 | playerElement.appendChild(window.player.canvas) 8 | window.player.canvas.addEventListener('dblclick', function() { 9 | if(window.player.canvas.requestFullscreen) window.player.canvas.requestFullscreen(); 10 | else if(window.player.canvas.webkitRequestFullScreen) window.player.canvas.webkitRequestFullScreen(); 11 | else if(window.player.canvas.mozRequestFullScreen) window.player.canvas.mozRequestFullScreen(); 12 | }) 13 | window.debugger = new debug(playerId) // show statistics; you can remove this if you don't need stats 14 | } 15 | document.addEventListener('webkitfullscreenchange', exitHandler, false); 16 | document.addEventListener('mozfullscreenchange', exitHandler, false); 17 | document.addEventListener('fullscreenchange', exitHandler, false); 18 | document.addEventListener('MSFullscreenChange', exitHandler, false); 19 | 20 | function exitHandler() { 21 | if(document.fullScreenElement || document.webkitCurrentFullScreenElement || document.mozFullScreenElement) { 22 | window.player.canvas.style.width = '100vw' 23 | window.player.canvas.style.marginBottom = '0' 24 | window.player.canvas.style.border = '0' 25 | } else { 26 | window.player.canvas.style.width = '' 27 | window.player.canvas.style.marginBottom = '20px' 28 | window.player.canvas.style.border = '1px solid #eee' 29 | } 30 | } 31 | 32 | var separator = new Uint8Array([0, 0, 0, 1]) 33 | function addSeparator(buffer) { 34 | var tmp = new Uint8Array(4+buffer.byteLength) 35 | tmp.set(separator, 0) 36 | tmp.set(new Uint8Array(buffer), 4) 37 | return tmp.buffer 38 | } 39 | 40 | var ws = new WebSocket(wsUri) 41 | ws.binaryType = 'arraybuffer' 42 | ws.onopen = function (e) { 43 | console.log('websocket connected') 44 | ws.onmessage = function (msg) { 45 | window.player.decode(new Uint8Array(addSeparator(msg.data))) 46 |
if(window.debugger) window.debugger.nal(msg.data.byteLength) 47 | } 48 | } 49 | ws.onclose = function (e) { 50 | console.log('websocket disconnected') 51 | if (reconnectMs > 0) { 52 | setTimeout(function() { startStream(playerId, wsUri, useWorker, webgl, reconnectMs) }, reconnectMs) 53 | } 54 | } 55 | } 56 | 57 | // debugger stuff 58 | function avgFPS(length) { 59 | this.index = 0 60 | this.sum = 0 61 | this.length = length 62 | this.buffer = Array.apply(null, Array(length)).map(Number.prototype.valueOf,0); 63 | this.tick = function(tick) { 64 | this.sum -= this.buffer[this.index] 65 | this.sum += tick 66 | this.buffer[this.index] = tick 67 | if (++this.index == this.length) this.index = 0 68 | return Math.floor(this.sum/this.length) 69 | } 70 | this.avg = function() { 71 | return Math.floor(this.sum/this.length) 72 | } 73 | return this 74 | } 75 | 76 | function debug(playerId) { 77 | this.started = +new Date() 78 | this.fps = new avgFPS(50) 79 | this.last = +new Date() 80 | this.nals = 0 81 | this.frames = 0 82 | this.total = 0 83 | this.secondTotal = 0 84 | this.playerWidth = 0 85 | this.playerHeight = 0 86 | this.statsElement = document.createElement('div') 87 | document.getElementById(playerId).appendChild(this.statsElement) 88 | window.player.onPictureDecoded = function(buffer, width, height, infos) { 89 | window.debugger.frame(width, height) 90 | } 91 | this.nal = function(bytes) { 92 | this.nals++ 93 | this.total += bytes 94 | this.secondTotal += bytes 95 | } 96 | this.frame = function(w, h) { 97 | this.playerWidth = w 98 | this.playerHeight = h 99 | this.frames++ 100 | var now = +new Date(), delta = now - window.debugger.last 101 | this.fps.tick(delta) 102 | this.last = now 103 | } 104 | setInterval(function() { 105 | var mib = (window.debugger.total/1048576).toFixed(2) 106 | var date = new Date(null) 107 | date.setSeconds((+new Date()-window.debugger.started)/1000) 108 | var dur = date.toISOString().substr(11, 8) 109 | window.debugger.statsElement.innerHTML = window.debugger.playerWidth+'x'+window.debugger.playerHeight+', '+Math.floor(1/window.debugger.fps.avg()*1000)+' fps, '+(window.debugger.secondTotal/1024).toFixed(2)+' KiB/s, total '+mib+' MiB, '+window.debugger.nals+' NAL units, '+window.debugger.frames+' frames in '+dur 110 | window.debugger.secondTotal = 0 111 | }, 1000) 112 | } 113 | -------------------------------------------------------------------------------- /index.html: -------------------------------------------------------------------------------- 1 | <!DOCTYPE html> 2 | <html> 3 | <head> 4 | <meta charset="utf-8"> 5 | <link rel="stylesheet" href="client.css"> 6 | <title>visio</title> 7 | </head> 8 | <body> 9 | <h1>visio</h1> 10 | <div id="container"></div> 11 | <script src="Decoder.js"></script> 12 | <script src="YUVCanvas.js"></script> 13 | <script src="Player.js"></script> 14 | <script src="client.js"></script> 15 | <a href="https://github.com/Dregu/visio">Fork me on GitHub</a> 16 | </body> 17 | </html> 18 | --------------------------------------------------------------------------------
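Putting the pieces together: a minimal end-to-end smoke test on one machine, assuming ```http-server``` from npm for the static files and using the README's ffmpeg example as the source (```foo.mp4``` is a stand-in for any baseline-h264 file):

```bash
# terminal 1: the relay (index.js, below): h264 in over UDP, out to WebSocket clients
node index.js --udpport 8000 --wsport 8081

# terminal 2: serve index.html, client.js and the decoder files; http-server picks port 8080 by default
npm install -g http-server && http-server

# terminal 3: push a baseline-h264 stream at the relay (the README's example line)
ffmpeg -re -i foo.mp4 -c:v copy -f h264 udp://localhost:8000
```

Open ```http://localhost:8080/``` and the canvas should start painting.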
/index.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | const net = require('net') 4 | const dgram = require('dgram') 5 | const WSServer = require('uws').Server 6 | const Split = require('stream-split') 7 | const NALSeparator = Buffer.from([0, 0, 0, 1]) 8 | const express = require('express') 9 | const systemd = require('systemd') 10 | const app = express() 11 | 12 | var wsServer, conf = require('nconf'), 13 | headers = [] 14 | conf.argv().defaults({ 15 | tcpport: 8000, 16 | udpport: 8000, 17 | wsport: 8081, 18 | queryport: false, 19 | limit: 150 20 | }) 21 | 22 | if (conf.get('queryport')) { 23 | app.get('/', (req, res) => { 24 | var count = 0 25 | wsServer.clients.forEach((ws) => { 26 | if (ws.readyState == 1) { 27 | count++ 28 | } 29 | }) 30 | res.set('Content-type', 'text/plain') 31 | res.send(count.toString()) 32 | }) 33 | app.listen(conf.get('queryport')) 34 | } 35 | 36 | function broadcast(data) { 37 | wsServer.clients.forEach((ws) => { 38 | if (ws.readyState === 1) { 39 | ws.send(data, { binary: true }) 40 | } 41 | }) 42 | } 43 | 44 | if (conf.get('tcpport')) { 45 | const tcpServer = net.createServer((socket) => { 46 | console.log('streamer connected') 47 | socket.on('end', () => { 48 | console.log('streamer disconnected') 49 | }) 50 | headers = [] 51 | const NALSplitter = new Split(NALSeparator) 52 | NALSplitter.on('data', (data) => { 53 | if (wsServer && wsServer.clients.length > 0) { 54 | if (headers.length < 3) headers.push(data) 55 | broadcast(data) 56 | } 57 | }).on('error', (e) => { 58 | console.log('splitter error ' + e) 59 | process.exit(0) 60 | }) 61 | socket.pipe(NALSplitter) 62 | }) 63 | tcpServer.listen(conf.get('tcpport')) 64 | if (conf.get('tcpport') == 'systemd') { 65 | console.log('TCP server listening on systemd socket') 66 | } else { 67 | var address = tcpServer.address() 68 | if (address) console.log( 69 | `TCP server listening on ${address.address}:${address.port}`) 70 | } 71 | } 72 | 73 | if (conf.get('udpport')) { 74 | const udpServer = dgram.createSocket('udp4') 75 | udpServer.on('listening', () => { 76 | var address = udpServer.address() 77 | console.log( 78 | `UDP server listening on ${address.address}:${address.port}`) 79 | }) 80 | const NALSplitter = new Split(NALSeparator) 81 | NALSplitter.on('data', (data) => { 82 | if (wsServer && wsServer.clients.length > 0) { 83 | broadcast(data) 84 | } 85 | }).on('error', (e) => { 86 | console.log('splitter error ' + e) 87 | process.exit(0) 88 | }) 89 | udpServer.on('message', (msg, rinfo) => { 90 | NALSplitter.write(msg) 91 | }) 92 | udpServer.bind(conf.get('udpport')) 93 | } 94 | 95 | if (conf.get('wsport')) { 96 | wsServer = new WSServer({ port: conf.get('wsport') }) 97 | console.log( 98 | `WS server listening on`, conf.get('wsport') 99 | ) 100 | wsServer.on('connection', (ws) => { 101 | if (wsServer.clients.length >= conf.get('limit')) { 102 | console.log('client rejected, limit reached') 103 | ws.close() 104 | return 105 | } 106 | console.log('client connected, watching ' + wsServer.clients.length) 107 | for (let i in headers) { 108 | ws.send(headers[i]) 109 | } 110 | ws.on('close', () => { 111 | console.log('client disconnected, watching ' + wsServer.clients.length) 112 | }) 113 | }) 114 | } 115 | -------------------------------------------------------------------------------- /package.json:
-------------------------------------------------------------------------------- 1 | { 2 | "name": "visio", 3 | "version": "1.0.0", 4 | "description": "Barebones live streaming h264 from raspivid to browser with node and broadway", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "Dregu", 10 | "license": "MIT", 11 | "repository": "Dregu/visio", 12 | "dependencies": { 13 | "express": "^4.15.2", 14 | "nconf": "^0.8.4", 15 | "stream-split": "^1.1.0", 16 | "systemd": "^0.2.6", 17 | "uws": "^0.14.5" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /raspi.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # UDP mode, with extra options for my halogen lighting. Don't worry about it... 4 | #raspivid -w 848 -h 480 -t 0 -fps 25 -ih -b 700000 -pf baseline -mm average -ISO 800 -awb off -awbg 1.0,2.5 -ex fixedfps -ev 0 -co 50 -br 65 -o - |\ 5 | #socat - udp-sendto:localhost:8000,shut-none 6 | 7 | # TCP mode, minimal setup 8 | #raspivid -w 848 -h 480 -t 0 -fps 25 -ih -b 700000 -pf baseline -o - | nc localhost 8000 9 | 10 | # GStreamer kinda works, but the OMX encoder doesn't use inline headers. 11 | # Our server however saves the SPS/PPS headers in TCP mode and sends them to every new client. 12 | #modprobe bcm2835-v4l2 13 | #gst-launch-1.0 v4l2src ! video/x-raw,width=848,height=480,framerate=25/1 ! omxh264enc control-rate=2 target-bitrate=700000 ! video/x-h264,width=848,height=480,framerate=25/1,stream-format=byte-stream,profile=baseline ! tcpclientsink host=localhost port=8000 14 | 15 | # Test stream for swarm mode 16 | # You can duplicate the actual raspivid stream on the server with socat and tee 17 | 18 | #gst-launch-1.0 videotestsrc ! video/x-raw,width=848,height=480,framerate=25/1 ! x264enc bitrate=700 ! video/x-h264,width=848,height=480,framerate=25/1,stream-format=byte-stream,profile=baseline ! tee name=t\ 19 | # t. ! queue ! tcpclientsink host=172.18.0.3 port=8000 \ 20 | # t. ! queue ! tcpclientsink host=172.18.0.4 port=8000 \ 21 | # t. ! queue ! tcpclientsink host=172.18.0.5 port=8000 \ 22 | # t. ! queue ! tcpclientsink host=172.18.0.6 port=8000 23 | 24 | # TODO socat and tee example 25 | -------------------------------------------------------------------------------- /visio.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Visio webcam 3 | After=network.target 4 | 5 | [Service] 6 | Type=simple 7 | ExecStart=/root/visio/raspi.sh 8 | Restart=always 9 | RestartSec=3 10 | 11 | [Install] 12 | WantedBy=multi-user.target 13 | --------------------------------------------------------------------------------
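raspi.sh leaves the socat-and-tee fan-out as a TODO. A minimal sketch of what the Dockerfile comments describe — duplicating one raspivid stream to every swarm worker — assuming four workers reachable at the 172.18.0.x addresses from the gst-launch example (bash is required for the process substitutions):

```bash
#!/bin/bash
# fan a single raspivid stream out to four workers with tee and socat
raspivid -w 848 -h 480 -t 0 -fps 25 -ih -b 700000 -pf baseline -o - | tee \
  >(socat - tcp:172.18.0.3:8000) \
  >(socat - tcp:172.18.0.4:8000) \
  >(socat - tcp:172.18.0.5:8000) | socat - tcp:172.18.0.6:8000
```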