├── LICENSE
├── README.md
├── stream_ffmpeg_hls_dash
│   ├── dash.js
│   ├── hls.js
│   ├── index.html
│   ├── run.sh
│   └── server.py
├── stream_picamera_h264
│   ├── Decoder.js
│   ├── PiCamera_H264_Server.py
│   ├── Player.js
│   ├── YUVCanvas.js
│   ├── avc.wasm
│   └── index.html
└── stream_picamera_mjpeg
    ├── Picamera_MJPG_Server.py
    ├── index.html
    └── run.sh
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 vuquangtrong
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # pi_streaming
2 | Stream live camera on Raspberry Pi using HLS, MPEG-DASH, MJPEG (MJPG), and H264.
3 |
4 | ## HLS/ DASH
5 | This streaming method can stream H264 video chunks with some advantages from adaptive bitrate but it has delay of more than 3 seconds.
6 |
7 | Read more at: https://www.codeinsideout.com/blog/pi/stream-ffmpeg-hls-dash/
8 |
9 | ## MJPEG
10 | This can achieve low-latency streaming for video, but it consumes a lot of network bandwidth due to the size of each JPEG frame.
11 |
12 | Read more at: https://www.codeinsideout.com/blog/pi/stream-picamera-mjpeg/
13 |
14 | ## H264
15 | This method streams H264 NAL units to clients so that it can keep low bandwidth and low latency.
16 |
17 | Read more at: https://www.codeinsideout.com/blog/pi/stream-picamera-h264/
18 |
19 | Find more about Raspberry Pi and embedded system at [Code Inside Out](https://www.codeinsideout.com/)
20 |
21 | # License
22 |
23 | MIT License
24 |
--------------------------------------------------------------------------------
/stream_ffmpeg_hls_dash/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | HLS/DASH Live Stream
6 |
7 |
8 | HLS Live Stream
9 |
10 |
11 |
31 | DASH Live Stream
32 |
33 |
34 |
52 |
53 |
--------------------------------------------------------------------------------
/stream_ffmpeg_hls_dash/run.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# vuquangtrong.github.io
#
# Capture H.264 from the camera with ffmpeg, package it as low-latency
# DASH + HLS segments in shared memory, and serve them over HTTP.

# create a folder in shared memory (tmpfs) so segment churn never hits the SD card
mkdir -p /dev/shm/streaming

# link it to current folder unless a valid link already exists
# (bug fix: the original tested a link named "hls" but created one named
# "streaming", so the guard never matched and ln -s failed on every re-run)
if [[ -L streaming && -d $(readlink streaming) ]]; then
    echo ""
else
    ln -s /dev/shm/streaming streaming
fi

# create video segments for HLS and DASH in the background
ffmpeg -y \
    -input_format h264 \
    -f video4linux2 \
    -framerate 25 \
    -use_wallclock_as_timestamps 1 \
    -i /dev/video0 \
    -c:v copy \
    -f dash \
    -ldash 1 \
    -seg_duration 1 \
    -frag_duration 1 \
    -streaming 1 \
    -window_size 30 -remove_at_exit 1 \
    -strict experimental -lhls 1 \
    -hls_playlist 1 -hls_master_name live.m3u8 \
    -utc_timing_url https://time.akamai.com/?iso \
    -write_prft 1 \
    -target_latency 1 \
    /dev/shm/streaming/live.mpd &

# serve the generated playlists/segments on port 8000 (foreground)
python3 server.py
--------------------------------------------------------------------------------
/stream_ffmpeg_hls_dash/server.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3

"""Minimal HTTP server that serves the HLS/DASH playlists and segments
from the current working directory on port 8000."""

import http.server


def main():
    """Serve the current directory on all interfaces, port 8000, until
    interrupted.

    Uses ThreadingHTTPServer so one slow client downloading a segment
    does not block playlist requests from other players.
    """
    server_address = ('', 8000)  # '' = bind all interfaces
    hls_handler = http.server.SimpleHTTPRequestHandler

    httpd = http.server.ThreadingHTTPServer(server_address, hls_handler)
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        # Ctrl-C is the normal way run.sh is stopped; exit quietly
        # instead of dumping a traceback.
        pass
    finally:
        httpd.server_close()  # release the listening socket


if __name__ == "__main__":
    main()
--------------------------------------------------------------------------------
/stream_picamera_h264/Decoder.js:
--------------------------------------------------------------------------------
1 | // universal module definition
2 | (function (root, factory) {
3 | if (typeof define === 'function' && define.amd) {
4 | // AMD. Register as an anonymous module.
5 | define([], factory);
6 | } else if (typeof exports === 'object') {
7 | // Node. Does not work with strict CommonJS, but
8 | // only CommonJS-like environments that support module.exports,
9 | // like Node.
10 | module.exports = factory();
11 | } else {
12 | // Browser globals (root is window)
13 | root.Decoder = factory();
14 | }
15 | }(this, function () {
16 |
// Reference to the environment's global object (window in browsers,
// self in web workers).
var global;

// Capture the global object. This function is invoked as a plain
// (sloppy-mode) call, so `this` is normally the global object already;
// the window/self fallbacks cover loaders where `this` is undefined
// (e.g. strict-mode module wrappers).
function initglobal(){
  global = this;
  if (!global){
    if (typeof window != "undefined"){
      global = window;
    }else if (typeof self != "undefined"){
      global = self;
    };
  };
};
initglobal();
30 |
31 |
// Log an error message together with a stack trace to aid debugging;
// does not throw.
function error(message) {
  console.error(message);
  console.trace();
};
36 |
37 |
// Minimal assertion helper: logs the message (via error) when the
// condition is falsy, but never throws, so decoding continues.
function assert(condition, message) {
  if (!condition) {
    error(message);
  };
};
43 |
44 |
45 |
46 |
47 | var getModule = function(par_broadwayOnHeadersDecoded, par_broadwayOnPictureDecoded){
48 |
49 |
50 | /*var ModuleX = {
51 | 'print': function(text) { console.log('stdout: ' + text); },
52 | 'printErr': function(text) { console.log('stderr: ' + text); }
53 | };*/
54 |
55 |
56 | /*
57 |
58 | The reason why this is all packed into one file is that this file can also function as worker.
59 | you can integrate the file into your build system and provide the original file to be loaded into a worker.
60 |
61 | */
62 |
63 | //var Module = (function(){
64 |
65 |
66 | var Module=typeof Module!=="undefined"?Module:{};var moduleOverrides={};var key;for(key in Module){if(Module.hasOwnProperty(key)){moduleOverrides[key]=Module[key]}}Module["arguments"]=[];Module["thisProgram"]="./this.program";Module["quit"]=(function(status,toThrow){throw toThrow});Module["preRun"]=[];Module["postRun"]=[];var ENVIRONMENT_IS_WEB=false;var ENVIRONMENT_IS_WORKER=false;var ENVIRONMENT_IS_NODE=false;var ENVIRONMENT_IS_SHELL=false;if(Module["ENVIRONMENT"]){if(Module["ENVIRONMENT"]==="WEB"){ENVIRONMENT_IS_WEB=true}else if(Module["ENVIRONMENT"]==="WORKER"){ENVIRONMENT_IS_WORKER=true}else if(Module["ENVIRONMENT"]==="NODE"){ENVIRONMENT_IS_NODE=true}else if(Module["ENVIRONMENT"]==="SHELL"){ENVIRONMENT_IS_SHELL=true}else{throw new Error("Module['ENVIRONMENT'] value is not valid. must be one of: WEB|WORKER|NODE|SHELL.")}}else{ENVIRONMENT_IS_WEB=typeof window==="object";ENVIRONMENT_IS_WORKER=typeof importScripts==="function";ENVIRONMENT_IS_NODE=typeof process==="object"&&typeof null==="function"&&!ENVIRONMENT_IS_WEB&&!ENVIRONMENT_IS_WORKER;ENVIRONMENT_IS_SHELL=!ENVIRONMENT_IS_WEB&&!ENVIRONMENT_IS_NODE&&!ENVIRONMENT_IS_WORKER}if(ENVIRONMENT_IS_NODE){var nodeFS;var nodePath;Module["read"]=function shell_read(filename,binary){var ret;if(!nodeFS)nodeFS=(null)("fs");if(!nodePath)nodePath=(null)("path");filename=nodePath["normalize"](filename);ret=nodeFS["readFileSync"](filename);return binary?ret:ret.toString()};Module["readBinary"]=function readBinary(filename){var ret=Module["read"](filename,true);if(!ret.buffer){ret=new Uint8Array(ret)}assert(ret.buffer);return ret};if(process["argv"].length>1){Module["thisProgram"]=process["argv"][1].replace(/\\/g,"/")}Module["arguments"]=process["argv"].slice(2);if(typeof module!=="undefined"){module["exports"]=Module}process["on"]("uncaughtException",(function(ex){if(!(ex instanceof ExitStatus)){throw 
ex}}));process["on"]("unhandledRejection",(function(reason,p){process["exit"](1)}));Module["inspect"]=(function(){return"[Emscripten Module object]"})}else if(ENVIRONMENT_IS_SHELL){if(typeof read!="undefined"){Module["read"]=function shell_read(f){return read(f)}}Module["readBinary"]=function readBinary(f){var data;if(typeof readbuffer==="function"){return new Uint8Array(readbuffer(f))}data=read(f,"binary");assert(typeof data==="object");return data};if(typeof scriptArgs!="undefined"){Module["arguments"]=scriptArgs}else if(typeof arguments!="undefined"){Module["arguments"]=arguments}if(typeof quit==="function"){Module["quit"]=(function(status,toThrow){quit(status)})}}else if(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER){Module["read"]=function shell_read(url){var xhr=new XMLHttpRequest;xhr.open("GET",url,false);xhr.send(null);return xhr.responseText};if(ENVIRONMENT_IS_WORKER){Module["readBinary"]=function readBinary(url){var xhr=new XMLHttpRequest;xhr.open("GET",url,false);xhr.responseType="arraybuffer";xhr.send(null);return new Uint8Array(xhr.response)}}Module["readAsync"]=function readAsync(url,onload,onerror){var xhr=new XMLHttpRequest;xhr.open("GET",url,true);xhr.responseType="arraybuffer";xhr.onload=function xhr_onload(){if(xhr.status==200||xhr.status==0&&xhr.response){onload(xhr.response);return}onerror()};xhr.onerror=onerror;xhr.send(null)};Module["setWindowTitle"]=(function(title){document.title=title})}else{throw new Error("not compiled for this environment")}Module["print"]=typeof console!=="undefined"?console.log.bind(console):typeof print!=="undefined"?print:null;Module["printErr"]=typeof printErr!=="undefined"?printErr:typeof console!=="undefined"&&console.warn.bind(console)||Module["print"];Module.print=Module["print"];Module.printErr=Module["printErr"];for(key in moduleOverrides){if(moduleOverrides.hasOwnProperty(key)){Module[key]=moduleOverrides[key]}}moduleOverrides=undefined;var STACK_ALIGN=16;function staticAlloc(size){assert(!staticSealed);var 
ret=STATICTOP;STATICTOP=STATICTOP+size+15&-16;return ret}function alignMemory(size,factor){if(!factor)factor=STACK_ALIGN;var ret=size=Math.ceil(size/factor)*factor;return ret}var asm2wasmImports={"f64-rem":(function(x,y){return x%y}),"debugger":(function(){debugger})};var functionPointers=new Array(0);var GLOBAL_BASE=1024;var ABORT=0;var EXITSTATUS=0;function assert(condition,text){if(!condition){abort("Assertion failed: "+text)}}function Pointer_stringify(ptr,length){if(length===0||!ptr)return"";var hasUtf=0;var t;var i=0;while(1){t=HEAPU8[ptr+i>>0];hasUtf|=t;if(t==0&&!length)break;i++;if(length&&i==length)break}if(!length)length=i;var ret="";if(hasUtf<128){var MAX_CHUNK=1024;var curr;while(length>0){curr=String.fromCharCode.apply(String,HEAPU8.subarray(ptr,ptr+Math.min(length,MAX_CHUNK)));ret=ret?ret+curr:curr;ptr+=MAX_CHUNK;length-=MAX_CHUNK}return ret}return UTF8ToString(ptr)}var UTF8Decoder=typeof TextDecoder!=="undefined"?new TextDecoder("utf8"):undefined;function UTF8ArrayToString(u8Array,idx){var endPtr=idx;while(u8Array[endPtr])++endPtr;if(endPtr-idx>16&&u8Array.subarray&&UTF8Decoder){return UTF8Decoder.decode(u8Array.subarray(idx,endPtr))}else{var u0,u1,u2,u3,u4,u5;var str="";while(1){u0=u8Array[idx++];if(!u0)return str;if(!(u0&128)){str+=String.fromCharCode(u0);continue}u1=u8Array[idx++]&63;if((u0&224)==192){str+=String.fromCharCode((u0&31)<<6|u1);continue}u2=u8Array[idx++]&63;if((u0&240)==224){u0=(u0&15)<<12|u1<<6|u2}else{u3=u8Array[idx++]&63;if((u0&248)==240){u0=(u0&7)<<18|u1<<12|u2<<6|u3}else{u4=u8Array[idx++]&63;if((u0&252)==248){u0=(u0&3)<<24|u1<<18|u2<<12|u3<<6|u4}else{u5=u8Array[idx++]&63;u0=(u0&1)<<30|u1<<24|u2<<18|u3<<12|u4<<6|u5}}}if(u0<65536){str+=String.fromCharCode(u0)}else{var ch=u0-65536;str+=String.fromCharCode(55296|ch>>10,56320|ch&1023)}}}}function UTF8ToString(ptr){return UTF8ArrayToString(HEAPU8,ptr)}var UTF16Decoder=typeof TextDecoder!=="undefined"?new TextDecoder("utf-16le"):undefined;var WASM_PAGE_SIZE=65536;var 
ASMJS_PAGE_SIZE=16777216;function alignUp(x,multiple){if(x%multiple>0){x+=multiple-x%multiple}return x}var buffer,HEAP8,HEAPU8,HEAP16,HEAPU16,HEAP32,HEAPU32,HEAPF32,HEAPF64;function updateGlobalBuffer(buf){Module["buffer"]=buffer=buf}function updateGlobalBufferViews(){Module["HEAP8"]=HEAP8=new Int8Array(buffer);Module["HEAP16"]=HEAP16=new Int16Array(buffer);Module["HEAP32"]=HEAP32=new Int32Array(buffer);Module["HEAPU8"]=HEAPU8=new Uint8Array(buffer);Module["HEAPU16"]=HEAPU16=new Uint16Array(buffer);Module["HEAPU32"]=HEAPU32=new Uint32Array(buffer);Module["HEAPF32"]=HEAPF32=new Float32Array(buffer);Module["HEAPF64"]=HEAPF64=new Float64Array(buffer)}var STATIC_BASE,STATICTOP,staticSealed;var STACK_BASE,STACKTOP,STACK_MAX;var DYNAMIC_BASE,DYNAMICTOP_PTR;STATIC_BASE=STATICTOP=STACK_BASE=STACKTOP=STACK_MAX=DYNAMIC_BASE=DYNAMICTOP_PTR=0;staticSealed=false;function abortOnCannotGrowMemory(){abort("Cannot enlarge memory arrays. Either (1) compile with -s TOTAL_MEMORY=X with X higher than the current value "+TOTAL_MEMORY+", (2) compile with -s ALLOW_MEMORY_GROWTH=1 which allows increasing the size at runtime, or (3) if you want malloc to return NULL (0) instead of this abort, compile with -s ABORTING_MALLOC=0 ")}function enlargeMemory(){abortOnCannotGrowMemory()}var TOTAL_STACK=Module["TOTAL_STACK"]||5242880;var TOTAL_MEMORY=Module["TOTAL_MEMORY"]||52428800;if(TOTAL_MEMORY0){var callback=callbacks.shift();if(typeof callback=="function"){callback();continue}var func=callback.func;if(typeof func==="number"){if(callback.arg===undefined){Module["dynCall_v"](func)}else{Module["dynCall_vi"](func,callback.arg)}}else{func(callback.arg===undefined?null:callback.arg)}}}var __ATPRERUN__=[];var __ATINIT__=[];var __ATMAIN__=[];var __ATEXIT__=[];var __ATPOSTRUN__=[];var runtimeInitialized=false;var runtimeExited=false;function preRun(){if(Module["preRun"]){if(typeof 
Module["preRun"]=="function")Module["preRun"]=[Module["preRun"]];while(Module["preRun"].length){addOnPreRun(Module["preRun"].shift())}}callRuntimeCallbacks(__ATPRERUN__)}function ensureInitRuntime(){if(runtimeInitialized)return;runtimeInitialized=true;callRuntimeCallbacks(__ATINIT__)}function preMain(){callRuntimeCallbacks(__ATMAIN__)}function exitRuntime(){callRuntimeCallbacks(__ATEXIT__);runtimeExited=true}function postRun(){if(Module["postRun"]){if(typeof Module["postRun"]=="function")Module["postRun"]=[Module["postRun"]];while(Module["postRun"].length){addOnPostRun(Module["postRun"].shift())}}callRuntimeCallbacks(__ATPOSTRUN__)}function addOnPreRun(cb){__ATPRERUN__.unshift(cb)}function addOnPostRun(cb){__ATPOSTRUN__.unshift(cb)}var Math_abs=Math.abs;var Math_cos=Math.cos;var Math_sin=Math.sin;var Math_tan=Math.tan;var Math_acos=Math.acos;var Math_asin=Math.asin;var Math_atan=Math.atan;var Math_atan2=Math.atan2;var Math_exp=Math.exp;var Math_log=Math.log;var Math_sqrt=Math.sqrt;var Math_ceil=Math.ceil;var Math_floor=Math.floor;var Math_pow=Math.pow;var Math_imul=Math.imul;var Math_fround=Math.fround;var Math_round=Math.round;var Math_min=Math.min;var Math_max=Math.max;var Math_clz32=Math.clz32;var Math_trunc=Math.trunc;var runDependencies=0;var runDependencyWatcher=null;var dependenciesFulfilled=null;function addRunDependency(id){runDependencies++;if(Module["monitorRunDependencies"]){Module["monitorRunDependencies"](runDependencies)}}function removeRunDependency(id){runDependencies--;if(Module["monitorRunDependencies"]){Module["monitorRunDependencies"](runDependencies)}if(runDependencies==0){if(runDependencyWatcher!==null){clearInterval(runDependencyWatcher);runDependencyWatcher=null}if(dependenciesFulfilled){var callback=dependenciesFulfilled;dependenciesFulfilled=null;callback()}}}Module["preloadedImages"]={};Module["preloadedAudios"]={};var dataURIPrefix="data:application/octet-stream;base64,";function isDataURI(filename){return 
String.prototype.startsWith?filename.startsWith(dataURIPrefix):filename.indexOf(dataURIPrefix)===0}function integrateWasmJS(){var wasmTextFile="avc.wast";var wasmBinaryFile="avc.wasm";var asmjsCodeFile="avc.temp.asm.js";if(typeof Module["locateFile"]==="function"){if(!isDataURI(wasmTextFile)){wasmTextFile=Module["locateFile"](wasmTextFile)}if(!isDataURI(wasmBinaryFile)){wasmBinaryFile=Module["locateFile"](wasmBinaryFile)}if(!isDataURI(asmjsCodeFile)){asmjsCodeFile=Module["locateFile"](asmjsCodeFile)}}var wasmPageSize=64*1024;var info={"global":null,"env":null,"asm2wasm":asm2wasmImports,"parent":Module};var exports=null;function mergeMemory(newBuffer){var oldBuffer=Module["buffer"];if(newBuffer.byteLength>2];return ret}),getStr:(function(){var ret=Pointer_stringify(SYSCALLS.get());return ret}),get64:(function(){var low=SYSCALLS.get(),high=SYSCALLS.get();if(low>=0)assert(high===0);else assert(high===-1);return low}),getZero:(function(){assert(SYSCALLS.get()===0)})};function ___syscall140(which,varargs){SYSCALLS.varargs=varargs;try{var stream=SYSCALLS.getStreamFromFD(),offset_high=SYSCALLS.get(),offset_low=SYSCALLS.get(),result=SYSCALLS.get(),whence=SYSCALLS.get();var offset=offset_low;FS.llseek(stream,offset,whence);HEAP32[result>>2]=stream.position;if(stream.getdents&&offset===0&&whence===0)stream.getdents=null;return 0}catch(e){if(typeof FS==="undefined"||!(e instanceof FS.ErrnoError))abort(e);return-e.errno}}function ___syscall146(which,varargs){SYSCALLS.varargs=varargs;try{var stream=SYSCALLS.get(),iov=SYSCALLS.get(),iovcnt=SYSCALLS.get();var ret=0;if(!___syscall146.buffers){___syscall146.buffers=[null,[],[]];___syscall146.printChar=(function(stream,curr){var buffer=___syscall146.buffers[stream];assert(buffer);if(curr===0||curr===10){(stream===1?Module["print"]:Module["printErr"])(UTF8ArrayToString(buffer,0));buffer.length=0}else{buffer.push(curr)}})}for(var i=0;i>2];var len=HEAP32[iov+(i*8+4)>>2];for(var j=0;j>2]=value;return 
value}DYNAMICTOP_PTR=staticAlloc(4);STACK_BASE=STACKTOP=alignMemory(STATICTOP);STACK_MAX=STACK_BASE+TOTAL_STACK;DYNAMIC_BASE=alignMemory(STACK_MAX);HEAP32[DYNAMICTOP_PTR>>2]=DYNAMIC_BASE;staticSealed=true;Module["wasmTableSize"]=10;Module["wasmMaxTableSize"]=10;Module.asmGlobalArg={};Module.asmLibraryArg={"abort":abort,"enlargeMemory":enlargeMemory,"getTotalMemory":getTotalMemory,"abortOnCannotGrowMemory":abortOnCannotGrowMemory,"___setErrNo":___setErrNo,"___syscall140":___syscall140,"___syscall146":___syscall146,"___syscall54":___syscall54,"___syscall6":___syscall6,"_broadwayOnHeadersDecoded":_broadwayOnHeadersDecoded,"_broadwayOnPictureDecoded":_broadwayOnPictureDecoded,"_emscripten_memcpy_big":_emscripten_memcpy_big,"DYNAMICTOP_PTR":DYNAMICTOP_PTR,"STACKTOP":STACKTOP};var asm=Module["asm"](Module.asmGlobalArg,Module.asmLibraryArg,buffer);Module["asm"]=asm;var _broadwayCreateStream=Module["_broadwayCreateStream"]=(function(){return Module["asm"]["_broadwayCreateStream"].apply(null,arguments)});var _broadwayExit=Module["_broadwayExit"]=(function(){return Module["asm"]["_broadwayExit"].apply(null,arguments)});var _broadwayGetMajorVersion=Module["_broadwayGetMajorVersion"]=(function(){return Module["asm"]["_broadwayGetMajorVersion"].apply(null,arguments)});var _broadwayGetMinorVersion=Module["_broadwayGetMinorVersion"]=(function(){return Module["asm"]["_broadwayGetMinorVersion"].apply(null,arguments)});var _broadwayInit=Module["_broadwayInit"]=(function(){return Module["asm"]["_broadwayInit"].apply(null,arguments)});var _broadwayPlayStream=Module["_broadwayPlayStream"]=(function(){return Module["asm"]["_broadwayPlayStream"].apply(null,arguments)});Module["asm"]=asm;function ExitStatus(status){this.name="ExitStatus";this.message="Program terminated with exit("+status+")";this.status=status}ExitStatus.prototype=new Error;ExitStatus.prototype.constructor=ExitStatus;var initialStackTop;dependenciesFulfilled=function 
runCaller(){if(!Module["calledRun"])run();if(!Module["calledRun"])dependenciesFulfilled=runCaller};function run(args){args=args||Module["arguments"];if(runDependencies>0){return}preRun();if(runDependencies>0)return;if(Module["calledRun"])return;function doRun(){if(Module["calledRun"])return;Module["calledRun"]=true;if(ABORT)return;ensureInitRuntime();preMain();if(Module["onRuntimeInitialized"])Module["onRuntimeInitialized"]();postRun()}if(Module["setStatus"]){Module["setStatus"]("Running...");setTimeout((function(){setTimeout((function(){Module["setStatus"]("")}),1);doRun()}),1)}else{doRun()}}Module["run"]=run;function exit(status,implicit){if(implicit&&Module["noExitRuntime"]&&status===0){return}if(Module["noExitRuntime"]){}else{ABORT=true;EXITSTATUS=status;STACKTOP=initialStackTop;exitRuntime();if(Module["onExit"])Module["onExit"](status)}if(ENVIRONMENT_IS_NODE){process["exit"](status)}Module["quit"](status,new ExitStatus(status))}Module["exit"]=exit;function abort(what){if(Module["onAbort"]){Module["onAbort"](what)}if(what!==undefined){Module.print(what);Module.printErr(what);what=JSON.stringify(what)}else{what=""}ABORT=true;EXITSTATUS=1;throw"abort("+what+"). Build with -s ASSERTIONS=1 for more info."}Module["abort"]=abort;if(Module["preInit"]){if(typeof Module["preInit"]=="function")Module["preInit"]=[Module["preInit"]];while(Module["preInit"].length>0){Module["preInit"].pop()()}}Module["noExitRuntime"]=true;run()
67 |
68 |
69 |
70 | // return Module;
71 | //})();
72 |
73 | var resultModule;
74 | if (typeof global !== "undefined"){
75 | if (global.Module){
76 | resultModule = global.Module;
77 | };
78 | };
79 | if (typeof Module != "undefined"){
80 | resultModule = Module;
81 | };
82 |
83 | resultModule._broadwayOnHeadersDecoded = par_broadwayOnHeadersDecoded;
84 | resultModule._broadwayOnPictureDecoded = par_broadwayOnPictureDecoded;
85 |
86 | var moduleIsReady = false;
87 | var cbFun;
88 | var moduleReady = function(){
89 | moduleIsReady = true;
90 | if (cbFun){
91 | cbFun(resultModule);
92 | }
93 | };
94 |
95 | resultModule.onRuntimeInitialized = function(){
96 | moduleReady(resultModule);
97 | };
98 | return function(callback){
99 | if (moduleIsReady){
100 | callback(resultModule);
101 | }else{
102 | cbFun = callback;
103 | };
104 | };
105 | };
106 |
107 | return (function(){
108 | "use strict";
109 |
110 |
// Millisecond timestamp source used to stamp decode timing info
// (startDecoding / finishDecoding).
var nowValue = function(){
  return (new Date()).getTime();
};

// Prefer performance.now() when available: sub-millisecond resolution
// and not affected by system clock adjustments.
if (typeof performance != "undefined"){
  if (performance.now){
    nowValue = function(){
      return performance.now();
    };
  };
};
122 |
123 |
124 | var Decoder = function(parOptions){
125 | this.options = parOptions || {};
126 |
127 | this.now = nowValue;
128 |
129 | var asmInstance;
130 |
131 | var fakeWindow = {
132 | };
133 |
134 | var toU8Array;
135 | var toU32Array;
136 |
137 | var onPicFun = function ($buffer, width, height) {
138 | var buffer = this.pictureBuffers[$buffer];
139 | if (!buffer) {
140 | buffer = this.pictureBuffers[$buffer] = toU8Array($buffer, (width * height * 3) / 2);
141 | };
142 |
143 | var infos;
144 | var doInfo = false;
145 | if (this.infoAr.length){
146 | doInfo = true;
147 | infos = this.infoAr;
148 | };
149 | this.infoAr = [];
150 |
151 | if (this.options.rgb){
152 | if (!asmInstance){
153 | asmInstance = getAsm(width, height);
154 | };
155 | asmInstance.inp.set(buffer);
156 | asmInstance.doit();
157 |
158 | var copyU8 = new Uint8Array(asmInstance.outSize);
159 | copyU8.set( asmInstance.out );
160 |
161 | if (doInfo){
162 | infos[0].finishDecoding = nowValue();
163 | };
164 |
165 | this.onPictureDecoded(copyU8, width, height, infos);
166 | return;
167 |
168 | };
169 |
170 | if (doInfo){
171 | infos[0].finishDecoding = nowValue();
172 | };
173 | this.onPictureDecoded(buffer, width, height, infos);
174 | }.bind(this);
175 |
176 | var ignore = false;
177 |
178 | if (this.options.sliceMode){
179 | onPicFun = function ($buffer, width, height, $sliceInfo) {
180 | if (ignore){
181 | return;
182 | };
183 | var buffer = this.pictureBuffers[$buffer];
184 | if (!buffer) {
185 | buffer = this.pictureBuffers[$buffer] = toU8Array($buffer, (width * height * 3) / 2);
186 | };
187 | var sliceInfo = this.pictureBuffers[$sliceInfo];
188 | if (!sliceInfo) {
189 | sliceInfo = this.pictureBuffers[$sliceInfo] = toU32Array($sliceInfo, 18);
190 | };
191 |
192 | var infos;
193 | var doInfo = false;
194 | if (this.infoAr.length){
195 | doInfo = true;
196 | infos = this.infoAr;
197 | };
198 | this.infoAr = [];
199 |
200 | /*if (this.options.rgb){
201 |
202 | no rgb in slice mode
203 |
204 | };*/
205 |
206 | infos[0].finishDecoding = nowValue();
207 | var sliceInfoAr = [];
208 | for (var i = 0; i < 20; ++i){
209 | sliceInfoAr.push(sliceInfo[i]);
210 | };
211 | infos[0].sliceInfoAr = sliceInfoAr;
212 |
213 | this.onPictureDecoded(buffer, width, height, infos);
214 | }.bind(this);
215 | };
216 |
217 | var ModuleCallback = getModule.apply(fakeWindow, [function () {
218 | }, onPicFun]);
219 |
220 |
221 | var MAX_STREAM_BUFFER_LENGTH = 1024 * 1024;
222 |
223 | var instance = this;
224 | this.onPictureDecoded = function (buffer, width, height, infos) {
225 |
226 | };
227 |
228 | this.onDecoderReady = function(){};
229 |
230 | var bufferedCalls = [];
231 | this.decode = function decode(typedAr, parInfo, copyDoneFun) {
232 | bufferedCalls.push([typedAr, parInfo, copyDoneFun]);
233 | };
234 |
235 | ModuleCallback(function(Module){
236 | var HEAP8 = Module.HEAP8;
237 | var HEAPU8 = Module.HEAPU8;
238 | var HEAP16 = Module.HEAP16;
239 | var HEAP32 = Module.HEAP32;
240 | // from old constructor
241 | Module._broadwayInit();
242 |
243 | /**
244 | * Creates a typed array from a HEAP8 pointer.
245 | */
246 | toU8Array = function(ptr, length) {
247 | return HEAPU8.subarray(ptr, ptr + length);
248 | };
249 | toU32Array = function(ptr, length) {
250 | //var tmp = HEAPU8.subarray(ptr, ptr + (length * 4));
251 | return new Uint32Array(HEAPU8.buffer, ptr, length);
252 | };
253 | instance.streamBuffer = toU8Array(Module._broadwayCreateStream(MAX_STREAM_BUFFER_LENGTH), MAX_STREAM_BUFFER_LENGTH);
254 | instance.pictureBuffers = {};
255 | // collect extra infos that are provided with the nal units
256 | instance.infoAr = [];
257 |
258 | /**
259 | * Decodes a stream buffer. This may be one single (unframed) NAL unit without the
260 | * start code, or a sequence of NAL units with framing start code prefixes. This
261 | * function overwrites stream buffer allocated by the codec with the supplied buffer.
262 | */
263 |
264 | var sliceNum = 0;
265 | if (instance.options.sliceMode){
266 | sliceNum = instance.options.sliceNum;
267 |
268 | instance.decode = function decode(typedAr, parInfo, copyDoneFun) {
269 | instance.infoAr.push(parInfo);
270 | parInfo.startDecoding = nowValue();
271 | var nals = parInfo.nals;
272 | var i;
273 | if (!nals){
274 | nals = [];
275 | parInfo.nals = nals;
276 | var l = typedAr.length;
277 | var foundSomething = false;
278 | var lastFound = 0;
279 | var lastStart = 0;
280 | for (i = 0; i < l; ++i){
281 | if (typedAr[i] === 1){
282 | if (
283 | typedAr[i - 1] === 0 &&
284 | typedAr[i - 2] === 0
285 | ){
286 | var startPos = i - 2;
287 | if (typedAr[i - 3] === 0){
288 | startPos = i - 3;
289 | };
290 | // its a nal;
291 | if (foundSomething){
292 | nals.push({
293 | offset: lastFound,
294 | end: startPos,
295 | type: typedAr[lastStart] & 31
296 | });
297 | };
298 | lastFound = startPos;
299 | lastStart = startPos + 3;
300 | if (typedAr[i - 3] === 0){
301 | lastStart = startPos + 4;
302 | };
303 | foundSomething = true;
304 | };
305 | };
306 | };
307 | if (foundSomething){
308 | nals.push({
309 | offset: lastFound,
310 | end: i,
311 | type: typedAr[lastStart] & 31
312 | });
313 | };
314 | };
315 |
316 | var currentSlice = 0;
317 | var playAr;
318 | var offset = 0;
319 | for (i = 0; i < nals.length; ++i){
320 | if (nals[i].type === 1 || nals[i].type === 5){
321 | if (currentSlice === sliceNum){
322 | playAr = typedAr.subarray(nals[i].offset, nals[i].end);
323 | instance.streamBuffer[offset] = 0;
324 | offset += 1;
325 | instance.streamBuffer.set(playAr, offset);
326 | offset += playAr.length;
327 | };
328 | currentSlice += 1;
329 | }else{
330 | playAr = typedAr.subarray(nals[i].offset, nals[i].end);
331 | instance.streamBuffer[offset] = 0;
332 | offset += 1;
333 | instance.streamBuffer.set(playAr, offset);
334 | offset += playAr.length;
335 | Module._broadwayPlayStream(offset);
336 | offset = 0;
337 | };
338 | };
339 | copyDoneFun();
340 | Module._broadwayPlayStream(offset);
341 | };
342 |
343 | }else{
344 | instance.decode = function decode(typedAr, parInfo) {
345 | // console.info("Decoding: " + buffer.length);
346 | // collect infos
347 | if (parInfo){
348 | instance.infoAr.push(parInfo);
349 | parInfo.startDecoding = nowValue();
350 | };
351 |
352 | instance.streamBuffer.set(typedAr);
353 | Module._broadwayPlayStream(typedAr.length);
354 | };
355 | };
356 |
357 | if (bufferedCalls.length){
358 | var bi = 0;
359 | for (bi = 0; bi < bufferedCalls.length; ++bi){
360 | instance.decode(bufferedCalls[bi][0], bufferedCalls[bi][1], bufferedCalls[bi][2]);
361 | };
362 | bufferedCalls = [];
363 | };
364 |
365 | instance.onDecoderReady(instance);
366 |
367 | });
368 |
369 |
370 | };
371 |
372 |
// Intentionally empty: all per-instance behavior (decode, callbacks,
// buffers) is assigned inside the Decoder constructor.
Decoder.prototype = {

};
376 |
377 |
378 |
379 |
380 | /*
381 |
382 | asm.js implementation of a yuv to rgb convertor
383 | provided by @soliton4
384 |
385 | based on
386 | http://www.wordsaretoys.com/2013/10/18/making-yuv-conversion-a-little-faster/
387 |
388 | */
389 |
390 |
// factory to create asm.js yuv -> rgb convertor for a given resolution
// Instances are memoized per "WxH" resolution string.
var asmInstances = {};
var getAsm = function(parWidth, parHeight){
  var idStr = "" + parWidth + "x" + parHeight;
  if (asmInstances[idStr]){
    return asmInstances[idStr];
  };

  // YUV 4:2:0 layout: full-resolution luma plane plus two quarter-size
  // chroma planes.
  var lumaSize = parWidth * parHeight;
  var chromaSize = (lumaSize|0) >> 2;

  var inpSize = lumaSize + chromaSize + chromaSize;
  var outSize = parWidth * parHeight * 4; // RGBA output, 4 bytes per pixel
  // Conversion cache: one 32-bit RGBA word per possible (y<<16|u<<8|v)
  // key, i.e. 2^24 entries * 4 bytes = 64 MB.
  var cacheSize = Math.pow(2, 24) * 4;
  var size = inpSize + outSize + cacheSize;

  // Round the heap up to a multiple of 16 MB to satisfy the total size.
  var chunkSize = Math.pow(2, 24);
  var heapSize = chunkSize;
  while (heapSize < size){
    heapSize += chunkSize;
  };
  var heap = new ArrayBuffer(heapSize);

  var res = asmFactory(global, {}, heap);
  res.init(parWidth, parHeight);
  asmInstances[idStr] = res;

  // Views into the shared heap; layout is [out | inp | cache] as
  // established by init().
  res.heap = heap;
  res.out = new Uint8Array(heap, 0, outSize);
  res.inp = new Uint8Array(heap, outSize, inpSize);
  res.outSize = outSize;

  return res;
};
425 |
426 |
// asm.js module that converts one planar YUV420 frame to RGBA pixels.
// The shared heap is partitioned as [out: RGBA frame | inp: YUV planes | cache];
// the cache memoizes one converted uint32 pixel per (y,u,v) triple so repeated
// colors skip the fixed-point math in yuv2rgbcalc().
function asmFactory(stdlib, foreign, heap) {
  "use asm";

  var imul = stdlib.Math.imul;
  var min = stdlib.Math.min;
  var max = stdlib.Math.max;
  var pow = stdlib.Math.pow;
  var out = new stdlib.Uint8Array(heap);
  var out32 = new stdlib.Uint32Array(heap);
  var inp = new stdlib.Uint8Array(heap);
  var mem = new stdlib.Uint8Array(heap);
  var mem32 = new stdlib.Uint32Array(heap);

  // for double algo
  /*var vt = 1.370705;
  var gt = 0.698001;
  var gt2 = 0.337633;
  var bt = 1.732446;*/

  // frame geometry and heap-section offsets; all assigned by init()
  var width = 0;
  var height = 0;
  var lumaSize = 0;
  var chromaSize = 0;
  var inpSize = 0;
  var outSize = 0;

  var inpStart = 0;
  var outStart = 0;

  // bytes per output row (width * 4 RGBA bytes)
  var widthFour = 0;

  var cacheStart = 0;


  // Computes plane sizes and heap offsets for a width x height frame and
  // zeroes the conversion cache (a zero entry means "not computed" in doit()).
  function init(parWidth, parHeight){
    parWidth = parWidth|0;
    parHeight = parHeight|0;

    var i = 0;
    var s = 0;

    width = parWidth;
    widthFour = imul(parWidth, 4)|0;
    height = parHeight;
    lumaSize = imul(width|0, height|0)|0;
    chromaSize = (lumaSize|0) >> 2;
    outSize = imul(imul(width, height)|0, 4)|0;
    inpSize = ((lumaSize + chromaSize)|0 + chromaSize)|0;

    outStart = 0;
    inpStart = (outStart + outSize)|0;
    cacheStart = (inpStart + inpSize)|0;

    // initializing memory (to be on the safe side)
    s = ~~(+pow(+2, +24));
    s = imul(s, 4)|0;

    for (i = 0|0; ((i|0) < (s|0))|0; i = (i + 4)|0){
      mem32[((cacheStart + i)|0) >> 2] = 0;
    };
  };

  // Converts the whole frame, one 2x2 pixel block per inner iteration: two
  // pixels of the current row plus the two directly below share one (u, v)
  // chroma sample (YUV420 subsampling). Each result is a uint32 RGBA pixel.
  // NOTE(review): cacheAdr = (y<<16)+(u<<8)+v is used as a *byte* offset even
  // though the cache was sized for 4-byte entries -- triples differing only
  // in v's low two bits can share a slot; confirm against upstream Broadway.
  function doit(){
    var ystart = 0;
    var ustart = 0;
    var vstart = 0;

    var y = 0;
    var yn = 0;
    var u = 0;
    var v = 0;

    var o = 0;

    var line = 0;
    var col = 0;

    var usave = 0;
    var vsave = 0;

    var ostart = 0;
    var cacheAdr = 0;

    ostart = outStart|0;

    ystart = inpStart|0;
    ustart = (ystart + lumaSize|0)|0;
    vstart = (ustart + chromaSize)|0;

    for (line = 0; (line|0) < (height|0); line = (line + 2)|0){
      usave = ustart;
      vsave = vstart;
      for (col = 0; (col|0) < (width|0); col = (col + 2)|0){
        y = inp[ystart >> 0]|0;
        yn = inp[((ystart + width)|0) >> 0]|0;

        u = inp[ustart >> 0]|0;
        v = inp[vstart >> 0]|0;

        // top-left pixel: consult cache, compute + fill on miss (entry 0)
        cacheAdr = (((((y << 16)|0) + ((u << 8)|0))|0) + v)|0;
        o = mem32[((cacheStart + cacheAdr)|0) >> 2]|0;
        if (o){}else{
          o = yuv2rgbcalc(y,u,v)|0;
          mem32[((cacheStart + cacheAdr)|0) >> 2] = o|0;
        };
        mem32[ostart >> 2] = o;

        // bottom-left pixel (row below, same chroma)
        cacheAdr = (((((yn << 16)|0) + ((u << 8)|0))|0) + v)|0;
        o = mem32[((cacheStart + cacheAdr)|0) >> 2]|0;
        if (o){}else{
          o = yuv2rgbcalc(yn,u,v)|0;
          mem32[((cacheStart + cacheAdr)|0) >> 2] = o|0;
        };
        mem32[((ostart + widthFour)|0) >> 2] = o;

        //yuv2rgb5(y, u, v, ostart);
        //yuv2rgb5(yn, u, v, (ostart + widthFour)|0);
        ostart = (ostart + 4)|0;

        // next step only for y. u and v stay the same
        ystart = (ystart + 1)|0;
        y = inp[ystart >> 0]|0;
        yn = inp[((ystart + width)|0) >> 0]|0;

        //yuv2rgb5(y, u, v, ostart);
        // top-right pixel
        cacheAdr = (((((y << 16)|0) + ((u << 8)|0))|0) + v)|0;
        o = mem32[((cacheStart + cacheAdr)|0) >> 2]|0;
        if (o){}else{
          o = yuv2rgbcalc(y,u,v)|0;
          mem32[((cacheStart + cacheAdr)|0) >> 2] = o|0;
        };
        mem32[ostart >> 2] = o;

        //yuv2rgb5(yn, u, v, (ostart + widthFour)|0);
        // bottom-right pixel
        cacheAdr = (((((yn << 16)|0) + ((u << 8)|0))|0) + v)|0;
        o = mem32[((cacheStart + cacheAdr)|0) >> 2]|0;
        if (o){}else{
          o = yuv2rgbcalc(yn,u,v)|0;
          mem32[((cacheStart + cacheAdr)|0) >> 2] = o|0;
        };
        mem32[((ostart + widthFour)|0) >> 2] = o;
        ostart = (ostart + 4)|0;

        //all positions inc 1

        ystart = (ystart + 1)|0;
        ustart = (ustart + 1)|0;
        vstart = (vstart + 1)|0;
      };
      // skip the row already written as "row below", and its luma bytes
      ostart = (ostart + widthFour)|0;
      ystart = (ystart + width)|0;

    };

  };

  // Fixed-point YUV -> RGB for one pixel (coefficients scaled by 1024,
  // BT.601-style video-range conversion: 1192/1024 ~= 1.164 etc.), with
  // clamping to [0, 255]. Packs 0xFF,b,g,r into a uint32; the alpha byte
  // guarantees a non-zero result, so 0 can safely mark an empty cache slot.
  // Stored through mem32 this yields R,G,B,A byte order on little-endian
  // hosts (the practical case for typed arrays).
  function yuv2rgbcalc(y, u, v){
    y = y|0;
    u = u|0;
    v = v|0;

    var r = 0;
    var g = 0;
    var b = 0;

    var o = 0;

    var a0 = 0;
    var a1 = 0;
    var a2 = 0;
    var a3 = 0;
    var a4 = 0;

    a0 = imul(1192, (y - 16)|0)|0;
    a1 = imul(1634, (v - 128)|0)|0;
    a2 = imul(832, (v - 128)|0)|0;
    a3 = imul(400, (u - 128)|0)|0;
    a4 = imul(2066, (u - 128)|0)|0;

    r = (((a0 + a1)|0) >> 10)|0;
    g = (((((a0 - a2)|0) - a3)|0) >> 10)|0;
    b = (((a0 + a4)|0) >> 10)|0;

    // clamp only when the value falls outside one unsigned byte
    if ((((r & 255)|0) != (r|0))|0){
      r = min(255, max(0, r|0)|0)|0;
    };
    if ((((g & 255)|0) != (g|0))|0){
      g = min(255, max(0, g|0)|0)|0;
    };
    if ((((b & 255)|0) != (b|0))|0){
      b = min(255, max(0, b|0)|0)|0;
    };

    o = 255;
    o = (o << 8)|0;
    o = (o + b)|0;
    o = (o << 8)|0;
    o = (o + g)|0;
    o = (o << 8)|0;
    o = (o + r)|0;

    return o|0;

  };



  return {
    init: init,
    doit: doit
  };
};
639 |
640 |
641 | /*
642 | potential worker initialization
643 |
644 | */
645 |
646 |
// Web Worker wiring: when this script runs inside a worker, the host page
// first posts a "Broadway.js - Worker init" message; afterwards each message
// carries either an encoded buffer to decode ({buf}), a decoded slice from a
// sibling slice-worker ({slice}), a recycled output buffer ({reuse}), or a
// slice-count update ({setSliceCnt}).
if (typeof self != "undefined"){
  var isWorker = false;       // flips true once the init message arrives
  var decoder;
  var reuseMemory = false;
  var sliceMode = false;
  var sliceNum = 0;
  var sliceCnt = 0;
  var lastSliceNum = 0;
  var sliceInfoAr;
  var lastBuf;
  var awaiting = 0;           // sibling slices still expected for current frame
  var pile = [];              // decode requests queued while slices are awaited
  var startDecoding;
  var finishDecoding;
  var timeDecoding;

  // Pool of returned ArrayBuffers: hands back one of exactly `length` bytes,
  // or allocates a fresh buffer. Pooled buffers of other sizes encountered
  // while searching are dropped from the pool.
  var memAr = [];
  var getMem = function(length){
    if (memAr.length){
      var u = memAr.shift();
      while (u && u.byteLength !== length){
        u = memAr.shift();
      };
      if (u){
        return u;
      };
    };
    return new ArrayBuffer(length);
  };

  // Copies the regions described by infoAr from `source` into `target`
  // (both YUV420 buffers of the same layout). Judging by the index pattern:
  // two pairs of 16-row luma / 8-row chroma regions, then three whole-range
  // chunks -- offsets come from the decoder's sliceInfoAr; confirm there.
  var copySlice = function(source, target, infoAr, width, height){

    var length = width * height;
    var length4 = length / 4
    var plane2 = length;
    var plane3 = length + length4;

    // copy 16 consecutive luma rows starting at parBegin..parEnd
    var copy16 = function(parBegin, parEnd){
      var i = 0;
      for (i = 0; i < 16; ++i){
        var begin = parBegin + (width * i);
        var end = parEnd + (width * i)
        target.set(source.subarray(begin, end), begin);
      };
    };
    // copy 8 consecutive chroma rows (half width)
    var copy8 = function(parBegin, parEnd){
      var i = 0;
      for (i = 0; i < 8; ++i){
        var begin = parBegin + ((width / 2) * i);
        var end = parEnd + ((width / 2) * i)
        target.set(source.subarray(begin, end), begin);
      };
    };
    var copyChunk = function(begin, end){
      target.set(source.subarray(begin, end), begin);
    };

    var begin = infoAr[0];
    var end = infoAr[1];
    if (end > 0){
      copy16(begin, end);
      copy8(infoAr[2], infoAr[3]);
      copy8(infoAr[4], infoAr[5]);
    };
    begin = infoAr[6];
    end = infoAr[7];
    if (end > 0){
      copy16(begin, end);
      copy8(infoAr[8], infoAr[9]);
      copy8(infoAr[10], infoAr[11]);
    };

    begin = infoAr[12];
    end = infoAr[15];
    if (end > 0){
      copyChunk(begin, end);
      copyChunk(infoAr[13], infoAr[16]);
      copyChunk(infoAr[14], infoAr[17]);
    };

  };

  // unused placeholder
  var sliceMsgFun = function(){};

  // Records the total number of slice workers; the last one has index cnt-1.
  var setSliceCnt = function(parSliceCnt){
    sliceCnt = parSliceCnt;
    lastSliceNum = sliceCnt - 1;
  };


  self.addEventListener('message', function(e) {

    if (isWorker){
      // host returned an output buffer for reuse
      if (reuseMemory){
        if (e.data.reuse){
          memAr.push(e.data.reuse);
        };
      };
      // encoded data to decode; defer while sibling slices are outstanding
      if (e.data.buf){
        if (sliceMode && awaiting !== 0){
          pile.push(e.data);
        }else{
          decoder.decode(
            new Uint8Array(e.data.buf, e.data.offset || 0, e.data.length),
            e.data.info,
            function(){
              // non-last slice workers pass the input buffer onward
              if (sliceMode && sliceNum !== lastSliceNum){
                postMessage(e.data, [e.data.buf]);
              };
            }
          );
        };
        return;
      };

      if (e.data.slice){
        // update ref pic
        var copyStart = nowValue();
        copySlice(new Uint8Array(e.data.slice), lastBuf, e.data.infos[0].sliceInfoAr, e.data.width, e.data.height);
        // is it the one? then we need to update it
        if (e.data.theOne){
          copySlice(lastBuf, new Uint8Array(e.data.slice), sliceInfoAr, e.data.width, e.data.height);
          if (timeDecoding > e.data.infos[0].timeDecoding){
            e.data.infos[0].timeDecoding = timeDecoding;
          };
          e.data.infos[0].timeCopy += (nowValue() - copyStart);
        };
        // move on
        postMessage(e.data, [e.data.slice]);

        // next frame in the pipe?
        awaiting -= 1;
        if (awaiting === 0 && pile.length){
          var data = pile.shift();
          decoder.decode(
            new Uint8Array(data.buf, data.offset || 0, data.length),
            data.info,
            function(){
              if (sliceMode && sliceNum !== lastSliceNum){
                postMessage(data, [data.buf]);
              };
            }
          );
        };
        return;
      };

      if (e.data.setSliceCnt){
        setSliceCnt(e.data.sliceCnt);
        return;
      };

    }else{
      // first message: configure the decoder and pick an output strategy
      if (e.data && e.data.type === "Broadway.js - Worker init"){
        isWorker = true;
        decoder = new Decoder(e.data.options);

        if (e.data.options.sliceMode){
          reuseMemory = true;
          sliceMode = true;
          sliceNum = e.data.options.sliceNum;
          setSliceCnt(e.data.options.sliceCnt);

          decoder.onPictureDecoded = function (buffer, width, height, infos) {

            // buffer needs to be copied because we give up ownership
            var copyU8 = new Uint8Array(getMem(buffer.length));
            copySlice(buffer, copyU8, infos[0].sliceInfoAr, width, height);

            startDecoding = infos[0].startDecoding;
            finishDecoding = infos[0].finishDecoding;
            timeDecoding = finishDecoding - startDecoding;
            infos[0].timeDecoding = timeDecoding;
            infos[0].timeCopy = 0;

            postMessage({
              slice: copyU8.buffer,
              sliceNum: sliceNum,
              width: width,
              height: height,
              infos: infos
            }, [copyU8.buffer]); // 2nd parameter is used to indicate transfer of ownership

            // expect one slice message from each of the other workers
            awaiting = sliceCnt - 1;

            lastBuf = buffer;
            sliceInfoAr = infos[0].sliceInfoAr;

          };

        }else if (e.data.options.reuseMemory){
          reuseMemory = true;
          decoder.onPictureDecoded = function (buffer, width, height, infos) {

            // buffer needs to be copied because we give up ownership
            var copyU8 = new Uint8Array(getMem(buffer.length));
            copyU8.set( buffer, 0, buffer.length );

            postMessage({
              buf: copyU8.buffer,
              length: buffer.length,
              width: width,
              height: height,
              infos: infos
            }, [copyU8.buffer]); // 2nd parameter is used to indicate transfer of ownership

          };

        }else{
          decoder.onPictureDecoded = function (buffer, width, height, infos) {
            if (buffer) {
              buffer = new Uint8Array(buffer);
            };

            // buffer needs to be copied because we give up ownership
            var copyU8 = new Uint8Array(buffer.length);
            copyU8.set( buffer, 0, buffer.length );

            postMessage({
              buf: copyU8.buffer,
              length: buffer.length,
              width: width,
              height: height,
              infos: infos
            }, [copyU8.buffer]); // 2nd parameter is used to indicate transfer of ownership

          };
        };
        postMessage({ consoleLog: "broadway worker initialized" });
      };
    };


  }, false);
};
882 |
883 | Decoder.nowValue = nowValue;
884 |
885 | return Decoder;
886 |
887 | })();
888 |
889 |
890 | }));
891 |
892 |
--------------------------------------------------------------------------------
/stream_picamera_h264/PiCamera_H264_Server.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # vuquangtrong.github.io
3 |
4 | import io
5 | import picamera
6 | import time
7 | from http.server import SimpleHTTPRequestHandler, ThreadingHTTPServer
8 | from wsgiref.simple_server import make_server
9 | from ws4py.websocket import WebSocket
10 | from ws4py.server.wsgirefserver import WSGIServer, WebSocketWSGIHandler, WebSocketWSGIRequestHandler
11 | from ws4py.server.wsgiutils import WebSocketWSGIApplication
12 | from threading import Thread, Condition
13 |
14 |
class FrameBuffer(object):
    """File-like sink that keeps the most recent H.264 frame from picamera.

    picamera calls :meth:`write` for every chunk of encoder output; chunks
    beginning with an Annex-B start code (``00 00 00 01``) replace the stored
    frame and wake any thread waiting on ``condition`` (the WebSocket
    broadcaster loop in :func:`stream`).
    """

    def __init__(self):
        self.frame = None             # latest complete frame as bytes, or None
        self.buffer = io.BytesIO()    # scratch buffer reused across writes
        self.condition = Condition()  # notified whenever self.frame is replaced

    def write(self, buf):
        """Consume one chunk of encoder output; return the bytes consumed.

        Chunks that do not start with a NAL start code are dropped (picamera
        is assumed to deliver each access unit in a single write -- TODO
        confirm for very large frames). Fix over the original: returns
        ``len(buf)`` as the binary file protocol expects instead of ``None``.
        """
        if buf.startswith(b'\x00\x00\x00\x01'):
            with self.condition:
                self.buffer.seek(0)
                self.buffer.write(buf)
                self.buffer.truncate()
                self.frame = self.buffer.getvalue()
                self.condition.notify_all()
        return len(buf)
29 |
30 |
def stream():
    """Capture H.264 from the Pi camera and fan it out over WebSockets.

    Starts three cooperating pieces:
      * picamera recording 640x480@24fps baseline H.264 into a FrameBuffer,
      * a ws4py WebSocket server on port 9000 broadcasting each new frame,
      * a plain HTTP server on port 8000 serving the player page.

    Blocks until interrupted (Ctrl-C), then shuts both servers down and stops
    the recording. Fixes over the original: the dead ``broadcasting`` flag
    (never observed as False by the loop) is removed, as is the unconditional
    ``raise KeyboardInterrupt`` in the inner ``finally`` which replaced -- and
    then silently swallowed -- any genuine error from the broadcast loop.
    """
    with picamera.PiCamera(resolution='640x480', framerate=24) as camera:
        frame_buffer = FrameBuffer()
        camera.start_recording(frame_buffer, format='h264', profile="baseline")
        try:
            # ws4py requires HTTP/1.1 for the websocket upgrade handshake
            WebSocketWSGIHandler.http_version = '1.1'
            websocketd = make_server('', 9000, server_class=WSGIServer,
                                     handler_class=WebSocketWSGIRequestHandler,
                                     app=WebSocketWSGIApplication(handler_cls=WebSocket))
            websocketd.initialize_websockets_manager()
            websocketd_thread = Thread(target=websocketd.serve_forever)

            httpd = ThreadingHTTPServer(('', 8000), SimpleHTTPRequestHandler)
            httpd_thread = Thread(target=httpd.serve_forever)

            try:
                websocketd_thread.start()
                httpd_thread.start()
                # Broadcast each frame as soon as the camera signals one.
                while True:
                    with frame_buffer.condition:
                        frame_buffer.condition.wait()
                        websocketd.manager.broadcast(frame_buffer.frame, binary=True)
            except KeyboardInterrupt:
                pass
            finally:
                # Always stop both server loops, even on unexpected errors.
                websocketd.shutdown()
                httpd.shutdown()
        except KeyboardInterrupt:
            pass
        finally:
            camera.stop_recording()
65 |
# Entry point: start the camera + WebSocket/HTTP servers when run as a script.
if __name__ == "__main__":
    stream()
68 |
--------------------------------------------------------------------------------
/stream_picamera_h264/Player.js:
--------------------------------------------------------------------------------
1 | /*
2 |
3 |
4 | usage:
5 |
6 | p = new Player({
7 | useWorker: ,
8 | workerFile: // give path to Decoder.js
9 | webgl: true | false | "auto" // defaults to "auto"
10 | });
11 |
12 | // canvas property represents the canvas node
13 | // put it somewhere in the dom
14 | p.canvas;
15 |
16 | p.webgl; // contains the used rendering mode. if you pass auto to webgl you can see what auto detection resulted in
17 |
18 | p.decode();
19 |
20 |
21 | */
22 |
23 |
24 |
25 | // universal module definition
26 | (function (root, factory) {
27 | if (typeof define === 'function' && define.amd) {
28 | // AMD. Register as an anonymous module.
29 | define(["./Decoder", "./YUVCanvas"], factory);
30 | } else if (typeof exports === 'object') {
31 | // Node. Does not work with strict CommonJS, but
32 | // only CommonJS-like environments that support module.exports,
33 | // like Node.
34 | module.exports = factory(require("./Decoder"), require("./YUVCanvas"));
35 | } else {
36 | // Browser globals (root is window)
37 | root.Player = factory(root.Decoder, root.YUVCanvas);
38 | }
39 | }(this, function (Decoder, WebGLCanvas) {
40 | "use strict";
41 |
42 |
43 | var nowValue = Decoder.nowValue;
44 |
45 |
// Player wraps the Broadway H.264 decoder together with a render canvas.
// Decoding runs inline or in a Web Worker (config.useWorker); rendering uses
// WebGL when requested/available (config.webgl: true | false | "auto"), else
// RGB on a 2D canvas. The canvas node is exposed as `this.canvas`/`domNode`.
var Player = function(parOptions){
  var self = this;
  this._config = parOptions || {};

  this.render = true;
  if (this._config.render === false){
    this.render = false;
  };

  this.nowValue = nowValue;

  this._config.workerFile = this._config.workerFile || "Decoder.js";
  if (this._config.preserveDrawingBuffer){
    this._config.contextOptions = this._config.contextOptions || {};
    this._config.contextOptions.preserveDrawingBuffer = true;
  };

  var webgl = "auto";
  if (this._config.webgl === true){
    webgl = true;
  }else if (this._config.webgl === false){
    webgl = false;
  };

  // "auto": probe once for a usable WebGL context on a throwaway canvas
  if (webgl == "auto"){
    webgl = true;
    try{
      if (!window.WebGLRenderingContext) {
        // the browser doesn't even know what WebGL is
        webgl = false;
      } else {
        var canvas = document.createElement('canvas');
        var ctx = canvas.getContext("webgl");
        if (!ctx) {
          // browser supports WebGL but initialization failed.
          webgl = false;
        };
      };
    }catch(e){
      webgl = false;
    };
  };

  this.webgl = webgl;

  // choose functions
  if (this.webgl){
    this.createCanvasObj = this.createCanvasWebGL;
    this.renderFrame = this.renderFrameWebGL;
  }else{
    this.createCanvasObj = this.createCanvasRGB;
    this.renderFrame = this.renderFrameRGB;
  };

  // Called for every decoded picture: runs the user callback first, then
  // renders (if enabled) and fires onRenderFrameComplete.
  // note: lastWidth/lastHeight/startTime are written but never read here.
  var lastWidth;
  var lastHeight;
  var onPictureDecoded = function(buffer, width, height, infos) {
    self.onPictureDecoded(buffer, width, height, infos);

    var startTime = nowValue();

    if (!buffer || !self.render) {
      return;
    };

    self.renderFrame({
      canvasObj: self.canvasObj,
      data: buffer,
      width: width,
      height: height
    });

    if (self.onRenderFrameComplete){
      self.onRenderFrameComplete({
        data: buffer,
        width: width,
        height: height,
        infos: infos,
        canvasObj: self.canvasObj
      });
    };

  };

  // provide size

  if (!this._config.size){
    this._config.size = {};
  };
  this._config.size.width = this._config.size.width || 200;
  this._config.size.height = this._config.size.height || 200;

  // worker mode: decode off the main thread; decoded frames arrive as messages
  if (this._config.useWorker){
    var worker = new Worker(this._config.workerFile);
    this.worker = worker;
    worker.addEventListener('message', function(e) {
      var data = e.data;
      if (data.consoleLog){
        console.log(data.consoleLog);
        return;
      };

      onPictureDecoded.call(self, new Uint8Array(data.buf, 0, data.length), data.width, data.height, data.infos);

    }, false);

    worker.postMessage({type: "Broadway.js - Worker init", options: {
      rgb: !webgl,
      memsize: this.memsize,
      reuseMemory: this._config.reuseMemory ? true : false
    }});

    if (this._config.transferMemory){
      this.decode = function(parData, parInfo){
        // no copy
        // instead we are transfering the ownership of the buffer
        // dangerous!!!

        worker.postMessage({buf: parData.buffer, offset: parData.byteOffset, length: parData.length, info: parInfo}, [parData.buffer]); // Send data to our worker.
      };

    }else{
      this.decode = function(parData, parInfo){
        // Copy the sample so that we only do a structured clone of the
        // region of interest
        var copyU8 = new Uint8Array(parData.length);
        copyU8.set( parData, 0, parData.length );
        worker.postMessage({buf: copyU8.buffer, offset: 0, length: parData.length, info: parInfo}, [copyU8.buffer]); // Send data to our worker.
      };

    };

    // hand frame buffers back to the worker's reuse pool
    if (this._config.reuseMemory){
      this.recycleMemory = function(parArray){
        //this.beforeRecycle();
        worker.postMessage({reuse: parArray.buffer}, [parArray.buffer]); // Send data to our worker.
        //this.afterRecycle();
      };
    }

  }else{

    // inline mode: decode synchronously on the calling thread
    this.decoder = new Decoder({
      rgb: !webgl
    });
    this.decoder.onPictureDecoded = onPictureDecoded;

    this.decode = function(parData, parInfo){
      self.decoder.decode(parData, parInfo);
    };

  };



  if (this.render){
    this.canvasObj = this.createCanvasObj({
      contextOptions: this._config.contextOptions
    });
    this.canvas = this.canvasObj.canvas;
  };

  this.domNode = this.canvas;

  lastWidth = this._config.size.width;
  lastHeight = this._config.size.height;

};
215 |
Player.prototype = {

  // user hook, called for every decoded picture before rendering
  onPictureDecoded: function(buffer, width, height, infos){},

  // call when memory of decoded frames is not used anymore
  // (no-op by default; replaced in the constructor when reuseMemory is set)
  recycleMemory: function(buf){
  },
  /*beforeRecycle: function(){},
  afterRecycle: function(){},*/

  // for both create functions, options is:
  //
  //  width
  //  height
  //  enableScreenshot
  //
  // returns an object that has a property canvas which is an html5 canvas
  createCanvasWebGL: function(options){
    var canvasObj = this._createBasicCanvasObj(options);
    canvasObj.contextOptions = options.contextOptions;
    return canvasObj;
  },

  createCanvasRGB: function(options){
    var canvasObj = this._createBasicCanvasObj(options);
    return canvasObj;
  },

  // part that is the same for webGL and RGB
  _createBasicCanvasObj: function(options){
    options = options || {};

    var obj = {};
    var width = options.width;
    if (!width){
      width = this._config.size.width;
    };
    var height = options.height;
    if (!height){
      height = this._config.size.height;
    };
    obj.canvas = document.createElement('canvas');
    obj.canvas.width = width;
    obj.canvas.height = height;
    obj.canvas.style.backgroundColor = "#0D0E1B";


    return obj;
  },

  // options:
  //
  // canvas
  // data
  // Draws a decoded YUV420 frame via a WebGLCanvas; the GL canvas is
  // (re)created whenever the frame size changes. The data buffer is handed
  // back through recycleMemory afterwards.
  renderFrameWebGL: function(options){

    var canvasObj = options.canvasObj;

    var width = options.width || canvasObj.canvas.width;
    var height = options.height || canvasObj.canvas.height;

    if (canvasObj.canvas.width !== width || canvasObj.canvas.height !== height || !canvasObj.webGLCanvas){
      canvasObj.canvas.width = width;
      canvasObj.canvas.height = height;
      canvasObj.webGLCanvas = new WebGLCanvas({
        canvas: canvasObj.canvas,
        contextOptions: canvasObj.contextOptions,
        width: width,
        height: height
      });
    };

    // split the planar buffer into Y, U, V views (4:2:0 layout)
    var ylen = width * height;
    var uvlen = (width / 2) * (height / 2);

    canvasObj.webGLCanvas.drawNextOutputPicture({
      yData: options.data.subarray(0, ylen),
      uData: options.data.subarray(ylen, ylen + uvlen),
      vData: options.data.subarray(ylen + uvlen, ylen + uvlen + uvlen)
    });

    var self = this;
    self.recycleMemory(options.data);

  },
  // Draws a decoded RGBA frame via the 2D canvas API; the 2d context and
  // ImageData are created lazily on first use (or after a size change reset).
  renderFrameRGB: function(options){
    var canvasObj = options.canvasObj;

    var width = options.width || canvasObj.canvas.width;
    var height = options.height || canvasObj.canvas.height;

    if (canvasObj.canvas.width !== width || canvasObj.canvas.height !== height){
      canvasObj.canvas.width = width;
      canvasObj.canvas.height = height;
    };

    var ctx = canvasObj.ctx;
    var imgData = canvasObj.imgData;

    if (!ctx){
      canvasObj.ctx = canvasObj.canvas.getContext('2d');
      ctx = canvasObj.ctx;

      canvasObj.imgData = ctx.createImageData(width, height);
      imgData = canvasObj.imgData;
    };

    imgData.data.set(options.data);
    ctx.putImageData(imgData, 0, 0);
    var self = this;
    self.recycleMemory(options.data);

  }

};
331 |
332 | return Player;
333 |
334 | }));
335 |
336 |
--------------------------------------------------------------------------------
/stream_picamera_h264/YUVCanvas.js:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) 2015 Paperspace Co. All rights reserved.
3 | //
4 | // Permission is hereby granted, free of charge, to any person obtaining a copy
5 | // of this software and associated documentation files (the "Software"), to
6 | // deal in the Software without restriction, including without limitation the
7 | // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
8 | // sell copies of the Software, and to permit persons to whom the Software is
9 | // furnished to do so, subject to the following conditions:
10 | //
11 | // The above copyright notice and this permission notice shall be included in
12 | // all copies or substantial portions of the Software.
13 | //
14 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
19 | // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
20 | // IN THE SOFTWARE.
21 | //
22 |
23 |
24 | // universal module definition
25 | (function (root, factory) {
26 | if (typeof define === 'function' && define.amd) {
27 | // AMD. Register as an anonymous module.
28 | define([], factory);
29 | } else if (typeof exports === 'object') {
30 | // Node. Does not work with strict CommonJS, but
31 | // only CommonJS-like environments that support module.exports,
32 | // like Node.
33 | module.exports = factory();
34 | } else {
35 | // Browser globals (root is window)
36 | root.YUVCanvas = factory();
37 | }
38 | }(this, function () {
39 |
40 |
41 | /**
42 | * This class can be used to render output pictures from an H264bsdDecoder to a canvas element.
43 | * If available the content is rendered using WebGL.
44 | */
function YUVCanvas(parOptions) {

  parOptions = parOptions || {};

  this.canvasElement = parOptions.canvas || document.createElement("canvas");
  this.contextOptions = parOptions.contextOptions;

  // "yuv420" (three planes) or "yuv422" (single interleaved plane)
  this.type = parOptions.type || "yuv420";

  this.customYUV444 = parOptions.customYUV444;

  // color matrix selection: "rec601" (default) or "rec709"
  this.conversionType = parOptions.conversionType || "rec601";

  // note the unusual 320 (not 360/480) default height
  this.width = parOptions.width || 640;
  this.height = parOptions.height || 320;

  this.animationTime = parOptions.animationTime || 0;

  this.canvasElement.width = this.width;
  this.canvasElement.height = this.height;

  this.initContextGL();

  // GL pipeline setup only when a context was obtained; otherwise callers
  // are expected to check isWebGL() and render some other way
  if(this.contextGL) {
    this.initProgram();
    this.initBuffers();
    this.initTextures();
  };


  /**
   * Draw the next output picture using WebGL
   */
  if (this.type === "yuv420"){
    // Uploads the three planes as LUMINANCE textures and draws a quad.
    // Texture coordinates are scaled so that row padding (dataPerRow wider
    // than the visible width) is cropped out.
    this.drawNextOuptutPictureGL = function(par) {
      var gl = this.contextGL;
      var texturePosBuffer = this.texturePosBuffer;
      var uTexturePosBuffer = this.uTexturePosBuffer;
      var vTexturePosBuffer = this.vTexturePosBuffer;

      var yTextureRef = this.yTextureRef;
      var uTextureRef = this.uTextureRef;
      var vTextureRef = this.vTextureRef;

      var yData = par.yData;
      var uData = par.uData;
      var vData = par.vData;

      var width = this.width;
      var height = this.height;

      var yDataPerRow = par.yDataPerRow || width;
      var yRowCnt = par.yRowCnt || height;

      var uDataPerRow = par.uDataPerRow || (width / 2);
      var uRowCnt = par.uRowCnt || (height / 2);

      var vDataPerRow = par.vDataPerRow || uDataPerRow;
      var vRowCnt = par.vRowCnt || uRowCnt;

      gl.viewport(0, 0, width, height);

      var tTop = 0;
      var tLeft = 0;
      var tBottom = height / yRowCnt;
      var tRight = width / yDataPerRow;
      var texturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]);

      gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer);
      gl.bufferData(gl.ARRAY_BUFFER, texturePosValues, gl.DYNAMIC_DRAW);

      // chroma planes are half-resolution unless customYUV444 is set
      if (this.customYUV444){
        tBottom = height / uRowCnt;
        tRight = width / uDataPerRow;
      }else{
        tBottom = (height / 2) / uRowCnt;
        tRight = (width / 2) / uDataPerRow;
      };
      var uTexturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]);

      gl.bindBuffer(gl.ARRAY_BUFFER, uTexturePosBuffer);
      gl.bufferData(gl.ARRAY_BUFFER, uTexturePosValues, gl.DYNAMIC_DRAW);


      if (this.customYUV444){
        tBottom = height / vRowCnt;
        tRight = width / vDataPerRow;
      }else{
        tBottom = (height / 2) / vRowCnt;
        tRight = (width / 2) / vDataPerRow;
      };
      var vTexturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]);

      gl.bindBuffer(gl.ARRAY_BUFFER, vTexturePosBuffer);
      gl.bufferData(gl.ARRAY_BUFFER, vTexturePosValues, gl.DYNAMIC_DRAW);


      gl.activeTexture(gl.TEXTURE0);
      gl.bindTexture(gl.TEXTURE_2D, yTextureRef);
      gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, yDataPerRow, yRowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, yData);

      gl.activeTexture(gl.TEXTURE1);
      gl.bindTexture(gl.TEXTURE_2D, uTextureRef);
      gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, uDataPerRow, uRowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, uData);

      gl.activeTexture(gl.TEXTURE2);
      gl.bindTexture(gl.TEXTURE_2D, vTextureRef);
      gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, vDataPerRow, vRowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, vData);

      gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
    };

  }else if (this.type === "yuv422"){
    // Uploads the interleaved 4:2:2 data as one LUMINANCE texture; the
    // fragment shader (see initProgram) de-interleaves Y/U/V per pixel
    // using the `resolution` uniform set here.
    this.drawNextOuptutPictureGL = function(par) {
      var gl = this.contextGL;
      var texturePosBuffer = this.texturePosBuffer;

      var textureRef = this.textureRef;

      var data = par.data;

      var width = this.width;
      var height = this.height;

      var dataPerRow = par.dataPerRow || (width * 2);
      var rowCnt = par.rowCnt || height;

      gl.viewport(0, 0, width, height);

      var tTop = 0;
      var tLeft = 0;
      var tBottom = height / rowCnt;
      var tRight = width / (dataPerRow / 2);
      var texturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]);

      gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer);
      gl.bufferData(gl.ARRAY_BUFFER, texturePosValues, gl.DYNAMIC_DRAW);

      gl.uniform2f(gl.getUniformLocation(this.shaderProgram, 'resolution'), dataPerRow, height);

      gl.activeTexture(gl.TEXTURE0);
      gl.bindTexture(gl.TEXTURE_2D, textureRef);
      gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, dataPerRow, rowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, data);

      gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
    };
  };

};
194 |
195 | /**
196 | * Returns true if the canvas supports WebGL
197 | */
198 | YUVCanvas.prototype.isWebGL = function() {
199 | return this.contextGL;
200 | };
201 |
202 | /**
203 | * Create the GL context from the canvas element
204 | */
205 | YUVCanvas.prototype.initContextGL = function() {
206 | var canvas = this.canvasElement;
207 | var gl = null;
208 |
209 | var validContextNames = ["webgl", "experimental-webgl", "moz-webgl", "webkit-3d"];
210 | var nameIndex = 0;
211 |
212 | while(!gl && nameIndex < validContextNames.length) {
213 | var contextName = validContextNames[nameIndex];
214 |
215 | try {
216 | if (this.contextOptions){
217 | gl = canvas.getContext(contextName, this.contextOptions);
218 | }else{
219 | gl = canvas.getContext(contextName);
220 | };
221 | } catch (e) {
222 | gl = null;
223 | }
224 |
225 | if(!gl || typeof gl.getParameter !== "function") {
226 | gl = null;
227 | }
228 |
229 | ++nameIndex;
230 | };
231 |
232 | this.contextGL = gl;
233 | };
234 |
/**
 * Initialize GL shader program.
 *
 * Builds, compiles and links the shader pair matching this.type:
 *  - "yuv420": three single-channel textures (Y, U, V), one sampler each,
 *    with an independent texture coordinate per plane;
 *  - "yuv422": a single packed texture sampled three times per fragment.
 * Uploads the YUV->RGB conversion matrix selected by this.conversionType
 * ("rec709", otherwise Rec. 601 is assumed).  The linked program is stored
 * in this.shaderProgram and left active via gl.useProgram.
 */
YUVCanvas.prototype.initProgram = function() {
    var gl = this.contextGL;

    // Shader sources are chosen per pixel format below.
    var vertexShaderScript;
    var fragmentShaderScript;

    if (this.type === "yuv420"){

        // Pass-through vertex shader forwarding one coordinate per plane.
        vertexShaderScript = [
            'attribute vec4 vertexPos;',
            'attribute vec4 texturePos;',
            'attribute vec4 uTexturePos;',
            'attribute vec4 vTexturePos;',
            'varying vec2 textureCoord;',
            'varying vec2 uTextureCoord;',
            'varying vec2 vTextureCoord;',

            'void main()',
            '{',
            '  gl_Position = vertexPos;',
            '  textureCoord = texturePos.xy;',
            '  uTextureCoord = uTexturePos.xy;',
            '  vTextureCoord = vTexturePos.xy;',
            '}'
        ].join('\n');

        // Each plane is read from its sampler's red channel; the color
        // conversion (including constant offsets) happens in one matrix
        // multiply — the offsets sit in the matrix's 4th column and are
        // applied through the w=1 component of the color vector.
        fragmentShaderScript = [
            'precision highp float;',
            'varying highp vec2 textureCoord;',
            'varying highp vec2 uTextureCoord;',
            'varying highp vec2 vTextureCoord;',
            'uniform sampler2D ySampler;',
            'uniform sampler2D uSampler;',
            'uniform sampler2D vSampler;',
            'uniform mat4 YUV2RGB;',

            'void main(void) {',
            '  highp float y = texture2D(ySampler, textureCoord).r;',
            '  highp float u = texture2D(uSampler, uTextureCoord).r;',
            '  highp float v = texture2D(vSampler, vTextureCoord).r;',
            '  gl_FragColor = vec4(y, u, v, 1) * YUV2RGB;',
            '}'
        ].join('\n');

    }else if (this.type === "yuv422"){
        // Single packed texture; only one coordinate is needed.
        vertexShaderScript = [
            'attribute vec4 vertexPos;',
            'attribute vec4 texturePos;',
            'varying vec2 textureCoord;',

            'void main()',
            '{',
            '  gl_Position = vertexPos;',
            '  textureCoord = texturePos.xy;',
            '}'
        ].join('\n');

        // Per-texel step sizes derived from the texture resolution:
        // texPixX = 1 texel, logPixX = 2 texels, logHalfPixX = 4 texels.
        // Y, U and V are picked from fixed offsets within each packed group.
        // NOTE(review): this assumes a YUYV-style 4:2:2 byte layout in the
        // uploaded texture — confirm against the code that uploads it.
        fragmentShaderScript = [
            'precision highp float;',
            'varying highp vec2 textureCoord;',
            'uniform sampler2D sampler;',
            'uniform highp vec2 resolution;',
            'uniform mat4 YUV2RGB;',

            'void main(void) {',

            '  highp float texPixX = 1.0 / resolution.x;',
            '  highp float logPixX = 2.0 / resolution.x;', // half the resolution of the texture
            '  highp float logHalfPixX = 4.0 / resolution.x;', // half of the logical resolution so every 4th pixel
            '  highp float steps = floor(textureCoord.x / logPixX);',
            '  highp float uvSteps = floor(textureCoord.x / logHalfPixX);',
            '  highp float y = texture2D(sampler, vec2((logPixX * steps) + texPixX, textureCoord.y)).r;',
            '  highp float u = texture2D(sampler, vec2((logHalfPixX * uvSteps), textureCoord.y)).r;',
            '  highp float v = texture2D(sampler, vec2((logHalfPixX * uvSteps) + texPixX + texPixX, textureCoord.y)).r;',

            //'  highp float y = texture2D(sampler, textureCoord).r;',
            //'  gl_FragColor = vec4(y, u, v, 1) * YUV2RGB;',
            '  gl_FragColor = vec4(y, u, v, 1.0) * YUV2RGB;',
            '}'
        ].join('\n');
    };

    // Row-major conversion matrix; offsets in the 4th column (see above).
    var YUV2RGB = [];

    if (this.conversionType == "rec709") {
        // ITU-T Rec. 709 (typically used for HD content)
        YUV2RGB = [
            1.16438, 0.00000, 1.79274, -0.97295,
            1.16438, -0.21325, -0.53291, 0.30148,
            1.16438, 2.11240, 0.00000, -1.13340,
            0, 0, 0, 1,
        ];
    } else {
        // assume ITU-T Rec. 601
        YUV2RGB = [
            1.16438, 0.00000, 1.59603, -0.87079,
            1.16438, -0.39176, -0.81297, 0.52959,
            1.16438, 2.01723, 0.00000, -1.08139,
            0, 0, 0, 1
        ];
    };

    // Compile both shaders; failures are only logged (no throw), so a bad
    // driver/context degrades to console output instead of crashing.
    var vertexShader = gl.createShader(gl.VERTEX_SHADER);
    gl.shaderSource(vertexShader, vertexShaderScript);
    gl.compileShader(vertexShader);
    if(!gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS)) {
        console.log('Vertex shader failed to compile: ' + gl.getShaderInfoLog(vertexShader));
    }

    var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
    gl.shaderSource(fragmentShader, fragmentShaderScript);
    gl.compileShader(fragmentShader);
    if(!gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS)) {
        console.log('Fragment shader failed to compile: ' + gl.getShaderInfoLog(fragmentShader));
    }

    // Link and activate the program so the uniform upload below lands on it.
    var program = gl.createProgram();
    gl.attachShader(program, vertexShader);
    gl.attachShader(program, fragmentShader);
    gl.linkProgram(program);
    if(!gl.getProgramParameter(program, gl.LINK_STATUS)) {
        console.log('Program failed to compile: ' + gl.getProgramInfoLog(program));
    }

    gl.useProgram(program);

    var YUV2RGBRef = gl.getUniformLocation(program, 'YUV2RGB');
    gl.uniformMatrix4fv(YUV2RGBRef, false, YUV2RGB);

    this.shaderProgram = program;
};
370 |
/**
 * Initialize vertex buffers and attach to shader program.
 *
 * Uploads a full-viewport quad (triangle-strip order) for 'vertexPos' and
 * unit texture-coordinate quads for 'texturePos' (plus 'uTexturePos' /
 * 'vTexturePos' when this.type is "yuv420").  The texture-coordinate
 * buffers are stored on `this` for later updates.  If this.animationTime
 * is set, the quad is grown from the center over that many milliseconds.
 */
YUVCanvas.prototype.initBuffers = function() {
    var gl = this.contextGL;
    var program = this.shaderProgram;

    var vertexPosBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, vertexPosBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 1, -1, 1, 1, -1, -1, -1]), gl.STATIC_DRAW);

    var vertexPosRef = gl.getAttribLocation(program, 'vertexPos');
    gl.enableVertexAttribArray(vertexPosRef);
    gl.vertexAttribPointer(vertexPosRef, 2, gl.FLOAT, false, 0, 0);

    if (this.animationTime) {
        var animationTime = this.animationTime;
        var timePassed = 0;
        var stepTime = 15;

        // FIX: the original allocated a brand-new GL buffer and re-queried
        // the attribute location on every ~15ms animation step without ever
        // deleting the old buffers — a GL resource leak.  Reuse the quad
        // buffer and attribute location created above instead; the rendered
        // output is identical.
        var aniFun = function() {
            timePassed += stepTime;
            var mul = timePassed / animationTime;

            if (timePassed >= animationTime) {
                mul = 1; // clamp on the final step
            } else {
                setTimeout(aniFun, stepTime);
            }

            // Scale the quad from the center: corners run from 0 to +/-1.
            var pos = mul;
            var neg = -mul;

            gl.bindBuffer(gl.ARRAY_BUFFER, vertexPosBuffer);
            gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([pos, pos, neg, pos, pos, neg, neg, neg]), gl.STATIC_DRAW);
            gl.vertexAttribPointer(vertexPosRef, 2, gl.FLOAT, false, 0, 0);

            try {
                gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
            } catch (e) {
                // Drawing may fail before the first frame is uploaded; ignore.
            }
        };
        aniFun();
    }

    // Full [0,1] texture coordinates for the main (Y or packed) texture.
    var texturePosBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW);

    var texturePosRef = gl.getAttribLocation(program, 'texturePos');
    gl.enableVertexAttribArray(texturePosRef);
    gl.vertexAttribPointer(texturePosRef, 2, gl.FLOAT, false, 0, 0);

    this.texturePosBuffer = texturePosBuffer;

    if (this.type === "yuv420") {
        // Separate coordinate buffers for the U and V planes.
        var uTexturePosBuffer = gl.createBuffer();
        gl.bindBuffer(gl.ARRAY_BUFFER, uTexturePosBuffer);
        gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW);

        var uTexturePosRef = gl.getAttribLocation(program, 'uTexturePos');
        gl.enableVertexAttribArray(uTexturePosRef);
        gl.vertexAttribPointer(uTexturePosRef, 2, gl.FLOAT, false, 0, 0);

        this.uTexturePosBuffer = uTexturePosBuffer;

        var vTexturePosBuffer = gl.createBuffer();
        gl.bindBuffer(gl.ARRAY_BUFFER, vTexturePosBuffer);
        gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW);

        var vTexturePosRef = gl.getAttribLocation(program, 'vTexturePos');
        gl.enableVertexAttribArray(vTexturePosRef);
        gl.vertexAttribPointer(vTexturePosRef, 2, gl.FLOAT, false, 0, 0);

        this.vTexturePosBuffer = vTexturePosBuffer;
    }
};
459 |
/**
 * Create the GL textures used by the shader program and bind each sampler
 * uniform to its texture unit.  For "yuv420" there is one texture per
 * plane (Y on unit 0, U on unit 1, V on unit 2); for "yuv422" a single
 * packed texture on unit 0.
 */
YUVCanvas.prototype.initTextures = function() {
    var gl = this.contextGL;
    var program = this.shaderProgram;

    if (this.type === "yuv420") {
        this.yTextureRef = this.initTexture();
        gl.uniform1i(gl.getUniformLocation(program, 'ySampler'), 0);

        this.uTextureRef = this.initTexture();
        gl.uniform1i(gl.getUniformLocation(program, 'uSampler'), 1);

        this.vTextureRef = this.initTexture();
        gl.uniform1i(gl.getUniformLocation(program, 'vSampler'), 2);
    } else if (this.type === "yuv422") {
        // only one texture for 422
        this.textureRef = this.initTexture();
        gl.uniform1i(gl.getUniformLocation(program, 'sampler'), 0);
    }
};
493 |
/**
 * Create one 2D texture configured for video uploads: nearest-neighbour
 * filtering and edge clamping, then unbind it and return the handle.
 */
YUVCanvas.prototype.initTexture = function() {
    var gl = this.contextGL;
    var texture = gl.createTexture();

    gl.bindTexture(gl.TEXTURE_2D, texture);
    [
        [gl.TEXTURE_MAG_FILTER, gl.NEAREST],
        [gl.TEXTURE_MIN_FILTER, gl.NEAREST],
        [gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE],
        [gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE]
    ].forEach(function(param) {
        gl.texParameteri(gl.TEXTURE_2D, param[0], param[1]);
    });
    gl.bindTexture(gl.TEXTURE_2D, null);

    return texture;
};
510 |
/**
 * Draw picture data to the canvas.
 * If this object is using WebGL, the data must be an I420 formatted
 * ArrayBuffer; otherwise, data must be an RGBA formatted ArrayBuffer.
 */
YUVCanvas.prototype.drawNextOutputPicture = function(width, height, croppingParams, data) {
    // Dispatch on whether a WebGL context was obtained at construction
    // time; fall back to the 2D-canvas path otherwise.
    var renderer = this.contextGL ? this.drawNextOuptutPictureGL
                                  : this.drawNextOuptutPictureRGBA;
    renderer.call(this, width, height, croppingParams, data);
};
525 |
526 |
527 |
/**
 * Draw next output picture using RGBA data on a 2d canvas.
 *
 * `data` is copied into an ImageData of width x height and blitted; when
 * `croppingParams` is supplied ({left, top, width, height}) only that
 * sub-rectangle is painted, shifted so the cropped origin lands at (0,0).
 */
YUVCanvas.prototype.drawNextOuptutPictureRGBA = function(width, height, croppingParams, data) {
    var canvas = this.canvasElement;

    // FIX: the original declared `var croppingParams = null;` here, which
    // shadowed the parameter and made the cropping branch below dead code.
    // Honor the caller's argument instead; callers passing null/undefined
    // get the old behavior unchanged.
    var argbData = data;

    var ctx = canvas.getContext('2d');
    var imageData = ctx.getImageData(0, 0, width, height);
    imageData.data.set(argbData);

    if (croppingParams === null || croppingParams === undefined) {
        ctx.putImageData(imageData, 0, 0);
    } else {
        // putImageData dirty-rect form: paint only the cropped region.
        ctx.putImageData(imageData, -croppingParams.left, -croppingParams.top, 0, 0, croppingParams.width, croppingParams.height);
    }
};
548 |
549 | return YUVCanvas;
550 |
551 | }));
552 |
--------------------------------------------------------------------------------
/stream_picamera_h264/avc.wasm:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/vuquangtrong/pi_streaming/28e896a7e8236376f757b64fe90b09eef62b9cf6/stream_picamera_h264/avc.wasm
--------------------------------------------------------------------------------
/stream_picamera_h264/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | PiCamera H264 Streaming
6 |
7 |
8 | PiCamera H264 Streaming
9 |
10 |
11 |
12 |
13 |
33 |
34 |
35 |
--------------------------------------------------------------------------------
/stream_picamera_mjpeg/Picamera_MJPG_Server.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # vuquangtrong.github.io
3 |
4 | import io
5 | import time
6 | import picamera
7 | from http.server import SimpleHTTPRequestHandler, ThreadingHTTPServer
8 | from threading import Condition
9 |
10 |
11 | """
12 | FrameBuffer is a synchronized buffer which gets each frame and notifies to all waiting clients.
13 | It implements write() method to be used in picamera.start_recording()
14 | """
class FrameBuffer(object):
    """Thread-safe holder for the most recent JPEG frame.

    File-like object handed to picamera.start_recording(): the camera
    calls write() with MJPEG data, and each complete frame is published
    to all threads blocked on `condition`.
    """

    def __init__(self):
        self.frame = None             # latest complete JPEG frame; None until first write
        self.buffer = io.BytesIO()    # reusable scratch buffer the camera writes into
        self.condition = Condition()  # readers wait here for a fresh frame

    def write(self, buf):
        # Chunks that do not begin with the JPEG SOI marker (0xFFD8) are
        # not the start of a frame; ignore them.
        if not buf.startswith(b'\xff\xd8'):
            return
        with self.condition:
            # Overwrite the scratch buffer from the start, then cut off any
            # tail left over from a longer previous frame.
            self.buffer.seek(0)
            self.buffer.write(buf)
            self.buffer.truncate()
            # Publish the frame and wake every waiting streaming thread.
            self.frame = self.buffer.getvalue()
            self.condition.notify_all()
34 |
35 |
36 | """
37 | StreamingHandler extent http.server.SimpleHTTPRequestHandler class to handle mjpg file for live stream
38 | """
class StreamingHandler(SimpleHTTPRequestHandler):
    """Serve '/stream.mjpg' as an endless multipart MJPEG stream.

    Every other path falls through to SimpleHTTPRequestHandler, which
    serves static files (e.g. index.html) from the working directory.
    """

    def __init__(self, frames_buffer, *args):
        # Shared FrameBuffer: a new handler instance is created for each
        # request, but all of them read frames from this single buffer.
        self.frames_buffer = frames_buffer
        print("New StreamingHandler, using frames_buffer=", frames_buffer)
        super().__init__(*args)

    def __del__(self):
        print("Remove StreamingHandler")

    def do_GET(self):
        """Serve the MJPEG stream, or delegate to the static-file handler."""
        if self.path == '/stream.mjpg':
            self.send_response(200)
            self.send_header('Age', 0)
            self.send_header('Cache-Control', 'no-cache, private')
            self.send_header('Pragma', 'no-cache')
            # multipart/x-mixed-replace makes the browser replace the shown
            # image each time a new part (frame) arrives.
            self.send_header('Content-Type', 'multipart/x-mixed-replace; boundary=FRAME')
            self.end_headers()
            try:
                # tracking serving time
                start_time = time.time()
                frame_count = 0
                # endless stream
                while True:
                    with self.frames_buffer.condition:
                        # wait for a new frame (blocks until FrameBuffer.write
                        # calls notify_all)
                        self.frames_buffer.condition.wait()
                        # it's available, pick it up
                        frame = self.frames_buffer.frame
                    # send it as one multipart part: boundary, part headers,
                    # JPEG bytes, trailing CRLF
                    self.wfile.write(b'--FRAME\r\n')
                    self.send_header('Content-Type', 'image/jpeg')
                    self.send_header('Content-Length', len(frame))
                    self.end_headers()
                    self.wfile.write(frame)
                    self.wfile.write(b'\r\n')
                    # count frames
                    frame_count += 1
                    # calculate FPS every 5s
                    if (time.time() - start_time) > 5:
                        print("FPS: ", frame_count / (time.time() - start_time))
                        frame_count = 0
                        start_time = time.time()
            except Exception as e:
                # Client disconnects surface here as write failures
                # (e.g. BrokenPipeError); treat any of them as "client left".
                print(f'Removed streaming client {self.client_address}, {str(e)}')
        else:
            # fallback to default handler (static files)
            super().do_GET()
86 |
def stream():
    """Record MJPEG from the Pi camera into a shared FrameBuffer and serve
    it over HTTP on port 8000 until the process is interrupted."""
    with picamera.PiCamera(resolution='640x480', framerate=24) as camera:
        # One buffer shared by the camera writer and every HTTP client.
        frame_buffer = FrameBuffer()
        camera.start_recording(frame_buffer, format='mjpeg')
        try:
            # Bind the shared buffer into each per-request handler.
            httpd = ThreadingHTTPServer(
                ('', 8000),
                lambda *args: StreamingHandler(frame_buffer, *args)
            )
            httpd.serve_forever()
        finally:
            # Always release the camera, even if the server dies.
            camera.stop_recording()

if __name__ == "__main__":
    stream()
106 |
--------------------------------------------------------------------------------
/stream_picamera_mjpeg/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 | PiCamera MJPEG Streaming
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/stream_picamera_mjpeg/run.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # vuquangtrong.github.io
3 |
4 | sudo python3 Picamera_MJPG_Server.py
5 |
--------------------------------------------------------------------------------