├── .gitignore
├── packetsender
│   ├── cmd
│   │   ├── server.go
│   │   └── versions.go
│   ├── tspackets.go
│   └── server.go
├── packetrecevier
│   ├── src
│   │   └── app
│   │       └── app.js
│   ├── index.html
│   ├── worker.js
│   └── YUVCanvas.js
├── dev
│   ├── 1-dev-runserver.sh
│   └── dev.env
├── .gitmodules
├── README.md
├── scripts
│   ├── generatevideo.sh
│   └── buildffmpeg.sh
└── LICENSE

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | bin/
2 | shared/
3 | *.h
4 | *.a
5 | *.pc

--------------------------------------------------------------------------------
/packetsender/cmd/server.go:
--------------------------------------------------------------------------------
1 | package main
2 | 
3 | import (
4 | 	"github.com/colek42/streamingDemo/packetsender"
5 | )
6 | 
7 | func main() {
8 | 	packetsender.Serve()
9 | }
10 | 

--------------------------------------------------------------------------------
/packetrecevier/src/app/app.js:
--------------------------------------------------------------------------------
1 | function onSubmit() {
2 |     // Read the stream URI out of the form field.
3 |     var uri = document.getElementById('URI').value;
4 |     // The server's websocket endpoint lives at /ws and takes the stream
5 |     // URI as a query parameter (see packetsender/server.go).
6 |     var websocket = new WebSocket("ws://" + window.location.host + "/ws?uri=" + encodeURIComponent(uri));
7 | }
8 | 

--------------------------------------------------------------------------------
/dev/1-dev-runserver.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # This script runs the server.
3 | 
4 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )/.." && pwd )"
5 | 
6 | source "$DIR/dev/dev.env"
7 | go run "$DIR/packetsender/cmd/server.go"

--------------------------------------------------------------------------------
/dev/dev.env:
--------------------------------------------------------------------------------
1 | export ROOTDIR="$GOPATH/src/github.com/colek42/streamingDemo"
2 | export PATH="$ROOTDIR/bin":$PATH
3 | 
4 | export PKG_CONFIG_PATH="${ROOTDIR}/shared/lib/pkgconfig:$PKG_CONFIG_PATH"
5 | export LIBRARY_PATH="${ROOTDIR}/shared/lib:$LIBRARY_PATH"

--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "lib/FFmpeg"]
2 | 	path = lib/FFmpeg
3 | 	url = git@github.com:FFmpeg/FFmpeg.git
4 | [submodule "lib/x264"]
5 | 	path = lib/x264
6 | 	url = git://git.videolan.org/x264.git
7 | [submodule "lib/libvpx"]
8 | 	path = lib/libvpx
9 | 	url = https://chromium.googlesource.com/webm/libvpx

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Proof of concept for low-latency video streaming to the browser: a Go server relays MPEG-TS video packets over a websocket to a Web Worker that decodes H.264 with an Emscripten FFmpeg build and renders onto a WebGL canvas. This code is proof-of-concept only; no attempt was made to handle errors or to write code that would pass code review. If you have any questions, or are using some of this, send me a note via GitHub issues.
2 | 
3 | Inspired by https://github.com/opensensorhub

--------------------------------------------------------------------------------
/scripts/generatevideo.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # FFmpeg multicast video generation script
3 | 
4 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )/.." && pwd )"
5 | source "$DIR/dev/dev.env"
6 | 
7 | OUT_URI=${1:-"udp://@234.5.5.5:8209"}
8 | 
9 | echo "If you don't have libx264 support, please re-run scripts/buildffmpeg.sh"
10 | echo "Generating H.264 multicast video on ${OUT_URI}..."
11 | 
12 | # -re paces reads at the input's native frame rate (behaves like a live source);
13 | # the two lavfi inputs synthesize a test pattern and pink-noise audio;
14 | # the result is H.264 + AAC muxed into MPEG-TS and sent to the multicast URI.
15 | ffmpeg \
16 |     -v info \
17 |     -re \
18 |     -f lavfi \
19 |     -i "testsrc=size=1280x720:rate=ntsc" \
20 |     -f lavfi \
21 |     -i "anoisesrc=c=pink" \
22 |     -c:v libx264 \
23 |     -pix_fmt yuv420p \
24 |     -c:a aac \
25 |     -f mpegts \
26 |     "${OUT_URI}"

--------------------------------------------------------------------------------
/scripts/buildffmpeg.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | set -e
4 | set -x
5 | 
6 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )/.." && pwd )"
7 | PREFIX_DIR="$DIR/shared"
8 | BIN_DIR="$DIR/bin"
9 | 
10 | rm -rf "$PREFIX_DIR"
11 | 
12 | # Build libvpx
13 | cd "$DIR/lib/libvpx"
14 | make distclean || true
15 | ./configure --prefix="$PREFIX_DIR" --disable-examples
16 | time make -j16
17 | make install
18 | make clean
19 | 
20 | # Build x264
21 | cd "$DIR/lib/x264"
22 | make distclean || true
23 | ./configure --prefix="$PREFIX_DIR" --bindir="$BIN_DIR" --enable-static
24 | time make -j16
25 | make install
26 | make distclean
27 | 
28 | # Build FFmpeg against the codecs installed above
29 | cd "$DIR/lib/FFmpeg"
30 | make distclean || true
31 | PKG_CONFIG_PATH="$PREFIX_DIR/lib/pkgconfig" ./configure \
32 |     --prefix="$PREFIX_DIR" \
33 |     --bindir="$BIN_DIR" \
34 |     --enable-gpl \
35 |     --enable-static \
36 |     --disable-shared \
37 |     --enable-libx264 \
38 |     --enable-nonfree \
39 |     --enable-libvpx
40 | time make -j16
41 | make install
42 | make distclean

--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 | 
3 | Copyright (c) 2019 Cole Kennedy
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 

--------------------------------------------------------------------------------
/packetsender/cmd/versions.go:
--------------------------------------------------------------------------------
1 | package main
2 | 
3 | import (
4 | 	"log"
5 | 
6 | 	"gopkg.in/targodan/ffgopeg.v1/avcodec"
7 | 	"gopkg.in/targodan/ffgopeg.v1/avdevice"
8 | 	"gopkg.in/targodan/ffgopeg.v1/avfilter"
9 | 	"gopkg.in/targodan/ffgopeg.v1/avformat"
10 | 	"gopkg.in/targodan/ffgopeg.v1/avutil"
11 | 	"gopkg.in/targodan/ffgopeg.v1/swresample"
12 | 	"gopkg.in/targodan/ffgopeg.v1/swscale"
13 | )
14 | 
15 | func PrintVersions() {
16 | 	log.Printf("AvCodec Version:\t%v", avcodec.Version())
17 | 	log.Printf("AvCodec License:\t%v", avcodec.License())
18 | 	log.Printf("AvDevice Version:\t%v", avdevice.Version())
19 | 	log.Printf("AvDevice License:\t%v", avdevice.License())
20 | 	log.Printf("AvFilter Version:\t%v", avfilter.Version())
21 | 	log.Printf("AvFilter License:\t%v", avfilter.License())
22 | 	log.Printf("AvFormat Version:\t%v", avformat.Version())
23 | 	log.Printf("AvFormat License:\t%v", avformat.License())
24 | 	log.Printf("AvUtil Version:\t%v", avutil.Version())
25 | 	log.Printf("AvUtil License:\t%v", avutil.License())
26 | 	log.Printf("SWResample Version:\t%v", swresample.Version())
27 | 	log.Printf("SWResample License:\t%v", swresample.License())
28 | 	log.Printf("SWScale Version:\t%v", swscale.Version())
29 | 	log.Printf("SWScale License:\t%v", swscale.License())
30 | }
31 | 

--------------------------------------------------------------------------------
/packetsender/tspackets.go:
--------------------------------------------------------------------------------
1 | package packetsender
2 | 
3 | import (
4 | 	"errors"
5 | 	"log"
6 | 	"time"
7 | 
8 | 	"github.com/colek42/ffgopeg/avcodec"
9 | 	"github.com/colek42/ffgopeg/avformat"
10 | 	"github.com/colek42/ffgopeg/avutil"
11 | )
12 | 
13 | func init() {
14 | 	avformat.RegisterAll()
15 | 	avformat.NetworkInit()
16 | }
17 | 
18 | func OpenStream(uri string, packetChan chan tspacket) {
19 | 	formatCtx, code := avformat.OpenInput(uri, nil, nil)
20 | 	if !code.Ok() {
21 | 		log.Printf("OpenInput(%s): %v", uri, code.Error())
22 | 		return
23 | 	}
24 | 	defer formatCtx.Close()
25 | 
26 | 	//formatCtx.FindStreamInfo(nil)
27 | 
28 | 	videoStreamIndex, err := findFirstVideoStream(formatCtx)
29 | 	if err != nil {
30 | 		log.Printf("findFirstVideoStream: %v", err)
31 | 		return
32 | 	}
33 | 	log.Printf("VideoStream Index: %v", videoStreamIndex)
34 | 
35 | 	codec := avcodec.FindDecoder(formatCtx.Streams()[videoStreamIndex].CodecPar().CodecID())
36 | 	codecCtx := avcodec.NewCodecContext(codec)
37 | 
38 | 	frame := avutil.NewFrame()
39 | 	defer frame.Free()
40 | 
41 | 	var packet avcodec.Packet
42 | 	packet.Init()
43 | 
44 | 	for {
45 | 		err := formatCtx.ReadFrame(&packet)
46 | 		if err.IsOneOf(avutil.AVERROR_EOF()) {
47 | 			break
48 | 		}
49 | 
50 | 		// For now we only care about video packets.
51 | 		if packet.StreamIndex() != videoStreamIndex {
52 | 			packet.Unref()
53 | 			continue
54 | 		}
55 | 
56 | 		// The browser does its own decoding, so this server-side decoder is
57 | 		// vestigial; the packet is pushed at it and the result is ignored.
58 | 		codecCtx.SendPacket(&packet)
59 | 
60 | 		// Drop packets if the websocket writer falls behind; bounding the
61 | 		// queue at 5 keeps latency low at the cost of occasional glitches.
62 | 		if len(packetChan) < 5 {
63 | 			packetChan <- tspacket{
64 | 				data:      packet.GetData(),
65 | 				pts:       packet.Pts(),
66 | 				dts:       packet.Dts(),
67 | 				timeStamp: time.Now().UnixNano(),
68 | 			}
69 | 		}
70 | 		packet.Unref()
71 | 	}
72 | }
73 | 
74 | func findFirstVideoStream(ctx *avformat.FormatContext) (int, error) {
75 | 	for i, s := range ctx.Streams() {
76 | 		if s.CodecPar().CodecType() == avutil.AVMEDIA_TYPE_VIDEO {
77 | 			return i, nil
78 | 		}
79 | 	}
80 | 
81 | 	return -1, errors.New("could not find video stream")
82 | }
83 | 
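Note: OpenStream blocks while it pumps TS packets into the channel, so callers run it on a goroutine and drain the channel themselves. A minimal same-package sketch of that wiring (the URI here is the default stream address from scripts/generatevideo.sh; server.go below does the equivalent with a websocket on the consuming end):

    pktChan := make(chan tspacket, 10)
    go OpenStream("udp://234.5.5.5:8209", pktChan)
    for pkt := range pktChan {
        // Each tspacket carries the raw TS payload plus pts/dts and an
        // arrival timestamp taken when the packet was read.
        log.Printf("ts packet: %d bytes, pts=%d", len(pkt.data), pkt.pts)
    }
    // OpenStream never closes pktChan, so this loop only exits with the process.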
--------------------------------------------------------------------------------
/packetrecevier/index.html:
--------------------------------------------------------------------------------
(The HTML markup was lost in extraction; only blank numbered lines 1-17 and
69-70 survive, so the page's contents are not recoverable here.)
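The websocket contract the page relies on is small: connect to /ws with the stream URI in a uri query parameter, and the server pushes each TS packet back as one binary message. Below is a hedged sketch of exercising that endpoint from Go, using gorilla/websocket (already a server dependency); the host and port match the ListenAndServe call in packetsender/server.go that follows:

    package main

    import (
        "log"
        "net/url"

        "github.com/gorilla/websocket"
    )

    func main() {
        u := url.URL{
            Scheme:   "ws",
            Host:     "localhost:8787",
            Path:     "/ws",
            RawQuery: "uri=" + url.QueryEscape("udp://234.5.5.5:8209"),
        }
        conn, _, err := websocket.DefaultDialer.Dial(u.String(), nil)
        if err != nil {
            log.Fatal(err)
        }
        defer conn.Close()
        for {
            // The server sends each TS packet as one binary websocket message.
            _, data, err := conn.ReadMessage()
            if err != nil {
                log.Fatal(err)
            }
            log.Printf("received %d bytes of TS data", len(data))
        }
    }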
--------------------------------------------------------------------------------
/packetsender/server.go:
--------------------------------------------------------------------------------
1 | package packetsender
2 | 
3 | import (
4 | 	"log"
5 | 	"net/http"
6 | 	"os"
7 | 
8 | 	"github.com/gorilla/websocket"
9 | )
10 | 
11 | type VideoRoom struct {
12 | 	uri       string
13 | 	tsPackets chan tspacket
14 | }
15 | 
16 | type client struct {
17 | 	conn *websocket.Conn
18 | }
19 | 
20 | type tspacket struct {
21 | 	pts       int64
22 | 	dts       int64
23 | 	timeStamp int64
24 | 	data      []byte
25 | }
26 | 
27 | // Static files are served from the checkout; ROOTDIR is exported by dev/dev.env.
28 | var webroot = os.Getenv("ROOTDIR") + "/packetrecevier"
29 | 
30 | var upgrader = websocket.Upgrader{
31 | 	ReadBufferSize:  1024,
32 | 	WriteBufferSize: 1024,
33 | }
34 | 
35 | func startVideo(room *VideoRoom) {
36 | 	log.Printf("starting Video")
37 | 	OpenStream(room.uri, room.tsPackets)
38 | }
39 | 
40 | func serveWs(w http.ResponseWriter, r *http.Request) {
41 | 	uri := r.URL.Query().Get("uri")
42 | 	log.Println(uri)
43 | 	// if uri == "" {
44 | 	// 	return
45 | 	// }
46 | 	conn, err := upgrader.Upgrade(w, r, nil)
47 | 	if err != nil {
48 | 		log.Printf("Error %v", err)
49 | 		return
50 | 	}
51 | 
52 | 	pktChan := make(chan tspacket, 10)
53 | 
54 | 	vr := &VideoRoom{
55 | 		uri:       uri,
56 | 		tsPackets: pktChan,
57 | 	}
58 | 	go startVideo(vr)
59 | 
60 | 	c := &client{
61 | 		conn: conn,
62 | 	}
63 | 
64 | 	messageWriter(c, vr)
65 | 	log.Println("Closed")
66 | }
67 | 
68 | func Serve() {
69 | 	http.HandleFunc("/ws", serveWs)
70 | 	http.HandleFunc("/", home)
71 | 	err := http.ListenAndServe("0.0.0.0:8787", nil)
72 | 	if err != nil {
73 | 		log.Fatal("ListenAndServe: ", err)
74 | 	}
75 | }
76 | 
77 | // messageWriter copies TS packets from the room's channel onto the client's
78 | // websocket until the channel closes or a write fails.
79 | func messageWriter(c *client, v *VideoRoom) {
80 | 	for pkt := range v.tsPackets {
81 | 		err := c.conn.WriteMessage(websocket.BinaryMessage, pkt.data)
82 | 		if err != nil {
83 | 			log.Printf("Err Sending Websocket: %v", err)
84 | 			return
85 | 		}
86 | 	}
87 | }
88 | 
89 | func home(w http.ResponseWriter, r *http.Request) {
90 | 	log.Printf("Req: %v", webroot+r.URL.Path)
91 | 	http.ServeFile(w, r, webroot+r.URL.Path)
92 | }
93 | 

--------------------------------------------------------------------------------
/packetrecevier/worker.js:
--------------------------------------------------------------------------------
1 | importScripts('ffmpeg-h264.js');
2 | 
3 | initDecoder();
4 | 
5 | onmessage = function(e) {
6 |     console.log(e.data);
7 |     if (e.data.topic) {
8 |         switch (e.data.topic) {
9 |             case "openUri":
10 |                 console.log("Starting Decode on Uri: " + e.data.data);
11 |                 startDecode(e.data.data);
12 |                 break;
13 |         }
14 |     }
15 | };
16 | 
17 | function startDecode(uri) {
18 |     initWebsocket(uri);
19 | }
20 | 
21 | function initDecoder() {
22 |     Module.ccall('avcodec_register_all');
23 |     // Find the H.264 decoder in the Emscripten-compiled FFmpeg build.
24 |     codec = Module.ccall('avcodec_find_decoder_by_name', 'number', ['string'], ["h264"]);
25 |     if (codec === 0)
26 |         console.error("Could not find H264 codec");  // alert() is unavailable in workers
27 | 
28 |     ctx = Module.ccall('avcodec_alloc_context3', 'number', ['number'], [codec]);
29 |     ret = Module.ccall('avcodec_open2', 'number', ['number', 'number', 'number'], [ctx, codec, 0]);
30 |     if (ret < 0)
31 |         console.error("Could not open codec");
32 | 
33 |     // Allocate an AVPacket plus a fixed buffer for incoming TS payloads.
34 |     // The raw struct offsets below (+24 = data pointer, +28 = size) match
35 |     // AVPacket's layout in this 32-bit Emscripten build.
36 |     pkt = Module._malloc(96);
37 |     Module.ccall('av_init_packet', 'null', ['number'], [pkt]);
38 |     pktData = Module._malloc(1024 * 3000);
39 |     Module.setValue(pkt + 24, pktData, '*');
40 |     // Allocate a video frame (avcodec_alloc_frame is the old API name still
41 |     // exported by the bundled FFmpeg build).
42 |     frame = Module.ccall('avcodec_alloc_frame', 'number');
43 |     if (!frame)
44 |         console.error("Could not allocate video frame");
45 | 
46 |     // Wrap the decode entry points. new_packet is currently unused.
47 |     new_packet = Module.cwrap('av_packet_from_data', 'number', ['number', 'number', 'number']);
48 |     decode_frame = Module.cwrap('avcodec_decode_video2', 'number', ['number', 'number', 'number', 'number']);
49 |     got_frame = Module._malloc(4);
50 | }
51 | 
52 | function initWebsocket(uri) {
53 |     ws = new WebSocket(uri);
54 |     ws.binaryType = 'arraybuffer';
55 |     console.log("Opening ws " + uri);
56 |     ws.onmessage = decodePkt;
57 | }
58 | 
59 | function decodePkt(event) {
60 |     var decodedFrame = decode(event.data);
61 |     if (decodedFrame) {
62 |         // Transfer the Y/U/V plane buffers to the page without copying.
63 |         self.postMessage(decodedFrame, [
64 |             decodedFrame.frameYData.buffer,
65 |             decodedFrame.frameUData.buffer,
66 |             decodedFrame.frameVData.buffer
67 |         ]);
68 |     }
69 | }
70 | 
71 | function decode(data) {
72 |     var buffer = new Uint8Array(data);
73 |     var dataSize = data.byteLength;
74 |     Module.setValue(pkt + 28, dataSize, 'i32');  // AVPacket.size
75 |     Module.writeArrayToMemory(buffer, pktData);
76 | 
77 |     var len = decode_frame(ctx, frame, got_frame, pkt);
78 | 
79 |     if (len < 0) {
80 |         console.log("Error while decoding frame");
81 |         return;
82 |     }
83 | 
84 |     if (Module.getValue(got_frame, 'i8') === 0) {
85 |         console.log("No frame");
86 |         return;
87 |     }
88 | 
89 |     // Raw AVFrame offsets for this 32-bit build: data[0..2] at +0/+4/+8,
90 |     // width at +68, height at +72.
91 |     var decoded_frame = frame;
92 |     var frame_width = Module.getValue(decoded_frame + 68, 'i32');
93 |     var frame_height = Module.getValue(decoded_frame + 72, 'i32');
94 | 
95 |     var frameYDataPtr = Module.getValue(decoded_frame, '*');
96 |     var frameUDataPtr = Module.getValue(decoded_frame + 4, '*');
97 |     var frameVDataPtr = Module.getValue(decoded_frame + 8, '*');
98 | 
99 |     // Copy the Y plane (width*height bytes) and the quarter-size U/V planes
100 |     // out of the Emscripten heap for transfer to the page.
101 |     return {
102 |         frame_width: frame_width,
103 |         frame_height: frame_height,
104 |         frameYDataPtr: frameYDataPtr,
105 |         frameUDataPtr: frameUDataPtr,
106 |         frameVDataPtr: frameVDataPtr,
107 |         frameYData: new Uint8Array(Module.HEAPU8.buffer.slice(frameYDataPtr, frameYDataPtr + frame_width * frame_height)),
108 |         frameUData: new Uint8Array(Module.HEAPU8.buffer.slice(frameUDataPtr, frameUDataPtr + frame_width / 2 * frame_height / 2)),
109 |         frameVData: new Uint8Array(Module.HEAPU8.buffer.slice(frameVDataPtr, frameVDataPtr + frame_width / 2 * frame_height / 2))
110 |     };
111 | }
112 | 

--------------------------------------------------------------------------------
/packetrecevier/YUVCanvas.js:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) 2015 Paperspace Co. All rights reserved.
3 | //
4 | // Permission is hereby granted, free of charge, to any person obtaining a copy
5 | // of this software and associated documentation files (the "Software"), to
6 | // deal in the Software without restriction, including without limitation the
7 | // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
8 | // sell copies of the Software, and to permit persons to whom the Software is
9 | // furnished to do so, subject to the following conditions:
10 | //
11 | // The above copyright notice and this permission notice shall be included in
12 | // all copies or substantial portions of the Software.
13 | //
14 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE 17 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 18 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 19 | // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 20 | // IN THE SOFTWARE. 21 | // 22 | 23 | 24 | // universal module definition 25 | (function(root, factory) { 26 | if (typeof define === 'function' && define.amd) { 27 | // AMD. Register as an anonymous module. 28 | define([], factory); 29 | } else if (typeof exports === 'object') { 30 | // Node. Does not work with strict CommonJS, but 31 | // only CommonJS-like environments that support module.exports, 32 | // like Node. 33 | module.exports = factory(); 34 | } else { 35 | // Browser globals (root is window) 36 | root.YUVCanvas = factory(); 37 | } 38 | }(this, function() { 39 | 40 | 41 | /** 42 | * This class can be used to render output pictures from an H264bsdDecoder to a canvas element. 43 | * If available the content is rendered using WebGL. 44 | */ 45 | function YUVCanvas(parOptions) { 46 | 47 | parOptions = parOptions || {}; 48 | 49 | this.canvasElement = parOptions.canvas || document.createElement("canvas"); 50 | this.contextOptions = parOptions.contextOptions; 51 | 52 | this.type = parOptions.type || "yuv420"; 53 | 54 | this.customYUV444 = parOptions.customYUV444; 55 | 56 | this.conversionType = parOptions.conversionType || "rec601"; 57 | 58 | this.width = parOptions.width || 640; 59 | this.height = parOptions.height || 320; 60 | 61 | this.animationTime = parOptions.animationTime || 0; 62 | 63 | this.canvasElement.width = this.width; 64 | this.canvasElement.height = this.height; 65 | 66 | this.initContextGL(); 67 | 68 | if (this.contextGL) { 69 | this.initProgram(); 70 | this.initBuffers(); 71 | this.initTextures(); 72 | } 73 | 74 | 75 | 76 | 77 | /** 78 | * Draw the next output picture using WebGL 79 | */ 80 | if (this.type === "yuv420") { 81 | this.drawNextOuptutPictureGL = function(par) { 82 | var gl = this.contextGL; 83 | var texturePosBuffer = this.texturePosBuffer; 84 | var uTexturePosBuffer = this.uTexturePosBuffer; 85 | var vTexturePosBuffer = this.vTexturePosBuffer; 86 | 87 | var yTextureRef = this.yTextureRef; 88 | var uTextureRef = this.uTextureRef; 89 | var vTextureRef = this.vTextureRef; 90 | 91 | var yData = par.yData; 92 | var uData = par.uData; 93 | var vData = par.vData; 94 | 95 | var width = this.width; 96 | var height = this.height; 97 | 98 | var yDataPerRow = par.yDataPerRow || width; 99 | var yRowCnt = par.yRowCnt || height; 100 | 101 | var uDataPerRow = par.uDataPerRow || (width / 2); 102 | var uRowCnt = par.uRowCnt || (height / 2); 103 | 104 | var vDataPerRow = par.vDataPerRow || uDataPerRow; 105 | var vRowCnt = par.vRowCnt || uRowCnt; 106 | 107 | gl.viewport(0, 0, width, height); 108 | 109 | var tTop = 0; 110 | var tLeft = 0; 111 | var tBottom = 1.0; //height / yRowCnt; 112 | var tRight = 1.0; //width / yDataPerRow; 113 | var texturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]); 114 | 115 | gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer); 116 | gl.bufferData(gl.ARRAY_BUFFER, texturePosValues, gl.DYNAMIC_DRAW); 117 | 118 | if (this.customYUV444) { 119 | tBottom = height / uRowCnt; 120 | tRight = width / uDataPerRow; 121 | } else { 122 | tBottom = 1.0; //(height / 2) / uRowCnt; 123 | tRight = 1.0; //(width / 2) / uDataPerRow; 124 | }; 125 | var uTexturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]); 126 | 127 
| gl.bindBuffer(gl.ARRAY_BUFFER, uTexturePosBuffer); 128 | gl.bufferData(gl.ARRAY_BUFFER, uTexturePosValues, gl.DYNAMIC_DRAW); 129 | 130 | 131 | if (this.customYUV444) { 132 | tBottom = height / vRowCnt; 133 | tRight = width / vDataPerRow; 134 | } else { 135 | tBottom = 1.0; //(height / 2) / vRowCnt; 136 | tRight = 1.0; //(width / 2) / vDataPerRow; 137 | }; 138 | var vTexturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]); 139 | 140 | gl.bindBuffer(gl.ARRAY_BUFFER, vTexturePosBuffer); 141 | gl.bufferData(gl.ARRAY_BUFFER, vTexturePosValues, gl.DYNAMIC_DRAW); 142 | 143 | 144 | gl.activeTexture(gl.TEXTURE0); 145 | gl.bindTexture(gl.TEXTURE_2D, yTextureRef); 146 | gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, yDataPerRow, yRowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, yData); 147 | 148 | gl.activeTexture(gl.TEXTURE1); 149 | gl.bindTexture(gl.TEXTURE_2D, uTextureRef); 150 | gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, uDataPerRow, uRowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, uData); 151 | 152 | gl.activeTexture(gl.TEXTURE2); 153 | gl.bindTexture(gl.TEXTURE_2D, vTextureRef); 154 | gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, vDataPerRow, vRowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, vData); 155 | 156 | gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); 157 | }; 158 | 159 | } else if (this.type === "yuv422") { 160 | this.drawNextOuptutPictureGL = function(par) { 161 | var gl = this.contextGL; 162 | var texturePosBuffer = this.texturePosBuffer; 163 | 164 | var textureRef = this.textureRef; 165 | 166 | var data = par.data; 167 | 168 | var width = this.width; 169 | var height = this.height; 170 | 171 | var dataPerRow = par.dataPerRow || (width * 2); 172 | var rowCnt = par.rowCnt || height; 173 | 174 | gl.viewport(0, 0, width, height); 175 | 176 | var tTop = 0; 177 | var tLeft = 0; 178 | var tBottom = height / rowCnt; 179 | var tRight = width / (dataPerRow / 2); 180 | var texturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]); 181 | 182 | gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer); 183 | gl.bufferData(gl.ARRAY_BUFFER, texturePosValues, gl.DYNAMIC_DRAW); 184 | 185 | gl.uniform2f(gl.getUniformLocation(this.shaderProgram, 'resolution'), dataPerRow, height); 186 | 187 | gl.activeTexture(gl.TEXTURE0); 188 | gl.bindTexture(gl.TEXTURE_2D, textureRef); 189 | gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, dataPerRow, rowCnt, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, data); 190 | 191 | gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); 192 | }; 193 | }; 194 | 195 | }; 196 | 197 | /** 198 | * Returns true if the canvas supports WebGL 199 | */ 200 | YUVCanvas.prototype.isWebGL = function() { 201 | return this.contextGL; 202 | }; 203 | 204 | /** 205 | * Create the GL context from the canvas element 206 | */ 207 | YUVCanvas.prototype.initContextGL = function() { 208 | var canvas = this.canvasElement; 209 | var gl = null; 210 | 211 | var validContextNames = ["webgl", "experimental-webgl", "moz-webgl", "webkit-3d"]; 212 | var nameIndex = 0; 213 | 214 | while (!gl && nameIndex < validContextNames.length) { 215 | var contextName = validContextNames[nameIndex]; 216 | 217 | try { 218 | if (this.contextOptions) { 219 | gl = canvas.getContext(contextName, this.contextOptions); 220 | } else { 221 | gl = canvas.getContext(contextName); 222 | } 223 | } catch (e) { 224 | gl = null; 225 | } 226 | 227 | if (!gl || typeof gl.getParameter !== "function") { 228 | gl = null; 229 | } 230 | 231 | ++nameIndex; 232 | } 233 | 234 | this.contextGL = gl; 235 | }; 236 | 237 | /** 
238 | * Initialize GL shader program 239 | */ 240 | YUVCanvas.prototype.initProgram = function() { 241 | var gl = this.contextGL; 242 | 243 | // vertex shader is the same for all types 244 | var vertexShaderScript; 245 | var fragmentShaderScript; 246 | 247 | if (this.type === "yuv420") { 248 | 249 | vertexShaderScript = [ 250 | 'attribute vec4 vertexPos;', 251 | 'attribute vec4 texturePos;', 252 | 'attribute vec4 uTexturePos;', 253 | 'attribute vec4 vTexturePos;', 254 | 'varying vec2 textureCoord;', 255 | 'varying vec2 uTextureCoord;', 256 | 'varying vec2 vTextureCoord;', 257 | 258 | 'void main()', 259 | '{', 260 | ' gl_Position = vertexPos;', 261 | ' textureCoord = texturePos.xy;', 262 | ' uTextureCoord = uTexturePos.xy;', 263 | ' vTextureCoord = vTexturePos.xy;', 264 | '}' 265 | ].join('\n'); 266 | 267 | fragmentShaderScript = [ 268 | 'precision highp float;', 269 | 'varying highp vec2 textureCoord;', 270 | 'varying highp vec2 uTextureCoord;', 271 | 'varying highp vec2 vTextureCoord;', 272 | 'uniform sampler2D ySampler;', 273 | 'uniform sampler2D uSampler;', 274 | 'uniform sampler2D vSampler;', 275 | 'uniform mat4 YUV2RGB;', 276 | 277 | 'void main(void) {', 278 | ' highp float y = texture2D(ySampler, textureCoord).r;', 279 | ' highp float u = texture2D(uSampler, uTextureCoord).r;', 280 | ' highp float v = texture2D(vSampler, vTextureCoord).r;', 281 | ' gl_FragColor = vec4(y, u, v, 1) * YUV2RGB;', 282 | '}' 283 | ].join('\n'); 284 | 285 | } else if (this.type === "yuv422") { 286 | vertexShaderScript = [ 287 | 'attribute vec4 vertexPos;', 288 | 'attribute vec4 texturePos;', 289 | 'varying vec2 textureCoord;', 290 | 291 | 'void main()', 292 | '{', 293 | ' gl_Position = vertexPos;', 294 | ' textureCoord = texturePos.xy;', 295 | '}' 296 | ].join('\n'); 297 | 298 | fragmentShaderScript = [ 299 | 'precision highp float;', 300 | 'varying highp vec2 textureCoord;', 301 | 'uniform sampler2D sampler;', 302 | 'uniform highp vec2 resolution;', 303 | 'uniform mat4 YUV2RGB;', 304 | 305 | 'void main(void) {', 306 | 307 | ' highp float texPixX = 1.0 / resolution.x;', 308 | ' highp float logPixX = 2.0 / resolution.x;', // half the resolution of the texture 309 | ' highp float logHalfPixX = 4.0 / resolution.x;', // half of the logical resolution so every 4th pixel 310 | ' highp float steps = floor(textureCoord.x / logPixX);', 311 | ' highp float uvSteps = floor(textureCoord.x / logHalfPixX);', 312 | ' highp float y = texture2D(sampler, vec2((logPixX * steps) + texPixX, textureCoord.y)).r;', 313 | ' highp float u = texture2D(sampler, vec2((logHalfPixX * uvSteps), textureCoord.y)).r;', 314 | ' highp float v = texture2D(sampler, vec2((logHalfPixX * uvSteps) + texPixX + texPixX, textureCoord.y)).r;', 315 | 316 | //' highp float y = texture2D(sampler, textureCoord).r;', 317 | //' gl_FragColor = vec4(y, u, v, 1) * YUV2RGB;', 318 | ' gl_FragColor = vec4(y, u, v, 1.0) * YUV2RGB;', 319 | '}' 320 | ].join('\n'); 321 | }; 322 | 323 | var YUV2RGB = []; 324 | 325 | if (this.conversionType == "rec709") { 326 | // ITU-T Rec. 709 327 | YUV2RGB = [ 328 | 1.16438, 0.00000, 1.79274, -0.97295, 329 | 1.16438, -0.21325, -0.53291, 0.30148, 330 | 1.16438, 2.11240, 0.00000, -1.13340, 331 | 0, 0, 0, 1, 332 | ]; 333 | } else { 334 | // assume ITU-T Rec. 
601 335 | YUV2RGB = [ 336 | 1.16438, 0.00000, 1.59603, -0.87079, 337 | 1.16438, -0.39176, -0.81297, 0.52959, 338 | 1.16438, 2.01723, 0.00000, -1.08139, 339 | 0, 0, 0, 1 340 | ]; 341 | }; 342 | 343 | var vertexShader = gl.createShader(gl.VERTEX_SHADER); 344 | gl.shaderSource(vertexShader, vertexShaderScript); 345 | gl.compileShader(vertexShader); 346 | if (!gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS)) { 347 | console.log('Vertex shader failed to compile: ' + gl.getShaderInfoLog(vertexShader)); 348 | } 349 | 350 | var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER); 351 | gl.shaderSource(fragmentShader, fragmentShaderScript); 352 | gl.compileShader(fragmentShader); 353 | if (!gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS)) { 354 | console.log('Fragment shader failed to compile: ' + gl.getShaderInfoLog(fragmentShader)); 355 | } 356 | 357 | var program = gl.createProgram(); 358 | gl.attachShader(program, vertexShader); 359 | gl.attachShader(program, fragmentShader); 360 | gl.linkProgram(program); 361 | if (!gl.getProgramParameter(program, gl.LINK_STATUS)) { 362 | console.log('Program failed to compile: ' + gl.getProgramInfoLog(program)); 363 | } 364 | 365 | gl.useProgram(program); 366 | 367 | var YUV2RGBRef = gl.getUniformLocation(program, 'YUV2RGB'); 368 | gl.uniformMatrix4fv(YUV2RGBRef, false, YUV2RGB); 369 | 370 | this.shaderProgram = program; 371 | }; 372 | 373 | /** 374 | * Initialize vertex buffers and attach to shader program 375 | */ 376 | YUVCanvas.prototype.initBuffers = function() { 377 | var gl = this.contextGL; 378 | var program = this.shaderProgram; 379 | 380 | var vertexPosBuffer = gl.createBuffer(); 381 | gl.bindBuffer(gl.ARRAY_BUFFER, vertexPosBuffer); 382 | gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 1, -1, 1, 1, -1, -1, -1]), gl.STATIC_DRAW); 383 | 384 | var vertexPosRef = gl.getAttribLocation(program, 'vertexPos'); 385 | gl.enableVertexAttribArray(vertexPosRef); 386 | gl.vertexAttribPointer(vertexPosRef, 2, gl.FLOAT, false, 0, 0); 387 | 388 | if (this.animationTime) { 389 | 390 | var animationTime = this.animationTime; 391 | var timePassed = 0; 392 | var stepTime = 15; 393 | 394 | var aniFun = function() { 395 | 396 | timePassed += stepTime; 397 | var mul = (1 * timePassed) / animationTime; 398 | 399 | if (timePassed >= animationTime) { 400 | mul = 1; 401 | } else { 402 | setTimeout(aniFun, stepTime); 403 | }; 404 | 405 | var neg = -1 * mul; 406 | var pos = 1 * mul; 407 | 408 | var vertexPosBuffer = gl.createBuffer(); 409 | gl.bindBuffer(gl.ARRAY_BUFFER, vertexPosBuffer); 410 | gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([pos, pos, neg, pos, pos, neg, neg, neg]), gl.STATIC_DRAW); 411 | 412 | var vertexPosRef = gl.getAttribLocation(program, 'vertexPos'); 413 | gl.enableVertexAttribArray(vertexPosRef); 414 | gl.vertexAttribPointer(vertexPosRef, 2, gl.FLOAT, false, 0, 0); 415 | 416 | try { 417 | gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); 418 | } catch (e) {}; 419 | 420 | }; 421 | aniFun(); 422 | 423 | }; 424 | 425 | 426 | 427 | var texturePosBuffer = gl.createBuffer(); 428 | gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer); 429 | gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW); 430 | 431 | var texturePosRef = gl.getAttribLocation(program, 'texturePos'); 432 | gl.enableVertexAttribArray(texturePosRef); 433 | gl.vertexAttribPointer(texturePosRef, 2, gl.FLOAT, false, 0, 0); 434 | 435 | this.texturePosBuffer = texturePosBuffer; 436 | 437 | if (this.type === "yuv420") { 438 | var uTexturePosBuffer = 
gl.createBuffer(); 439 | gl.bindBuffer(gl.ARRAY_BUFFER, uTexturePosBuffer); 440 | gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW); 441 | 442 | var uTexturePosRef = gl.getAttribLocation(program, 'uTexturePos'); 443 | gl.enableVertexAttribArray(uTexturePosRef); 444 | gl.vertexAttribPointer(uTexturePosRef, 2, gl.FLOAT, false, 0, 0); 445 | 446 | this.uTexturePosBuffer = uTexturePosBuffer; 447 | 448 | 449 | var vTexturePosBuffer = gl.createBuffer(); 450 | gl.bindBuffer(gl.ARRAY_BUFFER, vTexturePosBuffer); 451 | gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1, 0, 0, 0, 1, 1, 0, 1]), gl.STATIC_DRAW); 452 | 453 | var vTexturePosRef = gl.getAttribLocation(program, 'vTexturePos'); 454 | gl.enableVertexAttribArray(vTexturePosRef); 455 | gl.vertexAttribPointer(vTexturePosRef, 2, gl.FLOAT, false, 0, 0); 456 | 457 | this.vTexturePosBuffer = vTexturePosBuffer; 458 | }; 459 | 460 | }; 461 | 462 | /** 463 | * Initialize GL textures and attach to shader program 464 | */ 465 | YUVCanvas.prototype.initTextures = function() { 466 | var gl = this.contextGL; 467 | var program = this.shaderProgram; 468 | 469 | if (this.type === "yuv420") { 470 | 471 | var yTextureRef = this.initTexture(); 472 | var ySamplerRef = gl.getUniformLocation(program, 'ySampler'); 473 | gl.uniform1i(ySamplerRef, 0); 474 | this.yTextureRef = yTextureRef; 475 | 476 | var uTextureRef = this.initTexture(); 477 | var uSamplerRef = gl.getUniformLocation(program, 'uSampler'); 478 | gl.uniform1i(uSamplerRef, 1); 479 | this.uTextureRef = uTextureRef; 480 | 481 | var vTextureRef = this.initTexture(); 482 | var vSamplerRef = gl.getUniformLocation(program, 'vSampler'); 483 | gl.uniform1i(vSamplerRef, 2); 484 | this.vTextureRef = vTextureRef; 485 | 486 | } else if (this.type === "yuv422") { 487 | // only one texture for 422 488 | var textureRef = this.initTexture(); 489 | var samplerRef = gl.getUniformLocation(program, 'sampler'); 490 | gl.uniform1i(samplerRef, 0); 491 | this.textureRef = textureRef; 492 | 493 | }; 494 | }; 495 | 496 | /** 497 | * Create and configure a single texture 498 | */ 499 | YUVCanvas.prototype.initTexture = function() { 500 | var gl = this.contextGL; 501 | 502 | var textureRef = gl.createTexture(); 503 | gl.bindTexture(gl.TEXTURE_2D, textureRef); 504 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST); 505 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST); 506 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); 507 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); 508 | gl.bindTexture(gl.TEXTURE_2D, null); 509 | 510 | return textureRef; 511 | }; 512 | 513 | /** 514 | * Draw picture data to the canvas. 515 | * If this object is using WebGL, the data must be an I420 formatted ArrayBuffer, 516 | * Otherwise, data must be an RGBA formatted ArrayBuffer. 517 | */ 518 | YUVCanvas.prototype.drawNextOutputPicture = function(width, height, croppingParams, data) { 519 | var gl = this.contextGL; 520 | 521 | if (gl) { 522 | this.drawNextOuptutPictureGL(width, height, croppingParams, data); 523 | } else { 524 | this.drawNextOuptutPictureRGBA(width, height, croppingParams, data); 525 | } 526 | }; 527 | 528 | 529 | 530 | /** 531 | * Draw next output picture using ARGB data on a 2d canvas. 
532 |    */
533 |   YUVCanvas.prototype.drawNextOuptutPictureRGBA = function(width, height, croppingParams, data) {
534 |     var canvas = this.canvasElement;
535 | 
536 |     // croppingParams is optional; when omitted the frame is drawn uncropped
537 | 
538 |     var argbData = data;
539 | 
540 |     var ctx = canvas.getContext('2d');
541 |     var imageData = ctx.getImageData(0, 0, width, height);
542 |     imageData.data.set(argbData);
543 | 
544 |     if (!croppingParams) {
545 |       ctx.putImageData(imageData, 0, 0);
546 |     } else {
547 |       ctx.putImageData(imageData, -croppingParams.left, -croppingParams.top, 0, 0, croppingParams.width, croppingParams.height);
548 |     }
549 |   };
550 | 
551 |   return YUVCanvas;
552 | 
553 | }));
554 | 

--------------------------------------------------------------------------------