├── .gitignore ├── .jshintrc ├── .npmignore ├── CHANGELOG.md ├── Gruntfile.js ├── README.md ├── buildPlayer.py ├── debug └── BufferDisplay.js ├── demo ├── dash.all.debug.js └── example_dash.html ├── package.json └── src ├── as3 └── com │ ├── dash │ ├── boxes │ │ ├── Box.as │ │ ├── FLVTag.as │ │ ├── FullBox.as │ │ ├── HandlerReferenceBox.as │ │ ├── MediaBox.as │ │ ├── MediaHeaderBox.as │ │ ├── MediaInformationBox.as │ │ ├── MovieBox.as │ │ ├── MovieExtendsBox.as │ │ ├── MovieFragmentBox.as │ │ ├── Muxer.as │ │ ├── NalUnit.as │ │ ├── SampleDescriptionBox.as │ │ ├── SampleEntry.as │ │ ├── SampleTableBox.as │ │ ├── TrackBox.as │ │ ├── TrackExtendsBox.as │ │ ├── TrackFragmentBox.as │ │ ├── TrackFragmentHeaderBox.as │ │ ├── TrackFragmentRunBox.as │ │ └── TrackHeaderBox.as │ ├── events │ │ └── MessageEvent.as │ ├── handlers │ │ ├── AudioSegmentHandler.as │ │ ├── IndexSegmentHandler.as │ │ ├── InitializationAudioSegmentHandler.as │ │ ├── InitializationSegmentHandler.as │ │ ├── InitializationVideoSegmentHandler.as │ │ ├── MediaSegmentHandler.as │ │ ├── SegmentHandler.as │ │ └── VideoSegmentHandler.as │ ├── loaders │ │ └── FragmentLoader.as │ └── utils │ │ ├── BandwidthMonitor.as │ │ ├── Base64.as │ │ ├── Bytes.as │ │ ├── Console.as │ │ ├── Manifest.as │ │ └── SmoothMonitor.as │ └── streamroot │ ├── MSEPolyfill.as │ ├── Main.as │ ├── NetStreamWrapper.as │ ├── Skin.as │ ├── buffer │ ├── Segment.as │ ├── SourceBuffer.as │ ├── StreamBuffer.as │ └── StreamBufferController.as │ ├── events │ └── PlaybackEvent.as │ ├── transcoder │ ├── TranscodeWorker.as │ └── Transcoder.as │ └── util │ ├── Conf.as │ └── TrackTypeHelper.as └── js ├── Main.js ├── lib ├── B64Encoder.js ├── B64MainThread.js ├── B64Worker.js ├── MediaSourceFlash.js ├── SegmentAppender.js ├── SourceBuffer.js ├── VideoExtension.js └── utils │ └── CustomTimeRange.js └── test └── utils.customTimeRange.js /.gitignore: -------------------------------------------------------------------------------- 1 | **/*.swf 2 | **/*.swc 3 | node_modules/ 4 | npm-debug.log 5 | build/ 6 | -------------------------------------------------------------------------------- /.jshintrc: -------------------------------------------------------------------------------- 1 | { 2 | "undef": true, 3 | "unused": true, 4 | "curly": true, 5 | "indent": 4, 6 | "camelcase":false, 7 | "forin":false, 8 | "newcap":true, 9 | "quotmark":false, 10 | "typed":true, 11 | "worker":true, 12 | "browser":true, 13 | "browserify":true, 14 | "mocha":true, 15 | "devel":true, 16 | "predef": [], 17 | "esnext": true 18 | } 19 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | com/streamroot/TranscodeWorker.swf 2 | node_modules/ 3 | /grunt-aws.json 4 | /dist/ -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | All notable changes to this project will be documented in this file. 3 | This project adheres to [Semantic Versioning](http://semver.org/). 4 | This changelog's template come from [keepachangelog.com](http://keepachangelog.com/). When editing this document, please follow the convention specified there. 
5 | 
6 | 
7 | ## [Unreleased]
8 | ### Changed
9 | 
--------------------------------------------------------------------------------
/Gruntfile.js:
--------------------------------------------------------------------------------
1 | module.exports = function(grunt) {
2 |     require("matchdep").filterDev("grunt-*").forEach(grunt.loadNpmTasks);
3 | 
4 |     grunt.registerTask('default', ['browserify']);
5 | 
6 |     grunt.initConfig({
7 |         pkg: grunt.file.readJSON('package.json'),
8 |         browserify: {
9 |             main: {
10 |                 src: 'src/js/Main.js',
11 |                 dest: 'build/fMSE.js',
12 |                 options: {
13 |                     transform: ['babelify'],
14 |                     browserifyOptions: {
15 |                         standalone: 'fMSE.init',
16 |                         debug: true
17 |                     },
18 |                     watch: true,
19 |                     keepAlive: true
20 |                 }
21 |             },
22 |             debug: {
23 |                 src: 'debug/BufferDisplay.js',
24 |                 dest: 'debug/build/BufferDisplay.js',
25 |                 options: {
26 |                     transform: ['babelify'],
27 |                     browserifyOptions: {
28 |                         standalone: 'fMSE.debug.bufferDisplay',
29 |                         debug: true
30 |                     },
31 |                     watch: true,
32 |                     keepAlive: true
33 |                 }
34 |             },
35 |         }
36 |     });
37 | };
38 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ### :warning: This repository is not maintained and has critical known issues (see issues section).
2 | 
3 | # Flash Media Source Extensions polyfill
4 | 
5 | fMSE is a library that emulates MSE in browsers that do not support it.
6 | 
7 | It allows transparent fallback for players supporting modern video formats such as MPEG-DASH or HLS when MSE is not available.
8 | 
9 | Adobe Flash is used to do the actual playback, and communicates with a small JS polyfill that implements the MSE API.
10 | 
11 | ## Building
12 | 
13 | This is a two-step process:
14 | 
15 | ### Building JavaScript
16 | 
17 | Quite simple:
18 | ```
19 | $ npm install
20 | $ grunt
21 | ```
22 | 
23 | This will create `fMSE.js` in the `build` directory.
24 | 
25 | ### Building ActionScript files
26 | 
27 | If you don't have the Flex SDK:
28 | 
29 | 1. Download the [Apache Flex SDK Installer](http://flex.apache.org/installer.html)
30 | 1. Install the latest Flex SDK.
31 | 
32 | Edit `buildPlayer.py` to specify the path to your Flex SDK directory, then run the script:
33 | 
34 | ```
35 | $ python buildPlayer.py
36 | ```
37 | 
38 | You can provide additional options to customize the build. Use
39 | 
40 | ```
41 | $ python buildPlayer.py -h
42 | ```
43 | 
44 | to get the list of supported options. A successful build will create `fMSE.swf` in the `build` directory.
45 | 
46 | ##### NOTE:
47 | 
48 | We're targeting Flash versions 11.4+, so make sure you have playerglobal.swc v11.4 at `$FLEX_PATH/frameworks/libs/player/11.4/playerglobal.swc`
49 | 
50 | ## Example
51 | 
52 | Here is an example of MPEG-DASH playback using [dash.js v2.0](https://github.com/Dash-Industry-Forum/dash.js) and fMSE:
53 | 
54 | ```html
55 | <!-- The example page markup (titled "fMSE Polyfill - dash.js test page") was stripped when this document was exported; see demo/example_dash.html in the repository for the full page. -->
100 | ```
101 | 
102 | This demo page is at `demo/example_dash.html`. You can go to the `demo` directory and run a simple HTTP server (e.g. `python -m SimpleHTTPServer`) to test it in a browser.
103 | 
104 | ## Integration
105 | 
106 | 1. Include fMSE.js in your page
107 | 
108 | ```html
109 | <!-- (snippet stripped in this export; a hedged initialization sketch is given at the end of this README) -->
114 | ```
115 | 
116 | 1. Initialize fMSE
117 | 
118 | ```html
119 | <!-- (snippet stripped in this export; see the sketch at the end of this README) -->
139 | ```
140 | 
141 | ## Requirements & Compatibilities
142 | 
143 | This library requires Adobe Flash Player 11.4 or higher to be installed.
144 | This library also needs [Web Workers support](http://caniuse.com/webworkers), so it might not work correctly in Safari and Edge.
145 | 
146 | fMSE has been tested with the following media libraries:
147 | - [dash.js](https://github.com/Dash-Industry-Forum/dash.js)
148 | 
149 | Media engines we want to provide support for:
150 | - [shaka-player](https://github.com/google/shaka-player)
151 | - [hls.js](https://github.com/dailymotion/hls.js)
152 | 
153 | ## Known issues (PRs appreciated!)
154 | 
155 | - Seeking not working yet
156 | - Muxed streams not supported
157 | - EME is not supported (planning to provide EME support with Flash Access DRM)
158 | 
159 | For more details, see our [issues tracker](https://github.com/streamroot/fmse/issues).
160 | 
161 | ## Contributing
162 | 
163 | fMSE is a free and open source library. It's a work in progress and we appreciate any help you're willing to give. Don't hesitate to open and comment on issues on GitHub, or contact us directly at contact@streamroot.io.
164 | 
165 | ## License
166 | 
167 | This project is licensed under [MPL 2.0](https://www.mozilla.org/en-US/MPL/2.0/).
168 | If you need a special type of license for this project, you can contact us at contact@streamroot.io.
169 | 
170 | ## Credits
171 | 
172 | The project uses [dash.as](https://github.com/castlabs/dashas) for MP4 to FLV transmuxing.
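## Appendix: integration sketch

The integration snippets above were lost when this document was exported, so here is a minimal, hedged sketch of what wiring fMSE up to dash.js could look like. The only fMSE call taken from this repository is `fMSE.init` (the Browserify `standalone` name in `Gruntfile.js`); its argument list, the element id, the script paths, and the manifest URL below are assumptions rather than the documented API — check `src/js/Main.js` for the real signature. The dash.js calls are the standard dash.js v2 bootstrap.

```html
<video id="videoPlayer" controls></video>

<!-- build/fMSE.js exposes window.fMSE.init (Browserify standalone name in Gruntfile.js). -->
<script src="fMSE.js"></script>
<script src="dash.all.debug.js"></script>
<script>
    var video = document.getElementById('videoPlayer');

    // ASSUMPTION: the exact arguments fMSE.init expects are not documented here;
    // passing the video element is a guess, not the library's confirmed API.
    fMSE.init(video);

    // Standard dash.js v2 bootstrap; the manifest URL is a placeholder.
    var player = dashjs.MediaPlayer().create();
    player.initialize(video, 'http://example.com/stream.mpd', true);
</script>
```

For debugging, the Gruntfile also builds `debug/build/BufferDisplay.js` (standalone name `fMSE.debug.bufferDisplay`), whose `attachVideo()` and `attachSourceBuffer()` methods draw each SourceBuffer's buffered ranges on a canvas — see `debug/BufferDisplay.js` for the implementation.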
173 | -------------------------------------------------------------------------------- /buildPlayer.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | import sys 4 | import subprocess 5 | import time 6 | 7 | if (os.path.exists("/opt/flex")): 8 | flex = "/opt/flex" 9 | exe = "" 10 | elif (os.path.exists(os.path.normpath("C:/flex_sdk_4.6"))): 11 | flex = ("C:/flex_sdk_4.6") 12 | exe = ".exe" 13 | elif (os.path.exists(os.path.expanduser("~/SDKs/Flex/4.14"))): 14 | flex = os.path.expanduser("~/SDKs/Flex/4.14") 15 | exe = "" 16 | 17 | SOURCE_PATH = "./src/as3/" 18 | TRANSCODER_MAIN_CLASS = SOURCE_PATH + "com/streamroot/transcoder/TranscodeWorker.as" 19 | TRANSCODER_OUTPUT = SOURCE_PATH + "com/streamroot/transcoder/TranscodeWorker.swf" 20 | POLYFILL_MAIN_CLASS = SOURCE_PATH + "com/streamroot/Main.as" 21 | POLYFILL_OUTPUT = "build/fMSE.swf" 22 | 23 | debug = "false" 24 | log_debug = "false" 25 | log_error = "false" 26 | verbose = False 27 | swfversion = "17" 28 | targetPlayer = "11.4.0" 29 | color = True 30 | 31 | startTime = time.time() 32 | 33 | def helpParam(): 34 | print "\npython buildPlayer.py [options]" 35 | print "options:" 36 | print "\t--debug : set debug flag to true" 37 | print "\t--log-debug : enables debug messages logging to browser console" 38 | print "\t--log-error : enables error messages logging to browser console" 39 | print "\t--no-color : disable color" 40 | print "\t-v : verbose mode" 41 | print "\t-h : display this menu" 42 | print "" 43 | sys.exit(0) 44 | 45 | def printRed(text): 46 | if color: 47 | print "\033[31m" + text + "\033[0m" 48 | else: 49 | print text 50 | 51 | def printPurple(text): 52 | if color: 53 | print "\033[35m" + text + "\033[0m" 54 | else: 55 | print text 56 | 57 | def printGreen(text): 58 | if color: 59 | print "\033[32m" + text + "\033[0m" 60 | else: 61 | print text 62 | 63 | def printYellow(text): 64 | if color: 65 | print "\033[33m" + text + "\033[0m" 66 | else: 67 | print text 68 | 69 | if (len(sys.argv)>1): 70 | for i in range(1, len(sys.argv)): 71 | if sys.argv[i] == "--debug": 72 | debug = "true" 73 | elif sys.argv[i] == "--log-debug": 74 | log_debug = "true" 75 | elif sys.argv[i] == "--log-error": 76 | log_error = "true" 77 | elif sys.argv[i] == "-v": 78 | verbose = True 79 | elif sys.argv[i] in ["--help","-h"]: 80 | helpParam() 81 | elif sys.argv[i] == "--no-color": 82 | color = False 83 | else: 84 | print "incorrect argument" 85 | helpParam() 86 | if verbose: 87 | print "Debug flag = " + debug 88 | print "-swf-version="+swfversion 89 | print "-target-player="+targetPlayer 90 | 91 | def popenPrint(result): 92 | result.wait() 93 | if verbose: 94 | for line in result.stdout: 95 | print(line) 96 | 97 | for line in result.stderr: 98 | while line.endswith("\n"): 99 | line = line[:-1] 100 | if not line == "": 101 | if color: 102 | line = line.replace("Error:", "\n\033[31mError\033[0m:"); 103 | line = line.replace("Erreur:", "\n\033[31mErreur\033[0m:"); 104 | line = line.replace("Warning:", "\n\033[33mWarning\033[0m:"); 105 | line = line.replace("Avertissement:", "\n\033[33mAvertissement\033[0m:"); 106 | else: 107 | line = line.replace("Error:", "Error:"); 108 | line = line.replace("Erreur:", "Erreur:"); 109 | line = line.replace("Warning:", "Warning:"); 110 | line = line.replace("Avertissement:", "Avertissement:"); 111 | if line.startswith('\n'): 112 | line = line[1:] 113 | print(line) 114 | 115 | #Compile worker 116 | if os.path.exists(TRANSCODER_OUTPUT): 117 | 
os.remove(os.path.normpath(TRANSCODER_OUTPUT)) 118 | workerResult = subprocess.Popen([os.path.normpath(flex + "/bin/mxmlc" + exe), 119 | os.path.normpath(TRANSCODER_MAIN_CLASS), 120 | os.path.normpath("-output=" + TRANSCODER_OUTPUT), 121 | "-static-link-runtime-shared-libraries=true", 122 | "-compiler.source-path=" + SOURCE_PATH, 123 | "-target-player="+targetPlayer+"", 124 | "-swf-version="+swfversion+"", 125 | "-debug="+debug+"", 126 | "-define+=CONFIG::LOG_DEBUG," + log_debug, 127 | "-define+=CONFIG::LOG_ERROR," + log_error], 128 | stdout=subprocess.PIPE, stderr=subprocess.PIPE) 129 | 130 | popenPrint(workerResult) 131 | if not os.path.exists(TRANSCODER_OUTPUT): 132 | printRed("Transcoder build failed") 133 | sys.exit(0) 134 | else: 135 | printPurple(">> " + TRANSCODER_OUTPUT + " has been generated, build successful") 136 | 137 | #compiling polyfill 138 | if os.path.exists(POLYFILL_OUTPUT): 139 | os.remove(os.path.normpath(POLYFILL_OUTPUT)) 140 | polyfillResult = subprocess.Popen([os.path.normpath(flex +"/bin/mxmlc" + exe), 141 | os.path.normpath(POLYFILL_MAIN_CLASS), 142 | os.path.normpath("-output=" + POLYFILL_OUTPUT), 143 | "-compiler.source-path=" + SOURCE_PATH, 144 | "-target-player="+targetPlayer+"", 145 | "-swf-version="+swfversion+"", 146 | "-debug="+debug+"", 147 | "-static-link-runtime-shared-libraries=true", 148 | "-use-network=false", 149 | "-compiler.optimize=true", 150 | "-default-background-color=0x000000", 151 | "-default-frame-rate=30", 152 | "-define+=CONFIG::LOG_DEBUG," + log_debug, 153 | "-define+=CONFIG::LOG_ERROR," + log_error], stdout=subprocess.PIPE, stderr=subprocess.PIPE) 154 | popenPrint(polyfillResult) 155 | if not os.path.exists(POLYFILL_OUTPUT): 156 | printRed("Polyfill build failed") 157 | sys.exit(0) 158 | else: 159 | printPurple(">> " + POLYFILL_OUTPUT + " has been generated, build successful") 160 | 161 | printGreen("Build successful") 162 | time = time.time() - startTime 163 | print "Time elapsed : " + str(time) + "s" 164 | -------------------------------------------------------------------------------- /debug/BufferDisplay.js: -------------------------------------------------------------------------------- 1 | const CACHE_HEIGHT = 40; 2 | const BUFFER_HEIGHT = 25; 3 | const TRACK_TOP_MARGIN = 3; 4 | const BUFFERED_COLOR = "#0c1b2e"; 5 | const PENDING_COLOR = "#07659f"; 6 | const CURRENT_TIME_COLOR = "#bf0101"; 7 | const CANVAS_WIDTH = 700; 8 | const TRACK_TYPE_WIDTH = 60; 9 | const FONT_STYLE = "12px Arial"; 10 | const TRACK_TYPE_COLOR = "#000824"; 11 | 12 | 13 | class BufferDisplay { 14 | constructor(){ 15 | this._sourceBuffers = []; 16 | } 17 | 18 | attachVideo(video) { 19 | this._video = video; 20 | this._startIfReady(); 21 | } 22 | 23 | attachSourceBuffer(sourceBuffer) { 24 | this._sourceBuffers.push(sourceBuffer); 25 | this._startIfReady(); 26 | } 27 | 28 | _startIfReady() { 29 | if (this._sourceBuffers.length && this._video && !this._started) { 30 | this._started = true; 31 | this._canvas = document.createElement('canvas'); 32 | 33 | this._canvas.width = CANVAS_WIDTH; 34 | let div = document.getElementById("bufferDisplay"); 35 | if(!div){ 36 | div = document.createElement('div'); 37 | document.body.appendChild(div); 38 | } 39 | div.appendChild(this._canvas); 40 | 41 | let render = this._render.bind(this); 42 | setInterval(render, 30); 43 | } 44 | } 45 | 46 | _render(){ 47 | let { currentTime } = this._video; 48 | let context2D = this._canvas.getContext('2d'); 49 | 50 | this._canvas.height = (CACHE_HEIGHT + 
TRACK_TOP_MARGIN)*this._sourceBuffers.length; 51 | 52 | // calculate the scale of the chart 53 | let min = Infinity, max = 0; 54 | for (let sourceBuffer of this._sourceBuffers) { 55 | let buffered = sourceBuffer.debugBuffered || sourceBuffer.buffered; 56 | if(buffered.length){ 57 | let bufferedMin = buffered.start(0); 58 | let bufferedMax = buffered.end(buffered.length-1); 59 | 60 | if( bufferedMin < min ){ 61 | min = bufferedMin; 62 | } 63 | if(bufferedMax > max){ 64 | max = bufferedMax; 65 | } 66 | } 67 | } 68 | 69 | let scale = {min, max, canvasWidth: this._canvas.width}; 70 | 71 | //for each SourceBuffer, draw TimeRanges. 72 | for (let i=0, sourceBuffer; sourceBuffer = this._sourceBuffers[i]; i++) { 73 | let buffered = sourceBuffer.debugBuffered || sourceBuffer.buffered; 74 | let debug = !!sourceBuffer.debugBuffered; 75 | 76 | let yPosition = (CACHE_HEIGHT + TRACK_TOP_MARGIN)*i; 77 | let opt = { 78 | scale, 79 | height: BUFFER_HEIGHT, 80 | yPosition: yPosition+(CACHE_HEIGHT - BUFFER_HEIGHT), 81 | color: BUFFERED_COLOR, 82 | debug 83 | }; 84 | this._drawTimeRanges(context2D, opt, buffered, currentTime); 85 | if (debug) { 86 | let captionYPosition = yPosition + (CACHE_HEIGHT * 1 / 4); 87 | this._writeTrackType(context2D, sourceBuffer.type, captionYPosition); 88 | } 89 | } 90 | let currentTimeLineOptions = { 91 | height:this._canvas.height, 92 | color: CURRENT_TIME_COLOR, 93 | scale 94 | }; 95 | this._drawCurrentTimeLine(context2D, currentTimeLineOptions, currentTime); 96 | } 97 | 98 | //The actual canvas drawing functions 99 | _drawTimeRanges(context2D, options, timeRanges, currentTime){ 100 | let {scale, height, yPosition, color, debug} = options; 101 | 102 | if (debug && timeRanges.length > 2) { 103 | throw new Error("Expected debug buffered attribute with a buffered time interval and a pending time interval. Got more than 2 time intervals"); 104 | } 105 | 106 | for (let j = 0; j < timeRanges.length; j++) { 107 | 108 | if (debug && j===1) { 109 | color = PENDING_COLOR; 110 | } 111 | 112 | 113 | let start = timeRanges.start(j); 114 | let end = timeRanges.end(j); 115 | 116 | let startX = this._convertTimeToPixel(scale, start); 117 | let endX = this._convertTimeToPixel(scale, end); 118 | let length = endX - startX > 1 ? endX - startX : 1; 119 | context2D.fillStyle = color; 120 | context2D.fillRect(startX, yPosition, length, height); 121 | 122 | if (start <= currentTime && currentTime <= end) { 123 | context2D.fillStyle = "#868686"; 124 | context2D.font = "11px Arial"; 125 | context2D.fillText(start.toFixed(3), startX + 2, yPosition + 10); 126 | context2D.fillText(end.toFixed(3), endX - 38, yPosition + height - 2); 127 | } 128 | } 129 | } 130 | 131 | 132 | _drawCurrentTimeLine(context2D, options, time){ 133 | let {color, scale, height} = options; 134 | let position = this._convertTimeToPixel(scale,time); 135 | context2D.fillStyle = color; 136 | context2D.fillRect(position, 0, 1, height); 137 | context2D.fillStyle = color; 138 | context2D.font = FONT_STYLE; 139 | context2D.fillText(time.toFixed(3), 0, height); 140 | } 141 | 142 | 143 | _convertTimeToPixel(scale, time) { 144 | let {min, max, canvasWidth} = scale; 145 | let effectiveCanvasWidth = canvasWidth - TRACK_TYPE_WIDTH; 146 | let divider = Math.max(max - min, 3*60); //trick so we can see the progression of the buffer during the 3 first minutes of a stream. 
147 | return TRACK_TYPE_WIDTH + ((time - min) * effectiveCanvasWidth / divider); 148 | } 149 | 150 | _writeTrackType(context2D, trackType, yPosition){ 151 | context2D.fillStyle = TRACK_TYPE_COLOR; 152 | context2D.font = FONT_STYLE; 153 | context2D.fillText(trackType, 0, yPosition); 154 | } 155 | 156 | } 157 | 158 | export default new BufferDisplay(); 159 | -------------------------------------------------------------------------------- /demo/example_dash.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | fMSE Polyfill - dash.js test page 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 |
14 | <!-- (page markup stripped when this document was exported; only the page title, "fMSE Polyfill - dash.js test page", survived) -->
20 | 21 | 47 | 48 | 49 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "fMSE", 3 | "version": "0.1.0", 4 | "homepage": "www.streamroot.io", 5 | "scripts": { 6 | "test": "mocha src/js/videomock/test/" 7 | }, 8 | "author": { 9 | "name": "StreamRoot", 10 | "email": "contact@streamroot.io" 11 | }, 12 | "devDependencies": { 13 | "should": "4.0.4", 14 | "grunt": "^0.4.5", 15 | "grunt-browserify": "4.0.1", 16 | "babelify": "^6.1.0", 17 | "matchdep": "0.3.0" 18 | }, 19 | "dependencies": { 20 | "eventemitter3": "^1.1.1" 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /src/as3/com/dash/boxes/Box.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | */ 8 | 9 | package com.dash.boxes { 10 | import flash.errors.IllegalOperationError; 11 | import flash.utils.ByteArray; 12 | 13 | internal class Box { 14 | protected const SIZE_AND_TYPE:uint = 8; 15 | 16 | private var _end:uint; 17 | 18 | public function Box(offset:uint, size:uint) { 19 | _end = offset + size; 20 | } 21 | 22 | public function get end():uint { 23 | return _end; 24 | } 25 | 26 | public function parse(ba:ByteArray):void { 27 | while (ba.bytesAvailable) { 28 | var offset:uint = ba.position; 29 | var size:uint = ba.readUnsignedInt(); 30 | var type:String = ba.readUTFBytes(4); 31 | 32 | var parsed:Object = parseChildBox(type, offset, size, ba); 33 | 34 | if (parsed == false) { 35 | if (ba.position < _end) { // skip 36 | ba.position += size - SIZE_AND_TYPE; 37 | } else { // quit 38 | ba.position = _end; 39 | return; 40 | } 41 | } 42 | } 43 | } 44 | 45 | protected function parseChildBox(type:String, offset:uint, size:uint, ba:ByteArray):Boolean { 46 | throw new IllegalOperationError("Method not implemented"); 47 | } 48 | } 49 | } -------------------------------------------------------------------------------- /src/as3/com/dash/boxes/FLVTag.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
7 | */ 8 | 9 | package com.dash.boxes { 10 | import flash.utils.ByteArray; 11 | 12 | public class FLVTag { 13 | public static const I_FRAME:uint = 1; 14 | public static const P_FRAME:uint = 2; 15 | public static const B_FRAME:uint = 3; 16 | public static const UNKNOWN:uint = 4; 17 | 18 | private static const VIDEO_TYPE:uint = 9; 19 | private static const AUDIO_TYPE:uint = 8; 20 | 21 | public var length:uint; 22 | public var timestamp:uint; 23 | public var frameType:uint; 24 | public var compositionTimestamp:int; 25 | public var dataOffset:uint; 26 | public var data:ByteArray; 27 | public var setup:Boolean = false; 28 | 29 | public var duration:uint; 30 | 31 | private var _type:uint; 32 | 33 | public function FLVTag() { 34 | } 35 | 36 | public function get type():uint { 37 | return _type; 38 | } 39 | 40 | public function isVideo():Boolean { 41 | return _type == VIDEO_TYPE; 42 | } 43 | 44 | public function isAudio():Boolean { 45 | return _type == AUDIO_TYPE; 46 | } 47 | 48 | public function markAsVideo():void { 49 | _type = VIDEO_TYPE; 50 | } 51 | 52 | public function markAsAudio():void { 53 | _type = AUDIO_TYPE; 54 | } 55 | } 56 | } -------------------------------------------------------------------------------- /src/as3/com/dash/boxes/FullBox.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | */ 8 | 9 | package com.dash.boxes { 10 | import flash.errors.IllegalOperationError; 11 | import flash.utils.ByteArray; 12 | 13 | public class FullBox extends Box { 14 | private var _version:uint; 15 | private var _flags:uint; 16 | 17 | public function FullBox(offset:uint, size:uint) { 18 | super(offset, size); 19 | } 20 | 21 | public function get version():uint { 22 | return _version; 23 | } 24 | 25 | public function get flags():uint { 26 | return _flags; 27 | } 28 | 29 | public override function parse(ba:ByteArray):void { 30 | parseVersion(ba); 31 | parseFlags(ba); 32 | 33 | parseBox(ba); 34 | 35 | ba.position = end; 36 | } 37 | 38 | protected function parseBox(ba:ByteArray):void { 39 | throw new IllegalOperationError("Method not implemented"); 40 | } 41 | 42 | private function parseVersion(ba:ByteArray):void { 43 | _version = ba.readUnsignedByte(); 44 | } 45 | 46 | private function parseFlags(ba:ByteArray):void { 47 | _flags = 0; 48 | 49 | for (var i:uint = 0; i < 3; i++) { 50 | _flags = _flags << 8; 51 | _flags += ba.readUnsignedByte(); 52 | } 53 | } 54 | } 55 | } -------------------------------------------------------------------------------- /src/as3/com/dash/boxes/HandlerReferenceBox.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
7 | */ 8 | 9 | package com.dash.boxes { 10 | import flash.utils.ByteArray; 11 | 12 | public class HandlerReferenceBox extends FullBox { 13 | 14 | // 'vide', 'soun' or other values 15 | private var _type:String; 16 | 17 | public function HandlerReferenceBox(offset:uint, size:uint) { 18 | super(offset, size); 19 | } 20 | 21 | public function get type():String { 22 | return _type; 23 | } 24 | 25 | override protected function parseBox(ba:ByteArray):void { 26 | 27 | // skip QUICKTIME type 28 | ba.position += 4; 29 | 30 | _type = ba.readUTFBytes(4); 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /src/as3/com/dash/boxes/MediaBox.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | */ 8 | 9 | package com.dash.boxes { 10 | 11 | import flash.utils.ByteArray; 12 | 13 | public class MediaBox extends Box { 14 | private var _mdhd:MediaHeaderBox; 15 | private var _hdlr:HandlerReferenceBox; 16 | private var _minf:MediaInformationBox; 17 | 18 | public function MediaBox(offset:uint, size:uint) { 19 | super(offset, size); 20 | } 21 | 22 | public function get mdhd():MediaHeaderBox { 23 | return _mdhd; 24 | } 25 | 26 | public function get hdlr():HandlerReferenceBox { 27 | return _hdlr; 28 | } 29 | 30 | public function get minf():MediaInformationBox { 31 | return _minf; 32 | } 33 | 34 | override protected function parseChildBox(type:String, offset:uint, size:uint, ba:ByteArray):Boolean { 35 | if (type == "mdhd") { 36 | parseMediaHeaderBox(offset, size, ba); 37 | return true; 38 | } 39 | 40 | if (type == "hdlr") { 41 | parseHandlerReferenceBox(offset, size, ba); 42 | return true; 43 | } 44 | 45 | if (type == "minf") { 46 | parseMediaInformationBox(offset, size, ba); 47 | return true; 48 | } 49 | 50 | return false; 51 | } 52 | 53 | private function parseMediaInformationBox(offset:uint, size:uint, ba:ByteArray):void { 54 | _minf = new MediaInformationBox(offset, size); 55 | _minf.parse(ba); 56 | } 57 | 58 | private function parseHandlerReferenceBox(offset:uint, size:uint, ba:ByteArray):void { 59 | _hdlr = new HandlerReferenceBox(offset, size); 60 | _hdlr.parse(ba); 61 | } 62 | 63 | private function parseMediaHeaderBox(offset:uint, size:uint, ba:ByteArray):void { 64 | _mdhd = new MediaHeaderBox(offset, size); 65 | _mdhd.parse(ba); 66 | } 67 | } 68 | } -------------------------------------------------------------------------------- /src/as3/com/dash/boxes/MediaHeaderBox.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
7 | */ 8 | 9 | package com.dash.boxes { 10 | import flash.utils.ByteArray; 11 | 12 | public class MediaHeaderBox extends FullBox { 13 | private var _timescale:uint; 14 | 15 | public function MediaHeaderBox(offset:uint, size:uint) { 16 | super(offset, size); 17 | } 18 | 19 | public function get timescale():uint { 20 | return _timescale; 21 | } 22 | 23 | override protected function parseBox(ba:ByteArray):void { 24 | if (version == 1) { 25 | parseVersion1(ba); 26 | } 27 | 28 | if (version == 0) { 29 | parseVersion2(ba); 30 | } 31 | } 32 | 33 | private function parseVersion1(ba:ByteArray):void { 34 | 35 | // // creation time MSB 36 | // ba.readUnsignedInt(); // 4 bytes 37 | // // creation time LSB 38 | // ba.readUnsignedInt(); // 4 bytes 39 | // // modification time MSB 40 | // ba.readUnsignedInt(); // 4 bytes 41 | // // modification time LSB 42 | // ba.readUnsignedInt(); // 4 bytes 43 | 44 | // skip 45 | ba.position += 16; 46 | 47 | // timescale 48 | _timescale = ba.readUnsignedInt(); // 4 bytes 49 | } 50 | 51 | private function parseVersion2(ba:ByteArray):void { 52 | 53 | // // creation time LSB 54 | // ba.readUnsignedInt(); // 4 bytes 55 | // // modification time LSB 56 | // ba.readUnsignedInt(); // 4 bytes 57 | 58 | // skip 59 | ba.position += 8; 60 | 61 | // timescale 62 | _timescale = ba.readUnsignedInt(); 63 | } 64 | } 65 | } -------------------------------------------------------------------------------- /src/as3/com/dash/boxes/MediaInformationBox.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | */ 8 | 9 | package com.dash.boxes { 10 | import flash.utils.ByteArray; 11 | 12 | public class MediaInformationBox extends Box { 13 | private var _stbl:SampleTableBox; 14 | 15 | public function MediaInformationBox(offset:uint, size:uint) { 16 | super(offset, size); 17 | } 18 | 19 | public function get stbl():SampleTableBox { 20 | return _stbl; 21 | } 22 | 23 | override protected function parseChildBox(type:String, offset:uint, size:uint, ba:ByteArray):Boolean { 24 | if (type == "stbl") { 25 | parseSampleTableBox(offset, size, ba); 26 | return true; 27 | } 28 | 29 | return false; 30 | } 31 | 32 | private function parseSampleTableBox(offset:uint, size:uint, ba:ByteArray):void { 33 | _stbl = new SampleTableBox(offset, size); 34 | _stbl.parse(ba); 35 | } 36 | } 37 | } -------------------------------------------------------------------------------- /src/as3/com/dash/boxes/MovieBox.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | */ 8 | 9 | package com.dash.boxes { 10 | 11 | import flash.utils.ByteArray; 12 | 13 | public class MovieBox extends Box { 14 | private var _traks:Vector. = new Vector.(); 15 | private var _mvex:MovieExtendsBox; 16 | 17 | public function MovieBox(offset:uint, size:uint) { 18 | super(offset, size); 19 | } 20 | 21 | public function get traks():Vector. 
{ 22 | return _traks; 23 | } 24 | 25 | public function get mvex():MovieExtendsBox { 26 | return _mvex; 27 | } 28 | 29 | override protected function parseChildBox(type:String, offset:uint, size:uint, ba:ByteArray):Boolean { 30 | if (type == "trak") { 31 | parseTrackBox(offset, size, ba); 32 | return true; 33 | } 34 | 35 | if (type == "mvex") { 36 | parseMovieExtendsBox(offset, size, ba); 37 | return true; 38 | } 39 | 40 | return false; 41 | } 42 | 43 | private function parseMovieExtendsBox(offset:uint, size:uint, ba:ByteArray):void { 44 | _mvex = new MovieExtendsBox(offset, size); 45 | _mvex.parse(ba); 46 | } 47 | 48 | private function parseTrackBox(offset:uint, size:uint, ba:ByteArray):void { 49 | var trak:TrackBox = new TrackBox(offset, size); 50 | trak.parse(ba); 51 | _traks.push(trak); 52 | } 53 | } 54 | } -------------------------------------------------------------------------------- /src/as3/com/dash/boxes/MovieExtendsBox.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | */ 8 | 9 | package com.dash.boxes { 10 | 11 | import flash.utils.ByteArray; 12 | 13 | public class MovieExtendsBox extends Box { 14 | private var _trexs:Vector. = new Vector.(); 15 | 16 | public function MovieExtendsBox(offset:uint, size:uint) { 17 | super(offset, size); 18 | } 19 | 20 | public function get trexs():Vector. { 21 | return _trexs; 22 | } 23 | 24 | override protected function parseChildBox(type:String, offset:uint, size:uint, ba:ByteArray):Boolean { 25 | if (type == "trex") { 26 | parseTrackExtendsBox(offset, size, ba); 27 | return true; 28 | } 29 | 30 | return false; 31 | } 32 | 33 | private function parseTrackExtendsBox(offset:uint, size:uint, ba:ByteArray):void { 34 | var trex:TrackExtendsBox = new TrackExtendsBox(offset, size); 35 | trex.parse(ba); 36 | _trexs.push(trex); 37 | } 38 | } 39 | } -------------------------------------------------------------------------------- /src/as3/com/dash/boxes/MovieFragmentBox.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | */ 8 | 9 | package com.dash.boxes { 10 | 11 | import flash.utils.ByteArray; 12 | 13 | public class MovieFragmentBox extends Box { 14 | private var _trafs:Vector. = new Vector.(); 15 | private var _offset:uint; 16 | 17 | public function MovieFragmentBox(offset:uint, size:uint) { 18 | super(offset, size); 19 | _offset = offset; 20 | } 21 | 22 | public function get trafs():Vector. 
{ 23 | return _trafs; 24 | } 25 | 26 | public function get offset():uint { 27 | return _offset; 28 | } 29 | 30 | override protected function parseChildBox(type:String, offset:uint, size:uint, ba:ByteArray):Boolean { 31 | if (type == "traf") { 32 | parseTrackFragmentBox(offset, size, ba); 33 | return true; 34 | } 35 | 36 | return false; 37 | } 38 | 39 | private function parseTrackFragmentBox(offset:uint, size:uint, ba:ByteArray):void { 40 | var traf:TrackFragmentBox = new TrackFragmentBox(offset, size); 41 | traf.parse(ba); 42 | _trafs.push(traf); 43 | } 44 | } 45 | } -------------------------------------------------------------------------------- /src/as3/com/dash/boxes/Muxer.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | */ 8 | 9 | package com.dash.boxes { 10 | 11 | import flash.utils.ByteArray; 12 | 13 | public class Muxer { 14 | private var _duration:uint; 15 | 16 | public function Muxer() { 17 | } 18 | public function get duration():uint { 19 | return _duration; 20 | } 21 | 22 | public function mux(messages:Vector.):ByteArray { 23 | var ba:ByteArray = new ByteArray(); 24 | _duration = 0; 25 | while (messages.length > 0) { 26 | var message:FLVTag = messages.shift(); 27 | writeMsg(message, ba); 28 | _duration += message.duration; 29 | } 30 | 31 | return ba; 32 | } 33 | 34 | private function writeMsg(message:FLVTag, ba:ByteArray):void { 35 | var messageSize:uint = calculateSize(message); 36 | 37 | writeType(message, ba); 38 | writeSize(messageSize, ba); 39 | writeTimestamp(message, ba); 40 | writeStreamId(message, ba); 41 | writeHeader(message, ba); 42 | writeData(message, ba); 43 | 44 | // write previous tag size 45 | ba.writeUnsignedInt(messageSize + 11); // 4 bytes 46 | } 47 | 48 | private function calculateSize(message:FLVTag):uint { 49 | var size:uint = message.length; 50 | 51 | if (message.isVideo()) { 52 | size += 5; 53 | } else if (message.isAudio()) { 54 | size += 2; 55 | } 56 | 57 | return size; 58 | } 59 | 60 | private function writeType(message:FLVTag, ba:ByteArray):void { 61 | ba.writeByte(message.type); 62 | } 63 | 64 | private function writeData(message:FLVTag, ba:ByteArray):void { 65 | if (message.data != null) { 66 | ba.writeBytes(message.data); 67 | } 68 | } 69 | 70 | private function writeHeader(message:FLVTag, ba:ByteArray):void { 71 | if (message.isVideo()) { 72 | writeVideoHeader(message, ba); 73 | writeCompositionTimestamp(message, ba); 74 | } 75 | 76 | if (message.isAudio()) { 77 | writeAudioHeader(message, ba); 78 | } 79 | } 80 | 81 | private function writeStreamId(message:FLVTag, ba:ByteArray):void { 82 | writeNumber(0, ba); // always the same ID 83 | } 84 | 85 | private function writeTimestamp(message:FLVTag, ba:ByteArray):void { 86 | var timestamp:Number = Math.round(message.timestamp); 87 | writeNumber(timestamp, ba); 88 | ba.writeByte(0); 89 | } 90 | 91 | private function writeSize(messageSize:uint, ba:ByteArray):void { 92 | writeNumber(messageSize, ba); 93 | } 94 | 95 | private function writeAudioHeader(message:FLVTag, ba:ByteArray):void { 96 | var sampleSize:uint; 97 | if (SampleEntry.sampleSize == 16) { // 16 bit 98 | sampleSize = 1; 99 | } else { 100 | sampleSize = 0; 101 | } 102 | 103 | var channelCount:uint; 104 | if (SampleEntry.channelCount == 2) { // 
stereo 105 | channelCount = 1; 106 | } else { 107 | channelCount = 0; 108 | } 109 | 110 | ba.writeByte((10 << 4) + (3 << 2) + (sampleSize << 1) + channelCount); 111 | 112 | if (message.setup) { 113 | ba.writeByte(0); 114 | } else { 115 | ba.writeByte(1); 116 | } 117 | } 118 | 119 | private function writeVideoHeader(message:FLVTag, ba:ByteArray):void { 120 | switch (message.frameType) { 121 | case FLVTag.I_FRAME: 122 | ba.writeByte(0x17); 123 | ba.writeByte(1); 124 | break; 125 | case FLVTag.P_FRAME: 126 | ba.writeByte(0x27); 127 | ba.writeByte(1); 128 | break; 129 | case FLVTag.B_FRAME: 130 | ba.writeByte(0x37); 131 | ba.writeByte(1); 132 | break; 133 | default: 134 | ba.writeByte(0x17); 135 | ba.writeByte(0); 136 | } 137 | } 138 | 139 | private function writeCompositionTimestamp(message:FLVTag, ba:ByteArray):void { 140 | writeNumber(message.compositionTimestamp, ba); 141 | } 142 | 143 | private function writeNumber(number:int, ba:ByteArray):void { 144 | ba.writeByte(number >> 16); 145 | ba.writeByte(number >> 8); 146 | ba.writeByte(number); 147 | } 148 | } 149 | } -------------------------------------------------------------------------------- /src/as3/com/dash/boxes/NalUnit.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | */ 8 | 9 | package com.dash.boxes { 10 | import flash.utils.ByteArray; 11 | 12 | public class NalUnit { 13 | private static const HEADER:uint = 1; 14 | 15 | public function NalUnit() { 16 | } 17 | 18 | public function parse(ba:ByteArray):uint { 19 | ba.position = 0; 20 | 21 | var pPresent:Boolean = false; 22 | var bPresent:Boolean = false; 23 | var iPresent:Boolean = false; 24 | 25 | while (ba.bytesAvailable) { 26 | var nalUnitLength:uint = ba.readUnsignedInt(); 27 | var nalUnitHeader:uint = ba.readUnsignedByte(); 28 | var nalUnitType:uint = nalUnitHeader & 0x1f; 29 | 30 | if ((nalUnitHeader & 0x80) != 0x0) { 31 | return FLVTag.UNKNOWN; 32 | } 33 | 34 | if (nalUnitType == 0x5) { 35 | return FLVTag.I_FRAME; 36 | } 37 | 38 | if (nalUnitType == 0x1) { 39 | var bits:BitArray = new BitArray(ba); 40 | 41 | // firstMbInSlice 42 | readUnsignedIntegerFromGolombCode(bits); 43 | 44 | var sliceType:uint = readUnsignedIntegerFromGolombCode(bits); 45 | 46 | switch (sliceType) { 47 | case 0: 48 | pPresent = true; 49 | break; 50 | case 1: 51 | bPresent = true; 52 | break; 53 | case 2: 54 | iPresent = true; 55 | break; 56 | case 5: 57 | return FLVTag.P_FRAME; 58 | case 6: 59 | return FLVTag.B_FRAME; 60 | case 7: 61 | return FLVTag.I_FRAME; 62 | case 8: 63 | return FLVTag.UNKNOWN; 64 | case 9: 65 | return FLVTag.UNKNOWN; 66 | } 67 | } 68 | 69 | ba.position += nalUnitLength - HEADER; 70 | } 71 | 72 | if (bPresent) { 73 | return FLVTag.B_FRAME; 74 | } 75 | 76 | if (pPresent) { 77 | return FLVTag.P_FRAME; 78 | } 79 | 80 | if (iPresent) { 81 | return FLVTag.I_FRAME; 82 | } 83 | 84 | return FLVTag.UNKNOWN; 85 | } 86 | 87 | private function readUnsignedIntegerFromGolombCode(ba:BitArray):uint { 88 | var leadingZeroBits:uint = 0; 89 | 90 | // count number of zeros 91 | while (ba.readBit() == 0) { 92 | leadingZeroBits++; 93 | } 94 | 95 | // code = 2^(leadingZeroBits) – 1 96 | var code:uint = (1 << leadingZeroBits) - 1; 97 | 98 | // code += read_bits(leadingZeroBits) 99 | for (var i:int = 
(leadingZeroBits - 1); i >= 0; i--) { 100 | code += ba.readBit() << i; 101 | } 102 | 103 | return code; 104 | } 105 | } 106 | } 107 | 108 | import flash.utils.ByteArray; 109 | 110 | class BitArray { 111 | private var _ba:ByteArray; 112 | private var _offset:uint; 113 | 114 | public function BitArray(ba:ByteArray) { 115 | _ba = ba; 116 | _offset = ba.position * 8; 117 | } 118 | 119 | public function readBit():uint { 120 | var byte:uint = _ba[(_offset >> 0x3)]; 121 | var byteOffset:uint = 0x7 - (_offset & 0x7); 122 | 123 | _offset++; 124 | 125 | return (byte >> byteOffset) & 0x1; 126 | } 127 | } 128 | -------------------------------------------------------------------------------- /src/as3/com/dash/boxes/SampleDescriptionBox.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | */ 8 | 9 | package com.dash.boxes { 10 | 11 | import flash.utils.ByteArray; 12 | 13 | public class SampleDescriptionBox extends FullBox { 14 | private var _sampleEntries:Vector. = new Vector.(); 15 | 16 | public function SampleDescriptionBox(offset:uint, size:uint) { 17 | super(offset, size); 18 | } 19 | 20 | public function get sampleEntries():Vector. { 21 | return _sampleEntries; 22 | } 23 | 24 | override protected function parseBox(ba:ByteArray):void { 25 | var sampleEntriesLength:uint = ba.readUnsignedInt(); 26 | 27 | for (var i:uint = 0; i < sampleEntriesLength; i++) { 28 | var offset:uint = ba.position; 29 | var size:uint = ba.readUnsignedInt(); 30 | var type:String = ba.readUTFBytes(4); 31 | 32 | parseSampleEntry(offset, size, type, ba); 33 | } 34 | } 35 | 36 | private function parseSampleEntry(offset:uint, size:uint, type:String, ba:ByteArray):void { 37 | var sampleEntry:SampleEntry = new SampleEntry(offset, size, type); 38 | sampleEntry.parse(ba); 39 | _sampleEntries.push(sampleEntry); 40 | } 41 | } 42 | } -------------------------------------------------------------------------------- /src/as3/com/dash/boxes/SampleEntry.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
7 | */ 8 | 9 | package com.dash.boxes { 10 | 11 | import com.dash.utils.Console; 12 | 13 | import flash.utils.ByteArray; 14 | 15 | public class SampleEntry extends Box { 16 | public static var channelCount:uint; 17 | public static var sampleSize:uint; 18 | 19 | private var _data:ByteArray; 20 | private var _type:String; 21 | 22 | public function SampleEntry(offset:uint, size:uint, type:String) { 23 | super(offset, size); 24 | _type = type; 25 | } 26 | 27 | public function get data():ByteArray { 28 | return _data; 29 | } 30 | 31 | public override function parse(ba:ByteArray):void { 32 | if (_type == "avc1") { 33 | parseAvc1(ba); 34 | } 35 | 36 | if (_type == "mp4a") { 37 | parseMp4a(ba); 38 | } 39 | 40 | ba.position = end; 41 | } 42 | 43 | private function parseAvc1(ba:ByteArray):void { 44 | 45 | // skip 46 | ba.position += 78; 47 | 48 | parseAvc1Data(ba); 49 | } 50 | 51 | private function parseMp4a(ba:ByteArray):void { 52 | 53 | // skip 54 | ba.position += 16; 55 | 56 | channelCount = ba.readUnsignedShort(); 57 | sampleSize = ba.readUnsignedShort(); 58 | 59 | // skip 60 | ba.position += 8; 61 | 62 | _data = parseMp4aData(ba); 63 | } 64 | 65 | private function parseAvc1Data(ba:ByteArray):void { 66 | ba.position = goToBox("avcC", ba); 67 | 68 | var size:Number = ba.readUnsignedInt(); 69 | 70 | // skip: type("avcC") 71 | ba.position += 4; 72 | 73 | _data = new ByteArray(); 74 | ba.readBytes(_data, 0, size - SIZE_AND_TYPE); 75 | } 76 | 77 | private function goToBox(type:String, ba:ByteArray):uint { 78 | var typeBegin:String = type.slice(0, 1); 79 | var typeOther:String = type.slice(1); 80 | 81 | while (ba.bytesAvailable) { 82 | if (ba.readUTFBytes(typeBegin.length) != typeBegin) { 83 | continue; 84 | } 85 | 86 | if (ba.readUTFBytes(typeOther.length) == typeOther) { 87 | return ba.position - SIZE_AND_TYPE; 88 | } 89 | } 90 | 91 | throw Console.getInstance().logError(new Error("Couldn't find any '" + type + "' box")); 92 | } 93 | 94 | private function parseMp4aData(ba:ByteArray):ByteArray { 95 | 96 | // // 4-bytes size 97 | // ba.position += 4; 98 | // // 4-bytes type 99 | // ba.position += 4; 100 | // // 4-bytes version/flags 101 | // ba.position += 4; 102 | // // 1-byte type (0x03) 103 | // ba.position += 1; 104 | 105 | ba.position += 13; 106 | 107 | // 3-bytes header (optional) and 1-byte size 108 | getDescriptorSize(ba); 109 | 110 | // // 2-bytes ID 111 | // ba.position += 2; 112 | // // 1-byte priority 113 | // ba.position += 1; 114 | // // 1-byte type (0x04) 115 | // ba.position += 1; 116 | 117 | ba.position += 4; 118 | 119 | // 3-bytes header (optional) and 1-byte size 120 | getDescriptorSize(ba); 121 | 122 | // // 1-byte ID 123 | // ba.position += 1; 124 | // // 1-byte type/flags 125 | // ba.position += 1; 126 | // // 3-bytes buffer size 127 | // ba.position += 3; 128 | // // 4-bytes maximum bit rate 129 | // ba.position += 4; 130 | // // 4-bytes average bit rate 131 | // ba.position += 4; 132 | // // 1-byte type (0x05) 133 | // ba.position += 1; 134 | 135 | ba.position += 14; 136 | 137 | var sdf:ByteArray = new ByteArray(); 138 | ba.readBytes(sdf, 0, getDescriptorSize(ba)); 139 | 140 | return sdf; 141 | } 142 | 143 | private function getDescriptorSize(ba:ByteArray):uint { 144 | var headerOrSize:uint = ba.readUnsignedByte(); 145 | 146 | if (headerOrSize == 0x80) { 147 | 148 | // 2-bytes header 149 | ba.position += 2; 150 | 151 | // size 152 | return ba.readUnsignedByte(); 153 | } 154 | 155 | // size 156 | return headerOrSize; 157 | } 158 | } 159 | } 
-------------------------------------------------------------------------------- /src/as3/com/dash/boxes/SampleTableBox.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | */ 8 | 9 | package com.dash.boxes { 10 | import flash.utils.ByteArray; 11 | 12 | public class SampleTableBox extends Box { 13 | private var _stsd:SampleDescriptionBox; 14 | 15 | public function SampleTableBox(offset:uint, size:uint) { 16 | super(offset, size); 17 | } 18 | 19 | public function get stsd():SampleDescriptionBox { 20 | return _stsd; 21 | } 22 | 23 | override protected function parseChildBox(type:String, offset:uint, size:uint, ba:ByteArray):Boolean { 24 | if (type == "stsd") { 25 | parseSampleDescriptionBox(offset, size, ba); 26 | return true; 27 | } 28 | 29 | return false; 30 | } 31 | 32 | private function parseSampleDescriptionBox(offset:uint, size:uint, ba:ByteArray):void { 33 | _stsd = new SampleDescriptionBox(offset, size); 34 | _stsd.parse(ba); 35 | } 36 | } 37 | } -------------------------------------------------------------------------------- /src/as3/com/dash/boxes/TrackBox.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | */ 8 | 9 | package com.dash.boxes { 10 | 11 | import flash.utils.ByteArray; 12 | 13 | public class TrackBox extends Box { 14 | private var _tkhd:TrackHeaderBox; 15 | private var _mdia:MediaBox; 16 | 17 | public function TrackBox(offset:uint, size:uint) { 18 | super(offset, size); 19 | } 20 | 21 | public function get tkhd():TrackHeaderBox { 22 | return _tkhd; 23 | } 24 | 25 | public function get mdia():MediaBox { 26 | return _mdia; 27 | } 28 | 29 | override protected function parseChildBox(type:String, offset:uint, size:uint, ba:ByteArray):Boolean { 30 | if (type == "tkhd") { 31 | parseTrackHeaderBox(offset, size, ba); 32 | return true; 33 | } 34 | 35 | if (type == "mdia") { 36 | parseMediaBox(offset, size, ba); 37 | return true; 38 | } 39 | 40 | return false; 41 | } 42 | 43 | private function parseTrackHeaderBox(offset:uint, size:uint, ba:ByteArray):void { 44 | _tkhd = new TrackHeaderBox(offset, size); 45 | _tkhd.parse(ba); 46 | } 47 | 48 | private function parseMediaBox(offset:uint, size:uint, ba:ByteArray):void { 49 | _mdia = new MediaBox(offset, size); 50 | _mdia.parse(ba); 51 | } 52 | } 53 | } -------------------------------------------------------------------------------- /src/as3/com/dash/boxes/TrackExtendsBox.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
7 | */ 8 | 9 | package com.dash.boxes { 10 | import flash.utils.ByteArray; 11 | 12 | public class TrackExtendsBox extends FullBox { 13 | private var _trackId:uint; 14 | private var _defaultSampleDuration:uint; 15 | 16 | public function TrackExtendsBox(offset:uint, size:uint) { 17 | super(offset, size); 18 | } 19 | 20 | public function get trackId():uint { 21 | return _trackId; 22 | } 23 | 24 | public function get defaultSampleDuration():uint { 25 | return _defaultSampleDuration; 26 | } 27 | 28 | override protected function parseBox(ba:ByteArray):void { 29 | 30 | // track ID 31 | _trackId = ba.readUnsignedInt(); 32 | 33 | // // default sample description index 34 | // ba.readUnsignedInt(); // 4 bytes 35 | 36 | // skip 37 | ba.position += 4; 38 | 39 | _defaultSampleDuration = ba.readUnsignedInt(); 40 | } 41 | } 42 | } -------------------------------------------------------------------------------- /src/as3/com/dash/boxes/TrackFragmentBox.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | */ 8 | 9 | package com.dash.boxes { 10 | 11 | import flash.utils.ByteArray; 12 | 13 | public class TrackFragmentBox extends Box { 14 | private var _truns:Vector. = new Vector.(); 15 | private var _tfhd:TrackFragmentHeaderBox; 16 | 17 | public function TrackFragmentBox(offset:uint, size:uint) { 18 | super(offset, size); 19 | } 20 | 21 | public function get truns():Vector. { 22 | return _truns; 23 | } 24 | 25 | public function get tfhd():TrackFragmentHeaderBox { 26 | return _tfhd; 27 | } 28 | 29 | override protected function parseChildBox(type:String, offset:uint, size:uint, ba:ByteArray):Boolean { 30 | if (type == "tfhd") { 31 | parseTrackFragmentHeaderBox(offset, size, ba); 32 | return true; 33 | } 34 | 35 | if (type == "trun") { 36 | parseTrackFragmentRunBox(offset, size, ba); 37 | return true; 38 | } 39 | 40 | return false; 41 | } 42 | 43 | private function parseTrackFragmentRunBox(offset:uint, size:uint, ba:ByteArray):void { 44 | var trun:TrackFragmentRunBox = new TrackFragmentRunBox(offset, size); 45 | trun.parse(ba); 46 | _truns.push(trun); 47 | } 48 | 49 | private function parseTrackFragmentHeaderBox(offset:uint, size:uint, ba:ByteArray):void { 50 | _tfhd = new TrackFragmentHeaderBox(offset, size); 51 | _tfhd.parse(ba); 52 | } 53 | } 54 | } -------------------------------------------------------------------------------- /src/as3/com/dash/boxes/TrackFragmentHeaderBox.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
7 | */ 8 | 9 | package com.dash.boxes { 10 | 11 | import com.dash.utils.Bytes; 12 | 13 | import flash.utils.ByteArray; 14 | 15 | public class TrackFragmentHeaderBox extends FullBox { 16 | private var _baseDataOffsetPresent:Boolean = false; 17 | private var _baseDataOffset:Number; 18 | private var _defaultSampleDurationPresent:Boolean = false; 19 | private var _defaultSampleDuration:uint; 20 | 21 | public function TrackFragmentHeaderBox(offset:uint, size:uint) { 22 | super(offset, size); 23 | } 24 | 25 | public function get defaultSampleDurationPresent():Boolean { 26 | return _defaultSampleDurationPresent; 27 | } 28 | 29 | public function get baseDataOffsetPresent():Boolean { 30 | return _baseDataOffsetPresent; 31 | } 32 | 33 | public function get defaultSampleDuration():uint { 34 | return _defaultSampleDuration; 35 | } 36 | 37 | public function get baseDataOffset():Number { 38 | return _baseDataOffset; 39 | } 40 | 41 | override protected function parseBox(ba:ByteArray):void { 42 | if ((flags & 0x1) == 0x1) { 43 | _baseDataOffsetPresent = true; 44 | } 45 | 46 | var sampleDescriptionIndexPresent:Boolean = false; 47 | if ((flags & 0x2) == 0x2) { 48 | sampleDescriptionIndexPresent = true; 49 | } 50 | 51 | if ((flags & 0x8) == 0x8) { 52 | _defaultSampleDurationPresent = true; 53 | } 54 | 55 | // trafId 56 | Bytes.readNumber(ba); 57 | 58 | if (_baseDataOffsetPresent) { 59 | _baseDataOffset = Bytes.readNumber(ba, 8); 60 | } 61 | 62 | Bytes.skipNumberIfNeeded(sampleDescriptionIndexPresent, ba); 63 | 64 | if (_defaultSampleDurationPresent) { 65 | _defaultSampleDuration = Bytes.readNumber(ba); 66 | } 67 | } 68 | } 69 | } -------------------------------------------------------------------------------- /src/as3/com/dash/boxes/TrackFragmentRunBox.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | */ 8 | 9 | package com.dash.boxes { 10 | import com.dash.utils.Bytes; 11 | 12 | import flash.utils.ByteArray; 13 | 14 | public class TrackFragmentRunBox extends FullBox { 15 | private var _sampleDurationPresent:Boolean = false; 16 | private var _sampleDuration:Vector. = new Vector.(); 17 | private var _sampleDependsOn:Vector. = new Vector.(); 18 | private var _sampleIsDependedOn:Vector. = new Vector.(); 19 | private var _sampleSize:Vector. = new Vector.(); 20 | private var _sampleCompositionTimeOffset:Vector. = new Vector.(); 21 | private var _dataOffset:int; 22 | 23 | public function TrackFragmentRunBox(offset:uint, size:uint) { 24 | super(offset, size); 25 | } 26 | 27 | public function get dataOffset():int { 28 | return _dataOffset; 29 | } 30 | 31 | public function get sampleDurationPresent():Boolean { 32 | return _sampleDurationPresent; 33 | } 34 | 35 | public function get sampleCompositionTimeOffset():Vector. { 36 | return _sampleCompositionTimeOffset; 37 | } 38 | 39 | public function get sampleSize():Vector. { 40 | return _sampleSize; 41 | } 42 | 43 | public function get sampleDuration():Vector. { 44 | return _sampleDuration; 45 | } 46 | 47 | public function get sampleDependsOn():Vector. { 48 | return _sampleDependsOn; 49 | } 50 | 51 | public function get sampleIsDependedOn():Vector. 
{ 52 | return _sampleIsDependedOn; 53 | } 54 | 55 | override protected function parseBox(ba:ByteArray):void { 56 | var dataOffsetPresent:Boolean = false; 57 | if ((flags & 0x1) == 0x1) { 58 | dataOffsetPresent = true; 59 | } 60 | 61 | var firstSampleFlagsPresent:Boolean = false; 62 | if ((flags & 0x4) == 0x4) { 63 | firstSampleFlagsPresent = true; 64 | } 65 | 66 | if ((flags & 0x100) == 0x100) { 67 | _sampleDurationPresent = true; 68 | } 69 | 70 | var sampleSizePresent:Boolean = false; 71 | if ((flags & 0x200) == 0x200) { 72 | sampleSizePresent = true; 73 | } 74 | 75 | var secondSampleFlagsPresent:Boolean = false; 76 | if ((flags & 0x400) == 0x400) { 77 | secondSampleFlagsPresent = true; 78 | } 79 | 80 | var sampleCompositionTimeOffsetsPresent:Boolean = false; 81 | if ((flags & 0x800) == 0x800) { 82 | sampleCompositionTimeOffsetsPresent = true; 83 | } 84 | 85 | var sampleCount:uint = Bytes.readNumber(ba); 86 | 87 | parseDataOffsetIfNeeded(dataOffsetPresent, ba); 88 | Bytes.skipNumberIfNeeded(firstSampleFlagsPresent, ba); 89 | 90 | for (var i:uint = 0; i < sampleCount; i++) { 91 | parseSampleDurationIfNeeded(i, ba); 92 | parseSampleSizeIfNeeded(sampleSizePresent, i, ba); 93 | parseSampleFlagsIfNeeded(secondSampleFlagsPresent, i, ba); 94 | parseSampleCompositionTimeOffsetsIfNeeded(sampleCompositionTimeOffsetsPresent, i, ba); 95 | } 96 | } 97 | 98 | private function parseDataOffsetIfNeeded(present:Boolean, ba:ByteArray):void { 99 | if (present) { 100 | _dataOffset = Bytes.readNumber(ba); 101 | } 102 | } 103 | 104 | private function parseSampleCompositionTimeOffsetsIfNeeded(present:Boolean, i:uint, ba:ByteArray):void { 105 | if (present) { 106 | if (version == 0) { 107 | parseSampleCompositionTimeOffsetsVersion0(i, ba); 108 | } 109 | 110 | if (version == 1) { 111 | parseSampleCompositionTimeOffsetsVersion1(i, ba); 112 | } 113 | } 114 | } 115 | 116 | private function parseSampleCompositionTimeOffsetsVersion1(i:uint, ba:ByteArray):void { 117 | var cts:uint = Bytes.readNumber(ba); 118 | 119 | if (((cts >> 31) & 0x1) == 1) { 120 | _sampleCompositionTimeOffset[i] = cts - 0xffffffff; 121 | } else { 122 | _sampleCompositionTimeOffset[i] = cts; 123 | } 124 | } 125 | 126 | private function parseSampleCompositionTimeOffsetsVersion0(i:uint, ba:ByteArray):void { 127 | _sampleCompositionTimeOffset[i] = Bytes.readNumber(ba); 128 | } 129 | 130 | private function parseSampleSizeIfNeeded(present:Boolean, i:uint, ba:ByteArray):void { 131 | if (present) { 132 | _sampleSize[i] = Bytes.readNumber(ba); 133 | } 134 | } 135 | 136 | private function parseSampleDurationIfNeeded(i:uint, ba:ByteArray):void { 137 | if (_sampleDurationPresent) { 138 | _sampleDuration[i] = Bytes.readNumber(ba); 139 | } 140 | } 141 | 142 | private function parseSampleFlagsIfNeeded(present:Boolean, i:uint, ba:ByteArray):void { 143 | if (present) { 144 | var sampleFlags:uint = ba.readUnsignedInt(); 145 | _sampleDependsOn[i] = (sampleFlags >> 24) & 0x03; 146 | _sampleIsDependedOn[i] = (sampleFlags >> 22) & 0x03; 147 | } 148 | } 149 | } 150 | } -------------------------------------------------------------------------------- /src/as3/com/dash/boxes/TrackHeaderBox.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
7 | */ 8 | 9 | package com.dash.boxes { 10 | import com.dash.utils.Console; 11 | 12 | import flash.utils.ByteArray; 13 | 14 | public class TrackHeaderBox extends FullBox { 15 | private var _id:uint; 16 | 17 | public function TrackHeaderBox(offset:uint, size:uint) { 18 | super(offset, size); 19 | } 20 | 21 | public function get id():uint { 22 | return _id; 23 | } 24 | 25 | override protected function parseBox(ba:ByteArray):void { 26 | if (version == 0) { 27 | 28 | // // created mac UTC date 29 | // ba.position += 4; 30 | // // modified mac UTC date 31 | // ba.position += 4; 32 | 33 | ba.position += 8; 34 | } else if (version == 1) { 35 | 36 | // // created mac UTC date 37 | // ba.position += 8; 38 | // // modified mac UTC date 39 | // ba.position += 8; 40 | 41 | ba.position += 16; 42 | } else { 43 | throw Console.getInstance().logError(new Error("Unknown TrackHeaderBox version")); 44 | } 45 | 46 | _id = ba.readUnsignedInt(); 47 | } 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /src/as3/com/dash/events/MessageEvent.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | */ 8 | 9 | package com.dash.events { 10 | 11 | import flash.events.Event; 12 | 13 | public class MessageEvent extends Event { 14 | public static const ADDED:String = "messageAdded"; 15 | 16 | private var _message:String; 17 | 18 | public function MessageEvent(type:String, bubbles:Boolean = false, cancelable:Boolean = false, 19 | message:String='') { 20 | super(type, bubbles, cancelable); 21 | 22 | _message = message; 23 | } 24 | 25 | public function get message():String { 26 | return _message; 27 | } 28 | 29 | } 30 | } -------------------------------------------------------------------------------- /src/as3/com/dash/handlers/AudioSegmentHandler.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
7 | */ 8 | 9 | package com.dash.handlers { 10 | import com.dash.boxes.FLVTag; 11 | import com.dash.boxes.Muxer; 12 | 13 | import flash.utils.ByteArray; 14 | 15 | public class AudioSegmentHandler extends MediaSegmentHandler { 16 | public function AudioSegmentHandler(segment:ByteArray, messages:Vector., 17 | defaultSampleDuration:uint, timescale:uint, timestamp:Number, mixer:Muxer) { 18 | super(segment, messages, defaultSampleDuration, timescale, timestamp, mixer); 19 | } 20 | 21 | protected override function buildMessage(sampleDuration:uint, sampleSize:uint, sampleDependsOn:uint, 22 | sampleIsDependedOn:uint, compositionTimeOffset:Number, 23 | dataOffset:uint, ba:ByteArray):FLVTag { 24 | var message:FLVTag = new FLVTag(); 25 | 26 | message.markAsAudio(); 27 | 28 | message.timestamp = _timestamp; 29 | _timestamp = message.timestamp + sampleDuration * 1000 / _timescale; 30 | 31 | message.duration = sampleDuration * 1000 * 1000 / _timescale; //sampleDuration / _timescale;// * 1000 32 | 33 | message.length = sampleSize; 34 | 35 | message.dataOffset = dataOffset; 36 | 37 | message.data = new ByteArray(); 38 | ba.position = message.dataOffset; 39 | ba.readBytes(message.data, 0, sampleSize); 40 | 41 | return message; 42 | } 43 | } 44 | } -------------------------------------------------------------------------------- /src/as3/com/dash/handlers/IndexSegmentHandler.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | */ 8 | 9 | package com.dash.handlers { 10 | 11 | import com.dash.utils.Bytes; 12 | import com.dash.utils.Console; 13 | 14 | import flash.utils.ByteArray; 15 | 16 | public class IndexSegmentHandler { 17 | public static const SEGMENT_INDEX_BOX_TYPE:String = "sidx"; 18 | 19 | private var _references:Vector. 
= new Vector.(); 20 | 21 | public function IndexSegmentHandler(segment:ByteArray, indexSegmentRangeBegin:Number) { 22 | processSegment(segment, indexSegmentRangeBegin); 23 | } 24 | 25 | private function processSegment(segment:ByteArray, indexSegmentRangeBegin:Number):void { 26 | segment.position = 0; 27 | 28 | var size:Number = segment.readUnsignedInt(); 29 | var type:String = segment.readUTFBytes(4); 30 | 31 | while (SEGMENT_INDEX_BOX_TYPE != type) { 32 | segment.position += size - 8; 33 | size = segment.readUnsignedInt(); 34 | type = segment.readUTFBytes(4); 35 | } 36 | 37 | segment.position -= 8; 38 | 39 | var box:ByteArray = new ByteArray(); 40 | segment.readBytes(box, 0, size); 41 | processSegmentIndexBox(box, indexSegmentRangeBegin); 42 | } 43 | 44 | private function processSegmentIndexBox(box:ByteArray, indexSegmentRangeBegin:Number):void { 45 | var size:Number = box.readUnsignedInt(); 46 | var type:String = box.readUTFBytes(4); 47 | 48 | if (size == 1) { 49 | 50 | // large size box used, read next 8 bytes 51 | var msb:uint = box.readUnsignedInt(); 52 | var lsb:uint = box.readUnsignedInt(); 53 | size = (Number(msb) * Math.pow(2, 32)) + Number(lsb); 54 | } 55 | 56 | if ((SEGMENT_INDEX_BOX_TYPE == type) && (box.bytesAvailable >= size - 8)) { 57 | var version:Number = box.readUnsignedInt(); 58 | box.position += 4; 59 | var timescale:Number = box.readUnsignedInt(); 60 | 61 | var earliestPresentationTime:Number; 62 | var firstOffset:Number; 63 | 64 | if (version == 0) { 65 | earliestPresentationTime = box.readUnsignedInt(); 66 | firstOffset = box.readUnsignedInt(); 67 | } else { 68 | earliestPresentationTime = Bytes.readNumber(box, 8); 69 | firstOffset = Bytes.readNumber(box, 8); 70 | } 71 | 72 | firstOffset += size + indexSegmentRangeBegin; 73 | 74 | box.position += 2; 75 | var referenceCount:Number = box.readUnsignedShort(); 76 | 77 | var offset:Number = firstOffset; 78 | var time:Number = earliestPresentationTime; 79 | 80 | for (var i:int = 0; i < referenceCount; i++) { 81 | var referenceTypeAndSize:uint = box.readUnsignedInt(); 82 | var referenceType:uint = referenceTypeAndSize >>> 31; 83 | var referenceSize:uint = referenceTypeAndSize & 0x7fffffff; 84 | var referenceDuration:uint = box.readUnsignedInt(); 85 | box.position += 4; 86 | 87 | var reference:Object = {}; 88 | reference.range = offset + "-" + (offset + referenceSize - 1); 89 | reference.startTimestamp = time / timescale; // seconds 90 | reference.endTimestamp = (time + referenceDuration) / timescale; // seconds 91 | 92 | _references.push(reference); 93 | 94 | offset += referenceSize; 95 | time += referenceDuration; 96 | } 97 | } 98 | } 99 | 100 | public function get references():Vector. { 101 | return _references; 102 | } 103 | 104 | public function toString():String { 105 | return "referencesCount='" + _references.length + "'"; 106 | } 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /src/as3/com/dash/handlers/InitializationAudioSegmentHandler.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
7 | */ 8 | 9 | package com.dash.handlers { 10 | import com.dash.boxes.FLVTag; 11 | import com.dash.boxes.SampleEntry; 12 | 13 | import flash.utils.ByteArray; 14 | 15 | public class InitializationAudioSegmentHandler extends InitializationSegmentHandler { 16 | public function InitializationAudioSegmentHandler(ba:ByteArray) { 17 | super(ba); 18 | } 19 | 20 | override protected function get expectedTrackType():String { 21 | return 'soun'; 22 | } 23 | 24 | protected override function buildMessage(sampleEntry:SampleEntry):FLVTag { 25 | var message:FLVTag = new FLVTag(); 26 | 27 | message.markAsAudio(); 28 | 29 | message.timestamp = 0; 30 | 31 | message.length = sampleEntry.data.length; 32 | 33 | message.data = new ByteArray(); 34 | sampleEntry.data.readBytes(message.data, 0, sampleEntry.data.length); 35 | 36 | message.setup = true; 37 | 38 | return message; 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /src/as3/com/dash/handlers/InitializationSegmentHandler.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | */ 8 | 9 | package com.dash.handlers { 10 | import com.dash.boxes.FLVTag; 11 | import com.dash.boxes.MovieBox; 12 | import com.dash.boxes.SampleEntry; 13 | import com.dash.boxes.TrackBox; 14 | import com.dash.boxes.TrackExtendsBox; 15 | import com.dash.utils.Console; 16 | 17 | import flash.errors.IllegalOperationError; 18 | import flash.utils.ByteArray; 19 | 20 | public class InitializationSegmentHandler extends SegmentHandler { 21 | private var _timescale:Number = 0; 22 | private var _defaultSampleDuration:uint = 0; 23 | private var _messages:Vector. = new Vector.(); 24 | 25 | public function InitializationSegmentHandler(ba:ByteArray) { 26 | parseMovieBox(ba); 27 | } 28 | 29 | public function get timescale():Number { 30 | return _timescale; 31 | } 32 | 33 | public function get defaultSampleDuration():Number { 34 | return _defaultSampleDuration; 35 | } 36 | 37 | public function get messages():Vector. 
{ 38 | return _messages.concat(); // a shallow copy 39 | } 40 | 41 | private function parseMovieBox(ba:ByteArray):void { 42 | var offsetAndSize:Object = goToBox("moov", ba); 43 | var offset:uint = offsetAndSize.offset; 44 | var size:uint = offsetAndSize.size; 45 | 46 | var movie:MovieBox = new MovieBox(offset, size); 47 | movie.parse(ba); 48 | 49 | var track:TrackBox = findTrackWithSpecifiedType(movie); 50 | 51 | loadTimescale(track); 52 | loadMessages(track); 53 | loadDefaultSampleDuration(movie, track.tkhd.id); 54 | } 55 | 56 | private function findTrackWithSpecifiedType(movie:MovieBox):TrackBox { 57 | for each (var track:TrackBox in movie.traks) { 58 | if (track.mdia.hdlr.type == expectedTrackType) { 59 | return track; 60 | } 61 | } 62 | 63 | throw Console.getInstance().logError(new Error("Track isn't defined, type='" + expectedTrackType + "'")); 64 | } 65 | 66 | protected function get expectedTrackType():String { 67 | throw new IllegalOperationError("Method isn't implemented"); 68 | } 69 | 70 | private function loadTimescale(track:TrackBox):void { 71 | _timescale = track.mdia.mdhd.timescale; 72 | } 73 | 74 | private function loadMessages(track:TrackBox):void { 75 | var sampleEntry:SampleEntry = buildSampleEntry(track); 76 | var message:FLVTag = buildMessage(sampleEntry); 77 | _messages.push(message); 78 | } 79 | 80 | private function loadDefaultSampleDuration(movie:MovieBox, trackId:uint):void { 81 | for each (var trex:TrackExtendsBox in movie.mvex.trexs) { 82 | if (trackId == trex.trackId) { 83 | _defaultSampleDuration = trex.defaultSampleDuration; 84 | return; 85 | } 86 | } 87 | 88 | Console.getInstance().warn("Default sample duration isn't defined, trackId='" + trackId + "'"); 89 | } 90 | 91 | private function buildSampleEntry(track:TrackBox):SampleEntry { 92 | return track.mdia.minf.stbl.stsd.sampleEntries[0]; 93 | } 94 | 95 | protected function buildMessage(sampleEntry:SampleEntry):FLVTag { 96 | throw new IllegalOperationError("Method isn't implemented"); 97 | } 98 | 99 | public function toString():String { 100 | return "timescale='" + _timescale + "', defaultSampleDuration='" + _defaultSampleDuration 101 | + "', messagesCount='" + _messages.length + "'"; 102 | } 103 | } 104 | } 105 | -------------------------------------------------------------------------------- /src/as3/com/dash/handlers/InitializationVideoSegmentHandler.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
7 | */ 8 | 9 | package com.dash.handlers { 10 | import com.dash.boxes.FLVTag; 11 | import com.dash.boxes.SampleEntry; 12 | 13 | import flash.utils.ByteArray; 14 | 15 | public class InitializationVideoSegmentHandler extends InitializationSegmentHandler { 16 | public function InitializationVideoSegmentHandler(ba:ByteArray) { 17 | super(ba); 18 | } 19 | 20 | override protected function get expectedTrackType():String { 21 | return 'vide'; 22 | } 23 | 24 | protected override function buildMessage(sampleEntry:SampleEntry):FLVTag { 25 | var message:FLVTag = new FLVTag(); 26 | 27 | message.markAsVideo(); 28 | 29 | message.timestamp = 0; 30 | 31 | message.length = sampleEntry.data.length; 32 | 33 | message.data = new ByteArray(); 34 | sampleEntry.data.readBytes(message.data, 0, sampleEntry.data.length); 35 | 36 | message.frameType = FLVTag.UNKNOWN; 37 | 38 | message.compositionTimestamp = 0; 39 | 40 | message.setup = true; 41 | 42 | return message; 43 | } 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /src/as3/com/dash/handlers/MediaSegmentHandler.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | */ 8 | 9 | package com.dash.handlers { 10 | import com.dash.boxes.FLVTag; 11 | import com.dash.boxes.Muxer; 12 | import com.dash.boxes.MovieFragmentBox; 13 | import com.dash.boxes.TrackFragmentHeaderBox; 14 | import com.dash.boxes.TrackFragmentRunBox; 15 | 16 | import flash.errors.IllegalOperationError; 17 | import flash.utils.ByteArray; 18 | 19 | public class MediaSegmentHandler extends SegmentHandler { 20 | protected var _messages:Vector.; 21 | protected var _timescale:uint; 22 | protected var _timestamp:Number; 23 | 24 | private var _bytes:ByteArray; 25 | private var _movieFragmentBox:MovieFragmentBox; 26 | private var _defaultSampleDuration:uint; 27 | 28 | private var _muxer:Muxer; 29 | 30 | private var _duration:uint; 31 | 32 | public function MediaSegmentHandler(ba:ByteArray, messages:Vector., defaultSampleDuration:uint, 33 | timescale:uint, timestamp:Number, muxer:Muxer) { 34 | _messages = messages; 35 | _defaultSampleDuration = defaultSampleDuration; 36 | _timescale = timescale; 37 | _timestamp = timestamp; 38 | 39 | _muxer = muxer; 40 | 41 | parseMovieFragmentBox(ba); 42 | parseMediaDataBox(ba); 43 | } 44 | 45 | public function get bytes():ByteArray { 46 | return _bytes; 47 | } 48 | 49 | public function get duration():uint { 50 | return _duration; 51 | } 52 | 53 | private function parseMovieFragmentBox(ba:ByteArray):void { 54 | var offsetAndSize:Object = goToBox("moof", ba); 55 | var offset:uint = offsetAndSize.offset; 56 | var size:uint = offsetAndSize.size; 57 | 58 | _movieFragmentBox = new MovieFragmentBox(offset, size); 59 | _movieFragmentBox.parse(ba); 60 | } 61 | 62 | private function parseMediaDataBox(ba:ByteArray):void { 63 | var size:Number = ba.readUnsignedInt(); 64 | var type:String = ba.readUTFBytes(4); 65 | 66 | validateType("mdat", type); 67 | validateSize(size); 68 | 69 | processTrackBox(ba); 70 | 71 | _bytes = _muxer.mux(_messages); 72 | _bytes.position = 0; // reset 73 | _duration = _muxer.duration; 74 | } 75 | 76 | public function processTrackBox(ba:ByteArray):void { 77 | validateTracksNumber(_movieFragmentBox.trafs.length); 78 | 
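// Exactly one traf is expected here (validateTracksNumber above throws otherwise): its tfhd carries per-fragment defaults, and each trun below lists the samples that loadMessages converts into FLV tags.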
79 | var headerBox:TrackFragmentHeaderBox = _movieFragmentBox.trafs[0].tfhd; 80 | var runBoxes:Vector. = _movieFragmentBox.trafs[0].truns; 81 | 82 | for each (var runBox:TrackFragmentRunBox in runBoxes) { 83 | var baseDataOffset:Number = loadBaseDataOffset(headerBox); 84 | 85 | setDefaultDurationIfNeeded(runBox, headerBox); 86 | loadMessages(runBox, baseDataOffset, ba); 87 | } 88 | } 89 | 90 | private function loadBaseDataOffset(headerBox:TrackFragmentHeaderBox):Number { 91 | 92 | // otherwise point to segment's begin 93 | return (headerBox.baseDataOffsetPresent) ? headerBox.baseDataOffset : _movieFragmentBox.offset; 94 | } 95 | 96 | private function setDefaultDurationIfNeeded(runBox:TrackFragmentRunBox, headerBox:TrackFragmentHeaderBox):void { 97 | if (!runBox.sampleDurationPresent && headerBox.defaultSampleDurationPresent) { 98 | _defaultSampleDuration = headerBox.defaultSampleDuration; 99 | } 100 | } 101 | 102 | private function loadMessages(runBox:TrackFragmentRunBox, baseDataOffset:Number, ba:ByteArray):void { 103 | var dataOffset:uint = runBox.dataOffset + baseDataOffset; 104 | var sampleSizes:Vector. = runBox.sampleSize; 105 | 106 | for (var i:uint = 0; i < sampleSizes.length; i++) { 107 | var sampleDuration:uint = loadSampleDuration(runBox, i); 108 | var compositionTimeOffset:int = loadCompositionTimeOffset(runBox, i); 109 | var sampleDependsOn:uint = loadSampleDependsOn(runBox, i); 110 | var sampleIsDependedOn:uint = loadSampleIsDependedOn(runBox, i); 111 | 112 | var message:FLVTag = buildMessage(sampleDuration, sampleSizes[i], sampleDependsOn, sampleIsDependedOn, 113 | compositionTimeOffset, dataOffset, ba); 114 | 115 | _messages.push(message); 116 | 117 | dataOffset = dataOffset + sampleSizes[i]; 118 | } 119 | } 120 | 121 | private function loadSampleDuration(runBox:TrackFragmentRunBox, i:uint):uint { 122 | return i < runBox.sampleDuration.length ? runBox.sampleDuration[i] : _defaultSampleDuration; 123 | } 124 | 125 | private function loadCompositionTimeOffset(runBox:TrackFragmentRunBox, i:uint):int { 126 | return i < runBox.sampleCompositionTimeOffset.length ? runBox.sampleCompositionTimeOffset[i] : NaN; 127 | } 128 | 129 | private function loadSampleDependsOn(runBox:TrackFragmentRunBox, i:uint):uint { 130 | return i < runBox.sampleDependsOn.length ? runBox.sampleDependsOn[i] : 0; 131 | } 132 | 133 | private function loadSampleIsDependedOn(runBox:TrackFragmentRunBox, i:uint):uint { 134 | return i < runBox.sampleIsDependedOn.length ? runBox.sampleIsDependedOn[i] : 0; 135 | } 136 | 137 | protected function buildMessage(sampleDuration:uint, sampleSize:uint, sampleDependsOn:uint, 138 | sampleIsDependedOn:uint, compositionTimeOffset:Number, 139 | dataOffset:uint, ba:ByteArray):FLVTag { 140 | throw new IllegalOperationError("Method isn't implemented"); 141 | } 142 | } 143 | } 144 | -------------------------------------------------------------------------------- /src/as3/com/dash/handlers/SegmentHandler.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
7 | */ 8 | 9 | package com.dash.handlers { 10 | import com.dash.utils.Console; 11 | 12 | import flash.utils.ByteArray; 13 | 14 | public class SegmentHandler { 15 | public function SegmentHandler() { 16 | } 17 | 18 | protected function goToBox(expectedType:String, ba:ByteArray):Object { 19 | var offset:uint = 0; 20 | var size:uint = 0; 21 | var type:String; 22 | 23 | do { 24 | ba.position = offset + size; 25 | 26 | offset = ba.position; 27 | size = ba.readUnsignedInt(); 28 | type = ba.readUTFBytes(4); 29 | 30 | validateSize(size); 31 | } while (expectedType != type && ba.bytesAvailable); 32 | 33 | validateType(expectedType, type); 34 | 35 | return { offset: offset, size: size }; 36 | } 37 | 38 | protected function validateType(expectedType:String, actualType:String):void { 39 | if (actualType != expectedType) { 40 | throw Console.getInstance().logError(new Error("Couldn't find any '" + expectedType + "' box")); 41 | } 42 | } 43 | 44 | protected function validateSize(size:uint):void { 45 | if (size == 1) { 46 | // don't support "large box", because default size is sufficient for fragmented movie 47 | throw Console.getInstance().logError(new Error("Large box isn't supported")); 48 | } 49 | } 50 | 51 | protected function validateTracksNumber(number:int):void { 52 | if (number > 1) { 53 | throw Console.getInstance().logError(new Error("Multiple tracks aren't supported")); 54 | } 55 | 56 | if (number < 1) { 57 | throw Console.getInstance().logError(new Error("Track isn't defined")); 58 | } 59 | } 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /src/as3/com/dash/handlers/VideoSegmentHandler.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
7 | */ 8 | 9 | package com.dash.handlers { 10 | import com.dash.boxes.FLVTag; 11 | import com.dash.boxes.Muxer; 12 | import com.dash.boxes.NalUnit; 13 | 14 | import flash.utils.ByteArray; 15 | 16 | public class VideoSegmentHandler extends MediaSegmentHandler { 17 | private static var _nalUnit:NalUnit = new NalUnit(); //TODO inject 18 | 19 | private static const MIN_CTO:int = -33; 20 | 21 | public function VideoSegmentHandler(segment:ByteArray, messages:Vector., defaultSampleDuration:uint, 22 | timescale:uint, timestamp:Number, mixer:Muxer) { 23 | super(segment, messages, defaultSampleDuration, timescale, timestamp, mixer); 24 | } 25 | 26 | protected override function buildMessage(sampleDuration:uint, sampleSize:uint, sampleDependsOn:uint, 27 | sampleIsDependedOn:uint, compositionTimeOffset:Number, 28 | dataOffset:uint, ba:ByteArray):FLVTag { 29 | var message:FLVTag = new FLVTag(); 30 | 31 | message.markAsVideo(); 32 | 33 | message.timestamp = _timestamp; 34 | _timestamp = message.timestamp + sampleDuration * 1000 / _timescale; 35 | 36 | message.duration = sampleDuration *1000*1000 / _timescale; 37 | 38 | message.length = sampleSize; 39 | 40 | message.dataOffset = dataOffset; 41 | 42 | message.data = new ByteArray(); 43 | ba.position = message.dataOffset; 44 | ba.readBytes(message.data, 0, sampleSize); 45 | 46 | if (sampleDependsOn == 2) { 47 | message.frameType = FLVTag.I_FRAME; 48 | } else if (sampleDependsOn == 1 && sampleIsDependedOn == 1) { 49 | message.frameType = FLVTag.P_FRAME; 50 | } else if (sampleDependsOn == 1 && sampleIsDependedOn == 2) { 51 | message.frameType = FLVTag.B_FRAME; 52 | } else { 53 | message.frameType = _nalUnit.parse(message.data); 54 | } 55 | 56 | if (!isNaN(compositionTimeOffset)) { 57 | message.compositionTimestamp = compositionTimeOffset * 1000 / _timescale - MIN_CTO; 58 | } 59 | 60 | return message; 61 | } 62 | 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /src/as3/com/dash/loaders/FragmentLoader.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | */ 8 | 9 | package com.dash.loaders { 10 | 11 | import com.dash.events.MessageEvent; 12 | import flash.events.EventDispatcher; 13 | 14 | public class FragmentLoader extends EventDispatcher { 15 | 16 | public function FragmentLoader() { 17 | } 18 | 19 | public function sendMessage(message:String):void{ 20 | dispatchEvent(new MessageEvent(MessageEvent.ADDED,false,false,message)); 21 | } 22 | 23 | public function testMessage():void{ 24 | sendMessage('in FragmentLoader') 25 | } 26 | 27 | } 28 | 29 | } -------------------------------------------------------------------------------- /src/as3/com/dash/utils/BandwidthMonitor.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
7 | */ 8 | 9 | package com.dash.utils { 10 | import flash.events.Event; 11 | import flash.events.EventDispatcher; 12 | import flash.net.URLLoader; 13 | 14 | public class BandwidthMonitor { 15 | private const HISTORY_LENGTH:Number = 20; 16 | 17 | private var _lastBandwidth:Number = 0; 18 | private var _history:Vector. = new Vector.(); 19 | 20 | public function BandwidthMonitor() { 21 | } 22 | 23 | public function appendListeners(http:EventDispatcher):void { 24 | var context:Object = {}; 25 | 26 | function onOpen(event:Event):void { 27 | context['start'] = new Date().getTime(); 28 | } 29 | 30 | function onComplete(event:Event):void { 31 | 32 | // seconds 33 | var duration:Number = (new Date().getTime() - context['start']) / 1000; 34 | 35 | // bytes 36 | var contentLengthBytes:Number = getContentLength(event); 37 | 38 | // bits 39 | var contentLengthBits:Number = contentLengthBytes * 9; 40 | 41 | if (duration < 0.01) { // avoid infinity 42 | duration = 0.01; 43 | } 44 | 45 | var bandwidth:Number = contentLengthBits / duration; 46 | 47 | _history.push(bandwidth); 48 | if (_history.length > HISTORY_LENGTH) { 49 | _history.shift(); 50 | } 51 | 52 | Console.getInstance().appendRealUserBandwidth(bandwidth); 53 | 54 | var sum:Number = 0; 55 | for (var i:uint = 0; i < _history.length; i++) { 56 | sum += _history[i]; 57 | } 58 | 59 | if ( _history.length != 0) { 60 | _lastBandwidth = sum / _history.length; 61 | } 62 | 63 | Console.getInstance().appendAverageUserBandwidth(_lastBandwidth); 64 | } 65 | 66 | //URLLoader events 67 | http.addEventListener(Event.OPEN, onOpen); 68 | http.addEventListener(Event.COMPLETE, onComplete); 69 | } 70 | 71 | private static function getContentLength(event:Event):Number { 72 | return URLLoader(event.target).bytesLoaded; 73 | } 74 | 75 | public function get userBandwidth():Number { 76 | return _lastBandwidth; 77 | } 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /src/as3/com/dash/utils/Base64.as: -------------------------------------------------------------------------------- 1 | package com.dash.utils { 2 | 3 | import flash.utils.ByteArray; 4 | 5 | public class Base64 { 6 | 7 | private static const _decodeChars:Vector. = InitDecodeChar(); 8 | 9 | public static function decode(str:String):ByteArray 10 | { 11 | var c1:int; 12 | var c2:int; 13 | var c3:int; 14 | var c4:int; 15 | var i:int = 0; 16 | var len:int = str.length; 17 | 18 | var byteString:ByteArray = new ByteArray(); 19 | byteString.writeUTFBytes(str); 20 | var outPos:int = 0; 21 | while (i < len) 22 | { 23 | //c1 24 | c1 = _decodeChars[int(byteString[i++])]; 25 | if (c1 == -1) 26 | break; 27 | 28 | //c2 29 | c2 = _decodeChars[int(byteString[i++])]; 30 | if (c2 == -1) 31 | break; 32 | 33 | byteString[int(outPos++)] = (c1 << 2) | ((c2 & 0x30) >> 4); 34 | 35 | //c3 36 | c3 = byteString[int(i++)]; 37 | if (c3 == 61) 38 | { 39 | byteString.length = outPos 40 | return byteString; 41 | } 42 | 43 | c3 = _decodeChars[int(c3)]; 44 | if (c3 == -1) 45 | break; 46 | 47 | byteString[int(outPos++)] = ((c2 & 0x0f) << 4) | ((c3 & 0x3c) >> 2); 48 | 49 | //c4 50 | c4 = byteString[int(i++)]; 51 | if (c4 == 61) 52 | { 53 | byteString.length = outPos 54 | return byteString; 55 | } 56 | 57 | c4 = _decodeChars[int(c4)]; 58 | if (c4 == -1) 59 | break; 60 | 61 | byteString[int(outPos++)] = ((c3 & 0x03) << 6) | c4; 62 | } 63 | byteString.length = outPos 64 | return byteString; 65 | } 66 | 67 | public static function InitDecodeChar():Vector. 68 | { 69 | 70 | var decodeChars:Vector. 
= new [ 71 | -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 72 | -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 73 | -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 62, -1, -1, -1, 63, 74 | 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -1, -1, -1, -1, -1, -1, 75 | -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 76 | 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -1, -1, -1, -1, -1, 77 | -1, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 78 | 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, -1, -1, -1, -1, -1, 79 | -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 80 | -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 81 | -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 82 | -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 83 | -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 84 | -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 85 | -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 86 | -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1]; 87 | 88 | return decodeChars; 89 | } 90 | } 91 | } -------------------------------------------------------------------------------- /src/as3/com/dash/utils/Bytes.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | */ 8 | 9 | package com.dash.utils { 10 | import flash.utils.ByteArray; 11 | 12 | public class Bytes { 13 | private static const NUMBER:uint = 4; 14 | 15 | public function Bytes() { 16 | throw new Error("It's a static class"); 17 | } 18 | 19 | public static function readNumber(ba:ByteArray, length:uint = NUMBER):uint { 20 | var number:uint = 0; 21 | 22 | for (var i:uint = 0; i < length; i++) { 23 | number = number << 8; 24 | number += ba.readUnsignedByte(); 25 | } 26 | 27 | return number; 28 | } 29 | 30 | public static function skipNumberIfNeeded(condition:Boolean, ba:ByteArray):void { 31 | if (condition) { 32 | ba.position += NUMBER; 33 | } 34 | } 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /src/as3/com/dash/utils/Console.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
7 | */ 8 | 9 | package com.dash.utils { 10 | import flash.events.TimerEvent; 11 | import flash.external.ExternalInterface; 12 | import flash.utils.Timer; 13 | 14 | public class Console { 15 | private static var instance:Console; 16 | 17 | private static const ERROR:String = "error"; 18 | private static const WARN:String = "warn"; 19 | private static const INFO:String = "info"; 20 | private static const DEBUG:String = "debug"; 21 | 22 | private var enabled:Boolean = false; 23 | private var events:Array = []; 24 | 25 | public function Console() { 26 | } 27 | 28 | public static function getInstance():Console { 29 | if (instance == null) { 30 | instance = new Console(); 31 | } 32 | 33 | return instance; 34 | } 35 | 36 | public function enable():void { 37 | enabled = true; 38 | 39 | var timer:Timer = new Timer(2000); // 2 second 40 | timer.addEventListener(TimerEvent.TIMER, send); 41 | timer.start(); 42 | } 43 | 44 | private function send(event:TimerEvent):void { 45 | if (events.length == 0) { 46 | return; 47 | } 48 | 49 | ExternalInterface.call("handleEvents", events); 50 | 51 | while (events.length > 0) { 52 | events.pop(); 53 | } 54 | } 55 | 56 | public function error(message:String):void { 57 | log(ERROR, message); 58 | } 59 | 60 | public function warn(message:String):void { 61 | log(WARN, message); 62 | } 63 | 64 | public function info(message:String):void { 65 | log(INFO, message); 66 | } 67 | 68 | public function debug(message:String):void { 69 | log(DEBUG, message); 70 | } 71 | 72 | public function logError(error:Error):Error { 73 | log(ERROR, error.message); 74 | return error; 75 | } 76 | 77 | public function log(level:String, message:String):void { 78 | if (!enabled) { 79 | return; 80 | } 81 | 82 | trace(message); 83 | 84 | events.push({ 85 | id: "log", 86 | level: level, 87 | message: message 88 | }); 89 | } 90 | 91 | public function appendRealUserBandwidth(bandwidth:Number):void { 92 | appendUserBandwidth("real", bandwidth); 93 | } 94 | 95 | public function appendAverageUserBandwidth(bandwidth:Number):void { 96 | appendUserBandwidth("average", bandwidth); 97 | } 98 | 99 | public function appendUserBandwidth(type:String, bandwidth:Number):void { 100 | if (!enabled) { 101 | return; 102 | } 103 | 104 | events.push({ 105 | id: "appendUserBandwidth", 106 | dataset: type, 107 | bandwidth: bandwidth 108 | }); 109 | } 110 | 111 | public function appendVideoBandwidth(bandwidth:Number):void { 112 | appendMediaBandwidth("video", bandwidth); 113 | } 114 | 115 | public function appendAudioBandwidth(bandwidth:Number):void { 116 | appendMediaBandwidth("audio", bandwidth); 117 | } 118 | 119 | public function appendMediaBandwidth(type:String, bandwidth:Number):void { 120 | if (!enabled) { 121 | return; 122 | } 123 | 124 | events.push({ 125 | id: "appendMediaBandwidth", 126 | dataset: type, 127 | bandwidth: bandwidth 128 | }); 129 | } 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /src/as3/com/dash/utils/Manifest.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
7 | */ 8 | 9 | package com.dash.utils { 10 | public class Manifest { 11 | public function Manifest() { 12 | throw new Error("It's a static class"); 13 | } 14 | 15 | public static function toSeconds(value:String):Number { 16 | 17 | // format: "PT\d+H\d+M\d+S"; "S" means seconds, "M" means minutes and "H" means hours 18 | 19 | var match:Array; 20 | 21 | match = value.match(/([\d.]+)S/); 22 | var seconds:Number = match ? Number(match[1]) : 0; 23 | 24 | match = value.match(/([\d.]+)M/); 25 | var minutes:Number = match ? Number(match[1]) : 0; 26 | 27 | match = value.match(/([\d.]+)H/); 28 | var hours:Number = match ? Number(match[1]) : 0; 29 | 30 | return (hours * 60 * 60) + (minutes * 60) + seconds; 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /src/as3/com/dash/utils/SmoothMonitor.as: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 castLabs GmbH 3 | * 4 | * This Source Code Form is subject to the terms of the Mozilla Public 5 | * License, v. 2.0. If a copy of the MPL was not distributed with this 6 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | */ 8 | 9 | package com.dash.utils { 10 | import flash.events.EventDispatcher; 11 | import flash.events.NetStatusEvent; 12 | 13 | import org.osmf.net.NetStreamCodes; 14 | 15 | public class SmoothMonitor { 16 | private static const ACCEPTED_BUFFERING_COUNT:uint = 1; 17 | 18 | private var _bufferingCount:Number = 0; 19 | 20 | public function SmoothMonitor() { 21 | } 22 | 23 | public function appendListeners(netStream:EventDispatcher):void { 24 | netStream.addEventListener(NetStatusEvent.NET_STATUS, onNetStatus); 25 | } 26 | 27 | private function onNetStatus(event:NetStatusEvent):void { 28 | if (event.info.code == NetStreamCodes.NETSTREAM_BUFFER_EMPTY) { 29 | _bufferingCount++; 30 | Console.getInstance().warn("Registered buffering incident, bufferingCount='" + _bufferingCount + "'"); 31 | } 32 | 33 | if (event.info.code == NetStreamCodes.NETSTREAM_SEEK_NOTIFY) { 34 | _bufferingCount = 0; 35 | Console.getInstance().info("Reset buffering incidents counter"); 36 | } 37 | } 38 | 39 | public function get fix():Number { 40 | var fix:Number = _bufferingCount - ACCEPTED_BUFFERING_COUNT; 41 | return fix > 0 ? 
fix : 0; 42 | } 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /src/as3/com/streamroot/MSEPolyfill.as: -------------------------------------------------------------------------------- 1 | package com.streamroot { 2 | 3 | import com.streamroot.buffer.Segment; 4 | import com.streamroot.buffer.StreamBuffer; 5 | import com.streamroot.util.TrackTypeHelper; 6 | 7 | import flash.events.Event; 8 | import flash.external.ExternalInterface; 9 | import flash.net.NetStreamAppendBytesAction; 10 | import flash.system.MessageChannel; 11 | import flash.system.Worker; 12 | import flash.system.WorkerDomain; 13 | import flash.utils.ByteArray; 14 | import flash.utils.getQualifiedClassName; 15 | import flash.utils.setTimeout; 16 | 17 | public class MSEPolyfill { 18 | 19 | private static const TIMEOUT_LENGTH:uint = 5; 20 | 21 | [Embed(source="transcoder/TranscodeWorker.swf", mimeType="application/octet-stream")] 22 | private static var WORKER_SWF:Class; 23 | 24 | private var _netStreamWrapper:NetStreamWrapper; 25 | private var _streamBuffer:StreamBuffer; 26 | private var _jsReady:Boolean = false; 27 | private var _loaded:Boolean = false; 28 | 29 | private var _seek_offset:Number = 0; 30 | private var _ended:Boolean = false; 31 | 32 | private var _lastWidth:Number = 0; 33 | private var _lastHeight:Number = 0; 34 | private var _lastDuration:Number = 0; 35 | 36 | private var _worker:Worker; 37 | 38 | private var _mainToWorker:MessageChannel; 39 | private var _workerToMain:MessageChannel; 40 | private var _commChannel:MessageChannel; 41 | 42 | private var _isWorkerReady:Boolean = false; 43 | 44 | private var _isWorkerBusy:Boolean = false; 45 | private var _pendingAppend:Object; 46 | private var _discardAppend:Boolean = false; //Used to discard data from worker in case we were seeking during transcoding 47 | 48 | public function MSEPolyfill(netStreamWrapper:NetStreamWrapper) { 49 | _netStreamWrapper = netStreamWrapper; 50 | 51 | //StreamrootMSE callbacks 52 | ExternalInterface.addCallback("addSourceBuffer", addSourceBuffer); 53 | ExternalInterface.addCallback("appendBuffer", appendBuffer); 54 | //ExternalInterface.addCallback("buffered", buffered); 55 | 56 | //StreamBuffer callbacks 57 | ExternalInterface.addCallback("remove", remove); 58 | 59 | ExternalInterface.addCallback("jsReady", jsReady); 60 | 61 | //StreamrootInterface callbacks 62 | //METHODS 63 | ExternalInterface.addCallback("onMetaData", onMetaData); 64 | ExternalInterface.addCallback("play", play); 65 | ExternalInterface.addCallback("pause", pause); 66 | ExternalInterface.addCallback("seek", seek); 67 | ExternalInterface.addCallback("bufferEmpty", bufferEmpty); 68 | //GETTERS 69 | ExternalInterface.addCallback("currentTime", currentTime); 70 | ExternalInterface.addCallback("paused", paused); 71 | ExternalInterface.addCallback("readyState", readyState); 72 | 73 | _streamBuffer = new StreamBuffer(this); 74 | 75 | setupWorker(); 76 | } 77 | 78 | private function setupWorker():void { 79 | var workerBytes:ByteArray = new WORKER_SWF() as ByteArray; 80 | _worker = WorkerDomain.current.createWorker(workerBytes); 81 | 82 | // Send to worker 83 | _mainToWorker = Worker.current.createMessageChannel(_worker); 84 | _worker.setSharedProperty("mainToWorker", _mainToWorker); 85 | 86 | // Receive from worker 87 | _workerToMain = _worker.createMessageChannel(Worker.current); 88 | _workerToMain.addEventListener(Event.CHANNEL_MESSAGE, onWorkerToMain); 89 | _worker.setSharedProperty("workerToMain", _workerToMain); 90 | 91 
| // Receive startup message from worker 92 | _commChannel = _worker.createMessageChannel(Worker.current); 93 | _commChannel.addEventListener(Event.CHANNEL_MESSAGE, onCommChannel); 94 | _worker.setSharedProperty("commChannel", _commChannel); 95 | 96 | _worker.start(); 97 | } 98 | 99 | private function setSeekOffset(timeSeek:Number):void { 100 | debug("Set seek offset", this); 101 | _seek_offset = timeSeek; 102 | 103 | if (_pendingAppend) { 104 | //remove pending append job to avoid appending it after seek 105 | debug("Discarding _pendingAppend " + _pendingAppend.type, this); 106 | sendSegmentFlushedMessage(_pendingAppend.type); 107 | _pendingAppend = null; 108 | } 109 | 110 | //If worker is appending a segment during seek, discard it as we don't want to append it 111 | if (_isWorkerBusy) { 112 | debug("Setting discard to true", this); 113 | _discardAppend = true 114 | } 115 | _streamBuffer.onSeek(); 116 | _mainToWorker.send('seeking'); 117 | } 118 | 119 | private function addSourceBuffer(type:String):void { 120 | var key:String = TrackTypeHelper.getType(type); 121 | if (key) { 122 | _streamBuffer.addSourceBuffer(key); 123 | } else { 124 | error("Error: Type not supported: " + type); 125 | } 126 | } 127 | 128 | //timestampStart and timestampEnd in second 129 | private function appendBuffer(data:String, type:String, isInit:Boolean, startTime:Number = 0, endTime:Number = 0):void { 130 | debug("AppendBuffer", this); 131 | var message:Object = {data: data, type: type, isInit: isInit, startTime: startTime, endTime: endTime, offset: _seek_offset};// - offset + 100}; 132 | appendOrQueue(message); 133 | } 134 | 135 | private function appendOrQueue(message:Object):void { 136 | if (!_isWorkerBusy) { 137 | _isWorkerBusy = true; 138 | setTimeout(sendWorkerMessage, TIMEOUT_LENGTH, message); 139 | //_mainToWorker.send(message); 140 | } else if (!_pendingAppend) { 141 | _pendingAppend = message; //TODO: clear this job when we seek 142 | } else { 143 | error("Error: not supporting more than one pending job for now", this); 144 | sendSegmentFlushedMessage(message.type); 145 | } 146 | } 147 | 148 | private function sendWorkerMessage():void { 149 | _mainToWorker.send(arguments[0]); 150 | } 151 | 152 | public function getFileHeader():ByteArray { 153 | var output:ByteArray = new ByteArray(); 154 | output.writeByte(0x46); // 'F' 155 | output.writeByte(0x4c); // 'L' 156 | output.writeByte(0x56); // 'V' 157 | output.writeByte(0x01); // version 0x01 158 | 159 | var flags:uint = 0; 160 | 161 | flags |= 0x01; 162 | 163 | output.writeByte(flags); 164 | 165 | var offsetToWrite:uint = 9; // minimum file header byte count 166 | 167 | output.writeUnsignedInt(offsetToWrite); 168 | 169 | var previousTagSize0:uint = 0; 170 | 171 | output.writeUnsignedInt(previousTagSize0); 172 | 173 | return output; 174 | } 175 | 176 | private function onWorkerToMain(event:Event):void { 177 | var message:* = _workerToMain.receive(); 178 | 179 | var type:String = message.type; 180 | var isInit:Boolean = message.isInit; 181 | var width:Number = message.width; 182 | var height:Number = message.height; 183 | 184 | var segment:Segment = new Segment(message.segmentBytes, message.type, message.startTime, message.endTime); 185 | 186 | _isWorkerBusy = false; 187 | 188 | if (!_discardAppend) { 189 | 190 | if (!isInit) { 191 | debug("Appending segment in StreamBuffer", this); 192 | if (_lastHeight === 0 && width > 0 && height > 0) { 193 | debug("setting video size: " + width + " - " + height); 194 | onMetaData(0, width, height); 195 | } 196 | 197 | 
_streamBuffer.appendSegment(segment, TrackTypeHelper.getType(segment.type)); 198 | } 199 | 200 | if (_pendingAppend) { 201 | debug("Unqueing", this); 202 | appendOrQueue(_pendingAppend); 203 | _pendingAppend = null; 204 | } 205 | 206 | if (TrackTypeHelper.isAudio(type)) { 207 | setTimeout(updateendAudio, TIMEOUT_LENGTH); 208 | } else if (TrackTypeHelper.isVideo(type)) { 209 | setTimeout(updateendVideo, TIMEOUT_LENGTH); 210 | } else { 211 | error("no type matching"); 212 | } 213 | } else { 214 | sendSegmentFlushedMessage(type); 215 | _discardAppend = false; 216 | } 217 | } 218 | 219 | private function sendSegmentFlushedMessage(type:String):void { 220 | debug("Discarding segment " + type, this); 221 | 222 | if (TrackTypeHelper.isAudio(type)) { 223 | setTimeout(updateendAudio, TIMEOUT_LENGTH, true); 224 | } else if (TrackTypeHelper.isVideo(type)) { 225 | setTimeout(updateendVideo, TIMEOUT_LENGTH, true); 226 | } 227 | } 228 | 229 | private function updateendAudio(error:Boolean = false):void { 230 | ExternalInterface.call("fMSE.callbacks.updateend_audio", error); 231 | } 232 | 233 | private function updateendVideo(error:Boolean = false):void { 234 | ExternalInterface.call("fMSE.callbacks.updateend_video", error); 235 | } 236 | 237 | private function onCommChannel(event:Event):void { 238 | var message:* = _commChannel.receive(); 239 | _isWorkerReady = true; 240 | 241 | if (message.command == "debug") { 242 | debug(message.message, this); 243 | } else if (message.command == "error") { 244 | transcodeError(message.message); 245 | flush(message); 246 | } 247 | } 248 | 249 | private function jsReady():void { 250 | _jsReady = true; 251 | } 252 | 253 | //StreamBuffer function 254 | public function appendNetStream(bytes:ByteArray):void { 255 | _netStreamWrapper.appendBuffer(bytes); 256 | } 257 | 258 | public function remove(start:Number, end:Number, type:String):Number { 259 | return _streamBuffer.removeDataFromSourceBuffer(start, end, TrackTypeHelper.getType(type)); 260 | } 261 | 262 | public function getBufferLength():Number { 263 | return _netStreamWrapper.getBufferLength(); 264 | } 265 | 266 | //StreamrootInterface function 267 | private function onMetaData(duration:Number, width:Number = 0, height:Number = 0):void { 268 | if (_lastDuration === 0) { 269 | if (duration > 0) { 270 | _lastDuration = duration; 271 | _streamBuffer.setDuration(duration); 272 | } 273 | } else if (duration === 0) { 274 | duration = _lastDuration; 275 | } 276 | if (_lastHeight === 0) { 277 | _lastWidth = width; 278 | _lastHeight = height; 279 | _netStreamWrapper.onMetaData(duration, width, height); 280 | } 281 | } 282 | 283 | private function play():void { 284 | _netStreamWrapper.play(); 285 | } 286 | 287 | private function pause():void { 288 | _netStreamWrapper.pause(); 289 | } 290 | 291 | private function seek(time:Number):void { 292 | setSeekOffset(time); 293 | _netStreamWrapper.seekBySeconds(time); 294 | } 295 | 296 | public function currentTime():Number { 297 | return _netStreamWrapper.time; 298 | } 299 | 300 | private function paused():Boolean { 301 | return _netStreamWrapper.paused; 302 | } 303 | 304 | private function readyState():Number { 305 | return _netStreamWrapper.readyState; 306 | } 307 | 308 | public function bufferEmpty():void { 309 | _netStreamWrapper.onBufferEmpty(true); 310 | triggerWaiting(); 311 | } 312 | 313 | public function bufferFull():void { 314 | _netStreamWrapper.onBuffersReady(); 315 | triggerPlaying(); 316 | } 317 | 318 | public function loaded():void { 319 | //append the FLV Header to the 
provider, using appendBytesAction 320 | _netStreamWrapper.appendBytesAction(NetStreamAppendBytesAction.RESET_BEGIN); 321 | _netStreamWrapper.appendBuffer(getFileHeader()); 322 | 323 | if (!_loaded) { 324 | _loaded = true; 325 | } 326 | 327 | //Tell our Javascript library to start loading video segments 328 | triggerLoadStart(); 329 | } 330 | 331 | //StreamrootInterface events 332 | public function triggerSeeked():void { 333 | //Trigger event when seek is done 334 | ExternalInterface.call("fMSE.callbacks.seeked"); 335 | } 336 | 337 | public function triggerLoadStart():void { 338 | //Trigger event when we want to start loading data (at the beginning of the video or on replay) 339 | if (_jsReady) { 340 | ExternalInterface.call("fMSE.callbacks.loadstart"); 341 | } else { 342 | setTimeout(triggerLoadStart, 10); 343 | } 344 | } 345 | 346 | public function triggerPlay():void { 347 | _ended = false; 348 | 349 | if (_jsReady) { 350 | //Trigger event when video starts playing. 351 | ExternalInterface.call("fMSE.callbacks.play"); 352 | if (_streamBuffer.isBufferReady()) { 353 | triggerPlaying(); 354 | } 355 | //_firstPlayEventSent = true; 356 | } else { 357 | setTimeout(triggerPlay, 10); 358 | } 359 | } 360 | 361 | public function triggerPause():void { 362 | //Trigger event when video starts playing. Not used for now 363 | if (_jsReady) { 364 | ExternalInterface.call("fMSE.callbacks.pause"); 365 | } else { 366 | setTimeout(triggerPause, 10); 367 | } 368 | } 369 | 370 | public function triggerPlaying():void { 371 | _ended = false; 372 | 373 | //Trigger event when media is playing 374 | if (_jsReady) { 375 | ExternalInterface.call("fMSE.callbacks.playing"); 376 | } else { 377 | setTimeout(triggerPlaying, 10); 378 | } 379 | } 380 | 381 | public function triggerWaiting():void { 382 | //Trigger event when video has been paused but is expected to resume (ie on buffering or manual paused) 383 | if (_jsReady) { 384 | ExternalInterface.call("fMSE.callbacks.waiting"); 385 | } else { 386 | setTimeout(triggerWaiting, 10); 387 | } 388 | } 389 | 390 | public function triggerStopped():void { 391 | //Trigger event when video ends. 
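//_ended (cleared in triggerPlay and triggerPlaying) guards against dispatching onStop and the stopped callback more than once per end of stream.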
392 | if (!_ended) { 393 | _netStreamWrapper.onStop(); 394 | ExternalInterface.call("fMSE.callbacks.stopped"); 395 | _ended = true; 396 | } 397 | } 398 | 399 | public function triggerCanplay():void { 400 | //trigger event when there is enough data in the buffer to start playing 401 | ExternalInterface.call("fMSE.callbacks.canplay"); 402 | } 403 | 404 | public function triggerDurationChange(duration:Number):void { 405 | //trigger event when the media duration changes 406 | ExternalInterface.call("fMSE.callbacks.durationChange", duration); 407 | } 408 | 409 | public function triggerVolumeChange(volume:Number):void { 410 | //trigger event when the volume changes 411 | ExternalInterface.call("fMSE.callbacks.volumeChange", volume); 412 | } 413 | 414 | public function appendedSegment(startTime:Number, endTime:Number):void { 415 | //notify the JS side that a segment has been appended, along with its time range 416 | ExternalInterface.call("fMSE.callbacks.appended_segment", startTime, endTime); 417 | } 418 | 419 | public function error(message:Object, obj:Object = null):void { 420 | if (_jsReady && CONFIG::LOG_ERROR) { 421 | if (obj != null) { 422 | var textMessage:String = getQualifiedClassName(obj) + ".as : " + String(message); 423 | ExternalInterface.call("console.error", textMessage); 424 | } else { 425 | ExternalInterface.call("console.error", String(message)); 426 | } 427 | } else { 428 | setTimeout(error, 10, message, obj); 429 | } 430 | } 431 | 432 | public function transcodeError(message:Object):void { 433 | if (_jsReady && CONFIG::LOG_ERROR) { 434 | ExternalInterface.call("fMSE.callbacks.transcodeError", String(message)); 435 | } else { 436 | setTimeout(transcodeError, 10, message); 437 | } 438 | } 439 | 440 | public function debug(message:Object, obj:Object = null):void { 441 | 442 | if (_jsReady && CONFIG::LOG_DEBUG) { 443 | if (obj != null) { 444 | var textMessage:String = getQualifiedClassName(obj) + ".as : " + String(message); 445 | ExternalInterface.call("console.debug", textMessage); 446 | } else { 447 | ExternalInterface.call("console.debug", String(message)); 448 | } 449 | } else { 450 | setTimeout(debug, 10, message, obj); 451 | } 452 | } 453 | 454 | public function flush(message:Object):void { 455 | if (message.type) { 456 | //If the worker sent back an attribute "type", we want to set _isWorkerBusy to false and trigger 457 | //a segment flushed message to notify the JS that the append failed, in order not to 458 | //block the append pipeline 459 | _isWorkerBusy = false; 460 | debug("Error type: " + message.type, this); 461 | sendSegmentFlushedMessage(message.type); 462 | } 463 | } 464 | } 465 | } 466 | -------------------------------------------------------------------------------- /src/as3/com/streamroot/Main.as: -------------------------------------------------------------------------------- 1 | package com.streamroot { 2 | 3 | import flash.display.Sprite; 4 | import flash.events.Event; 5 | import flash.external.ExternalInterface; 6 | import flash.system.Security; 7 | import flash.ui.ContextMenu; 8 | 9 | [SWF(backgroundColor="#000000", frameRate="60", width="480", height="270")] 10 | public class Main extends Sprite { 11 | 12 | private var _model:NetStreamWrapper; 13 | private var _view:Skin; 14 | 15 | public function Main() { 16 | addEventListener(Event.ADDED_TO_STAGE, onAddedToStage); 17 | } 18 | 19 | private function onAddedToStage(e:Event):void { 20 | init(); 21 | } 22 | 23 | private function init():void { 24 | Security.allowDomain("*"); 25 | 
Security.allowInsecureDomain("*"); 26 | 27 | _model = new NetStreamWrapper(); 28 | 29 | _view = new Skin(_model); 30 | addChild(_view); 31 | 32 | var _ctxMenu:ContextMenu = new ContextMenu(); 33 | _ctxMenu.hideBuiltInItems(); 34 | this.contextMenu = _ctxMenu; 35 | 36 | if (loaderInfo.parameters.readyFunction != undefined) { 37 | ExternalInterface.call(loaderInfo.parameters.readyFunction, ExternalInterface.objectID); 38 | } 39 | } 40 | 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /src/as3/com/streamroot/NetStreamWrapper.as: -------------------------------------------------------------------------------- 1 | package com.streamroot { 2 | 3 | import com.streamroot.events.PlaybackEvent; 4 | 5 | import flash.events.EventDispatcher; 6 | import flash.events.NetStatusEvent; 7 | import flash.events.TimerEvent; 8 | import flash.media.Video; 9 | import flash.net.NetConnection; 10 | import flash.net.NetStream; 11 | import flash.net.NetStreamAppendBytesAction; 12 | import flash.utils.ByteArray; 13 | import flash.utils.Timer; 14 | import flash.utils.getTimer; 15 | 16 | public class NetStreamWrapper extends EventDispatcher { 17 | 18 | private var _nc:NetConnection; 19 | private var _ns:NetStream; 20 | private var _throughputTimer:Timer; 21 | private var _currentThroughput:int = 0; // in B/sec 22 | private var _loadStartTimestamp:int; 23 | private var _loadStarted:Boolean = false; 24 | private var _loadCompleted:Boolean = false; 25 | private var _loadErrored:Boolean = false; 26 | private var _pauseOnStart:Boolean = false; 27 | private var _pausePending:Boolean = false; 28 | /** 29 | * The number of seconds between the logical start of the stream and the current zero 30 | * playhead position of the NetStream. During normal, file-based playback this value should 31 | * always be zero. When the NetStream is in data generation mode, seeking during playback 32 | * resets the zero point of the stream to the seek target. To recover the playhead position 33 | * in the logical stream, this value can be added to the NetStream reported time. 34 | * 35 | * @see http://help.adobe.com/en_US/FlashPlatform/reference/actionscript/3/flash/net/NetStream.html#play() 36 | */ 37 | private var _startOffset:Number = 0; 38 | /** 39 | * If true, an empty NetStream buffer should be interpreted as the end of the video. This 40 | * is probably the case because the video data is being fed to the NetStream dynamically 41 | * through appendBuffer, not for traditional file download video. 42 | */ 43 | private var _ending:Boolean = false; 44 | private var _videoReference:Video; 45 | 46 | /** 47 | * When the player is paused, and a seek is executed, the NetStream.time property will NOT update until the decoder encounters a new time tag, 48 | * which won't happen until playback is resumed. This wrecks havoc with external scrubber logic, so when the player is paused and a seek is requested, 49 | * we cache the intended time, and use it IN PLACE OF NetStream's time when the time accessor is hit. 
50 | */ 51 | private var _pausedSeekValue:Number = -1; 52 | 53 | private var _metadata:Object; 54 | private var _isPlaying:Boolean = false; 55 | private var _isPaused:Boolean = true; 56 | private var _isBuffering:Boolean = true; 57 | private var _isSeeking:Boolean = false; 58 | private var _isLive:Boolean = false; 59 | private var _canSeekAhead:Boolean = false; 60 | private var _hasEnded:Boolean = false; 61 | private var _canPlayThrough:Boolean = false; 62 | private var _durationOverride:Number; 63 | 64 | private var _fMSE:MSEPolyfill; 65 | 66 | public function NetStreamWrapper() { 67 | _metadata = {}; 68 | _throughputTimer = new Timer(250, 0); 69 | _throughputTimer.addEventListener(TimerEvent.TIMER, onThroughputTimerTick); 70 | 71 | _fMSE = new MSEPolyfill(this); 72 | } 73 | 74 | public function get time():Number { 75 | if (_ns != null) { 76 | if (_pausedSeekValue != -1) { 77 | return _pausedSeekValue; 78 | } 79 | else { 80 | return _startOffset + _ns.time; 81 | } 82 | } 83 | else { 84 | return 0; 85 | } 86 | } 87 | 88 | public function getBufferLength():Number { 89 | if (_ns) { 90 | return _ns.bufferLength; 91 | } else { 92 | return -1; 93 | } 94 | } 95 | 96 | public function get duration():Number { 97 | if (_metadata != null && _metadata.duration != undefined) { 98 | return Number(_metadata.duration); 99 | } else if (_durationOverride && _durationOverride > 0) { 100 | return _durationOverride; 101 | } 102 | else { 103 | return 0; 104 | } 105 | } 106 | 107 | public function set duration(value:Number):void { 108 | _durationOverride = value; 109 | } 110 | 111 | public function get readyState():int { 112 | // if we have metadata and a known duration 113 | if (_metadata != null && _metadata.duration != undefined) { 114 | // if playback has begun 115 | if (_isPlaying) { 116 | // if the asset can play through without rebuffering 117 | if (_canPlayThrough) { 118 | return 4; 119 | } 120 | // if we don't know if the asset can play through without buffering 121 | else { 122 | // if the buffer is full, we assume we can seek a head at least a keyframe 123 | if (_ns.bufferLength >= _ns.bufferTime) { 124 | return 3; 125 | } 126 | // otherwise, we can't be certain that seeking ahead will work 127 | else { 128 | return 2; 129 | } 130 | } 131 | } 132 | // if playback has not begun 133 | else { 134 | return 1; 135 | } 136 | } 137 | // if we have no metadata 138 | else { 139 | return 0; 140 | } 141 | } 142 | 143 | public function appendBytesAction(action:String):void { 144 | if (_ns) { 145 | _ns.appendBytesAction(action); 146 | } 147 | } 148 | 149 | public function appendBuffer(bytes:ByteArray):void { 150 | if (_ns) { 151 | _ns.appendBytes(bytes); 152 | } else { 153 | _fMSE.error("Error: netStream not ready") 154 | } 155 | } 156 | 157 | public function abort():void { 158 | // flush the netstream buffers 159 | _ns.seek(time); 160 | } 161 | 162 | public function get buffered():Number { 163 | if (_ns == null) { 164 | return _startOffset + _ns.bufferLength + _ns.time; 165 | } else if (duration > 0) { 166 | return (_ns.bytesLoaded / _ns.bytesTotal) * duration; 167 | } else { 168 | return 0; 169 | } 170 | } 171 | 172 | public function get playing():Boolean { 173 | return _isPlaying; 174 | } 175 | 176 | public function get paused():Boolean { 177 | return _isPaused; 178 | } 179 | 180 | public function get ended():Boolean { 181 | return _hasEnded; 182 | } 183 | 184 | public function get seeking():Boolean { 185 | return _isSeeking; 186 | } 187 | 188 | public function get metadata():Object { 189 | return _metadata; 
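The `_startOffset` / `_pausedSeekValue` bookkeeping above reduces to a small piece of arithmetic. A minimal JavaScript sketch of the same rule (illustration only, not the AS3 API):

```js
// Playhead arithmetic behind the `time` getter above: in data generation mode
// NetStream.time restarts near 0 after each seek, so the logical position is the
// last seek target (_startOffset) plus NetStream's own clock. While paused, a
// pending seek is reported from the cached value instead.
function logicalTime(startOffset, netStreamTime, pausedSeekValue) {
  if (pausedSeekValue !== -1) {
    return pausedSeekValue; // paused seek: NetStream.time has not caught up yet
  }
  return startOffset + netStreamTime;
}

// Example: seek to 42s while playing, then 3s of playback elapse:
logicalTime(42, 3, -1); // => 45
```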
190 | } 191 | 192 | public function load():void { 193 | _pauseOnStart = true; 194 | _isPlaying = false; 195 | _isPaused = true; 196 | 197 | initNetConnection(); 198 | } 199 | 200 | public function play():void { 201 | // if this is a fresh playback request 202 | _fMSE.debug("entering play"); 203 | if (!_loadStarted) { 204 | _pauseOnStart = false; 205 | _isPlaying = false; 206 | _isPaused = false; 207 | _metadata = {}; 208 | initNetConnection(); 209 | _fMSE.triggerPlay(); 210 | _fMSE.loaded(); 211 | } 212 | // if the asset is already loading 213 | else { 214 | _fMSE.loaded(); 215 | if (_hasEnded) { 216 | _hasEnded = false; 217 | _ns.seek(0); 218 | } 219 | _pausePending = false; 220 | _ns.resume(); 221 | _isPaused = false; 222 | } 223 | 224 | dispatchEvent(new PlaybackEvent(PlaybackEvent.ON_STREAM_START, {})); 225 | } 226 | 227 | public function pause():void { 228 | if (!_ns) { 229 | return; 230 | } 231 | 232 | _ns.pause(); 233 | _fMSE.triggerPause(); 234 | 235 | if (_isPlaying && !_isPaused) { 236 | _isPaused = true; 237 | if (_isBuffering) { 238 | _pausePending = true; 239 | } 240 | } else if (_hasEnded) { 241 | _fMSE.debug("pause: hasEnded"); 242 | _hasEnded = false; 243 | _ns.seek(0); 244 | } 245 | } 246 | 247 | public function resume():void { 248 | if (_ns) { 249 | _fMSE.debug("entering resume"); 250 | if (_isPlaying && _isPaused) { 251 | _fMSE.debug("resume"); 252 | _ns.resume(); 253 | _isPaused = false; 254 | _fMSE.triggerPlay(); 255 | } 256 | } else { 257 | play(); 258 | } 259 | } 260 | 261 | public function seekBySeconds(pTime:Number):void { 262 | _fMSE.debug('seek (flash)'); 263 | _fMSE.debug(pTime); 264 | 265 | if (_isPlaying) { 266 | _isSeeking = true; 267 | _throughputTimer.stop(); 268 | if (_isPaused) { 269 | _pausedSeekValue = pTime; 270 | } 271 | } 272 | else if (_hasEnded) { 273 | _isPlaying = true; 274 | _hasEnded = false; 275 | } 276 | 277 | _isBuffering = true; 278 | 279 | _startOffset = pTime; 280 | _fMSE.debug(_startOffset); 281 | _fMSE.debug(pTime); 282 | _ns.seek(pTime); 283 | _ns.appendBytesAction(NetStreamAppendBytesAction.RESET_SEEK); 284 | 285 | } 286 | 287 | public function stop():void { 288 | if (_isPlaying) { 289 | _ns.close(); 290 | _isPlaying = false; 291 | _hasEnded = true; 292 | dispatchEvent(new PlaybackEvent(PlaybackEvent.ON_STREAM_CLOSE, {})); 293 | _throughputTimer.stop(); 294 | _throughputTimer.reset(); 295 | } 296 | } 297 | 298 | public function attachVideo(pVideo:Video):void { 299 | _videoReference = pVideo; 300 | } 301 | 302 | public function die():void { 303 | if (_videoReference) { 304 | _videoReference.attachNetStream(null); 305 | } 306 | 307 | if (_ns) { 308 | try { 309 | _ns.close(); 310 | _ns = null; 311 | } catch (err:Error) { 312 | 313 | } 314 | } 315 | 316 | if (_nc) { 317 | try { 318 | _nc.close(); 319 | _nc = null; 320 | } catch (err:Error) { 321 | 322 | } 323 | } 324 | 325 | if (_throughputTimer) { 326 | try { 327 | _throughputTimer.stop(); 328 | _throughputTimer = null; 329 | } catch (err:Error) { 330 | 331 | } 332 | } 333 | } 334 | 335 | private function initNetConnection():void { 336 | // the video element triggers loadstart as soon as the resource selection algorithm selects a source 337 | // this is somewhat later than that moment but relatively close 338 | _loadStarted = true; 339 | 340 | if (_nc != null) { 341 | try { 342 | _nc.close(); 343 | } catch (err:Error) { 344 | 345 | } 346 | _nc.removeEventListener(NetStatusEvent.NET_STATUS, onNetConnectionStatus); 347 | _nc = null; 348 | } 349 | 350 | _nc = new NetConnection(); 351 | 
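The integer codes returned by the `readyState` getter above follow the standard HTMLMediaElement.readyState scale:

```js
// HTMLMediaElement.readyState values mirrored by the AS3 getter above.
var READY_STATE = {
  HAVE_NOTHING: 0,      // no metadata yet
  HAVE_METADATA: 1,     // duration known, playback not started
  HAVE_CURRENT_DATA: 2, // playing, but seeking ahead may stall
  HAVE_FUTURE_DATA: 3,  // NetStream buffer is full enough to keep playing
  HAVE_ENOUGH_DATA: 4   // estimated to play through without rebuffering
};
```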
_nc.client = this; 352 | _nc.addEventListener(NetStatusEvent.NET_STATUS, onNetConnectionStatus); 353 | _nc.connect(null); 354 | } 355 | 356 | private function initNetStream():void { 357 | if (_ns != null) { 358 | _ns.close(); 359 | _ns.removeEventListener(NetStatusEvent.NET_STATUS, onNetStreamStatus); 360 | _ns = null; 361 | } 362 | _ns = new NetStream(_nc); 363 | _ns.inBufferSeek = true; 364 | _ns.addEventListener(NetStatusEvent.NET_STATUS, onNetStreamStatus); 365 | _ns.client = this; 366 | _ns.bufferTime = .5; 367 | 368 | _ns.play(null); 369 | 370 | _videoReference.attachNetStream(_ns); 371 | 372 | // _pausePending = true; 373 | 374 | dispatchEvent(new PlaybackEvent(PlaybackEvent.ON_STREAM_READY, {ns: _ns})); 375 | } 376 | 377 | private function calculateThroughput():void { 378 | // if it's finished loading, we can kill the calculations and assume it can play through 379 | if (_ns.bytesLoaded == _ns.bytesTotal) { 380 | _canPlayThrough = true; 381 | _loadCompleted = true; 382 | _throughputTimer.stop(); 383 | _throughputTimer.reset(); 384 | } 385 | // if it's still loading, but we know its duration, we can check to see if the current transfer rate 386 | // will sustain uninterrupted playback - this requires the duration to be known, which is currently 387 | // only accessible via metadata, which isn't parsed until the Flash Player encounters the metadata atom 388 | // in the file itself, which means that this logic will only work if the asset is playing - preload 389 | // won't ever cause this logic to run :( 390 | else if (_ns.bytesTotal > 0 && _metadata != null && _metadata.duration != undefined) { 391 | _currentThroughput = _ns.bytesLoaded / ((getTimer() - _loadStartTimestamp) / 1000); 392 | var __estimatedTimeToLoad:Number = (_ns.bytesTotal - _ns.bytesLoaded) * _currentThroughput; 393 | if (__estimatedTimeToLoad <= _metadata.duration) { 394 | _throughputTimer.stop(); 395 | _throughputTimer.reset(); 396 | _canPlayThrough = true; 397 | } 398 | } 399 | } 400 | 401 | private function onNetConnectionStatus(e:NetStatusEvent):void { 402 | switch (e.info.code) { 403 | case "NetConnection.Connect.Success": 404 | initNetStream(); 405 | break; 406 | case "NetConnection.Connect.Failed": 407 | 408 | break; 409 | } 410 | dispatchEvent(new PlaybackEvent(PlaybackEvent.ON_NETCONNECTION_STATUS, {info: e.info})); 411 | } 412 | 413 | private function onNetStreamStatus(e:NetStatusEvent):void { 414 | switch (e.info.code) { 415 | case "NetStream.Play.Start": 416 | _fMSE.debug("NetStream.Play.Start"); 417 | _pausedSeekValue = -1; 418 | _metadata = null; 419 | _canPlayThrough = false; 420 | _hasEnded = false; 421 | _isBuffering = true; 422 | _currentThroughput = 0; 423 | _loadStartTimestamp = getTimer(); 424 | _throughputTimer.reset(); 425 | _throughputTimer.start(); 426 | if (_pauseOnStart && _loadStarted == false) { 427 | _ns.pause(); 428 | _isPaused = true; 429 | } 430 | else { 431 | dispatchEvent(new PlaybackEvent(PlaybackEvent.ON_STREAM_START, {info: e.info})); 432 | } 433 | break; 434 | 435 | case "NetStream.SeekStart.Notify": 436 | appendBytesAction(NetStreamAppendBytesAction.RESET_SEEK); 437 | break; 438 | 439 | case "NetStream.Buffer.Full": 440 | //Now handled by function onBuffersReady, called by StreamrootInterface. NetStream would trigger this as soon as 441 | //audio OR video had been appended 442 | if (_isBuffering) { 443 | _ns.pause(); 444 | } 445 | break; 446 | 447 | case "NetStream.Buffer.Empty": 448 | // should not fire if ended/paused. 
issue #38 449 | onBufferEmpty(false); 450 | break; 451 | 452 | case "NetStream.Play.Stop": 453 | _isPlaying = false; 454 | _isPaused = true; 455 | _hasEnded = true; 456 | dispatchEvent(new PlaybackEvent(PlaybackEvent.ON_STREAM_CLOSE, {info: e.info})); 457 | 458 | _throughputTimer.stop(); 459 | _throughputTimer.reset(); 460 | break; 461 | 462 | case "NetStream.Seek.Notify": 463 | _isPlaying = true; 464 | _isSeeking = false; 465 | dispatchEvent(new PlaybackEvent(PlaybackEvent.ON_STREAM_SEEK_COMPLETE, {info: e.info})); 466 | _fMSE.triggerSeeked(); 467 | _currentThroughput = 0; 468 | _loadStartTimestamp = getTimer(); 469 | _throughputTimer.reset(); 470 | _throughputTimer.start(); 471 | break; 472 | 473 | case "NetStream.Play.StreamNotFound": 474 | _loadErrored = true; 475 | break; 476 | 477 | case "NetStream.Video.DimensionChange": 478 | dispatchEvent(new PlaybackEvent(PlaybackEvent.ON_VIDEO_DIMENSION_UPDATE, {videoWidth: _videoReference.videoWidth, videoHeight: _videoReference.videoHeight})); 479 | if (_metadata && _videoReference) { 480 | _metadata.width = _videoReference.videoWidth; 481 | _metadata.height = _videoReference.videoHeight; 482 | } 483 | break; 484 | } 485 | dispatchEvent(new PlaybackEvent(PlaybackEvent.ON_NETSTREAM_STATUS, {info: e.info})); 486 | } 487 | 488 | private function onThroughputTimerTick(e:TimerEvent):void { 489 | calculateThroughput(); 490 | } 491 | 492 | public function onMetaData(duration:Number, width:Number = 0, height:Number = 0):void { 493 | _metadata = {}; 494 | _metadata.duration = duration; 495 | if (width > 0 && height > 0) { 496 | _metadata.width = width; 497 | _metadata.height = height; 498 | } 499 | 500 | if (_metadata.duration) { 501 | _isLive = false; 502 | _canSeekAhead = true; 503 | } 504 | else { 505 | _isLive = true; 506 | _canSeekAhead = false; 507 | } 508 | dispatchEvent(new PlaybackEvent(PlaybackEvent.ON_META_DATA, {metadata: _metadata})); 509 | } 510 | 511 | //ADDED METHODS 512 | public function onBuffersReady():void { 513 | _pausedSeekValue = -1; 514 | _isPlaying = true; 515 | if (_pausePending) { 516 | _pausePending = false; 517 | _ns.pause(); 518 | _isPaused = true; 519 | } else if (/*_isBuffering &&*/ !_isPaused) { 520 | _ns.resume(); 521 | } 522 | _isBuffering = false; 523 | } 524 | 525 | public function onBufferEmpty(fromJS:Boolean):void { 526 | if (!_isPlaying) { 527 | return; 528 | } 529 | 530 | // reaching the end of the buffer after endOfStream has been called means we've 531 | // hit the end of the video 532 | if (_ending) { 533 | _ending = false; 534 | _isPlaying = false; 535 | _isPaused = true; 536 | _hasEnded = true; 537 | //TODO: commented next line because of argument e. Will probably cause issue at the end of video. 
Pass e 538 | //dispatchEvent(new VideoPlaybackEvent(VideoPlaybackEvent.ON_STREAM_CLOSE, {info: e.info})); 539 | 540 | _startOffset = 0; 541 | _pausedSeekValue = 0; 542 | return; 543 | } 544 | 545 | _isBuffering = true; 546 | 547 | if (fromJS) { 548 | _ns.pause(); 549 | } 550 | } 551 | 552 | public function onStop():void { 553 | throw new Error("Method onStop isn't implemented"); 554 | } 555 | } 556 | } 557 | -------------------------------------------------------------------------------- /src/as3/com/streamroot/Skin.as: -------------------------------------------------------------------------------- 1 | package com.streamroot { 2 | 3 | import com.streamroot.events.PlaybackEvent; 4 | 5 | import flash.display.Sprite; 6 | import flash.display.StageAlign; 7 | import flash.display.StageScaleMode; 8 | import flash.events.Event; 9 | import flash.media.Video; 10 | 11 | public class Skin extends Sprite { 12 | 13 | private var video:Video; 14 | private var bg:Sprite; 15 | 16 | private var _model:NetStreamWrapper; 17 | 18 | public function Skin(model:NetStreamWrapper) { 19 | _model = model; 20 | _model.addEventListener(PlaybackEvent.ON_META_DATA, onMetaData); 21 | _model.addEventListener(PlaybackEvent.ON_VIDEO_DIMENSION_UPDATE, onDimensionUpdate); 22 | 23 | bg = new Sprite(); 24 | addChild(bg); 25 | 26 | video = new Video(); 27 | video.smoothing = true; 28 | addChild(video); 29 | 30 | _model.attachVideo(video); 31 | 32 | addEventListener(Event.ADDED_TO_STAGE, onAddedToStage); 33 | } 34 | 35 | private function onAddedToStage(event:Event):void { 36 | removeEventListener(Event.ADDED_TO_STAGE, onAddedToStage); 37 | 38 | stage.scaleMode = StageScaleMode.NO_SCALE; 39 | stage.align = StageAlign.TOP_LEFT; 40 | stage.addEventListener(Event.RESIZE, onStageResize); 41 | 42 | onStageResize(null); 43 | } 44 | 45 | private function onStageResize(event:Event):void { 46 | drawBackground(); 47 | sizeVideoObject(); 48 | } 49 | 50 | private function onMetaData(e:PlaybackEvent):void { 51 | sizeVideoObject(); 52 | } 53 | 54 | private function onDimensionUpdate(e:PlaybackEvent):void { 55 | sizeVideoObject(); 56 | } 57 | 58 | private function drawBackground():void { 59 | bg.graphics.clear(); 60 | bg.graphics.beginFill(0, 1); 61 | bg.graphics.drawRect(0, 0, stage.stageWidth, stage.stageHeight); 62 | bg.graphics.endFill(); 63 | } 64 | 65 | private function sizeVideoObject():void { 66 | var videoWidth:int = video.videoWidth || Number(_model.metadata.width); 67 | var videoHeight:int = video.videoHeight || Number(_model.metadata.height); 68 | var givenWidth:Number = stage.stageWidth; 69 | var givenHeight:Number = stage.stageHeight; 70 | 71 | if (isNaN(videoWidth) || isNaN(videoHeight) || isNaN(givenWidth) || isNaN(givenHeight)) 72 | { 73 | return; 74 | } 75 | 76 | var kw:Number = videoWidth / givenWidth; 77 | var kh:Number = videoHeight / givenHeight; 78 | if (kw > kh) 79 | { 80 | doResize(givenWidth, Math.round(videoHeight / kw)); 81 | } 82 | else 83 | { 84 | doResize(Math.round(videoWidth / kh), givenHeight); 85 | } 86 | } 87 | 88 | private function doResize(width:Number, height:Number):void 89 | { 90 | video.width = width; 91 | video.height = height; 92 | 93 | video.x = Math.round(0.5 * (stage.stageWidth - video.width)); 94 | video.y = Math.round(0.5 * (stage.stageHeight - video.height)); 95 | } 96 | 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /src/as3/com/streamroot/buffer/Segment.as: -------------------------------------------------------------------------------- 1 | 
package com.streamroot.buffer { 2 | 3 | import flash.utils.ByteArray; 4 | 5 | public class Segment { 6 | private var _type:String; 7 | private var _startTime:Number; 8 | private var _endTime:Number; 9 | private var _segmentBytes:ByteArray; 10 | 11 | public function Segment(bytes:ByteArray, type:String, startTime:Number, endTime:Number):void { 12 | _type = type; 13 | _segmentBytes = bytes; 14 | _startTime = startTime; 15 | _endTime = endTime; 16 | } 17 | 18 | public function get bytes():ByteArray { 19 | return _segmentBytes; 20 | } 21 | 22 | public function get type():String { 23 | return _type; 24 | } 25 | 26 | public function get startTime():Number { 27 | return _startTime; 28 | } 29 | 30 | public function get endTime():Number { 31 | return _endTime; 32 | } 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/as3/com/streamroot/buffer/SourceBuffer.as: -------------------------------------------------------------------------------- 1 | package com.streamroot.buffer { 2 | 3 | import flash.utils.ByteArray; 4 | 5 | /** 6 | * This class represent a buffer of audio or video data 7 | * When segment are appended in NetStream they are deleted from the SourceBuffer 8 | */ 9 | public class SourceBuffer { 10 | 11 | private var _buffer:Array = []; 12 | private var _appendedEndTime:Number = 0; 13 | private var _type:String; 14 | private var _ready:Boolean = false; 15 | 16 | public function SourceBuffer(type:String):void { 17 | _type = type; 18 | } 19 | 20 | /** 21 | * _appendedEndTime is the endTime of the last segment appended in NetStream 22 | * If no segment has been appended, it is 0 23 | */ 24 | public function get appendedEndTime():Number { 25 | return _appendedEndTime; 26 | } 27 | 28 | public function get type():String { 29 | return _type; 30 | } 31 | 32 | /** 33 | * _ready is true if at least one segment has been appended, false if not 34 | * It is set a false only at the intialization and after a seek 35 | */ 36 | public function get ready():Boolean { 37 | return _ready; 38 | } 39 | 40 | /** 41 | * Return bufferEndTime, ie that endTime of the last segment in the buffer, in second 42 | * If buffer is empty, it return the _appendedEndTime, which may be 0 if nothing has been appended in Netstream 43 | */ 44 | public function getBufferEndTime():Number { 45 | if (_buffer.length == 0) { 46 | return _appendedEndTime; 47 | } else { 48 | return (_buffer[_buffer.length - 1] as Segment).endTime; 49 | } 50 | } 51 | 52 | /** 53 | * Return the next segment to be appended in NetStream 54 | */ 55 | public function getNextSegmentBytes():ByteArray { 56 | var bytes:ByteArray = null; 57 | var segment:Segment; 58 | if (_buffer.length > 0) { 59 | segment = _buffer[0] as Segment; 60 | bytes = segment.bytes; 61 | _appendedEndTime = segment.endTime; 62 | _buffer.splice(0, 1); 63 | _ready = true; 64 | } 65 | return bytes; 66 | } 67 | 68 | public function appendSegment(segment:Segment):void { 69 | _buffer.push(segment); 70 | } 71 | 72 | /** 73 | * Remove data between start and end time from the buffer 74 | * Return bufferEndTime, ie that endTime of the last segment in the buffer, in second 75 | * (don't be mistaken, it is not the _appendedEndTime which is the endTime of the last segment APPENDED in NetStream) 76 | */ 77 | public function remove(start:Number, end:Number):Number { 78 | if (start == 0) { 79 | _buffer = []; 80 | } else { 81 | while (_buffer.length > 0 && (_buffer[_buffer.length - 1] as Segment).startTime >= start) { 82 | _buffer.pop(); 83 | } 84 | } 85 | 
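SourceBuffer above is a plain FIFO of Segment objects: `appendSegment` queues, `getNextSegmentBytes` dequeues and advances `appendedEndTime`. A compact JavaScript model of that bookkeeping (illustration only, not the AS3 API):

```js
// Illustrative JS model of the AS3 SourceBuffer bookkeeping above.
function SourceBufferModel(type) {
  this.type = type;
  this.buffer = [];          // queued segments: {startTime, endTime, bytes}
  this.appendedEndTime = 0;  // end time of the last segment handed to NetStream
  this.ready = false;        // true once at least one segment has been appended
}
SourceBufferModel.prototype.appendSegment = function (segment) {
  this.buffer.push(segment);
};
SourceBufferModel.prototype.getNextSegmentBytes = function () {
  if (this.buffer.length === 0) { return null; }
  var segment = this.buffer.shift(); // FIFO: oldest segment first
  this.appendedEndTime = segment.endTime;
  this.ready = true;
  return segment.bytes;
};
```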
return getBufferEndTime(); 86 | } 87 | 88 | /** 89 | * Clear all data in the buffer 90 | */ 91 | private function flush():Number { 92 | _buffer = []; 93 | return getBufferEndTime(); 94 | } 95 | 96 | public function onSeek():Number { 97 | _ready = false; 98 | _appendedEndTime = 0; 99 | return flush(); 100 | } 101 | 102 | public function bufferEmpty(appendedEndTime:Number):void { 103 | if (_appendedEndTime == appendedEndTime) { 104 | _ready = false; 105 | } 106 | } 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /src/as3/com/streamroot/buffer/StreamBuffer.as: -------------------------------------------------------------------------------- 1 | package com.streamroot.buffer { 2 | 3 | import com.streamroot.MSEPolyfill; 4 | 5 | import flash.utils.ByteArray; 6 | 7 | /** 8 | * This class is an intermediate buffer before NetStream 9 | * It has been created because we can't remove data from NetStream. 10 | * To fix that we first store data in StreamBuffer, and at the last moment the minimum amount of needed data is appended in streambuffer 11 | * StreamBufferController regulary check the buffer length in NetStream and if need append new data from StreamBuffer 12 | * 13 | * This class manages the different buffer. There are one buffer per track type (ie audio, video) 14 | * 15 | */ 16 | public class StreamBuffer { 17 | 18 | private var _streamBufferController:StreamBufferController; 19 | private var _sourceBufferList:Array = []; 20 | private var _streamrootMSE:MSEPolyfill; 21 | 22 | public function StreamBuffer(streamrootMSE:MSEPolyfill):void { 23 | _streamrootMSE = streamrootMSE; 24 | _streamBufferController = new StreamBufferController(this, _streamrootMSE); 25 | } 26 | 27 | public function addSourceBuffer(type:String):void { 28 | if (getSourceBufferByType(type) == null) { 29 | _sourceBufferList.push(new SourceBuffer(type)); 30 | } else { 31 | _streamrootMSE.error('SourceBuffer for this type already exists : ' + type, this); 32 | } 33 | } 34 | 35 | private function getSourceBufferByType(type:String):SourceBuffer { 36 | if (type == null) { 37 | _streamrootMSE.error("No buffer for type null", this); 38 | } else { 39 | var sourceBuffer:SourceBuffer; 40 | for (var i:int = 0; i < _sourceBufferList.length; i++) { 41 | sourceBuffer = _sourceBufferList[i] as SourceBuffer; 42 | if (sourceBuffer.type == type) { 43 | return _sourceBufferList[i]; 44 | } 45 | } 46 | } 47 | return null; 48 | } 49 | 50 | public function isBufferReady():Boolean { 51 | var ready:Boolean = true; 52 | var sourceBuffer:SourceBuffer; 53 | for (var i:int = 0; i < _sourceBufferList.length; i++) { 54 | sourceBuffer = _sourceBufferList[i] as SourceBuffer; 55 | ready = ready && sourceBuffer.ready; 56 | } 57 | return (ready && _sourceBufferList.length); 58 | } 59 | 60 | public function getDiffBetweenBuffers():Number { 61 | switch (_sourceBufferList.length) { 62 | case 0: 63 | case 1: 64 | return 0; 65 | case 2: 66 | var firstSB:SourceBuffer = _sourceBufferList[0] as SourceBuffer; 67 | var secondSB:SourceBuffer = _sourceBufferList[1] as SourceBuffer; 68 | 69 | return Math.abs(firstSB.appendedEndTime - secondSB.appendedEndTime); 70 | default: 71 | _streamrootMSE.error("Wrong number of source buffer in flash StreamBuffer (should be 1 or 2) : " + _sourceBufferList.length, this); 72 | return 0; 73 | } 74 | } 75 | 76 | /* 77 | * Append a decoded segment in the corresponding sourceBuffer 78 | */ 79 | public function appendSegment(segment:Segment, type:String):void { 80 | var sb:SourceBuffer = 
getSourceBufferByType(type); 81 | if (sb != null) { 82 | _streamrootMSE.appendedSegment(segment.startTime, segment.endTime); 83 | sb.appendSegment(segment); 84 | } else { 85 | _streamrootMSE.error("BufferSource for type " + type + " not found"); 86 | } 87 | } 88 | 89 | public function getBufferEndTime():Number { 90 | var bufferEndTime:Number = 0; 91 | var isInit:Boolean = false; 92 | var sourceBuffer:SourceBuffer; 93 | for (var i:int = 0; i < _sourceBufferList.length; i++) { 94 | sourceBuffer = _sourceBufferList[i] as SourceBuffer; 95 | if (!isInit) { 96 | bufferEndTime = sourceBuffer.getBufferEndTime(); 97 | isInit = true; 98 | } else { 99 | bufferEndTime = Math.min(bufferEndTime, sourceBuffer.getBufferEndTime()); 100 | } 101 | } 102 | return bufferEndTime; 103 | } 104 | 105 | /* 106 | * Remove data between start and end time in the sourceBuffer corresponding the type 107 | */ 108 | public function removeDataFromSourceBuffer(start:Number, end:Number, type:String):Number { 109 | var sb:SourceBuffer = getSourceBufferByType(type); 110 | if (sb != null) { 111 | return sb.remove(start, end); 112 | } else { 113 | return 0; 114 | } 115 | } 116 | 117 | /* 118 | * Each sourceBuffer has an attribute appendedEndTime that correspond to the endTime of the last segment appended in NetStream 119 | * Because audio and video segment can have different length, audio and video sourceBuffer may have diffrent appendedEndTime 120 | * This function return the minimum appendedEndTime of all sourceBuffer. 121 | * We know that before appendedEndTime we have both audio and video, but after it we may have only video or only audio appended in NetStream 122 | */ 123 | public function getAppendedEndTime():Number { 124 | var appendedEndTime:Number = 0; 125 | var sourceBuffer:SourceBuffer; 126 | var isInit:Boolean = false; 127 | for (var i:int = 0; i < _sourceBufferList.length; i++) { 128 | sourceBuffer = _sourceBufferList[i] as SourceBuffer; 129 | if (!isInit) { 130 | appendedEndTime = sourceBuffer.appendedEndTime; 131 | isInit = true; 132 | } else { 133 | appendedEndTime = Math.min(appendedEndTime, sourceBuffer.appendedEndTime); 134 | } 135 | } 136 | return appendedEndTime; 137 | } 138 | 139 | /* 140 | * This function return the next segment that need to be appended in NetStream 141 | * It may be only video or audio data, or both at the same time 142 | */ 143 | public function getNextSegmentBytes():Array { 144 | var array:Array = []; 145 | var appendedEndTime:Number = getAppendedEndTime(); 146 | var sourceBuffer:SourceBuffer; 147 | for (var i:int = 0; i < _sourceBufferList.length; i++) { 148 | sourceBuffer = _sourceBufferList[i] as SourceBuffer; 149 | if (appendedEndTime == sourceBuffer.appendedEndTime) { 150 | var segmentBytes:ByteArray = sourceBuffer.getNextSegmentBytes(); 151 | if (segmentBytes != null) { 152 | array.push(segmentBytes); 153 | } 154 | } 155 | } 156 | return array; 157 | } 158 | 159 | public function onSeek():void { 160 | var sourceBuffer:SourceBuffer; 161 | for (var i:int = 0; i < _sourceBufferList.length; i++) { 162 | sourceBuffer = _sourceBufferList[i] as SourceBuffer; 163 | sourceBuffer.onSeek(); 164 | } 165 | } 166 | 167 | public function bufferEmpty():void { 168 | var sourceBuffer:SourceBuffer; 169 | for (var i:int = 0; i < _sourceBufferList.length; i++) { 170 | sourceBuffer = _sourceBufferList[i] as SourceBuffer; 171 | sourceBuffer.bufferEmpty(getAppendedEndTime()); 172 | } 173 | _streamrootMSE.bufferEmpty(); 174 | } 175 | 176 | public function setDuration(duration:Number):void { 177 | 
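Because audio and video segments have different durations, the two SourceBuffers drift apart, and the helpers above always reason from the track that is furthest behind. A worked example with invented numbers:

```js
// Worked example of the StreamBuffer arithmetic above (numbers are invented).
var videoAppendedEndTime = 8.0; // video appended into NetStream up to 8s
var audioAppendedEndTime = 6.0; // audio appended into NetStream up to 6s

// getAppendedEndTime(): both tracks are only guaranteed up to 6s
var appendedEndTime = Math.min(videoAppendedEndTime, audioAppendedEndTime); // 6

// getDiffBetweenBuffers(): 2s of video-only data sits past that common point
var diff = Math.abs(videoAppendedEndTime - audioAppendedEndTime); // 2

// getNextSegmentBytes() only pulls from buffers whose appendedEndTime equals the
// minimum (here: audio), so the lagging track catches up first.

// StreamBufferController (below) uses the same quantities: it appends more data
// only while (NetStream bufferLength - diff) is under its threshold.
var netStreamBufferLength = 2.5;                     // seconds buffered in NetStream
var trueBufferLength = netStreamBufferLength - diff; // 0.5s with both tracks present
console.log(appendedEndTime, trueBufferLength);
```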
_streamBufferController.duration = duration; 178 | } 179 | } 180 | } 181 | -------------------------------------------------------------------------------- /src/as3/com/streamroot/buffer/StreamBufferController.as: -------------------------------------------------------------------------------- 1 | package com.streamroot.buffer { 2 | 3 | import com.streamroot.MSEPolyfill; 4 | import com.streamroot.util.Conf; 5 | 6 | import flash.utils.setInterval; 7 | 8 | public class StreamBufferController { 9 | 10 | private var _streamBuffer:StreamBuffer; 11 | private var _streamrootMSE:MSEPolyfill; 12 | private var _needData:Boolean = true; 13 | private var _canPlay:Boolean = false; 14 | 15 | private var _duration:Number; 16 | 17 | private const TIMEOUT_LENGTH:int = 100; 18 | private const EVENT_TIMEOUT_LENGTH:int = 1; 19 | private const BUFFER_EMPTY:Number = 0.1; 20 | 21 | public function StreamBufferController(streamBuffer:StreamBuffer, streamrootMSE:MSEPolyfill):void { 22 | _streamBuffer = streamBuffer; 23 | _streamrootMSE = streamrootMSE; 24 | 25 | setInterval(bufferize, TIMEOUT_LENGTH); 26 | setInterval(triggerEvent, EVENT_TIMEOUT_LENGTH); 27 | } 28 | 29 | public function set duration(duration:Number):void { 30 | _duration = duration; 31 | } 32 | 33 | private function triggerEvent():void { 34 | if (_streamBuffer.isBufferReady() && !_canPlay) { 35 | _canPlay = true; 36 | _streamrootMSE.triggerCanplay() 37 | } else if (!_streamBuffer.isBufferReady()) { 38 | _canPlay = false; 39 | } 40 | } 41 | 42 | /** 43 | * This method is call at fixed interval to check the time length of Netstream 44 | * If the time left is less than a fixed value (BUFFER_EMPTY) we call streambuffer to get the next segment to append 45 | * 46 | * It also check if buffer is empty or not, and call bufferEmpty and bufferFull is needed 47 | */ 48 | private function bufferize():void { 49 | //this is because _streamrootMSE.getBufferLength return the max length of audio and video track 50 | // but we want the length of the buffer for which we have both audio and video 51 | 52 | var bufferLength:Number = _streamrootMSE.getBufferLength(); // return -1 is NetStream is not ready 53 | if (bufferLength >= 0) { 54 | var trueBufferLength:Number = bufferLength - _streamBuffer.getDiffBetweenBuffers(); 55 | if (trueBufferLength < Conf.NETSTREAM_BUFFER_LENGTH) { 56 | var array:Array = _streamBuffer.getNextSegmentBytes(); 57 | 58 | //check for buffer empty 59 | if (array.length == 0 && trueBufferLength < BUFFER_EMPTY && !_needData) { 60 | _streamBuffer.bufferEmpty(); 61 | _needData = true; 62 | } 63 | 64 | for (var i:uint = 0; i < array.length; i++) { 65 | _streamrootMSE.appendNetStream(array[i]); 66 | } 67 | } 68 | 69 | //check for buffer full 70 | if (_needData && _streamBuffer.isBufferReady()) { 71 | _streamrootMSE.bufferFull(); 72 | _needData = false; 73 | } 74 | 75 | if (_duration && _streamrootMSE.currentTime() + 0.3 >= _duration) { 76 | _streamrootMSE.triggerStopped(); 77 | } 78 | 79 | } 80 | } 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /src/as3/com/streamroot/events/PlaybackEvent.as: -------------------------------------------------------------------------------- 1 | package com.streamroot.events { 2 | 3 | import flash.events.Event; 4 | 5 | public class PlaybackEvent extends Event { 6 | 7 | public static const ON_META_DATA:String = "VideoPlaybackEvent.ON_META_DATA"; 8 | public static const ON_NETSTREAM_STATUS:String = "VideoPlaybackEvent.ON_NETSTREAM_STATUS"; 9 | public static const 
ON_NETCONNECTION_STATUS:String = "VideoPlaybackEvent.ON_NETCONNECTION_STATUS"; 10 | public static const ON_STREAM_READY:String = "VideoPlaybackEvent.ON_STREAM_READY"; 11 | public static const ON_STREAM_START:String = "VideoPlaybackEvent.ON_STREAM_START"; 12 | public static const ON_STREAM_CLOSE:String = "VideoPlaybackEvent.ON_STREAM_CLOSE"; 13 | public static const ON_STREAM_SEEK_COMPLETE:String = "VideoPlaybackEvent.ON_STREAM_SEEK_COMPLETE"; 14 | public static const ON_VIDEO_DIMENSION_UPDATE:String = "VideoPlaybackEvent.ON_VIDEO_DIMENSION_UPDATE"; 15 | 16 | // a flexible container object for whatever data needs to be attached to any of these events 17 | private var _data:Object; 18 | 19 | public function PlaybackEvent(pType:String, pData:Object = null) { 20 | super(pType, true, false); 21 | _data = pData; 22 | } 23 | 24 | public function get data():Object { 25 | return _data; 26 | } 27 | 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /src/as3/com/streamroot/transcoder/TranscodeWorker.as: -------------------------------------------------------------------------------- 1 | package com.streamroot.transcoder { 2 | 3 | import flash.display.Sprite; 4 | import flash.events.Event; 5 | import flash.system.MessageChannel; 6 | import flash.system.Worker; 7 | import flash.utils.ByteArray; 8 | 9 | /** 10 | * WARN : don't get lose between second and millisecond ! 11 | * Basically what is named timestamp or pts is in millisecond, what is name something_time is in second 12 | * Make sure everything that goes out from Transcoder to StreamrootMSE is in second 13 | */ 14 | public class TranscodeWorker extends Sprite { 15 | 16 | private var _mainToWorker:MessageChannel; 17 | private var _workerToMain:MessageChannel; 18 | private var _commChannel:MessageChannel; 19 | 20 | private var _startTime:Number;//second 21 | private var _endTime:Number;//second 22 | 23 | private var _transcoder:Transcoder; 24 | 25 | public function TranscodeWorker() { 26 | _mainToWorker = Worker.current.getSharedProperty("mainToWorker"); 27 | _mainToWorker.addEventListener(Event.CHANNEL_MESSAGE, onMainToWorker); 28 | 29 | _workerToMain = Worker.current.getSharedProperty("workerToMain"); 30 | 31 | _commChannel = Worker.current.getSharedProperty("commChannel"); 32 | 33 | var object:Object = {command: 'init'} 34 | _commChannel.send(object); 35 | _transcoder = new Transcoder(this, asyncTranscodeCB); 36 | } 37 | 38 | private function onMainToWorker(event:Event):void { 39 | var message:* = _mainToWorker.receive(); 40 | 41 | if (message == "seeking") { 42 | _transcoder.seeking(); 43 | return; 44 | } 45 | 46 | var data:String = message.data; 47 | var type:String = message.type; 48 | var isInit:Boolean = message.isInit; 49 | var timestamp:Number = message.startTime * 1000;//message.startTime in seocnd and timestamp in millisecond 50 | var offset:Number = message.offset * 1000; 51 | _startTime = message.startTime; 52 | _endTime = message.endTime; 53 | 54 | var answer:Object = {type: type, isInit: isInit}; //Need to initialize answer here (didn't work if I only declared it) 55 | debug("transcoding type=" + type + " isInit=" + isInit + " timestamp=" + timestamp + " offset=" + offset); 56 | if (isInit) { 57 | debug("transcoding init"); 58 | debug("CHECK MODIFS"); 59 | try { 60 | _transcoder.transcodeInit(data, type); 61 | } catch (e:Error) { 62 | error(e.toString(), type); 63 | return; 64 | } 65 | 66 | answer = {type: type, isInit: isInit}; 67 | debug("sending back message"); 68 | 
_workerToMain.send(answer); 69 | debug("message sent"); 70 | 71 | } else { 72 | debug("transcoding media"); 73 | try { 74 | _transcoder.asyncTranscode(data, type, timestamp, offset, isInit); 75 | } catch (e:Error) { 76 | error(e.toString() + "\n" + e.getStackTrace(), type); 77 | return; 78 | } 79 | } 80 | } 81 | 82 | /** 83 | * min_pts and max_pts are in millisecond 84 | * Make sure everything that goes out from Transcoder to StreamrootMSE is in second 85 | */ 86 | public function asyncTranscodeCB(type:String, isInit:Boolean, segmentBytes:ByteArray):void { 87 | debug("asyncTranscodeCB"); 88 | var answer:Object = { 89 | type: type, 90 | isInit: isInit, 91 | segmentBytes: segmentBytes, 92 | startTime: _startTime, 93 | endTime: _endTime 94 | }; 95 | 96 | debug("sending back message"); 97 | _workerToMain.send(answer); 98 | debug("message sent"); 99 | } 100 | 101 | public function debug(message:String):void { 102 | if (CONFIG::LOG_DEBUG) { 103 | var object:Object = {command: 'debug', message: message}; 104 | _commChannel.send(object); 105 | } 106 | } 107 | 108 | /** 109 | * min_pts and max_pts are in millisecond 110 | * Make sure everything that goes out from Transcoder to StreamrootMSE is in second 111 | */ 112 | public function error(message:String, type:String):void { 113 | if (CONFIG::LOG_ERROR) { 114 | var object:Object = {command: 'error', message: message, type: type}; 115 | _commChannel.send(object); 116 | } 117 | } 118 | } 119 | 120 | } 121 | -------------------------------------------------------------------------------- /src/as3/com/streamroot/transcoder/Transcoder.as: -------------------------------------------------------------------------------- 1 | package com.streamroot.transcoder { 2 | 3 | import com.dash.boxes.Muxer; 4 | import com.dash.handlers.AudioSegmentHandler; 5 | import com.dash.handlers.InitializationAudioSegmentHandler; 6 | import com.dash.handlers.InitializationVideoSegmentHandler; 7 | import com.dash.handlers.VideoSegmentHandler; 8 | import com.dash.utils.Base64; 9 | 10 | import flash.utils.ByteArray; 11 | 12 | public class Transcoder { 13 | 14 | private var _initHandlerAudio:InitializationAudioSegmentHandler; 15 | private var _initHandlerVideo:InitializationVideoSegmentHandler; 16 | 17 | private var _muxer:Muxer; 18 | 19 | private var _transcodeWorker:TranscodeWorker; 20 | private var _asyncTranscodeCB:Function; 21 | 22 | public function Transcoder(transcodeWorker:TranscodeWorker, asyncTranscodeCB:Function) { 23 | _muxer = new Muxer(); 24 | _transcodeWorker = transcodeWorker; 25 | _asyncTranscodeCB = asyncTranscodeCB; 26 | } 27 | 28 | //TODO: transcode init in separate method (problem with return type?), return bytes to worker, that will send message back to MSE. 
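The messages consumed by onMainToWorker above are AS3 objects sent over a MessageChannel; they are sketched here as JavaScript literals for readability (field names come from the code, values are placeholders). Times cross the channel in seconds; the worker converts to milliseconds internally.

```js
// Shape of the transcode-worker messages handled by onMainToWorker (values are placeholders).
var initSegmentMessage = {
  data: '<base64-encoded init segment>', // Base64 string, decoded in AS3 with Base64.decode
  type: 'video/mp4',                     // anything containing "video" or "audio"
  isInit: true
};
var mediaSegmentMessage = {
  data: '<base64-encoded media segment>',
  type: 'video/mp4',
  isInit: false,
  startTime: 4, // seconds; multiplied by 1000 inside the worker
  endTime: 8,   // seconds
  offset: 0     // seconds; subtracted (in ms) from the segment timestamp
};
// A bare "seeking" string tells the worker to reset its transcoding state.
var seekMessage = 'seeking';
```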
29 | //Call this method from MSE instead of fake Async with loop ( keep that on the side in different class) 30 | //We might want to take turns between appending audio and video though (if argument problems, or if simplifies the workflow) 31 | //timestamp must already take seek offset into account 32 | 33 | public function transcodeInit(data:String, type:String):void { 34 | var bytes_event:ByteArray = Base64.decode(data); 35 | if (isAudio(type)) { 36 | _initHandlerAudio = new InitializationAudioSegmentHandler(bytes_event); 37 | } else if (isVideo(type)) { 38 | _initHandlerVideo = new InitializationVideoSegmentHandler(bytes_event); 39 | } 40 | } 41 | 42 | public function asyncTranscode(data:String, type:String, timestamp:Number, offset:Number, isInit:Boolean):void { 43 | _transcodeWorker.debug('FLASH transcoder.asyncTranscode'); 44 | 45 | var bytes_event:ByteArray = Base64.decode(data); 46 | if (isAudio(type)) { 47 | var bytes_append_audio:ByteArray = new ByteArray(); 48 | var audioSegmentHandler:AudioSegmentHandler = new AudioSegmentHandler(bytes_event, _initHandlerAudio.messages, _initHandlerAudio.defaultSampleDuration, _initHandlerAudio.timescale, timestamp - offset + 100, _muxer); 49 | bytes_append_audio.writeBytes(audioSegmentHandler.bytes); 50 | 51 | _asyncTranscodeCB(type, isInit, bytes_append_audio); 52 | } else if (isVideo(type)) { 53 | var bytes_append:ByteArray = new ByteArray(); 54 | var videoSegmentHandler:VideoSegmentHandler = new VideoSegmentHandler(bytes_event, _initHandlerVideo.messages, _initHandlerVideo.defaultSampleDuration, _initHandlerVideo.timescale, timestamp - offset + 100, _muxer); 55 | bytes_append.writeBytes(videoSegmentHandler.bytes); 56 | 57 | _asyncTranscodeCB(type, isInit, bytes_append); 58 | } 59 | } 60 | 61 | public function seeking():void { 62 | } 63 | 64 | private static function isAudio(type:String):Boolean { 65 | return type.indexOf("audio") >= 0; 66 | } 67 | 68 | private static function isVideo(type:String):Boolean { 69 | return type.indexOf("video") >= 0; 70 | } 71 | 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /src/as3/com/streamroot/util/Conf.as: -------------------------------------------------------------------------------- 1 | package com.streamroot.util { 2 | 3 | public class Conf { 4 | //StreamBufferController 5 | public static const NETSTREAM_BUFFER_LENGTH:Number = 3; //seconds 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /src/as3/com/streamroot/util/TrackTypeHelper.as: -------------------------------------------------------------------------------- 1 | package com.streamroot.util { 2 | 3 | public class TrackTypeHelper { 4 | private static const VIDEO:String = "video"; 5 | private static const AUDIO:String = "audio"; 6 | private static const audio:String = "audio"; 7 | private static const video:String = "video"; 8 | 9 | public static function getType(type:String):String { 10 | if (type.indexOf(video) >= 0) { 11 | return VIDEO; 12 | } else if (type.indexOf(audio) >= 0) { 13 | return AUDIO; 14 | } else { 15 | return null; 16 | } 17 | } 18 | 19 | public static function isVideo(type:String):Boolean { 20 | return type.indexOf(video) >= 0; 21 | } 22 | 23 | public static function isAudio(type:String):Boolean { 24 | return type.indexOf(audio) >= 0; 25 | } 26 | } 27 | 28 | } 29 | -------------------------------------------------------------------------------- /src/js/Main.js: 
-------------------------------------------------------------------------------- 1 | var MediaSourceFlash = require('./lib/MediaSourceFlash'); 2 | var VideoExtension = require('./lib/VideoExtension'); 3 | 4 | function init(polyfillSwfUrl, videoElement, onReady, flashByDefault){ 5 | var isMSESupported = !!window.MediaSource; 6 | if(isMSESupported && !flashByDefault){ 7 | return onReady(videoElement); 8 | } 9 | 10 | window.MediaSource = MediaSourceFlash; 11 | 12 | window.fMSE.callbacks = window.fMSE.callbacks || {}; 13 | window.fMSE.callbacks.onFlashReady = function(){ 14 | onReady(new VideoExtension(swfObj)); 15 | }; 16 | 17 | var readyFunctionString = "window.fMSE.callbacks.onFlashReady"; 18 | 19 | var height = videoElement.height || 150; 20 | var width = videoElement.width || 300; 21 | 22 | var oldId = videoElement.id; 23 | var oldIdClasses = videoElement.className; 24 | 25 | var swfObjString = ''+ 27 | ' '+ 28 | ' '+ 29 | ' '+ 30 | ' '+ 31 | ' '+ 32 | ' '+ 33 | ' '; 34 | 35 | var parentElement = videoElement.parentElement; 36 | parentElement.innerHTML = swfObjString; 37 | var swfObj = parentElement.firstChild; 38 | } 39 | 40 | module.exports = init; 41 | -------------------------------------------------------------------------------- /src/js/lib/B64Encoder.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var B64Worker = require("./B64Worker.js"); 4 | 5 | /** 6 | * This object manage the B64Worker, which encode data in B64 7 | * Indeed, since data may come from different track (ie audio/video), 8 | * we need this object, that will keep in memory a callback for each data it received 9 | * When the data has been encoded, the corresponding callback is called 10 | */ 11 | var B64Encoder = function() { 12 | 13 | var self = this, 14 | _b64w, 15 | _jobQueue = [], 16 | 17 | _createWorker = function() { 18 | //Build an inline worker that can be used with browserify 19 | var blobURL = URL.createObjectURL(new Blob( 20 | [ '(' + B64Worker.toString() + ')()' ], 21 | {type: 'application/javascript'} 22 | )); 23 | var worker = new Worker(blobURL); 24 | URL.revokeObjectURL(blobURL); 25 | return worker; 26 | }, 27 | 28 | _encodeData = function (data, cb) { 29 | var jobIndex = _jobQueue.push({ 30 | cb: cb 31 | }) -1; 32 | _b64w.postMessage({ 33 | data: data, 34 | jobIndex: jobIndex 35 | }); 36 | }, 37 | 38 | _onWorkerMessage = function(e) { 39 | var jobIndex = e.data.jobIndex, 40 | job = _jobQueue[jobIndex]; 41 | delete(_jobQueue[jobIndex]); //delete and not splice to avoid offsetting index 42 | job.cb(e.data.b64data); 43 | }, 44 | 45 | _initialize = function(){ 46 | _b64w = _createWorker(); 47 | _b64w.onmessage = _onWorkerMessage; 48 | }; 49 | 50 | _initialize(); 51 | 52 | self.encodeData = _encodeData; 53 | }; 54 | 55 | module.exports = B64Encoder; 56 | -------------------------------------------------------------------------------- /src/js/lib/B64MainThread.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var B64MainThread = function(segmentAppender) { 4 | 5 | 6 | var _bytes, 7 | _b64Data, 8 | _offset, 9 | _segmentAppender = segmentAppender, 10 | PIECE_SIZE = 10000 * 3, //PIECE_SIZE needs to be a multiple of 3, since we call btoa on it 11 | 12 | _arrayBufferToBase64 = function() { 13 | var i, 14 | len = _bytes.byteLength, 15 | end = Math.min(_offset + PIECE_SIZE, len), 16 | tempString = ""; 17 | for (i = _offset; i < end; i++) { 18 | tempString += String.fromCharCode(_bytes[i]); 19 
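A possible way to call the `init` function exported above (the SWF URL and element lookup are placeholders, not values from this repository):

```js
// Hypothetical usage of the init() exported by src/js/Main.js above.
var fMSE = require('./src/js/Main');

var video = document.querySelector('video'); // the <video> element to polyfill
fMSE('/path/to/fMSE.swf', video, function (videoOrSwfWrapper) {
  // With native MSE (and flashByDefault falsy) this is the original <video>;
  // otherwise it is a VideoExtension wrapping the injected SWF object.
  // Either way, hand it to an MSE-based player as its video element.
  console.log('attach the player to', videoOrSwfWrapper);
}, /* flashByDefault */ false);
```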
| } 20 | _b64Data += btoa(tempString); 21 | if (end === len) { 22 | setTimeout(function() { 23 | _segmentAppender.onDecoded(_b64Data); 24 | }, 5); 25 | } else { 26 | _offset = end; 27 | setTimeout(_arrayBufferToBase64, 5); 28 | } 29 | }, 30 | 31 | _startDecoding = function(segmentData) { 32 | _bytes = segmentData; 33 | _b64Data = ''; 34 | _offset = 0; 35 | 36 | _arrayBufferToBase64(); 37 | }; 38 | 39 | this.startDecoding = _startDecoding; 40 | }; 41 | 42 | module.exports = B64MainThread; 43 | -------------------------------------------------------------------------------- /src/js/lib/B64Worker.js: -------------------------------------------------------------------------------- 1 | // Apr 1st 2015 2 | // Was removed by Kevin Oury, see commit 9be8c00a8c20e5889b367fec09448f086d69115f 3 | // Restore by Stanislas Fechner for performance issue 4 | 5 | /** 6 | * B64Encoding is done in a seperate worker to avoid performance issue when the 7 | * user switch tab or use fullscreen 8 | * In these use case, the browser consider the tab is not the active one, and all 9 | * timeout in the main thread are set to minimum 1 second 10 | * Since we need the timeout in the function _arrayBufferToBase64 (for performance issue) 11 | * we do it in a different worker, in which timeout will not be affected 12 | */ 13 | 14 | function B64Worker(){ 15 | 16 | var _arrayBufferToBase64 = function(bytes, index) { 17 | var len = bytes.byteLength, 18 | b64Data = ""; 19 | for (var i = 0; i < len; i++) { 20 | b64Data += String.fromCharCode(bytes[i]); 21 | } 22 | b64Data = btoa(b64Data); 23 | self.postMessage({ 24 | b64data: b64Data, 25 | jobIndex: index 26 | }); 27 | }; 28 | 29 | self.onmessage = function(e) { 30 | _arrayBufferToBase64(new Uint8Array(e.data.data), e.data.jobIndex); 31 | }; 32 | 33 | 34 | //Not in use atm, 35 | //Method tick can be used trigger event 'timeUpdate' in flash. 36 | //We'll be able to use this event as a workaroud for the setTimeout / setInterval throttling when the tab is inactive / video in fullscreen 37 | 38 | var tick = function() { 39 | self.postMessage({ 40 | tick: true 41 | }); 42 | }; 43 | 44 | //setInterval(tick, 125); 45 | } 46 | 47 | module.exports = B64Worker; 48 | -------------------------------------------------------------------------------- /src/js/lib/MediaSourceFlash.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var SourceBuffer = require('./SourceBuffer'); 4 | var B64Encoder = require('./B64Encoder'); 5 | var EventEmitter = require('eventemitter3'); 6 | 7 | var MediaSourceFlash = function() { 8 | var self = this, 9 | 10 | _videoExtension, 11 | 12 | _swfobj, 13 | 14 | _b64Encoder = new B64Encoder(), 15 | 16 | _READY_STATE = { 17 | OPEN: 'open', 18 | CLOSED: 'closed' 19 | }, 20 | 21 | _readyState = _READY_STATE.CLOSED, 22 | 23 | //TODO: is duration realy an attribute of MSE, or of video? 
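Why B64MainThread's PIECE_SIZE (above) must be a multiple of 3: base64 turns every 3 input bytes into exactly 4 output characters and only pads with '=' when the input length is not a multiple of 3, so 3-byte-aligned chunks can be encoded independently and concatenated without padding appearing mid-string:

```js
// Base64 chunking property relied on by B64MainThread above.
var part1 = btoa('abc');    // "YWJj" (3 bytes -> 4 chars, no '=' padding)
var part2 = btoa('def');    // "ZGVm"
var whole = btoa('abcdef'); // "YWJjZGVm"
console.log(part1 + part2 === whole); // true
```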
24 | _duration = 0, 25 | 26 | _ee = new EventEmitter(), 27 | 28 | _sourceBuffers = [], 29 | 30 | _addEventListener = function(type, listener) { 31 | _ee.on(type, listener); 32 | }, 33 | 34 | _removeEventListener = function(type, listener) { 35 | _ee.off(type, listener); 36 | }, 37 | 38 | _trigger = function(event) { 39 | _ee.emit(event.type, event); 40 | }, 41 | 42 | _addSourceBuffer = function(type) { 43 | var sourceBuffer; 44 | sourceBuffer = new SourceBuffer(type, _videoExtension, _b64Encoder); 45 | _sourceBuffers.push(sourceBuffer); 46 | _videoExtension.registerSourceBuffer(sourceBuffer); 47 | _swfobj.addSourceBuffer(type); 48 | return sourceBuffer; 49 | }, 50 | 51 | _removeSourceBuffer = function() { 52 | 53 | }, 54 | 55 | _endOfStream = function() { 56 | 57 | }, 58 | 59 | _initialize = function(videoExtension) { 60 | 61 | _videoExtension = videoExtension; 62 | _swfobj = _videoExtension.getSwf(); 63 | 64 | _videoExtension.createSrc(self); 65 | 66 | _readyState = _READY_STATE.OPEN; 67 | _trigger({type: "sourceopen"}); 68 | 69 | window.fMSE.callbacks.transcodeError = function(message) { 70 | console.error(message); 71 | if (window.onPlayerError) { 72 | window.onPlayerError(message); 73 | } 74 | }; 75 | 76 | _swfobj.jsReady(); 77 | }; 78 | 79 | this.addSourceBuffer = _addSourceBuffer; 80 | this.addEventListener = _addEventListener; 81 | this.removeEventListener = _removeEventListener; 82 | this.endOfStream = _endOfStream; 83 | this.initialize = _initialize; 84 | 85 | Object.defineProperty(this, "readyState", { 86 | get: function() { 87 | return _readyState; 88 | }, 89 | set: undefined 90 | }); 91 | 92 | //Duration is set in Buffer._initBuffer. 93 | Object.defineProperty(this, "duration", { 94 | get: function() { 95 | return _duration; 96 | }, 97 | set: function(newDuration) { 98 | _duration = newDuration; 99 | _swfobj.onMetaData(newDuration, 320, 240); 100 | } 101 | }); 102 | 103 | Object.defineProperty(this, "sourceBuffers", { 104 | get: function () { 105 | return _sourceBuffers; 106 | } 107 | }); 108 | }; 109 | 110 | MediaSourceFlash.isTypeSupported = function (type) { 111 | return type.indexOf('video/mp4') > -1; 112 | }; 113 | 114 | module.exports = MediaSourceFlash; 115 | -------------------------------------------------------------------------------- /src/js/lib/SegmentAppender.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var SegmentAppender = function(sourceBuffer, swfObj, b64Encoder) { 4 | var self = this, 5 | 6 | _b64Encoder = b64Encoder, 7 | 8 | _sourceBuffer = sourceBuffer, 9 | _swfObj = swfObj, 10 | 11 | _type, 12 | _startTime, 13 | _endTime, 14 | _segmentType, 15 | _discard = false, //prevent from appending decoded segment to swf obj during seeking (segment was already in B64 when we seeked) 16 | _seeking = false, //prevent an appendBuffer during seeking (segment arrived after having seeked) 17 | _isDecoding = false, 18 | 19 | //Before sending segment to flash we check first if we are seeking. If so, we don't append the decoded data. 20 | _doAppend = function (data) { 21 | _isDecoding = false; 22 | if (!_discard) { 23 | console.info("SegmentApender: DO append " + _type + "_startTime=" + _startTime); 24 | 25 | var isInit = _segmentType !== undefined 26 | ? 
_segmentType == "InitializationSegment" 27 | : isNaN(_startTime) || (typeof _endTime !== 'undefined'); 28 | 29 | _swfObj.appendBuffer(data, _type, isInit, _startTime, _endTime); 30 | } else { 31 | console.info("SegmentApender: discard data " + _type); 32 | _discard = false; 33 | _sourceBuffer.segmentFlushed(); 34 | } 35 | }, 36 | 37 | _appendBuffer = function(data, type, startTime, endTime, segmentType) { 38 | 39 | if (!_seeking) { 40 | _type = type; 41 | _startTime = startTime; 42 | _endTime = endTime; 43 | _segmentType = segmentType; 44 | 45 | console.info("SegmentApender: start decoding " + _type); 46 | _isDecoding = true; 47 | _b64Encoder.encodeData(data, _doAppend); 48 | } else { 49 | _sourceBuffer.segmentFlushed(); 50 | } 51 | }, 52 | 53 | _initialize = function() {}; 54 | 55 | self.appendBuffer = _appendBuffer; 56 | 57 | self.seeking = function() { 58 | if (_isDecoding) { 59 | _discard = true; 60 | } 61 | _seeking = true; 62 | }; 63 | self.seeked = function() { 64 | _seeking = false; 65 | }; 66 | 67 | _initialize(); 68 | }; 69 | 70 | module.exports = SegmentAppender; 71 | -------------------------------------------------------------------------------- /src/js/lib/SourceBuffer.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var CustomTimeRange = require('./utils/CustomTimeRange'); 4 | var SegmentAppender = require('./SegmentAppender'); 5 | var EventEmitter = require('eventemitter3'); 6 | 7 | var SourceBuffer = function(type, videoExtension, b64Encoder) { 8 | 9 | var self = this, 10 | 11 | _swfobj = videoExtension.getSwf(), 12 | 13 | _segmentAppender = new SegmentAppender(self, _swfobj, b64Encoder), 14 | 15 | _updating = false, //true , false 16 | _type = type, 17 | 18 | _startTime = 0, //TODO: Remove startTime hack 19 | _endTime = 0, 20 | _pendingEndTime = -1, 21 | /** _switchingTrack is set to true when we change rep and until the first segment of the new rep is appended in the Flash. It avoids fatal blocking at _isTimestampConsistent **/ 22 | _switchingTrack = false, 23 | 24 | _onTrackSwitch = function() { 25 | _switchingTrack = true; 26 | }, 27 | 28 | _ee = new EventEmitter(), 29 | 30 | _addEventListener = function(type, listener) { 31 | _ee.on(type, listener); 32 | }, 33 | 34 | _removeEventListener = function(type, listener) { 35 | _ee.off(type, listener); 36 | }, 37 | 38 | _trigger = function(event) { 39 | _ee.emit(event.type, event); 40 | }, 41 | 42 | _isTimestampConsistent = function(startTime) { 43 | if (Math.abs(startTime - _endTime) >= 1 /*|| Math.abs(startTime - _endTime) > 60*/ ) { 44 | console.info("_isTimestampConsistent FALSE. startTime=", startTime, "_endTime=", _endTime); 45 | } 46 | 47 | return isNaN(startTime) || (Math.abs(startTime - _endTime) < 1); 48 | }, 49 | 50 | _appendBuffer = function(arraybuffer_data, startTime, endTime) { 51 | _updating = true; //Do this at the very first 52 | _trigger({ 53 | type: 'updatestart' 54 | }); 55 | 56 | // that's dash.js segment descriptor 57 | if (startTime && startTime.segmentType) { 58 | var descriptor = startTime; 59 | startTime = descriptor.start; 60 | endTime = descriptor.end; 61 | var segmentType = descriptor.segmentType; 62 | } 63 | 64 | if (_isTimestampConsistent(startTime) || _switchingTrack || typeof startTime === "undefined") { //Test if discontinuity. 
Always pass test for initSegment (startTime undefined)
65 |                 _segmentAppender.appendBuffer(arraybuffer_data, _type, startTime, endTime, segmentType);
66 |                 _pendingEndTime = endTime;
67 |             } else {
68 |                 //There's a discontinuity
69 |                 var firstSegmentBool = (_startTime === _endTime);
70 |                 console.info('timestamp not consistent. First segment after seek: ' + firstSegmentBool + ". " + (startTime));
71 |                 _onUpdateend(true); //trigger updateend with error bool set to true
72 |             }
73 |         },
74 | 
75 |     /**
76 |      * This method removes data from the buffer.
77 |      * WARN: not all data between start and end is actually removed from the buffer.
78 |      * Indeed we can't remove data from NetStream. To work around that, an intermediate buffer has been implemented in Flash (StreamBuffer.as):
79 |      * data is first stored in the StreamBuffer, and only at the last moment is the minimum amount of data inserted into NetStream.
80 |      * The methods _swfobj.flushSourceBuffer and _swfobj.remove clear data from the StreamBuffer, but there will
81 |      * always be a small amount of data in NetStream that can't be removed.
82 |      *
83 |      * @param {int} start - Start of the removed interval, in seconds
84 |      * @param {int} end - End of the removed interval, in seconds
85 |      * @return - no returned value
86 |      */
87 |         _remove = function(start, end) {
88 |             if (start < 0 || end == Infinity || start > end) {
89 |                 throw new Error("Invalid Arguments: cannot call SourceBuffer.remove");
90 |             }
91 | 
92 |             _updating = true;
93 |             if (start >= _endTime || end <= _startTime) {
94 |                 //we don't remove anything
95 |             } else if (start <= _startTime && end >= _endTime) {
96 |                 //we remove the whole buffer
97 |                 //we should set _endTime = _startTime;
98 |                 //however data that has already been inserted into NetStream can't be removed. Method flushSourceBuffer returns the true endTime, i.e. the endTime of NetStream
99 |                 _endTime = _swfobj.remove(start, end, _type);
100 |             } else if (start > _startTime) {
101 |                 //we should set _endTime = start;
102 |                 //however data that has already been inserted into NetStream can't be removed. Method _swfobj.remove returns the true endTime, i.e. the endTime of NetStream
103 |                 _endTime = _swfobj.remove(start, end, _type);
104 |             } else if (start <= _startTime) {
105 |                 //in that case we can't remove data from NetStream
106 |                 console.warn('Buffer is virtually removed but data still exists in the NetStream object');
107 |                 _startTime = end;
108 |             }
109 |             //it is important to set _pendingEndTime to -1 so that _endTime is not reassigned when Flash triggers onUpdateend once decoding of the current segment is finished
110 |             _pendingEndTime = -1;
111 |             //trigger updateend to launch next job. 
Needs the setTimeout to be called asynchronously and avoid error with Max call stack size (infinite recursive loop) 112 | _onUpdateend(); 113 | }, 114 | 115 | _buffered = function() { 116 | var bufferedArray = []; 117 | if (_endTime > _startTime) { 118 | bufferedArray.push({ 119 | start: _startTime, 120 | end: _endTime 121 | }); 122 | } 123 | return new CustomTimeRange(bufferedArray); 124 | }, 125 | 126 | _debugBuffered = function() { 127 | var buffered = _buffered(); 128 | if (_pendingEndTime > _endTime) { 129 | buffered.add({ 130 | start: _endTime, 131 | end: _pendingEndTime 132 | }); 133 | } 134 | return buffered; 135 | }, 136 | 137 | _triggerUpdateend = function(error) { 138 | _updating = false; 139 | 140 | //If _pendingEndTime < _endTime, it means a segment has arrived late (MBR?), and we don't want to reduce our buffered.end 141 | //(that would trigger other late downloads and we would add everything to flash in double, which is not good for 142 | //performance) 143 | console.info('updateend ' + _type); 144 | if (!error && _pendingEndTime > _endTime) { 145 | console.info('setting end time to ' + _pendingEndTime); 146 | _endTime = _pendingEndTime; 147 | // Wait until we're sure the right segment was appended to netStream before setting _switchingTrack to false to avoid perpetual blocking at _isTimestampConsistent 148 | _switchingTrack = false; 149 | } else if (error) { 150 | console.info("Wrong segment. Update map then bufferize OR discontinuity at sourceBuffer.appendBuffer"); 151 | } 152 | 153 | _trigger({ 154 | type: 'updateend' 155 | }); 156 | }, 157 | 158 | _onUpdateend = function(error) { 159 | setTimeout(function() { 160 | _triggerUpdateend(error); 161 | }, 5); 162 | }, 163 | 164 | _seekTime = function(time) { 165 | //Sets both startTime and endTime to seek time. 166 | _startTime = time; 167 | _endTime = time; 168 | 169 | //set _pendingEndTime to -1, because update end is triggered 20ms after end of append in NetStream, so if a seek happens in the meantime we would set _endTime to _pendingEndTime wrongly. 170 | //This won't happen if we set _pendingEndTime to -1, since we need _pendingEndTime > _endTime. 171 | _pendingEndTime = -1; 172 | }, 173 | 174 | _initialize = function() { 175 | if (_type.match(/video/)) { 176 | window.fMSE.callbacks.updateend_video = _onUpdateend; 177 | } else if (_type.match(/audio/)) { 178 | window.fMSE.callbacks.updateend_audio = _onUpdateend; 179 | } else if (_type.match(/vnd/)) { 180 | window.fMSE.callbacks.updateend_video = _onUpdateend; 181 | } 182 | videoExtension.addEventListener('trackSwitch', _onTrackSwitch); 183 | 184 | if (window.fMSE.debug.bufferDisplay) { 185 | var debugSourceBuffer = { 186 | buffered: _buffered, 187 | type: _type 188 | }; 189 | 190 | Object.defineProperty(debugSourceBuffer, "debugBuffered", { 191 | get: _debugBuffered, 192 | set: undefined 193 | }); 194 | 195 | window.fMSE.debug.bufferDisplay.attachSourceBuffer(debugSourceBuffer); 196 | } 197 | }; 198 | 199 | this.appendBuffer = _appendBuffer; 200 | this.remove = _remove; 201 | this.addEventListener = _addEventListener; 202 | this.removeEventListener = _removeEventListener; 203 | 204 | Object.defineProperty(this, "updating", { 205 | get: function() { 206 | return _updating; 207 | }, 208 | set: undefined 209 | }); 210 | 211 | Object.defineProperty(this, "buffered", { 212 | get: _buffered, 213 | set: undefined 214 | }); 215 | 216 | this.appendWindowStart = 0; 217 | 218 | // 219 | //TODO: a lot of methods not in sourceBuffer spec. is there an other way? 
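// These hooks are not part of the W3C SourceBuffer interface: VideoExtension calls
// seeking()/seeked() on each SourceBuffer around a seek so that in-flight appends can be
// discarded, SegmentAppender calls segmentFlushed() to release the update cycle when a
// segment is dropped, and the isFlash flag lets callers detect this Flash-backed implementation.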
220 | // 221 | 222 | this.seeking = function(time) { 223 | _seekTime(time); 224 | _segmentAppender.seeking(); 225 | }; 226 | 227 | this.seeked = function() { 228 | _segmentAppender.seeked(); 229 | }; 230 | 231 | this.segmentFlushed = function() { 232 | _onUpdateend(true); 233 | }; 234 | 235 | Object.defineProperty(this, "isFlash", { 236 | get: function() { 237 | return true; 238 | }, 239 | set: undefined 240 | }); 241 | 242 | _initialize(); 243 | }; 244 | 245 | module.exports = SourceBuffer; 246 | -------------------------------------------------------------------------------- /src/js/lib/VideoExtension.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var CustomTimeRange = require('./utils/CustomTimeRange'); 4 | var EventEmitter = require('eventemitter3'); 5 | 6 | var VideoExtension = function(swfObj) { 7 | 8 | var self = this, 9 | 10 | _swfObj = swfObj, 11 | 12 | _mediaSource, 13 | _sourceBuffers = [], 14 | 15 | _currentTime = 0, 16 | _fixedCurrentTime = 0, //In case of video paused or buffering 17 | _seekTarget, // Using another variable for seeking, because seekTarget can be set to undefined by "playing" event (TODO: triggered during seek, which is a separate issue) 18 | _lastCurrentTimeTimestamp, 19 | _REFRESH_INTERVAL = 2000, //Max interval until we look up flash to get real value of currentTime 20 | 21 | _ended = false, 22 | //_buffering = true, 23 | //_paused = false, 24 | _seeking = false, 25 | _seekedTimeout, 26 | 27 | _ee = new EventEmitter(), 28 | 29 | _isInitialized = function() { 30 | return (typeof _swfObj !== 'undefined'); 31 | }, 32 | 33 | _addEventListener = function(type, listener) { 34 | _ee.on(type, listener); 35 | }, 36 | 37 | _removeEventListener = function(type, listener) { 38 | _ee.off(type, listener); 39 | }, 40 | 41 | _trigger = function(event) { 42 | _ee.emit(event.type, event); 43 | }, 44 | 45 | _play = function() { 46 | if (_isInitialized()) { 47 | _fixedCurrentTime = undefined; 48 | _swfObj.play(); 49 | } else { 50 | //TODO: implement exceptions similar to HTML5 one, and handle them correctly in the code 51 | new Error('Flash video is not initialized'); //TODO: should be "throw new Error(...)" but that would stop the execution 52 | } 53 | }, 54 | 55 | _pause = function() { 56 | if (_isInitialized()) { 57 | if (typeof _fixedCurrentTime === "undefined") { //Don't override _fixedCurrentTime if it already exists (case of a seek for example); 58 | _fixedCurrentTime = _getCurrentTimeFromFlash(); 59 | } 60 | _swfObj.pause(); 61 | } else { 62 | //TODO: implement exceptions similar to HTML5 one, and handle them correctly in the code 63 | new Error('Flash video is not initialized'); //TODO: should be "throw new Error(...)" but that would stop the execution 64 | } 65 | }, 66 | 67 | _seek = function(time) { 68 | if (!_seeking) { 69 | _seekedTimeout = setTimeout(_onSeeked, 5000); 70 | if (_isInitialized()) { 71 | 72 | console.info("seeking"); 73 | _trigger({ 74 | type: 'seeking' 75 | }); 76 | _seeking = true; 77 | 78 | //Rapid fix. 
Check if better way 79 | for (var i = 0; i < _sourceBuffers.length; i++) { 80 | _sourceBuffers[i].seeking(time); 81 | } 82 | 83 | _seekTarget = _fixedCurrentTime = time; 84 | 85 | //The flash is flushed somewhere in this seek function 86 | _swfObj.seek(time); 87 | } else { 88 | //TODO: implement exceptions similar to HTML5 one, and handle them correctly in the code 89 | new Error('Flash video is not initialized'); //TODO: should be "throw new Error(...)" but that would stop the execution 90 | } 91 | } 92 | }, 93 | 94 | _getCurrentTimeFromFlash = function() { 95 | _currentTime = _swfObj.currentTime(); 96 | return _currentTime; 97 | }, 98 | 99 | _getCurrentTime = function() { 100 | var now = new Date().getTime(); 101 | 102 | if (_ended) { 103 | return _mediaSource.duration; 104 | } 105 | 106 | 107 | if (typeof _seekTarget !== "undefined") { 108 | return _seekTarget; 109 | } 110 | 111 | if (typeof _fixedCurrentTime !== "undefined") { 112 | return _fixedCurrentTime; 113 | } 114 | 115 | if (_lastCurrentTimeTimestamp && now - _lastCurrentTimeTimestamp < _REFRESH_INTERVAL) { 116 | return _currentTime + (now - _lastCurrentTimeTimestamp) / 1000; 117 | } else if (_isInitialized()) { 118 | _lastCurrentTimeTimestamp = now; 119 | return _getCurrentTimeFromFlash(); 120 | } 121 | return 0; 122 | }, 123 | 124 | _getPaused = function() { 125 | if (_isInitialized()) { 126 | return _swfObj.paused(); 127 | } else { 128 | //TODO: implement exceptions similar to HTML5 one, and handle them correctly in the code 129 | new Error('Flash video is not initialized'); //TODO: should be "throw new Error(...)" but that would stop the execution 130 | } 131 | }, 132 | 133 | _getBuffered = function() { 134 | var sbBuffered, 135 | start = Infinity, 136 | end = 0; 137 | for (var i = 0; i < _sourceBuffers.length; i++) { 138 | sbBuffered = _sourceBuffers[i].buffered; 139 | if (!sbBuffered.length) { 140 | return new CustomTimeRange([]); 141 | } else { 142 | // Compute the intersection of the TimeRanges of each SourceBuffer 143 | // WARNING: we make the assumption that SourceBuffer return a TimeRange with length 0 or 1, because that's how this property is implemented for now. 144 | // This will break if this is no longer the case (if we improve AS3 buffer management to support multiple ranges for example) 145 | start = Math.min(start, sbBuffered.start(0)); 146 | end = Math.max(end, sbBuffered.end(0)); 147 | } 148 | } 149 | if (start >= end) { 150 | return new CustomTimeRange([]); 151 | } 152 | return new CustomTimeRange([{start, end}]); 153 | }, 154 | 155 | _getPlayed = function() { 156 | // TODO: return normalized TimeRange here according to MediaElement API 157 | 158 | return []; 159 | }, 160 | 161 | //EVENTS 162 | _onSeeked = function() { 163 | _seeking = false; 164 | _ended = false; 165 | _seekTarget = undefined; 166 | clearTimeout(_seekedTimeout); 167 | _trigger({ 168 | type: 'seeked' 169 | }); //trigger with value _fixedCurrentTime 170 | for (var i = 0; i < _sourceBuffers.length; i++) { 171 | _sourceBuffers[i].seeked(); 172 | } 173 | }, 174 | 175 | _onLoadStart = function() { 176 | _ended = false; 177 | _trigger({ 178 | type: 'loadstart' 179 | }); 180 | }, 181 | 182 | _onPlay = function() { 183 | _currentTime = _getCurrentTimeFromFlash(); //Force refresh _currentTime 184 | _fixedCurrentTime = undefined; 185 | 186 | _ended = false; 187 | _trigger({type: 'play'}); 188 | }, 189 | 190 | //TODO: seems not be used anymore see CLIEN-268 191 | _onPause = function() { 192 | _fixedCurrentTime = _fixedCurrentTime !== undefined ? 
_fixedCurrentTime : _getCurrentTimeFromFlash(); // Do not erase value if already set 193 | _trigger({type: 'pause'}); 194 | }, 195 | 196 | _onPlaying = function() { 197 | _fixedCurrentTime = undefined; 198 | _trigger({type: 'playing'}); 199 | }, 200 | 201 | _onWaiting = function() { 202 | _fixedCurrentTime = _fixedCurrentTime !== undefined ? _fixedCurrentTime : _getCurrentTimeFromFlash(); // Do not erase value if already set 203 | }, 204 | 205 | _onStopped = function() { 206 | _ended = true; 207 | 208 | _trigger({ 209 | type: 'ended' 210 | }); 211 | }, 212 | 213 | _onCanplay = function() { 214 | _trigger({ 215 | type: 'canplay' 216 | }); 217 | }, 218 | 219 | _onDurationchange = function() { 220 | _trigger({ 221 | type: 'durationchange' 222 | }); 223 | }, 224 | 225 | _onVolumechange = function() { 226 | _trigger({ 227 | type: 'volumechange' 228 | }); 229 | }, 230 | 231 | _canPlayType = function() { 232 | return 'probably'; 233 | }, 234 | 235 | _initialize = function() { 236 | 237 | window.fMSE.callbacks.seeked = function() { 238 | //Trigger event when seek is done 239 | _onSeeked(); 240 | }; 241 | 242 | window.fMSE.callbacks.loadstart = function() { 243 | //Trigger event when we want to start loading data (at the beginning of the video or on replay) 244 | _onLoadStart(); 245 | }; 246 | 247 | window.fMSE.callbacks.play = function() { 248 | //Trigger event when media is ready to play 249 | _onPlay(); 250 | }; 251 | 252 | window.fMSE.callbacks.pause = function () { 253 | _onPause(); 254 | }; 255 | 256 | window.fMSE.callbacks.canplay = function() { 257 | _onCanplay(); 258 | }; 259 | 260 | window.fMSE.callbacks.playing = function() { 261 | //Trigger event when the media is playing 262 | _onPlaying(); 263 | }; 264 | 265 | window.fMSE.callbacks.waiting = function() { 266 | //Trigger event when video has been paused but is expected to resume (ie on buffering or manual paused) 267 | _onWaiting(); 268 | }; 269 | 270 | window.fMSE.callbacks.stopped = function() { 271 | //Trigger event when video ends. 272 | _onStopped(); 273 | }; 274 | 275 | window.fMSE.callbacks.durationChange = function(duration) { 276 | _onDurationchange(duration); 277 | }; 278 | 279 | window.fMSE.callbacks.appended_segment = function(startTime, endTime) { 280 | // TODO: not sure what this event was meant for. 
It duplicates the updateend events, and the comments along this workflow don't reflect what it is really supposed to do 281 | }; 282 | 283 | window.fMSE.callbacks.volumeChange = function(volume) { 284 | _onVolumechange(volume); 285 | }; 286 | 287 | var oldCreateObjectURL = window.URL.createObjectURL; 288 | window.URL.createObjectURL = function (mediaSource) { 289 | if (mediaSource.initialize) { 290 | _mediaSource = mediaSource; 291 | _mediaSource.initialize(self); 292 | } else { 293 | return oldCreateObjectURL(mediaSource); 294 | } 295 | }; 296 | 297 | if (window.fMSE.debug.bufferDisplay) { 298 | window.fMSE.debug.bufferDisplay.attachVideo(self); 299 | } 300 | }; 301 | 302 | Object.defineProperty(this, "currentTime", { 303 | get: _getCurrentTime, 304 | set: function(time) { 305 | _seek(time); 306 | } 307 | }); 308 | 309 | Object.defineProperty(this, "seeking", { 310 | get: function() { 311 | return _seeking; 312 | }, 313 | set: undefined 314 | }); 315 | 316 | Object.defineProperty(this, "paused", { 317 | get: _getPaused, 318 | set: undefined 319 | }); 320 | 321 | Object.defineProperty(this, "duration", { 322 | get: function () { 323 | return _mediaSource.duration; 324 | }, 325 | set: undefined 326 | }); 327 | 328 | Object.defineProperty(this, "playbackRate", { 329 | get: function () { 330 | return 1; //Always return 1, as we don't support changing playback rate 331 | }, 332 | set: function () { 333 | //The only time we'll set playback rate for now is to pause video on rebuffering (workaround in HTML5 only). 334 | //Added warning if we ever wanted to use it for other purposes. 335 | console.error("Changing playback rate is not supported for now with Streamroot Flash playback."); 336 | } 337 | }); 338 | 339 | Object.defineProperty(this, "isFlash", { 340 | get: function() { 341 | return true; 342 | }, 343 | set: undefined 344 | }); 345 | 346 | Object.defineProperty(this, "buffered", { 347 | get: _getBuffered, 348 | set: undefined 349 | }); 350 | 351 | Object.defineProperty(this, "played", { 352 | get: _getPlayed, 353 | set: undefined 354 | }); 355 | 356 | Object.defineProperty(this, "preload", { 357 | get: undefined, 358 | set: function() { 359 | } 360 | }); 361 | 362 | Object.defineProperty(this, "onencrypted", { 363 | get: undefined, 364 | set: undefined 365 | }); 366 | 367 | Object.defineProperty(this, "autoplay", { 368 | get: undefined, 369 | set: function() { 370 | } 371 | }); 372 | 373 | Object.defineProperty(this, "ended", { 374 | get: undefined, 375 | set: undefined 376 | }); 377 | 378 | Object.defineProperty(this, "readyState", { 379 | get: _swfObj.readyState, 380 | set: undefined 381 | }); 382 | 383 | this.createSrc = function(mediaSourceFlash) { 384 | _mediaSource = mediaSourceFlash; 385 | }; 386 | 387 | this.registerSourceBuffer = function(sourceBuffer) { 388 | _sourceBuffers.push(sourceBuffer); 389 | //TODO: register source buffer in there for sourceBufferEvents 390 | }; 391 | 392 | this.getSwf = function() { 393 | return _swfObj; 394 | }; 395 | 396 | this.play = _play; 397 | this.pause = _pause; 398 | this.addEventListener = _addEventListener; 399 | this.removeEventListener = _removeEventListener; 400 | this.dispatchEvent = _trigger; 401 | this.canPlayType = _canPlayType; 402 | 403 | //TODO:register mediaSource and video events 404 | 405 | //TODO: create global methods for flash events here, and dispatch events to registered MediaSource, SourceBuffers, etc... 
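// Note: _initialize() (called below) wires up the window.fMSE.callbacks.* handlers that the
// Flash side invokes, and patches window.URL.createObjectURL so that passing a MediaSourceFlash
// instance to it calls mediaSource.initialize(videoExtension) instead of returning a blob URL.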
406 | 407 | _initialize(); 408 | }; 409 | 410 | VideoExtension.prototype = Object.create(window.HTMLMediaElement.prototype); 411 | VideoExtension.prototype.constructor = VideoExtension; 412 | 413 | module.exports = VideoExtension; 414 | -------------------------------------------------------------------------------- /src/js/lib/utils/CustomTimeRange.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var CustomTimeRange = function(timeRangeArray = []) { 4 | var _timeRangeArray = timeRangeArray; 5 | 6 | this.length = _timeRangeArray.length; 7 | 8 | this.add = function(segment) { 9 | _timeRangeArray.push(segment); 10 | this.length = _timeRangeArray.length; 11 | }; 12 | 13 | this.start = function(i) { 14 | if (isInteger(i) && i >= 0 && i < _timeRangeArray.length) { 15 | return _timeRangeArray[i].start; 16 | } else { 17 | // console.error('Index out of range'); 18 | // if(Number.isInteger(i)){ // Comes with ECMAScript 6. Only works in Chrome and Firefox. "Enable Experimental Javascript" flag in Chrome 19 | if (isInteger(i)) { 20 | throw new Error("CustomTimeRange index out of range"); 21 | } else { 22 | throw new Error("Incorrect index type"); 23 | } 24 | } 25 | }; 26 | 27 | this.end = function(i) { 28 | if (isInteger(i) && i >= 0 && i < _timeRangeArray.length) { 29 | return _timeRangeArray[i].end; 30 | } else { 31 | // console.error('Index out of range'); 32 | // if(Number.isInteger(i)){ // Comes with ECMAScript 6. Only works in Chrome and Firefox. "Enable Experimental Javascript" flag in Chrome 33 | if (isInteger(i)) { 34 | throw new Error("CustomTimeRange index out of range"); 35 | } else { 36 | throw new Error("Incorrect index type"); 37 | } 38 | } 39 | }; 40 | }; 41 | 42 | function isInteger(n) { 43 | return (typeof n === "number" && n % 1 === 0); 44 | } 45 | 46 | module.exports = CustomTimeRange; 47 | -------------------------------------------------------------------------------- /src/js/test/utils.customTimeRange.js: -------------------------------------------------------------------------------- 1 | var CustomTimeRange = require('../lib/utils/CustomTimeRange'); 2 | require('should'); 3 | 4 | describe('CustomTimeRange module', function(){ 5 | var GOODTIMERANGEARRAY = [{start: 0, end: 10}, {start: 21, end: 22}], 6 | customTimeRange; 7 | describe('Test CustomTimeRange.start good result handling', function(){ 8 | it('should return the correct start value', function(){ 9 | customTimeRange = new CustomTimeRange(); 10 | customTimeRange.add(GOODTIMERANGEARRAY[0]); 11 | customTimeRange.add(GOODTIMERANGEARRAY[1]); 12 | customTimeRange.start(0).should.equal(0); 13 | customTimeRange.start(1).should.equal(21); 14 | }); 15 | }); 16 | describe('Test CustomTimeRange.start error handling', function(){ 17 | it('should throw IndexOutOfRange error', function(){ 18 | customTimeRange = new CustomTimeRange(); 19 | customTimeRange.add(GOODTIMERANGEARRAY[0]); 20 | customTimeRange.start.bind(null, -1).should.throw("CustomTimeRange index out of range"); 21 | customTimeRange.start.bind(null, 3).should.throw("CustomTimeRange index out of range"); 22 | }); 23 | it('should throw incorrect type error', function(){ 24 | customTimeRange = new CustomTimeRange(GOODTIMERANGEARRAY); 25 | customTimeRange.add(GOODTIMERANGEARRAY[0]); 26 | customTimeRange.start.bind(null, null).should.throw("Incorrect index type"); 27 | customTimeRange.start.bind(null, 4.5).should.throw("Incorrect index type"); 28 | customTimeRange.start.bind(null, true).should.throw("Incorrect index 
type"); 29 | customTimeRange.start.bind(null, false).should.throw("Incorrect index type"); 30 | customTimeRange.start.bind(null, undefined).should.throw("Incorrect index type"); 31 | customTimeRange.start.bind(null, "string").should.throw("Incorrect index type"); 32 | customTimeRange.start.bind(null, NaN).should.throw("Incorrect index type"); 33 | }); 34 | }); 35 | 36 | describe('Test CustomTimeRange.end good result handling', function(){ 37 | it('should return the correct end value', function(){ 38 | customTimeRange = new CustomTimeRange(GOODTIMERANGEARRAY); 39 | customTimeRange.add(GOODTIMERANGEARRAY[0]); 40 | customTimeRange.add(GOODTIMERANGEARRAY[1]); 41 | customTimeRange.end(0).should.equal(10); 42 | customTimeRange.end(1).should.equal(22); 43 | }); 44 | }); 45 | describe('Test CustomTimeRange.end error handling', function(){ 46 | it('should throw IndexOutOfRange error', function(){ 47 | customTimeRange = new CustomTimeRange(GOODTIMERANGEARRAY); 48 | customTimeRange.add(GOODTIMERANGEARRAY[0]); 49 | customTimeRange.end.bind(null, -1).should.throw("CustomTimeRange index out of range"); 50 | customTimeRange.end.bind(null, 3).should.throw("CustomTimeRange index out of range"); 51 | }); 52 | it('should throw incorrect type error', function(){ 53 | customTimeRange = new CustomTimeRange(GOODTIMERANGEARRAY); 54 | customTimeRange.add(GOODTIMERANGEARRAY[0]); 55 | customTimeRange.end.bind(null, null).should.throw("Incorrect index type"); 56 | customTimeRange.end.bind(null, 4.5).should.throw("Incorrect index type"); 57 | customTimeRange.end.bind(null, true).should.throw("Incorrect index type"); 58 | customTimeRange.end.bind(null, false).should.throw("Incorrect index type"); 59 | customTimeRange.end.bind(null, undefined).should.throw("Incorrect index type"); 60 | customTimeRange.end.bind(null, "string").should.throw("Incorrect index type"); 61 | customTimeRange.end.bind(null, NaN).should.throw("Incorrect index type"); 62 | }); 63 | }); 64 | }); 65 | 66 | --------------------------------------------------------------------------------