├── .gitignore
├── favicon.ico
├── pics
│   ├── encoder-page-ui.png
│   ├── player-page-ui.png
│   ├── basic-block-diagram.png
│   ├── player-block-diagram.png
│   └── encoder-block-diagram.png
├── create_self_signed_certs.sh
├── start-http-server-cross-origin-isolated.py
├── utils
│   ├── ts_queue.js
│   ├── time_buffer_checker.js
│   ├── utils.js
│   ├── media
│   │   ├── avcc_parser.js
│   │   └── avc_decoder_configuration_record_parser.js
│   ├── buffer_utils.js
│   ├── jitter_buffer.js
│   ├── varint.js
│   └── moqt.js
├── LICENSE
├── CONTRIBUTING.md
├── overlay_processor
│   ├── overlay_encoder.js
│   └── overlay_decoder.js
├── capture
│   ├── a_capture.js
│   └── v_capture.js
├── render
│   ├── video_render_buffer.js
│   ├── source_buffer_worklet.js
│   └── audio_circular_buffer.js
├── encode
│   ├── a_encoder.js
│   └── v_encoder.js
├── CODE_OF_CONDUCT.md
├── decode
│   ├── audio_decoder.js
│   └── video_decoder.js
├── src-player
│   └── simple.html
├── src-encoder
│   └── simple.html
├── packager
│   └── mi_packager.js
├── receiver
│   └── moq_demuxer_downloader.js
├── README.md
└── sender
    └── moq_sender.js

/.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | node_modules/ 3 | eslint.* 4 | package*.json 5 | certs/ 6 | -------------------------------------------------------------------------------- /favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/facebookexperimental/moq-encoder-player/HEAD/favicon.ico -------------------------------------------------------------------------------- /pics/encoder-page-ui.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/facebookexperimental/moq-encoder-player/HEAD/pics/encoder-page-ui.png -------------------------------------------------------------------------------- /pics/player-page-ui.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/facebookexperimental/moq-encoder-player/HEAD/pics/player-page-ui.png -------------------------------------------------------------------------------- /pics/basic-block-diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/facebookexperimental/moq-encoder-player/HEAD/pics/basic-block-diagram.png -------------------------------------------------------------------------------- /pics/player-block-diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/facebookexperimental/moq-encoder-player/HEAD/pics/player-block-diagram.png -------------------------------------------------------------------------------- /pics/encoder-block-diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/facebookexperimental/moq-encoder-player/HEAD/pics/encoder-block-diagram.png -------------------------------------------------------------------------------- /create_self_signed_certs.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Copyright (c) Meta Platforms, Inc. and affiliates. 4 | # This source code is licensed under the MIT license found in the 5 | # LICENSE file in the root directory of this source tree.
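#
# This script generates a short-lived (10-day) self-signed ECDSA certificate (secp384r1) for
# localhost and writes the SHA-256 digest of its DER form to certs/certificate_fingerprint.hex.
# WebTransport clients can trust a self-signed certificate like this one by passing that hash in
# the serverCertificateHashes connection option (which also requires short certificate
# lifetimes), which is typically why the fingerprint is exported here.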
6 | 7 | mkdir -p certs 8 | 9 | KEY_FILE="./certs/certificate.key" 10 | openssl ecparam -name secp384r1 -genkey -out $KEY_FILE 11 | echo "Created $KEY_FILE" 12 | 13 | CERT_FILE="./certs/certificate.pem" 14 | openssl req -new -x509 -days 10 -subj '/CN=Test Certificate' -addext "subjectAltName = DNS:localhost" -key $KEY_FILE -sha384 -out $CERT_FILE 15 | echo "Created $CERT_FILE" 16 | 17 | # Compute fingerprint 18 | FINGUERPRINT_FILE="./certs/certificate_fingerprint.hex" 19 | openssl x509 -in $CERT_FILE -outform der | openssl dgst -sha256 -binary > $FINGUERPRINT_FILE 20 | echo "Created $FINGUERPRINT_FILE" -------------------------------------------------------------------------------- /start-http-server-cross-origin-isolated.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Copyright (c) Meta Platforms, Inc. and affiliates. 4 | # This source code is licensed under the MIT license found in the 5 | # LICENSE file in the root directory of this source tree. 6 | 7 | from http.server import HTTPServer, SimpleHTTPRequestHandler, test 8 | import sys 9 | 10 | class CORSRequestHandler (SimpleHTTPRequestHandler): 11 | def end_headers (self): 12 | self.send_header('Access-Control-Allow-Origin', '*') 13 | self.send_header('Cross-Origin-Opener-Policy', 'same-origin') 14 | self.send_header('Cross-Origin-Embedder-Policy', 'require-corp') 15 | SimpleHTTPRequestHandler.end_headers(self) 16 | 17 | if __name__ == '__main__': 18 | test(CORSRequestHandler, HTTPServer, port=int(sys.argv[1]) if len(sys.argv) > 1 else 8080) -------------------------------------------------------------------------------- /utils/ts_queue.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree. 6 | */ 7 | 8 | export class TsQueue { 9 | constructor () { 10 | this.elementsList = [] 11 | this.totalDiscarded = 0 12 | this.ptsQueue = [] 13 | } 14 | 15 | clear () { 16 | this.ptsQueue = [] 17 | } 18 | 19 | addToPtsQueue (ts, d) { 20 | this.ptsQueue.push({ ts, d }) 21 | } 22 | 23 | shiftPtsQueue (numElements = 1) { 24 | this.ptsQueue = this.ptsQueue.slice(numElements) 25 | } 26 | 27 | removeUntil (length) { 28 | const removeSize = Math.max(this.ptsQueue.length - length, 0) 29 | if (removeSize > 0) { 30 | this.shiftPtsQueue(removeSize) 31 | } 32 | } 33 | 34 | getPtsQueueLengthInfo () { 35 | const r = { lengthMs: 0, size: this.ptsQueue.length } 36 | this.ptsQueue.forEach(element => { 37 | r.lengthMs += element.d / 1000 38 | }) 39 | return r 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | MIT License 3 | 4 | Copyright (c) Meta Platforms, Inc. and affiliates. 
5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in all 14 | copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | SOFTWARE. -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to moq-encoder-player 2 | We want to make contributing to this project as easy and transparent as 3 | possible. 4 | 5 | ## Pull Requests 6 | We actively welcome your pull requests. 7 | 8 | 1. Fork the repo and create your branch from `main`. 9 | 2. If you've added code that should be tested, add tests. 10 | 3. If you've changed APIs, update the documentation. 11 | 4. Ensure the test suite passes. 12 | 5. Make sure your code lints. 13 | 6. If you haven't already, complete the Contributor License Agreement ("CLA"). 14 | 15 | ## Contributor License Agreement ("CLA") 16 | In order to accept your pull request, we need you to submit a CLA. You only need 17 | to do this once to work on any of Facebook's open source projects. 18 | 19 | Complete your CLA here: 20 | 21 | ## Issues 22 | We use GitHub issues to track public bugs. Please ensure your description is 23 | clear and has sufficient instructions to be able to reproduce the issue. 24 | 25 | Facebook has a [bounty program](https://www.facebook.com/whitehat/) for the safe 26 | disclosure of security bugs. In those cases, please go through the process 27 | outlined on that page and do not file a public issue. 28 | 29 | ## License 30 | By contributing to moq-encoder-player, you agree that your contributions will be licensed 31 | under the LICENSE file in the root directory of this source tree. -------------------------------------------------------------------------------- /utils/time_buffer_checker.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree. 
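  TimeBufferChecker keeps a small FIFO of { ts, clkms } pairs: a media timestamp plus the
  wall-clock time at which that frame was seen. GetItemByTs(ts) returns the newest stored entry
  at or before ts (or only an exact match when useExact is true) and drops everything up to it,
  so later pipeline stages can map a timestamp back to its capture time, e.g. to estimate
  end-to-end latency.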
6 | */ 7 | 8 | export class TimeBufferChecker { 9 | constructor (mediaType, isVerbose) { 10 | this.mediaType = mediaType 11 | this.elementsList = [] 12 | this.isVerbose = false 13 | if (isVerbose === true) { 14 | this.isVerbose = true 15 | } 16 | } 17 | 18 | AddItem (item) { 19 | if (('ts' in item) && ('clkms' in item)) { 20 | // Add at the end 21 | this.elementsList.push(item) 22 | if (this.isVerbose) { 23 | console.log(`TimeBufferChecker[${this.mediaType}] Added item: ${JSON.stringify(item)}, list: ${JSON.stringify(this.elementsList)}`) 24 | } 25 | } 26 | } 27 | 28 | GetItemByTs (ts, useExact) { 29 | let ret = { valid: false, ts: -1, compensatedTs: -1, estimatedDuration: -1, clkms: -1 } 30 | let i = 0 31 | let indexPastTs = -1 32 | let removedElements = 0 33 | 34 | // elementsList is sorted by arrival order 35 | while (i < this.elementsList.length) { 36 | if (useExact === true) { 37 | if (this.elementsList[i].ts === ts) { 38 | indexPastTs = i 39 | } 40 | } else { 41 | if (ts >= this.elementsList[i].ts) { 42 | indexPastTs = i 43 | } else if (ts < this.elementsList[i].ts) { 44 | break 45 | } 46 | } 47 | i++ 48 | } 49 | if (indexPastTs >= 0) { 50 | ret = this.elementsList[indexPastTs] 51 | ret.valid = true 52 | removedElements = Math.min(indexPastTs + 1, this.elementsList.length) 53 | this.elementsList = this.elementsList.slice(indexPastTs + 1) 54 | } 55 | if (this.isVerbose) { 56 | console.log(`TimeBufferChecker[${this.mediaType}] removedElements: ${removedElements}, elements list: ${this.elementsList.length}, retTs: ${(ret === undefined) ? 'undefined' : JSON.stringify(ret)}, asked: ${ts}, list: ${JSON.stringify(this.elementsList)}`) 57 | } 58 | 59 | return ret 60 | } 61 | 62 | Clear () { 63 | this.elementsList = [] 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /utils/utils.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree. 6 | */ 7 | 8 | 'use strict' 9 | 10 | export class StateEnum { 11 | // Create new instances of the same class as static attributes 12 | static Created = new StateEnum('created') 13 | static Instantiated = new StateEnum('instantiated') 14 | static Running = new StateEnum('running') 15 | static Stopped = new StateEnum('stopped') 16 | 17 | constructor (name) { 18 | this.name = name 19 | } 20 | } 21 | 22 | export function numberToSingleByteArray(num) { 23 | if (num > 255 || num < 0) 24 | throw new Error(`Overlfow! Tried to encode ${num} as single byte`) 25 | return new Uint8Array([Math.round(num)]); 26 | } 27 | 28 | export function numberTo2BytesArray(num, isLittleEndian) { 29 | if (num > 65535 || num < 0) 30 | throw new Error(`Overlfow! 
Tried to encode ${num} as 2 bytes`) 31 | 32 | const buffer = new ArrayBuffer(2); 33 | new DataView(buffer).setUint16(0, num, isLittleEndian); 34 | return buffer 35 | } 36 | 37 | export function sendMessageToMain (prefix, type, data) { 38 | if (type === 'debug' || type === 'info' || type === 'error' || type === 'warning') { 39 | data = prefix + ' ' + data 40 | } 41 | self.postMessage({ type, data }) 42 | } 43 | 44 | export async function getBinaryFile(url) { 45 | const response = await fetch(url); 46 | if (!response.ok) { 47 | throw new Error(`Response status: ${response.status}`); 48 | } 49 | 50 | return await response.arrayBuffer() 51 | } 52 | 53 | export function compareArrayBuffer(a, b) { 54 | if (a == undefined && b == undefined) { 55 | return true; 56 | } 57 | if (a == undefined || b == undefined) { 58 | return false; 59 | } 60 | if (a.byteLength !== b.byteLength) { 61 | return false; 62 | } 63 | const av = new Int8Array(a) 64 | const bv = new Int8Array(b) 65 | for (let i = 0; i < a.byteLength; i++) { 66 | if (av[i] !== bv[i]) { 67 | return false; 68 | } 69 | } 70 | return true; 71 | } 72 | 73 | export function convertTimestamp(ts, originalTimescale, destTimeScale) { 74 | return Math.round(ts * destTimeScale / originalTimescale); 75 | } 76 | 77 | export function buf2hex(buffer) { // buffer is an ArrayBuffer 78 | return [...new Uint8Array(buffer)] 79 | .map(x => x.toString(16).padStart(2, '0')) 80 | .join(''); 81 | } 82 | -------------------------------------------------------------------------------- /overlay_processor/overlay_encoder.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree.
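  OverlayEncoder.Encode(vFrame, data) burns an integer into the top luma rows of an NV12
  VideoFrame as a 64-bit horizontal barcode: the "1010" start sequence followed by the value in
  binary, each bit painted as a run of white (255) or black (0) pixels repeated over numLines
  rows. The input frame is closed and a new VideoFrame built from the modified pixel data is
  returned, so a machine-readable value (e.g. a capture timestamp) survives encoding and can be
  read back by OverlayDecoder on the receiving side.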
6 | */ 7 | 8 | const DEFAULT_START_LINE = 0 9 | const DEFAULT_NUM_LINES = 2 10 | const DEFAULT_BITS_TO_WRITE = 64 11 | const START_SEQ = "1010" 12 | 13 | export class OverlayEncoder { 14 | constructor () { 15 | this.startLine = DEFAULT_START_LINE 16 | this.numLines = DEFAULT_NUM_LINES 17 | this.bitsToWrite = DEFAULT_BITS_TO_WRITE 18 | } 19 | 20 | Encode (vFrame, data) { 21 | if (vFrame.format != "NV12") { 22 | throw new Error('Only NV12 format supported') 23 | } 24 | if ((!('codedWidth' in vFrame)) || (!('codedHeight' in vFrame))) { 25 | throw new Error('Bad frame format NV12 format supported') 26 | } 27 | if (!Number.isInteger(data)) { 28 | throw new Error('Data to encode in overlay needs to be integer') 29 | } 30 | if (vFrame.codedWidth < this.bitsToWrite) { 31 | throw new Error(`Image is to small to encode ${this.bitsToWrite} bits, at least we need ${this.bitsToWrite} width`) 32 | } 33 | if (vFrame.codedHeight < this.numLines) { 34 | throw new Error(`Image is to small to encode data, we need ${this.numLines} height`) 35 | } 36 | 37 | const mewFramepixelsData = new Uint8Array(parseInt((vFrame.codedWidth * vFrame.codedHeight) * (1 + 1/4 + 1/4))) 38 | 39 | const copyOptions = {layout: [{offset: 0, stride: vFrame.codedWidth}, {offset: vFrame.codedHeight * vFrame.codedWidth, stride: vFrame.codedWidth}]} 40 | vFrame.copyTo(mewFramepixelsData, copyOptions) 41 | 42 | const data_num_bytes_to_write = this.bitsToWrite - START_SEQ.length 43 | const data_str_pad = START_SEQ + data.toString(2).padStart(data_num_bytes_to_write, '0') 44 | 45 | const pixelsPerBit = Math.floor(vFrame.codedWidth / this.bitsToWrite) 46 | // Y is stored at start for NV12 47 | for (let l = 0; l < this.numLines; l++) { 48 | const y_byte_offset = l * vFrame.codedWidth 49 | for (let x = 0; x < this.bitsToWrite; x++) { 50 | const base_offset = y_byte_offset + (x * pixelsPerBit) 51 | const val = data_str_pad[x] == '1' ? 255 : 0 52 | for (let f = 0; f < pixelsPerBit; f++) { 53 | mewFramepixelsData[base_offset + f] = val 54 | } 55 | } 56 | } 57 | const vNewFrame = new VideoFrame(mewFramepixelsData, vFrame); 58 | vFrame.close() 59 | 60 | return vNewFrame 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /capture/a_capture.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree. 
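  Audio capture worker: the main page hands over a ReadableStream of AudioData frames (e.g. from
  a MediaStreamTrackProcessor) in a 'stream' message, and mainLoop() reads one frame per tick,
  posts a clone back to the main thread as an 'aframe' message tagged with the wall-clock
  arrival time (clkms), and closes the original. The clone is needed because AudioData is not
  transferable.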
6 | */ 7 | 8 | import { sendMessageToMain } from '../utils/utils.js' 9 | 10 | const WORKER_PREFIX = '[AUDIO-CAP]' 11 | 12 | let stopped = false 13 | let mainLoopInterval 14 | let isMainLoopInExecution = false 15 | 16 | function mainLoop (frameReader) { 17 | return new Promise(function (resolve) { 18 | if (isMainLoopInExecution) { 19 | return resolve(false) 20 | } 21 | isMainLoopInExecution = true 22 | if (stopped === true) { 23 | if (mainLoopInterval !== undefined) { 24 | clearInterval(mainLoopInterval) 25 | mainLoopInterval = undefined 26 | } 27 | sendMessageToMain(WORKER_PREFIX, 'info', 'Exited!') 28 | isMainLoopInExecution = false 29 | return resolve(false) 30 | } 31 | frameReader.read() 32 | .then(result => { 33 | if (result.done) { 34 | sendMessageToMain(WORKER_PREFIX, 'info', 'Stream is done') 35 | return frameReader.cancel('ended') 36 | } else { 37 | return new Promise(function (resolve) { return resolve(result) }) 38 | } 39 | }).then(result => { 40 | if (result === 'ended') { 41 | isMainLoopInExecution = false 42 | return resolve(false) 43 | } else { 44 | const aFrame = result.value 45 | sendMessageToMain(WORKER_PREFIX, 'debug', 'Read frame format: ' + aFrame.format + ', ts: ' + aFrame.timestamp + ', dur: ' + aFrame.duration + ', fs: ' + aFrame.sampleRate + ', Frames: ' + aFrame.numberOfFrames + ', ch: ' + aFrame.numberOfChannels) 46 | 47 | // AudioData is NOT transferable: https://github.com/WebAudio/web-audio-api/issues/2390 48 | self.postMessage({ type: 'aframe', clkms: Date.now(), data: aFrame.clone() }) 49 | aFrame.close() 50 | 51 | isMainLoopInExecution = false 52 | return resolve(true) 53 | } 54 | }) 55 | }) 56 | } 57 | 58 | self.addEventListener('message', async function (e) { 59 | const type = e.data.type 60 | if (type === 'stop') { 61 | stopped = true 62 | return 63 | } 64 | if (type === 'stream') { 65 | if (mainLoopInterval !== undefined) { 66 | sendMessageToMain(WORKER_PREFIX, 'error', 'Loop already running') 67 | return 68 | } 69 | const aFrameStream = e.data.aStream 70 | const aFrameReader = aFrameStream.getReader() 71 | 72 | sendMessageToMain(WORKER_PREFIX, 'info', 'Received streams from main page, starting worker loop') 73 | 74 | mainLoopInterval = setInterval(mainLoop, 1, aFrameReader) 75 | 76 | return 77 | } 78 | 79 | sendMessageToMain(WORKER_PREFIX, 'error', 'Invalid message received') 80 | }) 81 | -------------------------------------------------------------------------------- /render/video_render_buffer.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree. 
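  VideoRenderBuffer is a timestamp-ordered queue of decoded VideoFrames, capped at
  MAX_ELEMENTS_RENDERER (AddItem returns false when the queue is full so the caller can drop the
  frame). GetItemByTs(ts) closes and counts frames that are already in the past relative to ts,
  returns the frame to present next, and reports queue size, queue length in ms and the total
  number of discarded frames so the renderer can monitor how far behind it is running.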
6 | */ 7 | 8 | const MAX_ELEMENTS_RENDERER = 60 9 | 10 | export class VideoRenderBuffer { 11 | constructor () { 12 | this.elementsList = [] 13 | this.totalDiscarded = 0 14 | 15 | this.totalLengthMs = 0 16 | } 17 | 18 | AddItem (vFrame) { 19 | let r = true 20 | if (this.elementsList.length < MAX_ELEMENTS_RENDERER) { 21 | // Add at the end (ordered by timestamp) 22 | this.elementsList.push(vFrame) 23 | 24 | this.totalLengthMs += vFrame.duration / 1000 25 | } else { 26 | r = false 27 | } 28 | return r 29 | } 30 | 31 | GetFirstElement () { 32 | const ret = { vFrame: null, discarded: 0, totalDiscarded: 0, queueSize: this.elementsList.length, queueLengthMs: this.totalLengthMs } 33 | if (this.elementsList.length > 0) { 34 | ret.vFrame = this.elementsList.shift() 35 | this.totalLengthMs -= ret.vFrame.duration / 1000 36 | ret.queueSize = this.elementsList.length 37 | ret.queueLengthMs = this.totalLengthMs 38 | } 39 | 40 | return ret 41 | } 42 | 43 | GetItemByTs (ts) { 44 | const ret = { vFrame: null, discarded: 0, totalDiscarded: this.totalDiscarded, queueSize: this.elementsList.length, queueLengthMs: this.totalLengthMs } 45 | 46 | if (this.elementsList.length <= 0 || ts < this.elementsList[0].timestamp) { 47 | return ret 48 | } 49 | 50 | let exit = false 51 | let lastFrameInThePastIndex = 0 52 | while ((lastFrameInThePastIndex < this.elementsList.length) && (exit === false)) { 53 | if (this.elementsList[lastFrameInThePastIndex].timestamp >= ts) { 54 | exit = true 55 | } else { 56 | lastFrameInThePastIndex++ 57 | } 58 | } 59 | 60 | // Remove items from 0..(lastFrameInThePastIndex-1) 61 | for (let n = 0; n < (lastFrameInThePastIndex - 1); n++) { 62 | const vFrame = this.elementsList.shift() 63 | ret.discarded++ 64 | this.totalLengthMs -= vFrame.duration / 1000 65 | vFrame.close() 66 | } 67 | 68 | if (this.elementsList.length > 0) { 69 | ret.vFrame = this.elementsList.shift() 70 | this.totalLengthMs -= ret.vFrame.duration / 1000 71 | } 72 | 73 | this.totalDiscarded += ret.discarded 74 | ret.totalDiscarded = this.totalDiscarded 75 | ret.queueSize = this.elementsList.length 76 | ret.queueLengthMs = this.totalLengthMs 77 | 78 | return ret 79 | } 80 | 81 | Clear () { 82 | while (this.elementsList.length > 0) { 83 | const vFrame = this.elementsList.shift() 84 | vFrame.close() 85 | } 86 | this.totalLengthMs = 0 87 | this.totalDiscarded = 0 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /overlay_processor/overlay_decoder.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree. 
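  OverlayDecoder reverses OverlayEncoder on the receive side: given a decoded I420 VideoFrame it
  averages the luma values of each of the 64 bit cells over the first numLines rows, thresholds
  the average at 128 to rebuild the bit string, checks for the "1010" start sequence and returns
  the recovered value as a BigInt plus a confidence flag (1 when the start sequence was found,
  0 otherwise).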
6 | */ 7 | 8 | const DEFAULT_START_LINE = 0 9 | const DEFAULT_NUM_LINES = 2 10 | const DEFAULT_BITS_TO_READ = 64 11 | const START_SEQ = "1010" 12 | 13 | export class OverlayDecoder { 14 | constructor () { 15 | this.startLine = DEFAULT_START_LINE 16 | this.numLines = DEFAULT_NUM_LINES 17 | this.bitsToRead = DEFAULT_BITS_TO_READ 18 | } 19 | 20 | Decode (vFrame) { 21 | if (vFrame.format != "I420") { 22 | throw new Error('Only NV12 format supported') 23 | } 24 | if ((!('codedWidth' in vFrame)) || (!('codedHeight' in vFrame))) { 25 | throw new Error('Bad frame format NV12 format supported') 26 | } 27 | if (vFrame.codedWidth < this.bitsToRead) { 28 | throw new Error(`Image is to small to decode ${this.bitsToRead} bits, we need at lease ${this.bitsToRead} width`) 29 | } 30 | if (vFrame.codedHeight < this.numLines) { 31 | throw new Error(`Image is to small, we need at list ${ this.numLines} height`) 32 | } 33 | 34 | const mewFramepixelsData = new Uint8Array(parseInt((vFrame.codedWidth * vFrame.codedHeight) * (1 + 1/2 + 1/2))) 35 | 36 | const copyOptions = {layout: [{offset: 0, stride: vFrame.codedWidth}, {offset: vFrame.codedHeight * vFrame.codedWidth, stride: vFrame.codedWidth / 2}, {offset: vFrame.codedHeight * vFrame.codedWidth + vFrame.codedHeight * vFrame.codedWidth / 2, stride: vFrame.codedWidth / 2}]} 37 | vFrame.copyTo(mewFramepixelsData, copyOptions) 38 | 39 | // Y is stored at start for I420 40 | const pixelsPerBit = vFrame.displayWidth / this.bitsToRead 41 | let bin_str = "" 42 | 43 | for (let b = 0; b < this.bitsToRead; b++) { 44 | let totalVal = 0 45 | const baseOffset = Math.floor(b * pixelsPerBit) 46 | for (let x = 0; x < pixelsPerBit; x++) { 47 | for (let y = this.startLine; y < this.numLines; y++) { 48 | totalVal += mewFramepixelsData[y * vFrame.codedWidth + baseOffset + x] 49 | } 50 | } 51 | let val = totalVal / (pixelsPerBit * this.numLines) 52 | if (val >= 128) { 53 | bin_str += '1' 54 | } else { 55 | bin_str += '0' 56 | } 57 | } 58 | 59 | let ret_conf = 0 60 | if (bin_str.length >= START_SEQ.length && bin_str.substring(0,START_SEQ.length) == START_SEQ) { 61 | ret_conf = 1 62 | bin_str = bin_str.substring(START_SEQ.length).padStart(this.bitsToRead, '0') 63 | } 64 | 65 | return { val: BigInt('0b' + bin_str), confidence: ret_conf} 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /utils/media/avcc_parser.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree. 6 | */ 7 | 8 | 'use strict'; 9 | 10 | export const DEFAULT_AVCC_HEADER_LENGTH = 4; 11 | 12 | const NAL_TYPE_SLICE_IDR = 0x5; 13 | 14 | export function BitReaderHelper(buf, bitPos, numBits) { 15 | let ret = 0; 16 | let internalBytePos = Math.floor(bitPos / 8); 17 | let internalBitPos = 7 - (bitPos % 8); 18 | 19 | for (let n = 0; n < numBits; n++) { 20 | const bit = 21 | (buf[internalBytePos] & parseInt(Math.pow(2, internalBitPos), 10)) > 0 22 | ? 
1 23 | : 0; 24 | ret = (ret << 1) | bit; 25 | 26 | internalBitPos--; 27 | if (internalBitPos < 0) { 28 | internalBytePos++; 29 | internalBitPos = 7; 30 | } 31 | } 32 | return ret; 33 | } 34 | 35 | export function GetUint16FromBufferBe(data) { 36 | return new DataView(data.buffer, data.byteOffset, data.byteLength).getUint16( 37 | 0, 38 | false, 39 | ); 40 | } 41 | 42 | export function ParseNAL(data) { 43 | const naluData = { 44 | nalType: -1, 45 | offset: data.byteOffset, 46 | length: data.byteLength 47 | }; 48 | 49 | const nalTypeNum = BitReaderHelper(data, 3, 5); 50 | naluData.nalType = nalTypeNum 51 | 52 | return naluData; 53 | } 54 | 55 | export function ParseH264NALs(dataBytes, avccHeaderLengthSize) { 56 | if (dataBytes == undefined || dataBytes == null) { 57 | return undefined 58 | } 59 | let dataBytes8b = dataBytes 60 | if (!(dataBytes instanceof Uint8Array)) { 61 | dataBytes8b = new Uint8Array(dataBytes) 62 | } 63 | const h264AvccStreamData = []; 64 | 65 | let nPos = 0; 66 | while (nPos + avccHeaderLengthSize < dataBytes8b.byteLength) { 67 | const naluSize = BitReaderHelper( 68 | dataBytes8b.subarray(nPos, nPos + avccHeaderLengthSize), 69 | 0, 70 | avccHeaderLengthSize * 8, 71 | ); 72 | nPos += avccHeaderLengthSize; 73 | if (nPos + naluSize <= dataBytes8b.byteLength) { 74 | const nalu = ParseNAL(dataBytes8b.subarray(nPos, nPos + naluSize)); 75 | h264AvccStreamData.push(nalu); 76 | } else { 77 | throw new Error( 78 | `NALU size indicates an offset bigger than data buffer. Buffer size ${ 79 | dataBytes8b.byteLength 80 | }, requested: ${nPos + naluSize}`, 81 | ); 82 | } 83 | nPos += naluSize; 84 | } 85 | 86 | return h264AvccStreamData; 87 | } 88 | 89 | export function ContainsNALUSliceIDR(dataBytes, avccHeaderLengthSize) { 90 | if (dataBytes == undefined || dataBytes == null) { 91 | return false; 92 | } 93 | const nals = ParseH264NALs(dataBytes, avccHeaderLengthSize); 94 | let i = 0 95 | while (i < nals.length) { 96 | if (nals[i].nalType === NAL_TYPE_SLICE_IDR) { 97 | return true; 98 | } 99 | i++; 100 | } 101 | return false; 102 | } 103 | -------------------------------------------------------------------------------- /capture/v_capture.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree. 
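  Video capture worker: reads VideoFrames from the ReadableStream handed over by the main page,
  forwards each one to the main thread as a transferable 'vframe' message tagged with the
  wall-clock arrival time (clkms), and logs a rough frames-per-second estimate once per second.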
6 | */ 7 | 8 | import { sendMessageToMain } from '../utils/utils.js' 9 | 10 | const WORKER_PREFIX = '[VIDEO-CAP]' 11 | 12 | let stopped = false 13 | let mainLoopInterval 14 | let isMainLoopInExecution = false 15 | 16 | let timeCheck 17 | let estFps = 0 18 | 19 | function mainLoop (frameReader) { 20 | return new Promise(function (resolve) { 21 | if (isMainLoopInExecution) { 22 | return resolve(false) 23 | } 24 | isMainLoopInExecution = true 25 | if (stopped === true) { 26 | if (mainLoopInterval !== undefined) { 27 | clearInterval(mainLoopInterval) 28 | mainLoopInterval = undefined 29 | } 30 | sendMessageToMain(WORKER_PREFIX, 'info', 'Exited!') 31 | isMainLoopInExecution = false 32 | return resolve(false) 33 | } 34 | frameReader.read() 35 | .then(result => { 36 | if (result.done) { 37 | sendMessageToMain(WORKER_PREFIX, 'info', 'Stream is done') 38 | return frameReader.cancel('ended') 39 | } else { 40 | return new Promise(function (resolve) { return resolve(result) }) 41 | } 42 | }).then(result => { 43 | if (result === 'ended') { 44 | isMainLoopInExecution = false 45 | return resolve(false) 46 | } else { 47 | const vFrame = result.value 48 | 49 | sendMessageToMain(WORKER_PREFIX, 'debug', 'Read frame format: ' + vFrame.format + ', ts: ' + vFrame.timestamp + ', dur: ' + vFrame.duration) 50 | 51 | // Send frame to process 52 | self.postMessage({ type: 'vframe', clkms: Date.now(), data: vFrame }, [vFrame]) 53 | // vFrame.close(); 54 | 55 | estFps++ 56 | if (timeCheck === undefined) { 57 | timeCheck = Date.now() 58 | } 59 | const nowMs = Date.now() 60 | if (nowMs >= timeCheck + 1000) { 61 | sendMessageToMain(WORKER_PREFIX, 'debug', 'estimated fps last sec: ' + estFps) 62 | estFps = 0 63 | timeCheck = nowMs 64 | } 65 | 66 | isMainLoopInExecution = false 67 | return resolve(true) 68 | } 69 | }) 70 | }) 71 | } 72 | 73 | self.addEventListener('message', async function (e) { 74 | const type = e.data.type 75 | if (type === 'stop') { 76 | stopped = true 77 | return 78 | } 79 | if (type === 'stream') { 80 | if (mainLoopInterval !== undefined) { 81 | sendMessageToMain(WORKER_PREFIX, 'error', 'Loop already running') 82 | return 83 | } 84 | const vFrameStream = e.data.vStream 85 | const vFrameReader = vFrameStream.getReader() 86 | 87 | sendMessageToMain(WORKER_PREFIX, 'info', 'Received streams from main page, starting worker loop') 88 | 89 | mainLoopInterval = setInterval(mainLoop, 1, vFrameReader) 90 | 91 | return 92 | } 93 | 94 | sendMessageToMain(WORKER_PREFIX, 'error', 'Invalid message received.') 95 | }) 96 | -------------------------------------------------------------------------------- /encode/a_encoder.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree. 
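  Audio encoder worker wrapping a WebCodecs AudioEncoder: 'aencoderini' configures the encoder
  (the config is kept so every chunk can be labelled with codec, sample rate and channel count),
  'aframe' messages are encoded unless encodeQueueSize exceeds encoderMaxQueueSize (in which
  case the frame is dropped and reported), and each EncodedAudioChunk is posted back as an
  'achunk' message with a microsecond timebase.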
6 | */ 7 | 8 | import { sendMessageToMain, StateEnum } from '../utils/utils.js' 9 | 10 | const WORKER_PREFIX = '[AUDIO-ENC]' 11 | 12 | const WEBCODECS_TIMESCALE = 1000000 // 1us 13 | 14 | let frameDeliveredCounter = 0 15 | let chunkDeliveredCounter = 0 16 | let workerState = StateEnum.Created 17 | 18 | // Default values 19 | let encoderMaxQueueSize = 5 20 | 21 | // Last audioData SampleFreq 22 | let lastEncoderConfig; 23 | 24 | // Encoder 25 | const initAudioEncoder = { 26 | output: handleChunk, 27 | error: (e) => { 28 | if (workerState === StateEnum.Created) { 29 | console.error(e.message) 30 | } else { 31 | sendMessageToMain(WORKER_PREFIX, 'error', e.message) 32 | } 33 | } 34 | } 35 | 36 | let aEncoder = null 37 | 38 | function handleChunk (chunk, metadata) { 39 | const msg = { type: 'achunk', seqId: chunkDeliveredCounter++, chunk, timebase: WEBCODECS_TIMESCALE, sampleFreq: lastEncoderConfig.sampleRate, numChannels: lastEncoderConfig.numberOfChannels, codec: lastEncoderConfig.codec} 40 | 41 | sendMessageToMain(WORKER_PREFIX, 'debug', 'Chunk created. sId: ' + msg.seqId + ', Timestamp: ' + chunk.timestamp + ', dur: ' + chunk.duration + ', type: ' + chunk.type + ', size: ' + chunk.byteLength + ', metadata: ' + JSON.stringify(metadata)); 42 | 43 | self.postMessage(msg) 44 | } 45 | 46 | self.addEventListener('message', async function (e) { 47 | if (workerState === StateEnum.Created) { 48 | workerState = StateEnum.Instantiated 49 | } 50 | 51 | if (workerState === StateEnum.Stopped) { 52 | sendMessageToMain(WORKER_PREFIX, 'info', 'Encoder is stopped it does not accept messages') 53 | return 54 | } 55 | 56 | const type = e.data.type 57 | if (type === 'stop') { 58 | workerState = StateEnum.Stopped 59 | // Make sure all requests has been processed 60 | await aEncoder.flush() 61 | 62 | aEncoder.close() 63 | 64 | return 65 | } 66 | if (type === 'aencoderini') { 67 | const encoderConfig = e.data.encoderConfig 68 | 69 | // eslint-disable-next-line no-undef 70 | aEncoder = new AudioEncoder(initAudioEncoder) 71 | 72 | // We do NOT accept changing audio encoding settings mid-stream for now 73 | aEncoder.configure(encoderConfig) 74 | lastEncoderConfig = encoderConfig 75 | if ('encoderMaxQueueSize' in e.data) { 76 | encoderMaxQueueSize = e.data.encoderMaxQueueSize 77 | } 78 | sendMessageToMain(WORKER_PREFIX, 'info', `Encoder initialized with config: ${JSON.stringify(lastEncoderConfig)}`) 79 | return 80 | } 81 | if (type !== 'aframe') { 82 | sendMessageToMain(WORKER_PREFIX, 'error', 'Invalid message received') 83 | return 84 | } 85 | 86 | const aFrame = e.data.aframe 87 | 88 | if (aEncoder.encodeQueueSize > encoderMaxQueueSize) { 89 | // Too many frames in the encoder, encoder is overwhelmed let's drop this frame. 90 | sendMessageToMain(WORKER_PREFIX, 'dropped', { clkms: Date.now(), ts: aFrame.timestamp, msg: 'Dropped encoding audio frame' }) 91 | aFrame.close() 92 | } else { 93 | sendMessageToMain(WORKER_PREFIX, 'debug', 'Send to encode frame ts: ' + aFrame.timestamp + '. Counter: ' + frameDeliveredCounter++) 94 | 95 | aEncoder.encode(aFrame) 96 | aFrame.close() 97 | } 98 | }) 99 | -------------------------------------------------------------------------------- /utils/buffer_utils.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree. 
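  Byte-buffer helpers for stream I/O: concatBuffer() joins a list of (possibly undefined)
  buffers into a single Uint8Array, buffRead()/buffReadFrombyobReader() perform exact-size reads
  from a BYOB ReadableStream reader and throw ReadStreamClosed when the stream ends before the
  requested bytes arrive, and readUntilEof() accumulates a whole stream in blockSize chunks.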
6 | */ 7 | 8 | 'use strict' 9 | 10 | export class ReadStreamClosed extends Error { 11 | constructor(message) { 12 | super(message) 13 | this.name = "ReadStreamClosed" 14 | } 15 | } 16 | 17 | export function concatBuffer (arr) { 18 | let totalLength = 0 19 | arr.forEach(element => { 20 | if (element !== undefined) { 21 | totalLength += element.byteLength 22 | } 23 | }) 24 | const retBuffer = new Uint8Array(totalLength) 25 | let pos = 0 26 | arr.forEach(element => { 27 | if (element !== undefined) { 28 | let element8b = element 29 | if (!(element instanceof Uint8Array)) { 30 | element8b = new Uint8Array(element) 31 | } 32 | retBuffer.set(element8b, pos) 33 | pos += element.byteLength 34 | } 35 | }) 36 | return retBuffer 37 | } 38 | 39 | export async function readUntilEof (readableStream, blockSize) { 40 | const chunkArray = [] 41 | let totalLength = 0 42 | 43 | while (true) { 44 | let bufferChunk = new Uint8Array(blockSize) 45 | const reader = readableStream.getReader({ mode: 'byob' }) 46 | const { value, done } = await reader.read(new Uint8Array(bufferChunk, 0, blockSize)) 47 | if (value !== undefined) { 48 | bufferChunk = value.buffer 49 | chunkArray.push(bufferChunk.slice(0, value.byteLength)) 50 | totalLength += value.byteLength 51 | } 52 | reader.releaseLock() 53 | if (value === undefined) { 54 | throw new Error('error reading incoming data') 55 | } 56 | if (done) { 57 | break 58 | } 59 | } 60 | // Concatenate received data 61 | const uint8Buffer = new Uint8Array(totalLength) 62 | let pos = 0 63 | for (const element of chunkArray) { 64 | const uint8view = new Uint8Array(element, 0, element.byteLength) 65 | uint8Buffer.set(uint8view, pos) 66 | pos += element.byteLength 67 | } 68 | 69 | return uint8Buffer 70 | } 71 | 72 | export async function buffRead (readableStream, size) { 73 | let ret = null 74 | if (size <= 0) { 75 | return {eof: false, buff: new Uint8Array(Number(0)) } 76 | } 77 | let buff = new Uint8Array(Number(size)) 78 | const reader = readableStream.getReader({ mode: 'byob' }) 79 | 80 | try { 81 | ret = await buffReadFrombyobReader(reader, buff, 0, size) 82 | } finally { 83 | reader.releaseLock() 84 | } 85 | return ret 86 | } 87 | 88 | export async function buffReadFrombyobReader (reader, buffer, offset, size) { 89 | const ret = null 90 | if (size <= 0) { 91 | return ret 92 | } 93 | let remainingSize = size 94 | let eof = false 95 | while (remainingSize > 0) { 96 | const { value, done } = await reader.read(new Uint8Array(buffer, offset, remainingSize)) 97 | if (value !== undefined) { 98 | buffer = value.buffer 99 | offset += value.byteLength 100 | remainingSize = remainingSize - value.byteLength 101 | } 102 | if (done && remainingSize > 0) { 103 | throw new ReadStreamClosed('short buffer') 104 | } 105 | eof = done 106 | } 107 | return {eof, buff: buffer} 108 | } 109 | 110 | export function getArrayBufferByteLength(arrayBuffer) { 111 | let ret = 0; 112 | arrayBuffer.forEach(element => { 113 | ret += element.byteLength; 114 | }); 115 | return ret; 116 | } -------------------------------------------------------------------------------- /utils/jitter_buffer.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree. 
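  JitterBuffer holds incoming chunks ordered by seqId for up to bufferSizeMs (default 200 ms)
  before releasing them: AddItem() inserts in order (chunks older than the current head are
  handed to droppedCallback), and once the buffer is full it returns the head element annotated
  with isDisco / repeatedOrBackwards flags so the consumer can detect gaps, duplicates and
  out-of-order delivery. GetStats() exposes the gap/loss counters and the buffered length.

  A minimal usage sketch (the 300 ms size and decodeChunk() are only illustrative, they are not
  part of this repo):

    const jb = new JitterBuffer(300, info => console.warn('late chunk dropped', info))
    const ready = jb.AddItem(chunk, seqId, extraData) // undefined until the buffer is full
    if (ready !== undefined) {
      decodeChunk(ready.chunk, ready.isDisco)
    }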
6 | */ 7 | 8 | const DEFAULT_BUFFER_SIZE_MS = 200 9 | 10 | export class JitterBuffer { 11 | constructor (maxSizeMs, droppedCallback) { 12 | this.bufferSizeMs = DEFAULT_BUFFER_SIZE_MS 13 | if (maxSizeMs !== undefined && maxSizeMs > 0) { 14 | this.bufferSizeMs = maxSizeMs 15 | } 16 | this.elementsList = [] 17 | 18 | this.droppedCallback = droppedCallback 19 | this.totalLengthMs = 0 20 | this.numTotalGaps = 0 21 | this.numTotalLostStreams = 0 22 | this.lastCorrectSeqId = undefined 23 | } 24 | 25 | AddItem (chunk, seqId, extraData) { 26 | let r 27 | // Order by SeqID 28 | if (this.elementsList.length <= 0) { 29 | this.elementsList.push({ chunk, seqId, extraData }) 30 | this.totalLengthMs += chunk.duration / 1000 31 | } else { 32 | // Anything later than 1st element will be dropped 33 | if (seqId <= this.elementsList[0].seqId) { 34 | // Arrived late to jitter buffer -> drop 35 | if (this.droppedCallback !== undefined) { 36 | this.droppedCallback({ seqId, firstBufferSeqId: this.elementsList[0].seqId }) 37 | } 38 | } else { 39 | let n = 0 40 | let exit = false 41 | while ((n < this.elementsList.length) && (!exit)) { 42 | if (seqId < this.elementsList[n].seqId) { 43 | this.elementsList.splice(n, 0, { chunk, seqId, extraData }) 44 | exit = true 45 | } 46 | n++ 47 | } 48 | if (exit === false) { 49 | this.elementsList.push({ chunk, seqId, extraData }) 50 | } 51 | this.totalLengthMs += chunk.duration / 1000 52 | } 53 | } 54 | 55 | // Get 1st element if jitter buffer full 56 | if (this.totalLengthMs >= this.bufferSizeMs) { 57 | r = this.elementsList.shift() 58 | 59 | // Check for discontinuities in the stream 60 | r.isDisco = false 61 | r.repeatedOrBackwards = false 62 | if (r.seqId >= 0) { // Init is -1 63 | if (this.lastCorrectSeqId !== undefined) { 64 | if (this.lastCorrectSeqId + 1 !== r.seqId) { 65 | r.isDisco = true 66 | this.numTotalGaps++ 67 | this.numTotalLostStreams += Math.abs(r.seqId - this.lastCorrectSeqId) 68 | 69 | // Check for repeated and backwards seqID 70 | if (r.seqId <= this.lastCorrectSeqId) { 71 | r.repeatedOrBackwards = true 72 | } else { 73 | this.lastCorrectSeqId = r.seqId 74 | } 75 | } else { 76 | this.lastCorrectSeqId = r.seqId 77 | } 78 | } else { 79 | this.lastCorrectSeqId = r.seqId 80 | } 81 | } 82 | this.totalLengthMs -= r.chunk.duration / 1000 83 | } 84 | return r 85 | } 86 | 87 | GetStats () { 88 | return { numTotalGaps: this.numTotalGaps, numTotalLostStreams: this.numTotalLostStreams, totalLengthMs: this.totalLengthMs, size: this.elementsList.length, currentMaSizeMs: this.bufferSizeMs } 89 | } 90 | 91 | Clear () { 92 | this.elementsList = [] 93 | this.totalLengthMs = 0 94 | this.numTotalGaps = 0 95 | this.numTotalLostStreams = 0 96 | this.lastSeqIdDelivered = undefined 97 | } 98 | 99 | UpdateMaxSize(bufferSizeMs) { 100 | if (bufferSizeMs > 0) { 101 | this.bufferSizeMs = bufferSizeMs; 102 | } 103 | } 104 | } 105 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to make participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex characteristics, gender identity and expression, 9 | level of experience, education, socio-economic status, nationality, personal 10 | appearance, race, 
religion, or sexual identity and orientation. 11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies within all project spaces, and it also applies when 49 | an individual is representing the project or its community in public spaces. 50 | Examples of representing a project or community include using an official 51 | project e-mail address, posting via an official social media account, or acting 52 | as an appointed representative at an online or offline event. Representation of 53 | a project may be further defined and clarified by project maintainers. 54 | 55 | This Code of Conduct also applies outside the project spaces when there is a 56 | reasonable belief that an individual's behavior may have a negative impact on 57 | the project or its community. 58 | 59 | ## Enforcement 60 | 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 62 | reported by contacting the project team at . All 63 | complaints will be reviewed and investigated and will result in a response that 64 | is deemed necessary and appropriate to the circumstances. The project team is 65 | obligated to maintain confidentiality with regard to the reporter of an incident. 66 | Further details of specific enforcement policies may be posted separately. 67 | 68 | Project maintainers who do not follow or enforce the Code of Conduct in good 69 | faith may face temporary or permanent repercussions as determined by other 70 | members of the project's leadership. 
71 | 72 | ## Attribution 73 | 74 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 75 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 76 | 77 | [homepage]: https://www.contributor-covenant.org 78 | 79 | For answers to common questions about this code of conduct, see 80 | https://www.contributor-covenant.org/faq 81 | -------------------------------------------------------------------------------- /encode/v_encoder.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree. 6 | */ 7 | 8 | import { sendMessageToMain, StateEnum } from '../utils/utils.js' 9 | import { ParseAVCDecoderConfigurationRecord } from "../utils/media/avc_decoder_configuration_record_parser.js" 10 | 11 | const WORKER_PREFIX = '[VIDEO-ENC]' 12 | 13 | const WEBCODECS_TIMESCALE = 1000000 14 | 15 | let frameDeliveredCounter = 0 16 | let chunkDeliveredCounter = 0 17 | 18 | let workerState = StateEnum.Created 19 | 20 | // Default values 21 | let encoderMaxQueueSize = 5 22 | let keyframeEvery = 60 23 | let insertNextKeyframe = false 24 | 25 | // Encoder 26 | const initVideoEncoder = { 27 | output: handleChunk, 28 | error: (e) => { 29 | if (workerState === StateEnum.Created) { 30 | console.error(e.message) 31 | } else { 32 | sendMessageToMain(WORKER_PREFIX, 'error', e.message) 33 | } 34 | } 35 | } 36 | 37 | let vEncoder = null 38 | 39 | function handleChunk (chunk, metadata) { 40 | // decoderConfig in h264 is AVCDecoderConfigurationRecord 41 | const frame_metadata = (metadata != undefined && metadata.decoderConfig != undefined && "description" in metadata.decoderConfig) ? metadata.decoderConfig.description : undefined; 42 | const msg = { type: 'vchunk', seqId: chunkDeliveredCounter++, chunk, metadata: frame_metadata, timebase: WEBCODECS_TIMESCALE} 43 | 44 | // Assume we are sending AVCDecoderConfigurationRecord in the metadata.description 45 | sendMessageToMain(WORKER_PREFIX, 'debug', `Chunk created. sId: ${msg.seqId}, pts: ${chunk.timestamp}, dur: ${chunk.duration}, type: ${chunk.type}, size: ${chunk.byteLength}, metadata_size:${(frame_metadata != undefined) ? frame_metadata.byteLength : 0}, avcDecoderConfigurationRecord: ${(frame_metadata != undefined) ? 
JSON.stringify(ParseAVCDecoderConfigurationRecord(frame_metadata)) : "-"}`) 46 | 47 | self.postMessage(msg) 48 | } 49 | 50 | self.addEventListener('message', async function (e) { 51 | if (workerState === StateEnum.Created) { 52 | workerState = StateEnum.Instantiated 53 | } 54 | 55 | if (workerState === StateEnum.Stopped) { 56 | sendMessageToMain(WORKER_PREFIX, 'info', 'Encoder is stopped it does not accept messages') 57 | return 58 | } 59 | 60 | const type = e.data.type 61 | if (type === 'stop') { 62 | workerState = StateEnum.Stopped 63 | // Make sure all requests has been processed 64 | await vEncoder.flush() 65 | 66 | vEncoder.close() 67 | workerState = StateEnum.Stopped 68 | return 69 | } 70 | if (type === 'vencoderini') { 71 | const encoderConfig = e.data.encoderConfig 72 | 73 | // eslint-disable-next-line no-undef 74 | vEncoder = new VideoEncoder(initVideoEncoder) 75 | 76 | vEncoder.configure(encoderConfig) 77 | if ('encoderMaxQueueSize' in e.data) { 78 | encoderMaxQueueSize = e.data.encoderMaxQueueSize 79 | } 80 | if ('keyframeEvery' in e.data) { 81 | keyframeEvery = e.data.keyframeEvery 82 | } 83 | sendMessageToMain(WORKER_PREFIX, 'info', `Encoder initialized: ${JSON.stringify(encoderConfig)}`); 84 | 85 | workerState = StateEnum.Running 86 | return 87 | } 88 | if (type !== 'vframe') { 89 | sendMessageToMain(WORKER_PREFIX, 'error', 'Invalid message received') 90 | return 91 | } 92 | 93 | const vFrame = e.data.vframe 94 | 95 | if (vEncoder.encodeQueueSize > encoderMaxQueueSize) { 96 | // Too many frames in the encoder queue, encoder is overwhelmed let's not add this frame 97 | sendMessageToMain(WORKER_PREFIX, 'dropped', { clkms: Date.now(), ts: vFrame.timestamp, msg: 'Dropped encoding video frame' }) 98 | vFrame.close() 99 | // Insert a keyframe after dropping 100 | insertNextKeyframe = true 101 | } else { 102 | const frameNum = frameDeliveredCounter++ 103 | const insertKeyframe = (frameNum % keyframeEvery) === 0 || (insertNextKeyframe === true) 104 | vEncoder.encode(vFrame, { keyFrame: insertKeyframe }) 105 | sendMessageToMain(WORKER_PREFIX, 'debug', `Encoded frame: ${frameNum}, key: ${insertKeyframe}`) 106 | vFrame.close() 107 | insertNextKeyframe = false 108 | frameDeliveredCounter++ 109 | } 110 | }) 111 | -------------------------------------------------------------------------------- /utils/media/avc_decoder_configuration_record_parser.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree. 
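  Parser for the AVCDecoderConfigurationRecord ('avcC', defined in ISO/IEC 14496-15), which is
  what WebCodecs exposes as decoderConfig.description for H.264 (see the comment in
  encode/v_encoder.js). It extracts configurationVersion, profile and level, the NALU length
  field size (lengthSizeMinusOne + 1) and the SPS/PPS (and SPS-ext) parameter sets, and
  GetVideoCodecStringFromAVCDecoderConfigurationRecord() builds the matching 'avc1.PPCCLL'
  codec string (with the constraint byte fixed to 00).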
6 | */ 7 | 8 | 'use strict'; 9 | 10 | import {DEFAULT_AVCC_HEADER_LENGTH, BitReaderHelper, GetUint16FromBufferBe } from "./avcc_parser.js" 11 | 12 | export function ParseAVCDecoderConfigurationRecord(data) { 13 | if (data == undefined || data == null) { 14 | return undefined 15 | } 16 | const avcVDCR = { 17 | configurationVersion: -1, 18 | avcProfileIndication: -1, 19 | profileCompatibility: -1, 20 | AVCLevelIndication: -1, 21 | avcHeaderLengthSize: DEFAULT_AVCC_HEADER_LENGTH, 22 | spsUnits: [], 23 | ppsUnits: [], 24 | chromaFormat: -1, 25 | bitDepthLuma: -1, 26 | bitDepthChroma: -1, 27 | spsExtUnits: [], 28 | dataBytes: new Uint8Array(data), 29 | }; 30 | 31 | let nPos = 0; 32 | avcVDCR.configurationVersion = avcVDCR.dataBytes[nPos++]; 33 | avcVDCR.avcProfileIndication = avcVDCR.dataBytes[nPos++]; 34 | avcVDCR.profileCompatibility = avcVDCR.dataBytes[nPos++]; 35 | avcVDCR.AVCLevelIndication = avcVDCR.dataBytes[nPos++]; 36 | const lengthSizeMinusOne = BitReaderHelper( 37 | avcVDCR.dataBytes.subarray(nPos, nPos + 1), 38 | 6, 39 | 2, 40 | ); 41 | nPos++; 42 | 43 | // Set AVC header length 44 | avcVDCR.avcHeaderLengthSize = lengthSizeMinusOne + 1; 45 | 46 | const numOfSequenceParameterSets = BitReaderHelper( 47 | avcVDCR.dataBytes.subarray(nPos, nPos + 1), 48 | 3, 49 | 5, 50 | ); 51 | nPos++; 52 | for (let n = 0; n < numOfSequenceParameterSets; n++) { 53 | const sequenceParameterSetLength = GetUint16FromBufferBe( 54 | avcVDCR.dataBytes.subarray(nPos, nPos + 2), 55 | ); 56 | nPos += 2; 57 | const spsNaluData = avcVDCR.dataBytes.subarray(nPos, nPos + sequenceParameterSetLength); 58 | avcVDCR.spsUnits.push(spsNaluData); 59 | nPos += sequenceParameterSetLength; 60 | } 61 | 62 | const numOfPictureParameterSets = avcVDCR.dataBytes[nPos++]; 63 | for (let n = 0; n < numOfPictureParameterSets; n++) { 64 | const pictureParameterSetLength = GetUint16FromBufferBe( 65 | avcVDCR.dataBytes.subarray(nPos, nPos + 2), 66 | ); 67 | nPos += 2; 68 | const ppsNaluData = avcVDCR.dataBytes.subarray(nPos, nPos + pictureParameterSetLength); 69 | avcVDCR.ppsUnits.push(ppsNaluData); 70 | nPos += pictureParameterSetLength; 71 | } 72 | 73 | if ( 74 | avcVDCR.avcProfileIndication !== 66 && 75 | avcVDCR.avcProfileIndication !== 77 && 76 | avcVDCR.avcProfileIndication !== 88 77 | ) { 78 | const chromaFormatNum = BitReaderHelper( 79 | avcVDCR.dataBytes.subarray(nPos, nPos + 1), 80 | 6, 81 | 2, 82 | ); 83 | nPos++; 84 | avcVDCR.chromaFormat = chromaFormatNum; 85 | 86 | const bitDepthLumaMinus8 = BitReaderHelper( 87 | avcVDCR.dataBytes.subarray(nPos, nPos + 1), 88 | 5, 89 | 3, 90 | ); 91 | nPos++; 92 | avcVDCR.bitDepthLuma = bitDepthLumaMinus8 + 8; 93 | const bitDepthChromaMinus8 = BitReaderHelper( 94 | avcVDCR.dataBytes.subarray(nPos, nPos + 1), 95 | 5, 96 | 3, 97 | ); 98 | nPos++; 99 | avcVDCR.bitDepthChroma = bitDepthChromaMinus8 + 8; 100 | 101 | const numOfSequenceParameterSetExt = avcVDCR.dataBytes[nPos++]; 102 | for (let n = 0; n < numOfSequenceParameterSetExt; n++) { 103 | const sequenceParameterSetExtLength = GetUint16FromBufferBe( 104 | avcVDCR.dataBytes.subarray(nPos, nPos + 2), 105 | ); 106 | nPos += 2; 107 | const spsExtNaluData = avcVDCR.dataBytes.subarray(nPos, nPos + sequenceParameterSetExtLength); 108 | avcVDCR.spsExtUnits.push(spsExtNaluData); 109 | nPos += sequenceParameterSetExtLength; 110 | } 111 | } 112 | 113 | return avcVDCR; 114 | } 115 | 116 | export function GetVideoCodecStringFromAVCDecoderConfigurationRecord(avcDecoderConfigurationRecord) { 117 | return GetVideoCodecStringFromProfileLevel("avc1", 
avcDecoderConfigurationRecord.avcProfileIndication, avcDecoderConfigurationRecord.AVCLevelIndication); 118 | } 119 | 120 | export function GetVideoCodecStringFromProfileLevel(codec, profile, level) { 121 | return codec + "." + profile.toString(16).toUpperCase().padStart(2, '0') + "00" + level.toString(16).toUpperCase().padStart(2, '0'); 122 | } -------------------------------------------------------------------------------- /decode/audio_decoder.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree. 6 | */ 7 | 8 | import { sendMessageToMain, StateEnum } from '../utils/utils.js' 9 | import { TsQueue } from '../utils/ts_queue.js' 10 | import { MIPayloadTypeEnum} from '../packager/mi_packager.js' 11 | 12 | const WORKER_PREFIX = '[AUDIO-DECO]' 13 | 14 | const MAX_DECODE_QUEUE_SIZE_FOR_WARNING_MS = 200 15 | 16 | let workerState = StateEnum.Created 17 | 18 | let audioDecoder = null 19 | 20 | // The Audio decoder does NOT track timestamps (bummer), it just uses the 1st one sent and at every decoded audio sample adds 1/fs (so sample time) 21 | // That means if we drop and audio packet those timestamps will be collapsed creating A/V out of sync 22 | let timestampOffset = 0 23 | let lastChunkSentTimestamp = -1 24 | 25 | const ptsQueue = new TsQueue() 26 | 27 | function processAudioFrame (aFrame) { 28 | self.postMessage({ type: 'aframe', frame: aFrame, queueSize: ptsQueue.getPtsQueueLengthInfo().size, queueLengthMs: ptsQueue.getPtsQueueLengthInfo().lengthMs, timestampCompensationOffset: timestampOffset }, [aFrame]) 29 | } 30 | 31 | function initializeDecoder(config) { 32 | // eslint-disable-next-line no-undef 33 | audioDecoder = new AudioDecoder({ 34 | output: frame => { 35 | processAudioFrame(frame) 36 | }, 37 | error: err => { 38 | sendMessageToMain(WORKER_PREFIX, 'error', 'Audio decoder. 
err: ' + err.message) 39 | } 40 | }) 41 | 42 | audioDecoder.addEventListener('dequeue', () => { 43 | if (audioDecoder != null) { 44 | ptsQueue.removeUntil(audioDecoder.decodeQueueSize) 45 | } 46 | }) 47 | 48 | audioDecoder.configure(config) 49 | 50 | workerState = StateEnum.Running 51 | 52 | sendMessageToMain(WORKER_PREFIX, 'info', `Initialized and configured: ${JSON.stringify(config)}`) 53 | } 54 | 55 | self.addEventListener('message', async function (e) { 56 | if (workerState === StateEnum.Created) { 57 | workerState = StateEnum.Instantiated 58 | } 59 | 60 | if (workerState === StateEnum.Stopped) { 61 | sendMessageToMain(WORKER_PREFIX, 'info', 'Encoder is stopped it does not accept messages') 62 | return 63 | } 64 | 65 | const type = e.data.type 66 | if (type === 'stop') { 67 | workerState = StateEnum.Stopped 68 | if (audioDecoder != null) { 69 | await audioDecoder.flush() 70 | audioDecoder.close() 71 | audioDecoder = null 72 | 73 | ptsQueue.clear() 74 | } 75 | workerState = StateEnum.Created 76 | timestampOffset = 0 77 | lastChunkSentTimestamp = -1 78 | } else if (type === 'audiochunk') { 79 | if (audioDecoder != null) { 80 | sendMessageToMain(WORKER_PREFIX, 'debug', `audio-${e.data.seqId} Received init, but AudioDecoder already initialized`) 81 | } else { 82 | let config; 83 | if (e.data.packagerType == MIPayloadTypeEnum.AudioAACMP4LCWCP) { 84 | config = {codec: "mp4a.40.02", sampleRate: e.data.sampleFreq, numberOfChannels: e.data.numChannels}; 85 | } else if (e.data.packagerType == MIPayloadTypeEnum.AudioOpusWCP) { 86 | config = {codec: "opus", sampleRate: e.data.sampleFreq, numberOfChannels: e.data.numChannels} 87 | } 88 | initializeDecoder(config); 89 | } 90 | 91 | sendMessageToMain(WORKER_PREFIX, 'debug', `audio-${e.data.seqId} Received chunk, chunkSize: ${e.data.chunk.byteLength}, metadataSize: -`); 92 | 93 | if (workerState !== StateEnum.Running) { 94 | sendMessageToMain(WORKER_PREFIX, 'warning', 'Received audio chunk, but NOT running state') 95 | return 96 | } 97 | ptsQueue.addToPtsQueue(e.data.chunk.timestamp, e.data.chunk.duration) 98 | 99 | if (e.data.isDisco && lastChunkSentTimestamp >= 0) { 100 | const addTs = e.data.chunk.timestamp - lastChunkSentTimestamp 101 | sendMessageToMain(WORKER_PREFIX, 'warning', `disco at seqId: ${e.data.seqId}, ts: ${e.data.chunk.timestamp}, added: ${addTs}`) 102 | timestampOffset += addTs 103 | } 104 | lastChunkSentTimestamp = e.data.chunk.timestamp + e.data.chunk.duration 105 | 106 | audioDecoder.decode(e.data.chunk) 107 | 108 | const decodeQueueInfo = ptsQueue.getPtsQueueLengthInfo() 109 | if (decodeQueueInfo.lengthMs > MAX_DECODE_QUEUE_SIZE_FOR_WARNING_MS) { 110 | sendMessageToMain(WORKER_PREFIX, 'warning', 'Decode queue size is ' + decodeQueueInfo.lengthMs + 'ms (' + decodeQueueInfo.size + ' frames), audioDecoder: ' + audioDecoder.decodeQueueSize) 111 | } else { 112 | sendMessageToMain(WORKER_PREFIX, 'debug', 'Decode queue size is ' + decodeQueueInfo.lengthMs + 'ms (' + decodeQueueInfo.size + ' frames), audioDecoder: ' + audioDecoder.decodeQueueSize) 113 | } 114 | } else { 115 | sendMessageToMain(WORKER_PREFIX, 'error', 'Invalid message received') 116 | } 117 | }); 118 | -------------------------------------------------------------------------------- /utils/varint.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree. 
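  QUIC-style variable-length integers (RFC 9000, section 16): the two most significant bits of
  the first byte select a 1-, 2-, 4- or 8-byte encoding and the remaining bits carry the value.
  For example, numberToVarInt(37) produces the single byte 0x25, and numberToVarInt(15293)
  produces the two bytes 0x7b 0xbd (prefix 01 selects the 2-byte form, 0x3bbd = 15293). The
  *OrThrow readers pull a varint off a BYOB ReadableStream and throw ReadStreamClosed if the
  stream ends mid-value; values above Number.MAX_SAFE_INTEGER are rejected on encode because
  they do not fit a JS number.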
6 | */ 7 | 8 | import { buffReadFrombyobReader, ReadStreamClosed } from './buffer_utils.js' 9 | 10 | const MAX_U6 = Math.pow(2, 6) - 1 11 | const MAX_U14 = Math.pow(2, 14) - 1 12 | const MAX_U30 = Math.pow(2, 30) - 1 13 | const MAX_U53 = Number.MAX_SAFE_INTEGER 14 | // const MAX_U62 = 2n ** 62n - 1n 15 | 16 | export function numberToVarInt (v) { 17 | if (v <= MAX_U6) { 18 | return setUint8(v) 19 | } else if (v <= MAX_U14) { 20 | return setUint16(v | 0x4000) 21 | } else if (v <= MAX_U30) { 22 | return setUint32(v | 0x80000000) 23 | } else if (v <= MAX_U53) { 24 | return setUint64(BigInt(v) | 0xc000000000000000n) 25 | } else { 26 | throw new Error(`overflow, value larger than 53-bits: ${v}`) 27 | } 28 | } 29 | 30 | export function varIntToNumbeFromBuffer(buff, offset) { 31 | let startOffset = 0 32 | if (typeof offset === 'number') { 33 | startOffset = offset 34 | } 35 | let ret = {num: undefined, byteLength: 0} 36 | const size = (new DataView(buff, startOffset, 1).getUint8() & 0xc0) >> 6 37 | if (buff.byteLength < size + 1) { 38 | throw new Error(`Size of varint does NOT match (size: ${size}, buff.byteLength: ${buff.byteLength})`) 39 | } 40 | if (size === 0) { 41 | ret.num = new DataView(buff, startOffset, 1).getUint8() & 0x3f 42 | ret.byteLength = 1 43 | } else if (size === 1) { 44 | ret.num = new DataView(buff, startOffset, 2).getUint16() & 0x3fff 45 | ret.byteLength = 2 46 | } else if (size === 2) { 47 | ret.num = new DataView(buff, startOffset, 4).getUint32() & 0x3fffffff 48 | ret.byteLength = 4 49 | } else if (size === 3) { 50 | ret.num = Number(new DataView(buff, startOffset, 8).getBigUint64() & BigInt('0x3fffffffffffffff')) 51 | ret.byteLength = 8 52 | } else { 53 | throw new Error('Impossible size for varint') 54 | } 55 | return ret 56 | } 57 | 58 | export async function varIntToNumberOrThrow (readableStream) { 59 | let ret = await varIntToNumber(readableStream) 60 | if (ret.eof) { 61 | throw new ReadStreamClosed(`Connection closed while reading data`) 62 | } 63 | return ret.num 64 | } 65 | 66 | export async function varIntToNumberAndLengthOrThrow (readableStream) { 67 | let ret = await varIntToNumber(readableStream) 68 | if (ret.eof) { 69 | throw new ReadStreamClosed(`Connection closed while reading data`) 70 | } 71 | return {num: ret.num, byteLength: ret.byteLength} 72 | } 73 | 74 | async function varIntToNumber (readableStream) { 75 | const ret = {eof: false, num: undefined, byteLength: 0} 76 | const reader = readableStream.getReader({ mode: 'byob' }) 77 | try { 78 | let buff = new ArrayBuffer(8) 79 | let retData = await buffReadFrombyobReader(reader, buff, 0, 1) 80 | ret.byteLength = ret.byteLength + 1; 81 | ret.eof = retData.eof 82 | if (!ret.eof) { 83 | buff = retData.buff 84 | const size = (new DataView(buff, 0, 1).getUint8() & 0xc0) >> 6 85 | if (size === 0) { 86 | ret.eof = retData.eof 87 | ret.num = new DataView(buff, 0, 1).getUint8() & 0x3f 88 | } else if (size === 1) { 89 | retData = await buffReadFrombyobReader(reader, buff, 1, 1) 90 | ret.byteLength = ret.byteLength + 1; 91 | buff = retData.buff 92 | ret.eof = retData.eof 93 | ret.num = new DataView(buff, 0, 2).getUint16() & 0x3fff 94 | } else if (size === 2) { 95 | retData = await buffReadFrombyobReader(reader, buff, 1, 3) 96 | ret.byteLength = ret.byteLength + 3; 97 | buff = retData.buff 98 | ret.eof = retData.eof 99 | ret.num = new DataView(buff, 0, 4).getUint32() & 0x3fffffff 100 | } else if (size === 3) { 101 | retData = await buffReadFrombyobReader(reader, buff, 1, 7) 102 | ret.byteLength = ret.byteLength + 7; 103 
| buff = retData.buff 104 | ret.eof = retData.eof 105 | ret.num = Number(new DataView(buff, 0, 8).getBigUint64() & BigInt('0x3fffffffffffffff')) 106 | } else { 107 | throw new Error('Impossible size for varint') 108 | } 109 | } 110 | } finally { 111 | reader.releaseLock() 112 | } 113 | return ret 114 | } 115 | 116 | function setUint8 (v) { 117 | const ret = new Uint8Array(1) 118 | ret[0] = v 119 | return ret 120 | } 121 | 122 | function setUint16 (v) { 123 | const ret = new Uint8Array(2) 124 | const view = new DataView(ret.buffer) 125 | view.setUint16(0, v) 126 | return ret 127 | } 128 | 129 | function setUint32 (v) { 130 | const ret = new Uint8Array(4) 131 | const view = new DataView(ret.buffer) 132 | view.setUint32(0, v) 133 | return ret 134 | } 135 | 136 | function setUint64 (v) { 137 | const ret = new Uint8Array(8) 138 | const view = new DataView(ret.buffer) 139 | view.setBigUint64(0, v) 140 | return ret 141 | } 142 | -------------------------------------------------------------------------------- /render/source_buffer_worklet.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree. 6 | */ 7 | 8 | const SharedStates = { 9 | AUDIO_BUFF_START: 0, // The reader only modifies this pointer 10 | AUDIO_BUFF_END: 1, // The writer (this) only modifies this pointer 11 | 12 | AUDIO_INSERTED_SILENCE_MS: 2, 13 | 14 | IS_PLAYING: 3 // Indicates playback state 15 | } 16 | 17 | class SourceBuffer extends AudioWorkletProcessor { 18 | // Custom AudioParams can be defined with this static getter. 19 | static get parameterDescriptors () { 20 | return [] 21 | } 22 | 23 | constructor () { 24 | // The super constructor call is required. 
25 | super() 26 | 27 | this.contextSampleFrequency = -1 28 | 29 | this.port.onmessage = this.handleMessage.bind(this) 30 | 31 | this.totalSilenceInsertedSamples = 0 32 | 33 | this.sharedCommBuffer = null 34 | this.sharedAudiobuffers = null 35 | this.circularBufferSizeSamples = 0 36 | } 37 | 38 | handleMessage (e) { 39 | if (e.data.type === 'iniabuffer') { 40 | if ('config' in e.data) { 41 | if ('contextSampleFrequency' in e.data.config) { 42 | this.contextSampleFrequency = e.data.config.contextSampleFrequency 43 | } 44 | if ('cicularAudioSharedBuffers' in e.data.config) { 45 | this.sharedCommBuffer = e.data.config.cicularAudioSharedBuffers.sharedCommBuffer 46 | this.sharedAudiobuffers = e.data.config.cicularAudioSharedBuffers.sharedAudiobuffers 47 | 48 | // States access 49 | this.sharedStates = new Int32Array(this.sharedCommBuffer) 50 | } 51 | if ('circularBufferSizeSamples' in e.data.config) { 52 | this.circularBufferSizeSamples = e.data.config.circularBufferSizeSamples 53 | } 54 | } 55 | } 56 | } 57 | 58 | process (inputs, outputs, parameters) { 59 | // Assume single input 60 | const outputFirstTrack = outputs[0] 61 | // Assuming all channels has same length 62 | const numOutSamplesFirstChannel = outputFirstTrack[0].length 63 | if ((numOutSamplesFirstChannel === undefined) || (numOutSamplesFirstChannel <= 0)) { 64 | throw new Error('Num samples to process for 1st channel is not valid') 65 | } 66 | if (this.sharedCommBuffer === null) { 67 | return true // Not init yet 68 | } 69 | const isPlaying = Atomics.load(this.sharedStates, SharedStates.IS_PLAYING) 70 | if (isPlaying === 0) { 71 | return true // Not in playing state yet 72 | } 73 | 74 | if (this.circularBufferSizeSamples <= 0) { 75 | throw new Error('Bad size for circular audio buffer') 76 | } 77 | 78 | const start = Atomics.load(this.sharedStates, SharedStates.AUDIO_BUFF_START) 79 | const end = Atomics.load(this.sharedStates, SharedStates.AUDIO_BUFF_END) 80 | 81 | if (start < 0 || end < 0) { 82 | return true 83 | } 84 | if (numOutSamplesFirstChannel > this._getUsedSlots(start, end)) { 85 | this.totalSilenceInsertedSamples += numOutSamplesFirstChannel 86 | const totalSilenceInsertedMs = this.totalSilenceInsertedSamples * 1000 / this.contextSampleFrequency 87 | Atomics.store(this.sharedStates, SharedStates.AUDIO_INSERTED_SILENCE_MS, totalSilenceInsertedMs) 88 | } else { 89 | // Loop all channels 90 | if (start + numOutSamplesFirstChannel <= this.circularBufferSizeSamples) { 91 | // All 92 | for (let c = 0; c < outputFirstTrack.length; c++) { 93 | const outputRingBufferPortion = new Float32Array(this.sharedAudiobuffers[c], start * Float32Array.BYTES_PER_ELEMENT, numOutSamplesFirstChannel) 94 | outputFirstTrack[c].set(outputRingBufferPortion) 95 | } 96 | 97 | Atomics.store(this.sharedStates, SharedStates.AUDIO_BUFF_START, start + numOutSamplesFirstChannel) 98 | } else { 99 | const samplesToCopyFirstHalf = this.circularBufferSizeSamples - start 100 | const samplesToCopySecondsHalf = numOutSamplesFirstChannel - samplesToCopyFirstHalf 101 | for (let c = 0; c < outputFirstTrack.length; c++) { 102 | // First half 103 | const outputRingBufferPortionFirstHalf = new Float32Array(this.sharedAudiobuffers[c], start * Float32Array.BYTES_PER_ELEMENT, samplesToCopyFirstHalf) 104 | outputFirstTrack[c].set(outputRingBufferPortionFirstHalf) 105 | // Second half 106 | const outputRingBufferPortionSecondHalf = new Float32Array(this.sharedAudiobuffers[c], 0, samplesToCopySecondsHalf) 107 | outputFirstTrack[c].set(outputRingBufferPortionSecondHalf, 
samplesToCopyFirstHalf) 108 | } 109 | 110 | Atomics.store(this.sharedStates, SharedStates.AUDIO_BUFF_START, samplesToCopySecondsHalf) 111 | } 112 | } 113 | return true 114 | } 115 | 116 | _getUsedSlots (start, end) { 117 | if (start === end) { 118 | return 0 119 | } else if (end > start) { 120 | return end - start 121 | } else { 122 | return (this.circularBufferSizeSamples - start) + end 123 | } 124 | } 125 | } 126 | registerProcessor('source-buffer', SourceBuffer) 127 | -------------------------------------------------------------------------------- /src-player/simple.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 9 | 10 | 11 | 32 | Test Ultra low latency with WebCodecs + WebTransport PLAYER (by Jordi Cenzano) 33 | 34 | 35 | 36 |

Simple MOQ subscriber
MOQT Version: , MediaPackager Version:
Data needed
(Leave it blank to NOT use it)
[form inputs, buttons, and the surrounding HTML markup of this page were not preserved in this extraction; only the visible text labels above survive]
    60 | 61 | -------------------------------------------------------------------------------- /decode/video_decoder.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree. 6 | */ 7 | 8 | import { sendMessageToMain, StateEnum, compareArrayBuffer } from '../utils/utils.js' 9 | import { TsQueue } from '../utils/ts_queue.js' 10 | import { ParseAVCDecoderConfigurationRecord, GetVideoCodecStringFromAVCDecoderConfigurationRecord } from "../utils/media/avc_decoder_configuration_record_parser.js" 11 | import { ParseH264NALs, DEFAULT_AVCC_HEADER_LENGTH } from "../utils/media/avcc_parser.js" 12 | 13 | const WORKER_PREFIX = '[VIDEO-DECO]' 14 | 15 | const MAX_DECODE_QUEUE_SIZE_FOR_WARNING_MS = 500 16 | const MAX_QUEUED_CHUNKS_DEFAULT = 60 17 | 18 | let workerState = StateEnum.Created 19 | 20 | let videoDecoder = null 21 | 22 | let lastMetadataUsed = null 23 | 24 | let waitForKeyFrame = true 25 | let discardedDelta = 0 26 | let discardedBufferFull = 0 27 | const maxQueuedChunks = MAX_QUEUED_CHUNKS_DEFAULT 28 | 29 | // Unlike the audio decoder video decoder tracks timestamps between input - output, so timestamps of RAW frames matches the timestamps of encoded frames 30 | 31 | const ptsQueue = new TsQueue() 32 | 33 | function processVideoFrame (vFrame) { 34 | self.postMessage({ type: 'vframe', frame: vFrame, queueSize: ptsQueue.getPtsQueueLengthInfo().size, queueLengthMs: ptsQueue.getPtsQueueLengthInfo().lengthMs }, [vFrame]) 35 | } 36 | 37 | function setWaitForKeyframe (value) { 38 | waitForKeyFrame = value 39 | } 40 | 41 | function isWaitingForKeyframe () { 42 | return waitForKeyFrame 43 | } 44 | 45 | function getAndOverrideInitDataValues(metadata) { 46 | // Assume we are sending AVCDecoderConfigurationRecord in the metadata.description 47 | const avcDecoderConfigurationRecordInfo = ParseAVCDecoderConfigurationRecord(metadata); 48 | 49 | // Override values 50 | // We can get the width and height from SPS inside AVCDecoderConfigurationRecord but that is complex and NOT necessary 51 | const config = {codec: GetVideoCodecStringFromAVCDecoderConfigurationRecord(avcDecoderConfigurationRecordInfo) , description: metadata}; 52 | config.optimizeForLatency = true 53 | // In my test @2022/11 with hardware accel could NOT get real time decoding, 54 | // switching to soft decoding fixed everything (h264) 55 | config.hardwareAcceleration = 'prefer-software' 56 | 57 | return {config, avcDecoderConfigurationRecordInfo}; 58 | } 59 | 60 | function configureDecoder(seqId, metadata) { 61 | if (videoDecoder == null) { 62 | sendMessageToMain(WORKER_PREFIX, 'warn', `SeqId: ${seqId} Could NOT initialize decoder, decoder was null at this time`) 63 | return 64 | } 65 | const ret = getAndOverrideInitDataValues(metadata) 66 | 67 | sendMessageToMain(WORKER_PREFIX, 'info', `SeqId: ${seqId} Received different init, REinitializing the VideoDecoder. 
Config: ${JSON.stringify(ret.config)}, avcDecoderConfigurationRecord: ${JSON.stringify(ret.avcDecoderConfigurationRecordInfo)}`) 68 | videoDecoder.configure(ret.config) 69 | } 70 | 71 | self.addEventListener('message', async function (e) { 72 | if (workerState === StateEnum.Created) { 73 | workerState = StateEnum.Instantiated 74 | } 75 | 76 | if (workerState === StateEnum.Stopped) { 77 | sendMessageToMain(WORKER_PREFIX, 'info', 'Encoder is stopped it does not accept messages') 78 | return 79 | } 80 | 81 | const type = e.data.type 82 | if (type === 'stop') { 83 | workerState = StateEnum.Stopped 84 | if (videoDecoder != null) { 85 | await videoDecoder.flush() 86 | videoDecoder.close() 87 | videoDecoder = null 88 | 89 | ptsQueue.clear() 90 | } 91 | workerState = StateEnum.Created 92 | } else if (type === 'videochunk') { 93 | if (e.data.metadata !== undefined && e.data.metadata != null) { 94 | sendMessageToMain(WORKER_PREFIX, 'debug', `SeqId: ${e.data.seqId} Received chunk, chunkSize: ${e.data.chunk.byteLength}, metadataSize: ${e.data.metadata.byteLength}`) 95 | if (videoDecoder != null) { 96 | if (lastMetadataUsed == null || !compareArrayBuffer(lastMetadataUsed, e.data.metadata)) { 97 | configureDecoder(e.data.seqId, e.data.metadata) 98 | } 99 | lastMetadataUsed = e.data.metadata 100 | } else { 101 | // Initialize video decoder 102 | // eslint-disable-next-line no-undef 103 | videoDecoder = new VideoDecoder({ 104 | output: frame => { 105 | processVideoFrame(frame) 106 | }, 107 | error: err => { 108 | sendMessageToMain(WORKER_PREFIX, 'error', 'Video decoder. err: ' + err.message) 109 | } 110 | }) 111 | 112 | videoDecoder.addEventListener('dequeue', () => { 113 | if (videoDecoder != null) { 114 | ptsQueue.removeUntil(videoDecoder.decodeQueueSize) 115 | } 116 | }) 117 | 118 | configureDecoder(e.data.seqId, e.data.metadata) 119 | lastMetadataUsed = e.data.metadata 120 | 121 | workerState = StateEnum.Running 122 | setWaitForKeyframe(true) 123 | } 124 | } else { 125 | sendMessageToMain(WORKER_PREFIX, 'debug', `SeqId: ${e.data.seqId} Received chunk, chunkSize: ${e.data.chunk.byteLength}`) 126 | } 127 | 128 | if (workerState !== StateEnum.Running) { 129 | sendMessageToMain(WORKER_PREFIX, 'warning', 'Received video chunk, but NOT running state') 130 | return 131 | } 132 | 133 | if (videoDecoder.decodeQueueSize >= maxQueuedChunks) { 134 | discardedBufferFull++ 135 | sendMessageToMain(WORKER_PREFIX, 'warning', 'Discarded ' + discardedBufferFull + ' video chunks because decoder buffer is full') 136 | return 137 | } 138 | 139 | discardedBufferFull = 0 140 | 141 | // If there is a disco, we need to wait for a new key 142 | if (e.data.isDisco) { 143 | setWaitForKeyframe(true) 144 | } 145 | 146 | // The message is video chunk 147 | if (isWaitingForKeyframe() && (e.data.chunk.type !== 'key')) { 148 | // Discard Frame 149 | discardedDelta++ 150 | } else { 151 | if (discardedDelta > 0) { 152 | sendMessageToMain(WORKER_PREFIX, 'warning', 'Discarded ' + discardedDelta + ' video chunks before key') 153 | } 154 | discardedDelta = 0 155 | setWaitForKeyframe(false) 156 | 157 | ptsQueue.removeUntil(videoDecoder.decodeQueueSize) 158 | ptsQueue.addToPtsQueue(e.data.chunk.timestamp, e.data.chunk.duration) 159 | 160 | // This is verbose and slow 161 | if ("verbose" in e.data && e.data.verbose === true) { 162 | // Assumes it is h264 AVCC with 4 bytes of size length 163 | const chunkDataBuffer = new Uint8Array(e.data.chunk.byteLength) 164 | e.data.chunk.copyTo(chunkDataBuffer); 165 | const chunkNALUInfo = 
ParseH264NALs(chunkDataBuffer, DEFAULT_AVCC_HEADER_LENGTH); 166 | sendMessageToMain(WORKER_PREFIX, 'info', `New chunk SeqId: ${e.data.seqId}, NALUS: ${JSON.stringify(chunkNALUInfo)}`) 167 | } 168 | videoDecoder.decode(e.data.chunk) 169 | 170 | const decodeQueueInfo = ptsQueue.getPtsQueueLengthInfo() 171 | if (decodeQueueInfo.lengthMs > MAX_DECODE_QUEUE_SIZE_FOR_WARNING_MS) { 172 | sendMessageToMain(WORKER_PREFIX, 'warning', 'Decode queue size is ' + decodeQueueInfo.lengthMs + 'ms (' + decodeQueueInfo.size + ' frames), videoDecoder: ' + videoDecoder.decodeQueueSize) 173 | } else { 174 | sendMessageToMain(WORKER_PREFIX, 'debug', 'Decode queue size is ' + decodeQueueInfo.lengthMs + 'ms (' + decodeQueueInfo.size + ' frames), videoDecoder: ' + videoDecoder.decodeQueueSize) 175 | } 176 | } 177 | } else { 178 | sendMessageToMain(WORKER_PREFIX, 'error', 'Invalid message received') 179 | } 180 | }) 181 | -------------------------------------------------------------------------------- /render/audio_circular_buffer.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree. 6 | */ 7 | 8 | const SharedStates = { 9 | AUDIO_BUFF_START: 0, // The reader only modifies this pointer 10 | AUDIO_BUFF_END: 1, // The writer (this) only modifies this pointer 11 | 12 | AUDIO_INSERTED_SILENCE_MS: 2, 13 | 14 | IS_PLAYING: 3 // Indicates playback state 15 | } 16 | 17 | // Keep only last 30 audio frames in the TS index 18 | const MAX_ITEMS_IN_TS_INDEX = 30 19 | 20 | export class CicularAudioSharedBuffer { 21 | constructor () { 22 | this.sampleIndexToTS = null // In Us 23 | this.sharedAudiobuffers = null 24 | this.sharedCommBuffer = new SharedArrayBuffer(Object.keys(SharedStates).length * Int32Array.BYTES_PER_ELEMENT) 25 | this.size = -1 26 | 27 | this.contextFrequency = -1 28 | 29 | // Get TypedArrayView from SAB. 30 | this.sharedStates = new Int32Array(this.sharedCommBuffer) 31 | 32 | this.onDropped = null 33 | 34 | // Initialize |States| buffer. 35 | Atomics.store(this.sharedStates, SharedStates.AUDIO_BUFF_START, -1) 36 | Atomics.store(this.sharedStates, SharedStates.AUDIO_BUFF_END, -1) 37 | Atomics.store(this.sharedStates, SharedStates.AUDIO_INSERTED_SILENCE_MS, 0) 38 | 39 | // Last sent timestamp 40 | this.lastTimestamp = undefined 41 | } 42 | 43 | SetCallbacks (onDropped) { 44 | this.onDropped = onDropped 45 | } 46 | 47 | Init (numChannels, numSamples, contextFrequency) { 48 | if (this.sharedAudiobuffers != null) { 49 | throw new Error('Already initialized') 50 | } 51 | if ((numChannels <= 0) || (numChannels === undefined)) { 52 | throw new Error('Passed bad numChannels') 53 | } 54 | if ((numSamples <= 0) || (numSamples === undefined)) { 55 | throw new Error('Passed bad numSamples') 56 | } 57 | this.sharedAudiobuffers = [] 58 | for (let c = 0; c < numChannels; c++) { 59 | this.sharedAudiobuffers.push(new SharedArrayBuffer(numSamples * Float32Array.BYTES_PER_ELEMENT)) 60 | } 61 | 62 | this.contextFrequency = contextFrequency 63 | this.lastTimestamp = -1 64 | 65 | this.size = numSamples 66 | this.sampleIndexToTS = [] 67 | 68 | Atomics.store(this.sharedStates, SharedStates.AUDIO_BUFF_START, 0) 69 | Atomics.store(this.sharedStates, SharedStates.AUDIO_BUFF_END, 0) 70 | } 71 | 72 | Add (aFrame, overrideFrameTs) { 73 | const frameTimestamp = (overrideFrameTs === undefined) ? 
aFrame.timestamp : overrideFrameTs 74 | if (aFrame === undefined) { 75 | throw new Error('Passed undefined aFrame') 76 | } 77 | if (aFrame.numberOfChannels !== this.sharedAudiobuffers.length) { 78 | throw new Error(`Channels diffent than expected, expected ${this.sharedAudiobuffers.length}, passed: ${aFrame.numberOfChannels}`) 79 | } 80 | if (aFrame.sampleRate !== this.contextFrequency) { 81 | throw new Error('Error sampling frequency received does NOT match local audio renderer. sampleFrequency: ' + this.sampleFrequency + ', contextSampleFrequency: ' + this.contextSampleFrequency) 82 | } 83 | 84 | const samplesToAdd = aFrame.numberOfFrames 85 | 86 | const start = Atomics.load(this.sharedStates, SharedStates.AUDIO_BUFF_START) 87 | let end = Atomics.load(this.sharedStates, SharedStates.AUDIO_BUFF_END) 88 | 89 | if (samplesToAdd > this._getFreeSlots(start, end)) { 90 | if (this.onDropped != null) { 91 | this.onDropped({ clkms: Date.now(), mediaType: 'audio', ts: frameTimestamp, msg: 'Dropped PCM audio frame, ring buffer full' }) 92 | } 93 | } else { 94 | // This will always return recent TS. This is a cicular buffer, we are indexing with numsample in the buffer, so things will get messy if we do not ask for GetStats for more than buffer size. And this happens when tab loses focus 95 | this._cleanUpIndex() 96 | this.sampleIndexToTS.push({ sampleIndex: end, ts: frameTimestamp }) 97 | if (end + samplesToAdd <= this.size) { 98 | // All 99 | for (let c = 0; c < aFrame.numberOfChannels; c++) { 100 | const outputRingBuffer = new Float32Array(this.sharedAudiobuffers[c], end * Float32Array.BYTES_PER_ELEMENT) 101 | aFrame.copyTo(outputRingBuffer, { planeIndex: c, frameOffset: 0, frameCount: samplesToAdd }) 102 | } 103 | end += samplesToAdd 104 | } else { 105 | const samplesToAddFirstHalf = this.size - end 106 | const samplesToAddSecondsHalf = samplesToAdd - samplesToAddFirstHalf 107 | for (let c = 0; c < aFrame.numberOfChannels; c++) { 108 | // First half 109 | const outputRingBuffer1 = new Float32Array(this.sharedAudiobuffers[c], end * Float32Array.BYTES_PER_ELEMENT, samplesToAddFirstHalf) 110 | aFrame.copyTo(outputRingBuffer1, { planeIndex: c, frameOffset: 0, frameCount: samplesToAddFirstHalf }) 111 | 112 | // Second half 113 | const outputRingBuffer2 = new Float32Array(this.sharedAudiobuffers[c], 0, samplesToAddSecondsHalf) 114 | aFrame.copyTo(outputRingBuffer2, { planeIndex: c, frameOffset: samplesToAddFirstHalf, frameCount: samplesToAddSecondsHalf }) 115 | } 116 | end = samplesToAddSecondsHalf 117 | } 118 | } 119 | Atomics.store(this.sharedStates, SharedStates.AUDIO_BUFF_END, end) 120 | } 121 | 122 | GetStats () { 123 | const start = Atomics.load(this.sharedStates, SharedStates.AUDIO_BUFF_START) // Reader 124 | const end = Atomics.load(this.sharedStates, SharedStates.AUDIO_BUFF_END) // Writer 125 | 126 | // Find the last sent timestamp 127 | let retIndexTs 128 | let n = 0 129 | let bExit = false 130 | while (n < this.sampleIndexToTS.length && !bExit) { 131 | if (this._isSentSample(this.sampleIndexToTS[n].sampleIndex, start, end)) { 132 | retIndexTs = n 133 | } else { 134 | if (retIndexTs !== undefined) { 135 | bExit = true 136 | } 137 | } 138 | n++ 139 | } 140 | if (retIndexTs !== undefined) { 141 | const lastFrameTimestampSent = this.sampleIndexToTS[retIndexTs].ts 142 | const extraSamplesSent = start - this.sampleIndexToTS[retIndexTs].sampleIndex 143 | 144 | // Adjust at sample level 145 | // Assume ts in nanosec 146 | this.lastTimestamp = lastFrameTimestampSent + (extraSamplesSent * 1000 * 
1000) / this.contextFrequency 147 | 148 | // Remove old indexes (already sent) 149 | this.sampleIndexToTS = this.sampleIndexToTS.slice(retIndexTs + 1) 150 | } 151 | 152 | const sizeSamples = this._getUsedSlots(start, end) 153 | const sizeMs = Math.floor((sizeSamples * 1000) / this.contextFrequency) 154 | const totalSilenceInsertedMs = Atomics.load(this.sharedStates, SharedStates.AUDIO_INSERTED_SILENCE_MS) 155 | const isPlaying = Atomics.load(this.sharedStates, SharedStates.IS_PLAYING) 156 | 157 | return { currentTimestamp: this.lastTimestamp, queueSize: sizeSamples, queueLengthMs: sizeMs, totalSilenceInsertedMs, isPlaying } 158 | } 159 | 160 | Play () { 161 | Atomics.store(this.sharedStates, SharedStates.IS_PLAYING, 1) 162 | } 163 | 164 | GetSharedBuffers () { 165 | if (this.sharedAudiobuffers === null) { 166 | throw new Error('Not initialized yet') 167 | } 168 | return { sharedAudiobuffers: this.sharedAudiobuffers, sharedCommBuffer: this.sharedCommBuffer } 169 | } 170 | 171 | Clear () { 172 | this.sharedAudiobuffers = null 173 | this.size = -1 174 | this.sampleIndexToTS = null 175 | this.contextFrequency = -1 176 | this.lastTimestamp = undefined 177 | 178 | Atomics.store(this.sharedStates, SharedStates.AUDIO_BUFF_START, -1) 179 | Atomics.store(this.sharedStates, SharedStates.AUDIO_BUFF_END, -1) 180 | Atomics.store(this.sharedStates, SharedStates.AUDIO_INSERTED_SILENCE_MS, 0) 181 | Atomics.store(this.sharedStates, SharedStates.IS_PLAYING, 0) 182 | } 183 | 184 | _cleanUpIndex() { 185 | if (this.sampleIndexToTS == null) { 186 | return 187 | } 188 | while (this.sampleIndexToTS.length > MAX_ITEMS_IN_TS_INDEX) { 189 | this.sampleIndexToTS.shift() 190 | } 191 | } 192 | 193 | _getUsedSlots (start, end) { 194 | if (start === end) { 195 | return 0 196 | } else if (end > start) { 197 | return end - start 198 | } else { 199 | return (this.size - start) + end 200 | } 201 | } 202 | 203 | _getFreeSlots (start, end) { 204 | return this.size - this._getUsedSlots(start, end) 205 | } 206 | 207 | _isSentSample (index, start, end) { 208 | if (start === end) { 209 | return false 210 | } else if (end > start) { 211 | return index <= start 212 | } else { 213 | return (index <= start && index > end) 214 | } 215 | } 216 | } 217 | -------------------------------------------------------------------------------- /src-encoder/simple.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 9 | 10 | 11 | 32 | Simple MOQ publisher (by Jordi Cenzano) 33 | 34 | 35 | 36 |

Simple MOQ publisher
MOQT Version: , MediaPackager Version:
Data needed
(Leave it blank to NOT use it)
Object data
[form inputs, buttons, and the surrounding HTML markup of this page were not preserved in this extraction; only the visible text labels above survive]
    74 | 75 | -------------------------------------------------------------------------------- /packager/mi_packager.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree. 6 | */ 7 | 8 | // Follows "draft-cenzano-moq-media-interop/": https://datatracker.ietf.org/doc/draft-cenzano-moq-media-interop/ 9 | 10 | import { numberToVarInt, varIntToNumbeFromBuffer } from '../utils/varint.js' 11 | import { buffRead, concatBuffer, readUntilEof } from '../utils/buffer_utils.js' 12 | import { MOQ_EXT_HEADER_TYPE_MOQMI_MEDIA_TYPE, MOQ_EXT_HEADER_TYPE_MOQMI_VIDEO_H264_IN_AVCC_METADATA, MOQ_EXT_HEADER_TYPE_MOQMI_VIDEO_H264_IN_AVCC_EXTRADATA, MOQ_EXT_HEADER_TYPE_MOQMI_AUDIO_OPUS_METADATA, MOQ_EXT_HEADER_TYPE_MOQMI_AUDIO_AACLC_MPEG4_METADATA, MOQ_EXT_HEADER_TYPE_MOQMI_TEXT_UTF8_METADATA, moqCreateKvPair} from '../utils/moqt.js' 13 | 14 | 'use strict' 15 | 16 | export const MI_PACKAGER_VERSION = "02" 17 | 18 | // Values for MOQ_EXT_HEADER_TYPE_MOQMI_MEDIA_TYPE 19 | export const MOQ_EXT_HEADER_TYPE_MOQMI_MEDIA_VALUE_VIDEO_H264_IN_AVCC = 0x00 20 | export const MOQ_EXT_HEADER_TYPE_MOQMI_MEDIA_VALUE_AUDIO_OPUS_BITSTREAM = 0x01 21 | export const MOQ_EXT_HEADER_TYPE_MOQMI_MEDIA_VALUE_AUDIO_TEXT_UTF8 = 0x02 22 | export const MOQ_EXT_HEADER_TYPE_MOQMI_MEDIA_VALUE_AUDIO_AUDIO_AACLC_MPEG4 = 0x03 23 | 24 | 25 | export class MIPayloadTypeEnum { 26 | static #_NONE = 0xff; 27 | static #_VideoH264AVCCWCP = 0x0; 28 | static #_AudioOpusWCP = 0x1; 29 | static #_AudioAACMP4LCWCP = 0x3; 30 | static #_RAW = 0x2; 31 | 32 | static get None() { return this.#_NONE; } 33 | static get VideoH264AVCCWCP() { return this.#_VideoH264AVCCWCP; } 34 | static get AudioOpusWCP() { return this.#_AudioOpusWCP; } 35 | static get AudioAACMP4LCWCP() { return this.#_AudioAACMP4LCWCP; } 36 | static get RAWData() { return this.#_RAW; } 37 | } 38 | 39 | export class MIPackager { 40 | constructor () { 41 | this.type = MIPayloadTypeEnum.None // Commom 42 | this.seqId = -1 // Commom 43 | this.pts = undefined // Commom 44 | this.timebase = undefined // Common 45 | this.duration = undefined // Common 46 | this.wallclock = undefined // Common 47 | this.data = null // Common 48 | 49 | this.dts = undefined // VideoH264AVCCWCP 50 | this.metadata = null // VideoH264AVCCWCP 51 | 52 | this.sampleFreq = undefined // AudioOpusWCP & AudioAACMP4LCWCP 53 | this.numChannels = undefined // AudioOpusWCP & AudioAACMP4LCWCP 54 | 55 | this.isDelta = undefined // Internal (only use in set data) 56 | this.eof = false // Internal 57 | 58 | this.READ_BLOCK_SIZE = 1024 59 | } 60 | 61 | SetData (type, seqId, pts, timebase, duration, wallclock, data, dts, metadata, sampleFreq, numChannels, isDelta) { 62 | this.type = type 63 | this.seqId = seqId 64 | this.pts = pts 65 | this.timebase = timebase 66 | this.duration = duration 67 | this.wallclock = wallclock 68 | this.data = data 69 | this.dts = dts 70 | this.metadata = metadata 71 | this.sampleFreq = sampleFreq 72 | this.numChannels = numChannels 73 | 74 | this.isDelta = isDelta 75 | } 76 | 77 | async ParseData(readerStream, extensionHeaders, payloadLength) { 78 | this.parseExtensionHeaders(extensionHeaders) 79 | 80 | // Read payload with length 81 | if (typeof payloadLength !== 'undefined') { 82 | const ret = await buffRead(readerStream, payloadLength) 83 | this.data = ret.buff 84 | this.eof = ret.eof 85 | } else 
{ 86 | const buff = await readUntilEof(readerStream, this.READ_BLOCK_SIZE) 87 | this.data = buff 88 | this.eof = true 89 | } 90 | } 91 | 92 | parseExtensionHeaders(extensionHeaders) { 93 | const extTypeRead = [] 94 | for (let i = 0; i < extensionHeaders.length; i++) { 95 | const extHeader = extensionHeaders[i] 96 | extTypeRead.push(extHeader.name) 97 | 98 | if (extHeader.name == MOQ_EXT_HEADER_TYPE_MOQMI_MEDIA_TYPE) { 99 | this.type = extHeader.val 100 | } 101 | if (extHeader.name === MOQ_EXT_HEADER_TYPE_MOQMI_VIDEO_H264_IN_AVCC_METADATA) { 102 | let bytesRead = 0 103 | let r = varIntToNumbeFromBuffer(extHeader.val, bytesRead) 104 | bytesRead += r.byteLength 105 | this.seqId = r.num 106 | 107 | r = varIntToNumbeFromBuffer(extHeader.val, bytesRead) 108 | bytesRead += r.byteLength 109 | this.pts = r.num 110 | 111 | r = varIntToNumbeFromBuffer(extHeader.val, bytesRead) 112 | bytesRead += r.byteLength 113 | this.dts = r.num 114 | 115 | r = varIntToNumbeFromBuffer(extHeader.val, bytesRead) 116 | bytesRead += r.byteLength 117 | this.timebase = r.num 118 | 119 | r = varIntToNumbeFromBuffer(extHeader.val, bytesRead) 120 | bytesRead += r.byteLength 121 | this.duration = r.num 122 | 123 | r = varIntToNumbeFromBuffer(extHeader.val, bytesRead) 124 | bytesRead += r.byteLength 125 | this.wallclock = r.num 126 | } 127 | if (extHeader.name == MOQ_EXT_HEADER_TYPE_MOQMI_VIDEO_H264_IN_AVCC_EXTRADATA) { 128 | this.metadata = extHeader.val 129 | } 130 | if (extHeader.name === MOQ_EXT_HEADER_TYPE_MOQMI_AUDIO_OPUS_METADATA || extHeader.name === MOQ_EXT_HEADER_TYPE_MOQMI_AUDIO_AACLC_MPEG4_METADATA) { 131 | let bytesRead = 0 132 | let r = varIntToNumbeFromBuffer(extHeader.val, bytesRead) 133 | bytesRead += r.byteLength 134 | this.seqId = r.num 135 | 136 | r = varIntToNumbeFromBuffer(extHeader.val, bytesRead) 137 | bytesRead += r.byteLength 138 | this.pts = r.num 139 | 140 | r = varIntToNumbeFromBuffer(extHeader.val, bytesRead) 141 | bytesRead += r.byteLength 142 | this.timebase = r.num 143 | 144 | r = varIntToNumbeFromBuffer(extHeader.val, bytesRead) 145 | bytesRead += r.byteLength 146 | this.sampleFreq = r.num 147 | 148 | r = varIntToNumbeFromBuffer(extHeader.val, bytesRead) 149 | bytesRead += r.byteLength 150 | this.numChannels = r.num 151 | 152 | r = varIntToNumbeFromBuffer(extHeader.val, bytesRead) 153 | bytesRead += r.byteLength 154 | this.duration = r.num 155 | 156 | r = varIntToNumbeFromBuffer(extHeader.val, bytesRead) 157 | bytesRead += r.byteLength 158 | this.wallclock = r.num 159 | } 160 | if (extHeader.name === MOQ_EXT_HEADER_TYPE_MOQMI_TEXT_UTF8_METADATA) { 161 | const r = varIntToNumbeFromBuffer(extHeader.val, 0) 162 | this.seqId = r.num 163 | } 164 | } 165 | 166 | if (this.name === MIPayloadTypeEnum.RAWData) { 167 | if (!(extTypeRead.includes(MOQ_EXT_HEADER_TYPE_MOQMI_TEXT_UTF8_METADATA))) { 168 | throw new Error(`Type RAWData needs MOQ_EXT_HEADER_TYPE_MOQMI_TEXT_UTF8_METADATA`) 169 | } 170 | } 171 | if (this.name === MIPayloadTypeEnum.VideoH264AVCCWCP) { 172 | if (!(extTypeRead.includes(MOQ_EXT_HEADER_TYPE_MOQMI_VIDEO_H264_IN_AVCC_METADATA))) { 173 | throw new Error(`Type VideoH264AVCCWCP needs MOQ_EXT_HEADER_TYPE_MOQMI_VIDEO_H264_IN_AVCC_METADATA`) 174 | } 175 | } 176 | if (this.name === MIPayloadTypeEnum.AudioOpusWCP) { 177 | if (!(extTypeRead.includes(MOQ_EXT_HEADER_TYPE_MOQMI_AUDIO_OPUS_METADATA))) { 178 | throw new Error(`Type AudioOpusWCP needs MOQ_EXT_HEADER_TYPE_MOQMI_AUDIO_OPUS_METADATA`) 179 | } 180 | } 181 | if (this.name === MIPayloadTypeEnum.AudioAACMP4LCWCP) { 182 | if 
(!(extTypeRead.includes(MOQ_EXT_HEADER_TYPE_MOQMI_AUDIO_AACLC_MPEG4_METADATA))) { 183 | throw new Error(`Type AudioAACMP4LCWCP needs MOQ_EXT_HEADER_TYPE_MOQMI_AUDIO_AACLC_MPEG4_METADATA`) 184 | } 185 | } 186 | } 187 | 188 | GetData () { 189 | if (this.type == MIPayloadTypeEnum.VideoH264AVCCWCP) { 190 | return { 191 | type: this.type, 192 | seqId: this.seqId, 193 | pts: this.pts, 194 | dts: this.dts, 195 | timebase: this.timebase, 196 | duration: this.duration, 197 | wallclock: this.wallclock, 198 | metadata: this.metadata, 199 | data: this.data, 200 | } 201 | } else if (this.type == MIPayloadTypeEnum.AudioOpusWCP || this.type == MIPayloadTypeEnum.AudioAACMP4LCWCP) { 202 | return { 203 | type: this.type, 204 | seqId: this.seqId, 205 | pts: this.pts, 206 | timebase: this.timebase, 207 | sampleFreq: this.sampleFreq, 208 | numChannels: this.numChannels, 209 | duration: this.duration, 210 | wallclock: this.wallclock, 211 | data: this.data, 212 | } 213 | } else if (this.type == MIPayloadTypeEnum.RAWData) { 214 | return { 215 | type: this.type, 216 | seqId: this.seqId, 217 | data: this.data, 218 | } 219 | } else { 220 | return null 221 | } 222 | } 223 | 224 | GetDataStr () { 225 | const metadataSize = (this.metadata === undefined || this.metadata == null) ? 0 : this.metadata.byteLength 226 | const dataSize = (this.data === undefined || this.data == null) ? 0 : this.data.byteLength 227 | return `type: ${this.type} - seqId: ${this.seqId} - pts: ${this.pts} - duration: ${this.duration} - sampleFreq: ${this.sampleFreq} - numChannels: ${this.numChannels} - wallclock: ${this.wallclock} - metadataSize: ${metadataSize} - dataSize: ${dataSize}` 228 | } 229 | 230 | PayloadToBytes() { 231 | if (this.type != MIPayloadTypeEnum.VideoH264AVCCWCP && this.type != MIPayloadTypeEnum.AudioOpusWCP && this.type != MIPayloadTypeEnum.AudioAACMP4LCWCP && this.type != MIPayloadTypeEnum.RAWData) { 232 | throw new Error(`Payload type ${this.type} not supported`) 233 | } 234 | return this.data 235 | } 236 | 237 | ExtensionHeaders() { 238 | const kv_params = [] 239 | if (this.type == MIPayloadTypeEnum.VideoH264AVCCWCP) { 240 | kv_params.push(moqCreateKvPair(MOQ_EXT_HEADER_TYPE_MOQMI_MEDIA_TYPE, MOQ_EXT_HEADER_TYPE_MOQMI_MEDIA_VALUE_VIDEO_H264_IN_AVCC)) 241 | 242 | const h264AvccMetadataValue = []; 243 | h264AvccMetadataValue.push(numberToVarInt(this.seqId)) 244 | h264AvccMetadataValue.push(numberToVarInt(this.pts)) 245 | h264AvccMetadataValue.push(numberToVarInt(this.dts)) 246 | h264AvccMetadataValue.push(numberToVarInt(this.timebase)) 247 | h264AvccMetadataValue.push(numberToVarInt(this.duration)) 248 | h264AvccMetadataValue.push(numberToVarInt(this.wallclock)) 249 | kv_params.push(moqCreateKvPair(MOQ_EXT_HEADER_TYPE_MOQMI_VIDEO_H264_IN_AVCC_METADATA, concatBuffer(h264AvccMetadataValue))) 250 | 251 | if (this.metadata != undefined && this.metadata != null && this.metadata.byteLength > 0) { 252 | kv_params.push(moqCreateKvPair(MOQ_EXT_HEADER_TYPE_MOQMI_VIDEO_H264_IN_AVCC_EXTRADATA, this.metadata )) 253 | } 254 | } else if (this.type == MIPayloadTypeEnum.AudioOpusWCP) { 255 | kv_params.push(moqCreateKvPair(MOQ_EXT_HEADER_TYPE_MOQMI_MEDIA_TYPE, MOQ_EXT_HEADER_TYPE_MOQMI_MEDIA_VALUE_AUDIO_OPUS_BITSTREAM)) 256 | 257 | const opusMetadataValue = []; 258 | opusMetadataValue.push(numberToVarInt(this.seqId)) 259 | opusMetadataValue.push(numberToVarInt(this.pts)) 260 | opusMetadataValue.push(numberToVarInt(this.timebase)) 261 | opusMetadataValue.push(numberToVarInt(this.sampleFreq)) 262 | 
opusMetadataValue.push(numberToVarInt(this.numChannels)) 263 | opusMetadataValue.push(numberToVarInt(this.duration)) 264 | opusMetadataValue.push(numberToVarInt(this.wallclock)) 265 | kv_params.push(moqCreateKvPair(MOQ_EXT_HEADER_TYPE_MOQMI_AUDIO_OPUS_METADATA, concatBuffer(opusMetadataValue))) 266 | } else if (this.type == MIPayloadTypeEnum.AudioAACMP4LCWCP) { 267 | kv_params.push(moqCreateKvPair(MOQ_EXT_HEADER_TYPE_MOQMI_MEDIA_TYPE, MOQ_EXT_HEADER_TYPE_MOQMI_MEDIA_VALUE_AUDIO_AUDIO_AACLC_MPEG4)) 268 | 269 | const aacMetadataValue = []; 270 | aacMetadataValue.push(numberToVarInt(this.seqId)) 271 | aacMetadataValue.push(numberToVarInt(this.pts)) 272 | aacMetadataValue.push(numberToVarInt(this.timebase)) 273 | aacMetadataValue.push(numberToVarInt(this.sampleFreq)) 274 | aacMetadataValue.push(numberToVarInt(this.numChannels)) 275 | aacMetadataValue.push(numberToVarInt(this.duration)) 276 | aacMetadataValue.push(numberToVarInt(this.wallclock)) 277 | kv_params.push(moqCreateKvPair(MOQ_EXT_HEADER_TYPE_MOQMI_AUDIO_AACLC_MPEG4_METADATA, concatBuffer(aacMetadataValue))) 278 | } else if (this.type == MIPayloadTypeEnum.RAWData) { 279 | kv_params.push(moqCreateKvPair(MOQ_EXT_HEADER_TYPE_MOQMI_MEDIA_TYPE, MOQ_EXT_HEADER_TYPE_MOQMI_MEDIA_VALUE_AUDIO_TEXT_UTF8)) 280 | kv_params.push(moqCreateKvPair(MOQ_EXT_HEADER_TYPE_MOQMI_TEXT_UTF8_METADATA, numberToVarInt(this.seqId))) 281 | } else { 282 | throw new Error(`Payload type ${this.type} not supported`) 283 | } 284 | return kv_params 285 | } 286 | 287 | IsEof() { 288 | return this.eof 289 | } 290 | 291 | IsDelta() { 292 | return this.isDelta // Only valid from setData 293 | } 294 | 295 | getMediaType() { 296 | if (this.type == MIPayloadTypeEnum.VideoH264AVCCWCP) { 297 | return "video"; 298 | } else if (this.type == MIPayloadTypeEnum.AudioOpusWCP || this.type == MIPayloadTypeEnum.AudioAACMP4LCWCP) { 299 | return "audio"; 300 | } else { 301 | return "data"; 302 | } 303 | } 304 | } 305 | 306 | export function MIgetTrackName(trackPrefix, isAudio) { 307 | let suffix = "" 308 | if (isAudio) { 309 | suffix = "audio0"; 310 | } else { 311 | suffix = "video0"; 312 | } 313 | return `${trackPrefix}${suffix}`; 314 | } 315 | -------------------------------------------------------------------------------- /receiver/moq_demuxer_downloader.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree. 
6 | */ 7 | 8 | import { sendMessageToMain, StateEnum, convertTimestamp } from '../utils/utils.js' 9 | import { moqCreate, moqClose, moqCreateControlStream, moqSendClientSetup, moqParseObjectHeader, moqSendSubscribe, moqSendUnSubscribe, MOQ_MESSAGE_SUBSCRIBE_DONE, moqParseMsg, MOQ_MESSAGE_SERVER_SETUP, MOQ_MESSAGE_SUBSCRIBE_OK, MOQ_MESSAGE_SUBSCRIBE_ERROR, isMoqObjectStreamHeaderType, moqParseObjectFromSubgroupHeader, MOQ_OBJ_STATUS_END_OF_GROUP, MOQ_OBJ_STATUS_END_OF_TRACK_AND_GROUP, MOQ_OBJ_STATUS_END_OF_SUBGROUP, getFullTrackName, isMoqObjectDatagramType, moqDecodeDatagramType} from '../utils/moqt.js' 10 | import { MIPackager, MIPayloadTypeEnum} from '../packager/mi_packager.js' 11 | import { ContainsNALUSliceIDR , DEFAULT_AVCC_HEADER_LENGTH } from "../utils/media/avcc_parser.js" 12 | 13 | const WORKER_PREFIX = '[MOQ-DOWNLOADER]' 14 | 15 | // Show verbose exceptions 16 | const MOQT_DEV_MODE = true 17 | 18 | const SLEEP_SUBSCRIBE_ERROR_MS = 2000 19 | 20 | let workerState = StateEnum.Created 21 | 22 | let urlHostPortEp = '' 23 | let isSendingStats = false 24 | let currentClientRequestId = undefined 25 | let currentTrackAlias = 0 26 | let certificateHash = null 27 | let tracks = {} // We add subscribeId and trackAlias 28 | // Example 29 | /* moqTracks: { 30 | "audio": { 31 | namespace: ["vc"], 32 | name: "audio0", 33 | authInfo: "secret" 34 | }, 35 | "video": { 36 | namespace: ["vc"], 37 | name: "video0", 38 | authInfo: "secret" 39 | } 40 | } */ 41 | 42 | // Timebases 43 | let systemVideoTimebase = 1000000 // WebCodecs default = 1us 44 | let systemAudioTimebase = 1000000 // WebCodecs default = 1us 45 | 46 | // MOQT data 47 | const moqt = moqCreate() 48 | 49 | function reportStats () { 50 | if (isSendingStats) { 51 | sendMessageToMain(WORKER_PREFIX, 'downloaderstats', { clkms: Date.now() }) 52 | } 53 | } 54 | 55 | // Main listener 56 | self.addEventListener('message', async function (e) { 57 | if ((workerState === StateEnum.Created) || (workerState === StateEnum.Stopped)) { 58 | workerState = StateEnum.Instantiated 59 | } 60 | 61 | if (workerState === StateEnum.Stopped) { 62 | sendMessageToMain(WORKER_PREFIX, 'info', 'downloader is stopped it does not accept messages') 63 | return 64 | } 65 | 66 | const type = e.data.type 67 | if (type === 'stop') { 68 | workerState = StateEnum.Stopped 69 | 70 | // Abort and wait for all inflight requests 71 | try { 72 | await unSubscribeTracks(moqt) 73 | sendMessageToMain(WORKER_PREFIX, 'info', 'Unsubscribed from all tracks, closing MOQT') 74 | await moqClose(moqt) 75 | } catch (err) { 76 | if (MOQT_DEV_MODE) {throw err} 77 | // Expected to finish some promises with abort error 78 | // The abort "errors" are already sent to main "thead" by sendMessageToMain inside the promise 79 | sendMessageToMain(WORKER_PREFIX, 'error', `Errors closing (some could be ok): ${err}`) 80 | } 81 | } else if (type === 'downloadersendini') { 82 | if (workerState !== StateEnum.Instantiated) { 83 | sendMessageToMain(WORKER_PREFIX, 'error', 'received ini message in wrong state. 
State: ' + workerState) 84 | return 85 | } 86 | if (!('urlHostPort' in e.data.downloaderConfig) || !('urlPath' in e.data.downloaderConfig)) { 87 | sendMessageToMain(WORKER_PREFIX, 'error', 'We need host, streamId to start playback') 88 | return 89 | } 90 | 91 | if ('urlHostPort' in e.data.downloaderConfig) { 92 | urlHostPortEp = e.data.downloaderConfig.urlHostPort 93 | } 94 | if ('isSendingStats' in e.data.downloaderConfig) { 95 | isSendingStats = e.data.downloaderConfig.isSendingStats 96 | } 97 | if ('moqTracks' in e.data.downloaderConfig) { 98 | tracks = e.data.downloaderConfig.moqTracks 99 | } 100 | if ('certificateHash' in e.data.downloaderConfig) { 101 | certificateHash = e.data.downloaderConfig.certificateHash 102 | } 103 | if ('systemVideoTimebase' in e.data.downloaderConfig) { 104 | systemVideoTimebase = e.data.downloaderConfig.systemVideoTimebase 105 | } 106 | if ('systemAudioTimebase' in e.data.downloaderConfig) { 107 | systemAudioTimebase = e.data.downloaderConfig.systemAudioTimebase 108 | } 109 | 110 | const errTrackStr = checkTrackData() 111 | if (errTrackStr != undefined) { 112 | sendMessageToMain(WORKER_PREFIX, 'error', errTrackStr) 113 | return 114 | } 115 | 116 | try { 117 | await moqClose(moqt) 118 | 119 | // WT needs https to establish connection 120 | const url = new URL(urlHostPortEp) 121 | // Replace protocol 122 | url.protocol = 'https' 123 | 124 | // Ini WT 125 | let options = {} 126 | if (certificateHash != undefined && certificateHash != null) { 127 | options = { serverCertificateHashes: [{ algorithm: 'sha-256', value: certificateHash}]} 128 | } 129 | moqt.wt = new WebTransport(url.href, options) 130 | moqt.wt.closed 131 | .then(() => { 132 | sendMessageToMain(WORKER_PREFIX, 'info', 'WT closed transport session') 133 | }) 134 | .catch(err => { 135 | if (MOQT_DEV_MODE) {throw err} 136 | sendMessageToMain(WORKER_PREFIX, 'error', `WT error, closed transport. Err: ${err}`) 137 | }) 138 | 139 | await moqt.wt.ready 140 | await moqCreateControlStream(moqt) 141 | 142 | requestIDsReset() 143 | 144 | await moqCreateSubscriberSession(moqt) 145 | 146 | sendMessageToMain(WORKER_PREFIX, 'info', 'MOQ Initialized') 147 | workerState = StateEnum.Running 148 | 149 | // We need independent async functions to receive streams and datagrams, something like await Promise.any([wtReadableStream.read(), wtDataGramReader.read()]) does NOT work 150 | moqReceiveObjects(moqt) 151 | } catch (err) { 152 | if (MOQT_DEV_MODE) {throw err} 153 | sendMessageToMain(WORKER_PREFIX, 'error', `Initializing MOQ. Err: ${err}`) 154 | } 155 | } 156 | }) 157 | 158 | async function moqReceiveObjects(moqt) { 159 | if (workerState === StateEnum.Stopped) { 160 | return 161 | } 162 | if (moqt.wt === null) { 163 | sendMessageToMain(WORKER_PREFIX, 'error', 'we can not start downloading streams because WT is not initialized') 164 | return 165 | } 166 | 167 | try { 168 | // NO await on purpose! 169 | moqReceiveStreamObjects(moqt) 170 | // NO await on purpose! 171 | moqReceiveDatagramObjects(moqt) 172 | } catch(err) { 173 | if (MOQT_DEV_MODE) {throw err} 174 | sendMessageToMain(WORKER_PREFIX, 'dropped data', { clkms: Date.now(), seqId: -1, msg: `Dropped stream because WT error: ${err}` }) 175 | sendMessageToMain(WORKER_PREFIX, 'error', `WT request. 
Err: ${JSON.stringify(err)}`) 176 | } 177 | } 178 | 179 | async function moqReceiveStreamObjects (moqt) { 180 | // Get stream 181 | const incomingStream = moqt.wt.incomingUnidirectionalStreams 182 | const wtReadableStream = incomingStream.getReader() 183 | 184 | while (workerState !== StateEnum.Stopped) { 185 | const stream = await wtReadableStream.read() 186 | 187 | if (!stream.done) { 188 | sendMessageToMain(WORKER_PREFIX, 'debug', 'New QUIC stream') 189 | 190 | const moqStreamsObjHeader = await moqParseObjectHeader(stream.value) 191 | if (isMoqObjectStreamHeaderType(moqStreamsObjHeader.type)) { 192 | sendMessageToMain(WORKER_PREFIX, 'debug', `Received object header subgroup ${JSON.stringify(moqStreamsObjHeader)}`) 193 | // NO await on purpose! 194 | moqReceiveMultiObjectStream(stream.value, moqStreamsObjHeader.type) 195 | } else { 196 | sendMessageToMain(WORKER_PREFIX, 'error', `Unsupported stream type for sterams ${moqStreamsObjHeader.type}`) 197 | } 198 | } 199 | } 200 | sendMessageToMain(WORKER_PREFIX, 'info', 'Exited receive objects loop') 201 | } 202 | 203 | async function moqReceiveMultiObjectStream(readerStream, type) { 204 | let isEOF = false 205 | let numObjRead = 0 206 | let objHeader = {} 207 | while (workerState !== StateEnum.Stopped && isEOF === false) { 208 | reportStats() 209 | try { 210 | objHeader = await moqParseObjectFromSubgroupHeader(readerStream, type) 211 | 212 | sendMessageToMain(WORKER_PREFIX, 'debug', `Received subgrp object header ${JSON.stringify(objHeader)}`); 213 | 214 | // Check if we received the end of the subgroup 215 | isEOF = ("status" in objHeader && (objHeader.status == MOQ_OBJ_STATUS_END_OF_GROUP || objHeader.status == MOQ_OBJ_STATUS_END_OF_TRACK_AND_GROUP || objHeader.status == MOQ_OBJ_STATUS_END_OF_SUBGROUP)) 216 | if (!isEOF && objHeader.payloadLength > 0) { 217 | isEOF = await readAndSendPayload(readerStream, objHeader.extensionHeaders, objHeader.payloadLength) 218 | sendMessageToMain(WORKER_PREFIX, 'debug', `Read & send upstream. 
isEOF: ${isEOF}`); 219 | } 220 | sendMessageToMain(WORKER_PREFIX, 'debug', `isEOF: ${isEOF}`); 221 | numObjRead++ 222 | } catch(err) { 223 | // We receive ERROR when we have a reader and the stream closes 224 | // TODO: Objects with single subgroup/group does NOT send MOQ_OBJ_STATUS_END_OF_GROUP (Bug?), we need to remove numObjRead 225 | if (numObjRead > 0 || err instanceof WebTransportError && err.message.includes("The session is closed")) { 226 | isEOF = true 227 | } else { 228 | throw err 229 | } 230 | } 231 | } 232 | sendMessageToMain(WORKER_PREFIX, 'debug', 'Exited from subgroup reader loop') 233 | } 234 | 235 | async function moqReceiveDatagramObjects (moqt) { 236 | if (moqt.datagramsReader != null) { 237 | throw new Error('Unexpected already initialized datagramsReader') 238 | } 239 | 240 | // Get datagrams 241 | moqt.datagramsReader = moqt.wt.datagrams.readable.getReader(); 242 | 243 | while (workerState !== StateEnum.Stopped) { 244 | const stream = await moqt.datagramsReader.read() 245 | 246 | if (!stream.done) { 247 | // Create a BYOT capable reader for the data by reading whole datagram 248 | const readableStream = new ReadableStream({ 249 | start(controller) { 250 | controller.enqueue(stream.value); 251 | controller.close(); 252 | }, 253 | type: "bytes", 254 | }); 255 | reportStats() 256 | 257 | const moqObjHeader = await moqParseObjectHeader(readableStream) 258 | sendMessageToMain(WORKER_PREFIX, 'debug', `Received object datagram header ${JSON.stringify(moqObjHeader)}`) 259 | 260 | if (!isMoqObjectDatagramType(moqObjHeader.type)) { 261 | throw new Error(`Received via datagram a non properly encoded object ${JSON.stringify(moqObjHeader)}`) 262 | } 263 | let length = undefined // Read until end of datagram 264 | if (moqDecodeDatagramType(moqObjHeader.type).isStatus) { 265 | length = 0 // This will NOT read payload but decode headers if present 266 | } 267 | await readAndSendPayload(readableStream, moqObjHeader.extensionHeaders, length) 268 | } 269 | } 270 | 271 | sendMessageToMain(WORKER_PREFIX, 'debug', 'Exited from datagrams loop') 272 | } 273 | 274 | async function readAndSendPayload(readerStream, extensionHeaders, length) { 275 | const packet = new MIPackager() 276 | await packet.ParseData(readerStream, extensionHeaders, length) 277 | const isEOF = packet.IsEof(); 278 | 279 | const chunkData = packet.GetData() 280 | if (chunkData == null || chunkData.type === undefined) { 281 | throw new Error(`Corrupted headers, we can NOT parse the data, headers: ${packet.GetDataStr()}`) 282 | } 283 | sendMessageToMain(WORKER_PREFIX, 'debug', `Decoded MOQT-MI: ${packet.GetDataStr()})`) 284 | 285 | let chunk 286 | let appMediaType 287 | if (chunkData.type == MIPayloadTypeEnum.AudioOpusWCP || chunkData.type == MIPayloadTypeEnum.AudioAACMP4LCWCP) { 288 | appMediaType = "audiochunk" 289 | const timestamp = convertTimestamp(chunkData.pts, chunkData.timebase, systemAudioTimebase); 290 | const duration = convertTimestamp(chunkData.duration, chunkData.timebase, systemAudioTimebase); 291 | chunk = new EncodedAudioChunk({ 292 | timestamp: timestamp, 293 | type: "key", 294 | data: chunkData.data, 295 | duration: duration 296 | }) 297 | } else if (chunkData.type == MIPayloadTypeEnum.VideoH264AVCCWCP) { 298 | appMediaType = "videochunk" 299 | // Find NALU SliceIDR to specify if this is key or delta 300 | // We could infer if this is IDR from MOQT, identifying if this is start of group, but this method is less error prone 301 | const isIdr = ContainsNALUSliceIDR(chunkData.data, 
DEFAULT_AVCC_HEADER_LENGTH) 302 | const timestamp = convertTimestamp(chunkData.pts, chunkData.timebase, systemVideoTimebase); 303 | const duration = convertTimestamp(chunkData.duration, chunkData.timebase, systemVideoTimebase); 304 | chunk = new EncodedVideoChunk({ 305 | timestamp: timestamp, 306 | type: isIdr ? "key": "delta", 307 | data: chunkData.data, 308 | duration: duration 309 | }) 310 | } else if (chunkData.type == MIPayloadTypeEnum.RAWData) { 311 | appMediaType = "data" 312 | chunk = chunkData.data 313 | } 314 | 315 | self.postMessage({ type: appMediaType, clkms: Date.now(), packagerType: chunkData.type, captureClkms: chunkData.wallclock, seqId: chunkData.seqId, chunk, metadata: chunkData.metadata, sampleFreq: chunkData.sampleFreq , numChannels: chunkData.numChannels }) 316 | 317 | return isEOF; 318 | } 319 | 320 | // MOQT 321 | 322 | async function moqCreateSubscriberSession (moqt) { 323 | await moqSendClientSetup(moqt.controlWriter) 324 | const moqMsg = await moqParseMsg(moqt.controlReader) 325 | if (moqMsg.type !== MOQ_MESSAGE_SERVER_SETUP) { 326 | throw new Error(`Expected MOQ_MESSAGE_SERVER_SETUP, received ${moqMsg.type}`) 327 | } 328 | const setupResponse = moqMsg.data 329 | sendMessageToMain(WORKER_PREFIX, 'info', `Received SETUP response: ${JSON.stringify(setupResponse)}`) 330 | 331 | // Send subscribe for tracks audio and video (loop until both done or error) 332 | let pending_subscribes = Object.entries(tracks) 333 | while (pending_subscribes.length > 0) { 334 | const [trackType, trackData] = pending_subscribes[0]; 335 | const reqId = getNextClientReqId() 336 | await moqSendSubscribe(moqt.controlWriter, reqId, trackData.namespace, trackData.name, trackData.authInfo) 337 | const moqMsg = await moqParseMsg(moqt.controlReader) 338 | if (moqMsg.type !== MOQ_MESSAGE_SUBSCRIBE_OK && moqMsg.type !== MOQ_MESSAGE_SUBSCRIBE_ERROR) { 339 | throw new Error(`Expected MOQ_MESSAGE_SUBSCRIBE_OK or MOQ_MESSAGE_SUBSCRIBE_ERROR, received ${moqMsg.type}`) 340 | } 341 | if (moqMsg.type === MOQ_MESSAGE_SUBSCRIBE_ERROR) { 342 | sendMessageToMain(WORKER_PREFIX, 'warning', `Received SUBSCRIBE_ERROR response for ${getFullTrackName(trackData.namespace, trackData.name)} (type: ${trackType}): ${JSON.stringify(moqMsg.data)}. 
waiting for ${SLEEP_SUBSCRIBE_ERROR_MS}ms and Retrying!!`) 343 | 344 | await new Promise(r => setTimeout(r, SLEEP_SUBSCRIBE_ERROR_MS)); 345 | } else { 346 | const subscribeResp = moqMsg.data 347 | if (subscribeResp.requestId !== reqId) { 348 | throw new Error(`Received subscribeId does NOT match with subscriptionId ${subscribeResp.reqId} != ${reqId}`) 349 | } 350 | sendMessageToMain(WORKER_PREFIX, 'info', `Received SUBSCRIBE_OK for ${getFullTrackName(trackData.namespace, trackData.name)}-(type: ${trackType}): ${JSON.stringify(subscribeResp)}`) 351 | trackData.trackAlias = getNextTrackAlias() 352 | 353 | pending_subscribes.shift() 354 | } 355 | } 356 | sendMessageToMain(WORKER_PREFIX, 'info', 'Finished subscription loop') 357 | } 358 | 359 | function checkTrackData () { 360 | if (Object.entries(tracks).length <= 0) { 361 | return 'Number of Track Ids to announce needs to be > 0' 362 | } 363 | 364 | for (const [, track] of Object.entries(tracks)) { 365 | if (!('namespace' in track) || (track.namespace.length <= 0) || !('name' in track) || !('authInfo' in track)) { 366 | return 'Track malformed, needs to contain namespace, name, and authInfo' 367 | } 368 | } 369 | return undefined; 370 | } 371 | 372 | async function unSubscribeTracks(moqt) { 373 | sendMessageToMain(WORKER_PREFIX, 'info', `Sending ${Object.entries(tracks).length} unsubscribes`) 374 | 375 | for (const trackData of Object.values(tracks)) { 376 | if ('subscribeId' in trackData) { 377 | try { 378 | await moqSendUnSubscribe(moqt.controlWriter, trackData.subscribeId) 379 | sendMessageToMain(WORKER_PREFIX, 'info', `Sent UnSubscribe for ${trackData.subscribeId}`) 380 | const moqMsg = await moqParseMsg(moqt.controlReader) 381 | if (moqMsg.type !== MOQ_MESSAGE_SUBSCRIBE_DONE) { 382 | throw new Error(`Expected MOQ_MESSAGE_SUBSCRIBE_DONE received ${moqMsg.type}`) 383 | } 384 | const subscribeDone = moqMsg.data 385 | sendMessageToMain(WORKER_PREFIX, 'info', `Received SubscribeDone for subscibeId: ${subscribeDone.subscribeId}: ${JSON.stringify(subscribeDone)}`) 386 | if (subscribeDone.subscribeId != trackData.subscribeId) { 387 | throw new Error(`Expected MOQ_MESSAGE_SUBSCRIBE_DONE for subscribeId: ${trackData.subscribeId}, received: ${subscribeDone.subscribeId}`) 388 | } 389 | } 390 | catch (err) { 391 | if (MOQT_DEV_MODE) {throw err} 392 | sendMessageToMain(WORKER_PREFIX, 'error', `on UnSubscribe. 
Err: ${err}`) 393 | } finally { 394 | delete trackData.subscribeId 395 | if ('trackAlias' in trackData) { 396 | delete trackData.trackAlias 397 | } 398 | } 399 | } 400 | } 401 | } 402 | 403 | // Requests IDs 404 | function requestIDsReset() { 405 | currentClientRequestId = undefined 406 | } 407 | 408 | function getNextClientReqId() { 409 | if (typeof currentClientRequestId == 'undefined') { 410 | currentClientRequestId = 0 411 | } else { 412 | currentClientRequestId = currentClientRequestId + 2 413 | } 414 | return currentClientRequestId 415 | } 416 | 417 | function getNextTrackAlias() { 418 | currentTrackAlias++ 419 | return currentTrackAlias 420 | } -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # moq-encoder-player 2 | 3 | This project provides a minimal implementation (inside the browser) of a live video and audio encoder and a video / audio player based on the [MOQT draft](https://datatracker.ietf.org/doc/draft-ietf-moq-transport/); media transport is based on [draft-cenzano-moq-media-interop](https://datatracker.ietf.org/doc/draft-cenzano-moq-media-interop/). The exact versions of the drafts implemented are shown in the UI of the encoder and the player. 4 | 5 | The goal of this code is to provide a minimal live platform implementation that helps with learning about low latency trade-offs and facilitates experimentation. 6 | 7 | It is NOT optimized for performance / production at all since the 1st goal is experimenting / learning. 8 | 9 | ![Main block diagram](./pics/basic-block-diagram.png) 10 | Fig1: Main block diagram 11 | 12 | For the server/relay side we have used [moxygen](https://github.com/facebookexperimental/moxygen). 13 | 14 | Note: You need to be careful and check that the protocol versions implemented by this code and moxygen match 15 | 16 | ## Packager 17 | 18 | It uses [draft-cenzano-moq-media-interop](https://datatracker.ietf.org/doc/draft-cenzano-moq-media-interop/) 19 | 20 | ## Encoder 21 | 22 | The encoder implements the MOQT publisher role. It is based on [Webcodecs](https://developer.mozilla.org/en-US/docs/Web/API/WebCodecs_API) and [AudioContext](https://developer.mozilla.org/en-US/docs/Web/API/AudioContext); see the block diagram in Fig3 23 | 24 | ![Encoder block diagram](./pics/encoder-block-diagram.png) 25 | Fig3: Encoder block diagram 26 | 27 | Note: We have used [WebTransport](https://www.w3.org/TR/webtransport/), so the underlying transport is QUIC (QUIC streams to be more accurate) 28 | 29 | ### Encoder - Config params 30 | 31 | Video encoding config: 32 | 33 | ```javascript 34 | // Video encoder config 35 | const videoEncoderConfig = { 36 | encoderConfig: { 37 | codec: 'avc1.42001e', // Baseline = 66, level 30 (see: https://en.wikipedia.org/wiki/Advanced_Video_Coding) 38 | width: 320, 39 | height: 180, 40 | bitrate: 1_000_000, // 1 Mbps 41 | framerate: 30, 42 | latencyMode: 'realtime', // Sends 1 chunk per frame 43 | }, 44 | encoderMaxQueueSize: 2, 45 | keyframeEvery: 60, 46 | }; 47 | ``` 48 | 49 | Audio encoder config: 50 | 51 | ```javascript 52 | // Audio encoder config 53 | const audioEncoderConfig = { 54 | encoderConfig: { 55 | codec: 'opus', // AAC NOT implemented YET (it is in their roadmap) 56 | sampleRate: 48000, // To fill later 57 | numberOfChannels: 1, // To fill later 58 | bitrate: 32000, 59 | opus: { // See https://www.w3.org/TR/webcodecs-opus-codec-registration/ 60 | frameDuration: 10000 // In us.
49 | Audio encoder config: 50 | 51 | ```javascript 52 | // Audio encoder config 53 | const audioEncoderConfig = { 54 | encoderConfig: { 55 | codec: 'opus', // AAC NOT implemented YET (it is in their roadmap) 56 | sampleRate: 48000, // To fill later 57 | numberOfChannels: 1, // To fill later 58 | bitrate: 32000, 59 | opus: { // See https://www.w3.org/TR/webcodecs-opus-codec-registration/ 60 | frameDuration: 10000 // In us. Lower latency than default = 20000 61 | } 62 | }, 63 | encoderMaxQueueSize: 10, 64 | }; 65 | ``` 66 | 67 | Muxer config: 68 | 69 | ```javascript 70 | const muxerSenderConfig = { 71 | urlHostPort: '', 72 | urlPath: '', 73 | 74 | moqTracks: { 75 | "audio": { 76 | id: 0, 77 | namespace: "vc", 78 | name: "aaa/audio", 79 | maxInFlightRequests: 100, 80 | isHipri: true, 81 | authInfo: "secret" 82 | }, 83 | "video": { 84 | id: 1, 85 | namespace: "vc", 86 | name: "aaa/video", 87 | maxInFlightRequests: 50, 88 | isHipri: false, 89 | authInfo: "secret" 90 | } 91 | }, 92 | } 93 | ``` 94 | 95 | ### src_encoder/index.html 96 | 97 | Main encoder webpage; it also glues all the encoder pieces together 98 | 99 | - When it receives an audio OR video raw frame from `a_capture` or `v_capture`: 100 | - Adds it into `TimeBufferChecker` (for latency tracking) 101 | - Sends it to the encoder 102 | 103 | - When it receives an audio OR video encoded chunk from `a_encoder` or `v_encoder`: 104 | - Gets the wall clock generation time of the 1st frame/sample in the chunk 105 | - Sends the chunk (augmented with wall clock, seqId, and metadata) to the muxer 106 | 107 | ### utils/TimeBufferChecker 108 | 109 | Stores the frame timestamps and the wall clock generation time of the raw generated frames. That allows us to keep track of each frame / chunk creation time (wall clock) 110 | 111 | ### capture/v_capture.js 112 | 113 | [WebWorker](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API) that waits for the next RGB or YUV video frame from the capture device, augments it by adding the wallclock, and sends it via post message to the video encoder 114 | 115 | ### capture/a_capture.js 116 | 117 | [WebWorker](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API) that receives the audio PCM frame (a few ms, ~10ms to 25ms of audio samples) from the capture device, augments it by adding the wallclock, and finally sends it (making a copy) via post message to the audio encoder 118 | 119 | ### encode/v_encoder.js 120 | 121 | [WebWorker](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API) that encodes RGB or YUV video frames into encoded video chunks 122 | 123 | - Receives the video RGB or YUV frame from `v_capture.js` 124 | - Adds the video frame to a queue, and keeps the queue smaller than `encodeQueueSize` (that helps when the encoder is overwhelmed; see the sketch below) 125 | - Specifies I frames based on the config var `keyframeEvery` 126 | - It delivers the encoded chunks to the next stage (muxer) 127 | 128 | Note: We configure `VideoEncoder` in `realtime` latency mode, so it delivers a chunk per video frame 129 |
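The per-frame logic described above can be sketched roughly like this (a simplified illustration, NOT the literal `v_encoder.js` code; `videoEncoder`, `encoderMaxQueueSize`, and `keyframeEvery` are assumed to come from the worker state / config):

```javascript
// Simplified sketch of the per-frame encode logic described above.
let frameSeq = 0;

function onVideoFrame(vFrame) {
  // Keep the encoder queue bounded: drop the frame when the encoder is overwhelmed
  if (videoEncoder.encodeQueueSize > encoderMaxQueueSize) {
    vFrame.close(); // dropped
    return;
  }
  // Request an I frame every keyframeEvery frames, delta frames otherwise
  const keyFrame = (frameSeq % keyframeEvery) === 0;
  videoEncoder.encode(vFrame, { keyFrame });
  vFrame.close(); // safe to close here: encode() keeps its own reference to the frame
  frameSeq++;
}
```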
130 | ### encode/a_encoder.js 131 | 132 | [WebWorker](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API) that encodes PCM audio frames (samples) into encoded audio chunks 133 | 134 | - Receives the audio PCM frame from `a_capture.js` 135 | - Adds the audio frame to a queue, and keeps the queue smaller than `encodeQueueSize` (that helps when the encoder is overwhelmed) 136 | - It delivers the encoded chunks to the next stage (muxer) 137 | 138 | Note: The `opus.frameDuration` setting helps keep the encoding latency low 139 | 140 | ### packager/mi_packager.js 141 | 142 | - Implements [draft-cenzano-moq-media-interop](https://datatracker.ietf.org/doc/draft-cenzano-moq-media-interop/) 143 | 144 | ### sender/moq_sender.js 145 | 146 | [WebWorker](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API) that implements MOQT and sends video and audio packets (see `mi_packager.js`) to the server / relay following MOQT and [draft-cenzano-moq-media-interop](https://datatracker.ietf.org/doc/draft-cenzano-moq-media-interop/) 147 | 148 | - Opens a WebTransport session against the relay 149 | - Implements the MOQT publisher handshake for 2 tracks (opening the control stream and announcing the track namespace) 150 | - **Creates a Unidirectional (encoder -> server) QUIC stream per every frame** (video and audio) 151 | - Receives audio and video chunks from `a_encoder.js` and `v_encoder.js` 152 | - It uses sendOrder to establish send priority. We use an incremental counter (so new is higher priority than old), and we also increase audio priority over video (by adding an offset) 153 | - It keeps the number of inflight requests always below the configured value `maxInFlightRequests` 154 | 155 | ## Player 156 | 157 | The player implements the MOQT subscriber role. It uses [Webcodecs](https://developer.mozilla.org/en-US/docs/Web/API/WebCodecs_API) and [AudioContext](https://developer.mozilla.org/en-US/docs/Web/API/AudioContext) / [Worklet](https://developer.mozilla.org/en-US/docs/Web/API/Worklet), [SharedArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer), and [Atomic](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Atomics) 158 | 159 | ![Player block diagram](./pics/player-block-diagram.png) 160 | Fig5: Player block diagram 161 | 162 | ### Audio video sync strategy 163 | 164 | To keep the audio and video in-sync the following strategy is applied: 165 | 166 | - The audio renderer (`audio_circular_buffer.js`) keeps track of the last played timestamp (delivered to the audio device by `source_buffer_worklet.js`) by using the PTS value in the currently playing `AudioData` frame and adding the duration of the number of samples delivered. This information is accessible from the player page via `timingInfo.renderer.currentAudioTS`, which also adds the hardware latency provided by `AudioContext`. 167 | - Every time we send new audio samples to the audio renderer, the video renderer `video_render_buffer` (which contains YUV/RGB frames + timestamps) gets called and: 168 | - Returns / paints the oldest closest (or equal) frame to the current audio ts (`timingInfo.renderer.currentAudioTS`) 169 | - Discards (frees) all frames older than the current ts (except the returned one) 170 | - It is worth saying that `AudioDecoder` does NOT track timestamps; it just uses the 1st one sent and at every decoded audio sample adds 1/fs (so sample time). That means that if we drop an audio packet those timestamps will be collapsed, creating A/V out of sync. To work around that problem we accumulate the duration of all the audio GAPs into `timestampOffset` (last playedTS - newTS, ideally = 0 if NO gaps), and we compensate the issued PTS by that. 171 |
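A minimal sketch of that video frame selection step (illustrative only; the real logic lives in `video_render_buffer.js`, and the frame list here is assumed to be sorted by ascending timestamp):

```javascript
// Illustrative sketch of the sync step described above: given the audio render head
// (timingInfo.renderer.currentAudioTS), return the newest video frame with
// timestamp <= currentAudioTS and free every older frame.
function getFrameForAudioTs(frames, currentAudioTS) {
  let chosen = null;
  while (frames.length > 0 && frames[0].timestamp <= currentAudioTS) {
    if (chosen !== null) {
      chosen.close(); // older than the newly selected frame -> free it
    }
    chosen = frames.shift();
  }
  return chosen; // null means no frame is old enough yet (video running ahead of audio)
}
```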
172 | ### receiver/moq_demuxer_downloader.js 173 | 174 | [WebWorker](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API) that implements MOQT and extracts video and audio packets (see `mi_packager.js`) from the server / relay following MOQT and [draft-cenzano-moq-media-interop](https://datatracker.ietf.org/doc/draft-cenzano-moq-media-interop/) 175 | 176 | - Opens a WebTransport session 177 | - Implements the MOQT subscriber handshake for 2 tracks (video and audio) 178 | - Waits for incoming unidirectional (Server -> Player) QUIC streams 179 | - For every received chunk (QUIC stream) we: 180 | - Demux it (see `mi_packager.js`) 181 | - Video: Create `EncodedVideoChunk` 182 | - Could be augmented with init metadata, wallclock, and seqId 183 | - Audio: Create `EncodedAudioChunk` 184 | - Could be augmented with init metadata, wallclock, and seqId 185 | 186 | ### utils/jitter_buffer.js 187 | 188 | Since we do not have any guarantee that QUIC streams are delivered in order, we need to order them before sending them to the decoder. This is the function of the deJitter. We create one instance per track, in this case one for audio and one for video (a simplified sketch is shown after the audio decoder section below) 189 | 190 | - Receives the chunks from `moq_demuxer_downloader.js` 191 | - Adds them into a sorted list, sorted by ascending `seqID` 192 | - When the list length (in ms) is > `bufferSizeMs` we deliver (remove) the 1st element in the list 193 | - It also keeps track of the delivered `seqID`, detecting: 194 | - Gaps / discontinuities 195 | - Total QUIC streams lost (not arrived in time) 196 | 197 | ### decode/audio_decoder.js 198 | 199 | [WebWorker](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API) that, when it receives an audio chunk, decodes it and sends the audio PCM samples to the audio renderer. 200 | `AudioDecoder` does NOT track timestamps on decoded data; it just uses the 1st one sent and at every decoded audio sample adds 1/fs (so sample time). That means that if we drop an audio packet those timestamps will be collapsed, creating A/V out of sync. 201 | To work around that problem we accumulate the duration of all the audio GAPs into `timestampOffset` and we publish that to allow other elements in the pipeline to have an accurate idea of the live head position 202 | 203 | - Receives the audio chunk 204 | - If a discontinuity is detected (reported by `jitter_buffer.js`) then calculates the lost time by: 205 | - `lostTime = currentChunkTimestamp - lastChunkSentTimestamp;` Where `lastChunkSentTimestamp = lastSentChunk.timestamp + lastSentChunk.duration` 206 | - `timestampOffset += lostTime` 207 | - Decodes the chunk and delivers the PCM data 208 |
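Those bullets amount to the following (a minimal sketch with illustrative names; only `lostTime` / `timestampOffset` and the formulas come from the description above):

```javascript
// Illustrative sketch of the audio gap compensation described above.
let timestampOffset = 0;      // accumulated duration (in us) of all audio gaps seen so far
let lastChunkSentTimestamp;   // end timestamp of the last chunk sent to the decoder

function onAudioChunk(chunk, isDiscontinuity) {
  if (isDiscontinuity && lastChunkSentTimestamp !== undefined) {
    // Lost time = where this chunk starts minus where the last sent chunk ended
    const lostTime = chunk.timestamp - lastChunkSentTimestamp;
    timestampOffset += lostTime;
  }
  lastChunkSentTimestamp = chunk.timestamp + chunk.duration;
  audioDecoder.decode(chunk); // decoded AudioData PTS is later compensated by timestampOffset
}
```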
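And going back to `utils/jitter_buffer.js` described above, its ordering logic can be sketched like this (simplified and illustrative; only the `seqId` ordering and the `bufferSizeMs` behaviour are taken from the description, everything else is made up for this example):

```javascript
// Illustrative sketch of the de-jittering: keep chunks sorted by seqId and only release
// the head of the list once the buffer holds more than bufferSizeMs worth of media.
class SimpleJitterBuffer {
  constructor(bufferSizeMs) {
    this.bufferSizeMs = bufferSizeMs;
    this.items = []; // { seqId, durationMs, chunk }, kept sorted by ascending seqId
  }

  push(item) {
    this.items.push(item);
    this.items.sort((a, b) => a.seqId - b.seqId);

    const totalMs = this.items.reduce((acc, it) => acc + it.durationMs, 0);
    if (totalMs > this.bufferSizeMs) {
      return this.items.shift(); // oldest seqId, ready to be sent to the decoder
    }
    return null; // keep buffering
  }
}
```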
209 | ### render/audio_circular_buffer.js 210 | 211 | Leverages [SharedArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer) and [Atomic](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Atomics) to implement the following mechanisms to share data in a "multi thread" environment: 212 | 213 | - Circular buffer (`sharedAudiobuffers`): Main buffer used to share audio PCM data from the decoder to the renderer `source_buffer_worklet.js` 214 | - State communication (`sharedStates`): Used to share states and data between the renderer `source_buffer_worklet.js` and the main thread 215 | 216 | ### render/source_buffer_worklet.js 217 | 218 | [AudioWorkletProcessor](https://developer.mozilla.org/en-US/docs/Web/API/AudioWorkletProcessor) that implements an audio source Worklet that sends audio samples to the renderer. 219 | 220 | - It reads new audio samples from the circular buffer 221 | - The samples are played at the sampling frequency rate 222 | - In case the buffer is exhausted (underrun) it will insert silence samples and notify the timing accordingly 223 | - Reports the last PTS rendered (this is used to sync video to the audio track, so to keep A/V in sync) 224 | 225 | ### decode/video_decoder.js 226 | 227 | [WebWorker](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API) that decodes video chunks and sends the decoded data (YUV or RGB) to the next stage (`video_render_buffer.js`) 228 | 229 | - Initializes the video decoder with the init segment 230 | - Sends video chunks to the video decoder 231 | - If it detects a discontinuity, it drops all video frames until the next IDR frame 232 | - Sends the decoded frame to `video_render_buffer.js` 233 | 234 | ### render/video_render_buffer.js 235 | 236 | Buffer that stores video decoded frames 237 | 238 | - Receives video decoded frames 239 | - Allows the retrieval of video decoded frames via timestamps 240 | - Automatically drops all video frames that are older than the currently requested one 241 | 242 | ### Latency measurement based on video data 243 | We can activate the option "Activate latency tracker (overlays data on video)" in the encoder (CPU consuming); this option will add the epoch ms clock of the encoder into the video frame as soon as it is received from the camera. It replaces the first video lines with that clock information. It is also encoded in a way that is resilient to video processing / encoding / decoding operations (see `./overlay_processor/overlay_encoder.js` and `./overlay_processor/overlay_decoder.js` in the code) 244 | 245 | The player will decode that info from every frame and when it is about to show that frame it will calculate the latency by: `latency_ms = now_in_ms - frame_capture_in_ms`. 246 | 247 | Note: This assumes the clocks of the encoder and the decoder are in-sync. Always true if you use the same computer to encode and decode 248 |
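To give a flavour of the idea (this is only an illustrative sketch, NOT the actual scheme implemented in `overlay_encoder.js` / `overlay_decoder.js`; the 48-bit size and the 4-pixel block width are assumptions made up for this example), writing one bit per block of pixels on the first video line is what makes the value robust to lossy encoding:

```javascript
// Illustrative sketch only: encode the capture wallclock (epoch ms) as one bit per
// horizontal block of pixels on the first video line, so it survives encode/decode.
const BITS = 48;    // enough bits for an epoch-ms value (assumption for this sketch)
const BLOCK_W = 4;  // pixels per bit, wide enough to survive compression (assumption)

function writeClockOverlay(rgbaData, frameWidth, clkms) {
  const bits = clkms.toString(2).padStart(BITS, '0');
  for (let i = 0; i < BITS && (i + 1) * BLOCK_W <= frameWidth; i++) {
    const value = bits[i] === '1' ? 255 : 0; // white = 1, black = 0
    for (let x = i * BLOCK_W; x < (i + 1) * BLOCK_W; x++) {
      const p = x * 4;
      rgbaData[p] = rgbaData[p + 1] = rgbaData[p + 2] = value; // R, G, B
      rgbaData[p + 3] = 255; // A
    }
  }
}
// Player side: sample the centre of each block, threshold it back to a bit, rebuild
// clkms, then latency_ms = Date.now() - clkms.
```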
249 | ### Legacy latency measurement 250 | 251 | - Every received audio and video chunk `timestamp` and `clkms` (wall clock) is added into the `latencyAudioChecker` and `latencyVideoChecker` queues (instances of `TimeBufferChecker`) 252 | - The `renderer.currentAudioTS` (current audio sample rendered) is used to get the closest wall clock time from `audioTimeChecker`. From there we sync video. 253 | - The UI displays: `Latency = Now - whenSampleWasGenerated` 254 | 255 | Note: The Encoder and Player clocks have to be in sync for this metric to be accurate. If you use the same computer as encoder & player then the metric should be pretty accurate 256 | 257 | ## Testing (encoder player served from localhost) 258 | 259 | - Clone this repo 260 | 261 | ```bash 262 | git clone git@github.com:facebookexperimental/moq-encoder-player.git 263 | ``` 264 | 265 | - Install Python (see this [guide](https://realpython.com/installing-python/)) 266 | 267 | - Run the local webserver by calling: 268 | 269 | ```bash 270 | ./start-http-server-cross-origin-isolated.py 271 | ``` 272 | 273 | Note: It is better to run the webserver using this script, but you can use any webserver you want to publish the `.` directory (repo directory) 274 | 275 | - Load the encoder webpage, url: http://localhost:8080/src-encoder/?local 276 | - Click "Start" 277 | - Load the player webpage, url: http://localhost:8080/src-player/?local 278 | - Copy `Track Name` from the encoder webpage and paste it into the Receiver demuxer `Track Name` 279 | - Click "Start" 280 | 281 | ENJOY YOUR POCing!!! :-) 282 | 283 | ![Encoder UI](./pics/encoder-page-ui.png) 284 | Fig6: Encoder UI 285 | 286 | ![Player UI](./pics/player-page-ui.png) 287 | Fig7: Player UI 288 | 289 | Note: This is experimentation code, we plan to evolve it quickly, so those screenshots could be a bit outdated 290 | 291 | ## Local testing (encoder-player served and moxygen served from localhost) 292 | 293 | - Create the key, certificate, and certificate fingerprint by running the following script 294 | ``` 295 | ./create_self_signed_certs.sh 296 | ``` 297 | Note: The trick here is that this script creates a self-signed certificate for localhost with ECDSA and a validity of 10 days (<15 days), which is the type Chrome will accept. 298 | 299 | - Follow the installation instructions of [moxygen](https://github.com/facebookexperimental/moxygen). 300 | - Remember to use the key and certificate created in the previous step to run moxygen 301 | 302 | - Clone this repo 303 | 304 | ```bash 305 | git clone git@github.com:facebookexperimental/moq-encoder-player.git 306 | ``` 307 | 308 | - Install Python (see this [guide](https://realpython.com/installing-python/)) 309 | 310 | - Run the local webserver by calling: 311 | 312 | ```bash 313 | ./start-http-server-cross-origin-isolated.py 314 | ``` 315 | 316 | Note: You need to use this script to **run the player** because it adds some needed headers (more info [here](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer#security_requirements)) 317 | 318 | - Load the encoder webpage, url: http://localhost:8080/src-encoder/?local 319 | - Click "Start" 320 | - Load the player webpage, url: http://localhost:8080/src-player/?local 321 | - Copy `Track Name` from the encoder webpage and paste it into the Receiver demuxer `Track Name` 322 | - Click "Start" 323 | 324 | ENJOY YOUR POCing!!! :-) 325 | 326 | You should see the same UI that is shown in the testing section above 327 | 328 | ## TODO 329 | - Encoder: Cancel the QUIC stream after some reasonable time (?) in live mode 330 | - Player: Do not use the main thread for anything except reporting 331 | - Player/server: Cancel the QUIC stream if it arrives after the jitter buffer 332 | - Accelerate playback if we are over the latency budget 333 | - Fix the dropped frames UI on the VC player (not properly separated between encoder & player, see TODO in the code) 334 | - Copy updates from the event player to the regular one 335 | - Better TS logging and video renderer 336 | - All: 337 | - Accept B frames (DTS) 338 | 339 | ## License 340 | 341 | moq-encoder-player is released under the [MIT License](https://github.com/facebookincubator/rush/blob/master/LICENSE). 342 | 343 | TODO: 344 | - Check token in all messages, not just when the encoder receives SUBSCRIBE -------------------------------------------------------------------------------- /sender/moq_sender.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree.
6 | */ 7 | 8 | import { sendMessageToMain, StateEnum} from '../utils/utils.js' 9 | import { moqCreate, moqClose, moqCloseWrttingStreams, moqParseMsg, moqCreateControlStream, moqSendSubscribeOk, moqSendSubscribeError, moqSendSubgroupHeader, moqSendObjectPerDatagramToWriter, moqSendClientSetup, moqSendUnAnnounce, MOQ_PUBLISHER_PRIORITY_BASE_DEFAULT, moqSendAnnounce, getTrackFullName, moqSendSubscribeDone, MOQ_SUBSCRIPTION_ERROR_INTERNAL, MOQ_MESSAGE_SUBSCRIBE, MOQ_MESSAGE_UNSUBSCRIBE, MOQ_SUBSCRIPTION_DONE_ENDED, MOQ_MESSAGE_SERVER_SETUP, MOQ_MESSAGE_ANNOUNCE_OK, MOQ_MESSAGE_ANNOUNCE_ERROR, MOQ_MAPPING_SUBGROUP_PER_GROUP, MOQ_MAPPING_OBJECT_PER_DATAGRAM, moqSendObjectSubgroupToWriter, moqSendObjectEndOfGroupToWriter, getAuthInfofromParameters } from '../utils/moqt.js' 10 | import { MIPackager, MIPayloadTypeEnum} from '../packager/mi_packager.js' 11 | 12 | const WORKER_PREFIX = '[MOQ-SENDER]' 13 | 14 | // Show verbose exceptions 15 | const MOQT_DEV_MODE = true 16 | 17 | let moqPublisherState = {} 18 | 19 | let workerState = StateEnum.Created 20 | 21 | let isSendingStats = true 22 | 23 | let keepAlivesEveryMs = 0 24 | let keepAliveInterval = null 25 | let keepAliveNameSpace = "" 26 | let certificateHash = null 27 | 28 | let lastObjectSentMs = 0 29 | 30 | let currentClientRequestId = undefined 31 | let currentTrackAlias = 0 32 | 33 | let tracks = {} 34 | // Example 35 | /* moqTracks: { 36 | "audio": { 37 | id: 0, 38 | maxInFlightRequests: 100, 39 | isHipri: true, 40 | authInfo: "secret", 41 | moqMapping: "ObjStream", 42 | }, 43 | "video": { 44 | id: 1, 45 | maxInFlightRequests: 50, 46 | isHipri: false, 47 | authInfo: "secret", 48 | moqMapping: "ObjStream", 49 | }, 50 | } */ 51 | 52 | // Inflight req abort signal 53 | const abortController = new AbortController() 54 | 55 | // MOQT data 56 | const moqt = moqCreate() 57 | 58 | self.addEventListener('message', async function (e) { 59 | if (workerState === StateEnum.Created) { 60 | workerState = StateEnum.Instantiated 61 | } 62 | 63 | if (workerState === StateEnum.Stopped) { 64 | sendMessageToMain(WORKER_PREFIX, 'info', 'Muxer-send is stopped it does not accept messages') 65 | return 66 | } 67 | 68 | const type = e.data.type 69 | if (type === 'stop') { 70 | workerState = StateEnum.Stopped 71 | 72 | // Abort and wait for all inflight requests 73 | try { 74 | if (keepAliveInterval != null) { 75 | clearInterval(keepAliveInterval); 76 | keepAliveInterval = null 77 | } 78 | //TODO JOC finish abort controller 79 | abortController.abort() 80 | await moqCloseWrttingStreams(moqt) 81 | 82 | await sendSubscribeDone(moqt) 83 | await unAnnounceTracks(moqt) 84 | await moqClose(moqt) 85 | } catch (err) { 86 | if (MOQT_DEV_MODE) {throw err} 87 | // Expected to finish some promises with abort error 88 | // The abort "errors" are already sent to main "thead" by sendMessageToMain inside the promise 89 | sendMessageToMain(WORKER_PREFIX, 'info', `Aborting / closing streams while exiting. Err: ${err.message}`) 90 | } 91 | return 92 | } 93 | 94 | if (type === 'muxersendini') { 95 | if (workerState !== StateEnum.Instantiated) { 96 | sendMessageToMain(WORKER_PREFIX, 'error', 'received ini message in wrong state. 
State: ' + workerState) 97 | return 98 | } 99 | 100 | let urlHostPortEp = '' 101 | 102 | if ('urlHostPort' in e.data.muxerSenderConfig) { 103 | urlHostPortEp = e.data.muxerSenderConfig.urlHostPort 104 | } 105 | if ('isSendingStats' in e.data.muxerSenderConfig) { 106 | isSendingStats = e.data.muxerSenderConfig.isSendingStats 107 | } 108 | if ('moqTracks' in e.data.muxerSenderConfig) { 109 | tracks = e.data.muxerSenderConfig.moqTracks 110 | } 111 | if ('keepAlivesEveryMs' in e.data.muxerSenderConfig) { 112 | keepAlivesEveryMs = e.data.muxerSenderConfig.keepAlivesEveryMs 113 | } 114 | if ('certificateHash' in e.data.muxerSenderConfig) { 115 | certificateHash = e.data.muxerSenderConfig.certificateHash 116 | } 117 | 118 | if (urlHostPortEp === '') { 119 | sendMessageToMain(WORKER_PREFIX, 'error', 'Empty host port') 120 | return 121 | } 122 | 123 | const errTrackStr = checkTrackData(); 124 | if (errTrackStr != undefined) { 125 | sendMessageToMain(WORKER_PREFIX, 'error', errTrackStr) 126 | return 127 | } 128 | 129 | try { 130 | // Reset state 131 | moqResetState() 132 | await moqClose(moqt) 133 | 134 | // WT needs https to establish connection 135 | const url = new URL(urlHostPortEp) 136 | // Replace protocol 137 | url.protocol = 'https' 138 | 139 | // Ini WT 140 | let options = {} 141 | if (certificateHash != undefined && certificateHash != null) { 142 | options = { serverCertificateHashes: [{ algorithm: 'sha-256', value: certificateHash}]} 143 | } 144 | sendMessageToMain(WORKER_PREFIX, 'info', `WT initiating with options ${JSON.stringify(options)}`) 145 | 146 | moqt.wt = new WebTransport(url.href, options) 147 | moqt.wt.closed 148 | .then(() => { 149 | sendMessageToMain(WORKER_PREFIX, 'info', 'WT closed transport session') 150 | }) 151 | .catch(err => { 152 | if (MOQT_DEV_MODE) {throw err} 153 | sendMessageToMain(WORKER_PREFIX, 'error', `WT error, closed transport. Err: ${err}`) 154 | }) 155 | 156 | await moqt.wt.ready 157 | await moqCreateControlStream(moqt) 158 | 159 | // Reset request IDs 160 | requestIDsReset() 161 | 162 | await moqCreatePublisherSession(moqt) 163 | 164 | sendMessageToMain(WORKER_PREFIX, 'info', 'MOQ Initialized, waiting for subscriptions') 165 | workerState = StateEnum.Running 166 | 167 | startLoopSubscriptionsLoop(moqt.controlReader, moqt.controlWriter) 168 | .then(() => { 169 | sendMessageToMain(WORKER_PREFIX, 'info', 'Exited receiving subscription loop in control stream') 170 | }) 171 | .catch(err => { 172 | if (MOQT_DEV_MODE) {throw err} 173 | if (workerState !== StateEnum.Stopped) { 174 | sendMessageToMain(WORKER_PREFIX, 'error', `Error in the subscription loop in control stream. Err: ${JSON.stringify(err)}`) 175 | } else { 176 | sendMessageToMain(WORKER_PREFIX, 'info', `Exited receiving subscription loop in control stream. Err: ${JSON.stringify(err)}`) 177 | } 178 | }) 179 | 180 | if (keepAlivesEveryMs > 0) { 181 | keepAliveNameSpace = Math.floor(Math.random() * 10000000) + "-keepAlive" 182 | sendMessageToMain(WORKER_PREFIX, 'info', `Starting keep alive every ${keepAlivesEveryMs}ms, ns: ${keepAliveNameSpace}`) 183 | keepAliveInterval = setInterval(sendKeepAlive, keepAlivesEveryMs, moqt.controlWriter); 184 | } 185 | } catch (err) { 186 | if (MOQT_DEV_MODE) {throw err} 187 | sendMessageToMain(WORKER_PREFIX, 'error', `Initializing MOQ. 
Err: ${JSON.stringify(err)}`) 188 | } 189 | 190 | return 191 | } 192 | 193 | if (workerState !== StateEnum.Running) { 194 | sendMessageToMain(WORKER_PREFIX, 'dropped', { clkms: Date.now(), seqId: e.data.seqId, ts: e.data.chunk.timestamp, msg: 'Dropped chunk because transport is NOT open yet' }) 195 | return 196 | } 197 | 198 | if (!(type in tracks)) { 199 | sendMessageToMain(WORKER_PREFIX, 'error', `Invalid message received ${type} is NOT in tracks ${JSON.stringify(tracks)}`) 200 | return 201 | } 202 | 203 | if (!('subscribers' in tracks[type]) || (tracks[type].subscribers.length <= 0)) { 204 | sendMessageToMain(WORKER_PREFIX, 'dropped', { clkms: Date.now(), seqId: e.data.seqId, ts: e.data.chunk.timestamp, msg: `Dropped chunk because there is NO subscribers for track ${type}` }) 205 | return 206 | } 207 | 208 | // Send one object per every subscriber 209 | // Relay needs to aggregate subscriptions to avoid overload pub -> relay link 210 | const firstFrameClkms = (e.data.firstFrameClkms === undefined || e.data.firstFrameClkms < 0) ? 0 : e.data.firstFrameClkms 211 | const compensatedTs = (e.data.compensatedTs === undefined || e.data.compensatedTs < 0) ? 0 : e.data.compensatedTs 212 | const estimatedDuration = (e.data.estimatedDuration === undefined || e.data.estimatedDuration < 0) ? e.data.chunk.duration : e.data.estimatedDuration 213 | const seqId = (e.data.seqId === undefined) ? 0 : e.data.seqId 214 | const chunkData = { mediaType: type, firstFrameClkms, compensatedTs, estimatedDuration, seqId, chunk: e.data.chunk, metadata: e.data.metadata, timebase: e.data.timebase, sampleFreq: e.data.sampleFreq, numChannels: e.data.numChannels, codec: e.data.codec, newSubgroupEvery: tracks[type].newSubgroupEvery} 215 | const moqMapping = (e.data.moqMapping === undefined) ? tracks[type].moqMapping : e.data.moqMapping 216 | 217 | let i = 0 218 | while (i < tracks[type].subscribers.length) { 219 | const trackAlias = tracks[type].subscribers[i].trackAlias 220 | 221 | sendChunkToTransport(chunkData, trackAlias, getNumInflightRequestByType(moqt, type), tracks[type].maxInFlightRequests, moqMapping) 222 | .then(val => { 223 | if (val !== undefined && val.dropped === true) { 224 | sendMessageToMain(WORKER_PREFIX, 'dropped', { clkms: Date.now(), seqId, mediaType: type, ts: chunkData.timestamp, msg: val.message }) 225 | } else { 226 | sendMessageToMain(WORKER_PREFIX, 'debug', `SENT CHUNK ${type} - seqId: ${seqId}, metadataSize: ${(chunkData.metadata != undefined) ? chunkData.metadata.byteLength : 0} for trackAlias: ${trackAlias}`) 227 | } 228 | }) 229 | .catch(err => { 230 | if (MOQT_DEV_MODE) {throw err} 231 | sendMessageToMain(WORKER_PREFIX, 'dropped', { clkms: Date.now(), seqId, mediaType: chunkData.mediaType, ts: chunkData.timestamp, msg: err.message }) 232 | sendMessageToMain(WORKER_PREFIX, 'error', `error sending chunk. For trackAlias: ${trackAlias}. 
Err: ${err.message}`) 233 | }) 234 | i++ 235 | } 236 | 237 | // Report stats 238 | if (isSendingStats) { 239 | self.postMessage({ type: 'sendstats', clkms: Date.now(), inFlightReq: getInflightRequestsReport(moqt) }) 240 | } 241 | }) 242 | 243 | async function sendKeepAlive(controlWriter) { 244 | if((Date.now() - lastObjectSentMs) > keepAlivesEveryMs) { 245 | await moqSendAnnounce(controlWriter, getNextClientReqId(), [keepAliveNameSpace], "") 246 | sendMessageToMain(WORKER_PREFIX, 'info', `Sent keep alive (announce) for ns: ${keepAliveNameSpace}`) 247 | } 248 | } 249 | 250 | async function startLoopSubscriptionsLoop(controlReader, controlWriter) { 251 | sendMessageToMain(WORKER_PREFIX, 'info', 'Started subscription loop') 252 | 253 | while (workerState === StateEnum.Running) { 254 | const moqMsg = await moqParseMsg(controlReader) 255 | sendMessageToMain(WORKER_PREFIX, 'debug', `Message received: ${JSON.stringify(moqMsg)}`) 256 | 257 | if (moqMsg.type === MOQ_MESSAGE_SUBSCRIBE) { 258 | const subscribe = moqMsg.data 259 | sendMessageToMain(WORKER_PREFIX, 'info', `Received SUBSCRIBE: ${JSON.stringify(subscribe)}`) 260 | const fullTrackName = getTrackFullName(subscribe.namespace, subscribe.trackName) 261 | const track = getTrackFromFullTrackName(fullTrackName) 262 | if (track == null) { 263 | sendMessageToMain(WORKER_PREFIX, 'error', `Invalid subscribe received ${fullTrackName} is NOT in tracks ${JSON.stringify(tracks)}`) 264 | continue 265 | } 266 | if (track.authInfo != undefined && track.authInfo != "") { 267 | const authInfo = getAuthInfofromParameters(subscribe.parameters) 268 | if (track.authInfo !== authInfo) { 269 | const errorCode = MOQ_SUBSCRIPTION_ERROR_INTERNAL 270 | const errReason = `Invalid subscribe authInfo ${authInfo}` 271 | sendMessageToMain(WORKER_PREFIX, 'error', `${errReason} does not match with ${JSON.stringify(tracks)}`) 272 | await moqSendSubscribeError(controlWriter, subscribe.requestId, errorCode, errReason) 273 | continue 274 | } 275 | } 276 | if (!('subscribers' in track)) { 277 | track.subscribers = [] 278 | } 279 | // Generate track alias 280 | subscribe.trackAlias = getNextTrackAlias() 281 | // Add subscription 282 | track.subscribers.push(subscribe) 283 | if (!('aggregatedNumSubscription' in track)) { 284 | track.aggregatedNumSubscription = 0 285 | } else { 286 | track.aggregatedNumSubscription++ 287 | } 288 | 289 | sendMessageToMain(WORKER_PREFIX, 'info', `New subscriber for track ${subscribe.trackAlias}(${subscribe.namespace}/${subscribe.trackName}). Current num subscriber: ${track.subscribers.length}. AuthInfo MATCHED!`) 290 | 291 | const lastSent = getLastSentFromTrackAlias(subscribe.trackAlias) 292 | await moqSendSubscribeOk(controlWriter, subscribe.requestId, subscribe.trackAlias, 0, lastSent.group, lastSent.obj, subscribe.parameters.authInfo) 293 | sendMessageToMain(WORKER_PREFIX, 'info', `Sent SUBSCRIBE_OK for requestId: ${subscribe.requestId}, last: ${lastSent.group}/${lastSent.obj}`) 294 | } 295 | else if (moqMsg.type === MOQ_MESSAGE_UNSUBSCRIBE) { 296 | const unsubscribe = moqMsg.data 297 | sendMessageToMain(WORKER_PREFIX, 'info', `Received UNSUBSCRIBE: ${JSON.stringify(unsubscribe)}`) 298 | const subscribe = removeSubscriberFromTrack(unsubscribe.requestId) 299 | if (subscribe != null) { 300 | sendMessageToMain(WORKER_PREFIX, 'info', `Removed subscriber for subscribeId: ${subscribe.requestId}`) 301 | } else { 302 | sendMessageToMain(WORKER_PREFIX, 'error', `Removing subscriber. 
Could not find subscribeId: ${subscribe.requestId}`) 303 | } 304 | } 305 | else if (moqMsg.type === MOQ_MESSAGE_ANNOUNCE_OK) { 306 | // This could be the keep alive answer 307 | } 308 | else { 309 | sendMessageToMain(WORKER_PREFIX, 'warning', `Unexpected message (type ${moqMsg.type} received, ignoring`) 310 | } 311 | } 312 | } 313 | 314 | async function sendChunkToTransport (chunkData, trackAlias, numInflightReqType, maxFlightRequests, moqMapping) { 315 | if (chunkData == null) { 316 | return { dropped: true, message: 'chunkData is null' } 317 | } 318 | if (numInflightReqType >= maxFlightRequests) { 319 | return { dropped: true, message: 'too many inflight requests' } 320 | } 321 | return createRequest(chunkData, trackAlias, moqMapping) 322 | } 323 | 324 | async function createRequest (chunkData, trackAlias, moqMapping) { 325 | let packet = null 326 | let isHiPri = false 327 | 328 | // Media MI packager 329 | packet = new MIPackager() 330 | if (chunkData.mediaType === "video") { 331 | const chunkDataBuffer = new Uint8Array(chunkData.chunk.byteLength) 332 | chunkData.chunk.copyTo(chunkDataBuffer) 333 | 334 | // Assuming NO B-Frames (pts === dts) 335 | const avcDecoderConfigurationRecord = ("metadata" in chunkData && chunkData.metadata != undefined && chunkData.metadata != null) ? chunkData.metadata : undefined; 336 | 337 | packet.SetData(MIPayloadTypeEnum.VideoH264AVCCWCP, chunkData.seqId, chunkData.compensatedTs, chunkData.timebase, chunkData.estimatedDuration, chunkData.firstFrameClkms, chunkDataBuffer, chunkData.compensatedTs, avcDecoderConfigurationRecord, undefined, undefined, chunkData.chunk.type === "delta") 338 | } else if (chunkData.mediaType == "audio") { 339 | const chunkDataBuffer = new Uint8Array(chunkData.chunk.byteLength) 340 | chunkData.chunk.copyTo(chunkDataBuffer) 341 | let payloadType = MIPayloadTypeEnum.None; 342 | if (chunkData.codec == "opus") { 343 | payloadType = MIPayloadTypeEnum.AudioOpusWCP; 344 | } else { 345 | payloadType = MIPayloadTypeEnum.AudioAACMP4LCWCP; 346 | } 347 | packet.SetData(payloadType, chunkData.seqId, chunkData.compensatedTs, chunkData.timebase, chunkData.estimatedDuration, chunkData.firstFrameClkms, chunkDataBuffer, undefined, undefined, chunkData.sampleFreq, chunkData.numChannels, chunkData.chunk.type === "delta") 348 | isHiPri = true 349 | } else if (chunkData.mediaType === 'data') { 350 | let isDelta = false; 351 | if (chunkData.newSubgroupEvery > 1) { 352 | isDelta = (chunkData.seqId % chunkData.newSubgroupEvery == 0) ? false : true; 353 | } 354 | packet.SetData(MIPayloadTypeEnum.RAWData, chunkData.seqId, undefined, undefined, undefined, undefined, chunkData.chunk, undefined, undefined, undefined, undefined, isDelta); 355 | } else { 356 | throw new Error(`Not supported media type ${chunkData.mediaType}`) 357 | } 358 | return createSendPromise(packet, trackAlias, moqMapping, isHiPri) 359 | } 360 | 361 | async function createSendPromise (packet, trackAlias, moqMapping, isHiPri) { 362 | let isFirstObject = false 363 | if (moqt.wt === null) { 364 | return { dropped: true, message: `Dropped Object for trackAlias: ${trackAlias}, because transport is NOT open. 
SeqId: ${packet.GetData().seqId}` } 365 | } 366 | 367 | // Check about if it is 1st object to be send in this track 368 | if (!(trackAlias in moqPublisherState)) { 369 | if (packet.IsDelta()) { 370 | const msg = `Dropped Object for trackAlias: ${trackAlias}, because first object can not be delta, data: ${packet.GetDataStr()}` 371 | sendMessageToMain(WORKER_PREFIX, 'debug', msg); 372 | return { dropped: true, message: msg } 373 | } 374 | moqPublisherState[trackAlias] = createTrackState() 375 | sendMessageToMain(WORKER_PREFIX, 'debug', `Created first object for trackAlias: ${trackAlias}`); 376 | isFirstObject = true 377 | } 378 | 379 | // Gets the stream priority 380 | const sendOrder = moqCalculateSendOrder(packet, isHiPri) 381 | 382 | // Group sequence, Using it as a joining point 383 | const prevGroupSeq = moqPublisherState[trackAlias].currentGroupSeq; 384 | const prevObjSeq = moqPublisherState[trackAlias].currentObjectSeq; 385 | if (!packet.IsDelta()) { 386 | if (!isFirstObject) { 387 | moqPublisherState[trackAlias].currentGroupSeq++ 388 | } 389 | sendMessageToMain(WORKER_PREFIX, 'debug', `Created new group for trackAlias: ${trackAlias}`); 390 | moqPublisherState[trackAlias].currentObjectSeq = 0 391 | } 392 | 393 | const groupSeq = moqPublisherState[trackAlias].currentGroupSeq 394 | const objSeq = moqPublisherState[trackAlias].currentObjectSeq 395 | const publisherPriority = (isHiPri) ? (MOQ_PUBLISHER_PRIORITY_BASE_DEFAULT - 1) : MOQ_PUBLISHER_PRIORITY_BASE_DEFAULT; 396 | 397 | const mediaType = packet.getMediaType(); 398 | 399 | if (moqMapping === MOQ_MAPPING_OBJECT_PER_DATAGRAM) { 400 | // Get datagram writer 401 | const datagramWriter = moqt.wt.datagrams.writable.getWriter(); 402 | 403 | sendMessageToMain(WORKER_PREFIX, 'debug', `Sending Object per datagram. trackAlias: ${trackAlias} ${groupSeq}/${objSeq}(${sendOrder}). 
Data: ${packet.GetDataStr()}, Ext Headers: ${JSON.stringify(packet.ExtensionHeaders())}`) 404 | 405 | moqSendObjectPerDatagramToWriter(datagramWriter, trackAlias, groupSeq, objSeq, publisherPriority, packet.PayloadToBytes(), packet.ExtensionHeaders(), true) 406 | 407 | datagramWriter.releaseLock() 408 | 409 | moqPublisherState[trackAlias].currentObjectSeq++; 410 | 411 | } else if (moqMapping === MOQ_MAPPING_SUBGROUP_PER_GROUP) { 412 | const currentStreamWriterId = createMultiObjectHash(mediaType, trackAlias, groupSeq); 413 | let currentUniStreamWritter = moqt.multiObjectWritter[currentStreamWriterId] 414 | // New group 415 | if (currentUniStreamWritter == undefined) { 416 | // Create new stream 417 | const uniStream = await moqt.wt.createUnidirectionalStream({ options: { sendOrder } }) 418 | currentUniStreamWritter = uniStream.getWriter() 419 | moqt.multiObjectWritter[currentStreamWriterId] = currentUniStreamWritter 420 | 421 | sendMessageToMain(WORKER_PREFIX, 'debug', `Created new subgroup (stream) ${currentStreamWriterId} with sendOrder: ${sendOrder}`); 422 | 423 | moqSendSubgroupHeader(currentUniStreamWritter, trackAlias, groupSeq, publisherPriority); 424 | } 425 | 426 | // Check and clean old streams 427 | if (prevGroupSeq != groupSeq) { 428 | const prevStreamWriterId = createMultiObjectHash(mediaType, trackAlias, prevGroupSeq); 429 | let prevUniStreamWritter = moqt.multiObjectWritter[prevStreamWriterId]; 430 | if (prevUniStreamWritter != undefined) { 431 | // Indicate end of group 432 | moqSendObjectEndOfGroupToWriter(prevUniStreamWritter, prevObjSeq + 1, [], true); 433 | sendMessageToMain(WORKER_PREFIX, 'debug', `Send group close for ${prevStreamWriterId} and ${prevGroupSeq}`); 434 | if (moqt.multiObjectWritter[prevStreamWriterId] != undefined) { 435 | delete moqt.multiObjectWritter[prevStreamWriterId] 436 | } 437 | const msg = `Closing stream ${prevStreamWriterId}` 438 | sendMessageToMain(WORKER_PREFIX, 'debug', msg); 439 | } 440 | } 441 | 442 | // Send object to current stream 443 | moqSendObjectSubgroupToWriter(currentUniStreamWritter, objSeq, packet.PayloadToBytes(), packet.ExtensionHeaders()) 444 | moqPublisherState[trackAlias].currentObjectSeq++; 445 | } 446 | else { 447 | throw new Error(`Unexpected MOQ - QUIC mapping, received ${moqMapping}`) 448 | } 449 | 450 | lastObjectSentMs = Date.now(); 451 | } 452 | 453 | function createMultiObjectHash(mediaType, trackAlias, groupId) { 454 | return `${mediaType}-${trackAlias}-${groupId}`; 455 | } 456 | 457 | // MOQT 458 | 459 | async function moqCreatePublisherSession (moqt) { 460 | // SETUP 461 | await moqSendClientSetup(moqt.controlWriter) 462 | 463 | const moqMsg = await moqParseMsg(moqt.controlReader) 464 | if (moqMsg.type !== MOQ_MESSAGE_SERVER_SETUP) { 465 | throw new Error(`Expected MOQ_MESSAGE_SERVER_SETUP, received ${moqMsg.type}`) 466 | } 467 | 468 | // ANNOUNCE 469 | const announcedNamespaces = [] 470 | for (const [trackType, trackData] of Object.entries(tracks)) { 471 | if (!announcedNamespaces.includes(trackData.namespace)) { 472 | const announceReqId = getNextClientReqId() 473 | await moqSendAnnounce(moqt.controlWriter, announceReqId, trackData.namespace, trackData.authInfo) 474 | const moqMsg = await moqParseMsg(moqt.controlReader) 475 | if (moqMsg.type !== MOQ_MESSAGE_ANNOUNCE_OK && moqMsg.type !== MOQ_MESSAGE_ANNOUNCE_ERROR) { 476 | throw new Error(`Expected MOQ_MESSAGE_ANNOUNCE_OK or MOQ_MESSAGE_ANNOUNCE_ERROR, received ${moqMsg.type}`) 477 | } 478 | if (moqMsg.type === MOQ_MESSAGE_ANNOUNCE_ERROR) { 479 | throw new 
Error(`Received ANNOUNCE_ERROR response for ${trackData.namespace}/${trackData.name}-(type: ${trackType}): ${JSON.stringify(moqMsg.data)}`) 480 | } 481 | const announceResp = moqMsg.data 482 | sendMessageToMain(WORKER_PREFIX, 'info', `Received ANNOUNCE_OK response for ${trackData.id}-${trackType}-${trackData.namespace}: ${JSON.stringify(announceResp)}`) 483 | if (announceReqId != announceResp.reqId) { 484 | throw new Error(`Received RequestID ${announceResp.reqId} does NOT match with the one sent in ANNOUNCE ${announceReqId}`) 485 | } 486 | announcedNamespaces.push(trackData.namespace) 487 | } 488 | } 489 | 490 | lastObjectSentMs = Date.now() 491 | } 492 | 493 | function checkTrackData () { 494 | if (Object.entries(tracks).length <= 0) { 495 | return 'Number of Track Ids to announce needs to be > 0' 496 | } 497 | for (const [, track] of Object.entries(tracks)) { 498 | if (!('namespace' in track) || (track.namespace.length <= 0) || !('name' in track) || !('authInfo' in track)) { 499 | return 'Track malformed, needs to contain namespace, name, and authInfo' 500 | } 501 | } 502 | return undefined; 503 | } 504 | 505 | function moqResetState () { 506 | moqPublisherState = {} 507 | } 508 | 509 | function moqCalculateSendOrder (packet, isHiPri) { 510 | // Prioritize: 511 | // Audio over video 512 | // New over old 513 | 514 | let ret = packet.GetData().seqId 515 | if (ret < 0) { 516 | // Send now 517 | ret = Number.MAX_SAFE_INTEGER 518 | } else { 519 | if (isHiPri) { 520 | ret += Math.floor(ret + Number.MAX_SAFE_INTEGER / 2) 521 | } 522 | } 523 | return ret 524 | } 525 | 526 | function createTrackState () { 527 | return { 528 | currentGroupSeq: 0, 529 | currentObjectSeq: 0, 530 | } 531 | } 532 | 533 | function getTrackFromFullTrackName (fullTrackName) { 534 | for (const [, trackData] of Object.entries(tracks)) { 535 | if (getTrackFullName(trackData.namespace, trackData.name) === fullTrackName) { 536 | return trackData 537 | } 538 | } 539 | return null 540 | } 541 | 542 | function removeSubscriberFromTrack (requestId) { 543 | for (const trackData of Object.values(tracks)) { 544 | if ("subscribers" in trackData && trackData.subscribers.length > 0) { 545 | let i = 0 546 | if ('subscribers' in trackData) { 547 | while (i < trackData.subscribers.length) { 548 | if (trackData.subscribers[i].requestId === requestId) { 549 | const ret = trackData.subscribers[i] 550 | trackData.subscribers.splice(i, 1) 551 | return ret 552 | } 553 | i++ 554 | } 555 | } 556 | } 557 | } 558 | return null 559 | } 560 | 561 | function getListOfRequestIdPerTrack(trackData) { 562 | const ret = [] 563 | if ("subscribers" in trackData && trackData.subscribers.length > 0) { 564 | let i = 0 565 | if ('subscribers' in trackData) { 566 | while (i < trackData.subscribers.length) { 567 | ret.push(trackData.subscribers[i].requestId) 568 | i++ 569 | } 570 | } 571 | } 572 | return ret 573 | } 574 | 575 | function getAggretatedSubscriptions() { 576 | let ret = 0 577 | for (const trackData of Object.values(tracks)) { 578 | if ('aggregatedNumSubscription' in trackData && trackData.aggregatedNumSubscription > 0) { 579 | ret = ret + trackData.aggregatedNumSubscription 580 | } 581 | } 582 | return ret 583 | } 584 | 585 | function getNumInflightRequestByType(moqt, trackType) { 586 | let ret = 0 587 | for (const key of Object.keys(moqt.multiObjectWritter)) { 588 | if (key.startsWith(trackType)) { 589 | ret++ 590 | } 591 | } 592 | return ret 593 | } 594 | 595 | function getInflightRequestsReport (moqt) { 596 | const ret = {} 597 | for (const 
[trackType] of Object.entries(tracks)) { 598 | ret[trackType] = getNumInflightRequestByType(moqt, trackType) 599 | } 600 | return ret 601 | } 602 | 603 | // This is an aproximation 604 | function getLastSentFromTrackAlias(trackAlias) { 605 | const ret = {group: undefined, obj: undefined} 606 | if (trackAlias in moqPublisherState) { 607 | ret.group = moqPublisherState[trackAlias].currentGroupSeq 608 | ret.obj = moqPublisherState[trackAlias].currentObjectSeq 609 | } 610 | return ret 611 | } 612 | 613 | async function sendSubscribeDone(moqt) { 614 | const errorCode = MOQ_SUBSCRIPTION_DONE_ENDED 615 | const errReason = "Subscription Ended, the stream has finsed" 616 | const numberOfOpenedStreams = getAggretatedSubscriptions() 617 | 618 | for (const trackData of Object.values(tracks)) { 619 | const subscribeIDs = getListOfRequestIdPerTrack(trackData) 620 | for (const subscribeId of subscribeIDs) { 621 | try { 622 | await moqSendSubscribeDone(moqt.controlWriter, currentClientRequestId, errorCode, errReason, numberOfOpenedStreams) 623 | sendMessageToMain(WORKER_PREFIX, 'info', `Sent SUBSCRIBE_DONE for subscribeId: ${subscribeId}, err: ${errorCode}(${errReason}), numberOfOpenedStreams: ${numberOfOpenedStreams}`) 624 | } 625 | catch (err) { 626 | if (MOQT_DEV_MODE) { throw err } 627 | sendMessageToMain(WORKER_PREFIX, 'error', `on SUBSCRIBE_DONE. Err: ${err}`) 628 | } 629 | } 630 | } 631 | } 632 | 633 | async function unAnnounceTracks(moqt) { 634 | for (const trackData of Object.values(tracks)) { 635 | try { 636 | await moqSendUnAnnounce(moqt.controlWriter, [trackData.namespace]) 637 | sendMessageToMain(WORKER_PREFIX, 'info', `Sent UnAnnounce for ${trackData.namespace}`) 638 | } 639 | catch (err) { 640 | if (MOQT_DEV_MODE) {throw err} 641 | sendMessageToMain(WORKER_PREFIX, 'error', `on UnAnnounce. Err: ${err}`) 642 | } 643 | } 644 | } 645 | 646 | // Requests IDs 647 | function requestIDsReset() { 648 | currentClientRequestId = undefined 649 | } 650 | 651 | function getNextClientReqId() { 652 | if (typeof currentClientRequestId == 'undefined') { 653 | currentClientRequestId = 0 654 | } else { 655 | currentClientRequestId = currentClientRequestId + 2 656 | } 657 | return currentClientRequestId 658 | } 659 | 660 | function getNextTrackAlias() { 661 | const ret = currentTrackAlias 662 | currentTrackAlias++ 663 | return ret 664 | } -------------------------------------------------------------------------------- /utils/moqt.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) Meta Platforms, Inc. and affiliates. 3 | 4 | This source code is licensed under the MIT license found in the 5 | LICENSE file in the root directory of this source tree. 
6 | */ 7 | 8 | import { numberToVarInt, varIntToNumberOrThrow, varIntToNumberAndLengthOrThrow} from './varint.js' 9 | import { numberTo2BytesArray, numberToSingleByteArray } from './utils.js' 10 | import { concatBuffer, buffRead, ReadStreamClosed , getArrayBufferByteLength } from './buffer_utils.js' 11 | 12 | // MOQ definitions 13 | // https://datatracker.ietf.org/doc/draft-ietf-moq-transport/ 14 | export const MOQ_DRAFT01_VERSION = 0xff000001 15 | export const MOQ_DRAFT02_VERSION = 0xff000002 16 | export const MOQ_DRAFT03_VERSION = 0xff000003 17 | export const MOQ_DRAFT04_VERSION = 0xff000004 18 | export const MOQ_DRAFT07exp2_VERSION = 0xff070002 19 | export const MOQ_DRAFT07_VERSION = 0xff000007 20 | export const MOQ_DRAFT08_VERSION_EXP9 = 0xff080009 21 | export const MOQ_DRAFT08_VERSION = 0xff000008 22 | export const MOQ_DRAFT12_VERSION = 0xff00000C 23 | 24 | export const MOQ_CURRENT_VERSION = MOQ_DRAFT12_VERSION 25 | export const MOQ_SUPPORTED_VERSIONS = [MOQ_CURRENT_VERSION] 26 | 27 | export const MOQ_USE_LITTLE_ENDIAN = false // MoQ is big endian 28 | 29 | // Setup params 30 | // export const MOQ_SETUP_PARAMETER_ROLE = 0x0 removed in version 8 31 | export const MOQ_SETUP_PARAMETER_PATH = 0x1 32 | export const MOQ_SETUP_PARAMETER_MAX_REQUEST_ID = 0x2 33 | export const MOQ_SETUP_MAX_AUTH_TOKEN_CACHE_SIZE = 0x4 34 | 35 | //MOQ general params 36 | export const MOQ_PARAMETER_DELIVERY_TIMEOUT = 0x2 37 | export const MOQ_PARAMETER_AUTHORIZATION_TOKEN = 0x3 38 | export const MOQ_PARAMETER_MAX_CACHE_DURATION = 0x4 39 | 40 | export const MOQ_MAX_PARAMS = 256 41 | export const MOQ_MAX_ARRAY_LENGTH = 1024 42 | export const MOQ_MAX_TUPLE_PARAMS = 32 43 | export const MOQ_MAX_REQUEST_ID_NUM = 128 44 | 45 | // MOQ Location modes 46 | export const MOQ_LOCATION_MODE_NONE = 0x0 47 | export const MOQ_LOCATION_MODE_ABSOLUTE = 0x1 48 | export const MOQ_LOCATION_MODE_RELATIVE_PREVIOUS = 0x2 49 | export const MOQ_LOCATION_MODE_RELATIVE_NEXT = 0x3 50 | 51 | // MOQ SUBSCRIPTION CODES 52 | export const MOQ_SUBSCRIPTION_ERROR_INTERNAL = 0 53 | export const MOQ_SUBSCRIPTION_RETRY_TRACK_ALIAS = 0x2 54 | 55 | // MOQ SUBSCRIPTION DONE CODES 56 | export const MOQ_SUBSCRIPTION_DONE_ENDED = 0x4 57 | 58 | // MOQ FILTER TYPES 59 | export const MOQ_FILTER_TYPE_LATEST_GROUP = 0x1 60 | export const MOQ_FILTER_TYPE_LATEST_OBJ = 0x2 61 | export const MOQ_FILTER_TYPE_ABSOLUTE_START = 0x3 62 | export const MOQ_FILTER_TYPE_ABSOLUTE_RANGE = 0x4 63 | 64 | // MOQ object headers 65 | // Datagrams 66 | export const MOQ_MESSAGE_OBJECT_DATAGRAM_MIN= 0x0 67 | export const MOQ_MESSAGE_OBJECT_DATAGRAM_MAX = 0x4 68 | export const MOQ_MESSAGE_OBJECT_DATAGRAM_STATUS_MIN= 0x20 69 | export const MOQ_MESSAGE_OBJECT_DATAGRAM_STATUS_MAX= 0x21 70 | export const MOQ_MESSAGE_STREAM_HEADER_SUBGROUP_MIN = 0x10 71 | export const MOQ_MESSAGE_STREAM_HEADER_SUBGROUP_MAX = 0x1D 72 | 73 | // MOQ Messages 74 | export const MOQ_MESSAGE_CLIENT_SETUP = 0x20 75 | export const MOQ_MESSAGE_SERVER_SETUP = 0x21 76 | 77 | export const MOQ_MESSAGE_SUBSCRIBE = 0x3 78 | export const MOQ_MESSAGE_SUBSCRIBE_OK = 0x4 79 | export const MOQ_MESSAGE_SUBSCRIBE_ERROR = 0x5 80 | export const MOQ_MESSAGE_UNSUBSCRIBE = 0xa 81 | export const MOQ_MESSAGE_SUBSCRIBE_DONE = 0xb 82 | 83 | export const MOQ_MESSAGE_ANNOUNCE = 0x6 84 | export const MOQ_MESSAGE_ANNOUNCE_OK = 0x7 85 | export const MOQ_MESSAGE_ANNOUNCE_ERROR = 0x8 86 | export const MOQ_MESSAGE_UNANNOUNCE = 0x9 87 | 88 | // MOQ PRIORITIES 89 | export const MOQ_PUBLISHER_PRIORITY_BASE_DEFAULT = 0xa 90 | 91 | // MOQ - QUIC 
mapping 92 | export const MOQ_MAPPING_OBJECT_PER_DATAGRAM = "ObjPerDatagram" 93 | export const MOQ_MAPPING_SUBGROUP_PER_GROUP = "SubGroupPerObj" 94 | 95 | export const MOQ_USECASE_SUBSCRIBER_PRIORITY_DEFAULT = 0x1 // Lower values are hi-pri (highest = 0) 96 | 97 | // Group order 98 | export const MOQ_GROUP_ORDER_FOLLOW_PUBLISHER = 0x0 99 | export const MOQ_GROUP_ORDER_ASCENDING = 0x1 100 | export const MOQ_GROUP_ORDER_DESCENDING = 0x2 101 | 102 | // Forward 103 | export const MOQ_FORWARD_FALSE = 0 104 | export const MOQ_FORWARD_TRUE = 1 105 | 106 | // Object status 107 | export const MOQ_OBJ_STATUS_NORMAL = 0x0 108 | export const MOQ_OBJ_STATUS_NOT_EXISTS = 0x1 109 | export const MOQ_OBJ_STATUS_END_OF_GROUP = 0x3 110 | export const MOQ_OBJ_STATUS_END_OF_TRACK_AND_GROUP = 0x4 111 | export const MOQ_OBJ_STATUS_END_OF_SUBGROUP = 0x5 112 | 113 | // Extension headers (Even types indicate value coded by a single varint. Odd types idicates value is byte buffer with prefixed varint to indicate lenght) 114 | export const MOQ_EXT_HEADER_TYPE_MOQMI_MEDIA_TYPE = 0x0A 115 | export const MOQ_EXT_HEADER_TYPE_MOQMI_VIDEO_H264_IN_AVCC_METADATA = 0x0B 116 | export const MOQ_EXT_HEADER_TYPE_MOQMI_VIDEO_H264_IN_AVCC_EXTRADATA = 0x0D 117 | export const MOQ_EXT_HEADER_TYPE_MOQMI_AUDIO_OPUS_METADATA = 0x0F 118 | export const MOQ_EXT_HEADER_TYPE_MOQMI_TEXT_UTF8_METADATA = 0x11 119 | export const MOQ_EXT_HEADER_TYPE_MOQMI_AUDIO_AACLC_MPEG4_METADATA = 0x13 120 | 121 | //Audio AAC-LC in MPEG4 bitstream data header extension (Header extension type = 0x13) 122 | 123 | export const MOQ_EXT_HEADERS_SUPPORTED = [MOQ_EXT_HEADER_TYPE_MOQMI_MEDIA_TYPE, MOQ_EXT_HEADER_TYPE_MOQMI_VIDEO_H264_IN_AVCC_METADATA, MOQ_EXT_HEADER_TYPE_MOQMI_VIDEO_H264_IN_AVCC_EXTRADATA, MOQ_EXT_HEADER_TYPE_MOQMI_AUDIO_OPUS_METADATA, MOQ_EXT_HEADER_TYPE_MOQMI_TEXT_UTF8_METADATA, MOQ_EXT_HEADER_TYPE_MOQMI_AUDIO_AACLC_MPEG4_METADATA] 124 | 125 | // Token Alias type 126 | export const MOQ_TOKEN_DELETE = 0x0 127 | export const MOQ_TOKEN_REGISTER = 0x1 128 | export const MOQ_TOKEN_USE_ALIAS = 0x2 129 | export const MOQ_TOKEN_USE_VALUE = 0x3 130 | 131 | // Token type 132 | export const MOQ_TOKEN_TYPE_NEGOTIATED_OUT_OF_BAND = 0x0 133 | 134 | 135 | 136 | export function moqCreate () { 137 | return { 138 | wt: null, 139 | 140 | controlStream: null, 141 | controlWriter: null, 142 | controlReader: null, 143 | 144 | multiObjectWritter: {}, 145 | 146 | datagramsReader: null, 147 | } 148 | } 149 | 150 | export async function moqCloseWrttingStreams (moqt) { 151 | const multiWritterClosePromises = [] 152 | for (const multiWritter of Object.values(moqt.multiObjectWritter)) { 153 | multiWritterClosePromises.push(multiWritter.close()) 154 | } 155 | if (multiWritterClosePromises.length > 0) { 156 | await Promise.all(multiWritterClosePromises) 157 | } 158 | moqt.multiObjectWritter = {} 159 | } 160 | 161 | export async function moqClose (moqt) { 162 | await moqCloseWrttingStreams(moqt) 163 | 164 | if (moqt.datagramsReader != null) { 165 | await moqt.datagramsReader.cancel("Closing!") 166 | } 167 | 168 | if (moqt.controlWriter != null) { 169 | await moqt.controlWriter.close() 170 | moqt.controlWriter = null 171 | } 172 | // TODO: We need to cancel the reader (https://developer.mozilla.org/en-US/docs/Web/API/ReadableStreamBYOBReader) 173 | if (moqt.controlReader != null) { 174 | await moqt.controlReader.cancel("Closing!") 175 | moqt.controlReader = null 176 | } 177 | if (moqt.wt != null) { 178 | // Race condition, relay closing too 179 | await moqt.wt.close() 180 | } 181 | 
moqt.wt = null 182 | moqt.controlStream = null 183 | moqt.controlReader = null 184 | moqt.datagramsReader = null 185 | } 186 | 187 | // MOQ control stream 188 | 189 | export async function moqCreateControlStream (moqt) { 190 | if (moqt.wt === null) { 191 | throw new Error('WT session is NULL when we tried to create MOQ') 192 | } 193 | if (moqt.controlReader != null || moqt.controlWriter != null) { 194 | throw new Error('controlReader OR controlWriter are NOT null this indicates there are some dirt from previous sessions when we tried to create MOQ') 195 | } 196 | 197 | moqt.controlStream = await moqt.wt.createBidirectionalStream() 198 | moqt.controlWriter = moqt.controlStream.writable 199 | moqt.controlReader = moqt.controlStream.readable 200 | } 201 | 202 | // SETUP CLIENT 203 | 204 | function moqCreateClientSetupMessageBytes () { 205 | const msg = [] 206 | 207 | // Number of supported versions 208 | msg.push(numberToVarInt(1)); 209 | // Version[0] 210 | msg.push(numberToVarInt(MOQ_CURRENT_VERSION)); 211 | const kv_params = [moqCreateKvPair(MOQ_SETUP_PARAMETER_MAX_REQUEST_ID, MOQ_MAX_REQUEST_ID_NUM)] 212 | msg.push(moqCreateParametersBytes(kv_params)) 213 | 214 | // Length 215 | const totalLength = getArrayBufferByteLength(msg); 216 | 217 | return concatBuffer([numberToVarInt(MOQ_MESSAGE_CLIENT_SETUP), numberTo2BytesArray(totalLength, MOQ_USE_LITTLE_ENDIAN), ...msg]) 218 | } 219 | 220 | export async function moqSendClientSetup (writerStream) { 221 | return moqSendToStream(writerStream, moqCreateClientSetupMessageBytes()) 222 | } 223 | 224 | // SETUP SERVER 225 | 226 | async function moqParseSetupResponse (readerStream) { 227 | const ret = { } 228 | await moqIntReadBytesOrThrow(readerStream, 2) // Length 229 | 230 | ret.version = await varIntToNumberOrThrow(readerStream) 231 | if (!MOQ_SUPPORTED_VERSIONS.includes(ret.version)) { 232 | throw new Error(`version sent from server NOT supported. 
Supported versions ${JSON.stringify(MOQ_SUPPORTED_VERSIONS)}, got from server ${JSON.stringify(ret.version)}`) 233 | } 234 | ret.parameters = await moqReadParameters(readerStream) 235 | 236 | return ret 237 | } 238 | 239 | // ANNOUNCE 240 | 241 | export async function moqSendAnnounce(writerStream, reqId, namespace, authInfo) { 242 | return moqSendToStream(writerStream, moqCreateAnnounceMessageBytes(namespace, reqId, authInfo)) 243 | } 244 | 245 | function moqCreateAnnounceMessageBytes (namespace, reqId, authInfo) { 246 | const msg = [] 247 | 248 | // RequestID 249 | msg.push(numberToVarInt(reqId)) 250 | // Namespace 251 | msg.push(moqCreateTupleBytes(namespace)); 252 | let kv_params = [] 253 | if (authInfo != undefined && authInfo != "") { 254 | kv_params = [moqCreateKvPair(MOQ_PARAMETER_AUTHORIZATION_TOKEN, moqCreateUseValueTokenFromString(authInfo))] 255 | } 256 | msg.push(moqCreateParametersBytes(kv_params)) 257 | 258 | // Length 259 | const lengthBytes = numberTo2BytesArray(getArrayBufferByteLength(msg), MOQ_USE_LITTLE_ENDIAN) 260 | 261 | return concatBuffer([numberToVarInt(MOQ_MESSAGE_ANNOUNCE), lengthBytes, ...msg]) 262 | } 263 | 264 | // ANNOUNCE OK 265 | 266 | async function moqParseAnnounceOk (readerStream) { 267 | const ret = { } 268 | 269 | await moqIntReadBytesOrThrow(readerStream, 2); // Length 270 | 271 | ret.reqId = await varIntToNumberOrThrow(readerStream) 272 | 273 | return ret 274 | } 275 | 276 | // ANNOUNCE ERROR 277 | 278 | async function moqParseAnnounceError (readerStream) { 279 | const ret = { } 280 | 281 | await moqIntReadBytesOrThrow(readerStream, 2); // Length 282 | 283 | ret.reqId = await varIntToNumberOrThrow(readerStream) 284 | ret.errorCode = await varIntToNumberOrThrow(readerStream) 285 | ret.reason = await moqStringReadOrThrow(readerStream) 286 | 287 | return ret 288 | } 289 | 290 | // UNANNOUNCE 291 | 292 | export async function moqSendUnAnnounce (writerStream, namespace) { 293 | return moqSendToStream(writerStream, moqCreateUnAnnounceMessageBytes(namespace)) 294 | } 295 | 296 | function moqCreateUnAnnounceMessageBytes (namespace) { 297 | const msg = [] 298 | 299 | // Namespace 300 | msg.push(moqCreateTupleBytes(namespace)); 301 | 302 | // Length 303 | const lengthBytes = numberTo2BytesArray(getArrayBufferByteLength(msg), MOQ_USE_LITTLE_ENDIAN) 304 | 305 | return concatBuffer([numberToVarInt(MOQ_MESSAGE_UNANNOUNCE), lengthBytes, ...msg]) 306 | } 307 | 308 | 309 | // SUBSCRIBE 310 | // Always subscribe from start next group 311 | 312 | export async function moqSendSubscribe (writerStream, requestId, trackNamespace, trackName, authInfo) { 313 | return moqSendToStream(writerStream, moqCreateSubscribeMessageBytes(requestId, trackNamespace, trackName, authInfo)) 314 | } 315 | 316 | function moqCreateSubscribeMessageBytes(requestId, trackNamespace, trackName, authInfo) { 317 | const msg = [] 318 | 319 | // reuqestID 320 | msg.push(numberToVarInt(requestId)); 321 | 322 | // Track namespace 323 | msg.push(moqCreateTupleBytes(trackNamespace)); 324 | 325 | // Track name 326 | msg.push(moqCreateStringBytes(trackName)); 327 | 328 | // Subscriber priority 329 | msg.push(numberToSingleByteArray(MOQ_USECASE_SUBSCRIBER_PRIORITY_DEFAULT)); 330 | 331 | // Group order 332 | msg.push(numberToSingleByteArray(MOQ_GROUP_ORDER_FOLLOW_PUBLISHER)); 333 | 334 | // Forward 335 | msg.push(numberToSingleByteArray(MOQ_FORWARD_TRUE)); 336 | 337 | // Filter type (request latest object) 338 | msg.push(numberToVarInt(MOQ_FILTER_TYPE_LATEST_OBJ)); 339 | 340 | // NO need to add StartGroup, 
StartObject, EndGroup 341 | 342 | // Params 343 | let kv_params = [] 344 | if (authInfo != undefined && authInfo != "") { 345 | kv_params = [moqCreateKvPair(MOQ_PARAMETER_AUTHORIZATION_TOKEN, moqCreateUseValueTokenFromString(authInfo))] 346 | } 347 | msg.push(moqCreateParametersBytes(kv_params)) 348 | 349 | // Length 350 | const lengthBytes = numberTo2BytesArray(getArrayBufferByteLength(msg), MOQ_USE_LITTLE_ENDIAN) 351 | 352 | return concatBuffer([numberToVarInt(MOQ_MESSAGE_SUBSCRIBE), lengthBytes, ...msg]) 353 | } 354 | 355 | async function moqParseSubscribe (readerStream) { 356 | const ret = { } 357 | 358 | await moqIntReadBytesOrThrow(readerStream, 2); // Length 359 | ret.requestId = await varIntToNumberOrThrow(readerStream) 360 | ret.namespace = await moqTupleReadOrThrow(readerStream) 361 | ret.trackName = await moqStringReadOrThrow(readerStream) 362 | ret.subscriberPriority = await moqIntReadBytesOrThrow(readerStream, 1); 363 | ret.groupOrder = await moqIntReadBytesOrThrow(readerStream, 1); 364 | ret.forward = await moqIntReadBytesOrThrow(readerStream, 1); 365 | 366 | ret.filterType = await varIntToNumberOrThrow(readerStream) 367 | if (ret.filterType === MOQ_FILTER_TYPE_ABSOLUTE_START || ret.filterType === MOQ_FILTER_TYPE_ABSOLUTE_RANGE) { 368 | ret.startGroup = await varIntToNumberOrThrow(readerStream) 369 | if (ret.startGroup !== MOQ_LOCATION_MODE_NONE) { 370 | await varIntToNumberOrThrow(readerStream) 371 | throw new Error('Not supported startGroup') 372 | } 373 | // Start object 374 | ret.startObject = await varIntToNumberOrThrow(readerStream) 375 | if (ret.startObject !== MOQ_LOCATION_MODE_NONE) { 376 | await varIntToNumberOrThrow(readerStream) 377 | throw new Error('Not supported startObject') 378 | } 379 | } 380 | if (ret.filterType === MOQ_FILTER_TYPE_ABSOLUTE_RANGE) { 381 | ret.endGroup = await varIntToNumberOrThrow(readerStream) 382 | if (ret.endGroup !== MOQ_LOCATION_MODE_NONE) { 383 | await varIntToNumberOrThrow(readerStream) 384 | throw new Error('Not supported endGroup') 385 | } 386 | ret.endObject = await varIntToNumberOrThrow(readerStream) 387 | if (ret.endObject !== MOQ_LOCATION_MODE_NONE) { 388 | await varIntToNumberOrThrow(readerStream) 389 | throw new Error('Not supported endObject') 390 | } 391 | } 392 | ret.parameters = await moqReadParameters(readerStream) 393 | 394 | return ret 395 | } 396 | 397 | // SUBSCRIBE OK 398 | 399 | export async function moqSendSubscribeOk (writerStream, requestId, trackAlias, expiresMs, lastGroupSent, lastObjSent, authInfo) { 400 | return moqSendToStream(writerStream, moqCreateSubscribeOkMessageBytes(requestId, trackAlias, expiresMs, lastGroupSent, lastObjSent, authInfo)) 401 | } 402 | 403 | function moqCreateSubscribeOkMessageBytes (requestId, trackAlias, expiresMs, lastGroupSent, lastObjSent, authInfo) { 404 | const msg = [] 405 | 406 | // RequestID 407 | msg.push(numberToVarInt(requestId)) 408 | 409 | // Trackalias 410 | msg.push(numberToVarInt(trackAlias)) 411 | 412 | // Expires MS 413 | msg.push(numberToVarInt(expiresMs)) 414 | 415 | // Group order 416 | msg.push(numberToSingleByteArray(MOQ_GROUP_ORDER_DESCENDING)); // Live streaming app (so new needs to be send first) 417 | 418 | if (lastGroupSent != undefined && lastObjSent != undefined) { 419 | // Content exists 420 | msg.push(numberToSingleByteArray(1)); 421 | // Final group 422 | msg.push(numberToVarInt(lastGroupSent)); 423 | // Final object 424 | msg.push(numberToVarInt(lastObjSent)); 425 | } else { 426 | // Content exists 427 | msg.push(numberToSingleByteArray(0)); 428 | } 
429 | 430 | // Params 431 | let kv_params = [] 432 | if (authInfo != undefined && authInfo != "") { 433 | kv_params = [moqCreateKvPair(MOQ_PARAMETER_AUTHORIZATION_TOKEN, moqCreateUseValueTokenFromString(authInfo))] 434 | } 435 | msg.push(moqCreateParametersBytes(kv_params)) 436 | 437 | // Length 438 | const lengthBytes = numberTo2BytesArray(getArrayBufferByteLength(msg), MOQ_USE_LITTLE_ENDIAN) 439 | 440 | return concatBuffer([numberToVarInt(MOQ_MESSAGE_SUBSCRIBE_OK), lengthBytes, ...msg]) 441 | } 442 | 443 | async function moqParseSubscribeOk (readerStream) { 444 | const ret = { } 445 | 446 | await moqIntReadBytesOrThrow(readerStream, 2); // Length 447 | ret.requestId = await varIntToNumberOrThrow(readerStream) 448 | ret.trackAlias = await varIntToNumberOrThrow(readerStream) 449 | ret.expires = await varIntToNumberOrThrow(readerStream) 450 | ret.groupOrder = await moqIntReadBytesOrThrow(readerStream, 1); 451 | const contentExists = await moqIntReadBytesOrThrow(readerStream, 1); 452 | if (contentExists > 0) { 453 | ret.lastGroupSent = await varIntToNumberOrThrow(readerStream) 454 | ret.lastObjSent = await varIntToNumberOrThrow(readerStream) 455 | } 456 | ret.parameters = await moqReadParameters(readerStream) 457 | 458 | return ret 459 | } 460 | 461 | // SUBSCRIBE ERROR 462 | 463 | export async function moqSendSubscribeError (writerStream, requestId, errorCode, reason) { 464 | return moqSendToStream(writerStream, moqCreateSubscribeErrorMessageBytes(requestId, errorCode, reason)) 465 | } 466 | 467 | function moqCreateSubscribeErrorMessageBytes (requestId, errorCode, reason) { 468 | const msg = [] 469 | 470 | // Request Id 471 | msg.push(numberToVarInt(requestId)); 472 | // errorCode 473 | msg.push(numberToVarInt(errorCode)); 474 | // Reason 475 | msg.push(moqCreateStringBytes(reason)) 476 | 477 | // Length 478 | const lengthBytes = numberTo2BytesArray(getArrayBufferByteLength(msg), MOQ_USE_LITTLE_ENDIAN) 479 | 480 | return concatBuffer([numberToVarInt(MOQ_MESSAGE_SUBSCRIBE_ERROR), lengthBytes, ...msg]) 481 | } 482 | 483 | async function moqParseSubscribeError (readerStream) { 484 | const ret = { } 485 | 486 | await moqIntReadBytesOrThrow(readerStream, 2); // Length 487 | ret.requestId = await varIntToNumberOrThrow(readerStream) 488 | ret.errorCode = await varIntToNumberOrThrow(readerStream) 489 | ret.errorReason = await moqStringReadOrThrow(readerStream) 490 | 491 | return ret 492 | } 493 | 494 | // UNSUBSCRIBE 495 | 496 | export async function moqSendUnSubscribe (writerStream, subscribeId) { 497 | return moqSendToStream(writerStream, moqCreateUnSubscribeMessageBytes(subscribeId)) 498 | } 499 | 500 | function moqCreateUnSubscribeMessageBytes (requestId) { 501 | const msg = [] 502 | 503 | // Subscribe Id 504 | msg.push(numberToVarInt(requestId)); 505 | 506 | // Length 507 | const lengthBytes = numberTo2BytesArray(getArrayBufferByteLength(msg), MOQ_USE_LITTLE_ENDIAN) 508 | 509 | return concatBuffer([numberToVarInt(MOQ_MESSAGE_UNSUBSCRIBE), lengthBytes, ...msg]) 510 | } 511 | 512 | async function moqParseUnSubscribe (readerStream) { 513 | const ret = { } 514 | 515 | await moqIntReadBytesOrThrow(readerStream, 2); // Length 516 | 517 | // requestId 518 | ret.requestId = await varIntToNumberOrThrow(readerStream) 519 | 520 | return ret 521 | } 522 | // SUBSCRIBE DONE 523 | 524 | export async function moqSendSubscribeDone(writerStream, requestId, errorCode, reason, numberOfOpenedStreams) { 525 | return moqSendToStream(writerStream, moqCreateSubscribeDoneMessageBytes(requestId, errorCode, reason, 
numberOfOpenedStreams)) 526 | } 527 | 528 | function moqCreateSubscribeDoneMessageBytes(requestId, statusCode, reason, streamCount) { 529 | const msg = [] 530 | 531 | // Request Id 532 | msg.push(numberToVarInt(requestId)); 533 | // statusCode 534 | msg.push(numberToVarInt(statusCode)); 535 | // streamCount 536 | msg.push(numberToVarInt(streamCount)); 537 | // Reason 538 | msg.push(moqCreateStringBytes(reason)); 539 | 540 | // Length 541 | const lengthBytes = numberTo2BytesArray(getArrayBufferByteLength(msg), MOQ_USE_LITTLE_ENDIAN) 542 | 543 | return concatBuffer([numberToVarInt(MOQ_MESSAGE_SUBSCRIBE_DONE), lengthBytes, ...msg]) 544 | } 545 | 546 | async function moqParseSubscribeDone (readerStream) { 547 | const ret = { } 548 | 549 | await moqIntReadBytesOrThrow(readerStream, 2); // Length 550 | ret.requestId = await varIntToNumberOrThrow(readerStream) 551 | ret.statusCode = await varIntToNumberOrThrow(readerStream) 552 | ret.streamCount = await varIntToNumberOrThrow(readerStream) 553 | ret.errorReason = await moqStringReadOrThrow(readerStream) 554 | 555 | return ret 556 | } 557 | 558 | 559 | // PARSE MESSAGES 560 | 561 | export async function moqParseMsg (readerStream) { 562 | const msgType = await varIntToNumberOrThrow(readerStream) 563 | let data = null 564 | if (msgType === MOQ_MESSAGE_SUBSCRIBE) { 565 | data = await moqParseSubscribe(readerStream) 566 | } else if (msgType === MOQ_MESSAGE_UNSUBSCRIBE) { 567 | data = await moqParseUnSubscribe(readerStream) 568 | } else if (msgType === MOQ_MESSAGE_SUBSCRIBE_DONE) { 569 | data = await moqParseSubscribeDone(readerStream) 570 | } else if (msgType === MOQ_MESSAGE_SERVER_SETUP) { 571 | data = await moqParseSetupResponse(readerStream) 572 | } else if (msgType === MOQ_MESSAGE_ANNOUNCE_OK) { 573 | data = await moqParseAnnounceOk(readerStream) 574 | } else if (msgType === MOQ_MESSAGE_ANNOUNCE_ERROR) { 575 | data = await moqParseAnnounceError(readerStream) 576 | } else if (msgType === MOQ_MESSAGE_SUBSCRIBE_OK) { 577 | data = await moqParseSubscribeOk(readerStream) 578 | } else if (msgType === MOQ_MESSAGE_SUBSCRIBE_ERROR) { 579 | data = await moqParseSubscribeError(readerStream) 580 | } else { 581 | throw new Error(`UNKNOWN msg type received, got ${msgType}`) 582 | } 583 | return {type: msgType, data: data} 584 | } 585 | 586 | // OBJECT 587 | 588 | function moqCreateSubgroupHeaderBytes(trackAlias, groupSeq, publisherPriority) { 589 | const msg = [] 590 | 591 | const type = getSubgroupHeaderType(true, false, true, false) 592 | // Message type 593 | msg.push(numberToVarInt(type)); 594 | msg.push(numberToVarInt(trackAlias)); // Track Alias 595 | msg.push(numberToVarInt(groupSeq)); // Group ID 596 | msg.push(numberToVarInt(groupSeq)); // Subgroup ID 597 | msg.push(numberToSingleByteArray(publisherPriority)); // Publisher priority 598 | 599 | return concatBuffer(msg); 600 | } 601 | 602 | function moqCreateObjectEndOfGroupBytes(objSeq, extensionHeaders) { 603 | const msg = [] 604 | 605 | msg.push(numberToVarInt(objSeq)) // Object ID 606 | if (extensionHeaders == undefined || extensionHeaders.length <= 0) { 607 | msg.push(numberToVarInt(0)); // Extension headers count 608 | } else { 609 | moqCreateKvParamBytes 610 | msg.push(moqCreateExtensionsBytes(extensionHeaders)); // Extension headers 611 | } 612 | msg.push(numberToVarInt(0)) // Size = 0 613 | msg.push(numberToVarInt(MOQ_OBJ_STATUS_END_OF_GROUP)) 614 | 615 | return concatBuffer(msg); 616 | } 617 | 618 | function moqCreateObjectSubgroupBytes(objSeq, data, extensionHeaders) { 619 | const msg = [] 620 
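// A minimal usage sketch, not part of moqt.js itself: moqParseMsg() above reads the
// varint message type and dispatches to the matching control-message parser, so a
// receiver typically drives it in a loop over the control stream reader.
// controlMessageLoopSketch(), controlReader and the handling shown are placeholders,
// not the repo's actual logic.
async function controlMessageLoopSketch (controlReader) {
  for (;;) {
    const msg = await moqParseMsg(controlReader) // throws on unknown types or a closed stream
    if (msg.type === MOQ_MESSAGE_SUBSCRIBE) {
      // a publisher would validate msg.data and answer with moqSendSubscribeOk()/moqSendSubscribeError()
    } else if (msg.type === MOQ_MESSAGE_UNSUBSCRIBE) {
      break // stop serving this subscriber
    }
    // SERVER_SETUP, ANNOUNCE_OK/ERROR, SUBSCRIBE_OK/ERROR and SUBSCRIBE_DONE are handled similarly
  }
}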
| 621 | msg.push(numberToVarInt(objSeq)); // Object ID 622 | if (extensionHeaders == undefined || extensionHeaders.length <= 0) { 623 | msg.push(numberToVarInt(0)); // Extension headers count 624 | } else { 625 | msg.push(moqCreateExtensionsBytes(extensionHeaders)); // Extension headers 626 | } 627 | if (data != undefined && data.byteLength > 0) { 628 | msg.push(numberToVarInt(data.byteLength)) // Data size 629 | msg.push(data) 630 | } else { 631 | msg.push(numberToVarInt(0)) // Data size 632 | msg.push(numberToVarInt(MOQ_OBJ_STATUS_NORMAL)) // Obj status 633 | } 634 | return concatBuffer(msg); 635 | } 636 | 637 | function moqCreateObjectPerDatagramBytes (trackAlias, groupSeq, objSeq, publisherPriority, data, extensionHeaders, isEndOfFGroup) { 638 | const msg = [] 639 | const hasHeaders = (extensionHeaders != undefined && extensionHeaders.length > 0) 640 | const hasData = (data != undefined && data.byteLength > 0) 641 | 642 | const type = getDatagramType(!hasData, hasHeaders, isEndOfFGroup) 643 | 644 | // Message type 645 | msg.push(numberToVarInt(type)) 646 | msg.push(numberToVarInt(trackAlias)) 647 | msg.push(numberToVarInt(groupSeq)) 648 | msg.push(numberToVarInt(objSeq)) 649 | msg.push(numberToSingleByteArray(publisherPriority)) 650 | if (hasHeaders) { 651 | msg.push(moqCreateExtensionsBytes(extensionHeaders)); // Extension headers 652 | } 653 | if (hasData) { 654 | msg.push(data) 655 | } else { 656 | msg.push(numberToVarInt(MOQ_OBJ_STATUS_NORMAL)) 657 | } 658 | 659 | return concatBuffer(msg); 660 | } 661 | 662 | export function moqSendSubgroupHeader (writer, trackAlias, groupSeq, publisherPriority) { 663 | return moqSendToWriter(writer, moqCreateSubgroupHeaderBytes(trackAlias, groupSeq, publisherPriority)) 664 | } 665 | 666 | export function moqSendObjectSubgroupToWriter (writer, objSeq, data, extensionHeaders) { 667 | return moqSendToWriter(writer, moqCreateObjectSubgroupBytes(objSeq, data, extensionHeaders)) 668 | } 669 | 670 | export function moqSendObjectEndOfGroupToWriter (writer, objSeq, extensionHeaders, closeStream) { 671 | return moqSendToWriter(writer, moqCreateObjectEndOfGroupBytes(objSeq, extensionHeaders), closeStream) 672 | } 673 | 674 | export function moqSendObjectPerDatagramToWriter (writer, trackAlias, groupSeq, objSeq, publisherPriority, data, extensionHeaders, isEndOfFGroup) { 675 | return moqSendToWriter(writer, moqCreateObjectPerDatagramBytes(trackAlias, groupSeq, objSeq, publisherPriority, data, extensionHeaders, isEndOfFGroup)) 676 | } 677 | 678 | export async function moqParseObjectHeader (readerStream) { 679 | const type = await varIntToNumberOrThrow(readerStream) 680 | if (!isMoqObjectStreamHeaderType(type) && !isMoqObjectDatagramType(type)) { 681 | throw new Error(`OBJECT is not any known object type, got ${type}`) 682 | } 683 | 684 | let ret = undefined 685 | if (isMoqObjectDatagramType(type)) { 686 | const options = moqDecodeDatagramType(type) 687 | const trackAlias = await varIntToNumberOrThrow(readerStream); 688 | const groupSeq = await varIntToNumberOrThrow(readerStream); 689 | const objSeq = await varIntToNumberOrThrow(readerStream); 690 | const publisherPriority = await moqIntReadBytesOrThrow(readerStream, 1); 691 | let extensionHeaders = undefined 692 | if (options.extensionsPresent) { 693 | extensionHeaders = await moqReadHeaderExtensions(readerStream) 694 | } 695 | ret = {type, trackAlias, groupSeq, objSeq, publisherPriority, extensionHeaders} 696 | } 697 | else if (isMoqObjectStreamHeaderType(type)) { 698 | const options = 
moqDecodeStreamHeaderType(type) 699 | const trackAlias = await varIntToNumberOrThrow(readerStream) 700 | const groupSeq = await varIntToNumberOrThrow(readerStream) 701 | let subGroupSeq = undefined 702 | if (options.subGroupIdPresent) { 703 | subGroupSeq = await varIntToNumberOrThrow(readerStream) 704 | } 705 | const publisherPriority = await moqIntReadBytesOrThrow(readerStream, 1); 706 | ret = {type, trackAlias, groupSeq, subGroupSeq, publisherPriority} 707 | } 708 | return ret 709 | } 710 | 711 | export async function moqParseObjectFromSubgroupHeader(readerStream, type) { 712 | const typeDecoded = moqDecodeStreamHeaderType(type) 713 | 714 | const objSeq = await varIntToNumberOrThrow(readerStream) 715 | let extensionHeaders = [] 716 | if (typeDecoded.extensionsPresent) { 717 | extensionHeaders = await moqReadHeaderExtensions(readerStream) 718 | } 719 | const payloadLength = await varIntToNumberOrThrow(readerStream) 720 | const ret = {objSeq, payloadLength, extensionHeaders} 721 | if (payloadLength == 0) { 722 | ret.status = await varIntToNumberOrThrow(readerStream) 723 | } 724 | return ret 725 | } 726 | 727 | // Helpers 728 | 729 | export function getTrackFullName(namespace, trackName) { 730 | return namespace + trackName 731 | } 732 | 733 | function moqCreateStringBytes (str) { 734 | const dataStrBytes = new TextEncoder().encode(str) 735 | const dataStrLengthBytes = numberToVarInt(dataStrBytes.byteLength) 736 | return concatBuffer([dataStrLengthBytes, dataStrBytes]) 737 | } 738 | 739 | function moqCreateTupleBytes(arr) { 740 | const msg = []; 741 | if (arr.length > MOQ_MAX_TUPLE_PARAMS) { 742 | throw new Error(`We only support up to ${MOQ_MAX_TUPLE_PARAMS} items in an MOQ tuple`) 743 | } 744 | msg.push(numberToVarInt(arr.length)); 745 | for (let i = 0; i < arr.length; i++) { 746 | msg.push(moqCreateStringBytes(arr[i])); 747 | } 748 | return concatBuffer(msg); 749 | } 750 | 751 | export function moqCreateKvPair(name, val) { 752 | return {name: name, val: val} 753 | } 754 | 755 | function moqCreateParametersBytes(kv_params) { 756 | const msg = []; 757 | msg.push(numberToVarInt(kv_params.length)); 758 | for (let i = 0; i < kv_params.length; i++) { 759 | const param = kv_params[i] 760 | msg.push(moqCreateKvParamBytes(param.name, param.val, false)) 761 | } 762 | return concatBuffer(msg); 763 | } 764 | 765 | function moqCreateExtensionsBytes(kv_params) { 766 | const msg = []; 767 | for (let i = 0; i < kv_params.length; i++) { 768 | const param = kv_params[i] 769 | msg.push(moqCreateKvParamBytes(param.name, param.val, true)) 770 | } 771 | // Length 772 | const lengthBytes = getArrayBufferByteLength(msg) 773 | 774 | return concatBuffer([numberToVarInt(lengthBytes), ...msg]) 775 | } 776 | 777 | function moqCreateKvParamBytes(name, val, isExtensionHeaders) { 778 | const msg = []; 779 | msg.push(numberToVarInt(name)); 780 | if (typeof val === 'number') { 781 | if (name % 2 != 0) { // Even types indicate value coded by a single varint 782 | throw new Error('Params with odd name needs to be followed by string or buffer') 783 | } 784 | msg.push(numberToVarInt(val)); 785 | } else if (typeof val === 'string') { 786 | if (name % 2 == 0) { // Odd types are followed by varint or buffer 787 | throw new Error('Params with even name needs to be followed by number') 788 | } 789 | msg.push(moqCreateStringBytes(val)); 790 | } else if ((typeof val === 'object') && (!isExtensionHeaders) && (name === MOQ_PARAMETER_AUTHORIZATION_TOKEN)) { 791 | msg.push(moqCreateTokenBytes(val)); 792 | } 793 | else if ((typeof val 
=== 'object') && isExtensionHeaders) { 794 | if (name % 2 == 0) { // Odd types are followed by varint or buffer 795 | throw new Error('Params with even name needs to be followed by number') 796 | } 797 | if (!(val instanceof Uint8Array) && !(val instanceof ArrayBuffer)) { 798 | throw new Error(`Trying to write an non Uint8Array/ArrayBuffer as buffer`) 799 | } 800 | msg.push(numberToVarInt(val.byteLength)) 801 | msg.push(val) 802 | } 803 | else { 804 | throw new Error(`Not supported MOQT param/extension type ${(typeof val)}`) 805 | } 806 | return concatBuffer(msg); 807 | } 808 | 809 | async function moqStringReadOrThrow(readerStream) { 810 | const size = await varIntToNumberOrThrow(readerStream) 811 | const ret = await buffRead(readerStream, size) 812 | if (ret.eof) { 813 | throw new ReadStreamClosed(`Connection closed while reading data`) 814 | } 815 | return new TextDecoder().decode(ret.buff) 816 | } 817 | 818 | async function moqIntReadBytesOrThrow(readerStream, length) { 819 | if (length > 4 || length < 0 || !Number.isInteger(length)) 820 | throw new Error(`We can NOT read ints of length ${length}, only ints from 1 to 4 bytes`) 821 | 822 | const ret = await buffRead(readerStream, length); 823 | if (ret.eof) { 824 | throw new ReadStreamClosed(`Connection closed while reading byte`) 825 | } 826 | if (length === 1) 827 | return new DataView(ret.buff, 0, length).getUint8(); 828 | if (length === 2) 829 | return new DataView(ret.buff, 0, length).getUint16(0, MOQ_USE_LITTLE_ENDIAN) 830 | if (length > 2) 831 | return new DataView(ret.buff, 0, length).getUint32(0, MOQ_USE_LITTLE_ENDIAN) 832 | } 833 | 834 | async function moqTupleReadOrThrow (readerStream) { 835 | const ret = []; 836 | const size = await varIntToNumberOrThrow(readerStream) 837 | let i = 0; 838 | while (i < size) { 839 | const element = await moqStringReadOrThrow(readerStream); 840 | ret.push(element); 841 | i++; 842 | } 843 | return ret; 844 | } 845 | 846 | async function moqReadParameters(readerStream) { 847 | const ret = [] 848 | const count = await varIntToNumberOrThrow(readerStream) 849 | for (let i = 0; i < count; i++) { 850 | const param = await moqReadKeyValuePair(readerStream, false) 851 | ret.push(param.val) 852 | } 853 | return ret 854 | } 855 | 856 | async function moqReadHeaderExtensions(readerStream) { 857 | const ret = [] 858 | let remainingBytes = await varIntToNumberOrThrow(readerStream) 859 | while (remainingBytes > 0) { 860 | const param = await moqReadKeyValuePair(readerStream, true) 861 | ret.push(param.val) 862 | 863 | remainingBytes = remainingBytes - param.byteLength 864 | } 865 | return ret 866 | } 867 | 868 | async function moqReadKeyValuePair(readerStream, isExtensionHeaders) { 869 | let param = {val: undefined, byteLength: 0} 870 | 871 | const name = await varIntToNumberAndLengthOrThrow(readerStream) 872 | param.byteLength = param.byteLength + name.byteLength 873 | 874 | if (name.num % 2 == 0) { // Even are followed by varint 875 | const intValue = await varIntToNumberAndLengthOrThrow(readerStream) 876 | param.byteLength = param.byteLength + intValue.byteLength 877 | param.val = moqCreateKvPair(name.num, intValue.num) 878 | } else { // Odd are followed by length and buffer 879 | const size = await varIntToNumberAndLengthOrThrow(readerStream) 880 | param.byteLength = param.byteLength + size.byteLength 881 | if ((name.num == MOQ_PARAMETER_AUTHORIZATION_TOKEN) && !isExtensionHeaders) { 882 | const token = await moqParseTokenBytes(readerStream, size.num) 883 | param.byteLength = param.byteLength + size.num 
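// A minimal sketch, not part of moqt.js itself: moqCreateKvParamBytes() and
// moqReadKeyValuePair() in this file rely on the key's parity: an even key is
// followed by a single varint value, an odd key by a length-prefixed string/buffer.
// Message parameters are framed with a pair count, while object extension headers
// are framed with a total byte length (which is why moqReadHeaderExtensions()
// counts consumed bytes). The key numbers in this hypothetical, never-called
// sketch are arbitrary examples.
function kvParityConventionSketch () {
  const evenKeyed = moqCreateKvPair(2, 500)     // even key -> value serialized as one varint
  const oddKeyed = moqCreateKvPair(3, 'token')  // odd key  -> value serialized as varint(length) + bytes
  return [evenKeyed, oddKeyed]
}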
884 | param.val = moqCreateKvPair(name.num, token) 885 | } else { 886 | const buffRet = await buffRead(readerStream, size.num) 887 | if (buffRet.eof) { 888 | throw new ReadStreamClosed(`Connection closed while reading data`) 889 | } 890 | param.byteLength = param.byteLength + size.num 891 | param.val = moqCreateKvPair(name.num, buffRet.buff) 892 | } 893 | } 894 | return param 895 | } 896 | 897 | function moqCreateUseValueTokenFromString(str) { 898 | return { aliasType: MOQ_TOKEN_USE_VALUE, tokenType: MOQ_TOKEN_TYPE_NEGOTIATED_OUT_OF_BAND, value: new TextEncoder().encode(str)} 899 | } 900 | 901 | function moqCreateTokenBytes(token) { 902 | const msg = []; 903 | 904 | if (token.aliasType != MOQ_TOKEN_USE_VALUE) { 905 | throw new Error('Only USE_VALUE token supported') 906 | } 907 | msg.push(numberToVarInt(token.aliasType)); 908 | 909 | if (token.tokenType != MOQ_TOKEN_TYPE_NEGOTIATED_OUT_OF_BAND) { 910 | throw new Error('Only TYPE_NEGOTIATED_OUT_OF_BAND token type supported') 911 | } 912 | msg.push(numberToVarInt(token.tokenType)); 913 | msg.push(token.value) // Already a buffer 914 | 915 | // Length 916 | const totalLength = getArrayBufferByteLength(msg); 917 | 918 | return concatBuffer([numberToVarInt(totalLength, MOQ_USE_LITTLE_ENDIAN), ...msg]) 919 | } 920 | 921 | async function moqParseTokenBytes (readerStream, total_size) { 922 | const token = {} 923 | let remaining_size = total_size 924 | const read_data_aliasType = await varIntToNumberAndLengthOrThrow(readerStream) 925 | token.aliasType = read_data_aliasType.num 926 | remaining_size = remaining_size - read_data_aliasType.byteLength 927 | if (token.aliasType != MOQ_TOKEN_USE_VALUE) { 928 | throw new Error('Only USE_VALUE token supported') 929 | } 930 | const read_data_tokenType = await varIntToNumberAndLengthOrThrow(readerStream) 931 | token.tokenType = read_data_tokenType.num 932 | remaining_size = remaining_size - read_data_tokenType.byteLength 933 | if (token.tokenType != MOQ_TOKEN_TYPE_NEGOTIATED_OUT_OF_BAND) { 934 | throw new Error('Only TYPE_NEGOTIATED_OUT_OF_BAND token type supported') 935 | } 936 | 937 | if (remaining_size > 0) { 938 | const buffRet = await buffRead(readerStream, remaining_size) 939 | if (buffRet.eof) { 940 | throw new ReadStreamClosed(`Connection closed while reading data`) 941 | } 942 | token.value = buffRet.buff 943 | } else if (remaining_size < 0) { 944 | throw new Error('Corrupted token size') 945 | } 946 | 947 | return token 948 | } 949 | 950 | async function moqSendToStream(writerStream, dataBytes, closeStream) { 951 | const writer = writerStream.getWriter() 952 | await moqSendToWriter(writer, dataBytes, closeStream) 953 | await writer.ready 954 | writer.releaseLock() 955 | } 956 | 957 | async function moqSendToWriter(writer, dataBytes, closeStream) { 958 | return writer.write(dataBytes) 959 | .then(() => { 960 | if (closeStream) { 961 | return writer.close() 962 | } else { 963 | return Promise.resolve() 964 | } 965 | }) 966 | } 967 | 968 | export function getFullTrackName(ns, name) { 969 | return `[${ns.join("/")}]/${name}` 970 | } 971 | 972 | export function getAuthInfofromParameters(parameters) { 973 | let ret = undefined 974 | let i = 0 975 | while (ret == undefined && i < parameters.length) { 976 | const param = parameters[i] 977 | if (param.name == MOQ_PARAMETER_AUTHORIZATION_TOKEN) { 978 | const token = param.val 979 | if (token.aliasType == MOQ_TOKEN_USE_VALUE && token.tokenType == MOQ_TOKEN_TYPE_NEGOTIATED_OUT_OF_BAND) { 980 | ret = new TextDecoder().decode(token.value); 981 | } 982 | } 983 | 
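// A minimal usage sketch, not part of moqt.js itself: getAuthInfofromParameters()
// in this file turns a parsed AUTHORIZATION_TOKEN parameter (alias type USE_VALUE,
// token type NEGOTIATED_OUT_OF_BAND) back into the string the sender packed with
// moqCreateUseValueTokenFromString(). A publisher could use it to gate a parsed
// SUBSCRIBE; parsedSubscribe and expectedAuthInfo are placeholders.
function subscribeAuthCheckSketch (parsedSubscribe, expectedAuthInfo) {
  const authInfo = getAuthInfofromParameters(parsedSubscribe.parameters)
  // authInfo is undefined when no auth token parameter was sent, so that case fails too
  return authInfo === expectedAuthInfo
}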
i++ 984 | } 985 | return ret 986 | } 987 | 988 | export function moqDecodeDatagramType(type) { 989 | if (!isMoqObjectDatagramType(type)) { 990 | throw new Error(`No valid datagram type ${type}, it can NOT be decoded`) 991 | } 992 | const ret = { isStatus: false, extensionsPresent: false, isEndOfGroup: false } 993 | if (type >= MOQ_MESSAGE_OBJECT_DATAGRAM_STATUS_MIN && type <= MOQ_MESSAGE_OBJECT_DATAGRAM_STATUS_MAX) { 994 | ret.isStatus = true 995 | } else { 996 | if (type == 0x2 || type == 0x3) { 997 | ret.isEndOfGroup = true 998 | } 999 | } 1000 | if ((type & 0x1) > 0) { 1001 | ret.extensionsPresent = true 1002 | } 1003 | return ret 1004 | } 1005 | 1006 | export function isMoqObjectDatagramType(type) { 1007 | let ret = false 1008 | if ((type >= MOQ_MESSAGE_OBJECT_DATAGRAM_MIN && type <= MOQ_MESSAGE_OBJECT_DATAGRAM_MAX) || (type >= MOQ_MESSAGE_OBJECT_DATAGRAM_STATUS_MIN && type <= MOQ_MESSAGE_OBJECT_DATAGRAM_STATUS_MAX)) { 1009 | ret = true 1010 | } 1011 | return ret 1012 | } 1013 | 1014 | export function isMoqObjectStreamHeaderType(type) { 1015 | let ret = false 1016 | if (type >= MOQ_MESSAGE_STREAM_HEADER_SUBGROUP_MIN && type <= MOQ_MESSAGE_STREAM_HEADER_SUBGROUP_MAX) { 1017 | ret = true 1018 | } 1019 | return ret 1020 | } 1021 | 1022 | function getDatagramType(isStatus, hasExternsionHeaders, isEndOfGroup) { 1023 | let type = 0 1024 | 1025 | if (isStatus) { 1026 | type = 0x20 1027 | } else { 1028 | if (isEndOfGroup) { 1029 | type = 0x2 1030 | } else { 1031 | type = 0x1 1032 | } 1033 | } 1034 | if (hasExternsionHeaders) { 1035 | type = type | 0x1 1036 | } 1037 | return type 1038 | } 1039 | 1040 | function getSubgroupHeaderType(extensionsPresent, isEndOfGroup, subGroupIdPresent, isSubgroupIdFirstObjectId) { 1041 | let type = 0x10 1042 | if (isEndOfGroup) { 1043 | type = type | 0x8 1044 | } 1045 | if (subGroupIdPresent) { 1046 | type = type | 0x4 1047 | } 1048 | if (isSubgroupIdFirstObjectId) { 1049 | type = type | 0x2 1050 | } 1051 | if (extensionsPresent) { 1052 | type = type | 0x1 1053 | } 1054 | if (type == 0x16 || type == 0x17 || type > 0x1d) { 1055 | throw new Error(`Subgroup header to create type ${type} does not make sense`) 1056 | } 1057 | return type 1058 | } 1059 | 1060 | export function moqDecodeStreamHeaderType(type) { 1061 | if (!isMoqObjectStreamHeaderType(type)) { 1062 | throw new Error(`No valid stream header type ${type}, it can NOT be decoded`) 1063 | } 1064 | if (type == 0x16 || type == 0x17 || type > 0x1d) { 1065 | throw new Error(`Subgroup received header type ${type} does not make sense`) 1066 | } 1067 | const ret = { extensionsPresent: false, isEndOfGroup: false, subGroupIdPresent: false, isSubgroupIdFirstObjectId: false } 1068 | if ((type & 0x1) > 0) { 1069 | ret.extensionsPresent = true 1070 | } 1071 | if ((type & 0x2) > 0) { 1072 | ret.isSubgroupIdFirstObjectId = true 1073 | } 1074 | if ((type & 0x4) > 0) { 1075 | ret.subGroupIdPresent = true 1076 | } 1077 | if ((type & 0x8) > 0) { 1078 | ret.isEndOfGroup = true 1079 | } 1080 | return ret 1081 | } --------------------------------------------------------------------------------
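// A minimal standalone sketch, not part of the repo: the subgroup stream header type
// byte packs four flags on top of a 0x10 base, which is why
// moqCreateSubgroupHeaderBytes() in utils/moqt.js builds type
// 0x15 = 0x10 | 0x04 (subgroup id present) | 0x01 (extensions present).
// The import path is an assumption about where this snippet would live relative to the repo root.
import { moqDecodeStreamHeaderType } from './utils/moqt.js'

console.log(moqDecodeStreamHeaderType(0x15))
// Assuming 0x15 lies in the MOQ_MESSAGE_STREAM_HEADER_SUBGROUP_MIN/MAX range used by the file, this prints:
// { extensionsPresent: true, isEndOfGroup: false, subGroupIdPresent: true, isSubgroupIdFirstObjectId: false }
// Datagram object types follow the same idea via getDatagramType()/moqDecodeDatagramType(),
// with bit 0x01 again meaning "extension headers present".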