├── .eslintrc.json
├── .gitignore
├── .npmignore
├── LICENSE
├── README.md
├── package.json
├── src
│   ├── collector.js
│   ├── engine.js
│   ├── exporter.js
│   ├── extractor.js
│   ├── index.js
│   ├── live.js
│   ├── probe.js
│   └── utils
│       ├── config.js
│       ├── helper.js
│       ├── log.js
│       ├── models.js
│       ├── rules.js
│       └── score.js
├── webpack.config-dev.js
├── webpack.config-prod.js
└── yarn.lock
/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "env": {
3 | "browser": true,
4 | "es2020": true
5 | },
6 | "extends": [
7 | "airbnb-base"
8 | ],
9 | "parserOptions": {
10 | "ecmaVersion": 11,
11 | "sourceType": "module"
12 | },
13 | "rules": {
14 | "quotes": [
15 | 1,
16 | "double",
17 | "avoid-escape"
18 | ],
19 | "indent": [
20 | 0,
21 | 4
22 | ],
23 | "no-console": [
24 | 0
25 | ],
26 | "no-underscore-dangle": [
27 | 0
28 | ],
29 | "class-methods-use-this": [
30 | 0
31 | ],
32 | "import/prefer-default-export": [
33 | 0
34 | ],
35 | "max-len": [
36 | 0,
37 | 80
38 | ],
39 | "no-case-declarations": [
40 | 0
41 | ],
42 | "operator-linebreak": [
43 | 0
44 | ],
45 | "import/no-extraneous-dependencies": [
46 | 0
47 | ],
48 | "import/extensions": [
49 | 0
50 | ],
51 | "no-await-in-loop": [
52 | 0
53 | ],
54 | "no-restricted-syntax": [
55 | 0
56 | ],
57 | "no-param-reassign": [
58 | 0
59 | ],
60 | "no-promise-executor-return": [
61 | 0
62 | ]
63 | },
64 | "ignorePatterns": [
65 | "node_modules/*"
66 | ]
67 | }
68 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | lerna-debug.log*
8 |
9 | # Diagnostic reports (https://nodejs.org/api/report.html)
10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
11 |
12 | # Runtime data
13 | pids
14 | *.pid
15 | *.seed
16 | *.pid.lock
17 |
18 | # Directory for instrumented libs generated by jscoverage/JSCover
19 | lib-cov
20 |
21 | # Coverage directory used by tools like istanbul
22 | coverage
23 | *.lcov
24 |
25 | # nyc test coverage
26 | .nyc_output
27 |
28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
29 | .grunt
30 |
31 | # Bower dependency directory (https://bower.io/)
32 | bower_components
33 |
34 | # node-waf configuration
35 | .lock-wscript
36 |
37 | # Compiled binary addons (https://nodejs.org/api/addons.html)
38 | build/Release
39 |
40 | # Dependency directories
41 | node_modules/
42 | jspm_packages/
43 |
44 | # TypeScript v1 declaration files
45 | typings/
46 |
47 | # TypeScript cache
48 | *.tsbuildinfo
49 |
50 | # Optional npm cache directory
51 | .npm
52 |
53 | # Optional eslint cache
54 | .eslintcache
55 |
56 | # Microbundle cache
57 | .rpt2_cache/
58 | .rts2_cache_cjs/
59 | .rts2_cache_es/
60 | .rts2_cache_umd/
61 |
62 | # Optional REPL history
63 | .node_repl_history
64 |
65 | # Output of 'npm pack'
66 | *.tgz
67 |
68 | # Yarn Integrity file
69 | .yarn-integrity
70 |
71 | # dotenv environment variables file
72 | .env
73 | .env.test
74 |
75 | # parcel-bundler cache (https://parceljs.org/)
76 | .cache
77 |
78 | # Next.js build output
79 | .next
80 |
81 | # Nuxt.js build / generate output
82 | .nuxt
83 | dist
84 |
85 | # Gatsby files
86 | .cache/
87 | # Comment in the public line in if your project uses Gatsby and *not* Next.js
88 | # https://nextjs.org/blog/next-9-1#public-directory-support
89 | # public
90 |
91 | # vuepress build output
92 | .vuepress/dist
93 |
94 | # Serverless directories
95 | .serverless/
96 |
97 | # FuseBox cache
98 | .fusebox/
99 |
100 | # DynamoDB Local files
101 | .dynamodb/
102 |
103 | # TernJS port file
104 | .tern-port
105 |
106 | .idea/
107 |
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | src
2 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 Olivier Anguenot
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # WEBRTC METRICS & STATS
2 |
3 | **WebRTCMetrics** is a JavaScript library that aggregates stats received from several `RTCPeerConnection` objects and
4 | generates live JSON reports during a call as well as a **Call Detail Record** (CDR) at the end of the call summarizing
5 | the main statistics captured.
6 |
7 | **WebRTCMetrics** is based on the WebRTC `getStats` API and collects statistics using **probes**. Each probe is
8 | associated with an `RTCPeerConnection`. A probe collects statistics from all streams of that `RTCPeerConnection`.
9 |
10 | ## Install
11 |
12 | Using NPM
13 |
14 | ```bash
15 | $ npm install webrtcmetrics
16 | ```
17 |
18 | Using Yarn
19 |
20 | ```bash
21 | $ yarn add webrtcmetrics
22 | ```
23 |
24 | ## Usage
25 |
26 | ### Create a new instance
27 |
28 | A new WebRTCMetrics instance is created by calling the constructor. A JSON configuration can be passed to define
29 | the main characteristics of the statistics collection.
30 |
31 | ```javascript
32 | import WebRTCMetrics from "webrtcmetrics";
33 |
34 | // Define your configuration
35 | const configuration = {
36 | refreshEvery: 3000, // Optional. Refresh every 3 seconds
37 | startAfter: 5000, // Optional. Start collecting stats after 5 seconds
38 | stopAfter: 30000, // Optional. Stop collecting stats after 30 seconds
39 | verbose: false, // Optional. Display verbose logs or not
40 | silent: true, // Optional. No log at all if set to true
41 | };
42 |
43 | const metrics = new WebRTCMetrics(configuration);
44 | ```
45 |
46 | As defined in that sample, the following parameters can be configured:
47 |
48 | - `refreshEvery`: Number. Contains the duration to wait (in milliseconds) before collecting a new set of statistics.
49 | Default value is **2000**.
50 |
51 | - `startAfter`: Number. Contains the duration to wait (in milliseconds) before collecting the first set of statistics.
52 | Default value is **0**, which starts the collection immediately.
53 |
54 | - `stopAfter`: Number. Contains the duration to wait (in milliseconds) before stopping the collection of statistics. This
55 | duration starts after the `startAfter` duration. Default value is **-1**, which means that the statistics are collected
56 | until the function `stop()` is called.
57 |
58 | - `verbose`: Boolean. True for displaying verbose information in the logger such as the raw statistics coming
59 | from `getStats`. Default is **false**.
60 |
61 | - `silent`: Boolean. True to disable logs. When `silent` is set to `true`, the parameter `verbose` is ignored.
62 |
63 | _Note:_ The **configuration** parameter is optional.
64 |
65 | ### Create a new probe
66 |
67 | A **probe** collects the statistics associated with an `RTCPeerConnection`.
68 |
69 | To create a new probe, call the function `createProbe()` with a `RTCPeerConnection` instance to capture.
70 |
71 | ```javascript
72 | import WebRTCMetrics from "webrtcmetrics";
73 |
74 | // Should exist somewhere in your code
75 | const existingPeerConnection = new RTCPeerConnection(config);
76 |
77 | // Initialize the analyzer
78 | const metrics = new WebRTCMetrics();
79 |
80 | const probe = metrics.createProbe(existingPeerConnection, {
81 | pname: 'PeerConnection_1', // Optional. Name of the peer connection
82 | cid: 'call007984', // Optional. Call Id
83 | uid: 'jdoe@mycorp.com', // Optional. User Id
84 | ticket: true, // Optional. Generate a ticket at the end of the call or not.
85 | record: true, // Optional. Record reports in the ticket or not.
86 | passthrough: { "inbound-rtp": ["audioLevel"] } // Optional. Get any properties from the reports
87 | });
88 | ```
89 |
90 | _Note:_ The `RTCPeerConnection` parameter is mandatory whereas the `configuration` parameter is optional.
91 |
92 | ```typescript
93 | createProbe(
94 |   peerConnection: RTCPeerConnection,
95 |   configuration?: Object
96 | ): Probe
97 |
98 | ```
99 |
100 | The `configuration` parameter contains the following properties:
101 |
102 | - `pname`: String. Contains the name of the `RTCPeerConnection`. This is an arbitrary name that can be used to identify
103 | statistics received.
104 |
105 | - `cid`: String. Contains the identifier of the call. This is an arbitrary name that can be used to gather the
106 | statistics.
107 |
108 | - `uid`: String. Contains the identifier of the user. This is an arbitrary name that can be used to gather the
109 | statistics.
110 |
111 | - `ticket`: Boolean. True to generate a ticket when the collection of statistics is stopped. Default is **true**.
112 |
113 | - `record`: Boolean. True to attach all generated reports to the ticket. This allows access to each report individually
114 | after the call. Default is **false**.
115 |
116 | ### Probe lifecycle
117 |
118 | Once a probe has been created, it is ready to collect the statistics using **reports**. The application needs to listen
119 | to the event `onreport` to receive them.
120 |
121 | After the call, a **ticket** that summarizes all the reports received for a probe can be obtained by listening to the
122 | event `onticket`. Don't forget to set the property `ticket` to **true** in the configuration of the probe (see
123 | `createProbe()` above).
124 |
125 | ### Complete example
126 |
127 | ```javascript
128 | const probe = metrics.createProbe(existingPeerConnection, {
129 | pname: 'PeerConnection_1', // Optional. Name of the peer connection
130 | cid: 'call007984', // Optional. Call Id
131 | uid: 'jdoe@mycorp.com', // Optional. User Id
132 | ticket: true, // Optional. Generate a ticket at the end of the call or not.
133 | record: true, // Optional. Record reports in the ticket or not.
134 | });
135 |
136 | probe.onreport = (report) => {
137 | // Do something with a report collected (JSON)
138 | };
139 |
140 | probe.onticket = (ticket) => {
141 | // Do something with the ticket collected at the end of the call (JSON)
142 | };
143 |
144 | metrics.onresult = (result) => {
145 | // Do something with the global report collected (JSON)
146 | }
147 |
148 | // Start collecting statistics
149 | metrics.startAllProbes();
150 |
151 | // At any time, call ID and user ID can be updated
152 | probe.updateUserId('newUserID');
153 | probe.updateCallId('newCallID');
154 |
155 | // Once the call is finished, stop the analyzer when running
156 | if (metrics.running) {
157 | metrics.stopAllProbes();
158 | }
159 | ```
160 |
161 | ### Additional information
162 |
163 | The reports can be obtained by registering to the event `onreport`; this callback is called repeatedly, at an interval
164 | equal to the value of the `refreshEvery` parameter, with the generated **report**.
165 |
166 | If you don't want to capture the initial ramp-up of statistics but something more stable, you can specify a delay
167 | before receiving the metrics. By default, the stats are captured immediately. Depending on your needs, use the
168 | parameter `startAfter` to delay the capture.
169 |
170 | Stats can be captured during a defined period of time. To do that, set the parameter `stopAfter` to stop
171 | receiving reports after that duration, given in ms. If you want to capture statistics as long as the call is running,
172 | omit that parameter or set the value to `-1`. In that case, you will have to call the method `stop()` of the
173 | probe manually to stop the collector.
174 |
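As an illustration, here is a minimal sketch that combines both parameters to collect statistics from second 5 to second 35 of the call:

```javascript
import WebRTCMetrics from "webrtcmetrics";

// Start collecting 5s after startAllProbes() is called, then stop 30s later
const metrics = new WebRTCMetrics({
  startAfter: 5000,
  stopAfter: 30000,
});
```
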
175 | The first set of statistics collected (first report) is called the **reference report**. It is reported separately from
176 | the others (it can't be received in the `onreport` event) but is used for computing the statistics of the subsequent ones (for
177 | example **delta_packets_received**).
178 |
179 | _Note:_ The `report` and `ticket` parameters received from the events are JSON objects. See below for the content.
180 |
181 | ### Dealing with multiple streams in a probe
182 |
183 | An `RTCPeerConnection` can transport more than one audio or video stream (`MediaStreamTrack`). Statistics are collected
184 | per type of stream (audio or video) and per direction (inbound or outbound).
185 |
186 | Each report contains the live statistics of all streams. The ticket summarizes the statistics of all streams at the end
187 | of the call.
188 |
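Because each stream is keyed by its `ssrc`, a report can be walked stream by stream. Here is a minimal sketch, assuming at least one inbound audio stream is present:

```javascript
probe.onreport = (report) => {
  // report.audio and report.video are objects keyed by ssrc
  Object.values(report.audio).forEach((stream) => {
    if (stream.direction === "inbound") {
      console.log(`audio ${stream.ssrc}: mos=${stream.mos_in}, jitter=${stream.delta_jitter_ms_in}ms`);
    }
  });
};
```
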
189 | ### Creating multiple probes
190 |
191 | When connecting to a conference server such as an **SFU**, you can receive multiple `RTCPeerConnection` objects. You can
192 | collect statistics from each by creating as many probes as needed, one for each `RTCPeerConnection`.
193 |
194 | As the parameters **refreshEvery**, **startAfter** and **stopAfter** are common to all probes created, the statistics of
195 | all probes are collected one after the other, as close together as possible so that they can be compared. To avoid any
196 | ambiguity, each probe records its own `timestamp` when its stats are collected.
197 |
198 | ```javascript
199 | const probe1 = metrics.createProbe(pc1, {
200 | pname: 'pc_1', // Optional. Name of the peer connection
201 | ticket: true, // Optional. Generate a ticket at the end of the call or not.
202 | record: true, // Optional. Record reports in the ticket or not.
203 | });
204 |
205 | const probe2 = metrics.createProbe(pc2, {
206 | pname: 'pc_2', // Optional. Name of the peer connection
207 | ticket: true, // Optional. Generate a ticket at the end of the call or not.
208 | record: true, // Optional. Record reports in the ticket or not.
209 | });
210 |
211 | probe1.onticket = (result) => {
212 | // Do something with the ticket of probe 1
213 | }
214 |
215 | probe2.onticket = (result) => {
216 | // Do something with the ticket of probe 2
217 | }
218 |
219 | // Start all registered probes
220 | metrics.startAllProbes();
221 | ```
222 |
223 | ### Collecting stats from all probes
224 |
225 | Register to the event `onresult` on the metrics object created to get a global report that contains all probe reports
226 | as well as some global stats.
227 |
228 | _Note:_ This method is equivalent to registering to the event `onreport` on each probe individually.
229 |
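A minimal sketch of that callback; the global report exposes a `probes` array (one report per running probe) plus aggregated values such as `delta_kbs_in` and `delta_kbs_out`:

```javascript
metrics.onresult = (result) => {
  result.probes.forEach((report) => {
    console.log(`${report.pname}: report #${report.count} at ${report.timestamp}`);
  });
  console.log(`aggregated bitrate: ${result.delta_kbs_in} kb/s in, ${result.delta_kbs_out} kb/s out`);
};
```
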
230 | ## Report Statistics
231 |
232 | Each **report** collected from the event `onreport` contains the following statistics.
233 |
234 | ### Global statistics
235 |
236 | | Name | Value | Description |
237 | |:--------------|:------:|:--------------------------------------------------|
238 | | **pname** | String | Name of the Peer Connection given |
239 | | **call_id** | String | Identifier or abstract name representing the call |
240 | | **user_id** | String | Identifier or abstract name representing the user |
241 | | **timestamp** | Number | Timestamp of the metric collected |
242 | | **count** | Number | Number of the report |
243 |
244 | ### Audio statistics
245 |
246 | Audio statistics are gathered under the `audio` property, which is an object containing all the audio streams
247 | collected (inbound and outbound). Each stream is identified by its `ssrc`.
248 |
249 | Each **inbound audio stream** contains the following statistics:
250 |
251 | | Name | Value | Description |
252 | |:--------------------------------------|:------:|:---------------------------------------------------------------------------------------------------------------------------|
253 | | **codec_in** | JSON | Description of the audio input codec and parameters used |
254 | | **codec_id_in** | String | ID of the audio input codec used |
255 | | **delta_KBytes_in** | Number | Number of kilobytes (KB) received since the last report |
256 | | **delta_kbs_in** | Number | Number of kilobits received per second since the last report |
257 | | **delta_jitter_ms_in** | Number | Incoming Jitter (in ms) |
258 | | **delta_packets_lost_in** | Number | Number of packets lost (not received) since last report |
259 | | **delta_packets_in** | Number | Number of packets received since the last report |
260 | | **delta_rtt_ms_in** | Number | Round Trip-Time (in ms). Could be null when no value collected. |
261 | | **delta_synthetized_ms_in** | Number | Duration of synthetized voice since last report (in ms) |
262 | | **delta_playout_delay_ms_in** | Number | Delay of the playout path since last report (in ms) |
263 | | **delta_jitter_buffer_delay_ms_in** | Number | Average Jitter buffer delay (in ms) |
264 | | **direction** | String | Direction of the stream. "inbound" here. |
265 | | **level_in** | Number | Level of the input sound. Detect presence of incoming sound |
266 | | **mos_in** | Number | Audio quality indicator based on 'G.107' |
267 | | **percent_packets_lost_in** | Number | Percent of audio packet lost (not received) since the last report |
268 | | **percent_synthetized_in** | Number | Percent of voice packet synthetized (generated) since the last report |
269 | | **timestamp_in** | Number | Timestamp when report has been sent. Associated with **delta_rtt_ms_in**, **total_rtt_measure_in** and **total_rtt_ms_in** |
270 | | **total_KBytes_in** | Number | Number of kilobytes (KB) received since the beginning of the call |
271 | | **total_packets_lost_in** | Number | Number of packets lost (not received) since the beginning of the call |
272 | | **total_packets_in** | Number | Number of packets received since the beginning of the call |
273 | | **total_rtt_measure_in** | Number | Number of RTT measurements done |
274 | | **total_rtt_ms_in** | Number | Total Round Trip Time since the beginning of the call |
275 | | **total_playout_ms_in** | Number | Total duration of the playout since the beginning of the call (in ms) |
276 | | **total_synthetized_ms_in** | Number | Total duration of the synthetized voice since the beginning of the call (in ms) |
277 | | **total_percent_synthetized_in** | Number | Percent of voice packet synthetized (generated) since the beginning of the call |
278 | | **total_time_jitter_buffer_delay_in** | Number | Total time spent by all audio samples in jitter buffer (in ms) |
279 | | **total_jitter_emitted_in** | Number | Total number of audio samples that have come out the jitter buffer (in ms) |
280 | | **track_in** | String | The id of the associated mediastream track |
281 |
282 | _Note:_ `mos_in` reflects the quality of the audio media received using a rank from 1 (inaudible) to
283 | 4.5 (excellent). It is the quality the local user experienced during the call.
284 |
285 | Each **outbound audio stream** contains the following statistics:
286 |
287 | | Name | Value | Description |
288 | |:---------------------------------|:-------:|:----------------------------------------------------------------------------------------------------------|
289 | | **active_out** | Boolean | True if that stream is active (sending media) |
290 | | **codec_out** | JSON | Description of the audio output codec and parameters used |
291 | | **codec_id_out** | String | ID of the audio output codec used |
292 | | **delta_packet_delay_ms_out** | Number | Average duration spent by packets before being sent (in ms) |
293 | | **delta_KBytes_out** | Number | Number of kilobytes (KB) sent since last report |
294 | | **delta_kbs_out** | Number | Number of kbits sent per second since the last report |
295 | | **delta_jitter_ms_out** | Number | Outgoing Jitter (in ms) |
296 | | **delta_packets_lost_out** | Number | Number of packets lost (not received by the recipient) since last report |
297 | | **delta_packets_out** | Number | Number of packets sent since the last report |
298 | | **delta_rtt_ms_out** | Number | Round Trip-Time (in ms). Could be null when no value collected. |
299 | | **direction** | String | Direction of the stream. "outbound" here. |
300 | | **level_out** | Number | Level of the output sound. Detect presence of outgoing sound |
301 | | **mos_out**                       | Number  | Audio quality indicator based on 'G.107'                                                                   |
302 | | **percent_packets_lost_out** | Number | Percent of audio packet lost (not received by the recipient) since the last report |
303 | | **timestamp_out** | Number | Timestamp when report has been received. Associated with **delta_jitter_ms_out** and **delta_rtt_ms_out** |
304 | | **total_KBytes_out** | Number | Number of kilobytes (KB) sent since the beginning of the call |
305 | | **total_time_packets_delay_out** | Number | Total time spent for all packets before being sent (in ms) |
306 | | **total_packets_lost_out** | Number | Number of packets lost (not received by the recipient) since the beginning of the call |
307 | | **total_packets_out** | Number | Number of packets sent since the beginning of the call |
308 | | **total_rtt_measure_out** | Number | Number of RTT measurements done |
309 | | **total_rtt_ms_out** | Number | Total Round Trip Time since the beginning of the call |
310 | | **track_out**                    | String  | The id of the associated mediastream track                                                                 |
311 | | **device_out** | String | The label of the device associated to the **track_out** |
312 |
313 | _Note:_ `mos_out` reflects the quality of the audio media sent using a rank from 1 (inaudible) to
314 | 4.5 (excellent). It is not the quality the remote peer will experience but is a good indicator of the capacity of the
315 | local user to send the media, which helps detect a quality issue on the local side.
316 |
317 | ### Video statistics
318 |
319 | Video statistics are gathered under the `video` property, which is an object containing all the video streams
320 | collected (inbound and outbound). Each stream is identified by its `ssrc`.
321 |
322 | Each **inbound video stream** contains the following statistics:
323 |
324 | | Name | Value | Description |
325 | |:--------------------------------------|:------:|:----------------------------------------------------------------------------------------------------------|
326 | | **decoder_in** | String | Description of the video decoder used |
327 | | **delta_KBytes_in** | Number | Number of kilobytes (KB) received since the last report |
328 | | **delta_kbs_in** | Number | Number of kbits received per second since the last report |
329 | | **delta_jitter_ms_in** | Number | Incoming Jitter (in ms). Could be null when no value collected |
330 | | **delta_glitch_in** | JSON | Number of freezes and pauses encountered since the last report |
331 | | **delta_decode_frame_ms_in** | Number | Time needed to decode a frame (in ms) |
332 | | **delta_processing_delay_ms_in** | Number | Time needed to process a frame (in ms) |
333 | | **delta_assembly_delay_ms_in** | Number | Time needed to assemble a frame (in ms) |
334 | | **delta_jitter_buffer_delay_ms_in** | Number | Average Jitter buffer delay (in ms) |
335 | | **delta_nack_sent_in** | Number | Nack sent since the last report |
336 | | **delta_packets_lost_in** | Number | Number of packets lost (not received) since last report |
337 | | **delta_packets_in** | Number | Number of packets received since the last report |
338 | | **delta_pli_sent_in** | Number | Pli sent since the last report |
339 | | **codec_in** | JSON | Description of the video input codec and parameters used |
340 | | **codec_id_in** | String | ID of the video input codec used |
341 | | **size_in** | Number | Size of the input video (from remote peer) + framerate |
342 | | **percent_packets_lost_in**           | Number | Percent of video packets lost (not received) since the last report                                         |
343 | | **total_KBytes_in** | Number | Number of kilobytes (KB) received since the beginning of the call |
344 | | **total_frames_decoded_in** | Number | Total of frames decoded |
345 | | **total_glitch_in** | JSON | Number of freezes and pauses encountered since the beginning of the call |
346 | | **total_nack_sent_in** | Number | Total nack sent since the beginning of the call |
347 | | **total_packets_lost_in** | Number | Number of packets lost (not received) since the beginning of the call |
348 | | **total_packets_in** | Number | Number of packets received since the beginning of the call |
349 | | **total_pli_sent_in** | Number | Total pli sent since the beginning of the call |
350 | | **total_time_decoded_in** | Number | Total time used for decoding all frames (in ms) |
351 | | **total_time_processing_delay_in** | Number | Total time used for processing all frames (in ms) |
352 | | **total_time_assembly_delay_in** | Number | Total time used for assembling all frames (in ms) |
353 | | **total_time_jitter_buffer_delay_in** | Number | Total time spent by all frames in jitter buffer (in ms) |
354 | | **total_jitter_emitted_in** | Number | Total number of frames that have come out the jitter buffer (in ms) |
355 | | **timestamp_out** | Number | Timestamp when report has been received. Associated with **delta_jitter_ms_out** and **delta_rtt_ms_out** |
356 | | **track_in**                          | String | The id of the associated mediastream track                                                                 |
357 |
358 | Each **outbound video stream** contains the following statistics:
359 |
360 | | Name | Value | Description |
361 | |:---------------------------------|:-------:|:----------------------------------------------------------------------------------------------------------|
362 | | **active_out** | Boolean | True if that stream is active (sending media) |
363 | | **codec_out** | JSON | Description of the video output codec and parameters used |
364 | | **codec_id_out** | String | ID of the video output codec used |
365 | | **delta_packet_delay_ms_out** | Number | Average duration spent by packets before being sent (in ms) |
366 | | **delta_KBytes_out** | Number | Number of kilobytes (KB) sent since last report |
367 | | **delta_kbs_out** | Number | Number of kbits sent per second since the last report |
368 | | **delta_jitter_ms_out** | Number | Outgoing Jitter (in ms). Could be null when no value collected. |
369 | | **delta_packets_lost_out** | Number | Number of packets lost (not received by the recipient) since last report |
370 | | **delta_encode_frame_ms_out** | Number | Time needed to encode a frame |
371 | | **delta_nack_received_out** | Number | Nack received since the last report |
372 | | **delta_pli_received_out** | Number | Pli received since the last report |
373 | | **delta_rtt_ms_out** | Number | Round Trip-Time (in ms). Could be null when no value collected. |
374 | | **encoder_out** | String | Description of the video encoder used |
375 | | **size_out** | Object | Size of the output video sent + framerate (could be lower than the size asked) |
376 | | **size_pref_out** | Object | Size of the output video asked + framerate |
377 | | **percent_packets_lost_out**     | Number  | Percent of video packets lost (not received by the recipient) since the last report                        |
378 | | **limitation_out** | Object | Object containing the reason and the durations spent in each state |
379 | | **total_KBytes_out** | Number | Number of kilobytes (KB) sent since the beginning of the call |
380 | | **total_time_packets_delay_out** | Number | Total time spent for all packets before being sent (in ms) |
381 | | **total_packets_lost_out** | Number | Number of packets lost (not received by the recipient) since the beginning of the call |
382 | | **total_frames_encoded_out** | Number | Total of frames encoded |
383 | | **total_nack_received_out** | Number | Total nack received since the beginning of the call |
384 | | **total_pli_received_out** | Number | Total pli received since the beginning of the call |
385 | | **total_rtt_measure_out** | Number | Number of RTT measurements done |
386 | | **total_rtt_ms_out** | Number | Total Round Trip Time since the beginning of the call |
387 | | **total_time_encoded_out** | Number | Total time used for encoding all frames |
388 | | **timestamp_out** | Number | Timestamp when report has been received. Associated with **delta_jitter_ms_out** and **delta_rtt_ms_out** |
389 | | **track_out** | String | The id of the mediastream track associated |
390 | | **device_out** | String | The label of the device associated to the **track_out** |
391 |
392 | ### Network properties
393 |
394 | | Name | Value | Description |
395 | |:-----------------------------------|:------:|:------------------------------------------------------------------------|
396 | | **infrastructure**                 | Number | Infrastructure level (0: Eth, 3: Wifi, 5: 4G, 10: 3G). (Deprecated)       |
397 | | **local_candidate_id** | String | ID of the local candidate used |
398 | | **local_candidate_protocol** | String | Protocol used (udp, tcp) |
399 | | **local_candidate_relay_protocol** | String | Protocol used when relayed with TURN (udp, tcp, tls) |
400 | | **local_candidate_type** | String | Type of candidate used (host, relay, srflx) |
401 | | **remote_candidate_id** | String | ID of the remote candidate used |
402 | | **remote_candidate_protocol** | String | Protocol used (udp, tcp) |
403 | | **remote_candidate_type** | String | Type of candidate used (host, relay, srflx) |
404 |
405 | ### Data properties
406 |
407 | These stats are collected from the candidate-pair stats.
408 |
409 | | Name | Value | Description |
410 | |:-----------------------------------|:------:|:--------------------------------------------------------------------------------------|
411 | | **delta_KBytes_in** | Number | Number of kilobytes (KB) received since the last report (audio+video) |
412 | | **delta_KBytes_out** | Number | Number of kilobytes (KB) sent since last report (audio+video) |
413 | | **delta_kbs_bandwidth_in** | Number | Available incoming bitrate in kb/s (audio+video) |
414 | | **delta_kbs_bandwidth_out**        | Number | Available outgoing bitrate in kb/s (audio+video)                                        |
415 | | **delta_kbs_in** | Number | Number of kbits received per second since the last report (audio+video) |
416 | | **delta_kbs_out** | Number | Number of kbits sent per second since the last report (audio+video) |
417 | | **delta_rtt_connectivity_ms** | Number | Round Trip-Time (in ms) computed from STUN connectivity checks |
418 | | **total_KBytes_in** | Number | Number of kilobytes (KB) received since the beginning of the call (audio+video) |
419 | | **total_KBytes_out** | Number | Number of kilobytes (KB) sent since the beginning of the call (audio+video) |
420 | | **total_rtt_connectivity_measure** | Number | Number of RTT measurements done (from STUN connectivity checks) |
421 | | **total_rtt_connectivity_ms** | Number | Total Round Trip Time since the beginning of the call (from STUN connectivity checks) |
422 |
423 | ### Experimental
424 |
425 | These stats are subject to change in the future.
426 |
427 | | Name | Value | Description |
428 | |:----------------------:|:------:|:-----------------------------------------------------------------------------------------------|
429 | | **time_to_measure_ms** | Number | Time (ms) to measure a probe, i.e. the time to collect plus the time to compute the stats        |
430 |
431 | ## Stop reporting
432 |
433 | At any time, calling the method `stop()` stops collecting statistics on that probe. No further reports are received.
434 |
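For example, to stop a single probe or all registered probes:

```javascript
// Stop a single probe
probe.stop();

// Or stop every registered probe at once
if (metrics.running) {
  metrics.stopAllProbes();
}
```
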
435 | ## Generating a ticket
436 |
437 | When calling the method `stop()` or automatically after a duration equal to `stopAfter`, a ticket is generated with the
438 | most important information collected. This ticket is generated only if the option `ticket` has not been manually set
439 | to `false`.
440 |
441 | To obtain that ticket, subscribe to the event `onticket`. The callback is fired when the probe is stopped (i.e. by
442 | calling the method `stop()`) or after the `stopAfter` duration. The callback is called with a JSON parameter corresponding to
443 | something like a **CDR**.
444 |
445 | If the option `record` has been set to `true`, the ticket contains all the reports generated.
446 |
447 | The ticket generated contains the following information:
448 |
449 | | Name | Value | Description |
450 | |:------------------|:------:|:-------------------------------------------------------------|
451 | | **call** | Object | Contains the `call_id` and the `events` related to the call |
452 | | **configuration** | Object | Contains some configuration parameters such as the frequency |
453 | | **data** | Object | Contains the global statistics of the call |
454 | | **details** | Object | Contains the list of reports as well as the reference report |
455 | | **ended** | Date | End date of the ticket |
456 | | **ssrc** | Object | Contains the list of all statistics for all streams |
457 | | **started** | Date | Start date of the ticket |
458 | | **ua** | Object | Contains the `ua`, the `pname` and the `user_id` |
459 | | **version** | String | The version of the ticket format |
460 |
461 | Each **SSRC** is an object containing the following statistics:
462 |
463 | | Name | Value | Description |
464 | |:----------------|:------:|:---------------------------------------------------------------------------------------|
465 | | **direction** | String | The direction of the stream. Can be `inbound` or `outbound` |
466 | | **type** | String | The type of the stream. Can be `audio` or `video` |
467 | | **bitrate** | Object | `min`, `max`, `avg`, `values` and `volatility` for Bitrate |
468 | | **jitter** | Object | `min`, `max`, `avg`, `values` and `volatility` for Jitter |
469 | | **loss** | Object | `total`, `min`, `max`, `avg`, `values` and `volatility` for Packets Loss |
470 | | **rtt** | Object | (Outbound only) `min`, `max`, `avg`, `values` and `volatility` for Round Trip Time |
471 | | **mos** | Object | (Audio only) `min`, `max`, `avg`, `values` and `volatility` |
472 | | **traffic** | Object | `min`, `max`, `avg`, `values` and `volatility` for Traffic |
473 | | **limitations** | Object | (Video outbound only) `bandwidth`, `cpu`, `other`, `none` for Limitations (in percent) |
474 |
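A minimal sketch of reading those per-stream summaries from a ticket, assuming at least one stream was collected:

```javascript
probe.onticket = (ticket) => {
  Object.keys(ticket.ssrc).forEach((ssrc) => {
    const stream = ticket.ssrc[ssrc];
    console.log(`${stream.type}/${stream.direction} ${ssrc}: jitter avg=${stream.jitter.avg}, packets lost=${stream.loss.total}`);
  });
};
```
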
475 | ## PassThrough
476 |
477 | **WebRTCMetrics** allows capturing any property from the underlying reports generated by the WebRTC stack (aka the
478 | getStats API).
479 |
480 | ### Basic usage
481 |
482 | To do that, you need to know which report the property belongs to, and use the key `passthrough` to give it to
483 | **WebRTCMetrics**.
484 |
485 | Here is an example that captures the audio level for any incoming stream as well as for the local source used:
486 |
487 | ```js
488 | probe1 = metrics.createProbe(pc1, {
489 | pname: 'pc-bob-1', // Name of the peer connection (Optional)
490 | cid: 'call007984', // Call Id (Optional)
491 | uid: 'Bob', // User Id (Optional)
492 | passthrough: {
493 | "inbound-rtp": ["audioLevel"],
494 | "media-source": ["audioLevel"]
495 | }
496 | });
497 | ```
498 |
499 | The result will be added to each report in the following way:
500 |
501 | ```json
502 | {
503 | "passthrough": {
504 | "audioLevel": {
505 | "inbound-rtp_audio=3691646660": 0.09140293588061159,
506 | "media-source_audio=4252341231": 0.02352323323412
507 | }
508 | }
509 | }
510 | ```
511 |
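A minimal sketch of reading those passthrough values from a report (the keys and SSRC values shown above are illustrative):

```javascript
probe.onreport = (report) => {
  const levels = report.passthrough.audioLevel || {};
  Object.entries(levels).forEach(([source, level]) => {
    // source looks like "inbound-rtp_audio=<ssrc>" or "media-source_audio=<ssrc>"
    console.log(`${source}: ${level}`);
  });
};
```
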
512 | ### Advanced usage (units)
513 |
514 | Starting with version 5.4, you can use tags to collect a property directly in the right unit.
515 |
516 | For that, use the tag `ms` to get milliseconds, or `kbits` to get kilobits instead of bytes.
517 |
518 | ```js
519 | probe1 = metrics.createProbe(pc1, {
520 | pname: 'pc-bob-1', // Name of the peer connection (Optional)
521 | cid: 'call007984', // Call Id (Optional)
522 | uid: 'Bob', // User Id (Optional)
523 | passthrough: {
524 | "inbound-rtp": ["bytesReceived.kbits"],
525 | "remote-inbound": ["jitter.ms"]
526 | }
527 | });
528 | ```
529 |
530 | Some metrics are cumulative; if you want a value per second, use the tag `ps`.
531 |
532 | ```js
533 | probe1 = metrics.createProbe(pc1, {
534 | pname: 'pc-bob-1', // Name of the peer connection (Optional)
535 | cid: 'call007984', // Call Id (Optional)
536 | uid: 'Bob', // User Id (Optional)
537 | passthrough: {
538 | "inbound-rtp": ["ps:bytesReceived.kbits"],
539 | "remote-inbound": ["jitter.ms"]
540 | }
541 | });
542 | ```
543 |
544 | ### Advanced usage (computation)
545 |
546 | Starting with version 5.4, you can perform computations on the collected properties.
547 |
548 | For that, specify the properties to use and the operator.
549 |
550 | ```js
551 | probe1 = metrics.createProbe(pc1, {
552 | pname: 'pc-bob-1', // Name of the peer connection (Optional)
553 | cid: 'call007984', // Call Id (Optional)
554 | uid: 'Bob', // User Id (Optional)
555 | passthrough: {
556 | "remote-inbound-rtp": [
557 | "[totalRoundTripTime/roundTripTimeMeasurements]"
558 | ],
559 | "inbound-rtp": [
560 | "[framesDecoded-keyFramesDecoded]",
561 | "[totalDecodeTime/framesDecoded]",
562 | "[pauseCount+freezeCount]",
563 | "[totalFreezesDuration+totalPausesDuration]"
564 | ]
565 | }
566 | });
567 | ```
568 |
569 | The following operators are supported: `/`, `+`, `-`, `*`, but only one kind of operator can be used in a formula.
570 | A formula can contain more than two properties, as illustrated below.
571 |
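For instance, the following formulas illustrate that constraint (the property combinations are chosen for illustration only):

```javascript
// Valid: a single operator kind per formula ('/' here)
const valid = "[totalRoundTripTime/roundTripTimeMeasurements]";

// Valid: more than two properties, still a single operator kind ('-')
const alsoValid = "[framesReceived-framesDecoded-framesDropped]";

// Not supported: mixing '/' and '-' in the same formula
const notSupported = "[totalDecodeTime/framesDecoded-keyFramesDecoded]";
```
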
572 | ## Additional information
573 |
574 | ### Callbacks
575 |
576 | Setting `onreport`, `onticket` or `onresult` to `null` unregisters the previously registered callback.
577 |
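For example:

```javascript
// Stop receiving reports for this probe only
probe.onreport = null;

// Stop receiving the global results
metrics.onresult = null;
```
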
578 | ### Probes
579 |
580 | You can get the list of available probes by using the `probes` accessor.
581 |
582 | ```javascript
583 | import WebRTCMetrics from "webrtcmetrics";
584 |
585 | const metrics = new WebRTCMetrics();
586 |
587 | metrics.createProbe(firstPeerConnection);
588 | metrics.createProbe(secondPeerConnection);
589 |
590 | // Get the list of existing probes
591 | const probes = metrics.probes;
592 | ```
593 |
594 | Probes can be started and stopped all together.
595 |
596 | ```javascript
597 | import WebRTCMetrics from "webrtcmetrics";
598 |
599 | const metrics = new WebRTCMetrics();
600 |
601 | metrics.createProbe(firstPeerConnection);
602 | metrics.createProbe(secondPeerConnection);
603 |
604 | // Start all probes
605 | metrics.startAllProbes();
606 |
607 | // Stop all probes
608 | metrics.stopAllProbes();
609 | ```
610 |
611 | ### Events and custom events
612 |
613 | Each probe records some WebRTC events related to the `RTCPeerConnection` or to the devices used. These events are
614 | collected and available in the **ticket** report.
615 |
616 | In addition to these events, **custom events** can be recorded too.
617 |
618 | ```javascript
619 | import WebRTCMetrics from "webrtcmetrics";
620 |
621 | const metrics = new WebRTCMetrics();
622 |
623 | const probe = metrics.createProbe(firstPeerConnection);
624 |
625 | // ssrc is optional but can be used to link events together. Null by default.
626 | const ssrc = null;
627 |
628 | // Data can be any Object
629 | const data = { custom: "data" };
630 |
631 | // At any time, for storing an event
632 | probe.addCustomEvent('an event', 'a category', 'a description of the event', new Date(), ssrc, data);
633 |
634 | // At any time, for storing a period
635 | probe.addCustomEvent('an event', 'a category', 'a description of the event', new Date(), ssrc, data, new Date());
636 | ```
637 |
638 | ### Setting the logs level
639 |
640 | The log level can be set in two different ways:
641 |
642 | - When initializing the library, by using the `verbose` flag in the configuration object.
643 |
644 | - By using the method `setupLogLevel`
645 |
646 | ```javascript
647 | import WebRTCMetrics from "webrtcmetrics";
648 |
649 | const metrics = new WebRTCMetrics();
650 | metrics.setupLogLevel('SILENT');
651 | ```
652 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "webrtcmetrics",
3 | "version": "5.5.0",
4 | "description": "WebRTC stats library",
5 | "main": "dist/WebRTCMetrics.js",
6 | "scripts": {
7 | "test": "echo \"Error: no test specified\" && exit 1",
8 | "prebuild": "rimraf dist",
9 | "build:dev": "webpack --config webpack.config-dev.js -w",
10 | "build:prod": "webpack --config webpack.config-prod.js"
11 | },
12 | "repository": {
13 | "type": "git",
14 | "url": "git+https://github.com/oanguenot/webrtc-stats.git"
15 | },
16 | "keywords": [
17 | "WebRTC",
18 | "stats"
19 | ],
20 | "files": [
21 | "dist",
22 | "README.md",
23 | "LICENSE.md"
24 | ],
25 | "author": "Olivier Anguenot ",
26 | "license": "MIT",
27 | "bugs": {
28 | "url": "https://github.com/oanguenot/webrtc-stats/issues"
29 | },
30 | "homepage": "https://github.com/oanguenot/webrtc-stats#readme",
31 | "devDependencies": {
32 | "@babel/cli": "7.23.0",
33 | "@babel/core": "7.23.2",
34 | "@babel/preset-env": "7.23.2",
35 | "@babel/register": "7.22.15",
36 | "babel-loader": "9.1.3",
37 | "eslint": "8.51.0",
38 | "eslint-config-airbnb-base": "15.0.0",
39 | "eslint-plugin-import": "2.28.1",
40 | "eslint-webpack-plugin": "4.0.1",
41 | "prettier": "3.0.3",
42 | "rimraf": "5.0.5",
43 | "webpack": "5.89.0",
44 | "webpack-cli": "5.1.4"
45 | },
46 | "babel": {
47 | "presets": [
48 | "@babel/preset-env"
49 | ]
50 | },
51 | "dependencies": {
52 | "loglevel": "1.8.1",
53 | "short-unique-id": "5.0.3"
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/src/collector.js:
--------------------------------------------------------------------------------
1 | import Exporter from "./exporter";
2 | import { extract, extractPassthroughFields } from "./extractor";
3 | import { mos } from "./utils/score";
4 | import {
5 | COLLECTOR_STATE,
6 | defaultAudioMetricIn,
7 | defaultAudioMetricOut,
8 | defaultVideoMetricIn,
9 | defaultVideoMetricOut,
10 | getDefaultMetric,
11 | VALUE,
12 | TYPE,
13 | DIRECTION,
14 | } from "./utils/models";
15 | import { createCollectorId, call } from "./utils/helper";
16 | import { debug, error, info } from "./utils/log";
17 | import { doLiveTreatment } from "./live";
18 |
19 | export default class Collector {
20 | constructor(cfg, refProbeId) {
21 | this._callbacks = {
22 | onreport: null,
23 | onticket: null,
24 | };
25 |
26 | this._id = createCollectorId();
27 | this._moduleName = this._id;
28 | this._probeId = refProbeId;
29 | this._config = cfg;
30 | this._exporter = new Exporter(cfg);
31 | this._state = COLLECTOR_STATE.IDLE;
32 |
33 | this.deviceChanged = () => this._onDeviceChange();
34 | this.connectionStateChange = () => this._onConnectionStateChange();
35 | this.iceConnectionStateChange = () => this._onIceConnectionStateChange();
36 | this.iceGatheringStateChange = () => this._onIceGatheringStateChange();
37 | this.track = (e) => this._onTrack(e);
38 | this.negotiationNeeded = () => this._onNegotiationNeeded();
39 |
40 | info(this._moduleName, `new collector created for probe ${this._probeId}`);
41 | }
42 |
43 | analyze(
44 | stats,
45 | oldStats,
46 | previousReport,
47 | beforeLastReport,
48 | referenceReport,
49 | _refPC,
50 | ) {
51 | const getDefaultSSRCMetric = (kind, reportType) => {
52 | if (kind === VALUE.AUDIO) {
53 | if (reportType === TYPE.INBOUND_RTP) {
54 | return { ...defaultAudioMetricIn };
55 | }
56 | return { ...defaultAudioMetricOut };
57 | }
58 |
59 | if (reportType === TYPE.INBOUND_RTP) {
60 | return { ...defaultVideoMetricIn };
61 | }
62 | return { ...defaultVideoMetricOut };
63 | };
64 |
65 | const getAssociatedPreviousReport = (currentReport, previousReports) => {
66 | let find = null;
67 | previousReports.forEach((prevReport) => {
68 | if (prevReport.id === currentReport.id) {
69 | find = prevReport;
70 | }
71 | });
72 | return find;
73 | };
74 |
75 | // Get previous report without any modifications
76 | const report = getDefaultMetric(previousReport);
77 |
78 | let timestamp = null;
79 | stats.forEach((stat) => {
80 | if (!timestamp && stat.timestamp) {
81 | timestamp = stat.timestamp;
82 | }
83 | const values = extract(
84 | stat,
85 | report,
86 | report.pname,
87 | referenceReport,
88 | stats,
89 | oldStats,
90 | _refPC,
91 | );
92 | values.forEach((data) => {
93 | if ("internal" in data) {
94 | const events = doLiveTreatment(data, previousReport, values);
95 | events.forEach((event) => this.addCustomEvent(event));
96 | }
97 | if (data.value && data.type) {
98 | if (data.ssrc) {
99 | let ssrcReport = report[data.type][data.ssrc];
100 | if (!ssrcReport) {
101 | ssrcReport = getDefaultSSRCMetric(data.type, stat.type);
102 | ssrcReport.ssrc = data.ssrc;
103 | report[data.type][data.ssrc] = ssrcReport;
104 | }
105 | Object.keys(data.value)
106 | .forEach((key) => {
107 | ssrcReport[key] = data.value[key];
108 | });
109 | } else {
110 | Object.keys(data.value)
111 | .forEach((key) => {
112 | report[data.type][key] = data.value[key];
113 | });
114 | }
115 | }
116 | });
117 |
118 | const previousSameReport = oldStats ? getAssociatedPreviousReport(stat, oldStats) : null;
119 |
120 | // Extract passthrough fields
121 | const passthrough = extractPassthroughFields(
122 | stat,
123 | previousSameReport,
124 | this._config.passthrough,
125 | );
126 | Object.keys(passthrough)
127 | .forEach((key) => {
128 | if (!report.passthrough[key]) {
129 | report.passthrough[key] = {};
130 | }
131 | report.passthrough[key] = {
132 | ...report.passthrough[key],
133 | ...passthrough[key],
134 | };
135 | });
136 | });
137 | report.pname = this._config.pname;
138 | report.call_id = this._config.cid;
139 | report.user_id = this._config.uid;
140 | report.count = previousReport ? previousReport.count + 1 : 1;
141 | report.timestamp = timestamp;
142 | Object.keys(report[VALUE.AUDIO])
143 | .forEach((key) => {
144 | const ssrcReport = report[VALUE.AUDIO][key];
145 | ssrcReport[
146 | ssrcReport.direction === DIRECTION.INBOUND ? "mos_in" : "mos_out"
147 | ] = mos(
148 | report,
149 | VALUE.AUDIO,
150 | previousReport,
151 | beforeLastReport,
152 | ssrcReport.ssrc,
153 | ssrcReport.direction,
154 | 3,
155 | );
156 | });
157 | return report;
158 | }
159 |
160 | async takeReferenceStats() {
161 | return new Promise((resolve, reject) => {
162 | const preWaitTime = Date.now();
163 | setTimeout(async () => {
164 | try {
165 | const waitTime = Date.now() - preWaitTime;
166 | const preTime = Date.now();
167 | const reports = await this._config.pc.getStats();
168 | const referenceReport = this.analyze(
169 | reports,
170 | null,
171 | null,
172 | null,
173 | null,
174 | this._config.pc,
175 | );
176 | const postTime = Date.now();
177 | referenceReport.experimental.time_to_measure_ms = postTime - preTime;
178 | referenceReport.experimental.time_to_wait_ms = waitTime;
179 | this._exporter.saveReferenceReport(referenceReport);
180 | debug(
181 | this._moduleName,
182 | `got reference report for probe ${this._probeId}`,
183 | );
184 | resolve();
185 | } catch (err) {
186 | reject(err);
187 | }
188 | }, this._config.startAfter);
189 | });
190 | }
191 |
192 | async collectStats() {
193 | try {
194 | if (this._state !== COLLECTOR_STATE.RUNNING || !this._config.pc) {
195 | debug(
196 | this._moduleName,
197 | `report discarded (too late) for probe ${this._probeId}`,
198 | );
199 | return null;
200 | }
201 |
202 | const preTime = Date.now();
203 | const reports = await this._config.pc.getStats();
204 | const report = this.analyze(
205 | reports,
206 | this._oldReports,
207 | this._exporter.getLastReport(),
208 | this._exporter.getBeforeLastReport(),
209 | this._exporter.getReferenceReport(),
210 | this._config.pc,
211 | );
212 | this._oldReports = reports;
213 | const postTime = Date.now();
214 | report.experimental.time_to_measure_ms = postTime - preTime;
215 | this._exporter.addReport(report);
216 | debug(
217 | this._moduleName,
218 | `got report for probe ${this._probeId}#${
219 | this._exporter.getReportsNumber() + 1
220 | }`,
221 | );
222 | this.fireOnReport(report);
223 | return report;
224 | } catch (err) {
225 | error(this._moduleName, `got error ${err}`);
226 | return null;
227 | }
228 | }
229 |
230 | async start() {
231 | debug(this._moduleName, "starting");
232 | this._oldReports = null;
233 | this._exporter.reset();
234 | await this.registerToPCEvents();
235 | this.state = COLLECTOR_STATE.RUNNING;
236 | this._exporter.start();
237 | debug(this._moduleName, "started");
238 | }
239 |
240 | async mute() {
241 | this.state = COLLECTOR_STATE.MUTED;
242 | debug(this._moduleName, "muted");
243 | }
244 |
245 | async unmute() {
246 | this.state = COLLECTOR_STATE.RUNNING;
247 | debug(this._moduleName, "unmuted");
248 | }
249 |
250 | async stop(forced) {
251 | debug(this._moduleName, `stopping${forced ? " by watchdog" : ""}...`);
252 | this._exporter.stop();
253 | this.unregisterToPCEvents();
254 | this.state = COLLECTOR_STATE.IDLE;
255 |
256 | if (this._config.ticket) {
257 | const ticket = this._exporter.generateTicket();
258 | this.fireOnTicket(ticket);
259 | }
260 | debug(this._moduleName, "stopped");
261 | }
262 |
263 | registerCallback(name, callback, context) {
264 | if (name in this._callbacks) {
265 | this._callbacks[name] = {
266 | callback,
267 | context,
268 | };
269 | debug(this._moduleName, `registered callback '${name}'`);
270 | } else {
271 | error(
272 | this._moduleName,
273 | `can't register callback for '${name}' - not found`,
274 | );
275 | }
276 | }
277 |
278 | unregisterCallback(name) {
279 | if (name in this._callbacks) {
280 | this._callbacks[name] = null;
281 | delete this._callbacks[name];
282 | debug(this._moduleName, `unregistered callback '${name}'`);
283 | } else {
284 | error(
285 | this._moduleName,
286 | `can't unregister callback for '${name}' - not found`,
287 | );
288 | }
289 | }
290 |
291 | fireOnReport(report) {
292 | if (this._callbacks.onreport) {
293 | call(
294 | this._callbacks.onreport.callback,
295 | this._callbacks.onreport.context,
296 | report,
297 | );
298 | }
299 | }
300 |
301 | fireOnTicket(ticket) {
302 | if (this._callbacks.onticket) {
303 | call(
304 | this._callbacks.onticket.callback,
305 | this._callbacks.onticket.context,
306 | ticket,
307 | );
308 | }
309 | }
310 |
311 | updateConfig(config) {
312 | this._config = config;
313 | this._exporter.updateConfig(config);
314 | }
315 |
316 | get state() {
317 | return this._state;
318 | }
319 |
320 | set state(newState) {
321 | this._state = newState;
322 | debug(this._moduleName, `state changed to ${newState}`);
323 | }
324 |
325 | addCustomEvent(event) {
326 | this._exporter.addCustomEvent(event);
327 | }
328 |
329 | async _onDeviceChange() {
330 | try {
331 | const devices = await navigator.mediaDevices.enumerateDevices();
332 | this.addCustomEvent({
333 | at: new Date().toJSON(),
334 | ended: null,
335 | category: "device",
336 | name: "device-change",
337 | ssrc: null,
338 | details: {
339 | message: "One device (at least) has been plugged or unplugged",
340 | direction: null,
341 | kind: null,
342 | value: devices.length,
343 | value_old: null,
344 | },
345 | });
346 | // eslint-disable-next-line no-empty
347 | } catch (err) {
348 | error(this._moduleName, "can't get devices");
349 | }
350 | }
351 |
352 | _onIceConnectionStateChange() {
353 | const { pc } = this._config;
354 | const value = pc.iceConnectionState;
355 | this.addCustomEvent({
356 | at: new Date().toJSON(),
357 | ended: null,
358 | category: "signal",
359 | name: "ice-change",
360 | ssrc: null,
361 | details: {
362 | message: `The ICE connection state has changed to ${value}`,
363 | direction: null,
364 | kind: null,
365 | value,
366 | value_old: null,
367 | },
368 | });
369 | }
370 |
371 | _onConnectionStateChange() {
372 | const { pc } = this._config;
373 | const value = pc.connectionState;
374 | this.addCustomEvent({
375 | at: new Date().toJSON(),
376 | ended: null,
377 | category: "signal",
378 | name: "connection-change",
379 | ssrc: null,
380 | details: {
381 | message: `The connection state has changed to ${value}`,
382 | direction: null,
383 | kind: null,
384 | value,
385 | value_old: null,
386 | },
387 | });
388 | }
389 |
390 | _onIceGatheringStateChange() {
391 | const { pc } = this._config;
392 | const value = pc.iceGatheringState;
393 | this.addCustomEvent({
394 | at: new Date().toJSON(),
395 | ended: null,
396 | category: "signal",
397 | name: "gathering-change",
398 | ssrc: null,
399 | details: {
400 | message: `The ICE gathering state has changed to ${value}`,
401 | direction: null,
402 | kind: null,
403 | value,
404 | value_old: null,
405 | },
406 | });
407 | }
408 |
409 | _onTrack(e) {
410 | this.addCustomEvent({
411 | at: new Date().toJSON(),
412 | ended: null,
413 | category: "signal",
414 | name: "track-received",
415 | ssrc: null,
416 | details: {
417 | message: `A new inbound ${e.track.id} stream has been started`,
418 | direction: "inbound",
419 | kind: e.track.kind,
420 | value: e.track.label,
421 | value_old: null,
422 | },
423 | });
424 | }
425 |
426 | _onNegotiationNeeded() {
427 | this.addCustomEvent({
428 | at: new Date().toJSON(),
429 | ended: null,
430 | category: "signal",
431 | name: "ice-negotiation",
432 | ssrc: null,
433 | details: {
434 | message: "A negotiation is required",
435 | direction: null,
436 | kind: null,
437 | value: "negotiation-needed",
438 | value_old: null,
439 | },
440 | });
441 | }
442 |
443 | async registerToPCEvents() {
444 | const { pc } = this._config;
445 | if (navigator.mediaDevices) {
446 | navigator.mediaDevices.addEventListener("devicechange", this.deviceChanged);
447 | }
448 | if (pc) {
449 | pc.addEventListener("iceconnectionstatechange", this.iceConnectionStateChange);
450 | pc.addEventListener("connectionstatechange", this.connectionStateChange);
451 | pc.addEventListener("icegatheringstatechange", this.iceGatheringStateChange);
452 | pc.addEventListener("track", this.track);
453 | pc.addEventListener("negotiationneeded", this.negotiationNeeded);
454 | }
455 | }
456 |
457 | unregisterToPCEvents() {
458 | const { pc } = this._config;
459 | if (navigator.mediaDevices) {
460 | navigator.mediaDevices.removeEventListener("devicechange", this.deviceChanged);
461 | }
462 | if (pc) {
463 | pc.removeEventListener("iceconnectionstatechange", this.iceConnectionStateChange);
464 | pc.removeEventListener("connectionstatechange", this.connectionStateChange);
465 | pc.removeEventListener("icegatheringstatechange", this.iceGatheringStateChange);
466 | pc.removeEventListener("track", this.track);
467 | pc.removeEventListener("negotiationneeded", this.negotiationNeeded);
468 | }
469 | }
470 |
471 | getTicket() {
472 | return this._exporter && this._exporter.generateTicket();
473 | }
474 | }
475 |
--------------------------------------------------------------------------------
/src/engine.js:
--------------------------------------------------------------------------------
1 | import { info, debug, error } from "./utils/log";
2 | import { getConfig } from "./utils/config";
3 | import Probe from "./probe";
4 | import {
5 | COLLECTOR_STATE,
6 | getDefaultGlobalMetric,
7 | } from "./utils/models";
8 | import { call, sumValuesOfReports, timeout } from "./utils/helper";
9 |
10 | const moduleName = "engine ";
11 |
12 | export default class ProbesEngine {
13 | constructor(cfg) {
14 | this._config = cfg;
15 | this._probes = [];
16 | this._startedTime = null;
17 | this._callbacks = {
18 | onresult: null,
19 | };
20 | info(moduleName, `configured for probing every ${this._config.refreshEvery}ms`);
21 | info(moduleName, `configured for starting after ${this._config.startAfter}ms`);
22 | info(moduleName, `${(this._config.stopAfter && this._config.stopAfter >= 0) ? `configured to stop after ${this._config.stopAfter}ms` : "configured to never stop"}`);
23 | debug(moduleName, "engine initialized");
24 | }
25 |
26 | get probes() {
27 | return this._probes;
28 | }
29 |
30 | get isRunning() {
31 | return this._probes.some((probe) => (probe.isRunning));
32 | }
33 |
34 | get isIdle() {
35 | return this._probes.every((probe) => (probe.isIdle));
36 | }
37 |
38 | addNewProbe(peerConnection, options) {
39 | if (!peerConnection) {
40 | throw new Error("undefined peer connection");
41 | }
42 | const probeConfig = getConfig(peerConnection, options, this._config);
43 | const probe = new Probe(probeConfig);
44 | this._probes.push(probe);
45 | debug(moduleName, `${this._probes.length} probes registered`);
46 | return probe;
47 | }
48 |
49 | removeExistingProbe(probe) {
50 | if (!probe) {
51 | throw new Error("undefined probe");
52 | }
53 | if (probe.state === COLLECTOR_STATE.RUNNING) {
54 | probe.stop();
55 | }
56 | this._probes = this._probes.filter((existingProbe) => (probe.id !== existingProbe.id));
57 | }
58 |
59 | async start() {
60 | const startProbes = () => {
61 | this._probes.forEach((probe) => probe.start());
62 | };
63 |
64 | const takeReferenceStat = async () => (
65 | Promise.all(this._probes.map((probe) => (probe.takeReferenceStats())))
66 | );
67 |
68 | const shouldCollectStats = () => {
69 | if (this.isIdle) {
70 | // don't collect if there are no running probes
71 | return false;
72 | }
73 | if (!this._config.stopAfter || this._config.stopAfter < 0) {
74 | // always collect if stopAfter has not been set
75 | return true;
76 | }
77 | // Else check expiration
78 | return (Date.now() < this._startedTime + this._config.stopAfter);
79 | };
80 |
81 | const collectStats = async () => {
82 | const globalReport = getDefaultGlobalMetric();
83 | const runningProbes = this._probes.filter((probe) => (probe.isRunning));
84 | for (const probe of runningProbes) {
85 | const report = await probe.collectStats();
86 | if (report) {
87 | globalReport.probes.push(report);
88 | }
89 | debug(moduleName, `stats collected for probe ${probe.id}`);
90 | await timeout(0);
91 | }
92 |
93 | // Compute total measure time
94 | globalReport.delta_time_to_measure_probes_ms = sumValuesOfReports(globalReport.probes, "experimental", "time_to_measure_ms");
95 | globalReport.delta_KBytes_in = sumValuesOfReports(globalReport.probes, "data", "delta_KBytes_in");
96 | globalReport.delta_KBytes_out = sumValuesOfReports(globalReport.probes, "data", "delta_KBytes_out");
97 | globalReport.delta_kbs_in = sumValuesOfReports(globalReport.probes, "data", "delta_kbs_in");
98 | globalReport.delta_kbs_out = sumValuesOfReports(globalReport.probes, "data", "delta_kbs_out");
99 | globalReport.total_time_decoded_in = sumValuesOfReports(globalReport.probes, "video", "total_time_decoded_in");
100 | globalReport.total_time_encoded_out = sumValuesOfReports(globalReport.probes, "video", "total_time_encoded_out");
101 | return globalReport;
102 | };
103 |
104 | debug(moduleName, "starting to collect");
105 | startProbes();
106 | debug(moduleName, "generating reference reports...");
107 | await takeReferenceStat();
108 | debug(moduleName, "reference reports generated");
109 | this._startedTime = Date.now();
110 | debug(moduleName, `wait ${this._config.refreshEvery}ms before collecting`);
111 | await timeout(this._config.refreshEvery);
112 | while (shouldCollectStats()) {
113 | debug(moduleName, "collecting...");
114 | const preTime = Date.now();
115 | const globalReport = await collectStats();
116 | const postTime = Date.now();
117 | globalReport.delta_time_consumed_to_measure_ms = postTime - preTime;
118 | this.fireOnReports(globalReport);
119 | debug(moduleName, "collected");
120 | debug(moduleName, `wait ${this._config.refreshEvery}ms before collecting`);
121 | await timeout(this._config.refreshEvery);
122 | }
123 |
124 | debug(moduleName, "reaching end of the collecting period...");
125 |
126 | if (this.isRunning) {
127 | setTimeout(() => {
128 | this.stop();
129 | }, 0);
130 | }
131 | }
132 |
133 | stop(forced) {
134 | const stopProbes = (manual) => {
135 | this._probes.forEach((probe) => {
136 | probe.stop(manual);
137 | });
138 | };
139 |
140 | info(moduleName, "stop collecting");
141 | stopProbes(forced);
142 | }
143 |
144 | registerCallback(name, callback, context) {
145 | if (name in this._callbacks) {
146 | this._callbacks[name] = { callback, context };
147 | debug(moduleName, `registered callback '${name}'`);
148 | } else {
149 | error(moduleName, `can't register callback for '${name}' - not found`);
150 | }
151 | }
152 |
153 | unregisterCallback(name) {
154 | if (name in this._callbacks) {
155 | this._callbacks[name] = null;
156 | delete this._callbacks[name];
157 | debug(moduleName, `unregistered callback '${name}'`);
158 | } else {
159 | error(moduleName, `can't unregister callback for '${name}' - not found`);
160 | }
161 | }
162 |
163 | fireOnReports(report) {
164 | if (this._callbacks.onresult && report.probes.length > 0) {
165 | call(this._callbacks.onresult.callback, this._callbacks.onresult.context, report);
166 | }
167 | }
168 | }
169 |
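// Illustrative usage sketch (assumptions: a config object with `refreshEvery`,
// `startAfter` and `stopAfter`, and an existing RTCPeerConnection `pc`; the option
// values shown here are examples, not defaults):
//
// import ProbesEngine from "./engine";
//
// const engine = new ProbesEngine({ refreshEvery: 2000, startAfter: 0, stopAfter: -1 });
// const probe = engine.addNewProbe(pc, {});
// engine.registerCallback("onresult", (report) => console.log(report), null);
// engine.start();      // collects every `refreshEvery` ms until stopped
// // ... later
// engine.stop(true);   // stops all registered probes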
--------------------------------------------------------------------------------
/src/exporter.js:
--------------------------------------------------------------------------------
1 | import { debug, trace } from "./utils/log";
2 | import {
3 | averageValuesOfReports,
4 | minValueOfReports,
5 | maxValueOfReports,
6 | lastOfReports,
7 | volatilityValuesOfReports,
8 | getLastReport, valuesOfReports,
9 | } from "./utils/helper";
10 | import { DIRECTION, VALUE } from "./utils/models";
11 |
12 | const moduleName = "exporter ";
13 |
14 | const VERSION_EXPORTER = "2.0";
15 |
16 | const averageRTT = (reports, kind, ssrc, forInbound = false) => {
17 | if (!reports || reports.length === 0) {
18 | return 0;
19 | }
20 |
21 | const lastReport = reports[reports.length - 1];
22 | if (!lastReport) {
23 | return 0;
24 | }
25 |
26 | const ssrcData = lastReport[kind][ssrc];
27 | if (ssrcData) {
28 | const totalRTT = forInbound ? ssrcData.total_rtt_ms_in : ssrcData.total_rtt_ms_out;
29 | const totalMeasurements = forInbound ? ssrcData.total_rtt_measure_in : ssrcData.total_rtt_measure_out;
30 |
31 | if (!totalMeasurements || !totalRTT) {
32 | return averageValuesOfReports(reports, kind, forInbound ? "delta_rtt_ms_in" : "delta_rtt_ms_out", false, ssrc);
33 | }
34 |
35 | return Number(totalRTT / totalMeasurements);
36 | }
37 | return null;
38 | };
39 |
40 | const limitationsPercent = (reports, kind, ssrc) => {
41 | const defaultValue = {
42 | other: 0,
43 | cpu: 0,
44 | bandwidth: 0,
45 | none: 100,
46 | };
47 |
48 | if (!reports || reports.length === 0) {
49 | return defaultValue;
50 | }
51 |
52 | const lastReport = reports[reports.length - 1];
53 | const ssrcData = lastReport[kind][ssrc];
54 |
55 | if (!ssrcData) {
56 | return defaultValue;
57 | }
58 |
59 | if (!("limitation_out" in ssrcData) || !("durations" in ssrcData.limitation_out)) {
60 | return defaultValue;
61 | }
62 |
63 | // FF: No quality limitations
64 | if (!ssrcData.limitation_out.durations) {
65 | return defaultValue;
66 | }
67 |
68 | const {
69 | other,
70 | bandwidth,
71 | cpu,
72 | none,
73 | } = ssrcData.limitation_out.durations;
74 |
75 | const totalDuration = Number(other) + Number(bandwidth) + Number(cpu) + Number(none);
76 |
77 | return {
78 | other: +((other / totalDuration) * 100).toFixed(2),
79 | cpu: +((cpu / totalDuration) * 100).toFixed(2),
80 | bandwidth: +((bandwidth / totalDuration) * 100).toFixed(2),
81 | none: +((none / totalDuration) * 100).toFixed(2),
82 | };
83 | };
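// Worked example (illustrative): with limitation_out.durations = { other: 0, cpu: 2,
// bandwidth: 3, none: 5 } the total is 10, so limitationsPercent() returns
// { other: 0, cpu: 20, bandwidth: 30, none: 50 } (percentages rounded to 2 decimals).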
84 |
85 | const averageRTTConnectivity = (reports, kind) => {
86 | if (!reports || reports.length === 0) {
87 | return 0;
88 | }
89 |
90 | const lastReport = reports[reports.length - 1];
91 | if (!lastReport) {
92 | return 0;
93 | }
94 | const totalRTT = lastReport[kind].total_rtt_connectivity_ms;
95 | const totalMeasurements = lastReport[kind].total_rtt_connectivity_measure;
96 |
97 | if (!totalMeasurements || !totalRTT) {
98 | return averageValuesOfReports(
99 | reports,
100 | kind,
101 | "delta_rtt_connectivity_ms",
102 | );
103 | }
104 |
105 | return Number(totalRTT / totalMeasurements);
106 | };
107 |
108 | const getPath = (reports) => {
109 | const localCandidateType = lastOfReports(
110 | reports,
111 | "network",
112 | "local_candidate_type",
113 | );
114 |
115 | if (localCandidateType !== "relay") {
116 | const localCandidateProtocol = lastOfReports(
117 | reports,
118 | "network",
119 | "local_candidate_protocol",
120 | );
121 | return `direct/${localCandidateProtocol}`;
122 | }
123 |
124 | const localCandidateRelayProtocol = lastOfReports(
125 | reports,
126 | "network",
127 | "local_candidate_relay_protocol",
128 | );
129 | return `turn/${localCandidateRelayProtocol}`;
130 | };
131 |
132 | const getRemotePath = (reports) => {
133 | const remoteCandidateType = lastOfReports(
134 | reports,
135 | "network",
136 | "remote_candidate_type",
137 | );
138 | const remoteCandidateProtocol = lastOfReports(
139 | reports,
140 | "network",
141 | "remote_candidate_protocol",
142 | );
143 |
144 | if (remoteCandidateType !== "relay") {
145 | return `direct/${remoteCandidateProtocol}`;
146 | }
147 |
148 | return `turn/${remoteCandidateProtocol}`;
149 | };
150 |
151 | export default class Exporter {
152 | constructor(cfg) {
153 | this._start = null;
154 | this._end = null;
155 | this._cfg = cfg;
156 | this._referenceReport = null;
157 | this._reports = [];
158 | this._events = [];
159 | }
160 |
161 | start() {
162 | trace(moduleName, "start() - start exporter...");
163 | const date = new Date();
164 | this._start = date.toJSON();
165 | return date;
166 | }
167 |
168 | stop() {
169 | trace(moduleName, "stop() - stop exporter...");
170 | const date = new Date();
171 | this._end = date.toJSON();
172 | return date;
173 | }
174 |
175 | saveReferenceReport(report) {
176 | this._referenceReport = report;
177 | }
178 |
179 | getReferenceReport() {
180 | return this._referenceReport;
181 | }
182 |
183 | addReport(report) {
184 | if (this._cfg.ticket) {
185 | debug(
186 | moduleName,
187 | `addReport() - add report to exporter at ${report.timestamp}`,
188 | );
189 | this._reports.push(report);
190 | }
191 | }
192 |
193 | addCustomEvent(event) {
194 | this._events.push(event);
195 | }
196 |
197 | reset() {
198 | trace(moduleName, "resetReports() - reset reports");
199 | this._reports = [];
200 | this._referenceReport = null;
201 | this._start = null;
202 | this._end = null;
203 | }
204 |
205 | generateTicket() {
206 | debug(moduleName, "ticket() - generate ticket");
207 |
208 | const audioPacketsLost = lastOfReports(
209 | this._reports,
210 | "audio",
211 | "total_packets_lost_in",
212 | );
213 | const audioPacketsReceived = lastOfReports(
214 | this._reports,
215 | "audio",
216 | "total_packets_in",
217 | );
218 | const videoPacketsLost = lastOfReports(
219 | this._reports,
220 | "video",
221 | "total_packets_lost_in",
222 | );
223 | const videoPacketsReceived = lastOfReports(
224 | this._reports,
225 | "video",
226 | "total_packets_in",
227 | );
228 |
229 | const ssrcExporter = {};
230 |
231 | const lastReport = getLastReport(this._reports);
232 | if (lastReport) {
233 | Object.keys(lastReport[VALUE.AUDIO])
234 | .forEach((ssrc) => {
235 | const ssrcAudio = lastReport[VALUE.AUDIO][ssrc];
236 | ssrcExporter[ssrc] = {
237 | type: VALUE.AUDIO,
238 | direction: ssrcAudio.direction,
239 | };
240 | if (ssrcAudio.direction === DIRECTION.INBOUND) {
241 | const jitter = {
242 | avg: averageValuesOfReports(
243 | this._reports,
244 | VALUE.AUDIO,
245 | "delta_jitter_ms_in",
246 | false,
247 | ssrc,
248 | ),
249 | min: minValueOfReports(
250 | this._reports,
251 | VALUE.AUDIO,
252 | "delta_jitter_ms_in",
253 | ssrc,
254 | ),
255 | max: maxValueOfReports(
256 | this._reports,
257 | VALUE.AUDIO,
258 | "delta_jitter_ms_in",
259 | ssrc,
260 | ),
261 | volatility: volatilityValuesOfReports(
262 | this._reports,
263 | VALUE.AUDIO,
264 | "delta_jitter_ms_in",
265 | ssrc,
266 | ),
267 | values: valuesOfReports(this._reports, VALUE.AUDIO, "delta_jitter_ms_in", ssrc),
268 | _unit: {
269 | avg: "ms",
270 | min: "ms",
271 | max: "ms",
272 | volatility: "percent",
273 | },
274 | };
275 | const bitrate = {
276 | avg: averageValuesOfReports(
277 | this._reports,
278 | VALUE.AUDIO,
279 | "delta_kbs_in",
280 | false,
281 | ssrc,
282 | ),
283 | min: minValueOfReports(
284 | this._reports,
285 | VALUE.AUDIO,
286 | "delta_kbs_in",
287 | ssrc,
288 | ),
289 | max: maxValueOfReports(
290 | this._reports,
291 | VALUE.AUDIO,
292 | "delta_kbs_in",
293 | ssrc,
294 | ),
295 | volatility: volatilityValuesOfReports(
296 | this._reports,
297 | VALUE.AUDIO,
298 | "delta_kbs_in",
299 | ssrc,
300 | ),
301 | values: valuesOfReports(this._reports, VALUE.AUDIO, "delta_kbs_in", ssrc),
302 | _unit: {
303 | avg: "kbs",
304 | min: "kbs",
305 | max: "kbs",
306 | volatility: "percent",
307 | },
308 | };
309 | const traffic = {
310 | avg: averageValuesOfReports(
311 | this._reports,
312 | VALUE.AUDIO,
313 | "delta_KBytes_in",
314 | false,
315 | ssrc,
316 | ),
317 | min: minValueOfReports(
318 | this._reports,
319 | VALUE.AUDIO,
320 | "delta_KBytes_in",
321 | ssrc,
322 | ),
323 | max: maxValueOfReports(
324 | this._reports,
325 | VALUE.AUDIO,
326 | "delta_KBytes_in",
327 | ssrc,
328 | ),
329 | volatility: volatilityValuesOfReports(
330 | this._reports,
331 | VALUE.AUDIO,
332 | "delta_KBytes_in",
333 | ssrc,
334 | ),
335 | values: valuesOfReports(this._reports, VALUE.AUDIO, "delta_KBytes_in", ssrc),
336 | _unit: {
337 | avg: "KB",
338 | min: "KB",
339 | max: "KB",
340 | volatility: "percent",
341 | },
342 | };
343 | const mos = {
344 | avg: averageValuesOfReports(this._reports, VALUE.AUDIO, "mos_in", false, ssrc),
345 | min: minValueOfReports(this._reports, VALUE.AUDIO, "mos_in", ssrc),
346 | max: maxValueOfReports(this._reports, VALUE.AUDIO, "mos_in", ssrc),
347 | volatility: volatilityValuesOfReports(
348 | this._reports,
349 | VALUE.AUDIO,
350 | "mos_in",
351 | ssrc,
352 | ),
353 | values: valuesOfReports(this._reports, VALUE.AUDIO, "mos_in", ssrc),
354 | _unit: {
355 | avg: "number (1-5)",
356 | min: "number (1-5)",
357 | max: "number (1-5)",
358 | volatility: "percent",
359 | },
360 | };
361 | const packetsLost = lastOfReports(
362 | this._reports,
363 | VALUE.AUDIO,
364 | "total_packets_lost_in",
365 | ssrc,
366 | );
367 | const packetsReceived = lastOfReports(
368 | this._reports,
369 | VALUE.AUDIO,
370 | "total_packets_in",
371 | ssrc,
372 | );
373 | const loss = {
374 | total: packetsLost,
375 | avg: Math.round(
376 | ((packetsLost /
377 | (packetsLost + packetsReceived)) *
378 | 100 || 0) * 100,
379 | ) / 100,
380 | min: minValueOfReports(this._reports, VALUE.AUDIO, "delta_packets_lost_in", ssrc, false),
381 | max: maxValueOfReports(this._reports, VALUE.AUDIO, "delta_packets_lost_in", ssrc, false),
382 | volatility: volatilityValuesOfReports(
383 | this._reports,
384 | VALUE.AUDIO,
385 | "delta_packets_lost_in",
386 | ssrc,
387 | false,
388 | ),
389 | values: valuesOfReports(this._reports, VALUE.AUDIO, "delta_packets_lost_in", ssrc),
390 | _unit: {
391 | avg: "percent",
392 | min: "number",
393 | max: "number",
394 | volatility: "percent",
395 | total: "number",
396 | },
397 | };
398 | const rtt = {
399 | avg: averageRTT(this._reports, VALUE.AUDIO, ssrc, true),
400 | min: minValueOfReports(this._reports, VALUE.AUDIO, "delta_rtt_ms_in", ssrc),
401 | max: maxValueOfReports(this._reports, VALUE.AUDIO, "delta_rtt_ms_in", ssrc),
402 | volatility: volatilityValuesOfReports(
403 | this._reports,
404 | VALUE.AUDIO,
405 | "delta_rtt_ms_in",
406 | ssrc,
407 | ),
408 | values: valuesOfReports(this._reports, VALUE.AUDIO, "delta_rtt_ms_in", ssrc),
409 | _unit: {
410 | avg: "ms",
411 | min: "ms",
412 | max: "ms",
413 | volatility: "percent",
414 | },
415 | };
416 | ssrcExporter[ssrc].jitter = jitter;
417 | ssrcExporter[ssrc].rtt = rtt;
418 | ssrcExporter[ssrc].mos = mos;
419 | ssrcExporter[ssrc].traffic = traffic;
420 | ssrcExporter[ssrc].bitrate = bitrate;
421 | ssrcExporter[ssrc].loss = loss;
422 | } else {
423 | const jitter = {
424 | avg: averageValuesOfReports(
425 | this._reports,
426 | VALUE.AUDIO,
427 | "delta_jitter_ms_out",
428 | false,
429 | ssrc,
430 | ),
431 | min: minValueOfReports(
432 | this._reports,
433 | VALUE.AUDIO,
434 | "delta_jitter_ms_out",
435 | ssrc,
436 | ),
437 | max: maxValueOfReports(
438 | this._reports,
439 | VALUE.AUDIO,
440 | "delta_jitter_ms_out",
441 | ssrc,
442 | ),
443 | volatility: volatilityValuesOfReports(
444 | this._reports,
445 | VALUE.AUDIO,
446 | "delta_jitter_ms_out",
447 | ssrc,
448 | ),
449 | values: valuesOfReports(this._reports, VALUE.AUDIO, "delta_jitter_ms_out", ssrc),
450 | _unit: {
451 | avg: "ms",
452 | min: "ms",
453 | max: "ms",
454 | volatility: "percent",
455 | },
456 | };
457 | const bitrate = {
458 | avg: averageValuesOfReports(
459 | this._reports,
460 | VALUE.AUDIO,
461 | "delta_kbs_out",
462 | false,
463 | ssrc,
464 | ),
465 | min: minValueOfReports(
466 | this._reports,
467 | VALUE.AUDIO,
468 | "delta_kbs_out",
469 | ssrc,
470 | ),
471 | max: maxValueOfReports(
472 | this._reports,
473 | VALUE.AUDIO,
474 | "delta_kbs_out",
475 | ssrc,
476 | ),
477 | volatility: volatilityValuesOfReports(
478 | this._reports,
479 | VALUE.AUDIO,
480 | "delta_kbs_out",
481 | ssrc,
482 | ),
483 | values: valuesOfReports(this._reports, VALUE.AUDIO, "delta_kbs_out", ssrc),
484 | _unit: {
485 | avg: "kbs",
486 | min: "kbs",
487 | max: "kbs",
488 | volatility: "percent",
489 | },
490 | };
491 | const traffic = {
492 | avg: averageValuesOfReports(
493 | this._reports,
494 | VALUE.AUDIO,
495 | "delta_KBytes_out",
496 | false,
497 | ssrc,
498 | ),
499 | min: minValueOfReports(
500 | this._reports,
501 | VALUE.AUDIO,
502 | "delta_KBytes_out",
503 | ssrc,
504 | ),
505 | max: maxValueOfReports(
506 | this._reports,
507 | VALUE.AUDIO,
508 | "delta_KBytes_out",
509 | ssrc,
510 | ),
511 | volatility: volatilityValuesOfReports(
512 | this._reports,
513 | VALUE.AUDIO,
514 | "delta_KBytes_out",
515 | ssrc,
516 | ),
517 | values: valuesOfReports(this._reports, VALUE.AUDIO, "delta_KBytes_out", ssrc),
518 | _unit: {
519 | avg: "KB",
520 | min: "KB",
521 | max: "KB",
522 | bitrate: "kbs",
523 | volatility: "percent",
524 | },
525 | };
526 | const rtt = {
527 | avg: averageRTT(this._reports, VALUE.AUDIO, ssrc),
528 | min: minValueOfReports(this._reports, VALUE.AUDIO, "delta_rtt_ms_out", ssrc),
529 | max: maxValueOfReports(this._reports, VALUE.AUDIO, "delta_rtt_ms_out", ssrc),
530 | volatility: volatilityValuesOfReports(
531 | this._reports,
532 | VALUE.AUDIO,
533 | "delta_rtt_ms_out",
534 | ssrc,
535 | ),
536 | values: valuesOfReports(this._reports, VALUE.AUDIO, "delta_rtt_ms_out", ssrc),
537 | _unit: {
538 | avg: "ms",
539 | min: "ms",
540 | max: "ms",
541 | volatility: "percent",
542 | },
543 | };
544 | const packetsLost = lastOfReports(
545 | this._reports,
546 | VALUE.AUDIO,
547 | "total_packets_lost_out",
548 | ssrc,
549 | );
550 | const packetsReceived = lastOfReports(
551 | this._reports,
552 | VALUE.AUDIO,
553 | "total_packets_out",
554 | ssrc,
555 | );
556 | const loss = {
557 | total: packetsLost,
558 | min: minValueOfReports(this._reports, VALUE.AUDIO, "delta_packets_lost_out", ssrc, false),
559 | max: maxValueOfReports(this._reports, VALUE.AUDIO, "delta_packets_lost_out", ssrc, false),
560 | volatility: volatilityValuesOfReports(
561 | this._reports,
562 | VALUE.AUDIO,
563 | "delta_packets_lost_out",
564 | ssrc,
565 | false,
566 | ),
567 | avg: Math.round(
568 | ((packetsLost /
569 | (packetsLost + packetsReceived)) *
570 | 100 || 0) * 100,
571 | ) / 100,
572 | values: valuesOfReports(this._reports, VALUE.AUDIO, "delta_packets_lost_out", ssrc),
573 | _unit: {
574 | avg: "percent",
575 | min: "number",
576 | max: "number",
577 | volatility: "percent",
578 | total: "number",
579 | },
580 | };
581 | const mos = {
582 | avg: averageValuesOfReports(this._reports, VALUE.AUDIO, "mos_out", false, ssrc),
583 | min: minValueOfReports(this._reports, VALUE.AUDIO, "mos_out", ssrc),
584 | max: maxValueOfReports(this._reports, VALUE.AUDIO, "mos_out", ssrc),
585 | volatility: volatilityValuesOfReports(
586 | this._reports,
587 | VALUE.AUDIO,
588 | "mos_out",
589 | ssrc,
590 | ),
591 | values: valuesOfReports(this._reports, VALUE.AUDIO, "mos_out", ssrc),
592 | _unit: {
593 | avg: "number (1-5)",
594 | min: "number (1-5)",
595 | max: "number (1-5)",
596 | volatility: "percent",
597 | },
598 | };
599 | ssrcExporter[ssrc].jitter = jitter;
600 | ssrcExporter[ssrc].rtt = rtt;
601 | ssrcExporter[ssrc].traffic = traffic;
602 | ssrcExporter[ssrc].bitrate = bitrate;
603 | ssrcExporter[ssrc].loss = loss;
604 | ssrcExporter[ssrc].mos = mos;
605 | }
606 | });
607 | Object.keys(lastReport[VALUE.VIDEO])
608 | .forEach((ssrc) => {
609 | const ssrcVideo = lastReport[VALUE.VIDEO][ssrc];
610 | ssrcExporter[ssrc] = {
611 | type: VALUE.VIDEO,
612 | direction: ssrcVideo.direction,
613 | };
614 | if (ssrcVideo.direction === DIRECTION.INBOUND) {
615 | const jitter = {
616 | avg: averageValuesOfReports(
617 | this._reports,
618 | VALUE.VIDEO,
619 | "delta_jitter_ms_in",
620 | false,
621 | ssrc,
622 | ),
623 | min: minValueOfReports(
624 | this._reports,
625 | VALUE.VIDEO,
626 | "delta_jitter_ms_in",
627 | ssrc,
628 | ),
629 | max: maxValueOfReports(
630 | this._reports,
631 | VALUE.VIDEO,
632 | "delta_jitter_ms_in",
633 | ssrc,
634 | ),
635 | volatility: volatilityValuesOfReports(
636 | this._reports,
637 | VALUE.VIDEO,
638 | "delta_jitter_ms_in",
639 | ssrc,
640 | ),
641 | values: valuesOfReports(this._reports, VALUE.VIDEO, "delta_jitter_ms_in", ssrc),
642 | _unit: {
643 | avg: "ms",
644 | min: "ms",
645 | max: "ms",
646 | volatility: "percent",
647 | },
648 | };
649 | const bitrate = {
650 | avg: averageValuesOfReports(
651 | this._reports,
652 | VALUE.VIDEO,
653 | "delta_kbs_in",
654 | false,
655 | ssrc,
656 | ),
657 | min: minValueOfReports(
658 | this._reports,
659 | VALUE.VIDEO,
660 | "delta_kbs_in",
661 | ssrc,
662 | ),
663 | max: maxValueOfReports(
664 | this._reports,
665 | VALUE.VIDEO,
666 | "delta_kbs_in",
667 | ssrc,
668 | ),
669 | volatility: volatilityValuesOfReports(
670 | this._reports,
671 | VALUE.VIDEO,
672 | "delta_kbs_in",
673 | ssrc,
674 | ),
675 | values: valuesOfReports(this._reports, VALUE.VIDEO, "delta_kbs_in", ssrc),
676 | _unit: {
677 | avg: "kbs",
678 | min: "kbs",
679 | max: "kbs",
680 | volatility: "percent",
681 | },
682 | };
683 | const traffic = {
684 | avg: averageValuesOfReports(
685 | this._reports,
686 | VALUE.VIDEO,
687 | "delta_KBytes_in",
688 | false,
689 | ssrc,
690 | ),
691 | min: minValueOfReports(
692 | this._reports,
693 | VALUE.VIDEO,
694 | "delta_KBytes_in",
695 | ssrc,
696 | ),
697 | max: maxValueOfReports(
698 | this._reports,
699 | VALUE.VIDEO,
700 | "delta_KBytes_in",
701 | ssrc,
702 | ),
703 | volatility: volatilityValuesOfReports(
704 | this._reports,
705 | VALUE.VIDEO,
706 | "delta_KBytes_in",
707 | ssrc,
708 | ),
709 | values: valuesOfReports(this._reports, VALUE.VIDEO, "delta_KBytes_in", ssrc),
710 | _unit: {
711 | avg: "KB",
712 | min: "KB",
713 | max: "KB",
714 | volatility: "percent",
715 | },
716 | };
717 | const packetsLost = lastOfReports(
718 | this._reports,
719 | VALUE.VIDEO,
720 | "total_packets_lost_in",
721 | ssrc,
722 | );
723 | const packetsReceived = lastOfReports(
724 | this._reports,
725 | VALUE.VIDEO,
726 | "total_packets_in",
727 | ssrc,
728 | );
729 | const loss = {
730 | total: packetsLost,
731 | min: minValueOfReports(this._reports, VALUE.VIDEO, "delta_packets_lost_in", ssrc, false),
732 | max: maxValueOfReports(this._reports, VALUE.VIDEO, "delta_packets_lost_in", ssrc, false),
733 | volatility: volatilityValuesOfReports(
734 | this._reports,
735 | VALUE.VIDEO,
736 | "delta_packets_lost_in",
737 | ssrc,
738 | false,
739 | ),
740 | avg: Math.round(
741 | ((packetsLost /
742 | (packetsLost + packetsReceived)) *
743 | 100 || 0) * 100,
744 | ) / 100,
745 | values: valuesOfReports(this._reports, VALUE.VIDEO, "delta_packets_lost_in", ssrc),
746 | _unit: {
747 | avg: "percent",
748 | min: "number",
749 | max: "number",
750 | volatility: "percent",
751 | total: "number",
752 | },
753 | };
754 | ssrcExporter[ssrc].jitter = jitter;
755 | ssrcExporter[ssrc].traffic = traffic;
756 | ssrcExporter[ssrc].bitrate = bitrate;
757 | ssrcExporter[ssrc].loss = loss;
758 | } else {
759 | const jitter = {
760 | avg: averageValuesOfReports(
761 | this._reports,
762 | VALUE.VIDEO,
763 | "delta_jitter_ms_out",
764 | false,
765 | ssrc,
766 | ),
767 | min: minValueOfReports(
768 | this._reports,
769 | VALUE.VIDEO,
770 | "delta_jitter_ms_out",
771 | ssrc,
772 | ),
773 | max: maxValueOfReports(
774 | this._reports,
775 | VALUE.VIDEO,
776 | "delta_jitter_ms_out",
777 | ssrc,
778 | ),
779 | volatility: volatilityValuesOfReports(
780 | this._reports,
781 | VALUE.VIDEO,
782 | "delta_jitter_ms_out",
783 | ssrc,
784 | ),
785 | values: valuesOfReports(this._reports, VALUE.VIDEO, "delta_jitter_ms_out", ssrc),
786 | _unit: {
787 | avg: "ms",
788 | min: "ms",
789 | max: "ms",
790 | volatility: "percent",
791 | },
792 | };
793 | const bitrate = {
794 | avg: averageValuesOfReports(
795 | this._reports,
796 | VALUE.VIDEO,
797 | "delta_kbs_out",
798 | false,
799 | ssrc,
800 | ),
801 | min: minValueOfReports(
802 | this._reports,
803 | VALUE.VIDEO,
804 | "delta_kbs_out",
805 | ssrc,
806 | ),
807 | max: maxValueOfReports(
808 | this._reports,
809 | VALUE.VIDEO,
810 | "delta_kbs_out",
811 | ssrc,
812 | ),
813 | volatility: volatilityValuesOfReports(
814 | this._reports,
815 | VALUE.VIDEO,
816 | "delta_kbs_out",
817 | ssrc,
818 | ),
819 | values: valuesOfReports(this._reports, VALUE.VIDEO, "delta_kbs_out", ssrc),
820 | _unit: {
821 | avg: "kbs",
822 | min: "kbs",
823 | max: "kbs",
824 | volatility: "percent",
825 | },
826 | };
827 | const traffic = {
828 | avg: averageValuesOfReports(
829 | this._reports,
830 | VALUE.VIDEO,
831 | "delta_KBytes_out",
832 | false,
833 | ssrc,
834 | ),
835 | min: minValueOfReports(
836 | this._reports,
837 | VALUE.VIDEO,
838 | "delta_KBytes_out",
839 | ssrc,
840 | ),
841 | max: maxValueOfReports(
842 | this._reports,
843 | VALUE.VIDEO,
844 | "delta_KBytes_out",
845 | ssrc,
846 | ),
847 | volatility: volatilityValuesOfReports(
848 | this._reports,
849 | VALUE.VIDEO,
850 | "delta_KBytes_out",
851 | ssrc,
852 | ),
853 | values: valuesOfReports(this._reports, VALUE.VIDEO, "delta_KBytes_out", ssrc),
854 | _unit: {
855 | avg: "KB",
856 | min: "KB",
857 | max: "KB",
858 | volatility: "percent",
859 | },
860 | };
861 | const rtt = {
862 | avg: averageRTT(this._reports, VALUE.VIDEO, ssrc),
863 | min: minValueOfReports(this._reports, VALUE.VIDEO, "delta_rtt_ms_out", ssrc),
864 | max: maxValueOfReports(this._reports, VALUE.VIDEO, "delta_rtt_ms_out", ssrc),
865 | volatility: volatilityValuesOfReports(
866 | this._reports,
867 | VALUE.VIDEO,
868 | "delta_rtt_ms_out",
869 | ssrc,
870 | ),
871 | values: valuesOfReports(this._reports, VALUE.VIDEO, "delta_rtt_ms_out", ssrc),
872 | _unit: {
873 | avg: "ms",
874 | min: "ms",
875 | max: "ms",
876 | volatility: "percent",
877 | },
878 | };
879 | const packetsLost = lastOfReports(
880 | this._reports,
881 | VALUE.VIDEO,
882 | "total_packets_lost_out",
883 | ssrc,
884 | );
885 | const packetsReceived = lastOfReports(
886 | this._reports,
887 | VALUE.VIDEO,
888 | "total_packets_out",
889 | ssrc,
890 | );
891 | const loss = {
892 | total: packetsLost,
893 | min: minValueOfReports(this._reports, VALUE.VIDEO, "delta_packets_lost_out", ssrc, false),
894 | max: maxValueOfReports(this._reports, VALUE.VIDEO, "delta_packets_lost_out", ssrc, false),
895 | volatility: volatilityValuesOfReports(
896 | this._reports,
897 | VALUE.VIDEO,
898 | "delta_packets_lost_out",
899 | ssrc,
900 | false,
901 | ),
902 | avg: Math.round(
903 | ((packetsLost /
904 | (packetsLost + packetsReceived)) *
905 | 100 || 0) * 100,
906 | ) / 100,
907 | values: valuesOfReports(this._reports, VALUE.VIDEO, "delta_packets_lost_out", ssrc),
908 | _unit: {
909 | avg: "percent",
910 | min: "number",
911 | max: "number",
912 | volatility: "percent",
913 | total: "number",
914 | },
915 | };
916 |
917 | ssrcExporter[ssrc].jitter = jitter;
918 | ssrcExporter[ssrc].rtt = rtt;
919 | ssrcExporter[ssrc].traffic = traffic;
920 | ssrcExporter[ssrc].bitrate = bitrate;
921 | ssrcExporter[ssrc].loss = loss;
922 | ssrcExporter[ssrc].limitations = limitationsPercent(this._reports, VALUE.VIDEO, ssrc);
923 | }
924 | });
925 | }
926 |
927 | return {
928 | version: VERSION_EXPORTER,
929 | configuration: {
930 | frequency: this._cfg.refreshEvery,
931 | },
932 | started: this._start,
933 | ended: this._end,
934 | ua: {
935 | agent: navigator.userAgent,
936 | pname: this._cfg.pname,
937 | user_id: this._cfg.uid,
938 | },
939 | call: {
940 | call_id: this._cfg.cid,
941 | events: this._events,
942 | },
943 | details: {
944 | count: this._reports.length,
945 | reports: this._cfg.record ? this._reports : [],
946 | reference: this._referenceReport || null,
947 | },
948 | ssrc: ssrcExporter,
949 | data: {
950 | rtt: {
951 | avg: averageRTTConnectivity(this._reports, "data"),
952 | min: minValueOfReports(
953 | this._reports,
954 | VALUE.DATA,
955 | "delta_rtt_connectivity_ms",
956 | ),
957 | max: maxValueOfReports(
958 | this._reports,
959 | VALUE.DATA,
960 | "delta_rtt_connectivity_ms",
961 | ),
962 | volatility: volatilityValuesOfReports(
963 | this._reports,
964 | VALUE.DATA,
965 | "delta_rtt_connectivity_ms",
966 | ),
967 | values: valuesOfReports(this._reports, VALUE.DATA, "delta_rtt_connectivity_ms"),
968 | _unit: {
969 | avg: "ms",
970 | min: "ms",
971 | max: "ms",
972 | volatility: "percent",
973 | },
974 | },
975 | packetsLost: {
976 | audio: {
977 | in: {
978 | avg:
979 | Math.round(
980 | ((audioPacketsLost /
981 | (audioPacketsLost + audioPacketsReceived)) *
982 | 100 || 0) * 100,
983 | ) / 100,
984 | },
985 | },
986 | video: {
987 | in: {
988 | avg:
989 | Math.round(
990 | ((videoPacketsLost /
991 | (videoPacketsLost + videoPacketsReceived)) *
992 | 100 || 0) * 100,
993 | ) / 100,
994 | },
995 | },
996 | unit: {
997 | avg: "percent",
998 | },
999 | },
1000 | bitrate: {
1001 | in: {
1002 | avg: averageValuesOfReports(this._reports, "data", "delta_kbs_in"),
1003 | min: minValueOfReports(this._reports, "data", "delta_kbs_in"),
1004 | max: maxValueOfReports(this._reports, "data", "delta_kbs_in"),
1005 | volatility: volatilityValuesOfReports(
1006 | this._reports,
1007 | "data",
1008 | "delta_kbs_in",
1009 | ),
1010 | values: valuesOfReports(this._reports, VALUE.DATA, "delta_kbs_in"),
1011 | },
1012 | out: {
1013 | avg: averageValuesOfReports(this._reports, "data", "delta_kbs_out"),
1014 | min: minValueOfReports(this._reports, "data", "delta_kbs_out"),
1015 | max: maxValueOfReports(this._reports, "data", "delta_kbs_out"),
1016 | volatility: volatilityValuesOfReports(
1017 | this._reports,
1018 | "data",
1019 | "delta_kbs_out",
1020 | ),
1021 | values: valuesOfReports(this._reports, VALUE.DATA, "delta_kbs_out"),
1022 | },
1023 | unit: {
1024 | avg: "kbs",
1025 | min: "kbs",
1026 | max: "kbs",
1027 | volatility: "percent",
1028 | },
1029 | },
1030 | traffic: {
1031 | in: {
1032 | avg: averageValuesOfReports(this._reports, "data", "delta_KBytes_in"),
1033 | min: minValueOfReports(this._reports, "data", "delta_KBytes_in"),
1034 | max: maxValueOfReports(this._reports, "data", "delta_KBytes_in"),
1035 | volatility: volatilityValuesOfReports(
1036 | this._reports,
1037 | "data",
1038 | "delta_KBytes_in",
1039 | ),
1040 | values: valuesOfReports(this._reports, VALUE.DATA, "delta_KBytes_in"),
1041 | },
1042 | out: {
1043 | avg: averageValuesOfReports(
1044 | this._reports,
1045 | "data",
1046 | "delta_KBytes_out",
1047 | ),
1048 | min: minValueOfReports(this._reports, "data", "delta_KBytes_out"),
1049 | max: maxValueOfReports(this._reports, "data", "delta_KBytes_out"),
1050 | volatility: volatilityValuesOfReports(
1051 | this._reports,
1052 | "data",
1053 | "delta_KBytes_out",
1054 | ),
1055 | values: valuesOfReports(this._reports, VALUE.DATA, "delta_KBytes_out"),
1056 | },
1057 | unit: {
1058 | avg: "KBytes",
1059 | min: "KBytes",
1060 | max: "KBytes",
1061 | volatility: "percent",
1062 | },
1063 | },
1064 | network: {
1065 | localConnection: getPath(this._reports),
1066 | remoteConnection: getRemotePath(this._reports),
1067 | },
1068 | },
1069 | };
1070 | }
1071 |
1072 | updateConfig(config) {
1073 | this._cfg = config;
1074 | }
1075 |
1076 | getLastReport() {
1077 | return this._reports.slice()
1078 | .pop() || null;
1079 | }
1080 |
1081 | getBeforeLastReport() {
1082 | const duplicated = this._reports.slice();
1083 | duplicated.pop();
1084 | return duplicated.pop() || null;
1085 | }
1086 |
1087 | getReportsNumber() {
1088 | return this._reports.length;
1089 | }
1090 | }
1091 |
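// Illustrative sketch of consuming the ticket built by generateTicket() (field names
// come from the object returned above; how the exporter instance is obtained is an
// assumption):
//
// const ticket = exporter.generateTicket();
// console.log(ticket.version, ticket.started, ticket.ended);
// console.log("connectivity RTT (ms):", ticket.data.rtt.avg);
// Object.entries(ticket.ssrc).forEach(([ssrc, stats]) => {
//   console.log(ssrc, stats.type, stats.direction, stats.bitrate && stats.bitrate.avg);
// });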
--------------------------------------------------------------------------------
/src/extractor.js:
--------------------------------------------------------------------------------
1 | import {
2 | PROPERTY,
3 | INFRASTRUCTURE_VALUE,
4 | STAT_TYPE,
5 | INFRASTRUCTURE_LABEL,
6 | TYPE,
7 | VALUE,
8 | DIRECTION,
9 | } from "./utils/models";
10 |
11 | import {
12 | findOutgoingTrackFromPeerConnectionByKind,
13 | findTrackInPeerConnectionById, fixed2,
14 | getSSRCDataFromBunch,
15 | } from "./utils/helper";
16 |
17 | import { debug } from "./utils/log";
18 |
19 | const moduleName = "extractor ";
20 |
21 | const extractPlayoutInformation = (report, previousReport) => {
22 | const previousSynthetized = previousReport ? (previousReport[PROPERTY.SYNTHETIZED_SAMPLES_DURATION] * 1000) || 0 : 0;
23 | const currentSynthetized = report ? (report[PROPERTY.SYNTHETIZED_SAMPLES_DURATION] * 1000) || 0 : 0;
24 | const totalSamplesDuration = report ? (report[PROPERTY.TOTAL_SAMPLES_DURATION] * 1000) || 0 : 0;
25 | const previousTotalSamplesDuration = previousReport ? (previousReport[PROPERTY.TOTAL_SAMPLES_DURATION] * 1000) || 0 : 0;
26 | const delta = currentSynthetized - previousSynthetized;
27 | const deltaDuration = totalSamplesDuration - previousTotalSamplesDuration;
28 | const totalDelay = report ? report[PROPERTY.TOTAL_PLAYOUT_DELAY] || 0 : 0;
29 | const totalSamplesCount = report ? report[PROPERTY.TOTAL_SAMPLES_COUNT] || 0 : 0;
30 |
31 | const deltaDelay = totalSamplesCount ? totalDelay / totalSamplesCount : 0;
32 | const deltaPercentSynthetized = deltaDuration ? (delta / deltaDuration) * 100 : 0;
33 | const totalPercentSynthetized = totalSamplesDuration ? (currentSynthetized / totalSamplesDuration) * 100 : 0;
34 |
35 | return {
36 | total_synthetized_ms_in: currentSynthetized,
37 | delta_synthetized_ms_in: delta,
38 | percent_synthetized_in: deltaPercentSynthetized,
39 | total_percent_synthetized_in: totalPercentSynthetized,
40 | total_playout_ms_in: totalDelay,
41 | delta_playout_delay_ms_in: deltaDelay,
42 | };
43 | };
44 |
45 | const extractRTTBasedOnRTCP = (bunch, kind, referenceReport, previousBunch) => {
46 | let supportOfMeasure = false;
47 | const previousRTT = previousBunch[kind].total_rtt_ms_out;
48 | const previousNbMeasure = previousBunch[kind].total_rtt_measure_out;
49 | const referenceRTT = referenceReport
50 | ? referenceReport[kind].total_rtt_ms_out
51 | : 0;
52 | const referenceNbMeasure = referenceReport
53 | ? referenceReport[kind].total_rtt_measure_out
54 | : 0;
55 |
56 | const returnedValuesByDefault = {
57 | rtt: null,
58 | totalRTT: previousRTT,
59 | totalRTTMeasurements: previousNbMeasure,
60 | };
61 |
62 | if (bunch[PROPERTY.TIMESTAMP] === previousBunch[kind].timestamp_out) {
63 | return returnedValuesByDefault;
64 | }
65 |
66 | // If RTT is not part of the stat - return
67 | if (!Object.prototype.hasOwnProperty.call(bunch, PROPERTY.ROUND_TRIP_TIME)) {
68 | return returnedValuesByDefault;
69 | }
70 |
71 | // If no measure yet or no new measure - return
72 | if (
73 | Object.prototype.hasOwnProperty.call(
74 | bunch,
75 | PROPERTY.TOTAL_ROUND_TRIP_TIME_MEASUREMENTS,
76 | )
77 | ) {
78 | supportOfMeasure = true;
79 | if (
80 | Number(bunch[PROPERTY.TOTAL_ROUND_TRIP_TIME_MEASUREMENTS]) === 0 ||
81 | Number(bunch[PROPERTY.TOTAL_ROUND_TRIP_TIME_MEASUREMENTS]) -
82 | referenceNbMeasure ===
83 | previousNbMeasure
84 | ) {
85 | return returnedValuesByDefault;
86 | }
87 | }
88 |
89 | const currentRTT = Number(1000) * Number(bunch[PROPERTY.ROUND_TRIP_TIME]);
90 | let currentTotalRTT = previousRTT + currentRTT;
91 | let currentTotalMeasurements = previousNbMeasure + 1;
92 |
93 | // If support of totalRoundTripTime
94 | if (supportOfMeasure) {
95 | currentTotalRTT =
96 | Number(1000) * Number(bunch[PROPERTY.TOTAL_ROUND_TRIP_TIME]) -
97 | referenceRTT;
98 | currentTotalMeasurements =
99 | Number(bunch[PROPERTY.TOTAL_ROUND_TRIP_TIME_MEASUREMENTS]) -
100 | referenceNbMeasure;
101 | }
102 |
103 | return {
104 | rtt: currentRTT,
105 | totalRTT: currentTotalRTT,
106 | totalRTTMeasurements: currentTotalMeasurements,
107 | };
108 | };
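// Worked example (illustrative): for a bunch with roundTripTime = 0.05,
// totalRoundTripTime = 1.2 and totalRoundTripTimeMeasurements = 24, with an empty
// reference report and a previous bunch holding fewer measurements and an older
// timestamp, the function returns { rtt: 50, totalRTT: 1200, totalRTTMeasurements: 24 }.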
109 |
110 | const extractRTTBasedOnSTUNConnectivityCheck = (
111 | bunch,
112 | kind,
113 | referenceReport,
114 | previousBunch,
115 | ) => {
116 | // If RTT is not part of the stat - return null value
117 | if (
118 | !Object.prototype.hasOwnProperty.call(
119 | bunch,
120 | PROPERTY.CURRENT_ROUND_TRIP_TIME,
121 | )
122 | ) {
123 | return {
124 | rtt: null,
125 | totalRTT: previousBunch[kind].total_rtt_connectivity_ms,
126 | totalRTTMeasurements:
127 | previousBunch[kind].total_rtt_connectivity_measure,
128 | };
129 | }
130 |
131 | const currentRTT =
132 | Number(1000) * Number(bunch[PROPERTY.CURRENT_ROUND_TRIP_TIME]);
133 | let currentTotalRTT =
134 | previousBunch[kind].total_rtt_connectivity_ms + currentRTT;
135 | let currentTotalMeasurements =
136 | previousBunch[kind].total_rtt_connectivity_measure + 1;
137 |
138 | // If support of totalRoundTripTime
139 | if (
140 | Object.prototype.hasOwnProperty.call(bunch, PROPERTY.TOTAL_ROUND_TRIP_TIME)
141 | ) {
142 | currentTotalRTT =
143 | Number(1000) * Number(bunch[PROPERTY.TOTAL_ROUND_TRIP_TIME]) -
144 | (referenceReport
145 | ? referenceReport[kind].total_rtt_connectivity_ms
146 | : 0);
147 | }
148 | // If support of responsesReceived
149 | if (
150 | Object.prototype.hasOwnProperty.call(bunch, PROPERTY.RESPONSES_RECEIVED)
151 | ) {
152 | currentTotalMeasurements =
153 | Number(bunch[PROPERTY.RESPONSES_RECEIVED]) -
154 | (referenceReport
155 | ? referenceReport[kind].total_rtt_connectivity_measure
156 | : 0);
157 | }
158 |
159 | return {
160 | rtt: currentRTT,
161 | totalRTT: currentTotalRTT,
162 | totalRTTMeasurements: currentTotalMeasurements,
163 | };
164 | };
165 |
166 | const extractLastJitter = (bunch, kind, previousBunch) => {
167 | if (bunch[PROPERTY.TIMESTAMP] === previousBunch[kind].timestamp_out) {
168 | return null;
169 | }
170 |
171 | if (!Object.prototype.hasOwnProperty.call(bunch, PROPERTY.JITTER)) {
172 | return null;
173 | }
174 |
175 | return Number(1000) * (Number(bunch[PROPERTY.JITTER]) || 0);
176 | };
177 |
178 | const extractJitterBufferInfo = (bunch, kind, previousBunch) => {
179 | const jitterBufferDelay = bunch[PROPERTY.JITTER_BUFFER_DELAY] * 1000 || 0;
180 | const jitterBufferEmittedCount = bunch[PROPERTY.JITTER_BUFFER_EMITTED_COUNT] || 0;
181 |
182 | const deltaJitterBufferDelay = jitterBufferDelay - previousBunch[kind].total_time_jitter_buffer_delay_in;
183 | const deltaJitterBufferEmittedCount = jitterBufferEmittedCount - previousBunch[kind].total_jitter_emitted_in;
184 |
185 | return {
186 | delta_ms_jitter_buffer_delay: deltaJitterBufferEmittedCount ? deltaJitterBufferDelay / deltaJitterBufferEmittedCount : 0,
187 | total_time_jitter_buffer_delay: jitterBufferDelay,
188 | total_time_jitter_emitted: jitterBufferEmittedCount,
189 | };
190 | };
191 |
192 | const extractDecodeTime = (bunch, previousBunch) => {
193 | if (
194 | !Object.prototype.hasOwnProperty.call(bunch, PROPERTY.FRAMES_DECODED) ||
195 | !Object.prototype.hasOwnProperty.call(bunch, PROPERTY.TOTAL_DECODE_TIME)
196 | ) {
197 | return {
198 | delta_ms_decode_frame:
199 | previousBunch[VALUE.VIDEO].delta_decode_frame_ms_in,
200 | frames_decoded: previousBunch[VALUE.VIDEO].total_frames_decoded_in,
201 | total_decode_time: previousBunch[VALUE.VIDEO].total_time_decoded_in,
202 | };
203 | }
204 |
205 | const decodedFrames = bunch[PROPERTY.FRAMES_DECODED];
206 | const totalDecodeTime = bunch[PROPERTY.TOTAL_DECODE_TIME] * 1000; // in ms
207 | const totalProcessingDelay = bunch[PROPERTY.TOTAL_PROCESSING_DELAY] * 1000 || 0; // in ms
208 | const totalAssemblyTime = bunch[PROPERTY.TOTAL_ASSEMBLY_TIME] * 1000 || 0; // in ms
209 |
210 | const totalProcessingDelayDelta = totalProcessingDelay - previousBunch[VALUE.VIDEO].total_time_processing_delay_in;
211 | const decodeTimeDelta = totalDecodeTime - previousBunch[VALUE.VIDEO].total_time_decoded_in;
212 | const frameDelta = decodedFrames - previousBunch[VALUE.VIDEO].total_frames_decoded_in;
213 | const totalAssemblyTimeDelta = totalAssemblyTime - previousBunch[VALUE.VIDEO].total_time_assembly_delay_in;
214 |
215 | return {
216 | frames_decoded: decodedFrames,
217 | delta_ms_decode_frame: frameDelta > 0 ? decodeTimeDelta / frameDelta : 0,
218 | delta_ms_processing_delay: frameDelta > 0 ? totalProcessingDelayDelta / frameDelta : 0,
219 | delta_ms_assembly_delay: frameDelta > 0 ? totalAssemblyTimeDelta / frameDelta : 0,
220 | total_time_processing_delay: totalProcessingDelay,
221 | total_decode_time: totalDecodeTime,
222 | total_assembly_time: totalAssemblyTime,
223 | };
224 | };
225 |
226 | const extractEncodeTime = (bunch, previousBunch) => {
227 | if (
228 | !Object.prototype.hasOwnProperty.call(bunch, PROPERTY.FRAMES_ENCODED) ||
229 | !Object.prototype.hasOwnProperty.call(bunch, PROPERTY.TOTAL_ENCODE_TIME)
230 | ) {
231 | return {
232 | delta_ms_encode_frame: previousBunch[VALUE.VIDEO].delta_encode_frame_ms_out,
233 | frames_encoded: previousBunch[VALUE.VIDEO].total_frames_encoded_out,
234 | total_encode_time: previousBunch[VALUE.VIDEO].total_time_encoded_out,
235 | };
236 | }
237 |
238 | const encodedFrames = bunch[PROPERTY.FRAMES_ENCODED];
239 | const totalEncodeTime = bunch[PROPERTY.TOTAL_ENCODE_TIME];
240 |
241 | const encodeTimeDelta =
242 | totalEncodeTime - previousBunch[VALUE.VIDEO].total_time_encoded_out;
243 | const frameDelta =
244 | encodedFrames - previousBunch[VALUE.VIDEO].total_frames_encoded_out;
245 | const framesEncodedDelta =
246 | frameDelta > 0 && encodeTimeDelta
247 | ? (encodeTimeDelta * 1000) / frameDelta
248 | : 0;
249 |
250 | return {
251 | delta_ms_encode_frame: framesEncodedDelta,
252 | frames_encoded: encodedFrames,
253 | total_encode_time: totalEncodeTime,
254 | };
255 | };
256 |
257 | const extractAudioVideoPacketSent = (
258 | bunch,
259 | kind,
260 | previousBunch,
261 | referenceReport,
262 | ) => {
263 | const packetsSent =
264 | (Number(bunch[PROPERTY.PACKETS_SENT]) || 0) -
265 | (referenceReport ? referenceReport[kind].total_packets_out : 0);
266 | const deltaPacketsSent = packetsSent - previousBunch[kind].total_packets_out;
267 | const totalPacketSendDelay = (Number(bunch[PROPERTY.TOTAL_PACKETS_SEND_DELAY]) * 1000 || 0) -
268 | (referenceReport ? referenceReport[kind].total_time_packets_delay_out : 0);
269 | const deltaPacketsDelay = totalPacketSendDelay - previousBunch[kind].total_time_packets_delay_out;
270 | const deltaAvgPacketSendDelay = deltaPacketsSent ? deltaPacketsDelay / deltaPacketsSent : 0;
271 | const KBytesSent = (Number(bunch[PROPERTY.BYTES_SENT]) / 1024) - (referenceReport ? referenceReport[kind].total_KBytes_out : 0);
272 | const deltaKBytesSent = KBytesSent - previousBunch[kind].total_KBytes_out;
273 | const timestamp = bunch[PROPERTY.TIMESTAMP] || Date.now();
274 | const referenceTimestamp = referenceReport ? referenceReport.timestamp : null;
275 | let previousTimestamp = previousBunch.timestamp;
276 | if (!previousTimestamp && referenceTimestamp) {
277 | previousTimestamp = referenceTimestamp;
278 | }
279 | const deltaMs = previousTimestamp ? timestamp - previousTimestamp : 0;
280 | const kbsSent = deltaMs > 0 ? ((deltaKBytesSent * 0.008 * 1024) / deltaMs) * 1000 : 0; // kbs = kilo bits per second
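// Worked example (illustrative): 125 KBytes sent over a 1000 ms interval gives
// (125 * 0.008 * 1024 / 1000) * 1000 = 1024 kbs, i.e. KBytes * 1024 -> bytes,
// * 8 / 1000 -> kilobits, divided by the elapsed ms and scaled back to one second.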
281 |
282 | return {
283 | packetsSent,
284 | deltaPacketsSent,
285 | KBytesSent,
286 | deltaKBytesSent,
287 | kbsSent,
288 | deltaAvgPacketSendDelay,
289 | totalPacketSendDelay,
290 | };
291 | };
292 |
293 | const extractAudioVideoPacketLost = (
294 | bunch,
295 | kind,
296 | previousBunch,
297 | referenceReport,
298 | ) => {
299 | let packetsLost = previousBunch[kind].total_packets_lost_out;
300 | let deltaPacketsLost = 0;
301 | let fractionLost = 0;
302 | if (Object.prototype.hasOwnProperty.call(bunch, PROPERTY.PACKETS_LOST)) {
303 | packetsLost = (Number(bunch[PROPERTY.PACKETS_LOST]) || 0) - (referenceReport ? referenceReport[kind].total_packets_lost_out : 0);
304 | deltaPacketsLost = packetsLost - previousBunch[kind].total_packets_lost_out;
305 | }
306 |
307 | if (Object.prototype.hasOwnProperty.call(bunch, PROPERTY.FRACTION_LOST)) {
308 | fractionLost = Number(100 * bunch[PROPERTY.FRACTION_LOST]);
309 | }
310 | return {
311 | packetsLost,
312 | deltaPacketsLost,
313 | fractionLost,
314 | };
315 | };
316 |
317 | const extractAudioVideoPacketReceived = (
318 | bunch,
319 | kind,
320 | previousBunch,
321 | referenceReport,
322 | ) => {
323 | if (
324 | !Object.prototype.hasOwnProperty.call(bunch, PROPERTY.PACKETS_RECEIVED) ||
325 | !Object.prototype.hasOwnProperty.call(bunch, PROPERTY.PACKETS_LOST) ||
326 | !Object.prototype.hasOwnProperty.call(bunch, PROPERTY.BYTES_RECEIVED)
327 | ) {
328 | return {
329 | percent_packets_lost: previousBunch[kind].percent_packets_lost_in,
330 | packetsReceived: previousBunch[kind].total_packets_in,
331 | packetsLost: previousBunch[kind].total_packets_lost_in,
332 | bytesReceived: previousBunch[kind].total_KBytes_in,
333 | };
334 | }
335 |
336 | const packetsReceived =
337 | (Number(bunch[PROPERTY.PACKETS_RECEIVED]) || 0) -
338 | (referenceReport ? referenceReport[kind].total_packets_in : 0);
339 | const packetsLost =
340 | (Number(bunch[PROPERTY.PACKETS_LOST]) || 0) -
341 | (referenceReport ? referenceReport[kind].total_packets_lost_in : 0);
342 | const deltaPacketsLost =
343 | packetsLost - previousBunch[kind].total_packets_lost_in;
344 | const deltaPacketsReceived =
345 | packetsReceived - previousBunch[kind].total_packets_in;
346 | const percentPacketsLost =
347 | packetsReceived !== previousBunch[kind].total_packets_in
348 | ? (deltaPacketsLost * 100) / (deltaPacketsLost + deltaPacketsReceived)
349 | : 0.0;
350 | const KBytesReceived = (Number(bunch[PROPERTY.BYTES_RECEIVED]) / 1024) - (referenceReport ? referenceReport[kind].total_KBytes_in : 0);
351 | const deltaKBytesReceived = KBytesReceived - previousBunch[kind].total_KBytes_in;
352 | const timestamp = bunch[PROPERTY.TIMESTAMP] || Date.now();
353 | const referenceTimestamp = referenceReport ? referenceReport.timestamp : null;
354 | let previousTimestamp = previousBunch.timestamp;
355 | if (!previousTimestamp && referenceTimestamp) {
356 | previousTimestamp = referenceTimestamp;
357 | }
358 | const deltaMs = previousTimestamp ? timestamp - previousTimestamp : 0;
359 | const kbsReceived = deltaMs > 0 ? ((deltaKBytesReceived * 0.008 * 1024) / deltaMs) * 1000 : 0; // kbs = kilo bits per second
360 |
361 | return {
362 | percentPacketsLost,
363 | packetsReceived,
364 | deltaPacketsReceived,
365 | packetsLost,
366 | deltaPacketsLost,
367 | KBytesReceived,
368 | deltaKBytesReceived,
369 | kbsReceived,
370 | };
371 | };
372 |
373 | const extractRelayProtocolUsed = (bunch) => {
374 | const candidateType = bunch[PROPERTY.CANDIDATE_TYPE];
375 | if (candidateType !== "relay") {
376 | return "";
377 | }
378 | return bunch[PROPERTY.RELAY_PROTOCOL] || "";
379 | };
380 |
381 | const extractInfrastructureValue = (bunch) => {
382 | if (!Object.prototype.hasOwnProperty.call(bunch, PROPERTY.NETWORK_TYPE)) {
383 | // Assume Wi-Fi when not provided (Firefox/Safari at this time)
384 | return INFRASTRUCTURE_VALUE.WIFI;
385 | }
386 |
387 | switch (bunch[PROPERTY.NETWORK_TYPE]) {
388 | case INFRASTRUCTURE_LABEL.ETHERNET:
389 | return INFRASTRUCTURE_VALUE.ETHERNET;
390 | case INFRASTRUCTURE_LABEL.CELLULAR_4G:
391 | return INFRASTRUCTURE_VALUE.CELLULAR_4G;
392 | case INFRASTRUCTURE_LABEL.WIFI:
393 | return INFRASTRUCTURE_VALUE.WIFI;
394 | default:
395 | return INFRASTRUCTURE_VALUE.CELLULAR;
396 | }
397 | };
398 |
399 | const extractVideoSize = (bunch, previousBunch, direction) => {
400 | if (
401 | !Object.prototype.hasOwnProperty.call(bunch, PROPERTY.FRAME_HEIGHT) ||
402 | !Object.prototype.hasOwnProperty.call(bunch, PROPERTY.FRAME_WIDTH)
403 | ) {
404 | return { width: 0, height: 0, framerate: 0 };
405 | }
406 |
407 | const width = bunch[PROPERTY.FRAME_WIDTH] || 0;
408 | const height = bunch[PROPERTY.FRAME_HEIGHT] || 0;
409 | let framerate = fixed2(bunch[PROPERTY.FRAMES_PER_SECOND] || 0);
410 |
411 | const frames = direction === DIRECTION.INBOUND ? bunch[PROPERTY.FRAMES_DECODED] : bunch[PROPERTY.FRAMES_ENCODED];
412 | if (previousBunch) {
413 | const previousFrames = direction === DIRECTION.INBOUND ? previousBunch[PROPERTY.FRAMES_DECODED] : previousBunch[PROPERTY.FRAMES_ENCODED];
414 | const period = (bunch.timestamp - previousBunch.timestamp) / 1000; // in seconds
415 | const deltaFrames = frames - previousFrames;
416 | if (period !== 0) {
417 | let divider = 1;
418 | if (direction === DIRECTION.OUTBOUND && bunch[PROPERTY.SCALABILITY_MODE]) {
419 | const scalabilityMode = bunch[PROPERTY.SCALABILITY_MODE];
420 | if (scalabilityMode.startsWith("L2") || scalabilityMode.startsWith("S2")) {
421 | divider = 2;
422 | } else if (scalabilityMode.startsWith("L3") || scalabilityMode.startsWith("S3")) {
423 | divider = 3;
424 | }
425 | }
426 | framerate = fixed2(deltaFrames / period / divider);
427 | }
428 | }
429 |
430 | return {
431 | width,
432 | height,
433 | framerate,
434 | };
435 | };
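// Worked example (illustrative): an outbound stream with scalabilityMode "L3T3" that
// encoded 60 more frames than the previous bunch taken 2000 ms earlier yields
// framerate = fixed2(60 / 2 / 3) = 10, i.e. the per-spatial-layer framerate.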
436 |
437 | const extractQualityLimitation = (bunch) => {
438 | const reason = Object.prototype.hasOwnProperty.call(
439 | bunch,
440 | PROPERTY.QUALITY_LIMITATION_REASON,
441 | )
442 | ? bunch[PROPERTY.QUALITY_LIMITATION_REASON]
443 | : null;
444 | const resolutionChanges = Object.prototype.hasOwnProperty.call(
445 | bunch,
446 | PROPERTY.QUALITY_LIMITATION_RESOLUTION_CHANGES,
447 | )
448 | ? bunch[PROPERTY.QUALITY_LIMITATION_RESOLUTION_CHANGES]
449 | : null;
450 | const durations = Object.prototype.hasOwnProperty.call(
451 | bunch,
452 | PROPERTY.QUALITY_LIMITATION_DURATIONS,
453 | )
454 | ? bunch[PROPERTY.QUALITY_LIMITATION_DURATIONS]
455 | : null;
456 |
457 | if (durations) {
458 | Object.keys(durations).forEach((key) => {
459 | if (durations[key] > 1000) {
460 | durations[key] = Number(durations[key] / 1000);
461 | }
462 | });
463 | }
464 | return { reason, durations, resolutionChanges };
465 | };
466 |
467 | const extractVideoGlitch = (bunch, kind, previousReport, referenceReport) => {
468 | if (
469 | !Object.prototype.hasOwnProperty.call(bunch, PROPERTY.FREEZE_COUNT) ||
470 | !Object.prototype.hasOwnProperty.call(bunch, PROPERTY.PAUSE_COUNT)
471 | ) {
472 | return {
473 | freezeCount: previousReport[kind].total_glitch_in.freeze,
474 | pauseCount: previousReport[kind].total_glitch_in.pause,
475 | deltaFreezeCount: 0,
476 | deltaPauseCount: 0,
477 | };
478 | }
479 |
480 | const freezeCount = (bunch[PROPERTY.FREEZE_COUNT] || 0) - (referenceReport ? referenceReport[kind].total_glitch_in.freeze : 0);
481 | const pauseCount = (bunch[PROPERTY.PAUSE_COUNT] || 0) - (referenceReport ? referenceReport[kind].total_glitch_in.pause : 0);
482 |
483 | return {
484 | freezeCount,
485 | pauseCount,
486 | deltaFreezeCount: freezeCount - previousReport[kind].total_glitch_in.freeze,
487 | deltaPauseCount: pauseCount - previousReport[kind].total_glitch_in.pause,
488 | };
489 | };
490 |
491 | const extractNackAndPliCountSentWhenReceiving = (bunch, previousReport, referenceReport) => {
492 | if (
493 | !Object.prototype.hasOwnProperty.call(bunch, PROPERTY.PLI) ||
494 | !Object.prototype.hasOwnProperty.call(bunch, PROPERTY.NACK)
495 | ) {
496 | return {
497 | pliCount: previousReport.total_pli_sent_in,
498 | nackCount: previousReport.total_nack_sent_in,
499 | deltaPliCount: 0,
500 | deltaNackCount: 0,
501 | };
502 | }
503 |
504 | const pliCount = (bunch[PROPERTY.PLI] || 0) - (referenceReport ? referenceReport[VALUE.VIDEO].total_pli_sent_in : 0);
505 | const nackCount = (bunch[PROPERTY.NACK] || 0) - (referenceReport ? referenceReport[VALUE.VIDEO].total_nack_sent_in : 0);
506 |
507 | return {
508 | pliCount,
509 | nackCount,
510 | deltaPliCount: pliCount - previousReport[VALUE.VIDEO].total_pli_sent_in,
511 | deltaNackCount: nackCount - previousReport[VALUE.VIDEO].total_nack_sent_in,
512 | };
513 | };
514 |
515 | const extractNackAndPliCountReceivedWhenSending = (bunch, previousReport, referenceReport) => {
516 | if (
517 | !Object.prototype.hasOwnProperty.call(bunch, PROPERTY.PLI) ||
518 | !Object.prototype.hasOwnProperty.call(bunch, PROPERTY.NACK)
519 | ) {
520 | return {
521 | pliCount: previousReport.total_pli_received_out,
522 | nackCount: previousReport.total_nack_received_out,
523 | deltaPliCount: 0,
524 | deltaNackCount: 0,
525 | };
526 | }
527 |
528 | const pliCount = (bunch[PROPERTY.PLI] || 0) - (referenceReport ? referenceReport[VALUE.VIDEO].total_pli_received_out : 0);
529 | const nackCount = (bunch[PROPERTY.NACK] || 0) - (referenceReport ? referenceReport[VALUE.VIDEO].total_nack_received_out : 0);
530 |
531 | return {
532 | pliCount,
533 | nackCount,
534 | deltaPliCount: pliCount - previousReport[VALUE.VIDEO].total_pli_received_out,
535 | deltaNackCount: nackCount - previousReport[VALUE.VIDEO].total_nack_received_out,
536 | };
537 | };
538 |
539 | const extractAudioCodec = (bunch) => ({
540 | channels: bunch[PROPERTY.CHANNELS] || null,
541 | clock_rate: bunch[PROPERTY.CLOCK_RATE] || null,
542 | mime_type: bunch[PROPERTY.MIME_TYPE] || null,
543 | sdp_fmtp_line: bunch[PROPERTY.SDP_FMTP_LINE] || null,
544 | });
545 |
546 | const extractVideoCodec = (bunch) => ({
547 | clock_rate: bunch[PROPERTY.CLOCK_RATE] || null,
548 | mime_type: bunch[PROPERTY.MIME_TYPE] || null,
549 | });
550 |
551 | const extractBytesSentReceived = (bunch, previousBunch, referenceReport) => {
552 | const totalKBytesReceived =
553 | (bunch[PROPERTY.BYTES_RECEIVED] || 0) / 1024 -
554 | (referenceReport ? referenceReport.data.total_KBytes_in : 0);
555 | const totalKBytesSent =
556 | (bunch[PROPERTY.BYTES_SENT] || 0) / 1024 -
557 | (referenceReport ? referenceReport.data.total_KBytes_out : 0);
558 |
559 | const timestamp = bunch[PROPERTY.TIMESTAMP] || Date.now();
560 | const KBytesReceived =
561 | totalKBytesReceived - previousBunch.data.total_KBytes_in;
562 | const KBytesSent = totalKBytesSent - previousBunch.data.total_KBytes_out;
563 |
564 | const referenceTimestamp = referenceReport ? referenceReport.timestamp : null;
565 | let previousTimestamp = previousBunch.timestamp;
566 | if (!previousTimestamp && referenceTimestamp) {
567 | previousTimestamp = referenceTimestamp;
568 | }
569 | const deltaMs = previousTimestamp ? timestamp - previousTimestamp : 0;
570 | const kbsSpeedReceived =
571 | deltaMs > 0 ? ((KBytesReceived * 0.008 * 1024) / deltaMs) * 1000 : 0; // kbs = kilo bits per second
572 | const kbsSpeedSent =
573 | deltaMs > 0 ? ((KBytesSent * 0.008 * 1024) / deltaMs) * 1000 : 0;
574 |
575 | return {
576 | total_KBytes_received: totalKBytesReceived,
577 | total_KBytes_sent: totalKBytesSent,
578 | delta_KBytes_received: KBytesReceived,
579 | delta_KBytes_sent: KBytesSent,
580 | kbs_speed_received: kbsSpeedReceived,
581 | kbs_speed_sent: kbsSpeedSent,
582 | };
583 | };
584 |
585 | const extractAvailableBandwidth = (bunch) => {
586 | const kbsIncomingBandwidth =
587 | bunch[PROPERTY.AVAILABLE_INCOMING_BITRATE] / 1024 || 0;
588 | const kbsOutgoingBandwidth =
589 | bunch[PROPERTY.AVAILABLE_OUTGOING_BITRATE] / 1024 || 0;
590 |
591 | return {
592 | kbs_incoming_bandwidth: kbsIncomingBandwidth,
593 | kbs_outgoing_bandwidth: kbsOutgoingBandwidth,
594 | };
595 | };
596 |
597 | export const extract = (bunch, previousBunch, pname, referenceReport, raw, oldRaw, _refPC) => {
598 | if (!bunch) {
599 | return [];
600 | }
601 |
602 | debug(
603 | moduleName,
604 | `extract() - got stats ${bunch[PROPERTY.TYPE]} for ${pname}`,
605 | bunch,
606 | );
607 |
608 | switch (bunch[PROPERTY.TYPE]) {
609 | case TYPE.CANDIDATE_PAIR:
610 | let selectedPairForFirefox = false;
611 | let selectedPair = false;
612 | // get Transport report
613 | if (raw.has(bunch[PROPERTY.TRANSPORT_ID])) {
614 | const transportReport = raw.get(bunch[PROPERTY.TRANSPORT_ID]);
615 | if (transportReport[PROPERTY.SELECTED_CANDIDATEPAIR_ID] === bunch[PROPERTY.ID]) {
616 | selectedPair = true;
617 | }
618 | }
619 |
620 | // FF: NO RTCTransportStats report - Use candidate-pair with selected=true
621 | if (PROPERTY.SELECTED in bunch && bunch[PROPERTY.SELECTED]) {
622 | selectedPairForFirefox = true;
623 | }
624 |
625 | if (selectedPair || selectedPairForFirefox) {
626 | const localCandidateId = bunch[PROPERTY.LOCAL_CANDIDATE_ID];
627 | const remoteCandidateId = bunch[PROPERTY.REMOTE_CANDIDATE_ID];
628 | const selectedCandidatePairId = bunch[PROPERTY.ID];
629 |
630 | const valueSentReceived = extractBytesSentReceived(
631 | bunch,
632 | previousBunch,
633 | referenceReport,
634 | );
635 | const bandwidth = extractAvailableBandwidth(bunch);
636 | const rttConnectivity = extractRTTBasedOnSTUNConnectivityCheck(
637 | bunch,
638 | "data",
639 | referenceReport,
640 | previousBunch,
641 | );
642 |
643 | const result = [
644 | {
645 | type: STAT_TYPE.NETWORK,
646 | value: { local_candidate_id: localCandidateId },
647 | },
648 | {
649 | type: STAT_TYPE.NETWORK,
650 | value: { remote_candidate_id: remoteCandidateId },
651 | },
652 | {
653 | type: STAT_TYPE.DATA,
654 | value: { total_KBytes_in: valueSentReceived.total_KBytes_received },
655 | },
656 | {
657 | type: STAT_TYPE.DATA,
658 | value: { total_KBytes_out: valueSentReceived.total_KBytes_sent },
659 | },
660 | {
661 | type: STAT_TYPE.DATA,
662 | value: { delta_KBytes_in: valueSentReceived.delta_KBytes_received },
663 | },
664 | {
665 | type: STAT_TYPE.DATA,
666 | value: { delta_KBytes_out: valueSentReceived.delta_KBytes_sent },
667 | },
668 | {
669 | type: STAT_TYPE.DATA,
670 | value: { delta_kbs_in: valueSentReceived.kbs_speed_received },
671 | },
672 | {
673 | type: STAT_TYPE.DATA,
674 | value: { delta_kbs_out: valueSentReceived.kbs_speed_sent },
675 | },
676 | {
677 | type: STAT_TYPE.DATA,
678 | value: { delta_kbs_bandwidth_in: bandwidth.kbs_incoming_bandwidth },
679 | },
680 | {
681 | type: STAT_TYPE.DATA,
682 | value: {
683 | delta_kbs_bandwidth_out: bandwidth.kbs_outgoing_bandwidth,
684 | },
685 | },
686 | {
687 | type: STAT_TYPE.DATA,
688 | value: { delta_rtt_connectivity_ms: rttConnectivity.rtt },
689 | },
690 | {
691 | type: STAT_TYPE.DATA,
692 | value: { total_rtt_connectivity_ms: rttConnectivity.totalRTT },
693 | },
694 | {
695 | type: STAT_TYPE.DATA,
696 | value: {
697 | total_rtt_connectivity_measure:
698 | rttConnectivity.totalRTTMeasurements,
699 | },
700 | },
701 | ];
702 |
703 | if (selectedPairForFirefox) {
704 | result.push(
705 | {
706 | type: STAT_TYPE.NETWORK,
707 | internal: "selectedPairChanged",
708 | value: { selected_candidate_pair_id: selectedCandidatePairId },
709 | },
710 | );
711 | }
712 | return result;
713 | }
714 | break;
715 | case TYPE.LOCAL_CANDIDATE:
716 | if (bunch[PROPERTY.ID] === previousBunch.network.local_candidate_id) {
717 | return [
718 | {
719 | type: STAT_TYPE.NETWORK,
720 | value: { infrastructure: extractInfrastructureValue(bunch) },
721 | },
722 | {
723 | type: STAT_TYPE.NETWORK,
724 | value: {
725 | local_candidate_type: bunch[PROPERTY.CANDIDATE_TYPE] || "",
726 | },
727 | },
728 | {
729 | type: STAT_TYPE.NETWORK,
730 | value: { local_candidate_protocol: bunch[PROPERTY.PROTOCOL] || "" },
731 | },
732 | {
733 | type: STAT_TYPE.NETWORK,
734 | value: {
735 | local_candidate_relay_protocol: extractRelayProtocolUsed(bunch),
736 | },
737 | },
738 | ];
739 | }
740 | break;
741 | case TYPE.REMOTE_CANDIDATE:
742 | if (bunch[PROPERTY.ID] === previousBunch.network.remote_candidate_id) {
743 | return [
744 | {
745 | type: STAT_TYPE.NETWORK,
746 | value: {
747 | remote_candidate_type: bunch[PROPERTY.CANDIDATE_TYPE] || "",
748 | },
749 | },
750 | {
751 | type: STAT_TYPE.NETWORK,
752 | value: {
753 | remote_candidate_protocol: bunch[PROPERTY.PROTOCOL] || "",
754 | },
755 | },
756 | ];
757 | }
758 | break;
759 | case TYPE.INBOUND_RTP: {
760 | // get SSRC and associated data
761 | const ssrc = bunch[PROPERTY.SSRC];
762 | const previousSSRCBunch = getSSRCDataFromBunch(ssrc, previousBunch, DIRECTION.INBOUND);
763 | if (previousSSRCBunch) {
764 | previousSSRCBunch.timestamp = previousBunch.timestamp;
765 | }
766 | const referenceSSRCBunch = getSSRCDataFromBunch(ssrc, referenceReport, DIRECTION.INBOUND);
767 | if (referenceSSRCBunch) {
768 | referenceSSRCBunch.timestamp = referenceReport.timestamp;
769 | }
770 |
771 | if (bunch[PROPERTY.MEDIA_TYPE] === VALUE.AUDIO) {
772 | // Packets stats and Bytes
773 | const data = extractAudioVideoPacketReceived(
774 | bunch,
775 | VALUE.AUDIO,
776 | previousSSRCBunch,
777 | referenceSSRCBunch,
778 | );
779 |
780 | // Jitter stats
781 | const jitter = extractLastJitter(bunch, VALUE.AUDIO, previousSSRCBunch);
782 |
783 | // Codec stats
784 | const audioInputCodecId = bunch[PROPERTY.CODEC_ID] || "";
785 |
786 | // Audio level in
787 | const audioLevel = bunch[PROPERTY.AUDIO_LEVEL] || 0;
788 |
789 | // average playout delay
790 | let playout = null;
791 | if (raw.has(bunch[PROPERTY.PLAYOUT_ID])) {
792 | const playoutReport = raw.get(bunch[PROPERTY.PLAYOUT_ID]);
793 | const previousPlayoutReport = oldRaw ? oldRaw.get(bunch[PROPERTY.PLAYOUT_ID]) : null;
794 | playout = extractPlayoutInformation(playoutReport, previousPlayoutReport);
795 | }
796 |
797 | const jitterBuffer = extractJitterBufferInfo(bunch, VALUE.AUDIO, previousSSRCBunch);
798 |
799 | return [
800 | {
801 | ssrc,
802 | type: STAT_TYPE.AUDIO,
803 | value: { codec_id_in: audioInputCodecId },
804 | },
805 | {
806 | ssrc,
807 | type: STAT_TYPE.AUDIO,
808 | value: { total_packets_in: data.packetsReceived },
809 | },
810 | {
811 | ssrc,
812 | type: STAT_TYPE.AUDIO,
813 | value: { delta_packets_in: data.deltaPacketsReceived },
814 | },
815 | {
816 | ssrc,
817 | type: STAT_TYPE.AUDIO,
818 | value: { total_packets_lost_in: data.packetsLost },
819 | },
820 | {
821 | ssrc,
822 | type: STAT_TYPE.AUDIO,
823 | value: { delta_packets_lost_in: data.deltaPacketsLost },
824 | },
825 | {
826 | ssrc,
827 | type: STAT_TYPE.AUDIO,
828 | value: { percent_packets_lost_in: data.percentPacketsLost },
829 | },
830 | {
831 | ssrc,
832 | type: STAT_TYPE.AUDIO,
833 | value: { total_KBytes_in: data.KBytesReceived },
834 | },
835 | {
836 | ssrc,
837 | type: STAT_TYPE.AUDIO,
838 | internal: "bytesReceivedChanged",
839 | value: { delta_KBytes_in: data.deltaKBytesReceived },
840 | },
841 | {
842 | ssrc,
843 | type: STAT_TYPE.AUDIO,
844 | value: { delta_kbs_in: data.kbsReceived },
845 | },
846 | {
847 | ssrc,
848 | type: STAT_TYPE.AUDIO,
849 | value: { delta_jitter_ms_in: jitter },
850 | },
851 | {
852 | ssrc,
853 | type: STAT_TYPE.AUDIO,
854 | value: { delta_jitter_buffer_delay_ms_in: jitterBuffer.delta_ms_jitter_buffer_delay },
855 | },
856 | {
857 | ssrc,
858 | type: STAT_TYPE.AUDIO,
859 | value: { total_time_jitter_buffer_delay_in: jitterBuffer.total_time_jitter_buffer_delay },
860 | },
861 | {
862 | ssrc,
863 | type: STAT_TYPE.AUDIO,
864 | value: { total_jitter_emitted_in: jitterBuffer.total_time_jitter_emitted },
865 | },
866 | {
867 | ssrc,
868 | type: STAT_TYPE.AUDIO,
869 | value: { track_in: bunch[PROPERTY.TRACK_IDENTIFIER] },
870 | },
871 | {
872 | ssrc,
873 | type: STAT_TYPE.AUDIO,
874 | internal: "ssrcIdentifierIn",
875 | value: { ssrc_in: bunch[PROPERTY.SSRC] },
876 | },
877 | {
878 | ssrc,
879 | type: STAT_TYPE.AUDIO,
880 | value: { level_in: audioLevel },
881 | },
882 | {
883 | ssrc,
884 | type: STAT_TYPE.AUDIO,
885 | value: { delta_synthetized_ms_in: playout ? playout.delta_synthetized_ms_in : 0 },
886 | },
887 | {
888 | ssrc,
889 | type: STAT_TYPE.AUDIO,
890 | value: { total_synthetized_ms_in: playout ? playout.total_synthetized_ms_in : 0 },
891 | },
892 | {
893 | ssrc,
894 | type: STAT_TYPE.AUDIO,
895 | value: { delta_playout_delay_ms_in: playout ? playout.delta_playout_delay_ms_in : 0 },
896 | },
897 | {
898 | ssrc,
899 | type: STAT_TYPE.AUDIO,
900 | value: { total_playout_ms_in: playout ? playout.total_playout_ms_in : 0 },
901 | },
902 | {
903 | ssrc,
904 | type: STAT_TYPE.AUDIO,
905 | value: { percent_synthetized_in: playout ? playout.percent_synthetized_in : 0 },
906 | },
907 | {
908 | ssrc,
909 | type: STAT_TYPE.AUDIO,
910 | value: { total_percent_synthetized_in: playout ? playout.total_percent_synthetized_in : 0 },
911 | },
912 | ];
913 | }
914 |
915 | if (bunch[PROPERTY.MEDIA_TYPE] === VALUE.VIDEO) {
916 | // Decode time stats
917 | const data = extractDecodeTime(bunch, previousSSRCBunch);
918 |
919 | // Packets stats and Bytes
920 | const packetsData = extractAudioVideoPacketReceived(
921 | bunch,
922 | VALUE.VIDEO,
923 | previousSSRCBunch,
924 | referenceSSRCBunch,
925 | );
926 |
927 | // Jitter stats
928 | const jitter = extractLastJitter(bunch, VALUE.VIDEO, previousSSRCBunch);
929 |
930 | // Codec stats
931 | const decoderImplementation =
932 | bunch[PROPERTY.DECODER_IMPLEMENTATION] || null;
933 | const videoInputCodecId = bunch[PROPERTY.CODEC_ID] || null;
934 |
935 | // Video size
936 | const oldBunch = oldRaw ? oldRaw.get(bunch[PROPERTY.ID]) : null;
937 | const inputVideo = extractVideoSize(bunch, oldBunch, DIRECTION.INBOUND);
938 |
939 | // Nack & Pli stats
940 | const nackPliData = extractNackAndPliCountSentWhenReceiving(
941 | bunch,
942 | previousSSRCBunch,
943 | referenceSSRCBunch,
944 | );
945 |
946 | // Glitch
947 | const freezePauseData = extractVideoGlitch(bunch, VALUE.VIDEO, previousSSRCBunch, referenceSSRCBunch);
948 |
949 | // Jitter buffer
950 |         const jitterBuffer = extractJitterBufferInfo(bunch, VALUE.VIDEO, previousSSRCBunch);
951 |
952 | return [
953 | {
954 | ssrc,
955 | type: STAT_TYPE.VIDEO,
956 | value: { codec_id_in: videoInputCodecId },
957 | },
958 | {
959 | ssrc,
960 | type: STAT_TYPE.VIDEO,
961 | value: { total_packets_in: packetsData.packetsReceived },
962 | },
963 | {
964 | ssrc,
965 | type: STAT_TYPE.VIDEO,
966 | value: { delta_packets_in: packetsData.deltaPacketsReceived },
967 | },
968 | {
969 | ssrc,
970 | type: STAT_TYPE.VIDEO,
971 | value: { total_packets_lost_in: packetsData.packetsLost },
972 | },
973 | {
974 | ssrc,
975 | type: STAT_TYPE.VIDEO,
976 | value: { delta_packets_lost_in: packetsData.deltaPacketsLost },
977 | },
978 | {
979 | ssrc,
980 | type: STAT_TYPE.VIDEO,
981 | value: { percent_packets_lost_in: packetsData.percentPacketsLost },
982 | },
983 | {
984 | ssrc,
985 | type: STAT_TYPE.VIDEO,
986 | value: { total_KBytes_in: packetsData.KBytesReceived },
987 | },
988 | {
989 | ssrc,
990 | type: STAT_TYPE.VIDEO,
991 | internal: "bytesReceivedChanged",
992 | value: { delta_KBytes_in: packetsData.deltaKBytesReceived },
993 | },
994 | {
995 | ssrc,
996 | type: STAT_TYPE.VIDEO,
997 | value: { delta_kbs_in: packetsData.kbsReceived },
998 | },
999 | {
1000 | ssrc,
1001 | type: STAT_TYPE.VIDEO,
1002 | value: { delta_jitter_ms_in: jitter },
1003 | },
1004 | {
1005 | ssrc,
1006 | type: STAT_TYPE.VIDEO,
1007 | value: { delta_jitter_buffer_delay_ms_in: jitterBuffer.delta_ms_jitter_buffer_delay },
1008 | },
1009 | {
1010 | ssrc,
1011 | type: STAT_TYPE.VIDEO,
1012 | value: { total_time_jitter_buffer_delay_in: jitterBuffer.total_time_jitter_buffer_delay },
1013 | },
1014 | {
1015 | ssrc,
1016 | type: STAT_TYPE.VIDEO,
1017 | value: { total_jitter_emitted_in: jitterBuffer.total_time_jitter_emitted },
1018 | },
1019 | {
1020 | ssrc,
1021 | type: STAT_TYPE.VIDEO,
1022 | value: { decoder_in: decoderImplementation },
1023 | },
1024 | {
1025 | ssrc,
1026 | type: STAT_TYPE.VIDEO,
1027 | value: { delta_decode_frame_ms_in: data.delta_ms_decode_frame },
1028 | },
1029 | {
1030 | ssrc,
1031 | type: STAT_TYPE.VIDEO,
1032 | value: { total_frames_decoded_in: data.frames_decoded },
1033 | },
1034 | {
1035 | ssrc,
1036 | type: STAT_TYPE.VIDEO,
1037 | value: { delta_processing_delay_ms_in: data.delta_ms_processing_delay },
1038 | },
1039 | {
1040 | ssrc,
1041 | type: STAT_TYPE.VIDEO,
1042 | value: { total_time_processing_delay_in: data.total_time_processing_delay },
1043 | },
1044 | {
1045 | ssrc,
1046 | type: STAT_TYPE.VIDEO,
1047 | value: { delta_assembly_delay_ms_in: data.delta_ms_assembly_delay },
1048 | },
1049 | {
1050 | ssrc,
1051 | type: STAT_TYPE.VIDEO,
1052 | value: { total_time_assembly_delay_in: data.total_assembly_time },
1053 | },
1054 | {
1055 | ssrc,
1056 | type: STAT_TYPE.VIDEO,
1057 | value: { total_time_decoded_in: data.total_decode_time },
1058 | },
1059 | {
1060 | ssrc,
1061 | type: STAT_TYPE.VIDEO,
1062 | value: { total_nack_sent_in: nackPliData.nackCount },
1063 | },
1064 | {
1065 | ssrc,
1066 | type: STAT_TYPE.VIDEO,
1067 | value: { delta_nack_sent_in: nackPliData.deltaNackCount },
1068 | },
1069 | {
1070 | ssrc,
1071 | type: STAT_TYPE.VIDEO,
1072 | value: { total_pli_sent_in: nackPliData.pliCount },
1073 | },
1074 | {
1075 | ssrc,
1076 | type: STAT_TYPE.VIDEO,
1077 | value: { delta_pli_sent_in: nackPliData.deltaPliCount },
1078 | },
1079 | {
1080 | ssrc,
1081 | type: STAT_TYPE.VIDEO,
1082 | value: { size_in: inputVideo },
1083 | internal: "inputSizeChanged",
1084 | },
1085 | {
1086 | ssrc,
1087 | type: STAT_TYPE.VIDEO,
1088 | value: { track_in: bunch[PROPERTY.TRACK_IDENTIFIER] },
1089 | },
1090 | {
1091 | ssrc,
1092 | type: STAT_TYPE.VIDEO,
1093 | internal: "ssrcIdentifierIn",
1094 | value: { ssrc_in: bunch[PROPERTY.SSRC] },
1095 | },
1096 | {
1097 | ssrc,
1098 | type: STAT_TYPE.VIDEO,
1099 | value: {
1100 | total_glitch_in: { freeze: freezePauseData.freezeCount, pause: freezePauseData.pauseCount },
1101 | delta_glitch_in: { freeze: freezePauseData.deltaFreezeCount, pause: freezePauseData.deltaPauseCount },
1102 | },
1103 | internal: "glitchChanged",
1104 | },
1105 | ];
1106 | }
1107 | break;
1108 | }
1109 | case TYPE.OUTBOUND_RTP: {
1110 | const active = !!bunch[PROPERTY.MEDIA_SOURCE_ID];
1111 |
1112 | // get SSRC and associated data
1113 | const ssrc = bunch[PROPERTY.SSRC];
1114 | const previousSSRCBunch = getSSRCDataFromBunch(ssrc, previousBunch, DIRECTION.OUTBOUND);
1115 | if (previousSSRCBunch) {
1116 | previousSSRCBunch.timestamp = previousBunch.timestamp;
1117 | }
1118 | const referenceSSRCBunch = getSSRCDataFromBunch(ssrc, referenceReport, DIRECTION.OUTBOUND);
1119 | if (referenceSSRCBunch) {
1120 | referenceSSRCBunch.timestamp = referenceReport.timestamp;
1121 | }
1122 |
1123 | let trackOut = "";
1124 | let audioLevel = 0;
1125 | let size = { width: 0, height: 0, framerate: 0 };
1126 | if (active && raw.has(bunch[PROPERTY.MEDIA_SOURCE_ID])) {
1127 | const mediaSourceReport = raw.get(bunch[PROPERTY.MEDIA_SOURCE_ID]);
1128 | trackOut = mediaSourceReport[PROPERTY.TRACK_IDENTIFIER];
1129 | if (bunch[PROPERTY.KIND] === VALUE.AUDIO) {
1130 | audioLevel = mediaSourceReport[PROPERTY.AUDIO_LEVEL];
1131 | } else {
1132 | size = { width: mediaSourceReport[PROPERTY.WIDTH] || null, height: mediaSourceReport[PROPERTY.HEIGHT] || null, framerate: mediaSourceReport[PROPERTY.FRAMES_PER_SECOND] || null };
1133 | }
1134 | }
1135 |
1136 | let deviceLabel = "";
1137 | if (trackOut) {
1138 | const track = findTrackInPeerConnectionById(trackOut, _refPC);
1139 | if (track) {
1140 | deviceLabel = track.label;
1141 | }
1142 | }
1143 |
1144 | if (bunch[PROPERTY.MEDIA_TYPE] === VALUE.AUDIO) {
1145 | const audioOutputCodecId = bunch[PROPERTY.CODEC_ID] || null;
1146 |
1147 | // FF: no media-source, try to find the track from the sender (first track of kind found)
1148 | if (!trackOut) {
1149 | const track = findOutgoingTrackFromPeerConnectionByKind("audio", _refPC);
1150 | if (track) {
1151 | trackOut = track.id;
1152 | deviceLabel = track.label;
1153 | }
1154 | }
1155 |
1156 | // packets and bytes
1157 | const data = extractAudioVideoPacketSent(bunch, VALUE.AUDIO, previousSSRCBunch, referenceSSRCBunch);
1158 |
1159 | return [
1160 | {
1161 | ssrc,
1162 | type: STAT_TYPE.AUDIO,
1163 | internal: "mediaSourceUpdated",
1164 | value: { active_out: active },
1165 | },
1166 | {
1167 | ssrc,
1168 | type: STAT_TYPE.AUDIO,
1169 | value: { device_out: deviceLabel },
1170 | },
1171 | {
1172 | ssrc,
1173 | type: STAT_TYPE.AUDIO,
1174 | value: { codec_id_out: audioOutputCodecId },
1175 | },
1176 | {
1177 | ssrc,
1178 | type: STAT_TYPE.AUDIO,
1179 | value: { total_packets_out: data.packetsSent },
1180 | },
1181 | {
1182 | ssrc,
1183 | type: STAT_TYPE.AUDIO,
1184 | value: { delta_packets_out: data.deltaPacketsSent },
1185 | },
1186 | {
1187 | ssrc,
1188 | type: STAT_TYPE.AUDIO,
1189 | value: { delta_packet_delay_ms_out: data.deltaAvgPacketSendDelay },
1190 | },
1191 | {
1192 | ssrc,
1193 | type: STAT_TYPE.AUDIO,
1194 | value: { total_time_packets_delay_out: data.totalPacketSendDelay },
1195 | },
1196 | {
1197 | ssrc,
1198 | type: STAT_TYPE.AUDIO,
1199 | value: { total_KBytes_out: data.KBytesSent },
1200 | },
1201 | {
1202 | ssrc,
1203 | type: STAT_TYPE.AUDIO,
1204 | internal: "bytesSentChanged",
1205 | value: { delta_KBytes_out: data.deltaKBytesSent },
1206 | },
1207 | {
1208 | ssrc,
1209 | type: STAT_TYPE.AUDIO,
1210 | value: { delta_kbs_out: data.kbsSent },
1211 | },
1212 | {
1213 | ssrc,
1214 | type: STAT_TYPE.AUDIO,
1215 | internal: "deviceChanged",
1216 | value: { track_out: trackOut },
1217 | },
1218 | {
1219 | ssrc,
1220 | type: STAT_TYPE.AUDIO,
1221 | value: { level_out: audioLevel },
1222 | },
1223 | {
1224 | ssrc,
1225 | type: STAT_TYPE.AUDIO,
1226 | internal: "ssrcIdentifierOut",
1227 | value: { ssrc_out: bunch[PROPERTY.SSRC] },
1228 | },
1229 | ];
1230 | }
1231 | if (bunch[PROPERTY.MEDIA_TYPE] === VALUE.VIDEO) {
1232 | const encoderImplementation = bunch[PROPERTY.ENCODER_IMPLEMENTATION] || null;
1233 | const videoOutputCodecId = bunch[PROPERTY.CODEC_ID] || null;
1234 |
1235 | // FF: no media-source, try to find the track from the sender (first track of kind found)
1236 | if (!trackOut) {
1237 | const track = findOutgoingTrackFromPeerConnectionByKind("video", _refPC);
1238 | if (track) {
1239 | trackOut = track.id;
1240 | deviceLabel = track.label;
1241 | }
1242 | }
1243 |
1244 | // Encode time
1245 | const data = extractEncodeTime(bunch, previousSSRCBunch);
1246 |
1247 | // Video size
1248 | const oldBunch = oldRaw ? oldRaw.get(bunch[PROPERTY.ID]) : null;
1249 | const outputVideo = extractVideoSize(bunch, oldBunch, DIRECTION.OUTBOUND);
1250 |
1251 | // limitations
1252 | const limitationOut = extractQualityLimitation(bunch);
1253 |
1254 | // Nack & Pli stats
1255 | const nackPliData = extractNackAndPliCountReceivedWhenSending(
1256 | bunch,
1257 | previousSSRCBunch,
1258 | referenceSSRCBunch,
1259 | );
1260 |
1261 | // packets and bytes
1262 | const dataSent = extractAudioVideoPacketSent(bunch, VALUE.VIDEO, previousSSRCBunch, referenceSSRCBunch);
1263 |
1264 | return [
1265 | {
1266 | ssrc,
1267 | type: STAT_TYPE.VIDEO,
1268 | internal: "mediaSourceUpdated",
1269 | value: { active_out: active },
1270 | },
1271 | {
1272 | ssrc,
1273 | type: STAT_TYPE.VIDEO,
1274 | value: { device_out: deviceLabel },
1275 | },
1276 | {
1277 | ssrc,
1278 | type: STAT_TYPE.VIDEO,
1279 | value: { codec_id_out: videoOutputCodecId },
1280 | },
1281 | {
1282 | ssrc,
1283 | type: STAT_TYPE.VIDEO,
1284 | value: { total_packets_out: dataSent.packetsSent },
1285 | },
1286 | {
1287 | ssrc,
1288 | type: STAT_TYPE.VIDEO,
1289 | value: { delta_packets_out: dataSent.deltaPacketsSent },
1290 | },
1291 | {
1292 | ssrc,
1293 | type: STAT_TYPE.VIDEO,
1294 | value: { delta_packet_delay_ms_out: dataSent.deltaAvgPacketSendDelay },
1295 | },
1296 | {
1297 | ssrc,
1298 | type: STAT_TYPE.VIDEO,
1299 | value: { total_time_packets_delay_out: dataSent.totalPacketSendDelay },
1300 | },
1301 | {
1302 | ssrc,
1303 | type: STAT_TYPE.VIDEO,
1304 | value: { total_KBytes_out: dataSent.KBytesSent },
1305 | },
1306 | {
1307 | ssrc,
1308 | type: STAT_TYPE.VIDEO,
1309 | internal: "bytesSentChanged",
1310 | value: { delta_KBytes_out: dataSent.deltaKBytesSent },
1311 | },
1312 | {
1313 | ssrc,
1314 | type: STAT_TYPE.VIDEO,
1315 | value: { delta_kbs_out: dataSent.kbsSent },
1316 | },
1317 | {
1318 | ssrc,
1319 | type: STAT_TYPE.VIDEO,
1320 | value: { encoder_out: encoderImplementation },
1321 | },
1322 | {
1323 | ssrc,
1324 | type: STAT_TYPE.VIDEO,
1325 | value: { delta_encode_frame_ms_out: data.delta_ms_encode_frame },
1326 | },
1327 | {
1328 | ssrc,
1329 | type: STAT_TYPE.VIDEO,
1330 | value: { total_frames_encoded_out: data.frames_encoded },
1331 | },
1332 | {
1333 | ssrc,
1334 | type: STAT_TYPE.VIDEO,
1335 | value: { total_time_encoded_out: data.total_encode_time },
1336 | },
1337 | {
1338 | ssrc,
1339 | type: STAT_TYPE.VIDEO,
1340 | value: { total_nack_received_out: nackPliData.nackCount },
1341 | },
1342 | {
1343 | ssrc,
1344 | type: STAT_TYPE.VIDEO,
1345 | value: { delta_nack_received_out: nackPliData.deltaNackCount },
1346 | },
1347 | {
1348 | ssrc,
1349 | type: STAT_TYPE.VIDEO,
1350 | value: { total_pli_received_out: nackPliData.pliCount },
1351 | },
1352 | {
1353 | ssrc,
1354 | type: STAT_TYPE.VIDEO,
1355 | value: { delta_pli_received_out: nackPliData.deltaPliCount },
1356 | },
1357 | {
1358 | ssrc,
1359 | type: STAT_TYPE.VIDEO,
1360 | value: { size_out: outputVideo },
1361 | internal: "outputSizeChanged",
1362 | },
1363 | {
1364 | ssrc,
1365 | type: STAT_TYPE.VIDEO,
1366 | value: { limitation_out: limitationOut },
1367 | internal: "videoLimitationChanged",
1368 | },
1369 | {
1370 | ssrc,
1371 | type: STAT_TYPE.VIDEO,
1372 | internal: "deviceChanged",
1373 | value: { track_out: trackOut },
1374 | },
1375 | {
1376 | ssrc,
1377 | type: STAT_TYPE.VIDEO,
1378 | value: { size_pref_out: size },
1379 | },
1380 | {
1381 | ssrc,
1382 | type: STAT_TYPE.VIDEO,
1383 | internal: "ssrcIdentifierOut",
1384 | value: { ssrc_out: bunch[PROPERTY.SSRC] },
1385 | },
1386 | ];
1387 | }
1388 | break;
1389 | }
1390 | case TYPE.MEDIA_SOURCE: {
1391 | break;
1392 | }
1393 | case TYPE.TRACK: {
1394 | break;
1395 | }
1396 | case TYPE.CODEC:
1397 | const result = [];
1398 | // Check for Audio codec
1399 | Object.keys(previousBunch[VALUE.AUDIO]).forEach((ssrc) => {
1400 | const ssrcAudioBunch = previousBunch[VALUE.AUDIO][ssrc];
1401 | if ((ssrcAudioBunch.codec_id_in === bunch[PROPERTY.ID]) || (ssrcAudioBunch.codec_id_out === bunch[PROPERTY.ID])) {
1402 | const codec = extractAudioCodec(bunch);
1403 | if (bunch[PROPERTY.ID] === ssrcAudioBunch.codec_id_in) {
1404 | result.push({ ssrc: ssrcAudioBunch.ssrc, type: STAT_TYPE.AUDIO, value: { codec_in: codec } });
1405 | } else {
1406 | result.push({ ssrc: ssrcAudioBunch.ssrc, type: STAT_TYPE.AUDIO, value: { codec_out: codec } });
1407 | }
1408 | }
1409 | });
1410 |
1411 | // Check for Video codec
1412 | Object.keys(previousBunch[VALUE.VIDEO]).forEach((ssrc) => {
1413 | const ssrcVideoBunch = previousBunch[VALUE.VIDEO][ssrc];
1414 | if ((ssrcVideoBunch.codec_id_in === bunch[PROPERTY.ID]) || (ssrcVideoBunch.codec_id_out === bunch[PROPERTY.ID])) {
1415 | const codec = extractVideoCodec(bunch);
1416 | if (bunch[PROPERTY.ID] === ssrcVideoBunch.codec_id_in) {
1417 | result.push({ ssrc: ssrcVideoBunch.ssrc, type: STAT_TYPE.VIDEO, value: { codec_in: codec } });
1418 | } else {
1419 | result.push({ ssrc: ssrcVideoBunch.ssrc, type: STAT_TYPE.VIDEO, value: { codec_out: codec } });
1420 | }
1421 | }
1422 | });
1423 | return result;
1424 | case TYPE.REMOTE_INBOUND_RTP: {
1425 | // get SSRC and associated data
1426 | const ssrc = bunch[PROPERTY.SSRC];
1427 | const previousSSRCBunch = getSSRCDataFromBunch(ssrc, previousBunch, DIRECTION.OUTBOUND);
1428 | const referenceSSRCBunch = getSSRCDataFromBunch(ssrc, referenceReport, DIRECTION.OUTBOUND);
1429 | if (bunch[PROPERTY.KIND] === VALUE.AUDIO) {
1430 | // Round Trip Time based on RTCP
1431 | const data = extractRTTBasedOnRTCP(
1432 | bunch,
1433 | VALUE.AUDIO,
1434 | referenceSSRCBunch,
1435 | previousSSRCBunch,
1436 | );
1437 |
1438 | // Jitter (out)
1439 | const jitter = extractLastJitter(bunch, VALUE.AUDIO, previousSSRCBunch);
1440 |
1441 | // Packets lost
1442 | const packets = extractAudioVideoPacketLost(bunch, VALUE.AUDIO, previousSSRCBunch, referenceSSRCBunch);
1443 |
1444 | return [
1445 | {
1446 | ssrc,
1447 | type: STAT_TYPE.AUDIO,
1448 | value: { delta_rtt_ms_out: data.rtt },
1449 | },
1450 | {
1451 | ssrc,
1452 | type: STAT_TYPE.AUDIO,
1453 | value: { total_rtt_ms_out: data.totalRTT },
1454 | },
1455 | {
1456 | ssrc,
1457 | type: STAT_TYPE.AUDIO,
1458 | value: { total_rtt_measure_out: data.totalRTTMeasurements },
1459 | },
1460 | {
1461 | ssrc,
1462 | type: STAT_TYPE.AUDIO,
1463 | value: { delta_jitter_ms_out: jitter },
1464 | },
1465 | {
1466 | ssrc,
1467 | type: STAT_TYPE.AUDIO,
1468 | value: { timestamp_out: bunch[PROPERTY.TIMESTAMP] },
1469 | },
1470 | {
1471 | ssrc,
1472 | type: STAT_TYPE.AUDIO,
1473 | value: { total_packets_lost_out: packets.packetsLost },
1474 | },
1475 | {
1476 | ssrc,
1477 | type: STAT_TYPE.AUDIO,
1478 | value: { delta_packets_lost_out: packets.deltaPacketsLost },
1479 | },
1480 | {
1481 | ssrc,
1482 | type: STAT_TYPE.AUDIO,
1483 | value: { percent_packets_lost_out: packets.fractionLost },
1484 | },
1485 | ];
1486 | }
1487 |
1488 | if (bunch[PROPERTY.KIND] === VALUE.VIDEO) {
1489 | // Round Trip Time based on RTCP
1490 | const data = extractRTTBasedOnRTCP(
1491 | bunch,
1492 | VALUE.VIDEO,
1493 | referenceSSRCBunch,
1494 | previousSSRCBunch,
1495 | );
1496 |
1497 | // Jitter (out)
1498 | const jitter = extractLastJitter(bunch, VALUE.VIDEO, previousSSRCBunch);
1499 |
1500 | // Packets lost
1501 | const packets = extractAudioVideoPacketLost(bunch, VALUE.VIDEO, previousSSRCBunch, referenceSSRCBunch);
1502 |
1503 | return [
1504 | {
1505 | ssrc,
1506 | type: STAT_TYPE.VIDEO,
1507 | value: { delta_rtt_ms_out: data.rtt },
1508 | },
1509 | {
1510 | ssrc,
1511 | type: STAT_TYPE.VIDEO,
1512 | value: { total_rtt_ms_out: data.totalRTT },
1513 | },
1514 | {
1515 | ssrc,
1516 | type: STAT_TYPE.VIDEO,
1517 | value: { total_rtt_measure_out: data.totalRTTMeasurements },
1518 | },
1519 | {
1520 | ssrc,
1521 | type: STAT_TYPE.VIDEO,
1522 | value: { delta_jitter_ms_out: jitter },
1523 | },
1524 | {
1525 | ssrc,
1526 | type: STAT_TYPE.VIDEO,
1527 | value: { timestamp_out: bunch[PROPERTY.TIMESTAMP] },
1528 | },
1529 | {
1530 | ssrc,
1531 | type: STAT_TYPE.VIDEO,
1532 | value: { total_packets_lost_out: packets.packetsLost },
1533 | },
1534 | {
1535 | ssrc,
1536 | type: STAT_TYPE.VIDEO,
1537 | value: { delta_packets_lost_out: packets.deltaPacketsLost },
1538 | },
1539 | {
1540 | ssrc,
1541 | type: STAT_TYPE.VIDEO,
1542 | value: { percent_packets_lost_out: packets.fractionLost },
1543 | },
1544 | ];
1545 | }
1546 | break;
1547 | }
1548 | case TYPE.REMOTE_OUTBOUND_RTP: {
1549 | // get SSRC and associated data
1550 | const ssrc = bunch[PROPERTY.SSRC];
1551 | const previousSSRCBunch = getSSRCDataFromBunch(ssrc, previousBunch, DIRECTION.OUTBOUND);
1552 | const referenceSSRCBunch = getSSRCDataFromBunch(ssrc, referenceReport, DIRECTION.OUTBOUND);
1553 | if (bunch[PROPERTY.KIND] === VALUE.AUDIO) {
1554 | // Round Trip Time based on RTCP
1555 | const data = extractRTTBasedOnRTCP(
1556 | bunch,
1557 | VALUE.AUDIO,
1558 | referenceSSRCBunch,
1559 | previousSSRCBunch,
1560 | );
1561 |
1562 | return [
1563 | {
1564 | ssrc,
1565 | type: STAT_TYPE.AUDIO,
1566 | value: { delta_rtt_ms_in: data.rtt },
1567 | },
1568 | {
1569 | ssrc,
1570 | type: STAT_TYPE.AUDIO,
1571 | value: { total_rtt_ms_in: data.totalRTT },
1572 | },
1573 | {
1574 | ssrc,
1575 | type: STAT_TYPE.AUDIO,
1576 | value: { total_rtt_measure_in: data.totalRTTMeasurements },
1577 | },
1578 | {
1579 | ssrc,
1580 | type: STAT_TYPE.AUDIO,
1581 | value: { timestamp_in: bunch[PROPERTY.TIMESTAMP] },
1582 | },
1583 | ];
1584 | }
1585 | break;
1586 | }
1587 | case TYPE.TRANSPORT: {
1588 | const selectedCandidatePairId = bunch[PROPERTY.SELECTED_CANDIDATEPAIR_ID];
1589 | return [
1590 | {
1591 | type: STAT_TYPE.NETWORK,
1592 | internal: "selectedPairChanged",
1593 | value: { selected_candidate_pair_id: selectedCandidatePairId },
1594 | },
1595 | ];
1596 | }
1597 | default:
1598 | break;
1599 | }
1600 |
1601 | // No interesting data
1602 | return [];
1603 | };
1604 |
1605 | export const extractPassthroughFields = (bunch, oldBunch, passthrough) => {
1606 | const convertTable = {
1607 | kbits: (valueInBytes) => ((valueInBytes * 8) / 1000),
1608 | ms: (valueInSeconds) => (valueInSeconds * 1000),
1609 | asis: (value) => (value),
1610 | };
1611 |
1612 | if (!bunch) {
1613 | return {};
1614 | }
1615 |
1616 | // Don't add measure if identical report
1617 | if (oldBunch && ((oldBunch.timestamp === bunch.timestamp) || (oldBunch.remoteTimestamp && (oldBunch.remoteTimestamp === bunch.remoteTimestamp)))) {
1618 | return {};
1619 | }
1620 |
1621 | // Example {"inbound-rtp": ["jitter.ms", "ps:bytesReceived"]}
1622 | const fieldsToReport = (passthrough && passthrough[bunch[PROPERTY.TYPE]]) || [];
1623 |
1624 | const pass = {};
1625 | if (fieldsToReport.length > 0) {
1626 | const ref = bunch[PROPERTY.SSRC] || bunch[PROPERTY.ID];
1627 | const kind = bunch[PROPERTY.KIND] || "";
1628 | const id = `${bunch.type}${kind ? `_${kind}` : "_*"}=${ref}`;
1629 | fieldsToReport.forEach((fields) => {
1630 | // Collect properties (normally one, but several in case of an operation)
1631 | let properties = [fields];
1632 | let operand = "";
1633 | if (fields.startsWith("[") && fields.endsWith("]")) {
1634 | const operation = fields.substring(1, fields.length - 1);
1635 | if (operation.includes("/")) {
1636 | operand = "/";
1637 | } else if (operation.includes("+")) {
1638 | operand = "+";
1639 | } else if (operation.includes("*")) {
1640 | operand = "*";
1641 | } else if (operation.includes("-")) {
1642 | operand = "-";
1643 | }
1644 |
1645 | properties = operation.split(operand);
1646 | }
1647 |
1648 | // For each prop, get the value if exists in the report
1649 | const values = [];
1650 | properties.forEach((prop) => {
1651 | const hasMethod = prop.split(":").length > 1;
1652 | const hasMetric = prop.split(".").length > 1;
1653 | const method = hasMethod ? prop.split(":")[0] : "total";
1654 | const metric = hasMetric ? prop.split(".")[1] : "asis";
1655 | const property = hasMethod ? prop.split(":")[1].split(".")[0] : prop.split(".")[0];
1656 |
1657 | if (property in bunch) {
1658 | let value = convertTable[metric](bunch[property]);
1659 | const currentTimestamp = bunch[PROPERTY.REMOTE_TIMESTAMP] || bunch[PROPERTY.TIMESTAMP];
1660 | if (method === "ps" && oldBunch) {
1661 | const deltaValue = value - convertTable[metric](oldBunch[property]);
1662 | const deltaTimestamp = currentTimestamp - (oldBunch[PROPERTY.REMOTE_TIMESTAMP] || oldBunch[PROPERTY.TIMESTAMP]);
1663 | value = (deltaValue / deltaTimestamp) * 1000;
1664 | }
1665 | values.push({ fields, property, value });
1666 | }
1667 | });
1668 |
1669 | // Only one result, return it
1670 | if (values.length === 1) {
1671 | const result = values[0];
1672 | if (!(result.property in pass)) {
1673 | pass[result.property] = {};
1674 | }
1675 | pass[result.property][id] = result.value;
1676 |         // Several results, compute the operation
1677 | } else if (values.length > 1) {
1678 | const first = values.shift();
1679 |
1680 | const value = values.reduce((acc, current) => {
1681 | switch (operand) {
1682 | case "+":
1683 | return acc + current.value;
1684 | case "/":
1685 | if (current.value !== 0) {
1686 | return acc / current.value;
1687 | }
1688 | return acc;
1689 | case "*":
1690 | return acc * current.value;
1691 | case "-":
1692 | return acc - current.value;
1693 | default:
1694 | return acc + current.value;
1695 | }
1696 | }, first.value);
1697 | if (!(values[0].fields in pass)) {
1698 | pass[values[0].fields] = {};
1699 | }
1700 | pass[values[0].fields][id] = value;
1701 | }
1702 | });
1703 | }
1704 |
1705 | return pass;
1706 | };
1707 |
--------------------------------------------------------------------------------
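
The passthrough grammar consumed by `extractPassthroughFields` above is compact: a plain property name is reported as-is, a `ps:` prefix turns it into a per-second rate, a `.ms` or `.kbits` suffix selects a unit conversion, and a bracketed expression such as `[a/b]` combines two properties with an operator. Below is a minimal sketch of a call with hand-built report objects; the stat field names (`ssrc`, `kind`, `timestamp`, `jitter`, `bytesReceived`) are assumptions about what the `PROPERTY` constants resolve to, since that mapping lives in utils/models.js and is not reproduced in this listing.

```js
// Illustrative only: exercises extractPassthroughFields with fabricated
// inbound-rtp reports (not captured from a real RTCPeerConnection).
// The import path assumes the sketch lives next to extractor.js in src/.
import { extractPassthroughFields } from "./extractor";

const passthrough = { "inbound-rtp": ["jitter.ms", "ps:bytesReceived.kbits"] };

const oldBunch = { type: "inbound-rtp", ssrc: 1234, kind: "audio", timestamp: 1000, jitter: 0.01, bytesReceived: 20000 };
const bunch = { type: "inbound-rtp", ssrc: 1234, kind: "audio", timestamp: 2000, jitter: 0.012, bytesReceived: 45000 };

// "jitter.ms"              -> last jitter converted to milliseconds (~12)
// "ps:bytesReceived.kbits" -> (45000 - 20000) bytes over 1 s, reported as kbit/s (200)
console.log(extractPassthroughFields(bunch, oldBunch, passthrough));
// Expected shape: { jitter: { "inbound-rtp_audio=1234": ~12 },
//                   bytesReceived: { "inbound-rtp_audio=1234": 200 } }
```
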
/src/index.js:
--------------------------------------------------------------------------------
1 | import "regenerator-runtime/runtime.js";
2 | import {
3 | info,
4 | setLogLevel,
5 | setSilentLog,
6 | setVerboseLog,
7 | } from "./utils/log";
8 | import { getGlobalConfig } from "./utils/config";
9 | import ProbesEngine from "./engine";
10 |
11 | const moduleName = "interface ";
12 |
13 | export default class WebRTCMetrics {
14 | constructor(cfg) {
15 | this._config = getGlobalConfig(cfg);
16 | if (this._config.silent) {
17 | setSilentLog();
18 | } else {
19 | setVerboseLog(this._config.verbose || false);
20 | }
21 | info(moduleName, `welcome to ${this._config.name} version ${this._config.version}`);
22 | this._engine = new ProbesEngine(this._config);
23 | }
24 |
25 | /**
26 | * Change log level manually
27 | * @param {string} level - The level of logs. Can be one of 'TRACE', 'DEBUG', 'INFO', 'WARN', 'ERROR', 'SILENT'
28 | */
29 | setupLogLevel(level) {
30 | setLogLevel(level);
31 | }
32 |
33 | /**
34 | * Get the version
35 | */
36 | get version() {
37 | return this._config.version;
38 | }
39 |
40 | /**
41 | * Get the library name
42 | */
43 | get name() {
44 | return this._config.name;
45 | }
46 |
47 | /**
48 | * Get the probes
49 | */
50 | get probes() {
51 | return this._engine.probes;
52 | }
53 |
54 | /**
55 | * Create a new probe and return it
56 | * @param {RTCPeerConnection} peerConnection The RTCPeerConnection instance to monitor
57 |    * @param {Object} options The options
58 | * @return {Probe} The probe created
59 | */
60 | createProbe(peerConnection, options) {
61 | return this._engine.addNewProbe(peerConnection, options);
62 | }
63 |
64 | /**
65 | * Start all probes
66 | */
67 | startAllProbes() {
68 | this._engine.start();
69 | }
70 |
71 | /**
72 | * Stop all probes
73 | */
74 | stopAllProbes() {
75 | this._engine.stop();
76 | }
77 |
78 | /**
79 | * Is running
80 | */
81 | get running() {
82 | return this._engine.isRunning;
83 | }
84 |
85 | /**
86 | * Is Idle
87 | */
88 | get idle() {
89 | return this._engine.isIdle;
90 | }
91 |
92 | /**
93 | * Experimental
94 |    * Remove a probe
95 | * @param {Probe} probe
96 | */
97 | removeProbe(probe) {
98 | this._engine.removeExistingProbe(probe);
99 | }
100 |
101 | set onresult(callback) {
102 | if (callback) {
103 | this._engine.registerCallback("onresult", callback);
104 | } else {
105 | this._engine.unregisterCallback("onresult");
106 | }
107 | }
108 |
109 | /**
110 | * Get the report generated by a probe
111 | * @param {Probe} probe
112 | */
113 | getTicket(probe) {
114 | return probe.getTicket();
115 | }
116 | }
117 |
--------------------------------------------------------------------------------
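
A hedged usage sketch of the interface above, restricted to members visible in this listing: the constructor, `createProbe`, `startAllProbes` and the `onresult` setter, plus the `onreport` setter that src/probe.js below exposes on each probe. The probe option keys (`pname`, `cid`, `uid`) follow the checks in src/utils/config.js; the package name in the import is an assumption, since the listing only declares a display name via `getLibName()`.

```js
// Sketch only: wiring the metrics engine to a freshly created RTCPeerConnection.
import WebRTCMetrics from "webrtcmetrics"; // package name assumed

const pc = new RTCPeerConnection();
const metrics = new WebRTCMetrics({ verbose: false });

const probe = metrics.createProbe(pc, {
  pname: "pc-main", // peer connection name (truncated/padded to 12 chars for the probe id)
  cid: "call-0001", // call identifier
  uid: "alice",     // user identifier
});

probe.onreport = (report) => {
  // Fired by the collector at each measurement interval.
  console.log(`report from ${probe.pname}`, report);
};

metrics.onresult = (result) => {
  // Aggregated result across all running probes.
  console.log("global result", result);
};

metrics.startAllProbes();
```
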
/src/live.js:
--------------------------------------------------------------------------------
1 | import { alertOnFramerate, alertOnPeak } from "./utils/rules";
2 |
3 | const getValueFromReport = (data, property, report, withoutSSRC = false) => {
4 | if (!report) {
5 | return null;
6 | }
7 |
8 | if (withoutSSRC) {
9 | return data.type in report && property in report[data.type]
10 | ? report[data.type][property]
11 | : null;
12 | }
13 | return data.type in report &&
14 | data.ssrc in report[data.type] &&
15 | property in report[data.type][data.ssrc]
16 | ? report[data.type][data.ssrc][property]
17 | : null;
18 | };
19 |
20 | const getValueFromReportValues = (property, reportValues) => reportValues.find((reportValue) => {
21 | if (property in reportValue.value) {
22 | return reportValue.value[property];
23 | }
24 | return null;
25 | });
26 |
27 | export const doLiveTreatment = (data, previousReport, values) => {
28 | const events = [];
29 |
30 | const addEvent = (at, category, name, ssrc, details) => {
31 | events.push({
32 | at,
33 | ended: null,
34 | category,
35 | name,
36 | ssrc,
37 | details,
38 | });
39 | };
40 |
41 | // track id changed = device changed
42 | const compareAndSendEventForDevice = (property) => {
43 | const currentTrackId = data.value[property];
44 | const previousTrackId = getValueFromReport(data, property, previousReport);
45 | const currentDevice = getValueFromReportValues("device_out", values);
46 | const oldDevice = getValueFromReport(data, "device_out", previousReport);
47 | let eventName = "track-stop";
48 |
49 | if (previousTrackId !== currentTrackId) {
50 | // Message when currentTrackId is null
51 | let message = `The existing outbound ${data.type} stream from ${
52 | oldDevice || "unknown"
53 | } has been stopped or muted`;
54 | if (currentTrackId && previousTrackId) {
55 | // Message when trackId changed
56 | message = `The existing outbound ${
57 | data.type
58 | } device has been changed to ${
59 | currentDevice ? currentDevice.value.device_out : "unknown"
60 | }`;
61 | eventName = "track-change";
62 | } else if (!previousTrackId) {
63 | // Message when new trackId
64 | message = `A new outbound ${data.type} stream from ${
65 | currentDevice ? currentDevice.value.device_out : "unknown"
66 | } has been started or unmuted`;
67 | eventName = "track-start";
68 | }
69 |
70 | addEvent(new Date().toJSON(), "call", eventName, data.ssrc, {
71 | message,
72 | direction: "outbound",
73 | kind: data.type,
74 | value: currentTrackId,
75 | value_old: previousTrackId,
76 | });
77 | }
78 | };
79 |
80 | // width / framerate changed = resolution changed
81 | const compareAndSendEventForSize = (property) => {
82 | const size = data.value[property];
83 | const previousSize = getValueFromReport(data, property, previousReport);
84 | const currentActive = property.includes("out")
85 | ? getValueFromReportValues("active_out", values)
86 | : true;
87 | // Only send event for resolution and framerate if there is an active stream
88 | if (currentActive) {
89 | if (previousSize?.width !== size.width) {
90 | addEvent(
91 | new Date().toJSON(),
92 | "quality",
93 | !previousSize || previousSize.width < size.width
94 | ? "size-up"
95 | : "size-down",
96 | data.ssrc,
97 | {
98 | message: `The resolution of the ${
99 | property.includes("out") ? "outbound" : "inbound"
100 | } ${data.type} stream has ${
101 | !previousSize || previousSize.width < size.width
102 | ? "increased"
103 | : "decreased"
104 | } to ${size.width}x${size.height}`,
105 | direction: property.includes("out") ? "outbound" : "inbound",
106 | kind: data.type,
107 | value: `${size.width}x${size.height}`,
108 | value_old: `${previousSize ? previousSize.width : 0}x${
109 | previousSize ? previousSize.height : 0
110 | }`,
111 | },
112 | );
113 | }
114 | if (alertOnFramerate(previousSize?.framerate, size?.framerate)) {
115 | addEvent(
116 | new Date().toJSON(),
117 | "quality",
118 | !previousSize || previousSize.framerate < size.framerate
119 | ? "fps-up"
120 | : "fps-down",
121 | data.ssrc,
122 | {
123 | message: `The framerate of the ${
124 | property.includes("out") ? "outbound" : "inbound"
125 | } ${data.type} stream has ${
126 | !previousSize || previousSize.framerate < size.framerate
127 | ? "increased"
128 | : "decreased"
129 | } to ${size.framerate}`,
130 | direction: property.includes("out") ? "outbound" : "inbound",
131 | kind: data.type,
132 | value: size.framerate,
133 | value_old: previousSize ? previousSize.framerate : 0,
134 | },
135 | );
136 | }
137 | }
138 | };
139 |
140 | // Outbound active property changed: camera or microphone track removed (muted) or added again (unmuted)
141 | const compareAndSendEventForOutboundMediaSource = (property) => {
142 | const active = data.value[property];
143 | const previousActive = getValueFromReport(data, property, previousReport);
144 | if (active !== previousActive) {
145 | addEvent(
146 | new Date().toJSON(),
147 | "call",
148 | active ? "track-active" : "track-inactive",
149 | data.ssrc,
150 | {
151 | message: `The ${property.includes("out") ? "outbound" : "inbound"} ${
152 | data.type
153 | } stream switched to ${active ? "active" : "inactive"}`,
154 | direction: property.includes("out") ? "outbound" : "inbound",
155 | kind: data.type,
156 | value: active,
157 | value_old: previousActive,
158 | },
159 | );
160 | }
161 | };
162 |
163 | // VideoLimitation Change = cpu, bandwidth, other, none
164 | const compareAndSendEventForOutboundLimitation = (property) => {
165 | const limitation = data.value[property];
166 | const previousLimitation = getValueFromReport(
167 | data,
168 | property,
169 | previousReport,
170 | );
171 |
172 | if (
173 | !previousLimitation ||
174 | limitation.reason !== previousLimitation.reason
175 | ) {
176 | addEvent(
177 | new Date().toJSON(),
178 | "quality",
179 | "limitation",
180 | data.ssrc,
181 | {
182 | message: `The outbound video stream resolution is ${
183 | limitation.reason === "none"
184 |             ? "no longer limited"
185 | : `limited due to ${limitation.reason} reason`
186 | }`,
187 | direction: property.includes("out") ? "outbound" : "inbound",
188 | kind: data.type,
189 | value: limitation.reason,
190 | value_old: previousLimitation,
191 | },
192 | );
193 | }
194 | };
195 |
196 | // BytesSent changed a lot /10 or x10 = possibly track has been muted/unmuted
197 | const compareAndSendEventForBytes = (property) => {
198 | const bytesExchanged = data.value[property];
199 | const previousBytesExchanged = getValueFromReport(
200 | data,
201 | property,
202 | previousReport,
203 | );
204 | const currentActive = property.includes("out")
205 | ? getValueFromReportValues("active_out", values)
206 | : true;
207 |
208 | if (currentActive) {
209 | if (alertOnPeak(previousBytesExchanged, bytesExchanged)) {
210 | addEvent(
211 | new Date().toJSON(),
212 | "quality",
213 | bytesExchanged > previousBytesExchanged ? "peak-up" : "peak-down",
214 | data.ssrc,
215 | {
216 | message: `A peak has been detected for the ${
217 | property.includes("out") ? "outbound" : "inbound"
218 |             } ${data.type} stream.`,
219 | direction: property.includes("out") ? "outbound" : "inbound",
220 | kind: data.type,
221 | value: bytesExchanged,
222 | value_old: previousBytesExchanged,
223 | },
224 | );
225 | }
226 | }
227 | };
228 |
229 | const compareAndSendEventForSelectedCandidatePairChanged = (property) => {
230 | const selectedCandidatePairId = data.value[property];
231 | const previousSelectedCandidatePairId = getValueFromReport(
232 | data,
233 | property,
234 | previousReport,
235 | true,
236 | );
237 | if (selectedCandidatePairId !== previousSelectedCandidatePairId) {
238 | addEvent(new Date().toJSON(), "signal", "route-change", null, {
239 |         message: `The selected candidate pair changed to ${selectedCandidatePairId}`,
240 | direction: null,
241 | kind: null,
242 | value: selectedCandidatePairId,
243 | value_old: previousSelectedCandidatePairId,
244 | });
245 | }
246 | };
247 |
248 | const compareAndSendEventForNewSSRC = (property) => {
249 | const ssrc = data.value[property];
250 |     const previousSsrc = getValueFromReport(
251 | data,
252 | property,
253 | previousReport,
254 | );
255 |
256 |     if (ssrc && !previousSsrc) {
257 | addEvent(new Date().toJSON(), "call", "track-added", ssrc, {
258 | message: `New track added to the call ${ssrc}`,
259 | direction: property.includes("in") ? "inbound" : "outbound",
260 | kind: data.type,
261 | value: ssrc,
262 | value_old: null,
263 | });
264 | }
265 | };
266 |
267 | switch (data.internal) {
268 | case "deviceChanged": {
269 | if (previousReport) {
270 | compareAndSendEventForDevice("track_out");
271 | }
272 | break;
273 | }
274 | case "inputSizeChanged": {
275 | compareAndSendEventForSize("size_in");
276 | break;
277 | }
278 | case "outputSizeChanged": {
279 | compareAndSendEventForSize("size_out");
280 | break;
281 | }
282 | case "bytesSentChanged": {
283 | compareAndSendEventForBytes("delta_KBytes_out");
284 | break;
285 | }
286 | case "bytesReceivedChanged": {
287 | if (previousReport) {
288 | compareAndSendEventForBytes("delta_KBytes_in");
289 | }
290 | break;
291 | }
292 | case "mediaSourceUpdated": {
293 | if (previousReport) {
294 | compareAndSendEventForOutboundMediaSource("active_out");
295 | }
296 | break;
297 | }
298 | case "videoLimitationChanged": {
299 | compareAndSendEventForOutboundLimitation("limitation_out");
300 | break;
301 | }
302 | case "selectedPairChanged": {
303 | compareAndSendEventForSelectedCandidatePairChanged(
304 | "selected_candidate_pair_id",
305 | );
306 | break;
307 | }
308 | case "ssrcIdentifierIn": {
309 | compareAndSendEventForNewSSRC("ssrc_in");
310 | break;
311 | }
312 | case "ssrcIdentifierOut": {
313 | compareAndSendEventForNewSSRC("ssrc_out");
314 | break;
315 | }
316 | default:
317 | break;
318 | }
319 |
320 | return events;
321 | };
322 |
--------------------------------------------------------------------------------
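
Each entry returned by `doLiveTreatment` above follows the shape built by `addEvent`. The sketch below shows a hypothetical `route-change` event, as produced by `compareAndSendEventForSelectedCandidatePairChanged`, together with a trivial consumer; the identifiers and timestamps are invented for illustration.

```js
// Illustrative only: mirrors the object literal pushed by addEvent() above.
const events = [
  {
    at: "2024-01-01T12:00:00.000Z",
    ended: null,
    category: "signal",
    name: "route-change",
    ssrc: null,
    details: {
      message: "The selected candidate pair changed to RTCIceCandidatePair_A_B",
      direction: null,
      kind: null,
      value: "RTCIceCandidatePair_A_B",
      value_old: "RTCIceCandidatePair_A_C",
    },
  },
];

// Example consumer: keep only the quality-related events (size, fps, peak, limitation).
const qualityEvents = events.filter((event) => event.category === "quality");
console.log(qualityEvents.length); // 0 for the sample above
```
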
/src/probe.js:
--------------------------------------------------------------------------------
1 | import { info, warn } from "./utils/log";
2 | import Collector from "./collector";
3 | import { COLLECTOR_STATE } from "./utils/models";
4 | import { createProbeId } from "./utils/helper";
5 |
6 | export default class Probe {
7 | constructor(cfg) {
8 | this._id =
9 |       (cfg.pname && cfg.pname.substring(0, 12).padEnd(12, " ")) || createProbeId();
10 | this._moduleName = this._id;
11 | info(this._moduleName, "probe created");
12 | this._config = cfg;
13 | this._collector = new Collector(this._config, this._id);
14 | }
15 |
16 | /**
17 | * Register a callback to 'onreport'
18 | * Unregister when callback is null
19 | * Fired when a report is received
20 | */
21 | set onreport(callback) {
22 | if (callback) {
23 | this._collector.registerCallback("onreport", callback);
24 | } else {
25 | this._collector.unregisterCallback("onreport");
26 | }
27 | }
28 |
29 | /**
30 | * Register a callback to 'onticket'
31 | * Unregister when callback is null
32 | * Fired when a ticket is received
33 | */
34 | set onticket(callback) {
35 | if (callback) {
36 | this._collector.registerCallback("onticket", callback);
37 | } else {
38 | this._collector.unregisterCallback("onticket");
39 | }
40 | }
41 |
42 | /**
43 | * Get the id of the Probe
44 | */
45 | get id() {
46 | return this._id;
47 | }
48 |
49 | /**
50 | * Get the name of the PeerConnection
51 | */
52 | get pname() {
53 | return this._config.pname;
54 | }
55 |
56 | /**
57 | * Get the call identifier
58 | */
59 | get cid() {
60 | return this._config.cid;
61 | }
62 |
63 | /**
64 | * Get the user identifier
65 | */
66 | get uid() {
67 | return this._config.uid;
68 | }
69 |
70 | /**
71 | * Get the state of the analyzer
72 | * Value can be 'running' or 'idle'
73 | */
74 | get state() {
75 | return this._collector.state;
76 | }
77 |
78 | set state(newState) {
79 | this._collector.state = newState;
80 | }
81 |
82 | /**
83 | * Add a custom event for that probe
84 | * @param {String} name The name of the event
85 | * @param {String} category The category of the event. Could be any strings
86 | * @param {String} message A description. Could be empty
87 | * @param {Date} at Optional. The date of the event.
88 | * @param {String} ssrc Optional. The associated ssrc of the stream. Null by default.
89 |    * @param {Object} value Optional. The value. Null by default
90 |    * @param {Date} ended Optional. The end date of the period. Null by default
91 | */
92 | addCustomEvent(
93 | name,
94 | category,
95 | message,
96 | at = new Date(),
97 | ssrc = null,
98 | value = null,
99 | ended = null,
100 | ) {
101 | let endedAt = null;
102 | if (ended) {
103 | endedAt = typeof ended === "object" ? ended.toJSON() : ended;
104 | }
105 |
106 | this._collector.addCustomEvent({
107 | at: typeof at === "object" ? at.toJSON() : at,
108 | ended: endedAt,
109 | category,
110 | name,
111 | ssrc,
112 | details: {
113 | message,
114 | kind: null,
115 | direction: null,
116 | value,
117 | value_old: null,
118 | },
119 | });
120 | }
121 |
122 | /**
123 | * Return true if the probe is running
124 | */
125 | get isRunning() {
126 | return this._collector.state === COLLECTOR_STATE.RUNNING;
127 | }
128 |
129 | /**
130 | * Return true if the probe is idle
131 | */
132 | get isIdle() {
133 | return this._collector.state === COLLECTOR_STATE.IDLE;
134 | }
135 |
136 | /**
137 | * Set the user identifier
138 | */
139 | updateUserId(value) {
140 | this._config.uid = value;
141 | this._collector.updateConfig(this._config);
142 | }
143 |
144 | /**
145 | * Update the call identifier
146 | */
147 | updateCallId(value) {
148 | this._config.cid = value;
149 | this._collector.updateConfig(this._config);
150 | }
151 |
152 | /**
153 | * Set a probe to running state
154 | */
155 | start() {
156 | if (!this.isIdle) {
157 | warn(this._moduleName, "probe is already running");
158 | return;
159 | }
160 | this._collector.start();
161 | }
162 |
163 | /**
164 | * Set a probe to idle state
165 | */
166 | stop(forced = false) {
167 | if (!this.isRunning) {
168 | return;
169 | }
170 | this._collector.stop(forced);
171 | }
172 |
173 | async takeReferenceStats() {
174 | return this._collector.takeReferenceStats();
175 | }
176 |
177 | async collectStats() {
178 | return this._collector.collectStats();
179 | }
180 |
181 | getTicket() {
182 | return this._collector && this._collector.getTicket();
183 | }
184 | }
185 |
--------------------------------------------------------------------------------
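
A short sketch of the `addCustomEvent` API documented above. The probe is created the same way as in the src/index.js sketch; the event name, category, message and value are arbitrary application-level labels, not values defined by the library.

```js
import WebRTCMetrics from "webrtcmetrics"; // package name assumed

const metrics = new WebRTCMetrics({});
const probe = metrics.createProbe(new RTCPeerConnection(), { pname: "pc-main" });

// Push an application-level marker into the probe's timeline.
probe.addCustomEvent(
  "user-muted",                            // name
  "call",                                  // category (free-form string)
  "User muted the microphone from the UI", // message
  new Date(),                              // at
  null,                                    // ssrc (no stream associated)
  { source: "ui-button" },                 // value
);
```
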
/src/utils/config.js:
--------------------------------------------------------------------------------
1 | import { defaultConfig, getLibName, getVersion } from "./models";
2 | import { warn } from "./log";
3 |
4 | const moduleName = "config ";
5 |
6 | export const getConfig = (peerConnection, cfg = {}, globalConfig = {}) => {
7 | const config = { ...globalConfig, ...cfg };
8 |
9 | if (!cfg.pname) {
10 | warn(moduleName, `Argument [String] 'cfg.pname' for the peerConnection name or id is missing - use generated '${globalConfig.pname}'`);
11 | }
12 |
13 | if (!cfg.cid) {
14 | warn(moduleName, `Argument [String] 'cfg.cid' for the call name or id is missing - use generated '${globalConfig.cid}'`);
15 | }
16 |
17 | if (!cfg.uid) {
18 | warn(moduleName, `Argument [String] 'cfg.uid' for the user name or id is missing - use generated '${globalConfig.uid}'`);
19 | }
20 |
21 | config.pc = peerConnection;
22 | return config;
23 | };
24 |
25 | export const getGlobalConfig = (cfg = {}) => {
26 | const config = { ...defaultConfig, ...cfg };
27 | config.name = getLibName();
28 | config.version = getVersion();
29 | return config;
30 | };
31 |
--------------------------------------------------------------------------------
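
The merge in `getConfig` above is a plain object spread, so per-probe options override the global configuration and the peer connection is attached last as `config.pc`. A minimal illustration of that precedence (the `defaultConfig` imported from ./models is not reproduced here):

```js
// Same precedence as getConfig(): probe options win over the global config.
const globalConfig = { pname: "generated-name", verbose: false }; // stands in for getGlobalConfig() output
const cfg = { pname: "pc-main" };                                 // per-probe options

const config = { ...globalConfig, ...cfg };
console.log(config.pname);   // "pc-main" (the probe option wins)
console.log(config.verbose); // false (inherited from the global config)
```
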
/src/utils/helper.js:
--------------------------------------------------------------------------------
1 | import ShortUniqueId from "short-unique-id";
2 | import {
3 | defaultAudioMetricIn,
4 | defaultAudioMetricOut,
5 | defaultVideoMetricIn,
6 | defaultVideoMetricOut,
7 | DIRECTION,
8 | VALUE,
9 | } from "./models";
10 |
11 | const shortUUID = new ShortUniqueId();
12 |
13 | const getValues = (reports, key, subKey, avoidZeroValue = false, ssrc = "", withTimestamp = false) => {
14 | let arr = reports.map((report) => {
15 | if (!subKey) {
16 | if (withTimestamp) {
17 | return {
18 | timestamp: new Date(report.timestamp).toJSON(),
19 | value: report[key],
20 | };
21 | }
22 | return report[key];
23 | }
24 | if (!ssrc) {
25 | if (withTimestamp) {
26 | return {
27 | timestamp: new Date(report.timestamp).toJSON(),
28 | value: report[key][subKey],
29 | };
30 | }
31 | return report[key][subKey];
32 | }
33 | const data = report[key][ssrc];
34 | if (data) {
35 | if (withTimestamp) {
36 | return {
37 | timestamp: new Date(report.timestamp).toJSON(),
38 | value: data[subKey],
39 | };
40 | }
41 | return data[subKey];
42 | }
43 | return null;
44 | });
45 |
46 |   // Keep only finite values (and only strictly positive ones when avoidZeroValue is set)
47 | arr = arr.filter((item) => {
48 | if (withTimestamp) {
49 | if (avoidZeroValue) {
50 | return (item && Number.isFinite(item.value) && item.value > 0);
51 | }
52 | return item && Number.isFinite(item.value);
53 | }
54 |
55 | if (avoidZeroValue) {
56 | return (Number.isFinite(item) && item > 0);
57 | }
58 | return Number.isFinite(item);
59 | });
60 | if (arr.length === 0) {
61 | return [];
62 | }
63 | return arr;
64 | };
65 |
66 | export const filteredAverage = (nums, defaultValue) => {
67 | const filtered = nums.filter((num) => num !== null);
68 | if (filtered.length > 0) {
69 | return filtered.reduce((a, b) => a + b, 0) / nums.length;
70 | }
71 | return defaultValue;
72 | };
73 |
74 | export const average = (nums) => (nums.reduce((a, b) => a + b, 0) / nums.length);
75 |
76 | export const createProbeId = () => (`probe-${shortUUID.rnd(10)}`);
77 |
78 | export const createCollectorId = () => (`coltr-${shortUUID.rnd(10)}`);
79 |
80 | export const timeout = (ms) => (new Promise((resolve) => setTimeout(resolve, ms)));
81 |
82 | export const call = (fct, context, value) => {
83 | if (!context) {
84 | fct(value);
85 | } else {
86 | fct.call(context, value);
87 | }
88 | };
89 |
90 | export const volatilityValuesOfReports = (reports, key, subKey, ssrc, avoidZeroValue = true) => {
91 | const values = getValues(reports, key, subKey, avoidZeroValue, ssrc);
92 | if (values.length === 0) {
93 | return null;
94 | }
95 | const avg = values.reduce((p, c) => p + c, 0) / values.length;
96 | if (avg === 0 && avoidZeroValue) {
97 | return null;
98 |   } else if (avg === 0) {
99 | return 0;
100 | }
101 |
102 | const diff = values.map((data) => (Math.abs(avg - data)));
103 | const totalDiff = diff.reduce((p, c) => p + c, 0);
104 | const volatility = ((totalDiff / values.length) * 100) / avg;
105 | return volatility;
106 | };
107 |
108 | export const averageValuesOfReports = (reports, key, subKey, avoidZeroValue = false, ssrc = "") => {
109 | const values = getValues(reports, key, subKey, avoidZeroValue, ssrc);
110 | if (values.length === 0) {
111 | return null;
112 | }
113 | return values.reduce((p, c) => p + c, 0) / values.length;
114 | };
115 |
116 | export const sumValuesOfReports = (reports, key, subKey) => {
117 | const values = getValues(reports, key, subKey);
118 | return values.reduce((p, c) => p + c, 0);
119 | };
120 |
121 | export const minValueOfReports = (reports, key, subKey, ssrc, avoidZeroValue = true) => {
122 | const values = getValues(reports, key, subKey, avoidZeroValue, ssrc);
123 | if (values.length === 0) {
124 | return null;
125 | }
126 | return Math.min(...values);
127 | };
128 |
129 | export const maxValueOfReports = (reports, key, subKey, ssrc, avoidZeroValue = true) => {
130 | const values = getValues(reports, key, subKey, avoidZeroValue, ssrc);
131 | if (values.length === 0) {
132 | return null;
133 | }
134 | return Math.max(...values);
135 | };
136 |
137 | export const valuesOfReports = (reports, key, subKey, ssrc) => (getValues(reports, key, subKey, false, ssrc, true));
138 |
139 | export const lastOfReports = (reports, key, subKey, ssrc) => {
140 | const lastReport = reports.slice()
141 | .pop();
142 | if (!lastReport) {
143 | return null;
144 | }
145 | if (!subKey) {
146 | return lastReport[key];
147 | }
148 | if (!ssrc) {
149 | return lastReport[key][subKey];
150 | }
151 | const ssrcData = lastReport[key][ssrc];
152 |
153 | if (ssrcData) {
154 | return ssrcData[subKey];
155 | }
156 | return null;
157 | };
158 |
159 | export const getLastReport = (reports) => (reports.slice()
160 | .pop());
161 |
162 | export const getSSRCDataFromBunch = (ssrc, bunch, direction) => {
163 | if (!bunch) {
164 | return null;
165 | }
166 | const ssrcBunch = {};
167 | let audioBunch = bunch[VALUE.AUDIO][ssrc];
168 | if (!audioBunch) {
169 | audioBunch = direction === DIRECTION.INBOUND ? { ...defaultAudioMetricIn } : { ...defaultAudioMetricOut };
170 | }
171 | ssrcBunch[VALUE.AUDIO] = audioBunch;
172 |
173 | let videoBunch = bunch[VALUE.VIDEO][ssrc];
174 | if (!videoBunch) {
175 | videoBunch = direction === DIRECTION.INBOUND ? { ...defaultVideoMetricIn } : { ...defaultVideoMetricOut };
176 | }
177 | ssrcBunch[VALUE.VIDEO] = videoBunch;
178 | return ssrcBunch;
179 | };
180 |
181 | export const findTrackInPeerConnectionById = (trackId, pc) => {
182 | // Get track from PC senders
183 | const senderOfTrack = pc.getSenders()
184 | .find((sender) => sender.track && sender.track.id === trackId);
185 |
186 | if (senderOfTrack) {
187 | return senderOfTrack.track;
188 | }
189 |
190 | // Get track from PC receivers
191 | const receiverOfTrack = pc.getReceivers()
192 | .find((receiver) => receiver.track && receiver.track.id === trackId);
193 |
194 | if (receiverOfTrack) {
195 | return receiverOfTrack.track;
196 | }
197 | return null;
198 | };
199 |
200 | export const findOutgoingTrackFromPeerConnectionByKind = (kind, pc) => {
201 | const senderOfTrack = pc.getSenders()
202 | .find((sender) => sender.track && sender.track.kind === kind);
203 | if (senderOfTrack) {
204 | return senderOfTrack.track;
205 | }
206 | return null;
207 | };
208 |
209 | export const fixed2 = (value) => Math.round(100 * value) / 100;
210 |
--------------------------------------------------------------------------------
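
The volatility returned by `volatilityValuesOfReports` above is the mean absolute deviation of the samples expressed as a percentage of their average. A small worked example with made-up samples:

```js
// Worked example of the volatility formula used above.
const values = [40, 50, 60]; // e.g. three per-report bitrate samples (made up)
const avg = values.reduce((p, c) => p + c, 0) / values.length;       // 50
const totalDiff = values.reduce((p, c) => p + Math.abs(avg - c), 0); // 10 + 0 + 10 = 20
const volatility = ((totalDiff / values.length) * 100) / avg;        // (20 / 3) * 100 / 50
console.log(volatility.toFixed(2)); // "13.33"
```
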
/src/utils/log.js:
--------------------------------------------------------------------------------
1 | import * as log from "loglevel";
2 |
3 | const getHeader = () => `${new Date().toISOString()} | metrics`;
4 | const format = (header, module, message) => `${header} | ${module} | ${message}`;
5 |
6 | log.setDefaultLevel(log.levels.TRACE);
7 |
8 | export const setVerboseLog = (shouldHaveVerboseLog) => {
9 | log.info(format(getHeader(), "log ", `set log level to ${shouldHaveVerboseLog ? "verbose" : "info"}`));
10 | log.setLevel(shouldHaveVerboseLog ? log.levels.TRACE : log.levels.INFO);
11 | };
12 |
13 | export const setSilentLog = () => {
14 | log.setLevel(log.levels.SILENT);
15 | };
16 |
17 | export const setLogLevel = (logLevel) => {
18 | const levels = [...Object.keys(log.levels)];
19 | if (levels.includes(logLevel)) {
20 | log.info(format(getHeader(), "log ", `update log level to ${logLevel.toLowerCase()}`));
21 | log.setLevel(logLevel);
22 | } else {
23 | log.warn(format(getHeader(), "log ", "Incorrect log level please choose one of "), levels);
24 | }
25 | };
26 |
27 | export const debug = (name, message, data) => {
28 | if (data) {
29 | log.debug(format(getHeader(), name, message), data);
30 | } else {
31 | log.debug(format(getHeader(), name, message));
32 | }
33 | };
34 |
35 | export const trace = (name, message) => {
36 | log.info(format(getHeader(), name, message));
37 | };
38 |
39 | export const info = (name, message) => {
40 | log.info(format(getHeader(), name, message));
41 | };
42 |
43 | export const warn = (name, message) => {
44 | log.warn(format(getHeader(), name, message));
45 | };
46 |
47 | export const error = (name, message) => {
48 | log.error(format(getHeader(), name, message));
49 | };
50 |
--------------------------------------------------------------------------------
/src/utils/models.js:
--------------------------------------------------------------------------------
1 | import ShortUniqueId from "short-unique-id";
2 |
3 | const shortUUID = new ShortUniqueId();
4 |
5 | export const getLibName = () => "WebRTCMetrics";
6 | export const getVersion = () => "5.5.0";
7 |
8 | export const DIRECTION = {
9 | INBOUND: "inbound",
10 | OUTBOUND: "outbound",
11 | };
12 |
13 | export const COLLECTOR_STATE = {
14 | IDLE: "idle",
15 | RUNNING: "running",
16 | MUTED: "muted",
17 | };
18 |
19 | export const ENGINE_STATE = {
20 | IDLE: "idle",
21 | COLLECTING: "collecting",
22 | ENDED: "ended",
23 | };
24 |
25 | export const ICE_CONNECTION_STATE = {
26 | NEW: "new",
27 | CHECKING: "checking",
28 | CONNECTED: "connected",
29 | COMPLETED: "completed",
30 | DISCONNECTED: "disconnected",
31 | FAILED: "failed",
32 | CLOSED: "closed",
33 | };
34 |
35 | export const ICE_GATHERING_STATE = {
36 | NEW: "new",
37 | GATHERING: "gathering",
38 | COMPLETE: "complete",
39 | };
40 |
41 | export const getDefaultGlobalMetric = () => {
42 | const defaultMetrics = {
43 | delta_time_to_measure_probes_ms: 0, // Total time to measure all probes
44 | delta_time_consumed_to_measure_ms: 0, // Total time to measure at engine level (additional time needed to compute global stats)
45 | delta_KBytes_in: 0,
46 | delta_KBytes_out: 0,
47 | delta_kbs_in: 0,
48 | delta_kbs_out: 0,
49 | total_time_decoded_in: 0,
50 | total_time_encoded_out: 0,
51 | probes: [],
52 | };
53 |
54 | const metrics = {
55 | ...defaultMetrics,
56 | };
57 |
58 | return metrics;
59 | };
60 |
61 | export const defaultAudioMetricIn = {
62 | level_in: 0,
63 | codec_id_in: "",
64 | codec_in: {
65 | mime_type: null,
66 | clock_rate: null,
67 | sdp_fmtp_line: null,
68 | },
69 | track_in: "",
70 | ssrc: "",
71 | direction: DIRECTION.INBOUND,
72 | delta_jitter_ms_in: 0,
73 | delta_rtt_ms_in: null,
74 | delta_packets_in: 0,
75 | delta_packets_lost_in: 0,
76 | delta_KBytes_in: 0,
77 | delta_kbs_in: 0,
78 | delta_synthetized_ms_in: 0,
79 | delta_playout_delay_ms_in: 0,
80 | delta_jitter_buffer_delay_ms_in: 0,
81 | total_rtt_ms_in: 0,
82 | total_rtt_measure_in: 0,
83 | total_packets_in: 0,
84 | total_packets_lost_in: 0,
85 | total_KBytes_in: 0,
86 | total_percent_synthetized_in: 0,
87 | total_synthetized_ms_in: 0,
88 | total_playout_ms_in: 0,
89 | total_time_jitter_buffer_delay_in: 0,
90 | total_jitter_emitted_in: 0,
91 | percent_synthetized_in: 0,
92 | timestamp_in: null,
93 | mos_in: 1,
94 | percent_packets_lost_in: 0,
95 | };
96 |
97 | export const defaultAudioMetricOut = {
98 | active_out: null,
99 | level_out: 0,
100 | codec_id_out: "",
101 | codec_out: {
102 | mime_type: null,
103 | clock_rate: null,
104 | sdp_fmtp_line: null,
105 | },
106 | track_out: "",
107 | device_out: "",
108 | ssrc: "",
109 | direction: DIRECTION.OUTBOUND,
110 | delta_jitter_ms_out: 0,
111 | delta_rtt_ms_out: null,
112 | delta_packet_delay_ms_out: 0,
113 | delta_packets_lost_out: 0,
114 | delta_packets_out: 0,
115 | delta_KBytes_out: 0,
116 | delta_kbs_out: 0,
117 | percent_packets_lost_out: 0,
118 | total_rtt_ms_out: 0,
119 | total_rtt_measure_out: 0,
120 | total_time_packets_delay_out: 0,
121 | total_packets_lost_out: 0,
122 | total_packets_out: 0,
123 | total_KBytes_out: 0,
124 | timestamp_out: null,
125 | mos_out: 1,
126 | };
127 |
128 | export const defaultVideoMetricIn = {
129 | codec_id_in: "",
130 | codec_in: {
131 | mime_type: null,
132 | clock_rate: null,
133 | },
134 | direction: DIRECTION.INBOUND,
135 | decoder_in: null,
136 | track_in: "",
137 | ssrc: "",
138 | size_in: {
139 | width: 0,
140 | height: 0,
141 | framerate: 0,
142 | },
143 | delta_jitter_ms_in: 0,
144 | delta_packets_in: 0,
145 | delta_packets_lost_in: 0,
146 | delta_KBytes_in: 0,
147 | delta_kbs_in: 0,
148 | delta_glitch_in: {
149 | freeze: 0,
150 | pause: 0,
151 | },
152 | delta_decode_frame_ms_in: 0,
153 | delta_processing_delay_ms_in: 0,
154 | delta_assembly_delay_ms_in: 0,
155 | delta_nack_sent_in: 0,
156 | delta_pli_sent_in: 0,
157 | delta_jitter_buffer_delay_ms_in: 0,
158 | percent_packets_lost_in: 0,
159 | total_packets_in: 0,
160 | total_packets_lost_in: 0,
161 | total_KBytes_in: 0,
162 | total_glitch_in: {
163 | freeze: 0,
164 | pause: 0,
165 | },
166 | total_frames_decoded_in: 0,
167 | total_time_decoded_in: 0,
168 | total_time_processing_delay_in: 0,
169 | total_time_assembly_delay_in: 0,
170 | total_time_jitter_buffer_delay_in: 0,
171 | total_jitter_emitted_in: 0,
172 | total_nack_sent_in: 0,
173 | total_pli_sent_in: 0,
174 | };
175 |
176 | export const defaultVideoMetricOut = {
177 | active_out: null,
178 | codec_id_out: "",
179 | codec_out: {
180 | mime_type: null,
181 | clock_rate: null,
182 | },
183 | track_out: "",
184 | device_out: "",
185 | ssrc: "",
186 | direction: DIRECTION.OUTBOUND,
187 | encoder_out: null,
188 | size_out: {
189 | width: 0,
190 | height: 0,
191 | framerate: 0,
192 | },
193 | size_pref_out: {
194 | width: 0,
195 | height: 0,
196 | framerate: 0,
197 | },
198 | delta_jitter_ms_out: 0,
199 | delta_rtt_ms_out: null,
200 | delta_packet_delay_ms_out: 0,
201 | delta_packets_lost_out: 0,
202 | delta_packets_out: 0,
203 | delta_KBytes_out: 0,
204 | delta_kbs_out: 0,
205 | delta_encode_frame_ms_out: 0,
206 | delta_nack_received_out: 0,
207 | delta_pli_received_out: 0,
208 | total_rtt_ms_out: 0,
209 | total_rtt_measure_out: 0,
210 | total_time_packets_delay_out: 0,
211 | total_packets_lost_out: 0,
212 | total_packets_out: 0,
213 | total_KBytes_out: 0,
214 | total_time_encoded_out: 0,
215 | total_frames_encoded_out: 0,
216 | total_nack_received_out: 0,
217 | total_pli_received_out: 0,
218 | percent_packets_lost_out: 0,
219 | limitation_out: {
220 | reason: null,
221 | durations: null,
222 | resolutionChanges: 0,
223 | },
224 | timestamp_out: null,
225 | };
226 |
227 | export const getDefaultMetric = (previousStats) => {
228 | const defaultMetrics = {
229 | pname: "",
230 | call_id: "",
231 | user_id: "",
232 | timestamp: null,
233 | count: 0,
234 | audio: {},
235 | video: {},
236 | network: {
237 | infrastructure: 3,
238 | selected_candidate_pair_id: "",
239 | local_candidate_id: "",
240 | local_candidate_type: "",
241 | local_candidate_protocol: "",
242 | local_candidate_relay_protocol: "",
243 | remote_candidate_id: "",
244 | remote_candidate_type: "",
245 | remote_candidate_protocol: "",
246 | },
247 | data: {
248 | total_KBytes_in: 0,
249 | total_KBytes_out: 0,
250 | delta_KBytes_in: 0,
251 | delta_KBytes_out: 0,
252 | delta_kbs_in: 0,
253 | delta_kbs_out: 0,
254 | delta_kbs_bandwidth_in: 0,
255 | delta_kbs_bandwidth_out: 0,
256 | delta_rtt_connectivity_ms: null,
257 | total_rtt_connectivity_ms: 0,
258 | total_rtt_connectivity_measure: 0,
259 | },
260 | experimental: {
261 | time_to_measure_ms: 0,
262 | },
263 | passthrough: {},
264 | };
265 |
266 | if (previousStats) {
267 | const metrics = {
268 | ...previousStats,
269 | audio: {},
270 | video: {},
271 | data: { ...previousStats.data },
272 | network: { ...previousStats.network },
273 | experimental: { ...previousStats.experimental },
274 | passthrough: {},
275 | };
276 | Object.keys(previousStats.audio)
277 | .forEach((ssrc) => {
278 | metrics.audio[ssrc] = { ...previousStats.audio[ssrc] };
279 | });
280 | Object.keys(previousStats.video)
281 | .forEach((ssrc) => {
282 | metrics.video[ssrc] = { ...previousStats.video[ssrc] };
283 | });
284 | return metrics;
285 | }
286 |
287 | return {
288 | ...defaultMetrics,
289 | audio: {},
290 | video: {},
291 | data: { ...defaultMetrics.data },
292 | network: { ...defaultMetrics.network },
293 | experimental: { ...defaultMetrics.experimental },
294 | };
295 | };
296 |
297 | export const defaultConfig = {
298 | refreshEvery: 2000, // Default - generate a report every 2s (in ms). Min 1s.
299 | startAfter: 0, // Default - Duration (in ms) to wait before starting to grab the stats. 0 starts immediately
300 | stopAfter: -1, // Default - Max duration (in ms) for grabbing the stats. -1 means until calling stop().
301 | // keepMaxReport: 50, // Keep the last 50 tickets (new one erases the oldest)
302 | verbose: false, // Default - minimal logs
303 | silent: false, // Default - no log at all if set to true
304 | pname: `p-${shortUUID.rnd(10)}`, // Default - peer connection name
305 | cid: `c-${shortUUID.rnd(10)}`, // Default - call identifier
306 | uid: `u-${shortUUID.rnd(10)}`, // Default - user identifier
307 |   record: false, // Default - no record
308 | ticket: true, // Default - ticket generated and so all reports are kept
309 | passthrough: {}, // Access to specific fields directly from the stack {"inbound-rtp": ["jitter", "bytesReceived"]}
310 | };
311 |
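// Illustrative sketch (not part of the library source; the override values are
// made up): since defaultConfig is a plain object, a consumer could shallow-merge
// its own options over these defaults so only the fields it cares about change.
// const cfg = { ...defaultConfig, refreshEvery: 3000, verbose: true };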
312 | export const TYPE = {
313 | CANDIDATE_PAIR: "candidate-pair",
314 | CODEC: "codec",
315 | INBOUND_RTP: "inbound-rtp",
316 | LOCAL_CANDIDATE: "local-candidate",
317 | MEDIA_PLAYOUT: "media-playout",
318 | MEDIA_SOURCE: "media-source",
319 | OUTBOUND_RTP: "outbound-rtp",
320 | REMOTE_CANDIDATE: "remote-candidate",
321 | REMOTE_INBOUND_RTP: "remote-inbound-rtp",
322 | REMOTE_OUTBOUND_RTP: "remote-outbound-rtp",
323 | TRACK: "track",
324 | TRANSPORT: "transport",
325 | };
326 |
327 | export const PROPERTY = {
328 | AUDIO_LEVEL: "audioLevel",
329 | AVAILABLE_OUTGOING_BITRATE: "availableOutgoingBitrate",
330 | AVAILABLE_INCOMING_BITRATE: "availableIncomingBitrate",
331 | BYTES_RECEIVED: "bytesReceived",
332 | BYTES_SENT: "bytesSent",
333 | CANDIDATE_TYPE: "candidateType",
334 | CHANNELS: "channels",
335 | CLOCK_RATE: "clockRate",
336 | CODEC_ID: "codecId",
337 | CURRENT_ROUND_TRIP_TIME: "currentRoundTripTime",
338 | DECODER_IMPLEMENTATION: "decoderImplementation",
339 | ENCODER_IMPLEMENTATION: "encoderImplementation",
340 | FRACTION_LOST: "fractionLost",
341 | FRAME_HEIGHT: "frameHeight",
342 | FRAME_WIDTH: "frameWidth",
343 | FRAMES_DECODED: "framesDecoded",
344 | FRAMES_ENCODED: "framesEncoded",
345 | FRAMES_PER_SECOND: "framesPerSecond",
346 | FREEZE_COUNT: "freezeCount",
347 | HEIGHT: "height",
348 | QUALITY_LIMITATION_REASON: "qualityLimitationReason",
349 | QUALITY_LIMITATION_DURATIONS: "qualityLimitationDurations",
350 | QUALITY_LIMITATION_RESOLUTION_CHANGES: "qualityLimitationResolutionChanges",
351 | ID: "id",
352 | JITTER: "jitter",
353 | JITTER_BUFFER_DELAY: "jitterBufferDelay",
354 | JITTER_BUFFER_EMITTED_COUNT: "jitterBufferEmittedCount",
355 | KIND: "kind",
356 | LOCAL_CANDIDATE_ID: "localCandidateId",
357 | MEDIA_TYPE: "mediaType",
358 | MIME_TYPE: "mimeType",
359 | MEDIA_SOURCE_ID: "mediaSourceId",
360 | NACK: "nackCount",
361 | NETWORK_TYPE: "networkType",
362 | NOMINATED: "nominated",
363 | RELAY_PROTOCOL: "relayProtocol",
364 | PACKETS_LOST: "packetsLost",
365 | PACKETS_RECEIVED: "packetsReceived",
366 | PACKETS_SENT: "packetsSent",
367 | PAUSE_COUNT: "pauseCount",
368 | PLAYOUT_ID: "playoutId",
369 | PLI: "pliCount",
370 | PROTOCOL: "protocol",
371 | PORT: "port",
372 | REMOTE_CANDIDATE_ID: "remoteCandidateId",
373 | REMOTE_SOURCE: "remoteSource",
374 | REMOTE_TIMESTAMP: "remoteTimestamp",
375 | RESPONSES_RECEIVED: "responsesReceived",
376 | ROUND_TRIP_TIME: "roundTripTime",
377 | SDP_FMTP_LINE: "sdpFmtpLine",
378 | SSRC: "ssrc",
379 | SELECTED: "selected",
380 | SELECTED_CANDIDATEPAIR_ID: "selectedCandidatePairId",
381 | SCALABILITY_MODE: "scalabilityMode",
382 | STATE: "state",
383 | SYNTHETIZED_SAMPLES_DURATION: "synthesizedSamplesDuration",
384 | TIMESTAMP: "timestamp",
385 | TRACK_IDENTIFIER: "trackIdentifier",
386 | TRANSPORT_ID: "transportId",
387 | TOTAL_ASSEMBLY_TIME: "totalAssemblyTime",
388 | TOTAL_DECODE_TIME: "totalDecodeTime",
389 | TOTAL_ENCODE_TIME: "totalEncodeTime",
390 | TOTAL_PACKETS_SEND_DELAY: "totalPacketSendDelay",
391 | TOTAL_PLAYOUT_DELAY: "totalPlayoutDelay",
392 | TOTAL_PROCESSING_DELAY: "totalProcessingDelay",
393 | TOTAL_SAMPLES_COUNT: "totalSamplesCount",
394 | TOTAL_SAMPLES_DURATION: "totalSamplesDuration",
395 | TOTAL_ROUND_TRIP_TIME: "totalRoundTripTime",
396 | TOTAL_ROUND_TRIP_TIME_MEASUREMENTS: "roundTripTimeMeasurements",
397 | TYPE: "type",
398 | WIDTH: "width",
399 | };
400 |
401 | export const VALUE = {
402 | SUCCEEDED: "succeeded",
403 | AUDIO: "audio",
404 | VIDEO: "video",
405 | DATA: "data",
406 | };
407 |
408 | export const INFRASTRUCTURE_VALUE = {
409 | ETHERNET: 0,
410 | CELLULAR_5G: 2,
411 | WIFI: 3, // default
412 | CELLULAR_4G: 5,
413 | CELLULAR: 10,
414 | };
415 |
416 | export const INFRASTRUCTURE_LABEL = {
417 | ETHERNET: "ethernet",
418 | CELLULAR_4G: "cellular",
419 | WIFI: "wifi",
420 | };
421 |
422 | export const STAT_TYPE = {
423 | AUDIO: "audio",
424 | VIDEO: "video",
425 | NETWORK: "network",
426 | DATA: "data",
427 | };
428 |
--------------------------------------------------------------------------------
/src/utils/rules.js:
--------------------------------------------------------------------------------
1 | const ABSOLUTE_FRAMERATE_CHANGE = 2; // Alert if framerate change > 2 fps
2 | const ABSOLUTE_BYTES_THRESHOLD_PERCENT = 50; // Alert if bytes change > 50%
3 |
4 | export const alertOnFramerate = (oldFramerate, currentFramerate) => (
5 | (oldFramerate && Math.abs(oldFramerate - currentFramerate) > ABSOLUTE_FRAMERATE_CHANGE)
6 | );
7 |
8 | export const alertOnPeak = (oldBytesExchanged, currentBytesExchanged) => (
9 |   (currentBytesExchanged && (Math.abs(oldBytesExchanged - currentBytesExchanged) / currentBytesExchanged) * 100 > ABSOLUTE_BYTES_THRESHOLD_PERCENT)
10 | );
11 |
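// Illustrative values (not part of the library source):
// alertOnFramerate(30, 27) -> true,  since |30 - 27| = 3 fps exceeds the 2 fps threshold.
// alertOnPeak(100, 250)    -> true,  since |100 - 250| / 250 * 100 = 60% exceeds 50%.
// alertOnPeak(100, 140)    -> false, since the change is only ~28.6%.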
--------------------------------------------------------------------------------
/src/utils/score.js:
--------------------------------------------------------------------------------
1 | import { DIRECTION } from "./models";
2 | import { filteredAverage, getSSRCDataFromBunch } from "./helper";
3 |
4 | const getAbsoluteDelay = (roundTripTime, jitterDelay) => ((roundTripTime / 2) + jitterDelay + 20); // Add extra 20ms for packetisation delay
5 |
6 | const computeScore = (r, forceToMinimal) => {
7 | if (forceToMinimal) {
8 | return 1;
9 | }
10 |
11 | if (r < 0) {
12 | return 1;
13 | }
14 |
15 | if (r > 100) {
16 | return 4.5;
17 | }
18 |
19 | return 1 + 0.035 * r + (7.0 / 1000000) * r * (r - 60) * (100 - r);
20 | };
21 |
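// Worked example for the R-to-MOS mapping above (illustrative numbers, not
// produced by the library): computeScore(93.2) = 1 + 0.035 * 93.2
// + 7e-6 * 93.2 * (93.2 - 60) * (100 - 93.2) ≈ 4.41, while values of r below 0
// clamp to 1 and values above 100 clamp to 4.5.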
22 | const getSSRCReportFrom = (ssrc, report, previousReport, beforeLastReport, direction) => {
23 | const currentSSRCReport = getSSRCDataFromBunch(ssrc, report, direction);
24 | const previousSSRCReport = getSSRCDataFromBunch(ssrc, previousReport, direction);
25 | const beforeLastSSRCReport = getSSRCDataFromBunch(ssrc, beforeLastReport, direction);
26 |
27 | return {
28 | currentSSRCReport,
29 | previousSSRCReport,
30 | beforeLastSSRCReport,
31 | };
32 | };
33 |
34 | const computeJitter = (ssrcReport, previousSSRCReport, beforeLastSSRCReport, kind, direction, smoothedRange) => {
35 | const jitterValues = [];
36 | const currentValue = direction === DIRECTION.INBOUND ? ssrcReport[kind].delta_jitter_ms_in : ssrcReport[kind].delta_jitter_ms_out;
37 | // Current value weighted to 4
38 | jitterValues.push(currentValue, currentValue, currentValue, currentValue);
39 | if (smoothedRange > 1) {
40 | const previousValue = direction === DIRECTION.INBOUND ? (previousSSRCReport && previousSSRCReport[kind].delta_jitter_ms_in) || null : (previousSSRCReport && previousSSRCReport[kind].delta_jitter_ms_out) || null;
41 | // Previous value weighted to 2
42 | jitterValues.push(previousValue, previousValue);
43 | }
44 | if (smoothedRange > 2) {
45 | // Before last value weighted to 1
46 | jitterValues.push(direction === DIRECTION.INBOUND ? (beforeLastSSRCReport && beforeLastSSRCReport[kind].delta_jitter_ms_in) || null : (beforeLastSSRCReport && beforeLastSSRCReport[kind].delta_jitter_ms_out) || null);
47 | }
48 | return filteredAverage(jitterValues, 10);
49 | };
50 |
51 | const computeRTT = (report, ssrcReport, previousReport, previousSSRCReport, beforeLastReport, beforeLastSSRCReport, kind, direction, smoothedRange) => {
52 | const rttValues = [];
53 | const currentValue = direction === DIRECTION.INBOUND ? ssrcReport[kind].delta_rtt_ms_in || report.data.delta_rtt_connectivity_ms : ssrcReport[kind].delta_rtt_ms_out || report.data.delta_rtt_connectivity_ms;
54 | // Current value weighted to 4
55 | rttValues.push(currentValue, currentValue, currentValue, currentValue);
56 | if (smoothedRange > 1) {
57 |     const previousValue = direction === DIRECTION.INBOUND ? (previousSSRCReport && (previousSSRCReport[kind].delta_rtt_ms_in || previousReport.data.delta_rtt_connectivity_ms)) || null : (previousSSRCReport && (previousSSRCReport[kind].delta_rtt_ms_out || previousReport.data.delta_rtt_connectivity_ms)) || null;
58 | // Previous value weighted to 2
59 | rttValues.push(previousValue, previousValue);
60 | }
61 | if (smoothedRange > 2) {
62 | // Before last value weighted to 1
63 |     rttValues.push(direction === DIRECTION.INBOUND ? (beforeLastSSRCReport && (beforeLastSSRCReport[kind].delta_rtt_ms_in || beforeLastReport.data.delta_rtt_connectivity_ms)) || null : (beforeLastSSRCReport && (beforeLastSSRCReport[kind].delta_rtt_ms_out || beforeLastReport.data.delta_rtt_connectivity_ms)) || null);
64 | }
65 | return filteredAverage(rttValues, 100);
66 | };
67 |
68 | const computePacketsLossPercent = (ssrcReport, previousSSRCReport, beforeLastSSRCReport, kind, direction, smoothedRange) => {
69 | const packetLossValues = [];
70 | const currentValue = direction === DIRECTION.INBOUND ? ssrcReport[kind].percent_packets_lost_in : ssrcReport[kind].percent_packets_lost_out;
71 | // Current value weighted to 4
72 | packetLossValues.push(currentValue, currentValue, currentValue, currentValue);
73 | if (smoothedRange > 1) {
74 | const previousValue = direction === DIRECTION.INBOUND ? (previousSSRCReport && previousSSRCReport[kind].percent_packets_lost_in) || null : (previousSSRCReport && previousSSRCReport[kind].percent_packets_lost_out) || null;
75 | // Previous value weighted to 2
76 | packetLossValues.push(previousValue, previousValue);
77 | }
78 | if (smoothedRange > 2) {
79 | // Before last value weighted to 1
80 | packetLossValues.push(direction === DIRECTION.INBOUND ? (beforeLastSSRCReport && beforeLastSSRCReport[kind].percent_packets_lost_in) || null : (beforeLastSSRCReport && beforeLastSSRCReport[kind].percent_packets_lost_out) || null);
81 | }
82 | return filteredAverage(packetLossValues, 0);
83 | };
84 |
85 | const computeFullEModelScore = (
86 | report,
87 | kind,
88 | previousReport,
89 | beforeLastReport,
90 | ssrc,
91 | direction = DIRECTION.INBOUND,
92 | smoothedRange = 3,
93 | ) => {
94 | const RoFB = 148; // RoFB is the signal-to-noise ratio
95 | const IsFB = 0; // IsFB is the simultaneous impairment factor
96 |   let Idd = 0; // Idd is the delay impairment factor
97 | const A = 0; // A is the advantage factor
98 |
99 | const {
100 | currentSSRCReport,
101 | previousSSRCReport,
102 | beforeLastSSRCReport,
103 | } = getSSRCReportFrom(ssrc, report, previousReport, beforeLastReport, direction);
104 | const packetsLoss = computePacketsLossPercent(currentSSRCReport, previousSSRCReport, beforeLastSSRCReport, kind, direction, smoothedRange);
105 | const rtt = computeRTT(report, currentSSRCReport, previousReport, previousSSRCReport, beforeLastReport, beforeLastSSRCReport, kind, direction, smoothedRange);
106 | const jitter = computeJitter(currentSSRCReport, previousSSRCReport, beforeLastSSRCReport, kind, direction, smoothedRange);
107 |
108 | const Ta = getAbsoluteDelay(rtt, jitter); // Overall one way delay (ms)
109 | const defaultEquipmentImpairmentFactor = 132;
110 | const defaultIe = 10.2; // 10.2 G.113 Amend 3
111 | const defaultBpl = 9.6; // G.113 Amend 3
112 | const Iee = defaultIe + ((defaultEquipmentImpairmentFactor - defaultIe) * (packetsLoss / (packetsLoss + defaultBpl)));
113 |
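// Descriptive note (added for readability): the block below applies a
// G.107-style delay impairment once the one-way delay exceeds 100 ms, with
// x = log2(Ta / 100) and Idd = 1.48 * 25 * ((1 + x^6)^(1/6) - 3 * (1 + (x / 3)^6)^(1/6) + 2).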
114 | if (Ta > 100) {
115 | const x = (Math.log(Ta) - Math.log(100)) / (Math.log(2));
116 | const a = x ** 6;
117 | const b = (1 + a) ** (1 / 6);
118 | const c = (x / 3) ** 6;
119 | const d = (1 + c) ** (1 / 6);
120 | Idd = 1.48 * 25 * (b - (3 * d) + 2);
121 | }
122 |
123 | const Rx = RoFB - IsFB - Idd - Iee + A;
124 | const R = Rx / 1.48;
125 | return computeScore(R);
126 | };
127 |
128 | const computeEModelMOS = (
129 | report,
130 | kind,
131 | previousReport,
132 | beforeLastReport,
133 | ssrc,
134 | direction = DIRECTION.INBOUND,
135 | smoothedRange = 3,
136 | ) => {
137 | const {
138 | currentSSRCReport,
139 | previousSSRCReport,
140 | beforeLastSSRCReport,
141 | } = getSSRCReportFrom(ssrc, report, previousReport, beforeLastReport, direction);
142 | const packetsLoss = computePacketsLossPercent(currentSSRCReport, previousSSRCReport, beforeLastSSRCReport, kind, direction, smoothedRange);
143 | const rtt = computeRTT(report, currentSSRCReport, previousReport, previousSSRCReport, beforeLastReport, beforeLastSSRCReport, kind, direction, smoothedRange);
144 | const jitter = computeJitter(currentSSRCReport, previousSSRCReport, beforeLastSSRCReport, kind, direction, smoothedRange);
145 |
146 | const rx = Math.max(0, 93.2 - packetsLoss);
147 | const ry = 0.18 * rx * rx - 27.9 * rx + 1126.62;
148 |
149 | const d = getAbsoluteDelay(rtt, jitter);
150 | const h = d - 177.3 < 0 ? 0 : 1;
151 |
152 | const id = 0.024 * d + 0.11 * (d - 177.3) * h;
153 |
154 | const r = ry - id;
155 |
156 | return computeScore(r, packetsLoss > 30);
157 | };
158 |
159 | export const computeNarrowEModelScore = (
160 | report,
161 | kind,
162 | previousReport,
163 | beforeLastReport,
164 | ssrc,
165 | direction = DIRECTION.INBOUND,
166 | smoothedRange = 3,
167 | ) => {
168 | const RoFB = 93.2; // RoFB is the signal-to-noise ratio
169 | const IsFB = 0; // IsFB is the simultaneous impairment factor
170 |   let Idd = 0; // Idd is the delay impairment factor
171 | const A = 0; // A is the advantage factor
172 |
173 | const {
174 | currentSSRCReport,
175 | previousSSRCReport,
176 | beforeLastSSRCReport,
177 | } = getSSRCReportFrom(ssrc, report, previousReport, beforeLastReport, direction);
178 | const packetsLoss = computePacketsLossPercent(currentSSRCReport, previousSSRCReport, beforeLastSSRCReport, kind, direction, smoothedRange);
179 | const rtt = computeRTT(report, currentSSRCReport, previousReport, previousSSRCReport, beforeLastReport, beforeLastSSRCReport, kind, direction, smoothedRange);
180 | const jitter = computeJitter(currentSSRCReport, previousSSRCReport, beforeLastSSRCReport, kind, direction, smoothedRange);
181 |
182 | const Ta = getAbsoluteDelay(rtt, jitter); // Overall one way delay (ms)
183 | const defaultEquipmentImpairmentFactor = 95;
184 | const defaultIe = 0;
185 | const defaultBpl = 4.3;
186 | const Iee = defaultIe + ((defaultEquipmentImpairmentFactor - defaultIe) * (packetsLoss / (packetsLoss + defaultBpl)));
187 |
188 | if (Ta > 100) {
189 | const x = (Math.log(Ta) - Math.log(100)) / (Math.log(2));
190 | const a = x ** 6;
191 | const b = (1 + a) ** (1 / 6);
192 | const c = (x / 3) ** 6;
193 | const d = (1 + c) ** (1 / 6);
194 | Idd = 25 * (b - (3 * d) + 2);
195 | }
196 |
197 | const Rx = RoFB - IsFB - Idd - Iee + A;
198 | return computeScore(Rx);
199 | };
200 |
201 | export const mos = (report, kind, previousReport, beforeLastReport, ssrc, direction, smoothedRange = 3) => {
202 | const currentSSRCReport = getSSRCDataFromBunch(ssrc, report, direction);
203 |
204 |   const codec = direction === DIRECTION.INBOUND ? currentSSRCReport[kind].codec_in?.mime_type || null : currentSSRCReport[kind].codec_out?.mime_type || null;
205 |
206 | // For Opus, compute G.107.2 MOS
207 | if (codec === "opus") {
208 | return computeFullEModelScore(report, kind, previousReport, beforeLastReport, ssrc, direction, smoothedRange);
209 | }
210 |
211 | // For other codecs, compute min of G.107 and G.107 simplified
212 | return Math.min(
213 | computeEModelMOS(report, kind, previousReport, beforeLastReport, ssrc, direction, smoothedRange),
214 | computeNarrowEModelScore(report, kind, previousReport, beforeLastReport, ssrc, direction, smoothedRange),
215 | );
216 | };
217 |
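// Illustrative call shape (argument values are made up):
// mos(currentReport, "audio", previousReport, beforeLastReport, "3450150732", DIRECTION.INBOUND)
// yields a score in the 1..4.5 range; the fullband model is used when the
// reported mime type is exactly "opus", otherwise the lower of the two other
// E-Model variants is returned.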
--------------------------------------------------------------------------------
/webpack.config-dev.js:
--------------------------------------------------------------------------------
1 | const path = require('path');
2 |
3 | const ESLintPlugin = require('eslint-webpack-plugin');
4 |
5 | module.exports = {
6 | entry: './src/index.js',
7 | mode: "development",
8 | output: {
9 | path: path.resolve(__dirname, 'dist'),
10 | filename: 'WebRTCMetrics.js',
11 | library: {
12 | type: 'umd',
13 | name: "WebRTCMetrics",
14 | export: 'default',
15 | },
16 | },
17 | plugins: [new ESLintPlugin()],
18 | };
19 |
--------------------------------------------------------------------------------
/webpack.config-prod.js:
--------------------------------------------------------------------------------
1 | const path = require('path');
2 |
3 | module.exports = {
4 | entry: './src/index.js',
5 | mode: "production",
6 | output: {
7 | path: path.resolve(__dirname, 'dist'),
8 | filename: 'WebRTCMetrics.js',
9 | library: {
10 | name: 'WebRTCMetrics',
11 | type: 'umd',
12 | export: 'default',
13 | },
14 | },
15 | };
16 |
--------------------------------------------------------------------------------