├── .github └── workflows │ ├── build.yml │ └── publish.yaml ├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── README.md ├── analysis_options.yaml ├── lib ├── dart_webrtc.dart └── src │ ├── e2ee.worker │ ├── e2ee.cryptor.dart │ ├── e2ee.keyhandler.dart │ ├── e2ee.logger.dart │ ├── e2ee.sfi_guard.dart │ ├── e2ee.utils.dart │ └── e2ee.worker.dart │ ├── event.dart │ ├── factory_impl.dart │ ├── frame_cryptor_impl.dart │ ├── media_devices.dart │ ├── media_recorder.dart │ ├── media_recorder_impl.dart │ ├── media_stream_impl.dart │ ├── media_stream_track_impl.dart │ ├── mediadevices_impl.dart │ ├── navigator_impl.dart │ ├── rtc_data_channel_impl.dart │ ├── rtc_dtmf_sender_impl.dart │ ├── rtc_peerconnection_impl.dart │ ├── rtc_rtp_capailities_imp.dart │ ├── rtc_rtp_parameters_impl.dart │ ├── rtc_rtp_receiver_impl.dart │ ├── rtc_rtp_sender_impl.dart │ ├── rtc_rtp_transceiver_impl.dart │ ├── rtc_track_event_impl.dart │ ├── rtc_transform_stream.dart │ ├── rtc_video_element.dart │ └── utils.dart ├── pubspec.yaml ├── renovate.json └── web ├── favicon.ico ├── index.html ├── main.dart ├── p2p ├── p2p.dart ├── random_string.dart ├── signaling.dart └── simple_websocket.dart ├── styles.css ├── test_media_devices.dart ├── test_media_stream.dart ├── test_media_stream_track.dart ├── test_peerconnection.dart ├── test_template.dart └── test_video_element.dart /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: Build 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ master ] 8 | 9 | jobs: 10 | test: 11 | name: Test on ${{ matrix.os }} 12 | runs-on: ${{ matrix.os }} 13 | strategy: 14 | matrix: 15 | os: [ubuntu-latest] 16 | 17 | steps: 18 | - uses: actions/checkout@v2 19 | - uses: actions/setup-java@v1 20 | with: 21 | java-version: '12.x' 22 | - uses: subosito/flutter-action@v1 23 | with: 24 | flutter-version: '2.2.3' 25 | channel: 'stable' 26 | - run: dart pub get 27 | - run: dart format lib/ 
test/ --set-exit-if-changed 28 | - run: dart pub run import_sorter:main --no-comments --exit-if-changed 29 | - run: dart analyze 30 | -------------------------------------------------------------------------------- /.github/workflows/publish.yaml: -------------------------------------------------------------------------------- 1 | name: Publish to pub.dev 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'v[0-9]+.[0-9]+.[0-9]+*' 7 | 8 | jobs: 9 | publish: 10 | permissions: 11 | id-token: write # Required for authentication using OIDC 12 | uses: dart-lang/setup-dart/.github/workflows/publish.yml@v1 13 | # with: 14 | # working-directory: path/to/package/within/repository 15 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # See https://www.dartlang.org/guides/libraries/private-files 2 | 3 | # Files and directories created by pub 4 | .dart_tool/ 5 | .packages 6 | build/ 7 | # If you're building an application, you may want to check-in your pubspec.lock 8 | pubspec.lock 9 | 10 | # Directory created by dartdoc 11 | # If you don't generate documentation locally you can remove this line. 12 | doc/api/ 13 | 14 | # Avoid committing generated Javascript files: 15 | *.dart.js 16 | *.info.json # Produced by the --dump-info flag. 17 | *.js # When generated by dart2js. Don't specify *.js if your 18 | # project includes source files written in JavaScript. 19 | *.js_ 20 | *.js.deps 21 | *.js.map 22 | .DS_Store 23 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | -------------------------------------------- 4 | [1.5.4] - 2025-04-29 5 | 6 | * Media recording changes. 7 | 8 | [1.5.3+hotfix.2] - 2025-04-25 9 | 10 | * fix bug for dc.onMessage. 
11 | 12 | [1.5.3+hotfix.1] - 2025-04-25 13 | 14 | * add getter override for dc.bufferedAmountLowThreshold. 15 | 16 | [1.5.3] - 2025-03-24 17 | 18 | * add getBufferedAmount for DC. 19 | 20 | [1.5.2+hotfix.1] - 2025-02-23. 21 | 22 | * remove platform_detect. 23 | 24 | [1.5.2] - 2025-02-23. 25 | 26 | * fix stats for web. 27 | 28 | [1.5.1] - 2025-02-15 29 | 30 | * fix E2EE for firefox. 31 | 32 | [1.5.0] - 2025-02-13 33 | 34 | * remove js_util. 35 | 36 | [1.4.10] - 2024-012-16 37 | 38 | * fix compiler errors. 39 | 40 | [1.4.9] - 2024-09-04 41 | 42 | * bump web version to 1.0.0. 43 | 44 | [1.4.8] - 2024-07-12 45 | 46 | * fix: missing streamCompleter complete for getUserMedia. 47 | * fix: RTCPeerConnectionWeb.getRemoteStreams. 48 | 49 | [1.4.7] - 2024-07-12 50 | 51 | * fix: MediaStreamTrack.getSettings. 52 | 53 | [1.4.6+hotfix.1] - 2024-06-07 54 | 55 | * Wider version dependencies for js/http. 56 | 57 | [1.4.6] - 2024-06-05 58 | 59 | * chore: bump version for js and http. 60 | * fix: decrypting audio when e2ee. 61 | * fix: translate audio constraints for web. 62 | * fix: missing fault tolerance, better worker reports and a increased timeout for worker tasks. 63 | * fix type cast exception in getConstraints() 64 | 65 | [1.4.5] - 2024-05-13 66 | 67 | * fix: negotiationNeeded listener. 68 | * fix: fix type cast exception in getConstraints(). 69 | 70 | [1.4.4] - 2024-04-24 71 | 72 | * fix: datachannel message parse for Firefox. 73 | * fix: tryCatch editing mediaConstraints #34 74 | 75 | [1.4.3] - 2024-04-18 76 | 77 | * fix: do not fail if removing constraint fails 78 | 79 | [1.4.2] - 2024-04-15 80 | 81 | * fix. 82 | 83 | [1.4.1] - 2024-04-12 84 | 85 | * remove RTCConfiguration convert. 86 | 87 | [1.4.0] - 2024-04-09 88 | 89 | * Fixed bug for RTCConfiguration convert. 90 | 91 | [1.3.3] - 2024-04-09 92 | 93 | * Fix DC data parse. 94 | 95 | [1.3.2] - 2024-04-09 96 | 97 | * Fix error when constructing RTCDataChannelInit. 
98 | 99 | [1.3.1] - 2024-04-08 100 | 101 | * Add keyRingSize/discardFrameWhenCryptorNotReady to KeyProviderOptions. 102 | 103 | [1.3.0] - 2024-04-08 104 | 105 | * update to package:web by @jezell in #29. 106 | 107 | [1.2.1] - 2024-02-05 108 | 109 | * Downgrade some dependencies make more compatible. 110 | 111 | [1.2.0] - 2024-02-05 112 | 113 | * Make E2EE events to be consistent with native. 114 | * E2EE imporve, and fix issue on Firefox. 115 | 116 | [1.1.2] - 2023-09-14 117 | 118 | * Add more frame cryptor api. 119 | 120 | [1.1.2] - 2023-08-14 121 | 122 | * Add async functions for get pc states. 123 | 124 | [1.1.1] - 2023-06-29 125 | 126 | * downgrade collection to ^1.17.1. 127 | 128 | [1.1.0] - 2023-06-29 129 | 130 | * Add FrameCryptor support. 131 | 132 | [1.0.17] - 2023-06-14 133 | 134 | * Fix facingMode for mobile. 135 | 136 | [1.0.16] - 2023-04-10 137 | 138 | * Add addStreams to RTCRtpSender. 139 | 140 | [1.0.15] - 2023-02-10 141 | 142 | * add bufferedamountlow 143 | * Fix bug for firefox. 144 | 145 | [1.0.14] - 2023-01-30 146 | 147 | * Add support for getCapabilities/setCodecPreferences. 148 | 149 | [1.0.13] - 2022-12-12 150 | 151 | * export jsRtpReciver. 152 | 153 | [1.0.12] - 2022-12-12 154 | 155 | * fix: Convert iceconnectionstate to connectionstate for Firefox. 156 | 157 | [1.0.11] - 2022-11-12 158 | 159 | * Change MediaStream.clone to async. 160 | 161 | [1.0.10] - 2022-11-02 162 | 163 | * Update MediaRecorder interface. 164 | 165 | [1.0.9] - 2022-10-10 166 | 167 | * Use RTCPeerConnection::onConnectionStateChange. 168 | 169 | -------------------------------------------- 170 | [1.0.8] - 2022-09-06 171 | 172 | * Bump version for webrtc-interface. 173 | 174 | [1.0.7] - 2022-08-04 175 | 176 | * Bump version for webrtc-interface. 177 | 178 | [1.0.6] - 2022-05-08 179 | 180 | * Support null tracks in replaceTrack/setTrack. 181 | 182 | [1.0.5] - 2022-03-31 183 | 184 | * Added RTCDataChannel.id 185 | 186 | [1.0.4] - 2022-02-07 187 | 188 | * Add restartIce. 
189 | * Bump version for webrtc-interface. 190 | 191 | [1.0.3] - 2021-12-28 192 | 193 | * export media_stream_impl.dart to fix do not import impl files. 194 | 195 | [1.0.2] - 2021-11-27 196 | 197 | * Fix the type error of minified function in release mode. 198 | 199 | [1.0.1] - 2021-11-25 200 | 201 | * Bump interface version to 1.0.1 202 | * Reduce code. 203 | 204 | 1.0.0 205 | 206 | * Refactor using webrtc_interface. 207 | 208 | 0.2.3 209 | 210 | * Fix bug for simulcast. 211 | 212 | 0.2.2 213 | 214 | * Fix bug for unified-plan. 215 | 216 | 0.2.1 217 | 218 | * Fix getStats. 219 | 220 | 0.2.0 221 | 222 | * Implement basic functions. 223 | 224 | 0.1.0 225 | 226 | * First working version. 227 | 228 | 0.0.1 229 | 230 | * Initial version, created by Stagehand 231 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Flutter WebRTC 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # dart-webrtc 2 | 3 | A webrtc interface wrapped in dart language. 4 | 5 | Use the [dart/js](https://pub.dev/packages/js) library to re-wrap the [webrtc](https://developer.mozilla.org/en-US/docs/Web/API/WebRTC_API) js interface of the browser, to adapted common browsers. 6 | 7 | This library will be used for [flutter-webrtc](https://github.com/flutter-webrtc/flutter-webrtc) for [flutter web](https://flutter.dev/web) plugin. 8 | 9 | ## compile E2EE worker 10 | 11 | ```bash 12 | dart compile js ./lib/src/e2ee.worker/e2ee.worker.dart -o web/e2ee.worker.dart.js 13 | ``` 14 | 15 | ## How to develop 16 | 17 | * `git clone https://github.com/flutter-webrtc/dart-webrtc && cd dart-webrtc` 18 | * `dart pub get` 19 | * `dart pub global activate webdev` 20 | * `webdev serve --auto=refresh` 21 | -------------------------------------------------------------------------------- /analysis_options.yaml: -------------------------------------------------------------------------------- 1 | include: package:pedantic/analysis_options.yaml 2 | 3 | linter: 4 | rules: 5 | - always_declare_return_types 6 | - avoid_empty_else 7 | - await_only_futures 8 | - avoid_returning_null_for_void 9 | - cancel_subscriptions 10 | - directives_ordering 11 | - flutter_style_todos 12 | - sort_constructors_first 13 | - sort_unnamed_constructors_first 14 | - sort_pub_dependencies 15 | - type_init_formals 16 | - unnecessary_brace_in_string_interps 17 | - unnecessary_const 18 | - unnecessary_new 19 | - unnecessary_getters_setters 20 | - 
unnecessary_null_aware_assignments 21 | - unnecessary_null_in_if_null_operators 22 | - unnecessary_overrides 23 | - unnecessary_parenthesis 24 | - unnecessary_statements 25 | - unnecessary_string_interpolations 26 | - unnecessary_this 27 | - unrelated_type_equality_checks 28 | - use_rethrow_when_possible 29 | - valid_regexps 30 | - void_checks 31 | 32 | analyzer: 33 | errors: 34 | # treat missing required parameters as a warning (not a hint) 35 | missing_required_param: warning 36 | # treat missing returns as a warning (not a hint) 37 | missing_return: warning 38 | # allow having TODOs in the code 39 | todo: ignore 40 | # allow self-reference to deprecated members (we do this because otherwise we have 41 | # to annotate every member in every test, assert, etc, when we deprecate something) 42 | deprecated_member_use_from_same_package: ignore -------------------------------------------------------------------------------- /lib/dart_webrtc.dart: -------------------------------------------------------------------------------- 1 | library dart_webrtc; 2 | 3 | export 'package:webrtc_interface/webrtc_interface.dart' 4 | hide MediaDevices, MediaRecorder, Navigator; 5 | 6 | export 'src/factory_impl.dart'; 7 | export 'src/media_devices.dart'; 8 | export 'src/media_recorder.dart'; 9 | export 'src/media_stream_impl.dart'; 10 | export 'src/rtc_video_element.dart'; 11 | -------------------------------------------------------------------------------- /lib/src/e2ee.worker/e2ee.cryptor.dart: -------------------------------------------------------------------------------- 1 | import 'dart:async'; 2 | import 'dart:js_interop'; 3 | import 'dart:js_interop_unsafe'; 4 | import 'dart:math'; 5 | import 'dart:typed_data'; 6 | 7 | import 'package:js/js.dart'; 8 | import 'package:web/web.dart' as web; 9 | import 'e2ee.keyhandler.dart'; 10 | import 'e2ee.logger.dart'; 11 | import 'e2ee.sfi_guard.dart'; 12 | 13 | const IV_LENGTH = 12; 14 | 15 | const kNaluTypeMask = 0x1f; 16 | 17 | /// Coded 
slice of a non-IDR picture 18 | const SLICE_NON_IDR = 1; 19 | 20 | /// Coded slice data partition A 21 | const SLICE_PARTITION_A = 2; 22 | 23 | /// Coded slice data partition B 24 | const SLICE_PARTITION_B = 3; 25 | 26 | /// Coded slice data partition C 27 | const SLICE_PARTITION_C = 4; 28 | 29 | /// Coded slice of an IDR picture 30 | const SLICE_IDR = 5; 31 | 32 | /// Supplemental enhancement information 33 | const SEI = 6; 34 | 35 | /// Sequence parameter set 36 | const SPS = 7; 37 | 38 | /// Picture parameter set 39 | const PPS = 8; 40 | 41 | /// Access unit delimiter 42 | const AUD = 9; 43 | 44 | /// End of sequence 45 | const END_SEQ = 10; 46 | 47 | /// End of stream 48 | const END_STREAM = 11; 49 | 50 | /// Filler data 51 | const FILLER_DATA = 12; 52 | 53 | /// Sequence parameter set extension 54 | const SPS_EXT = 13; 55 | 56 | /// Prefix NAL unit 57 | const PREFIX_NALU = 14; 58 | 59 | /// Subset sequence parameter set 60 | const SUBSET_SPS = 15; 61 | 62 | /// Depth parameter set 63 | const DPS = 16; 64 | 65 | // 17, 18 reserved 66 | 67 | /// Coded slice of an auxiliary coded picture without partitioning 68 | const SLICE_AUX = 19; 69 | 70 | /// Coded slice extension 71 | const SLICE_EXT = 20; 72 | 73 | /// Coded slice extension for a depth view component or a 3D-AVC texture view component 74 | const SLICE_LAYER_EXT = 21; 75 | 76 | // 22, 23 reserved 77 | 78 | List findNALUIndices(Uint8List stream) { 79 | var result = []; 80 | var start = 0, pos = 0, searchLength = stream.length - 2; 81 | while (pos < searchLength) { 82 | // skip until end of current NALU 83 | while (pos < searchLength && 84 | !(stream[pos] == 0 && stream[pos + 1] == 0 && stream[pos + 2] == 1)) { 85 | pos++; 86 | } 87 | if (pos >= searchLength) pos = stream.length; 88 | // remove trailing zeros from current NALU 89 | var end = pos; 90 | while (end > start && stream[end - 1] == 0) { 91 | end--; 92 | } 93 | // save current NALU 94 | if (start == 0) { 95 | if (end != start) throw Exception('byte 
stream contains leading data'); 96 | } else { 97 | result.add(start); 98 | } 99 | // begin new NALU 100 | start = pos = pos + 3; 101 | } 102 | return result; 103 | } 104 | 105 | int parseNALUType(int startByte) { 106 | return startByte & kNaluTypeMask; 107 | } 108 | 109 | enum CryptorError { 110 | kNew, 111 | kOk, 112 | kDecryptError, 113 | kEncryptError, 114 | kUnsupportedCodec, 115 | kMissingKey, 116 | kKeyRatcheted, 117 | kInternalError, 118 | kDisposed, 119 | } 120 | 121 | class FrameInfo { 122 | FrameInfo({ 123 | required this.ssrc, 124 | required this.timestamp, 125 | required this.buffer, 126 | required this.frameType, 127 | }); 128 | String frameType; 129 | int ssrc; 130 | int timestamp; 131 | Uint8List buffer; 132 | } 133 | 134 | class FrameCryptor { 135 | FrameCryptor({ 136 | required this.worker, 137 | required this.participantIdentity, 138 | required this.trackId, 139 | required this.keyHandler, 140 | }); 141 | Map sendCounts = {}; 142 | String? participantIdentity; 143 | String? trackId; 144 | String? 
codec; 145 | ParticipantKeyHandler keyHandler; 146 | KeyOptions get keyOptions => keyHandler.keyOptions; 147 | late String kind; 148 | bool _enabled = false; 149 | CryptorError lastError = CryptorError.kNew; 150 | int currentKeyIndex = 0; 151 | final web.DedicatedWorkerGlobalScope worker; 152 | SifGuard sifGuard = SifGuard(); 153 | 154 | void setParticipant(String identity, ParticipantKeyHandler keys) { 155 | if (lastError != CryptorError.kOk) { 156 | logger.info( 157 | 'setParticipantId: lastError != CryptorError.kOk, reset state to kNew'); 158 | lastError = CryptorError.kNew; 159 | } 160 | participantIdentity = identity; 161 | keyHandler = keys; 162 | sifGuard.reset(); 163 | } 164 | 165 | void unsetParticipant() { 166 | participantIdentity = null; 167 | } 168 | 169 | void setKeyIndex(int keyIndex) { 170 | if (lastError != CryptorError.kOk) { 171 | logger.info( 172 | 'setKeyIndex: lastError != CryptorError.kOk, reset state to kNew'); 173 | lastError = CryptorError.kNew; 174 | } 175 | logger.config('setKeyIndex for $participantIdentity, newIndex: $keyIndex'); 176 | currentKeyIndex = keyIndex; 177 | } 178 | 179 | void setSifTrailer(Uint8List? 
magicBytes) { 180 | logger.config( 181 | 'setSifTrailer for $participantIdentity, magicBytes: $magicBytes'); 182 | keyOptions.uncryptedMagicBytes = magicBytes; 183 | } 184 | 185 | void setEnabled(bool enabled) { 186 | if (lastError != CryptorError.kOk) { 187 | logger.info( 188 | 'setEnabled[$enabled]: lastError != CryptorError.kOk, reset state to kNew'); 189 | lastError = CryptorError.kNew; 190 | } 191 | logger.config('setEnabled for $participantIdentity, enabled: $enabled'); 192 | _enabled = enabled; 193 | } 194 | 195 | bool get enabled { 196 | if (participantIdentity == null) { 197 | return false; 198 | } 199 | return _enabled; 200 | } 201 | 202 | void updateCodec(String codec) { 203 | if (lastError != CryptorError.kOk) { 204 | logger.info( 205 | 'updateCodec[$codec]: lastError != CryptorError.kOk, reset state to kNew'); 206 | lastError = CryptorError.kNew; 207 | } 208 | logger.config('updateCodec for $participantIdentity, codec: $codec'); 209 | this.codec = codec; 210 | } 211 | 212 | Uint8List makeIv( 213 | {required int synchronizationSource, required int timestamp}) { 214 | var iv = ByteData(IV_LENGTH); 215 | 216 | // having to keep our own send count (similar to a picture id) is not ideal. 217 | if (sendCounts[synchronizationSource] == null) { 218 | // Initialize with a random offset, similar to the RTP sequence number. 219 | sendCounts[synchronizationSource] = Random.secure().nextInt(0xffff); 220 | } 221 | 222 | var sendCount = sendCounts[synchronizationSource] ?? 
0; 223 | 224 | iv.setUint32(0, synchronizationSource); 225 | iv.setUint32(4, timestamp); 226 | iv.setUint32(8, timestamp - (sendCount % 0xffff)); 227 | 228 | sendCounts[synchronizationSource] = sendCount + 1; 229 | 230 | return iv.buffer.asUint8List(); 231 | } 232 | 233 | void postMessage(Object message) { 234 | worker.postMessage(message.jsify()); 235 | } 236 | 237 | Future setupTransform({ 238 | required String operation, 239 | required web.ReadableStream readable, 240 | required web.WritableStream writable, 241 | required String trackId, 242 | required String kind, 243 | String? codec, 244 | }) async { 245 | logger.info('setupTransform $operation kind $kind'); 246 | this.kind = kind; 247 | if (codec != null) { 248 | logger.info('setting codec on cryptor to $codec'); 249 | this.codec = codec; 250 | } 251 | var transformer = web.TransformStream({ 252 | 'transform': 253 | allowInterop(operation == 'encode' ? encodeFunction : decodeFunction) 254 | }.jsify() as JSObject); 255 | try { 256 | readable 257 | .pipeThrough(transformer as web.ReadableWritablePair) 258 | .pipeTo(writable); 259 | } catch (e) { 260 | logger.warning('e ${e.toString()}'); 261 | if (lastError != CryptorError.kInternalError) { 262 | lastError = CryptorError.kInternalError; 263 | postMessage({ 264 | 'type': 'cryptorState', 265 | 'msgType': 'event', 266 | 'participantId': participantIdentity, 267 | 'state': 'internalError', 268 | 'error': 'Internal error: ${e.toString()}' 269 | }); 270 | } 271 | } 272 | this.trackId = trackId; 273 | } 274 | 275 | int getUnencryptedBytes(JSObject obj, String? 
codec) { 276 | var data; 277 | var frameType = ''; 278 | if (obj is web.RTCEncodedVideoFrame) { 279 | data = obj.data.toDart.asUint8List(); 280 | if (obj.hasProperty('type'.toJS).toDart) { 281 | frameType = obj.type; 282 | logger.finer('frameType: $frameType'); 283 | } 284 | } 285 | 286 | if (codec != null && codec.toLowerCase() == 'h264') { 287 | var naluIndices = findNALUIndices(data); 288 | for (var index in naluIndices) { 289 | var type = parseNALUType(data[index]); 290 | switch (type) { 291 | case SLICE_IDR: 292 | case SLICE_NON_IDR: 293 | // skipping 294 | logger.finer( 295 | 'unEncryptedBytes NALU of type $type, offset ${index + 2}'); 296 | return index + 2; 297 | default: 298 | logger.finer('skipping NALU of type $type'); 299 | break; 300 | } 301 | } 302 | throw Exception('Could not find NALU'); 303 | } 304 | switch (frameType) { 305 | case 'key': 306 | return 10; 307 | case 'delta': 308 | return 3; 309 | case 'audio': 310 | return 1; // frame.type is not set on audio, so this is set manually 311 | default: 312 | return 0; 313 | } 314 | } 315 | 316 | FrameInfo readFrameInfo(JSObject frameObj) { 317 | var buffer = Uint8List(0); 318 | var synchronizationSource = 0; 319 | var timestamp = 0; 320 | var frameType = ''; 321 | if (frameObj is web.RTCEncodedVideoFrame) { 322 | buffer = frameObj.data.toDart.asUint8List(); 323 | if (frameObj.hasProperty('type'.toJS).toDart) { 324 | frameType = frameObj.type; 325 | logger.finer('frameType: $frameType'); 326 | } 327 | synchronizationSource = frameObj.getMetadata().synchronizationSource; 328 | if (frameObj.getMetadata().hasProperty('rtpTimestamp'.toJS).toDart) { 329 | timestamp = frameObj.getMetadata().rtpTimestamp.toInt(); 330 | } else if (frameObj.hasProperty('timestamp'.toJS).toDart) { 331 | timestamp = 332 | (frameObj.getProperty('timestamp'.toJS) as JSNumber).toDartInt; 333 | } 334 | } else if (frameObj is web.RTCEncodedAudioFrame) { 335 | buffer = frameObj.data.toDart.asUint8List(); 336 | synchronizationSource = 
frameObj.getMetadata().synchronizationSource; 337 | 338 | if (frameObj.getMetadata().hasProperty('rtpTimestamp'.toJS).toDart) { 339 | timestamp = frameObj.getMetadata().rtpTimestamp.toInt(); 340 | } else if (frameObj.hasProperty('timestamp'.toJS).toDart) { 341 | timestamp = 342 | (frameObj.getProperty('timestamp'.toJS) as JSNumber).toDartInt; 343 | } 344 | frameType = 'audio'; 345 | } else { 346 | throw Exception( 347 | 'encodeFunction: frame is not a RTCEncodedVideoFrame or RTCEncodedAudioFrame'); 348 | } 349 | 350 | return FrameInfo( 351 | ssrc: synchronizationSource, 352 | timestamp: timestamp, 353 | buffer: buffer, 354 | frameType: frameType, 355 | ); 356 | } 357 | 358 | void enqueueFrame(JSObject frameObj, 359 | web.TransformStreamDefaultController controller, BytesBuilder buffer) { 360 | if (frameObj is web.RTCEncodedVideoFrame) { 361 | frameObj.data = buffer.toBytes().buffer.toJS; 362 | } else if (frameObj is web.RTCEncodedAudioFrame) { 363 | frameObj.data = buffer.toBytes().buffer.toJS; 364 | } 365 | controller.enqueue(frameObj); 366 | } 367 | 368 | Future encodeFunction( 369 | JSObject frameObj, 370 | web.TransformStreamDefaultController controller, 371 | ) async { 372 | try { 373 | if (!enabled || 374 | // skip for encryption for empty dtx frames 375 | ((frameObj is web.RTCEncodedVideoFrame && 376 | frameObj.data.toDart.lengthInBytes == 0) || 377 | (frameObj is web.RTCEncodedAudioFrame && 378 | frameObj.data.toDart.lengthInBytes == 0))) { 379 | if (keyOptions.discardFrameWhenCryptorNotReady) { 380 | return; 381 | } 382 | controller.enqueue(frameObj); 383 | return; 384 | } 385 | 386 | var srcFrame = readFrameInfo(frameObj); 387 | 388 | logger.fine( 389 | 'encodeFunction: buffer ${srcFrame.buffer.length}, synchronizationSource ${srcFrame.ssrc} frameType ${srcFrame.frameType}'); 390 | 391 | var secretKey = keyHandler.getKeySet(currentKeyIndex)?.encryptionKey; 392 | var keyIndex = currentKeyIndex; 393 | 394 | if (secretKey == null) { 395 | if (lastError != 
CryptorError.kMissingKey) { 396 | lastError = CryptorError.kMissingKey; 397 | postMessage({ 398 | 'type': 'cryptorState', 399 | 'msgType': 'event', 400 | 'participantId': participantIdentity, 401 | 'trackId': trackId, 402 | 'kind': kind, 403 | 'state': 'missingKey', 404 | 'error': 'Missing key for track $trackId', 405 | }); 406 | } 407 | return; 408 | } 409 | 410 | var headerLength = 411 | kind == 'video' ? getUnencryptedBytes(frameObj, codec) : 1; 412 | 413 | var iv = makeIv( 414 | synchronizationSource: srcFrame.ssrc, timestamp: srcFrame.timestamp); 415 | 416 | var frameTrailer = ByteData(2); 417 | frameTrailer.setInt8(0, IV_LENGTH); 418 | frameTrailer.setInt8(1, keyIndex); 419 | 420 | var cipherText = await worker.crypto.subtle 421 | .encrypt( 422 | { 423 | 'name': 'AES-GCM', 424 | 'iv': iv, 425 | 'additionalData': srcFrame.buffer.sublist(0, headerLength), 426 | }.jsify() as web.AlgorithmIdentifier, 427 | secretKey, 428 | srcFrame.buffer.sublist(headerLength, srcFrame.buffer.length).toJS, 429 | ) 430 | .toDart as JSArrayBuffer; 431 | 432 | logger.finer( 433 | 'encodeFunction: encrypted buffer: ${srcFrame.buffer.length}, cipherText: ${cipherText.toDart.asUint8List().length}'); 434 | var finalBuffer = BytesBuilder(); 435 | 436 | finalBuffer 437 | .add(Uint8List.fromList(srcFrame.buffer.sublist(0, headerLength))); 438 | finalBuffer.add(cipherText.toDart.asUint8List()); 439 | finalBuffer.add(iv); 440 | finalBuffer.add(frameTrailer.buffer.asUint8List()); 441 | 442 | enqueueFrame(frameObj, controller, finalBuffer); 443 | 444 | if (lastError != CryptorError.kOk) { 445 | lastError = CryptorError.kOk; 446 | postMessage({ 447 | 'type': 'cryptorState', 448 | 'msgType': 'event', 449 | 'participantId': participantIdentity, 450 | 'trackId': trackId, 451 | 'kind': kind, 452 | 'state': 'ok', 453 | 'error': 'encryption ok' 454 | }); 455 | } 456 | 457 | logger.finer( 458 | 'encodeFunction[CryptorError.kOk]: frame enqueued kind $kind,codec $codec headerLength: $headerLength, 
timestamp: ${srcFrame.timestamp}, ssrc: ${srcFrame.ssrc}, data length: ${srcFrame.buffer.length}, encrypted length: ${finalBuffer.toBytes().length}, iv $iv'); 459 | } catch (e) { 460 | logger.warning('encodeFunction encrypt: e ${e.toString()}'); 461 | if (lastError != CryptorError.kEncryptError) { 462 | lastError = CryptorError.kEncryptError; 463 | postMessage({ 464 | 'type': 'cryptorState', 465 | 'msgType': 'event', 466 | 'participantId': participantIdentity, 467 | 'trackId': trackId, 468 | 'kind': kind, 469 | 'state': 'encryptError', 470 | 'error': e.toString() 471 | }); 472 | } 473 | } 474 | } 475 | 476 | Future decodeFunction( 477 | JSObject frameObj, 478 | web.TransformStreamDefaultController controller, 479 | ) async { 480 | var srcFrame = readFrameInfo(frameObj); 481 | var ratchetCount = 0; 482 | 483 | logger.fine('decodeFunction: frame lenght ${srcFrame.buffer.length}'); 484 | 485 | ByteBuffer? decrypted; 486 | KeySet? initialKeySet; 487 | var initialKeyIndex = currentKeyIndex; 488 | 489 | if (!enabled || 490 | // skip for encryption for empty dtx frames 491 | srcFrame.buffer.isEmpty) { 492 | sifGuard.recordUserFrame(); 493 | if (keyOptions.discardFrameWhenCryptorNotReady) return; 494 | logger.fine('enqueing empty frame'); 495 | controller.enqueue(frameObj); 496 | logger.finer('enqueing silent frame'); 497 | return; 498 | } 499 | 500 | if (keyOptions.uncryptedMagicBytes != null) { 501 | var magicBytes = keyOptions.uncryptedMagicBytes!; 502 | if (srcFrame.buffer.length > magicBytes.length + 1) { 503 | var magicBytesBuffer = srcFrame.buffer.sublist( 504 | srcFrame.buffer.length - magicBytes.length - 1, 505 | srcFrame.buffer.length - 1); 506 | logger.finer( 507 | 'magicBytesBuffer $magicBytesBuffer, magicBytes $magicBytes'); 508 | if (magicBytesBuffer.toString() == magicBytes.toString()) { 509 | sifGuard.recordSif(); 510 | if (sifGuard.isSifAllowed()) { 511 | var frameType = 512 | srcFrame.buffer.sublist(srcFrame.buffer.length - 1)[0]; 513 | logger 514 | 
.finer('ecodeFunction: skip uncrypted frame, type $frameType'); 515 | var finalBuffer = BytesBuilder(); 516 | finalBuffer.add(Uint8List.fromList(srcFrame.buffer 517 | .sublist(0, srcFrame.buffer.length - (magicBytes.length + 1)))); 518 | enqueueFrame(frameObj, controller, finalBuffer); 519 | logger.fine('ecodeFunction: enqueing silent frame'); 520 | controller.enqueue(frameObj); 521 | } else { 522 | logger.finer('ecodeFunction: SIF limit reached, dropping frame'); 523 | } 524 | logger.finer('ecodeFunction: enqueing silent frame'); 525 | controller.enqueue(frameObj); 526 | return; 527 | } else { 528 | sifGuard.recordUserFrame(); 529 | } 530 | } 531 | } 532 | 533 | try { 534 | var headerLength = 535 | kind == 'video' ? getUnencryptedBytes(frameObj, codec) : 1; 536 | 537 | var frameTrailer = srcFrame.buffer.sublist(srcFrame.buffer.length - 2); 538 | var ivLength = frameTrailer[0]; 539 | var keyIndex = frameTrailer[1]; 540 | var iv = srcFrame.buffer.sublist( 541 | srcFrame.buffer.length - ivLength - 2, srcFrame.buffer.length - 2); 542 | 543 | initialKeySet = keyHandler.getKeySet(keyIndex); 544 | initialKeyIndex = keyIndex; 545 | 546 | logger.finer( 547 | 'decodeFunction: start decrypting frame headerLength $headerLength ${srcFrame.buffer.length} frameTrailer $frameTrailer, ivLength $ivLength, keyIndex $keyIndex, iv $iv'); 548 | 549 | /// missingKey flow: 550 | /// tries to decrypt once, fails, tries to ratchet once and decrypt again, 551 | /// fails (does not save ratcheted key), bumps _decryptionFailureCount, 552 | /// if higher than failuretolerance hasValidKey is set to false, on next 553 | /// frame it fires a missingkey 554 | /// to throw missingkeys faster lower your failureTolerance 555 | if (initialKeySet == null || !keyHandler.hasValidKey) { 556 | if (lastError != CryptorError.kMissingKey) { 557 | lastError = CryptorError.kMissingKey; 558 | postMessage({ 559 | 'type': 'cryptorState', 560 | 'msgType': 'event', 561 | 'participantId': participantIdentity, 562 | 
'trackId': trackId, 563 | 'kind': kind, 564 | 'state': 'missingKey', 565 | 'error': 'Missing key for track $trackId' 566 | }); 567 | } 568 | // controller.enqueue(frame); 569 | return; 570 | } 571 | var currentkeySet = initialKeySet; 572 | 573 | Future decryptFrameInternal() async { 574 | decrypted = ((await worker.crypto.subtle 575 | .decrypt( 576 | { 577 | 'name': 'AES-GCM', 578 | 'iv': iv, 579 | 'additionalData': srcFrame.buffer.sublist(0, headerLength), 580 | }.jsify() as web.AlgorithmIdentifier, 581 | currentkeySet.encryptionKey, 582 | srcFrame.buffer 583 | .sublist( 584 | headerLength, srcFrame.buffer.length - ivLength - 2) 585 | .toJS, 586 | ) 587 | .toDart) as JSArrayBuffer) 588 | .toDart; 589 | logger.finer( 590 | 'decodeFunction::decryptFrameInternal: decrypted: ${decrypted!.asUint8List().length}'); 591 | 592 | if (decrypted == null) { 593 | throw Exception('[decryptFrameInternal] could not decrypt'); 594 | } 595 | logger.finer( 596 | 'decodeFunction::decryptFrameInternal: decrypted: ${decrypted!.asUint8List().length}'); 597 | if (currentkeySet != initialKeySet) { 598 | logger.fine( 599 | 'decodeFunction::decryptFrameInternal: ratchetKey: decryption ok, newState: kKeyRatcheted'); 600 | await keyHandler.setKeySetFromMaterial( 601 | currentkeySet, initialKeyIndex); 602 | } 603 | 604 | if (lastError != CryptorError.kOk && 605 | lastError != CryptorError.kKeyRatcheted && 606 | ratchetCount > 0) { 607 | logger.finer( 608 | 'decodeFunction::decryptFrameInternal: KeyRatcheted: ssrc ${srcFrame.ssrc} timestamp ${srcFrame.timestamp} ratchetCount $ratchetCount participantId: $participantIdentity'); 609 | logger.finer( 610 | 'decodeFunction::decryptFrameInternal: ratchetKey: lastError != CryptorError.kKeyRatcheted, reset state to kKeyRatcheted'); 611 | 612 | lastError = CryptorError.kKeyRatcheted; 613 | postMessage({ 614 | 'type': 'cryptorState', 615 | 'msgType': 'event', 616 | 'participantId': participantIdentity, 617 | 'trackId': trackId, 618 | 'kind': kind, 619 | 
'state': 'keyRatcheted', 620 | 'error': 'Key ratcheted ok' 621 | }); 622 | } 623 | } 624 | 625 | Future ratchedKeyInternal() async { 626 | if (ratchetCount >= keyOptions.ratchetWindowSize || 627 | keyOptions.ratchetWindowSize <= 0) { 628 | throw Exception('[ratchedKeyInternal] cannot ratchet anymore'); 629 | } 630 | 631 | var newKeyBuffer = await keyHandler.ratchet( 632 | currentkeySet.material, keyOptions.ratchetSalt); 633 | var newMaterial = await keyHandler.ratchetMaterial( 634 | currentkeySet.material, newKeyBuffer.buffer); 635 | currentkeySet = 636 | await keyHandler.deriveKeys(newMaterial, keyOptions.ratchetSalt); 637 | ratchetCount++; 638 | await decryptFrameInternal(); 639 | } 640 | 641 | try { 642 | /// gets frame -> tries to decrypt -> tries to ratchet (does this failureTolerance 643 | /// times, then says missing key) 644 | /// we only save the new key after ratcheting if we were able to decrypt something 645 | await decryptFrameInternal(); 646 | } catch (e) { 647 | lastError = CryptorError.kInternalError; 648 | logger.finer('decodeFunction: kInternalError catch $e'); 649 | await ratchedKeyInternal(); 650 | } 651 | 652 | if (decrypted == null) { 653 | throw Exception( 654 | '[decodeFunction] decryption failed even after ratchting'); 655 | } 656 | 657 | // we can now be sure that decryption was a success 658 | keyHandler.decryptionSuccess(); 659 | 660 | logger.finer( 661 | 'decodeFunction: decryption success, buffer length ${srcFrame.buffer.length}, decrypted: ${decrypted!.asUint8List().length}'); 662 | 663 | var finalBuffer = BytesBuilder(); 664 | 665 | finalBuffer 666 | .add(Uint8List.fromList(srcFrame.buffer.sublist(0, headerLength))); 667 | finalBuffer.add(decrypted!.asUint8List()); 668 | enqueueFrame(frameObj, controller, finalBuffer); 669 | 670 | if (lastError != CryptorError.kOk) { 671 | lastError = CryptorError.kOk; 672 | postMessage({ 673 | 'type': 'cryptorState', 674 | 'msgType': 'event', 675 | 'participantId': participantIdentity, 676 | 
'trackId': trackId, 677 | 'kind': kind, 678 | 'state': 'ok', 679 | 'error': 'decryption ok' 680 | }); 681 | } 682 | 683 | logger.fine( 684 | 'decodeFunction[CryptorError.kOk]: decryption success kind $kind, headerLength: $headerLength, timestamp: ${srcFrame.timestamp}, ssrc: ${srcFrame.ssrc}, data length: ${srcFrame.buffer.length}, decrypted length: ${finalBuffer.toBytes().length}, keyindex $keyIndex iv $iv'); 685 | } catch (e) { 686 | if (lastError != CryptorError.kDecryptError) { 687 | lastError = CryptorError.kDecryptError; 688 | postMessage({ 689 | 'type': 'cryptorState', 690 | 'msgType': 'event', 691 | 'participantId': participantIdentity, 692 | 'trackId': trackId, 693 | 'kind': kind, 694 | 'state': 'decryptError', 695 | 'error': e.toString() 696 | }); 697 | } 698 | 699 | keyHandler.decryptionFailure(); 700 | } 701 | } 702 | } 703 | -------------------------------------------------------------------------------- /lib/src/e2ee.worker/e2ee.keyhandler.dart: -------------------------------------------------------------------------------- 1 | import 'dart:async'; 2 | import 'dart:js_interop'; 3 | import 'dart:js_interop_unsafe'; 4 | import 'dart:typed_data'; 5 | 6 | import 'package:web/web.dart' as web; 7 | 8 | import 'e2ee.logger.dart'; 9 | import 'e2ee.utils.dart'; 10 | 11 | const KEYRING_SIZE = 16; 12 | 13 | class KeyOptions { 14 | KeyOptions({ 15 | required this.sharedKey, 16 | required this.ratchetSalt, 17 | required this.ratchetWindowSize, 18 | this.uncryptedMagicBytes, 19 | this.failureTolerance = -1, 20 | this.keyRingSze = KEYRING_SIZE, 21 | this.discardFrameWhenCryptorNotReady = false, 22 | }); 23 | bool sharedKey; 24 | Uint8List ratchetSalt; 25 | int ratchetWindowSize = 0; 26 | int failureTolerance; 27 | Uint8List? 
/// Owns the [ParticipantKeyHandler]s for one key-provider instance.
///
/// Depending on [KeyOptions.sharedKey] it either hands every caller the same
/// shared handler, or lazily creates one handler per participant identity.
class KeyProvider {
  KeyProvider(this.worker, this.id, this.keyProviderOptions);

  final web.DedicatedWorkerGlobalScope worker;
  final String id;
  final KeyOptions keyProviderOptions;

  // Per-participant handlers, keyed by participant identity.
  // NOTE(review): generic type arguments were stripped by extraction; this was
  // presumably Map<String, ParticipantKeyHandler> — confirm against upstream.
  var participantKeys = {};

  // Lazily created handler used when keyProviderOptions.sharedKey is true.
  ParticipantKeyHandler? sharedKeyHandler;

  // Last key passed to [setSharedKey]; seeded into newly created handlers.
  var sharedKey = Uint8List(0);

  /// Returns the key handler for [participantIdentity], creating it on first
  /// use. With a shared-key configuration the single shared handler is
  /// returned instead.
  ParticipantKeyHandler getParticipantKeyHandler(String participantIdentity) {
    if (keyProviderOptions.sharedKey) {
      return getSharedKeyHandler();
    }
    return participantKeys.putIfAbsent(participantIdentity, () {
      final handler = ParticipantKeyHandler(
        worker: worker,
        participantIdentity: participantIdentity,
        keyOptions: keyProviderOptions,
      );
      // Seed the new handler with the shared key if one was set earlier.
      if (sharedKey.isNotEmpty) {
        handler.setKey(sharedKey);
      }
      //keys.on(KeyHandlerEvent.KeyRatcheted, emitRatchetedKeys);
      return handler;
    });
  }

  /// Returns (creating on first use) the handler shared by all participants.
  ParticipantKeyHandler getSharedKeyHandler() {
    sharedKeyHandler ??= ParticipantKeyHandler(
      worker: worker,
      participantIdentity: 'shared-key',
      keyOptions: keyProviderOptions,
    );
    return sharedKeyHandler!;
  }

  /// Stores [key] as the shared key and installs it on the shared handler.
  void setSharedKey(Uint8List key, {int keyIndex = 0}) {
    logger.info('setting shared key');
    sharedKey = key;
    getSharedKeyHandler().setKey(key, keyIndex: keyIndex);
  }

  /// Updates the magic-byte trailer that marks server-injected frames.
  void setSifTrailer(Uint8List sifTrailer) {
    keyProviderOptions.uncryptedMagicBytes = sifTrailer;
  }
}

/// A derived key pair: the HKDF/PBKDF2 [material] it was derived from and the
/// AES-GCM [encryptionKey] used for the actual frame encryption.
class KeySet {
  KeySet(this.material, this.encryptionKey);
  web.CryptoKey material;
  web.CryptoKey encryptionKey;
}
/// Manages the ring of encryption keys for a single participant (or for the
/// shared key), including key import, derivation, and ratcheting.
class ParticipantKeyHandler {
  ParticipantKeyHandler({
    required this.worker,
    required this.keyOptions,
    required this.participantIdentity,
  }) {
    // A key index is transmitted in a single byte, so the ring can hold at
    // most 255 entries.
    if (keyOptions.keyRingSze <= 0 || keyOptions.keyRingSze > 255) {
      throw Exception('Invalid key ring size');
    }
    cryptoKeyRing = List.filled(keyOptions.keyRingSze, null);
  }

  // Index of the most recently set key in [cryptoKeyRing].
  int currentKeyIndex = 0;

  // Ring buffer of derived key sets, indexed by key index.
  // NOTE(review): element type reconstructed as KeySet? (generics were lost in
  // extraction) — confirm against upstream.
  late List<KeySet?> cryptoKeyRing;

  bool _hasValidKey = false;

  /// Whether the current key has not been marked invalid by repeated
  /// decryption failures.
  bool get hasValidKey => _hasValidKey;

  final KeyOptions keyOptions;

  final web.DedicatedWorkerGlobalScope worker;

  final String participantIdentity;

  // Consecutive decryption failures since the last success.
  int _decryptionFailureCount = 0;

  /// Records a decryption failure; after more than
  /// [KeyOptions.failureTolerance] consecutive failures the key is marked
  /// invalid. A negative tolerance disables this tracking entirely.
  void decryptionFailure() {
    if (keyOptions.failureTolerance < 0) {
      return;
    }
    _decryptionFailureCount += 1;

    if (_decryptionFailureCount > keyOptions.failureTolerance) {
      logger.warning('key for $participantIdentity is being marked as invalid');
      _hasValidKey = false;
    }
  }

  /// Records a successful decryption, clearing any failure state.
  void decryptionSuccess() {
    resetKeyStatus();
  }

  /// Call this after user initiated ratchet or a new key has been set in order
  /// to make sure to mark potentially invalid keys as valid again
  void resetKeyStatus() {
    _decryptionFailureCount = 0;
    _hasValidKey = true;
  }

  /// Exports the raw key material at [keyIndex] (or the current index), or
  /// null when no key is set or export fails.
  Future<Uint8List?> exportKey(int? keyIndex) async {
    var currentMaterial = getKeySet(keyIndex)?.material;
    if (currentMaterial == null) {
      return null;
    }
    try {
      var key = await worker.crypto.subtle
          .exportKey('raw', currentMaterial)
          .toDart as JSArrayBuffer;
      return key.toDart.asUint8List();
    } catch (e) {
      logger.warning('exportKey: $e');
      return null;
    }
  }

  /// Ratchets the key at [keyIndex] (or the current index) forward once and
  /// installs the result. Returns the new raw key, or null when there is no
  /// key material at that index.
  Future<Uint8List?> ratchetKey(int? keyIndex) async {
    var currentMaterial = getKeySet(keyIndex)?.material;
    if (currentMaterial == null) {
      return null;
    }
    var newKey = await ratchet(currentMaterial, keyOptions.ratchetSalt);
    var newMaterial = await ratchetMaterial(currentMaterial, newKey.buffer);
    var newKeySet = await deriveKeys(newMaterial, keyOptions.ratchetSalt);
    await setKeySetFromMaterial(newKeySet, keyIndex ?? currentKeyIndex);
    return newKey;
  }

  /// Re-imports [newKeyBuffer] as key material using the same algorithm as
  /// [currentMaterial], for use in the next derivation step.
  Future ratchetMaterial(
      web.CryptoKey currentMaterial, ByteBuffer newKeyBuffer) async {
    var newMaterial = await worker.crypto.subtle
        .importKey(
          'raw',
          newKeyBuffer.toJS,
          currentMaterial.algorithm.getProperty('name'.toJS),
          false,
          ['deriveBits', 'deriveKey'].jsify() as JSArray,
        )
        .toDart;
    return newMaterial;
  }

  /// Returns the key set at [keyIndex] (or the current index), or null when
  /// the index is empty or out of range.
  KeySet? getKeySet(int? keyIndex) {
    final index = keyIndex ?? currentKeyIndex;
    // FIX: guard the ring access. Previously an out-of-range keyIndex threw a
    // RangeError, while callers (exportKey/ratchetKey) already expect a null
    // result for "no key at this index".
    if (index < 0 || index >= cryptoKeyRing.length) {
      logger.warning('getKeySet: keyIndex $index out of range '
          '(ring size ${cryptoKeyRing.length})');
      return null;
    }
    return cryptoKeyRing[index];
  }

  /// Imports [key] as PBKDF2 material, derives the AES-GCM key set from it,
  /// and installs it at [keyIndex].
  Future<void> setKey(Uint8List key, {int keyIndex = 0}) async {
    var keyMaterial = await worker.crypto.subtle
        .importKey('raw', key.toJS, {'name': 'PBKDF2'.toJS}.jsify() as JSAny,
            false, ['deriveBits', 'deriveKey'].jsify() as JSArray)
        .toDart;

    var keySet = await deriveKeys(
      keyMaterial,
      keyOptions.ratchetSalt,
    );
    await setKeySetFromMaterial(keySet, keyIndex);
    resetKeyStatus();
  }

  /// Stores [keySet] in the ring. A non-negative [keyIndex] also becomes the
  /// current index (wrapped to the ring size); a negative one keeps the
  /// current index and overwrites its slot.
  Future<void> setKeySetFromMaterial(KeySet keySet, int keyIndex) async {
    logger.config('setKeySetFromMaterial: set new key, index: $keyIndex');
    if (keyIndex >= 0) {
      currentKeyIndex = keyIndex % cryptoKeyRing.length;
    }
    cryptoKeyRing[currentKeyIndex] = keySet;
  }

  /// Derives a set of keys from the master key.
  /// See https://tools.ietf.org/html/draft-omara-sframe-00#section-4.3.1
  Future<KeySet> deriveKeys(web.CryptoKey material, Uint8List salt) async {
    var algorithmName = material.algorithm.getProperty('name'.toJS) as JSString;
    var algorithmOptions = getAlgoOptions(algorithmName.toDart, salt);
    // https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/deriveKey#HKDF
    // https://developer.mozilla.org/en-US/docs/Web/API/HkdfParams
    var encryptionKey = await worker.crypto.subtle
        .deriveKey(
          algorithmOptions.jsify() as web.AlgorithmIdentifier,
          material,
          {'name': 'AES-GCM', 'length': 128}.jsify() as web.AlgorithmIdentifier,
          false,
          ['encrypt', 'decrypt'].jsify() as JSArray,
        )
        .toDart;

    return KeySet(material, encryptionKey as web.CryptoKey);
  }

  /// Ratchets a key. See
  /// https://tools.ietf.org/html/draft-omara-sframe-00#section-4.3.5.1
  Future<Uint8List> ratchet(web.CryptoKey material, Uint8List salt) async {
    var algorithmOptions = getAlgoOptions('PBKDF2', salt);

    // https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/deriveBits
    var newKey = await worker.crypto.subtle
        .deriveBits(
            algorithmOptions.jsify() as web.AlgorithmIdentifier, material, 256)
        .toDart;
    return newKey.toDart.asUint8List();
  }
}
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import 'package:logging/logging.dart';

/// Log levels exposed to the embedding application, mirroring
/// package:logging's [Level] constants.
enum LoggerLevel {
  kALL,
  kFINEST,
  kFINER,
  kFINE,
  kCONFIG,
  kINFO,
  kWARNING,
  kSEVERE,
  kSHOUT,
  kOFF
}

/// Logger used throughout the E2EE worker.
final logger = Logger('E2EE.Worker');

// Maps the public enum onto package:logging levels.
const _levelForLoggerLevel = {
  LoggerLevel.kALL: Level.ALL,
  LoggerLevel.kFINEST: Level.FINEST,
  LoggerLevel.kFINER: Level.FINER,
  LoggerLevel.kFINE: Level.FINE,
  LoggerLevel.kCONFIG: Level.CONFIG,
  LoggerLevel.kINFO: Level.INFO,
  LoggerLevel.kWARNING: Level.WARNING,
  LoggerLevel.kSEVERE: Level.SEVERE,
  LoggerLevel.kSHOUT: Level.SHOUT,
  LoggerLevel.kOFF: Level.OFF,
};

/// disable logging
void disableLogging() {
  // FIX: package:logging throws an UnsupportedError when setting `level` on a
  // non-root logger (such as `logger`) unless hierarchical logging is enabled.
  hierarchicalLoggingEnabled = true;
  logger.level = Level.OFF;
}

/// set the logging level
void setLoggingLevel(LoggerLevel level) {
  // FIX: see disableLogging — required before assigning a non-root level.
  hierarchicalLoggingEnabled = true;
  logger.level = _levelForLoggerLevel[level]!;
}

/// get the current logging level
Level getLoggingLevel() {
  return logger.level;
}

/// set a custom logging handler
///
/// NOTE(review): despite the name, this ADDS a listener on the broadcast
/// record stream; calling it repeatedly registers multiple handlers and none
/// are ever removed — confirm whether replace semantics were intended.
void setLoggingHandler(Function(LogRecord) handler) {
  logger.onRecord.listen(handler);
}
const MAX_SIF_COUNT = 100;
const MAX_SIF_DURATION = 2000;

/// Rate-limits acceptance of server-injected frames (SIFs): a run of SIFs is
/// only allowed while it stays below [MAX_SIF_COUNT] frames and
/// [MAX_SIF_DURATION] milliseconds, and the run resets once enough ordinary
/// user frames arrive.
class SifGuard {
  // SIFs seen in the current uninterrupted run.
  int consecutiveSifCount = 0;

  // Wall-clock start of the current SIF run (null when no run is active).
  int? sifSequenceStartedAt;

  // Wall-clock time of the most recent SIF.
  int lastSifReceivedAt = 0;

  // User frames observed since the current SIF run began.
  int userFramesSinceSif = 0;

  // Current wall-clock time in milliseconds.
  int get _nowMs => DateTime.now().millisecondsSinceEpoch;

  /// Records one server-injected frame, starting a run if none is active.
  void recordSif() {
    consecutiveSifCount += 1;
    sifSequenceStartedAt ??= _nowMs;
    lastSifReceivedAt = _nowMs;
  }

  /// Records one ordinary user frame; ends the SIF run when user frames
  /// outnumber SIFs or the run has gone stale.
  void recordUserFrame() {
    if (sifSequenceStartedAt == null) {
      return;
    }
    userFramesSinceSif += 1;

    // reset if we received more user frames than SIFs
    final outnumbered = userFramesSinceSif > consecutiveSifCount;
    // also reset if we got a new user frame and the latest SIF frame hasn't
    // been updated in a while
    final stale = _nowMs - lastSifReceivedAt > MAX_SIF_DURATION;
    if (outnumbered || stale) {
      reset();
    }
  }

  /// Whether the next SIF is still within the allowed count/duration window.
  bool isSifAllowed() {
    if (consecutiveSifCount >= MAX_SIF_COUNT) {
      return false;
    }
    final startedAt = sifSequenceStartedAt;
    return startedAt == null || _nowMs - startedAt < MAX_SIF_DURATION;
  }

  /// Clears all run state.
  void reset() {
    userFramesSinceSif = 0;
    consecutiveSifCount = 0;
    sifSequenceStartedAt = null;
  }
}

import 'dart:js_interop';
import 'dart:js_interop_unsafe';
import 'dart:typed_data';

import 'package:js/js_util.dart';
import 'package:web/web.dart' as web;

/// True when the browser supports either mechanism for E2EE frame transforms.
bool isE2EESupported() {
  return isInsertableStreamSupported() || isScriptTransformSupported();
}

/// True when the Safari-style RTCRtpScriptTransform API is available.
bool isScriptTransformSupported() {
  return web.window.hasProperty('RTCRtpScriptTransform'.toJS).toDart;
}

/// True when the Chromium-style createEncodedStreams API is available.
bool isInsertableStreamSupported() {
  if (!web.window.hasProperty('RTCRtpSender'.toJS).toDart) {
    return false;
  }
  return web.window
      .getProperty('RTCRtpSender'.toJS)
      .hasProperty('createEncodedStreams'.toJS)
      .toDart;
}

/// Imports [keyBytes] as non-extractable PBKDF2 key material.
///
/// NOTE(review): the [algorithm] and [usage] parameters are accepted but
/// ignored — 'PBKDF2' and deriveBits/deriveKey are hard-coded. Confirm against
/// callers before relying on other values.
Future createKeyMaterialFromString(
    Uint8List keyBytes, String algorithm, String usage) {
  // https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/importKey
  return promiseToFuture(web.window.crypto.subtle.importKey(
    'raw',
    keyBytes.toJS,
    {'name': 'PBKDF2'}.jsify() as web.AlgorithmIdentifier,
    false,
    ['deriveBits', 'deriveKey'].jsify() as JSArray,
  ));
}

/// Builds the SubtleCrypto parameter dictionary for [algorithmName]
/// ('HKDF' or 'PBKDF2') with the given [salt].
Map getAlgoOptions(String algorithmName, Uint8List salt) {
  if (algorithmName == 'HKDF') {
    return {
      'name': 'HKDF',
      'salt': salt,
      'hash': 'SHA-256',
      'info': Uint8List(128),
    };
  }
  if (algorithmName == 'PBKDF2') {
    return {
      'name': 'PBKDF2',
      'salt': salt,
      'hash': 'SHA-256',
      'iterations': 100000,
    };
  }
  throw Exception('algorithm $algorithmName is currently unsupported');
}
import 'dart:convert';
import 'dart:js_interop';
import 'dart:js_interop_unsafe';
import 'dart:typed_data';

import 'package:collection/collection.dart';
import 'package:logging/logging.dart';
import 'package:web/web.dart' as web;

import 'e2ee.cryptor.dart';
import 'e2ee.keyhandler.dart';
import 'e2ee.logger.dart';

/// The dedicated-worker global scope this script runs inside.
@JS()
external web.DedicatedWorkerGlobalScope get self;

// One FrameCryptor per track.
// NOTE(review): generic type arguments were lost in extraction and have been
// reconstructed from usage — confirm against upstream.
var participantCryptors = <FrameCryptor>[];

// Active key providers, keyed by keyProviderId.
var keyProviders = <String, KeyProvider>{};

/// Returns the cryptor owning [trackId], creating one when none exists and
/// re-binding an existing one when the track changed participants.
FrameCryptor getTrackCryptor(
    String participantIdentity, String trackId, KeyProvider keyProvider) {
  var cryptor =
      participantCryptors.firstWhereOrNull((c) => c.trackId == trackId);
  if (cryptor == null) {
    logger.info(
        'creating new cryptor for $participantIdentity, trackId $trackId');

    cryptor = FrameCryptor(
      worker: self,
      participantIdentity: participantIdentity,
      trackId: trackId,
      keyHandler: keyProvider.getParticipantKeyHandler(participantIdentity),
    );
    //setupCryptorErrorEvents(cryptor);
    participantCryptors.add(cryptor);
  } else if (participantIdentity != cryptor.participantIdentity) {
    // assign new participant id to track cryptor and pass in correct key handler
    cryptor.setParticipant(participantIdentity,
        keyProvider.getParticipantKeyHandler(participantIdentity));
  }
  // FIX: removed dead code — an empty `if (keyProvider.keyProviderOptions.sharedKey) {}`
  return cryptor;
}

/// Detaches the participant from the cryptor owning [trackId], if any.
void unsetCryptorParticipant(String trackId) {
  participantCryptors
      .firstWhereOrNull((c) => c.trackId == trackId)
      ?.unsetParticipant();
}

void main() async {
  // configure logs for debugging
  Logger.root.level = Level.WARNING;
  Logger.root.onRecord.listen((record) {
    print('[${record.loggerName}] ${record.level.name}: ${record.message}');
  });

  logger.info('Worker created');

  // Safari-style path: the browser dispatches RTCTransformEvent directly.
  if (self.hasProperty('RTCTransformEvent'.toJS).toDart) {
    logger.info('setup RTCTransformEvent event handler');
    self.onrtctransform = (web.RTCTransformEvent event) {
      logger.info('Got onrtctransform event');
      var transformer = event.transformer;

      transformer.setProperty('handled'.toJS, true.toJS);

      var options = transformer.options as JSObject;
      var kind = options.getProperty('kind'.toJS) as JSString;
      var participantId = options.getProperty('participantId'.toJS) as JSString;
      var trackId = options.getProperty('trackId'.toJS) as JSString;
      var codec = options.getProperty('codec'.toJS) as JSString?;
      var msgType = options.getProperty('msgType'.toJS) as JSString;
      var keyProviderId = options.getProperty('keyProviderId'.toJS) as JSString;

      var keyProvider = keyProviders[keyProviderId.toDart];

      if (keyProvider == null) {
        logger.warning('KeyProvider not found for $keyProviderId');
        return;
      }

      var cryptor =
          getTrackCryptor(participantId.toDart, trackId.toDart, keyProvider);

      cryptor.setupTransform(
          operation: msgType.toDart,
          readable: transformer.readable,
          writable: transformer.writable,
          trackId: trackId.toDart,
          kind: kind.toDart,
          codec: codec?.toDart);
    }.toJS;
  }

  // Chromium-style path: the main thread posts commands via postMessage.
  var handleMessage = (web.MessageEvent e) async {
    var msg = e.data.dartify() as Map;
    var msgType = msg['msgType'];
    var msgId = msg['msgId'] as String?;
    logger.config('Got message $msgType, msgId $msgId');
    switch (msgType) {
      case 'keyProviderInit':
        {
          var options = msg['keyOptions'];
          var keyProviderId = msg['keyProviderId'] as String;
          var keyProviderOptions = KeyOptions(
              sharedKey: options['sharedKey'],
              ratchetSalt: Uint8List.fromList(
                  base64Decode(options['ratchetSalt'] as String)),
              ratchetWindowSize: options['ratchetWindowSize'],
              failureTolerance: options['failureTolerance'] ?? -1,
              uncryptedMagicBytes: options['uncryptedMagicBytes'] != null
                  ? Uint8List.fromList(
                      base64Decode(options['uncryptedMagicBytes'] as String))
                  : null,
              keyRingSze: options['keyRingSize'] ?? KEYRING_SIZE,
              discardFrameWhenCryptorNotReady:
                  options['discardFrameWhenCryptorNotReady'] ?? false);
          logger.config(
              'Init with keyProviderOptions:\n ${keyProviderOptions.toString()}');

          var keyProvider =
              KeyProvider(self, keyProviderId, keyProviderOptions);
          keyProviders[keyProviderId] = keyProvider;

          self.postMessage({
            'type': 'init',
            'msgId': msgId,
            'msgType': 'response',
          }.jsify());
          break;
        }
      case 'keyProviderDispose':
        {
          var keyProviderId = msg['keyProviderId'] as String;
          logger.config('Dispose keyProvider $keyProviderId');
          keyProviders.remove(keyProviderId);
          self.postMessage({
            'type': 'dispose',
            'msgId': msgId,
            'msgType': 'response',
          }.jsify());
        }
        break;
      case 'enable':
        {
          var enabled = msg['enabled'] as bool;
          var trackId = msg['trackId'] as String;

          var cryptors =
              participantCryptors.where((c) => c.trackId == trackId).toList();
          for (var cryptor in cryptors) {
            logger.config('Set enable $enabled for trackId ${cryptor.trackId}');
            cryptor.setEnabled(enabled);
          }
          self.postMessage({
            'type': 'cryptorEnabled',
            'enable': enabled,
            'msgId': msgId,
            'msgType': 'response',
          }.jsify());
        }
        break;
      case 'decode':
      case 'encode':
        {
          var kind = msg['kind'];
          var exist = msg['exist'] as bool;
          var participantId = msg['participantId'] as String;
          var trackId = msg['trackId'];
          var readable = msg['readableStream'] as web.ReadableStream;
          var writable = msg['writableStream'] as web.WritableStream;
          var keyProviderId = msg['keyProviderId'] as String;

          logger.config(
              'SetupTransform for kind $kind, trackId $trackId, participantId $participantId, ${readable.runtimeType} ${writable.runtimeType}}');

          var keyProvider = keyProviders[keyProviderId];
          if (keyProvider == null) {
            logger.warning('KeyProvider not found for $keyProviderId');
            self.postMessage({
              'type': 'cryptorSetup',
              'participantId': participantId,
              'trackId': trackId,
              'exist': exist,
              'operation': msgType,
              'error': 'KeyProvider not found',
              'msgId': msgId,
              'msgType': 'response',
            }.jsify());
            return;
          }

          var cryptor = getTrackCryptor(participantId, trackId, keyProvider);

          await cryptor.setupTransform(
            operation: msgType,
            readable: readable,
            writable: writable,
            trackId: trackId,
            kind: kind,
          );

          self.postMessage({
            'type': 'cryptorSetup',
            'participantId': participantId,
            'trackId': trackId,
            'exist': exist,
            'operation': msgType,
            'msgId': msgId,
            'msgType': 'response',
          }.jsify());
          cryptor.lastError = CryptorError.kNew;
        }
        break;
      case 'removeTransform':
        {
          var trackId = msg['trackId'] as String;
          logger.config('Removing trackId $trackId');
          unsetCryptorParticipant(trackId);
          self.postMessage({
            'type': 'cryptorRemoved',
            'trackId': trackId,
            'msgId': msgId,
            'msgType': 'response',
          }.jsify());
        }
        break;
      case 'setKey':
      case 'setSharedKey':
        {
          var key = Uint8List.fromList(base64Decode(msg['key'] as String));
          var keyIndex = msg['keyIndex'] as int;
          var keyProviderId = msg['keyProviderId'] as String;
          var keyProvider = keyProviders[keyProviderId];
          if (keyProvider == null) {
            logger.warning('KeyProvider not found for $keyProviderId');
            self.postMessage({
              'type': 'setKey',
              'error': 'KeyProvider not found',
              'msgId': msgId,
              'msgType': 'response',
            }.jsify());
            return;
          }
          var keyProviderOptions = keyProvider.keyProviderOptions;
          if (keyProviderOptions.sharedKey) {
            logger.config('Set SharedKey keyIndex $keyIndex');
            keyProvider.setSharedKey(key, keyIndex: keyIndex);
          } else {
            var participantId = msg['participantId'] as String;
            logger.config(
                'Set key for participant $participantId, keyIndex $keyIndex');
            await keyProvider
                .getParticipantKeyHandler(participantId)
                .setKey(key, keyIndex: keyIndex);
          }

          self.postMessage({
            'type': 'setKey',
            'participantId': msg['participantId'],
            'sharedKey': keyProviderOptions.sharedKey,
            'keyIndex': keyIndex,
            'msgId': msgId,
            'msgType': 'response',
          }.jsify());
        }
        break;
      case 'ratchetKey':
      case 'ratchetSharedKey':
        {
          var keyIndex = msg['keyIndex'];
          var participantId = msg['participantId'] as String;
          var keyProviderId = msg['keyProviderId'] as String;
          var keyProvider = keyProviders[keyProviderId];
          if (keyProvider == null) {
            logger.warning('KeyProvider not found for $keyProviderId');
            self.postMessage({
              // FIX: was 'setKey' (copy-paste) — the error response should
              // carry this handler's own type.
              'type': 'ratchetKey',
              'error': 'KeyProvider not found',
              'msgId': msgId,
              'msgType': 'response',
            }.jsify());
            return;
          }
          var keyProviderOptions = keyProvider.keyProviderOptions;
          Uint8List? newKey;
          if (keyProviderOptions.sharedKey) {
            logger.config('RatchetKey for SharedKey, keyIndex $keyIndex');
            newKey =
                await keyProvider.getSharedKeyHandler().ratchetKey(keyIndex);
          } else {
            logger.config(
                'RatchetKey for participant $participantId, keyIndex $keyIndex');
            newKey = await keyProvider
                .getParticipantKeyHandler(participantId)
                .ratchetKey(keyIndex);
          }

          self.postMessage({
            'type': 'ratchetKey',
            'sharedKey': keyProviderOptions.sharedKey,
            'participantId': participantId,
            'newKey': newKey != null ? base64Encode(newKey) : '',
            'keyIndex': keyIndex,
            'msgId': msgId,
            'msgType': 'response',
          }.jsify());
        }
        break;
      case 'setKeyIndex':
        {
          var keyIndex = msg['index'];
          var trackId = msg['trackId'] as String;
          logger.config('Setup key index for track $trackId');
          var cryptors =
              participantCryptors.where((c) => c.trackId == trackId).toList();
          for (var c in cryptors) {
            logger.config('Set keyIndex for trackId ${c.trackId}');
            c.setKeyIndex(keyIndex);
          }

          self.postMessage({
            'type': 'setKeyIndex',
            'keyIndex': keyIndex,
            'msgId': msgId,
            'msgType': 'response',
          }.jsify());
        }
        break;
      case 'exportKey':
      case 'exportSharedKey':
        {
          var keyIndex = msg['keyIndex'] as int;
          var participantId = msg['participantId'] as String;
          var keyProviderId = msg['keyProviderId'] as String;
          var keyProvider = keyProviders[keyProviderId];
          if (keyProvider == null) {
            logger.warning('KeyProvider not found for $keyProviderId');
            self.postMessage({
              // FIX: was 'setKey' (copy-paste).
              'type': 'exportKey',
              'error': 'KeyProvider not found',
              'msgId': msgId,
              'msgType': 'response',
            }.jsify());
            return;
          }
          var keyProviderOptions = keyProvider.keyProviderOptions;
          Uint8List? key;
          if (keyProviderOptions.sharedKey) {
            logger.config('Export SharedKey keyIndex $keyIndex');
            key = await keyProvider.getSharedKeyHandler().exportKey(keyIndex);
          } else {
            logger.config(
                'Export key for participant $participantId, keyIndex $keyIndex');
            key = await keyProvider
                .getParticipantKeyHandler(participantId)
                .exportKey(keyIndex);
          }
          self.postMessage({
            'type': 'exportKey',
            'participantId': participantId,
            'keyIndex': keyIndex,
            'exportedKey': key != null ? base64Encode(key) : '',
            'msgId': msgId,
            'msgType': 'response',
          }.jsify());
        }
        break;
      case 'setSifTrailer':
        {
          var sifTrailer =
              Uint8List.fromList(base64Decode(msg['sifTrailer'] as String));
          var keyProviderId = msg['keyProviderId'] as String;
          var keyProvider = keyProviders[keyProviderId];
          if (keyProvider == null) {
            logger.warning('KeyProvider not found for $keyProviderId');
            self.postMessage({
              // FIX: was 'setKey' (copy-paste).
              'type': 'setSifTrailer',
              'error': 'KeyProvider not found',
              'msgId': msgId,
              'msgType': 'response',
            }.jsify());
            return;
          }
          keyProvider.setSifTrailer(sifTrailer);
          logger.config('SetSifTrailer = $sifTrailer');
          for (var c in participantCryptors) {
            c.setSifTrailer(sifTrailer);
          }

          self.postMessage({
            'type': 'setSifTrailer',
            'msgId': msgId,
            'msgType': 'response',
          }.jsify());
        }
        break;
      case 'updateCodec':
        {
          var codec = msg['codec'] as String;
          var trackId = msg['trackId'] as String;
          logger.config('Update codec for trackId $trackId, codec $codec');
          var cryptor =
              participantCryptors.firstWhereOrNull((c) => c.trackId == trackId);
          cryptor?.updateCodec(codec);

          self.postMessage({
            'type': 'updateCodec',
            'msgId': msgId,
            'msgType': 'response',
          }.jsify());
        }
        break;
      case 'dispose':
        {
          var trackId = msg['trackId'] as String;
          logger.config('Dispose for trackId $trackId');
          var cryptor =
              participantCryptors.firstWhereOrNull((c) => c.trackId == trackId);
          if (cryptor != null) {
            cryptor.lastError = CryptorError.kDisposed;
            self.postMessage({
              'type': 'cryptorDispose',
              'participantId': cryptor.participantIdentity,
              'trackId': trackId,
              'msgId': msgId,
              'msgType': 'response',
            }.jsify());
          } else {
            self.postMessage({
              'type': 'cryptorDispose',
              'error': 'cryptor not found',
              'msgId': msgId,
              'msgType': 'response',
            }.jsify());
          }
        }
        break;
      default:
        logger.warning('Unknown message kind $msg');
    }
  };

  self.onmessage = (web.MessageEvent e) {
    handleMessage(e);
  }.toJS;
}
import 'dart:async';

import 'package:meta/meta.dart';
import 'package:synchronized/synchronized.dart' as sync;

/// Signature of the function returned by [EventsListenable.listen]; calling
/// it cancels that listener.
typedef CancelListenFunc = Function();

/// Gives a class its own event emitter plus a factory for listeners bound to
/// it.
///
/// NOTE(review): generic type parameters were lost in extraction and have been
/// reconstructed — confirm against the original source.
mixin EventsEmittable<T> {
  final events = EventsEmitter<T>();

  EventsListener<T> createListener({bool synchronized = false}) =>
      EventsListener<T>(events, synchronized: synchronized);
}

/// Emits events of type [T] to any number of listeners.
class EventsEmitter<T> extends EventsListenable<T> {
  EventsEmitter({
    bool listenSynchronized = false,
  }) : super(synchronized: listenSynchronized);

  // Broadcast controller: supports multiple concurrent subscriptions.
  final streamCtrl = StreamController<T>.broadcast(sync: false);

  @override
  EventsEmitter<T> get emitter => this;

  /// Delivers [event] to every current listener.
  @internal
  void emit(T event) {
    streamCtrl.add(event);
  }
}

/// Listen-only view over an [EventsEmitter].
class EventsListener<T> extends EventsListenable<T> {
  EventsListener(
    this.emitter, {
    bool synchronized = false,
  }) : super(synchronized: synchronized);

  @override
  final EventsEmitter<T> emitter;
}

/// Base class that tracks every subscription it creates so they can all be
/// cancelled together on dispose.
abstract class EventsListenable<T> {
  EventsListenable({
    required this.synchronized,
  });

  /// The emitter this object listens to.
  EventsEmitter<T> get emitter;

  /// When true, handlers run strictly one at a time (each waits for the
  /// previous one to complete).
  final bool synchronized;

  // Active subscriptions, kept so they can be cancelled later.
  final _listeners = <StreamSubscription<T>>[];
  final _syncLock = sync.Lock();

  List<StreamSubscription<T>> get listeners => _listeners;

  /// Cancels every subscription created through this object.
  Future<void> cancelAll() async {
    for (final listener in List.of(_listeners)) {
      await listener.cancel();
    }
  }

  /// Subscribes [onEvent] to all events; the returned function cancels the
  /// subscription and forgets it.
  CancelListenFunc listen(FutureOr<void> Function(T) onEvent) {
    var handler = onEvent;
    if (synchronized) {
      // Serialize: each invocation waits for the previous one to finish.
      handler = (event) async {
        await _syncLock.synchronized(() async {
          await onEvent(event);
        });
      };
    }

    final subscription = emitter.streamCtrl.stream.listen(handler);
    _listeners.add(subscription);

    // Cancel the stream subscription and drop it from the list in one call.
    void cancelFunc() async {
      await subscription.cancel();
      _listeners.remove(subscription);
    }

    return cancelFunc;
  }

  /// Convenience: listen to only events of type [E], optionally filtered.
  CancelListenFunc on<E>(
    FutureOr<void> Function(E) then, {
    bool Function(E)? filter,
  }) =>
      listen((event) async {
        if (event is! E) return;
        if (filter != null && !filter(event)) return;
        await then(event);
      });

  /// Convenience: like [on], but the listener cancels itself after the first
  /// matching event.
  void once<E>(
    FutureOr<void> Function(E) then, {
    bool Function(E)? filter,
  }) {
    CancelListenFunc? cancelFunc;
    cancelFunc = listen((event) async {
      if (event is! E) return;
      if (filter != null && !filter(event)) return;
      await then(event);
      // Self-cancel after the first delivered event.
      cancelFunc?.call();
    });
  }

  /// Waits for the next event of type [E] (optionally filtered), failing
  /// after [duration] unless [onTimeout] supplies a fallback. The temporary
  /// listener is always cleaned up.
  Future<E> waitFor<E>({
    required Duration duration,
    bool Function(E)? filter,
    FutureOr<E> Function()? onTimeout,
  }) async {
    final completer = Completer<E>();

    final cancelFunc = on<E>(
      (event) {
        if (!completer.isCompleted) {
          completer.complete(event);
        }
      },
      filter: filter,
    );

    try {
      return await completer.future.timeout(
        duration,
        onTimeout: onTimeout ?? () => throw Exception('waitFor timed out'),
      );
      // do not catch exceptions and pass it up
    } finally {
      // always clean-up listener
      await cancelFunc.call();
    }
  }
}
constraints 37 | : { 38 | 'mandatory': {}, 39 | 'optional': [ 40 | {'DtlsSrtpKeyAgreement': true}, 41 | ], 42 | }; 43 | final jsRtcPc = web.RTCPeerConnection( 44 | {...constr, ...configuration}.jsify() as web.RTCConfiguration); 45 | final _peerConnectionId = base64Encode(jsRtcPc.toString().codeUnits); 46 | return RTCPeerConnectionWeb(_peerConnectionId, jsRtcPc); 47 | } 48 | 49 | @override 50 | Future createLocalMediaStream(String label) async { 51 | final jsMs = web.MediaStream(); 52 | return MediaStreamWeb(jsMs, 'local'); 53 | } 54 | 55 | @override 56 | MediaRecorder mediaRecorder() { 57 | return MediaRecorderWeb(); 58 | } 59 | 60 | @override 61 | VideoRenderer videoRenderer() { 62 | throw UnimplementedError(); 63 | } 64 | 65 | @override 66 | Navigator get navigator => NavigatorWeb(); 67 | 68 | @override 69 | FrameCryptorFactory get frameCryptorFactory => 70 | FrameCryptorFactoryImpl.instance; 71 | 72 | @override 73 | Future getRtpReceiverCapabilities(String kind) async { 74 | var caps = RTCRtpReceiverJs.getCapabilities(kind); 75 | return RTCRtpCapabilitiesWeb.fromJsObject(caps); 76 | } 77 | 78 | @override 79 | Future getRtpSenderCapabilities(String kind) async { 80 | var caps = RTCRtpSenderJs.getCapabilities(kind); 81 | return RTCRtpCapabilitiesWeb.fromJsObject(caps); 82 | } 83 | } 84 | 85 | Future createPeerConnection( 86 | Map configuration, 87 | [Map? 
constraints]) { 88 | return RTCFactoryWeb.instance 89 | .createPeerConnection(configuration, constraints); 90 | } 91 | 92 | Future createLocalMediaStream(String label) { 93 | return RTCFactoryWeb.instance.createLocalMediaStream(label); 94 | } 95 | 96 | Future getRtpReceiverCapabilities(String kind) async { 97 | return RTCFactoryWeb.instance.getRtpReceiverCapabilities(kind); 98 | } 99 | 100 | Future getRtpSenderCapabilities(String kind) async { 101 | return RTCFactoryWeb.instance.getRtpSenderCapabilities(kind); 102 | } 103 | 104 | MediaRecorder mediaRecorder() { 105 | return RTCFactoryWeb.instance.mediaRecorder(); 106 | } 107 | 108 | VideoRenderer videoRenderer() { 109 | return RTCFactoryWeb.instance.videoRenderer(); 110 | } 111 | 112 | Navigator get navigator => RTCFactoryWeb.instance.navigator; 113 | 114 | FrameCryptorFactory get frameCryptorFactory => FrameCryptorFactoryImpl.instance; 115 | 116 | MediaDevices get mediaDevices => navigator.mediaDevices; 117 | -------------------------------------------------------------------------------- /lib/src/frame_cryptor_impl.dart: -------------------------------------------------------------------------------- 1 | import 'dart:async'; 2 | import 'dart:convert'; 3 | import 'dart:js_interop'; 4 | import 'dart:js_interop_unsafe'; 5 | import 'dart:typed_data'; 6 | 7 | import 'package:collection/collection.dart'; 8 | import 'package:web/web.dart' as web; 9 | import 'package:webrtc_interface/webrtc_interface.dart'; 10 | 11 | import 'e2ee.worker/e2ee.logger.dart'; 12 | import 'event.dart'; 13 | import 'rtc_rtp_receiver_impl.dart'; 14 | import 'rtc_rtp_sender_impl.dart'; 15 | import 'utils.dart'; 16 | 17 | class WorkerResponse { 18 | WorkerResponse(this.msgId, this.data); 19 | String msgId; 20 | dynamic data; 21 | } 22 | 23 | extension RtcRtpReceiverExt on web.RTCRtpReceiver { 24 | static Map readableStreams_ = {}; 25 | static Map writableStreams_ = {}; 26 | 27 | web.ReadableStream? 
get readable { 28 | if (readableStreams_.containsKey(hashCode)) { 29 | return readableStreams_[hashCode]!; 30 | } 31 | return null; 32 | } 33 | 34 | web.WritableStream? get writable { 35 | if (writableStreams_.containsKey(hashCode)) { 36 | return writableStreams_[hashCode]!; 37 | } 38 | return null; 39 | } 40 | 41 | set readableStream(web.ReadableStream stream) { 42 | readableStreams_[hashCode] = stream; 43 | } 44 | 45 | set writableStream(web.WritableStream stream) { 46 | writableStreams_[hashCode] = stream; 47 | } 48 | 49 | void closeStreams() { 50 | readableStreams_.remove(hashCode); 51 | writableStreams_.remove(hashCode); 52 | } 53 | } 54 | 55 | extension RtcRtpSenderExt on web.RTCRtpSender { 56 | static Map readableStreams_ = {}; 57 | static Map writableStreams_ = {}; 58 | 59 | web.ReadableStream? get readable { 60 | if (readableStreams_.containsKey(hashCode)) { 61 | return readableStreams_[hashCode]!; 62 | } 63 | return null; 64 | } 65 | 66 | web.WritableStream? get writable { 67 | if (writableStreams_.containsKey(hashCode)) { 68 | return writableStreams_[hashCode]!; 69 | } 70 | return null; 71 | } 72 | 73 | set readableStream(web.ReadableStream stream) { 74 | readableStreams_[hashCode] = stream; 75 | } 76 | 77 | set writableStream(web.WritableStream stream) { 78 | writableStreams_[hashCode] = stream; 79 | } 80 | 81 | void closeStreams() { 82 | readableStreams_.remove(hashCode); 83 | writableStreams_.remove(hashCode); 84 | } 85 | } 86 | 87 | class FrameCryptorImpl extends FrameCryptor { 88 | FrameCryptorImpl( 89 | this._factory, this.worker, this._participantId, this._trackId, 90 | {this.jsSender, this.jsReceiver, required this.keyProvider}); 91 | web.Worker worker; 92 | bool _enabled = false; 93 | int _keyIndex = 0; 94 | final String _participantId; 95 | final String _trackId; 96 | final web.RTCRtpSender? jsSender; 97 | final web.RTCRtpReceiver? 
jsReceiver; 98 | final FrameCryptorFactoryImpl _factory; 99 | final KeyProviderImpl keyProvider; 100 | 101 | @override 102 | Future dispose() async { 103 | var msgId = randomString(12); 104 | worker.postMessage({ 105 | 'msgType': 'dispose', 106 | 'msgId': msgId, 107 | 'trackId': _trackId, 108 | }.jsify()); 109 | _factory.removeFrameCryptor(_trackId); 110 | return; 111 | } 112 | 113 | @override 114 | Future get enabled => Future(() => _enabled); 115 | 116 | @override 117 | Future get keyIndex => Future(() => _keyIndex); 118 | 119 | @override 120 | String get participantId => _participantId; 121 | 122 | String get trackId => _trackId; 123 | 124 | @override 125 | Future setEnabled(bool enabled) async { 126 | var msgId = randomString(12); 127 | worker.postMessage({ 128 | 'msgType': 'enable', 129 | 'msgId': msgId, 130 | 'trackId': _trackId, 131 | 'enabled': enabled 132 | }.jsify()); 133 | _enabled = enabled; 134 | return true; 135 | } 136 | 137 | @override 138 | Future setKeyIndex(int index) async { 139 | var msgId = randomString(12); 140 | worker.postMessage({ 141 | 'msgType': 'setKeyIndex', 142 | 'msgId': msgId, 143 | 'trackId': _trackId, 144 | 'index': index, 145 | }.jsify()); 146 | _keyIndex = index; 147 | return true; 148 | } 149 | 150 | @override 151 | Future updateCodec(String codec) async { 152 | var msgId = randomString(12); 153 | worker.postMessage({ 154 | 'msgType': 'updateCodec', 155 | 'msgId': msgId, 156 | 'trackId': _trackId, 157 | 'codec': codec, 158 | }.jsify()); 159 | } 160 | } 161 | 162 | class KeyProviderImpl implements KeyProvider { 163 | KeyProviderImpl(this._id, this.worker, this.options, this.events); 164 | final String _id; 165 | final web.Worker worker; 166 | final KeyProviderOptions options; 167 | final Map> _keys = {}; 168 | final EventsEmitter events; 169 | 170 | @override 171 | String get id => _id; 172 | 173 | Future init() async { 174 | var msgId = randomString(12); 175 | worker.postMessage({ 176 | 'msgType': 'keyProviderInit', 177 | 
'msgId': msgId, 178 | 'keyProviderId': id, 179 | 'keyOptions': { 180 | 'sharedKey': options.sharedKey, 181 | 'ratchetSalt': base64Encode(options.ratchetSalt), 182 | 'ratchetWindowSize': options.ratchetWindowSize, 183 | 'failureTolerance': options.failureTolerance, 184 | if (options.uncryptedMagicBytes != null) 185 | 'uncryptedMagicBytes': base64Encode(options.uncryptedMagicBytes!), 186 | 'keyRingSize': options.keyRingSize, 187 | 'discardFrameWhenCryptorNotReady': 188 | options.discardFrameWhenCryptorNotReady, 189 | }, 190 | }.jsify()); 191 | 192 | await events.waitFor( 193 | filter: (event) { 194 | logger.fine('waiting for init on msg: $msgId'); 195 | return event.msgId == msgId; 196 | }, 197 | duration: Duration(seconds: 15)); 198 | } 199 | 200 | @override 201 | Future dispose() async { 202 | var msgId = randomString(12); 203 | worker.postMessage({ 204 | 'msgType': 'keyProviderDispose', 205 | 'msgId': msgId, 206 | 'keyProviderId': id, 207 | }.jsify()); 208 | 209 | await events.waitFor( 210 | filter: (event) { 211 | logger.fine('waiting for dispose on msg: $msgId'); 212 | return event.msgId == msgId; 213 | }, 214 | duration: Duration(seconds: 15)); 215 | 216 | _keys.clear(); 217 | } 218 | 219 | @override 220 | Future setKey( 221 | {required String participantId, 222 | required int index, 223 | required Uint8List key}) async { 224 | var msgId = randomString(12); 225 | worker.postMessage({ 226 | 'msgType': 'setKey', 227 | 'msgId': msgId, 228 | 'keyProviderId': id, 229 | 'participantId': participantId, 230 | 'keyIndex': index, 231 | 'key': base64Encode(key), 232 | }.jsify()); 233 | 234 | await events.waitFor( 235 | filter: (event) { 236 | logger.fine('waiting for setKey on msg: $msgId'); 237 | return event.msgId == msgId; 238 | }, 239 | duration: Duration(minutes: 15), 240 | ); 241 | 242 | _keys[participantId] ??= []; 243 | if (_keys[participantId]!.length <= index) { 244 | _keys[participantId]!.add(key); 245 | } else { 246 | _keys[participantId]![index] = key; 247 | 
} 248 | return true; 249 | } 250 | 251 | @override 252 | Future ratchetKey( 253 | {required String participantId, required int index}) async { 254 | var msgId = randomString(12); 255 | worker.postMessage({ 256 | 'msgType': 'ratchetKey', 257 | 'msgId': msgId, 258 | 'keyProviderId': id, 259 | 'participantId': participantId, 260 | 'keyIndex': index, 261 | }.jsify()); 262 | 263 | var res = await events.waitFor( 264 | filter: (event) { 265 | logger.fine('waiting for ratchetKey on msg: $msgId'); 266 | return event.msgId == msgId; 267 | }, 268 | duration: Duration(seconds: 15)); 269 | 270 | return base64Decode(res.data['newKey']); 271 | } 272 | 273 | @override 274 | Future exportKey( 275 | {required String participantId, required int index}) async { 276 | var msgId = randomString(12); 277 | worker.postMessage({ 278 | 'msgType': 'exportKey', 279 | 'msgId': msgId, 280 | 'keyProviderId': id, 281 | 'participantId': participantId, 282 | 'keyIndex': index, 283 | }.jsify()); 284 | 285 | var res = await events.waitFor( 286 | filter: (event) { 287 | logger.fine('waiting for exportKey on msg: $msgId'); 288 | return event.msgId == msgId; 289 | }, 290 | duration: Duration(seconds: 15)); 291 | 292 | return base64Decode(res.data['exportedKey']); 293 | } 294 | 295 | @override 296 | Future exportSharedKey({int index = 0}) async { 297 | var msgId = randomString(12); 298 | worker.postMessage({ 299 | 'msgType': 'exportSharedKey', 300 | 'msgId': msgId, 301 | 'keyProviderId': id, 302 | 'keyIndex': index, 303 | }.jsify()); 304 | 305 | var res = await events.waitFor( 306 | filter: (event) { 307 | logger.fine('waiting for exportSharedKey on msg: $msgId'); 308 | return event.msgId == msgId; 309 | }, 310 | duration: Duration(seconds: 15)); 311 | 312 | return base64Decode(res.data['exportedKey']); 313 | } 314 | 315 | @override 316 | Future ratchetSharedKey({int index = 0}) async { 317 | var msgId = randomString(12); 318 | worker.postMessage({ 319 | 'msgType': 'ratchetSharedKey', 320 | 'msgId': 
msgId, 321 | 'keyProviderId': id, 322 | 'keyIndex': index, 323 | }.jsify()); 324 | var res = await events.waitFor( 325 | filter: (event) { 326 | logger.fine('waiting for ratchetSharedKey on msg: $msgId'); 327 | return event.msgId == msgId; 328 | }, 329 | duration: Duration(seconds: 15)); 330 | 331 | return base64Decode(res.data['newKey']); 332 | } 333 | 334 | @override 335 | Future setSharedKey({required Uint8List key, int index = 0}) async { 336 | var msgId = randomString(12); 337 | worker.postMessage({ 338 | 'msgType': 'setSharedKey', 339 | 'msgId': msgId, 340 | 'keyProviderId': id, 341 | 'keyIndex': index, 342 | 'key': base64Encode(key), 343 | }.jsify()); 344 | 345 | await events.waitFor( 346 | filter: (event) { 347 | logger.fine('waiting for setSharedKey on msg: $msgId'); 348 | return event.msgId == msgId; 349 | }, 350 | duration: Duration(seconds: 15)); 351 | } 352 | 353 | @override 354 | Future setSifTrailer({required Uint8List trailer}) async { 355 | var msgId = randomString(12); 356 | worker.postMessage({ 357 | 'msgType': 'setSifTrailer', 358 | 'msgId': msgId, 359 | 'keyProviderId': id, 360 | 'sifTrailer': base64Encode(trailer), 361 | }.jsify()); 362 | 363 | await events.waitFor( 364 | filter: (event) { 365 | logger.fine('waiting for setSifTrailer on msg: $msgId'); 366 | return event.msgId == msgId; 367 | }, 368 | duration: Duration(seconds: 15)); 369 | } 370 | } 371 | 372 | class FrameCryptorFactoryImpl implements FrameCryptorFactory { 373 | FrameCryptorFactoryImpl._internal() { 374 | worker = web.Worker('e2ee.worker.dart.js'.toJS); 375 | 376 | var onMessage = (web.MessageEvent msg) { 377 | final data = msg.data.dartify() as Map; 378 | //print('master got $data'); 379 | var type = data['type']; 380 | var msgId = data['msgId']; 381 | var msgType = data['msgType']; 382 | 383 | if (msgType == 'response') { 384 | events.emit(WorkerResponse(msgId, data)); 385 | } else if (msgType == 'event') { 386 | if (type == 'cryptorState') { 387 | var trackId = 
data['trackId']; 388 | var participantId = data['participantId']; 389 | var frameCryptor = _frameCryptors.values.firstWhereOrNull( 390 | (element) => (element as FrameCryptorImpl).trackId == trackId); 391 | var state = data['state']; 392 | var frameCryptorState = FrameCryptorState.FrameCryptorStateNew; 393 | switch (state) { 394 | case 'ok': 395 | frameCryptorState = FrameCryptorState.FrameCryptorStateOk; 396 | break; 397 | case 'decryptError': 398 | frameCryptorState = 399 | FrameCryptorState.FrameCryptorStateDecryptionFailed; 400 | break; 401 | case 'encryptError': 402 | frameCryptorState = 403 | FrameCryptorState.FrameCryptorStateEncryptionFailed; 404 | break; 405 | case 'missingKey': 406 | frameCryptorState = FrameCryptorState.FrameCryptorStateMissingKey; 407 | break; 408 | case 'internalError': 409 | frameCryptorState = 410 | FrameCryptorState.FrameCryptorStateInternalError; 411 | break; 412 | case 'keyRatcheted': 413 | frameCryptorState = 414 | FrameCryptorState.FrameCryptorStateKeyRatcheted; 415 | break; 416 | } 417 | frameCryptor?.onFrameCryptorStateChanged 418 | ?.call(participantId, frameCryptorState); 419 | } 420 | } 421 | }; 422 | 423 | worker.addEventListener('message', onMessage.toJS, false.toJS); 424 | 425 | void Function(web.ErrorEvent err) onError = (web.ErrorEvent err) { 426 | print('worker error: $err'); 427 | }; 428 | worker.addEventListener('error', onError.toJS, false.toJS); 429 | } 430 | 431 | static final FrameCryptorFactoryImpl instance = 432 | FrameCryptorFactoryImpl._internal(); 433 | 434 | late web.Worker worker; 435 | final Map _frameCryptors = {}; 436 | final EventsEmitter events = EventsEmitter(); 437 | 438 | @override 439 | Future createDefaultKeyProvider( 440 | KeyProviderOptions options) async { 441 | var keyProvider = 442 | KeyProviderImpl(randomString(12), worker, options, events); 443 | await keyProvider.init(); 444 | return keyProvider; 445 | } 446 | 447 | @override 448 | Future createFrameCryptorForRtpReceiver( 449 | {required 
String participantId, 450 | required RTCRtpReceiver receiver, 451 | required Algorithm algorithm, 452 | required KeyProvider keyProvider}) { 453 | var jsReceiver = (receiver as RTCRtpReceiverWeb).jsRtpReceiver; 454 | 455 | var trackId = jsReceiver.hashCode.toString(); 456 | var kind = jsReceiver.track.kind; 457 | 458 | if (web.window.hasProperty('RTCRtpScriptTransform'.toJS).toDart) { 459 | print('support RTCRtpScriptTransform'); 460 | var msgId = randomString(12); 461 | var options = { 462 | 'msgType': 'decode', 463 | 'msgId': msgId, 464 | 'keyProviderId': (keyProvider as KeyProviderImpl).id, 465 | 'kind': kind, 466 | 'participantId': participantId, 467 | 'trackId': trackId, 468 | }; 469 | 470 | jsReceiver.transform = web.RTCRtpScriptTransform(worker, options.jsify()); 471 | } else { 472 | var writable = jsReceiver.writable; 473 | var readable = jsReceiver.readable; 474 | var exist = true; 475 | if (writable == null || readable == null) { 476 | final streams = 477 | jsReceiver.callMethod('createEncodedStreams'.toJS); 478 | readable = streams.getProperty('readable'.toJS) as web.ReadableStream; 479 | jsReceiver.readableStream = readable; 480 | writable = streams.getProperty('writable'.toJS) as web.WritableStream; 481 | jsReceiver.writableStream = writable; 482 | exist = false; 483 | } 484 | var msgId = randomString(12); 485 | worker.postMessage( 486 | { 487 | 'msgType': 'decode', 488 | 'msgId': msgId, 489 | 'keyProviderId': (keyProvider as KeyProviderImpl).id, 490 | 'kind': kind, 491 | 'exist': exist, 492 | 'participantId': participantId, 493 | 'trackId': trackId, 494 | 'readableStream': readable, 495 | 'writableStream': writable 496 | }.jsify(), 497 | [readable, writable].jsify() as JSObject, 498 | ); 499 | } 500 | FrameCryptor cryptor = FrameCryptorImpl( 501 | this, worker, participantId, trackId, 502 | jsReceiver: jsReceiver, keyProvider: keyProvider); 503 | _frameCryptors[trackId] = cryptor; 504 | return Future.value(cryptor); 505 | } 506 | 507 | @override 508 | 
Future createFrameCryptorForRtpSender( 509 | {required String participantId, 510 | required RTCRtpSender sender, 511 | required Algorithm algorithm, 512 | required KeyProvider keyProvider}) { 513 | var jsSender = (sender as RTCRtpSenderWeb).jsRtpSender; 514 | var trackId = jsSender.hashCode.toString(); 515 | var kind = jsSender.track!.kind; 516 | 517 | if (web.window.hasProperty('RTCRtpScriptTransform'.toJS).toDart) { 518 | print('support RTCRtpScriptTransform'); 519 | var msgId = randomString(12); 520 | var options = { 521 | 'msgType': 'encode', 522 | 'msgId': msgId, 523 | 'keyProviderId': (keyProvider as KeyProviderImpl).id, 524 | 'kind': kind, 525 | 'participantId': participantId, 526 | 'trackId': trackId, 527 | 'options': keyProvider.options.toJson(), 528 | }; 529 | print('object: ${options['keyProviderId']}'); 530 | jsSender.transform = web.RTCRtpScriptTransform(worker, options.jsify()); 531 | } else { 532 | var writable = jsSender.writable; 533 | var readable = jsSender.readable; 534 | var exist = true; 535 | if (writable == null || readable == null) { 536 | final streams = 537 | jsSender.callMethod('createEncodedStreams'.toJS); 538 | readable = streams.getProperty('readable'.toJS) as web.ReadableStream; 539 | jsSender.readableStream = readable; 540 | writable = streams.getProperty('writable'.toJS) as web.WritableStream; 541 | 542 | exist = false; 543 | } 544 | var msgId = randomString(12); 545 | worker.postMessage( 546 | { 547 | 'msgType': 'encode', 548 | 'msgId': msgId, 549 | 'keyProviderId': (keyProvider as KeyProviderImpl).id, 550 | 'kind': kind, 551 | 'exist': exist, 552 | 'participantId': participantId, 553 | 'trackId': trackId, 554 | 'options': keyProvider.options.toJson(), 555 | 'readableStream': readable, 556 | 'writableStream': writable 557 | }.jsify(), 558 | [readable, writable].jsify() as JSObject, 559 | ); 560 | } 561 | FrameCryptor cryptor = FrameCryptorImpl( 562 | this, worker, participantId, trackId, 563 | jsSender: jsSender, keyProvider: 
keyProvider); 564 | _frameCryptors[trackId] = cryptor; 565 | return Future.value(cryptor); 566 | } 567 | 568 | void removeFrameCryptor(String trackId) { 569 | _frameCryptors.remove(trackId); 570 | } 571 | } 572 | -------------------------------------------------------------------------------- /lib/src/media_devices.dart: -------------------------------------------------------------------------------- 1 | import '../dart_webrtc.dart'; 2 | 3 | class MediaDevices { 4 | @Deprecated( 5 | 'Use the navigator.mediaDevices.getUserMedia(Map) provide from the facrory instead') 6 | static Future getUserMedia( 7 | Map mediaConstraints) async { 8 | return navigator.mediaDevices.getUserMedia(mediaConstraints); 9 | } 10 | 11 | @Deprecated( 12 | 'Use the navigator.mediaDevices.getDisplayMedia(Map) provide from the facrory instead') 13 | static Future getDisplayMedia( 14 | Map mediaConstraints) async { 15 | return navigator.mediaDevices.getDisplayMedia(mediaConstraints); 16 | } 17 | 18 | @Deprecated( 19 | 'Use the navigator.mediaDevices.getSources() provide from the facrory instead') 20 | static Future> getSources() { 21 | return navigator.mediaDevices.getSources(); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /lib/src/media_recorder.dart: -------------------------------------------------------------------------------- 1 | import 'package:webrtc_interface/src/media_recorder.dart' as _interface; 2 | 3 | import '../dart_webrtc.dart'; 4 | 5 | class MediaRecorder extends _interface.MediaRecorder { 6 | MediaRecorder() : _delegate = mediaRecorder(); 7 | final _interface.MediaRecorder _delegate; 8 | 9 | @override 10 | Future start( 11 | String path, { 12 | MediaStreamTrack? videoTrack, 13 | RecorderAudioChannel? 
audioChannel, 14 | }) => 15 | _delegate.start(path, videoTrack: videoTrack, audioChannel: audioChannel); 16 | 17 | @override 18 | Future stop() => _delegate.stop(); 19 | 20 | @override 21 | void startWeb( 22 | MediaStream stream, { 23 | Function(dynamic blob, bool isLastOne)? onDataChunk, 24 | String? mimeType, 25 | int timeSlice = 1000, 26 | }) => 27 | _delegate.startWeb( 28 | stream, 29 | onDataChunk: onDataChunk, 30 | mimeType: mimeType ?? 'video/webm', 31 | timeSlice: timeSlice, 32 | ); 33 | } 34 | -------------------------------------------------------------------------------- /lib/src/media_recorder_impl.dart: -------------------------------------------------------------------------------- 1 | import 'dart:async'; 2 | import 'dart:js_interop'; 3 | import 'dart:js_interop_unsafe'; 4 | 5 | import 'package:web/web.dart' as web; 6 | import 'package:webrtc_interface/webrtc_interface.dart'; 7 | 8 | import 'media_stream_impl.dart'; 9 | 10 | class MediaRecorderWeb extends MediaRecorder { 11 | late web.MediaRecorder _recorder; 12 | late Completer _completer; 13 | 14 | @override 15 | Future start( 16 | String path, { 17 | MediaStreamTrack? videoTrack, 18 | RecorderAudioChannel? audioChannel, 19 | }) { 20 | throw 'Use startWeb on Flutter Web!'; 21 | } 22 | 23 | @override 24 | void startWeb( 25 | MediaStream stream, { 26 | Function(dynamic blob, bool isLastOne)? 
onDataChunk, 27 | String mimeType = 'video/webm', 28 | int timeSlice = 1000, 29 | }) { 30 | var _native = stream as MediaStreamWeb; 31 | _recorder = web.MediaRecorder( 32 | _native.jsStream, web.MediaRecorderOptions(mimeType: mimeType)); 33 | if (onDataChunk == null) { 34 | var _chunks = []; 35 | _completer = Completer(); 36 | final void Function(web.Event event) callback = (web.Event event) { 37 | final blob = event.getProperty('data'.toJS) as web.Blob; 38 | if (blob.size > 0) { 39 | _chunks.add(blob); 40 | } 41 | if (_recorder.state == 'inactive') { 42 | final blob = 43 | web.Blob(_chunks.toJS, web.BlobPropertyBag(type: mimeType)); 44 | _completer.complete(web.URL.createObjectURL(blob)); 45 | } 46 | }; 47 | final void Function(JSAny) onError = (JSAny error) { 48 | _completer.completeError(error); 49 | }; 50 | _recorder.addEventListener('dataavailable', callback.toJS); 51 | _recorder.addEventListener('error', onError.toJS); 52 | } else { 53 | final void Function(web.Event event) callback = (web.Event event) { 54 | onDataChunk( 55 | event.getProperty('data'.toJS), 56 | _recorder.state == 'inactive', 57 | ); 58 | }; 59 | _recorder.addEventListener('dataavailable', callback.toJS); 60 | } 61 | _recorder.start(timeSlice); 62 | } 63 | 64 | @override 65 | Future stop() { 66 | _recorder.stop(); 67 | return _completer.future; 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /lib/src/media_stream_impl.dart: -------------------------------------------------------------------------------- 1 | import 'dart:async'; 2 | import 'dart:js_interop'; 3 | 4 | import 'package:web/web.dart' as web; 5 | import 'package:webrtc_interface/webrtc_interface.dart'; 6 | 7 | import 'media_stream_track_impl.dart'; 8 | 9 | class MediaStreamWeb extends MediaStream { 10 | MediaStreamWeb(this.jsStream, String ownerTag) : super(jsStream.id, ownerTag); 11 | final web.MediaStream jsStream; 12 | 13 | @override 14 | Future getMediaTracks() { 15 | return 
Future.value(); 16 | } 17 | 18 | @override 19 | Future addTrack(MediaStreamTrack track, {bool addToNative = true}) { 20 | if (addToNative) { 21 | var _native = track as MediaStreamTrackWeb; 22 | jsStream.addTrack(_native.jsTrack); 23 | } 24 | return Future.value(); 25 | } 26 | 27 | @override 28 | Future removeTrack(MediaStreamTrack track, 29 | {bool removeFromNative = true}) async { 30 | if (removeFromNative) { 31 | var _native = track as MediaStreamTrackWeb; 32 | jsStream.removeTrack(_native.jsTrack); 33 | } 34 | } 35 | 36 | @override 37 | List getAudioTracks() { 38 | var audioTracks = []; 39 | jsStream.getAudioTracks().toDart.forEach( 40 | (dynamic jsTrack) => audioTracks.add(MediaStreamTrackWeb(jsTrack))); 41 | return audioTracks; 42 | } 43 | 44 | @override 45 | List getVideoTracks() { 46 | var audioTracks = []; 47 | jsStream.getVideoTracks().toDart.forEach( 48 | (dynamic jsTrack) => audioTracks.add(MediaStreamTrackWeb(jsTrack))); 49 | return audioTracks; 50 | } 51 | 52 | @override 53 | List getTracks() { 54 | return [...getAudioTracks(), ...getVideoTracks()]; 55 | } 56 | 57 | @override 58 | bool? 
get active => jsStream.active; 59 | 60 | @override 61 | Future clone() async { 62 | return MediaStreamWeb(jsStream.clone(), ownerTag); 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /lib/src/media_stream_track_impl.dart: -------------------------------------------------------------------------------- 1 | import 'dart:async'; 2 | import 'dart:js_interop'; 3 | import 'dart:js_interop_unsafe'; 4 | import 'dart:typed_data'; 5 | 6 | import 'package:web/web.dart' as web; 7 | import 'package:webrtc_interface/webrtc_interface.dart'; 8 | 9 | import 'utils.dart'; 10 | 11 | class MediaStreamTrackWeb extends MediaStreamTrack { 12 | MediaStreamTrackWeb(this.jsTrack) { 13 | jsTrack.addEventListener( 14 | 'ended', 15 | (web.Event event) { 16 | onEnded?.call(); 17 | }.toJS); 18 | jsTrack.addEventListener( 19 | 'mute', 20 | (web.Event event) { 21 | onMute?.call(); 22 | }.toJS); 23 | jsTrack.addEventListener( 24 | 'unmute', 25 | (web.Event event) { 26 | onUnMute?.call(); 27 | }.toJS); 28 | } 29 | 30 | final web.MediaStreamTrack jsTrack; 31 | 32 | @override 33 | String? get id => jsTrack.id; 34 | 35 | @override 36 | String? get kind => jsTrack.kind; 37 | 38 | @override 39 | String? get label => jsTrack.label; 40 | 41 | @override 42 | bool get enabled => jsTrack.enabled; 43 | 44 | @override 45 | bool? get muted => jsTrack.muted; 46 | 47 | @override 48 | set enabled(bool? b) { 49 | jsTrack.enabled = b ?? false; 50 | } 51 | 52 | @override 53 | Map getConstraints() { 54 | final c = jsTrack.getConstraints(); 55 | final jso = (c as JSObject).dartify(); 56 | return (jso as Map).cast(); 57 | } 58 | 59 | @override 60 | Future applyConstraints([Map? constraints]) async { 61 | // TODO(wermathurin): Wait for: https://github.com/dart-lang/sdk/commit/1a861435579a37c297f3be0cf69735d5b492bc6c 62 | // to be merged to use jsTrack.applyConstraints() directly 63 | final arg = (constraints ?? 
{}).jsify(); 64 | 65 | await jsTrack.applyConstraints(arg as web.MediaTrackConstraints).toDart; 66 | } 67 | 68 | // TODO(wermathurin): https://github.com/dart-lang/sdk/issues/44319 69 | // @override 70 | // MediaTrackCapabilities getCapabilities() { 71 | // var _converted = jsTrack.getCapabilities(); 72 | // print(_converted['aspectRatio'].runtimeType); 73 | // return null; 74 | // } 75 | 76 | @override 77 | Map getSettings() { 78 | var settings = jsTrack.getSettings(); 79 | var _converted = {}; 80 | if (kind == 'audio') { 81 | if (settings.has('sampleRate')) { 82 | _converted['sampleRate'] = settings.sampleRate; 83 | } 84 | if (settings.has('sampleSize')) { 85 | _converted['sampleSize'] = settings.sampleSize; 86 | } 87 | if (settings.has('echoCancellation')) { 88 | _converted['echoCancellation'] = settings.echoCancellation; 89 | } 90 | if (settings.has('autoGainControl')) { 91 | _converted['autoGainControl'] = settings.autoGainControl; 92 | } 93 | if (settings.has('noiseSuppression')) { 94 | _converted['noiseSuppression'] = settings.noiseSuppression; 95 | } 96 | if (settings.has('latency')) _converted['latency'] = settings.latency; 97 | if (settings.has('channelCount')) { 98 | _converted['channelCount'] = settings.channelCount; 99 | } 100 | } else { 101 | if (settings.has('width')) { 102 | _converted['width'] = settings.width; 103 | } 104 | if (settings.has('height')) { 105 | _converted['height'] = settings.height; 106 | } 107 | if (settings.has('aspectRatio')) { 108 | _converted['aspectRatio'] = settings.aspectRatio; 109 | } 110 | if (settings.has('frameRate')) { 111 | _converted['frameRate'] = settings.frameRate; 112 | } 113 | if (isMobile && settings.has('facingMode')) { 114 | _converted['facingMode'] = settings.facingMode; 115 | } 116 | if (settings.has('resizeMode')) { 117 | _converted['resizeMode'] = settings.resizeMode; 118 | } 119 | } 120 | if (settings.has('deviceId')) _converted['deviceId'] = settings.deviceId; 121 | if (settings.has('groupId')) { 122 | 
_converted['groupId'] = settings.groupId; 123 | } 124 | return _converted; 125 | } 126 | 127 | @override 128 | Future captureFrame() async { 129 | final imageCapture = ImageCapture(jsTrack); 130 | final bitmap = await imageCapture.grabFrame().toDart as web.ImageBitmap; 131 | final canvas = web.HTMLCanvasElement(); 132 | canvas.width = bitmap.width; 133 | canvas.height = bitmap.height; 134 | final renderer = 135 | canvas.getContext('bitmaprenderer') as web.ImageBitmapRenderingContext; 136 | renderer.transferFromImageBitmap(bitmap); 137 | 138 | final blobCompleter = Completer(); 139 | final void Function(web.Blob blob) toBlob = (web.Blob blob) { 140 | blobCompleter.complete(blob); 141 | }; 142 | canvas.toBlob(toBlob.toJS); 143 | 144 | final blod = await blobCompleter.future; 145 | 146 | var array = await blod.arrayBuffer().toDart; 147 | bitmap.close(); 148 | return array.toDart; 149 | } 150 | 151 | @override 152 | Future dispose() async {} 153 | 154 | @override 155 | Future stop() async { 156 | jsTrack.stop(); 157 | } 158 | 159 | @override 160 | Future hasTorch() { 161 | return Future.value(false); 162 | } 163 | 164 | @override 165 | Future setTorch(bool torch) { 166 | throw UnimplementedError('The web implementation does not support torch'); 167 | } 168 | 169 | @override 170 | Future clone() async { 171 | return MediaStreamTrackWeb(jsTrack.clone()); 172 | } 173 | } 174 | 175 | extension type ImageCapture._(JSObject _) implements JSObject { 176 | external factory ImageCapture(web.MediaStreamTrack track); 177 | 178 | external JSPromise grabFrame(); 179 | } 180 | -------------------------------------------------------------------------------- /lib/src/mediadevices_impl.dart: -------------------------------------------------------------------------------- 1 | import 'dart:async'; 2 | import 'dart:js_interop'; 3 | import 'dart:js_interop_unsafe'; 4 | 5 | import 'package:web/web.dart' as web; 6 | import 'package:webrtc_interface/webrtc_interface.dart'; 7 | 8 | import 
'media_stream_impl.dart'; 9 | import 'utils.dart'; 10 | 11 | class MediaDevicesWeb extends MediaDevices { 12 | @override 13 | Future getUserMedia( 14 | Map mediaConstraints) async { 15 | try { 16 | try { 17 | if (!isMobile) { 18 | if (mediaConstraints['video'] is Map && 19 | mediaConstraints['video']['facingMode'] != null) { 20 | mediaConstraints['video'].remove('facingMode'); 21 | } 22 | } 23 | mediaConstraints.putIfAbsent('video', () => false); 24 | mediaConstraints.putIfAbsent('audio', () => false); 25 | } catch (e) { 26 | print( 27 | '[getUserMedia] failed to remove facingMode from mediaConstraints'); 28 | } 29 | try { 30 | if (mediaConstraints['audio'] is Map && 31 | Map.from(mediaConstraints['audio']).containsKey('optional') && 32 | mediaConstraints['audio']['optional'] 33 | is List>) { 34 | List> optionalValues = 35 | mediaConstraints['audio']['optional']; 36 | final audioMap = {}; 37 | 38 | optionalValues.forEach((option) { 39 | option.forEach((key, value) { 40 | audioMap[key] = value; 41 | }); 42 | }); 43 | 44 | mediaConstraints['audio'].remove('optional'); 45 | mediaConstraints['audio'].addAll(audioMap); 46 | } 47 | } catch (e, s) { 48 | print( 49 | '[getUserMedia] failed to translate optional audio constraints, $e, $s'); 50 | } 51 | 52 | final mediaDevices = web.window.navigator.mediaDevices; 53 | 54 | if (mediaDevices.getProperty('getUserMedia'.toJS).isDefinedAndNotNull) { 55 | var args = mediaConstraints.jsify(); 56 | final jsStream = await mediaDevices 57 | .getUserMedia(args as web.MediaStreamConstraints) 58 | .toDart; 59 | 60 | return MediaStreamWeb(jsStream, 'local'); 61 | } else { 62 | final jsStream = await web.window.navigator.mediaDevices 63 | .getUserMedia(web.MediaStreamConstraints( 64 | audio: mediaConstraints['audio'], 65 | video: mediaConstraints['video'], 66 | )) 67 | .toDart; 68 | return MediaStreamWeb(jsStream, 'local'); 69 | } 70 | } catch (e) { 71 | throw 'Unable to getUserMedia: ${e.toString()}'; 72 | } 73 | } 74 | 75 | @override 76 
| Future getDisplayMedia( 77 | Map mediaConstraints) async { 78 | try { 79 | final mediaDevices = web.window.navigator.mediaDevices; 80 | 81 | if (mediaDevices 82 | .getProperty('getDisplayMedia'.toJS) 83 | .isDefinedAndNotNull) { 84 | final arg = mediaConstraints.jsify(); 85 | final jsStream = await mediaDevices 86 | .getDisplayMedia(arg as web.DisplayMediaStreamOptions) 87 | .toDart; 88 | return MediaStreamWeb(jsStream, 'local'); 89 | } else { 90 | final jsStream = await web.window.navigator.mediaDevices 91 | .getUserMedia(web.MediaStreamConstraints( 92 | video: {'mediaSource': 'screen'}.jsify()!, 93 | audio: mediaConstraints['audio'] ?? false)) 94 | .toDart; 95 | return MediaStreamWeb(jsStream, 'local'); 96 | } 97 | } catch (e) { 98 | throw 'Unable to getDisplayMedia: ${e.toString()}'; 99 | } 100 | } 101 | 102 | @override 103 | Future> enumerateDevices() async { 104 | final devices = await getSources(); 105 | 106 | return devices.map((e) { 107 | var input = e; 108 | return MediaDeviceInfo( 109 | deviceId: input.deviceId, 110 | groupId: input.groupId, 111 | kind: input.kind, 112 | label: input.label, 113 | ); 114 | }).toList(); 115 | } 116 | 117 | @override 118 | Future> getSources() async { 119 | final devices = 120 | await web.window.navigator.mediaDevices.enumerateDevices().toDart; 121 | return devices.toDart; 122 | } 123 | 124 | @override 125 | MediaTrackSupportedConstraints getSupportedConstraints() { 126 | final mediaDevices = web.window.navigator.mediaDevices; 127 | 128 | var _mapConstraints = mediaDevices.getSupportedConstraints(); 129 | 130 | return MediaTrackSupportedConstraints( 131 | aspectRatio: _mapConstraints.aspectRatio, 132 | autoGainControl: _mapConstraints.autoGainControl, 133 | brightness: _mapConstraints.brightness, 134 | channelCount: _mapConstraints.channelCount, 135 | colorTemperature: _mapConstraints.colorTemperature, 136 | contrast: _mapConstraints.contrast, 137 | deviceId: _mapConstraints.deviceId, 138 | echoCancellation: 
_mapConstraints.echoCancellation, 139 | exposureCompensation: _mapConstraints.exposureCompensation, 140 | exposureMode: _mapConstraints.exposureMode, 141 | exposureTime: _mapConstraints.exposureTime, 142 | facingMode: _mapConstraints.facingMode, 143 | focusDistance: _mapConstraints.focusDistance, 144 | focusMode: _mapConstraints.focusMode, 145 | frameRate: _mapConstraints.frameRate, 146 | groupId: _mapConstraints.groupId, 147 | height: _mapConstraints.height, 148 | iso: _mapConstraints.iso, 149 | latency: _mapConstraints.latency, 150 | noiseSuppression: _mapConstraints.noiseSuppression, 151 | pan: _mapConstraints.pan, 152 | pointsOfInterest: _mapConstraints.pointsOfInterest, 153 | resizeMode: _mapConstraints.resizeMode, 154 | saturation: _mapConstraints.saturation, 155 | sampleRate: _mapConstraints.sampleRate, 156 | sampleSize: _mapConstraints.sampleSize, 157 | sharpness: _mapConstraints.sharpness, 158 | tilt: _mapConstraints.tilt, 159 | torch: _mapConstraints.torch, 160 | whiteBalanceMode: _mapConstraints.whiteBalanceMode, 161 | width: _mapConstraints.width, 162 | zoom: _mapConstraints.zoom); 163 | } 164 | 165 | @override 166 | Future selectAudioOutput( 167 | [AudioOutputOptions? 
options]) async { 168 | try { 169 | final mediaDevices = web.window.navigator.mediaDevices; 170 | 171 | if (mediaDevices 172 | .getProperty('selectAudioOutput'.toJS) 173 | .isDefinedAndNotNull) { 174 | if (options != null) { 175 | final arg = options.jsify(); 176 | final deviceInfo = 177 | await (mediaDevices.callMethod('selectAudioOutput'.toJS, arg) 178 | as JSPromise) 179 | .toDart; 180 | return MediaDeviceInfo( 181 | kind: deviceInfo.kind, 182 | label: deviceInfo.label, 183 | deviceId: deviceInfo.deviceId, 184 | groupId: deviceInfo.groupId, 185 | ); 186 | } else { 187 | final deviceInfo = 188 | await (mediaDevices.callMethod('selectAudioOutput'.toJS) 189 | as JSPromise) 190 | .toDart; 191 | return MediaDeviceInfo( 192 | kind: deviceInfo.kind, 193 | label: deviceInfo.label, 194 | deviceId: deviceInfo.deviceId, 195 | groupId: deviceInfo.groupId, 196 | ); 197 | } 198 | } else { 199 | throw UnimplementedError('selectAudioOutput is missing'); 200 | } 201 | } catch (e) { 202 | throw 'Unable to selectAudioOutput: ${e.toString()}, Please try to use MediaElement.setSinkId instead.'; 203 | } 204 | } 205 | 206 | @override 207 | set ondevicechange(Function(dynamic event)? listener) { 208 | try { 209 | final mediaDevices = web.window.navigator.mediaDevices; 210 | 211 | mediaDevices.ondevicechange = ((JSObject evt) { 212 | listener?.call(evt); 213 | }).toJS; 214 | } catch (e) { 215 | throw 'Unable to set ondevicechange: ${e.toString()}'; 216 | } 217 | } 218 | 219 | @override 220 | Function(dynamic event)? 
get ondevicechange {
    try {
      final mediaDevices = web.window.navigator.mediaDevices;

      final fn = mediaDevices.ondevicechange;
      if (fn.isUndefinedOrNull) {
        return null;
      }
      // FIX: in `dart:js_interop_unsafe`, the first positional argument of
      // `callAsFunction` is the JS `this` binding, not the first call
      // argument. The previous code passed `event` as `this`, so the
      // underlying JS handler was invoked with no event argument at all.
      return (dynamic event) => fn!.callAsFunction(null, event as JSAny?);
    } catch (e) {
      throw 'Unable to get ondevicechange: ${e.toString()}';
    }
  }
}
--------------------------------------------------------------------------------
/lib/src/navigator_impl.dart:
--------------------------------------------------------------------------------
import 'package:webrtc_interface/webrtc_interface.dart';

import 'mediadevices_impl.dart';

// NOTE(review): generic type arguments below were reconstructed (the source
// dump stripped all `<...>`); confirm against upstream dart_webrtc.

/// Web implementation of the WebRTC [Navigator] interface.
///
/// A thin facade that forwards every call to [MediaDevicesWeb].
class NavigatorWeb extends Navigator {
  @override
  Future<MediaStream> getDisplayMedia(Map<String, dynamic> mediaConstraints) {
    return mediaDevices.getDisplayMedia(mediaConstraints);
  }

  @override
  Future<List<dynamic>> getSources() {
    return mediaDevices.enumerateDevices();
  }

  @override
  Future<MediaStream> getUserMedia(Map<String, dynamic> mediaConstraints) {
    return mediaDevices.getUserMedia(mediaConstraints);
  }

  // MediaDevicesWeb is stateless, so returning a fresh instance per access
  // is harmless.
  @override
  MediaDevices get mediaDevices => MediaDevicesWeb();
}
--------------------------------------------------------------------------------
/lib/src/rtc_data_channel_impl.dart:
--------------------------------------------------------------------------------
import 'dart:async';
import 'dart:js_interop';
import 'dart:typed_data';

import 'package:web/web.dart' as web;
import 'package:webrtc_interface/webrtc_interface.dart';

/// Web implementation of [RTCDataChannel] wrapping a browser
/// [web.RTCDataChannel].
///
/// Wires the JS `open`/`close`/`message`/`bufferedamountlow` callbacks to
/// the interface's streams and callback setters in the constructor.
class RTCDataChannelWeb extends RTCDataChannel {
  RTCDataChannelWeb(this._jsDc) {
    stateChangeStream = _stateChangeController.stream;
    messageStream = _messageController.stream;

    _jsDc.onclose = (web.Event _) {
      _state = RTCDataChannelState.RTCDataChannelClosed;
      _stateChangeController.add(_state);
      onDataChannelState?.call(_state);
    }.toJS;

    _jsDc.onopen = (web.Event _) {
      _state = RTCDataChannelState.RTCDataChannelOpen;
      _stateChangeController.add(_state);
      onDataChannelState?.call(_state);
    }.toJS;

    // Incoming payloads are parsed asynchronously (Blob needs an async
    // arrayBuffer() read), then fanned out to the stream and the callback.
    _jsDc.onmessage = (web.MessageEvent event) {
      _parse(event.data.dartify()).then((msg) {
        _messageController.add(msg);
        onMessage?.call(msg);
      });
    }.toJS;

    _jsDc.onbufferedamountlow = (web.Event _) {
      onBufferedAmountLow?.call(bufferedAmount ?? 0);
    }.toJS;
  }

  final web.RTCDataChannel _jsDc;

  // Tracks the channel state locally; updated by the JS callbacks above.
  RTCDataChannelState _state = RTCDataChannelState.RTCDataChannelConnecting;

  @override
  RTCDataChannelState get state => _state;

  @override
  int? get id => _jsDc.id;

  @override
  String? get label => _jsDc.label;

  @override
  int? get bufferedAmount => _jsDc.bufferedAmount;

  @override
  Future<int?> getBufferedAmount() async {
    return _jsDc.bufferedAmount;
  }

  @override
  int? get bufferedAmountLowThreshold => _jsDc.bufferedAmountLowThreshold;

  @override
  set bufferedAmountLowThreshold(int? bufferedAmountLowThreshold) {
    _jsDc.bufferedAmountLowThreshold = bufferedAmountLowThreshold ??
0;
  }

  // Broadcast controllers backing [stateChangeStream] and [messageStream];
  // `sync: true` delivers events in the same microtask the JS callback
  // fired in.
  final _stateChangeController =
      StreamController<RTCDataChannelState>.broadcast(sync: true);
  final _messageController =
      StreamController<RTCDataChannelMessage>.broadcast(sync: true);

  /// Converts an incoming JS message payload to an [RTCDataChannelMessage]:
  /// a [String] becomes a text message; [ByteBuffer] and [web.Blob]
  /// payloads become binary messages; anything else yields an empty binary
  /// message.
  Future<RTCDataChannelMessage> _parse(dynamic data) async {
    if (data is String) {
      return RTCDataChannelMessage(data);
    }
    if (data is ByteBuffer) {
      return RTCDataChannelMessage.fromBinary(data.asUint8List());
    } else if (data is web.Blob) {
      final arrayBuffer = await data.arrayBuffer().toDart;
      return RTCDataChannelMessage.fromBinary(arrayBuffer.toDart.asUint8List());
    }
    return RTCDataChannelMessage.fromBinary(Uint8List(0));
  }

  /// Sends [message] over the channel, as text or binary depending on
  /// [RTCDataChannelMessage.isBinary].
  @override
  Future<void> send(RTCDataChannelMessage message) {
    if (!message.isBinary) {
      _jsDc.send(message.text.toJS);
    } else {
      _jsDc.send(message.binary.toJS);
    }
    return Future.value();
  }

  @override
  Future<void> close() {
    _jsDc.close();
    return Future.value();
  }
}
--------------------------------------------------------------------------------
/lib/src/rtc_dtmf_sender_impl.dart:
--------------------------------------------------------------------------------
import 'package:web/web.dart' as web;
import 'package:webrtc_interface/webrtc_interface.dart';

/// Web implementation of [RTCDTMFSender], delegating to the browser's
/// RTCDTMFSender.
class RTCDTMFSenderWeb extends RTCDTMFSender {
  RTCDTMFSenderWeb(this._jsDtmfSender);

  final web.RTCDTMFSender _jsDtmfSender;

  /// Queues [tones] for transmission; [duration] and [interToneGap] are in
  /// milliseconds.
  @override
  Future<void> insertDTMF(String tones,
      {int duration = 100, int interToneGap = 70}) async {
    return _jsDtmfSender.insertDTMF(tones, duration, interToneGap);
  }

  @override
  Future<bool> canInsertDtmf() async {
    return _jsDtmfSender.canInsertDTMF;
  }
}
--------------------------------------------------------------------------------
/lib/src/rtc_peerconnection_impl.dart:
--------------------------------------------------------------------------------
import 'dart:async';
import 'dart:collection';
import
'dart:js_interop'; 4 | import 'dart:js_interop_unsafe'; 5 | 6 | import 'package:dart_webrtc/dart_webrtc.dart'; 7 | import 'package:web/web.dart' as web; 8 | 9 | import 'media_stream_track_impl.dart'; 10 | import 'rtc_data_channel_impl.dart'; 11 | import 'rtc_dtmf_sender_impl.dart'; 12 | import 'rtc_rtp_receiver_impl.dart'; 13 | import 'rtc_rtp_sender_impl.dart'; 14 | import 'rtc_rtp_transceiver_impl.dart'; 15 | 16 | extension on web.RTCDataChannelInit { 17 | external set binaryType(String value); 18 | } 19 | 20 | /* 21 | * PeerConnection 22 | */ 23 | class RTCPeerConnectionWeb extends RTCPeerConnection { 24 | RTCPeerConnectionWeb(this._peerConnectionId, this._jsPc) { 25 | final void Function(web.RTCDataChannelEvent) toDataChannel = 26 | (web.RTCDataChannelEvent dataChannelEvent) { 27 | onDataChannel?.call(RTCDataChannelWeb(dataChannelEvent.channel)); 28 | }; 29 | 30 | final void Function(web.RTCPeerConnectionIceEvent) onIceCandidateCb = 31 | (web.RTCPeerConnectionIceEvent iceEvent) { 32 | if (iceEvent.candidate != null) { 33 | onIceCandidate?.call(_iceFromJs(iceEvent.candidate!)); 34 | } 35 | }; 36 | 37 | _jsPc.addEventListener('datachannel', toDataChannel.toJS); 38 | 39 | _jsPc.addEventListener('icecandidate', onIceCandidateCb.toJS); 40 | 41 | void Function(JSAny) onIceConnectionStateChange = (_) { 42 | _iceConnectionState = 43 | iceConnectionStateForString(_jsPc.iceConnectionState); 44 | onIceConnectionState?.call(_iceConnectionState!); 45 | 46 | if (web.Device.isFirefox) { 47 | switch (_iceConnectionState!) 
{ 48 | case RTCIceConnectionState.RTCIceConnectionStateNew: 49 | _connectionState = RTCPeerConnectionState.RTCPeerConnectionStateNew; 50 | break; 51 | case RTCIceConnectionState.RTCIceConnectionStateChecking: 52 | _connectionState = 53 | RTCPeerConnectionState.RTCPeerConnectionStateConnecting; 54 | break; 55 | case RTCIceConnectionState.RTCIceConnectionStateConnected: 56 | _connectionState = 57 | RTCPeerConnectionState.RTCPeerConnectionStateConnected; 58 | break; 59 | case RTCIceConnectionState.RTCIceConnectionStateFailed: 60 | _connectionState = 61 | RTCPeerConnectionState.RTCPeerConnectionStateFailed; 62 | break; 63 | case RTCIceConnectionState.RTCIceConnectionStateDisconnected: 64 | _connectionState = 65 | RTCPeerConnectionState.RTCPeerConnectionStateDisconnected; 66 | break; 67 | case RTCIceConnectionState.RTCIceConnectionStateClosed: 68 | _connectionState = 69 | RTCPeerConnectionState.RTCPeerConnectionStateClosed; 70 | break; 71 | default: 72 | break; 73 | } 74 | onConnectionState?.call(_connectionState!); 75 | } 76 | }; 77 | 78 | _jsPc.addEventListener( 79 | 'iceconnectionstatechange', onIceConnectionStateChange.toJS); 80 | 81 | void Function(JSAny) onIceGatheringStateChange = (_) { 82 | _iceGatheringState = iceGatheringStateforString(_jsPc.iceGatheringState); 83 | onIceGatheringState?.call(_iceGatheringState!); 84 | }; 85 | 86 | _jsPc.onicegatheringstatechange = onIceGatheringStateChange.toJS; 87 | 88 | void Function(JSAny) onSignalingStateChange = (_) { 89 | _signalingState = signalingStateForString(_jsPc.signalingState); 90 | onSignalingState?.call(_signalingState!); 91 | }; 92 | 93 | _jsPc.addEventListener('signalingstatechange', onSignalingStateChange.toJS); 94 | 95 | if (!web.Device.isFirefox) { 96 | final void Function(JSAny) onConnectionStateChange = (_) { 97 | _connectionState = peerConnectionStateForString(_jsPc.connectionState); 98 | onConnectionState?.call(_connectionState!); 99 | }; 100 | _jsPc.addEventListener( 101 | 'connectionstatechange', 
onConnectionStateChange.toJS); 102 | } 103 | 104 | void Function(JSAny) onNegotationNeeded = (_) { 105 | onRenegotiationNeeded?.call(); 106 | }; 107 | 108 | _jsPc.addEventListener('negotiationneeded', onNegotationNeeded.toJS); 109 | 110 | void Function(web.RTCTrackEvent) onTrackEvent = 111 | (web.RTCTrackEvent trackEvent) { 112 | onTrack?.call( 113 | RTCTrackEvent( 114 | track: MediaStreamTrackWeb(trackEvent.track), 115 | receiver: RTCRtpReceiverWeb(trackEvent.receiver), 116 | transceiver: 117 | RTCRtpTransceiverWeb.fromJsObject(trackEvent.transceiver), 118 | streams: trackEvent.streams.toDart 119 | .map((dynamic stream) => 120 | MediaStreamWeb(stream, _peerConnectionId)) 121 | .toList()), 122 | ); 123 | }; 124 | _jsPc.addEventListener('track', onTrackEvent.toJS); 125 | } 126 | 127 | final String _peerConnectionId; 128 | late final web.RTCPeerConnection _jsPc; 129 | final _localStreams = {}; 130 | final _configuration = {}; 131 | 132 | RTCSignalingState? _signalingState; 133 | RTCIceGatheringState? _iceGatheringState; 134 | RTCIceConnectionState? _iceConnectionState; 135 | RTCPeerConnectionState? _connectionState; 136 | 137 | @override 138 | RTCSignalingState? get signalingState => _signalingState; 139 | 140 | @override 141 | Future getSignalingState() async { 142 | _signalingState = signalingStateForString(_jsPc.signalingState); 143 | return signalingState; 144 | } 145 | 146 | @override 147 | RTCIceGatheringState? get iceGatheringState => _iceGatheringState; 148 | 149 | @override 150 | Future getIceGatheringState() async { 151 | _iceGatheringState = iceGatheringStateforString(_jsPc.iceGatheringState); 152 | return _iceGatheringState; 153 | } 154 | 155 | @override 156 | RTCIceConnectionState? get iceConnectionState => _iceConnectionState; 157 | 158 | @override 159 | Future getIceConnectionState() async { 160 | _iceConnectionState = iceConnectionStateForString(_jsPc.iceConnectionState); 161 | if (web.Device.isFirefox) { 162 | switch (_iceConnectionState!) 
{ 163 | case RTCIceConnectionState.RTCIceConnectionStateNew: 164 | _connectionState = RTCPeerConnectionState.RTCPeerConnectionStateNew; 165 | break; 166 | case RTCIceConnectionState.RTCIceConnectionStateChecking: 167 | _connectionState = 168 | RTCPeerConnectionState.RTCPeerConnectionStateConnecting; 169 | break; 170 | case RTCIceConnectionState.RTCIceConnectionStateConnected: 171 | _connectionState = 172 | RTCPeerConnectionState.RTCPeerConnectionStateConnected; 173 | break; 174 | case RTCIceConnectionState.RTCIceConnectionStateFailed: 175 | _connectionState = 176 | RTCPeerConnectionState.RTCPeerConnectionStateFailed; 177 | break; 178 | case RTCIceConnectionState.RTCIceConnectionStateDisconnected: 179 | _connectionState = 180 | RTCPeerConnectionState.RTCPeerConnectionStateDisconnected; 181 | break; 182 | case RTCIceConnectionState.RTCIceConnectionStateClosed: 183 | _connectionState = 184 | RTCPeerConnectionState.RTCPeerConnectionStateClosed; 185 | break; 186 | default: 187 | break; 188 | } 189 | } 190 | return _iceConnectionState; 191 | } 192 | 193 | @override 194 | RTCPeerConnectionState? get connectionState => _connectionState; 195 | 196 | @override 197 | Future getConnectionState() async { 198 | /// platform is Firefox 199 | if (web.Device.isFirefox) { 200 | await getIceConnectionState(); 201 | } else { 202 | _connectionState = peerConnectionStateForString(_jsPc.connectionState); 203 | } 204 | return _connectionState; 205 | } 206 | 207 | @override 208 | Future dispose() { 209 | _jsPc.close(); 210 | return Future.value(); 211 | } 212 | 213 | @override 214 | Map get getConfiguration => _configuration; 215 | 216 | @override 217 | Future setConfiguration(Map configuration) { 218 | _configuration.addAll(configuration); 219 | _jsPc.setConfiguration(configuration.jsify() as web.RTCConfiguration); 220 | return Future.value(); 221 | } 222 | 223 | @override 224 | Future createOffer( 225 | [Map? 
constraints]) async { 226 | final args = {}; 227 | if (constraints != null) { 228 | for (var key in constraints.keys) { 229 | args[key] = constraints[key]; 230 | } 231 | } 232 | final desc = await _jsPc.createOffer(args.jsify() as JSObject).toDart; 233 | 234 | return RTCSessionDescription(desc!.sdp, desc.type); 235 | } 236 | 237 | @override 238 | Future createAnswer( 239 | [Map? constraints]) async { 240 | final args = {}; 241 | if (constraints != null) { 242 | for (var key in constraints.keys) { 243 | args[key] = constraints[key]; 244 | } 245 | } 246 | final desc = await _jsPc.createAnswer(args.jsify() as JSObject).toDart; 247 | return RTCSessionDescription(desc!.sdp, desc.type); 248 | } 249 | 250 | @override 251 | Future addStream(MediaStream stream) { 252 | var _native = stream as MediaStreamWeb; 253 | _localStreams.putIfAbsent( 254 | stream.id, () => MediaStreamWeb(_native.jsStream, _peerConnectionId)); 255 | 256 | _jsPc.addStream(stream.jsStream); 257 | 258 | return Future.value(); 259 | } 260 | 261 | @override 262 | Future removeStream(MediaStream stream) async { 263 | var _native = stream as MediaStreamWeb; 264 | _localStreams.remove(stream.id); 265 | _jsPc.removeStream(_native.jsStream); 266 | return Future.value(); 267 | } 268 | 269 | @override 270 | Future setLocalDescription(RTCSessionDescription description) async { 271 | await _jsPc 272 | .setLocalDescription(web.RTCLocalSessionDescriptionInit( 273 | type: description.type!, 274 | sdp: description.sdp!, 275 | )) 276 | .toDart; 277 | } 278 | 279 | @override 280 | Future setRemoteDescription(RTCSessionDescription description) async { 281 | await _jsPc 282 | .setRemoteDescription(web.RTCSessionDescriptionInit( 283 | type: description.type!, 284 | sdp: description.sdp!, 285 | )) 286 | .toDart; 287 | } 288 | 289 | @override 290 | Future getLocalDescription() async { 291 | if (null == _jsPc.localDescription) { 292 | return null; 293 | } 294 | return _sessionFromJs(_jsPc.localDescription); 295 | } 296 | 297 | 
@override 298 | Future getRemoteDescription() async { 299 | if (null == _jsPc.remoteDescription) { 300 | return null; 301 | } 302 | return _sessionFromJs(_jsPc.remoteDescription); 303 | } 304 | 305 | @override 306 | Future addCandidate(RTCIceCandidate candidate) async { 307 | await _jsPc 308 | .addIceCandidate(web.RTCIceCandidateInit( 309 | candidate: candidate.candidate!, 310 | sdpMid: candidate.sdpMid!, 311 | sdpMLineIndex: candidate.sdpMLineIndex)) 312 | .toDart; 313 | } 314 | 315 | @override 316 | Future> getStats([MediaStreamTrack? track]) async { 317 | web.RTCStatsReport stats; 318 | if (track != null) { 319 | var jsTrack = (track as MediaStreamTrackWeb).jsTrack; 320 | stats = await _jsPc.getStats(jsTrack).toDart; 321 | } else { 322 | stats = await _jsPc.getStats().toDart; 323 | } 324 | 325 | var report = []; 326 | stats.callMethodVarArgs('forEach'.toJS, [ 327 | (JSObject value, JSAny key) { 328 | var map = value.dartify() as LinkedHashMap; 329 | var stats = {}; 330 | for (var entry in map.entries) { 331 | stats[(entry.key as JSString).toDart] = entry.value; 332 | } 333 | report.add(StatsReport( 334 | value.getProperty('id'.toJS).toDart, 335 | value.getProperty('type'.toJS).toDart, 336 | value.getProperty('timestamp'.toJS).toDartDouble, 337 | stats)); 338 | }.toJS, 339 | ]); 340 | return report; 341 | } 342 | 343 | @override 344 | List getLocalStreams() => 345 | _jsPc.getLocalStreams().toDart.map((e) => _localStreams[e.id]!).toList(); 346 | 347 | @override 348 | List getRemoteStreams() => _jsPc 349 | .getRemoteStreams() 350 | .toDart 351 | .map((e) => MediaStreamWeb(e, _peerConnectionId)) 352 | .toList(); 353 | 354 | @override 355 | Future createDataChannel( 356 | String label, RTCDataChannelInit dataChannelDict) { 357 | var dcInit = web.RTCDataChannelInit( 358 | id: dataChannelDict.id, 359 | ordered: dataChannelDict.ordered, 360 | protocol: dataChannelDict.protocol, 361 | negotiated: dataChannelDict.negotiated, 362 | ); 363 | 364 | if 
(dataChannelDict.binaryType == 'binary') { 365 | dcInit.binaryType = 'arraybuffer'; // Avoid Blob in data channel 366 | } 367 | 368 | if (dataChannelDict.maxRetransmits > 0) { 369 | dcInit.maxRetransmits = dataChannelDict.maxRetransmits; 370 | } 371 | 372 | if (dataChannelDict.maxRetransmitTime > 0) { 373 | dcInit.maxPacketLifeTime = dataChannelDict.maxRetransmitTime; 374 | } 375 | 376 | final jsDc = _jsPc.createDataChannel( 377 | label, 378 | dcInit, 379 | ); 380 | 381 | return Future.value(RTCDataChannelWeb(jsDc)); 382 | } 383 | 384 | @override 385 | Future restartIce() { 386 | _jsPc.restartIce(); 387 | return Future.value(); 388 | } 389 | 390 | @override 391 | Future close() async { 392 | _jsPc.close(); 393 | return Future.value(); 394 | } 395 | 396 | @override 397 | RTCDTMFSender createDtmfSender(MediaStreamTrack track) { 398 | var _native = track as MediaStreamTrackWeb; 399 | var jsDtmfSender = _jsPc.createDTMFSender(_native.jsTrack); 400 | return RTCDTMFSenderWeb(jsDtmfSender); 401 | } 402 | 403 | // 404 | // utility section 405 | // 406 | 407 | RTCIceCandidate _iceFromJs(web.RTCIceCandidate candidate) => RTCIceCandidate( 408 | candidate.candidate, 409 | candidate.sdpMid, 410 | candidate.sdpMLineIndex, 411 | ); 412 | 413 | RTCSessionDescription _sessionFromJs(web.RTCSessionDescription? sd) => 414 | RTCSessionDescription(sd?.sdp, sd?.type); 415 | 416 | @override 417 | Future addTrack(MediaStreamTrack track, 418 | [MediaStream? 
stream]) async { 419 | var jStream = (stream as MediaStreamWeb).jsStream; 420 | var jsTrack = (track as MediaStreamTrackWeb).jsTrack; 421 | var sender = _jsPc.addTrack(jsTrack, jStream); 422 | return RTCRtpSenderWeb.fromJsSender(sender); 423 | } 424 | 425 | @override 426 | Future removeTrack(RTCRtpSender sender) async { 427 | var nativeSender = sender as RTCRtpSenderWeb; 428 | // var nativeTrack = nativeSender.track as MediaStreamTrackWeb; 429 | _jsPc.removeTrack(nativeSender.jsRtpSender); 430 | return Future.value(true); 431 | } 432 | 433 | @override 434 | Future> getSenders() async { 435 | var senders = _jsPc.getSenders(); 436 | var list = []; 437 | for (var e in senders.toDart) { 438 | list.add(RTCRtpSenderWeb.fromJsSender(e)); 439 | } 440 | return list; 441 | } 442 | 443 | @override 444 | Future> getReceivers() async { 445 | var receivers = _jsPc.getReceivers(); 446 | 447 | var list = []; 448 | for (var e in receivers.toDart) { 449 | list.add(RTCRtpReceiverWeb(e)); 450 | } 451 | 452 | return list; 453 | } 454 | 455 | @override 456 | Future> getTransceivers() async { 457 | var transceivers = _jsPc.getTransceivers(); 458 | 459 | var list = []; 460 | for (var e in transceivers.toDart) { 461 | list.add(RTCRtpTransceiverWeb.fromJsObject(e)); 462 | } 463 | 464 | return list; 465 | } 466 | 467 | //'audio|video', { 'direction': 'recvonly|sendonly|sendrecv' } 468 | // 469 | // https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/addTransceiver 470 | // 471 | @override 472 | Future addTransceiver({ 473 | MediaStreamTrack? track, 474 | RTCRtpMediaType? kind, 475 | RTCRtpTransceiverInit? init, 476 | }) async { 477 | final jsTrack = track is MediaStreamTrackWeb ? track.jsTrack : null; 478 | final kindString = kind != null ? typeRTCRtpMediaTypetoString[kind] : null; 479 | final trackOrKind = jsTrack ?? kindString; 480 | assert(trackOrKind != null, 'track or kind must not be null'); 481 | 482 | final transceiver = init != null 483 | ? 
_jsPc.addTransceiver(trackOrKind.jsify()!, 484 | init.toJsObject() as web.RTCRtpTransceiverInit) 485 | : _jsPc.addTransceiver(trackOrKind.jsify()!); 486 | 487 | return RTCRtpTransceiverWeb.fromJsObject( 488 | transceiver, 489 | peerConnectionId: _peerConnectionId, 490 | ); 491 | } 492 | } 493 | 494 | extension _AddRemoveStream on web.RTCPeerConnection { 495 | external void addStream(web.MediaStream stream); 496 | 497 | external void removeStream(web.MediaStream stream); 498 | 499 | external JSArray getLocalStreams(); 500 | external JSArray getRemoteStreams(); 501 | 502 | external web.RTCDTMFSender createDTMFSender(web.MediaStreamTrack track); 503 | } 504 | -------------------------------------------------------------------------------- /lib/src/rtc_rtp_capailities_imp.dart: -------------------------------------------------------------------------------- 1 | import 'dart:js_interop'; 2 | import 'dart:js_interop_unsafe'; 3 | 4 | import 'package:web/web.dart' as web; 5 | import 'package:webrtc_interface/webrtc_interface.dart'; 6 | 7 | class RTCRtpCapabilitiesWeb { 8 | static RTCRtpCapabilities fromJsObject(web.RTCRtpCapabilities object) { 9 | return RTCRtpCapabilities.fromMap({ 10 | 'codecs': object.codecs.toDart.map((e) => e.dartify()), 11 | 'headerExtensions': 12 | object.headerExtensions.toDart.map((e) => e.dartify()), 13 | 'fecMechanisms': object.getProperty('fecMechanisms'.toJS).dartify() ?? 
[]
    });
  }
}
--------------------------------------------------------------------------------
/lib/src/rtc_rtp_parameters_impl.dart:
--------------------------------------------------------------------------------
import 'dart:js_interop';
import 'dart:js_interop_unsafe';

import 'package:web/web.dart' as web;
import 'package:webrtc_interface/webrtc_interface.dart';

// NOTE(review): generic type arguments in this file were reconstructed (the
// source dump stripped all `<...>`); confirm against upstream dart_webrtc.

/// Converts the browser's RTCRtpParameters structures into the
/// package-level [RTCRtpParameters] model.
class RTCRtpParametersWeb {
  static RTCRtpParameters fromJsObject(web.RTCRtpParameters object) {
    // `transactionId` is absent on receiver-side parameters; its presence
    // is also used here as the signal that `rtcp` is populated.
    final transactionId =
        object.getProperty<JSString?>('transactionId'.toJS)?.toDart;
    return RTCRtpParameters(
        transactionId: transactionId,
        rtcp: transactionId != null
            ? RTCRTCPParametersWeb.fromJsObject(object.rtcp)
            : null,
        headerExtensions: headerExtensionsFromJsObject(object),
        encodings: encodingsFromJsObject(object),
        codecs: codecsFromJsObject(object));
  }

  static List<RTCHeaderExtension> headerExtensionsFromJsObject(
      web.RTCRtpParameters object) {
    return object.headerExtensions.toDart.map((e) {
      final map = (e as JSObject).dartify() as Map;
      // dartify() turns JS numbers into doubles; the model expects an
      // integer extension id.
      if (map.containsKey('id')) {
        map['id'] = (map['id'] as num).toInt();
      }
      return RTCHeaderExtension.fromMap(map);
    }).toList();
  }

  /// Reads the optional `encodings` array; returns an empty list when the
  /// property is absent.
  static List<RTCRtpEncoding> encodingsFromJsObject(JSObject object) {
    if (!object.hasProperty('encodings'.toJS).toDart) {
      return <RTCRtpEncoding>[];
    }
    final encodings =
        (object.getProperty('encodings'.toJS) as JSArray).toDart;
    // Typed accumulation instead of the previous untyped forEach-into-list.
    final list = <RTCRtpEncoding>[];
    for (final e in encodings) {
      list.add(
          RTCRtpEncodingWeb.fromJsObject(e as web.RTCRtpEncodingParameters));
    }
    return list;
  }

  /// Reads the optional `codecs` array; returns an empty list when the
  /// property is absent.
  static List<RTCRTPCodec> codecsFromJsObject(JSObject object) {
    if (!object.hasProperty('codecs'.toJS).toDart) {
      return <RTCRTPCodec>[];
    }
    final codecs = (object.getProperty('codecs'.toJS) as JSArray).toDart;
    final list = <RTCRTPCodec>[];
    for (final e in codecs) {
      list.add(RTCRTPCodecWeb.fromJsObject(e as web.RTCRtpCodecParameters));
    }
    return list;
  }
}

class RTCRTCPParametersWeb {
  static RTCRTCPParameters fromJsObject(web.RTCRtcpParameters object) {
    return RTCRTCPParameters.fromMap(
        {'cname': object.cname, 'reducedSize': object.reducedSize});
  }
}

class RTCHeaderExtensionWeb {
  static RTCHeaderExtension fromJsObject(
      web.RTCRtpHeaderExtensionParameters object) {
    return RTCHeaderExtension.fromMap(
        {'uri': object.uri, 'id': object.id, 'encrypted': object.encrypted});
  }
}

class RTCRtpEncodingWeb {
  // Optional fields are read via getProperty so absent JS properties map
  // to Dart null instead of throwing.
  static RTCRtpEncoding fromJsObject(web.RTCRtpEncodingParameters object) {
    return RTCRtpEncoding.fromMap({
      'rid': object.getProperty<JSString?>('rid'.toJS)?.toDart,
      'active': object.active,
      'maxBitrate':
          object.getProperty<JSNumber?>('maxBitrate'.toJS)?.toDartInt,
      'maxFramerate':
          object.getProperty<JSNumber?>('maxFramerate'.toJS)?.toDartInt,
      'minBitrate':
          object.getProperty<JSNumber?>('minBitrate'.toJS)?.toDartInt,
      'numTemporalLayers':
          object.getProperty<JSNumber?>('numTemporalLayers'.toJS)?.toDartInt,
      'scaleResolutionDownBy': object
          .getProperty<JSNumber?>('scaleResolutionDownBy'.toJS)
          ?.toDartDouble,
      // assumes ssrc is surfaced as an int — TODO confirm against upstream
      'ssrc': object.getProperty<JSNumber?>('ssrc'.toJS)?.toDartInt
    });
  }
}

class RTCRTPCodecWeb {
  static RTCRTPCodec fromJsObject(web.RTCRtpCodecParameters object) {
    return RTCRTPCodec.fromMap({
      'payloadType': object.payloadType,
      'name': object.getProperty<JSString?>('name'.toJS)?.toDart,
      'kind': object.getProperty<JSString?>('kind'.toJS)?.toDart,
      'clockRate': object.clockRate,
      'numChannels':
          object.getProperty<JSNumber?>('numChannels'.toJS)?.toDartInt,
      'parameters': object.getProperty('parameters'.toJS)?.dartify(),
    });
  }
}
--------------------------------------------------------------------------------
// ---------------------------------------------------------------------------
// lib/src/rtc_rtp_receiver_impl.dart
// ---------------------------------------------------------------------------
import 'dart:collection';
import 'dart:js_interop';
import 'dart:js_interop_unsafe';

import 'package:web/web.dart' as web;
import 'package:webrtc_interface/webrtc_interface.dart';

import 'media_stream_track_impl.dart';
import 'rtc_rtp_parameters_impl.dart';

/// Web implementation of [RTCRtpReceiver] wrapping a browser receiver.
class RTCRtpReceiverWeb extends RTCRtpReceiver {
  RTCRtpReceiverWeb(this._jsRtpReceiver);

  /// private: underlying browser RTCRtpReceiver.
  final web.RTCRtpReceiver _jsRtpReceiver;

  @override
  Future<List<StatsReport>> getStats() async {
    var stats = await _jsRtpReceiver.getStats().toDart;
    var report = <StatsReport>[];
    // RTCStatsReport is a JS maplike; iterate via its own `forEach` since
    // package:web exposes no Dart-side iteration for it.
    stats.callMethodVarArgs('forEach'.toJS, [
      (JSObject value, JSAny key) {
        var map = value.dartify() as LinkedHashMap;
        var values = <String, dynamic>{};
        for (var entry in map.entries) {
          values[(entry.key as JSString).toDart] = entry.value;
        }
        report.add(StatsReport(
            value.getProperty<JSString>('id'.toJS).toDart,
            value.getProperty<JSString>('type'.toJS).toDart,
            value.getProperty<JSNumber>('timestamp'.toJS).toDartDouble,
            values));
      }.toJS,
    ]);
    return report;
  }

  /// The WebRTC specification only defines RTCRtpParameters in terms of
  /// senders, but this API also applies them to receivers, similar to ORTC:
  /// http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*.
  @override
  RTCRtpParameters get parameters {
    var parameters = _jsRtpReceiver.getParameters();
    return RTCRtpParametersWeb.fromJsObject(parameters);
  }

  @override
  MediaStreamTrack get track => MediaStreamTrackWeb(_jsRtpReceiver.track);

  // No stable id is exposed by the browser; the wrapper's hashCode stands in.
  @override
  String get receiverId => '${_jsRtpReceiver.hashCode}';

  web.RTCRtpReceiver get jsRtpReceiver => _jsRtpReceiver;
}

// ---------------------------------------------------------------------------
// lib/src/rtc_rtp_sender_impl.dart
// ---------------------------------------------------------------------------
import 'dart:async';
import 'dart:collection';
import 'dart:js_interop';
import 'dart:js_interop_unsafe';

import 'package:web/web.dart' as web;
import 'package:webrtc_interface/webrtc_interface.dart';

import 'media_stream_impl.dart';
import 'media_stream_track_impl.dart';
import 'rtc_dtmf_sender_impl.dart';
import 'rtc_rtp_parameters_impl.dart';

/// Web implementation of [RTCRtpSender] wrapping a browser sender.
class RTCRtpSenderWeb extends RTCRtpSender {
  RTCRtpSenderWeb(this._jsRtpSender, this._ownsTrack);

  factory RTCRtpSenderWeb.fromJsSender(web.RTCRtpSender jsRtpSender) {
    return RTCRtpSenderWeb(jsRtpSender, jsRtpSender.track != null);
  }

  final web.RTCRtpSender _jsRtpSender;
  bool _ownsTrack = false;

  @override
  Future<void> replaceTrack(MediaStreamTrack? track) async {
    try {
      if (track != null) {
        var nativeTrack = track as MediaStreamTrackWeb;
        // BUG FIX: replaceTrack() returns a promise; await it so a rejection
        // is caught here instead of escaping as an unhandled JS rejection.
        await _jsRtpSender.replaceTrack(nativeTrack.jsTrack).toDart;
      } else {
        await _jsRtpSender.replaceTrack(null).toDart;
      }
    } catch (e) {
      // BUG FIX: `on Exception` misses JS interop errors (they are not Dart
      // Exceptions); catch everything so the wrapper message is ever used.
      throw 'Unable to RTCRtpSender::replaceTrack: ${e.toString()}';
    }
  }

  @override
  Future<void> setTrack(MediaStreamTrack? track,
      {bool takeOwnership = true}) async {
    try {
      if (track != null) {
        var nativeTrack = track as MediaStreamTrackWeb;
        _jsRtpSender.callMethod('setTrack'.toJS, nativeTrack.jsTrack);
      } else {
        _jsRtpSender.callMethod('setTrack'.toJS, null);
      }
    } catch (e) {
      throw 'Unable to RTCRtpSender::setTrack: ${e.toString()}';
    }
  }

  @override
  Future<void> setStreams(List<MediaStream> streams) async {
    try {
      final nativeStreams = streams.cast<MediaStreamWeb>();
      _jsRtpSender.callMethodVarArgs(
          'setStreams'.toJS, nativeStreams.map((e) => e.jsStream).toList());
    } catch (e) {
      throw 'Unable to RTCRtpSender::setStreams: ${e.toString()}';
    }
  }

  @override
  RTCRtpParameters get parameters {
    var parameters = _jsRtpSender.getParameters();
    return RTCRtpParametersWeb.fromJsObject(parameters);
  }

  @override
  Future<bool> setParameters(RTCRtpParameters parameters) async {
    try {
      // The browser requires the parameter object returned by the last
      // getParameters() call; only the encodings may be swapped out.
      var oldParameters = _jsRtpSender.getParameters();
      oldParameters.encodings =
          (parameters.encodings?.map((e) => e.toMap()).toList().jsify() ??
              [].jsify()) as JSArray;
      await _jsRtpSender.setParameters(oldParameters).toDart;
      return Future.value(true);
    } catch (e) {
      throw 'Unable to RTCRtpSender::setParameters: ${e.toString()}';
    }
  }

  @override
  Future<List<StatsReport>> getStats() async {
    var stats = await _jsRtpSender.getStats().toDart;
    var report = <StatsReport>[];
    stats.callMethodVarArgs('forEach'.toJS, [
      (JSObject value, JSAny key) {
        var map = value.dartify() as LinkedHashMap;
        var values = <String, dynamic>{};
        for (var entry in map.entries) {
          values[(entry.key as JSString).toDart] = entry.value;
        }
        report.add(StatsReport(
            value.getProperty<JSString>('id'.toJS).toDart,
            value.getProperty<JSString>('type'.toJS).toDart,
            value.getProperty<JSNumber>('timestamp'.toJS).toDartDouble,
            values));
      }.toJS,
    ]);
    return report;
  }

  @override
  MediaStreamTrack? get track {
    if (null != _jsRtpSender.track) {
      return MediaStreamTrackWeb(_jsRtpSender.track!);
    }
    return null;
  }

  @override
  String get senderId => '${_jsRtpSender.hashCode}';

  @override
  bool get ownsTrack => _ownsTrack;

  @override
  RTCDTMFSender get dtmfSender => RTCDTMFSenderWeb(_jsRtpSender.dtmf!);

  @override
  Future<void> dispose() async {}

  web.RTCRtpSender get jsRtpSender => _jsRtpSender;
}

// ---------------------------------------------------------------------------
// lib/src/rtc_rtp_transceiver_impl.dart (head)
// ---------------------------------------------------------------------------
import 'dart:async';
import 'dart:js_interop';
import 'dart:js_interop_unsafe';

import 'package:web/web.dart' as web;
import 'package:webrtc_interface/webrtc_interface.dart';

import 'media_stream_impl.dart';
import 'rtc_rtp_receiver_impl.dart';
import 'rtc_rtp_sender_impl.dart';

/// Converts a list of plain maps into [RTCRtpEncoding] objects.
List<RTCRtpEncoding> listToRtpEncodings(List<Map<String, dynamic>> list) {
  return list.map((e) => RTCRtpEncoding.fromMap(e)).toList();
}
@Deprecated('RTCRtpTransceiverInitWeb isn\'t referenced from anywhere.')
class RTCRtpTransceiverInitWeb extends RTCRtpTransceiverInit {
  RTCRtpTransceiverInitWeb(TransceiverDirection direction,
      List<MediaStream> streams, List<RTCRtpEncoding> sendEncodings)
      : super(
            direction: direction,
            streams: streams,
            sendEncodings: sendEncodings);

  factory RTCRtpTransceiverInitWeb.fromMap(Map<String, dynamic> map) {
    if (map['direction'] == null) {
      throw Exception('You must provide a direction');
    }
    if (map['streams'] == null) {
      throw Exception('You must provide the streams');
    }

    return RTCRtpTransceiverInitWeb(
        typeStringToRtpTransceiverDirection[map['direction']]!,
        (map['streams'] as List<MediaStream>).map((e) => e).toList(),
        listToRtpEncodings(map['sendEncodings']));
  }

  Map<String, dynamic> toMap() => {
        'direction': typeRtpTransceiverDirectionToString[direction],
        if (streams != null) 'streamIds': streams!.map((e) => e.id).toList(),
        if (sendEncodings != null)
          'sendEncodings': sendEncodings!.map((e) => e.toMap()).toList(),
      };
}

/// Converts an [RTCRtpTransceiverInit] into the plain JS object shape
/// expected by `RTCPeerConnection.addTransceiver`.
extension RTCRtpTransceiverInitWebExt on RTCRtpTransceiverInit {
  JSObject toJsObject() => {
        'direction': typeRtpTransceiverDirectionToString[direction],
        if (streams != null)
          'streams':
              streams!.map((e) => (e as MediaStreamWeb).jsStream).toList(),
        if (sendEncodings != null)
          'sendEncodings': sendEncodings!.map((e) => e.toMap()).toList(),
      }.jsify() as JSObject;
}

/// Web implementation of [RTCRtpTransceiver] wrapping a browser transceiver.
class RTCRtpTransceiverWeb extends RTCRtpTransceiver {
  // NOTE(review): the second positional argument is accepted for interface
  // parity but never stored or read — confirm whether it can be dropped.
  RTCRtpTransceiverWeb(this._jsTransceiver, _peerConnectionId);

  factory RTCRtpTransceiverWeb.fromJsObject(web.RTCRtpTransceiver jsTransceiver,
      {String? peerConnectionId}) {
    var transceiver = RTCRtpTransceiverWeb(jsTransceiver, peerConnectionId);
    return transceiver;
  }

  web.RTCRtpTransceiver _jsTransceiver;

  @override
  Future<TransceiverDirection?> getCurrentDirection() async =>
      typeStringToRtpTransceiverDirection[_jsTransceiver.currentDirection];

  @override
  Future<TransceiverDirection> getDirection() async =>
      typeStringToRtpTransceiverDirection[_jsTransceiver.direction]!;

  @override
  String get mid => _jsTransceiver.mid!;

  @override
  RTCRtpSender get sender =>
      RTCRtpSenderWeb.fromJsSender(_jsTransceiver.sender);

  @override
  RTCRtpReceiver get receiver => RTCRtpReceiverWeb(_jsTransceiver.receiver);

  // 'stoped' (sic) is the spelling mandated by the webrtc_interface base
  // class, so it cannot be corrected here.
  @override
  bool get stoped =>
      _jsTransceiver.getProperty<JSBoolean>('stopped'.toJS).toDart;

  @override
  String get transceiverId => mid;

  @override
  Future<void> setDirection(TransceiverDirection direction) async {
    try {
      _jsTransceiver.direction =
          typeRtpTransceiverDirectionToString[direction]!;
    } catch (e) {
      throw 'Unable to RTCRtpTransceiver::setDirection: ${e.toString()}';
    }
  }

  @override
  Future<void> stop() async {
    try {
      _jsTransceiver.stop();
    } catch (e) {
      // BUG FIX: was `${e..toString()}` — a stray cascade operator.
      throw 'Unable to RTCRtpTransceiver::stop: ${e.toString()}';
    }
  }

  @override
  Future<void> setCodecPreferences(List<RTCRTPCodec> codecs) async {
    try {
      _jsTransceiver.setCodecPreferences(codecs
          .map((e) => e.toMap().jsify() as web.RTCRtpCodec)
          .toList()
          .toJS);
    } catch (e) {
      // BUG FIX: was `${e..toString()}` — a stray cascade operator.
      throw 'Unable to RTCRtpTransceiver::setCodecPreferences: ${e.toString()}';
    }
  }
}
// ---------------------------------------------------------------------------
// lib/src/rtc_track_event_impl.dart
// ---------------------------------------------------------------------------
import 'package:webrtc_interface/webrtc_interface.dart';

/// Web implementation of [RTCTrackEvent]; a plain data carrier that simply
/// forwards everything to the base class.
class RTCTrackEventWeb extends RTCTrackEvent {
  RTCTrackEventWeb(
      {super.receiver,
      required super.streams,
      required super.track,
      super.transceiver});
}

// ---------------------------------------------------------------------------
// lib/src/rtc_transform_stream.dart
// ---------------------------------------------------------------------------
import 'dart:js_interop';
import 'dart:js_interop_unsafe';

import 'package:web/web.dart';

/// Exposes the non-standard `handled` flag set on a script transformer to
/// mark it as consumed.
extension PropsRTCRtpScriptTransformer on RTCRtpScriptTransformer {
  set handled(bool value) {
    setProperty('handled'.toJS, value.toJS);
  }
}

// ---------------------------------------------------------------------------
// lib/src/rtc_video_element.dart
// ---------------------------------------------------------------------------
import 'dart:js_interop';

import 'package:web/web.dart' as web;

import '../dart_webrtc.dart';

/// Thin wrapper around an HTML `<video>` element that accepts the package's
/// [MediaStream] type as its source.
class RTCVideoElement {
  RTCVideoElement() {
    _element = web.HTMLVideoElement()
      ..autoplay = true
      ..muted = false
      ..controls = false
      ..style.objectFit = 'contain'
      ..style.border = 'none';

    // Allows Safari iOS to play the video inline
    _element.setAttribute('playsinline', 'true');
  }

  // Cached so srcObject can be read back as the wrapper type.
  MediaStream? _stream;

  late web.HTMLVideoElement _element;
  web.HTMLVideoElement get htmlElement => _element;

  /// contain or cover
  set objectFit(String fit) => _element.style.objectFit = fit;

  set srcObject(MediaStream? stream) {
    _stream = stream;
    _element.srcObject =
        stream == null ? null : (stream as MediaStreamWeb).jsStream;
  }

  MediaStream? get srcObject => _stream;

  int get videoWidth => _element.videoWidth;

  int get videoHeight => _element.videoHeight;

  Stream get onEnded => _element.onEnded;

  Stream get onError => _element.onError;

  Stream get onCanPlay => _element.onCanPlay;

  Stream get onResize => _element.onResize;

  dynamic get error => _element.error;

  set muted(bool v) => _element.muted = v;
  bool get muted => _element.muted;

  set autoplay(bool v) => _element.autoplay = v;
  bool get autoplay => _element.autoplay;

  set controls(bool v) => _element.controls = v;
  bool get controls => _element.controls;

  void load() => _element.load();

  void removeAttribute(String name) => _element.removeAttribute(name);

  Future setSinkId(String sinkId) => _element.setSinkId(sinkId).toDart;
}

// ---------------------------------------------------------------------------
// lib/src/utils.dart
// ---------------------------------------------------------------------------
import 'dart:math';

import 'package:web/web.dart' as web;

/// True when the user agent matches a known mobile platform string
/// (matched case-insensitively).
bool get isMobile {
  const mobileKeywords = [
    'Android',
    'webOS',
    'iPhone',
    'iPad',
    'iPod',
    'BlackBerry',
    'Windows Phone'
  ];
  final userAgent = web.window.navigator.userAgent;
  return mobileKeywords.any(
      (device) => userAgent.contains(RegExp(device, caseSensitive: false)));
}

/// Returns a random lowercase-alphanumeric string of [length] characters.
String randomString(int length) {
  const chars = 'abcdefghijklmnopqrstuvwxyz0123456789';
  final rnd = Random();
  return String.fromCharCodes(Iterable.generate(
      length, (_) => chars.codeUnitAt(rnd.nextInt(chars.length))));
}
/pubspec.yaml: -------------------------------------------------------------------------------- 1 | name: dart_webrtc 2 | description: Use the dart/js library to re-wrap the webrtc js interface of the browser, to adapted common browsers. 3 | version: 1.5.4 4 | homepage: https://github.com/flutter-webrtc/dart-webrtc 5 | 6 | environment: 7 | sdk: '>=3.3.0 <4.0.0' 8 | 9 | dependencies: 10 | collection: ^1.17.1 11 | js: ">0.6.0 <0.8.0" 12 | logging: ^1.1.0 13 | meta: ^1.8.0 14 | synchronized: ^3.0.0+3 15 | web: ^1.0.0 16 | webrtc_interface: ^1.2.2+hotfix.2 17 | 18 | dev_dependencies: 19 | build_runner: ^2.3.3 20 | build_web_compilers: 21 | http: ">0.13.0 <1.3.0" 22 | import_sorter: ^4.6.0 23 | pedantic: ^1.9.0 24 | protoo_client: ^0.3.0 25 | test: ^1.15.4 -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "config:base" 4 | ] 5 | } 6 | -------------------------------------------------------------------------------- /web/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flutter-webrtc/dart-webrtc/233f617988cf507c6d3b1fc675b9edbfb713f4a3/web/favicon.ico -------------------------------------------------------------------------------- /web/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | dart_webrtc 10 | 11 | 12 | 13 | 14 | 15 | 16 |
17 |
18 |
// ---------------------------------------------------------------------------
// web/main.dart — loopback demo exercising E2EE frame cryptors.
// ---------------------------------------------------------------------------
import 'dart:async';
import 'dart:typed_data';

import 'package:dart_webrtc/dart_webrtc.dart';
import 'package:web/web.dart' as web;

void main() {
  loopBackTest();
}

/// Frame cryptors attached to the sending / receiving peer connection;
/// global so the key-rotation steps below can reach them.
List<FrameCryptor> pc1FrameCryptors = [];
List<FrameCryptor> pc2FrameCryptors = [];

void loopBackTest() async {
  var local = web.document.querySelector('#local');
  var localVideo = RTCVideoElement();
  local!.append(localVideo.htmlElement);

  var remote = web.document.querySelector('#remote');
  var remoteVideo = RTCVideoElement();
  remote!.append(remoteVideo.htmlElement);

  var acaps = await getRtpSenderCapabilities('audio');
  print('sender audio capabilities: ${acaps.toMap()}');

  var vcaps = await getRtpSenderCapabilities('video');
  print('sender video capabilities: ${vcaps.toMap()}');

  var enableE2EE = true;

  var acapabilities = await getRtpReceiverCapabilities('audio');
  print('receiver audio capabilities: ${acapabilities.toMap()}');

  var vcapabilities = await getRtpReceiverCapabilities('video');
  print('receiver video capabilities: ${vcapabilities.toMap()}');

  var keyProviderOptions = KeyProviderOptions(
      sharedKey: false,
      ratchetWindowSize: 16,
      failureTolerance: -1,
      ratchetSalt: Uint8List.fromList('testSalt'.codeUnits),
      discardFrameWhenCryptorNotReady: true);
  var keyProviderForSender =
      await frameCryptorFactory.createDefaultKeyProvider(keyProviderOptions);

  var keyProviderForReceiver =
      await frameCryptorFactory.createDefaultKeyProvider(keyProviderOptions);

  await keyProviderForSender.setKey(
      participantId: 'sender',
      index: 0,
      key: Uint8List.fromList('testkey'.codeUnits));

  await keyProviderForReceiver.setKey(
      participantId: 'receiver',
      index: 0,
      key: Uint8List.fromList('testkey'.codeUnits));

  var pc2 =
      await createPeerConnection({'encodedInsertableStreams': enableE2EE});

  pc2.onTrack = (event) async {
    if (event.track.kind == 'video') {
      remoteVideo.srcObject = event.streams[0];
    }
    if (enableE2EE) {
      var fc = await frameCryptorFactory.createFrameCryptorForRtpReceiver(
          participantId: 'receiver',
          receiver: event.receiver!,
          algorithm: Algorithm.kAesGcm,
          keyProvider: keyProviderForReceiver);
      if (keyProviderOptions.discardFrameWhenCryptorNotReady) {
        // Enable late so early undecryptable frames are discarded.
        Timer(Duration(seconds: 1), () {
          fc.setEnabled(true);
        });
      } else {
        await fc.setEnabled(true);
      }

      fc.onFrameCryptorStateChanged = (id, state) {
        print('receiver: frameCryptorStateChanged: $state');
      };

      await fc.setKeyIndex(0);
      if (event.track.kind == 'video') {
        await fc.updateCodec('vp8');
      }
      pc2FrameCryptors.add(fc);
    }
  };
  pc2.onConnectionState = (state) {
    print('connectionState $state');
  };

  pc2.onIceConnectionState = (state) {
    print('iceConnectionState $state');
  };

  var pc1 =
      await createPeerConnection({'encodedInsertableStreams': enableE2EE});

  pc1.onIceCandidate = (candidate) => pc2.addCandidate(candidate);
  pc2.onIceCandidate = (candidate) => pc1.addCandidate(candidate);

  var stream =
      await navigator.mediaDevices.getUserMedia({'audio': true, 'video': true});
  // BUG FIX: the log said `getDisplayMedia:` although getUserMedia is called.
  print('getUserMedia: stream.id => ${stream.id}');

  navigator.mediaDevices.ondevicechange = (event) async {
    var list = await navigator.mediaDevices.enumerateDevices();
    print('ondevicechange: ');
    list.where((element) => element.kind == 'audiooutput').forEach((e) {
      print('${e.runtimeType}: ${e.label}, type => ${e.kind}');
    });
  };

  // BUG FIX: this used `stream.getTracks().forEach((track) async ...)`.
  // forEach never awaits async callbacks, so the offer below could be
  // created before all tracks and their cryptors were attached.
  for (final track in stream.getTracks()) {
    var sender = await pc1.addTrack(track, stream);
    if (enableE2EE) {
      var fc = await frameCryptorFactory.createFrameCryptorForRtpSender(
          participantId: 'sender',
          sender: sender,
          algorithm: Algorithm.kAesGcm,
          keyProvider: keyProviderForSender);
      await fc.setEnabled(true);
      await fc.setKeyIndex(0);
      if (track.kind == 'video') {
        await fc.updateCodec('vp8');
      }
      fc.onFrameCryptorStateChanged = (id, state) {
        print('sender: frameCryptorStateChanged: $state');
      };
      pc1FrameCryptors.add(fc);
    }
  }

  await pc1.createDataChannel(
      'label', RTCDataChannelInit()..binaryType = 'binary');
  var offer = await pc1.createOffer();

  await pc2.addTransceiver(
      kind: RTCRtpMediaType.RTCRtpMediaTypeAudio,
      init: RTCRtpTransceiverInit(direction: TransceiverDirection.RecvOnly));
  await pc2.addTransceiver(
      kind: RTCRtpMediaType.RTCRtpMediaTypeVideo,
      init: RTCRtpTransceiverInit(direction: TransceiverDirection.RecvOnly));

  await pc1.setLocalDescription(offer);
  await pc2.setRemoteDescription(offer);
  var answer = await pc2.createAnswer({});
  await pc2.setLocalDescription(answer);
  await pc1.setRemoteDescription(answer);

  localVideo.muted = true;
  localVideo.srcObject = stream;

  var key2 =
      await keyProviderForSender.ratchetKey(index: 0, participantId: 'sender');
  print('ratchetKey key2: ${key2.toList()}');

  await keyProviderForSender.setKey(
      index: 1,
      participantId: 'sender',
      key: Uint8List.fromList('testkey3'.codeUnits));

  await keyProviderForReceiver.setKey(
      index: 1,
      participantId: 'receiver',
      key: Uint8List.fromList('testkey3'.codeUnits));

  // BUG FIX: was `forEach((element) async ...)` which never awaited the
  // index switches; a plain loop keeps key rotation ordered.
  for (final fc in [...pc1FrameCryptors, ...pc2FrameCryptors]) {
    await fc.setKeyIndex(1);
  }

  await keyProviderForSender.setKey(
      index: 2,
      participantId: 'sender',
      key: Uint8List.fromList('testkey4'.codeUnits));

  await keyProviderForReceiver.setKey(
      index: 2,
      participantId: 'receiver',
      key: Uint8List.fromList('testkey4'.codeUnits));

  for (final fc in [...pc1FrameCryptors, ...pc2FrameCryptors]) {
    await fc.setKeyIndex(2);
  }

  var key =
      await keyProviderForSender.ratchetKey(index: 2, participantId: 'sender');
  print('ratchetKey key: ${key.toList()}');

  // Periodically dump sender/receiver stats from both ends of the loopback.
  Timer.periodic(Duration(seconds: 1), (timer) async {
    var senders = await pc1.getSenders();
    var receivers = await pc2.getReceivers();

    print('senders: ${senders.length}');
    print('receivers: ${receivers.length}');

    senders.forEach((sender) {
      sender.getStats().then((stats) {
        print(
            'sender stats: ${stats.map((e) => 'id: ${e.id}, type: ${e.type}, timestamp: ${e.timestamp}, values: ${e.values.toString()} ')}');
      });
    });

    receivers.forEach((receiver) {
      receiver.getStats().then((stats) {
        print(
            'receiver stats: ${stats.map((e) => 'id: ${e.id}, type: ${e.type}, timestamp: ${e.timestamp}, values: ${e.values.toString()} ')}');
      });
    });
  });
}
// ---------------------------------------------------------------------------
// web/p2p/p2p.dart — minimal P2P demo wired to the demo signaling server.
// ---------------------------------------------------------------------------
import 'package:dart_webrtc/dart_webrtc.dart';
import 'package:js/js.dart';
import 'package:test/test.dart';
import 'package:web/web.dart' as web;

import 'signaling.dart';

void main() {
  // Two smoke tests confirming the test harness itself runs in the browser.
  test('String.split() splits the string on the delimiter', () {
    var string = 'foo,bar,baz';
    expect(string.split(','), equals(['foo', 'bar', 'baz']));
  });

  test('String.trim() removes surrounding whitespace', () {
    var string = ' foo ';
    expect(string.trim(), equals('foo'));
  });

  final signaling = Signaling('demo.cloudwebrtc.com');

  // Mount a video element for each side of the call.
  final localContainer = web.document.querySelector('#local');
  final localVideo = RTCVideoElement();
  localContainer?.append(localVideo.htmlElement);

  final remoteContainer = web.document.querySelector('#remote');
  final remoteVideo = RTCVideoElement();
  remoteContainer?.append(remoteVideo.htmlElement);

  // Attach incoming streams to the corresponding video element.
  signaling.onLocalStream = allowInterop((MediaStream stream) {
    localVideo.srcObject = stream;
  });

  signaling.onAddRemoteStream = allowInterop((MediaStream stream) {
    remoteVideo.srcObject = stream;
  });

  signaling.connect();
  signaling.onStateChange = (SignalingState state) {
    web.document.querySelector('#output')?.text = state.toString();
    // On hangup, detach both streams.
    if (state == SignalingState.CallStateBye) {
      localVideo.srcObject = null;
      remoteVideo.srcObject = null;
    }
  };
}
library random_string;

import 'dart:math';

const ASCII_START = 33;
const ASCII_END = 126;
const NUMERIC_START = 48;
const NUMERIC_END = 57;
const LOWER_ALPHA_START = 97;
const LOWER_ALPHA_END = 122;
const UPPER_ALPHA_START = 65;
const UPPER_ALPHA_END = 90;

/// Generates a random integer in the inclusive range [from]..[to].
/// Throws if [from] > [to].
int randomBetween(int from, int to) {
  if (from > to) throw Exception('$from cannot be > $to');
  // BUG FIX: the old `((to - from) * rand.nextDouble()).toInt() + from`
  // could never return `to` (nextDouble() < 1.0, then truncation), so e.g.
  // randomNumeric() never produced the digit '9'. nextInt gives an unbiased
  // draw over the full inclusive range documented above.
  return Random().nextInt(to - from + 1) + from;
}

/// Generates a random string of [length] with characters
/// between ascii [from] to [to] (inclusive).
/// Defaults to characters of ascii '!' to '~'.
String randomString(int length, {int from = ASCII_START, int to = ASCII_END}) {
  return String.fromCharCodes(
      List.generate(length, (index) => randomBetween(from, to)));
}

/// Generates a random string of [length] with only numeric characters.
String randomNumeric(int length) =>
    randomString(length, from: NUMERIC_START, to: NUMERIC_END);
/*
/// Generates a random string of [length] with only alpha characters.
String randomAlpha(int length) {
  var lowerAlphaLength = randomBetween(0, length);
  var upperAlphaLength = length - lowerAlphaLength;
  var lowerAlpha = randomString(lowerAlphaLength,
      from: LOWER_ALPHA_START, to: LOWER_ALPHA_END);
  var upperAlpha = randomString(upperAlphaLength,
      from: UPPER_ALPHA_START, to: UPPER_ALPHA_END);
  return randomMerge(lowerAlpha, upperAlpha);
}

/// Generates a random string of [length] with alpha-numeric characters.
String randomAlphaNumeric(int length) {
  var alphaLength = randomBetween(0, length);
  var numericLength = length - alphaLength;
  var alpha = randomAlpha(alphaLength);
  var numeric = randomNumeric(numericLength);
  return randomMerge(alpha, numeric);
}

/// Merge [a] with [b] and scramble characters.
String randomMerge(String a, String b) {
  var mergedCodeUnits = new List.from("$a$b".codeUnits);
  mergedCodeUnits.shuffle();
  return new String.fromCharCodes(mergedCodeUnits);
}*/
--------------------------------------------------------------------------------
/web/p2p/signaling.dart:
--------------------------------------------------------------------------------
import 'dart:async';
import 'dart:convert';

import 'package:dart_webrtc/dart_webrtc.dart';

import 'random_string.dart';
import 'simple_websocket.dart';

/// Call/connection lifecycle states surfaced through [Signaling.onStateChange].
enum SignalingState {
  CallStateNew,
  CallStateRinging,
  CallStateInvite,
  CallStateConnected,
  CallStateBye,
  ConnectionOpen,
  ConnectionClosed,
  ConnectionError,
}

/*
 * callbacks for Signaling API.
 */
typedef SignalingStateCallback = void Function(SignalingState state);
typedef StreamStateCallback = void Function(MediaStream stream);
typedef OtherEventCallback = void Function(dynamic event);
typedef DataChannelMessageCallback = void Function(
    RTCDataChannel dc, RTCDataChannelMessage data);
typedef DataChannelCallback = void Function(RTCDataChannel dc);

/// P2P call signaling client: talks JSON over a WebSocket to the demo
/// signaling server and manages one RTCPeerConnection per remote peer.
class Signaling {
  Signaling(this._host);

  final JsonEncoder _encoder = JsonEncoder();

  /// Random 6-digit id identifying this client on the signaling server.
  final String _selfId = randomNumeric(6);

  /// Signaling socket; null until [connect] has been called.
  /// FIX: was `late SimpleWebSocket? _socket;` — a `late` field that is only
  /// assigned inside [connect] made [close] and [_send] throw a
  /// LateInitializationError when invoked before [connect]. A plain nullable
  /// field (default null) combined with the existing `?.` call sites is safe.
  SimpleWebSocket? _socket;

  var _sessionId;
  final _host;
  final _port = 8086;

  // peer id -> RTCPeerConnection / RTCDataChannel.
  final _peerConnections = {};
  final _dataChannels = {};

  // Candidates received before the matching peer connection exists; flushed
  // in the 'offer' handler once setRemoteDescription has run.
  final _remoteCandidates = [];

  var _iceServers = <Map<String, dynamic>>[];
  var _turnCredential;

  MediaStream? _localStream;

  /// FIX: was `late List _remoteStreams;` with no initializer and no
  /// assignment anywhere in this class, so the `removeWhere` call in
  /// `onRemoveStream` below always threw a LateInitializationError.
  final List<MediaStream> _remoteStreams = [];

  SignalingStateCallback? onStateChange;
  StreamStateCallback? onLocalStream;
  StreamStateCallback? onAddRemoteStream;
  StreamStateCallback? onRemoveRemoteStream;
  OtherEventCallback? onPeersUpdate;
  DataChannelMessageCallback? onDataChannelMessage;
  DataChannelCallback? onDataChannel;

  /// Stops local capture, closes every peer connection and the socket.
  void close() {
    if (_localStream != null) {
      _localStream?.getTracks().forEach((element) {
        element.stop();
      });
      _localStream = null;
    }

    _peerConnections.forEach((key, pc) {
      pc.close();
    });
    _socket?.close();
  }

  void switchCamera() {
    if (_localStream != null) {
      // TODO(cloudwebrtc): _localStream.getVideoTracks()[0].switchCamera();
    }
  }

  /// Starts an outgoing call to [peer_id]. [media] selects 'data' for a
  /// data-channel-only session; [use_screen] selects screen capture.
  void invite(String peer_id, String media, use_screen) {
    _sessionId = _selfId + '-' + peer_id;

    onStateChange?.call(SignalingState.CallStateNew);

    _createPeerConnection(peer_id, media, use_screen).then((pc) {
      _peerConnections[peer_id] = pc;
      if (media == 'data') {
        _createDataChannel(peer_id, pc);
      }
      _createOffer(peer_id, pc, media);
    });
  }

  /// Tells the server (and thus the remote peer) that we hang up.
  void bye() {
    _send('bye', {
      'session_id': _sessionId,
      'from': _selfId,
    });
  }

  /// Dispatches one decoded signaling message by its 'type' field.
  void onMessage(message) async {
    Map mapData = message;
    var data = mapData['data'];

    switch (mapData['type']) {
      case 'peers':
        List peers = data;
        var event = {};
        event['self'] = _selfId;
        event['peers'] = peers;
        onPeersUpdate?.call(event);
        break;
      case 'offer':
        var id = data['from'];
        var description = data['description'];
        var media = data['media'];
        var sessionId = data['session_id'];
        _sessionId = sessionId;

        onStateChange?.call(SignalingState.CallStateNew);

        var pc = await _createPeerConnection(id, media, false);
        _peerConnections[id] = pc;
        await pc.setRemoteDescription(
            RTCSessionDescription(description['sdp'], description['type']));
        await _createAnswer(id, pc, media);
        // Flush candidates that arrived before this connection existed.
        if (_remoteCandidates.isNotEmpty) {
          _remoteCandidates.forEach((candidate) async {
            await pc.addCandidate(candidate);
          });
          _remoteCandidates.clear();
        }

        break;
      case 'answer':
        var id = data['from'];
        var description = data['description'];

        var pc = _peerConnections[id];
        if (pc != null) {
          await pc.setRemoteDescription(
              RTCSessionDescription(description['sdp'], description['type']));
        }

        break;
      case 'candidate':
        var id = data['from'];
        var candidateMap = data['candidate'];
        var pc = _peerConnections[id];
        var candidate = RTCIceCandidate(candidateMap['candidate'],
            candidateMap['sdpMid'], candidateMap['sdpMLineIndex']);
        if (pc != null) {
          await pc.addCandidate(candidate);
        } else {
          // Peer connection not created yet; buffer for the 'offer' handler.
          _remoteCandidates.add(candidate);
        }

        break;
      case 'leave':
        var id = data;
        var pc = _peerConnections.remove(id);
        _dataChannels.remove(id);

        if (_localStream != null) {
          _localStream!.getTracks().forEach((element) {
            element.stop();
          });
          _localStream = null;
        }

        if (pc != null) {
          await pc.close();
        }
        _sessionId = null;
        onStateChange?.call(SignalingState.CallStateBye);

        break;
      case 'bye':
        var to = data['to'];
        var sessionId = data['session_id'];
        print('bye: ' + sessionId);

        if (_localStream != null) {
          _localStream!.getTracks().forEach((element) {
            element.stop();
          });
          _localStream = null;
        }

        var pc = _peerConnections[to];
        if (pc != null) {
          await pc.close();
          _peerConnections.remove(to);
        }

        var dc = _dataChannels[to];
        if (dc != null) {
          await dc.close();
          _dataChannels.remove(to);
        }

        _sessionId = null;
        onStateChange?.call(SignalingState.CallStateBye);

        break;
      case 'keepalive':
        print('keepalive response!');

        break;
      default:
        break;
    }
  }

  /// Opens the signaling WebSocket, fetching TURN credentials first
  /// (best-effort: a failure falls back to the default STUN server).
  Future<void> connect() async {
    var url = 'https://$_host:$_port/ws';
    _socket = SimpleWebSocket(url);

    print('connect to $url');

    if (_turnCredential == null) {
      try {
        _turnCredential = await getTurnCredential(_host, _port);
        _iceServers = [
          {
            'urls': _turnCredential['uris'][0],
            'username': _turnCredential['username'],
            'credential': _turnCredential['password']
          }
        ];
      } catch (e) {
        print('error: ${e.toString()}');
      }
    }

    _socket?.onOpen = () {
      print('onOpen');
      onStateChange?.call(SignalingState.ConnectionOpen);
      // NOTE(review): 'broswer' looks like a typo for 'browser', but it is a
      // value the server receives — confirm server expectations before fixing.
      _send('new',
          {'name': 'dart_webrtc', 'id': _selfId, 'user_agent': 'broswer'});
    };

    _socket?.onMessage = (message) {
      print('Received data: ' + message);
      var decoder = JsonDecoder();
      onMessage.call(decoder.convert(message));
    };

    _socket?.onClose = (int code, String reason) {
      print('Closed by server [$code => $reason]!');
      onStateChange?.call(SignalingState.ConnectionClosed);
    };

    await _socket?.connect();
  }

  /// Captures the local stream (screen when [user_screen], else mic+camera)
  /// and reports it through [onLocalStream].
  Future<MediaStream> createStream(media, user_screen) async {
    var stream = user_screen
        ? await navigator.mediaDevices.getDisplayMedia({})
        : await navigator.mediaDevices.getUserMedia({
            'audio': true,
            'video': {
              'mandatory': {
                'minWidth':
                    '640', // Provide your own width, height and frame rate here
                'minHeight': '480',
                'minFrameRate': '30',
              },
              'facingMode': 'user',
              'optional': [],
            }
          });

    onLocalStream?.call(stream);

    return stream;
  }

  /// Builds a peer connection toward [id], attaches the local stream (unless
  /// this is a data-only session) and wires up all event callbacks.
  Future<RTCPeerConnection> _createPeerConnection(
      id, media, user_screen) async {
    if (media != 'data') _localStream = await createStream(media, user_screen);
    var pc = await createPeerConnection({
      'iceServers': _iceServers.isNotEmpty
          ? _iceServers
          : [
              {'urls': 'stun:stun.l.google.com:19302'}
            ]
    });
    if (media != 'data') await pc.addStream(_localStream!);

    pc.onIceCandidate = (RTCIceCandidate? candidate) {
      try {
        // A null candidate signals end-of-candidates; nothing to forward.
        if (candidate != null) {
          print(candidate.candidate);
          _send('candidate', {
            'to': id,
            'from': _selfId,
            'candidate': {
              'sdpMLineIndex': candidate.sdpMLineIndex,
              'sdpMid': candidate.sdpMid,
              'candidate': candidate.candidate,
            },
            'session_id': _sessionId,
          });
        }
      } catch (e) {
        print(e.toString());
      }
    };

    pc.onIceConnectionState = (state) {
      print(state);
    };

    pc.onAddStream = (MediaStream stream) {
      onAddRemoteStream?.call(stream);
    };

    pc.onRemoveStream = (MediaStream stream) {
      onRemoveRemoteStream?.call(stream);
      _remoteStreams.removeWhere((it) => it.id == stream.id);
    };

    pc.onDataChannel = (RTCDataChannel channel) {
      _addDataChannel(id, channel);
    };

    return pc;
  }

  /// Registers [channel] for peer [id] and notifies [onDataChannel].
  void _addDataChannel(id, RTCDataChannel channel) {
    channel.onMessage = (RTCDataChannelMessage msg) {
      onDataChannelMessage?.call(channel, msg);
    };
    _dataChannels[id] = channel;
    onDataChannel?.call(channel);
  }

  /// Creates our side's data channel on [pc].
  void _createDataChannel(id, RTCPeerConnection pc,
      {String label = 'fileTransfer'}) async {
    var dataChannelDict = RTCDataChannelInit();
    var channel = await pc.createDataChannel(label, dataChannelDict);
    _addDataChannel(id, channel);
  }

  /// Creates and sends an SDP offer to [id] via the signaling server.
  void _createOffer(String id, RTCPeerConnection pc, String media) async {
    try {
      var offer = await pc.createOffer({
        'offerToReceiveAudio': media == 'data' ? false : true,
        'offerToReceiveVideo': media == 'data' ? false : true,
      });
      //print('type => ${offer.type}, sdp => ${offer.sdp}');
      await pc.setLocalDescription(offer);
      _send('offer', {
        'to': id,
        'from': _selfId,
        'description': {'sdp': offer.sdp, 'type': offer.type},
        'session_id': _sessionId,
        'media': media,
      });
    } catch (e) {
      print(e.toString());
    }
  }

  /// Creates and sends an SDP answer to [id] via the signaling server.
  Future<void> _createAnswer(String id, RTCPeerConnection pc, media) async {
    try {
      var answer = await pc.createAnswer();
      await pc.setLocalDescription(answer);
      _send('answer', {
        'to': id,
        'from': _selfId,
        'description': {'sdp': answer.sdp, 'type': answer.type},
        'session_id': _sessionId,
      });
    } catch (e) {
      print(e.toString());
    }
  }

  /// JSON-encodes {type, data} and writes it to the socket.
  void _send(event, data) {
    var request = {};
    request['type'] = event;
    request['data'] = data;
    _socket?.send(_encoder.convert(request));
  }
}
--------------------------------------------------------------------------------
/web/p2p/simple_websocket.dart:
--------------------------------------------------------------------------------
import 'dart:convert';
import 'package:http/http.dart' as http;
import 'package:web/web.dart' as web;

typedef OnMessageCallback = void Function(dynamic msg);
typedef OnCloseCallback = void Function(int
code, String reason);
typedef OnOpenCallback = void Function();

/// Thin callback-style wrapper around the browser WebSocket.
/// Rewrites an https:// URL to wss:// at construction time.
class SimpleWebSocket {
  SimpleWebSocket(this._url) {
    _url = _url.replaceAll('https:', 'wss:');
  }

  String _url;
  var _socket;
  OnOpenCallback? onOpen;
  OnMessageCallback? onMessage;
  OnCloseCallback? onClose;

  /// Opens the socket and forwards open/message/close events to the
  /// registered callbacks; a constructor failure is reported as close(500).
  Future connect() async {
    try {
      _socket = web.WebSocket(_url);
      _socket.onOpen.listen((e) => onOpen?.call());
      _socket.onMessage.listen((e) => onMessage?.call(e.data));
      _socket.onClose.listen((e) => onClose?.call(e.code, e.reason));
    } catch (e) {
      onClose?.call(500, e.toString());
    }
  }

  /// Sends [data] if the socket is open; otherwise logs and drops it.
  void send(data) {
    if (_socket == null || _socket.readyState != web.WebSocket.OPEN) {
      print('WebSocket not connected, message $data not sent');
      return;
    }
    _socket.send(data);
    print('send: $data');
  }

  void close() {
    _socket?.close();
  }
}

/// Fetches TURN credentials from the demo server; returns an empty map on
/// any non-200 response.
Future getTurnCredential(String host, int port) async {
  var url = 'https://$host:$port/api/turn?service=turn&username=flutter-webrtc';
  final res = await http.get(Uri.parse(url));
  if (res.statusCode != 200) {
    return {};
  }
  var data = json.decode(res.body);
  print('getTurnCredential:response => $data.');
  return data;
}
--------------------------------------------------------------------------------
/web/styles.css:
--------------------------------------------------------------------------------
@import url(https://fonts.googleapis.com/css?family=Roboto);

html, body {
  width: 100%;
  height: 100%;
  margin: 0;
  padding: 0;
  font-family: 'Roboto', sans-serif;
}

#output {
  padding: 20px;
  text-align: center;
}
--------------------------------------------------------------------------------
/web/test_media_devices.dart:
--------------------------------------------------------------------------------
import 'package:dart_webrtc/dart_webrtc.dart';
import 'package:test/test.dart';

// Stops every track so the camera/microphone is released between tests.
void closeMediaStream(MediaStream stream) {
  stream.getTracks().forEach((element) {
    element.stop();
  });
}

// Browser-run tests for navigator.mediaDevices; requires a device (or fake
// device flags) granting camera and microphone access.
List testFunctions = [
  () => test('MediaDevices.enumerateDevices()', () async {
        var list = await navigator.mediaDevices.enumerateDevices();
        list.forEach((e) {
          print('${e.runtimeType}: ${e.label}, type => ${e.kind}');
        });
      }),
  () => test('MediaDevices.getUserMedia()', () async {
        // audio + video: both track lists must be non-empty.
        var stream = await navigator.mediaDevices
            .getUserMedia({'audio': true, 'video': true});
        print('getUserMedia: stream.id => ${stream.id}');

        print(
            'getUserMedia: audio track.id => ${stream.getAudioTracks()[0].id}');
        expect(stream.getAudioTracks().isNotEmpty, true);
        print(
            'getUserMedia: video track.id => ${stream.getVideoTracks()[0].id}');
        expect(stream.getVideoTracks().isNotEmpty, true);

        closeMediaStream(stream);

        // video only.
        stream = await navigator.mediaDevices
            .getUserMedia({'audio': false, 'video': true});

        expect(stream.getAudioTracks().isEmpty, true);
        expect(stream.getVideoTracks().isNotEmpty, true);

        closeMediaStream(stream);

        // audio only.
        stream = await navigator.mediaDevices
            .getUserMedia({'audio': true, 'video': false});

        expect(stream.getAudioTracks().isNotEmpty, true);
        expect(stream.getVideoTracks().isEmpty, true);

        closeMediaStream(stream);
        /*
    expect(
        await mediaDevices.getUserMedia(
            constraints:
                MediaStreamConstraints(audio: false, video: false)),
        throwsException);*/
      }),
  () => test('MediaDevices.getDisplayMedia()', () async {
        // Disabled: getDisplayMedia needs a user gesture / manual selection,
        // which an automated run cannot provide.
        /*
    var stream = await mediaDevices.getDisplayMedia(
        constraints: MediaStreamConstraints(audio: false, video: true));
    print('getDisplayMedia: stream.id => ${stream.id}');
    expect(stream != null, true);
    expect(stream.getAudioTracks().isEmpty, true);
    print(
        'getDisplayMedia: video track.id => ${stream.getVideoTracks()[0].id}');
    expect(stream.getVideoTracks().isNotEmpty, true);

    closeMediaStream(stream);
    */
      })
];
--------------------------------------------------------------------------------
/web/test_media_stream.dart:
--------------------------------------------------------------------------------
import 'package:dart_webrtc/dart_webrtc.dart';
import 'package:test/test.dart';

// Shared across the ordered tests below: created by the constructor test,
// consumed and torn down by the later ones.
MediaStream? mediaStream;

List testFunctions = [
  () => test('mediaStream?.constructor()', () async {
        mediaStream = await navigator.mediaDevices
            .getUserMedia({'audio': true, 'video': true});

        expect(mediaStream?.id != null, true);
      }),
  () => test('mediaStream?.active()', () {
        expect(mediaStream?.active, true);
      }),
  () => test('mediaStream?.getTracks()', () {
        // One audio + one video track from the getUserMedia call above.
        expect(mediaStream?.getTracks().length, 2);
      }),
  () => test('mediaStream?.getAudioTracks()', () {
        expect(mediaStream?.getAudioTracks().length, 1);
        var track = mediaStream
            ?.getTrackById(mediaStream?.getAudioTracks()[0].id ?? '');
        expect(track?.id, mediaStream?.getAudioTracks()[0].id);
      }),
  () => test('mediaStream?.getVideoTracks()', () {
        expect(mediaStream?.getVideoTracks().length, 1);
        var track = mediaStream
            ?.getTrackById(mediaStream?.getVideoTracks()[0].id ?? '');
        expect(track!.id, mediaStream?.getVideoTracks()[0].id);
      }),
  () => test('mediaStream?.removeTrack()', () {
        var track = mediaStream
            ?.getTrackById(mediaStream?.getVideoTracks()[0].id ??
'');
        mediaStream?.removeTrack(track!);
        expect(mediaStream?.getVideoTracks().length, 0);
      }),
  () => test('mediaStream?.close()', () {
        // Stop and detach every remaining track; stream must end up empty.
        mediaStream?.getTracks().forEach((element) {
          element.stop();
          mediaStream?.removeTrack(element);
        });
        expect(mediaStream?.getTracks().isEmpty, true);
      })
];
--------------------------------------------------------------------------------
/web/test_media_stream_track.dart:
--------------------------------------------------------------------------------
import 'package:dart_webrtc/dart_webrtc.dart';
import 'package:test/test.dart';

// Shared across the ordered tests below; populated by the constructor test.
MediaStreamTrack? audioTrack, videoTrack;

List testFunctions = [
  () => test('MediaStreamTrack.constructor()', () async {
        var stream = await navigator.mediaDevices
            .getUserMedia({'audio': true, 'video': true});

        audioTrack = stream.getAudioTracks()[0];
        expect(audioTrack != null, true);

        expect(audioTrack?.kind, 'audio');

        videoTrack = stream.getVideoTracks()[0];
        expect(videoTrack != null, true);

        expect(videoTrack?.kind, 'video');
      }),
  () => test('MediaStreamTrack.id()', () {
        expect(audioTrack?.id is String, true);
        expect(audioTrack?.id?.isNotEmpty, true);
        expect(videoTrack?.id is String, true);
        expect(videoTrack?.id?.isNotEmpty, true);
      }),
  () => test('MediaStreamTrack.label()', () {
        // BUG FIX: this test asserted on `id` instead of `label` in three of
        // its four expectations (copy-paste from the id() test above), so it
        // never actually exercised `label`.
        expect(audioTrack?.label is String, true);
        expect(audioTrack?.label?.isNotEmpty, true);
        expect(videoTrack?.label is String, true);
        expect(videoTrack?.label?.isNotEmpty, true);
      }),
  () => test('MediaStreamTrack.enabled()', () {
        expect(audioTrack?.enabled, true);
        audioTrack?.enabled = false;
        expect(audioTrack?.enabled, false);

        expect(videoTrack?.enabled, true);
        videoTrack?.enabled = false;
        expect(videoTrack?.enabled, false);
      }),
  () => test('MediaStreamTrack.readyState() | MediaStreamTrack.stop()', () {
        // Disabled: readyState transitions are timing-dependent in browsers.
        /*
    expect(audioTrack?.readyState, 'live');
    audioTrack?.stop();
    expect(audioTrack?.readyState, 'ended');

    expect(videoTrack?.readyState, 'live');
    videoTrack?.stop();
    expect(videoTrack?.readyState, 'ended');
    */
      })
];
--------------------------------------------------------------------------------
/web/test_peerconnection.dart:
--------------------------------------------------------------------------------
import 'package:dart_webrtc/dart_webrtc.dart';
import 'package:test/test.dart';

// A local loopback pair: pc1 offers, pc2 answers; candidates are exchanged
// directly in the onIceCandidate handlers below.
late RTCPeerConnection pc1;
late RTCPeerConnection pc2;

late RTCSessionDescription offer;
late RTCSessionDescription answer;

// Attaches logging callbacks for every state-change event of [pc],
// prefixing each line with [title] so pc1/pc2 output is distinguishable.
void addStateCallbacks(RTCPeerConnection pc, String title) {
  pc.onConnectionState = (RTCPeerConnectionState state) {
    print('$title: onconnectionstatechange => ${state.toString()}');
  };
  pc.onIceConnectionState = (RTCIceConnectionState state) {
    print('$title: oniceconnectionstatechange => ${state.toString()}');
  };
  pc.onIceGatheringState = (RTCIceGatheringState state) {
    print('$title: onicegatheringstatechange => ${state.toString()}');
  };
  pc.onSignalingState = (RTCSignalingState state) {
    print('$title: onsignalingstatechange => ${state.toString()}');
  };

  pc.onAddStream = (MediaStream stream) {
    print('$title: onaddstream => ${stream.id}');
  };

  pc.onTrack = (RTCTrackEvent event) async {
    print(
        '$title: ontrack => ${event.track.id}, \nkind => ${event.track.kind}\nstream.length => ${event.streams.length}');
    var params = event.receiver!.parameters;
    print('reducedSize => ${params.rtcp!.reducedSize}');
  };
}

List testFunctions = [
  () => test('RTCPeerConnection.constructor()', () async {
        pc1 = await createPeerConnection({'iceServers': []});

        expect(pc1.connectionState,
RTCPeerConnectionState.RTCPeerConnectionStateNew);
        expect(pc1.signalingState, RTCSignalingState.RTCSignalingStateStable);

        pc2 = await createPeerConnection({'iceServers': []});

        expect(pc2.connectionState,
            RTCPeerConnectionState.RTCPeerConnectionStateNew);
        expect(pc2.signalingState, RTCSignalingState.RTCSignalingStateStable);

        addStateCallbacks(pc1, 'pc1');
        addStateCallbacks(pc2, 'pc2');

        // Loopback candidate exchange: each side feeds its candidates to the
        // other; a null candidate marks end-of-candidates.
        pc1.onIceCandidate = (RTCIceCandidate? candidate) async {
          if (candidate == null) {
            print('pc1: end-of-candidate');
            return;
          }
          print('pc1: onicecaniddate => ${candidate.candidate}');
          await pc2.addCandidate(candidate);
        };

        pc2.onIceCandidate = (RTCIceCandidate? candidate) async {
          if (candidate == null) {
            print('pc2: end-of-candidate');
            return;
          }
          print('pc2: onicecaniddate => ${candidate.candidate}');
          await pc1.addCandidate(candidate);
        };
      }),
  () => test('RTCPeerConnection.addTransceiver()', () async {
        // pc1 sends audio+video, pc2 receives audio+video.
        await pc1.addTransceiver(
            kind: RTCRtpMediaType.RTCRtpMediaTypeAudio,
            init: RTCRtpTransceiverInit(
                direction: TransceiverDirection.SendOnly));
        await pc1.addTransceiver(
            kind: RTCRtpMediaType.RTCRtpMediaTypeVideo,
            init: RTCRtpTransceiverInit(
                direction: TransceiverDirection.SendOnly));

        await pc2.addTransceiver(
            kind: RTCRtpMediaType.RTCRtpMediaTypeAudio,
            init: RTCRtpTransceiverInit(
                direction: TransceiverDirection.RecvOnly));
        await pc2.addTransceiver(
            kind: RTCRtpMediaType.RTCRtpMediaTypeVideo,
            init: RTCRtpTransceiverInit(
                direction: TransceiverDirection.RecvOnly));
      }),
  () => test('RTCPeerConnection.createOffer()', () async {
        offer = await pc1.createOffer();
        print('pc1 offer => ${offer.sdp}');
        await pc1.setLocalDescription(offer);
        expect(pc1.signalingState,
            RTCSignalingState.RTCSignalingStateHaveLocalOffer);

        await pc2.setRemoteDescription(offer);
        expect(pc2.signalingState,
            RTCSignalingState.RTCSignalingStateHaveRemoteOffer);
      }),
  () => test('RTCPeerConnection.createAnswer()', () async {
        answer = await pc2.createAnswer({});
        await pc2.setLocalDescription(answer);
        expect(pc2.signalingState, RTCSignalingState.RTCSignalingStateStable);
        print('pc2 answer => ${answer.sdp}');
        await pc1.setRemoteDescription(answer);
        expect(pc1.signalingState, RTCSignalingState.RTCSignalingStateStable);
      }),
  () => test('RTCPeerConnection.localDescription()', () async {
        var localDescription1 = await pc1.getLocalDescription();
        expect(localDescription1!.type, 'offer');
        expect(localDescription1.sdp!.isNotEmpty, true);
        var localDescription2 = await pc2.getLocalDescription();
        expect(localDescription2!.type, 'answer');
        expect(localDescription2.sdp!.isNotEmpty, true);
      }),
  () => test('RTCPeerConnection.remoteDescription()', () async {
        // BUG FIX: this test called getLocalDescription() but asserted the
        // *remote* description types ('answer' for pc1, 'offer' for pc2), so
        // it could never pass — pc1's local description is the offer. It now
        // actually queries the remote descriptions, matching the test name.
        var remoteDescription1 = await pc1.getRemoteDescription();
        expect(remoteDescription1!.type, 'answer');
        expect(remoteDescription1.sdp!.isNotEmpty, true);
        var remoteDescription2 = await pc2.getRemoteDescription();
        expect(remoteDescription2!.type, 'offer');
        expect(remoteDescription2.sdp!.isNotEmpty, true);
      }),
  () => test('RTCPeerConnection.close()', () async {
        // Give ICE/media a few seconds before tearing the pair down.
        await Future.delayed(Duration(seconds: 5), () {
          pc1.close();
          expect(pc1.signalingState, RTCSignalingState.RTCSignalingStateClosed);
          pc2.close();
          expect(pc2.signalingState, RTCSignalingState.RTCSignalingStateClosed);
        });
      })
];
--------------------------------------------------------------------------------
/web/test_template.dart:
--------------------------------------------------------------------------------
import 'package:test/test.dart';

List testFunctions = [
  () => test('ClassName.constructor()', ()
{}),
  () => test('ClassName.method1()', () {}),
  () => test('ClassName.method2()', () {}),
  () => test('ClassName.method3()', () {})
];
--------------------------------------------------------------------------------
/web/test_video_element.dart:
--------------------------------------------------------------------------------
import 'package:dart_webrtc/dart_webrtc.dart';
import 'package:test/test.dart';

// FIX: was `late RTCVideoElement? localVideo;` — `late` on a nullable field
// with no initializer means any read before assignment throws a
// LateInitializationError, whereas every use below is null-aware (`?.`) and
// clearly expects a null default. A plain nullable field gives that default.
RTCVideoElement? localVideo;

List testFunctions = [
  () => test('RTCVideoElement.constructor()', () {
        localVideo = RTCVideoElement();
        expect(localVideo != null, true);
      }),
  () => test('RTCVideoElement.muted()', () {
        localVideo?.muted = true;
        expect(localVideo?.muted, true);
        localVideo?.muted = false;
        expect(localVideo?.muted, false);
      }),
  () => test('RTCVideoElement.controls()', () {
        localVideo?.controls = false;
        expect(localVideo?.controls, false);
        localVideo?.controls = true;
        expect(localVideo?.controls, true);
      }),
  () => test('RTCVideoElement.autoplay()', () {
        localVideo?.autoplay = false;
        expect(localVideo?.autoplay, false);
        localVideo?.autoplay = true;
        expect(localVideo?.autoplay, true);
      })
];
--------------------------------------------------------------------------------