├── .github └── workflows │ ├── build.yml │ └── publish.yaml ├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── README.md ├── analysis_options.yaml ├── lib ├── src │ ├── enums.dart │ ├── factory.dart │ ├── frame_cryptor.dart │ ├── media_recorder.dart │ ├── media_stream.dart │ ├── media_stream_track.dart │ ├── mediadevices.dart │ ├── navigator.dart │ ├── rtc_configuration.dart │ ├── rtc_data_channel.dart │ ├── rtc_dtmf_sender.dart │ ├── rtc_ice_candidate.dart │ ├── rtc_peerconnection.dart │ ├── rtc_rtcp_parameters.dart │ ├── rtc_rtp_capabilities.dart │ ├── rtc_rtp_parameters.dart │ ├── rtc_rtp_receiver.dart │ ├── rtc_rtp_sender.dart │ ├── rtc_rtp_transceiver.dart │ ├── rtc_session_description.dart │ ├── rtc_stats_report.dart │ ├── rtc_track_event.dart │ └── rtc_video_renderer.dart └── webrtc_interface.dart └── pubspec.yaml /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: Build 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ master ] 8 | 9 | jobs: 10 | test: 11 | name: Test on ${{ matrix.os }} 12 | runs-on: ${{ matrix.os }} 13 | strategy: 14 | matrix: 15 | os: [ubuntu-latest] 16 | 17 | steps: 18 | - uses: actions/checkout@v2 19 | - uses: actions/setup-java@v1 20 | with: 21 | java-version: '12.x' 22 | - uses: subosito/flutter-action@v1 23 | with: 24 | flutter-version: '2.2.3' 25 | channel: 'stable' 26 | - run: dart pub get 27 | - run: dart format lib/ test/ --set-exit-if-changed 28 | - run: dart pub run import_sorter:main --no-comments --exit-if-changed 29 | - run: dart analyze 30 | -------------------------------------------------------------------------------- /.github/workflows/publish.yaml: -------------------------------------------------------------------------------- 1 | name: Publish to pub.dev 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'v[0-9]+.[0-9]+.[0-9]+*' 7 | 8 | jobs: 9 | publish: 10 | permissions: 11 | id-token: write # Required for authentication using OIDC 12 | uses: dart-lang/setup-dart/.github/workflows/publish.yml@v1 13 | # with: 14 | # working-directory: path/to/package/within/repository 15 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # See https://www.dartlang.org/guides/libraries/private-files 2 | 3 | # Files and directories created by pub 4 | .dart_tool/ 5 | .packages 6 | build/ 7 | # If you're building an application, you may want to check-in your pubspec.lock 8 | pubspec.lock 9 | 10 | # Directory created by dartdoc 11 | # If you don't generate documentation locally you can remove this line. 12 | doc/api/ 13 | 14 | # Avoid committing generated Javascript files: 15 | *.dart.js 16 | *.info.json # Produced by the --dump-info flag. 17 | *.js # When generated by dart2js. Don't specify *.js if your 18 | # project includes source files written in JavaScript. 19 | *.js_ 20 | *.js.deps 21 | *.js.map 22 | 23 | .DS_Store -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | -------------------------------------------- 4 | [1.2.3] - 2025-04-29 5 | 6 | * Media recording changes (#31). 7 | 8 | [1.2.2+hotfix.1] - 2025-03-24 9 | 10 | * rename getBufferAmount to getBufferedAmount. 11 | 12 | [1.2.2] - 2025-03-24 13 | 14 | * add getBufferedAmount for RTCDataChannel. 
15 | 16 | [1.2.1+hotfix.1] - 2025-02-23 17 | 18 | * fix: make videoValue non nullable (#30) 19 | * fix: make optional cname in RTCRTCPParameters nullable (#27) 20 | 21 | [1.2.0] - 2024-04-08 22 | 23 | * feat: add keyRingSize/discardFrameWhenCryptorNotReady to KeyProviderOptions. 24 | 25 | [1.1.2] - 2023-09-14 26 | 27 | * Add more frame cryptor api. 28 | 29 | [1.1.1] - 2023-08-14 30 | 31 | * Add more async methods. 32 | 33 | [1.1.0] - 2023-06-29 34 | 35 | * Add FrameCryptor interface. 36 | 37 | [1.0.13] - 2023-04-14 38 | 39 | * Add RTCDegradationPreference to RTCRtpParameters. 40 | 41 | [1.0.12] - 2023-04-10 42 | 43 | * Add addStreams to RTCRtpSender. 44 | 45 | [1.0.11] - 2023-01-30 46 | 47 | * Add RTCRtpCapabilities interface. 48 | 49 | [1.0.10] - 2022-11-12 50 | 51 | * Change MediaStream.clone to async. 52 | 53 | [1.0.9] - 2022-11-02 54 | 55 | * Update MediaRecorder interface. 56 | 57 | [1.0.8] - 2022-09-06 58 | 59 | * Added callback onFirstFrameRendered. 60 | 61 | [1.0.7] - 2022-08-04 62 | 63 | * Add stub for selectAudioOutput. 64 | 65 | [1.0.6] - 2022-08-04 66 | 67 | * Add selectAudioOutput method to MediaDevices. 68 | * Add ondevicechange property to MediaDevices. 69 | 70 | [1.0.5] - 2022-05-31 71 | 72 | * Added Function(int currentAmount, int changedAmount)? onBufferedAmountChange callback (bufferedAmount should be set to non nullable after bufferedAmount implementation on all platforms). 73 | * Added Function(int currentAmount)? onBufferedAmountLow callback and bufferedAmountLowThreshold variable. 74 | 75 | [1.0.4] - 2022-05-08 76 | 77 | * Change to nullable track for replaceTrack/setTrack. 78 | 79 | [1.0.3] - 2022-03-31 80 | 81 | * Added RTCDataChannel.id 82 | 83 | [1.0.2] - 2022-02-07 84 | 85 | * chore: Add restartIce. 86 | * fix: Fix case for RTCIceCandidate.sdpMLineIndex. 87 | 88 | [1.0.1] - 2021-11-25 89 | 90 | * Added comment for VideoRenderer.onResize. 91 | * Remove unnecessary function alias declarations. 92 | 93 | [1.0.0] - 2021-11-19 94 | 95 | * initial version. 96 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Flutter WebRTC 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # webrtc-interface 2 | 3 | WebRTC interface for Dart-Web/Flutter. 4 | -------------------------------------------------------------------------------- /analysis_options.yaml: -------------------------------------------------------------------------------- 1 | include: package:pedantic/analysis_options.yaml 2 | 3 | linter: 4 | rules: 5 | - always_declare_return_types 6 | - avoid_empty_else 7 | - await_only_futures 8 | - avoid_returning_null_for_void 9 | - camel_case_extensions 10 | - camel_case_types 11 | - cancel_subscriptions 12 | - directives_ordering 13 | - flutter_style_todos 14 | - sort_constructors_first 15 | - sort_unnamed_constructors_first 16 | - sort_pub_dependencies 17 | - type_init_formals 18 | - unnecessary_brace_in_string_interps 19 | - unnecessary_const 20 | - unnecessary_new 21 | - unnecessary_getters_setters 22 | - unnecessary_null_aware_assignments 23 | - unnecessary_null_in_if_null_operators 24 | - unnecessary_overrides 25 | - unnecessary_parenthesis 26 | - unnecessary_statements 27 | - unnecessary_string_interpolations 28 | - unnecessary_this 29 | - unrelated_type_equality_checks 30 | - use_rethrow_when_possible 31 | - valid_regexps 32 | - void_checks 33 | 34 | analyzer: 35 | errors: 36 | # treat missing required parameters as a warning (not a hint) 37 | missing_required_param: warning 38 | # treat missing returns as a warning (not a hint) 39 | missing_return: warning 40 | # allow having TODOs in the code 41 | todo: ignore 42 | # allow self-reference to deprecated members (we do this because otherwise we have 43 | # to annotate every member in every test, assert, etc, when we deprecate something) 44 | deprecated_member_use_from_same_package: ignore 45 | # Ignore analyzer hints for updating pubspecs when using Future or 46 | # Stream and not importing dart:async 47 | # Please see https://github.com/flutter/flutter/pull/24528 for details.
48 | sdk_version_async_exported_from_core: ignore 49 | -------------------------------------------------------------------------------- /lib/src/enums.dart: -------------------------------------------------------------------------------- 1 | enum RecorderAudioChannel { INPUT, OUTPUT } 2 | 3 | /// RTCDataChannelMessage type 4 | enum MessageType { text, binary } 5 | 6 | enum RTCDataChannelState { 7 | RTCDataChannelConnecting, 8 | RTCDataChannelOpen, 9 | RTCDataChannelClosing, 10 | RTCDataChannelClosed, 11 | } 12 | 13 | enum RTCSignalingState { 14 | RTCSignalingStateStable, 15 | RTCSignalingStateHaveLocalOffer, 16 | RTCSignalingStateHaveRemoteOffer, 17 | RTCSignalingStateHaveLocalPrAnswer, 18 | RTCSignalingStateHaveRemotePrAnswer, 19 | RTCSignalingStateClosed 20 | } 21 | 22 | enum RTCIceGatheringState { 23 | RTCIceGatheringStateNew, 24 | RTCIceGatheringStateGathering, 25 | RTCIceGatheringStateComplete 26 | } 27 | 28 | enum RTCPeerConnectionState { 29 | RTCPeerConnectionStateClosed, 30 | RTCPeerConnectionStateFailed, 31 | RTCPeerConnectionStateDisconnected, 32 | RTCPeerConnectionStateNew, 33 | RTCPeerConnectionStateConnecting, 34 | RTCPeerConnectionStateConnected 35 | } 36 | 37 | enum RTCIceConnectionState { 38 | RTCIceConnectionStateNew, 39 | RTCIceConnectionStateChecking, 40 | RTCIceConnectionStateCompleted, 41 | RTCIceConnectionStateConnected, 42 | RTCIceConnectionStateCount, 43 | RTCIceConnectionStateFailed, 44 | RTCIceConnectionStateDisconnected, 45 | RTCIceConnectionStateClosed, 46 | } 47 | 48 | enum RTCVideoViewObjectFit { 49 | RTCVideoViewObjectFitContain, 50 | RTCVideoViewObjectFitCover, 51 | } 52 | 53 | enum RTCRtpMediaType { 54 | RTCRtpMediaTypeAudio, 55 | RTCRtpMediaTypeVideo, 56 | RTCRtpMediaTypeData, 57 | } 58 | 59 | final typeRTCRtpMediaTypetoString = { 60 | RTCRtpMediaType.RTCRtpMediaTypeAudio: 'audio', 61 | RTCRtpMediaType.RTCRtpMediaTypeVideo: 'video', 62 | RTCRtpMediaType.RTCRtpMediaTypeData: 'data', 63 | }; 64 | 65 | final typeStringToRTCRtpMediaType = { 66 | 'audio': RTCRtpMediaType.RTCRtpMediaTypeAudio, 67 | 'video': RTCRtpMediaType.RTCRtpMediaTypeVideo, 68 | 'data': RTCRtpMediaType.RTCRtpMediaTypeData, 69 | }; 70 | 71 | enum TransceiverDirection { 72 | SendRecv, 73 | SendOnly, 74 | RecvOnly, 75 | Inactive, 76 | Stopped, 77 | } 78 | 79 | final typeStringToRtpTransceiverDirection = { 80 | 'sendrecv': TransceiverDirection.SendRecv, 81 | 'sendonly': TransceiverDirection.SendOnly, 82 | 'recvonly': TransceiverDirection.RecvOnly, 83 | 'inactive': TransceiverDirection.Inactive, 84 | 'stopped': TransceiverDirection.Stopped, 85 | }; 86 | 87 | final typeRtpTransceiverDirectionToString = { 88 | TransceiverDirection.SendRecv: 'sendrecv', 89 | TransceiverDirection.SendOnly: 'sendonly', 90 | TransceiverDirection.RecvOnly: 'recvonly', 91 | TransceiverDirection.Inactive: 'inactive', 92 | TransceiverDirection.Stopped: 'stopped,' 93 | }; 94 | 95 | RTCIceConnectionState iceConnectionStateForString(String? 
state) { 96 | switch (state) { 97 | case 'new': 98 | return RTCIceConnectionState.RTCIceConnectionStateNew; 99 | case 'checking': 100 | return RTCIceConnectionState.RTCIceConnectionStateChecking; 101 | case 'connected': 102 | return RTCIceConnectionState.RTCIceConnectionStateConnected; 103 | case 'completed': 104 | return RTCIceConnectionState.RTCIceConnectionStateCompleted; 105 | case 'failed': 106 | return RTCIceConnectionState.RTCIceConnectionStateFailed; 107 | case 'disconnected': 108 | return RTCIceConnectionState.RTCIceConnectionStateDisconnected; 109 | case 'closed': 110 | return RTCIceConnectionState.RTCIceConnectionStateClosed; 111 | case 'count': 112 | return RTCIceConnectionState.RTCIceConnectionStateCount; 113 | } 114 | return RTCIceConnectionState.RTCIceConnectionStateClosed; 115 | } 116 | 117 | RTCIceGatheringState iceGatheringStateforString(String? state) { 118 | switch (state) { 119 | case 'new': 120 | return RTCIceGatheringState.RTCIceGatheringStateNew; 121 | case 'gathering': 122 | return RTCIceGatheringState.RTCIceGatheringStateGathering; 123 | case 'complete': 124 | return RTCIceGatheringState.RTCIceGatheringStateComplete; 125 | } 126 | return RTCIceGatheringState.RTCIceGatheringStateNew; 127 | } 128 | 129 | RTCSignalingState signalingStateForString(String? state) { 130 | switch (state) { 131 | case 'stable': 132 | return RTCSignalingState.RTCSignalingStateStable; 133 | case 'have-local-offer': 134 | return RTCSignalingState.RTCSignalingStateHaveLocalOffer; 135 | case 'have-local-pranswer': 136 | return RTCSignalingState.RTCSignalingStateHaveLocalPrAnswer; 137 | case 'have-remote-offer': 138 | return RTCSignalingState.RTCSignalingStateHaveRemoteOffer; 139 | case 'have-remote-pranswer': 140 | return RTCSignalingState.RTCSignalingStateHaveRemotePrAnswer; 141 | case 'closed': 142 | return RTCSignalingState.RTCSignalingStateClosed; 143 | } 144 | return RTCSignalingState.RTCSignalingStateClosed; 145 | } 146 | 147 | RTCDataChannelState rtcDataChannelStateForString(String state) { 148 | switch (state) { 149 | case 'connecting': 150 | return RTCDataChannelState.RTCDataChannelConnecting; 151 | case 'open': 152 | return RTCDataChannelState.RTCDataChannelOpen; 153 | case 'closing': 154 | return RTCDataChannelState.RTCDataChannelClosing; 155 | case 'closed': 156 | return RTCDataChannelState.RTCDataChannelClosed; 157 | } 158 | return RTCDataChannelState.RTCDataChannelClosed; 159 | } 160 | 161 | RTCPeerConnectionState peerConnectionStateForString(String? 
state) { 162 | switch (state) { 163 | case 'new': 164 | return RTCPeerConnectionState.RTCPeerConnectionStateNew; 165 | case 'connecting': 166 | return RTCPeerConnectionState.RTCPeerConnectionStateConnecting; 167 | case 'connected': 168 | return RTCPeerConnectionState.RTCPeerConnectionStateConnected; 169 | case 'closed': 170 | return RTCPeerConnectionState.RTCPeerConnectionStateClosed; 171 | case 'disconnected': 172 | return RTCPeerConnectionState.RTCPeerConnectionStateDisconnected; 173 | case 'failed': 174 | return RTCPeerConnectionState.RTCPeerConnectionStateFailed; 175 | } 176 | 177 | return RTCPeerConnectionState.RTCPeerConnectionStateClosed; 178 | } 179 | 180 | enum RTCDegradationPreference { 181 | DISABLED, 182 | MAINTAIN_FRAMERATE, 183 | MAINTAIN_RESOLUTION, 184 | BALANCED, 185 | } 186 | 187 | final typeRTCDegradationPreferenceString = { 188 | RTCDegradationPreference.DISABLED: 'disabled', 189 | RTCDegradationPreference.MAINTAIN_FRAMERATE: 'maintain-framerate', 190 | RTCDegradationPreference.MAINTAIN_RESOLUTION: 'maintain-resolution', 191 | RTCDegradationPreference.BALANCED: 'balanced', 192 | }; 193 | 194 | RTCDegradationPreference degradationPreferenceforString(String? degradation) { 195 | switch (degradation) { 196 | case 'disabled': 197 | return RTCDegradationPreference.DISABLED; 198 | case 'maintain-framerate': 199 | return RTCDegradationPreference.MAINTAIN_FRAMERATE; 200 | case 'maintain-resolution': 201 | return RTCDegradationPreference.MAINTAIN_RESOLUTION; 202 | case 'balanced': 203 | return RTCDegradationPreference.BALANCED; 204 | } 205 | return RTCDegradationPreference.BALANCED; 206 | } 207 | -------------------------------------------------------------------------------- /lib/src/factory.dart: -------------------------------------------------------------------------------- 1 | import 'frame_cryptor.dart'; 2 | import 'media_recorder.dart'; 3 | import 'media_stream.dart'; 4 | import 'navigator.dart'; 5 | import 'rtc_peerconnection.dart'; 6 | import 'rtc_rtp_capabilities.dart'; 7 | import 'rtc_video_renderer.dart'; 8 | 9 | abstract class RTCFactory { 10 | Future createPeerConnection( 11 | Map configuration, 12 | [Map constraints]); 13 | 14 | Future createLocalMediaStream(String label); 15 | 16 | Future getRtpSenderCapabilities(String kind); 17 | 18 | Future getRtpReceiverCapabilities(String kind); 19 | 20 | MediaRecorder mediaRecorder(); 21 | 22 | VideoRenderer videoRenderer(); 23 | 24 | Navigator get navigator; 25 | 26 | FrameCryptorFactory get frameCryptorFactory; 27 | } 28 | -------------------------------------------------------------------------------- /lib/src/frame_cryptor.dart: -------------------------------------------------------------------------------- 1 | import 'dart:typed_data'; 2 | 3 | import 'rtc_rtp_receiver.dart'; 4 | import 'rtc_rtp_sender.dart'; 5 | 6 | /// Built-in Algorithm. 7 | enum Algorithm { 8 | kAesGcm, 9 | kAesCbc, 10 | } 11 | 12 | class KeyProviderOptions { 13 | KeyProviderOptions({ 14 | required this.sharedKey, 15 | required this.ratchetSalt, 16 | required this.ratchetWindowSize, 17 | this.uncryptedMagicBytes, 18 | this.failureTolerance = -1, 19 | this.keyRingSize = 16, 20 | this.discardFrameWhenCryptorNotReady = false, 21 | }); 22 | bool sharedKey; 23 | Uint8List ratchetSalt; 24 | Uint8List? 
uncryptedMagicBytes; 25 | int ratchetWindowSize; 26 | int failureTolerance; 27 | 28 | /// key ring size should be between 1 and 255 29 | /// default is 16 30 | int keyRingSize; 31 | bool discardFrameWhenCryptorNotReady; 32 | Map toJson() { 33 | return { 34 | 'sharedKey': sharedKey, 35 | 'ratchetSalt': ratchetSalt, 36 | if (uncryptedMagicBytes != null) 37 | 'uncryptedMagicBytes': uncryptedMagicBytes, 38 | 'ratchetWindowSize': ratchetWindowSize, 39 | 'failureTolerance': failureTolerance, 40 | 'keyRingSize': keyRingSize, 41 | 'discardFrameWhenCryptorNotReady': discardFrameWhenCryptorNotReady, 42 | }; 43 | } 44 | } 45 | 46 | /// Shared secret key for frame encryption. 47 | abstract class KeyProvider { 48 | /// The unique identifier of the key provider. 49 | String get id; 50 | 51 | Future setSharedKey({required Uint8List key, int index = 0}) => 52 | throw UnimplementedError(); 53 | 54 | Future ratchetSharedKey({int index = 0}) => 55 | throw UnimplementedError(); 56 | 57 | Future exportSharedKey({int index = 0}) => 58 | throw UnimplementedError(); 59 | 60 | /// Set the raw key at the given index. 61 | Future setKey({ 62 | required String participantId, 63 | required int index, 64 | required Uint8List key, 65 | }); 66 | 67 | /// ratchet the key at the given index. 68 | Future ratchetKey({ 69 | required String participantId, 70 | required int index, 71 | }); 72 | 73 | /// Export the key at the given index. 74 | Future exportKey({ 75 | required String participantId, 76 | required int index, 77 | }); 78 | 79 | Future setSifTrailer({required Uint8List trailer}) => 80 | throw UnimplementedError(); 81 | 82 | /// Dispose the key manager. 83 | Future dispose(); 84 | } 85 | 86 | enum FrameCryptorState { 87 | FrameCryptorStateNew, 88 | FrameCryptorStateOk, 89 | FrameCryptorStateEncryptionFailed, 90 | FrameCryptorStateDecryptionFailed, 91 | FrameCryptorStateMissingKey, 92 | FrameCryptorStateKeyRatcheted, 93 | FrameCryptorStateInternalError, 94 | } 95 | 96 | /// Frame encryption/decryption. 97 | /// 98 | abstract class FrameCryptor { 99 | FrameCryptor(); 100 | 101 | Function(String participantId, FrameCryptorState state)? 102 | onFrameCryptorStateChanged; 103 | 104 | /// The unique identifier of the frame cryptor. 105 | String get participantId; 106 | 107 | /// Enable/Disable frame crypto for the sender or receiver. 108 | Future setEnabled(bool enabled); 109 | 110 | /// Get the enabled state for the sender or receiver. 111 | Future get enabled; 112 | 113 | /// Set the key index for the sender or receiver. 114 | /// If the key index is not set, the key index will be set to 0. 115 | Future setKeyIndex(int index); 116 | 117 | /// Get the key index for the sender or receiver. 118 | Future get keyIndex; 119 | 120 | Future updateCodec(String codec); 121 | 122 | /// Dispose the frame cryptor. 123 | Future dispose(); 124 | } 125 | 126 | /// Factory for creating frame Cryptors. 127 | /// For End 2 End Encryption, you need to create a [KeyProvider] for each peer. 128 | /// And set your key in keyProvider. 129 | abstract class FrameCryptorFactory { 130 | /// Shared key manager. 131 | Future createDefaultKeyProvider(KeyProviderOptions options); 132 | 133 | /// Create a frame Cryptor from a [RTCRtpSender]. 134 | Future createFrameCryptorForRtpSender({ 135 | required String participantId, 136 | required RTCRtpSender sender, 137 | required Algorithm algorithm, 138 | required KeyProvider keyProvider, 139 | }); 140 | 141 | /// Create a frame Cryptor from a [RTCRtpReceiver]. 
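  /// A minimal, hedged usage sketch: the `frameCryptorFactory`, `receiver`
  /// and `secretKey` names below are placeholders for objects supplied by a
  /// concrete platform implementation, not part of this interface.
  ///
  /// ```dart
  /// final keyProvider = await frameCryptorFactory.createDefaultKeyProvider(
  ///   KeyProviderOptions(
  ///     sharedKey: true,
  ///     ratchetSalt: Uint8List.fromList([1, 2, 3, 4]),
  ///     ratchetWindowSize: 16,
  ///   ),
  /// );
  /// await keyProvider.setSharedKey(key: secretKey);
  /// final cryptor = await frameCryptorFactory.createFrameCryptorForRtpReceiver(
  ///   participantId: 'remote-participant',
  ///   receiver: receiver,
  ///   algorithm: Algorithm.kAesGcm,
  ///   keyProvider: keyProvider,
  /// );
  /// await cryptor.setEnabled(true);
  /// ```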
142 | Future createFrameCryptorForRtpReceiver({ 143 | required String participantId, 144 | required RTCRtpReceiver receiver, 145 | required Algorithm algorithm, 146 | required KeyProvider keyProvider, 147 | }); 148 | } 149 | -------------------------------------------------------------------------------- /lib/src/media_recorder.dart: -------------------------------------------------------------------------------- 1 | import 'enums.dart'; 2 | import 'media_stream.dart'; 3 | import 'media_stream_track.dart'; 4 | 5 | abstract class MediaRecorder { 6 | /// Starts recording to file at [path]. 7 | /// Optionally, on Android choose [audioChannel] to record. 8 | /// On web platform use [startWeb] instead. 9 | Future start( 10 | String path, { 11 | MediaStreamTrack? videoTrack, 12 | RecorderAudioChannel? audioChannel, 13 | }); 14 | 15 | /// Only for Flutter Web 16 | void startWeb( 17 | MediaStream stream, { 18 | Function(dynamic blob, bool isLastOne)? onDataChunk, 19 | String mimeType, 20 | int timeSlice = 1000, 21 | }); 22 | 23 | Future stop(); 24 | } 25 | -------------------------------------------------------------------------------- /lib/src/media_stream.dart: -------------------------------------------------------------------------------- 1 | import 'dart:async'; 2 | 3 | import 'media_stream_track.dart'; 4 | 5 | ///https://w3c.github.io/mediacapture-main/#mediastream 6 | abstract class MediaStream { 7 | MediaStream(this._id, this._ownerTag); 8 | final String _id; 9 | final String _ownerTag; 10 | 11 | /// The event type of this event handler is addtrack. 12 | Function(MediaStreamTrack track)? onAddTrack; 13 | 14 | /// The event type of this event handler is removetrack. 15 | Function(MediaStreamTrack track)? onRemoveTrack; 16 | 17 | String get id => _id; 18 | 19 | String get ownerTag => _ownerTag; 20 | 21 | /// The active attribute return true if this [MediaStream] is active and false otherwise. 22 | /// [MediaStream] is considered active if at least one of its [MediaStreamTracks] is not in the [MediaStreamTrack.ended] state. 23 | /// Once every track has ended, the stream's active property becomes false. 24 | bool? get active; 25 | 26 | @deprecated 27 | Future getMediaTracks(); 28 | 29 | /// Adds the given [MediaStreamTrack] to this [MediaStream]. 30 | Future addTrack(MediaStreamTrack track, {bool addToNative = true}); 31 | 32 | /// Removes the given [MediaStreamTrack] object from this [MediaStream]. 33 | Future removeTrack(MediaStreamTrack track, 34 | {bool removeFromNative = true}); 35 | 36 | /// Returns a List [MediaStreamTrack] objects representing all the tracks in this stream. 37 | List getTracks(); 38 | 39 | /// Returns a List [MediaStreamTrack] objects representing the audio tracks in this stream. 40 | /// The list represents a snapshot of all the [MediaStreamTrack] objects in this stream's track set whose kind is equal to 'audio'. 41 | List getAudioTracks(); 42 | 43 | /// Returns a List [MediaStreamTrack] objects representing the video tracks in this stream. 44 | /// The list represents a snapshot of all the [MediaStreamTrack] objects in this stream's track set whose kind is equal to 'video'. 45 | List getVideoTracks(); 46 | 47 | /// Returns either a [MediaStreamTrack] object from this stream's track set whose id is equal to trackId, or [StateError], if no such track exists. 48 | MediaStreamTrack? 
getTrackById(String trackId) { 49 | for (var item in getTracks()) { 50 | if (item.id == trackId) { 51 | return item; 52 | } 53 | } 54 | return null; 55 | } 56 | 57 | /// Clones the given [MediaStream] and all its tracks. 58 | Future clone() { 59 | throw UnimplementedError(); 60 | } 61 | 62 | Future dispose() async { 63 | return Future.value(); 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /lib/src/media_stream_track.dart: -------------------------------------------------------------------------------- 1 | import 'dart:typed_data'; 2 | 3 | typedef StreamTrackCallback = Function(); 4 | 5 | abstract class MediaStreamTrack { 6 | MediaStreamTrack(); 7 | 8 | /// Returns the unique identifier of the track 9 | String? get id; 10 | 11 | /// This may label audio and video sources (e.g., "Internal microphone" or 12 | /// "External USB Webcam"). 13 | /// 14 | /// Returns the label of the object's corresponding source, if any. 15 | /// If the corresponding source has or had no label, returns an empty string. 16 | String? get label; 17 | 18 | /// Returns the string 'audio' if this object represents an audio track 19 | /// or 'video' if this object represents a video track. 20 | String? get kind; 21 | 22 | /// Callback for onmute event 23 | StreamTrackCallback? onMute; 24 | 25 | /// Callback for onunmute event 26 | StreamTrackCallback? onUnMute; 27 | 28 | /// Callback for onended event 29 | StreamTrackCallback? onEnded; 30 | 31 | /// Returns the enabled state of [MediaStreamTrack] 32 | bool get enabled; 33 | 34 | /// Set the enabled state of [MediaStreamTrack] 35 | /// 36 | /// Note: After a [MediaStreamTrack] has ended, setting the enabled state 37 | /// will not change the ended state. 38 | set enabled(bool b); 39 | 40 | /// Returns true if the track is muted, and false otherwise. 41 | bool? get muted; 42 | 43 | /// Returns a map containing the set of constraints most recently established 44 | /// for the track using a prior call to applyConstraints(). 45 | /// 46 | /// These constraints indicate values and ranges of values that the Web site 47 | /// or application has specified are required or acceptable for the included 48 | /// constrainable properties. 49 | Map getConstraints() { 50 | throw UnimplementedError(); 51 | } 52 | 53 | /// Applies a set of constraints to the track. 54 | /// 55 | /// These constraints let the Web site or app establish ideal values and 56 | /// acceptable ranges of values for the constrainable properties of the track, 57 | /// such as frame rate, dimensions, echo cancellation, and so forth. 58 | Future applyConstraints([Map? constraints]) { 59 | throw UnimplementedError(); 60 | } 61 | 62 | // TODO(wermathurin): This ticket is related to the implementation of jsTrack.getCapabilities(), 63 | // https://github.com/dart-lang/sdk/issues/44319.
64 | // 65 | // MediaTrackCapabilities getCapabilities() { 66 | // throw UnimplementedError(); 67 | // } 68 | 69 | Future clone() async { 70 | throw UnimplementedError(); 71 | } 72 | 73 | Future stop(); 74 | 75 | // 76 | // https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack/getSettings 77 | // 78 | Map getSettings() => throw UnimplementedError(); 79 | 80 | /// Throws error if switching camera failed 81 | @Deprecated('use Helper.switchCamera() instead') 82 | Future switchCamera() { 83 | throw UnimplementedError(); 84 | } 85 | 86 | @deprecated 87 | Future adaptRes(int width, int height) { 88 | throw UnimplementedError(); 89 | } 90 | 91 | void enableSpeakerphone(bool enable) { 92 | throw UnimplementedError(); 93 | } 94 | 95 | Future captureFrame() { 96 | throw UnimplementedError(); 97 | } 98 | 99 | Future hasTorch() { 100 | throw UnimplementedError(); 101 | } 102 | 103 | Future setTorch(bool torch) { 104 | throw UnimplementedError(); 105 | } 106 | 107 | @Deprecated('use stop() instead') 108 | Future dispose(); 109 | 110 | @override 111 | String toString() { 112 | return 'Track(id: $id, kind: $kind, label: $label, enabled: $enabled, muted: $muted)'; 113 | } 114 | } 115 | 116 | // TODO(wermathurin): Need to implement missing API 117 | // readonly attribute MediaStreamTrackState readyState; 118 | // MediaTrackCapabilities getCapabilities(); 119 | // MediaTrackSettings getSettings(); 120 | -------------------------------------------------------------------------------- /lib/src/mediadevices.dart: -------------------------------------------------------------------------------- 1 | import 'media_stream.dart'; 2 | 3 | class MediaStreamConstraints { 4 | MediaStreamConstraints({this.audio, this.video}); 5 | 6 | /// Either a bool (which indicates whether or not an audio track is requested) 7 | /// or a MediaTrackConstraints object providing the constraints which must be 8 | /// met by the audio track included in the returned MediaStream. 9 | /// 10 | /// If constraints are specified, an audio track is inherently requested. 11 | dynamic audio; 12 | 13 | /// Either a bool (which indicates whether or not a video track is requested) 14 | /// or a MediaTrackConstraints object providing the constraints which must be 15 | /// met by the video track included in the returned MediaStream. 16 | /// 17 | /// If constraints are specified, a video track is inherently requested. 18 | dynamic video; 19 | } 20 | 21 | /// [MediaTrackSupportedConstraints] represents the list of constraints 22 | /// controlling the capabilities of a [MediaStreamTrack]. 
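///
/// A hedged usage sketch: `mediaDevices` below is a placeholder for a
/// concrete [MediaDevices] implementation provided by a platform package.
///
/// ```dart
/// final supported = mediaDevices.getSupportedConstraints();
/// final audioConstraints = {
///   if (supported.echoCancellation) 'echoCancellation': true,
///   if (supported.noiseSuppression) 'noiseSuppression': true,
/// };
/// final stream = await mediaDevices.getUserMedia({'audio': audioConstraints});
/// ```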
23 | class MediaTrackSupportedConstraints { 24 | MediaTrackSupportedConstraints({ 25 | this.aspectRatio = false, 26 | this.autoGainControl = false, 27 | this.brightness = false, 28 | this.channelCount = false, 29 | this.colorTemperature = false, 30 | this.contrast = false, 31 | this.deviceId = false, 32 | this.echoCancellation = false, 33 | this.exposureCompensation = false, 34 | this.exposureMode = false, 35 | this.exposureTime = false, 36 | this.facingMode = false, 37 | this.focusDistance = false, 38 | this.focusMode = false, 39 | this.frameRate = false, 40 | this.groupId = false, 41 | this.height = false, 42 | this.iso = false, 43 | this.latency = false, 44 | this.noiseSuppression = false, 45 | this.pan = false, 46 | this.pointsOfInterest = false, 47 | this.resizeMode = false, 48 | this.sampleRate = false, 49 | this.sampleSize = false, 50 | this.saturation = false, 51 | this.sharpness = false, 52 | this.tilt = false, 53 | this.torch = false, 54 | this.whiteBalanceMode = false, 55 | this.width = false, 56 | this.zoom = false, 57 | }); 58 | 59 | final bool aspectRatio, 60 | autoGainControl, 61 | brightness, 62 | channelCount, 63 | colorTemperature, 64 | contrast, 65 | deviceId, 66 | echoCancellation, 67 | exposureCompensation, 68 | exposureMode, 69 | exposureTime, 70 | facingMode, 71 | focusDistance, 72 | focusMode, 73 | frameRate, 74 | groupId, 75 | height, 76 | iso, 77 | latency, 78 | noiseSuppression, 79 | pan, 80 | pointsOfInterest, 81 | resizeMode, 82 | sampleRate, 83 | sampleSize, 84 | saturation, 85 | sharpness, 86 | tilt, 87 | torch, 88 | whiteBalanceMode, 89 | width, 90 | zoom; 91 | } 92 | 93 | abstract class MediaDevices { 94 | /// Prompts the user for permission to use a media input (such as a camera 95 | /// and/or microphone) and resolves to a MediaStream containing the requested 96 | /// types of media tracks. The constraints describe the media types being 97 | /// requested and any requirements for each track. 98 | Future getUserMedia(Map mediaConstraints); 99 | 100 | /// Calling this method will prompt the user to select and grant permission 101 | /// to capture the contents of a display or portion thereof (such as a window) 102 | /// as a MediaStream. The resulting stream can then be recorded using the 103 | /// MediaStream Recording API or transmitted as part of a WebRTC session. 104 | Future getDisplayMedia(Map mediaConstraints); 105 | 106 | @Deprecated('use enumerateDevices() instead') 107 | Future> getSources(); 108 | 109 | /// Returns a List of [MediaDeviceInfo] describing the devices. 110 | Future> enumerateDevices(); 111 | 112 | /// Returns [MediaTrackSupportedConstraints] recognized by a User Agent for 113 | /// controlling the capabilities of a [MediaStreamTrack] object. 114 | MediaTrackSupportedConstraints getSupportedConstraints() { 115 | throw UnimplementedError(); 116 | } 117 | 118 | /// A function you provide which accepts as input an Event object describing 119 | /// the devicechange event that occurred. There is no information about the 120 | /// change included in the event object; to get the updated list of devices, 121 | /// you'll have to use enumerateDevices(). 122 | Function(dynamic event)? ondevicechange; 123 | 124 | /// Prompts the user to select a specific audio output device. 125 | Future selectAudioOutput([AudioOutputOptions? options]) => 126 | throw UnimplementedError(); 127 | } 128 | 129 | /// This describes the media input and output devices, such as microphones, 130 | /// cameras, headsets, and so forth.
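///
/// A hedged enumeration sketch, again assuming a concrete [MediaDevices]
/// implementation behind the `mediaDevices` placeholder:
///
/// ```dart
/// final devices = await mediaDevices.enumerateDevices();
/// for (final device in devices) {
///   print('${device.kind}: ${device.label} (id: ${device.deviceId})');
/// }
/// ```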
131 | class MediaDeviceInfo { 132 | MediaDeviceInfo({ 133 | this.kind, 134 | required this.label, 135 | this.groupId, 136 | required this.deviceId, 137 | }); 138 | 139 | /// Returns a String that is an identifier for the represented device that 140 | /// is persisted across sessions. It is un-guessable by other applications 141 | /// and unique to the origin of the calling application. It is reset when 142 | /// the user clears cookies (for Private Browsing, a different identifier 143 | /// is used that is not persisted across sessions). 144 | final String deviceId; 145 | 146 | /// Returns a String that is a group identifier. Two devices have the same 147 | /// group identifier if they belong to the same physical device 148 | /// — for example a monitor with both a built-in camera and a microphone. 149 | final String? groupId; 150 | 151 | /// Returns an enumerated value that is either 'videoinput', 'audioinput' or 152 | /// 'audiooutput'. 153 | final String? kind; 154 | 155 | /// Returns a String that is a label describing this device 156 | /// (for example "External USB Webcam"). 157 | final String label; 158 | } 159 | 160 | /// An object that configures what device(s) may be offered in the user prompt. 161 | class AudioOutputOptions { 162 | AudioOutputOptions({ 163 | this.deviceId = '', 164 | }); 165 | 166 | /// A string representing the id of the (only) device to display in the prompt (with default value: ""). 167 | final String deviceId; 168 | } 169 | -------------------------------------------------------------------------------- /lib/src/navigator.dart: -------------------------------------------------------------------------------- 1 | import 'media_stream.dart'; 2 | import 'mediadevices.dart'; 3 | 4 | abstract class Navigator { 5 | @Deprecated('use mediadevice.getUserMedia() instead') 6 | Future getUserMedia(Map mediaConstraints); 7 | 8 | @Deprecated('use mediadevice.getDisplayMedia() instead') 9 | Future getDisplayMedia(Map mediaConstraints); 10 | 11 | @Deprecated('use mediadevice.enumerateDevices() instead') 12 | Future> getSources(); 13 | 14 | MediaDevices get mediaDevices; 15 | } 16 | -------------------------------------------------------------------------------- /lib/src/rtc_configuration.dart: -------------------------------------------------------------------------------- 1 | // abstract class RTCOfferOptions { 2 | // RTCOfferOptions({ 3 | // bool iceRestart, 4 | // bool offerToReceiveAudio, 5 | // bool offerToReceiveVideo, 6 | // bool voiceActivityDetection, 7 | // }); 8 | // bool get iceRestart; 9 | // bool get offerToReceiveAudio; 10 | // bool get offerToReceiveVideo; 11 | // bool get voiceActivityDetection; 12 | // } 13 | 14 | // abstract class RTCAnswerOptions { 15 | // RTCAnswerOptions({bool voiceActivityDetection}); 16 | // bool get voiceActivityDetection; 17 | // } 18 | 19 | // abstract class RTCConfiguration { 20 | // RTCConfiguration({ 21 | // List iceServers, 22 | // String rtcpMuxPolicy, 23 | // String iceTransportPolicy, 24 | // String bundlePolicy, 25 | // String peerIdentity, 26 | // int iceCandidatePoolSize, 27 | // }); 28 | // List get iceServers; 29 | 30 | // ///Optional: 'negotiate' or 'require' 31 | // String get rtcpMuxPolicy; 32 | 33 | // ///Optional: 'relay' or 'all' 34 | // String get iceTransportPolicy; 35 | 36 | // /// A DOMString which specifies the target peer identity for the 37 | // /// RTCPeerConnection. 
If this value is set (it defaults to null), 38 | // /// the RTCPeerConnection will not connect to a remote peer unless 39 | // /// it can successfully authenticate with the given name. 40 | // String get peerIdentity; 41 | 42 | // int get iceCandidatePoolSize; 43 | 44 | // ///Optional: 'balanced' | 'max-compat' | 'max-bundle' 45 | // String get bundlePolicy; 46 | // } 47 | 48 | // abstract class RTCIceServer { 49 | // RTCIceServer({String urls, String username, String credential}); 50 | // // String or List 51 | // dynamic get urls; 52 | // String get username; 53 | // String get credential; 54 | // } 55 | -------------------------------------------------------------------------------- /lib/src/rtc_data_channel.dart: -------------------------------------------------------------------------------- 1 | import 'dart:async'; 2 | import 'dart:typed_data'; 3 | 4 | import 'enums.dart'; 5 | 6 | class RTCDataChannelInit { 7 | bool ordered = true; 8 | int maxRetransmitTime = -1; 9 | int maxRetransmits = -1; 10 | String protocol = 'sctp'; //sctp | quic 11 | String binaryType = 'text'; // "binary" || text 12 | bool negotiated = false; 13 | int id = 0; 14 | Map toMap() { 15 | return { 16 | 'ordered': ordered, 17 | if (maxRetransmitTime > 0) 18 | //https://www.chromestatus.com/features/5198350873788416 19 | 'maxPacketLifeTime': maxRetransmitTime, 20 | if (maxRetransmits > 0) 'maxRetransmits': maxRetransmits, 21 | 'protocol': protocol, 22 | 'negotiated': negotiated, 23 | 'id': id 24 | }; 25 | } 26 | } 27 | 28 | /// A class that represents a datachannel message. 29 | /// Can either contain binary data as a [Uint8List] or 30 | /// text data as a [String]. 31 | class RTCDataChannelMessage { 32 | /// Construct a text message with a [String]. 33 | RTCDataChannelMessage(String text) { 34 | _data = text; 35 | _isBinary = false; 36 | } 37 | 38 | /// Construct a binary message with a [Uint8List]. 39 | RTCDataChannelMessage.fromBinary(Uint8List binary) { 40 | _data = binary; 41 | _isBinary = true; 42 | } 43 | 44 | late dynamic _data; 45 | late bool _isBinary; 46 | 47 | /// Tells whether this message contains binary. 48 | /// If this is false, it's a text message. 49 | bool get isBinary => _isBinary; 50 | 51 | MessageType get type => isBinary ? MessageType.binary : MessageType.text; 52 | 53 | /// Text contents of this message as [String]. 54 | /// Use only on text messages. 55 | /// See: [isBinary]. 56 | String get text => _data; 57 | 58 | /// Binary contents of this message as [Uint8List]. 59 | /// Use only on binary messages. 60 | /// See: [isBinary]. 61 | Uint8List get binary => _data; 62 | } 63 | 64 | abstract class RTCDataChannel { 65 | RTCDataChannel(); 66 | 67 | Function(RTCDataChannelState state)? onDataChannelState; 68 | Function(RTCDataChannelMessage data)? onMessage; 69 | Function(int currentAmount, int changedAmount)? onBufferedAmountChange; 70 | Function(int currentAmount)? onBufferedAmountLow; 71 | 72 | /// Get current state. 73 | RTCDataChannelState? get state; 74 | 75 | /// Get channel id 76 | int? get id; 77 | 78 | /// Get channel label 79 | String? get label; 80 | 81 | int? get bufferedAmount; 82 | 83 | /// Get the buffered amount from native DC. 84 | Future getBufferedAmount() => throw UnimplementedError(); 85 | 86 | /// Set threshold to trigger onBufferedAmountLow callback 87 | int? bufferedAmountLowThreshold; 88 | 89 | /// Stream of state change events. Emits the new state on change. 90 | /// Closes when the [RTCDataChannel] is closed. 
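  ///
  /// A hedged usage sketch: `channel` below is a placeholder for a concrete
  /// [RTCDataChannel] obtained from a platform implementation (for example
  /// via `RTCPeerConnection.createDataChannel`).
  ///
  /// ```dart
  /// channel.stateChangeStream.listen((state) {
  ///   if (state == RTCDataChannelState.RTCDataChannelOpen) {
  ///     channel.send(RTCDataChannelMessage('hello'));
  ///   }
  /// });
  /// channel.messageStream.listen((message) {
  ///   print(message.isBinary ? 'got ${message.binary.length} bytes' : message.text);
  /// });
  /// ```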
91 | late Stream stateChangeStream; 92 | 93 | /// Stream of incoming messages. Emits the message. 94 | /// Closes when the [RTCDataChannel] is closed. 95 | late Stream messageStream; 96 | 97 | /// Send a message to this datachannel. 98 | /// To send a text message, use the default constructor to instantiate a text [RTCDataChannelMessage] 99 | /// for the [message] parameter. 100 | /// To send a binary message, pass a binary [RTCDataChannelMessage] 101 | /// constructed with [RTCDataChannelMessage.fromBinary] 102 | Future send(RTCDataChannelMessage message); 103 | 104 | Future close(); 105 | } 106 | -------------------------------------------------------------------------------- /lib/src/rtc_dtmf_sender.dart: -------------------------------------------------------------------------------- 1 | abstract class RTCDTMFSender { 2 | /// tones:A String containing the DTMF codes to be transmitted to the recipient. 3 | /// Specifying an empty string as the tones parameter clears the tone 4 | /// buffer, aborting any currently queued tones. A "," character inserts 5 | /// a two second delay. 6 | /// duration: This value must be between 40 ms and 6000 ms (6 seconds). 7 | /// The default is 100 ms. 8 | /// interToneGap: The length of time, in milliseconds, to wait between tones. 9 | /// The browser will enforce a minimum value of 30 ms (that is, 10 | /// if you specify a lower value, 30 ms will be used instead); 11 | /// the default is 70 ms. 12 | Future insertDTMF(String tones, 13 | {int duration = 100, int interToneGap = 70}); 14 | 15 | /// Compatible with old methods 16 | @Deprecated('Use method insertDTMF instead') 17 | Future sendDtmf(String tones, 18 | {int duration = 100, int interToneGap = 70}) => 19 | insertDTMF(tones, duration: duration, interToneGap: interToneGap); 20 | 21 | Future canInsertDtmf(); 22 | } 23 | -------------------------------------------------------------------------------- /lib/src/rtc_ice_candidate.dart: -------------------------------------------------------------------------------- 1 | class RTCIceCandidate { 2 | RTCIceCandidate(this.candidate, this.sdpMid, this.sdpMLineIndex); 3 | final String? candidate; 4 | final String? sdpMid; 5 | final int? sdpMLineIndex; 6 | dynamic toMap() { 7 | return { 8 | 'candidate': candidate, 9 | 'sdpMid': sdpMid, 10 | 'sdpMLineIndex': sdpMLineIndex 11 | }; 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /lib/src/rtc_peerconnection.dart: -------------------------------------------------------------------------------- 1 | import 'dart:async'; 2 | 3 | import 'enums.dart'; 4 | import 'media_stream.dart'; 5 | import 'media_stream_track.dart'; 6 | import 'rtc_data_channel.dart'; 7 | import 'rtc_dtmf_sender.dart'; 8 | import 'rtc_ice_candidate.dart'; 9 | import 'rtc_rtp_receiver.dart'; 10 | import 'rtc_rtp_sender.dart'; 11 | import 'rtc_rtp_transceiver.dart'; 12 | import 'rtc_session_description.dart'; 13 | import 'rtc_stats_report.dart'; 14 | import 'rtc_track_event.dart'; 15 | 16 | abstract class RTCPeerConnection { 17 | RTCPeerConnection(); 18 | 19 | // public: delegate 20 | Function(RTCSignalingState state)? onSignalingState; 21 | Function(RTCPeerConnectionState state)? onConnectionState; 22 | Function(RTCIceGatheringState state)? onIceGatheringState; 23 | Function(RTCIceConnectionState state)? onIceConnectionState; 24 | Function(RTCIceCandidate candidate)? onIceCandidate; 25 | Function(MediaStream stream)? onAddStream; 26 | Function(MediaStream stream)? 
onRemoveStream; 27 | Function(MediaStream stream, MediaStreamTrack track)? onAddTrack; 28 | Function(MediaStream stream, MediaStreamTrack track)? onRemoveTrack; 29 | Function(RTCDataChannel channel)? onDataChannel; 30 | Function()? onRenegotiationNeeded; 31 | 32 | /// Unified-Plan 33 | Function(RTCTrackEvent event)? onTrack; 34 | 35 | RTCSignalingState? get signalingState; 36 | 37 | Future getSignalingState() async { 38 | return signalingState; 39 | } 40 | 41 | RTCIceGatheringState? get iceGatheringState; 42 | 43 | Future getIceGatheringState() async { 44 | return iceGatheringState; 45 | } 46 | 47 | RTCIceConnectionState? get iceConnectionState; 48 | 49 | Future getIceConnectionState() async { 50 | return iceConnectionState; 51 | } 52 | 53 | RTCPeerConnectionState? get connectionState; 54 | 55 | Future getConnectionState() async { 56 | return connectionState; 57 | } 58 | 59 | Future dispose(); 60 | 61 | Map get getConfiguration; 62 | 63 | Future setConfiguration(Map configuration); 64 | 65 | Future createOffer([Map constraints]); 66 | 67 | Future createAnswer( 68 | [Map constraints]); 69 | 70 | Future addStream(MediaStream stream); 71 | 72 | Future removeStream(MediaStream stream); 73 | 74 | Future getLocalDescription(); 75 | 76 | Future setLocalDescription(RTCSessionDescription description); 77 | 78 | Future getRemoteDescription(); 79 | 80 | Future setRemoteDescription(RTCSessionDescription description); 81 | 82 | Future addCandidate(RTCIceCandidate candidate); 83 | 84 | Future> getStats([MediaStreamTrack? track]); 85 | 86 | List getLocalStreams(); 87 | 88 | List getRemoteStreams(); 89 | 90 | Future createDataChannel( 91 | String label, RTCDataChannelInit dataChannelDict); 92 | 93 | Future restartIce(); 94 | 95 | Future close(); 96 | 97 | RTCDTMFSender createDtmfSender(MediaStreamTrack track); 98 | 99 | /// Unified-Plan. 100 | Future> getSenders(); 101 | 102 | Future> get senders => getSenders(); 103 | 104 | Future> getReceivers(); 105 | 106 | Future> get receivers => getReceivers(); 107 | 108 | Future> getTransceivers(); 109 | 110 | Future> get transceivers => getTransceivers(); 111 | 112 | Future addTrack(MediaStreamTrack track, [MediaStream stream]); 113 | 114 | Future removeTrack(RTCRtpSender sender); 115 | 116 | /// 'audio|video', { 'direction': 'recvonly|sendonly|sendrecv' } 117 | Future addTransceiver( 118 | {MediaStreamTrack track, 119 | RTCRtpMediaType kind, 120 | RTCRtpTransceiverInit init}); 121 | } 122 | -------------------------------------------------------------------------------- /lib/src/rtc_rtcp_parameters.dart: -------------------------------------------------------------------------------- 1 | class RTCRTCPParameters { 2 | RTCRTCPParameters(this.cname, this.reducedSize); 3 | factory RTCRTCPParameters.fromMap(Map map) { 4 | return RTCRTCPParameters(map['cname'], map['reducedSize']); 5 | } 6 | 7 | /// The Canonical Name used by RTCP 8 | String? 
cname; 9 | 10 | /// Whether reduced size RTCP is configured or compound RTCP 11 | bool reducedSize; 12 | 13 | Map toMap() { 14 | return { 15 | 'cname': cname, 16 | 'reducedSize': reducedSize, 17 | }; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /lib/src/rtc_rtp_capabilities.dart: -------------------------------------------------------------------------------- 1 | class RTCRtpCodecCapability { 2 | RTCRtpCodecCapability( 3 | {this.channels, 4 | required this.clockRate, 5 | required this.mimeType, 6 | this.sdpFmtpLine}); 7 | factory RTCRtpCodecCapability.fromMap(Map map) { 8 | return RTCRtpCodecCapability( 9 | channels: map['channels'], 10 | clockRate: map['clockRate'], 11 | mimeType: map['mimeType'], 12 | sdpFmtpLine: map['sdpFmtpLine'], 13 | ); 14 | } 15 | num? channels; 16 | num clockRate; 17 | String mimeType; 18 | String? sdpFmtpLine; 19 | Map toMap() { 20 | return { 21 | if (channels != null) 'channels': channels, 22 | 'clockRate': clockRate, 23 | 'mimeType': mimeType, 24 | if (sdpFmtpLine != null) 'sdpFmtpLine': sdpFmtpLine, 25 | }; 26 | } 27 | } 28 | 29 | class RTCRtpHeaderExtensionCapability { 30 | RTCRtpHeaderExtensionCapability(this.uri); 31 | factory RTCRtpHeaderExtensionCapability.fromMap(Map map) { 32 | return RTCRtpHeaderExtensionCapability(map['uri']); 33 | } 34 | String uri; 35 | Map toMap() { 36 | return { 37 | 'uri': uri, 38 | }; 39 | } 40 | } 41 | 42 | class RTCRtpCapabilities { 43 | RTCRtpCapabilities({this.codecs, this.headerExtensions, this.fecMechanisms}); 44 | factory RTCRtpCapabilities.fromMap(Map map) { 45 | var codecs = []; 46 | dynamic codecsMap = map['codecs']; 47 | codecsMap.forEach((params) { 48 | codecs.add(RTCRtpCodecCapability.fromMap(params)); 49 | }); 50 | var headerExtensions = []; 51 | dynamic headerExtensionsMap = map['headerExtensions']; 52 | headerExtensionsMap.forEach((params) { 53 | headerExtensions.add(RTCRtpHeaderExtensionCapability.fromMap(params)); 54 | }); 55 | var fecMechanisms = []; 56 | dynamic fecMechanismsMap = map['fecMechanisms']; 57 | fecMechanismsMap.forEach((params) { 58 | fecMechanisms.add(params); 59 | }); 60 | return RTCRtpCapabilities( 61 | codecs: codecs, 62 | headerExtensions: headerExtensions, 63 | fecMechanisms: fecMechanisms); 64 | } 65 | List? codecs; 66 | List? headerExtensions; 67 | List? fecMechanisms; 68 | 69 | Map toMap() { 70 | return { 71 | 'codecs': codecs?.map((e) => e.toMap()).toList(), 72 | 'headerExtensions': headerExtensions?.map((e) => e.toMap()).toList(), 73 | if (fecMechanisms != null) 'fecMechanisms': fecMechanisms, 74 | }; 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /lib/src/rtc_rtp_parameters.dart: -------------------------------------------------------------------------------- 1 | import 'enums.dart'; 2 | import 'rtc_rtcp_parameters.dart'; 3 | 4 | class RTCRTPCodec { 5 | RTCRTPCodec( 6 | {this.payloadType, 7 | this.name, 8 | this.kind, 9 | this.clockRate, 10 | this.numChannels, 11 | this.parameters}); 12 | 13 | factory RTCRTPCodec.fromMap(Map map) { 14 | return RTCRTPCodec( 15 | payloadType: map['payloadType'], 16 | name: map['name'], 17 | kind: map['kind'], 18 | clockRate: map['clockRate'], 19 | numChannels: map['numChannels'] ?? 1, 20 | parameters: map['parameters']); 21 | } 22 | // Payload type used to identify this codec in RTP packets. 23 | int? payloadType; 24 | 25 | /// Name used to identify the codec. Equivalent to MIME subtype. 26 | String? 
name; 27 | 28 | /// The media type of this codec. Equivalent to MIME top-level type. 29 | String? kind; 30 | 31 | /// Clock rate in Hertz. 32 | int? clockRate; 33 | 34 | /// The number of audio channels used. Set to null for video codecs. 35 | int? numChannels; 36 | 37 | /// The "format specific parameters" field from the "a=fmtp" line in the SDP 38 | Map? parameters; 39 | 40 | Map toMap() { 41 | return { 42 | 'payloadType': payloadType, 43 | 'name': name, 44 | 'kind': kind, 45 | 'clockRate': clockRate, 46 | 'numChannels': numChannels, 47 | 'parameters': parameters, 48 | }; 49 | } 50 | } 51 | 52 | class RTCRtpEncoding { 53 | RTCRtpEncoding({ 54 | this.rid, 55 | this.active = true, 56 | this.maxBitrate, 57 | this.maxFramerate, 58 | this.minBitrate, 59 | this.numTemporalLayers = 1, 60 | this.scaleResolutionDownBy = 1.0, 61 | this.ssrc, 62 | this.scalabilityMode, 63 | }); 64 | 65 | factory RTCRtpEncoding.fromMap(Map map) => RTCRtpEncoding( 66 | rid: map['rid'], 67 | active: map['active'], 68 | maxBitrate: map['maxBitrate'], 69 | maxFramerate: map['maxFramerate'], 70 | minBitrate: map['minBitrate'], 71 | numTemporalLayers: map['numTemporalLayers'], 72 | scaleResolutionDownBy: map['scaleResolutionDownBy'], 73 | ssrc: map['ssrc'], 74 | scalabilityMode: map['scalabilityMode'], 75 | ); 76 | 77 | /// If non-null, this represents the RID that identifies this encoding layer. 78 | /// RIDs are used to identify layers in simulcast. 79 | String? rid; 80 | 81 | /// Set to true to cause this encoding to be sent, and false for it not to 82 | /// be sent. 83 | bool active; 84 | 85 | /// If non-null, this represents the Transport Independent Application 86 | /// Specific maximum bandwidth defined in RFC3890. If null, there is no 87 | /// maximum bitrate. 88 | int? maxBitrate; 89 | 90 | /// The minimum bitrate in bps for video. 91 | int? minBitrate; 92 | 93 | /// The max framerate in fps for video. 94 | int? maxFramerate; 95 | 96 | /// The number of temporal layers for video. 97 | int? numTemporalLayers; 98 | 99 | /// If non-null, scale the width and height down by this factor for video. If null, 100 | /// implementation default scaling factor will be used. 101 | double? scaleResolutionDownBy; 102 | 103 | /// SSRC to be used by this encoding. 104 | /// Can't be changed between getParameters/setParameters. 105 | int? ssrc; 106 | 107 | String? scalabilityMode; 108 | 109 | Map toMap() => { 110 | 'active': active, 111 | if (rid != null) 'rid': rid, 112 | if (maxBitrate != null) 'maxBitrate': maxBitrate, 113 | if (maxFramerate != null) 'maxFramerate': maxFramerate, 114 | if (minBitrate != null) 'minBitrate': minBitrate, 115 | if (numTemporalLayers != null) 'numTemporalLayers': numTemporalLayers, 116 | if (scaleResolutionDownBy != null) 117 | 'scaleResolutionDownBy': scaleResolutionDownBy, 118 | if (scalabilityMode != null) 'scalabilityMode': scalabilityMode, 119 | if (ssrc != null) 'ssrc': ssrc, 120 | }; 121 | } 122 | 123 | class RTCHeaderExtension { 124 | RTCHeaderExtension({this.uri, this.id, this.encrypted}); 125 | factory RTCHeaderExtension.fromMap(Map map) { 126 | return RTCHeaderExtension( 127 | uri: map['uri'], id: map['id'], encrypted: map['encrypted']); 128 | } 129 | 130 | /// The URI of the RTP header extension, as defined in RFC5285. 131 | String? uri; 132 | 133 | /// The value put in the RTP packet to identify the header extension. 134 | int? id; 135 | 136 | /// Whether the header extension is encrypted or not. 137 | bool? 
encrypted; 138 | 139 | Map toMap() { 140 | return { 141 | 'uri': uri, 142 | 'id': id, 143 | 'encrypted': encrypted, 144 | }; 145 | } 146 | } 147 | 148 | class RTCRtpParameters { 149 | RTCRtpParameters({ 150 | this.transactionId, 151 | this.rtcp, 152 | this.headerExtensions, 153 | this.encodings, 154 | this.codecs, 155 | this.degradationPreference, 156 | }); 157 | 158 | factory RTCRtpParameters.fromMap(Map map) { 159 | var encodings = []; 160 | dynamic encodingsMap = map['encodings']; 161 | encodingsMap.forEach((params) { 162 | encodings.add(RTCRtpEncoding.fromMap(params)); 163 | }); 164 | var headerExtensions = []; 165 | dynamic headerExtensionsMap = map['headerExtensions']; 166 | headerExtensionsMap.forEach((params) { 167 | headerExtensions.add(RTCHeaderExtension.fromMap(params)); 168 | }); 169 | var codecs = []; 170 | dynamic codecsMap = map['codecs']; 171 | codecsMap.forEach((params) { 172 | codecs.add(RTCRTPCodec.fromMap(params)); 173 | }); 174 | 175 | var degradationPreference = map['degradationPreference']; 176 | var rtcp = RTCRTCPParameters.fromMap(map['rtcp']); 177 | return RTCRtpParameters( 178 | transactionId: map['transactionId'], 179 | rtcp: rtcp, 180 | headerExtensions: headerExtensions, 181 | encodings: encodings, 182 | degradationPreference: 183 | degradationPreferenceforString(degradationPreference), 184 | codecs: codecs); 185 | } 186 | 187 | String? transactionId; 188 | 189 | RTCRTCPParameters? rtcp; 190 | 191 | List? headerExtensions; 192 | 193 | List? encodings; 194 | 195 | RTCDegradationPreference? degradationPreference; 196 | 197 | /// Codec parameters can't currently be changed between getParameters and 198 | /// setParameters. Though in the future it will be possible to reorder them or 199 | /// remove them. 200 | List? codecs; 201 | 202 | Map toMap() { 203 | var headerExtensionsList = []; 204 | headerExtensions?.forEach((params) { 205 | headerExtensionsList.add(params.toMap()); 206 | }); 207 | var encodingList = []; 208 | encodings?.forEach((params) { 209 | encodingList.add(params.toMap()); 210 | }); 211 | var codecsList = []; 212 | codecs?.forEach((params) { 213 | codecsList.add(params.toMap()); 214 | }); 215 | return { 216 | 'transactionId': transactionId, 217 | if (rtcp != null) 'rtcp': rtcp!.toMap(), 218 | 'headerExtensions': headerExtensionsList, 219 | 'encodings': encodingList, 220 | 'codecs': codecsList, 221 | if (degradationPreference != null) 222 | 'degradationPreference': 223 | typeRTCDegradationPreferenceString[degradationPreference!], 224 | }; 225 | } 226 | } 227 | -------------------------------------------------------------------------------- /lib/src/rtc_rtp_receiver.dart: -------------------------------------------------------------------------------- 1 | import 'enums.dart'; 2 | import 'media_stream_track.dart'; 3 | import 'rtc_rtp_parameters.dart'; 4 | import 'rtc_stats_report.dart'; 5 | 6 | abstract class RTCRtpReceiver { 7 | RTCRtpReceiver(); 8 | 9 | Future> getStats(); 10 | 11 | /// public: 12 | Function(RTCRtpReceiver rtpReceiver, RTCRtpMediaType mediaType)? 13 | onFirstPacketReceived; 14 | 15 | /// The WebRTC specification only defines RTCRtpParameters in terms of senders, 16 | /// but this API also applies them to receivers, similar to ORTC: 17 | /// http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*. 18 | RTCRtpParameters get parameters; 19 | 20 | MediaStreamTrack? 
get track; 21 | 22 | String get receiverId; 23 | } 24 | -------------------------------------------------------------------------------- /lib/src/rtc_rtp_sender.dart: -------------------------------------------------------------------------------- 1 | import 'dart:async'; 2 | 3 | import 'media_stream.dart'; 4 | import 'media_stream_track.dart'; 5 | import 'rtc_dtmf_sender.dart'; 6 | import 'rtc_rtp_parameters.dart'; 7 | import 'rtc_stats_report.dart'; 8 | 9 | abstract class RTCRtpSender { 10 | RTCRtpSender(); 11 | 12 | Future setParameters(RTCRtpParameters parameters); 13 | 14 | Future replaceTrack(MediaStreamTrack? track); 15 | 16 | Future setTrack(MediaStreamTrack? track, {bool takeOwnership = true}); 17 | 18 | Future<List<StatsReport>> getStats(); 19 | 20 | Future setStreams(List<MediaStream> streams); 21 | 22 | RTCRtpParameters get parameters; 23 | 24 | MediaStreamTrack? get track; 25 | 26 | String get senderId; 27 | 28 | bool get ownsTrack; 29 | 30 | RTCDTMFSender get dtmfSender; 31 | 32 | Future dispose(); 33 | } 34 | -------------------------------------------------------------------------------- /lib/src/rtc_rtp_transceiver.dart: -------------------------------------------------------------------------------- 1 | import 'dart:async'; 2 | 3 | import 'enums.dart'; 4 | import 'media_stream.dart'; 5 | import 'rtc_rtp_capabilities.dart'; 6 | import 'rtc_rtp_parameters.dart'; 7 | import 'rtc_rtp_receiver.dart'; 8 | import 'rtc_rtp_sender.dart'; 9 | 10 | List<RTCRtpEncoding> listToRtpEncodings(List<Map<String, dynamic>> list) { 11 | return list.map((e) => RTCRtpEncoding.fromMap(e)).toList(); 12 | } 13 | 14 | class RTCRtpTransceiverInit { 15 | RTCRtpTransceiverInit({ 16 | this.direction, 17 | this.streams, 18 | this.sendEncodings, 19 | }); 20 | TransceiverDirection? direction; 21 | List<MediaStream>? streams; 22 | List<RTCRtpEncoding>? sendEncodings; 23 | } 24 | 25 | abstract class RTCRtpTransceiver { 26 | RTCRtpTransceiver(); 27 | 28 | Future getCurrentDirection(); 29 | 30 | Future setDirection(TransceiverDirection direction); 31 | 32 | Future getDirection(); 33 | 34 | Future setCodecPreferences(List<RTCRtpCodecCapability> codecs); 35 | 36 | String get mid; 37 | 38 | RTCRtpSender get sender; 39 | 40 | RTCRtpReceiver get receiver; 41 | 42 | bool get stoped; 43 | 44 | String get transceiverId; 45 | 46 | Future stop(); 47 | 48 | /// Deprecated methods. 49 | @Deprecated('Use `await getCurrentDirection()` instead') 50 | TransceiverDirection get currentDirection => throw UnimplementedError( 51 | 'Needs to be called asynchronously from the native SDK, so this method is deprecated'); 52 | } 53 | -------------------------------------------------------------------------------- /lib/src/rtc_session_description.dart: -------------------------------------------------------------------------------- 1 | class RTCSessionDescription { 2 | RTCSessionDescription(this.sdp, this.type); 3 | String? sdp; 4 | String?
type; 5 | dynamic toMap() { 6 | return {'sdp': sdp, 'type': type}; 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /lib/src/rtc_stats_report.dart: -------------------------------------------------------------------------------- 1 | class StatsReport { 2 | StatsReport(this.id, this.type, this.timestamp, this.values); 3 | factory StatsReport.fromMap(Map map) => 4 | StatsReport(map['id'], map['type'], map['timestamp'], map); 5 | final String id; 6 | final String type; 7 | final double timestamp; 8 | final Map values; 9 | } 10 | -------------------------------------------------------------------------------- /lib/src/rtc_track_event.dart: -------------------------------------------------------------------------------- 1 | import 'media_stream.dart'; 2 | import 'media_stream_track.dart'; 3 | import 'rtc_rtp_receiver.dart'; 4 | import 'rtc_rtp_transceiver.dart'; 5 | 6 | class RTCTrackEvent { 7 | RTCTrackEvent({ 8 | this.receiver, 9 | required this.streams, 10 | required this.track, 11 | this.transceiver, 12 | }); 13 | final RTCRtpReceiver? receiver; 14 | final List<MediaStream> streams; 15 | final MediaStreamTrack track; 16 | final RTCRtpTransceiver? transceiver; 17 | } 18 | -------------------------------------------------------------------------------- /lib/src/rtc_video_renderer.dart: -------------------------------------------------------------------------------- 1 | import 'media_stream.dart'; 2 | 3 | class RTCVideoValue { 4 | const RTCVideoValue({ 5 | this.width = 0.0, 6 | this.height = 0.0, 7 | this.rotation = 0, 8 | this.renderVideo = false, 9 | }); 10 | static const RTCVideoValue empty = RTCVideoValue(); 11 | final double width; 12 | final double height; 13 | final int rotation; 14 | final bool renderVideo; 15 | double get aspectRatio { 16 | if (width == 0.0 || height == 0.0) { 17 | return 1.0; 18 | } 19 | return (rotation == 90 || rotation == 270) 20 | ? height / width 21 | : width / height; 22 | } 23 | 24 | RTCVideoValue copyWith({ 25 | double? width, 26 | double? height, 27 | int? rotation, 28 | bool renderVideo = true, 29 | }) { 30 | return RTCVideoValue( 31 | width: width ?? this.width, 32 | height: height ?? this.height, 33 | rotation: rotation ?? this.rotation, 34 | renderVideo: this.width != 0 && this.height != 0 && renderVideo, 35 | ); 36 | } 37 | 38 | @override 39 | String toString() => 40 | '$runtimeType(width: $width, height: $height, rotation: $rotation)'; 41 | } 42 | 43 | abstract class VideoRenderer { 44 | /// When the video size changes, or the native texture 45 | /// changes (angle or size), notify the user to redraw the Widget. 46 | Function? onResize; 47 | 48 | /// When the first frame is rendered, notify the user that video started playing. 49 | Function? onFirstFrameRendered; 50 | 51 | int get videoWidth; 52 | 53 | int get videoHeight; 54 | 55 | bool get muted; 56 | set muted(bool mute); 57 | 58 | /// Returns true if the audio output has been successfully changed. 59 | Future<bool> audioOutput(String deviceId); 60 | 61 | bool get renderVideo; 62 | 63 | int? get textureId; 64 | 65 | Future initialize(); 66 | 67 | MediaStream? get srcObject; 68 | 69 | set srcObject(MediaStream?
stream); 70 | 71 | Future dispose(); 72 | } 73 | -------------------------------------------------------------------------------- /lib/webrtc_interface.dart: -------------------------------------------------------------------------------- 1 | library webrtc_interface; 2 | 3 | export 'src/enums.dart'; 4 | export 'src/factory.dart'; 5 | export 'src/frame_cryptor.dart'; 6 | export 'src/media_recorder.dart'; 7 | export 'src/media_stream.dart'; 8 | export 'src/media_stream_track.dart'; 9 | export 'src/mediadevices.dart'; 10 | export 'src/navigator.dart'; 11 | export 'src/rtc_data_channel.dart'; 12 | export 'src/rtc_dtmf_sender.dart'; 13 | export 'src/rtc_ice_candidate.dart'; 14 | export 'src/rtc_peerconnection.dart'; 15 | export 'src/rtc_rtcp_parameters.dart'; 16 | export 'src/rtc_rtp_capabilities.dart'; 17 | export 'src/rtc_rtp_parameters.dart'; 18 | export 'src/rtc_rtp_receiver.dart'; 19 | export 'src/rtc_rtp_sender.dart'; 20 | export 'src/rtc_rtp_transceiver.dart'; 21 | export 'src/rtc_session_description.dart'; 22 | export 'src/rtc_stats_report.dart'; 23 | export 'src/rtc_track_event.dart'; 24 | export 'src/rtc_video_renderer.dart'; 25 | -------------------------------------------------------------------------------- /pubspec.yaml: -------------------------------------------------------------------------------- 1 | name: webrtc_interface 2 | description: WebRTC Interface for Dart-Web/Flutter. 3 | version: 1.2.3 4 | homepage: https://flutter-webrtc.org 5 | 6 | environment: 7 | sdk: ">=2.12.0 <4.0.0" 8 | 9 | dev_dependencies: 10 | import_sorter: ^4.6.0 11 | pedantic: ^1.11.1 12 | test: any 13 | --------------------------------------------------------------------------------
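Usage sketch for the parameter classes: the types in lib/src/rtc_rtp_parameters.dart are plain data holders whose toMap() output is what concrete implementations pass across their platform channels. The following is a minimal, illustrative sketch (not taken from the package source); the rid values and bitrates are made up.

import 'package:webrtc_interface/webrtc_interface.dart';

void main() {
  // Two simulcast layers: a full-resolution layer and a quarter-resolution layer.
  final params = RTCRtpParameters(
    encodings: [
      RTCRtpEncoding(rid: 'f', maxBitrate: 1200 * 1000),
      RTCRtpEncoding(rid: 'q', maxBitrate: 300 * 1000, scaleResolutionDownBy: 4.0),
    ],
  );

  // toMap() produces the plain map representation; fields left null, such as
  // transactionId, serialize as null, and each encoding's toMap() omits unset
  // optional keys.
  print(params.toMap());
}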
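Usage sketch for the sender interface: RTCRtpSender is consumed through a read-modify-write cycle, reading parameters, adjusting the encodings, and handing the whole object back to setParameters, which mirrors how the native setParameters APIs behave. The sketch below assumes a sender already obtained from a concrete implementation (for example from a peer connection created by an implementing package such as flutter_webrtc); the helper name capSendBitrate is hypothetical.

import 'package:webrtc_interface/webrtc_interface.dart';

/// Lowers the maximum send bitrate of every encoding on [sender].
Future<void> capSendBitrate(RTCRtpSender sender, int maxBitrateBps) async {
  final params = sender.parameters;
  // Mutate the encodings in place; codecs cannot currently be changed between
  // getting and setting parameters (see the doc comment in
  // rtc_rtp_parameters.dart).
  params.encodings?.forEach((encoding) {
    encoding.maxBitrate = maxBitrateBps;
  });
  await sender.setParameters(params);
}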