├── renovate.json ├── web ├── favicon.ico ├── styles.css ├── test_template.dart ├── index.html ├── test_video_element.dart ├── p2p │ ├── simple_websocket.dart │ ├── random_string.dart │ ├── p2p.dart │ └── signaling.dart ├── test_media_stream.dart ├── test_media_stream_track.dart ├── test_media_devices.dart ├── main.dart └── test_peerconnection.dart ├── lib ├── dart_webrtc.dart └── src │ ├── rtc_track_event_impl.dart │ ├── rtc_dtmf_sender_impl.dart │ ├── navigator_impl.dart │ ├── media_devices.dart │ ├── media_recorder.dart │ ├── rtc_rtp_receiver_impl.dart │ ├── rtc_video_element.dart │ ├── media_stream_impl.dart │ ├── media_recorder_impl.dart │ ├── factory_impl.dart │ ├── rtc_data_channel_impl.dart │ ├── media_stream_track_impl.dart │ ├── rtc_rtp_parameters_impl.dart │ ├── rtc_rtp_sender_impl.dart │ ├── rtc_rtp_transceiver_impl.dart │ ├── mediadevices_impl.dart │ └── rtc_peerconnection_impl.dart ├── .github └── workflows │ ├── publish.yaml │ └── build.yml ├── README.md ├── .gitignore ├── pubspec.yaml ├── LICENSE ├── CHANGELOG.md └── analysis_options.yaml /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "config:base" 4 | ] 5 | } 6 | -------------------------------------------------------------------------------- /web/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tsukumijima/dart-webrtc/main/web/favicon.ico -------------------------------------------------------------------------------- /web/styles.css: -------------------------------------------------------------------------------- 1 | @import url(https://fonts.googleapis.com/css?family=Roboto); 2 | 3 | html, body { 4 | width: 100%; 5 | height: 100%; 6 | margin: 0; 7 | padding: 0; 8 | font-family: 'Roboto', sans-serif; 9 | } 10 | 11 | #output { 12 | padding: 20px; 13 | text-align: center; 14 | } 15 | 
-------------------------------------------------------------------------------- /web/test_template.dart: -------------------------------------------------------------------------------- 1 | import 'package:test/test.dart'; 2 | 3 | List testFunctions = [ 4 | () => test('ClassName.constructor()', () {}), 5 | () => test('ClassName.method1()', () {}), 6 | () => test('ClassName.method2()', () {}), 7 | () => test('ClassName.method3()', () {}) 8 | ]; 9 | -------------------------------------------------------------------------------- /lib/dart_webrtc.dart: -------------------------------------------------------------------------------- 1 | library dart_webrtc; 2 | 3 | export 'package:webrtc_interface/webrtc_interface.dart' 4 | hide MediaDevices, MediaRecorder, Navigator; 5 | 6 | export 'src/factory_impl.dart'; 7 | export 'src/media_devices.dart'; 8 | export 'src/media_recorder.dart'; 9 | export 'src/media_stream_impl.dart'; 10 | export 'src/rtc_video_element.dart'; 11 | -------------------------------------------------------------------------------- /lib/src/rtc_track_event_impl.dart: -------------------------------------------------------------------------------- 1 | import 'package:webrtc_interface/webrtc_interface.dart'; 2 | 3 | class RTCTrackEventWeb extends RTCTrackEvent { 4 | RTCTrackEventWeb( 5 | {RTCRtpReceiver? receiver, 6 | required List streams, 7 | required MediaStreamTrack track, 8 | RTCRtpTransceiver? 
transceiver}) 9 | : super( 10 | receiver: receiver, 11 | streams: streams, 12 | track: track, 13 | transceiver: transceiver); 14 | } 15 | -------------------------------------------------------------------------------- /.github/workflows/publish.yaml: -------------------------------------------------------------------------------- 1 | name: Publish plugin 2 | 3 | on: 4 | release: 5 | types: [published] 6 | 7 | jobs: 8 | publish: 9 | 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - name: Checkout 14 | uses: actions/checkout@v1 15 | - name: Publish 16 | uses: sakebook/actions-flutter-pub-publisher@v1.3.1 17 | with: 18 | credential: ${{ secrets.CREDENTIAL_JSON }} 19 | flutter_package: true 20 | skip_test: true 21 | dry_run: false 22 | -------------------------------------------------------------------------------- /lib/src/rtc_dtmf_sender_impl.dart: -------------------------------------------------------------------------------- 1 | import 'dart:html' as html; 2 | import 'package:webrtc_interface/webrtc_interface.dart'; 3 | 4 | class RTCDTMFSenderWeb extends RTCDTMFSender { 5 | RTCDTMFSenderWeb(this._jsDtmfSender); 6 | final html.RtcDtmfSender _jsDtmfSender; 7 | 8 | @override 9 | Future insertDTMF(String tones, 10 | {int duration = 100, int interToneGap = 70}) async { 11 | return _jsDtmfSender.insertDtmf(tones, duration, interToneGap); 12 | } 13 | 14 | @override 15 | Future canInsertDtmf() async { 16 | return _jsDtmfSender.canInsertDtmf ?? false; 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # dart-webrtc 2 | 3 | A webrtc interface wrapped in dart language. 4 | 5 | Use the [dart/js](https://pub.dev/packages/js) library to re-wrap the [webrtc](https://developer.mozilla.org/en-US/docs/Web/API/WebRTC_API) js interface of the browser, to adapted common browsers. 
6 | 7 | This library will be used for [flutter-webrtc](https://github.com/flutter-webrtc/flutter-webrtc) for [flutter web](https://flutter.dev/web) plugin. 8 | 9 | ## How to develop 10 | 11 | * `git clone https://github.com/flutter-webrtc/dart-webrtc && cd dart-webrtc` 12 | * `pub get` 13 | * `pub global activate webdev` 14 | * `webdev serve --auto=refresh` 15 | -------------------------------------------------------------------------------- /web/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | dart_webrtc 10 | 11 | 12 | 13 | 14 | 15 | 16 |
17 |
18 |
19 | 20 | 21 | -------------------------------------------------------------------------------- /lib/src/navigator_impl.dart: -------------------------------------------------------------------------------- 1 | import 'package:webrtc_interface/webrtc_interface.dart'; 2 | 3 | import 'mediadevices_impl.dart'; 4 | 5 | class NavigatorWeb extends Navigator { 6 | @override 7 | Future getDisplayMedia(Map mediaConstraints) { 8 | return mediaDevices.getDisplayMedia(mediaConstraints); 9 | } 10 | 11 | @override 12 | Future getSources() { 13 | return mediaDevices.enumerateDevices(); 14 | } 15 | 16 | @override 17 | Future getUserMedia(Map mediaConstraints) { 18 | return mediaDevices.getUserMedia(mediaConstraints); 19 | } 20 | 21 | @override 22 | MediaDevices get mediaDevices => MediaDevicesWeb(); 23 | } 24 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # See https://www.dartlang.org/guides/libraries/private-files 2 | 3 | # Files and directories created by pub 4 | .dart_tool/ 5 | .packages 6 | build/ 7 | # If you're building an application, you may want to check-in your pubspec.lock 8 | pubspec.lock 9 | 10 | # Directory created by dartdoc 11 | # If you don't generate documentation locally you can remove this line. 12 | doc/api/ 13 | 14 | # Avoid committing generated Javascript files: 15 | *.dart.js 16 | *.info.json # Produced by the --dump-info flag. 17 | *.js # When generated by dart2js. Don't specify *.js if your 18 | # project includes source files written in JavaScript. 19 | *.js_ 20 | *.js.deps 21 | *.js.map 22 | .DS_Store 23 | -------------------------------------------------------------------------------- /pubspec.yaml: -------------------------------------------------------------------------------- 1 | name: dart_webrtc 2 | description: Use the dart/js library to re-wrap the webrtc js interface of the browser, to adapted common browsers. 
3 | publish_to: none 4 | version: 1.0.12 5 | homepage: https://github.com/flutter-webrtc/dart-webrtc 6 | 7 | environment: 8 | sdk: '>=2.13.0 <3.0.0' 9 | 10 | dependencies: 11 | platform_detect: ^2.0.7 12 | webrtc_interface: 13 | git: 14 | url: https://github.com/tsukumijima/webrtc-interface.git 15 | ref: b9d22dbdff9f0f7065b7796defe9cc2afb143fcd 16 | 17 | dev_dependencies: 18 | build_runner: ^1.10.0 19 | build_web_compilers: ^2.11.0 20 | http: ^0.13.3 21 | import_sorter: ^4.6.0 22 | pedantic: ^1.9.0 23 | protoo_client: ^0.3.0 24 | test: ^1.15.4 25 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: Build 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ master ] 8 | 9 | jobs: 10 | test: 11 | name: Test on ${{ matrix.os }} 12 | runs-on: ${{ matrix.os }} 13 | strategy: 14 | matrix: 15 | os: [ubuntu-latest] 16 | 17 | steps: 18 | - uses: actions/checkout@v2 19 | - uses: actions/setup-java@v1 20 | with: 21 | java-version: '12.x' 22 | - uses: subosito/flutter-action@v1 23 | with: 24 | flutter-version: '2.2.3' 25 | channel: 'stable' 26 | - run: dart pub get 27 | - run: dart format lib/ test/ --set-exit-if-changed 28 | - run: dart pub run import_sorter:main --no-comments --exit-if-changed 29 | - run: dart analyze 30 | -------------------------------------------------------------------------------- /lib/src/media_devices.dart: -------------------------------------------------------------------------------- 1 | import '../dart_webrtc.dart'; 2 | 3 | class MediaDevices { 4 | @Deprecated( 5 | 'Use the navigator.mediaDevices.getUserMedia(Map) provide from the facrory instead') 6 | static Future getUserMedia( 7 | Map mediaConstraints) async { 8 | return navigator.mediaDevices.getUserMedia(mediaConstraints); 9 | } 10 | 11 | @Deprecated( 12 | 'Use the navigator.mediaDevices.getDisplayMedia(Map) provide 
from the facrory instead') 13 | static Future getDisplayMedia( 14 | Map mediaConstraints) async { 15 | return navigator.mediaDevices.getDisplayMedia(mediaConstraints); 16 | } 17 | 18 | @Deprecated( 19 | 'Use the navigator.mediaDevices.getSources() provide from the facrory instead') 20 | static Future> getSources() { 21 | return navigator.mediaDevices.getSources(); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /lib/src/media_recorder.dart: -------------------------------------------------------------------------------- 1 | import 'package:webrtc_interface/src/media_recorder.dart' as _interface; 2 | 3 | import '../dart_webrtc.dart'; 4 | 5 | class MediaRecorder extends _interface.MediaRecorder { 6 | MediaRecorder() : _delegate = mediaRecorder(); 7 | final _interface.MediaRecorder _delegate; 8 | 9 | @override 10 | Future start(String path, 11 | {MediaStreamTrack? videoTrack, RecorderAudioChannel? audioChannel}) => 12 | _delegate.start(path, videoTrack: videoTrack, audioChannel: audioChannel); 13 | 14 | @override 15 | Future stop() => _delegate.stop(); 16 | 17 | @override 18 | void startWeb( 19 | MediaStream stream, { 20 | Function(dynamic blob, bool isLastOne)? onDataChunk, 21 | String? mimeType, 22 | int timeSlice = 1000, 23 | }) => 24 | _delegate.startWeb( 25 | stream, 26 | onDataChunk: onDataChunk, 27 | mimeType: mimeType ?? 'video/webm', 28 | timeSlice: timeSlice, 29 | ); 30 | } 31 | -------------------------------------------------------------------------------- /web/test_video_element.dart: -------------------------------------------------------------------------------- 1 | import 'package:dart_webrtc/dart_webrtc.dart'; 2 | import 'package:test/test.dart'; 3 | 4 | late RTCVideoElement? 
localVideo; 5 | 6 | List testFunctions = [ 7 | () => test('RTCVideoElement.constructor()', () { 8 | localVideo = RTCVideoElement(); 9 | expect(localVideo != null, true); 10 | }), 11 | () => test('RTCVideoElement.muted()', () { 12 | localVideo?.muted = true; 13 | expect(localVideo?.muted, true); 14 | localVideo?.muted = false; 15 | expect(localVideo?.muted, false); 16 | }), 17 | () => test('RTCVideoElement.controls()', () { 18 | localVideo?.controls = false; 19 | expect(localVideo?.controls, false); 20 | localVideo?.controls = true; 21 | expect(localVideo?.controls, true); 22 | }), 23 | () => test('RTCVideoElement.autoplay()', () { 24 | localVideo?.autoplay = false; 25 | expect(localVideo?.autoplay, false); 26 | localVideo?.autoplay = true; 27 | expect(localVideo?.autoplay, true); 28 | }) 29 | ]; 30 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Flutter WebRTC 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /lib/src/rtc_rtp_receiver_impl.dart: -------------------------------------------------------------------------------- 1 | import 'dart:html'; 2 | import 'dart:js_util' as jsutil; 3 | import 'package:webrtc_interface/webrtc_interface.dart'; 4 | 5 | import 'media_stream_track_impl.dart'; 6 | import 'rtc_rtp_parameters_impl.dart'; 7 | 8 | class RTCRtpReceiverWeb extends RTCRtpReceiver { 9 | RTCRtpReceiverWeb(this._jsRtpReceiver); 10 | 11 | /// private: 12 | final RtcRtpReceiver _jsRtpReceiver; 13 | 14 | @override 15 | Future> getStats() async { 16 | var stats = await jsutil.promiseToFuture( 17 | jsutil.callMethod(_jsRtpReceiver, 'getStats', [])); 18 | var report = []; 19 | stats.forEach((key, value) { 20 | report.add( 21 | StatsReport(value['id'], value['type'], value['timestamp'], value)); 22 | }); 23 | return report; 24 | } 25 | 26 | /// The WebRTC specification only defines RTCRtpParameters in terms of senders, 27 | /// but this API also applies them to receivers, similar to ORTC: 28 | /// http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*. 
29 | @override 30 | RTCRtpParameters get parameters { 31 | var parameters = jsutil.callMethod(_jsRtpReceiver, 'getParameters', []); 32 | return RTCRtpParametersWeb.fromJsObject(parameters); 33 | } 34 | 35 | @override 36 | MediaStreamTrack get track => MediaStreamTrackWeb(_jsRtpReceiver.track!); 37 | 38 | @override 39 | String get receiverId => jsutil.getProperty(_jsRtpReceiver, 'receiverId'); 40 | } 41 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | -------------------------------------------- 4 | [1.0.12] - 2022-12-12 5 | 6 | * fix: Convert iceconnectionstate to connectionstate for Firefox. 7 | 8 | [1.0.11] - 2022-11-12 9 | 10 | * Change MediaStream.clone to async. 11 | 12 | [1.0.10] - 2022-11-02 13 | 14 | * Update MediaRecorder interface. 15 | 16 | [1.0.9] - 2022-10-10 17 | 18 | * Use RTCPeerConnection::onConnectionStateChange. 19 | 20 | -------------------------------------------- 21 | [1.0.8] - 2022-09-06 22 | 23 | * Bump version for webrtc-interface. 24 | 25 | [1.0.7] - 2022-08-04 26 | 27 | * Bump version for webrtc-interface. 28 | 29 | [1.0.6] - 2022-05-08 30 | 31 | * Support null tracks in replaceTrack/setTrack. 32 | 33 | [1.0.5] - 2022-03-31 34 | 35 | * Added RTCDataChannel.id 36 | 37 | [1.0.4] - 2022-02-07 38 | 39 | * Add restartIce. 40 | * Bump version for webrtc-interface. 41 | 42 | [1.0.3] - 2021-12-28 43 | 44 | * export media_stream_impl.dart to fix do not import impl files. 45 | 46 | [1.0.2] - 2021-11-27 47 | 48 | * Fix the type error of minified function in release mode. 49 | 50 | [1.0.1] - 2021-11-25 51 | 52 | * Bump interface version to 1.0.1 53 | * Reduce code. 54 | 55 | 1.0.0 56 | 57 | * Refactor using webrtc_interface. 58 | 59 | 0.2.3 60 | 61 | * Fix bug for simulcast. 62 | 63 | 0.2.2 64 | 65 | * Fix bug for unified-plan. 66 | 67 | 0.2.1 68 | 69 | * Fix getStats. 
70 | 71 | 0.2.0 72 | 73 | * Implement basic functions. 74 | 75 | 0.1.0 76 | 77 | * First working version. 78 | 79 | 0.0.1 80 | 81 | * Initial version, created by Stagehand 82 | -------------------------------------------------------------------------------- /analysis_options.yaml: -------------------------------------------------------------------------------- 1 | include: package:pedantic/analysis_options.yaml 2 | 3 | linter: 4 | rules: 5 | - always_declare_return_types 6 | - avoid_empty_else 7 | - await_only_futures 8 | - avoid_returning_null_for_void 9 | - cancel_subscriptions 10 | - directives_ordering 11 | - flutter_style_todos 12 | - sort_constructors_first 13 | - sort_unnamed_constructors_first 14 | - sort_pub_dependencies 15 | - type_init_formals 16 | - unnecessary_brace_in_string_interps 17 | - unnecessary_const 18 | - unnecessary_new 19 | - unnecessary_getters_setters 20 | - unnecessary_null_aware_assignments 21 | - unnecessary_null_in_if_null_operators 22 | - unnecessary_overrides 23 | - unnecessary_parenthesis 24 | - unnecessary_statements 25 | - unnecessary_string_interpolations 26 | - unnecessary_this 27 | - unrelated_type_equality_checks 28 | - use_rethrow_when_possible 29 | - valid_regexps 30 | - void_checks 31 | 32 | analyzer: 33 | errors: 34 | # treat missing required parameters as a warning (not a hint) 35 | missing_required_param: warning 36 | # treat missing returns as a warning (not a hint) 37 | missing_return: warning 38 | # allow having TODOs in the code 39 | todo: ignore 40 | # allow self-reference to deprecated members (we do this because otherwise we have 41 | # to annotate every member in every test, assert, etc, when we deprecate something) 42 | deprecated_member_use_from_same_package: ignore 43 | # Ignore analyzer hints for updating pubspecs when using Future or 44 | # Stream and not importing dart:async 45 | # Please see https://github.com/flutter/flutter/pull/24528 for details. 
46 | sdk_version_async_exported_from_core: ignore 47 | -------------------------------------------------------------------------------- /web/p2p/simple_websocket.dart: -------------------------------------------------------------------------------- 1 | import 'dart:convert'; 2 | import 'dart:html'; 3 | 4 | import 'package:http/http.dart' as http; 5 | 6 | typedef OnMessageCallback = void Function(dynamic msg); 7 | typedef OnCloseCallback = void Function(int code, String reason); 8 | typedef OnOpenCallback = void Function(); 9 | 10 | class SimpleWebSocket { 11 | SimpleWebSocket(this._url) { 12 | _url = _url.replaceAll('https:', 'wss:'); 13 | } 14 | 15 | String _url; 16 | var _socket; 17 | OnOpenCallback? onOpen; 18 | OnMessageCallback? onMessage; 19 | OnCloseCallback? onClose; 20 | 21 | Future connect() async { 22 | try { 23 | _socket = WebSocket(_url); 24 | _socket.onOpen.listen((e) { 25 | onOpen?.call(); 26 | }); 27 | 28 | _socket.onMessage.listen((e) { 29 | onMessage?.call(e.data); 30 | }); 31 | 32 | _socket.onClose.listen((e) { 33 | onClose?.call(e.code, e.reason); 34 | }); 35 | } catch (e) { 36 | onClose?.call(500, e.toString()); 37 | } 38 | } 39 | 40 | void send(data) { 41 | if (_socket != null && _socket.readyState == WebSocket.OPEN) { 42 | _socket.send(data); 43 | print('send: $data'); 44 | } else { 45 | print('WebSocket not connected, message $data not sent'); 46 | } 47 | } 48 | 49 | void close() { 50 | if (_socket != null) _socket.close(); 51 | } 52 | } 53 | 54 | Future getTurnCredential(String host, int port) async { 55 | var url = 'https://$host:$port/api/turn?service=turn&username=flutter-webrtc'; 56 | final res = await http.get(Uri.parse(url)); 57 | if (res.statusCode == 200) { 58 | var data = json.decode(res.body); 59 | print('getTurnCredential:response => $data.'); 60 | return data; 61 | } 62 | return {}; 63 | } 64 | -------------------------------------------------------------------------------- /web/test_media_stream.dart: 
-------------------------------------------------------------------------------- 1 | import 'package:dart_webrtc/dart_webrtc.dart'; 2 | import 'package:test/test.dart'; 3 | 4 | MediaStream? mediaStream; 5 | 6 | List testFunctions = [ 7 | () => test('mediaStream?.constructor()', () async { 8 | mediaStream = await navigator.mediaDevices 9 | .getUserMedia({'audio': true, 'video': true}); 10 | 11 | expect(mediaStream?.id != null, true); 12 | }), 13 | () => test('mediaStream?.active()', () { 14 | expect(mediaStream?.active, true); 15 | }), 16 | () => test('mediaStream?.getTracks()', () { 17 | expect(mediaStream?.getTracks().length, 2); 18 | }), 19 | () => test('mediaStream?.getAudioTracks()', () { 20 | expect(mediaStream?.getAudioTracks().length, 1); 21 | var track = mediaStream 22 | ?.getTrackById(mediaStream?.getAudioTracks()[0].id ?? ''); 23 | expect(track?.id, mediaStream?.getAudioTracks()[0].id); 24 | }), 25 | () => test('mediaStream?.getVideoTracks()', () { 26 | expect(mediaStream?.getVideoTracks().length, 1); 27 | var track = mediaStream 28 | ?.getTrackById(mediaStream?.getVideoTracks()[0].id ?? ''); 29 | expect(track!.id, mediaStream?.getVideoTracks()[0].id); 30 | }), 31 | () => test('mediaStream?.removeTrack()', () { 32 | var track = mediaStream 33 | ?.getTrackById(mediaStream?.getVideoTracks()[0].id ?? 
''); 34 | mediaStream?.removeTrack(track!); 35 | expect(mediaStream?.getVideoTracks().length, 0); 36 | }), 37 | () => test('mediaStream?.close()', () { 38 | mediaStream?.getTracks().forEach((element) { 39 | element.stop(); 40 | mediaStream?.removeTrack(element); 41 | }); 42 | expect(mediaStream?.getTracks().isEmpty, true); 43 | }) 44 | ]; 45 | -------------------------------------------------------------------------------- /lib/src/rtc_video_element.dart: -------------------------------------------------------------------------------- 1 | import 'dart:html' as html; 2 | 3 | import '../dart_webrtc.dart'; 4 | 5 | class RTCVideoElement { 6 | RTCVideoElement() { 7 | _html = html.VideoElement() 8 | ..autoplay = true 9 | ..muted = false 10 | ..controls = false 11 | ..style.objectFit = 'contain' 12 | ..style.border = 'none'; 13 | 14 | // Allows Safari iOS to play the video inline 15 | _html.setAttribute('playsinline', 'true'); 16 | } 17 | 18 | MediaStream? _stream; 19 | 20 | late html.VideoElement _html; 21 | html.VideoElement get htmlElement => _html; 22 | 23 | /// contain or cover 24 | set objectFit(String fit) => _html.style.objectFit = fit; 25 | 26 | set srcObject(MediaStream? stream) { 27 | _stream = stream; 28 | if (stream != null) { 29 | _html.srcObject = (stream as MediaStreamWeb).jsStream; 30 | } else { 31 | _html.srcObject = null; 32 | } 33 | } 34 | 35 | int get videoWidth => _html.videoWidth; 36 | 37 | int get videoHeight => _html.videoHeight; 38 | 39 | Stream get onEnded => _html.onEnded; 40 | 41 | Stream get onError => _html.onError; 42 | 43 | Stream get onCanPlay => _html.onCanPlay; 44 | 45 | Stream get onResize => _html.onResize; 46 | 47 | dynamic get error => _html.error; 48 | 49 | MediaStream? 
get srcObject => _stream; 50 | 51 | set muted(bool v) => _html.muted = v; 52 | bool get muted => _html.muted; 53 | 54 | set autoplay(bool v) => _html.autoplay = v; 55 | bool get autoplay => _html.autoplay; 56 | 57 | set controls(bool v) => _html.controls = v; 58 | bool get controls => _html.controls; 59 | 60 | void load() => _html.load(); 61 | 62 | void removeAttribute(String name) => _html.removeAttribute(name); 63 | 64 | Future setSinkId(String sinkId) => _html.setSinkId(sinkId); 65 | } 66 | -------------------------------------------------------------------------------- /lib/src/media_stream_impl.dart: -------------------------------------------------------------------------------- 1 | import 'dart:async'; 2 | import 'dart:html' as html; 3 | import 'package:webrtc_interface/webrtc_interface.dart'; 4 | 5 | import 'media_stream_track_impl.dart'; 6 | 7 | class MediaStreamWeb extends MediaStream { 8 | MediaStreamWeb(this.jsStream, String ownerTag) 9 | : assert(jsStream.id != null), 10 | super(jsStream.id!, ownerTag); 11 | final html.MediaStream jsStream; 12 | 13 | @override 14 | Future getMediaTracks() { 15 | return Future.value(); 16 | } 17 | 18 | @override 19 | Future addTrack(MediaStreamTrack track, {bool addToNative = true}) { 20 | if (addToNative) { 21 | var _native = track as MediaStreamTrackWeb; 22 | jsStream.addTrack(_native.jsTrack); 23 | } 24 | return Future.value(); 25 | } 26 | 27 | @override 28 | Future removeTrack(MediaStreamTrack track, 29 | {bool removeFromNative = true}) async { 30 | if (removeFromNative) { 31 | var _native = track as MediaStreamTrackWeb; 32 | jsStream.removeTrack(_native.jsTrack); 33 | } 34 | } 35 | 36 | @override 37 | List getAudioTracks() { 38 | var audioTracks = []; 39 | jsStream.getAudioTracks().forEach( 40 | (dynamic jsTrack) => audioTracks.add(MediaStreamTrackWeb(jsTrack))); 41 | return audioTracks; 42 | } 43 | 44 | @override 45 | List getVideoTracks() { 46 | var audioTracks = []; 47 | jsStream.getVideoTracks().forEach( 48 | 
(dynamic jsTrack) => audioTracks.add(MediaStreamTrackWeb(jsTrack))); 49 | return audioTracks; 50 | } 51 | 52 | @override 53 | List getTracks() { 54 | return [...getAudioTracks(), ...getVideoTracks()]; 55 | } 56 | 57 | @override 58 | bool? get active => jsStream.active; 59 | 60 | @override 61 | Future clone() async { 62 | return MediaStreamWeb(jsStream.clone(), ownerTag); 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /web/test_media_stream_track.dart: -------------------------------------------------------------------------------- 1 | import 'package:dart_webrtc/dart_webrtc.dart'; 2 | import 'package:test/test.dart'; 3 | 4 | MediaStreamTrack? audioTrack, videoTrack; 5 | 6 | List testFunctions = [ 7 | () => test('MediaStreamTrack.constructor()', () async { 8 | var stream = await navigator.mediaDevices 9 | .getUserMedia({'audio': true, 'video': true}); 10 | 11 | audioTrack = stream.getAudioTracks()[0]; 12 | expect(audioTrack != null, true); 13 | 14 | expect(audioTrack?.kind, 'audio'); 15 | 16 | videoTrack = stream.getVideoTracks()[0]; 17 | expect(videoTrack != null, true); 18 | 19 | expect(videoTrack?.kind, 'video'); 20 | }), 21 | () => test('MediaStreamTrack.id()', () { 22 | expect(audioTrack?.id is String, true); 23 | expect(audioTrack?.id?.isNotEmpty, true); 24 | expect(videoTrack?.id is String, true); 25 | expect(videoTrack?.id?.isNotEmpty, true); 26 | }), 27 | () => test('MediaStreamTrack.label()', () { 28 | expect(audioTrack?.label is String, true); 29 | expect(audioTrack?.id?.isNotEmpty, true); 30 | expect(videoTrack?.id is String, true); 31 | expect(videoTrack?.id?.isNotEmpty, true); 32 | }), 33 | () => test('MediaStreamTrack.enabled()', () { 34 | expect(audioTrack?.enabled, true); 35 | audioTrack?.enabled = false; 36 | expect(audioTrack?.enabled, false); 37 | 38 | expect(videoTrack?.enabled, true); 39 | videoTrack?.enabled = false; 40 | expect(videoTrack?.enabled, false); 41 | }), 42 | () => 
test('MediaStreamTrack.readyState() | MediaStreamTrack.stop()', () { 43 | /* 44 | expect(audioTrack?.readyState, 'live'); 45 | audioTrack?.stop(); 46 | expect(audioTrack?.readyState, 'ended'); 47 | 48 | expect(videoTrack?.readyState, 'live'); 49 | videoTrack?.stop(); 50 | expect(videoTrack?.readyState, 'ended'); 51 | */ 52 | }) 53 | ]; 54 | -------------------------------------------------------------------------------- /lib/src/media_recorder_impl.dart: -------------------------------------------------------------------------------- 1 | import 'dart:async'; 2 | import 'dart:html' as html; 3 | import 'dart:js' as js; 4 | import 'package:webrtc_interface/webrtc_interface.dart'; 5 | 6 | import 'media_stream_impl.dart'; 7 | 8 | class MediaRecorderWeb extends MediaRecorder { 9 | late html.MediaRecorder _recorder; 10 | late Completer _completer; 11 | 12 | @override 13 | Future start( 14 | String path, { 15 | MediaStreamTrack? videoTrack, 16 | MediaStreamTrack? audioTrack, 17 | RecorderAudioChannel? audioChannel, 18 | int? rotation, 19 | }) { 20 | throw 'Use startWeb on Flutter Web!'; 21 | } 22 | 23 | @override 24 | void startWeb( 25 | MediaStream stream, { 26 | Function(dynamic blob, bool isLastOne)? 
onDataChunk, 27 | String mimeType = 'video/webm', 28 | int timeSlice = 1000, 29 | }) { 30 | var _native = stream as MediaStreamWeb; 31 | _recorder = html.MediaRecorder(_native.jsStream, {'mimeType': mimeType}); 32 | if (onDataChunk == null) { 33 | var _chunks = []; 34 | _completer = Completer(); 35 | _recorder.addEventListener('dataavailable', (html.Event event) { 36 | final html.Blob blob = js.JsObject.fromBrowserObject(event)['data']; 37 | if (blob.size > 0) { 38 | _chunks.add(blob); 39 | } 40 | if (_recorder.state == 'inactive') { 41 | final blob = html.Blob(_chunks, mimeType); 42 | _completer.complete(html.Url.createObjectUrlFromBlob(blob)); 43 | } 44 | }); 45 | _recorder.onError.listen((error) { 46 | _completer.completeError(error); 47 | }); 48 | } else { 49 | _recorder.addEventListener('dataavailable', (html.Event event) { 50 | onDataChunk( 51 | js.JsObject.fromBrowserObject(event)['data'], 52 | _recorder.state == 'inactive', 53 | ); 54 | }); 55 | } 56 | _recorder.start(timeSlice); 57 | } 58 | 59 | @override 60 | Future stop() { 61 | _recorder.stop(); 62 | return _completer.future; 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /lib/src/factory_impl.dart: -------------------------------------------------------------------------------- 1 | import 'dart:async'; 2 | import 'dart:convert'; 3 | import 'dart:html' as html; 4 | import 'package:webrtc_interface/webrtc_interface.dart'; 5 | 6 | import 'media_recorder_impl.dart'; 7 | import 'media_stream_impl.dart'; 8 | import 'navigator_impl.dart'; 9 | import 'rtc_peerconnection_impl.dart'; 10 | 11 | class RTCFactoryWeb extends RTCFactory { 12 | RTCFactoryWeb._internal(); 13 | static final instance = RTCFactoryWeb._internal(); 14 | 15 | @override 16 | Future createPeerConnection( 17 | Map configuration, 18 | [Map? constraints]) async { 19 | final constr = (constraints != null && constraints.isNotEmpty) 20 | ? 
constraints 21 | : { 22 | 'mandatory': {}, 23 | 'optional': [ 24 | {'DtlsSrtpKeyAgreement': true}, 25 | ], 26 | }; 27 | final jsRtcPc = html.RtcPeerConnection({...constr, ...configuration}); 28 | final _peerConnectionId = base64Encode(jsRtcPc.toString().codeUnits); 29 | return RTCPeerConnectionWeb(_peerConnectionId, jsRtcPc); 30 | } 31 | 32 | @override 33 | Future createLocalMediaStream(String label) async { 34 | final jsMs = html.MediaStream(); 35 | return MediaStreamWeb(jsMs, 'local'); 36 | } 37 | 38 | @override 39 | MediaRecorder mediaRecorder() { 40 | return MediaRecorderWeb(); 41 | } 42 | 43 | @override 44 | VideoRenderer videoRenderer() { 45 | throw UnimplementedError(); 46 | } 47 | 48 | @override 49 | Navigator get navigator => NavigatorWeb(); 50 | } 51 | 52 | Future createPeerConnection( 53 | Map configuration, 54 | [Map? constraints]) { 55 | return RTCFactoryWeb.instance 56 | .createPeerConnection(configuration, constraints); 57 | } 58 | 59 | Future createLocalMediaStream(String label) { 60 | return RTCFactoryWeb.instance.createLocalMediaStream(label); 61 | } 62 | 63 | MediaRecorder mediaRecorder() { 64 | return RTCFactoryWeb.instance.mediaRecorder(); 65 | } 66 | 67 | VideoRenderer videoRenderer() { 68 | return RTCFactoryWeb.instance.videoRenderer(); 69 | } 70 | 71 | Navigator get navigator => RTCFactoryWeb.instance.navigator; 72 | -------------------------------------------------------------------------------- /lib/src/rtc_data_channel_impl.dart: -------------------------------------------------------------------------------- 1 | import 'dart:async'; 2 | import 'dart:html' as html; 3 | import 'dart:js_util' as jsutil; 4 | import 'package:webrtc_interface/webrtc_interface.dart'; 5 | 6 | class RTCDataChannelWeb extends RTCDataChannel { 7 | RTCDataChannelWeb(this._jsDc) { 8 | stateChangeStream = _stateChangeController.stream; 9 | messageStream = _messageController.stream; 10 | _jsDc.onClose.listen((_) { 11 | _state = RTCDataChannelState.RTCDataChannelClosed; 
  /// Converts an incoming data-channel payload into an
  /// [RTCDataChannelMessage].
  ///
  /// Text frames arrive as [String] and are wrapped directly. Binary frames
  /// are expected as a JS ArrayBuffer and converted with `asUint8List()`.
  /// A [html.Blob] is handled defensively by reading its bytes via the JS
  /// `arrayBuffer()` promise — assumes the channel's binaryType is
  /// 'arraybuffer', so this branch should be unreachable (TODO confirm).
  Future _parse(dynamic data) async {
    if (data is String) return RTCDataChannelMessage(data);
    dynamic arrayBuffer;
    if (data is html.Blob) {
      // This should never happen actually
      arrayBuffer = await jsutil
          .promiseToFuture(jsutil.callMethod(data, 'arrayBuffer', []));
    } else {
      arrayBuffer = data;
    }
    return RTCDataChannelMessage.fromBinary(arrayBuffer.asUint8List());
  }
/// Stops every track on [stream], releasing the underlying capture devices.
void closeMediaStream(MediaStream stream) {
  for (final track in stream.getTracks()) {
    track.stop();
  }
}
// Inclusive ASCII code-unit bounds used by the string generators below.
const ASCII_START = 33; // '!' — first printable, non-space ASCII character.
const ASCII_END = 126; // '~' — last printable ASCII character.
const NUMERIC_START = 48; // '0'
const NUMERIC_END = 57; // '9'
const LOWER_ALPHA_START = 97; // 'a'
const LOWER_ALPHA_END = 122; // 'z'
const UPPER_ALPHA_START = 65; // 'A'
const UPPER_ALPHA_END = 90; // 'Z'
/// Generates a random integer in the inclusive range [from]..[to].
///
/// Throws an [Exception] when [from] > [to].
int randomBetween(int from, int to) {
  if (from > to) throw Exception('$from cannot be > $to');
  var rand = Random();
  // Random.nextInt's upper bound is exclusive, so +1 makes [to] reachable.
  // The previous `((to - from) * rand.nextDouble()).toInt() + from` could
  // never return [to] (probability zero), and truncation biased results
  // toward the low end — e.g. '~' (ASCII_END) was unreachable.
  return from + rand.nextInt(to - from + 1);
}
73 | String randomMerge(String a, String b) { 74 | var mergedCodeUnits = new List.from("$a$b".codeUnits); 75 | mergedCodeUnits.shuffle(); 76 | return new String.fromCharCodes(mergedCodeUnits); 77 | }*/ 78 | -------------------------------------------------------------------------------- /web/p2p/p2p.dart: -------------------------------------------------------------------------------- 1 | import 'dart:html' as html; 2 | 3 | import 'package:dart_webrtc/dart_webrtc.dart'; 4 | import 'package:js/js.dart'; 5 | import 'package:test/test.dart'; 6 | 7 | import 'signaling.dart'; 8 | 9 | void main() { 10 | test('String.split() splits the string on the delimiter', () { 11 | var string = 'foo,bar,baz'; 12 | expect(string.split(','), equals(['foo', 'bar', 'baz'])); 13 | }); 14 | 15 | test('String.trim() removes surrounding whitespace', () { 16 | var string = ' foo '; 17 | expect(string.trim(), equals('foo')); 18 | }); 19 | 20 | var signaling = Signaling('demo.cloudwebrtc.com'); 21 | 22 | var local = html.document.querySelector('#local'); 23 | 24 | var localVideo = RTCVideoElement(); 25 | 26 | local?.append(localVideo.htmlElement); 27 | 28 | var remote = html.document.querySelector('#remote'); 29 | 30 | var remoteVideo = RTCVideoElement(); 31 | 32 | remote?.append(remoteVideo.htmlElement); 33 | 34 | signaling.onLocalStream = allowInterop((MediaStream stream) { 35 | localVideo.srcObject = stream; 36 | }); 37 | 38 | signaling.onAddRemoteStream = allowInterop((MediaStream stream) { 39 | remoteVideo.srcObject = stream; 40 | }); 41 | 42 | signaling.connect(); 43 | signaling.onStateChange = (SignalingState state) { 44 | html.document.querySelector('#output')?.text = state.toString(); 45 | if (state == SignalingState.CallStateBye) { 46 | localVideo.srcObject = null; 47 | remoteVideo.srcObject = null; 48 | } 49 | }; 50 | } 51 | 52 | /* 53 | void loopBackTest() { 54 | var local = document.querySelector('#local'); 55 | var localVideo = VideoElement() 56 | ..autoplay = true 57 | ..muted = 
true 58 | ..controls = false 59 | ..style.objectFit = 'contain' // contain or cover 60 | ..style.border = 'none' 61 | ..id = 'dart-webrtc-video-01'; 62 | 63 | // Allows Safari iOS to play the video inline 64 | localVideo.setAttribute('playsinline', 'true'); 65 | local.append(localVideo); 66 | dartWebRTCTest(localVideo); 67 | } 68 | 69 | void dartWebRTCTest(VideoElement video) async { 70 | var list = await PromiseToFuture>( 71 | navigator.mediaDevices.enumerateDevices()); 72 | list.forEach((e) { 73 | if (e is MediaDeviceInfo) { 74 | print('MediaDeviceInfo: ${e.label}'); 75 | } else if (e is InputDeviceInfo) { 76 | print('InputDeviceInfo: ${e.label}'); 77 | } 78 | }); 79 | 80 | var pc = RTCPeerConnection(); 81 | print('connectionState: ${pc.connectionState}'); 82 | pc.onaddstream = allowInterop((MediaStreamEvent event) {}); 83 | var stream = await PromiseToFuture( 84 | navigator.mediaDevices.getDisplayMedia() 85 | /*.getUserMedia(MediaStreamConstraints(audio: true, video: true))*/); 86 | print('getDisplayMedia: stream.id => ${stream.id}'); 87 | stream.oninactive = allowInterop((Event event) { 88 | print('oninactive: stream.id => ${event.target.id}'); 89 | video.srcObject = null; 90 | video.remove(); 91 | }); 92 | pc.addStream(stream); 93 | var rtcVideo = ConvertToRTCVideoElement(video); 94 | rtcVideo.srcObject = stream; 95 | } 96 | */ 97 | -------------------------------------------------------------------------------- /lib/src/media_stream_track_impl.dart: -------------------------------------------------------------------------------- 1 | import 'dart:async'; 2 | import 'dart:html' as html; 3 | import 'dart:js_util' as js; 4 | import 'dart:typed_data'; 5 | import 'package:webrtc_interface/webrtc_interface.dart'; 6 | 7 | class MediaStreamTrackWeb extends MediaStreamTrack { 8 | MediaStreamTrackWeb(this.jsTrack) { 9 | jsTrack.onEnded.listen((event) => onEnded?.call()); 10 | jsTrack.onMute.listen((event) => onMute?.call()); 11 | jsTrack.onUnmute.listen((event) => 
onUnMute?.call()); 12 | } 13 | 14 | final html.MediaStreamTrack jsTrack; 15 | 16 | @override 17 | String? get id => jsTrack.id; 18 | 19 | @override 20 | String? get kind => jsTrack.kind; 21 | 22 | @override 23 | String? get label => jsTrack.label; 24 | 25 | @override 26 | bool get enabled => jsTrack.enabled ?? false; 27 | 28 | @override 29 | bool? get muted => jsTrack.muted; 30 | 31 | @override 32 | set enabled(bool? b) { 33 | jsTrack.enabled = b; 34 | } 35 | 36 | @override 37 | Map getConstraints() { 38 | return jsTrack.getConstraints() as Map; 39 | } 40 | 41 | @override 42 | Future applyConstraints([Map? constraints]) async { 43 | // TODO(wermathurin): Wait for: https://github.com/dart-lang/sdk/commit/1a861435579a37c297f3be0cf69735d5b492bc6c 44 | // to be merged to use jsTrack.applyConstraints() directly 45 | final arg = js.jsify(constraints ?? {}); 46 | 47 | final _val = await js.promiseToFuture( 48 | js.callMethod(jsTrack, 'applyConstraints', [arg])); 49 | return _val; 50 | } 51 | 52 | // TODO(wermathurin): https://github.com/dart-lang/sdk/issues/44319 53 | // @override 54 | // MediaTrackCapabilities getCapabilities() { 55 | // var _converted = jsTrack.getCapabilities(); 56 | // print(_converted['aspectRatio'].runtimeType); 57 | // return null; 58 | // } 59 | 60 | @override 61 | Map getSettings() { 62 | return jsTrack.getSettings() as Map; 63 | } 64 | 65 | @override 66 | Future captureFrame() async { 67 | final imageCapture = html.ImageCapture(jsTrack); 68 | final bitmap = await imageCapture.grabFrame(); 69 | final canvas = html.CanvasElement(); 70 | canvas.width = bitmap.width; 71 | canvas.height = bitmap.height; 72 | final renderer = 73 | canvas.getContext('bitmaprenderer') as html.ImageBitmapRenderingContext; 74 | js.callMethod(renderer, 'transferFromImageBitmap', [bitmap]); 75 | final blod = await canvas.toBlob(); 76 | var array = 77 | await js.promiseToFuture(js.callMethod(blod, 'arrayBuffer', [])); 78 | bitmap.close(); 79 | return array; 80 | } 81 | 82 | 
/// Converters from the JS `RTCRtpParameters` dictionary (as returned by
/// `getParameters()` on a sender — see rtc_rtp_sender_impl.dart) into the
/// Dart-side [RTCRtpParameters] model.
class RTCRtpParametersWeb {
  /// Builds the full [RTCRtpParameters] from the JS [object].
  static RTCRtpParameters fromJsObject(Object object) {
    return RTCRtpParameters(
        transactionId: jsutil.getProperty(object, 'transactionId'),
        rtcp: RTCRTCPParametersWeb.fromJsObject(
            jsutil.getProperty(object, 'rtcp')),
        headerExtensions: headerExtensionsFromJsObject(object),
        encodings: encodingsFromJsObject(object),
        codecs: codecsFromJsObject(object));
  }

  /// Converts each entry of the JS `headerExtensions` array.
  static List headerExtensionsFromJsObject(Object object) {
    var headerExtensions = jsutil.getProperty(object, 'headerExtensions');
    var list = [];
    headerExtensions.forEach((e) {
      list.add(RTCHeaderExtensionWeb.fromJsObject(e));
    });
    return list;
  }

  /// Converts each entry of the JS `encodings` array.
  static List encodingsFromJsObject(Object object) {
    var encodings = jsutil.getProperty(object, 'encodings');
    var list = [];
    encodings.forEach((e) {
      list.add(RTCRtpEncodingWeb.fromJsObject(e));
    });
    return list;
  }

  /// Converts each entry of the JS `codecs` array.
  /// NOTE(review): the local is named `encodings` but holds codec entries —
  /// rename when convenient.
  static List codecsFromJsObject(Object object) {
    var encodings = jsutil.getProperty(object, 'codecs');
    var list = [];
    encodings.forEach((e) {
      list.add(RTCRTPCodecWeb.fromJsObject(e));
    });
    return list;
  }
}
/// Converts a JS RTP encoding-parameters object into an [RTCRtpEncoding].
class RTCRtpEncodingWeb {
  /// Copies the fields of interest from the JS [object] into a Dart map and
  /// delegates construction to [RTCRtpEncoding.fromMap]. Absent JS fields
  /// simply come through as null.
  static RTCRtpEncoding fromJsObject(Object object) {
    dynamic read(String field) => jsutil.getProperty(object, field);
    return RTCRtpEncoding.fromMap({
      'rid': read('rid'),
      'active': read('active'),
      'maxBitrate': read('maxBitrate'),
      'maxFramerate': read('maxFramerate'),
      'minBitrate': read('minBitrate'),
      'numTemporalLayers': read('numTemporalLayers'),
      'scaleResolutionDownBy': read('scaleResolutionDownBy'),
      'ssrc': read('ssrc')
    });
  }
}
'media_stream_track_impl.dart'; 8 | import 'rtc_dtmf_sender_impl.dart'; 9 | import 'rtc_rtp_parameters_impl.dart'; 10 | 11 | class RTCRtpSenderWeb extends RTCRtpSender { 12 | RTCRtpSenderWeb(this._jsRtpSender, this._ownsTrack); 13 | 14 | factory RTCRtpSenderWeb.fromJsSender(RtcRtpSender jsRtpSender) { 15 | return RTCRtpSenderWeb(jsRtpSender, jsRtpSender.track != null); 16 | } 17 | 18 | final RtcRtpSender _jsRtpSender; 19 | bool _ownsTrack = false; 20 | 21 | @override 22 | Future replaceTrack(MediaStreamTrack? track) async { 23 | try { 24 | if (track != null) { 25 | var nativeTrack = track as MediaStreamTrackWeb; 26 | jsutil.callMethod(_jsRtpSender, 'replaceTrack', [nativeTrack.jsTrack]); 27 | } else { 28 | jsutil.callMethod(_jsRtpSender, 'replaceTrack', [null]); 29 | } 30 | } on Exception catch (e) { 31 | throw 'Unable to RTCRtpSender::replaceTrack: ${e.toString()}'; 32 | } 33 | } 34 | 35 | @override 36 | Future setTrack(MediaStreamTrack? track, 37 | {bool takeOwnership = true}) async { 38 | try { 39 | if (track != null) { 40 | var nativeTrack = track as MediaStreamTrackWeb; 41 | jsutil.callMethod(_jsRtpSender, 'setTrack', [nativeTrack.jsTrack]); 42 | } else { 43 | jsutil.callMethod(_jsRtpSender, 'setTrack', [null]); 44 | } 45 | } on Exception catch (e) { 46 | throw 'Unable to RTCRtpSender::setTrack: ${e.toString()}'; 47 | } 48 | } 49 | 50 | @override 51 | RTCRtpParameters get parameters { 52 | var parameters = jsutil.callMethod(_jsRtpSender, 'getParameters', []); 53 | return RTCRtpParametersWeb.fromJsObject(parameters); 54 | } 55 | 56 | @override 57 | Future setParameters(RTCRtpParameters parameters) async { 58 | try { 59 | var oldParameters = jsutil.callMethod(_jsRtpSender, 'getParameters', []); 60 | jsutil.setProperty( 61 | oldParameters, 62 | 'encodings', 63 | jsutil.jsify( 64 | parameters.encodings?.map((e) => e.toMap()).toList() ?? 
  /// The track currently attached to this sender, or null when the sender is
  /// not transmitting one.
  ///
  /// A fresh [MediaStreamTrackWeb] wrapper is created on every access, so two
  /// reads yield distinct Dart objects for the same underlying JS track.
  @override
  MediaStreamTrack? get track {
    if (null != _jsRtpSender.track) {
      return MediaStreamTrackWeb(_jsRtpSender.track!);
    }
    return null;
  }
media_stream_tests.testFunctions.forEach((Function func) => func()); 17 | media_stream_track_tests.testFunctions.forEach((Function func) => func()); 18 | peerconnection_tests.testFunctions.forEach((Function func) => func()); 19 | */ 20 | loopBackTest(); 21 | } 22 | 23 | void loopBackTest() async { 24 | var local = html.document.querySelector('#local'); 25 | var localVideo = RTCVideoElement(); 26 | local!.append(localVideo.htmlElement); 27 | 28 | var remote = html.document.querySelector('#remote'); 29 | var remotelVideo = RTCVideoElement(); 30 | remote!.append(remotelVideo.htmlElement); 31 | 32 | var pc2 = await createPeerConnection({}); 33 | pc2.onTrack = (event) { 34 | if (event.track.kind == 'video') { 35 | remotelVideo.srcObject = event.streams[0]; 36 | } 37 | }; 38 | pc2.onConnectionState = (state) { 39 | print('connectionState $state'); 40 | }; 41 | 42 | pc2.onIceConnectionState = (state) { 43 | print('iceConnectionState $state'); 44 | }; 45 | 46 | var pc1 = await createPeerConnection({}); 47 | 48 | pc1.onIceCandidate = (candidate) => pc2.addCandidate(candidate); 49 | pc2.onIceCandidate = (candidate) => pc1.addCandidate(candidate); 50 | 51 | var stream = 52 | await navigator.mediaDevices.getUserMedia({'audio': true, 'video': true}); 53 | /*.getUserMedia(MediaStreamConstraints(audio: true, video: true))*/ 54 | print('getDisplayMedia: stream.id => ${stream.id}'); 55 | 56 | navigator.mediaDevices.ondevicechange = (event) async { 57 | var list = await navigator.mediaDevices.enumerateDevices(); 58 | print('ondevicechange: '); 59 | list.where((element) => element.kind == 'audiooutput').forEach((e) { 60 | print('${e.runtimeType}: ${e.label}, type => ${e.kind}'); 61 | }); 62 | }; 63 | 64 | var list = await navigator.mediaDevices.enumerateDevices(); 65 | list.forEach((e) { 66 | print('${e.runtimeType}: ${e.label}, type => ${e.kind}'); 67 | }); 68 | var outputList = list.where((element) => element.kind == 'audiooutput'); 69 | if (outputList.isNotEmpty) { 70 | var sinkId 
/// Converts a list of plain encoding maps into [RTCRtpEncoding] objects.
List<RTCRtpEncoding> listToRtpEncodings(List<Map<String, dynamic>> list) {
  return [for (final map in list) RTCRtpEncoding.fromMap(map)];
}
extension RTCRtpTransceiverInitWebExt on RTCRtpTransceiverInit {
  /// Converts this init dictionary into a plain JS object — presumably the
  /// argument for `RTCPeerConnection.addTransceiver` (verify in
  /// rtc_peerconnection_impl.dart).
  ///
  /// Optional fields are omitted entirely (not emitted as null) when unset;
  /// streams are unwrapped to their underlying JS MediaStream objects.
  dynamic toJsObject() => jsutil.jsify({
        'direction': typeRtpTransceiverDirectionToString[direction],
        if (streams != null)
          'streams':
              streams!.map((e) => (e as MediaStreamWeb).jsStream).toList(),
        if (sendEncodings != null)
          'sendEncodings': sendEncodings!.map((e) => e.toMap()).toList(),
      });
}
peerConnectionId}) { 61 | var transceiver = RTCRtpTransceiverWeb(jsTransceiver, peerConnectionId); 62 | return transceiver; 63 | } 64 | 65 | Object _jsTransceiver; 66 | 67 | @override 68 | Future getCurrentDirection() async => 69 | typeStringToRtpTransceiverDirection[ 70 | jsutil.getProperty(_jsTransceiver, 'currentDirection')]; 71 | 72 | @override 73 | Future getDirection() async => 74 | typeStringToRtpTransceiverDirection[ 75 | jsutil.getProperty(_jsTransceiver, 'direction')]!; 76 | 77 | @override 78 | String get mid => jsutil.getProperty(_jsTransceiver, 'mid'); 79 | 80 | @override 81 | RTCRtpSender get sender => RTCRtpSenderWeb.fromJsSender( 82 | jsutil.getProperty(_jsTransceiver, 'sender')); 83 | 84 | @override 85 | RTCRtpReceiver get receiver => 86 | RTCRtpReceiverWeb(jsutil.getProperty(_jsTransceiver, 'receiver')); 87 | 88 | @override 89 | bool get stoped => jsutil.getProperty(_jsTransceiver, 'stopped'); 90 | 91 | @override 92 | String get transceiverId => mid; 93 | 94 | @override 95 | Future setDirection(TransceiverDirection direction) async { 96 | try { 97 | jsutil.setProperty(_jsTransceiver, 'direction', 98 | typeRtpTransceiverDirectionToString[direction]); 99 | } on Exception catch (e) { 100 | throw 'Unable to RTCRtpTransceiver::setDirection: ${e.toString()}'; 101 | } 102 | } 103 | 104 | @override 105 | Future stop() async { 106 | try { 107 | jsutil.callMethod(_jsTransceiver, 'stop', []); 108 | } on Exception catch (e) { 109 | throw 'Unable to RTCRtpTransceiver::stop: ${e..toString()}'; 110 | } 111 | } 112 | } 113 | -------------------------------------------------------------------------------- /web/test_peerconnection.dart: -------------------------------------------------------------------------------- 1 | import 'package:dart_webrtc/dart_webrtc.dart'; 2 | import 'package:test/test.dart'; 3 | 4 | late RTCPeerConnection pc1; 5 | late RTCPeerConnection pc2; 6 | 7 | late RTCSessionDescription offer; 8 | late RTCSessionDescription answer; 9 | 10 | void 
addStateCallbacks(RTCPeerConnection pc, String title) { 11 | pc.onConnectionState = (RTCPeerConnectionState state) { 12 | print('$title: onconnectionstatechange => ${state.toString()}'); 13 | }; 14 | pc.onIceConnectionState = (RTCIceConnectionState state) { 15 | print('$title: oniceconnectionstatechange => ${state.toString()}'); 16 | }; 17 | pc.onIceGatheringState = (RTCIceGatheringState state) { 18 | print('$title: onicegatheringstatechange => ${state.toString()}'); 19 | }; 20 | pc.onSignalingState = (RTCSignalingState state) { 21 | print('$title: onsignalingstatechange => ${state.toString()}'); 22 | }; 23 | 24 | pc.onAddStream = (MediaStream stream) { 25 | print('$title: onaddstream => ${stream.id}'); 26 | }; 27 | 28 | pc.onTrack = (RTCTrackEvent event) async { 29 | print( 30 | '$title: ontrack => ${event.track.id}, \nkind => ${event.track.kind}\nstream.length => ${event.streams.length}'); 31 | var params = event.receiver!.parameters; 32 | print('reducedSize => ${params.rtcp!.reducedSize}'); 33 | }; 34 | } 35 | 36 | List testFunctions = [ 37 | () => test('RTCPeerConnection.constructor()', () async { 38 | pc1 = await createPeerConnection({'iceServers': []}); 39 | 40 | expect(pc1.connectionState, 41 | RTCPeerConnectionState.RTCPeerConnectionStateNew); 42 | expect(pc1.signalingState, RTCSignalingState.RTCSignalingStateStable); 43 | 44 | pc2 = await createPeerConnection({'iceServers': []}); 45 | 46 | expect(pc2.connectionState, 47 | RTCPeerConnectionState.RTCPeerConnectionStateNew); 48 | expect(pc2.signalingState, RTCSignalingState.RTCSignalingStateStable); 49 | 50 | addStateCallbacks(pc1, 'pc1'); 51 | addStateCallbacks(pc2, 'pc2'); 52 | 53 | pc1.onIceCandidate = (RTCIceCandidate? candidate) async { 54 | if (candidate == null) { 55 | print('pc1: end-of-candidate'); 56 | return; 57 | } 58 | print('pc1: onicecaniddate => ${candidate.candidate}'); 59 | await pc2.addCandidate(candidate); 60 | }; 61 | 62 | pc2.onIceCandidate = (RTCIceCandidate? 
candidate) async { 63 | if (candidate == null) { 64 | print('pc2: end-of-candidate'); 65 | return; 66 | } 67 | print('pc2: onicecaniddate => ${candidate.candidate}'); 68 | await pc1.addCandidate(candidate); 69 | }; 70 | }), 71 | () => test('RTCPeerConnection.addTransceiver()', () async { 72 | await pc1.addTransceiver( 73 | kind: RTCRtpMediaType.RTCRtpMediaTypeAudio, 74 | init: RTCRtpTransceiverInit( 75 | direction: TransceiverDirection.SendOnly)); 76 | await pc1.addTransceiver( 77 | kind: RTCRtpMediaType.RTCRtpMediaTypeVideo, 78 | init: RTCRtpTransceiverInit( 79 | direction: TransceiverDirection.SendOnly)); 80 | 81 | await pc2.addTransceiver( 82 | kind: RTCRtpMediaType.RTCRtpMediaTypeAudio, 83 | init: RTCRtpTransceiverInit( 84 | direction: TransceiverDirection.RecvOnly)); 85 | await pc2.addTransceiver( 86 | kind: RTCRtpMediaType.RTCRtpMediaTypeVideo, 87 | init: RTCRtpTransceiverInit( 88 | direction: TransceiverDirection.RecvOnly)); 89 | }), 90 | () => test('RTCPeerConnection.createOffer()', () async { 91 | offer = await pc1.createOffer(); 92 | print('pc1 offer => ${offer.sdp}'); 93 | await pc1.setLocalDescription(offer); 94 | expect(pc1.signalingState, 95 | RTCSignalingState.RTCSignalingStateHaveLocalOffer); 96 | 97 | await pc2.setRemoteDescription(offer); 98 | expect(pc2.signalingState, 99 | RTCSignalingState.RTCSignalingStateHaveRemoteOffer); 100 | }), 101 | () => test('RTCPeerConnection.createAnswer()', () async { 102 | answer = await pc2.createAnswer({}); 103 | await pc2.setLocalDescription(answer); 104 | expect(pc2.signalingState, RTCSignalingState.RTCSignalingStateStable); 105 | print('pc2 answer => ${answer.sdp}'); 106 | await pc1.setRemoteDescription(answer); 107 | expect(pc1.signalingState, RTCSignalingState.RTCSignalingStateStable); 108 | }), 109 | () => test('RTCPeerConnection.localDescription()', () async { 110 | var localDescription1 = await pc1.getLocalDescription(); 111 | expect(localDescription1!.type, 'offer'); 112 | 
expect(localDescription1.sdp!.isNotEmpty, true); 113 | var localDescription2 = await pc2.getLocalDescription(); 114 | expect(localDescription2!.type, 'answer'); 115 | expect(localDescription2.sdp!.isNotEmpty, true); 116 | }), 117 | () => test('RTCPeerConnection.remoteDescription()', () async { 118 | /* fix: this test queried getLocalDescription() on both peers, making it a broken duplicate of the localDescription test; the expectations ('answer' for pc1, 'offer' for pc2) only hold for the *remote* descriptions. */ var remoteDescription1 = await pc1.getRemoteDescription(); 119 | expect(remoteDescription1!.type, 'answer'); 120 | expect(remoteDescription1.sdp!.isNotEmpty, true); 121 | var remoteDescription2 = await pc2.getRemoteDescription(); 122 | expect(remoteDescription2!.type, 'offer'); 123 | expect(remoteDescription2.sdp!.isNotEmpty, true); 124 | }), 125 | () => test('RTCPeerConnection.close()', () async { 126 | await Future.delayed(Duration(seconds: 5), () { 127 | pc1.close(); 128 | expect(pc1.signalingState, RTCSignalingState.RTCSignalingStateClosed); 129 | pc2.close(); 130 | expect(pc2.signalingState, RTCSignalingState.RTCSignalingStateClosed); 131 | }); 132 | }) 133 | ]; 134 | -------------------------------------------------------------------------------- /lib/src/mediadevices_impl.dart: -------------------------------------------------------------------------------- 1 | import 'dart:async'; 2 | import 'dart:html' as html; 3 | import 'dart:js' as js; 4 | import 'dart:js_util' as jsutil; 5 | import 'package:webrtc_interface/webrtc_interface.dart'; 6 | 7 | import 'media_stream_impl.dart'; 8 | 9 | class MediaDevicesWeb extends MediaDevices { 10 | @override 11 | Future getUserMedia( 12 | Map mediaConstraints) async { 13 | try { 14 | if (mediaConstraints['video'] is Map) { 15 | if (mediaConstraints['video']['facingMode'] != null) { 16 | mediaConstraints['video'].remove('facingMode'); 17 | } 18 | } 19 | 20 | mediaConstraints.putIfAbsent('video', () => false); 21 | mediaConstraints.putIfAbsent('audio', () => false); 22 | 23 | final mediaDevices = html.window.navigator.mediaDevices; 24 | if (mediaDevices == null) throw Exception('MediaDevices is null'); 25 | 26 | if
(jsutil.hasProperty(mediaDevices, 'getUserMedia')) { 27 | var args = jsutil.jsify(mediaConstraints); 28 | final jsStream = await jsutil.promiseToFuture( 29 | jsutil.callMethod(mediaDevices, 'getUserMedia', [args])); 30 | 31 | return MediaStreamWeb(jsStream, 'local'); 32 | } else { 33 | final jsStream = await html.window.navigator.getUserMedia( 34 | audio: mediaConstraints['audio'], 35 | video: mediaConstraints['video'], 36 | ); 37 | return MediaStreamWeb(jsStream, 'local'); 38 | } 39 | } catch (e) { 40 | throw 'Unable to getUserMedia: ${e.toString()}'; 41 | } 42 | } 43 | 44 | @override 45 | Future getDisplayMedia( 46 | Map mediaConstraints) async { 47 | try { 48 | final mediaDevices = html.window.navigator.mediaDevices; 49 | if (mediaDevices == null) throw Exception('MediaDevices is null'); 50 | 51 | if (jsutil.hasProperty(mediaDevices, 'getDisplayMedia')) { 52 | final arg = jsutil.jsify(mediaConstraints); 53 | final jsStream = await jsutil.promiseToFuture( 54 | jsutil.callMethod(mediaDevices, 'getDisplayMedia', [arg])); 55 | return MediaStreamWeb(jsStream, 'local'); 56 | } else { 57 | final jsStream = await html.window.navigator.getUserMedia( 58 | video: {'mediaSource': 'screen'}, 59 | audio: mediaConstraints['audio'] ?? false); 60 | return MediaStreamWeb(jsStream, 'local'); 61 | } 62 | } catch (e) { 63 | throw 'Unable to getDisplayMedia: ${e.toString()}'; 64 | } 65 | } 66 | 67 | @override 68 | Future> enumerateDevices() async { 69 | final devices = await getSources(); 70 | 71 | return devices.map((e) { 72 | var input = e as html.MediaDeviceInfo; 73 | return MediaDeviceInfo( 74 | deviceId: 75 | input.deviceId ?? 'Generated Device Id :(${devices.indexOf(e)})', 76 | groupId: input.groupId, 77 | kind: input.kind, 78 | label: input.label ?? 'Generated label :(${devices.indexOf(e)})', 79 | ); 80 | }).toList(); 81 | } 82 | 83 | @override 84 | Future> getSources() async { 85 | return html.window.navigator.mediaDevices?.enumerateDevices() ?? 
86 | Future.value([]); 87 | } 88 | 89 | @override 90 | MediaTrackSupportedConstraints getSupportedConstraints() { 91 | final mediaDevices = html.window.navigator.mediaDevices; 92 | if (mediaDevices == null) throw Exception('Mediadevices is null'); 93 | 94 | var _mapConstraints = mediaDevices.getSupportedConstraints(); 95 | 96 | return MediaTrackSupportedConstraints( 97 | aspectRatio: _mapConstraints['aspectRatio'], 98 | autoGainControl: _mapConstraints['autoGainControl'], 99 | brightness: _mapConstraints['brightness'], 100 | channelCount: _mapConstraints['channelCount'], 101 | colorTemperature: _mapConstraints['colorTemperature'], 102 | contrast: _mapConstraints['contrast'], 103 | /* fix: was _mapConstraints['_mapConstraints'] (copy-paste bug), which always yielded no value */ deviceId: _mapConstraints['deviceId'], 104 | echoCancellation: _mapConstraints['echoCancellation'], 105 | exposureCompensation: _mapConstraints['exposureCompensation'], 106 | exposureMode: _mapConstraints['exposureMode'], 107 | exposureTime: _mapConstraints['exposureTime'], 108 | facingMode: _mapConstraints['facingMode'], 109 | focusDistance: _mapConstraints['focusDistance'], 110 | focusMode: _mapConstraints['focusMode'], 111 | frameRate: _mapConstraints['frameRate'], 112 | groupId: _mapConstraints['groupId'], 113 | height: _mapConstraints['height'], 114 | iso: _mapConstraints['iso'], 115 | latency: _mapConstraints['latency'], 116 | noiseSuppression: _mapConstraints['noiseSuppression'], 117 | pan: _mapConstraints['pan'], 118 | pointsOfInterest: _mapConstraints['pointsOfInterest'], 119 | resizeMode: _mapConstraints['resizeMode'], 120 | saturation: _mapConstraints['saturation'], 121 | sampleRate: _mapConstraints['sampleRate'], 122 | sampleSize: _mapConstraints['sampleSize'], 123 | sharpness: _mapConstraints['sharpness'], 124 | tilt: _mapConstraints['tilt'], 125 | torch: _mapConstraints['torch'], 126 | whiteBalanceMode: _mapConstraints['whiteBalanceMode'], 127 | width: _mapConstraints['width'], 128 | zoom: _mapConstraints['zoom']); 129 | } 130 | 131 | @override 132 | Future
selectAudioOutput( 133 | [AudioOutputOptions? options]) async { 134 | try { 135 | final mediaDevices = html.window.navigator.mediaDevices; 136 | if (mediaDevices == null) throw Exception('MediaDevices is null'); 137 | 138 | if (jsutil.hasProperty(mediaDevices, 'selectAudioOutput')) { 139 | if (options != null) { 140 | final arg = jsutil.jsify(options); 141 | final deviceInfo = await jsutil.promiseToFuture( 142 | jsutil.callMethod(mediaDevices, 'selectAudioOutput', [arg])); 143 | return MediaDeviceInfo( 144 | kind: deviceInfo.kind, 145 | label: deviceInfo.label ?? '', 146 | deviceId: deviceInfo.deviceId ?? '', 147 | groupId: deviceInfo.groupId, 148 | ); 149 | } else { 150 | final deviceInfo = await jsutil.promiseToFuture( 151 | jsutil.callMethod(mediaDevices, 'selectAudioOutput', [])); 152 | return MediaDeviceInfo( 153 | kind: deviceInfo.kind, 154 | label: deviceInfo.label ?? '', 155 | deviceId: deviceInfo.deviceId ?? '', 156 | groupId: deviceInfo.groupId, 157 | ); 158 | } 159 | } else { 160 | throw UnimplementedError('selectAudioOutput is missing'); 161 | } 162 | } catch (e) { 163 | throw 'Unable to selectAudioOutput: ${e.toString()}, Please try to use MediaElement.setSinkId instead.'; 164 | } 165 | } 166 | 167 | @override 168 | set ondevicechange(Function(dynamic event)? listener) { 169 | try { 170 | final mediaDevices = html.window.navigator.mediaDevices; 171 | if (mediaDevices == null) throw Exception('MediaDevices is null'); 172 | 173 | jsutil.setProperty(mediaDevices, 'ondevicechange', 174 | js.allowInterop((evt) => listener?.call(evt))); 175 | } catch (e) { 176 | throw 'Unable to set ondevicechange: ${e.toString()}'; 177 | } 178 | } 179 | 180 | @override 181 | Function(dynamic event)? 
get ondevicechange { 182 | try { 183 | final mediaDevices = html.window.navigator.mediaDevices; 184 | if (mediaDevices == null) throw Exception('MediaDevices is null'); 185 | 186 | /* fix: the property value was fetched and discarded, so the getter always returned null; return it to the caller */ return jsutil.getProperty(mediaDevices, 'ondevicechange'); 187 | } catch (e) { 188 | throw 'Unable to get ondevicechange: ${e.toString()}'; 189 | } 190 | return null; 191 | } 192 | } 193 | -------------------------------------------------------------------------------- /web/p2p/signaling.dart: -------------------------------------------------------------------------------- 1 | import 'dart:async'; 2 | import 'dart:convert'; 3 | 4 | import 'package:dart_webrtc/dart_webrtc.dart'; 5 | 6 | import 'random_string.dart'; 7 | import 'simple_websocket.dart'; 8 | 9 | enum SignalingState { 10 | CallStateNew, 11 | CallStateRinging, 12 | CallStateInvite, 13 | CallStateConnected, 14 | CallStateBye, 15 | ConnectionOpen, 16 | ConnectionClosed, 17 | ConnectionError, 18 | } 19 | 20 | /* 21 | * callbacks for Signaling API. 22 | */ 23 | typedef SignalingStateCallback = void Function(SignalingState state); 24 | typedef StreamStateCallback = void Function(MediaStream stream); 25 | typedef OtherEventCallback = void Function(dynamic event); 26 | typedef DataChannelMessageCallback = void Function( 27 | RTCDataChannel dc, RTCDataChannelMessage data); 28 | typedef DataChannelCallback = void Function(RTCDataChannel dc); 29 | 30 | class Signaling { 31 | Signaling(this._host); 32 | 33 | final JsonEncoder _encoder = JsonEncoder(); 34 | final String _selfId = randomNumeric(6); 35 | late SimpleWebSocket? _socket; 36 | var _sessionId; 37 | final _host; 38 | final _port = 8086; 39 | final _peerConnections = {}; 40 | final _dataChannels = {}; 41 | final _remoteCandidates = []; 42 | var _iceServers = >[]; 43 | var _turnCredential; 44 | 45 | MediaStream? _localStream; 46 | late List _remoteStreams; 47 | SignalingStateCallback? onStateChange; 48 | StreamStateCallback? onLocalStream; 49 | StreamStateCallback?
onAddRemoteStream; 50 | StreamStateCallback? onRemoveRemoteStream; 51 | OtherEventCallback? onPeersUpdate; 52 | DataChannelMessageCallback? onDataChannelMessage; 53 | DataChannelCallback? onDataChannel; 54 | 55 | void close() { 56 | if (_localStream != null) { 57 | _localStream?.getTracks().forEach((element) { 58 | element.stop(); 59 | }); 60 | _localStream = null; 61 | } 62 | 63 | _peerConnections.forEach((key, pc) { 64 | pc.close(); 65 | }); 66 | _socket?.close(); 67 | } 68 | 69 | void switchCamera() { 70 | if (_localStream != null) { 71 | // TODO(cloudwebrtc): _localStream.getVideoTracks()[0].switchCamera(); 72 | } 73 | } 74 | 75 | void invite(String peer_id, String media, use_screen) { 76 | _sessionId = _selfId + '-' + peer_id; 77 | 78 | onStateChange?.call(SignalingState.CallStateNew); 79 | 80 | _createPeerConnection(peer_id, media, use_screen).then((pc) { 81 | _peerConnections[peer_id] = pc; 82 | if (media == 'data') { 83 | _createDataChannel(peer_id, pc); 84 | } 85 | _createOffer(peer_id, pc, media); 86 | }); 87 | } 88 | 89 | void bye() { 90 | _send('bye', { 91 | 'session_id': _sessionId, 92 | 'from': _selfId, 93 | }); 94 | } 95 | 96 | void onMessage(message) async { 97 | Map mapData = message; 98 | var data = mapData['data']; 99 | 100 | switch (mapData['type']) { 101 | case 'peers': 102 | List peers = data; 103 | var event = {}; 104 | event['self'] = _selfId; 105 | event['peers'] = peers; 106 | onPeersUpdate?.call(event); 107 | break; 108 | case 'offer': 109 | var id = data['from']; 110 | var description = data['description']; 111 | var media = data['media']; 112 | var sessionId = data['session_id']; 113 | _sessionId = sessionId; 114 | 115 | onStateChange?.call(SignalingState.CallStateNew); 116 | 117 | var pc = await _createPeerConnection(id, media, false); 118 | _peerConnections[id] = pc; 119 | await pc.setRemoteDescription( 120 | RTCSessionDescription(description['sdp'], description['type'])); 121 | await _createAnswer(id, pc, media); 122 | if 
(_remoteCandidates.isNotEmpty) { 123 | _remoteCandidates.forEach((candidate) async { 124 | await pc.addCandidate(candidate); 125 | }); 126 | _remoteCandidates.clear(); 127 | } 128 | 129 | break; 130 | case 'answer': 131 | var id = data['from']; 132 | var description = data['description']; 133 | 134 | var pc = _peerConnections[id]; 135 | if (pc != null) { 136 | await pc.setRemoteDescription( 137 | RTCSessionDescription(description['sdp'], description['type'])); 138 | } 139 | 140 | break; 141 | case 'candidate': 142 | var id = data['from']; 143 | var candidateMap = data['candidate']; 144 | var pc = _peerConnections[id]; 145 | var candidate = RTCIceCandidate(candidateMap['candidate'], 146 | candidateMap['sdpMid'], candidateMap['sdpMLineIndex']); 147 | if (pc != null) { 148 | await pc.addCandidate(candidate); 149 | } else { 150 | _remoteCandidates.add(candidate); 151 | } 152 | 153 | break; 154 | case 'leave': 155 | var id = data; 156 | var pc = _peerConnections.remove(id); 157 | _dataChannels.remove(id); 158 | 159 | if (_localStream != null) { 160 | _localStream!.getTracks().forEach((element) { 161 | element.stop(); 162 | }); 163 | _localStream = null; 164 | } 165 | 166 | if (pc != null) { 167 | await pc.close(); 168 | } 169 | _sessionId = null; 170 | onStateChange?.call(SignalingState.CallStateBye); 171 | 172 | break; 173 | case 'bye': 174 | var to = data['to']; 175 | var sessionId = data['session_id']; 176 | print('bye: ' + sessionId); 177 | 178 | if (_localStream != null) { 179 | _localStream!.getTracks().forEach((element) { 180 | element.stop(); 181 | }); 182 | _localStream = null; 183 | } 184 | 185 | var pc = _peerConnections[to]; 186 | if (pc != null) { 187 | await pc.close(); 188 | _peerConnections.remove(to); 189 | } 190 | 191 | var dc = _dataChannels[to]; 192 | if (dc != null) { 193 | await dc.close(); 194 | _dataChannels.remove(to); 195 | } 196 | 197 | _sessionId = null; 198 | onStateChange?.call(SignalingState.CallStateBye); 199 | 200 | break; 201 | case 
'keepalive': 202 | print('keepalive response!'); 203 | 204 | break; 205 | default: 206 | break; 207 | } 208 | } 209 | 210 | Future connect() async { 211 | var url = 'https://$_host:$_port/ws'; 212 | _socket = SimpleWebSocket(url); 213 | 214 | print('connect to $url'); 215 | 216 | if (_turnCredential == null) { 217 | try { 218 | _turnCredential = await getTurnCredential(_host, _port); 219 | _iceServers = [ 220 | { 221 | 'urls': _turnCredential['uris'][0], 222 | 'username': _turnCredential['username'], 223 | 'credential': _turnCredential['password'] 224 | } 225 | ]; 226 | } catch (e) { 227 | print('error: ${e.toString()}'); 228 | } 229 | } 230 | 231 | _socket?.onOpen = () { 232 | print('onOpen'); 233 | onStateChange?.call(SignalingState.ConnectionOpen); 234 | _send('new', 235 | {'name': 'dart_webrtc', 'id': _selfId, 'user_agent': 'broswer'}); 236 | }; 237 | 238 | _socket?.onMessage = (message) { 239 | print('Received data: ' + message); 240 | var decoder = JsonDecoder(); 241 | onMessage.call(decoder.convert(message)); 242 | }; 243 | 244 | _socket?.onClose = (int code, String reason) { 245 | print('Closed by server [$code => $reason]!'); 246 | onStateChange?.call(SignalingState.ConnectionClosed); 247 | }; 248 | 249 | await _socket?.connect(); 250 | } 251 | 252 | Future createStream(media, user_screen) async { 253 | var stream = await user_screen 254 | ? 
await navigator.mediaDevices.getDisplayMedia({}) 255 | : await navigator.mediaDevices.getUserMedia({ 256 | 'audio': true, 257 | 'video': { 258 | 'mandatory': { 259 | 'minWidth': 260 | '640', // Provide your own width, height and frame rate here 261 | 'minHeight': '480', 262 | 'minFrameRate': '30', 263 | }, 264 | 'facingMode': 'user', 265 | 'optional': [], 266 | } 267 | }); 268 | 269 | onLocalStream?.call(stream); 270 | 271 | return stream; 272 | } 273 | 274 | Future _createPeerConnection( 275 | id, media, user_screen) async { 276 | if (media != 'data') _localStream = await createStream(media, user_screen); 277 | var pc = await createPeerConnection({ 278 | 'iceServers': _iceServers.isNotEmpty 279 | ? _iceServers 280 | : [ 281 | {'urls': 'stun:stun.l.google.com:19302'} 282 | ] 283 | }); 284 | if (media != 'data') await pc.addStream(_localStream!); 285 | 286 | pc.onIceCandidate = (RTCIceCandidate? candidate) { 287 | try { 288 | if (candidate != null) { 289 | print(candidate.candidate); 290 | _send('candidate', { 291 | 'to': id, 292 | 'from': _selfId, 293 | 'candidate': { 294 | 'sdpMLineIndex': candidate.sdpMLineIndex, 295 | 'sdpMid': candidate.sdpMid, 296 | 'candidate': candidate.candidate, 297 | }, 298 | 'session_id': _sessionId, 299 | }); 300 | } 301 | } catch (e) { 302 | print(e.toString()); 303 | } 304 | }; 305 | 306 | pc.onIceConnectionState = (state) { 307 | print(state); 308 | }; 309 | 310 | pc.onAddStream = (MediaStream stream) { 311 | onAddRemoteStream?.call(stream); 312 | }; 313 | 314 | pc.onRemoveStream = (MediaStream stream) { 315 | onRemoveRemoteStream?.call(stream); 316 | _remoteStreams.removeWhere((it) => it.id == stream.id); 317 | }; 318 | 319 | pc.onDataChannel = (RTCDataChannel channel) { 320 | _addDataChannel(id, channel); 321 | }; 322 | 323 | return pc; 324 | } 325 | 326 | void _addDataChannel(id, RTCDataChannel channel) { 327 | channel.onMessage = (RTCDataChannelMessage msg) { 328 | onDataChannelMessage?.call(channel, msg); 329 | }; 330 | 
_dataChannels[id] = channel; 331 | onDataChannel?.call(channel); 332 | } 333 | 334 | void _createDataChannel(id, RTCPeerConnection pc, 335 | {String label = 'fileTransfer'}) async { 336 | var dataChannelDict = RTCDataChannelInit(); 337 | var channel = await pc.createDataChannel(label, dataChannelDict); 338 | _addDataChannel(id, channel); 339 | } 340 | 341 | void _createOffer(String id, RTCPeerConnection pc, String media) async { 342 | try { 343 | var offer = await pc.createOffer({ 344 | 'offerToReceiveAudio': media == 'data' ? false : true, 345 | 'offerToReceiveVideo': media == 'data' ? false : true, 346 | }); 347 | //print('type => ${offer.type}, sdp => ${offer.sdp}'); 348 | await pc.setLocalDescription(offer); 349 | _send('offer', { 350 | 'to': id, 351 | 'from': _selfId, 352 | 'description': {'sdp': offer.sdp, 'type': offer.type}, 353 | 'session_id': _sessionId, 354 | 'media': media, 355 | }); 356 | } catch (e) { 357 | print(e.toString()); 358 | } 359 | } 360 | 361 | Future _createAnswer(String id, RTCPeerConnection pc, media) async { 362 | try { 363 | var answer = await pc.createAnswer(); 364 | await pc.setLocalDescription(answer); 365 | _send('answer', { 366 | 'to': id, 367 | 'from': _selfId, 368 | 'description': {'sdp': answer.sdp, 'type': answer.type}, 369 | 'session_id': _sessionId, 370 | }); 371 | } catch (e) { 372 | print(e.toString()); 373 | } 374 | } 375 | 376 | void _send(event, data) { 377 | var request = {}; 378 | request['type'] = event; 379 | request['data'] = data; 380 | _socket?.send(_encoder.convert(request)); 381 | } 382 | } 383 | -------------------------------------------------------------------------------- /lib/src/rtc_peerconnection_impl.dart: -------------------------------------------------------------------------------- 1 | import 'dart:async'; 2 | import 'dart:html' as html; 3 | import 'dart:js' as js; 4 | import 'dart:js_util' as jsutil; 5 | 6 | import 'package:platform_detect/platform_detect.dart'; 7 | import 
'package:webrtc_interface/webrtc_interface.dart'; 8 | 9 | import 'media_stream_impl.dart'; 10 | import 'media_stream_track_impl.dart'; 11 | import 'rtc_data_channel_impl.dart'; 12 | import 'rtc_dtmf_sender_impl.dart'; 13 | import 'rtc_rtp_receiver_impl.dart'; 14 | import 'rtc_rtp_sender_impl.dart'; 15 | import 'rtc_rtp_transceiver_impl.dart'; 16 | 17 | /* 18 | * PeerConnection 19 | */ 20 | class RTCPeerConnectionWeb extends RTCPeerConnection { 21 | RTCPeerConnectionWeb(this._peerConnectionId, this._jsPc) { 22 | _jsPc.onAddStream.listen((mediaStreamEvent) { 23 | final jsStream = mediaStreamEvent.stream; 24 | if (jsStream == null) { 25 | throw Exception('Unable to get the stream from the event'); 26 | } 27 | if (jsStream.id == null) { 28 | throw Exception('The stream must have a valid identifier'); 29 | } 30 | 31 | final _remoteStream = _remoteStreams.putIfAbsent( 32 | jsStream.id!, () => MediaStreamWeb(jsStream, _peerConnectionId)); 33 | 34 | onAddStream?.call(_remoteStream); 35 | 36 | jsStream.onAddTrack.listen((mediaStreamTrackEvent) { 37 | final jsTrack = 38 | (mediaStreamTrackEvent as html.MediaStreamTrackEvent).track; 39 | if (jsTrack == null) { 40 | throw Exception('The Media Stream track is null'); 41 | } 42 | final track = MediaStreamTrackWeb(jsTrack); 43 | _remoteStream.addTrack(track, addToNative: false).then((_) { 44 | onAddTrack?.call(_remoteStream, track); 45 | }); 46 | }); 47 | 48 | jsStream.onRemoveTrack.listen((mediaStreamTrackEvent) { 49 | final jsTrack = 50 | (mediaStreamTrackEvent as html.MediaStreamTrackEvent).track; 51 | if (jsTrack == null) { 52 | throw Exception('The Media Stream track is null'); 53 | } 54 | final track = MediaStreamTrackWeb(jsTrack); 55 | _remoteStream.removeTrack(track, removeFromNative: false).then((_) { 56 | onRemoveTrack?.call(_remoteStream, track); 57 | }); 58 | }); 59 | }); 60 | 61 | _jsPc.onDataChannel.listen((dataChannelEvent) { 62 | if (dataChannelEvent.channel != null) { 63 | 
onDataChannel?.call(RTCDataChannelWeb(dataChannelEvent.channel!)); 64 | } 65 | }); 66 | 67 | _jsPc.onIceCandidate.listen((iceEvent) { 68 | if (iceEvent.candidate != null) { 69 | onIceCandidate?.call(_iceFromJs(iceEvent.candidate!)); 70 | } 71 | }); 72 | 73 | _jsPc.onIceConnectionStateChange.listen((_) { 74 | _iceConnectionState = 75 | iceConnectionStateForString(_jsPc.iceConnectionState); 76 | onIceConnectionState?.call(_iceConnectionState!); 77 | 78 | if (browser.isFirefox) { 79 | switch (_iceConnectionState!) { 80 | case RTCIceConnectionState.RTCIceConnectionStateNew: 81 | _connectionState = RTCPeerConnectionState.RTCPeerConnectionStateNew; 82 | break; 83 | case RTCIceConnectionState.RTCIceConnectionStateChecking: 84 | _connectionState = 85 | RTCPeerConnectionState.RTCPeerConnectionStateConnecting; 86 | break; 87 | case RTCIceConnectionState.RTCIceConnectionStateConnected: 88 | _connectionState = 89 | RTCPeerConnectionState.RTCPeerConnectionStateConnected; 90 | break; 91 | case RTCIceConnectionState.RTCIceConnectionStateFailed: 92 | _connectionState = 93 | RTCPeerConnectionState.RTCPeerConnectionStateFailed; 94 | break; 95 | case RTCIceConnectionState.RTCIceConnectionStateDisconnected: 96 | _connectionState = 97 | RTCPeerConnectionState.RTCPeerConnectionStateDisconnected; 98 | break; 99 | case RTCIceConnectionState.RTCIceConnectionStateClosed: 100 | _connectionState = 101 | RTCPeerConnectionState.RTCPeerConnectionStateClosed; 102 | break; 103 | default: 104 | break; 105 | } 106 | onConnectionState?.call(_connectionState!); 107 | } 108 | }); 109 | 110 | jsutil.setProperty(_jsPc, 'onicegatheringstatechange', js.allowInterop((_) { 111 | _iceGatheringState = iceGatheringStateforString(_jsPc.iceGatheringState); 112 | onIceGatheringState?.call(_iceGatheringState!); 113 | })); 114 | 115 | _jsPc.onRemoveStream.listen((mediaStreamEvent) { 116 | if (mediaStreamEvent.stream?.id != null) { 117 | final _remoteStream = 118 | _remoteStreams.remove(mediaStreamEvent.stream!.id); 
119 | if (_remoteStream != null) { 120 | onRemoveStream?.call(_remoteStream); 121 | } 122 | } 123 | }); 124 | 125 | _jsPc.onSignalingStateChange.listen((_) { 126 | _signalingState = signalingStateForString(_jsPc.signalingState); 127 | onSignalingState?.call(_signalingState!); 128 | }); 129 | 130 | if (!browser.isFirefox) { 131 | _jsPc.onConnectionStateChange.listen((_) { 132 | _connectionState = peerConnectionStateForString(_jsPc.connectionState); 133 | onConnectionState?.call(_connectionState!); 134 | }); 135 | } 136 | 137 | _jsPc.onNegotiationNeeded.listen((_) { 138 | onRenegotiationNeeded?.call(); 139 | }); 140 | 141 | _jsPc.onTrack.listen((trackEvent) { 142 | if (trackEvent.track != null && trackEvent.receiver != null) { 143 | onTrack?.call( 144 | RTCTrackEvent( 145 | track: MediaStreamTrackWeb(trackEvent.track!), 146 | receiver: RTCRtpReceiverWeb(trackEvent.receiver!), 147 | transceiver: RTCRtpTransceiverWeb.fromJsObject( 148 | jsutil.getProperty(trackEvent, 'transceiver')), 149 | streams: (trackEvent.streams != null) 150 | ? trackEvent.streams! 151 | .map((dynamic stream) => 152 | MediaStreamWeb(stream, _peerConnectionId)) 153 | .toList() 154 | : [], 155 | ), 156 | ); 157 | } 158 | }); 159 | } 160 | 161 | final String _peerConnectionId; 162 | late final html.RtcPeerConnection _jsPc; 163 | final _localStreams = {}; 164 | final _remoteStreams = {}; 165 | final _configuration = {}; 166 | 167 | RTCSignalingState? _signalingState; 168 | RTCIceGatheringState? _iceGatheringState; 169 | RTCIceConnectionState? _iceConnectionState; 170 | RTCPeerConnectionState? _connectionState; 171 | 172 | @override 173 | RTCSignalingState? get signalingState => _signalingState; 174 | 175 | @override 176 | RTCIceGatheringState? get iceGatheringState => _iceGatheringState; 177 | 178 | @override 179 | RTCIceConnectionState? get iceConnectionState => _iceConnectionState; 180 | 181 | @override 182 | RTCPeerConnectionState? 
get connectionState => _connectionState; 183 | 184 | @override 185 | Future dispose() { 186 | _jsPc.close(); 187 | return Future.value(); 188 | } 189 | 190 | @override 191 | Map get getConfiguration => _configuration; 192 | 193 | @override 194 | Future setConfiguration(Map configuration) { 195 | _configuration.addAll(configuration); 196 | 197 | _jsPc.setConfiguration(configuration); 198 | return Future.value(); 199 | } 200 | 201 | @override 202 | Future createOffer( 203 | [Map? constraints]) async { 204 | final args = constraints != null ? [jsutil.jsify(constraints)] : []; 205 | final desc = await jsutil.promiseToFuture( 206 | jsutil.callMethod(_jsPc, 'createOffer', args)); 207 | return RTCSessionDescription( 208 | jsutil.getProperty(desc, 'sdp'), jsutil.getProperty(desc, 'type')); 209 | } 210 | 211 | @override 212 | Future createAnswer( 213 | [Map? constraints]) async { 214 | final args = constraints != null ? [jsutil.jsify(constraints)] : []; 215 | final desc = await jsutil.promiseToFuture( 216 | jsutil.callMethod(_jsPc, 'createAnswer', args)); 217 | return RTCSessionDescription( 218 | jsutil.getProperty(desc, 'sdp'), jsutil.getProperty(desc, 'type')); 219 | } 220 | 221 | @override 222 | Future addStream(MediaStream stream) { 223 | var _native = stream as MediaStreamWeb; 224 | _localStreams.putIfAbsent( 225 | stream.id, () => MediaStreamWeb(_native.jsStream, _peerConnectionId)); 226 | _jsPc.addStream(_native.jsStream); 227 | return Future.value(); 228 | } 229 | 230 | @override 231 | Future removeStream(MediaStream stream) async { 232 | var _native = stream as MediaStreamWeb; 233 | _localStreams.remove(stream.id); 234 | _jsPc.removeStream(_native.jsStream); 235 | return Future.value(); 236 | } 237 | 238 | @override 239 | Future setLocalDescription(RTCSessionDescription description) async { 240 | await _jsPc.setLocalDescription(description.toMap()); 241 | } 242 | 243 | @override 244 | Future setRemoteDescription(RTCSessionDescription description) async { 245 | 
await _jsPc.setRemoteDescription(description.toMap()); 246 | } 247 | 248 | @override 249 | Future getLocalDescription() async { 250 | if (null == _jsPc.localDescription) { 251 | return null; 252 | } 253 | return _sessionFromJs(_jsPc.localDescription); 254 | } 255 | 256 | @override 257 | Future getRemoteDescription() async { 258 | if (null == _jsPc.remoteDescription) { 259 | return null; 260 | } 261 | return _sessionFromJs(_jsPc.remoteDescription); 262 | } 263 | 264 | @override 265 | Future addCandidate(RTCIceCandidate candidate) { 266 | return jsutil.promiseToFuture( 267 | jsutil.callMethod(_jsPc, 'addIceCandidate', [_iceToJs(candidate)])); 268 | } 269 | 270 | @override 271 | Future> getStats([MediaStreamTrack? track]) async { 272 | var stats; 273 | if (track != null) { 274 | var jsTrack = (track as MediaStreamTrackWeb).jsTrack; 275 | stats = await jsutil.promiseToFuture( 276 | jsutil.callMethod(_jsPc, 'getStats', [jsTrack])); 277 | } else { 278 | stats = await _jsPc.getStats(); 279 | } 280 | 281 | var report = []; 282 | stats.forEach((key, value) { 283 | report.add( 284 | StatsReport(value['id'], value['type'], value['timestamp'], value)); 285 | }); 286 | return report; 287 | } 288 | 289 | @override 290 | List getLocalStreams() => 291 | _jsPc.getLocalStreams().map((e) => _localStreams[e.id]!).toList(); 292 | 293 | @override 294 | List getRemoteStreams() => _jsPc 295 | .getRemoteStreams() 296 | .map((jsStream) => _remoteStreams[jsStream.id]!) 
297 | .toList(); 298 | 299 | @override 300 | Future createDataChannel( 301 | String label, RTCDataChannelInit dataChannelDict) { 302 | final map = dataChannelDict.toMap(); 303 | if (dataChannelDict.binaryType == 'binary') { 304 | map['binaryType'] = 'arraybuffer'; // Avoid Blob in data channel 305 | } 306 | 307 | final jsDc = _jsPc.createDataChannel(label, map); 308 | return Future.value(RTCDataChannelWeb(jsDc)); 309 | } 310 | 311 | @override 312 | Future restartIce() { 313 | jsutil.callMethod(_jsPc, 'restartIce', []); 314 | return Future.value(); 315 | } 316 | 317 | @override 318 | Future close() async { 319 | _jsPc.close(); 320 | return Future.value(); 321 | } 322 | 323 | @override 324 | RTCDTMFSender createDtmfSender(MediaStreamTrack track) { 325 | var _native = track as MediaStreamTrackWeb; 326 | var jsDtmfSender = _jsPc.createDtmfSender(_native.jsTrack); 327 | return RTCDTMFSenderWeb(jsDtmfSender); 328 | } 329 | 330 | // 331 | // utility section 332 | // 333 | 334 | RTCIceCandidate _iceFromJs(html.RtcIceCandidate candidate) => RTCIceCandidate( 335 | candidate.candidate, 336 | candidate.sdpMid, 337 | candidate.sdpMLineIndex, 338 | ); 339 | 340 | html.RtcIceCandidate _iceToJs(RTCIceCandidate c) => 341 | html.RtcIceCandidate(c.toMap()); 342 | 343 | RTCSessionDescription _sessionFromJs(html.RtcSessionDescription? sd) => 344 | RTCSessionDescription(sd?.sdp, sd?.type); 345 | 346 | @override 347 | Future addTrack(MediaStreamTrack track, 348 | [MediaStream? 
stream]) async { 349 | var jStream = (stream as MediaStreamWeb).jsStream; 350 | var jsTrack = (track as MediaStreamTrackWeb).jsTrack; 351 | var sender = _jsPc.addTrack(jsTrack, jStream); 352 | return RTCRtpSenderWeb.fromJsSender(sender); 353 | } 354 | 355 | @override 356 | Future removeTrack(RTCRtpSender sender) async { 357 | var nativeSender = sender as RTCRtpSenderWeb; 358 | // var nativeTrack = nativeSender.track as MediaStreamTrackWeb; 359 | jsutil.callMethod(_jsPc, 'removeTrack', [nativeSender.jsRtpSender]); 360 | return Future.value(true); 361 | } 362 | 363 | @override 364 | Future> getSenders() async { 365 | var senders = jsutil.callMethod(_jsPc, 'getSenders', []); 366 | var list = []; 367 | senders.forEach((e) { 368 | list.add(RTCRtpSenderWeb.fromJsSender(e)); 369 | }); 370 | return list; 371 | } 372 | 373 | @override 374 | Future> getReceivers() async { 375 | var receivers = jsutil.callMethod(_jsPc, 'getReceivers', []); 376 | 377 | var list = []; 378 | receivers.forEach((e) { 379 | list.add(RTCRtpReceiverWeb(e)); 380 | }); 381 | 382 | return list; 383 | } 384 | 385 | @override 386 | Future> getTransceivers() async { 387 | var transceivers = jsutil.callMethod(_jsPc, 'getTransceivers', []); 388 | 389 | var list = []; 390 | transceivers.forEach((e) { 391 | list.add(RTCRtpTransceiverWeb.fromJsObject(e)); 392 | }); 393 | 394 | return list; 395 | } 396 | 397 | //'audio|video', { 'direction': 'recvonly|sendonly|sendrecv' } 398 | // 399 | // https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/addTransceiver 400 | // 401 | @override 402 | Future addTransceiver({ 403 | MediaStreamTrack? track, 404 | RTCRtpMediaType? kind, 405 | RTCRtpTransceiverInit? init, 406 | }) async { 407 | final jsTrack = track is MediaStreamTrackWeb ? track.jsTrack : null; 408 | final kindString = kind != null ? typeRTCRtpMediaTypetoString[kind] : null; 409 | final trackOrKind = jsTrack ?? 
kindString; 410 | assert(trackOrKind != null, 'track or kind must not be null'); 411 | 412 | final transceiver = jsutil.callMethod( 413 | _jsPc, 414 | 'addTransceiver', 415 | [ 416 | trackOrKind, 417 | if (init != null) init.toJsObject(), 418 | ], 419 | ); 420 | 421 | return RTCRtpTransceiverWeb.fromJsObject( 422 | transceiver, 423 | peerConnectionId: _peerConnectionId, 424 | ); 425 | } 426 | } 427 | --------------------------------------------------------------------------------