├── .gitignore
├── README.md
├── android_webrtc_demo.apk
├── receiver
├── index.html
├── main.css
└── webrtc.js
└── sender
├── AndroidManifest.xml
├── ic_launcher-web.png
├── libs
├── armeabi-v7a
│ └── libjingle_peerconnection_so.so
└── libjingle_peerconnection.jar
├── lint.xml
├── project.properties
├── res
├── drawable-hdpi
│ ├── disconnect.png
│ ├── ic_action_full_screen.png
│ ├── ic_action_return_from_full_screen.png
│ ├── ic_launcher.png
│ └── ic_loopback_call.png
├── drawable-mdpi
│ └── ic_launcher.png
├── drawable-xhdpi
│ └── ic_launcher.png
├── drawable-xxhdpi
│ └── ic_launcher.png
├── layout
│ ├── activity_fullscreen.xml
│ └── fragment_menubar.xml
├── menu
│ └── main.xml
├── values-v11
│ └── styles.xml
├── values-v14
│ └── styles.xml
└── values
│ ├── strings.xml
│ └── styles.xml
└── src
└── com
└── infthink
└── demo
└── webrtc
├── AppRTCAudioManager.java
├── MainActivity.java
├── PeerConnectionClient.java
├── WebRtcHelper.java
└── WebrtcChannel.java
/.gitignore:
--------------------------------------------------------------------------------
1 | # built application files
2 | *.ap_
3 |
4 | # files for the dex VM
5 | *.dex
6 |
7 | # Java class files
8 | *.class
9 |
10 | # generated files
11 | bin/
12 | gen/
13 |
14 | # Ignore gradle files
15 | .gradle/
16 | build/
17 |
18 | # Local configuration file (sdk path, etc)
19 | local.properties
20 |
21 | # Proguard folder generated by Eclipse
22 | proguard/
23 | proguard-project.txt
24 |
25 | # Eclipse files
26 | .project
27 | .classpath
28 | .settings/
29 |
30 | # Android Studio/IDEA
31 | *.iml
32 | .idea
33 |
 34 | # MAC cache file
35 | .DS_Store
36 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Android WebRTC Demo
2 | ===================
3 |
4 | This is an example to show how to use **Flint** protocol and **WebRTC** libraries on *Android* platform to do camera-mirror.
5 |
6 | ----------
7 |
8 | How To Use
9 | -------------
10 | 1. Download source code
11 | 2. [Download Flint Android SDK](https://github.com/openflint/flint-android-sender-sdk)
12 | 3. Compile Android sender application which is located at **sender** directory by using **Eclipse**.
13 | 4. Run it on Android mobile device which should have cameras(one or two).
14 |
15 | > **Note:**
16 |
17 | > - Please refer http://www.openflint.org/ for more details.
18 |
19 |
20 | ----------
21 | Features
22 | -------------
23 |
24 | 1. Support 2 senders
 25 | 2. Support front/back cameras mirror
 26 | 3. Support video/audio mirror (needs a small code change)
27 | 4. Support ***remote mode*** of **Flint**, which can remotely mirror android device's cameras.
28 |
29 | ----------
30 |
31 | Reference
32 | -------------
33 |
34 | 1. [Android WebRTC](http://www.webrtc.org/native-code/android)
35 | 2. [Firefox OS WebRTC](https://developer.mozilla.org/en-US/docs/Web/Guide/API/WebRTC)
36 | 3. [Flint Multi-Screen Protocol](http://www.openflint.org/)
37 |
38 |
--------------------------------------------------------------------------------
/android_webrtc_demo.apk:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openflint/android-webrtc-demo/f7a23895780a479766b6b8551a3117adeff86b96/android_webrtc_demo.apk
--------------------------------------------------------------------------------
/receiver/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 | Android Webrtc Demo For Flint
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
23 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/receiver/main.css:
--------------------------------------------------------------------------------
1 | .hidden {
2 | display: none;
3 | }
4 | a {
5 | color: #69d;
6 | text-decoration: none;
7 | }
8 | a:hover {
9 | color: #8bf;
10 | text-decoration: underline;
11 | }
12 | #room-link a {
13 | white-space: nowrap;
14 | }
15 | body {
16 | background-color: black;
17 | font-family: "Roboto","Open Sans","Lucida Grande",sans-serif;
18 | height: 100%;
19 | margin: 0;
20 | padding: 0;
21 | width: 100%;
22 | }
23 | #remote-canvas {
24 | display: none;
25 | height: 100%;
26 | margin: 0 auto;
27 | width: 100%;
28 | }
29 | div.warning {
30 | background-color: #a80202;
31 | color: black;
32 | font-weight: 400;
33 | opacity: 0.9;
34 | }
35 | #container {
36 | height: 100%;
37 | position: absolute;
38 | }
39 | #info {
40 | z-index: 3;
41 | }
42 | #room-link {
43 | margin: 0 0 29px;
44 | }
45 | #status {
46 | z-index: 4;
47 | }
48 | #videos {
49 | height: 100%;
50 | pointer-events: none;
51 | position: absolute;
52 | transition: all 1s ease 0s;
53 | width: 100%;
54 | }
55 | #videos.active {
56 | transform: rotateY(180deg);
57 | }
58 | footer > div {
59 | background-color: black;
60 | bottom: 0;
61 | color: white;
62 | font-size: 0.9em;
63 | font-weight: 300;
64 | line-height: 2em;
65 | max-height: 80%;
66 | opacity: 0;
67 | overflow-y: auto;
68 | padding: 10px;
69 | position: absolute;
70 | transition: opacity 1s ease 0s;
71 | width: calc(100% - 20px);
72 | }
73 | footer > div.active {
74 | opacity: 0.8;
75 | }
76 | @keyframes rotate {
77 | 0% {
78 | transform: rotateY(0deg);
79 | }
80 | 100% {
81 | transform: rotateY(180deg);
82 | }
83 | }
84 | html {
85 | height: 100%;
86 | margin: 0;
87 | width: 100%;
88 | }
89 | label {
90 | margin: 0 10px 0 0;
91 | }
92 | #mini-video {
93 | border: 1px solid gray;
94 | bottom: 20px;
95 | left: 20px;
96 | max-height: 20%;
97 | max-width: 20%;
98 | opacity: 0;
99 | position: absolute;
100 | transition: opacity 1s ease 0s;
101 | }
102 | #mini-video.active {
103 | opacity: 1;
104 | z-index: 2;
105 | }
106 | #large-video {
107 | display: block;
108 | height: 100%;
109 | max-height: 100%;
110 | max-width: 100%;
111 | opacity: 0;
112 | position: absolute;
113 | transform: rotateY(180deg);
114 | transition: opacity 1s ease 0s;
115 | width: 100%;
116 | }
117 | #large-video.active {
118 | opacity: 1;
119 | z-index: 1;
120 | }
121 |
122 |
--------------------------------------------------------------------------------
/receiver/webrtc.js:
--------------------------------------------------------------------------------
1 | var flint = window.flint || {};
2 |
3 | (function () {
4 | 'use strict';
5 |
6 | // namespace which should be equal to sender's.
7 | var WEBRTC_NAMESPACE = 'urn:flint:com.infthink.demo.webrtc';
8 |
9 | // APP NAME
10 | var WEBRTC_APPNAME = '~flint_android_webrtc_demo';
11 |
12 | function Webrtc(divs) {
13 | self = this;
14 |
15 | self.peers = {};
16 | self.videos = {};
17 | self.streams = {};
18 | self._senders = {};
19 |
20 | window.flintReceiverManager = new FlintReceiverManager(WEBRTC_APPNAME);
21 | window.messageBus = window.flintReceiverManager.createMessageBus(WEBRTC_NAMESPACE);
22 | window.messageBus.on("message", function(message, senderId) {
23 | console.log("onMessage called with: " + message);
24 | var data = JSON.parse(message);
25 | ("onMessage" in self) && self.onMessage(senderId, data);
26 | });
27 |
28 | window.messageBus.on('senderConnected', function (senderId) {
29 | self._senders[senderId] = senderId;
30 | console.log("senderConnected!!!!!");
31 | self.onSenderConnected(senderId);
32 | });
33 | window.messageBus.on('senderDisconnected', function(senderId) {
34 | delete self._senders[senderId];
35 | console.log("senderDisconnected!!!!!");
36 | self.onSenderDisconnected(senderId);
37 | });
38 |
39 | // webkitRTCPeerConnection is Chrome specific
40 | window.RTCPeerConnection = window.RTCPeerConnection|| window.mozRTCPeerConnection || window.webkitRTCPeerConnection;
41 | window.SessionDescription = window.RTCSessionDescription || window.mozRTCSessionDescription || window.webkitRTCSessionDescription;
42 | window.RTCIceCandidate = window.RTCIceCandidate || window.mozRTCIceCandidate;
43 | window.URL = window.URL || window.webkitURL || window.mozURL || window.msURL;
44 |
45 | // ready to receive messages
46 | window.flintReceiverManager.open();
47 | };
48 |
49 | Webrtc.prototype = {
50 | getSenderList: function() {
51 | return this._senders;
52 | },
53 |
54 | log: function(msg) {
55 | console.log("flint.Webrtc: " + msg);
56 | },
57 |
58 | failure: function(x) {
59 | this.log("ERROR: " + JSON.stringify(x));
60 | },
61 |
62 | getPeerConnection: function(senderId) {
63 | return this.peers[senderId];
64 | },
65 |
 66 | // hide video view
67 | hideVideo: function(element) {
68 | element.classList.remove('active');
69 | element.classList.add('hidden');
70 | },
71 |
72 | // show video view
73 | showVideo: function(element) {
74 | element.classList.remove('hidden');
75 | element.classList.add('active');
76 | },
77 |
78 | // called when sender connected
79 | onSenderConnected: function (senderId) {
80 | this.log('onSenderConnected. Total number of senders: ' + Object.keys(self.getSenderList()).length);
81 | self.showVideo(divs.large_video);
82 | if (Object.keys(self.getSenderList()).length == 2) {
83 | self.showVideo(divs.mini_video);
84 | }
85 | },
86 |
87 | // called when sender disconnected
88 | onSenderDisconnected: function (senderId) {
89 | // delete related data
90 | var pc = this.getPeerConnection(senderId);
91 | if (pc !== 'undefined') {
92 | try {
93 | pc.close();
94 | } catch(e) {
95 | }
96 | delete self.peers[senderId];
97 | delete self.videos[senderId];
98 | delete self.streams[senderId];
99 | }
100 |
101 | // hide mini video
102 | self.hideVideo(divs.mini_video);
103 |
104 | this.log('onSenderDisconnected. Total number of senders: ' + Object.keys(self.getSenderList()).length);
105 |
106 | // display large video
107 | if (Object.keys(self.getSenderList()).length == 1) {
108 | var sender = Object.keys(self.getSenderList())[0];
109 | if (senderId == sender) {
110 | this.log("disconnected??");
111 | return;
112 | }
113 |
114 | self.attachMediaStream(divs.large_video, self.streams[sender]);
115 | divs.mini_video.src = "";
116 |
117 | self.videos[sender] = 'large';
118 |
119 | // notify all that someone left
120 | self.broadcastBye(senderId);
121 | }
122 |
123 | // close app?
124 | if (Object.keys(self.getSenderList()).length == 0) {
125 | window.close();
126 | }
127 | },
128 |
129 | // send message to sender app
130 | sendMessage : function(senderId, msg) {
131 | if (msg == null) {
132 | this.log("send ignore for msg is null!!");
133 | return;
134 | }
135 | this.log("Sending: senderId: " + senderId + " msg:"+ msg);
136 | window.messageBus.send(JSON.stringify(msg), senderId);
137 | },
138 |
139 | // called when received message from sender app
140 | onMessage: function(senderId, data) {
141 | if (!data) {
142 | return;
143 | }
144 |
145 | if (Object.keys(self.getSenderList()).length > 2) {
146 | this.sendMessage(senderId, {'type': 'byte', 'data':'room is full!Current user num[' + Object.keys(self.getSenderList()).length + ']'});
147 | return;
148 | }
149 |
150 | this.log("Received message " + JSON.stringify(data));
151 |
152 | if (data) {
153 | if (data.type === "offer") {
154 | this.processOffer(senderId, data);
155 | } else if (data.type === 'candidate') {
156 | this.processIceCandidate(senderId, data);
157 | } else if (data.type === "switchview") {
158 | this.switchView(senderId, data);
159 | } else if (data.type === "bye") {
160 | this.broadcastBye(senderId);
161 | } else {
162 | this.log("unknown command!!!" + data.type);
163 | }
164 | }
165 | },
166 |
167 | processAnswer: function(senderId, sdp) {
168 | this.log('Received answer...' + sdp.sdp);
169 | var des = new window.SessionDescription(sdp);
170 | var pc = self.getPeerConnection(senderId);
171 | pc.setRemoteDescription(des);
172 | },
173 |
174 | // switch video view. mini<->large
175 | switchView: function() {
176 | this.log("switchView!!");
177 | var senders = Object.keys(self.getSenderList());
178 | if (senders.length == 0) {
179 | return;
180 | }
181 |
182 | // switch views. mini<->large
183 | if (senders.length == 2) {
184 | console.log("switch!!!!!")
185 | var one = senders[0];
186 | var other = senders[1];
187 | if (self.videos[one] == 'large') {
188 | self.attachMediaStream(divs.large_video, self.streams[other]);
189 | self.attachMediaStream(divs.mini_video, self.streams[one]);
190 | } else {
191 | self.attachMediaStream(divs.large_video, self.streams[one]);
192 | self.attachMediaStream(divs.mini_video, self.streams[other]);
193 | }
194 |
195 | var source = self.videos[one];
196 | self.videos[one] = self.videos[other];
197 | self.videos[other] = source;
198 | }
199 | },
200 |
201 | // broadcast bye
202 | broadcastBye: function(senderId) {
203 | window.messageBus.send("{'type':'bye', 'data': 'some user is left!'}");
204 | },
205 |
206 | reattachMediaStream: function(to, from) {
207 | if (typeof to.srcObject !== 'undefined') {
208 | to.srcObject = from.srcObject;
209 | } else {
210 | to.src = from.src;
211 | }
212 | },
213 |
214 | attachMediaStream: function(element, stream) {
215 | if (typeof element.srcObject !== 'undefined') {
216 | element.srcObject = stream;
217 | } else if (typeof element.mozSrcObject !== 'undefined') {
218 | element.mozSrcObject = stream;
219 | } else if (typeof element.src !== 'undefined') {
220 | element.src = URL.createObjectURL(stream);
221 | } else {
222 | console.log('Error attaching stream to element.');
223 | }
224 | },
225 |
 226 | // process offer. Each sender will be serviced by one created RTCPeerConnection.
227 | processOffer: function(senderId, sdp) {
228 | this.log("Applying offer");
229 |
230 | function _createPeerConnection(senderId) {
231 | //var config = {"iceServers":[]};
232 | var config = {"iceServers":[{"url":"stun:stun.services.mozilla.com"}, {"url": "stun:stun.l.google.com:19302"}]};
233 |
234 | var pc = new window.RTCPeerConnection(config, {});
235 |
236 | if (self.peers[senderId]) {
237 | var peerConn = self.peers[senderId];
238 | peerConn.close();
239 | delete self.peers[senderId];
240 | }
241 |
242 | // save it?
243 | self.peers[senderId] = pc;
244 |
245 | // Set callbacks or new media streams
246 | pc.onaddstream = function(obj) {
247 | self.log("Adding remote video stream!");
248 |
249 | // let the new guy displayed in large video view
250 | self.attachMediaStream(divs.large_video, obj.stream);
251 | self.videos[senderId] = 'large';
252 | self.streams[senderId] = obj.stream;
253 |
254 | var senders = Object.keys(self.getSenderList());
255 |
256 | // TEMP fix WA for no stream issue
257 | //if (senders.length == 1) {
258 | // pc.addStream(obj.stream);
259 | //}
260 |
261 | // let the other one displayed in mini video view.
262 | if (senders.length == 2) {
263 | var one = senders[0];
264 | var other = senders[1];
265 | var mini = null;
266 | if (one != senderId) {
267 | mini = one;
268 | } else {
269 | mini = other;
270 | }
271 | self.attachMediaStream(divs.mini_video, self.streams[mini]);
272 | self.videos[mini] = 'mini';
273 |
274 | /*
275 | // let remote video displayed on sender app
276 | if (one == senderId) {
277 | self.log("add stream one?????");
278 | pc.addStream(self.streams[other]);
279 |
280 | var peerConn = self.getPeerConnection(other);
281 | peerConn.removeStream(self.streams[other]);
282 | peerConn.addStream(self.streams[one]);
283 | } else {
284 | self.log("add stream other?????");
285 | pc.addStream(self.streams[one]);
286 |
287 | var peerConn = self.getPeerConnection(one);
288 | peerConn.removeStream(self.streams[one]);
289 | peerConn.addStream(self.streams[other]);
290 | }
291 | */
292 | }
293 | }
294 |
295 | pc.onremovestream = function(obj) {
296 | self.log("Remove video stream");
297 | }
298 |
299 | pc.onicecandidate = _onIceCandidate.bind(this);
300 | pc.onsignalingstatechange = _onSignalingStateChange.bind(this);
301 | pc.oniceconnectionstatechange = _onIceConnectionStateChange.bind(this);
302 | pc.onicechange = _onIceStateChange.bind(this);
303 |
304 | return pc;
305 | }
306 |
307 | function transitionToActive() {
308 | divs.large_video.oncanplay = undefined;
309 | }
310 |
311 | function waitForRemoteVideo() {
312 | if (divs.large_video.readyState >= 2) {
313 | self.log('Remote video started; currentTime: ' + divs.large_video.currentTime);
314 | transitionToActive();
315 | } else {
316 | divs.large_video.oncanplay = waitForRemoteVideo;
317 | }
318 | }
319 |
320 | function _setRemoteOfferSuccess() {
321 | var remoteStreams = pc.getRemoteStreams();
322 | if (remoteStreams.length > 0 && remoteStreams[0].getVideoTracks().length > 0) {
323 | self.log("Waiting for remote video.");
324 | waitForRemoteVideo();
325 | }
326 | self.log("Successfully applied offer...create answer!");
327 | var mediaConstraints = {
328 | 'mandatory': {
329 | 'OfferToReceiveAudio': false,
330 | 'OfferToReceiveVideo': true
331 | },
332 | };
333 | pc.createAnswer(_createAnswerSuccess.bind(this), self.failure, mediaConstraints);
334 | }
335 |
336 | function _createAnswerSuccess(sdp) {
337 | self.log("Successfully created answer " + JSON.stringify(sdp));
338 |
339 | pc.setLocalDescription(sdp, _setLocalAnswerSuccess, self.failure);
340 | self.sendMessage(senderId, sdp);
341 | }
342 |
343 | function _setLocalAnswerSuccess(sdp) {
344 | self.log("Successfully applied local description: " + JSON.stringify(sdp));
345 | }
346 |
347 | function _onIceCandidate(evt) {
348 | self.log("New ICE candidate:" + evt);
349 |
350 | if (evt.candidate) {
351 | window.messageBus.send(JSON.stringify({
352 | type: "candidate",
353 | sdpMLineIndex: evt.candidate.sdpMLineIndex,
354 | sdpMid: evt.candidate.sdpMid,
355 | candidate: evt.candidate.candidate
356 | }), senderId);
357 | }
358 | }
359 |
360 | function _onSignalingStateChange() {
361 | self.log("Signaling state change. New state = " + pc.signalingState);
362 | }
363 |
364 | function _onIceConnectionStateChange() {
365 | self.log("Ice state change. New state = " + pc.iceConnectionState);
366 | }
367 |
368 | function _onIceStateChange(x) {
369 | self.log("Ice state change. New state = " + x);
370 | }
371 |
372 | var pc = _createPeerConnection(senderId);
373 | pc.setRemoteDescription(new window.SessionDescription(sdp),
374 | _setRemoteOfferSuccess.bind(this), self.failure);
375 | },
376 |
377 | processIceCandidate: function(senderId, msg) {
378 | this.log("Applying ICE candidate: " + JSON.stringify(msg));
379 | var candidate = new window.RTCIceCandidate({
380 | sdpMLineIndex: msg.sdpMLineIndex,
381 | sdpMid: msg.sdpMid,
382 | candidate: msg.candidate
383 | });
384 |
385 | var pc = self.getPeerConnection(senderId);
386 | if (pc) {
387 | pc.addIceCandidate(candidate);
388 | } else {
389 | this.failure("processIceCandidate: " + candidate + " pc is null!");
390 | }
391 | },
392 | };
393 |
394 | // Exposes public functions and APIs
395 | flint.Webrtc = Webrtc;
396 | })();
397 |
--------------------------------------------------------------------------------
/sender/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
6 |
7 |
10 |
11 |
12 |
13 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
30 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
--------------------------------------------------------------------------------
/sender/ic_launcher-web.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openflint/android-webrtc-demo/f7a23895780a479766b6b8551a3117adeff86b96/sender/ic_launcher-web.png
--------------------------------------------------------------------------------
/sender/libs/armeabi-v7a/libjingle_peerconnection_so.so:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openflint/android-webrtc-demo/f7a23895780a479766b6b8551a3117adeff86b96/sender/libs/armeabi-v7a/libjingle_peerconnection_so.so
--------------------------------------------------------------------------------
/sender/libs/libjingle_peerconnection.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openflint/android-webrtc-demo/f7a23895780a479766b6b8551a3117adeff86b96/sender/libs/libjingle_peerconnection.jar
--------------------------------------------------------------------------------
/sender/lint.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/sender/project.properties:
--------------------------------------------------------------------------------
1 | # This file is automatically generated by Android Tools.
2 | # Do not modify this file -- YOUR CHANGES WILL BE ERASED!
3 | #
4 | # This file must be checked in Version Control Systems.
5 | #
6 | # To customize properties used by the Ant build system edit
7 | # "ant.properties", and override values to adapt the script to your
8 | # project structure.
9 | #
10 | # To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
11 | #proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
12 |
13 | # Project target.
14 | target=android-19
15 | android.library.reference.1=../../flint-android-sender-sdk/lib_source/mediarouter
16 | android.library.reference.2=../../flint-android-sender-sdk
17 | android.library.reference.3=../../flint-android-sender-sdk/lib_source/appcompat
18 |
--------------------------------------------------------------------------------
/sender/res/drawable-hdpi/disconnect.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openflint/android-webrtc-demo/f7a23895780a479766b6b8551a3117adeff86b96/sender/res/drawable-hdpi/disconnect.png
--------------------------------------------------------------------------------
/sender/res/drawable-hdpi/ic_action_full_screen.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openflint/android-webrtc-demo/f7a23895780a479766b6b8551a3117adeff86b96/sender/res/drawable-hdpi/ic_action_full_screen.png
--------------------------------------------------------------------------------
/sender/res/drawable-hdpi/ic_action_return_from_full_screen.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openflint/android-webrtc-demo/f7a23895780a479766b6b8551a3117adeff86b96/sender/res/drawable-hdpi/ic_action_return_from_full_screen.png
--------------------------------------------------------------------------------
/sender/res/drawable-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openflint/android-webrtc-demo/f7a23895780a479766b6b8551a3117adeff86b96/sender/res/drawable-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/sender/res/drawable-hdpi/ic_loopback_call.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openflint/android-webrtc-demo/f7a23895780a479766b6b8551a3117adeff86b96/sender/res/drawable-hdpi/ic_loopback_call.png
--------------------------------------------------------------------------------
/sender/res/drawable-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openflint/android-webrtc-demo/f7a23895780a479766b6b8551a3117adeff86b96/sender/res/drawable-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/sender/res/drawable-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openflint/android-webrtc-demo/f7a23895780a479766b6b8551a3117adeff86b96/sender/res/drawable-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/sender/res/drawable-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openflint/android-webrtc-demo/f7a23895780a479766b6b8551a3117adeff86b96/sender/res/drawable-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/sender/res/layout/activity_fullscreen.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
9 |
10 |
14 |
15 |
24 |
32 |
41 |
42 |
43 |
--------------------------------------------------------------------------------
/sender/res/layout/fragment_menubar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
11 |
12 |
18 |
19 |
20 |
21 |
27 |
28 |
34 |
35 |
40 |
41 |
46 |
47 |
48 |
--------------------------------------------------------------------------------
/sender/res/menu/main.xml:
--------------------------------------------------------------------------------
1 |
2 |
17 |
18 |
26 |
--------------------------------------------------------------------------------
/sender/res/values-v11/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
7 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/sender/res/values-v14/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
8 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/sender/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | webrtc
5 | Hello world!
6 |
7 | Stop Receiver Application!
8 |
9 |
10 |
--------------------------------------------------------------------------------
/sender/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
7 |
14 |
15 |
16 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/sender/src/com/infthink/demo/webrtc/AppRTCAudioManager.java:
--------------------------------------------------------------------------------
1 | /*
2 | * libjingle
3 | * Copyright 2013, Google Inc.
4 | *
5 | * Redistribution and use in source and binary forms, with or without
6 | * modification, are permitted provided that the following conditions are met:
7 | *
8 | * 1. Redistributions of source code must retain the above copyright notice,
9 | * this list of conditions and the following disclaimer.
10 | * 2. Redistributions in binary form must reproduce the above copyright notice,
11 | * this list of conditions and the following disclaimer in the documentation
12 | * and/or other materials provided with the distribution.
13 | * 3. The name of the author may not be used to endorse or promote products
14 | * derived from this software without specific prior written permission.
15 | *
16 | * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
17 | * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 | * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
19 | * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 | * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
21 | * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
22 | * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
23 | * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
24 | * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
25 | * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 | */
27 |
28 | package com.infthink.demo.webrtc;
29 |
30 | import android.content.Context;
31 | import android.media.AudioManager;
32 | import android.util.Log;
33 |
34 | /**
35 | * AppRTCAudioManager manages all audio related parts of the AppRTC demo.
36 | * TODO(henrika): add support for device enumeration, device selection etc.
37 | */
38 | public class AppRTCAudioManager {
39 | private static final String TAG = "AppRTCAudioManager";
40 |
41 | private boolean initialized = false;
42 | private AudioManager audioManager;
43 | private int savedAudioMode = AudioManager.MODE_INVALID;
44 | private boolean savedIsSpeakerPhoneOn = false;
45 | private boolean savedIsMicrophoneMute = false;
46 |
47 | /** Construction */
48 | static AppRTCAudioManager create(Context context) {
49 | return new AppRTCAudioManager(context);
50 | }
51 |
52 | private AppRTCAudioManager(Context context) {
53 | Log.d(TAG, "AppRTCAudioManager");
54 | audioManager = ((AudioManager) context
55 | .getSystemService(Context.AUDIO_SERVICE));
56 | }
57 |
58 | public void init() {
59 | Log.d(TAG, "init");
60 | if (initialized) {
61 | return;
62 | }
63 |
64 | // Store current audio state so we can restore it when close() is
65 | // called.
66 | savedAudioMode = audioManager.getMode();
67 | savedIsSpeakerPhoneOn = audioManager.isSpeakerphoneOn();
68 | savedIsMicrophoneMute = audioManager.isMicrophoneMute();
69 |
70 | // The AppRTC demo shall always run in COMMUNICATION mode since it will
71 | // result in best possible "VoIP settings", like audio routing, volume
72 | // control etc.
73 | audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
74 |
75 | initialized = true;
76 | }
77 |
78 | public void close() {
79 | Log.d(TAG, "close");
80 | if (!initialized) {
81 | return;
82 | }
83 |
84 | // Restore previously stored audio states.
85 | setSpeakerphoneOn(savedIsSpeakerPhoneOn);
86 | setMicrophoneMute(savedIsMicrophoneMute);
87 | audioManager.setMode(savedAudioMode);
88 |
89 | initialized = false;
90 | }
91 |
92 | /** Sets the speaker phone mode. */
93 | private void setSpeakerphoneOn(boolean on) {
94 | boolean wasOn = audioManager.isSpeakerphoneOn();
95 | if (wasOn == on) {
96 | return;
97 | }
98 | audioManager.setSpeakerphoneOn(on);
99 | }
100 |
101 | /** Sets the microphone mute state. */
102 | private void setMicrophoneMute(boolean on) {
103 | boolean wasMuted = audioManager.isMicrophoneMute();
104 | if (wasMuted == on) {
105 | return;
106 | }
107 | audioManager.setMicrophoneMute(on);
108 | }
109 | }
110 |
--------------------------------------------------------------------------------
/sender/src/com/infthink/demo/webrtc/MainActivity.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2013-2015, Infthink (Beijing) Technology Co., Ltd.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS-IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.infthink.demo.webrtc;
17 |
18 | import java.io.IOException;
19 |
20 | import org.json.JSONException;
21 | import org.json.JSONObject;
22 | import org.webrtc.IceCandidate;
23 | import org.webrtc.MediaStream;
24 | import org.webrtc.PeerConnectionFactory;
25 | import org.webrtc.SessionDescription;
26 | import org.webrtc.VideoRenderer;
27 | import org.webrtc.VideoRendererGui;
28 | import org.webrtc.VideoRendererGui.ScalingType;
29 |
30 | import tv.matchstick.flint.ApplicationMetadata;
31 | import tv.matchstick.flint.ConnectionResult;
32 | import tv.matchstick.flint.Flint;
33 | import tv.matchstick.flint.Flint.ApplicationConnectionResult;
34 | import tv.matchstick.flint.FlintDevice;
35 | import tv.matchstick.flint.FlintManager;
36 | import tv.matchstick.flint.FlintMediaControlIntent;
37 | import tv.matchstick.flint.ResultCallback;
38 | import tv.matchstick.flint.Status;
39 | import android.app.Fragment;
40 | import android.content.pm.ActivityInfo;
41 | import android.graphics.Color;
42 | import android.opengl.GLSurfaceView;
43 | import android.os.Bundle;
44 | import android.support.v4.app.FragmentActivity;
45 | import android.support.v7.app.MediaRouteButton;
46 | import android.support.v7.media.MediaRouteSelector;
47 | import android.support.v7.media.MediaRouter;
48 | import android.support.v7.media.MediaRouter.RouteInfo;
49 | import android.util.Log;
50 | import android.util.TypedValue;
51 | import android.view.LayoutInflater;
52 | import android.view.Menu;
53 | import android.view.MenuItem;
54 | import android.view.View;
55 | import android.view.ViewGroup;
56 | import android.view.ViewGroup.LayoutParams;
57 | import android.view.Window;
58 | import android.view.WindowManager;
59 | import android.widget.ImageButton;
60 | import android.widget.TextView;
61 | import android.widget.Toast;
62 |
63 | import com.infthink.demo.webrtc.WebRtcHelper.SignalingParameters;
64 |
/**
 * Full-screen sender activity for the Flint/WebRTC camera-mirror demo.
 * <p>
 * Responsibilities visible in this class:
 * <ul>
 * <li>discover Flint receivers via MediaRouter and launch the receiver page
 *     at {@link #APP_URL} on the selected device;</li>
 * <li>drive WebRTC signaling (SDP offer/answer, ICE candidates) over a
 *     {@link WebrtcChannel} Flint message channel;</li>
 * <li>render the local camera preview and remote stream via VideoRendererGui.</li>
 * </ul>
 */
public class MainActivity extends FragmentActivity implements
        WebRtcHelper.SignalingEvents, PeerConnectionClient.PeerConnectionEvents {

    private static final String TAG = "flint_webrtc";

    // Receiver-side page launched on the Flint device.
    private static final String APP_URL = "http://openflint.github.io/android-webrtc-demo/receiver/index.html";

    private PeerConnectionClient mPeerConn;

    // True while the ICE connection is established.
    private boolean mIceConnected;

    // True once the remote media stream has been added.
    private boolean mStreamAdded = false;

    // UI widgets.
    private View mRootView;
    private TextView mEncoderStatView;
    private View mMenuBar;
    private TextView mRoomName;
    private GLSurfaceView mVideoView;
    private VideoRenderer.Callbacks mLocalRender;
    private VideoRenderer.Callbacks mRemoteRender;
    private ImageButton mVideoScalingButton;

    // Debug HUD overlay, toggled by the debug button.
    private TextView mHudView;
    private final LayoutParams mHudLayout = new LayoutParams(
            LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);

    private ScalingType mScalingType;

    // Single reusable toast so successive log messages replace each other.
    private Toast mLogToast;

    private AppRTCAudioManager mAudioManager = null;

    private WebRtcHelper mWebrtcHelper;

    private SignalingParameters mSignalingParameters;

    // 0 = do not inject a start bitrate into the remote SDP
    // (see PeerConnectionClient.setRemoteDescription).
    private int mStartBitrate = 0;

    // Flint device discovery / connection state.
    private FlintDevice mSelectedDevice;
    private FlintManager mApiClient;
    private Flint.Listener mFlingListener;
    private ConnectionCallbacks mConnectionCallbacks;
    private MediaRouter mMediaRouter;
    private MediaRouteSelector mMediaRouteSelector;
    private MediaRouter.Callback mMediaRouterCallback;
    private WebrtcChannel mWebrtcChannel;
    private MediaRouteButton mMediaRouteButton;
    // Number of currently discovered routes; controls cast-button visibility.
    private int mRouteCount = 0;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Immersive full-screen; keep the screen on during the call.
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        getWindow().getDecorView().setSystemUiVisibility(
                View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
                        | View.SYSTEM_UI_FLAG_FULLSCREEN
                        | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);

        setContentView(R.layout.activity_fullscreen);

        // init flint related
        String APPLICATION_ID = "~flint_android_webrtc_demo";
        Flint.FlintApi.setApplicationId(APPLICATION_ID);

        mWebrtcChannel = new MyWebrtcChannel();

        // Discover only routes that support this Flint application id.
        mMediaRouter = MediaRouter.getInstance(getApplicationContext());
        mMediaRouteSelector = new MediaRouteSelector.Builder()
                .addControlCategory(
                        FlintMediaControlIntent
                                .categoryForFlint(APPLICATION_ID)).build();

        mMediaRouterCallback = new MediaRouterCallback();
        mFlingListener = new FlingListener();
        mConnectionCallbacks = new ConnectionCallbacks();

        mIceConnected = false;

        // init views
        mRootView = findViewById(android.R.id.content);
        mEncoderStatView = (TextView) findViewById(R.id.encoder_stat);
        mMenuBar = findViewById(R.id.menubar_fragment);
        mRoomName = (TextView) findViewById(R.id.room_name);
        mVideoView = (GLSurfaceView) findViewById(R.id.glview);

        mMediaRouteButton = (MediaRouteButton) mMenuBar
                .findViewById(R.id.media_route_button);
        mMediaRouteButton.setRouteSelector(mMediaRouteSelector);

        VideoRendererGui.setView(mVideoView);

        // Both renderers start full-screen; updateVideoView() repositions the
        // local preview once the call is up.
        mScalingType = ScalingType.SCALE_ASPECT_FILL;
        mRemoteRender = VideoRendererGui.create(0, 0, 100, 100, mScalingType,
                false);
        mLocalRender = VideoRendererGui.create(0, 0, 100, 100, mScalingType,
                true);

        // Tapping the video toggles the menu bar / stats overlays.
        mVideoView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                int visibility = mMenuBar.getVisibility() == View.VISIBLE ? View.INVISIBLE
                        : View.VISIBLE;
                mEncoderStatView.setVisibility(visibility);
                mMenuBar.setVisibility(visibility);
                mRoomName.setVisibility(visibility);
                if (visibility == View.VISIBLE) {
                    mEncoderStatView.bringToFront();
                    mMenuBar.bringToFront();
                    mRoomName.bringToFront();
                    mRootView.invalidate();
                }
            }
        });

        ((ImageButton) findViewById(R.id.button_disconnect))
                .setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View view) {
                        logAndToast("Disconnecting call.");
                        disconnect();
                    }
                });

        ((ImageButton) findViewById(R.id.button_switch_camera))
                .setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View view) {
                        if (mPeerConn != null) {
                            mPeerConn.switchCamera();
                        }
                    }
                });

        ((ImageButton) findViewById(R.id.button_toggle_debug))
                .setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View view) {
                        int visibility = mHudView.getVisibility() == View.VISIBLE ? View.INVISIBLE
                                : View.VISIBLE;
                        mHudView.setVisibility(visibility);

                        // use this to send view switch
                        if (mApiClient != null && mApiClient.isConnected()) {
                            mWebrtcChannel.sendSwitchView(mApiClient);
                        }
                    }
                });

        mVideoScalingButton = (ImageButton) findViewById(R.id.button_scaling_mode);
        mVideoScalingButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Toggle between crop-to-fill and letterboxed fit.
                if (mScalingType == ScalingType.SCALE_ASPECT_FILL) {
                    mVideoScalingButton
                            .setBackgroundResource(R.drawable.ic_action_full_screen);
                    mScalingType = ScalingType.SCALE_ASPECT_FIT;
                } else {
                    mVideoScalingButton
                            .setBackgroundResource(R.drawable.ic_action_return_from_full_screen);
                    mScalingType = ScalingType.SCALE_ASPECT_FILL;
                }
                updateVideoView();
            }
        });

        // Semi-transparent debug HUD, hidden until toggled.
        mHudView = new TextView(this);
        mHudView.setTextColor(Color.BLACK);
        mHudView.setBackgroundColor(Color.WHITE);
        mHudView.setAlpha(0.4f);
        mHudView.setTextSize(TypedValue.COMPLEX_UNIT_PT, 5);
        mHudView.setVisibility(View.INVISIBLE);
        addContentView(mHudView, mHudLayout);

        // Create an audio manager that will take care of audio routing,
        // audio modes, audio device enumeration etc.
        mAudioManager = AppRTCAudioManager.create(this);

        // ready to init webrtc params
        mWebrtcHelper = new WebRtcHelper(this);
        mWebrtcHelper.initParams();
    }

    @Override
    protected void onStart() {
        super.onStart();
        // Actively scan for Flint receivers while the activity is visible.
        mMediaRouter.addCallback(mMediaRouteSelector, mMediaRouterCallback,
                MediaRouter.CALLBACK_FLAG_PERFORM_ACTIVE_SCAN);
    }

    @Override
    protected void onResume() {
        super.onResume();
    }

    @Override
    protected void onPause() {
        super.onPause();
    }

    @Override
    protected void onStop() {
        // Drop the receiver connection and stop scanning when backgrounded.
        setSelectedDevice(null);
        mMediaRouter.removeCallback(mMediaRouterCallback);
        super.onStop();
    }

    @Override
    protected void onDestroy() {
        disconnect();
        super.onDestroy();
    }

    /**
     * Called when the options menu is first created.
     */
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        super.onCreateOptionsMenu(menu);
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
        case R.id.action_stop:
            // Stop the receiver-side application.
            stopApplication();
            break;
        }

        return true;
    }

    /** Simple fragment hosting the menu-bar layout. */
    public static class MenuBarFragment extends Fragment {
        @Override
        public View onCreateView(LayoutInflater inflater, ViewGroup container,
                Bundle savedInstanceState) {
            return inflater
                    .inflate(R.layout.fragment_menubar, container, false);
        }
    }

    /**
     * SignalingEvents: room/ICE parameters are ready. Creates (or replaces)
     * the peer connection and, if a receiver is already connected, starts
     * the call by creating an offer.
     */
    @Override
    public void onParamInitDone(SignalingParameters params) {

        logAndToast("onInitDone...");

        if (mAudioManager != null) {
            // Store existing audio settings and change audio mode to
            // MODE_IN_COMMUNICATION for best possible VoIP performance.
            logAndToast("Initializing the audio manager...");
            mAudioManager.init();
        }
        mSignalingParameters = params;
        abortUnless(PeerConnectionFactory.initializeAndroidGlobals(this, true,
                true, true, VideoRendererGui.getEGLContext()),
                "Failed to initializeAndroidGlobals");
        logAndToast("Creating peer connection...");
        // Replace any previous connection.
        if (mPeerConn != null) {
            mPeerConn.close();
            mPeerConn = null;
        }

        mPeerConn = new PeerConnectionClient(this, mLocalRender, mRemoteRender,
                mSignalingParameters, this, mStartBitrate);
        /*
         * if (mPeerConn.isHDVideo()) {
         * setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE); }
         * else {
         * setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED);
         * }
         */
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED);
        if (mApiClient != null && mApiClient.isConnected()) {
            mPeerConn.createOffer();
        }
        // if (mApiClient != null && mApiClient.isConnected()) {
        // mWebrtcChannel.sendHello(mApiClient);
        // }
    }

    /**
     * PeerConnectionEvents: local SDP is ready; forward it to the receiver
     * as an offer (when initiating) or as an answer.
     */
    @Override
    public void onLocalDescription(SessionDescription sdp) {
        logAndToast("onLocalDescription...");

        if (mWebrtcHelper != null) {
            logAndToast("Sending " + sdp.type + " ...");
            if (mSignalingParameters.initiator) {
                mWebrtcChannel.sendOfferSdp(mApiClient, sdp);
            } else {
                mWebrtcChannel.sendAnswerSdp(mApiClient, sdp);
            }
        }
    }

    /** PeerConnectionEvents: forward a local ICE candidate to the receiver. */
    @Override
    public void onIceCandidate(IceCandidate candidate) {
        logAndToast("onIceCandidate...");

        if (mWebrtcChannel != null) {
            mWebrtcChannel.sendLocalIceCandidate(mApiClient, candidate);
        }
    }

    /** PeerConnectionEvents: ICE is up; re-layout the video views. */
    @Override
    public void onIceConnected() {
        logAndToast("onIceConnected...");

        // logAndToast("ICE connected");
        mIceConnected = true;
        updateVideoView();
    }

    /** PeerConnectionEvents: ICE lost; restore full-screen local preview. */
    @Override
    public void onIceDisconnected() {
        logAndToast("onIceDisconnected...");

        mIceConnected = false;
        mStreamAdded = false;
        updateVideoView();
    }

    /** PeerConnectionEvents: remote media stream arrived. */
    @Override
    public void onAddStream(MediaStream stream) {
        logAndToast("onAddStream...");
        mStreamAdded = true;
    }

    /** PeerConnectionEvents: unrecoverable peer connection error (log only). */
    @Override
    public void onPeerConnectionError(String description) {
        logAndToast("onPeerConnectionError...");
    }

    /**
     * Flint message channel carrying JSON signaling messages from the
     * receiver: "candidate", "offer"/"answer", and "bye".
     */
    private class MyWebrtcChannel extends WebrtcChannel {
        public void onMessageReceived(FlintDevice flingDevice,
                String namespace, String message) {
            Log.e(TAG, "WebrtcChannel: message received:" + message);
            try {
                JSONObject json = new JSONObject(message);
                String type = (String) json.get("type");
                if (type.equals("candidate")) {
                    IceCandidate candidate = new IceCandidate(
                            (String) json.get("sdpMid"),
                            json.getInt("sdpMLineIndex"),
                            (String) json.get("candidate"));
                    Log.e(TAG, "onMessageReceived:type[" + type + "]sdpMid[" + (String) json.get("sdpMid") + "]sdpMLineIndex[" + json.getInt("sdpMLineIndex") + "]candidate[" + (String) json.get("candidate") + "]");
                    onRemoteIceCandidate(candidate);
                } else if (type.equals("answer") || type.equals("offer")) {
                    SessionDescription sdp = new SessionDescription(
                            SessionDescription.Type.fromCanonicalForm(type),
                            (String) json.get("sdp"));
                    Log.e(TAG, "onMessageReceived:type[" + type + "]sdp["
                            + (String) json.get("sdp") + "]");
                    onRemoteDescription(sdp);
                } else if (type.equals("bye")) {
                    String data = (String) json.get("data");
                    onChannelClose(data);
                } else {
                    onChannelError("Unexpected channel message: " + message);
                }
            } catch (JSONException e) {
                onChannelError("Channel message JSON parsing error: "
                        + e.toString());
            }
        }
    }

    /** Shows/hides the cast button as routes appear and reacts to selection. */
    private class MediaRouterCallback extends MediaRouter.Callback {
        @Override
        public void onRouteAdded(MediaRouter router, RouteInfo route) {
            Log.d(TAG, "onRouteAdded");
            if (++mRouteCount == 1) {
                // Show the button when a device is discovered.
                mMediaRouteButton.setVisibility(View.VISIBLE);
            }
        }

        @Override
        public void onRouteRemoved(MediaRouter router, RouteInfo route) {
            Log.d(TAG, "onRouteRemoved");
            if (--mRouteCount == 0) {
                // Hide the button if there are no devices discovered.
                mMediaRouteButton.setVisibility(View.GONE);
            }
        }

        @Override
        public void onRouteSelected(MediaRouter router, RouteInfo route) {
            Log.d(TAG, "onRouteSelected: " + route);
            MainActivity.this.onRouteSelected(route);
        }

        @Override
        public void onRouteUnselected(MediaRouter router, RouteInfo route) {
            Log.d(TAG, "onRouteUnselected: " + route);
            MainActivity.this.onRouteUnselected(route);
        }
    }

    /** Handles receiver-side application disconnects. */
    private class FlingListener extends Flint.Listener {
        @Override
        public void onApplicationDisconnected(int statusCode) {
            Log.d(TAG, "Flint.Listener.onApplicationDisconnected: "
                    + statusCode);

            mSelectedDevice = null;
            mMediaRouter.selectRoute(mMediaRouter.getDefaultRoute());

            if (mApiClient == null) {
                return;
            }

            // Stop listening for signaling messages from this receiver.
            try {
                Flint.FlintApi.removeMessageReceivedCallbacks(mApiClient,
                        mWebrtcChannel.getNamespace());
            } catch (IOException e) {
                Log.w(TAG, "Exception while launching application", e);
            }
        }
    }

    /**
     * Called when a user selects a route.
     */
    private void onRouteSelected(RouteInfo route) {
        Log.d(TAG, "onRouteSelected: " + route.getName());

        FlintDevice device = FlintDevice.getFromBundle(route.getExtras());
        setSelectedDevice(device);
    }

    /**
     * Called when a user unselects a route.
     */
    private void onRouteUnselected(RouteInfo route) {
        if (route != null) {
            Log.d(TAG, "onRouteUnselected: " + route.getName());
        }
        setSelectedDevice(null);
    }

    /**
     * Stop receiver application.
     */
    public void stopApplication() {
        if (mApiClient == null || !mApiClient.isConnected()) {
            return;
        }

        Flint.FlintApi.stopApplication(mApiClient).setResultCallback(
                new ResultCallback() {
                    @Override
                    public void onResult(Status result) {
                        if (result.isSuccess()) {
                            //
                        }
                    }
                });
    }

    /**
     * Selects (or clears, when {@code device} is null) the target Flint
     * device, (re)connecting or tearing down the API client accordingly.
     */
    private void setSelectedDevice(FlintDevice device) {
        Log.d(TAG, "setSelectedDevice: " + device);
        mSelectedDevice = device;

        if (mSelectedDevice != null) {
            try {
                disconnectApiClient();
                connectApiClient();
            } catch (IllegalStateException e) {
                Log.w(TAG, "Exception while connecting API client", e);
                disconnectApiClient();
            }
        } else {
            if (mApiClient != null) {
                if (mApiClient.isConnected()) {
                    // Tell the receiver we are hanging up first.
                    mWebrtcChannel.sendBye(mApiClient);
                }

                // stopApplication();

                disconnectApiClient();
            }

            mMediaRouter.selectRoute(mMediaRouter.getDefaultRoute());
        }
    }

    /** Builds a FlintManager for the selected device and starts connecting. */
    private void connectApiClient() {
        Flint.FlintOptions apiOptions = Flint.FlintOptions.builder(
                mSelectedDevice, mFlingListener).build();
        mApiClient = new FlintManager.Builder(this)
                .addApi(Flint.API, apiOptions)
                .addConnectionCallbacks(mConnectionCallbacks).build();
        mApiClient.connect();
    }

    /** Disconnects and forgets the Flint API client, if any. */
    private void disconnectApiClient() {
        if (mApiClient != null) {
            mApiClient.disconnect();
            mApiClient = null;
        }
    }

    /** Launches the receiver application once the API client is connected. */
    private class ConnectionCallbacks implements
            FlintManager.ConnectionCallbacks {
        @Override
        public void onConnectionSuspended(int cause) {
            Log.d(TAG, "ConnectionCallbacks.onConnectionSuspended");
        }

        @Override
        public void onConnected(Bundle connectionHint) {
            Log.d(TAG, "ConnectionCallbacks.onConnected");
            Flint.FlintApi.launchApplication(mApiClient, APP_URL)
                    .setResultCallback(
                            new ApplicationConnectionResultCallback());
        }

        @Override
        public void onConnectionFailed(ConnectionResult result) {
            Log.d(TAG, "ConnectionFailedListener.onConnectionFailed");
            setSelectedDevice(null);
        }
    }

    /**
     * Handles the result of launching the receiver application: registers
     * the signaling channel and (re)initializes the WebRTC parameters.
     */
    private final class ApplicationConnectionResultCallback implements
            ResultCallback {
        @Override
        public void onResult(ApplicationConnectionResult result) {
            Status status = result.getStatus();
            ApplicationMetadata appMetaData = result.getApplicationMetadata();

            if (status.isSuccess()) {
                Log.d(TAG, "ConnectionResultCallback: " + appMetaData.getData());
                try {
                    Flint.FlintApi.setMessageReceivedCallbacks(mApiClient,
                            mWebrtcChannel.getNamespace(), mWebrtcChannel);

                    mWebrtcHelper.initParams(); // start another connection?
                } catch (IOException e) {
                    Log.w(TAG, "Exception while launching application", e);
                }
            } else {
                Log.d(TAG,
                        "ConnectionResultCallback. Unable to launch the game. statusCode: "
                                + status.getStatusCode());
            }
        }
    }

    /** Logs to logcat and shows the message in the single reusable toast. */
    private void logAndToast(String msg) {
        Log.e(TAG, msg);
        if (mLogToast != null) {
            mLogToast.cancel();
        }
        mLogToast = Toast.makeText(this, msg, Toast.LENGTH_SHORT);
        mLogToast.show();
    }

    /** Tears down signaling, the peer connection and audio, then finishes. */
    private void disconnect() {
        if (mWebrtcHelper != null) {
            mWebrtcHelper.disconnect();
            mWebrtcHelper = null;
        }
        if (mPeerConn != null) {
            mPeerConn.close();
            mPeerConn = null;
        }
        if (mAudioManager != null) {
            mAudioManager.close();
            mAudioManager = null;
        }
        finish();
    }

    /**
     * Repositions the renderers: once ICE is connected and a remote stream
     * was added, the local preview shrinks to the bottom-right corner;
     * otherwise it fills the screen.
     */
    private void updateVideoView() {
        VideoRendererGui.update(mRemoteRender, 0, 0, 100, 100, mScalingType);
        if (mIceConnected && mStreamAdded) {
            VideoRendererGui.update(mLocalRender, 70, 70, 28, 28,
                    ScalingType.SCALE_ASPECT_FIT);
        } else {
            VideoRendererGui.update(mLocalRender, 0, 0, 100, 100, mScalingType);
        }
    }

    /** Applies the remote SDP; when not initiating, also creates an answer. */
    private void onRemoteDescription(SessionDescription sdp) {
        logAndToast("onRemoteDescription... " + sdp);

        if (mPeerConn == null) {
            Log.e(TAG, "onRemoteDescription: peer is null? ignore!");
            return;
        }
        logAndToast("Received remote " + sdp.type + " ...");
        mPeerConn.setRemoteDescription(sdp);
        if (!mSignalingParameters.initiator) {
            logAndToast("Creating ANSWER...");
            // Create answer. Answer SDP will be sent to offering client in
            // PeerConnectionEvents.onLocalDescription event.
            mPeerConn.createAnswer();
        }
    }

    /** Hands a remote ICE candidate to the peer connection, if one exists. */
    private void onRemoteIceCandidate(IceCandidate candidate) {

        logAndToast("onRemoteIceCandidate: " + candidate + " ...");
        if (mPeerConn != null) {
            mPeerConn.addRemoteIceCandidate(candidate);
        }
    }

    /** The receiver sent "bye": reset call state and the video layout. */
    private void onChannelClose(String description) {
        logAndToast("onChannelClose...");

        mIceConnected = false;
        mStreamAdded = false;
        updateVideoView();
    }

    /** Signaling channel error: log it and unselect the current route. */
    private void onChannelError(String description) {
        logAndToast("onChannelError...: " + description);

        MainActivity.this.onRouteUnselected(null);
    }

    // Poor-man's assert(): die with |msg| unless |condition| is true.
    private static void abortUnless(boolean condition, String msg) {
        if (!condition) {
            throw new RuntimeException(msg);
        }
    }

}
711 |
--------------------------------------------------------------------------------
/sender/src/com/infthink/demo/webrtc/PeerConnectionClient.java:
--------------------------------------------------------------------------------
1 | /*
2 | * libjingle
3 | * Copyright 2014, Google Inc.
4 | *
5 | * Redistribution and use in source and binary forms, with or without
6 | * modification, are permitted provided that the following conditions are met:
7 | *
8 | * 1. Redistributions of source code must retain the above copyright notice,
9 | * this list of conditions and the following disclaimer.
10 | * 2. Redistributions in binary form must reproduce the above copyright notice,
11 | * this list of conditions and the following disclaimer in the documentation
12 | * and/or other materials provided with the distribution.
13 | * 3. The name of the author may not be used to endorse or promote products
14 | * derived from this software without specific prior written permission.
15 | *
16 | * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
17 | * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 | * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
19 | * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 | * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
21 | * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
22 | * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
23 | * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
24 | * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
25 | * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 | */
27 |
28 | package com.infthink.demo.webrtc;
29 |
30 | import android.app.Activity;
31 | import android.util.Log;
32 |
33 | import org.webrtc.DataChannel;
34 | import org.webrtc.IceCandidate;
35 | import org.webrtc.Logging;
36 | import org.webrtc.MediaConstraints;
37 | import org.webrtc.MediaStream;
38 | import org.webrtc.MediaStreamTrack;
39 | import org.webrtc.PeerConnection;
40 | import org.webrtc.MediaConstraints.KeyValuePair;
41 | import org.webrtc.PeerConnection.IceConnectionState;
42 | import org.webrtc.PeerConnectionFactory;
43 | import org.webrtc.SdpObserver;
44 | import org.webrtc.SessionDescription;
45 | import org.webrtc.StatsObserver;
46 | import org.webrtc.VideoCapturer;
47 | import org.webrtc.VideoRenderer;
48 | import org.webrtc.VideoSource;
49 | import org.webrtc.VideoTrack;
50 |
51 | import com.infthink.demo.webrtc.WebRtcHelper.SignalingParameters;
52 |
53 | import java.util.EnumSet;
54 | import java.util.LinkedList;
55 | import java.util.regex.Matcher;
56 | import java.util.regex.Pattern;
57 |
/**
 * Wraps a single WebRTC {@link PeerConnection}: creates the factory, the
 * local media stream (camera video and optional microphone audio), and
 * handles offer/answer creation plus ICE candidate queueing. Public
 * operations are posted to the UI thread via {@code activity.runOnUiThread}.
 */
public class PeerConnectionClient {
    private static final String TAG = "flint_webrtc";
    // Track ids follow the AppRTC "ARDAMS" naming convention.
    public static final String VIDEO_TRACK_ID = "ARDAMSv0";
    public static final String AUDIO_TRACK_ID = "ARDAMSa0";

    // Activity used to marshal all PeerConnection work onto the UI thread.
    private final Activity activity;
    private PeerConnectionFactory factory;
    private PeerConnection pc;
    private VideoSource videoSource;
    // True while the camera capture has been paused via stopVideoSource().
    private boolean videoSourceStopped;
    private final PCObserver pcObserver = new PCObserver();
    private final SDPObserver sdpObserver = new SDPObserver();
    private final VideoRenderer.Callbacks localRender;
    private final VideoRenderer.Callbacks remoteRender;
    // Queued remote ICE candidates are consumed only after both local and
    // remote descriptions are set. Similarly local ICE candidates are sent to
    // remote peer after both local and remote description are set.
    private LinkedList queuedRemoteCandidates = null;
    private MediaConstraints sdpMediaConstraints;
    private MediaConstraints videoConstraints;
    // Listener notified of SDP/ICE/stream events.
    private PeerConnectionEvents events;
    // If > 0, injected into the remote SDP by setRemoteDescription().
    private int startBitrate;
    private boolean isInitiator;
    private boolean useFrontFacingCamera = true;
    private SessionDescription localSdp = null; // either offer or answer SDP
    private MediaStream mediaStream = null;
84 |
    /**
     * Builds the factory, the peer connection and the local media stream.
     *
     * @param activity used to run PeerConnection operations on the UI thread
     * @param localRender renderer for the local camera preview
     * @param remoteRender renderer for the remote stream
     * @param signalingParameters ICE servers and media constraints from signaling
     * @param events listener for SDP/ICE/stream callbacks
     * @param startBitrate if &gt; 0, injected into the remote SDP as start bitrate
     */
    public PeerConnectionClient(Activity activity,
            VideoRenderer.Callbacks localRender,
            VideoRenderer.Callbacks remoteRender,
            SignalingParameters signalingParameters,
            PeerConnectionEvents events, int startBitrate) {
        this.activity = activity;
        this.localRender = localRender;
        this.remoteRender = remoteRender;
        this.events = events;
        this.startBitrate = startBitrate;
        isInitiator = signalingParameters.initiator;
        // Buffer remote candidates until the remote description is applied.
        queuedRemoteCandidates = new LinkedList();

        // Offer to receive video but not audio from the remote side.
        sdpMediaConstraints = new MediaConstraints();
        sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
                "OfferToReceiveAudio", "false"));
        sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
                "OfferToReceiveVideo", "true"));
        videoConstraints = signalingParameters.videoConstraints;

        factory = new PeerConnectionFactory();
        MediaConstraints pcConstraints = signalingParameters.pcConstraints;
        /*
         * pcConstraints.optional.add( new
         * MediaConstraints.KeyValuePair("RtpDataChannels", "true"));
         */
        pc = factory.createPeerConnection(signalingParameters.iceServers,
                pcConstraints, pcObserver);
        //isInitiator = true;

        // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging.
        // NOTE: this _must_ happen while |factory| is alive!
        // Logging.enableTracing(
        // "logcat:",
        // EnumSet.of(Logging.TraceLevel.TRACE_ALL),
        // Logging.Severity.LS_SENSITIVE);

        // Attach camera video and (optionally) microphone audio tracks.
        mediaStream = factory.createLocalMediaStream("ARDAMS");
        if (videoConstraints != null) {
            mediaStream.addTrack(createVideoTrack(useFrontFacingCamera));
        }

        if (signalingParameters.audioConstraints != null) {
            mediaStream
                    .addTrack(factory.createAudioTrack(
                            AUDIO_TRACK_ID,
                            factory.createAudioSource(signalingParameters.audioConstraints)));
        }
        pc.addStream(mediaStream);
    }
135 |
136 | public boolean isHDVideo() {
137 | if (videoConstraints == null) {
138 | return false;
139 | }
140 | int minWidth = 0;
141 | int minHeight = 0;
142 | for (KeyValuePair keyValuePair : videoConstraints.mandatory) {
143 | if (keyValuePair.getKey().equals("minWidth")) {
144 | try {
145 | minWidth = Integer.parseInt(keyValuePair.getValue());
146 | } catch (NumberFormatException e) {
147 | Log.e(TAG,
148 | "Can not parse video width from video constraints");
149 | }
150 | } else if (keyValuePair.getKey().equals("minHeight")) {
151 | try {
152 | minHeight = Integer.parseInt(keyValuePair.getValue());
153 | } catch (NumberFormatException e) {
154 | Log.e(TAG,
155 | "Can not parse video height from video constraints");
156 | }
157 | }
158 | }
159 | if (minWidth * minHeight >= 1280 * 720) {
160 | return true;
161 | } else {
162 | return false;
163 | }
164 | }
165 |
166 | public boolean getStats(StatsObserver observer, MediaStreamTrack track) {
167 | return pc.getStats(observer, track);
168 | }
169 |
170 | public void createOffer() {
171 | activity.runOnUiThread(new Runnable() {
172 | public void run() {
173 | if (pc != null) {
174 | isInitiator = true;
175 | pc.createOffer(sdpObserver, sdpMediaConstraints);
176 | }
177 | }
178 | });
179 | }
180 |
181 | public void createAnswer() {
182 | activity.runOnUiThread(new Runnable() {
183 | public void run() {
184 | if (pc != null) {
185 | isInitiator = false;
186 | pc.createAnswer(sdpObserver, sdpMediaConstraints);
187 | }
188 | }
189 | });
190 | }
191 |
192 | public void addRemoteIceCandidate(final IceCandidate candidate) {
193 | activity.runOnUiThread(new Runnable() {
194 | public void run() {
195 | if (pc != null) {
196 | if (queuedRemoteCandidates != null) {
197 | Log.e(TAG, "add candidate to queue!");
198 | queuedRemoteCandidates.add(candidate);
199 | } else {
200 | Log.e(TAG, "addIceCandidate:" + candidate);
201 | pc.addIceCandidate(candidate);
202 | }
203 | }
204 | }
205 | });
206 | }
207 |
208 | public void setRemoteDescription(final SessionDescription sdp) {
209 | activity.runOnUiThread(new Runnable() {
210 | public void run() {
211 | if (pc != null) {
212 | String sdpDescription = preferISAC(sdp.description);
213 | if (startBitrate > 0) {
214 | sdpDescription = setStartBitrate(sdpDescription,
215 | startBitrate);
216 | }
217 | Log.e(TAG, "Set remote SDP.[" + sdpDescription+ "]");
218 | SessionDescription sdpRemote = new SessionDescription(
219 | sdp.type, sdpDescription);
220 | pc.setRemoteDescription(sdpObserver, sdpRemote);
221 | }
222 | }
223 | });
224 | }
225 |
226 | public void stopVideoSource() {
227 | if (videoSource != null) {
228 | Log.d(TAG, "Stop video source.");
229 | videoSource.stop();
230 | videoSourceStopped = true;
231 | }
232 | }
233 |
234 | public void startVideoSource() {
235 | if (videoSource != null && videoSourceStopped) {
236 | Log.d(TAG, "Restart video source.");
237 | videoSource.restart();
238 | videoSourceStopped = false;
239 | }
240 | }
241 |
    /**
     * Disposes the peer connection, video source and factory — in that
     * order, on the UI thread. Safe to call when already closed.
     */
    public void close() {
        activity.runOnUiThread(new Runnable() {
            public void run() {
                Log.d(TAG, "Closing peer connection.");
                if (pc != null) {
                    pc.dispose();
                    pc = null;
                }
                if (videoSource != null) {
                    videoSource.dispose();
                    videoSource = null;
                }
                // Dispose the factory last, after the objects it created.
                if (factory != null) {
                    factory.dispose();
                    factory = null;
                }
            }
        });
    }
261 |
    /**
     * SDP/ICE ready callbacks.
     */
    public static interface PeerConnectionEvents {
        /**
         * Callback fired once offer is created and local SDP is set.
         */
        public void onLocalDescription(final SessionDescription sdp);

        /**
         * Callback fired once local Ice candidate is generated.
         */
        public void onIceCandidate(final IceCandidate candidate);

        /**
         * Callback fired once connection is established (IceConnectionState is
         * CONNECTED).
         */
        public void onIceConnected();

        /**
         * Callback fired once connection is closed (IceConnectionState is
         * DISCONNECTED).
         */
        public void onIceDisconnected();

        /**
         * Callback fired once peer connection error happened.
         */
        public void onPeerConnectionError(String description);

        /**
         * Callback fired when the remote media stream is added.
         */
        public void onAddStream(final MediaStream stream);
    }
298 |
299 | private void reportError(final String errorMessage) {
300 | activity.runOnUiThread(new Runnable() {
301 | public void run() {
302 | events.onPeerConnectionError(errorMessage);
303 | }
304 | });
305 | }
306 |
307 | // Cycle through likely device names for the camera and return the first
308 | // capturer that works, or crash if none do.
309 | private VideoCapturer getVideoCapturer(boolean useFrontFacing) {
310 | String[] cameraFacing = { "front", "back" };
311 | if (!useFrontFacing) {
312 | cameraFacing[0] = "back";
313 | cameraFacing[1] = "front";
314 | }
315 | for (String facing : cameraFacing) {
316 | int[] cameraIndex = { 0, 1 };
317 | int[] cameraOrientation = { 0, 90, 180, 270 };
318 | for (int index : cameraIndex) {
319 | for (int orientation : cameraOrientation) {
320 | String name = "Camera " + index + ", Facing " + facing
321 | + ", Orientation " + orientation;
322 | VideoCapturer capturer = VideoCapturer.create(name);
323 | if (capturer != null) {
324 | Log.d(TAG, "Using camera: " + name);
325 | return capturer;
326 | }
327 | }
328 | }
329 | }
330 | reportError("Failed to open capturer");
331 | return null;
332 | }
333 |
334 | private VideoTrack createVideoTrack(boolean frontFacing) {
335 | VideoCapturer capturer = getVideoCapturer(frontFacing);
336 | if (videoSource != null) {
337 | videoSource.stop();
338 | videoSource.dispose();
339 | }
340 |
341 | videoSource = factory.createVideoSource(capturer, videoConstraints);
342 | String trackExtension = frontFacing ? "frontFacing" : "backFacing";
343 | VideoTrack videoTrack = factory.createVideoTrack(VIDEO_TRACK_ID
344 | + trackExtension, videoSource);
345 | videoTrack.addRenderer(new VideoRenderer(localRender));
346 | return videoTrack;
347 | }
348 |
    // Poor-man's assert(): report |msg| through the error callback unless
    // |condition| is true. NOTE(review): despite the "assert" framing this
    // does NOT abort — reportError() only notifies the listener, and the
    // caller continues executing afterwards.
    private void abortUnless(boolean condition, String msg) {
        if (!condition) {
            reportError(msg);
        }
    }
355 |
356 | private static String setStartBitrate(String sdpDescription, int bitrateKbps) {
357 | String[] lines = sdpDescription.split("\r\n");
358 | int lineIndex = -1;
359 | String vp8RtpMap = null;
360 | Pattern vp8Pattern = Pattern
361 | .compile("^a=rtpmap:(\\d+) VP8/90000[\r]?$");
362 | for (int i = 0; i < lines.length; i++) {
363 | Matcher vp8Matcher = vp8Pattern.matcher(lines[i]);
364 | if (vp8Matcher.matches()) {
365 | vp8RtpMap = vp8Matcher.group(1);
366 | lineIndex = i;
367 | break;
368 | }
369 | }
370 | if (vp8RtpMap == null) {
371 | Log.e(TAG, "No rtpmap for VP8 codec");
372 | return sdpDescription;
373 | }
374 | Log.d(TAG, "Found rtpmap " + vp8RtpMap + " at " + lines[lineIndex]);
375 | StringBuilder newSdpDescription = new StringBuilder();
376 | for (int i = 0; i < lines.length; i++) {
377 | newSdpDescription.append(lines[i]).append("\r\n");
378 | if (i == lineIndex) {
379 | String bitrateSet = "a=fmtp:" + vp8RtpMap
380 | + " x-google-start-bitrate=" + bitrateKbps;
381 | Log.d(TAG, "Add remote SDP line: " + bitrateSet);
382 | newSdpDescription.append(bitrateSet).append("\r\n");
383 | }
384 | }
385 | return newSdpDescription.toString();
386 | }
387 |
388 | // Mangle SDP to prefer ISAC/16000 over any other audio codec.
389 | private static String preferISAC(String sdpDescription) {
390 | String[] lines = sdpDescription.split("\r\n");
391 | int mLineIndex = -1;
392 | String isac16kRtpMap = null;
393 | Pattern isac16kPattern = Pattern
394 | .compile("^a=rtpmap:(\\d+) ISAC/16000[\r]?$");
395 | for (int i = 0; (i < lines.length)
396 | && (mLineIndex == -1 || isac16kRtpMap == null); ++i) {
397 | if (lines[i].startsWith("m=audio ")) {
398 | mLineIndex = i;
399 | continue;
400 | }
401 | Matcher isac16kMatcher = isac16kPattern.matcher(lines[i]);
402 | if (isac16kMatcher.matches()) {
403 | isac16kRtpMap = isac16kMatcher.group(1);
404 | continue;
405 | }
406 | }
407 | if (mLineIndex == -1) {
408 | Log.e(TAG, "No m=audio line, so can't prefer iSAC");
409 | return sdpDescription;
410 | }
411 | if (isac16kRtpMap == null) {
412 | Log.d(TAG, "No ISAC/16000 line, so can't prefer iSAC");
413 | return sdpDescription;
414 | }
415 | String[] origMLineParts = lines[mLineIndex].split(" ");
416 | StringBuilder newMLine = new StringBuilder();
417 | int origPartIndex = 0;
418 | // Format is: m= ...
419 | newMLine.append(origMLineParts[origPartIndex++]).append(" ");
420 | newMLine.append(origMLineParts[origPartIndex++]).append(" ");
421 | newMLine.append(origMLineParts[origPartIndex++]).append(" ");
422 | newMLine.append(isac16kRtpMap);
423 | for (; origPartIndex < origMLineParts.length; ++origPartIndex) {
424 | if (!origMLineParts[origPartIndex].equals(isac16kRtpMap)) {
425 | newMLine.append(" ").append(origMLineParts[origPartIndex]);
426 | }
427 | }
428 | lines[mLineIndex] = newMLine.toString();
429 | StringBuilder newSdpDescription = new StringBuilder();
430 | for (String line : lines) {
431 | newSdpDescription.append(line).append("\r\n");
432 | }
433 | return newSdpDescription.toString();
434 |
435 | }
436 |
437 | private void drainCandidates() {
438 | if (queuedRemoteCandidates != null) {
439 | Log.e(TAG, "Add " + queuedRemoteCandidates.size()
440 | + " remote candidates");
441 | for (IceCandidate candidate : queuedRemoteCandidates) {
442 | pc.addIceCandidate(candidate);
443 | }
444 | queuedRemoteCandidates = null;
445 | }
446 | }
447 |
448 | public void switchCamera() {
449 | if (videoConstraints == null)
450 | return; // No video is sent.
451 |
452 | if (pc.signalingState() != PeerConnection.SignalingState.STABLE) {
453 | Log.e(TAG, "Switching camera during negotiation is not handled.");
454 | return;
455 | }
456 |
457 | pc.removeStream(mediaStream);
458 | VideoTrack currentTrack = mediaStream.videoTracks.get(0);
459 | mediaStream.removeTrack(currentTrack);
460 |
461 | String trackId = currentTrack.id();
462 | // On Android, there can only be one camera open at the time and we
463 | // need to release our implicit references to the videoSource before the
464 | // PeerConnectionFactory is released. Since createVideoTrack creates a
465 | // new
466 | // videoSource and frees the old one, we need to release the track here.
467 | currentTrack.dispose();
468 |
469 | useFrontFacingCamera = !useFrontFacingCamera;
470 | VideoTrack newTrack = createVideoTrack(useFrontFacingCamera);
471 | mediaStream.addTrack(newTrack);
472 | pc.addStream(mediaStream);
473 |
474 | SessionDescription remoteDesc = pc.getRemoteDescription();
475 | if (localSdp == null || remoteDesc == null) {
476 | Log.d(TAG, "Switching camera before the negotiation started.");
477 | return;
478 | }
479 |
480 | localSdp = new SessionDescription(localSdp.type,
481 | localSdp.description.replaceAll(trackId, newTrack.id()));
482 |
483 | if (isInitiator) {
484 | pc.setLocalDescription(new SwitchCameraSdbObserver(), localSdp);
485 | pc.setRemoteDescription(new SwitchCameraSdbObserver(), remoteDesc);
486 | } else {
487 | pc.setRemoteDescription(new SwitchCameraSdbObserver(), remoteDesc);
488 | pc.setLocalDescription(new SwitchCameraSdbObserver(), localSdp);
489 | }
490 | }
491 |
492 | // Implementation detail: observe ICE & stream changes and react
493 | // accordingly.
494 | private class PCObserver implements PeerConnection.Observer {
495 | @Override
496 | public void onIceCandidate(final IceCandidate candidate) {
497 | activity.runOnUiThread(new Runnable() {
498 | public void run() {
499 | events.onIceCandidate(candidate);
500 | }
501 | });
502 | }
503 |
504 | @Override
505 | public void onSignalingChange(PeerConnection.SignalingState newState) {
506 | Log.d(TAG, "SignalingState: " + newState);
507 | }
508 |
509 | @Override
510 | public void onIceConnectionChange(
511 | PeerConnection.IceConnectionState newState) {
512 | Log.d(TAG, "IceConnectionState: " + newState);
513 | if (newState == IceConnectionState.CONNECTED) {
514 | activity.runOnUiThread(new Runnable() {
515 | public void run() {
516 | events.onIceConnected();
517 | }
518 | });
519 | } else if (newState == IceConnectionState.DISCONNECTED) {
520 | activity.runOnUiThread(new Runnable() {
521 | public void run() {
522 | events.onIceDisconnected();
523 | }
524 | });
525 | } else if (newState == IceConnectionState.FAILED) {
526 | reportError("ICE connection failed.");
527 | }
528 | }
529 |
530 | @Override
531 | public void onIceGatheringChange(
532 | PeerConnection.IceGatheringState newState) {
533 | Log.d(TAG, "IceGatheringState: " + newState);
534 | }
535 |
536 | @Override
537 | public void onAddStream(final MediaStream stream) {
538 | RuntimeException e = new RuntimeException();
539 | e.printStackTrace();
540 |
541 | activity.runOnUiThread(new Runnable() {
542 | public void run() {
543 | abortUnless(stream.audioTracks.size() <= 1
544 | && stream.videoTracks.size() <= 1,
545 | "Weird-looking stream: " + stream);
546 | Log.e(TAG, "onAddStream 1: " + stream.videoTracks.size());
547 | if (stream.videoTracks.size() == 1) {
548 | Log.e(TAG, "onAddStream 2");
549 | stream.videoTracks.get(0).addRenderer(
550 | new VideoRenderer(remoteRender));
551 |
552 | events.onAddStream(stream);
553 | }
554 | Log.e(TAG, "onAddStream 3");
555 | }
556 | });
557 | }
558 |
559 | @Override
560 | public void onRemoveStream(final MediaStream stream) {
561 | activity.runOnUiThread(new Runnable() {
562 | public void run() {
563 | stream.videoTracks.get(0).dispose();
564 | }
565 | });
566 | }
567 |
568 | @Override
569 | public void onDataChannel(final DataChannel dc) {
570 | reportError("AppRTC doesn't use data channels, but got: "
571 | + dc.label() + " anyway!");
572 | }
573 |
574 | @Override
575 | public void onRenegotiationNeeded() {
576 | // No need to do anything; AppRTC follows a pre-agreed-upon
577 | // signaling/negotiation protocol.
578 | }
579 | }
580 |
    // Implementation detail: handles offer/answer creation and SDP-set
    // callbacks, and drains queued remote ICE candidates once the answer SDP
    // is in place. The branch ordering below mirrors the offer/answer state
    // machine, so which description was "just set" is inferred from which
    // descriptions exist on |pc|.
    private class SDPObserver implements SdpObserver {
        @Override
        public void onCreateSuccess(final SessionDescription origSdp) {
            // Only one local SDP should ever be created per connection.
            abortUnless(localSdp == null, "multiple SDP create?!?");
            // Mangle the created SDP so ISAC/16000 is the preferred audio
            // codec before it is applied or sent.
            final SessionDescription sdp = new SessionDescription(origSdp.type,
                    preferISAC(origSdp.description));
            localSdp = sdp;
            activity.runOnUiThread(new Runnable() {
                public void run() {
                    if (pc != null) {
                        Log.e(TAG, "Set local SDP from " + sdp.type);
                        pc.setLocalDescription(sdpObserver, sdp);
                    }
                }
            });
        }

        @Override
        public void onSetSuccess() {
            activity.runOnUiThread(new Runnable() {
                public void run() {
                    if (pc == null) {
                        Log.e(TAG, "onSetSuccess: failed for pc is null!");
                        return;
                    }
                    if (isInitiator) {
                        // Offerer: local SDP is set first, then the remote
                        // (answer) SDP after it arrives.
                        if (pc.getRemoteDescription() == null) {
                            // We've just set our local SDP so time to send it.
                            Log.e(TAG, "Local SDP set succesfully");
                            events.onLocalDescription(localSdp);
                        } else {
                            // Remote description was just set: flush the ICE
                            // candidates queued while waiting for it.
                            Log.e(TAG, "Remote SDP set succesfully");
                            drainCandidates();
                        }
                    } else {
                        // Answerer: remote (offer) SDP is set first, then the
                        // local (answer) SDP once created.
                        if (pc.getLocalDescription() != null) {
                            // Local answer just set: send it and flush the
                            // queued remote ICE candidates.
                            Log.e(TAG, "Local SDP set succesfully");
                            events.onLocalDescription(localSdp);
                            drainCandidates();
                        } else {
                            // Remote offer just set - do nothing for now -
                            // answer will be created soon.
                            Log.e(TAG, "Remote SDP set succesfully");
                        }
                    }
                }
            });
        }

        @Override
        public void onCreateFailure(final String error) {
            reportError("createSDP error: " + error);
        }

        @Override
        public void onSetFailure(final String error) {
            reportError("setSDP error: " + error);
        }
    }
656 |
    /**
     * Near-no-op SdpObserver used only when re-applying the existing
     * local/remote descriptions after a camera switch (see switchCamera());
     * only set-failures are reported. NOTE(review): "Sdb" in the class name
     * looks like a typo for "Sdp".
     */
    private class SwitchCameraSdbObserver implements SdpObserver {
        @Override
        public void onCreateSuccess(SessionDescription sdp) {
            // Unused: this observer is only passed to set*Description().
        }

        @Override
        public void onSetSuccess() {
            Log.d(TAG, "Camera switch SDP set succesfully");
        }

        @Override
        public void onCreateFailure(final String error) {
            // Unused: this observer is only passed to set*Description().
        }

        @Override
        public void onSetFailure(final String error) {
            reportError("setSDP error while switching camera: " + error);
        }
    }
676 | }
677 |
--------------------------------------------------------------------------------
/sender/src/com/infthink/demo/webrtc/WebRtcHelper.java:
--------------------------------------------------------------------------------
1 | package com.infthink.demo.webrtc;
2 |
3 | import java.io.IOException;
4 | import java.util.LinkedList;
5 | import java.util.List;
6 |
7 | import org.json.JSONArray;
8 | import org.json.JSONException;
9 | import org.json.JSONObject;
10 | import org.webrtc.MediaConstraints;
11 | import org.webrtc.PeerConnection;
12 |
13 | import android.util.Log;
14 |
15 | public class WebRtcHelper {
16 | private static final String TAG = "WebRtcHelper";
17 |
18 | /**
19 | * Struct holding the signaling parameters of an AppRTC room.
20 | */
21 | public static class SignalingParameters {
22 | public final List iceServers;
23 | public final boolean initiator;
24 | public final MediaConstraints pcConstraints;
25 | public final MediaConstraints videoConstraints;
26 | public final MediaConstraints audioConstraints;
27 | public final String offerSdp;
28 |
29 | public SignalingParameters(List iceServers,
30 | boolean initiator, MediaConstraints pcConstraints,
31 | MediaConstraints videoConstraints,
32 | MediaConstraints audioConstraints, String offerSdp) {
33 | this.iceServers = iceServers;
34 | this.initiator = initiator;
35 | this.pcConstraints = pcConstraints;
36 | this.videoConstraints = videoConstraints;
37 | this.audioConstraints = audioConstraints;
38 | this.offerSdp = offerSdp;
39 | }
40 | }
41 |
42 | /**
43 | * Callback interface for messages delivered on signaling channel.
44 | *
45 | * Methods are guaranteed to be invoked on the UI thread of |activity|.
46 | */
47 | public static interface SignalingEvents {
48 | /**
49 | * Callback fired once the room's signaling parameters
50 | * SignalingParameters are extracted.
51 | */
52 | public void onParamInitDone(final SignalingParameters params);
53 | }
54 |
55 | private SignalingEvents events;
56 | private SignalingParameters signalingParameters;
57 |
58 | public WebRtcHelper(SignalingEvents events) {
59 | this.events = events;
60 | }
61 |
62 | public void initParams() {
63 | try {
64 | signalingParameters = initParameters();
65 | events.onParamInitDone(signalingParameters);
66 | } catch (Exception e) {
67 | e.printStackTrace();
68 | }
69 | }
70 |
71 | /**
72 | * Disconnect
73 | */
74 | public void disconnect() {
75 | }
76 |
77 | // Fetches |url| and fishes the signaling parameters out of the JSON.
78 | private SignalingParameters initParameters() throws IOException,
79 | JSONException {
80 | String offerSdp = null;
81 |
82 | boolean initiator = true; // provide offer?
83 |
84 | LinkedList iceServers = iceServersFromPCConfigJSON("");
85 |
86 | MediaConstraints pcConstraints = new MediaConstraints();
87 | pcConstraints.optional.add(new MediaConstraints.KeyValuePair(
88 | "DtlsSrtpKeyAgreement", "true"));
89 |
90 | pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
91 | "OfferToReceiveAudio", "false"));
92 | pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
93 | "OfferToReceiveVideo", "true"));
94 |
95 | MediaConstraints videoConstraints = constraintsFromJSON(getAVConstraints(
96 | "video", "{}"));
97 | Log.d(TAG, "videoConstraints: " + videoConstraints);
98 |
99 | MediaConstraints audioConstraints = constraintsFromJSON(getAVVConstraints(
100 | "audio", "{}"));
101 | audioConstraints = null;
102 | Log.d(TAG, "audioConstraints: " + audioConstraints);
103 |
104 | return new SignalingParameters(iceServers, initiator, pcConstraints,
105 | videoConstraints, audioConstraints, offerSdp);
106 | }
107 |
108 | // Return the constraints specified for |type| of "audio" or "video" in
109 | // |mediaConstraintsString|.
110 | private String getAVConstraints(String type, String mediaConstraintsString)
111 | throws JSONException {
112 | return "{\"mandatory\": { maxWidth: 1280, maxHeight: 720, minWidth: 640, minHeight: 480}, \"optional\": []}";
113 | // return
114 | // "{\"optional\": [{\"minWidth\": \"1280\", \"minHeight\": \"720\"}], \"mandatory\": {}}";
115 | }
116 |
117 | private String getAVVConstraints(String type, String mediaConstraintsString)
118 | throws JSONException {
119 | return "{\"mandatory\": {}, \"optional\": []}";
120 | }
121 |
122 | private MediaConstraints constraintsFromJSON(String jsonString)
123 | throws JSONException {
124 | if (jsonString == null) {
125 | return null;
126 | }
127 | MediaConstraints constraints = new MediaConstraints();
128 | JSONObject json = new JSONObject(jsonString);
129 | JSONObject mandatoryJSON = json.optJSONObject("mandatory");
130 | if (mandatoryJSON != null) {
131 | JSONArray mandatoryKeys = mandatoryJSON.names();
132 | if (mandatoryKeys != null) {
133 | for (int i = 0; i < mandatoryKeys.length(); ++i) {
134 | String key = mandatoryKeys.getString(i);
135 | String value = mandatoryJSON.getString(key);
136 | constraints.mandatory
137 | .add(new MediaConstraints.KeyValuePair(key, value));
138 | }
139 | }
140 | }
141 | JSONArray optionalJSON = json.optJSONArray("optional");
142 | if (optionalJSON != null) {
143 | for (int i = 0; i < optionalJSON.length(); ++i) {
144 | JSONObject keyValueDict = optionalJSON.getJSONObject(i);
145 | String key = keyValueDict.names().getString(0);
146 | String value = keyValueDict.getString(key);
147 | constraints.optional.add(new MediaConstraints.KeyValuePair(key,
148 | value));
149 | }
150 | }
151 | return constraints;
152 | }
153 |
154 | // Return the list of ICE servers described by a WebRTCPeerConnection
155 | // configuration string.
156 | private LinkedList iceServersFromPCConfigJSON(
157 | String pcConfig) throws JSONException {
158 | LinkedList ret = new LinkedList();
159 | ret.add(new
160 | PeerConnection.IceServer("stun:stun.services.mozilla.com"));
161 |
162 | ret.add(new
163 | PeerConnection.IceServer("stun:stun.l.google.com:19302"));
164 | //
165 | ret.add(new
166 | PeerConnection.IceServer("turn:turn.bistri.com:80", "homeo", "homeo"));
167 | //
168 | ret.add(new
169 | PeerConnection.IceServer("turn:turn.anyfirewall.com:443?transport=tcp", "webrtc", "webrtc"));
170 | return ret;
171 | }
172 |
173 | }
174 |
--------------------------------------------------------------------------------
/sender/src/com/infthink/demo/webrtc/WebrtcChannel.java:
--------------------------------------------------------------------------------
1 | package com.infthink.demo.webrtc;
2 |
3 | import org.json.JSONException;
4 | import org.json.JSONObject;
5 | import org.webrtc.IceCandidate;
6 | import org.webrtc.SessionDescription;
7 |
8 | import tv.matchstick.flint.Flint;
9 | import tv.matchstick.flint.FlintDevice;
10 | import tv.matchstick.flint.FlintManager;
11 | import tv.matchstick.flint.ResultCallback;
12 | import tv.matchstick.flint.Status;
13 | import android.util.Log;
14 |
15 | public abstract class WebrtcChannel implements Flint.MessageReceivedCallback {
16 | private static final String TAG = WebrtcChannel.class.getSimpleName();
17 |
18 | private static final String WEBRTC_NAMESPACE = "urn:flint:com.infthink.demo.webrtc";
19 |
20 | protected WebrtcChannel() {
21 | }
22 |
23 | /**
24 | * Returns the namespace for this fling channel.
25 | */
26 | public String getNamespace() {
27 | return WEBRTC_NAMESPACE;
28 | }
29 |
30 | @Override
31 | public void onMessageReceived(FlintDevice flingDevice, String namespace,
32 | String message) {
33 | Log.d(TAG, "onTextMessageReceived: " + message);
34 | }
35 |
36 | private final class SendMessageResultCallback implements
37 | ResultCallback {
38 | String mMessage;
39 |
40 | SendMessageResultCallback(String message) {
41 | mMessage = message;
42 | }
43 |
44 | @Override
45 | public void onResult(Status result) {
46 | if (!result.isSuccess()) {
47 | Log.d(TAG,
48 | "Failed to send message. statusCode: "
49 | + result.getStatusCode() + " message: "
50 | + mMessage);
51 | }
52 | }
53 | }
54 |
55 | /**
56 | * Send local SDP (offer or answer, depending on role) to the other
57 | * participant. Note that it is important to send the output of
58 | * create{Offer,Answer} and not merely the current value of
59 | * getLocalDescription() because the latter may include ICE candidates that
60 | * we might want to filter elsewhere.
61 | */
62 | public void sendOfferSdp(FlintManager apiClient,
63 | final SessionDescription sdp) {
64 | Log.e("flint_webrtc", "Offer[" + sdp.description + "]");
65 | JSONObject json = new JSONObject();
66 | jsonPut(json, "type", "offer");
67 | jsonPut(json, "sdp", sdp.description);
68 | sendMessage(apiClient, json.toString());
69 | }
70 |
71 | public void sendAnswerSdp(FlintManager apiClient,
72 | final SessionDescription sdp) {
73 | JSONObject json = new JSONObject();
74 | jsonPut(json, "type", "answer");
75 | jsonPut(json, "sdp", sdp.description);
76 | sendMessage(apiClient, json.toString());
77 | }
78 |
79 | /**
80 | * Send Ice candidate to the other participant.
81 | */
82 | public void sendLocalIceCandidate(FlintManager apiClient,
83 | final IceCandidate candidate) {
84 | Log.e("flint_webrtc", "sendLocalIceCandidate:sdpMLineIndex[" + candidate.sdpMLineIndex+ "]sdpMid[" + candidate.sdpMid + "]candidate[" +candidate.sdp +"]");
85 |
86 | JSONObject json = new JSONObject();
87 | jsonPut(json, "type", "candidate");
88 | jsonPut(json, "sdpMLineIndex", candidate.sdpMLineIndex);
89 | jsonPut(json, "sdpMid", candidate.sdpMid);
90 | jsonPut(json, "candidate", candidate.sdp);
91 | sendMessage(apiClient, json.toString());
92 | }
93 |
94 | public void sendSwitchView(FlintManager apiClient) {
95 | JSONObject json = new JSONObject();
96 | jsonPut(json, "type", "switchview");
97 | sendMessage(apiClient, json.toString());
98 | }
99 |
100 | public void sendHello(FlintManager apiClient) {
101 | JSONObject json = new JSONObject();
102 | jsonPut(json, "type", "hello");
103 | sendMessage(apiClient, json.toString());
104 | }
105 |
106 | public void sendBye(FlintManager apiClient) {
107 | JSONObject json = new JSONObject();
108 | jsonPut(json, "type", "bye");
109 | sendMessage(apiClient, json.toString());
110 | }
111 |
112 | private static void jsonPut(JSONObject json, String key, Object value) {
113 | try {
114 | json.put(key, value);
115 | } catch (JSONException e) {
116 | throw new RuntimeException(e);
117 | }
118 | }
119 |
120 | private final void sendMessage(FlintManager apiClient, String message) {
121 | Log.d(TAG, "Sending message: (ns=" + WEBRTC_NAMESPACE + ") " + message);
122 | Flint.FlintApi.sendMessage(apiClient, WEBRTC_NAMESPACE, message)
123 | .setResultCallback(new SendMessageResultCallback(message));
124 | }
125 | }
126 |
--------------------------------------------------------------------------------