├── .gradle ├── 5.4.1 │ ├── executionHistory │ │ ├── executionHistory.bin │ │ └── executionHistory.lock │ ├── fileChanges │ │ └── last-build.bin │ ├── fileContent │ │ └── fileContent.lock │ ├── fileHashes │ │ ├── fileHashes.bin │ │ ├── fileHashes.lock │ │ └── resourceHashesCache.bin │ ├── gc.properties │ └── javaCompile │ │ ├── classAnalysis.bin │ │ ├── javaCompile.lock │ │ └── taskHistory.bin ├── 5.6.4 │ ├── fileChanges │ │ └── last-build.bin │ ├── fileHashes │ │ ├── fileHashes.bin │ │ └── fileHashes.lock │ └── gc.properties ├── buildOutputCleanup │ ├── buildOutputCleanup.lock │ └── cache.properties └── vcs-1 │ └── gc.properties ├── README.md ├── app ├── .gitignore ├── build.gradle ├── proguard-rules.pro └── src │ └── main │ ├── AndroidManifest.xml │ ├── java │ └── com │ │ └── example │ │ └── webrtc │ │ ├── Audio.java │ │ ├── MainActivity.java │ │ ├── NTSTokenAsyncTask.java │ │ ├── NetworkCallback.java │ │ ├── models │ │ ├── DataModel.java │ │ ├── Example.java │ │ ├── IceCandidateModel.java │ │ └── IceServer.java │ │ └── webRTCModules │ │ └── WebRTCConnection.java │ └── res │ ├── drawable-v24 │ └── ic_launcher_foreground.xml │ ├── drawable │ └── ic_launcher_background.xml │ ├── layout │ └── activity_main.xml │ ├── mipmap-anydpi-v26 │ ├── ic_launcher.xml │ └── ic_launcher_round.xml │ ├── mipmap-hdpi │ ├── ic_launcher.png │ └── ic_launcher_round.png │ ├── mipmap-mdpi │ ├── ic_launcher.png │ └── ic_launcher_round.png │ ├── mipmap-xhdpi │ ├── ic_launcher.png │ └── ic_launcher_round.png │ ├── mipmap-xxhdpi │ ├── ic_launcher.png │ └── ic_launcher_round.png │ ├── mipmap-xxxhdpi │ ├── ic_launcher.png │ └── ic_launcher_round.png │ └── values │ ├── colors.xml │ ├── strings.xml │ └── styles.xml ├── build.gradle ├── data_exchange.gif ├── exchanging_content.png ├── gradle.properties ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── local.properties ├── port_socket_connection.gif ├── settings.gradle └── 
steve_bill.PNG /.gradle/5.4.1/executionHistory/executionHistory.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mail2chromium/Android_Realtime_Communication_Using_WebRTC/d2a41f4ff929365bc3a28dd001594bb29908efdf/.gradle/5.4.1/executionHistory/executionHistory.bin -------------------------------------------------------------------------------- /.gradle/5.4.1/executionHistory/executionHistory.lock: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mail2chromium/Android_Realtime_Communication_Using_WebRTC/d2a41f4ff929365bc3a28dd001594bb29908efdf/.gradle/5.4.1/executionHistory/executionHistory.lock -------------------------------------------------------------------------------- /.gradle/5.4.1/fileChanges/last-build.bin: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gradle/5.4.1/fileContent/fileContent.lock: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mail2chromium/Android_Realtime_Communication_Using_WebRTC/d2a41f4ff929365bc3a28dd001594bb29908efdf/.gradle/5.4.1/fileContent/fileContent.lock -------------------------------------------------------------------------------- /.gradle/5.4.1/fileHashes/fileHashes.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mail2chromium/Android_Realtime_Communication_Using_WebRTC/d2a41f4ff929365bc3a28dd001594bb29908efdf/.gradle/5.4.1/fileHashes/fileHashes.bin -------------------------------------------------------------------------------- /.gradle/5.4.1/fileHashes/fileHashes.lock: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mail2chromium/Android_Realtime_Communication_Using_WebRTC/d2a41f4ff929365bc3a28dd001594bb29908efdf/.gradle/5.4.1/fileHashes/fileHashes.lock -------------------------------------------------------------------------------- /.gradle/5.4.1/fileHashes/resourceHashesCache.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mail2chromium/Android_Realtime_Communication_Using_WebRTC/d2a41f4ff929365bc3a28dd001594bb29908efdf/.gradle/5.4.1/fileHashes/resourceHashesCache.bin -------------------------------------------------------------------------------- /.gradle/5.4.1/gc.properties: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mail2chromium/Android_Realtime_Communication_Using_WebRTC/d2a41f4ff929365bc3a28dd001594bb29908efdf/.gradle/5.4.1/gc.properties -------------------------------------------------------------------------------- /.gradle/5.4.1/javaCompile/classAnalysis.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mail2chromium/Android_Realtime_Communication_Using_WebRTC/d2a41f4ff929365bc3a28dd001594bb29908efdf/.gradle/5.4.1/javaCompile/classAnalysis.bin -------------------------------------------------------------------------------- /.gradle/5.4.1/javaCompile/javaCompile.lock: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mail2chromium/Android_Realtime_Communication_Using_WebRTC/d2a41f4ff929365bc3a28dd001594bb29908efdf/.gradle/5.4.1/javaCompile/javaCompile.lock -------------------------------------------------------------------------------- /.gradle/5.4.1/javaCompile/taskHistory.bin: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mail2chromium/Android_Realtime_Communication_Using_WebRTC/d2a41f4ff929365bc3a28dd001594bb29908efdf/.gradle/5.4.1/javaCompile/taskHistory.bin -------------------------------------------------------------------------------- /.gradle/5.6.4/fileChanges/last-build.bin: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gradle/5.6.4/fileHashes/fileHashes.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mail2chromium/Android_Realtime_Communication_Using_WebRTC/d2a41f4ff929365bc3a28dd001594bb29908efdf/.gradle/5.6.4/fileHashes/fileHashes.bin -------------------------------------------------------------------------------- /.gradle/5.6.4/fileHashes/fileHashes.lock: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mail2chromium/Android_Realtime_Communication_Using_WebRTC/d2a41f4ff929365bc3a28dd001594bb29908efdf/.gradle/5.6.4/fileHashes/fileHashes.lock -------------------------------------------------------------------------------- /.gradle/5.6.4/gc.properties: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mail2chromium/Android_Realtime_Communication_Using_WebRTC/d2a41f4ff929365bc3a28dd001594bb29908efdf/.gradle/5.6.4/gc.properties -------------------------------------------------------------------------------- /.gradle/buildOutputCleanup/buildOutputCleanup.lock: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mail2chromium/Android_Realtime_Communication_Using_WebRTC/d2a41f4ff929365bc3a28dd001594bb29908efdf/.gradle/buildOutputCleanup/buildOutputCleanup.lock -------------------------------------------------------------------------------- 
/.gradle/buildOutputCleanup/cache.properties: -------------------------------------------------------------------------------- 1 | #Wed Apr 08 16:40:51 PKT 2020 2 | gradle.version=5.6.4 3 | -------------------------------------------------------------------------------- /.gradle/vcs-1/gc.properties: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mail2chromium/Android_Realtime_Communication_Using_WebRTC/d2a41f4ff929365bc3a28dd001594bb29908efdf/.gradle/vcs-1/gc.properties -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # "WebRTC Native Stack is considered as a signal chain as close to the Hardware Abstraction Layer (HAL) as possible." 2 | 3 | ----- 4 | 5 | **Getting Started** 6 | 7 | ------ 8 | 9 | This repository involves a **Step by Step Gide to perform WebRTC STUN, TURN Communication in the real world**. 10 | This Guide is based on the referenced article which is basically a complete understanding of [WebRTC in the real world: STUN, TURN and signaling](https://www.html5rocks.com/en/tutorials/webrtc/infrastructure/) for Web Browsers. 11 | 12 | If you want to do communication using WebRTC in *Android*, then Good luck guys! There is no need to beating around the bush (Redundant Information About WebRTC on Internet (Mostly For Web Browsers)). Just calm down and read this article. You will end up with a realtime communication over TURN and STUN using WebRTC. 
For *compiling and building the WebRTC library for Android*, you should look at this reference:
60 | WebRTC is used in various apps like *WhatsApp, Facebook Messenger, appear.in* and platforms such as *TokBox*. 61 | To acquire and communicate streaming data, WebRTC implements three APIs: 62 | 63 | - MediaStream 64 | - PeerConnection 65 | - DataChannel 66 | 67 | All three APIs are supported on `mobile` and `desktop` by *Chrome, Safari, Firefox, Edge and Opera*. 68 | You can get the complete documentation and details of these three APIs in these references: 69 | 70 | - [WebRTC 1.0: Real-time Communication Between Browsers](https://w3c.github.io/webrtc-pc/) 71 | - [Media Capture and Streams](https://www.w3.org/TR/mediacapture-streams/#intro) 72 | - [WebRTC APIs](https://developer.mozilla.org/en-US/docs/Web/API/WebRTC_API) 73 | 74 | All of the above three documentations belong to Web-browsers only. But here, we will discuss for Android. 75 | 76 | ---- 77 | 78 | ### [Signaling](#signaling) 79 | 80 | ----- 81 | 82 | The most important thing to remember is that "**Signaling is not the part of WebRTC! Why?** 83 | Because of these few reasons such as; 84 | 85 | - To avoid redundancy, and 86 | - To maximize compatibility with established technologies. 87 | 88 | The exchange of information via signaling must have completed successfully before **peer-to-peer streaming** can begin. Signaling is used to exchange three types of information: 89 | 90 | - *Session control messages:* to initialize or close communication and report errors. 91 | - *Network configuration:* to the outside world, what's my computer's IP address and port? 92 | - *Media capabilities:* what codecs and resolutions can be handled by my browser and the browser it wants to communicate with? 93 | 94 | Signaling methods and protocols are not specified by WebRTC standards. On the other hand, Signaling is the process of coordinating communication. In order for a android application to; 95 | 96 | - Set up a 'call' {Voice, Video}, 97 | 98 | - Share media files i.e. 
(Voice Notes, Documents etc) 99 | 100 | Android clients (related terminologies {terminals, nodes, members}) need to exchange information such as: 101 | 102 | **1.** Session control messages used to open or close communication. 103 | 104 | SDP interface describes one end of a connection—or potential connection—and how it's configured. It involves a JSON object with multiple two values (sdp, type) 105 | 106 | ``` 107 | { 108 | sdp : "v=0 o=- 3709108758280432862 2 IN IP4 127.0.0.1 s=- t=0 0 a=msid-semantic: WMS m=application 9 DTLS/SCTP 5000 c=IN IP4 0.0.0.0 a=ice-ufrag:/MM7lfHOlMNfSMRk a=ice-pwd:EogZo3Zihb1g0XWgYFKHpeTk a=fingerprint:sha-256 BE:69:CE:D2:D6:41:41:DB:93:3E:3C:F5:D5:3D:D2:5A:33:8A:B0:A6:47:08:AE:24:A0:F6:FE:8F:39:65:21:CE a=setup:actpass a=mid:data a=sctpmap:5000 webrtc-datachannel 1024 ", 109 | 110 | type : "offer" 111 | } 112 | 113 | ``` 114 | You can get more information about [SDP (Session Description Protocol)](https://developer.mozilla.org/en-US/docs/Glossary/SDP). 115 | 116 | The property `SessionDescription.type` is a read-only value of type `SdpType` which describes the description's type. The possible values are defined by an enum of type `SdpType` are as follows. 117 | 118 | ``` 119 | "offer", the description is the initial proposal in an offer/answer exchange. 120 | "answer", the description is the definitive choice in an offer/answer exchange. 121 | "pranswer", the description is a provisional answer and may be changed when the definitive choice will be given. 122 | "rollback", the description rolls back to offer/answer state to the last stable state. 123 | ``` 124 | 125 | Here is very specific and detailed intuition about [SdpType (Session Description Type)](https://developer.mozilla.org/en-US/docs/Web/API/RTCSessionDescription/type). 126 | 127 | ----- 128 | 129 | **2.** Error messages and Callbacks. 130 | 131 | **3.** Media metadata such as codecs and codec settings, bandwidth and media types. 
132 | 133 | **4.** Key data, used to establish secure connections. 134 | 135 | **5.** Share ICECandidates with each other which is Network data, such as a host's IP address and port as seen by the outside world. 136 | 137 | It involves a JSON object with 4 values (candidate, sdpMid, sdpMLineIndex, type): 138 | 139 | ``` 140 | { 141 | candidate:"candidate:5720275078 1 udp 8837102613 9201:398:am9u:14uf:2934:r39a:h753:z43i 38842 typ host generation 3 ufrag uEJl network-id 3 network-cost 82", 142 | sdpMid:"audio", 143 | sdpMLineIndex:2, 144 | type:"candidate" 145 | } 146 | 147 | ``` 148 | 149 | IceCandidate Interface represents a candidate [ICE (Internet Connectivity Establishment)](https://developer.mozilla.org/en-US/docs/Glossary/ICE) configuration which may be used to establish an PeerConnection between Android Phones. 150 | 151 | This signaling process needs a way for clients to pass messages back and forth. That mechanism is not implemented by the WebRTC APIs: You need to build it yourself. I will describe below some ways to build a signaling service. 152 | However, a little context is, you can follow these references to implement signaling: 153 | 154 | - [Google Talk Call Signaling](https://developers.google.com/talk/call_signaling?csw=1) 155 | - [Process Signaling using Nats.io](https://docs.nats.io/nats-streaming-server/process-signaling) 156 | - [Cloud Functions of Firebase for Signaling](https://firebase.google.com/docs/functions) 157 | 158 | ---- 159 | 160 | ### [STUN vs TURN](#stun-vs-TURN) 161 | 162 | ----- 163 | 164 | [STUN (Session Traversal Utilities for NAT)](https://tools.ietf.org/html/rfc5389) is a standardized set of methods, including a network protocol, for traversal of network address translator gateways in applications of real-time voice, video, messaging, and other interactive communications. 
165 | [TURN (Traversal Using Relay NAT)](https://tools.ietf.org/html/rfc5766) is a protocol that allows a client to obtain IP addresses and ports from such a relay. 166 | 167 | While Communicating with Android Peers, there are mostly two alternate data paths are shown, although only one data pathway will be active in a connection. 168 | This is because the *Data (`Audio, Video, Messages, Fax` etc) Pathway* can be 169 | 170 | - Either a direct connection (**92%** of connection attempts can take place directly) or 171 | - Through a relay server (**8%** of connection attempts require an intermediary relay server). 172 | 173 | A third data pathway, not shown, is a direct connection from computer to computer when there is no intermediary firewall. 174 | The following diagram shows these two pathways. 175 | 176 | ![Data Exchange Between Peers](https://github.com/mail2chromium/Android_Realtime_Communication_Using_WebRTC/blob/master/data_exchange.gif) 177 | 178 | Now obviously, while doing such a stuff, it turns out that you will be confused with a question such as [STUN or TURN which one to refer and Why?](https://www.webrtc-experiment.com/docs/STUN-or-TURN.html) 179 | You can use open source as well as paid STUN & TURN Services such as: 180 | 181 | - [CoTURN Open Source TURN Server Project](https://github.com/coturn/coturn) 182 | - [STUNTMAN Open Source STUN Server Project](https://github.com/jselbie/stunserver) 183 | - [Twilio Network Traversal Service](https://www.twilio.com/docs/stun-turn) 184 | 185 | **Request and Response Architecture of STUN/TURN Services:** 186 | 187 | A REST API to Access the TURN Services basically involves the following steps such as: 188 | 189 | **Request:** 190 | 191 | The request includes the following parameters, specified in the URL: 192 | 193 | - service: specifies the desired service (turn) 194 | - username: an optional user id to be associated with the 195 | - credentials 196 | - key: if an API key is used for authentication, the API 
key 197 | 198 | **Example:** 199 | ``` 200 | GET /?service=turn&username=mbzrxpgjys 201 | ``` 202 | 203 | **Response:** 204 | 205 | The response is returned with content-type "application/json", and consists of a JSON object with the following parameters: 206 | 207 | - username 208 | - password 209 | - ttl 210 | - uris 211 | 212 | **Example:** 213 | 214 | ``` 215 | { 216 | "username" : "12334939:mbzrxpgjys", 217 | "password" : "adfsaflsjfldssia", 218 | "ttl" : 86400, 219 | "uris" : [ 220 | "turn:1.2.3.4:9991?transport=udp", 221 | "turn:1.2.3.4:9992?transport=tcp", 222 | "turns:1.2.3.4:443?transport=tcp" 223 | ] 224 | } 225 | ``` 226 | 227 | **WebRTC Interactions:** 228 | 229 | The returned JSON is parsed into an `IceServer` object, and supplied as part to use when creating a `PeerConnection` as follows: 230 | 231 | ``` 232 | List iceServers = new LinkedList<>(); 233 | for (int i = 0; i < example.iceServers.size(); i++) { 234 | if (!example.iceServers.get(i).username.isEmpty()) 235 | iceServers.add(new PeerConnection.IceServer(example.iceServers.get(i).url, example.iceServers.get(i).username, example.iceServers.get(i).credential)); 236 | else 237 | iceServers.add(new PeerConnection.IceServer(example.iceServers.get(i).url)); 238 | } 239 | 240 | constraints = new MediaConstraints(); 241 | peerConnection = peerConnectionFactory.createPeerConnection(iceServers, constraints, peerConnectionObserver); 242 | ``` 243 | 244 | You can get the more details about the in draft i.e [A REST API For Access To TURN Services draft-uberti-behave-turn-rest-00](https://tools.ietf.org/html/draft-uberti-behave-turn-rest-00). 
In general, WebRTC needs servers to fulfill four types of server-side functionality such as: 245 | 246 | - User discovery and communication 247 | - Signaling 248 | - NAT/firewall traversal 249 | - Relay servers in case peer-to-peer communication fails 250 | 251 | ---- 252 | 253 | ### [Peer to Peer Communication](#peer-to-Peer-Communication) 254 | 255 | ----- 256 | 257 | Here we will discuss the complete workaround to establish Peer-to-Peer communication between two Android Phones/Terminal/Nodes which basiclly depend on these modules: 258 | 259 | - [Exchange Media Configuration Information](#exchange-Media-Configuration-Information) 260 | - [Exchange Network Configuration Information](#exchange-Network-Configuration-Information) 261 | - [Communication Via DataChannel](#communication-Via-DataChannel) 262 | 263 | I will start explaining the following Process in very specific detail. So first look at the following entire exchange of information between Two Peers: 264 | 265 | ![Entire Information Exchange Block Diagram](https://github.com/mail2chromium/Android_Realtime_Communication_Using_WebRTC/blob/master/exchanging_content.png) 266 | 267 | **Basic Terminologies:** 268 | 269 | - Peer-A <--> `Caller` <--> Steve Jobs (Steve) The Person who initiate PeerConnection 270 | - Peer-A <--> `Callee` <--> Bill Gates (Bill) The Person who accepts PeerConnection 271 | 272 | **PeerConnection** is the API used by the WebRTC Android Application to create connection between peers and communicate `audio` and `video`. 273 | PeerConnection has two tasks to initialize the Process such as: 274 | 275 | - Ascertain (make sure) the local media conditions such as *bandwidth*, **media types**, **resolution** & **codec capabilities** (`opus, speex` etc). 276 | - Start Gathering the list of potential network addresses for the application's host, known as `IceCandidates`. 277 | 278 | This metadata is used for the offer and answer mechanism. 
Once this local data has been ascertained (Gathered), it must be exchanged via a [signaling mechanism](https://developer.mozilla.org/en-US/docs/Web/API/WebRTC_API/Session_lifetime#Signaling) with the remote peer. 279 | 280 | ----- 281 | 282 | ### [Exchange Media Configuration Information](#exchange-Media-Configuration-Information) 283 | 284 | ----- 285 | 286 | Now I will discuss complete Offer/Answer mechanism with all its glory details: 287 | 288 | 289 | **1.** Steve creates a `PeerConnection` Object. 290 | 291 | ``` 292 | peerConnection = peerConnectionFactory.createPeerConnection(iceServers, constraints, peerConnectionObserver); 293 | 294 | ``` 295 | 296 | **2.** Steve creates an **offer** ([an SDP session description](https://developer.mozilla.org/en-US/docs/Web/API/RTCSessionDescription/RTCSessionDescription)) with the PeerConnection `createOffer()` method. 297 | 298 | ``` 299 | peerConnection.createOffer(sdpObserver, constraints); 300 | 301 | ``` 302 | 303 | **3.** Steve calls `setLocalDescription()` with his offer. 304 | 305 | ``` 306 | peerConnection.setLocalDescription(sdpObserver, sessionDescription); 307 | 308 | ``` 309 | 310 | **4.** Steve makes a json object for ([jsonify](https://developer.android.com/reference/org/json/JSONObject)) the offer and uses a signaling mechanism to send it to Bill. 311 | 312 | 313 | - [Google Talk Call Signaling](https://developers.google.com/talk/call_signaling?csw=1) 314 | - [Process Signaling using Nats.io](https://docs.nats.io/nats-streaming-server/process-signaling) 315 | - [Cloud Functions of Firebase for Signaling](https://firebase.google.com/docs/functions) 316 | 317 | 318 | 319 | **5.** Bill calls `setRemoteDescription()` with Steve's offer, so that his *PeerConnection* knows about Steve's setup for audio or video communication. 
320 | 321 | ``` 322 | peerConnection.setRemoteDescription(sdpObserver, sdp2); 323 | ``` 324 | 325 | **6.** Bill then calls the `createAnswer()` method, and the success callback for this is passed a local session description: Bill's answer as follows 326 | 327 | ``` 328 | peerConnection.createAnswer(sdpObserver, constraints); 329 | ``` 330 | 331 | **7.** Bill sets her answer as the local description by calling `setLocalDescription()`. 332 | 333 | ``` 334 | peerConnection.setLocalDescription(sdpObserver, sessionDescription); 335 | ``` 336 | 337 | **8.** Bill then uses the signaling mechanism to send his json object of the (jsonify) answer back to Steve. 338 | 339 | ``` 340 | { 341 | sdp : "v=0 o=- 3709108758280432862 2 IN IP4 127.0.0.1 s=- t=0 0 a=msid-semantic: WMS m=application 9 DTLS/SCTP 5000 c=IN IP4 0.0.0.0 a=ice-ufrag:/MM7lfHOlMNfSMRk a=ice-pwd:EogZo3Zihb1g0XWgYFKHpeTk a=fingerprint:sha-256 BE:69:CE:D2:D6:41:41:DB:93:3E:3C:F5:D5:3D:D2:5A:33:8A:B0:A6:47:08:AE:24:A0:F6:FE:8F:39:65:21:CE a=setup:actpass a=mid:data a=sctpmap:5000 webrtc-datachannel 1024 ", 342 | 343 | type : "answer" 344 | } 345 | 346 | ``` 347 | 348 | **9.** Steve then sets Bill's answer as the remote session description using `setRemoteDescription()`. 349 | 350 | ``` 351 | peerConnection.setRemoteDescription(sdpObserver, sdp2); 352 | 353 | ``` 354 | 355 | ----- 356 | 357 | ### [Exchange Network Configuration Information](#exchange-Network-Configuration-Information) 358 | 359 | ----- 360 | 361 | In the portion, Each of pair start 'Gathering or finding candidates' which refers to the process of finding *network interfaces* and *ports* using the [Ice FrameWork](https://www.html5rocks.com/en/tutorials/webrtc/basics/#ice). 362 | 363 | **1.** Steve creates an `PeerConnection` object with an `onicecandidate(IceCandidate iceCandidate)` handler. 
364 | 365 | ``` 366 | @Override 367 | public void onIceCandidate(IceCandidate iceCandidate) { 368 | if (username.equals("Steve")) { 369 | IceCandidateModel iceCandidateModel = new IceCandidateModel("candidate", iceCandidate.sdpMLineIndex, iceCandidate.sdpMid, iceCandidate.sdp); 370 | Log.e(TAG, "STEVE's ICE CANDIDATE: " + iceCandidateModel.candidate + " " + iceCandidateModel.sdpMid + " " + iceCandidateModel.type + " " + iceCandidateModel.sdpMLineIndex); 371 | // Do some signaling Stuff 372 | } else if (username.equals("Bill")) { 373 | IceCandidateModel iceCandidateModel = new IceCandidateModel("candidate", iceCandidate.sdpMLineIndex, iceCandidate.sdpMid, iceCandidate.sdp); 374 | Log.e(TAG, "BILL's ICE CANDIDATE: " + iceCandidateModel.candidate + " " + iceCandidateModel.sdpMid + " " + iceCandidateModel.type + " " + iceCandidateModel.sdpMLineIndex); 375 | // Do some signaling Stuff 376 | } 377 | 378 | ``` 379 | 380 | **2.** The handler is run when network candidates become available. 381 | 382 | **3.** Steve sends *serialized candidate data* (json object) to Bill, via whatever signaling channel they are using: TCPSocket, Firebase or some other mechanism. 383 | 384 | **4.** When Bill gets a candidate message from Steve, he calls `addIceCandidate()` method, to add the candidate to the remote peer description. 385 | 386 | ``` 387 | IceCandidate iceCandidate = new IceCandidate(iceCandidateModel.sdpMid, iceCandidateModel.sdpMLineIndex, iceCandidateModel.candidate); 388 | peerConnection.addIceCandidate(iceCandidate); 389 | ``` 390 | 391 | #### Summary 392 | 393 | To sum up the above discussion, We have a **Android Client** also known as *Steve* first create an offer using `PeerConnection createOffer()` method. The return from this is passed an SessionDescription: Steve's local session description. 394 | In the callback, Steve sets the *local description* using `setLocalDescription()` and then sends this session description to Bill via their signaling channel. 
Note that `PeerConnection` won't start gathering candidates until `setLocalDescription()` is called. 395 | Bill sets the description which Steve sent him as the *remote description* using `setRemoteDescription()`. 396 | Bill runs the `PeerConnection createAnswer()` method, passing it the remote description he got from Steve, so a local session can be generated that is compatible with him. The `createAnswer()` callback is passed an `SessionDescription`: Bill sets that as the *local description* and sends it to Steve. 397 | When Steve gets Bill's session description, he sets that as the *remote description* with setRemoteDescription. Its a very simple and pretty straight forward discussion: Bravo!!! 398 | 399 | ![Steve and Bill Communication](https://github.com/mail2chromium/Android_Realtime_Communication_Using_WebRTC/blob/master/steve_bill.PNG) 400 | 401 | ---- 402 | 403 | ### [Communication Via DataChannel](#communication-Via-DataChannel) 404 | 405 | ----- 406 | 407 | Once the `PeerConnection` is established between the *Peer-A* and *Peer-B*, they both can use DataChannel API to share Data between each other. The DataChannel API enables peer-to-peer exchange of arbitrary data (audio, video, etc), with low latency and high throughput. 408 | Steve sends its useful information in the form of bytes using DataChannel `send(new DataChannel.Buffer(buffer, false))`. 409 | 410 | ``` 411 | new Thread(new Runnable() { 412 | @Override 413 | public void run() { 414 | while (true) { 415 | ByteBuffer buffer = ByteBuffer.wrap(audio.getRecordedData()); 416 | dataChannel.send(new DataChannel.Buffer(buffer, false)); 417 | } 418 | } 419 | }).start(); 420 | ``` 421 | 422 | On the other side, Bill receives, steve information using DataChannel `buffer.data.get(data)` method. 
423 | 424 | ``` 425 | @Override 426 | public void onMessage(final DataChannel.Buffer buffer) { 427 | try { 428 | 429 | if (!buffer.binary) { 430 | int limit = buffer.data.limit(); 431 | byte[] data = new byte[limit]; 432 | buffer.data.get(data); 433 | byteQueue.add(data); 434 | 435 | } else { 436 | Log.e(TAG, "Data is not received."); 437 | 438 | } 439 | } catch (Exception e) { 440 | e.printStackTrace(); 441 | Log.e(TAG, "ERROR: " + e.toString()); 442 | } 443 | } 444 | ``` 445 | 446 | Communication occurs directly between Android Phones. DataChannel communication can be much faster than Simple Socket communication even if a relay (TURN) server is required when 'hole punching' to cope with firewalls and NATs fails. 447 | 448 | There are many potential use cases for the API, including: 449 | 450 | - Gaming 451 | - Remote desktop applications 452 | - Real-time text chat 453 | - File transfer 454 | - Decentralized networks 455 | 456 | The API has several features to make the most of RTCPeerConnection and enable powerful and flexible peer-to-peer communication: 457 | 458 | - Leveraging of RTCPeerConnection session setup. 459 | - Multiple simultaneous channels, with prioritization. 460 | - Reliable and unreliable delivery semantics. 461 | - Built-in security (DTLS) and congestion control. 462 | - Ability to use with or without audio or video. 463 | 464 | Each `P2P TransportChannel` represents a data channel between the local and remote Android Terminals. This channel actually obscures a complex system designed for robustness and performance. P2P TransportChannel manages a number of different Connection objects, 465 | each of which is specialized for a different connection type (UDP, TCP, etc). A Connection object actually wraps a pair of objects: 466 | 467 | - A Port subclass, representing the local connection; and 468 | - An address representing the remote connection. 
469 | 470 | If a particular connection fails, P2P TransportChannel will seamlessly switch to the next best connection. The following diagram shows a high level view of the data pathway inside the Peer to Peer Component. 471 | 472 | ![P2P Transport Channel](https://github.com/mail2chromium/Android_Realtime_Communication_Using_WebRTC/blob/master/port_socket_connection.gif) 473 | 474 | ----- 475 | 476 | ### [Conclusion](#Conclusion) 477 | 478 | ----- 479 | 480 | The WebRTC APIs and standards can very useful and optimized tools for content creation and communication—for real-time audio, gaming, video production, music making, news gathering and many other applications. 481 | I have tried my best to give you guys the more appropriate and detailed information with the simple implementation. All you need is to create and Android Application and inside the 482 | application `build.gradle` file just includes the following dependency: 483 | 484 | ``` 485 | implementation 'io.pristine:libjingle:11139@aar' 486 | 487 | ``` 488 | 489 | As [libjingle (Build on the top of WebRTC Native Stack)](https://chromium.googlesource.com/external/webrtc/stable/talk/+/3798b4190c4a196ed1f38f68492a1e51072d8024/libjingle.gyp) has implemented all of the *three* above mentioned WebRTC APIs in a very detailed and specific way to follow the standards of WebRTC. To get the detailed intuition about 490 | libjingle you can visit [Google Talk for Developers](https://developers.google.com/talk/libjingle/developer_guide). Good Luck Guys!!! 
491 | -------------------------------------------------------------------------------- /app/.gitignore: -------------------------------------------------------------------------------- 1 | # Built application files 2 | *.apk 3 | *.ap_ 4 | *.aab 5 | # Files for the ART/Dalvik VM 6 | *.dex 7 | # Java class files 8 | *.class 9 | # Generated files 10 | bin/ 11 | gen/ 12 | out/ 13 | # Uncomment the following line in case you need and you don't have the release build type files in your app 14 | # release/ 15 | # Gradle files 16 | .gradle/ 17 | build/ 18 | # Local configuration file (sdk path, etc) 19 | local.properties 20 | # Proguard folder generated by Eclipse 21 | proguard/ 22 | # Log Files 23 | *.log 24 | # Android Studio Navigation editor temp files 25 | .navigation/ 26 | # Android Studio captures folder 27 | captures/ 28 | # IntelliJ 29 | *.iml 30 | .idea/workspace.xml 31 | .idea/tasks.xml 32 | .idea/gradle.xml 33 | .idea/assetWizardSettings.xml 34 | .idea/dictionaries 35 | .idea/libraries 36 | # Android Studio 3 in .gitignore file. 37 | .idea/caches 38 | .idea/modules.xml 39 | # Comment next line if keeping position of elements in Navigation Editor is relevant for you 40 | .idea/navEditor.xml 41 | # Keystore files 42 | # Uncomment the following lines if you do not want to check your keystore files in. 43 | #*.jks 44 | #*.keystore 45 | # External native build folder generated in Android Studio 2.2 and later 46 | .externalNativeBuild 47 | app/.cxx/ 48 | # Google Services (e.g. 
APIs or Firebase) 49 | # google-services.json 50 | # Freeline 51 | freeline.py 52 | freeline/ 53 | freeline_project_description.json 54 | # fastlane 55 | fastlane/report.xml 56 | fastlane/Preview.html 57 | fastlane/screenshots 58 | fastlane/test_output 59 | fastlane/readme.md 60 | # Version control 61 | vcs.xml 62 | # lint 63 | lint/intermediates/ 64 | lint/generated/ 65 | lint/outputs/ 66 | lint/tmp/ 67 | # lint/reports/ 68 | Collapse 69 | 70 | 71 | 72 | -------------------------------------------------------------------------------- /app/build.gradle: -------------------------------------------------------------------------------- 1 | apply plugin: 'com.android.application' 2 | apply plugin: 'com.google.gms.google-services' 3 | 4 | android { 5 | compileSdkVersion 29 6 | buildToolsVersion "29.0.2" 7 | defaultConfig { 8 | applicationId "com.example.webrtc" 9 | minSdkVersion 21 10 | targetSdkVersion 29 11 | versionCode 1 12 | versionName "1.0" 13 | testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" 14 | } 15 | // buildTypes { 16 | // release { 17 | // minifyEnabled false 18 | // proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' 19 | // } 20 | // } 21 | 22 | buildTypes { 23 | release { 24 | minifyEnabled false 25 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' 26 | } 27 | } 28 | } 29 | 30 | repositories { 31 | mavenCentral() 32 | } 33 | 34 | dependencies { 35 | implementation fileTree(dir: 'libs', include: ['*.jar']) 36 | implementation 'androidx.appcompat:appcompat:1.1.0' 37 | implementation 'androidx.constraintlayout:constraintlayout:1.1.3' 38 | implementation 'io.pristine:libjingle:11139@aar' 39 | implementation 'com.google.code.gson:gson:2.8.6' 40 | implementation 'com.android.volley:volley:1.1.1' 41 | 42 | implementation 'com.google.firebase:firebase-database:19.2.1' 43 | testImplementation 'junit:junit:4.12' 44 | androidTestImplementation 
'androidx.test.ext:junit:1.1.0' 45 | androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0' 46 | 47 | } 48 | -------------------------------------------------------------------------------- /app/proguard-rules.pro: -------------------------------------------------------------------------------- 1 | # Add project specific ProGuard rules here. 2 | # You can control the set of applied configuration files using the 3 | # proguardFiles setting in build.gradle. 4 | # 5 | # For more details, see 6 | # http://developer.android.com/guide/developing/tools/proguard.html 7 | 8 | # If your project uses WebView with JS, uncomment the following 9 | # and specify the fully qualified class name to the JavaScript interface 10 | # class: 11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview { 12 | # public *; 13 | #} 14 | 15 | # Uncomment this to preserve the line number information for 16 | # debugging stack traces. 17 | #-keepattributes SourceFile,LineNumberTable 18 | 19 | # If you keep the line number information, uncomment this to 20 | # hide the original source file name. 
21 | #-renamesourcefileattribute SourceFile 22 | -------------------------------------------------------------------------------- /app/src/main/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | 7 | 8 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | -------------------------------------------------------------------------------- /app/src/main/java/com/example/webrtc/Audio.java: -------------------------------------------------------------------------------- 1 | package com.example.webrtc; 2 | 3 | import android.annotation.TargetApi; 4 | import android.media.AudioAttributes; 5 | import android.media.AudioFormat; 6 | import android.media.AudioRecord; 7 | import android.media.AudioTrack; 8 | import android.media.MediaRecorder; 9 | import android.os.Build; 10 | import android.util.Log; 11 | 12 | public class Audio { 13 | 14 | private static final String TAG = "Audio"; 15 | 16 | public static final int SAMPLE_RATE = 16000; 17 | 18 | private AudioRecord audioRecord; 19 | private AudioTrack audioTrack; 20 | private int minBufferSizeInBytes; 21 | private byte[] byteData; 22 | 23 | @TargetApi(Build.VERSION_CODES.LOLLIPOP) 24 | public void inititalize(){ 25 | 26 | minBufferSizeInBytes = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT); 27 | Log.e(TAG, "MIN BUFFER SIZE: " + minBufferSizeInBytes); 28 | // minBufferSizeInShort = minBufferSizeInBytes / 2; 29 | 30 | audioRecord = new AudioRecord(MediaRecorder.AudioSource.VOICE_COMMUNICATION, SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, minBufferSizeInBytes); 31 | 32 | audioTrack = new AudioTrack(new AudioAttributes.Builder() 33 | .setFlags(AudioAttributes.FLAG_AUDIBILITY_ENFORCED) 34 | .setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION) 35 | .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH) 36 | .build() 37 | , new 
AudioFormat.Builder() 38 | .setChannelMask(AudioFormat.CHANNEL_OUT_MONO) 39 | .setSampleRate(SAMPLE_RATE) 40 | .setEncoding(AudioFormat.ENCODING_PCM_16BIT) 41 | .build() 42 | , minBufferSizeInBytes, AudioTrack.MODE_STREAM, audioRecord.getAudioSessionId()); 43 | 44 | byteData = new byte[minBufferSizeInBytes]; 45 | } 46 | 47 | public void startRecording(){ 48 | audioRecord.startRecording(); 49 | } 50 | 51 | public byte[] getRecordedData(){ 52 | audioRecord.read(byteData,0, minBufferSizeInBytes); 53 | return byteData; 54 | } 55 | 56 | public void startPlaying(){ 57 | audioTrack.play(); 58 | } 59 | 60 | public void playBytes(byte[] byteData){ 61 | audioTrack.write(byteData,0,byteData.length); 62 | } 63 | } 64 | 65 | -------------------------------------------------------------------------------- /app/src/main/java/com/example/webrtc/MainActivity.java: -------------------------------------------------------------------------------- 1 | package com.example.webrtc; 2 | 3 | import androidx.appcompat.app.AppCompatActivity; 4 | 5 | import android.os.Bundle; 6 | import android.view.View; 7 | import android.widget.Button; 8 | import android.widget.EditText; 9 | 10 | import com.example.webrtc.webRTCModules.WebRTCConnection; 11 | 12 | public class MainActivity extends AppCompatActivity{ 13 | 14 | private static final String TAG = "MainActivity"; 15 | 16 | private Button btnSend, btnReceive, btnSendMessage; 17 | 18 | private WebRTCConnection connect; 19 | private EditText meetingId_et, username_et, callTo_et; 20 | 21 | 22 | @Override 23 | protected void onCreate(Bundle savedInstanceState) { 24 | super.onCreate(savedInstanceState); 25 | setContentView(R.layout.activity_main); 26 | 27 | btnSend = findViewById(R.id.btn_send); 28 | btnReceive = findViewById(R.id.btn_receive); 29 | btnSendMessage = findViewById(R.id.btn_send_message); 30 | meetingId_et = findViewById(R.id.meetingId); 31 | username_et = findViewById(R.id.username); 32 | callTo_et = findViewById(R.id.callingTo); 33 | 34 
| } 35 | 36 | @Override 37 | protected void onStart() { 38 | super.onStart(); 39 | 40 | btnSend.setOnClickListener(new View.OnClickListener() { 41 | @Override 42 | public void onClick(View view) { 43 | if (!meetingId_et.getText().toString().equals("") && !username_et.getText().toString().equals("") && !callTo_et.getText().toString().equals("")) { 44 | connect = new WebRTCConnection(getApplicationContext(), meetingId_et.getText().toString(), username_et.getText().toString(), callTo_et.getText().toString(), true); 45 | connect.playAudio(); 46 | } 47 | } 48 | }); 49 | 50 | btnReceive.setOnClickListener(new View.OnClickListener() { 51 | @Override 52 | public void onClick(View view) { 53 | if (!meetingId_et.getText().toString().equals("") && !username_et.getText().toString().equals("") && !callTo_et.getText().toString().equals("")) { 54 | connect = new WebRTCConnection(getApplicationContext(), meetingId_et.getText().toString(), username_et.getText().toString(), callTo_et.getText().toString(), false); 55 | connect.playAudio(); 56 | } 57 | } 58 | }); 59 | 60 | btnSendMessage.setOnClickListener(new View.OnClickListener() { 61 | @Override 62 | public void onClick(View v) { 63 | 64 | connect.startSend(); 65 | } 66 | }); 67 | 68 | 69 | } 70 | 71 | 72 | } 73 | -------------------------------------------------------------------------------- /app/src/main/java/com/example/webrtc/NTSTokenAsyncTask.java: -------------------------------------------------------------------------------- 1 | package com.example.webrtc; 2 | 3 | import android.content.Context; 4 | import android.os.AsyncTask; 5 | import android.util.Log; 6 | 7 | import com.example.webrtc.models.Example; 8 | import com.example.webrtc.models.IceServer; 9 | 10 | import org.json.JSONArray; 11 | import org.json.JSONObject; 12 | 13 | import java.io.BufferedReader; 14 | import java.io.InputStreamReader; 15 | import java.net.HttpURLConnection; 16 | import java.net.URL; 17 | import java.util.ArrayList; 18 | import 
java.util.List; 19 | 20 | public class NTSTokenAsyncTask extends AsyncTask { 21 | private static final String TAG = "NTSTokenAsyncTask"; 22 | private StringBuilder result; 23 | private String command; 24 | private HttpURLConnection conn; 25 | private Example example = new Example(); 26 | private NetworkCallback networkCallback; 27 | 28 | //todo: This Class is use to fetch token from either of STUN/TURN Services 29 | // You are using... 30 | // Token Parameters: 31 | // - username 32 | // - password 33 | // - uris 34 | // Visit for Open Source STUN/TURN Server: https://gist.github.com/yetithefoot/7592580 35 | 36 | public NTSTokenAsyncTask(Context context, NetworkCallback networkCallback) { 37 | command = "Paste Your RestAPI to Fetch TURN Services Response"; 38 | this.networkCallback = networkCallback; 39 | } 40 | 41 | @Override 42 | protected Void doInBackground(Void... voids) { 43 | result = new StringBuilder(); 44 | URL url = null; 45 | try { 46 | url = new URL(command); 47 | 48 | conn = (HttpURLConnection) url.openConnection(); 49 | conn.setRequestMethod("GET"); 50 | BufferedReader rd = new BufferedReader(new InputStreamReader(conn.getInputStream())); 51 | String line; 52 | while ((line = rd.readLine()) != null) { 53 | result.append(line); 54 | } 55 | rd.close(); 56 | 57 | JSONObject jObject = new JSONObject(result.toString()); 58 | 59 | 60 | JSONArray iceServers = jObject.getJSONArray("ice_servers"); 61 | List iceServerList = new ArrayList<>(); 62 | for (int i = 0; i < iceServers.length(); i++) { 63 | String ur = ""; 64 | String urls = ""; 65 | String username = ""; 66 | String credential = ""; 67 | 68 | if (iceServers.getJSONObject(i).has("url")) { 69 | ur = iceServers.getJSONObject(i).getString("url"); 70 | } 71 | 72 | if (iceServers.getJSONObject(i).has("urls")) { 73 | urls = iceServers.getJSONObject(i).getString("urls"); 74 | } 75 | 76 | if (iceServers.getJSONObject(i).has("username")) { 77 | username = iceServers.getJSONObject(i).getString("username"); 78 | 
} 79 | 80 | if (iceServers.getJSONObject(i).has("credential")) { 81 | credential = iceServers.getJSONObject(i).getString("credential"); 82 | } 83 | 84 | IceServer iceServer = new IceServer(ur, urls, username, credential); 85 | iceServerList.add(iceServer); 86 | } 87 | 88 | String username = jObject.getString("username"); 89 | String password = jObject.getString("password"); 90 | example = new Example(username, password, iceServerList); 91 | 92 | } catch (Exception e) { 93 | e.printStackTrace(); 94 | Log.e(TAG, "THAT DIDN'T work: " + e.toString()); 95 | } 96 | 97 | return null; 98 | } 99 | 100 | @Override 101 | protected void onPostExecute(Void aVoid) { 102 | super.onPostExecute(aVoid); 103 | networkCallback.InitializePeerConnection(example); 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /app/src/main/java/com/example/webrtc/NetworkCallback.java: -------------------------------------------------------------------------------- 1 | package com.example.webrtc; 2 | 3 | import com.example.webrtc.models.Example; 4 | 5 | public interface NetworkCallback { 6 | 7 | void InitializePeerConnection(Example example); 8 | } 9 | -------------------------------------------------------------------------------- /app/src/main/java/com/example/webrtc/models/DataModel.java: -------------------------------------------------------------------------------- 1 | package com.example.webrtc.models; 2 | 3 | public class DataModel { 4 | 5 | public String sdp; 6 | public String type; 7 | 8 | public DataModel() { 9 | } 10 | 11 | public DataModel(String sdp, String type) { 12 | this.sdp = sdp; 13 | this.type = type; 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /app/src/main/java/com/example/webrtc/models/Example.java: -------------------------------------------------------------------------------- 1 | package com.example.webrtc.models; 2 | 3 | import com.google.gson.annotations.Expose; 4 | 
import com.google.gson.annotations.SerializedName; 5 | 6 | import java.util.List; 7 | 8 | public class Example { 9 | 10 | public Example() { 11 | } 12 | 13 | public Example(String username, String password , List iceServers) { 14 | this.username = username; 15 | this.password = password; 16 | this.iceServers = iceServers; 17 | } 18 | 19 | @SerializedName("username") 20 | @Expose 21 | public String username; 22 | @SerializedName("password") 23 | @Expose 24 | public String password; 25 | @SerializedName("ice_servers") 26 | @Expose 27 | public List iceServers = null; 28 | } 29 | -------------------------------------------------------------------------------- /app/src/main/java/com/example/webrtc/models/IceCandidateModel.java: -------------------------------------------------------------------------------- 1 | package com.example.webrtc.models; 2 | 3 | public class IceCandidateModel { 4 | 5 | public String type; 6 | public int sdpMLineIndex; 7 | public String sdpMid; 8 | public String candidate; 9 | 10 | public IceCandidateModel() { 11 | } 12 | 13 | public IceCandidateModel(String type, int sdpMLineIndex, String sdpMid, String candidate) { 14 | this.type = type; 15 | this.sdpMLineIndex = sdpMLineIndex; 16 | this.sdpMid = sdpMid; 17 | this.candidate = candidate; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /app/src/main/java/com/example/webrtc/models/IceServer.java: -------------------------------------------------------------------------------- 1 | package com.example.webrtc.models; 2 | 3 | import com.google.gson.annotations.Expose; 4 | import com.google.gson.annotations.SerializedName; 5 | 6 | public class IceServer { 7 | 8 | public IceServer(String url, String urls, String username, String credential) { 9 | this.url = url; 10 | this.urls = urls; 11 | this.username = username; 12 | this.credential = credential; 13 | } 14 | 15 | @SerializedName("url") 16 | @Expose 17 | public String url; 18 | 
@SerializedName("urls") 19 | @Expose 20 | public String urls; 21 | @SerializedName("username") 22 | @Expose 23 | public String username; 24 | @SerializedName("credential") 25 | @Expose 26 | public String credential; 27 | 28 | } 29 | -------------------------------------------------------------------------------- /app/src/main/java/com/example/webrtc/webRTCModules/WebRTCConnection.java: -------------------------------------------------------------------------------- 1 | package com.example.webrtc.webRTCModules; 2 | 3 | import android.content.Context; 4 | import android.util.Log; 5 | 6 | import androidx.annotation.NonNull; 7 | 8 | import com.example.webrtc.Audio; 9 | import com.example.webrtc.NTSTokenAsyncTask; 10 | import com.example.webrtc.NetworkCallback; 11 | import com.example.webrtc.models.DataModel; 12 | import com.example.webrtc.models.Example; 13 | import com.example.webrtc.models.IceCandidateModel; 14 | import com.google.firebase.database.DataSnapshot; 15 | import com.google.firebase.database.DatabaseError; 16 | import com.google.firebase.database.DatabaseReference; 17 | import com.google.firebase.database.FirebaseDatabase; 18 | import com.google.firebase.database.ValueEventListener; 19 | 20 | import org.webrtc.DataChannel; 21 | import org.webrtc.IceCandidate; 22 | import org.webrtc.MediaConstraints; 23 | import org.webrtc.MediaStream; 24 | import org.webrtc.PeerConnection; 25 | import org.webrtc.PeerConnectionFactory; 26 | import org.webrtc.SdpObserver; 27 | import org.webrtc.SessionDescription; 28 | 29 | import java.nio.ByteBuffer; 30 | import java.util.LinkedList; 31 | import java.util.List; 32 | import java.util.Queue; 33 | 34 | public class WebRTCConnection implements NetworkCallback { 35 | private static final String TAG = "Connect_webRTC"; 36 | 37 | private Context context; 38 | 39 | private DataChannel dataChannel; 40 | private MediaConstraints constraints; 41 | private PeerConnection peerConnection; 42 | private PeerConnectionFactory 
peerConnectionFactory; 43 | private DatabaseReference databaseReference; 44 | 45 | 46 | private Boolean createdOffer = false; 47 | private Audio audio; 48 | private Queue byteQueue; 49 | private String meetingId; 50 | private String username = ""; 51 | private String usernameRoot = ""; 52 | private String usernameCandidate = ""; 53 | private String callingTo = ""; 54 | private String callingToRoot = ""; 55 | private String callingToCandidates = ""; 56 | 57 | public WebRTCConnection(Context context, String meetingId, String username, String callingTo, boolean createdOffer) { 58 | this.context = context; 59 | this.meetingId = meetingId; 60 | this.username = username; 61 | this.usernameRoot = username+"Root"; 62 | this.usernameCandidate = username + "Candidates"; 63 | 64 | this.callingTo = callingTo; 65 | this.callingToRoot = callingTo+"Root"; 66 | this.callingToCandidates = callingTo + "Candidates"; 67 | 68 | this.createdOffer = createdOffer; 69 | 70 | 71 | byteQueue = new LinkedList<>(); 72 | 73 | PeerConnectionFactory.initializeAndroidGlobals(context, true, true, true); 74 | 75 | constraints = new MediaConstraints(); 76 | peerConnectionFactory = new PeerConnectionFactory(); 77 | databaseReference = FirebaseDatabase.getInstance().getReference().child(meetingId); 78 | 79 | NTSTokenAsyncTask ntsToken = new NTSTokenAsyncTask(context, this); 80 | ntsToken.execute(); 81 | 82 | audio = new Audio(); 83 | audio.inititalize(); 84 | 85 | audio.startRecording(); 86 | audio.startPlaying(); 87 | 88 | 89 | 90 | } 91 | 92 | //todo: to Play Audio Bytes 93 | public void playAudio() { 94 | 95 | new Thread(new Runnable() { 96 | @Override 97 | public void run() { 98 | while (true) { 99 | try { 100 | if (!byteQueue.isEmpty()) { 101 | audio.playBytes(byteQueue.remove()); 102 | } 103 | } catch (Exception e) { 104 | Log.e(TAG, "PlayAudio Exception: " + e.toString()); 105 | } 106 | } 107 | } 108 | }).start(); 109 | } 110 | 111 | //todo: to create offer 112 | private void createOffer() { 113 
| peerConnection.createOffer(sdpObserver, constraints); 114 | } 115 | 116 | //todo: to setup firebase database signaling listener 117 | // to actively fetching IceCandidates from either Parties {Steve, Bill} 118 | public void setupListeners() { 119 | 120 | /*todo: Make a signaling Call back channel to send CandidateDataListener 121 | todo: You can use Firebase Database model to save the information of Steve 122 | todo: So that Bill Can this information 123 | */ 124 | 125 | databaseReference.child(callingToRoot).addValueEventListener(candidateDataListener); 126 | 127 | } 128 | 129 | //todo: Start Sending Data using DataChannel to either Parties {Steve, Bill} 130 | public boolean startSend() { 131 | 132 | try { 133 | new Thread(new Runnable() { 134 | @Override 135 | public void run() { 136 | while (true) { 137 | ByteBuffer buffer = ByteBuffer.wrap(audio.getRecordedData()); 138 | dataChannel.send(new DataChannel.Buffer(buffer, false)); 139 | } 140 | } 141 | }).start(); 142 | return true; 143 | }catch (Exception e) 144 | { 145 | e.printStackTrace(); 146 | return false; 147 | } 148 | } 149 | 150 | //todo: Session Description Callbacks 151 | SdpObserver sdpObserver = new SdpObserver() { 152 | @Override 153 | public void onCreateSuccess(SessionDescription sessionDescription) { 154 | peerConnection.setLocalDescription(sdpObserver, sessionDescription); 155 | DataModel dataModel = new DataModel(sessionDescription.description, sessionDescription.type.toString().toLowerCase()); 156 | 157 | // todo: Do some signaling Stuff to Send Steve or Bill Local Session Description 158 | // todo: Send dataModel to other Person 159 | } 160 | 161 | @Override 162 | public void onSetSuccess() { 163 | 164 | } 165 | 166 | @Override 167 | public void onCreateFailure(String s) { 168 | 169 | } 170 | 171 | @Override 172 | public void onSetFailure(String s) { 173 | 174 | } 175 | }; 176 | 177 | //todo: Signaling Channel, Here is simplest firebase model, You Should try your own approach 178 | // to set 
Parameters 179 | ValueEventListener candidateDataListener = new ValueEventListener() { 180 | @Override 181 | public void onDataChange(@NonNull DataSnapshot dataSnapshot) { 182 | DataModel dataModel = dataSnapshot.child(callingTo).getValue(DataModel.class); 183 | if (dataModel != null) { 184 | SessionDescription sdp2 = new SessionDescription(SessionDescription.Type.fromCanonicalForm(dataModel.type), dataModel.sdp); 185 | peerConnection.setRemoteDescription(sdpObserver, sdp2); 186 | 187 | if(!createdOffer) 188 | peerConnection.createAnswer(sdpObserver, constraints); 189 | 190 | for (DataSnapshot postSnapshot : dataSnapshot.child(callingToCandidates).getChildren()) { 191 | IceCandidateModel iceCandidateModel = postSnapshot.getValue(IceCandidateModel.class); 192 | 193 | IceCandidate iceCandidate1 = new IceCandidate(iceCandidateModel.sdpMid, iceCandidateModel.sdpMLineIndex, iceCandidateModel.candidate); 194 | peerConnection.addIceCandidate(iceCandidate1); 195 | } 196 | 197 | } else { 198 | Log.e(TAG, "DATA MODEL IS NULL"); 199 | } 200 | } 201 | 202 | @Override 203 | public void onCancelled(@NonNull DatabaseError databaseError) { 204 | 205 | } 206 | }; 207 | 208 | //todo: PeerConnection Callbacks 209 | PeerConnection.Observer peerConnectionObserver = new PeerConnection.Observer() { 210 | @Override 211 | public void onSignalingChange(PeerConnection.SignalingState signalingState) { 212 | Log.d(TAG, "onSignalingChange() " + signalingState.name()); 213 | } 214 | 215 | @Override 216 | public void onIceConnectionChange(PeerConnection.IceConnectionState iceConnectionState) { 217 | Log.d(TAG, "onIceConnectionChange() " + iceConnectionState.name()); 218 | } 219 | 220 | @Override 221 | public void onIceConnectionReceivingChange(boolean b) { 222 | Log.d(TAG, "onIceConnectionReceivingChange(): " + b); 223 | } 224 | 225 | @Override 226 | public void onIceGatheringChange(PeerConnection.IceGatheringState iceGatheringState) { 227 | Log.d(TAG, "onIceGatheringChange() " + 
iceGatheringState.name()); 228 | } 229 | 230 | @Override 231 | public void onIceCandidate(IceCandidate iceCandidate) { 232 | Log.d(TAG, "onIceCandidate: " + iceCandidate.toString()); 233 | 234 | IceCandidateModel iceCandidateModel = new IceCandidateModel("candidate", iceCandidate.sdpMLineIndex, iceCandidate.sdpMid, iceCandidate.sdp); 235 | // Do Some Signaling stuff to share IceCandidate Model with other Steve or Bill 236 | databaseReference.child(usernameRoot).child(usernameCandidate).push().setValue(iceCandidateModel); 237 | 238 | } 239 | 240 | @Override 241 | public void onAddStream(MediaStream mediaStream) { 242 | 243 | } 244 | 245 | @Override 246 | public void onRemoveStream(MediaStream mediaStream) { 247 | 248 | } 249 | 250 | @Override 251 | public void onDataChannel(DataChannel dataChannel1) { 252 | dataChannel = dataChannel1; 253 | dataChannel.registerObserver(dataChannelObserver); 254 | } 255 | 256 | @Override 257 | public void onRenegotiationNeeded() { 258 | Log.d(TAG, "onRenegotiationNeeded()"); 259 | 260 | } 261 | }; 262 | 263 | //todo: DataChannel Callbacks 264 | DataChannel.Observer dataChannelObserver = new DataChannel.Observer() { 265 | @Override 266 | public void onBufferedAmountChange(long l) { 267 | 268 | } 269 | 270 | @Override 271 | public void onStateChange() { 272 | 273 | } 274 | 275 | @Override 276 | public void onMessage(final DataChannel.Buffer buffer) { 277 | try { 278 | 279 | if (!buffer.binary) { 280 | int limit = buffer.data.limit(); 281 | byte[] data = new byte[limit]; 282 | buffer.data.get(data); 283 | 284 | byteQueue.add(data); 285 | 286 | } else { 287 | Log.e(TAG, "Data is received but not binary."); 288 | } 289 | } catch (Exception e) { 290 | e.printStackTrace(); 291 | Log.e(TAG, "ERROR: " + e.toString()); 292 | } 293 | } 294 | }; 295 | 296 | //todo: Initial PeerConnection and DataChannel 297 | @Override 298 | public void InitializePeerConnection(Example example) { 299 | 300 | /* 301 | url: 'turn:192.158.29.39:3478?transport=udp', 
302 | credential: 'JZEOEt2V3Qb0y27GRntt2u2PAYA=', 303 | username: '28224511:1379330808' 304 | */ 305 | List iceServers = new LinkedList<>(); 306 | for (int i = 0; i < example.iceServers.size(); i++) { 307 | if (!example.iceServers.get(i).username.isEmpty()) 308 | iceServers.add(new PeerConnection.IceServer(example.iceServers.get(i).url, example.iceServers.get(i).username, example.iceServers.get(i).credential)); 309 | else 310 | iceServers.add(new PeerConnection.IceServer(example.iceServers.get(i).url)); 311 | } 312 | 313 | constraints = new MediaConstraints(); 314 | peerConnection = peerConnectionFactory.createPeerConnection(iceServers, constraints, peerConnectionObserver); 315 | 316 | DataChannel.Init init = new DataChannel.Init(); 317 | init.ordered = true; 318 | dataChannel = peerConnection.createDataChannel("RTCDataChannel", init); 319 | dataChannel.registerObserver(dataChannelObserver); 320 | 321 | setupListeners(); 322 | 323 | if(createdOffer) 324 | { 325 | createOffer(); 326 | } 327 | } 328 | 329 | 330 | } -------------------------------------------------------------------------------- /app/src/main/res/drawable-v24/ic_launcher_foreground.xml: -------------------------------------------------------------------------------- 1 | 7 | 12 | 13 | 19 | 22 | 25 | 26 | 27 | 28 | 34 | 35 | -------------------------------------------------------------------------------- /app/src/main/res/drawable/ic_launcher_background.xml: -------------------------------------------------------------------------------- 1 | 2 | 7 | 10 | 15 | 20 | 25 | 30 | 35 | 40 | 45 | 50 | 55 | 60 | 65 | 70 | 75 | 80 | 85 | 90 | 95 | 100 | 105 | 110 | 115 | 120 | 125 | 130 | 135 | 140 | 145 | 150 | 155 | 160 | 165 | 170 | 171 | -------------------------------------------------------------------------------- /app/src/main/res/layout/activity_main.xml: -------------------------------------------------------------------------------- 1 | 2 | 8 | 9 |