├── .gitignore
├── LICENSE
├── README.md
├── app
│   ├── .gitignore
│   ├── build.gradle
│   ├── proguard-rules.pro
│   └── src
│       ├── androidTest
│       │   └── java
│       │       └── com
│       │           └── developerspace
│       │               └── webrtcsample
│       │                   └── ExampleInstrumentedTest.kt
│       ├── main
│       │   ├── AndroidManifest.xml
│       │   ├── ic_launcher-playstore.png
│       │   ├── java
│       │   │   └── com
│       │   │       └── developerspace
│       │   │           └── webrtcsample
│       │   │               ├── AppSdpObserver.kt
│       │   │               ├── Constants.kt
│       │   │               ├── MainActivity.kt
│       │   │               ├── PeerConnectionObserver.kt
│       │   │               ├── RTCActivity.kt
│       │   │               ├── RTCAudioManager.kt
│       │   │               ├── RTCClient.kt
│       │   │               ├── SignalingClient.kt
│       │   │               └── SignalingClientListener.kt
│       │   └── res
│       │       ├── drawable-v24
│       │       │   └── ic_launcher_foreground.xml
│       │       ├── drawable
│       │       │   ├── circle_background.xml
│       │       │   ├── curve_background.xml
│       │       │   ├── ic_baseline_call_end_24.xml
│       │       │   ├── ic_baseline_cameraswitch_24.xml
│       │       │   ├── ic_baseline_hearing_24.xml
│       │       │   ├── ic_baseline_mic_24.xml
│       │       │   ├── ic_baseline_mic_off_24.xml
│       │       │   ├── ic_baseline_speaker_up_24.xml
│       │       │   ├── ic_baseline_videocam_24.xml
│       │       │   ├── ic_baseline_videocam_off_24.xml
│       │       │   ├── ic_launcher_background.xml
│       │       │   └── webrtc.png
│       │       ├── layout
│       │       │   ├── activity_main.xml
│       │       │   └── activity_start.xml
│       │       ├── mipmap-anydpi-v26
│       │       │   ├── ic_launcher.xml
│       │       │   └── ic_launcher_round.xml
│       │       ├── mipmap-hdpi
│       │       │   ├── ic_launcher.png
│       │       │   ├── ic_launcher_foreground.png
│       │       │   └── ic_launcher_round.png
│       │       ├── mipmap-mdpi
│       │       │   ├── ic_launcher.png
│       │       │   ├── ic_launcher_foreground.png
│       │       │   └── ic_launcher_round.png
│       │       ├── mipmap-xhdpi
│       │       │   ├── ic_launcher.png
│       │       │   ├── ic_launcher_foreground.png
│       │       │   └── ic_launcher_round.png
│       │       ├── mipmap-xxhdpi
│       │       │   ├── ic_launcher.png
│       │       │   ├── ic_launcher_foreground.png
│       │       │   └── ic_launcher_round.png
│       │       ├── mipmap-xxxhdpi
│       │       │   ├── ic_launcher.png
│       │       │   ├── ic_launcher_foreground.png
│       │       │   └── ic_launcher_round.png
│       │       ├── values-night
│       │       │   └── themes.xml
│       │       └── values
│       │           ├── colors.xml
│       │           ├── ic_launcher_background.xml
│       │           ├── strings.xml
│       │           └── themes.xml
│       └── test
│           └── java
│               └── com
│                   └── developerspace
│                       └── webrtcsample
│                           └── ExampleUnitTest.kt
├── build.gradle
├── gradle.properties
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
├── images
│   ├── offer-image.png
│   ├── offercandidate-sample.PNG
│   ├── sample-A.png
│   └── screenshot-A.PNG
└── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | # Built application files
2 | *.apk
3 | *.aar
4 | *.ap_
5 | *.aab
6 |
7 | # Files for the ART/Dalvik VM
8 | *.dex
9 |
10 | # Java class files
11 | *.class
12 |
13 | # Generated files
14 | bin/
15 | gen/
16 | out/
17 | # Uncomment the following line in case you need and you don't have the release build type files in your app
18 | # release/
19 |
20 | # Gradle files
21 | .gradle/
22 | build/
23 |
24 | # Local configuration file (sdk path, etc)
25 | local.properties
26 |
27 | # Proguard folder generated by Eclipse
28 | proguard/
29 |
30 | # Log Files
31 | *.log
32 |
33 | # Android Studio Navigation editor temp files
34 | .navigation/
35 |
36 | # Android Studio captures folder
37 | captures/
38 |
39 | # IntelliJ
40 | *.iml
41 | .idea/workspace.xml
42 | .idea/tasks.xml
43 | .idea/gradle.xml
44 | .idea/assetWizardSettings.xml
45 | .idea/dictionaries
46 | .idea/libraries
47 | # Android Studio 3 in .gitignore file.
48 | .idea/caches
49 | .idea/modules.xml
50 | # Comment next line if keeping position of elements in Navigation Editor is relevant for you
51 | .idea/navEditor.xml
52 |
53 | # Keystore files
54 | # Uncomment the following lines if you do not want to check your keystore files in.
55 | #*.jks
56 | #*.keystore
57 |
58 | # External native build folder generated in Android Studio 2.2 and later
59 | .externalNativeBuild
60 | .cxx/
61 |
62 | # Google Services (e.g. APIs or Firebase)
63 | # google-services.json
64 |
65 | # Freeline
66 | freeline.py
67 | freeline/
68 | freeline_project_description.json
69 |
70 | # fastlane
71 | fastlane/report.xml
72 | fastlane/Preview.html
73 | fastlane/screenshots
74 | fastlane/test_output
75 | fastlane/readme.md
76 |
77 | # Version control
78 | vcs.xml
79 |
80 | # lint
81 | lint/intermediates/
82 | lint/generated/
83 | lint/outputs/
84 | lint/tmp/
85 | # lint/reports/
86 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 Developer Space
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 | # WebRTC-Kotlin-Sample
3 | [](#)
4 | [](#)
5 | 
6 |
7 | An Android application that uses WebRTC, with Cloud Firestore as the signaling server, to support real-time media communication.
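
The app keeps all signaling state in Cloud Firestore: one document per meeting under `calls/{meetingID}` for the SDP offer/answer, plus a `candidates` sub-collection for ICE candidates (see `RTCClient.kt` and `SignalingClient.kt`). A minimal sketch of that layout, using hypothetical helper functions for illustration only:

```kotlin
import com.google.firebase.firestore.ktx.firestore
import com.google.firebase.ktx.Firebase

// Hypothetical helper, not part of the app; it only illustrates the Firestore
// layout that RTCClient.kt writes and SignalingClient.kt listens to.
fun publishOffer(meetingID: String, sdp: String) {
    // calls/{meetingID} holds the current session description;
    // "type" is one of OFFER, ANSWER or END_CALL.
    Firebase.firestore.collection("calls").document(meetingID)
        .set(mapOf("type" to "OFFER", "sdp" to sdp))
}

// Hypothetical helper: ICE candidates live in a "candidates" sub-collection,
// in documents named "offerCandidate" or "answerCandidate".
fun publishOfferCandidate(meetingID: String, sdpMid: String, sdpMLineIndex: Int, candidate: String) {
    Firebase.firestore.collection("calls").document(meetingID)
        .collection("candidates").document("offerCandidate")
        .set(
            mapOf(
                "type" to "offerCandidate",
                "sdpMid" to sdpMid,
                "sdpMLineIndex" to sdpMLineIndex,
                "sdpCandidate" to candidate
            )
        )
}
```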
8 |
9 | [](https://youtu.be/MlRN8dV9lOs)
10 |
11 |
12 | ---
13 |
14 | ## Pre-requisites
15 | :heavy_check_mark: Android Studio installed on your system.
16 | :heavy_check_mark: An Android device or emulator to run the app.
17 | :heavy_check_mark: A Firebase account and a Firebase project integrated with the app.
18 |
19 | ---
20 |
21 | ## Setup :hammer:
22 |
23 | - Clone the WebRTC-Kotlin-Sample repository:
24 |
25 | ```bash
26 | git clone https://github.com/developerspace-samples/WebRTC-Kotlin-Sample.git
27 | ```
28 |
29 | - Please make sure to create a Firebase project and set it up with this app. You need to add the `google-services.json` file of your Firebase project to your `app` folder.
30 |   For more details, please check the link below:
31 |   https://firebase.google.com/docs/android/setup
32 |
33 | Once the setup is done you can run the project in Android Studio.
34 |
35 | ---
36 |
37 |
38 |
39 |
40 | :handshake: Open for Contribution
41 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | plugins {
2 | id 'com.android.application'
3 | id 'kotlin-android'
4 | id 'kotlin-android-extensions'
5 | }
6 | apply plugin: 'com.google.gms.google-services'
7 |
8 | android {
9 | compileSdkVersion 30
10 | buildToolsVersion "30.0.3"
11 |
12 | defaultConfig {
13 | applicationId "com.dwarsh.webrtcsample"
14 | minSdkVersion 21
15 | targetSdkVersion 30
16 | versionCode 1
17 | versionName "1.0"
18 |
19 | testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
20 | }
21 |
22 | buildTypes {
23 | release {
24 | minifyEnabled false
25 | proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
26 | }
27 | }
28 | dataBinding {
29 | enabled = true
30 | }
31 |
32 | packagingOptions {
33 | exclude("META-INF/kotlinx-io.kotlin_module")
34 | exclude("META-INF/atomicfu.kotlin_module")
35 | exclude("META-INF/kotlinx-coroutines-io.kotlin_module")
36 | exclude("META-INF/kotlinx-coroutines-core.kotlin_module")
37 | }
38 |
39 | compileOptions {
40 | sourceCompatibility JavaVersion.VERSION_1_8
41 | targetCompatibility JavaVersion.VERSION_1_8
42 | }
43 | kotlinOptions {
44 | jvmTarget = '1.8'
45 | }
46 | }
47 |
48 | dependencies {
49 |
50 | implementation "org.jetbrains.kotlin:kotlin-stdlib:$kotlin_version"
51 | implementation 'androidx.core:core-ktx:1.3.2'
52 | implementation 'androidx.appcompat:appcompat:1.2.0'
53 | implementation 'com.google.android.material:material:1.3.0'
54 | implementation 'androidx.constraintlayout:constraintlayout:2.0.4'
55 | testImplementation 'junit:junit:4.+'
56 | androidTestImplementation 'androidx.test.ext:junit:1.1.2'
57 | androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0'
58 | implementation("org.jetbrains.kotlinx:kotlinx-coroutines-android:1.2.1")
59 | implementation 'org.webrtc:google-webrtc:1.0.32006'
60 | implementation platform('com.google.firebase:firebase-bom:27.0.0')
61 | implementation 'com.google.firebase:firebase-analytics-ktx'
62 | // Declare the dependency for the Cloud Firestore library
63 | // When using the BoM, you don't specify versions in Firebase library dependencies
64 | implementation 'com.google.firebase:firebase-firestore-ktx'
65 |
66 | // Ktor dependencies (you can use Retrofit instead)
67 | implementation("io.ktor:ktor-client-android:$ktor_version")
68 | implementation("io.ktor:ktor-client-websocket:$ktor_version")
69 | implementation("io.ktor:ktor-client-cio:$ktor_version")
70 | implementation("io.ktor:ktor-client-gson:$ktor_version")
71 | }
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
--------------------------------------------------------------------------------
/app/src/androidTest/java/com/developerspace/webrtcsample/ExampleInstrumentedTest.kt:
--------------------------------------------------------------------------------
1 | package com.developerspace.webrtcsample
2 |
3 | import androidx.test.platform.app.InstrumentationRegistry
4 | import androidx.test.ext.junit.runners.AndroidJUnit4
5 |
6 | import org.junit.Test
7 | import org.junit.runner.RunWith
8 |
9 | import org.junit.Assert.*
10 |
11 | /**
12 | * Instrumented test, which will execute on an Android device.
13 | *
14 | * See [testing documentation](http://d.android.com/tools/testing).
15 | */
16 | @RunWith(AndroidJUnit4::class)
17 | class ExampleInstrumentedTest {
18 | @Test
19 | fun useAppContext() {
20 | // Context of the app under test.
21 | val appContext = InstrumentationRegistry.getInstrumentation().targetContext
22 | assertEquals("com.dwarsh.webrtcsample", appContext.packageName)
23 | }
24 | }
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
17 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
29 |
30 |
31 |
--------------------------------------------------------------------------------
/app/src/main/ic_launcher-playstore.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/app/src/main/ic_launcher-playstore.png
--------------------------------------------------------------------------------
/app/src/main/java/com/developerspace/webrtcsample/AppSdpObserver.kt:
--------------------------------------------------------------------------------
1 | package com.developerspace.webrtcsample
2 |
3 | import org.webrtc.SdpObserver
4 | import org.webrtc.SessionDescription
5 |
6 | open class AppSdpObserver : SdpObserver {
7 | override fun onSetFailure(p0: String?) {
8 | }
9 |
10 | override fun onSetSuccess() {
11 | }
12 |
13 | override fun onCreateSuccess(p0: SessionDescription?) {
14 | }
15 |
16 | override fun onCreateFailure(p0: String?) {
17 | }
18 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/developerspace/webrtcsample/Constants.kt:
--------------------------------------------------------------------------------
1 | package com.developerspace.webrtcsample
2 |
3 | class Constants {
4 | companion object {
5 | var isCallEnded: Boolean = false
6 | var isIntiatedNow : Boolean = true
7 | }
8 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/developerspace/webrtcsample/MainActivity.kt:
--------------------------------------------------------------------------------
1 | package com.developerspace.webrtcsample
2 |
3 | import android.content.Intent
4 | import androidx.appcompat.app.AppCompatActivity
5 | import android.os.Bundle
6 | import com.google.firebase.firestore.ktx.firestore
7 | import com.google.firebase.ktx.Firebase
8 | import kotlinx.android.synthetic.main.activity_start.*
9 |
10 | class MainActivity : AppCompatActivity() {
11 |
12 | val db = Firebase.firestore
13 |
14 | override fun onCreate(savedInstanceState: Bundle?) {
15 | super.onCreate(savedInstanceState)
16 | setContentView(R.layout.activity_start)
17 | Constants.isIntiatedNow = true
18 | Constants.isCallEnded = true
19 | start_meeting.setOnClickListener {
20 | if (meeting_id.text.toString().trim().isEmpty())
21 | meeting_id.error = "Please enter meeting id"
22 | else {
23 | db.collection("calls")
24 | .document(meeting_id.text.toString())
25 | .get()
26 | .addOnSuccessListener {
27 | if (it["type"]=="OFFER" || it["type"]=="ANSWER" || it["type"]=="END_CALL") {
28 | meeting_id.error = "Please enter new meeting ID"
29 | } else {
30 | val intent = Intent(this@MainActivity, RTCActivity::class.java)
31 | intent.putExtra("meetingID",meeting_id.text.toString())
32 | intent.putExtra("isJoin",false)
33 | startActivity(intent)
34 | }
35 | }
36 | .addOnFailureListener {
37 | meeting_id.error = "Please enter new meeting ID"
38 | }
39 | }
40 | }
41 | join_meeting.setOnClickListener {
42 | if (meeting_id.text.toString().trim().isEmpty())
43 | meeting_id.error = "Please enter meeting id"
44 | else {
45 | val intent = Intent(this@MainActivity, RTCActivity::class.java)
46 | intent.putExtra("meetingID",meeting_id.text.toString())
47 | intent.putExtra("isJoin",true)
48 | startActivity(intent)
49 | }
50 | }
51 | }
52 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/developerspace/webrtcsample/PeerConnectionObserver.kt:
--------------------------------------------------------------------------------
1 | package com.developerspace.webrtcsample
2 |
3 | import org.webrtc.*
4 |
5 | open class PeerConnectionObserver : PeerConnection.Observer {
6 | override fun onIceCandidate(p0: IceCandidate?) {
7 | }
8 |
9 | override fun onDataChannel(p0: DataChannel?) {
10 | }
11 |
12 | override fun onIceConnectionReceivingChange(p0: Boolean) {
13 | }
14 |
15 | override fun onIceConnectionChange(p0: PeerConnection.IceConnectionState?) {
16 | }
17 |
18 | override fun onIceGatheringChange(p0: PeerConnection.IceGatheringState?) {
19 | }
20 |
21 | override fun onAddStream(p0: MediaStream?) {
22 | }
23 |
24 | override fun onSignalingChange(p0: PeerConnection.SignalingState?) {
25 | }
26 |
27 | override fun onIceCandidatesRemoved(p0: Array<out IceCandidate>?) {
28 | }
29 |
30 | override fun onRemoveStream(p0: MediaStream?) {
31 | }
32 |
33 | override fun onRenegotiationNeeded() {
34 | }
35 |
36 | override fun onAddTrack(p0: RtpReceiver?, p1: Array<out MediaStream>?) {
37 | }
38 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/developerspace/webrtcsample/RTCActivity.kt:
--------------------------------------------------------------------------------
1 | package com.developerspace.webrtcsample
2 |
3 | import android.Manifest
4 | import android.content.Intent
5 | import android.content.pm.PackageManager
6 | import androidx.appcompat.app.AppCompatActivity
7 | import android.os.Bundle
8 | import android.util.Log
9 | import android.widget.Toast
10 | import androidx.appcompat.app.AlertDialog
11 | import androidx.core.app.ActivityCompat
12 | import androidx.core.content.ContextCompat
13 | import androidx.core.view.isGone
14 | import kotlinx.android.synthetic.main.activity_main.*
15 | import kotlinx.android.synthetic.main.activity_start.*
16 | import kotlinx.coroutines.ExperimentalCoroutinesApi
17 | import org.webrtc.*
18 | import java.util.*
19 |
20 | @ExperimentalCoroutinesApi
21 | class RTCActivity : AppCompatActivity() {
22 |
23 | companion object {
24 | private const val CAMERA_AUDIO_PERMISSION_REQUEST_CODE = 1
25 | private const val CAMERA_PERMISSION = Manifest.permission.CAMERA
26 | private const val AUDIO_PERMISSION = Manifest.permission.RECORD_AUDIO
27 | }
28 |
29 | private lateinit var rtcClient: RTCClient
30 | private lateinit var signallingClient: SignalingClient
31 |
32 | private val audioManager by lazy { RTCAudioManager.create(this) }
33 |
34 | val TAG = "MainActivity"
35 |
36 | private var meetingID : String = "test-call"
37 |
38 | private var isJoin = false
39 |
40 | private var isMute = false
41 |
42 | private var isVideoPaused = false
43 |
44 | private var inSpeakerMode = true
45 |
46 | private val sdpObserver = object : AppSdpObserver() {
47 | override fun onCreateSuccess(p0: SessionDescription?) {
48 | super.onCreateSuccess(p0)
49 | // signallingClient.send(p0)
50 | }
51 | }
52 |
53 | override fun onCreate(savedInstanceState: Bundle?) {
54 | super.onCreate(savedInstanceState)
55 | setContentView(R.layout.activity_main)
56 |
57 | if (intent.hasExtra("meetingID"))
58 | meetingID = intent.getStringExtra("meetingID")!!
59 | if (intent.hasExtra("isJoin"))
60 | isJoin = intent.getBooleanExtra("isJoin",false)
61 |
62 | checkCameraAndAudioPermission()
63 | audioManager.selectAudioDevice(RTCAudioManager.AudioDevice.SPEAKER_PHONE)
64 | switch_camera_button.setOnClickListener {
65 | rtcClient.switchCamera()
66 | }
67 |
68 | audio_output_button.setOnClickListener {
69 | if (inSpeakerMode) {
70 | inSpeakerMode = false
71 | audio_output_button.setImageResource(R.drawable.ic_baseline_hearing_24)
72 | audioManager.setDefaultAudioDevice(RTCAudioManager.AudioDevice.EARPIECE)
73 | } else {
74 | inSpeakerMode = true
75 | audio_output_button.setImageResource(R.drawable.ic_baseline_speaker_up_24)
76 | audioManager.setDefaultAudioDevice(RTCAudioManager.AudioDevice.SPEAKER_PHONE)
77 | }
78 | }
79 | video_button.setOnClickListener {
80 | if (isVideoPaused) {
81 | isVideoPaused = false
82 | video_button.setImageResource(R.drawable.ic_baseline_videocam_off_24)
83 | } else {
84 | isVideoPaused = true
85 | video_button.setImageResource(R.drawable.ic_baseline_videocam_24)
86 | }
87 | rtcClient.enableVideo(isVideoPaused)
88 | }
89 | mic_button.setOnClickListener {
90 | if (isMute) {
91 | isMute = false
92 | mic_button.setImageResource(R.drawable.ic_baseline_mic_off_24)
93 | } else {
94 | isMute = true
95 | mic_button.setImageResource(R.drawable.ic_baseline_mic_24)
96 | }
97 | rtcClient.enableAudio(isMute)
98 | }
99 | end_call_button.setOnClickListener {
100 | rtcClient.endCall(meetingID)
101 | remote_view.isGone = false
102 | Constants.isCallEnded = true
103 | finish()
104 | startActivity(Intent(this@RTCActivity, MainActivity::class.java))
105 | }
106 | }
107 |
108 | private fun checkCameraAndAudioPermission() {
109 | if ((ContextCompat.checkSelfPermission(this, CAMERA_PERMISSION)
110 | != PackageManager.PERMISSION_GRANTED) ||
111 | (ContextCompat.checkSelfPermission(this, AUDIO_PERMISSION)
112 | != PackageManager.PERMISSION_GRANTED)) {
113 | requestCameraAndAudioPermission()
114 | } else {
115 | onCameraAndAudioPermissionGranted()
116 | }
117 | }
118 |
119 | private fun onCameraAndAudioPermissionGranted() {
120 | rtcClient = RTCClient(
121 | application,
122 | object : PeerConnectionObserver() {
123 | override fun onIceCandidate(p0: IceCandidate?) {
124 | super.onIceCandidate(p0)
125 | signallingClient.sendIceCandidate(p0, isJoin)
126 | rtcClient.addIceCandidate(p0)
127 | }
128 |
129 | override fun onAddStream(p0: MediaStream?) {
130 | super.onAddStream(p0)
131 | Log.e(TAG, "onAddStream: $p0")
132 | p0?.videoTracks?.get(0)?.addSink(remote_view)
133 | }
134 |
135 | override fun onIceConnectionChange(p0: PeerConnection.IceConnectionState?) {
136 | Log.e(TAG, "onIceConnectionChange: $p0")
137 | }
138 |
139 | override fun onIceConnectionReceivingChange(p0: Boolean) {
140 | Log.e(TAG, "onIceConnectionReceivingChange: $p0")
141 | }
142 |
143 | override fun onConnectionChange(newState: PeerConnection.PeerConnectionState?) {
144 | Log.e(TAG, "onConnectionChange: $newState")
145 | }
146 |
147 | override fun onDataChannel(p0: DataChannel?) {
148 | Log.e(TAG, "onDataChannel: $p0")
149 | }
150 |
151 | override fun onStandardizedIceConnectionChange(newState: PeerConnection.IceConnectionState?) {
152 | Log.e(TAG, "onStandardizedIceConnectionChange: $newState")
153 | }
154 |
155 | override fun onAddTrack(p0: RtpReceiver?, p1: Array<out MediaStream>?) {
156 | Log.e(TAG, "onAddTrack: $p0 \n $p1")
157 | }
158 |
159 | override fun onTrack(transceiver: RtpTransceiver?) {
160 | Log.e(TAG, "onTrack: $transceiver" )
161 | }
162 | }
163 | )
164 |
165 | rtcClient.initSurfaceView(remote_view)
166 | rtcClient.initSurfaceView(local_view)
167 | rtcClient.startLocalVideoCapture(local_view)
168 | signallingClient = SignalingClient(meetingID,createSignallingClientListener())
169 | if (!isJoin)
170 | rtcClient.call(sdpObserver,meetingID)
171 | }
172 |
173 | private fun createSignallingClientListener() = object : SignalingClientListener {
174 | override fun onConnectionEstablished() {
175 | end_call_button.isClickable = true
176 | }
177 |
178 | override fun onOfferReceived(description: SessionDescription) {
179 | rtcClient.onRemoteSessionReceived(description)
180 | Constants.isIntiatedNow = false
181 | rtcClient.answer(sdpObserver,meetingID)
182 | remote_view_loading.isGone = true
183 | }
184 |
185 | override fun onAnswerReceived(description: SessionDescription) {
186 | rtcClient.onRemoteSessionReceived(description)
187 | Constants.isIntiatedNow = false
188 | remote_view_loading.isGone = true
189 | }
190 |
191 | override fun onIceCandidateReceived(iceCandidate: IceCandidate) {
192 | rtcClient.addIceCandidate(iceCandidate)
193 | }
194 |
195 | override fun onCallEnded() {
196 | if (!Constants.isCallEnded) {
197 | Constants.isCallEnded = true
198 | rtcClient.endCall(meetingID)
199 | finish()
200 | startActivity(Intent(this@RTCActivity, MainActivity::class.java))
201 | }
202 | }
203 | }
204 |
205 | private fun requestCameraAndAudioPermission(dialogShown: Boolean = false) {
206 | if (ActivityCompat.shouldShowRequestPermissionRationale(this, CAMERA_PERMISSION) &&
207 | ActivityCompat.shouldShowRequestPermissionRationale(this, AUDIO_PERMISSION) &&
208 | !dialogShown) {
209 | showPermissionRationaleDialog()
210 | } else {
211 | ActivityCompat.requestPermissions(this, arrayOf(CAMERA_PERMISSION, AUDIO_PERMISSION), CAMERA_AUDIO_PERMISSION_REQUEST_CODE)
212 | }
213 | }
214 |
215 | private fun showPermissionRationaleDialog() {
216 | AlertDialog.Builder(this)
217 | .setTitle("Camera And Audio Permission Required")
218 | .setMessage("This app needs the camera and audio permissions to function")
219 | .setPositiveButton("Grant") { dialog, _ ->
220 | dialog.dismiss()
221 | requestCameraAndAudioPermission(true)
222 | }
223 | .setNegativeButton("Deny") { dialog, _ ->
224 | dialog.dismiss()
225 | onCameraPermissionDenied()
226 | }
227 | .show()
228 | }
229 |
230 | override fun onRequestPermissionsResult(requestCode: Int, permissions: Array<out String>, grantResults: IntArray) {
231 | super.onRequestPermissionsResult(requestCode, permissions, grantResults)
232 | if (requestCode == CAMERA_AUDIO_PERMISSION_REQUEST_CODE && grantResults.all { it == PackageManager.PERMISSION_GRANTED }) {
233 | onCameraAndAudioPermissionGranted()
234 | } else {
235 | onCameraPermissionDenied()
236 | }
237 | }
238 |
239 | private fun onCameraPermissionDenied() {
240 | Toast.makeText(this, "Camera and Audio Permission Denied", Toast.LENGTH_LONG).show()
241 | }
242 |
243 | override fun onDestroy() {
244 | signallingClient.destroy()
245 | super.onDestroy()
246 | }
247 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/developerspace/webrtcsample/RTCAudioManager.kt:
--------------------------------------------------------------------------------
1 | package com.developerspace.webrtcsample
2 |
3 | import android.content.BroadcastReceiver
4 | import android.content.Context
5 | import android.content.Intent
6 | import android.content.IntentFilter
7 | import android.content.pm.PackageManager
8 | import android.media.AudioDeviceInfo
9 | import android.media.AudioManager
10 | import android.media.AudioManager.OnAudioFocusChangeListener
11 | import android.os.Build
12 | import android.preference.PreferenceManager
13 | import android.util.Log
14 | import androidx.annotation.Nullable
15 | import org.webrtc.ThreadUtils
16 | import java.util.*
17 | import kotlin.collections.HashSet
18 |
19 |
20 | class RTCAudioManager(context: Context) {
21 | /**
22 | * AudioDevice is the names of possible audio devices that we currently
23 | * support.
24 | */
25 | enum class AudioDevice {
26 | SPEAKER_PHONE, WIRED_HEADSET, EARPIECE, NONE
27 | }
28 |
29 | /** AudioManager state. */
30 | enum class AudioManagerState {
31 | UNINITIALIZED, PREINITIALIZED, RUNNING
32 | }
33 |
34 | /** Selected audio device change event. */
35 | interface AudioManagerEvents {
36 | // Callback fired once audio device is changed or list of available audio devices changed.
37 | fun onAudioDeviceChanged(
38 | selectedAudioDevice: AudioDevice?, availableAudioDevices: Set<AudioDevice>?
39 | )
40 | }
41 |
42 | private val apprtcContext: Context
43 |
44 | @Nullable
45 | private val audioManager: AudioManager
46 |
47 | @Nullable
48 | private var audioManagerEvents: AudioManagerEvents? = null
49 | private var amState: AudioManagerState
50 | private var savedAudioMode = AudioManager.MODE_INVALID
51 | private var savedIsSpeakerPhoneOn = false
52 | private var savedIsMicrophoneMute = false
53 | private var hasWiredHeadset = false
54 |
55 | // Default audio device; speaker phone for video calls or earpiece for audio
56 | // only calls.
57 | private var defaultAudioDevice: AudioDevice? = null
58 |
59 | // Contains the currently selected audio device.
60 | // This device is changed automatically using a certain scheme where e.g.
61 | // a wired headset "wins" over speaker phone. It is also possible for a
62 | // user to explicitly select a device (and override any predefined scheme).
63 | // See |userSelectedAudioDevice| for details.
64 | private var selectedAudioDevice: AudioDevice? = null
65 |
66 | // Contains the user-selected audio device which overrides the predefined
67 | // selection scheme.
68 | private var userSelectedAudioDevice: AudioDevice? = null
69 |
70 | // Contains speakerphone setting: auto, true or false
71 | @Nullable
72 | private val useSpeakerphone: String?
73 |
74 |
75 | // Contains a list of available audio devices. A Set collection is used to
76 | // avoid duplicate elements.
77 | private var audioDevices: MutableSet<AudioDevice> = HashSet()
78 |
79 | // Broadcast receiver for wired headset intent broadcasts.
80 | private val wiredHeadsetReceiver: BroadcastReceiver
81 |
82 | // Callback method for changes in audio focus.
83 | @Nullable
84 | private var audioFocusChangeListener: OnAudioFocusChangeListener? = null
85 |
86 |
87 | /* Receiver which handles changes in wired headset availability. */
88 | private inner class WiredHeadsetReceiver() : BroadcastReceiver() {
89 | override fun onReceive(context: Context?, intent: Intent) {
90 | val state = intent.getIntExtra("state", STATE_UNPLUGGED)
91 | val microphone = intent.getIntExtra("microphone", HAS_NO_MIC)
92 | val name = intent.getStringExtra("name")
93 | Log.d(TAG, "WiredHeadsetReceiver.onReceive"
94 | + ": " + "a=" + intent.action.toString() + ", s=" +
95 | (if (state == STATE_UNPLUGGED) "unplugged" else "plugged").toString()
96 | + ", m=" + (if (microphone == HAS_MIC) "mic" else "no mic").toString()
97 | + ", n=" + name.toString() + ", sb=" + isInitialStickyBroadcast)
98 | hasWiredHeadset = (state == STATE_PLUGGED)
99 | updateAudioDeviceState()
100 | }
101 |
102 | private val STATE_UNPLUGGED = 0
103 | private val STATE_PLUGGED = 1
104 | private val HAS_NO_MIC = 0
105 | private val HAS_MIC = 1
106 | }
107 |
108 | fun start(audioManagerEvents: AudioManagerEvents?) {
109 | Log.d(TAG, "start")
110 | ThreadUtils.checkIsOnMainThread()
111 | if (amState == AudioManagerState.RUNNING) {
112 | Log.e(TAG, "AudioManager is already active")
113 | return
114 | }
115 | // else if (amState == AudioManagerState.UNINITIALIZED) {
116 | // preInitAudio()
117 | // }
118 | // TODO perhaps call new method called preInitAudio() here if UNINITIALIZED.
119 | Log.d(TAG, "AudioManager starts...")
120 | this.audioManagerEvents = audioManagerEvents
121 | amState = AudioManagerState.RUNNING
122 |
123 | // Store current audio state so we can restore it when stop() is called.
124 | savedAudioMode = audioManager.mode
125 | savedIsSpeakerPhoneOn = audioManager.isSpeakerphoneOn
126 | savedIsMicrophoneMute = audioManager.isMicrophoneMute
127 | hasWiredHeadset = hasWiredHeadset()
128 |
129 | // Create an AudioManager.OnAudioFocusChangeListener instance.
130 | audioFocusChangeListener =
131 | OnAudioFocusChangeListener { focusChange ->
132 |
133 | // Called on the listener to notify if the audio focus for this listener has been changed.
134 | // The |focusChange| value indicates whether the focus was gained, whether the focus was lost,
135 | // and whether that loss is transient, or whether the new focus holder will hold it for an
136 | // unknown amount of time.
137 |
138 | val typeOfChange: String
139 | when (focusChange) {
140 | AudioManager.AUDIOFOCUS_GAIN -> typeOfChange = "AUDIOFOCUS_GAIN"
141 | AudioManager.AUDIOFOCUS_GAIN_TRANSIENT -> typeOfChange =
142 | "AUDIOFOCUS_GAIN_TRANSIENT"
143 | AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE -> typeOfChange =
144 | "AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE"
145 | AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK -> typeOfChange =
146 | "AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK"
147 | AudioManager.AUDIOFOCUS_LOSS -> typeOfChange = "AUDIOFOCUS_LOSS"
148 | AudioManager.AUDIOFOCUS_LOSS_TRANSIENT -> typeOfChange =
149 | "AUDIOFOCUS_LOSS_TRANSIENT"
150 | AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK -> typeOfChange =
151 | "AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK"
152 | else -> typeOfChange = "AUDIOFOCUS_INVALID"
153 | }
154 | Log.d(TAG, "onAudioFocusChange: $typeOfChange")
155 | }
156 |
157 | // Request audio playout focus (without ducking) and install listener for changes in focus.
158 | val result = audioManager.requestAudioFocus(
159 | audioFocusChangeListener,
160 | AudioManager.STREAM_VOICE_CALL, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT
161 | )
162 | if (result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
163 | Log.d(TAG, "Audio focus request granted for VOICE_CALL streams")
164 | } else {
165 | Log.e(TAG, "Audio focus request failed")
166 | }
167 |
168 | // Start by setting MODE_IN_COMMUNICATION as default audio mode. It is
169 | // required to be in this mode when playout and/or recording starts for
170 | // best possible VoIP performance.
171 | audioManager.mode = AudioManager.MODE_IN_COMMUNICATION
172 |
173 | // Always disable microphone mute during a WebRTC call.
174 | setMicrophoneMute(false)
175 |
176 | // Set initial device states.
177 | userSelectedAudioDevice = AudioDevice.NONE
178 | selectedAudioDevice = AudioDevice.NONE
179 | audioDevices.clear()
180 |
181 | // Do initial selection of audio device. This setting can later be changed
182 | // either by adding/removing a BT or wired headset or by covering/uncovering
183 | // the proximity sensor.
184 | updateAudioDeviceState()
185 |
186 | // Register receiver for broadcast intents related to adding/removing a
187 | // wired headset.
188 | registerReceiver(wiredHeadsetReceiver, IntentFilter(Intent.ACTION_HEADSET_PLUG))
189 | Log.d(TAG, "AudioManager started")
190 | }
191 |
192 | fun stop() {
193 | Log.d(TAG, "stop")
194 | ThreadUtils.checkIsOnMainThread()
195 | if (amState != AudioManagerState.RUNNING) {
196 | Log.e(
197 | TAG,
198 | "Trying to stop AudioManager in incorrect state: $amState"
199 | )
200 | return
201 | }
202 | amState = AudioManagerState.UNINITIALIZED
203 | unregisterReceiver(wiredHeadsetReceiver)
204 |
205 | // Restore previously stored audio states.
206 | setSpeakerphoneOn(savedIsSpeakerPhoneOn)
207 | setMicrophoneMute(savedIsMicrophoneMute)
208 | audioManager.mode = savedAudioMode
209 |
210 | // Abandon audio focus. Gives the previous focus owner, if any, focus.
211 | audioManager.abandonAudioFocus(audioFocusChangeListener)
212 | audioFocusChangeListener = null
213 | Log.d(TAG, "Abandoned audio focus for VOICE_CALL streams")
214 |
215 | audioManagerEvents = null
216 | Log.d(TAG, "AudioManager stopped")
217 | }
218 |
219 | /** Changes selection of the currently active audio device. */
220 | private fun setAudioDeviceInternal(device: AudioDevice?) {
221 | Log.d(TAG, "setAudioDeviceInternal(device=$device)")
222 | if (audioDevices.contains(device)) {
223 | when (device) {
224 | AudioDevice.SPEAKER_PHONE -> setSpeakerphoneOn(true)
225 | AudioDevice.EARPIECE -> setSpeakerphoneOn(false)
226 | AudioDevice.WIRED_HEADSET -> setSpeakerphoneOn(false)
227 | else -> Log.e(TAG, "Invalid audio device selection")
228 | }
229 | }
230 | selectedAudioDevice = device
231 | }
232 |
233 | /**
234 | * Changes default audio device.
235 | */
236 | fun setDefaultAudioDevice(defaultDevice: AudioDevice?) {
237 | ThreadUtils.checkIsOnMainThread()
238 | when (defaultDevice) {
239 | AudioDevice.SPEAKER_PHONE -> defaultAudioDevice = defaultDevice
240 | AudioDevice.EARPIECE -> if (hasEarpiece()) {
241 | defaultAudioDevice = defaultDevice
242 | } else {
243 | defaultAudioDevice = AudioDevice.SPEAKER_PHONE
244 | }
245 | else -> Log.e(TAG, "Invalid default audio device selection")
246 | }
247 | Log.d(TAG, "setDefaultAudioDevice(device=$defaultAudioDevice)")
248 | updateAudioDeviceState()
249 | }
250 |
251 | /** Changes selection of the currently active audio device. */
252 | fun selectAudioDevice(device: AudioDevice) {
253 | ThreadUtils.checkIsOnMainThread()
254 | if (!audioDevices.contains(device)) {
255 | Log.e(
256 | TAG,
257 | "Can not select $device from available $audioDevices"
258 | )
259 | }
260 | userSelectedAudioDevice = device
261 | updateAudioDeviceState()
262 | }
263 |
264 | /** Returns current set of available/selectable audio devices. */
265 | fun getAudioDevices(): Set<AudioDevice> {
266 | ThreadUtils.checkIsOnMainThread()
267 | return Collections.unmodifiableSet(HashSet(audioDevices)) as Set<AudioDevice>
268 | }
269 |
270 | /** Returns the currently selected audio device. */
271 | fun getSelectedAudioDevice(): AudioDevice? {
272 | ThreadUtils.checkIsOnMainThread()
273 | return selectedAudioDevice
274 | }
275 |
276 | /** Helper method for receiver registration. */
277 | private fun registerReceiver(receiver: BroadcastReceiver, filter: IntentFilter) {
278 | apprtcContext.registerReceiver(receiver, filter)
279 | }
280 |
281 | /** Helper method for unregistration of an existing receiver. */
282 | private fun unregisterReceiver(receiver: BroadcastReceiver) {
283 | apprtcContext.unregisterReceiver(receiver)
284 | }
285 |
286 | /** Sets the speaker phone mode. */
287 | private fun setSpeakerphoneOn(on: Boolean) {
288 | val wasOn = audioManager.isSpeakerphoneOn
289 | if (wasOn == on) {
290 | return
291 | }
292 | audioManager.isSpeakerphoneOn = on
293 | }
294 |
295 | /** Sets the microphone mute state. */
296 | private fun setMicrophoneMute(on: Boolean) {
297 | val wasMuted = audioManager.isMicrophoneMute
298 | if (wasMuted == on) {
299 | return
300 | }
301 | audioManager.isMicrophoneMute = on
302 | }
303 |
304 | /** Gets the current earpiece state. */
305 | private fun hasEarpiece(): Boolean {
306 | return apprtcContext.getPackageManager().hasSystemFeature(PackageManager.FEATURE_TELEPHONY)
307 | }
308 |
309 | /**
310 | * Checks whether a wired headset is connected or not.
311 | * This is not a valid indication that audio playback is actually over
312 | * the wired headset as audio routing depends on other conditions. We
313 | * only use it as an early indicator (during initialization) of an attached
314 | * wired headset.
315 | */
316 | @Deprecated("")
317 | private fun hasWiredHeadset(): Boolean {
318 | if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
319 | return audioManager.isWiredHeadsetOn
320 | } else {
321 | val devices = audioManager.getDevices(AudioManager.GET_DEVICES_INPUTS)
322 | for (device: AudioDeviceInfo in devices) {
323 | val type = device.type
324 | if (type == AudioDeviceInfo.TYPE_WIRED_HEADSET) {
325 | Log.d(TAG, "hasWiredHeadset: found wired headset")
326 | return true
327 | } else if (type == AudioDeviceInfo.TYPE_USB_DEVICE) {
328 | Log.d(TAG, "hasWiredHeadset: found USB audio device")
329 | return true
330 | }
331 | }
332 | return false
333 | }
334 | }
335 |
336 | /**
337 | * Updates the list of possible audio devices and makes a new device selection.
338 | */
339 | fun updateAudioDeviceState() {
340 | ThreadUtils.checkIsOnMainThread()
341 | Log.d(
342 | TAG, ("--- updateAudioDeviceState: "
343 | + "wired headset=" + hasWiredHeadset)
344 | )
345 | Log.d(
346 | TAG, ("Device status: "
347 | + "available=" + audioDevices + ", "
348 | + "selected=" + selectedAudioDevice + ", "
349 | + "user selected=" + userSelectedAudioDevice)
350 | )
351 |
352 |
353 | // Update the set of available audio devices.
354 | val newAudioDevices: MutableSet = HashSet()
355 |
356 | if (hasWiredHeadset) {
357 | // If a wired headset is connected, then it is the only possible option.
358 | newAudioDevices.add(AudioDevice.WIRED_HEADSET)
359 | } else {
360 | // No wired headset, hence the audio-device list can contain speaker
361 | // phone (on a tablet), or speaker phone and earpiece (on mobile phone).
362 | newAudioDevices.add(AudioDevice.SPEAKER_PHONE)
363 | if (hasEarpiece()) {
364 | newAudioDevices.add(AudioDevice.EARPIECE)
365 | }
366 | }
367 | // Store state which is set to true if the device list has changed.
368 | var audioDeviceSetUpdated = audioDevices != newAudioDevices
369 | // Update the existing audio device set.
370 | audioDevices = newAudioDevices
371 | // Correct user selected audio devices if needed.
372 | if (hasWiredHeadset && userSelectedAudioDevice == AudioDevice.SPEAKER_PHONE) {
373 | // If user selected speaker phone, but then plugged wired headset then make
374 | // wired headset as user selected device.
375 | userSelectedAudioDevice = AudioDevice.WIRED_HEADSET
376 | }
377 | if (!hasWiredHeadset && userSelectedAudioDevice == AudioDevice.WIRED_HEADSET) {
378 | // If user selected wired headset, but then unplugged wired headset then make
379 | // speaker phone as user selected device.
380 | userSelectedAudioDevice = AudioDevice.SPEAKER_PHONE
381 | }
382 |
383 |
384 | // Update selected audio device.
385 | val newAudioDevice: AudioDevice?
386 | if (hasWiredHeadset) {
387 | // If a wired headset is connected, but Bluetooth is not, then wired headset is used as
388 | // audio device.
389 | newAudioDevice = AudioDevice.WIRED_HEADSET
390 | } else {
391 | // No wired headset and no Bluetooth, hence the audio-device list can contain speaker
392 | // phone (on a tablet), or speaker phone and earpiece (on mobile phone).
393 | // |defaultAudioDevice| contains either AudioDevice.SPEAKER_PHONE or AudioDevice.EARPIECE
394 | // depending on the user's selection.
395 | newAudioDevice = defaultAudioDevice
396 | }
397 | // Switch to new device but only if there has been any changes.
398 | if (newAudioDevice != selectedAudioDevice || audioDeviceSetUpdated) {
399 | // Do the required device switch.
400 | setAudioDeviceInternal(newAudioDevice)
401 | Log.d(
402 | TAG, ("New device status: "
403 | + "available=" + audioDevices + ", "
404 | + "selected=" + newAudioDevice)
405 | )
406 | if (audioManagerEvents != null) {
407 | // Notify a listening client that audio device has been changed.
408 | audioManagerEvents!!.onAudioDeviceChanged(selectedAudioDevice, audioDevices)
409 | }
410 | }
411 | Log.d(TAG, "--- updateAudioDeviceState done")
412 | }
413 |
414 | companion object {
415 | private val TAG = "AppRTCAudioManager"
416 | private val SPEAKERPHONE_AUTO = "auto"
417 | private val SPEAKERPHONE_TRUE = "true"
418 | private val SPEAKERPHONE_FALSE = "false"
419 |
420 | /** Construction. */
421 | fun create(context: Context): RTCAudioManager {
422 | return RTCAudioManager(context)
423 | }
424 | }
425 |
426 | init {
427 | Log.d(TAG, "ctor")
428 | ThreadUtils.checkIsOnMainThread()
429 | apprtcContext = context
430 | audioManager = context.getSystemService(Context.AUDIO_SERVICE) as AudioManager
431 | wiredHeadsetReceiver = WiredHeadsetReceiver()
432 | amState = AudioManagerState.UNINITIALIZED
433 | val sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context)
434 | useSpeakerphone = sharedPreferences.getString(
435 | context.getString(R.string.pref_speakerphone_key),
436 | context.getString(R.string.pref_speakerphone_default)
437 | )
438 | Log.d(TAG, "useSpeakerphone: $useSpeakerphone")
439 | if ((useSpeakerphone == SPEAKERPHONE_FALSE)) {
440 | defaultAudioDevice = AudioDevice.EARPIECE
441 | } else {
442 | defaultAudioDevice = AudioDevice.SPEAKER_PHONE
443 | }
444 | Log.d(TAG, "defaultAudioDevice: $defaultAudioDevice")
445 | }
446 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/developerspace/webrtcsample/RTCClient.kt:
--------------------------------------------------------------------------------
1 | package com.developerspace.webrtcsample
2 |
3 | import android.app.Application
4 | import android.content.Context
5 | import android.util.Log
6 | import com.google.firebase.firestore.ktx.firestore
7 | import com.google.firebase.ktx.Firebase
8 | import org.webrtc.*
9 |
10 |
11 | class RTCClient(
12 | context: Application,
13 | observer: PeerConnection.Observer
14 | ) {
15 |
16 | companion object {
17 | private const val LOCAL_TRACK_ID = "local_track"
18 | private const val LOCAL_STREAM_ID = "local_track"
19 | }
20 |
21 | private val rootEglBase: EglBase = EglBase.create()
22 |
23 | private var localAudioTrack : AudioTrack? = null
24 | private var localVideoTrack : VideoTrack? = null
25 | val TAG = "RTCClient"
26 |
27 | var remoteSessionDescription : SessionDescription? = null
28 |
29 | val db = Firebase.firestore
30 |
31 | init {
32 | initPeerConnectionFactory(context)
33 | }
34 |
35 | private val iceServer = listOf(
36 | PeerConnection.IceServer.builder("stun:stun.l.google.com:19302")
37 | .createIceServer()
38 | )
39 |
40 | private val peerConnectionFactory by lazy { buildPeerConnectionFactory() }
41 | private val videoCapturer by lazy { getVideoCapturer(context) }
42 |
43 | private val audioSource by lazy { peerConnectionFactory.createAudioSource(MediaConstraints())}
44 | private val localVideoSource by lazy { peerConnectionFactory.createVideoSource(false) }
45 | private val peerConnection by lazy { buildPeerConnection(observer) }
46 |
47 | private fun initPeerConnectionFactory(context: Application) {
48 | val options = PeerConnectionFactory.InitializationOptions.builder(context)
49 | .setEnableInternalTracer(true)
50 | .setFieldTrials("WebRTC-H264HighProfile/Enabled/")
51 | .createInitializationOptions()
52 | PeerConnectionFactory.initialize(options)
53 | }
54 |
55 | private fun buildPeerConnectionFactory(): PeerConnectionFactory {
56 | return PeerConnectionFactory
57 | .builder()
58 | .setVideoDecoderFactory(DefaultVideoDecoderFactory(rootEglBase.eglBaseContext))
59 | .setVideoEncoderFactory(DefaultVideoEncoderFactory(rootEglBase.eglBaseContext, true, true))
60 | .setOptions(PeerConnectionFactory.Options().apply {
61 | disableEncryption = true
62 | disableNetworkMonitor = true
63 | })
64 | .createPeerConnectionFactory()
65 | }
66 |
67 | private fun buildPeerConnection(observer: PeerConnection.Observer) = peerConnectionFactory.createPeerConnection(
68 | iceServer,
69 | observer
70 | )
71 |
72 | private fun getVideoCapturer(context: Context) =
73 | Camera2Enumerator(context).run {
74 | deviceNames.find {
75 | isFrontFacing(it)
76 | }?.let {
77 | createCapturer(it, null)
78 | } ?: throw IllegalStateException()
79 | }
80 |
81 | fun initSurfaceView(view: SurfaceViewRenderer) = view.run {
82 | setMirror(true)
83 | setEnableHardwareScaler(true)
84 | init(rootEglBase.eglBaseContext, null)
85 | }
86 |
87 | fun startLocalVideoCapture(localVideoOutput: SurfaceViewRenderer) {
88 | val surfaceTextureHelper = SurfaceTextureHelper.create(Thread.currentThread().name, rootEglBase.eglBaseContext)
89 | (videoCapturer as VideoCapturer).initialize(surfaceTextureHelper, localVideoOutput.context, localVideoSource.capturerObserver)
90 | videoCapturer.startCapture(320, 240, 60)
91 | localAudioTrack = peerConnectionFactory.createAudioTrack(LOCAL_TRACK_ID + "_audio", audioSource);
92 | localVideoTrack = peerConnectionFactory.createVideoTrack(LOCAL_TRACK_ID, localVideoSource)
93 | localVideoTrack?.addSink(localVideoOutput)
94 | val localStream = peerConnectionFactory.createLocalMediaStream(LOCAL_STREAM_ID)
95 | localStream.addTrack(localVideoTrack)
96 | localStream.addTrack(localAudioTrack)
97 | peerConnection?.addStream(localStream)
98 | }
99 |
100 | private fun PeerConnection.call(sdpObserver: SdpObserver, meetingID: String) {
101 | val constraints = MediaConstraints().apply {
102 | mandatory.add(MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"))
103 | }
104 |
105 | createOffer(object : SdpObserver by sdpObserver {
106 | override fun onCreateSuccess(desc: SessionDescription?) {
107 | setLocalDescription(object : SdpObserver {
108 | override fun onSetFailure(p0: String?) {
109 | Log.e(TAG, "onSetFailure: $p0")
110 | }
111 |
112 | override fun onSetSuccess() {
113 | val offer = hashMapOf(
114 | "sdp" to desc?.description,
115 | "type" to desc?.type
116 | )
117 | db.collection("calls").document(meetingID)
118 | .set(offer)
119 | .addOnSuccessListener {
120 | Log.e(TAG, "DocumentSnapshot added")
121 | }
122 | .addOnFailureListener { e ->
123 | Log.e(TAG, "Error adding document", e)
124 | }
125 | Log.e(TAG, "onSetSuccess")
126 | }
127 |
128 | override fun onCreateSuccess(p0: SessionDescription?) {
129 | Log.e(TAG, "onCreateSuccess: Description $p0")
130 | }
131 |
132 | override fun onCreateFailure(p0: String?) {
133 | Log.e(TAG, "onCreateFailure: $p0")
134 | }
135 | }, desc)
136 | sdpObserver.onCreateSuccess(desc)
137 | }
138 |
139 | override fun onSetFailure(p0: String?) {
140 | Log.e(TAG, "onSetFailure: $p0")
141 | }
142 |
143 | override fun onCreateFailure(p0: String?) {
144 | Log.e(TAG, "onCreateFailure: $p0")
145 | }
146 | }, constraints)
147 | }
148 |
149 | private fun PeerConnection.answer(sdpObserver: SdpObserver, meetingID: String) {
150 | val constraints = MediaConstraints().apply {
151 | mandatory.add(MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"))
152 | }
153 | createAnswer(object : SdpObserver by sdpObserver {
154 | override fun onCreateSuccess(desc: SessionDescription?) {
155 | val answer = hashMapOf(
156 | "sdp" to desc?.description,
157 | "type" to desc?.type
158 | )
159 | db.collection("calls").document(meetingID)
160 | .set(answer)
161 | .addOnSuccessListener {
162 | Log.e(TAG, "DocumentSnapshot added")
163 | }
164 | .addOnFailureListener { e ->
165 | Log.e(TAG, "Error adding document", e)
166 | }
167 | setLocalDescription(object : SdpObserver {
168 | override fun onSetFailure(p0: String?) {
169 | Log.e(TAG, "onSetFailure: $p0")
170 | }
171 |
172 | override fun onSetSuccess() {
173 | Log.e(TAG, "onSetSuccess")
174 | }
175 |
176 | override fun onCreateSuccess(p0: SessionDescription?) {
177 | Log.e(TAG, "onCreateSuccess: Description $p0")
178 | }
179 |
180 | override fun onCreateFailure(p0: String?) {
181 | Log.e(TAG, "onCreateFailureLocal: $p0")
182 | }
183 | }, desc)
184 | sdpObserver.onCreateSuccess(desc)
185 | }
186 |
187 | override fun onCreateFailure(p0: String?) {
188 | Log.e(TAG, "onCreateFailureRemote: $p0")
189 | }
190 | }, constraints)
191 | }
192 |
193 | fun call(sdpObserver: SdpObserver, meetingID: String) = peerConnection?.call(sdpObserver, meetingID)
194 |
195 | fun answer(sdpObserver: SdpObserver, meetingID: String) = peerConnection?.answer(sdpObserver, meetingID)
196 |
197 | fun onRemoteSessionReceived(sessionDescription: SessionDescription) {
198 | remoteSessionDescription = sessionDescription
199 | peerConnection?.setRemoteDescription(object : SdpObserver {
200 | override fun onSetFailure(p0: String?) {
201 | Log.e(TAG, "onSetFailure: $p0")
202 | }
203 |
204 | override fun onSetSuccess() {
205 | Log.e(TAG, "onSetSuccessRemoteSession")
206 | }
207 |
208 | override fun onCreateSuccess(p0: SessionDescription?) {
209 | Log.e(TAG, "onCreateSuccessRemoteSession: Description $p0")
210 | }
211 |
212 | override fun onCreateFailure(p0: String?) {
213 | Log.e(TAG, "onCreateFailure")
214 | }
215 | }, sessionDescription)
216 |
217 | }
218 |
219 | fun addIceCandidate(iceCandidate: IceCandidate?) {
220 | peerConnection?.addIceCandidate(iceCandidate)
221 | }
222 |
223 | fun endCall(meetingID: String) {
224 | db.collection("calls").document(meetingID).collection("candidates")
225 | .get().addOnSuccessListener {
226 | val iceCandidateArray: MutableList<IceCandidate> = mutableListOf()
227 | for ( dataSnapshot in it) {
228 | if (dataSnapshot.contains("type") && dataSnapshot["type"]=="offerCandidate") {
229 | val offerCandidate = dataSnapshot
230 | iceCandidateArray.add(IceCandidate(offerCandidate["sdpMid"].toString(), Math.toIntExact(offerCandidate["sdpMLineIndex"] as Long), offerCandidate["sdp"].toString()))
231 | } else if (dataSnapshot.contains("type") && dataSnapshot["type"]=="answerCandidate") {
232 | val answerCandidate = dataSnapshot
233 | iceCandidateArray.add(IceCandidate(answerCandidate["sdpMid"].toString(), Math.toIntExact(answerCandidate["sdpMLineIndex"] as Long), answerCandidate["sdp"].toString()))
234 | }
235 | }
236 | peerConnection?.removeIceCandidates(iceCandidateArray.toTypedArray())
237 | }
238 | val endCall = hashMapOf(
239 | "type" to "END_CALL"
240 | )
241 | db.collection("calls").document(meetingID)
242 | .set(endCall)
243 | .addOnSuccessListener {
244 | Log.e(TAG, "DocumentSnapshot added")
245 | }
246 | .addOnFailureListener { e ->
247 | Log.e(TAG, "Error adding document", e)
248 | }
249 |
250 | peerConnection?.close()
251 | }
252 |
253 | fun enableVideo(videoEnabled: Boolean) {
254 | if (localVideoTrack !=null)
255 | localVideoTrack?.setEnabled(videoEnabled)
256 | }
257 |
258 | fun enableAudio(audioEnabled: Boolean) {
259 | if (localAudioTrack != null)
260 | localAudioTrack?.setEnabled(audioEnabled)
261 | }
262 | fun switchCamera() {
263 | videoCapturer.switchCamera(null)
264 | }
265 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/developerspace/webrtcsample/SignalingClient.kt:
--------------------------------------------------------------------------------
1 | package com.developerspace.webrtcsample
2 |
3 | import android.util.Log
4 | import com.google.firebase.firestore.ktx.firestore
5 | import com.google.firebase.ktx.Firebase
6 | import com.google.gson.Gson
7 | import io.ktor.util.*
8 | import kotlinx.coroutines.*
9 | import kotlinx.coroutines.channels.ConflatedBroadcastChannel
10 | import org.json.JSONObject
11 | import org.webrtc.IceCandidate
12 | import org.webrtc.SessionDescription
13 |
14 | @ExperimentalCoroutinesApi
15 | @KtorExperimentalAPI
16 | class SignalingClient(
17 | private val meetingID : String,
18 | private val listener: SignalingClientListener
19 | ) : CoroutineScope {
20 |
21 | companion object {
22 | private const val HOST_ADDRESS = "192.168.0.12"
23 | }
24 |
25 | var jsonObject : JSONObject?= null
26 |
27 | private val job = Job()
28 |
29 | val TAG = "SignallingClient"
30 |
31 | val db = Firebase.firestore
32 |
33 | private val gson = Gson()
34 |
35 | var SDPtype : String? = null
36 | override val coroutineContext = Dispatchers.IO + job
37 |
38 | // private val client = HttpClient(CIO) {
39 | // install(WebSockets)
40 | // install(JsonFeature) {
41 | // serializer = GsonSerializer()
42 | // }
43 | // }
44 |
45 | private val sendChannel = ConflatedBroadcastChannel<String>()
46 |
47 | init {
48 | connect()
49 | }
50 |
51 | private fun connect() = launch {
52 | db.enableNetwork().addOnSuccessListener {
53 | listener.onConnectionEstablished()
54 | }
55 | val sendData = sendChannel.offer("")
56 | sendData.let {
57 | Log.v(this@SignalingClient.javaClass.simpleName, "Sending: $it")
58 | // val data = hashMapOf(
59 | // "data" to it
60 | // )
61 | // db.collection("calls")
62 | // .add(data)
63 | // .addOnSuccessListener { documentReference ->
64 | // Log.e(TAG, "DocumentSnapshot added with ID: ${documentReference.id}")
65 | // }
66 | // .addOnFailureListener { e ->
67 | // Log.e(TAG, "Error adding document", e)
68 | // }
69 | }
70 | try {
71 | db.collection("calls")
72 | .document(meetingID)
73 | .addSnapshotListener { snapshot, e ->
74 |
75 | if (e != null) {
76 | Log.w(TAG, "listen:error", e)
77 | return@addSnapshotListener
78 | }
79 |
80 | if (snapshot != null && snapshot.exists()) {
81 | val data = snapshot.data
82 | if (data?.containsKey("type")!! &&
83 | data.getValue("type").toString() == "OFFER") {
84 | listener.onOfferReceived(SessionDescription(
85 | SessionDescription.Type.OFFER,data["sdp"].toString()))
86 | SDPtype = "Offer"
87 | } else if (data?.containsKey("type") &&
88 | data.getValue("type").toString() == "ANSWER") {
89 | listener.onAnswerReceived(SessionDescription(
90 | SessionDescription.Type.ANSWER,data["sdp"].toString()))
91 | SDPtype = "Answer"
92 | } else if (!Constants.isIntiatedNow && data.containsKey("type") &&
93 | data.getValue("type").toString() == "END_CALL") {
94 | listener.onCallEnded()
95 | SDPtype = "End Call"
96 |
97 | }
98 | Log.d(TAG, "Current data: ${snapshot.data}")
99 | } else {
100 | Log.d(TAG, "Current data: null")
101 | }
102 | }
103 | db.collection("calls").document(meetingID)
104 | .collection("candidates").addSnapshotListener{ querysnapshot,e->
105 | if (e != null) {
106 | Log.w(TAG, "listen:error", e)
107 | return@addSnapshotListener
108 | }
109 |
110 | if (querysnapshot != null && !querysnapshot.isEmpty) {
111 | for (dataSnapShot in querysnapshot) {
112 |
113 | val data = dataSnapShot.data
114 | if (SDPtype == "Offer" && data.containsKey("type") && data.get("type")=="offerCandidate") {
115 | listener.onIceCandidateReceived(
116 | IceCandidate(data["sdpMid"].toString(),
117 | Math.toIntExact(data["sdpMLineIndex"] as Long),
118 | data["sdpCandidate"].toString()))
119 | } else if (SDPtype == "Answer" && data.containsKey("type") && data.get("type")=="answerCandidate") {
120 | listener.onIceCandidateReceived(
121 | IceCandidate(data["sdpMid"].toString(),
122 | Math.toIntExact(data["sdpMLineIndex"] as Long),
123 | data["sdpCandidate"].toString()))
124 | }
125 | Log.e(TAG, "candidateQuery: $dataSnapShot" )
126 | }
127 | }
128 | }
129 | // db.collection("calls").document(meetingID)
130 | // .get()
131 | // .addOnSuccessListener { result ->
132 | // val data = result.data
133 | // if (data?.containsKey("type")!! && data.getValue("type").toString() == "OFFER") {
134 | // Log.e(TAG, "connect: OFFER - $data")
135 | // listener.onOfferReceived(SessionDescription(SessionDescription.Type.OFFER,data["sdp"].toString()))
136 | // } else if (data?.containsKey("type") && data.getValue("type").toString() == "ANSWER") {
137 | // Log.e(TAG, "connect: ANSWER - $data")
138 | // listener.onAnswerReceived(SessionDescription(SessionDescription.Type.ANSWER,data["sdp"].toString()))
139 | // }
140 | // }
141 | // .addOnFailureListener {
142 | // Log.e(TAG, "connect: $it")
143 | // }
144 |
145 | } catch (exception: Exception) {
146 | Log.e(TAG, "connectException: $exception")
147 |
148 | }
149 | }
150 |
151 | fun sendIceCandidate(candidate: IceCandidate?, isJoin: Boolean) = runBlocking {
152 | val type = when {
153 | isJoin -> "answerCandidate"
154 | else -> "offerCandidate"
155 | }
156 | val candidateConstant = hashMapOf(
157 | "serverUrl" to candidate?.serverUrl,
158 | "sdpMid" to candidate?.sdpMid,
159 | "sdpMLineIndex" to candidate?.sdpMLineIndex,
160 | "sdpCandidate" to candidate?.sdp,
161 | "type" to type
162 | )
163 | db.collection("calls")
164 | .document("$meetingID").collection("candidates").document(type)
165 | .set(candidateConstant as Map)
166 | .addOnSuccessListener {
167 | Log.e(TAG, "sendIceCandidate: Success" )
168 | }
169 | .addOnFailureListener {
170 | Log.e(TAG, "sendIceCandidate: Error $it" )
171 | }
172 | }
173 |
174 | fun destroy() {
175 | // client.close()
176 | job.complete()
177 | }
178 | }
179 |
--------------------------------------------------------------------------------
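The file above contains only the read side of the calls/{meetingID} signaling document (the snapshot listener) plus the ICE-candidate writer. The code that writes the offer/answer document itself lives elsewhere in the app and is not shown in this section, so the following is a rough, hypothetical sketch of what that write side has to produce for connect() to react, assuming nothing beyond the field names the listener reads ("type" and "sdp"):

package com.developerspace.webrtcsample

import com.google.firebase.firestore.FirebaseFirestore
import org.webrtc.SessionDescription

// Hypothetical helper, not a file in this repository: writes the SDP document
// that SignalingClient.connect() observes at calls/{meetingID}. The field names
// and the upper-case type strings mirror what the snapshot listener checks.
fun sendSdp(db: FirebaseFirestore, meetingID: String, sdp: SessionDescription) {
    val doc = hashMapOf(
        "type" to sdp.type.name,   // "OFFER" or "ANSWER"
        "sdp" to sdp.description
    )
    db.collection("calls")
        .document(meetingID)
        .set(doc)
}

Ending a call would follow the same pattern with "type" set to "END_CALL", the third value the listener recognizes.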
/app/src/main/java/com/developerspace/webrtcsample/SignalingClientListener.kt:
--------------------------------------------------------------------------------
1 | package com.developerspace.webrtcsample
2 |
3 | import org.webrtc.IceCandidate
4 | import org.webrtc.SessionDescription
5 |
6 | interface SignalingClientListener {
7 | fun onConnectionEstablished()
8 | fun onOfferReceived(description: SessionDescription)
9 | fun onAnswerReceived(description: SessionDescription)
10 | fun onIceCandidateReceived(iceCandidate: IceCandidate)
11 | fun onCallEnded()
12 | }
--------------------------------------------------------------------------------
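The interface above is the only contract between the Firestore signaling layer and the UI. As a minimal, illustrative sketch (not a file in this repository), an implementation that merely logs each callback looks like this; a real listener would forward the offer, answer, and candidates to the peer connection instead:

package com.developerspace.webrtcsample

import android.util.Log
import org.webrtc.IceCandidate
import org.webrtc.SessionDescription

// Illustrative only: logs each signaling event so the callback flow is visible.
class LoggingSignalingListener : SignalingClientListener {
    private val tag = "Signaling"

    override fun onConnectionEstablished() {
        Log.d(tag, "Firestore reachable; safe to send SDP and ICE candidates")
    }

    override fun onOfferReceived(description: SessionDescription) {
        Log.d(tag, "Offer received (${description.description.length} chars of SDP)")
    }

    override fun onAnswerReceived(description: SessionDescription) {
        Log.d(tag, "Answer received")
    }

    override fun onIceCandidateReceived(iceCandidate: IceCandidate) {
        Log.d(tag, "Remote candidate ${iceCandidate.sdpMid}:${iceCandidate.sdpMLineIndex}")
    }

    override fun onCallEnded() {
        Log.d(tag, "Remote peer ended the call")
    }
}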
/app/src/main/res/drawable-v24/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
[launcher icon foreground vector artwork]
--------------------------------------------------------------------------------
/app/src/main/res/drawable/circle_background.xml:
--------------------------------------------------------------------------------
[circular background shape drawable]
--------------------------------------------------------------------------------
/app/src/main/res/drawable/curve_background.xml:
--------------------------------------------------------------------------------
[curved background shape drawable]
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_baseline_call_end_24.xml:
--------------------------------------------------------------------------------
[Material "call end" vector icon, 24dp]
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_baseline_cameraswitch_24.xml:
--------------------------------------------------------------------------------
[Material "camera switch" vector icon, 24dp]
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_baseline_hearing_24.xml:
--------------------------------------------------------------------------------
[Material "hearing" vector icon, 24dp]
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_baseline_mic_24.xml:
--------------------------------------------------------------------------------
[Material "mic" vector icon, 24dp]
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_baseline_mic_off_24.xml:
--------------------------------------------------------------------------------
[Material "mic off" vector icon, 24dp]
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_baseline_speaker_up_24.xml:
--------------------------------------------------------------------------------
[Material "speaker up" vector icon, 24dp]
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_baseline_videocam_24.xml:
--------------------------------------------------------------------------------
[Material "videocam" vector icon, 24dp]
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_baseline_videocam_off_24.xml:
--------------------------------------------------------------------------------
[Material "videocam off" vector icon, 24dp]
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_launcher_background.xml:
--------------------------------------------------------------------------------
[launcher icon background vector artwork]
--------------------------------------------------------------------------------
/app/src/main/res/drawable/webrtc.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/app/src/main/res/drawable/webrtc.png
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
[activity_main layout XML]
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_start.xml:
--------------------------------------------------------------------------------
[activity_start layout XML]
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml:
--------------------------------------------------------------------------------
[adaptive launcher icon definition (background + foreground layers)]
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml:
--------------------------------------------------------------------------------
[round adaptive launcher icon definition (background + foreground layers)]
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/app/src/main/res/mipmap-hdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/app/src/main/res/mipmap-mdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/values-night/themes.xml:
--------------------------------------------------------------------------------
[night-mode app theme styles]
--------------------------------------------------------------------------------
/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | #FFBB86FC
4 | #FF6200EE
5 | #FF3700B3
6 | #FF03DAC5
7 | #FF018786
8 | #FF000000
9 | #FFFFFFFF
10 | #558CB8
11 |
--------------------------------------------------------------------------------
/app/src/main/res/values/ic_launcher_background.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <resources>
3 |     <color name="ic_launcher_background">#F8FFF4</color>
4 | </resources>
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | WebRTC Sample
3 | speakerphone_preference
4 | auto
5 |
--------------------------------------------------------------------------------
/app/src/main/res/values/themes.xml:
--------------------------------------------------------------------------------
[default app theme styles]
--------------------------------------------------------------------------------
/app/src/test/java/com/developerspace/webrtcsample/ExampleUnitTest.kt:
--------------------------------------------------------------------------------
1 | package com.developerspace.webrtcsample
2 |
3 | import org.junit.Test
4 |
5 | import org.junit.Assert.*
6 |
7 | /**
8 | * Example local unit test, which will execute on the development machine (host).
9 | *
10 | * See [testing documentation](http://d.android.com/tools/testing).
11 | */
12 | class ExampleUnitTest {
13 | @Test
14 | fun addition_isCorrect() {
15 | assertEquals(4, 2 + 2)
16 | }
17 | }
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 | buildscript {
3 | ext.kotlin_version = "1.4.31"
4 | ext.ktor_version = '1.1.4'
5 | repositories {
6 | google()
7 | jcenter()
8 | }
9 | dependencies {
10 | classpath "com.android.tools.build:gradle:4.1.2"
11 | classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
12 | classpath 'com.google.gms:google-services:4.3.5'
13 | // NOTE: Do not place your application dependencies here; they belong
14 | // in the individual module build.gradle files
15 | }
16 | }
17 |
18 | allprojects {
19 | repositories {
20 | google()
21 | jcenter()
22 | }
23 | }
24 |
25 | task clean(type: Delete) {
26 | delete rootProject.buildDir
27 | }
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. More details, visit
12 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 | # org.gradle.parallel=true
14 | # AndroidX package structure to make it clearer which packages are bundled with the
15 | # Android operating system, and which are packaged with your app's APK
16 | # https://developer.android.com/topic/libraries/support-library/androidx-rn
17 | android.useAndroidX=true
18 | # Automatically convert third-party libraries to use AndroidX
19 | android.enableJetifier=true
20 | # Kotlin code style for this project: "official" or "obsolete":
21 | kotlin.code.style=official
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Fri Apr 16 01:59:56 IST 2021
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-bin.zip
7 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Attempt to set APP_HOME
10 | # Resolve links: $0 may be a link
11 | PRG="$0"
12 | # Need this for relative symlinks.
13 | while [ -h "$PRG" ] ; do
14 | ls=`ls -ld "$PRG"`
15 | link=`expr "$ls" : '.*-> \(.*\)$'`
16 | if expr "$link" : '/.*' > /dev/null; then
17 | PRG="$link"
18 | else
19 | PRG=`dirname "$PRG"`"/$link"
20 | fi
21 | done
22 | SAVED="`pwd`"
23 | cd "`dirname \"$PRG\"`/" >/dev/null
24 | APP_HOME="`pwd -P`"
25 | cd "$SAVED" >/dev/null
26 |
27 | APP_NAME="Gradle"
28 | APP_BASE_NAME=`basename "$0"`
29 |
30 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
31 | DEFAULT_JVM_OPTS=""
32 |
33 | # Use the maximum available, or set MAX_FD != -1 to use that value.
34 | MAX_FD="maximum"
35 |
36 | warn () {
37 | echo "$*"
38 | }
39 |
40 | die () {
41 | echo
42 | echo "$*"
43 | echo
44 | exit 1
45 | }
46 |
47 | # OS specific support (must be 'true' or 'false').
48 | cygwin=false
49 | msys=false
50 | darwin=false
51 | nonstop=false
52 | case "`uname`" in
53 | CYGWIN* )
54 | cygwin=true
55 | ;;
56 | Darwin* )
57 | darwin=true
58 | ;;
59 | MINGW* )
60 | msys=true
61 | ;;
62 | NONSTOP* )
63 | nonstop=true
64 | ;;
65 | esac
66 |
67 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
68 |
69 | # Determine the Java command to use to start the JVM.
70 | if [ -n "$JAVA_HOME" ] ; then
71 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
72 | # IBM's JDK on AIX uses strange locations for the executables
73 | JAVACMD="$JAVA_HOME/jre/sh/java"
74 | else
75 | JAVACMD="$JAVA_HOME/bin/java"
76 | fi
77 | if [ ! -x "$JAVACMD" ] ; then
78 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
79 |
80 | Please set the JAVA_HOME variable in your environment to match the
81 | location of your Java installation."
82 | fi
83 | else
84 | JAVACMD="java"
85 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
86 |
87 | Please set the JAVA_HOME variable in your environment to match the
88 | location of your Java installation."
89 | fi
90 |
91 | # Increase the maximum file descriptors if we can.
92 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
93 | MAX_FD_LIMIT=`ulimit -H -n`
94 | if [ $? -eq 0 ] ; then
95 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
96 | MAX_FD="$MAX_FD_LIMIT"
97 | fi
98 | ulimit -n $MAX_FD
99 | if [ $? -ne 0 ] ; then
100 | warn "Could not set maximum file descriptor limit: $MAX_FD"
101 | fi
102 | else
103 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
104 | fi
105 | fi
106 |
107 | # For Darwin, add options to specify how the application appears in the dock
108 | if $darwin; then
109 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
110 | fi
111 |
112 | # For Cygwin, switch paths to Windows format before running java
113 | if $cygwin ; then
114 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
115 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
116 | JAVACMD=`cygpath --unix "$JAVACMD"`
117 |
118 | # We build the pattern for arguments to be converted via cygpath
119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
120 | SEP=""
121 | for dir in $ROOTDIRSRAW ; do
122 | ROOTDIRS="$ROOTDIRS$SEP$dir"
123 | SEP="|"
124 | done
125 | OURCYGPATTERN="(^($ROOTDIRS))"
126 | # Add a user-defined pattern to the cygpath arguments
127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
129 | fi
130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
131 | i=0
132 | for arg in "$@" ; do
133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
135 |
136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
138 | else
139 | eval `echo args$i`="\"$arg\""
140 | fi
141 | i=$((i+1))
142 | done
143 | case $i in
144 | (0) set -- ;;
145 | (1) set -- "$args0" ;;
146 | (2) set -- "$args0" "$args1" ;;
147 | (3) set -- "$args0" "$args1" "$args2" ;;
148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
154 | esac
155 | fi
156 |
157 | # Escape application args
158 | save () {
159 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
160 | echo " "
161 | }
162 | APP_ARGS=$(save "$@")
163 |
164 | # Collect all arguments for the java command, following the shell quoting and substitution rules
165 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
166 |
167 | # by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
168 | if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
169 | cd "$(dirname "$0")"
170 | fi
171 |
172 | exec "$JAVACMD" "$@"
173 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | set DIRNAME=%~dp0
12 | if "%DIRNAME%" == "" set DIRNAME=.
13 | set APP_BASE_NAME=%~n0
14 | set APP_HOME=%DIRNAME%
15 |
16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
17 | set DEFAULT_JVM_OPTS=
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 |
53 | :win9xME_args
54 | @rem Slurp the command line arguments.
55 | set CMD_LINE_ARGS=
56 | set _SKIP=2
57 |
58 | :win9xME_args_slurp
59 | if "x%~1" == "x" goto execute
60 |
61 | set CMD_LINE_ARGS=%*
62 |
63 | :execute
64 | @rem Setup the command line
65 |
66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
67 |
68 | @rem Execute Gradle
69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
70 |
71 | :end
72 | @rem End local scope for the variables with windows NT shell
73 | if "%ERRORLEVEL%"=="0" goto mainEnd
74 |
75 | :fail
76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
77 | rem the _cmd.exe /c_ return code!
78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
79 | exit /b 1
80 |
81 | :mainEnd
82 | if "%OS%"=="Windows_NT" endlocal
83 |
84 | :omega
85 |
--------------------------------------------------------------------------------
/images/offer-image.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/images/offer-image.png
--------------------------------------------------------------------------------
/images/offercandidate-sample.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/images/offercandidate-sample.PNG
--------------------------------------------------------------------------------
/images/sample-A.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/images/sample-A.png
--------------------------------------------------------------------------------
/images/screenshot-A.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developerspace-samples/WebRTC-Kotlin-Sample/e53667dd1728c8bfd389f9133eda6affa7add57d/images/screenshot-A.PNG
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':app'
2 | rootProject.name = "WebRTC Sample"
--------------------------------------------------------------------------------