├── app
│ ├── .gitignore
│ ├── src
│ │ ├── main
│ │ │ ├── res
│ │ │ │ ├── values
│ │ │ │ │ ├── strings.xml
│ │ │ │ │ ├── colors.xml
│ │ │ │ │ └── themes.xml
│ │ │ │ ├── drawable
│ │ │ │ │ ├── firebase.png
│ │ │ │ │ ├── ic_record.xml
│ │ │ │ │ ├── ic_camera_on.xml
│ │ │ │ │ ├── ic_video_call.xml
│ │ │ │ │ ├── ic_camera_off.xml
│ │ │ │ │ ├── ic_mic_on.xml
│ │ │ │ │ ├── ic_speaker.xml
│ │ │ │ │ ├── ic_screen_share.xml
│ │ │ │ │ ├── ic_switch_camera.xml
│ │ │ │ │ ├── ic_mic_off.xml
│ │ │ │ │ ├── ic_stop_screen_share.xml
│ │ │ │ │ ├── ic_call.xml
│ │ │ │ │ ├── ic_end_call.xml
│ │ │ │ │ ├── ic_ear.xml
│ │ │ │ │ ├── ic_launcher_foreground.xml
│ │ │ │ │ └── ic_launcher_background.xml
│ │ │ │ ├── mipmap-hdpi
│ │ │ │ │ ├── ic_launcher.webp
│ │ │ │ │ └── ic_launcher_round.webp
│ │ │ │ ├── mipmap-mdpi
│ │ │ │ │ ├── ic_launcher.webp
│ │ │ │ │ └── ic_launcher_round.webp
│ │ │ │ ├── mipmap-xhdpi
│ │ │ │ │ ├── ic_launcher.webp
│ │ │ │ │ └── ic_launcher_round.webp
│ │ │ │ ├── mipmap-xxhdpi
│ │ │ │ │ ├── ic_launcher.webp
│ │ │ │ │ └── ic_launcher_round.webp
│ │ │ │ ├── mipmap-xxxhdpi
│ │ │ │ │ ├── ic_launcher.webp
│ │ │ │ │ └── ic_launcher_round.webp
│ │ │ │ ├── mipmap-anydpi-v26
│ │ │ │ │ ├── ic_launcher.xml
│ │ │ │ │ └── ic_launcher_round.xml
│ │ │ │ ├── mipmap-anydpi-v33
│ │ │ │ │ └── ic_launcher.xml
│ │ │ │ ├── xml
│ │ │ │ │ ├── backup_rules.xml
│ │ │ │ │ └── data_extraction_rules.xml
│ │ │ │ ├── values-night
│ │ │ │ │ └── themes.xml
│ │ │ │ ├── drawable-v24
│ │ │ │ │ └── ic_launcher_foreground.xml
│ │ │ │ └── layout
│ │ │ │   ├── activity_login.xml
│ │ │ │   ├── activity_main.xml
│ │ │ │   ├── item_main_recycler_view.xml
│ │ │ │   └── activity_call.xml
│ │ │ ├── java
│ │ │ │ └── com
│ │ │ │   └── codewithkael
│ │ │ │     └── firebasevideocall
│ │ │ │       ├── utils
│ │ │ │       │ ├── UserStatus.kt
│ │ │ │       │ ├── MyApplication.kt
│ │ │ │       │ ├── FirebaseFieldNames.kt
│ │ │ │       │ ├── MyEventListener.kt
│ │ │ │       │ ├── DataModel.kt
│ │ │ │       │ ├── AppModule.kt
│ │ │ │       │ └── Extensions.kt
│ │ │ │       ├── service
│ │ │ │       │ ├── MainServiceActions.kt
│ │ │ │       │ ├── MainServiceReceiver.kt
│ │ │ │       │ ├── MainServiceRepository.kt
│ │ │ │       │ └── MainService.kt
│ │ │ │       ├── ui
│ │ │ │       │ ├── CloseActivity.kt
│ │ │ │       │ ├── LoginActivity.kt
│ │ │ │       │ ├── MainActivity.kt
│ │ │ │       │ └── CallActivity.kt
│ │ │ │       ├── webrtc
│ │ │ │       │ ├── MySdpObserver.kt
│ │ │ │       │ ├── MyPeerObserver.kt
│ │ │ │       │ ├── ProximitySensor.java
│ │ │ │       │ ├── WebRTCClient.kt
│ │ │ │       │ ├── BluetoothManager.java
│ │ │ │       │ └── RTCAudioManager.java
│ │ │ │       ├── adapters
│ │ │ │       │ └── MainRecyclerViewAdapter.kt
│ │ │ │       ├── firebaseClient
│ │ │ │       │ └── FirebaseClient.kt
│ │ │ │       └── repository
│ │ │ │         └── MainRepository.kt
│ │ │ └── AndroidManifest.xml
│ │ ├── test
│ │ │ └── java
│ │ │   └── com
│ │ │     └── codewithkael
│ │ │       └── firebasevideocall
│ │ │         └── ExampleUnitTest.kt
│ │ └── androidTest
│ │   └── java
│ │     └── com
│ │       └── codewithkael
│ │         └── firebasevideocall
│ │           └── ExampleInstrumentedTest.kt
│ ├── proguard-rules.pro
│ ├── google-services.json
│ └── build.gradle
├── .idea
│ ├── .gitignore
│ ├── compiler.xml
│ ├── vcs.xml
│ ├── misc.xml
│ ├── gradle.xml
│ └── deploymentTargetDropDown.xml
├── .gitattributes
├── gradle
│ └── wrapper
│   ├── gradle-wrapper.jar
│   └── gradle-wrapper.properties
├── README.md
├── .gitignore
├── settings.gradle
├── gradle.properties
├── gradlew.bat
└── gradlew
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
--------------------------------------------------------------------------------
/.idea/.gitignore:
--------------------------------------------------------------------------------
1 | # Default ignored files
2 | /shelf/
3 | /workspace.xml
4 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | FirebaseVideoCall
3 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithkael/FirebaseWebRTCVideoCall/HEAD/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/app/src/main/res/drawable/firebase.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithkael/FirebaseWebRTCVideoCall/HEAD/app/src/main/res/drawable/firebase.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithkael/FirebaseWebRTCVideoCall/HEAD/app/src/main/res/mipmap-hdpi/ic_launcher.webp
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithkael/FirebaseWebRTCVideoCall/HEAD/app/src/main/res/mipmap-mdpi/ic_launcher.webp
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithkael/FirebaseWebRTCVideoCall/HEAD/app/src/main/res/mipmap-xhdpi/ic_launcher.webp
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithkael/FirebaseWebRTCVideoCall/HEAD/app/src/main/res/mipmap-xxhdpi/ic_launcher.webp
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithkael/FirebaseWebRTCVideoCall/HEAD/app/src/main/res/mipmap-xxxhdpi/ic_launcher.webp
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher_round.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithkael/FirebaseWebRTCVideoCall/HEAD/app/src/main/res/mipmap-hdpi/ic_launcher_round.webp
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher_round.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithkael/FirebaseWebRTCVideoCall/HEAD/app/src/main/res/mipmap-mdpi/ic_launcher_round.webp
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher_round.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithkael/FirebaseWebRTCVideoCall/HEAD/app/src/main/res/mipmap-xhdpi/ic_launcher_round.webp
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithkael/FirebaseWebRTCVideoCall/HEAD/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.webp
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithkael/FirebaseWebRTCVideoCall/HEAD/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.webp
--------------------------------------------------------------------------------
/app/src/main/java/com/codewithkael/firebasevideocall/utils/UserStatus.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall.utils
2 |
3 | enum class UserStatus {
4 | ONLINE,OFFLINE,IN_CALL
5 | }
--------------------------------------------------------------------------------
/.idea/compiler.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # FirebaseWebRTCVideoCall
2 | An implementation of WebRTC video calling that uses Firebase (Realtime Database) as the signaling server. Follow my YouTube channel for the full course covering this source code.
3 |
4 | My channel is CodeWithKael:
5 | www.youtube.com/@CodeWithKael
6 |
--------------------------------------------------------------------------------
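The "Firebase signaling server" mentioned above is simply the Realtime Database: FirebaseClient.kt (included later in this dump) keys each user node by username directly under the database root and delivers signaling events by overwriting the target user's latest_event child with a Gson-serialized DataModel. Below is a minimal sketch of that flow using only classes from this repository; the usernames and the wrapper function are made up for illustration.

import com.codewithkael.firebasevideocall.utils.DataModel
import com.codewithkael.firebasevideocall.utils.DataModelType
import com.codewithkael.firebasevideocall.utils.FirebaseFieldNames
import com.google.firebase.database.FirebaseDatabase
import com.google.gson.Gson

// Sketch only: send a video-call request from "alice" to "bob" the same way
// FirebaseClient.sendMessageToOtherClient() does.
fun sendCallRequestSketch() {
    val dbRef = FirebaseDatabase.getInstance().reference
    val event = DataModel(sender = "alice", target = "bob", type = DataModelType.StartVideoCall)
    dbRef.child(event.target).child(FirebaseFieldNames.LATEST_EVENT)
        .setValue(Gson().toJson(event))
        .addOnCompleteListener { /* bob's subscribeForLatestEvent() listener fires */ }
}
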
/app/src/main/java/com/codewithkael/firebasevideocall/utils/MyApplication.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall.utils
2 |
3 | import android.app.Application
4 | import dagger.hilt.android.HiltAndroidApp
5 |
6 | @HiltAndroidApp
7 | class MyApplication : Application()
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/app/src/main/java/com/codewithkael/firebasevideocall/utils/FirebaseFieldNames.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall.utils
2 |
3 | object FirebaseFieldNames {
4 | const val STATUS = "status"
5 | const val PASSWORD = "password"
6 | const val LATEST_EVENT = "latest_event"
7 | }
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Wed Aug 02 12:10:21 IRDT 2023
2 | distributionBase=GRADLE_USER_HOME
3 | distributionUrl=https\://services.gradle.org/distributions/gradle-7.5-bin.zip
4 | distributionPath=wrapper/dists
5 | zipStorePath=wrapper/dists
6 | zipStoreBase=GRADLE_USER_HOME
7 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | /local.properties
4 | /.idea/caches
5 | /.idea/libraries
6 | /.idea/modules.xml
7 | /.idea/workspace.xml
8 | /.idea/navEditor.xml
9 | /.idea/assetWizardSettings.xml
10 | .DS_Store
11 | /build
12 | /captures
13 | .externalNativeBuild
14 | .cxx
15 | local.properties
16 |
--------------------------------------------------------------------------------
/app/src/main/java/com/codewithkael/firebasevideocall/service/MainServiceActions.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall.service
2 |
3 | enum class MainServiceActions {
4 | START_SERVICE,SETUP_VIEWS,END_CALL,SWITCH_CAMERA,TOGGLE_AUDIO,TOGGLE_VIDEO,TOGGLE_AUDIO_DEVICE,
5 | TOGGLE_SCREEN_SHARE,STOP_SERVICE
6 | }
--------------------------------------------------------------------------------
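These names travel as Intent actions: MainServiceRepository (later in this dump) stamps each Intent with MainServiceActions.X.name, and MainService.kt (part of the project but not included in this excerpt) reacts to them. A hedged sketch of the dispatch side follows; handleActionSketch and the branch bodies are assumptions, not code copied from the real MainService.

import android.content.Intent
import com.codewithkael.firebasevideocall.service.MainServiceActions

// Illustrative only: convert the Intent action string back to the enum and branch on it.
// Assumes the action was set from MainServiceActions.name, as MainServiceRepository does.
fun handleActionSketch(intent: Intent?) {
    val action = intent?.action?.let { MainServiceActions.valueOf(it) } ?: return
    when (action) {
        MainServiceActions.START_SERVICE -> { /* build the WebRTC client, go foreground */ }
        MainServiceActions.END_CALL -> { /* tear down the current peer connection */ }
        MainServiceActions.STOP_SERVICE -> { /* stopSelf() */ }
        else -> { /* SWITCH_CAMERA, TOGGLE_AUDIO, ... */ }
    }
}
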
/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_record.xml:
--------------------------------------------------------------------------------
1 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v33/ic_launcher.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/java/com/codewithkael/firebasevideocall/ui/CloseActivity.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall.ui
2 |
3 | import android.os.Bundle
4 | import androidx.appcompat.app.AppCompatActivity
5 |
6 | class CloseActivity : AppCompatActivity() {
7 | override fun onCreate(savedInstanceState: Bundle?) {
8 | super.onCreate(savedInstanceState)
9 | finishAffinity()
10 | }
11 | }
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | pluginManagement {
2 | repositories {
3 | google()
4 | mavenCentral()
5 | gradlePluginPortal()
6 | }
7 | }
8 | dependencyResolutionManagement {
9 | repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS)
10 | repositories {
11 | google()
12 | mavenCentral()
13 | }
14 | }
15 | rootProject.name = "FirebaseVideoCall"
16 | include ':app'
17 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_camera_on.xml:
--------------------------------------------------------------------------------
1 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_video_call.xml:
--------------------------------------------------------------------------------
1 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/app/src/test/java/com/codewithkael/firebasevideocall/ExampleUnitTest.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall
2 |
3 | import org.junit.Test
4 |
5 | import org.junit.Assert.*
6 |
7 | /**
8 | * Example local unit test, which will execute on the development machine (host).
9 | *
10 | * See [testing documentation](http://d.android.com/tools/testing).
11 | */
12 | class ExampleUnitTest {
13 | @Test
14 | fun addition_isCorrect() {
15 | assertEquals(4, 2 + 2)
16 | }
17 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/codewithkael/firebasevideocall/utils/MyEventListener.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall.utils
2 |
3 | import com.google.firebase.database.DataSnapshot
4 | import com.google.firebase.database.DatabaseError
5 | import com.google.firebase.database.ValueEventListener
6 |
7 | open class MyEventListener : ValueEventListener {
8 | override fun onDataChange(snapshot: DataSnapshot) {
9 |
10 | }
11 |
12 | override fun onCancelled(error: DatabaseError) {
13 | }
14 | }
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_camera_off.xml:
--------------------------------------------------------------------------------
1 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/app/src/main/java/com/codewithkael/firebasevideocall/webrtc/MySdpObserver.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall.webrtc
2 |
3 | import org.webrtc.SdpObserver
4 | import org.webrtc.SessionDescription
5 |
6 | open class MySdpObserver : SdpObserver {
7 | override fun onCreateSuccess(desc: SessionDescription?) {
8 |
9 | }
10 |
11 | override fun onSetSuccess() {
12 | }
13 |
14 | override fun onCreateFailure(p0: String?) {
15 | }
16 |
17 | override fun onSetFailure(p0: String?) {
18 | }
19 | }
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_mic_on.xml:
--------------------------------------------------------------------------------
1 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_speaker.xml:
--------------------------------------------------------------------------------
1 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/xml/backup_rules.xml:
--------------------------------------------------------------------------------
1 |
8 |
9 |
13 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_screen_share.xml:
--------------------------------------------------------------------------------
1 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/java/com/codewithkael/firebasevideocall/utils/DataModel.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall.utils
2 |
3 | enum class DataModelType {
4 | StartAudioCall,StartVideoCall,Offer,Answer,IceCandidates,EndCall
5 | }
6 | data class DataModel(
7 | val sender:String?=null,
8 | val target:String,
9 | val type:DataModelType,
10 | val data:String?=null,
11 | val timeStamp:Long = System.currentTimeMillis()
12 | )
13 |
14 |
15 | fun DataModel.isValid(): Boolean {
16 | return System.currentTimeMillis() - this.timeStamp < 60000
17 | }
--------------------------------------------------------------------------------
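isValid() is a freshness guard: because each user has only a single latest_event node, an old event left behind in the database must not be mistaken for a live one, so anything older than 60 seconds is ignored. Below is a small sketch of how the consuming side (the repository/service layer, not included in this excerpt) can apply it after deserializing a snapshot; parseIncomingEventSketch is a hypothetical helper.

import com.codewithkael.firebasevideocall.utils.DataModel
import com.codewithkael.firebasevideocall.utils.isValid
import com.google.gson.Gson

// Sketch: parse a latest_event JSON payload the way FirebaseClient does (with Gson),
// then drop it unless it was produced within the last 60 seconds.
fun parseIncomingEventSketch(json: String): DataModel? {
    val event = runCatching { Gson().fromJson(json, DataModel::class.java) }.getOrNull()
    return event?.takeIf { it.isValid() }
}
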
/app/src/main/res/drawable/ic_switch_camera.xml:
--------------------------------------------------------------------------------
1 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/app/src/main/res/xml/data_extraction_rules.xml:
--------------------------------------------------------------------------------
1 |
6 |
7 |
8 |
12 |
13 |
19 |
--------------------------------------------------------------------------------
/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | #FFBB86FC
4 | #FF6200EE
5 | #FF3700B3
6 | #FF03DAC5
7 | #FF018786
8 | #FF000000
9 | #FFFFFFFF
10 | #57FFFFFF
11 | #57000000
12 | #3AAE09
13 | #EAE78F
14 | #E82039
15 |
16 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_mic_off.xml:
--------------------------------------------------------------------------------
1 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_stop_screen_share.xml:
--------------------------------------------------------------------------------
1 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_call.xml:
--------------------------------------------------------------------------------
1 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_end_call.xml:
--------------------------------------------------------------------------------
1 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
--------------------------------------------------------------------------------
/app/src/androidTest/java/com/codewithkael/firebasevideocall/ExampleInstrumentedTest.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall
2 |
3 | import androidx.test.platform.app.InstrumentationRegistry
4 | import androidx.test.ext.junit.runners.AndroidJUnit4
5 |
6 | import org.junit.Test
7 | import org.junit.runner.RunWith
8 |
9 | import org.junit.Assert.*
10 |
11 | /**
12 | * Instrumented test, which will execute on an Android device.
13 | *
14 | * See [testing documentation](http://d.android.com/tools/testing).
15 | */
16 | @RunWith(AndroidJUnit4::class)
17 | class ExampleInstrumentedTest {
18 | @Test
19 | fun useAppContext() {
20 | // Context of the app under test.
21 | val appContext = InstrumentationRegistry.getInstrumentation().targetContext
22 | assertEquals("com.codewithkael.firebasevideocall", appContext.packageName)
23 | }
24 | }
--------------------------------------------------------------------------------
/.idea/gradle.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
19 |
20 |
--------------------------------------------------------------------------------
/app/src/main/java/com/codewithkael/firebasevideocall/service/MainServiceReceiver.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall.service
2 |
3 | import android.content.BroadcastReceiver
4 | import android.content.Context
5 | import android.content.Intent
6 | import com.codewithkael.firebasevideocall.ui.CloseActivity
7 | import dagger.hilt.android.AndroidEntryPoint
8 | import javax.inject.Inject
9 |
10 | @AndroidEntryPoint
11 | class MainServiceReceiver : BroadcastReceiver() {
12 |
13 | @Inject lateinit var serviceRepository: MainServiceRepository
14 | override fun onReceive(context: Context?, intent: Intent?) {
15 | if (intent?.action == "ACTION_EXIT"){
16 | //we want to exit the whole application
17 | serviceRepository.stopService()
18 | context?.startActivity(Intent(context,CloseActivity::class.java))
19 |
20 | }
21 |
22 | }
23 | }
--------------------------------------------------------------------------------
/app/src/main/res/values/themes.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
16 |
--------------------------------------------------------------------------------
/app/src/main/res/values-night/themes.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
16 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_ear.xml:
--------------------------------------------------------------------------------
1 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/java/com/codewithkael/firebasevideocall/utils/AppModule.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall.utils
2 |
3 | import android.content.Context
4 | import com.google.firebase.database.DatabaseReference
5 | import com.google.firebase.database.FirebaseDatabase
6 | import com.google.gson.Gson
7 | import dagger.Module
8 | import dagger.Provides
9 | import dagger.hilt.InstallIn
10 | import dagger.hilt.android.qualifiers.ApplicationContext
11 | import dagger.hilt.components.SingletonComponent
12 |
13 | @Module
14 | @InstallIn(SingletonComponent::class)
15 | class AppModule {
16 |
17 | @Provides
18 | fun provideContext(@ApplicationContext context:Context) : Context = context.applicationContext
19 |
20 | @Provides
21 | fun provideGson():Gson = Gson()
22 |
23 | @Provides
24 | fun provideDataBaseInstance():FirebaseDatabase = FirebaseDatabase.getInstance()
25 |
26 | @Provides
27 | fun provideDatabaseReference(db:FirebaseDatabase): DatabaseReference = db.reference
28 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/codewithkael/firebasevideocall/utils/Extensions.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall.utils
2 |
3 | import android.widget.Toast
4 | import androidx.appcompat.app.AppCompatActivity
5 | import com.permissionx.guolindev.PermissionX
6 |
7 | fun AppCompatActivity.getCameraAndMicPermission(success:()->Unit){
8 | PermissionX.init(this)
9 | .permissions(android.Manifest.permission.CAMERA,android.Manifest.permission.RECORD_AUDIO)
10 | .request{allGranted,_,_ ->
11 |
12 | if (allGranted){
13 | success()
14 | } else{
15 | Toast.makeText(this, "camera and mic permission is required", Toast.LENGTH_SHORT)
16 | .show()
17 | }
18 | }
19 | }
20 |
21 | fun Int.convertToHumanTime() : String{
22 | val seconds = this%60
23 | val minutes = this/60
24 | val secondsString = if (seconds<10) "0$seconds" else "$seconds"
25 | val minutesString = if (minutes < 10) "0$minutes" else "$minutes"
26 | return "$minutesString:$secondsString"
27 | }
--------------------------------------------------------------------------------
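convertToHumanTime() turns an elapsed second count into a zero-padded MM:SS string, presumably for the in-call timer. A quick worked example:

import com.codewithkael.firebasevideocall.utils.convertToHumanTime

fun main() {
    println(125.convertToHumanTime()) // 125 s = 2 min 5 s -> "02:05"
    println(7.convertToHumanTime())   // values under a minute are padded too -> "00:07"
}
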
/.idea/deploymentTargetDropDown.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/app/src/main/java/com/codewithkael/firebasevideocall/webrtc/MyPeerObserver.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall.webrtc
2 |
3 | import org.webrtc.*
4 |
5 | open class MyPeerObserver : PeerConnection.Observer {
6 | override fun onSignalingChange(p0: PeerConnection.SignalingState?) {
7 |
8 | }
9 |
10 | override fun onIceConnectionChange(p0: PeerConnection.IceConnectionState?) {
11 | }
12 |
13 | override fun onIceConnectionReceivingChange(p0: Boolean) {
14 | }
15 |
16 | override fun onIceGatheringChange(p0: PeerConnection.IceGatheringState?) {
17 | }
18 |
19 | override fun onIceCandidate(p0: IceCandidate?) {
20 | }
21 |
22 | override fun onIceCandidatesRemoved(p0: Array<out IceCandidate>?) {
23 | }
24 |
25 | override fun onAddStream(p0: MediaStream?) {
26 | }
27 |
28 | override fun onRemoveStream(p0: MediaStream?) {
29 | }
30 |
31 | override fun onDataChannel(p0: DataChannel?) {
32 | }
33 |
34 | override fun onRenegotiationNeeded() {
35 | }
36 |
37 | override fun onAddTrack(p0: RtpReceiver?, p1: Array<out MediaStream>?) {
38 | }
39 | }
--------------------------------------------------------------------------------
/app/google-services.json:
--------------------------------------------------------------------------------
1 | {
2 | "project_info": {
3 | "project_number": "12957448847",
4 | "firebase_url": "https://fir-videocall-d1cf8-default-rtdb.europe-west1.firebasedatabase.app",
5 | "project_id": "fir-videocall-d1cf8",
6 | "storage_bucket": "fir-videocall-d1cf8.appspot.com"
7 | },
8 | "client": [
9 | {
10 | "client_info": {
11 | "mobilesdk_app_id": "1:12957448847:android:abea1125f53b8d133b0efa",
12 | "android_client_info": {
13 | "package_name": "com.codewithkael.firebasevideocall"
14 | }
15 | },
16 | "oauth_client": [
17 | {
18 | "client_id": "12957448847-ao0slpr11b5pcbb589q9jsujt13otk2r.apps.googleusercontent.com",
19 | "client_type": 3
20 | }
21 | ],
22 | "api_key": [
23 | {
24 | "current_key": "AIzaSyCcxKqsrfEZfUJuPu6yMpLiHgYmpm68xHE"
25 | }
26 | ],
27 | "services": {
28 | "appinvite_service": {
29 | "other_platform_oauth_client": [
30 | {
31 | "client_id": "12957448847-ao0slpr11b5pcbb589q9jsujt13otk2r.apps.googleusercontent.com",
32 | "client_type": 3
33 | }
34 | ]
35 | }
36 | }
37 | }
38 | ],
39 | "configuration_version": "1"
40 | }
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. More details, visit
12 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 | # org.gradle.parallel=true
14 | # AndroidX package structure to make it clearer which packages are bundled with the
15 | # Android operating system, and which are packaged with your app's APK
16 | # https://developer.android.com/topic/libraries/support-library/androidx-rn
17 | android.useAndroidX=true
18 | # Kotlin code style for this project: "official" or "obsolete":
19 | kotlin.code.style=official
20 | # Enables namespacing of each library's R class so that its R class includes only the
21 | # resources declared in the library itself and none from the library's dependencies,
22 | # thereby reducing the size of the R class for that library
23 | android.nonTransitiveRClass=true
--------------------------------------------------------------------------------
/app/src/main/java/com/codewithkael/firebasevideocall/ui/LoginActivity.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall.ui
2 |
3 | import android.content.Intent
4 | import androidx.appcompat.app.AppCompatActivity
5 | import android.os.Bundle
6 | import android.widget.Toast
7 | import com.codewithkael.firebasevideocall.databinding.ActivityLoginBinding
8 | import com.codewithkael.firebasevideocall.repository.MainRepository
9 | import dagger.hilt.android.AndroidEntryPoint
10 | import javax.inject.Inject
11 |
12 |
13 | @AndroidEntryPoint
14 | class LoginActivity : AppCompatActivity() {
15 |
16 | private lateinit var views:ActivityLoginBinding
17 | @Inject lateinit var mainRepository: MainRepository
18 |
19 | override fun onCreate(savedInstanceState: Bundle?) {
20 | super.onCreate(savedInstanceState)
21 | views = ActivityLoginBinding.inflate(layoutInflater)
22 | setContentView(views.root)
23 | init()
24 | }
25 |
26 |
27 | private fun init(){
28 | views.apply {
29 | btn.setOnClickListener {
30 | mainRepository.login(
31 | usernameEt.text.toString(),passwordEt.text.toString()
32 | ){ isDone, reason ->
33 | if (!isDone){
34 | Toast.makeText(this@LoginActivity, reason, Toast.LENGTH_SHORT).show()
35 | }else{
36 | //start moving to our main activity
37 | startActivity(Intent(this@LoginActivity, MainActivity::class.java).apply {
38 | putExtra("username",usernameEt.text.toString())
39 | })
40 | }
41 | }
42 | }
43 | }
44 | }
45 | }
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
1 |
7 |
8 |
9 |
15 |
18 |
21 |
22 |
23 |
24 |
30 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable-v24/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
1 |
7 |
8 |
9 |
15 |
18 |
21 |
22 |
23 |
24 |
30 |
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 |
7 |
8 |
10 |
11 |
21 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
32 |
34 |
36 |
37 |
39 |
40 |
41 |
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | plugins {
2 | id 'com.android.application'
3 | id 'org.jetbrains.kotlin.android'
4 | id 'com.google.gms.google-services'
5 | id 'kotlin-kapt'
6 | id 'com.google.dagger.hilt.android'
7 | }
8 |
9 | android {
10 | namespace 'com.codewithkael.firebasevideocall'
11 | compileSdk 33
12 |
13 | defaultConfig {
14 | applicationId "com.codewithkael.firebasevideocall"
15 | minSdk 24
16 | targetSdk 33
17 | versionCode 1
18 | versionName "1.0"
19 |
20 | testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
21 | }
22 |
23 | buildTypes {
24 | release {
25 | minifyEnabled false
26 | proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
27 | }
28 | }
29 | compileOptions {
30 | sourceCompatibility JavaVersion.VERSION_1_8
31 | targetCompatibility JavaVersion.VERSION_1_8
32 | }
33 | kotlinOptions {
34 | jvmTarget = '1.8'
35 | }
36 | viewBinding{
37 | enabled=true
38 | }
39 | }
40 |
41 | dependencies {
42 |
43 | implementation 'androidx.core:core-ktx:1.7.0'
44 | implementation 'androidx.appcompat:appcompat:1.6.1'
45 | implementation 'com.google.android.material:material:1.9.0'
46 | implementation 'androidx.constraintlayout:constraintlayout:2.1.4'
47 | implementation platform('com.google.firebase:firebase-bom:32.2.0')
48 | implementation 'com.google.firebase:firebase-database-ktx:20.2.2'
49 | implementation 'com.google.code.gson:gson:2.10.1'
50 | implementation 'com.mesibo.api:webrtc:1.0.5'
51 | implementation "com.google.dagger:hilt-android:2.44"
52 | kapt "com.google.dagger:hilt-compiler:2.44"
53 | implementation 'com.guolindev.permissionx:permissionx:1.6.1'
54 |
55 | testImplementation 'junit:junit:4.13.2'
56 | androidTestImplementation 'androidx.test.ext:junit:1.1.5'
57 | androidTestImplementation 'androidx.test.espresso:espresso-core:3.5.1'
58 | }
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_login.xml:
--------------------------------------------------------------------------------
1 |
2 |
8 |
9 |
16 |
25 |
34 |
42 |
43 |
53 |
54 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
16 |
28 |
38 |
48 |
49 |
50 |
58 |
59 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @rem
2 | @rem Copyright 2015 the original author or authors.
3 | @rem
4 | @rem Licensed under the Apache License, Version 2.0 (the "License");
5 | @rem you may not use this file except in compliance with the License.
6 | @rem You may obtain a copy of the License at
7 | @rem
8 | @rem https://www.apache.org/licenses/LICENSE-2.0
9 | @rem
10 | @rem Unless required by applicable law or agreed to in writing, software
11 | @rem distributed under the License is distributed on an "AS IS" BASIS,
12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | @rem See the License for the specific language governing permissions and
14 | @rem limitations under the License.
15 | @rem
16 |
17 | @if "%DEBUG%" == "" @echo off
18 | @rem ##########################################################################
19 | @rem
20 | @rem Gradle startup script for Windows
21 | @rem
22 | @rem ##########################################################################
23 |
24 | @rem Set local scope for the variables with windows NT shell
25 | if "%OS%"=="Windows_NT" setlocal
26 |
27 | set DIRNAME=%~dp0
28 | if "%DIRNAME%" == "" set DIRNAME=.
29 | set APP_BASE_NAME=%~n0
30 | set APP_HOME=%DIRNAME%
31 |
32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter.
33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
34 |
35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
37 |
38 | @rem Find java.exe
39 | if defined JAVA_HOME goto findJavaFromJavaHome
40 |
41 | set JAVA_EXE=java.exe
42 | %JAVA_EXE% -version >NUL 2>&1
43 | if "%ERRORLEVEL%" == "0" goto execute
44 |
45 | echo.
46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
47 | echo.
48 | echo Please set the JAVA_HOME variable in your environment to match the
49 | echo location of your Java installation.
50 |
51 | goto fail
52 |
53 | :findJavaFromJavaHome
54 | set JAVA_HOME=%JAVA_HOME:"=%
55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
56 |
57 | if exist "%JAVA_EXE%" goto execute
58 |
59 | echo.
60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
61 | echo.
62 | echo Please set the JAVA_HOME variable in your environment to match the
63 | echo location of your Java installation.
64 |
65 | goto fail
66 |
67 | :execute
68 | @rem Setup the command line
69 |
70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
71 |
72 |
73 | @rem Execute Gradle
74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
75 |
76 | :end
77 | @rem End local scope for the variables with windows NT shell
78 | if "%ERRORLEVEL%"=="0" goto mainEnd
79 |
80 | :fail
81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
82 | rem the _cmd.exe /c_ return code!
83 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
84 | exit /b 1
85 |
86 | :mainEnd
87 | if "%OS%"=="Windows_NT" endlocal
88 |
89 | :omega
90 |
--------------------------------------------------------------------------------
/app/src/main/java/com/codewithkael/firebasevideocall/service/MainServiceRepository.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall.service
2 |
3 | import android.content.Context
4 | import android.content.Intent
5 | import android.os.Build
6 | import javax.inject.Inject
7 |
8 | class MainServiceRepository @Inject constructor(
9 | private val context: Context
10 | ) {
11 |
12 | fun startService(username:String){
13 | Thread{
14 | val intent = Intent(context, MainService::class.java)
15 | intent.putExtra("username",username)
16 | intent.action = MainServiceActions.START_SERVICE.name
17 | startServiceIntent(intent)
18 | }.start()
19 | }
20 |
21 | private fun startServiceIntent(intent: Intent){
22 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O){
23 | context.startForegroundService(intent)
24 | }else{
25 | context.startService(intent)
26 | }
27 | }
28 |
29 | fun setupViews(videoCall: Boolean, caller: Boolean, target: String) {
30 | val intent = Intent(context,MainService::class.java)
31 | intent.apply {
32 | action = MainServiceActions.SETUP_VIEWS.name
33 | putExtra("isVideoCall",videoCall)
34 | putExtra("target",target)
35 | putExtra("isCaller",caller)
36 | }
37 | startServiceIntent(intent)
38 | }
39 |
40 | fun sendEndCall() {
41 | val intent = Intent(context,MainService::class.java)
42 | intent.action = MainServiceActions.END_CALL.name
43 | startServiceIntent(intent)
44 | }
45 |
46 | fun switchCamera() {
47 | val intent = Intent(context,MainService::class.java)
48 | intent.action = MainServiceActions.SWITCH_CAMERA.name
49 | startServiceIntent(intent)
50 | }
51 |
52 | fun toggleAudio(shouldBeMuted: Boolean) {
53 | val intent = Intent(context, MainService::class.java)
54 | intent.action = MainServiceActions.TOGGLE_AUDIO.name
55 | intent.putExtra("shouldBeMuted",shouldBeMuted)
56 | startServiceIntent(intent)
57 | }
58 |
59 | fun toggleVideo(shouldBeMuted: Boolean) {
60 | val intent = Intent(context, MainService::class.java)
61 | intent.action = MainServiceActions.TOGGLE_VIDEO.name
62 | intent.putExtra("shouldBeMuted",shouldBeMuted)
63 | startServiceIntent(intent)
64 | }
65 |
66 | fun toggleAudioDevice(type: String) {
67 | val intent = Intent(context, MainService::class.java)
68 | intent.action = MainServiceActions.TOGGLE_AUDIO_DEVICE.name
69 | intent.putExtra("type",type)
70 | startServiceIntent(intent)
71 | }
72 |
73 | fun toggleScreenShare(isStarting: Boolean) {
74 | val intent = Intent(context,MainService::class.java)
75 | intent.action = MainServiceActions.TOGGLE_SCREEN_SHARE.name
76 | intent.putExtra("isStarting",isStarting)
77 | startServiceIntent(intent)
78 | }
79 |
80 | fun stopService() {
81 | val intent = Intent(context,MainService::class.java)
82 | intent.action = MainServiceActions.STOP_SERVICE.name
83 | startServiceIntent(intent)
84 | }
85 |
86 | }
--------------------------------------------------------------------------------
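Every method above follows the same pattern: build an explicit Intent for MainService, set its action to one of the MainServiceActions names, and hand it to startServiceIntent(), which uses startForegroundService() on Android O and newer. The call screen is expected to drive these methods; CallActivity.kt belongs to this project but is not included in this excerpt, so the controller below is a hypothetical sketch of that wiring, not the real activity code.

import com.codewithkael.firebasevideocall.service.MainServiceRepository

// Hypothetical UI-side helper: toggle the microphone and hang up through the service layer.
class CallControlsSketch(private val serviceRepository: MainServiceRepository) {
    private var isMicMuted = false

    fun onMicButtonClicked() {
        isMicMuted = !isMicMuted
        serviceRepository.toggleAudio(shouldBeMuted = isMicMuted) // becomes a TOGGLE_AUDIO Intent
    }

    fun onHangUpClicked() {
        serviceRepository.sendEndCall() // becomes an END_CALL Intent handled by MainService
    }
}
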
/app/src/main/java/com/codewithkael/firebasevideocall/adapters/MainRecyclerViewAdapter.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall.adapters
2 |
3 | import android.view.LayoutInflater
4 | import android.view.ViewGroup
5 | import androidx.core.view.isVisible
6 | import androidx.recyclerview.widget.RecyclerView
7 | import com.codewithkael.firebasevideocall.R
8 | import com.codewithkael.firebasevideocall.databinding.ItemMainRecyclerViewBinding
9 | import com.codewithkael.firebasevideocall.utils.UserStatus
10 |
11 | class MainRecyclerViewAdapter(private val listener:Listener) : RecyclerView.Adapter<MainRecyclerViewAdapter.MainRecyclerViewHolder>() {
12 |
13 | private var usersList:List<Pair<String,String>>?=null
14 | fun updateList(list:List<Pair<String,String>>){
15 | this.usersList = list
16 | notifyDataSetChanged()
17 | }
18 |
19 | override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): MainRecyclerViewHolder {
20 | val binding = ItemMainRecyclerViewBinding.inflate(
21 | LayoutInflater.from(parent.context),parent,false
22 | )
23 | return MainRecyclerViewHolder(binding)
24 | }
25 |
26 | override fun getItemCount(): Int {
27 | return usersList?.size?:0
28 | }
29 |
30 | override fun onBindViewHolder(holder: MainRecyclerViewHolder, position: Int) {
31 | usersList?.let { list->
32 | val user = list[position]
33 | holder.bind(user,{
34 | listener.onVideoCallClicked(it)
35 | },{
36 | listener.onAudioCallClicked(it)
37 | })
38 | }
39 | }
40 |
41 | interface Listener {
42 | fun onVideoCallClicked(username:String)
43 | fun onAudioCallClicked(username:String)
44 | }
45 |
46 |
47 |
48 | class MainRecyclerViewHolder(private val binding: ItemMainRecyclerViewBinding):
49 | RecyclerView.ViewHolder(binding.root){
50 | private val context = binding.root.context
51 |
52 | fun bind(
53 | user:Pair<String,String>,
54 | videoCallClicked:(String) -> Unit,
55 | audioCallClicked:(String)-> Unit
56 | ){
57 | binding.apply {
58 | when (user.second) {
59 | "ONLINE" -> {
60 | videoCallBtn.isVisible = true
61 | audioCallBtn.isVisible = true
62 | videoCallBtn.setOnClickListener {
63 | videoCallClicked.invoke(user.first)
64 | }
65 | audioCallBtn.setOnClickListener {
66 | audioCallClicked.invoke(user.first)
67 | }
68 | statusTv.setTextColor(context.resources.getColor(R.color.light_green, null))
69 | statusTv.text = "Online"
70 | }
71 | "OFFLINE" -> {
72 | videoCallBtn.isVisible = false
73 | audioCallBtn.isVisible = false
74 | statusTv.setTextColor(context.resources.getColor(R.color.red, null))
75 | statusTv.text = "Offline"
76 | }
77 | "IN_CALL" -> {
78 | videoCallBtn.isVisible = false
79 | audioCallBtn.isVisible = false
80 | statusTv.setTextColor(context.resources.getColor(R.color.yellow, null))
81 | statusTv.text = "In Call"
82 | }
83 | }
84 |
85 | usernameTv.text = user.first
86 | }
87 |
88 |
89 |
90 | }
91 |
92 |
93 |
94 | }
95 | }
--------------------------------------------------------------------------------
/app/src/main/res/layout/item_main_recycler_view.xml:
--------------------------------------------------------------------------------
1 |
2 |
10 |
11 |
15 |
16 |
27 |
28 |
41 |
42 |
43 |
44 |
55 |
56 |
67 |
68 |
80 |
81 |
82 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_call.xml:
--------------------------------------------------------------------------------
1 |
2 |
6 |
7 |
11 |
12 |
20 |
28 |
39 |
40 |
51 |
52 |
53 |
54 |
62 |
68 |
74 |
80 |
86 |
87 |
93 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
--------------------------------------------------------------------------------
/app/src/main/java/com/codewithkael/firebasevideocall/firebaseClient/FirebaseClient.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall.firebaseClient
2 |
3 | import com.codewithkael.firebasevideocall.utils.DataModel
4 | import com.codewithkael.firebasevideocall.utils.FirebaseFieldNames.LATEST_EVENT
5 | import com.codewithkael.firebasevideocall.utils.FirebaseFieldNames.PASSWORD
6 | import com.codewithkael.firebasevideocall.utils.FirebaseFieldNames.STATUS
7 | import com.codewithkael.firebasevideocall.utils.MyEventListener
8 | import com.codewithkael.firebasevideocall.utils.UserStatus
9 | import com.google.firebase.database.DataSnapshot
10 | import com.google.firebase.database.DatabaseReference
11 | import com.google.gson.Gson
12 | import javax.inject.Inject
13 | import javax.inject.Singleton
14 |
15 | @Singleton
16 | class FirebaseClient @Inject constructor(
17 | private val dbRef:DatabaseReference,
18 | private val gson:Gson
19 | ) {
20 |
21 | private var currentUsername:String?=null
22 | private fun setUsername(username: String){
23 | this.currentUsername = username
24 | }
25 |
26 |
27 | fun login(username: String, password: String, done: (Boolean, String?) -> Unit) {
28 | dbRef.addListenerForSingleValueEvent(object : MyEventListener(){
29 | override fun onDataChange(snapshot: DataSnapshot) {
30 | //if the current user exists
31 | if (snapshot.hasChild(username)){
32 | //user exists, it's time to check the password
33 | val dbPassword = snapshot.child(username).child(PASSWORD).value
34 | if (password == dbPassword) {
35 | //password is correct and sign in
36 | dbRef.child(username).child(STATUS).setValue(UserStatus.ONLINE)
37 | .addOnCompleteListener {
38 | setUsername(username)
39 | done(true,null)
40 | }.addOnFailureListener {
41 | done(false,"${it.message}")
42 | }
43 | }else{
44 | //password is wrong, notify user
45 | done(false,"Password is wrong")
46 | }
47 |
48 | }else{
49 | //user doesn't exist, register the user
50 | dbRef.child(username).child(PASSWORD).setValue(password).addOnCompleteListener {
51 | dbRef.child(username).child(STATUS).setValue(UserStatus.ONLINE)
52 | .addOnCompleteListener {
53 | setUsername(username)
54 | done(true,null)
55 | }.addOnFailureListener {
56 | done(false,it.message)
57 | }
58 | }.addOnFailureListener {
59 | done(false,it.message)
60 | }
61 |
62 | }
63 | }
64 | })
65 | }
66 |
67 | fun observeUsersStatus(status: (List<Pair<String,String>>) -> Unit) {
68 | dbRef.addValueEventListener(object : MyEventListener() {
69 | override fun onDataChange(snapshot: DataSnapshot) {
70 | val list = snapshot.children.filter { it.key !=currentUsername }.map {
71 | it.key!! to it.child(STATUS).value.toString()
72 | }
73 | status(list)
74 | }
75 | })
76 | }
77 |
78 | fun subscribeForLatestEvent(listener:Listener){
79 | try {
80 | dbRef.child(currentUsername!!).child(LATEST_EVENT).addValueEventListener(
81 | object : MyEventListener() {
82 | override fun onDataChange(snapshot: DataSnapshot) {
83 | super.onDataChange(snapshot)
84 | val event = try {
85 | gson.fromJson(snapshot.value.toString(),DataModel::class.java)
86 | }catch (e:Exception){
87 | e.printStackTrace()
88 | null
89 | }
90 | event?.let {
91 | listener.onLatestEventReceived(it)
92 | }
93 | }
94 | }
95 | )
96 | }catch (e:Exception){
97 | e.printStackTrace()
98 | }
99 | }
100 |
101 | fun sendMessageToOtherClient(message:DataModel, success:(Boolean) -> Unit){
102 | val convertedMessage = gson.toJson(message.copy(sender = currentUsername))
103 | dbRef.child(message.target).child(LATEST_EVENT).setValue(convertedMessage)
104 | .addOnCompleteListener {
105 | success(true)
106 | }.addOnFailureListener {
107 | success(false)
108 | }
109 | }
110 |
111 | fun changeMyStatus(status: UserStatus) {
112 | dbRef.child(currentUsername!!).child(STATUS).setValue(status.name)
113 | }
114 |
115 | fun clearLatestEvent() {
116 | dbRef.child(currentUsername!!).child(LATEST_EVENT).setValue(null)
117 | }
118 |
119 | fun logOff(function:()->Unit) {
120 | dbRef.child(currentUsername!!).child(STATUS).setValue(UserStatus.OFFLINE)
121 | .addOnCompleteListener { function() }
122 | }
123 |
124 |
125 | interface Listener {
126 | fun onLatestEventReceived(event:DataModel)
127 | }
128 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/codewithkael/firebasevideocall/ui/MainActivity.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall.ui
2 |
3 | import android.content.Intent
4 | import android.os.Bundle
5 | import android.util.Log
6 | import android.widget.Toast
7 | import androidx.appcompat.app.AppCompatActivity
8 | import androidx.core.view.isVisible
9 | import androidx.recyclerview.widget.LinearLayoutManager
10 | import com.codewithkael.firebasevideocall.adapters.MainRecyclerViewAdapter
11 | import com.codewithkael.firebasevideocall.databinding.ActivityMainBinding
12 | import com.codewithkael.firebasevideocall.repository.MainRepository
13 | import com.codewithkael.firebasevideocall.service.MainService
14 | import com.codewithkael.firebasevideocall.service.MainServiceRepository
15 | import com.codewithkael.firebasevideocall.utils.DataModel
16 | import com.codewithkael.firebasevideocall.utils.DataModelType
17 | import com.codewithkael.firebasevideocall.utils.getCameraAndMicPermission
18 | import dagger.hilt.android.AndroidEntryPoint
19 | import javax.inject.Inject
20 |
21 | @AndroidEntryPoint
22 | class MainActivity : AppCompatActivity(), MainRecyclerViewAdapter.Listener, MainService.Listener {
23 | private val TAG = "MainActivity"
24 |
25 | private lateinit var views: ActivityMainBinding
26 | private var username: String? = null
27 |
28 | @Inject
29 | lateinit var mainRepository: MainRepository
30 | @Inject
31 | lateinit var mainServiceRepository: MainServiceRepository
32 | private var mainAdapter: MainRecyclerViewAdapter? = null
33 |
34 | override fun onCreate(savedInstanceState: Bundle?) {
35 | super.onCreate(savedInstanceState)
36 | views = ActivityMainBinding.inflate(layoutInflater)
37 | setContentView(views.root)
38 | init()
39 | }
40 |
41 | private fun init() {
42 | username = intent.getStringExtra("username")
43 | if (username == null) finish()
44 | //1. observe other users status
45 | subscribeObservers()
46 | //2. start foreground service to listen for negotiations and calls.
47 | startMyService()
48 | }
49 |
50 | private fun subscribeObservers() {
51 | setupRecyclerView()
52 | MainService.listener = this
53 | mainRepository.observeUsersStatus {
54 | Log.d(TAG, "subscribeObservers: $it")
55 | mainAdapter?.updateList(it)
56 | }
57 | }
58 |
59 | private fun setupRecyclerView() {
60 | mainAdapter = MainRecyclerViewAdapter(this)
61 | val layoutManager = LinearLayoutManager(this)
62 | views.mainRecyclerView.apply {
63 | setLayoutManager(layoutManager)
64 | adapter = mainAdapter
65 | }
66 | }
67 |
68 | private fun startMyService() {
69 | mainServiceRepository.startService(username!!)
70 | }
71 |
72 | override fun onVideoCallClicked(username: String) {
73 | //check if permission of mic and camera is taken
74 | getCameraAndMicPermission {
75 | mainRepository.sendConnectionRequest(username, true) {
76 | if (it){
77 | //we have to start video call
78 | //we wanna create an intent to move to call activity
79 | startActivity(Intent(this,CallActivity::class.java).apply {
80 | putExtra("target",username)
81 | putExtra("isVideoCall",true)
82 | putExtra("isCaller",true)
83 | })
84 |
85 | }
86 | }
87 |
88 | }
89 | }
90 |
91 | override fun onAudioCallClicked(username: String) {
92 | getCameraAndMicPermission {
93 | mainRepository.sendConnectionRequest(username, false) {
94 | if (it){
95 | //we have to start audio call
96 | //we wanna create an intent to move to call activity
97 | startActivity(Intent(this,CallActivity::class.java).apply {
98 | putExtra("target",username)
99 | putExtra("isVideoCall",false)
100 | putExtra("isCaller",true)
101 | })
102 | }
103 | }
104 | }
105 | }
106 |
107 | override fun onBackPressed() {
108 | super.onBackPressed()
109 | mainServiceRepository.stopService()
110 | }
111 |
112 | override fun onCallReceived(model: DataModel) {
113 | runOnUiThread {
114 | views.apply {
115 | val isVideoCall = model.type == DataModelType.StartVideoCall
116 | val isVideoCallText = if (isVideoCall) "Video" else "Audio"
117 | incomingCallTitleTv.text = "${model.sender} is $isVideoCallText Calling you"
118 | incomingCallLayout.isVisible = true
119 | acceptButton.setOnClickListener {
120 | getCameraAndMicPermission {
121 | incomingCallLayout.isVisible = false
122 | //create an intent to go to video call activity
123 | startActivity(Intent(this@MainActivity,CallActivity::class.java).apply {
124 | putExtra("target",model.sender)
125 | putExtra("isVideoCall",isVideoCall)
126 | putExtra("isCaller",false)
127 | })
128 | }
129 | }
130 | declineButton.setOnClickListener {
131 | incomingCallLayout.isVisible = false
132 | }
133 |
134 | }
135 | }
136 | }
137 |
138 |
139 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/codewithkael/firebasevideocall/webrtc/ProximitySensor.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014 The WebRTC Project Authors. All rights reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | package com.codewithkael.firebasevideocall.webrtc;
12 |
13 | import android.annotation.SuppressLint;
14 | import android.content.Context;
15 | import android.hardware.Sensor;
16 | import android.hardware.SensorEvent;
17 | import android.hardware.SensorEventListener;
18 | import android.hardware.SensorManager;
19 |
20 | import org.webrtc.ThreadUtils;
21 |
22 | /**
23 | * AppRTCProximitySensor manages functions related to the proximity sensor in
24 | * the AppRTC demo.
25 | * On most devices, the proximity sensor is implemented as a boolean sensor.
26 | * It returns just two values "NEAR" or "FAR". Thresholding is done on the LUX
27 | * value i.e. the LUX value of the light sensor is compared with a threshold.
28 | * A LUX-value more than the threshold means the proximity sensor returns "FAR".
29 | * Anything less than the threshold value and the sensor returns "NEAR".
30 | */
31 | @SuppressLint("MissingPermission")
32 | public class ProximitySensor implements SensorEventListener {
33 | private static final String TAG = ProximitySensor.class.getSimpleName();
34 |
35 | // This class should be created, started and stopped on one thread
36 | // (e.g. the main thread). We use |nonThreadSafe| to ensure that this is
37 | // the case. Only active when |DEBUG| is set to true.
38 | private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
39 |
40 | private final Runnable onSensorStateListener;
41 | private final SensorManager sensorManager;
42 | private Sensor proximitySensor = null;
43 | private boolean lastStateReportIsNear = false;
44 |
45 | private ProximitySensor(Context context, Runnable sensorStateListener) {
46 | onSensorStateListener = sensorStateListener;
47 | sensorManager = ((SensorManager) context.getSystemService(Context.SENSOR_SERVICE));
48 | }
49 |
50 | /**
51 | * Construction
52 | */
53 | static ProximitySensor create(Context context, Runnable sensorStateListener) {
54 | return new ProximitySensor(context, sensorStateListener);
55 | }
56 |
57 | /**
58 | * Activate the proximity sensor. Also do initialization if called for the
59 | * first time.
60 | */
61 | public boolean start() {
62 | threadChecker.checkIsOnValidThread();
63 | if (!initDefaultSensor()) {
64 | // Proximity sensor is not supported on this device.
65 | return false;
66 | }
67 | sensorManager.registerListener(this, proximitySensor, SensorManager.SENSOR_DELAY_NORMAL);
68 | return true;
69 | }
70 |
71 | /**
72 | * Deactivate the proximity sensor.
73 | */
74 | public void stop() {
75 | threadChecker.checkIsOnValidThread();
76 | if (proximitySensor == null) {
77 | return;
78 | }
79 | sensorManager.unregisterListener(this, proximitySensor);
80 | }
81 |
82 | /**
83 | * Getter for last reported state. Set to true if "near" is reported.
84 | */
85 | public boolean sensorReportsNearState() {
86 | threadChecker.checkIsOnValidThread();
87 | return lastStateReportIsNear;
88 | }
89 |
90 | @Override
91 | public final void onAccuracyChanged(Sensor sensor, int accuracy) {
92 | threadChecker.checkIsOnValidThread();
93 |
94 | }
95 |
96 | @Override
97 | public final void onSensorChanged(SensorEvent event) {
98 | threadChecker.checkIsOnValidThread();
 99 |         // As a best practice, do as little as possible within this method and
100 | // avoid blocking.
101 | float distanceInCentimeters = event.values[0];
102 | lastStateReportIsNear = distanceInCentimeters < proximitySensor.getMaximumRange();
103 |
104 | // Report about new state to listening client. Client can then call
105 | // sensorReportsNearState() to query the current state (NEAR or FAR).
106 | if (onSensorStateListener != null) {
107 | onSensorStateListener.run();
108 | }
109 |
110 | }
111 |
112 | /**
113 |      * Get the default proximity sensor if it exists. Tablet devices (e.g. Nexus 7)
114 |      * do not support this type of sensor, and false will be returned in such
115 | * cases.
116 | */
117 | private boolean initDefaultSensor() {
118 | if (proximitySensor != null) {
119 | return true;
120 | }
121 | proximitySensor = sensorManager.getDefaultSensor(Sensor.TYPE_PROXIMITY);
122 | if (proximitySensor == null) {
123 | return false;
124 | }
125 | logProximitySensorInfo();
126 | return true;
127 | }
128 |
129 | /**
130 | * Helper method for logging information about the proximity sensor.
131 | */
132 | private void logProximitySensorInfo() {
133 | if (proximitySensor == null) {
134 | return;
135 | }
136 | StringBuilder info = new StringBuilder("Proximity sensor: ");
137 | info.append("name=").append(proximitySensor.getName());
138 | info.append(", vendor: ").append(proximitySensor.getVendor());
139 | info.append(", power: ").append(proximitySensor.getPower());
140 | info.append(", resolution: ").append(proximitySensor.getResolution());
141 | info.append(", max range: ").append(proximitySensor.getMaximumRange());
142 | info.append(", min delay: ").append(proximitySensor.getMinDelay());
143 | // Added in API level 20.
144 | info.append(", type: ").append(proximitySensor.getStringType());
145 | // Added in API level 21.
146 | info.append(", max delay: ").append(proximitySensor.getMaxDelay());
147 | info.append(", reporting mode: ").append(proximitySensor.getReportingMode());
148 | info.append(", isWakeUpSensor: ").append(proximitySensor.isWakeUpSensor());
149 | }
150 | }
151 |
--------------------------------------------------------------------------------
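
A minimal Kotlin sketch (not part of the repository) of how the ProximitySensor API above could drive earpiece/speaker switching through RTCAudioManager. The ProximityAudioSwitcher name and the wiring are assumptions; create(), start(), stop() and sensorReportsNearState() are the calls shown above, and everything should run on the main thread because of the internal ThreadChecker.

package com.codewithkael.firebasevideocall.webrtc

import android.content.Context

// Hypothetical helper, not in the project. It lives in the same package because
// ProximitySensor.create() is package-private. The Runnable passed to create() is
// invoked from onSensorChanged(); we then query the last reported state.
class ProximityAudioSwitcher(
    context: Context,
    private val audioManager: RTCAudioManager
) {
    private val sensor: ProximitySensor =
        ProximitySensor.create(context) { onProximityChanged() }

    private fun onProximityChanged() {
        if (sensor.sensorReportsNearState()) {
            // Phone held against the ear: route audio to the earpiece.
            audioManager.selectAudioDevice(RTCAudioManager.AudioDevice.EARPIECE)
        } else {
            // Phone moved away: fall back to the speakerphone.
            audioManager.selectAudioDevice(RTCAudioManager.AudioDevice.SPEAKER_PHONE)
        }
    }

    fun start(): Boolean = sensor.start() // false if the device has no proximity sensor
    fun stop() = sensor.stop()
}
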
/app/src/main/res/drawable/ic_launcher_background.xml:
--------------------------------------------------------------------------------
1 | <!-- vector drawable markup was not preserved in this listing -->
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | #
4 | # Copyright 2015 the original author or authors.
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | # https://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | #
18 |
19 | ##############################################################################
20 | ##
21 | ## Gradle start up script for UN*X
22 | ##
23 | ##############################################################################
24 |
25 | # Attempt to set APP_HOME
26 | # Resolve links: $0 may be a link
27 | PRG="$0"
28 | # Need this for relative symlinks.
29 | while [ -h "$PRG" ] ; do
30 | ls=`ls -ld "$PRG"`
31 | link=`expr "$ls" : '.*-> \(.*\)$'`
32 | if expr "$link" : '/.*' > /dev/null; then
33 | PRG="$link"
34 | else
35 | PRG=`dirname "$PRG"`"/$link"
36 | fi
37 | done
38 | SAVED="`pwd`"
39 | cd "`dirname \"$PRG\"`/" >/dev/null
40 | APP_HOME="`pwd -P`"
41 | cd "$SAVED" >/dev/null
42 |
43 | APP_NAME="Gradle"
44 | APP_BASE_NAME=`basename "$0"`
45 |
46 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
47 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
48 |
49 | # Use the maximum available, or set MAX_FD != -1 to use that value.
50 | MAX_FD="maximum"
51 |
52 | warn () {
53 | echo "$*"
54 | }
55 |
56 | die () {
57 | echo
58 | echo "$*"
59 | echo
60 | exit 1
61 | }
62 |
63 | # OS specific support (must be 'true' or 'false').
64 | cygwin=false
65 | msys=false
66 | darwin=false
67 | nonstop=false
68 | case "`uname`" in
69 | CYGWIN* )
70 | cygwin=true
71 | ;;
72 | Darwin* )
73 | darwin=true
74 | ;;
75 | MINGW* )
76 | msys=true
77 | ;;
78 | NONSTOP* )
79 | nonstop=true
80 | ;;
81 | esac
82 |
83 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
84 |
85 |
86 | # Determine the Java command to use to start the JVM.
87 | if [ -n "$JAVA_HOME" ] ; then
88 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
89 | # IBM's JDK on AIX uses strange locations for the executables
90 | JAVACMD="$JAVA_HOME/jre/sh/java"
91 | else
92 | JAVACMD="$JAVA_HOME/bin/java"
93 | fi
94 | if [ ! -x "$JAVACMD" ] ; then
95 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
96 |
97 | Please set the JAVA_HOME variable in your environment to match the
98 | location of your Java installation."
99 | fi
100 | else
101 | JAVACMD="java"
102 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
103 |
104 | Please set the JAVA_HOME variable in your environment to match the
105 | location of your Java installation."
106 | fi
107 |
108 | # Increase the maximum file descriptors if we can.
109 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
110 | MAX_FD_LIMIT=`ulimit -H -n`
111 | if [ $? -eq 0 ] ; then
112 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
113 | MAX_FD="$MAX_FD_LIMIT"
114 | fi
115 | ulimit -n $MAX_FD
116 | if [ $? -ne 0 ] ; then
117 | warn "Could not set maximum file descriptor limit: $MAX_FD"
118 | fi
119 | else
120 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
121 | fi
122 | fi
123 |
124 | # For Darwin, add options to specify how the application appears in the dock
125 | if $darwin; then
126 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
127 | fi
128 |
129 | # For Cygwin or MSYS, switch paths to Windows format before running java
130 | if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
131 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
132 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
133 |
134 | JAVACMD=`cygpath --unix "$JAVACMD"`
135 |
136 | # We build the pattern for arguments to be converted via cygpath
137 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
138 | SEP=""
139 | for dir in $ROOTDIRSRAW ; do
140 | ROOTDIRS="$ROOTDIRS$SEP$dir"
141 | SEP="|"
142 | done
143 | OURCYGPATTERN="(^($ROOTDIRS))"
144 | # Add a user-defined pattern to the cygpath arguments
145 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
146 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
147 | fi
148 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
149 | i=0
150 | for arg in "$@" ; do
151 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
152 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
153 |
154 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
155 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
156 | else
157 | eval `echo args$i`="\"$arg\""
158 | fi
159 | i=`expr $i + 1`
160 | done
161 | case $i in
162 | 0) set -- ;;
163 | 1) set -- "$args0" ;;
164 | 2) set -- "$args0" "$args1" ;;
165 | 3) set -- "$args0" "$args1" "$args2" ;;
166 | 4) set -- "$args0" "$args1" "$args2" "$args3" ;;
167 | 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
168 | 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
169 | 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
170 | 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
171 | 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
172 | esac
173 | fi
174 |
175 | # Escape application args
176 | save () {
177 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
178 | echo " "
179 | }
180 | APP_ARGS=`save "$@"`
181 |
182 | # Collect all arguments for the java command, following the shell quoting and substitution rules
183 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
184 |
185 | exec "$JAVACMD" "$@"
186 |
--------------------------------------------------------------------------------
/app/src/main/java/com/codewithkael/firebasevideocall/repository/MainRepository.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall.repository
2 |
3 | import android.content.Intent
4 | import com.codewithkael.firebasevideocall.firebaseClient.FirebaseClient
5 | import com.codewithkael.firebasevideocall.utils.DataModel
6 | import com.codewithkael.firebasevideocall.utils.DataModelType.*
7 | import com.codewithkael.firebasevideocall.utils.UserStatus
8 | import com.codewithkael.firebasevideocall.webrtc.MyPeerObserver
9 | import com.codewithkael.firebasevideocall.webrtc.WebRTCClient
10 | import com.google.gson.Gson
11 | import org.webrtc.*
12 | import javax.inject.Inject
13 | import javax.inject.Singleton
14 |
15 | @Singleton
16 | class MainRepository @Inject constructor(
17 | private val firebaseClient: FirebaseClient,
18 | private val webRTCClient: WebRTCClient,
19 | private val gson: Gson
20 | ) : WebRTCClient.Listener {
21 |
22 | private var target: String? = null
23 | var listener: Listener? = null
24 | private var remoteView:SurfaceViewRenderer?=null
25 |
26 | fun login(username: String, password: String, isDone: (Boolean, String?) -> Unit) {
27 | firebaseClient.login(username, password, isDone)
28 | }
29 |
 30 |     fun observeUsersStatus(status: (List<Pair<String, String>>) -> Unit) {
31 | firebaseClient.observeUsersStatus(status)
32 | }
33 |
34 | fun initFirebase() {
35 | firebaseClient.subscribeForLatestEvent(object : FirebaseClient.Listener {
36 | override fun onLatestEventReceived(event: DataModel) {
37 | listener?.onLatestEventReceived(event)
38 | when (event.type) {
39 | Offer->{
40 | webRTCClient.onRemoteSessionReceived(
41 | SessionDescription(
42 | SessionDescription.Type.OFFER,
43 | event.data.toString()
44 | )
45 | )
46 | webRTCClient.answer(target!!)
47 | }
48 | Answer->{
49 | webRTCClient.onRemoteSessionReceived(
50 | SessionDescription(
51 | SessionDescription.Type.ANSWER,
52 | event.data.toString()
53 | )
54 | )
55 | }
56 | IceCandidates->{
57 | val candidate: IceCandidate? = try {
58 | gson.fromJson(event.data.toString(),IceCandidate::class.java)
59 | }catch (e:Exception){
60 | null
61 | }
62 | candidate?.let {
63 | webRTCClient.addIceCandidateToPeer(it)
64 | }
65 | }
66 | EndCall->{
67 | listener?.endCall()
68 | }
69 | else -> Unit
70 | }
71 | }
72 |
73 | })
74 | }
75 |
76 | fun sendConnectionRequest(target: String, isVideoCall: Boolean, success: (Boolean) -> Unit) {
77 | firebaseClient.sendMessageToOtherClient(
78 | DataModel(
79 | type = if (isVideoCall) StartVideoCall else StartAudioCall,
80 | target = target
81 | ), success
82 | )
83 | }
84 |
85 | fun setTarget(target: String) {
86 | this.target = target
87 | }
88 |
89 | interface Listener {
90 | fun onLatestEventReceived(data: DataModel)
91 | fun endCall()
92 | }
93 |
94 | fun initWebrtcClient(username: String) {
95 | webRTCClient.listener = this
96 | webRTCClient.initializeWebrtcClient(username, object : MyPeerObserver() {
97 |
98 | override fun onAddStream(p0: MediaStream?) {
99 | super.onAddStream(p0)
100 | try {
101 | p0?.videoTracks?.get(0)?.addSink(remoteView)
102 | }catch (e:Exception){
103 | e.printStackTrace()
104 | }
105 |
106 | }
107 |
108 | override fun onIceCandidate(p0: IceCandidate?) {
109 | super.onIceCandidate(p0)
110 | p0?.let {
111 | webRTCClient.sendIceCandidate(target!!, it)
112 | }
113 | }
114 |
115 | override fun onConnectionChange(newState: PeerConnection.PeerConnectionState?) {
116 | super.onConnectionChange(newState)
117 | if (newState == PeerConnection.PeerConnectionState.CONNECTED) {
118 | // 1. change my status to in call
119 | changeMyStatus(UserStatus.IN_CALL)
120 | // 2. clear latest event inside my user section in firebase database
121 | firebaseClient.clearLatestEvent()
122 | }
123 | }
124 | })
125 | }
126 |
127 | fun initLocalSurfaceView(view: SurfaceViewRenderer, isVideoCall: Boolean) {
128 | webRTCClient.initLocalSurfaceView(view, isVideoCall)
129 | }
130 |
131 | fun initRemoteSurfaceView(view: SurfaceViewRenderer) {
132 | webRTCClient.initRemoteSurfaceView(view)
133 | this.remoteView = view
134 | }
135 |
136 | fun startCall() {
137 | webRTCClient.call(target!!)
138 | }
139 |
140 | fun endCall() {
141 | webRTCClient.closeConnection()
142 | changeMyStatus(UserStatus.ONLINE)
143 | }
144 |
145 | fun sendEndCall() {
146 | onTransferEventToSocket(
147 | DataModel(
148 | type = EndCall,
149 | target = target!!
150 | )
151 | )
152 | }
153 |
154 | private fun changeMyStatus(status: UserStatus) {
155 | firebaseClient.changeMyStatus(status)
156 | }
157 |
158 | fun toggleAudio(shouldBeMuted: Boolean) {
159 | webRTCClient.toggleAudio(shouldBeMuted)
160 | }
161 |
162 | fun toggleVideo(shouldBeMuted: Boolean) {
163 | webRTCClient.toggleVideo(shouldBeMuted)
164 | }
165 |
166 | fun switchCamera() {
167 | webRTCClient.switchCamera()
168 | }
169 |
170 | override fun onTransferEventToSocket(data: DataModel) {
171 | firebaseClient.sendMessageToOtherClient(data) {}
172 | }
173 |
174 | fun setScreenCaptureIntent(screenPermissionIntent: Intent) {
175 | webRTCClient.setPermissionIntent(screenPermissionIntent)
176 | }
177 |
178 | fun toggleScreenShare(isStarting: Boolean) {
179 | if (isStarting){
180 | webRTCClient.startScreenCapturing()
181 | }else{
182 | webRTCClient.stopScreenCapturing()
183 | }
184 | }
185 |
186 | fun logOff(function: () -> Unit) = firebaseClient.logOff(function)
187 |
188 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/codewithkael/firebasevideocall/service/MainService.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall.service
2 |
3 | import android.app.NotificationChannel
4 | import android.app.NotificationManager
5 | import android.app.PendingIntent
6 | import android.app.Service
7 | import android.content.Intent
8 | import android.os.Build
9 | import android.os.IBinder
10 | import android.util.Log
11 | import androidx.core.app.NotificationCompat
12 | import com.codewithkael.firebasevideocall.R
13 | import com.codewithkael.firebasevideocall.repository.MainRepository
14 | import com.codewithkael.firebasevideocall.service.MainServiceActions.*
15 | import com.codewithkael.firebasevideocall.utils.DataModel
16 | import com.codewithkael.firebasevideocall.utils.DataModelType
17 | import com.codewithkael.firebasevideocall.utils.isValid
18 | import com.codewithkael.firebasevideocall.webrtc.RTCAudioManager
19 | import dagger.hilt.android.AndroidEntryPoint
20 | import org.webrtc.SurfaceViewRenderer
21 | import javax.inject.Inject
22 |
23 | @AndroidEntryPoint
24 | class MainService : Service(), MainRepository.Listener {
25 |
26 | private val TAG = "MainService"
27 |
28 | private var isServiceRunning = false
29 | private var username: String? = null
30 |
31 | @Inject
32 | lateinit var mainRepository: MainRepository
33 |
34 | private lateinit var notificationManager: NotificationManager
35 | private lateinit var rtcAudioManager: RTCAudioManager
36 | private var isPreviousCallStateVideo = true
37 |
38 |
39 | companion object {
40 | var listener: Listener? = null
41 | var endCallListener:EndCallListener?=null
42 | var localSurfaceView: SurfaceViewRenderer?=null
43 | var remoteSurfaceView: SurfaceViewRenderer?=null
44 | var screenPermissionIntent : Intent?=null
45 | }
46 |
47 | override fun onCreate() {
48 | super.onCreate()
49 | rtcAudioManager = RTCAudioManager.create(this)
50 | rtcAudioManager.setDefaultAudioDevice(RTCAudioManager.AudioDevice.SPEAKER_PHONE)
51 | notificationManager = getSystemService(
52 | NotificationManager::class.java
53 | )
54 | }
55 |
56 | override fun onStartCommand(intent: Intent?, flags: Int, startId: Int): Int {
57 | intent?.let { incomingIntent ->
58 | when (incomingIntent.action) {
59 | START_SERVICE.name -> handleStartService(incomingIntent)
60 | SETUP_VIEWS.name -> handleSetupViews(incomingIntent)
61 | END_CALL.name -> handleEndCall()
62 | SWITCH_CAMERA.name -> handleSwitchCamera()
63 | TOGGLE_AUDIO.name -> handleToggleAudio(incomingIntent)
64 | TOGGLE_VIDEO.name -> handleToggleVideo(incomingIntent)
65 | TOGGLE_AUDIO_DEVICE.name -> handleToggleAudioDevice(incomingIntent)
66 | TOGGLE_SCREEN_SHARE.name -> handleToggleScreenShare(incomingIntent)
67 | STOP_SERVICE.name -> handleStopService()
68 | else -> Unit
69 | }
70 | }
71 |
72 | return START_STICKY
73 | }
74 |
75 | private fun handleStopService() {
76 | mainRepository.endCall()
77 | mainRepository.logOff {
78 | isServiceRunning = false
79 | stopSelf()
80 | }
81 | }
82 |
83 | private fun handleToggleScreenShare(incomingIntent: Intent) {
84 | val isStarting = incomingIntent.getBooleanExtra("isStarting",true)
85 | if (isStarting){
 86 |             // we should start screen share,
 87 |             // but first we have to stop the camera stream
88 | if (isPreviousCallStateVideo){
89 | mainRepository.toggleVideo(true)
90 | }
91 | mainRepository.setScreenCaptureIntent(screenPermissionIntent!!)
92 | mainRepository.toggleScreenShare(true)
93 |
94 | }else{
 95 |             //we should stop screen share and, if the camera stream was on before, turn it back on
96 | mainRepository.toggleScreenShare(false)
97 | if (isPreviousCallStateVideo){
98 | mainRepository.toggleVideo(false)
99 | }
100 | }
101 | }
102 |
103 | private fun handleToggleAudioDevice(incomingIntent: Intent) {
104 | val type = when(incomingIntent.getStringExtra("type")){
105 | RTCAudioManager.AudioDevice.EARPIECE.name -> RTCAudioManager.AudioDevice.EARPIECE
106 | RTCAudioManager.AudioDevice.SPEAKER_PHONE.name -> RTCAudioManager.AudioDevice.SPEAKER_PHONE
107 | else -> null
108 | }
109 |
110 | type?.let {
111 | rtcAudioManager.setDefaultAudioDevice(it)
112 | rtcAudioManager.selectAudioDevice(it)
113 | Log.d(TAG, "handleToggleAudioDevice: $it")
114 | }
115 |
116 |
117 | }
118 |
119 | private fun handleToggleVideo(incomingIntent: Intent) {
120 | val shouldBeMuted = incomingIntent.getBooleanExtra("shouldBeMuted",true)
121 | this.isPreviousCallStateVideo = !shouldBeMuted
122 | mainRepository.toggleVideo(shouldBeMuted)
123 | }
124 |
125 | private fun handleToggleAudio(incomingIntent: Intent) {
126 | val shouldBeMuted = incomingIntent.getBooleanExtra("shouldBeMuted",true)
127 | mainRepository.toggleAudio(shouldBeMuted)
128 | }
129 |
130 | private fun handleSwitchCamera() {
131 | mainRepository.switchCamera()
132 | }
133 |
134 | private fun handleEndCall() {
135 |         //1. we have to send a signal to the other peer that the call has ended
136 | mainRepository.sendEndCall()
137 |         //2. end our call process and restart our webrtc client
138 | endCallAndRestartRepository()
139 | }
140 |
141 | private fun endCallAndRestartRepository(){
142 | mainRepository.endCall()
143 | endCallListener?.onCallEnded()
144 | mainRepository.initWebrtcClient(username!!)
145 | }
146 |
147 | private fun handleSetupViews(incomingIntent: Intent) {
148 | val isCaller = incomingIntent.getBooleanExtra("isCaller",false)
149 | val isVideoCall = incomingIntent.getBooleanExtra("isVideoCall",true)
150 | val target = incomingIntent.getStringExtra("target")
151 | this.isPreviousCallStateVideo = isVideoCall
152 | mainRepository.setTarget(target!!)
153 | //initialize our widgets and start streaming our video and audio source
154 | //and get prepared for call
155 | mainRepository.initLocalSurfaceView(localSurfaceView!!,isVideoCall)
156 | mainRepository.initRemoteSurfaceView(remoteSurfaceView!!)
157 |
158 |
159 | if (!isCaller){
160 | //start the video call
161 | mainRepository.startCall()
162 | }
163 |
164 | }
165 |
166 | private fun handleStartService(incomingIntent: Intent) {
167 | //start our foreground service
168 | if (!isServiceRunning) {
169 | isServiceRunning = true
170 | username = incomingIntent.getStringExtra("username")
171 | startServiceWithNotification()
172 |
173 | //setup my clients
174 | mainRepository.listener = this
175 | mainRepository.initFirebase()
176 | mainRepository.initWebrtcClient(username!!)
177 |
178 | }
179 | }
180 |
181 | private fun startServiceWithNotification() {
182 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
183 | val notificationChannel = NotificationChannel(
184 | "channel1", "foreground", NotificationManager.IMPORTANCE_HIGH
185 | )
186 |
187 | val intent = Intent(this,MainServiceReceiver::class.java).apply {
188 | action = "ACTION_EXIT"
189 | }
190 | val pendingIntent : PendingIntent =
191 | PendingIntent.getBroadcast(this,0 ,intent,PendingIntent.FLAG_IMMUTABLE)
192 |
193 | notificationManager.createNotificationChannel(notificationChannel)
194 | val notification = NotificationCompat.Builder(
195 | this, "channel1"
196 | ).setSmallIcon(R.mipmap.ic_launcher)
197 | .addAction(R.drawable.ic_end_call,"Exit",pendingIntent)
198 |
199 | startForeground(1, notification.build())
200 | }
201 | }
202 |
203 |
204 | override fun onBind(intent: Intent?): IBinder? {
205 | return null
206 | }
207 |
208 | override fun onLatestEventReceived(data: DataModel) {
209 | if (data.isValid()) {
210 | when (data.type) {
211 | DataModelType.StartVideoCall,
212 | DataModelType.StartAudioCall -> {
213 | listener?.onCallReceived(data)
214 | }
215 | else -> Unit
216 | }
217 | }
218 | }
219 |
220 | override fun endCall() {
221 | //we are receiving end call signal from remote peer
222 | endCallAndRestartRepository()
223 | }
224 |
225 | interface Listener {
226 | fun onCallReceived(model: DataModel)
227 | }
228 |
229 | interface EndCallListener {
230 | fun onCallEnded()
231 | }
232 | }
--------------------------------------------------------------------------------
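
MainService above is driven purely by intent actions and string extras; the project's MainServiceRepository (not shown in this section) is what builds those intents. Below is a rough sketch of what such calls can look like, using only the action names and extra keys read by onStartCommand() above; the function names and the ContextCompat.startForegroundService wrapper are assumptions, not the repository's actual code.

import android.content.Context
import android.content.Intent
import androidx.core.content.ContextCompat
import com.codewithkael.firebasevideocall.service.MainService
import com.codewithkael.firebasevideocall.service.MainServiceActions

// Hypothetical helpers, not the project's MainServiceRepository.
fun startMainService(context: Context, username: String) {
    val intent = Intent(context, MainService::class.java).apply {
        action = MainServiceActions.START_SERVICE.name
        putExtra("username", username)           // read by handleStartService()
    }
    ContextCompat.startForegroundService(context, intent)
}

fun toggleAudio(context: Context, shouldBeMuted: Boolean) {
    val intent = Intent(context, MainService::class.java).apply {
        action = MainServiceActions.TOGGLE_AUDIO.name
        putExtra("shouldBeMuted", shouldBeMuted) // read by handleToggleAudio()
    }
    context.startService(intent)
}
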
/app/src/main/java/com/codewithkael/firebasevideocall/ui/CallActivity.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall.ui
2 |
3 | import android.app.Activity
4 | import android.content.Context
5 | import android.content.Intent
6 | import android.media.projection.MediaProjectionManager
7 | import android.os.Bundle
8 | import androidx.activity.result.ActivityResultLauncher
9 | import androidx.activity.result.contract.ActivityResultContracts
10 | import androidx.appcompat.app.AlertDialog
11 | import androidx.appcompat.app.AppCompatActivity
12 | import androidx.core.view.isVisible
13 | import com.codewithkael.firebasevideocall.R
14 | import com.codewithkael.firebasevideocall.databinding.ActivityCallBinding
15 | import com.codewithkael.firebasevideocall.service.MainService
16 | import com.codewithkael.firebasevideocall.service.MainServiceRepository
17 | import com.codewithkael.firebasevideocall.utils.convertToHumanTime
18 | import com.codewithkael.firebasevideocall.webrtc.RTCAudioManager
19 | import dagger.hilt.android.AndroidEntryPoint
20 | import kotlinx.coroutines.*
21 | import javax.inject.Inject
22 |
23 | @AndroidEntryPoint
24 | class CallActivity : AppCompatActivity(), MainService.EndCallListener {
25 |
26 | private var target:String?=null
27 | private var isVideoCall:Boolean= true
28 | private var isCaller:Boolean = true
29 |
30 | private var isMicrophoneMuted = false
31 | private var isCameraMuted = false
32 | private var isSpeakerMode = true
33 | private var isScreenCasting = false
34 |
35 |
36 | @Inject lateinit var serviceRepository: MainServiceRepository
 37 |     private lateinit var requestScreenCaptureLauncher:ActivityResultLauncher<Intent>
38 |
39 | private lateinit var views:ActivityCallBinding
40 |
41 | override fun onStart() {
42 | super.onStart()
43 | requestScreenCaptureLauncher = registerForActivityResult(ActivityResultContracts
44 | .StartActivityForResult()) { result ->
45 | if (result.resultCode == Activity.RESULT_OK){
46 | val intent = result.data
 47 |                 //it's time to give this intent to our service, which passes it to our webrtc client
48 | MainService.screenPermissionIntent = intent
49 | isScreenCasting = true
50 | updateUiToScreenCaptureIsOn()
51 | serviceRepository.toggleScreenShare(true)
52 | }
53 | }
54 | }
55 |
56 | override fun onCreate(savedInstanceState: Bundle?) {
57 | super.onCreate(savedInstanceState)
58 | views = ActivityCallBinding.inflate(layoutInflater)
59 | setContentView(views.root)
60 | init()
61 | }
62 |
63 | private fun init(){
64 | intent.getStringExtra("target")?.let {
65 | this.target = it
66 | }?: kotlin.run {
67 | finish()
68 | }
69 |
70 | isVideoCall = intent.getBooleanExtra("isVideoCall",true)
71 | isCaller = intent.getBooleanExtra("isCaller",true)
72 |
73 | views.apply {
74 | callTitleTv.text = "In call with $target"
75 | CoroutineScope(Dispatchers.IO).launch {
76 | for (i in 0..3600){
77 | delay(1000)
78 | withContext(Dispatchers.Main){
79 | //convert this int to human readable time
80 | callTimerTv.text = i.convertToHumanTime()
81 | }
82 | }
83 | }
84 |
85 | if (!isVideoCall){
86 | toggleCameraButton.isVisible = false
87 | screenShareButton.isVisible = false
88 | switchCameraButton.isVisible = false
89 |
90 | }
91 | MainService.remoteSurfaceView = remoteView
92 | MainService.localSurfaceView = localView
93 | serviceRepository.setupViews(isVideoCall,isCaller,target!!)
94 |
95 | endCallButton.setOnClickListener {
96 | serviceRepository.sendEndCall()
97 | }
98 |
99 | switchCameraButton.setOnClickListener {
100 | serviceRepository.switchCamera()
101 | }
102 | }
103 | setupMicToggleClicked()
104 | setupCameraToggleClicked()
105 | setupToggleAudioDevice()
106 | setupScreenCasting()
107 | MainService.endCallListener = this
108 | }
109 |
110 | private fun setupScreenCasting() {
111 | views.apply {
112 | screenShareButton.setOnClickListener {
113 | if (!isScreenCasting){
114 | //we have to start casting
115 | AlertDialog.Builder(this@CallActivity)
116 | .setTitle("Screen Casting")
117 |                         .setMessage("Are you sure you want to start casting?")
118 | .setPositiveButton("Yes"){dialog,_ ->
119 | //start screen casting process
120 | startScreenCapture()
121 | dialog.dismiss()
122 | }.setNegativeButton("No") {dialog,_ ->
123 | dialog.dismiss()
124 | }.create().show()
125 | }else{
126 | //we have to end screen casting
127 | isScreenCasting = false
128 | updateUiToScreenCaptureIsOff()
129 | serviceRepository.toggleScreenShare(false)
130 | }
131 | }
132 |
133 | }
134 | }
135 |
136 | private fun startScreenCapture() {
137 | val mediaProjectionManager = application.getSystemService(
138 | Context.MEDIA_PROJECTION_SERVICE
139 | ) as MediaProjectionManager
140 |
141 | val captureIntent = mediaProjectionManager.createScreenCaptureIntent()
142 | requestScreenCaptureLauncher.launch(captureIntent)
143 |
144 | }
145 |
146 | private fun updateUiToScreenCaptureIsOn(){
147 | views.apply {
148 | localView.isVisible = false
149 | switchCameraButton.isVisible = false
150 | toggleCameraButton.isVisible = false
151 | screenShareButton.setImageResource(R.drawable.ic_stop_screen_share)
152 | }
153 |
154 | }
155 | private fun updateUiToScreenCaptureIsOff() {
156 | views.apply {
157 | localView.isVisible = true
158 | switchCameraButton.isVisible = true
159 | toggleCameraButton.isVisible = true
160 | screenShareButton.setImageResource(R.drawable.ic_screen_share)
161 | }
162 | }
163 | private fun setupMicToggleClicked(){
164 | views.apply {
165 | toggleMicrophoneButton.setOnClickListener {
166 | if (!isMicrophoneMuted){
167 | //we should mute our mic
168 | //1. send a command to repository
169 | serviceRepository.toggleAudio(true)
170 | //2. update ui to mic is muted
171 | toggleMicrophoneButton.setImageResource(R.drawable.ic_mic_on)
172 | }else{
173 | //we should set it back to normal
174 | //1. send a command to repository to make it back to normal status
175 | serviceRepository.toggleAudio(false)
176 | //2. update ui
177 | toggleMicrophoneButton.setImageResource(R.drawable.ic_mic_off)
178 | }
179 | isMicrophoneMuted = !isMicrophoneMuted
180 | }
181 | }
182 | }
183 |
184 | override fun onBackPressed() {
185 | super.onBackPressed()
186 | serviceRepository.sendEndCall()
187 | }
188 |
189 | private fun setupToggleAudioDevice(){
190 | views.apply {
191 | toggleAudioDevice.setOnClickListener {
192 | if (isSpeakerMode){
193 | //we should set it to earpiece mode
194 | toggleAudioDevice.setImageResource(R.drawable.ic_speaker)
195 | //we should send a command to our service to switch between devices
196 | serviceRepository.toggleAudioDevice(RTCAudioManager.AudioDevice.EARPIECE.name)
197 |
198 | }else{
199 | //we should set it to speaker mode
200 | toggleAudioDevice.setImageResource(R.drawable.ic_ear)
201 | serviceRepository.toggleAudioDevice(RTCAudioManager.AudioDevice.SPEAKER_PHONE.name)
202 |
203 | }
204 | isSpeakerMode = !isSpeakerMode
205 | }
206 |
207 | }
208 | }
209 |
210 | private fun setupCameraToggleClicked(){
211 | views.apply {
212 | toggleCameraButton.setOnClickListener {
213 | if (!isCameraMuted){
214 | serviceRepository.toggleVideo(true)
215 | toggleCameraButton.setImageResource(R.drawable.ic_camera_on)
216 | }else{
217 | serviceRepository.toggleVideo(false)
218 | toggleCameraButton.setImageResource(R.drawable.ic_camera_off)
219 | }
220 |
221 | isCameraMuted = !isCameraMuted
222 | }
223 | }
224 | }
225 |
226 | override fun onCallEnded() {
227 | finish()
228 | }
229 |
230 | override fun onDestroy() {
231 | super.onDestroy()
232 | MainService.remoteSurfaceView?.release()
233 | MainService.remoteSurfaceView = null
234 |
235 | MainService.localSurfaceView?.release()
236 | MainService.localSurfaceView =null
237 |
238 | }
239 | }
--------------------------------------------------------------------------------
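
CallActivity's timer loop above feeds each elapsed second into convertToHumanTime(), an extension defined in utils/Extensions.kt (not shown in this section). Below is a hedged sketch of what such a seconds-to-"mm:ss" formatter can look like; the project's actual implementation may differ.

// Hypothetical sketch only; the real convertToHumanTime() lives in utils/Extensions.kt.
fun Int.convertToHumanTime(): String {
    val minutes = this / 60
    val seconds = this % 60
    val paddedSeconds = seconds.toString().padStart(2, '0')
    return "$minutes:$paddedSeconds"
}

// e.g. 75.convertToHumanTime() == "1:15"
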
/app/src/main/java/com/codewithkael/firebasevideocall/webrtc/WebRTCClient.kt:
--------------------------------------------------------------------------------
1 | package com.codewithkael.firebasevideocall.webrtc
2 |
3 | import android.content.Context
4 | import android.content.Intent
5 | import android.media.projection.MediaProjection
6 | import android.util.DisplayMetrics
7 | import android.util.Log
8 | import android.view.WindowManager
9 | import com.codewithkael.firebasevideocall.utils.DataModel
10 | import com.codewithkael.firebasevideocall.utils.DataModelType
11 | import com.google.gson.Gson
12 | import org.webrtc.*
13 | import javax.inject.Inject
14 | import javax.inject.Singleton
15 |
16 | @Singleton
17 | class WebRTCClient @Inject constructor(
18 | private val context: Context,
19 | private val gson: Gson
20 | ) {
21 | //class variables
22 | var listener: Listener? = null
23 | private lateinit var username: String
24 |
25 | //webrtc variables
26 | private val eglBaseContext = EglBase.create().eglBaseContext
27 | private val peerConnectionFactory by lazy { createPeerConnectionFactory() }
28 | private var peerConnection: PeerConnection? = null
29 | private val iceServer = listOf(
30 | PeerConnection.IceServer.builder("turn:a.relay.metered.ca:443?transport=tcp")
31 | .setUsername("83eebabf8b4cce9d5dbcb649")
32 | .setPassword("2D7JvfkOQtBdYW3R").createIceServer()
33 | )
34 | private val localVideoSource by lazy { peerConnectionFactory.createVideoSource(false) }
35 | private val localAudioSource by lazy { peerConnectionFactory.createAudioSource(MediaConstraints())}
36 | private val videoCapturer = getVideoCapturer(context)
37 | private var surfaceTextureHelper:SurfaceTextureHelper?=null
38 | private val mediaConstraint = MediaConstraints().apply {
39 | mandatory.add(MediaConstraints.KeyValuePair("OfferToReceiveVideo","true"))
40 | mandatory.add(MediaConstraints.KeyValuePair("OfferToReceiveAudio","true"))
41 | }
42 |
43 | //call variables
44 | private lateinit var localSurfaceView: SurfaceViewRenderer
45 | private lateinit var remoteSurfaceView: SurfaceViewRenderer
46 | private var localStream: MediaStream? = null
47 | private var localTrackId = ""
48 | private var localStreamId = ""
49 | private var localAudioTrack:AudioTrack?=null
50 | private var localVideoTrack:VideoTrack?=null
51 |
52 | //screen casting
53 | private var permissionIntent:Intent?=null
54 | private var screenCapturer:VideoCapturer?=null
55 | private val localScreenVideoSource by lazy { peerConnectionFactory.createVideoSource(false) }
56 | private var localScreenShareVideoTrack:VideoTrack?=null
57 |
58 | //installing requirements section
59 | init {
60 | initPeerConnectionFactory()
61 | }
62 | private fun initPeerConnectionFactory() {
63 | val options = PeerConnectionFactory.InitializationOptions.builder(context)
64 | .setEnableInternalTracer(true).setFieldTrials("WebRTC-H264HighProfile/Enabled/")
65 | .createInitializationOptions()
66 | PeerConnectionFactory.initialize(options)
67 | }
68 | private fun createPeerConnectionFactory(): PeerConnectionFactory {
69 | return PeerConnectionFactory.builder()
70 | .setVideoDecoderFactory(
71 | DefaultVideoDecoderFactory(eglBaseContext)
72 | ).setVideoEncoderFactory(
73 | DefaultVideoEncoderFactory(
74 | eglBaseContext, true, true
75 | )
76 | ).setOptions(PeerConnectionFactory.Options().apply {
77 | disableNetworkMonitor = false
78 | disableEncryption = false
79 | }).createPeerConnectionFactory()
80 | }
81 | fun initializeWebrtcClient(
82 | username: String, observer: PeerConnection.Observer
83 | ) {
84 | this.username = username
85 | localTrackId = "${username}_track"
86 | localStreamId = "${username}_stream"
87 | peerConnection = createPeerConnection(observer)
88 | }
89 | private fun createPeerConnection(observer: PeerConnection.Observer): PeerConnection? {
90 | return peerConnectionFactory.createPeerConnection(iceServer, observer)
91 | }
92 |
93 | //negotiation section
94 | fun call(target:String){
95 | peerConnection?.createOffer(object : MySdpObserver() {
96 | override fun onCreateSuccess(desc: SessionDescription?) {
97 | super.onCreateSuccess(desc)
98 | peerConnection?.setLocalDescription(object : MySdpObserver() {
99 | override fun onSetSuccess() {
100 | super.onSetSuccess()
101 | listener?.onTransferEventToSocket(
102 | DataModel(type = DataModelType.Offer,
103 | sender = username,
104 | target = target,
105 | data = desc?.description)
106 | )
107 | }
108 | },desc)
109 | }
110 | },mediaConstraint)
111 | }
112 |
113 | fun answer(target:String){
114 | peerConnection?.createAnswer(object : MySdpObserver() {
115 | override fun onCreateSuccess(desc: SessionDescription?) {
116 | super.onCreateSuccess(desc)
117 | peerConnection?.setLocalDescription(object : MySdpObserver() {
118 | override fun onSetSuccess() {
119 | super.onSetSuccess()
120 | listener?.onTransferEventToSocket(
121 | DataModel(type = DataModelType.Answer,
122 | sender = username,
123 | target = target,
124 | data = desc?.description)
125 | )
126 | }
127 | },desc)
128 | }
129 | },mediaConstraint)
130 | }
131 |
132 | fun onRemoteSessionReceived(sessionDescription: SessionDescription){
133 | peerConnection?.setRemoteDescription(MySdpObserver(),sessionDescription)
134 | }
135 |
136 | fun addIceCandidateToPeer(iceCandidate: IceCandidate){
137 | peerConnection?.addIceCandidate(iceCandidate)
138 | }
139 |
140 | fun sendIceCandidate(target: String,iceCandidate: IceCandidate){
141 | addIceCandidateToPeer(iceCandidate)
142 | listener?.onTransferEventToSocket(
143 | DataModel(
144 | type = DataModelType.IceCandidates,
145 | sender = username,
146 | target = target,
147 | data = gson.toJson(iceCandidate)
148 | )
149 | )
150 | }
151 |
152 | fun closeConnection(){
153 | try {
154 | videoCapturer.dispose()
155 | screenCapturer?.dispose()
156 | localStream?.dispose()
157 | peerConnection?.close()
158 | }catch (e:Exception){
159 | e.printStackTrace()
160 | }
161 | }
162 |
163 | fun switchCamera(){
164 | videoCapturer.switchCamera(null)
165 | }
166 |
167 | fun toggleAudio(shouldBeMuted:Boolean){
168 | if (shouldBeMuted){
169 | localStream?.removeTrack(localAudioTrack)
170 | }else{
171 | localStream?.addTrack(localAudioTrack)
172 | }
173 | }
174 |
175 | fun toggleVideo(shouldBeMuted: Boolean){
176 | try {
177 | if (shouldBeMuted){
178 | stopCapturingCamera()
179 | }else{
180 | startCapturingCamera(localSurfaceView)
181 | }
182 | }catch (e:Exception){
183 | e.printStackTrace()
184 | }
185 | }
186 |
187 | //streaming section
188 | private fun initSurfaceView(view: SurfaceViewRenderer) {
189 | view.run {
190 | setMirror(false)
191 | setEnableHardwareScaler(true)
192 | init(eglBaseContext, null)
193 | }
194 | }
195 | fun initRemoteSurfaceView(view:SurfaceViewRenderer){
196 | this.remoteSurfaceView = view
197 | initSurfaceView(view)
198 | }
199 | fun initLocalSurfaceView(localView: SurfaceViewRenderer, isVideoCall: Boolean) {
200 | this.localSurfaceView = localView
201 | initSurfaceView(localView)
202 | startLocalStreaming(localView, isVideoCall)
203 | }
204 | private fun startLocalStreaming(localView: SurfaceViewRenderer, isVideoCall: Boolean) {
205 | localStream = peerConnectionFactory.createLocalMediaStream(localStreamId)
206 | if (isVideoCall){
207 | startCapturingCamera(localView)
208 | }
209 |
210 | localAudioTrack = peerConnectionFactory.createAudioTrack(localTrackId+"_audio",localAudioSource)
211 | localStream?.addTrack(localAudioTrack)
212 | peerConnection?.addStream(localStream)
213 | }
214 | private fun startCapturingCamera(localView: SurfaceViewRenderer){
215 | surfaceTextureHelper = SurfaceTextureHelper.create(
216 | Thread.currentThread().name,eglBaseContext
217 | )
218 |
219 | videoCapturer.initialize(
220 | surfaceTextureHelper,context,localVideoSource.capturerObserver
221 | )
222 |
223 | videoCapturer.startCapture(
224 | 720,480,20
225 | )
226 |
227 | localVideoTrack = peerConnectionFactory.createVideoTrack(localTrackId+"_video",localVideoSource)
228 | localVideoTrack?.addSink(localView)
229 | localStream?.addTrack(localVideoTrack)
230 | }
231 | private fun getVideoCapturer(context: Context):CameraVideoCapturer =
232 | Camera2Enumerator(context).run {
233 | deviceNames.find {
234 | isFrontFacing(it)
235 | }?.let {
236 | createCapturer(it,null)
237 | }?:throw IllegalStateException()
238 | }
239 | private fun stopCapturingCamera(){
240 |
241 | videoCapturer.dispose()
242 | localVideoTrack?.removeSink(localSurfaceView)
243 | localSurfaceView.clearImage()
244 | localStream?.removeTrack(localVideoTrack)
245 | localVideoTrack?.dispose()
246 | }
247 |
248 | //screen capture section
249 |
250 | fun setPermissionIntent(screenPermissionIntent: Intent) {
251 | this.permissionIntent = screenPermissionIntent
252 | }
253 |
254 | fun startScreenCapturing() {
255 | val displayMetrics = DisplayMetrics()
256 | val windowsManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
257 | windowsManager.defaultDisplay.getMetrics(displayMetrics)
258 |
259 | val screenWidthPixels = displayMetrics.widthPixels
260 | val screenHeightPixels = displayMetrics.heightPixels
261 |
262 | val surfaceTextureHelper = SurfaceTextureHelper.create(
263 | Thread.currentThread().name,eglBaseContext
264 | )
265 |
266 | screenCapturer = createScreenCapturer()
267 | screenCapturer!!.initialize(
268 | surfaceTextureHelper,context,localScreenVideoSource.capturerObserver
269 | )
270 | screenCapturer!!.startCapture(screenWidthPixels,screenHeightPixels,15)
271 |
272 | localScreenShareVideoTrack =
273 | peerConnectionFactory.createVideoTrack(localTrackId+"_video",localScreenVideoSource)
274 | localScreenShareVideoTrack?.addSink(localSurfaceView)
275 | localStream?.addTrack(localScreenShareVideoTrack)
276 | peerConnection?.addStream(localStream)
277 |
278 | }
279 |
280 | fun stopScreenCapturing() {
281 | screenCapturer?.stopCapture()
282 | screenCapturer?.dispose()
283 | localScreenShareVideoTrack?.removeSink(localSurfaceView)
284 | localSurfaceView.clearImage()
285 | localStream?.removeTrack(localScreenShareVideoTrack)
286 | localScreenShareVideoTrack?.dispose()
287 |
288 | }
289 |
290 | private fun createScreenCapturer():VideoCapturer {
291 | return ScreenCapturerAndroid(permissionIntent, object : MediaProjection.Callback() {
292 | override fun onStop() {
293 | super.onStop()
294 | Log.d("permissions", "onStop: permission of screen casting is stopped")
295 | }
296 | })
297 | }
298 |
299 |
300 | interface Listener {
301 | fun onTransferEventToSocket(data: DataModel)
302 | }
303 | }
--------------------------------------------------------------------------------
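
The iceServer list near the top of WebRTCClient holds a single TURN entry with credentials hard-coded in source. As a hedged sketch using the same IceServer builder API shown above (server URLs and credentials here are placeholders, not the project's), a credential-free STUN entry can sit alongside a TURN entry whose secrets are passed in rather than committed.

import org.webrtc.PeerConnection

// Sketch only: placeholder servers and credentials.
fun buildIceServers(turnUsername: String, turnPassword: String): List<PeerConnection.IceServer> =
    listOf(
        // STUN needs no credentials.
        PeerConnection.IceServer.builder("stun:stun.l.google.com:19302")
            .createIceServer(),
        // TURN entry; supply the credentials (e.g. from BuildConfig) instead of hard-coding them.
        PeerConnection.IceServer.builder("turn:turn.example.com:443?transport=tcp")
            .setUsername(turnUsername)
            .setPassword(turnPassword)
            .createIceServer()
    )
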
/app/src/main/java/com/codewithkael/firebasevideocall/webrtc/BluetoothManager.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 The WebRTC Project Authors. All rights reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | package com.codewithkael.firebasevideocall.webrtc;
12 |
13 | import android.annotation.SuppressLint;
14 | import android.bluetooth.BluetoothAdapter;
15 | import android.bluetooth.BluetoothDevice;
16 | import android.bluetooth.BluetoothHeadset;
17 | import android.bluetooth.BluetoothProfile;
18 | import android.content.BroadcastReceiver;
19 | import android.content.Context;
20 | import android.content.Intent;
21 | import android.content.IntentFilter;
22 | import android.content.pm.PackageManager;
23 | import android.os.Handler;
24 | import android.os.Looper;
25 | import android.os.Process;
26 |
27 | import org.webrtc.ThreadUtils;
28 |
29 | import java.util.List;
30 | import java.util.Set;
31 |
32 | /**
 33 |  * BluetoothManager manages functions related to Bluetooth devices in this
 34 |  * app (adapted from the AppRTC demo).
35 | */
36 | @SuppressLint("MissingPermission")
37 | public class BluetoothManager {
38 | private static final String TAG = "AppRTCBluetoothManager";
39 |
40 | // Timeout interval for starting or stopping audio to a Bluetooth SCO device.
41 | private static final int BLUETOOTH_SCO_TIMEOUT_MS = 4000;
42 | // Maximum number of SCO connection attempts.
43 | private static final int MAX_SCO_CONNECTION_ATTEMPTS = 2;
44 | private final Context apprtcContext;
45 | private final RTCAudioManager apprtcAudioManager;
46 | private final android.media.AudioManager audioManager;
47 | private final Handler handler;
48 | private final BluetoothProfile.ServiceListener bluetoothServiceListener;
49 | private final BroadcastReceiver bluetoothHeadsetReceiver;
50 | private int scoConnectionAttempts;
51 | private State bluetoothState;
52 | private BluetoothAdapter bluetoothAdapter;
53 | private BluetoothHeadset bluetoothHeadset;
54 | private BluetoothDevice bluetoothDevice;
55 | // Runs when the Bluetooth timeout expires. We use that timeout after calling
56 | // startScoAudio() or stopScoAudio() because we're not guaranteed to get a
57 | // callback after those calls.
58 | private final Runnable bluetoothTimeoutRunnable = this::bluetoothTimeout;
59 |
60 | private BluetoothManager(Context context, RTCAudioManager audioManager) {
61 | ThreadUtils.checkIsOnMainThread();
62 | apprtcContext = context;
63 | apprtcAudioManager = audioManager;
64 | this.audioManager = getAudioManager(context);
65 | bluetoothState = State.UNINITIALIZED;
66 | bluetoothServiceListener = new BluetoothServiceListener();
67 | bluetoothHeadsetReceiver = new BluetoothHeadsetBroadcastReceiver();
68 | handler = new Handler(Looper.getMainLooper());
69 | }
70 |
71 | /**
72 | * Construction.
73 | */
74 | static BluetoothManager create(Context context, RTCAudioManager audioManager) {
75 | return new BluetoothManager(context, audioManager);
76 | }
77 |
78 | /**
79 | * Returns the internal state.
80 | */
81 | public State getState() {
82 | ThreadUtils.checkIsOnMainThread();
83 | return bluetoothState;
84 | }
85 |
86 | /**
87 | * Activates components required to detect Bluetooth devices and to enable
88 | * BT SCO (audio is routed via BT SCO) for the headset profile. The end
89 | * state will be HEADSET_UNAVAILABLE but a state machine has started which
90 | * will start a state change sequence where the final outcome depends on
91 | * if/when the BT headset is enabled.
92 | * Example of state change sequence when start() is called while BT device
93 | * is connected and enabled:
94 | * UNINITIALIZED --> HEADSET_UNAVAILABLE --> HEADSET_AVAILABLE -->
95 | * SCO_CONNECTING --> SCO_CONNECTED <==> audio is now routed via BT SCO.
96 | * Note that the AppRTCAudioManager is also involved in driving this state
97 | * change.
98 | */
99 | @SuppressLint("MissingPermission")
100 | public void start() {
101 | ThreadUtils.checkIsOnMainThread();
102 | if (!hasPermission()) {
103 | return;
104 | }
105 | if (bluetoothState != State.UNINITIALIZED) {
106 | return;
107 | }
108 | bluetoothHeadset = null;
109 | bluetoothDevice = null;
110 | scoConnectionAttempts = 0;
111 | // Get a handle to the default local Bluetooth adapter.
112 | bluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
113 | if (bluetoothAdapter == null) {
114 | return;
115 | }
116 | // Ensure that the device supports use of BT SCO audio for off call use cases.
117 | if (!audioManager.isBluetoothScoAvailableOffCall()) {
118 | return;
119 | }
120 | logBluetoothAdapterInfo(bluetoothAdapter);
121 | // Establish a connection to the HEADSET profile (includes both Bluetooth Headset and
122 | // Hands-Free) proxy object and install a listener.
123 | if (!getBluetoothProfileProxy(apprtcContext, bluetoothServiceListener)) {
124 | return;
125 | }
126 | // Register receivers for BluetoothHeadset change notifications.
127 | IntentFilter bluetoothHeadsetFilter = new IntentFilter();
128 | // Register receiver for change in connection state of the Headset profile.
129 | bluetoothHeadsetFilter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED);
130 | // Register receiver for change in audio connection state of the Headset profile.
131 | bluetoothHeadsetFilter.addAction(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED);
132 | registerReceiver(bluetoothHeadsetReceiver, bluetoothHeadsetFilter);
133 | bluetoothState = State.HEADSET_UNAVAILABLE;
134 | }
135 |
136 | /**
137 | * Stops and closes all components related to Bluetooth audio.
138 | */
139 | public void stop() {
140 | ThreadUtils.checkIsOnMainThread();
141 | if (bluetoothAdapter == null) {
142 | return;
143 | }
144 | // Stop BT SCO connection with remote device if needed.
145 | stopScoAudio();
146 | // Close down remaining BT resources.
147 | if (bluetoothState == State.UNINITIALIZED) {
148 | return;
149 | }
150 | unregisterReceiver(bluetoothHeadsetReceiver);
151 | cancelTimer();
152 | if (bluetoothHeadset != null) {
153 | bluetoothAdapter.closeProfileProxy(BluetoothProfile.HEADSET, bluetoothHeadset);
154 | bluetoothHeadset = null;
155 | }
156 | bluetoothAdapter = null;
157 | bluetoothDevice = null;
158 | bluetoothState = State.UNINITIALIZED;
159 | }
160 |
161 | /**
162 | * Starts Bluetooth SCO connection with remote device.
163 | * Note that the phone application always has the priority on the usage of the SCO connection
164 | * for telephony. If this method is called while the phone is in call it will be ignored.
165 | * Similarly, if a call is received or sent while an application is using the SCO connection,
166 | * the connection will be lost for the application and NOT returned automatically when the call
167 | * ends. Also note that: up to and including API version JELLY_BEAN_MR1, this method initiates a
168 | * virtual voice call to the Bluetooth headset. After API version JELLY_BEAN_MR2 only a raw SCO
169 | * audio connection is established.
170 | * TODO(henrika): should we add support for virtual voice call to BT headset also for JBMR2 and
171 | * higher. It might be required to initiates a virtual voice call since many devices do not
172 | * accept SCO audio without a "call".
173 | */
174 | public boolean startScoAudio() {
175 | ThreadUtils.checkIsOnMainThread();
176 | if (scoConnectionAttempts >= MAX_SCO_CONNECTION_ATTEMPTS) {
177 | return false;
178 | }
179 | if (bluetoothState != State.HEADSET_AVAILABLE) {
180 | return false;
181 | }
182 | // Start BT SCO channel and wait for ACTION_AUDIO_STATE_CHANGED.
183 | // The SCO connection establishment can take several seconds, hence we cannot rely on the
184 | // connection to be available when the method returns but instead register to receive the
185 | // intent ACTION_SCO_AUDIO_STATE_UPDATED and wait for the state to be SCO_AUDIO_STATE_CONNECTED.
186 | bluetoothState = State.SCO_CONNECTING;
187 | audioManager.startBluetoothSco();
188 | audioManager.setBluetoothScoOn(true);
189 | scoConnectionAttempts++;
190 | startTimer();
191 | return true;
192 | }
193 |
194 | /**
195 | * Stops Bluetooth SCO connection with remote device.
196 | */
197 | public void stopScoAudio() {
198 | ThreadUtils.checkIsOnMainThread();
199 | if (bluetoothState != State.SCO_CONNECTING && bluetoothState != State.SCO_CONNECTED) {
200 | return;
201 | }
202 | cancelTimer();
203 | audioManager.stopBluetoothSco();
204 | audioManager.setBluetoothScoOn(false);
205 | bluetoothState = State.SCO_DISCONNECTING;
206 | }
207 |
208 | /**
209 | * Use the BluetoothHeadset proxy object (controls the Bluetooth Headset
210 | * Service via IPC) to update the list of connected devices for the HEADSET
211 | * profile. The internal state will change to HEADSET_UNAVAILABLE or to
212 | * HEADSET_AVAILABLE and |bluetoothDevice| will be mapped to the connected
213 | * device if available.
214 | */
215 | public void updateDevice() {
216 | if (bluetoothState == State.UNINITIALIZED || bluetoothHeadset == null) {
217 | return;
218 | }
219 | // Get connected devices for the headset profile. Returns the set of
220 | // devices which are in state STATE_CONNECTED. The BluetoothDevice class
221 | // is just a thin wrapper for a Bluetooth hardware address.
222 |         List<BluetoothDevice> devices = bluetoothHeadset.getConnectedDevices();
223 | if (devices.isEmpty()) {
224 | bluetoothDevice = null;
225 | bluetoothState = State.HEADSET_UNAVAILABLE;
226 | } else {
227 | // Always use first device in list. Android only supports one device.
228 | bluetoothDevice = devices.get(0);
229 | bluetoothState = State.HEADSET_AVAILABLE;
230 | }
231 | }
232 |
233 | /**
234 | * Stubs for test mocks.
235 | */
236 | protected android.media.AudioManager getAudioManager(Context context) {
237 | return (android.media.AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
238 | }
239 |
240 | protected void registerReceiver(BroadcastReceiver receiver, IntentFilter filter) {
241 | apprtcContext.registerReceiver(receiver, filter);
242 | }
243 |
244 | protected void unregisterReceiver(BroadcastReceiver receiver) {
245 | apprtcContext.unregisterReceiver(receiver);
246 | }
247 |
248 | protected boolean getBluetoothProfileProxy(Context context, BluetoothProfile.ServiceListener listener) {
249 | return bluetoothAdapter.getProfileProxy(context, listener, BluetoothProfile.HEADSET);
250 | }
251 |
252 | protected boolean hasPermission() {
253 | return apprtcContext.checkPermission(android.Manifest.permission.BLUETOOTH, Process.myPid(), Process.myUid()) == PackageManager.PERMISSION_GRANTED;
254 | }
255 |
256 | /**
257 | * Logs the state of the local Bluetooth adapter.
258 | */
259 | @SuppressLint("HardwareIds")
260 | protected void logBluetoothAdapterInfo(BluetoothAdapter localAdapter) {
261 | // Log the set of BluetoothDevice objects that are bonded (paired) to the local adapter.
262 |         Set<BluetoothDevice> pairedDevices = localAdapter.getBondedDevices();
263 |
264 | }
265 |
266 | /**
267 | * Ensures that the audio manager updates its list of available audio devices.
268 | */
269 | private void updateAudioDeviceState() {
270 | ThreadUtils.checkIsOnMainThread();
271 | apprtcAudioManager.updateAudioDeviceState();
272 | }
273 |
274 | /**
275 | * Starts timer which times out after BLUETOOTH_SCO_TIMEOUT_MS milliseconds.
276 | */
277 | private void startTimer() {
278 | ThreadUtils.checkIsOnMainThread();
279 | handler.postDelayed(bluetoothTimeoutRunnable, BLUETOOTH_SCO_TIMEOUT_MS);
280 | }
281 |
282 | /**
283 | * Cancels any outstanding timer tasks.
284 | */
285 | private void cancelTimer() {
286 | ThreadUtils.checkIsOnMainThread();
287 | handler.removeCallbacks(bluetoothTimeoutRunnable);
288 | }
289 |
290 | /**
291 | * Called when start of the BT SCO channel takes too long time. Usually
292 | * happens when the BT device has been turned on during an ongoing call.
293 | */
294 | private void bluetoothTimeout() {
295 | ThreadUtils.checkIsOnMainThread();
296 | if (bluetoothState == State.UNINITIALIZED || bluetoothHeadset == null) {
297 | return;
298 | }
299 | if (bluetoothState != State.SCO_CONNECTING) {
300 | return;
301 | }
302 | // Bluetooth SCO should be connecting; check the latest result.
303 | boolean scoConnected = false;
304 |         List<BluetoothDevice> devices = bluetoothHeadset.getConnectedDevices();
305 | if (devices.size() > 0) {
306 | bluetoothDevice = devices.get(0);
307 | if (bluetoothHeadset.isAudioConnected(bluetoothDevice)) {
308 | scoConnected = true;
309 | }
310 | }
311 | if (scoConnected) {
312 | // We thought BT had timed out, but it's actually on; updating state.
313 | bluetoothState = State.SCO_CONNECTED;
314 | scoConnectionAttempts = 0;
315 | } else {
316 | // Give up and "cancel" our request by calling stopBluetoothSco().
317 | stopScoAudio();
318 | }
319 | updateAudioDeviceState();
320 | }
321 |
322 | /**
323 | * Checks whether audio uses Bluetooth SCO.
324 | */
325 | private boolean isScoOn() {
326 | return audioManager.isBluetoothScoOn();
327 | }
328 |
329 | /**
330 | * Converts BluetoothAdapter states into local string representations.
331 | */
332 | private String stateToString(int state) {
333 | switch (state) {
334 | case BluetoothAdapter.STATE_DISCONNECTED:
335 | return "DISCONNECTED";
336 | case BluetoothAdapter.STATE_CONNECTED:
337 | return "CONNECTED";
338 | case BluetoothAdapter.STATE_CONNECTING:
339 | return "CONNECTING";
340 | case BluetoothAdapter.STATE_DISCONNECTING:
341 | return "DISCONNECTING";
342 | case BluetoothAdapter.STATE_OFF:
343 | return "OFF";
344 | case BluetoothAdapter.STATE_ON:
345 | return "ON";
346 | case BluetoothAdapter.STATE_TURNING_OFF:
347 | // Indicates the local Bluetooth adapter is turning off. Local clients should immediately
348 | // attempt graceful disconnection of any remote links.
349 | return "TURNING_OFF";
350 | case BluetoothAdapter.STATE_TURNING_ON:
351 | // Indicates the local Bluetooth adapter is turning on. However local clients should wait
352 | // for STATE_ON before attempting to use the adapter.
353 | return "TURNING_ON";
354 | default:
355 | return "INVALID";
356 | }
357 | }
358 |
359 | // Bluetooth connection state.
360 | public enum State {
361 | UNINITIALIZED,       // Bluetooth is not available; no adapter or Bluetooth is off.
362 | ERROR,               // Bluetooth error happened when trying to start Bluetooth.
363 | // Headset profile proxy exists, but no headset is connected; SCO is not started or is disconnected.
364 | HEADSET_UNAVAILABLE,
365 | // Headset profile proxy is connected and a Bluetooth headset is present, but SCO is not started or is disconnected.
366 | HEADSET_AVAILABLE,
367 | // Bluetooth audio SCO connection with the remote device is closing.
368 | SCO_DISCONNECTING,
369 | SCO_CONNECTING,      // Bluetooth audio SCO connection with the remote device has been initiated.
370 | SCO_CONNECTED        // Bluetooth audio SCO connection with the remote device is established.
371 | }
372 |
373 | /**
374 | * Implementation of an interface that notifies BluetoothProfile IPC clients when they have been
375 | * connected to or disconnected from the service.
376 | */
377 | private class BluetoothServiceListener implements BluetoothProfile.ServiceListener {
378 | @Override
379 | // Called to notify the client when the proxy object has been connected to the service.
380 | // Once we have the profile proxy object, we can use it to monitor the state of the
381 | // connection and perform other operations that are relevant to the headset profile.
382 | public void onServiceConnected(int profile, BluetoothProfile proxy) {
383 | if (profile != BluetoothProfile.HEADSET || bluetoothState == State.UNINITIALIZED) {
384 | return;
385 | }
386 | // Android only supports one connected Bluetooth Headset at a time.
387 | bluetoothHeadset = (BluetoothHeadset) proxy;
388 | updateAudioDeviceState();
389 | }
390 |
391 | @Override
392 | public void onServiceDisconnected(int profile) {
393 | if (profile != BluetoothProfile.HEADSET || bluetoothState == State.UNINITIALIZED) {
394 | return;
395 | }
396 | stopScoAudio();
397 | bluetoothHeadset = null;
398 | bluetoothDevice = null;
399 | bluetoothState = State.HEADSET_UNAVAILABLE;
400 | updateAudioDeviceState();
401 | }
402 | }
403 |
404 | // Intent broadcast receiver which handles changes in Bluetooth device availability.
405 | // Detects headset changes and Bluetooth SCO state changes.
406 | private class BluetoothHeadsetBroadcastReceiver extends BroadcastReceiver {
407 | @Override
408 | public void onReceive(Context context, Intent intent) {
409 | if (bluetoothState == State.UNINITIALIZED) {
410 | return;
411 | }
412 | final String action = intent.getAction();
413 | // Change in connection state of the Headset profile. Note that the
414 | // change does not tell us anything about whether we're streaming
415 | // audio to BT over SCO. Typically received when user turns on a BT
416 | // headset while audio is active using another audio device.
417 | if (action.equals(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED)) {
418 | final int state = intent.getIntExtra(BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_DISCONNECTED);
419 | if (state == BluetoothHeadset.STATE_CONNECTED) {
420 | scoConnectionAttempts = 0;
421 | updateAudioDeviceState();
422 | } else if (state == BluetoothHeadset.STATE_DISCONNECTED) {
423 | // Bluetooth is probably powered off during the call.
424 | stopScoAudio();
425 | updateAudioDeviceState();
426 | }
427 | // Change in the audio (SCO) connection state of the Headset profile.
428 | // Typically received after call to startScoAudio() has finalized.
429 | } else if (action.equals(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED)) {
430 | final int state = intent.getIntExtra(BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_AUDIO_DISCONNECTED);
431 | if (state == BluetoothHeadset.STATE_AUDIO_CONNECTED) {
432 | cancelTimer();
433 | if (bluetoothState == State.SCO_CONNECTING) {
434 | bluetoothState = State.SCO_CONNECTED;
435 | scoConnectionAttempts = 0;
436 | updateAudioDeviceState();
437 | }
438 | } else if (state == BluetoothHeadset.STATE_AUDIO_CONNECTING) {
439 | // No action needed while the SCO audio channel is still being set up.
440 | } else if (state == BluetoothHeadset.STATE_AUDIO_DISCONNECTED) {
441 | if (isInitialStickyBroadcast()) {
442 | return;
443 | }
444 | updateAudioDeviceState();
445 | }
446 | }
447 | }
448 | }
449 | }
450 |
--------------------------------------------------------------------------------
/app/src/main/java/com/codewithkael/firebasevideocall/webrtc/RTCAudioManager.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014 The WebRTC Project Authors. All rights reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | package com.codewithkael.firebasevideocall.webrtc;
12 |
13 | import android.annotation.SuppressLint;
14 | import android.content.BroadcastReceiver;
15 | import android.content.Context;
16 | import android.content.Intent;
17 | import android.content.IntentFilter;
18 | import android.content.SharedPreferences;
19 | import android.content.pm.PackageManager;
20 | import android.media.AudioDeviceInfo;
21 | import android.preference.PreferenceManager;
22 |
23 | import org.webrtc.ThreadUtils;
24 |
25 | import java.util.Collections;
26 | import java.util.HashSet;
27 | import java.util.Set;
28 |
29 |
30 | /**
31 |  * RTCAudioManager manages all audio-related parts of the call; it is adapted from the AppRTC demo's AppRTCAudioManager.
32 | */
33 | @SuppressLint("MissingPermission")
34 | public class RTCAudioManager {
35 | private static final String TAG = RTCAudioManager.class.getSimpleName();
36 | private static final String SPEAKERPHONE_AUTO = "auto";
37 | private static final String SPEAKERPHONE_TRUE = "true";
38 | private static final String SPEAKERPHONE_FALSE = "false";
39 | private final Context apprtcContext;
40 | // Contains speakerphone setting: auto, true or false
41 | private final String useSpeakerphone;
42 | // Handles all tasks related to Bluetooth headset devices.
43 | private final BluetoothManager bluetoothManager;
44 | private final android.media.AudioManager audioManager;
45 | private AudioManagerEvents audioManagerEvents;
46 | private AudioManagerState amState;
47 | private int savedAudioMode = android.media.AudioManager.MODE_INVALID;
48 | private boolean savedIsSpeakerPhoneOn = false;
49 | private boolean savedIsMicrophoneMute = false;
50 | private boolean hasWiredHeadset = false;
51 | // Default audio device; speaker phone for video calls or earpiece for audio
52 | // only calls.
53 | private AudioDevice defaultAudioDevice;
54 | // Contains the currently selected audio device.
55 | // This device is changed automatically using a certain scheme where e.g.
56 | // a wired headset "wins" over speaker phone. It is also possible for a
57 | // user to explicitly select a device (and override any predefined scheme).
58 | // See |userSelectedAudioDevice| for details.
59 | private AudioDevice selectedAudioDevice;
60 | // Contains the user-selected audio device which overrides the predefined
61 | // selection scheme.
62 | // TODO(henrika): always set to AudioDevice.NONE today. Add support for
63 | // explicit selection based on choice by userSelectedAudioDevice.
64 | private AudioDevice userSelectedAudioDevice;
65 | // Proximity sensor object. It measures the proximity of an object in cm
66 | // relative to the view screen of a device and can therefore be used to
67 | // assist device switching (close to ear <=> use headset earpiece if
68 | // available, far from ear <=> use speaker phone).
69 | private ProximitySensor proximitySensor;
70 | // Contains a list of available audio devices. A Set collection is used to
71 | // avoid duplicate elements.
72 | private Set<AudioDevice> audioDevices = new HashSet<>();
73 | // Broadcast receiver for wired headset intent broadcasts.
74 | private final BroadcastReceiver wiredHeadsetReceiver;
75 | // Callback method for changes in audio focus.
76 | private android.media.AudioManager.OnAudioFocusChangeListener audioFocusChangeListener;
77 |
78 | private RTCAudioManager(Context context) {
79 | ThreadUtils.checkIsOnMainThread();
80 | apprtcContext = context;
81 | audioManager = ((android.media.AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
82 | bluetoothManager = BluetoothManager.create(context, this);
83 | wiredHeadsetReceiver = new WiredHeadsetReceiver();
84 | amState = AudioManagerState.UNINITIALIZED;
85 |
86 | SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
87 | useSpeakerphone = sharedPreferences.getString("speakerphone_preference", "auto");
88 | if (useSpeakerphone.equals(SPEAKERPHONE_FALSE)) {
89 | defaultAudioDevice = AudioDevice.EARPIECE;
90 | } else {
91 | defaultAudioDevice = AudioDevice.SPEAKER_PHONE;
92 | }
93 |
94 | // Create and initialize the proximity sensor.
95 | // Tablet devices (e.g. the Nexus 7) do not support proximity sensors.
96 | // Note that the sensor will not be active until start() has been called.
97 | proximitySensor = ProximitySensor.create(context, new Runnable() {
98 | // This method will be called each time a state change is detected.
99 | // Example: user holds his hand over the device (closer than ~5 cm),
100 | // or removes his hand from the device.
101 | public void run() {
102 | onProximitySensorChangedState();
103 | }
104 | });
105 |
106 | }
107 |
108 | /**
109 |  * Creates and returns a new RTCAudioManager instance.
110 | */
111 | public static RTCAudioManager create(Context context) {
112 | return new RTCAudioManager(context);
113 | }
114 |
115 | /**
116 | * This method is called when the proximity sensor reports a state change,
117 | * e.g. from "NEAR to FAR" or from "FAR to NEAR".
118 | */
119 | private void onProximitySensorChangedState() {
120 | if (!useSpeakerphone.equals(SPEAKERPHONE_AUTO)) {
121 | return;
122 | }
123 |
124 | // The proximity sensor should only be activated when there are exactly two
125 | // available audio devices.
126 | if (audioDevices.size() == 2 && audioDevices.contains(AudioDevice.EARPIECE) && audioDevices.contains(AudioDevice.SPEAKER_PHONE)) {
127 | if (proximitySensor.sensorReportsNearState()) {
128 | // Sensor reports that a "handset is being held up to a person's ear",
129 | // or "something is covering the light sensor".
130 | setAudioDeviceInternal(AudioDevice.EARPIECE);
131 | } else {
132 | // Sensor reports that a "handset is removed from a person's ear", or
133 | // "the light sensor is no longer covered".
134 | setAudioDeviceInternal(AudioDevice.SPEAKER_PHONE);
135 | }
136 | }
137 | }
138 |
139 | public void start(AudioManagerEvents audioManagerEvents) {
140 | ThreadUtils.checkIsOnMainThread();
141 | if (amState == AudioManagerState.RUNNING) {
142 | return;
143 | }
144 | // TODO(henrika): perhaps call new method called preInitAudio() here if UNINITIALIZED.
145 |
146 | this.audioManagerEvents = audioManagerEvents;
147 | amState = AudioManagerState.RUNNING;
148 |
149 | // Store current audio state so we can restore it when stop() is called.
150 | savedAudioMode = audioManager.getMode();
151 | savedIsSpeakerPhoneOn = audioManager.isSpeakerphoneOn();
152 | savedIsMicrophoneMute = audioManager.isMicrophoneMute();
153 | hasWiredHeadset = hasWiredHeadset();
154 |
155 | // Create an AudioManager.OnAudioFocusChangeListener instance.
156 | audioFocusChangeListener = new android.media.AudioManager.OnAudioFocusChangeListener() {
157 | // Called on the listener to notify if the audio focus for this listener has been changed.
158 | // The |focusChange| value indicates whether the focus was gained, whether the focus was lost,
159 | // and whether that loss is transient, or whether the new focus holder will hold it for an
160 | // unknown amount of time.
161 | // TODO(henrika): possibly extend support of handling audio-focus changes. Only contains
162 | // logging for now.
163 | @Override
164 | public void onAudioFocusChange(int focusChange) {
165 | String typeOfChange = "AUDIOFOCUS_NOT_DEFINED";
166 | switch (focusChange) {
167 | case android.media.AudioManager.AUDIOFOCUS_GAIN:
168 | typeOfChange = "AUDIOFOCUS_GAIN";
169 | break;
170 | case android.media.AudioManager.AUDIOFOCUS_GAIN_TRANSIENT:
171 | typeOfChange = "AUDIOFOCUS_GAIN_TRANSIENT";
172 | break;
173 | case android.media.AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE:
174 | typeOfChange = "AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE";
175 | break;
176 | case android.media.AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK:
177 | typeOfChange = "AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK";
178 | break;
179 | case android.media.AudioManager.AUDIOFOCUS_LOSS:
180 | typeOfChange = "AUDIOFOCUS_LOSS";
181 | break;
182 | case android.media.AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
183 | typeOfChange = "AUDIOFOCUS_LOSS_TRANSIENT";
184 | break;
185 | case android.media.AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
186 | typeOfChange = "AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK";
187 | break;
188 | default:
189 | typeOfChange = "AUDIOFOCUS_INVALID";
190 | break;
191 | }
192 | }
193 | };
194 |
195 | // Request audio playout focus (without ducking) and install listener for changes in focus.
196 | int result = audioManager.requestAudioFocus(audioFocusChangeListener, android.media.AudioManager.STREAM_VOICE_CALL, android.media.AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);
197 |
198 | // Start by setting MODE_IN_COMMUNICATION as default audio mode. It is
199 | // required to be in this mode when playout and/or recording starts for
200 | // best possible VoIP performance.
201 | audioManager.setMode(android.media.AudioManager.MODE_IN_COMMUNICATION);
202 |
203 | // Always disable microphone mute during a WebRTC call.
204 | setMicrophoneMute(false);
205 |
206 | // Set initial device states.
207 | userSelectedAudioDevice = AudioDevice.NONE;
208 | selectedAudioDevice = AudioDevice.NONE;
209 | audioDevices.clear();
210 |
211 | // Initialize and start Bluetooth if a BT device is available or initiate
212 | // detection of new (enabled) BT devices.
213 | bluetoothManager.start();
214 |
215 | // Do initial selection of audio device. This setting can later be changed
216 | // either by adding/removing a BT or wired headset or by covering/uncovering
217 | // the proximity sensor.
218 | updateAudioDeviceState();
219 |
220 | // Register receiver for broadcast intents related to adding/removing a
221 | // wired headset.
222 | registerReceiver(wiredHeadsetReceiver, new IntentFilter(Intent.ACTION_HEADSET_PLUG));
223 | }
224 |
225 | @SuppressLint("WrongConstant")
226 | public void stop() {
227 | ThreadUtils.checkIsOnMainThread();
228 | if (amState != AudioManagerState.RUNNING) {
229 | return;
230 | }
231 | amState = AudioManagerState.UNINITIALIZED;
232 |
233 | unregisterReceiver(wiredHeadsetReceiver);
234 |
235 | bluetoothManager.stop();
236 |
237 | // Restore previously stored audio states.
238 | setSpeakerphoneOn(savedIsSpeakerPhoneOn);
239 | setMicrophoneMute(savedIsMicrophoneMute);
240 | audioManager.setMode(savedAudioMode);
241 |
242 | // Abandon audio focus. Gives the previous focus owner, if any, focus.
243 | audioManager.abandonAudioFocus(audioFocusChangeListener);
244 | audioFocusChangeListener = null;
245 |
246 | if (proximitySensor != null) {
247 | proximitySensor.stop();
248 | proximitySensor = null;
249 | }
250 |
251 | audioManagerEvents = null;
252 | }
253 |
254 | /**
255 | * Changes selection of the currently active audio device.
256 | */
257 | private void setAudioDeviceInternal(AudioDevice device) {
258 | switch (device) {
259 | case SPEAKER_PHONE:
260 | setSpeakerphoneOn(true);
261 | break;
262 | case EARPIECE:
263 | case BLUETOOTH:
264 | case WIRED_HEADSET:
265 | setSpeakerphoneOn(false);
266 | break;
267 | default:
268 | break;
269 | }
270 | selectedAudioDevice = device;
271 | }
272 |
273 | /**
274 | * Changes default audio device.
275 | * TODO(henrika): add usage of this method in the AppRTCMobile client.
276 | */
277 | public void setDefaultAudioDevice(AudioDevice defaultDevice) {
278 | ThreadUtils.checkIsOnMainThread();
279 | switch (defaultDevice) {
280 | case SPEAKER_PHONE:
281 | defaultAudioDevice = defaultDevice;
282 | break;
283 | case EARPIECE:
284 | if (hasEarpiece()) {
285 | defaultAudioDevice = defaultDevice;
286 | } else {
287 | defaultAudioDevice = AudioDevice.SPEAKER_PHONE;
288 | }
289 | break;
290 | default:
291 | break;
292 | }
293 | updateAudioDeviceState();
294 | }
295 |
296 | /**
297 | * Changes selection of the currently active audio device.
298 | */
299 | public void selectAudioDevice(AudioDevice device) {
300 | ThreadUtils.checkIsOnMainThread();
301 | // Note: selecting a device that is not currently in |audioDevices| has no immediate effect;
302 | // updateAudioDeviceState() below decides the actual routing from the available devices.
303 | userSelectedAudioDevice = device;
304 | updateAudioDeviceState();
305 | }
306 |
307 | /**
308 | * Returns current set of available/selectable audio devices.
309 | */
310 | public Set<AudioDevice> getAudioDevices() {
311 | ThreadUtils.checkIsOnMainThread();
312 | return Collections.unmodifiableSet(new HashSet<>(audioDevices));
313 | }
314 |
315 | /**
316 | * Returns the currently selected audio device.
317 | */
318 | public AudioDevice getSelectedAudioDevice() {
319 | ThreadUtils.checkIsOnMainThread();
320 | return selectedAudioDevice;
321 | }
322 |
323 | /**
324 | * Helper method for receiver registration.
325 | */
326 | private void registerReceiver(BroadcastReceiver receiver, IntentFilter filter) {
327 | apprtcContext.registerReceiver(receiver, filter);
328 | }
329 |
330 | /**
331 | * Helper method for unregistration of an existing receiver.
332 | */
333 | private void unregisterReceiver(BroadcastReceiver receiver) {
334 | apprtcContext.unregisterReceiver(receiver);
335 | }
336 |
337 | /**
338 | * Sets the speaker phone mode.
339 | */
340 | private void setSpeakerphoneOn(boolean on) {
341 | boolean wasOn = audioManager.isSpeakerphoneOn();
342 | if (wasOn == on) {
343 | return;
344 | }
345 | audioManager.setSpeakerphoneOn(on);
346 | }
347 |
348 | /**
349 | * Sets the microphone mute state.
350 | */
351 | private void setMicrophoneMute(boolean on) {
352 | boolean wasMuted = audioManager.isMicrophoneMute();
353 | if (wasMuted == on) {
354 | return;
355 | }
356 | audioManager.setMicrophoneMute(on);
357 | }
358 |
359 | /**
360 |  * Checks whether the device has an earpiece (i.e. the telephony feature is present).
361 | */
362 | private boolean hasEarpiece() {
363 | return apprtcContext.getPackageManager().hasSystemFeature(PackageManager.FEATURE_TELEPHONY);
364 | }
365 |
366 | /**
367 | * Checks whether a wired headset is connected or not.
368 | * This is not a valid indication that audio playback is actually over
369 | * the wired headset as audio routing depends on other conditions. We
370 | * only use it as an early indicator (during initialization) of an attached
371 | * wired headset.
372 | */
373 | @Deprecated
374 | private boolean hasWiredHeadset() {
375 | @SuppressLint("WrongConstant") final AudioDeviceInfo[] devices = audioManager.getDevices(android.media.AudioManager.GET_DEVICES_ALL);
376 | for (AudioDeviceInfo device : devices) {
377 | final int type = device.getType();
378 | if (type == AudioDeviceInfo.TYPE_WIRED_HEADSET) {
379 | return true;
380 | } else if (type == AudioDeviceInfo.TYPE_USB_DEVICE) {
381 | return true;
382 | }
383 | }
384 | return false;
385 | }
386 |
387 | /**
388 |  * Updates the list of possible audio devices and makes a new device selection.
389 | * TODO(henrika): add unit test to verify all state transitions.
390 | */
391 | public void updateAudioDeviceState() {
392 | ThreadUtils.checkIsOnMainThread();
393 |
394 | // Check if any Bluetooth headset is connected. The internal BT state will
395 | // change accordingly.
396 | // TODO(henrika): perhaps wrap required state into BT manager.
397 | if (bluetoothManager.getState() == BluetoothManager.State.HEADSET_AVAILABLE || bluetoothManager.getState() == BluetoothManager.State.HEADSET_UNAVAILABLE || bluetoothManager.getState() == BluetoothManager.State.SCO_DISCONNECTING) {
398 | bluetoothManager.updateDevice();
399 | }
400 |
401 | // Update the set of available audio devices.
402 | Set<AudioDevice> newAudioDevices = new HashSet<>();
403 |
404 | if (bluetoothManager.getState() == BluetoothManager.State.SCO_CONNECTED || bluetoothManager.getState() == BluetoothManager.State.SCO_CONNECTING || bluetoothManager.getState() == BluetoothManager.State.HEADSET_AVAILABLE) {
405 | newAudioDevices.add(AudioDevice.BLUETOOTH);
406 | }
407 |
408 | if (hasWiredHeadset) {
409 | // If a wired headset is connected, then it is the only possible option.
410 | newAudioDevices.add(AudioDevice.WIRED_HEADSET);
411 | } else {
412 | // No wired headset, hence the audio-device list can contain speaker
413 | // phone (on a tablet), or speaker phone and earpiece (on mobile phone).
414 | newAudioDevices.add(AudioDevice.SPEAKER_PHONE);
415 | if (hasEarpiece()) {
416 | newAudioDevices.add(AudioDevice.EARPIECE);
417 | }
418 | }
419 | // Store state which is set to true if the device list has changed.
420 | boolean audioDeviceSetUpdated = !audioDevices.equals(newAudioDevices);
421 | // Update the existing audio device set.
422 | audioDevices = newAudioDevices;
423 | // Correct user selected audio devices if needed.
424 | if (bluetoothManager.getState() == BluetoothManager.State.HEADSET_UNAVAILABLE && userSelectedAudioDevice == AudioDevice.BLUETOOTH) {
425 | // If BT is not available, it can't be the user selection.
426 | userSelectedAudioDevice = AudioDevice.NONE;
427 | }
428 | if (hasWiredHeadset && userSelectedAudioDevice == AudioDevice.SPEAKER_PHONE) {
429 | // If user selected speaker phone, but then plugged wired headset then make
430 | // wired headset as user selected device.
431 | userSelectedAudioDevice = AudioDevice.WIRED_HEADSET;
432 | }
433 | if (!hasWiredHeadset && userSelectedAudioDevice == AudioDevice.WIRED_HEADSET) {
434 | // If user selected wired headset, but then unplugged wired headset then make
435 | // speaker phone as user selected device.
436 | userSelectedAudioDevice = AudioDevice.SPEAKER_PHONE;
437 | }
438 |
439 | // Need to start Bluetooth if it is available and user either selected it explicitly or
440 | // user did not select any output device.
441 | boolean needBluetoothAudioStart = bluetoothManager.getState() == BluetoothManager.State.HEADSET_AVAILABLE && (userSelectedAudioDevice == AudioDevice.NONE || userSelectedAudioDevice == AudioDevice.BLUETOOTH);
442 |
443 | // Need to stop Bluetooth audio if user selected different device and
444 | // Bluetooth SCO connection is established or in the process.
445 | boolean needBluetoothAudioStop = (bluetoothManager.getState() == BluetoothManager.State.SCO_CONNECTED || bluetoothManager.getState() == BluetoothManager.State.SCO_CONNECTING) && (userSelectedAudioDevice != AudioDevice.NONE && userSelectedAudioDevice != AudioDevice.BLUETOOTH);
446 |
447 | // The flags computed above (needBluetoothAudioStart / needBluetoothAudioStop) fully
448 | // determine the SCO transitions performed below; no further Bluetooth state query is
449 | // needed at this point.
450 |
451 | // Start or stop Bluetooth SCO connection given states set earlier.
452 | if (needBluetoothAudioStop) {
453 | bluetoothManager.stopScoAudio();
454 | bluetoothManager.updateDevice();
455 | }
456 |
457 | if (needBluetoothAudioStart && !needBluetoothAudioStop) {
458 | // Attempt to start Bluetooth SCO audio (takes a few seconds to start).
459 | if (!bluetoothManager.startScoAudio()) {
460 | // Remove BLUETOOTH from list of available devices since SCO failed.
461 | audioDevices.remove(AudioDevice.BLUETOOTH);
462 | audioDeviceSetUpdated = true;
463 | }
464 | }
465 |
466 | // Update selected audio device.
467 | AudioDevice newAudioDevice;
468 |
469 | if (bluetoothManager.getState() == BluetoothManager.State.SCO_CONNECTED) {
470 | // If a Bluetooth headset is connected, then it should be used as the output audio
471 | // device. Note that it is not sufficient that a headset is available;
472 | // an active SCO channel must also be up and running.
473 | newAudioDevice = AudioDevice.BLUETOOTH;
474 | } else if (hasWiredHeadset) {
475 | // If a wired headset is connected, but Bluetooth is not, then wired headset is used as
476 | // audio device.
477 | newAudioDevice = AudioDevice.WIRED_HEADSET;
478 | } else {
479 | // No wired headset and no Bluetooth, hence the audio-device list can contain speaker
480 | // phone (on a tablet), or speaker phone and earpiece (on mobile phone).
481 | // |defaultAudioDevice| contains either AudioDevice.SPEAKER_PHONE or AudioDevice.EARPIECE
482 | // depending on the user's selection.
483 | newAudioDevice = defaultAudioDevice;
484 | }
485 | // Switch to new device but only if there has been any changes.
486 | if (newAudioDevice != selectedAudioDevice || audioDeviceSetUpdated) {
487 | // Do the required device switch.
488 | setAudioDeviceInternal(newAudioDevice);
489 |
490 | if (audioManagerEvents != null) {
491 | // Notify a listening client that audio device has been changed.
492 | audioManagerEvents.onAudioDeviceChanged(selectedAudioDevice, audioDevices);
493 | }
494 | }
495 | }
496 |
497 | /**
498 |  * AudioDevice enumerates the possible audio devices that we currently
499 |  * support.
500 | */
501 | public enum AudioDevice {
502 | SPEAKER_PHONE, WIRED_HEADSET, EARPIECE, BLUETOOTH, NONE
503 | }
504 |
505 | /**
506 | * AudioManager state.
507 | */
508 | public enum AudioManagerState {
509 | UNINITIALIZED, PREINITIALIZED, RUNNING,
510 | }
511 |
512 | /**
513 | * Selected audio device change event.
514 | */
515 | public interface AudioManagerEvents {
516 | // Callback fired when the selected audio device changes or the set of available audio devices changes.
517 | void onAudioDeviceChanged(AudioDevice selectedAudioDevice, Set<AudioDevice> availableAudioDevices);
518 | }
519 |
520 | /* Receiver which handles changes in wired headset availability. */
521 | private class WiredHeadsetReceiver extends BroadcastReceiver {
522 | private static final int STATE_UNPLUGGED = 0;
523 | private static final int STATE_PLUGGED = 1;
524 | private static final int HAS_NO_MIC = 0;
525 | private static final int HAS_MIC = 1;
526 |
527 | @Override
528 | public void onReceive(Context context, Intent intent) {
529 | int state = intent.getIntExtra("state", STATE_UNPLUGGED);
530 | int microphone = intent.getIntExtra("microphone", HAS_NO_MIC);
531 | String name = intent.getStringExtra("name");
532 | hasWiredHeadset = (state == STATE_PLUGGED);
533 | updateAudioDeviceState();
534 | }
535 | }
536 | }
537 |
--------------------------------------------------------------------------------