├── README.md
├── android
│   ├── src
│   │   └── main
│   │       ├── AndroidManifest.xml
│   │       └── java
│   │           └── com
│   │               └── zxcpoiu
│   │                   └── incallmanager
│   │                       ├── AppRTC
│   │                       │   ├── README
│   │                       │   ├── LICENSE
│   │                       │   ├── diff
│   │                       │   │   ├── AppRTCProximitySensor.java.diff
│   │                       │   │   └── AppRTCBluetoothManager.java.diff
│   │                       │   ├── AppRTCProximitySensor.java
│   │                       │   └── AppRTCBluetoothManager.java
│   │                       ├── InCallManagerPackage.java
│   │                       ├── InCallWakeLockUtils.java
│   │                       ├── InCallProximityManager.java
│   │                       └── InCallManagerModule.java
│   └── build.gradle
├── ios
│   ├── RNInCallManager
│   │   ├── RNInCallManager.h
│   │   └── RNInCallManager.m
│   └── RNInCallManager.xcodeproj
│       └── project.pbxproj
├── .gitignore
├── LICENSE
├── ReactNativeIncallManager.podspec
├── package.json
└── index.js
/README.md:
--------------------------------------------------------------------------------
1 | This repo has been moved to https://github.com/react-native-webrtc/react-native-incall-manager
2 |
3 |
--------------------------------------------------------------------------------
/android/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 | <manifest xmlns:android="http://schemas.android.com/apk/res/android"
2 |           package="com.zxcpoiu.incallmanager">
3 |
4 |     <uses-permission android:name="android.permission.BLUETOOTH" />
5 |
6 | </manifest>
--------------------------------------------------------------------------------
/ios/RNInCallManager/RNInCallManager.h:
--------------------------------------------------------------------------------
1 | //
2 | // RNInCallManager.h
3 | // RNInCallManager
4 | //
5 | // Created by Ian Yu-Hsun Lin (@ianlin) on 05/12/2017.
6 | // Copyright © 2017 zxcpoiu. All rights reserved.
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 | #import <AVFoundation/AVFoundation.h>
11 |
12 | #import <React/RCTBridgeModule.h>
13 | #import <React/RCTEventEmitter.h>
14 |
15 | @interface RNInCallManager : RCTEventEmitter <RCTBridgeModule>
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
/android/src/main/java/com/zxcpoiu/incallmanager/AppRTC/README:
--------------------------------------------------------------------------------
1 | From WebRTC Source
2 | M64
3 |
4 | Files in this library with the prefix "AppRTC" are all taken from the WebRTC source.
5 | The license / patents remain the originals; see LICENSE in this directory.
6 |
7 | This library slightly modifies the original class/package names so they compile within this package.
8 | You can find the diffs in the `diff` directory as well.
9 |
10 | - examples/androidapp/src/org/appspot/apprtc:
11 | * AppRTCBluetoothManager.java
12 | * AppRTCProximitySensor.java
13 |
--------------------------------------------------------------------------------
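
The README above describes renaming the vendored WebRTC classes into the com.zxcpoiu.incallmanager.AppRTC package. A minimal sketch of how code elsewhere in this library consumes them from the new package (the wrapper class and method below are illustrative only; InCallProximityManager.java later in this dump does the real wiring):

    import android.content.Context;

    import com.zxcpoiu.incallmanager.AppRTC.AppRTCProximitySensor;

    class ProximitySensorExample {
        // create() was widened from package-private to public in the diff so that
        // callers outside the AppRTC package can construct the sensor.
        static AppRTCProximitySensor newSensor(Context context, Runnable onSensorChanged) {
            return AppRTCProximitySensor.create(context, onSensorChanged);
        }
    }
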
/.gitignore:
--------------------------------------------------------------------------------
1 | # CUSTOM
2 | #
3 | .*.swp
4 |
5 | # OSX
6 | #
7 | .DS_Store
8 |
9 | # Xcode
10 | #
11 | build/
12 | *.pbxuser
13 | !default.pbxuser
14 | *.mode1v3
15 | !default.mode1v3
16 | *.mode2v3
17 | !default.mode2v3
18 | *.perspectivev3
19 | !default.perspectivev3
20 | xcuserdata
21 | *.xccheckout
22 | *.moved-aside
23 | DerivedData
24 | *.hmap
25 | *.ipa
26 | *.xcuserstate
27 | project.xcworkspace
28 |
29 | # Android/IJ
30 | #
31 | .idea
32 | .gradle
33 | local.properties
34 |
35 | # node.js
36 | #
37 | node_modules/
38 | npm-debug.log
39 |
--------------------------------------------------------------------------------
/android/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.library'
2 |
3 | def safeExtGet(prop, fallback) {
4 | rootProject.ext.has(prop) ? rootProject.ext.get(prop) : fallback
5 | }
6 |
7 | android {
8 | compileSdkVersion safeExtGet('compileSdkVersion', 23)
9 | buildToolsVersion safeExtGet('buildToolsVersion', "23.0.1")
10 |
11 | defaultConfig {
12 | minSdkVersion safeExtGet('minSdkVersion', 16)
13 | targetSdkVersion safeExtGet('targetSdkVersion', 22)
14 | versionCode 1
15 | versionName "1.0"
16 | }
17 | }
18 |
19 | dependencies {
20 | implementation 'com.facebook.react:react-native:+'
21 | }
22 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | ISC License
2 |
3 | Copyright (c) 2016, zxcpoiu
4 |
5 | Permission to use, copy, modify, and/or distribute this software for any
6 | purpose with or without fee is hereby granted, provided that the above
7 | copyright notice and this permission notice appear in all copies.
8 |
9 | THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
10 | WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
11 | MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
12 | ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
13 | WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
14 | ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
15 | OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
16 |
--------------------------------------------------------------------------------
/ReactNativeIncallManager.podspec:
--------------------------------------------------------------------------------
1 | require 'json'
2 |
3 | package = JSON.parse(File.read(File.join(__dir__, 'package.json')))
4 |
5 | Pod::Spec.new do |s|
6 | s.name = 'ReactNativeIncallManager'
7 | s.version = package['version']
8 | s.summary = package['description']
9 | s.description = package['description']
10 | s.homepage = package['homepage']
11 | s.license = package['license']
12 | s.author = package['author']
13 | s.source = { :git => 'https://github.com/zxcpoiu/react-native-incall-manager.git', :tag => s.version }
14 |
15 | s.platform = :ios, '9.0'
16 | s.ios.deployment_target = '8.0'
17 |
18 | s.preserve_paths = 'LICENSE', 'package.json'
19 | s.source_files = '**/*.{h,m}'
20 | s.exclude_files = 'example/**/*'
21 | s.dependency 'React-Core'
22 | end
23 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "react-native-incall-manager",
3 | "version": "3.3.0",
4 |   "description": "Handling media-routes/sensors/events during an audio/video chat on React Native",
5 | "main": "index.js",
6 | "scripts": {
7 | "test": "echo \"Error: no test specified\" && exit 1"
8 | },
9 | "repository": {
10 | "type": "git",
11 | "url": "git+https://github.com/zxcpoiu/react-native-incall-manager.git"
12 | },
13 | "keywords": [
14 | "React",
15 | "ReactNative",
16 | "WebRTC",
17 | "Phone",
18 | "Call",
19 | "InCall",
20 | "Talk",
21 | "SIP",
22 | "Voip"
23 | ],
24 | "author": "Henry Lin ",
25 | "license": "ISC",
26 | "bugs": {
27 | "url": "https://github.com/zxcpoiu/react-native-incall-manager/issues"
28 | },
29 | "homepage": "https://github.com/zxcpoiu/react-native-incall-manager#readme",
30 | "peerDependencies": {
31 | "react-native": ">=0.40.0"
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/android/src/main/java/com/zxcpoiu/incallmanager/AppRTC/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2011, The WebRTC project authors. All rights reserved.
2 |
3 | Redistribution and use in source and binary forms, with or without
4 | modification, are permitted provided that the following conditions are
5 | met:
6 |
7 | * Redistributions of source code must retain the above copyright
8 | notice, this list of conditions and the following disclaimer.
9 |
10 | * Redistributions in binary form must reproduce the above copyright
11 | notice, this list of conditions and the following disclaimer in
12 | the documentation and/or other materials provided with the
13 | distribution.
14 |
15 | * Neither the name of Google nor the names of its contributors may
16 | be used to endorse or promote products derived from this software
17 | without specific prior written permission.
18 |
19 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
20 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
21 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
22 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
23 | HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
24 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
25 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
26 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
27 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/android/src/main/java/com/zxcpoiu/incallmanager/InCallManagerPackage.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017 Henry Lin @zxcpoiu
3 | *
4 | * Permission to use, copy, modify, and distribute this software for any
5 | * purpose with or without fee is hereby granted, provided that the above
6 | * copyright notice and this permission notice appear in all copies.
7 | *
8 | * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 | * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 | * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 | * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 | * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 | * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 | * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
15 | */
16 |
17 | package com.zxcpoiu.incallmanager;
18 |
19 | import com.facebook.react.ReactPackage;
20 | import com.facebook.react.bridge.JavaScriptModule;
21 | import com.facebook.react.bridge.NativeModule;
22 | import com.facebook.react.bridge.ReactApplicationContext;
23 | import com.facebook.react.uimanager.ViewManager;
24 |
25 | import java.util.Collections;
26 | import java.util.List;
27 |
28 | public class InCallManagerPackage implements ReactPackage {
29 |
30 | @Override
31 |     public List<NativeModule> createNativeModules(ReactApplicationContext reactContext) {
32 | return Collections.singletonList(new InCallManagerModule(reactContext));
33 | }
34 |
35 | // Deprecated RN 0.47
36 |     public List<Class<? extends JavaScriptModule>> createJSModules() {
37 | return Collections.emptyList();
38 | }
39 |
40 | @Override
41 |     public List<ViewManager> createViewManagers(ReactApplicationContext reactContext) {
42 | return Collections.emptyList();
43 | }
44 |
45 | public static void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
46 | InCallManagerModule.onRequestPermissionsResult(requestCode, permissions, grantResults);
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
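
The static onRequestPermissionsResult() helper above exists so a host application can forward Android runtime-permission results into InCallManagerModule. A minimal sketch of that wiring, assuming a typical React Native host activity (MainActivity belongs to the host app, not to this library); apps that register packages manually would also add new InCallManagerPackage() to their getPackages() list:

    import com.facebook.react.ReactActivity;

    import com.zxcpoiu.incallmanager.InCallManagerPackage;

    public class MainActivity extends ReactActivity {
        @Override
        public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
            super.onRequestPermissionsResult(requestCode, permissions, grantResults);
            // Forward the result so InCallManagerModule can resolve its pending permission promises.
            InCallManagerPackage.onRequestPermissionsResult(requestCode, permissions, grantResults);
        }
    }
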
/android/src/main/java/com/zxcpoiu/incallmanager/AppRTC/diff/AppRTCProximitySensor.java.diff:
--------------------------------------------------------------------------------
1 | --- /home/zxcpoiu/git/webrtcbuilds/out/src/examples/androidapp/src/org/appspot/apprtc/AppRTCProximitySensor.java 2017-11-30 16:59:50.918956062 +0800
2 | +++ AppRTCProximitySensor.java 2017-12-08 18:02:05.004106849 +0800
3 | @@ -8,7 +8,7 @@
4 | * be found in the AUTHORS file in the root of the source tree.
5 | */
6 |
7 | -package org.appspot.apprtc;
8 | +package com.zxcpoiu.incallmanager.AppRTC;
9 |
10 | import android.content.Context;
11 | import android.hardware.Sensor;
12 | @@ -17,8 +17,6 @@
13 | import android.hardware.SensorManager;
14 | import android.os.Build;
15 | import android.util.Log;
16 | -import org.appspot.apprtc.util.AppRTCUtils;
17 | -import org.webrtc.ThreadUtils;
18 |
19 | /**
20 | * AppRTCProximitySensor manages functions related to the proximity sensor in
21 | @@ -35,7 +33,6 @@
22 | // This class should be created, started and stopped on one thread
23 | // (e.g. the main thread). We use |nonThreadSafe| to ensure that this is
24 | // the case. Only active when |DEBUG| is set to true.
25 | - private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
26 |
27 | private final Runnable onSensorStateListener;
28 | private final SensorManager sensorManager;
29 | @@ -43,12 +40,12 @@
30 | private boolean lastStateReportIsNear = false;
31 |
32 | /** Construction */
33 | - static AppRTCProximitySensor create(Context context, Runnable sensorStateListener) {
34 | + public static AppRTCProximitySensor create(Context context, Runnable sensorStateListener) {
35 | return new AppRTCProximitySensor(context, sensorStateListener);
36 | }
37 |
38 | private AppRTCProximitySensor(Context context, Runnable sensorStateListener) {
39 | - Log.d(TAG, "AppRTCProximitySensor" + AppRTCUtils.getThreadInfo());
40 | + Log.d(TAG, "AppRTCProximitySensor");
41 | onSensorStateListener = sensorStateListener;
42 | sensorManager = ((SensorManager) context.getSystemService(Context.SENSOR_SERVICE));
43 | }
44 | @@ -58,8 +55,7 @@
45 | * first time.
46 | */
47 | public boolean start() {
48 | - threadChecker.checkIsOnValidThread();
49 | - Log.d(TAG, "start" + AppRTCUtils.getThreadInfo());
50 | + Log.d(TAG, "start");
51 | if (!initDefaultSensor()) {
52 | // Proximity sensor is not supported on this device.
53 | return false;
54 | @@ -70,8 +66,7 @@
55 |
56 | /** Deactivate the proximity sensor. */
57 | public void stop() {
58 | - threadChecker.checkIsOnValidThread();
59 | - Log.d(TAG, "stop" + AppRTCUtils.getThreadInfo());
60 | + Log.d(TAG, "stop");
61 | if (proximitySensor == null) {
62 | return;
63 | }
64 | @@ -80,14 +75,11 @@
65 |
66 | /** Getter for last reported state. Set to true if "near" is reported. */
67 | public boolean sensorReportsNearState() {
68 | - threadChecker.checkIsOnValidThread();
69 | return lastStateReportIsNear;
70 | }
71 |
72 | @Override
73 | public final void onAccuracyChanged(Sensor sensor, int accuracy) {
74 | - threadChecker.checkIsOnValidThread();
75 | - AppRTCUtils.assertIsTrue(sensor.getType() == Sensor.TYPE_PROXIMITY);
76 | if (accuracy == SensorManager.SENSOR_STATUS_UNRELIABLE) {
77 | Log.e(TAG, "The values returned by this sensor cannot be trusted");
78 | }
79 | @@ -95,8 +87,6 @@
80 |
81 | @Override
82 | public final void onSensorChanged(SensorEvent event) {
83 | - threadChecker.checkIsOnValidThread();
84 | - AppRTCUtils.assertIsTrue(event.sensor.getType() == Sensor.TYPE_PROXIMITY);
85 | // As a best practice; do as little as possible within this method and
86 | // avoid blocking.
87 | float distanceInCentimeters = event.values[0];
88 | @@ -114,7 +104,7 @@
89 | onSensorStateListener.run();
90 | }
91 |
92 | - Log.d(TAG, "onSensorChanged" + AppRTCUtils.getThreadInfo() + ": "
93 | + Log.d(TAG, "onSensorChanged" + ": "
94 | + "accuracy=" + event.accuracy + ", timestamp=" + event.timestamp + ", distance="
95 | + event.values[0]);
96 | }
97 |
--------------------------------------------------------------------------------
/android/src/main/java/com/zxcpoiu/incallmanager/InCallWakeLockUtils.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017 Henry Lin @zxcpoiu
3 | *
4 | * Permission to use, copy, modify, and distribute this software for any
5 | * purpose with or without fee is hereby granted, provided that the above
6 | * copyright notice and this permission notice appear in all copies.
7 | *
8 | * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 | * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 | * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 | * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 | * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 | * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 | * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
15 | */
16 |
17 | package com.zxcpoiu.incallmanager;
18 |
19 | import android.content.Context;
20 | import android.os.PowerManager;
21 | import android.os.PowerManager.WakeLock;
22 | import android.util.Log;
23 |
24 | public class InCallWakeLockUtils {
25 | private static final String TAG = "InCallWakeLockUtils";
26 |
27 | private PowerManager mPowerManager;
28 |
29 | private WakeLock mFullLock = null;
30 | private WakeLock mPokeFullLock = null;
31 | private WakeLock mPartialLock = null;
32 |
33 |
34 | public InCallWakeLockUtils(Context context) {
35 | mPowerManager = (PowerManager) context.getSystemService(Context.POWER_SERVICE);
36 |
37 | mFullLock = mPowerManager.newWakeLock(PowerManager.FULL_WAKE_LOCK | PowerManager.ACQUIRE_CAUSES_WAKEUP | PowerManager.ON_AFTER_RELEASE, TAG);
38 | mFullLock.setReferenceCounted(false);
39 |
40 | mPartialLock = mPowerManager.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, TAG);
41 | mPartialLock.setReferenceCounted(false);
42 |
43 | mPokeFullLock = mPowerManager.newWakeLock(PowerManager.FULL_WAKE_LOCK | PowerManager.ACQUIRE_CAUSES_WAKEUP | PowerManager.ON_AFTER_RELEASE, TAG);
44 | mPokeFullLock.setReferenceCounted(false);
45 | }
46 |
47 | private boolean _acquireWakeLock(WakeLock lock) {
48 | return _acquireWakeLock(lock, 0);
49 | }
50 |
51 | private boolean _acquireWakeLock(WakeLock lock, long timeout) {
52 | synchronized (lock) {
53 | if (!lock.isHeld()) {
54 | if (timeout > 0) {
55 | lock.acquire(timeout);
56 | } else {
57 | lock.acquire();
58 | }
59 | return true;
60 | }
61 | }
62 | return false;
63 | }
64 |
65 | private boolean _releaseWakeLock(WakeLock lock) {
66 | synchronized (lock) {
67 | if (lock.isHeld()) {
68 | lock.release();
69 | return true;
70 | }
71 | }
72 | return false;
73 | }
74 |
75 | public boolean acquireFullWakeLock() {
76 | boolean sta = _acquireWakeLock(mFullLock);
77 | Log.d(TAG, "acquireFullWakeLock(). sta=" + sta);
78 | return sta;
79 | }
80 |
81 | public boolean releaseFullWakeLock() {
82 | boolean sta = _releaseWakeLock(mFullLock);
83 | Log.d(TAG, "releaseFullWakeLock(). sta=" + sta);
84 | return sta;
85 | }
86 |
87 | public boolean acquirePokeFullWakeLock() {
88 | boolean sta = _acquireWakeLock(mPokeFullLock);
89 | Log.d(TAG, "acquirePokeFullWakeLock(). sta=" + sta);
90 | return sta;
91 | }
92 |
93 | public boolean releasePokeFullWakeLock() {
94 | boolean sta = _releaseWakeLock(mPokeFullLock);
95 | Log.d(TAG, "releasePokeFullWakeLock(). sta=" + sta);
96 | return sta;
97 | }
98 |
99 | public boolean acquirePartialWakeLock() {
100 | boolean sta = _acquireWakeLock(mPartialLock);
101 | Log.d(TAG, "acquirePartialWakeLock(). sta=" + sta);
102 | return sta;
103 | }
104 |
105 | public boolean releasePartialWakeLock() {
106 | boolean sta = _releaseWakeLock(mPartialLock);
107 | Log.d(TAG, "releasePartialWakeLock(). sta=" + sta);
108 | return sta;
109 | }
110 |
111 | public boolean acquirePokeFullWakeLockReleaseAfter(long timeout) {
112 | boolean sta = _acquireWakeLock(mPokeFullLock, timeout);
113 | Log.d(TAG, String.format("acquirePokeFullWakeLockReleaseAfter() timeout=%s, sta=%s", timeout, sta));
114 | return sta;
115 | }
116 | }
117 |
--------------------------------------------------------------------------------
/android/src/main/java/com/zxcpoiu/incallmanager/AppRTC/diff/AppRTCBluetoothManager.java.diff:
--------------------------------------------------------------------------------
1 | --- /home/zxcpoiu/git/webrtcbuilds/out/src/examples/androidapp/src/org/appspot/apprtc/AppRTCBluetoothManager.java 2017-11-30 16:59:50.918956062 +0800
2 | +++ AppRTCBluetoothManager.java 2017-12-08 18:01:45.348130079 +0800
3 | @@ -8,7 +8,7 @@
4 | * be found in the AUTHORS file in the root of the source tree.
5 | */
6 |
7 | -package org.appspot.apprtc;
8 | +package com.zxcpoiu.incallmanager.AppRTC;
9 |
10 | import android.annotation.SuppressLint;
11 | import android.bluetooth.BluetoothAdapter;
12 | @@ -27,8 +27,8 @@
13 | import android.util.Log;
14 | import java.util.List;
15 | import java.util.Set;
16 | -import org.appspot.apprtc.util.AppRTCUtils;
17 | -import org.webrtc.ThreadUtils;
18 | +
19 | +import com.zxcpoiu.incallmanager.InCallManagerModule;
20 |
21 | /**
22 | * AppRTCProximitySensor manages functions related to Bluetoth devices in the
23 | @@ -63,7 +63,7 @@
24 | }
25 |
26 | private final Context apprtcContext;
27 | - private final AppRTCAudioManager apprtcAudioManager;
28 | + private final InCallManagerModule apprtcAudioManager;
29 | private final AudioManager audioManager;
30 | private final Handler handler;
31 |
32 | @@ -190,14 +190,13 @@
33 | }
34 |
35 | /** Construction. */
36 | - static AppRTCBluetoothManager create(Context context, AppRTCAudioManager audioManager) {
37 | - Log.d(TAG, "create" + AppRTCUtils.getThreadInfo());
38 | + public static AppRTCBluetoothManager create(Context context, InCallManagerModule audioManager) {
39 | + Log.d(TAG, "create");
40 | return new AppRTCBluetoothManager(context, audioManager);
41 | }
42 |
43 | - protected AppRTCBluetoothManager(Context context, AppRTCAudioManager audioManager) {
44 | + protected AppRTCBluetoothManager(Context context, InCallManagerModule audioManager) {
45 | Log.d(TAG, "ctor");
46 | - ThreadUtils.checkIsOnMainThread();
47 | apprtcContext = context;
48 | apprtcAudioManager = audioManager;
49 | this.audioManager = getAudioManager(context);
50 | @@ -209,7 +208,6 @@
51 |
52 | /** Returns the internal state. */
53 | public State getState() {
54 | - ThreadUtils.checkIsOnMainThread();
55 | return bluetoothState;
56 | }
57 |
58 | @@ -227,7 +225,6 @@
59 | * change.
60 | */
61 | public void start() {
62 | - ThreadUtils.checkIsOnMainThread();
63 | Log.d(TAG, "start");
64 | if (!hasPermission(apprtcContext, android.Manifest.permission.BLUETOOTH)) {
65 | Log.w(TAG, "Process (pid=" + Process.myPid() + ") lacks BLUETOOTH permission");
66 | @@ -275,7 +272,6 @@
67 |
68 | /** Stops and closes all components related to Bluetooth audio. */
69 | public void stop() {
70 | - ThreadUtils.checkIsOnMainThread();
71 | Log.d(TAG, "stop: BT state=" + bluetoothState);
72 | if (bluetoothAdapter == null) {
73 | return;
74 | @@ -312,7 +308,6 @@
75 | * accept SCO audio without a "call".
76 | */
77 | public boolean startScoAudio() {
78 | - ThreadUtils.checkIsOnMainThread();
79 | Log.d(TAG, "startSco: BT state=" + bluetoothState + ", "
80 | + "attempts: " + scoConnectionAttempts + ", "
81 | + "SCO is on: " + isScoOn());
82 | @@ -341,7 +336,6 @@
83 |
84 | /** Stops Bluetooth SCO connection with remote device. */
85 | public void stopScoAudio() {
86 | - ThreadUtils.checkIsOnMainThread();
87 | Log.d(TAG, "stopScoAudio: BT state=" + bluetoothState + ", "
88 | + "SCO is on: " + isScoOn());
89 | if (bluetoothState != State.SCO_CONNECTING && bluetoothState != State.SCO_CONNECTED) {
90 | @@ -432,21 +426,18 @@
91 |
92 | /** Ensures that the audio manager updates its list of available audio devices. */
93 | private void updateAudioDeviceState() {
94 | - ThreadUtils.checkIsOnMainThread();
95 | Log.d(TAG, "updateAudioDeviceState");
96 | apprtcAudioManager.updateAudioDeviceState();
97 | }
98 |
99 | /** Starts timer which times out after BLUETOOTH_SCO_TIMEOUT_MS milliseconds. */
100 | private void startTimer() {
101 | - ThreadUtils.checkIsOnMainThread();
102 | Log.d(TAG, "startTimer");
103 | handler.postDelayed(bluetoothTimeoutRunnable, BLUETOOTH_SCO_TIMEOUT_MS);
104 | }
105 |
106 | /** Cancels any outstanding timer tasks. */
107 | private void cancelTimer() {
108 | - ThreadUtils.checkIsOnMainThread();
109 | Log.d(TAG, "cancelTimer");
110 | handler.removeCallbacks(bluetoothTimeoutRunnable);
111 | }
112 | @@ -456,7 +447,6 @@
113 | * happens when the BT device has been turned on during an ongoing call.
114 | */
115 | private void bluetoothTimeout() {
116 | - ThreadUtils.checkIsOnMainThread();
117 | if (bluetoothState == State.UNINITIALIZED || bluetoothHeadset == null) {
118 | return;
119 | }
120 |
--------------------------------------------------------------------------------
/android/src/main/java/com/zxcpoiu/incallmanager/AppRTC/AppRTCProximitySensor.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014 The WebRTC Project Authors. All rights reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | package com.zxcpoiu.incallmanager.AppRTC;
12 |
13 | import android.content.Context;
14 | import android.hardware.Sensor;
15 | import android.hardware.SensorEvent;
16 | import android.hardware.SensorEventListener;
17 | import android.hardware.SensorManager;
18 | import android.os.Build;
19 | import android.util.Log;
20 |
21 | /**
22 | * AppRTCProximitySensor manages functions related to the proximity sensor in
23 | * the AppRTC demo.
24 | * On most device, the proximity sensor is implemented as a boolean-sensor.
25 | * It returns just two values "NEAR" or "FAR". Thresholding is done on the LUX
26 | * value i.e. the LUX value of the light sensor is compared with a threshold.
27 | * A LUX-value more than the threshold means the proximity sensor returns "FAR".
28 | * Anything less than the threshold value and the sensor returns "NEAR".
29 | */
30 | public class AppRTCProximitySensor implements SensorEventListener {
31 | private static final String TAG = "AppRTCProximitySensor";
32 |
33 | // This class should be created, started and stopped on one thread
34 | // (e.g. the main thread). We use |nonThreadSafe| to ensure that this is
35 | // the case. Only active when |DEBUG| is set to true.
36 |
37 | private final Runnable onSensorStateListener;
38 | private final SensorManager sensorManager;
39 | private Sensor proximitySensor = null;
40 | private boolean lastStateReportIsNear = false;
41 |
42 | /** Construction */
43 | public static AppRTCProximitySensor create(Context context, Runnable sensorStateListener) {
44 | return new AppRTCProximitySensor(context, sensorStateListener);
45 | }
46 |
47 | private AppRTCProximitySensor(Context context, Runnable sensorStateListener) {
48 | Log.d(TAG, "AppRTCProximitySensor");
49 | onSensorStateListener = sensorStateListener;
50 | sensorManager = ((SensorManager) context.getSystemService(Context.SENSOR_SERVICE));
51 | }
52 |
53 | /**
54 | * Activate the proximity sensor. Also do initialization if called for the
55 | * first time.
56 | */
57 | public boolean start() {
58 | Log.d(TAG, "start");
59 | if (!initDefaultSensor()) {
60 | // Proximity sensor is not supported on this device.
61 | return false;
62 | }
63 | sensorManager.registerListener(this, proximitySensor, SensorManager.SENSOR_DELAY_NORMAL);
64 | return true;
65 | }
66 |
67 | /** Deactivate the proximity sensor. */
68 | public void stop() {
69 | Log.d(TAG, "stop");
70 | if (proximitySensor == null) {
71 | return;
72 | }
73 | sensorManager.unregisterListener(this, proximitySensor);
74 | }
75 |
76 | /** Getter for last reported state. Set to true if "near" is reported. */
77 | public boolean sensorReportsNearState() {
78 | return lastStateReportIsNear;
79 | }
80 |
81 | @Override
82 | public final void onAccuracyChanged(Sensor sensor, int accuracy) {
83 | if (accuracy == SensorManager.SENSOR_STATUS_UNRELIABLE) {
84 | Log.e(TAG, "The values returned by this sensor cannot be trusted");
85 | }
86 | }
87 |
88 | @Override
89 | public final void onSensorChanged(SensorEvent event) {
90 | // As a best practice; do as little as possible within this method and
91 | // avoid blocking.
92 | float distanceInCentimeters = event.values[0];
93 | if (distanceInCentimeters < proximitySensor.getMaximumRange()) {
94 | Log.d(TAG, "Proximity sensor => NEAR state");
95 | lastStateReportIsNear = true;
96 | } else {
97 | Log.d(TAG, "Proximity sensor => FAR state");
98 | lastStateReportIsNear = false;
99 | }
100 |
101 | // Report about new state to listening client. Client can then call
102 | // sensorReportsNearState() to query the current state (NEAR or FAR).
103 | if (onSensorStateListener != null) {
104 | onSensorStateListener.run();
105 | }
106 |
107 | Log.d(TAG, "onSensorChanged" + ": "
108 | + "accuracy=" + event.accuracy + ", timestamp=" + event.timestamp + ", distance="
109 | + event.values[0]);
110 | }
111 |
112 | /**
113 | * Get default proximity sensor if it exists. Tablet devices (e.g. Nexus 7)
114 | * does not support this type of sensor and false will be returned in such
115 | * cases.
116 | */
117 | private boolean initDefaultSensor() {
118 | if (proximitySensor != null) {
119 | return true;
120 | }
121 | proximitySensor = sensorManager.getDefaultSensor(Sensor.TYPE_PROXIMITY);
122 | if (proximitySensor == null) {
123 | return false;
124 | }
125 | logProximitySensorInfo();
126 | return true;
127 | }
128 |
129 | /** Helper method for logging information about the proximity sensor. */
130 | private void logProximitySensorInfo() {
131 | if (proximitySensor == null) {
132 | return;
133 | }
134 | StringBuilder info = new StringBuilder("Proximity sensor: ");
135 | info.append("name=").append(proximitySensor.getName());
136 | info.append(", vendor: ").append(proximitySensor.getVendor());
137 | info.append(", power: ").append(proximitySensor.getPower());
138 | info.append(", resolution: ").append(proximitySensor.getResolution());
139 | info.append(", max range: ").append(proximitySensor.getMaximumRange());
140 | info.append(", min delay: ").append(proximitySensor.getMinDelay());
141 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT_WATCH) {
142 | // Added in API level 20.
143 | info.append(", type: ").append(proximitySensor.getStringType());
144 | }
145 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
146 | // Added in API level 21.
147 | info.append(", max delay: ").append(proximitySensor.getMaxDelay());
148 | info.append(", reporting mode: ").append(proximitySensor.getReportingMode());
149 | info.append(", isWakeUpSensor: ").append(proximitySensor.isWakeUpSensor());
150 | }
151 | Log.d(TAG, info.toString());
152 | }
153 | }
154 |
--------------------------------------------------------------------------------
/android/src/main/java/com/zxcpoiu/incallmanager/InCallProximityManager.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017 Henry Lin @zxcpoiu
3 | *
4 | * Permission to use, copy, modify, and distribute this software for any
5 | * purpose with or without fee is hereby granted, provided that the above
6 | * copyright notice and this permission notice appear in all copies.
7 | *
8 | * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 | * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 | * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 | * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 | * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 | * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 | * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
15 | */
16 |
17 | package com.zxcpoiu.incallmanager;
18 |
19 | import android.content.Context;
20 | import android.hardware.Sensor;
21 | import android.hardware.SensorManager;
22 | import android.os.PowerManager;
23 | import android.os.PowerManager.WakeLock;
24 | import android.util.Log;
25 |
26 | import java.lang.reflect.Field;
27 | import java.lang.reflect.Method;
28 | import java.lang.Runnable;
29 |
30 | import com.zxcpoiu.incallmanager.AppRTC.AppRTCProximitySensor;
31 |
32 | public class InCallProximityManager {
33 | private static final String TAG = "InCallProximityManager";
34 |
35 | private WakeLock mProximityLock = null;
36 | private Method mPowerManagerRelease;
37 | private boolean proximitySupported = false;
38 | private AppRTCProximitySensor proximitySensor = null;
39 |
40 | /** Construction */
41 | static InCallProximityManager create(Context context, final InCallManagerModule inCallManager) {
42 | return new InCallProximityManager(context, inCallManager);
43 | }
44 |
45 | private InCallProximityManager(Context context, final InCallManagerModule inCallManager) {
46 | Log.d(TAG, "InCallProximityManager");
47 | checkProximitySupport(context);
48 | if (proximitySupported) {
49 | proximitySensor = AppRTCProximitySensor.create(context,
50 | new Runnable() {
51 | @Override
52 | public void run() {
53 | inCallManager.onProximitySensorChangedState(proximitySensor.sensorReportsNearState());
54 | }
55 | }
56 | );
57 | }
58 | }
59 |
60 | private void checkProximitySupport(Context context) {
61 | SensorManager sensorManager = (SensorManager) context.getSystemService(Context.SENSOR_SERVICE);
62 | if (sensorManager.getDefaultSensor(Sensor.TYPE_PROXIMITY) == null) {
63 | proximitySupported = false;
64 | return;
65 | }
66 |
67 | PowerManager powerManager = (PowerManager) context.getSystemService(Context.POWER_SERVICE);
68 |
69 | proximitySupported = true;
70 |
71 | // --- Check if PROXIMITY_SCREEN_OFF_WAKE_LOCK is implemented.
72 | try {
73 | boolean _proximitySupported = false;
74 | Field field = PowerManager.class.getDeclaredField("PROXIMITY_SCREEN_OFF_WAKE_LOCK");
75 | int proximityScreenOffWakeLock = (Integer) field.get(null);
76 |
77 | if (android.os.Build.VERSION.SDK_INT < 17) {
78 | Method method = powerManager.getClass().getDeclaredMethod("getSupportedWakeLockFlags");
79 | int powerManagerSupportedFlags = (Integer) method.invoke(powerManager);
80 | _proximitySupported = ((powerManagerSupportedFlags & proximityScreenOffWakeLock) != 0x0);
81 | } else {
82 | // --- android 4.2+
83 | Method method = powerManager.getClass().getDeclaredMethod("isWakeLockLevelSupported", int.class);
84 | _proximitySupported = (Boolean) method.invoke(powerManager, proximityScreenOffWakeLock);
85 | }
86 |
87 | if (_proximitySupported) {
88 | mProximityLock = powerManager.newWakeLock(proximityScreenOffWakeLock, TAG);
89 | mProximityLock.setReferenceCounted(false);
90 | }
91 | } catch (Exception e) {
92 | Log.d(TAG, "Failed to get proximity screen locker. exception: ", e);
93 | }
94 |
95 | if (mProximityLock != null) {
96 | Log.d(TAG, "use native screen locker...");
97 | try {
98 | mPowerManagerRelease = mProximityLock.getClass().getDeclaredMethod("release", int.class);
99 | } catch (Exception e) {
100 | Log.d(TAG, "failed to get proximity screen locker: `release()`. exception: ", e);
101 | }
102 | } else {
103 | Log.d(TAG, "fallback to old school screen locker...");
104 | }
105 | }
106 |
107 | public boolean start() {
108 | if (!proximitySupported) {
109 | return false;
110 | }
111 | return proximitySensor.start();
112 | }
113 |
114 | public void stop() {
115 | proximitySensor.stop();
116 | }
117 |
118 | public boolean isProximitySupported() {
119 | return proximitySupported;
120 | }
121 |
122 | public boolean isProximityWakeLockSupported() {
123 | return mProximityLock != null;
124 | }
125 |
126 | public boolean getProximityIsNear() {
127 | return (proximitySupported) ? proximitySensor.sensorReportsNearState() : false;
128 | }
129 |
130 | public void acquireProximityWakeLock() {
131 | if (!isProximityWakeLockSupported()) {
132 | return;
133 | }
134 | synchronized (mProximityLock) {
135 | if (!mProximityLock.isHeld()) {
136 | Log.d(TAG, "acquireProximityWakeLock()");
137 | mProximityLock.acquire();
138 | }
139 | }
140 | }
141 |
142 | public void releaseProximityWakeLock(final boolean waitForNoProximity) {
143 | if (!isProximityWakeLockSupported()) {
144 | return;
145 | }
146 | synchronized (mProximityLock) {
147 | if (mProximityLock.isHeld()) {
148 | try {
149 | int flags = waitForNoProximity ? PowerManager.RELEASE_FLAG_WAIT_FOR_NO_PROXIMITY : 0;
150 | mPowerManagerRelease.invoke(mProximityLock, flags);
151 | Log.d(TAG, "releaseProximityWakeLock()");
152 | } catch (Exception e) {
153 | Log.e(TAG, "failed to release proximity lock. e: ", e);
154 | }
155 | }
156 | }
157 | }
158 | }
159 |
--------------------------------------------------------------------------------
/index.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 | var _InCallManager = require('react-native').NativeModules.InCallManager;
3 | import {
4 | Platform,
5 | Vibration,
6 | } from 'react-native';
7 |
8 | class InCallManager {
9 | constructor() {
10 | this.vibrate = false;
11 | this.recordPermission = 'unknow';
12 | this.cameraPermission = 'unknow';
13 | this.audioUriMap = {
14 | ringtone: { _BUNDLE_: null, _DEFAULT_: null},
15 | ringback: { _BUNDLE_: null, _DEFAULT_: null},
16 | busytone: { _BUNDLE_: null, _DEFAULT_: null},
17 | };
18 | this.checkRecordPermission = this.checkRecordPermission.bind(this);
19 | this.requestRecordPermission = this.requestRecordPermission.bind(this);
20 | this.checkCameraPermission = this.checkCameraPermission.bind(this);
21 | this.requestCameraPermission = this.requestCameraPermission.bind(this);
22 | this.checkRecordPermission();
23 | this.checkCameraPermission();
24 | }
25 |
26 | start(setup) {
27 | setup = (setup === undefined) ? {} : setup;
28 | let auto = (setup.auto === false) ? false : true;
29 | let media = (setup.media === 'video') ? 'video' : 'audio';
30 | let ringback = (!!setup.ringback) ? (typeof setup.ringback === 'string') ? setup.ringback : "" : "";
31 | _InCallManager.start(media, auto, ringback);
32 | }
33 |
34 | stop(setup) {
35 | setup = (setup === undefined) ? {} : setup;
36 | let busytone = (!!setup.busytone) ? (typeof setup.busytone === 'string') ? setup.busytone : "" : "";
37 | _InCallManager.stop(busytone);
38 | }
39 |
40 | turnScreenOff() {
41 | _InCallManager.turnScreenOff();
42 | }
43 |
44 | turnScreenOn() {
45 | _InCallManager.turnScreenOn();
46 | }
47 |
48 | async getIsWiredHeadsetPluggedIn() {
49 | if (Platform.OS === 'ios') {
50 | return await _InCallManager.getIsWiredHeadsetPluggedIn();
51 | } else {
52 | console.log("Android doesn't support getIsWiredHeadsetPluggedIn() yet.");
53 | return null;
54 | }
55 | }
56 |
57 | setFlashOn(enable, brightness) {
58 | if (Platform.OS === 'ios') {
59 | enable = (enable === true) ? true : false;
60 | brightness = (typeof brightness === 'number') ? brightness : 0;
61 | _InCallManager.setFlashOn(enable, brightness);
62 | } else {
63 | console.log("Android doesn't support setFlashOn(enable, brightness)");
64 | }
65 | }
66 |
67 |
68 | setKeepScreenOn(enable) {
69 | enable = (enable === true) ? true : false;
70 | _InCallManager.setKeepScreenOn(enable);
71 | }
72 |
73 | setSpeakerphoneOn(enable) {
74 | enable = (enable === true) ? true : false;
75 | _InCallManager.setSpeakerphoneOn(enable);
76 | }
77 |
78 | setForceSpeakerphoneOn(_flag) {
79 | let flag = (typeof _flag === "boolean") ? (_flag) ? 1 : -1 : 0;
80 | _InCallManager.setForceSpeakerphoneOn(flag);
81 | }
82 |
83 | setMicrophoneMute(enable) {
84 | enable = (enable === true) ? true : false;
85 | _InCallManager.setMicrophoneMute(enable);
86 | }
87 |
88 | startRingtone(ringtone, vibrate_pattern, ios_category, seconds) {
89 | ringtone = (typeof ringtone === 'string') ? ringtone : "_DEFAULT_";
90 | this.vibrate = (Array.isArray(vibrate_pattern)) ? true : false;
91 | ios_category = (ios_category === 'playback') ? 'playback' : "default";
92 | seconds = (typeof seconds === 'number' && seconds > 0) ? parseInt(seconds) : -1; // --- android only, default looping
93 |
94 | if (Platform.OS === 'android') {
95 | _InCallManager.startRingtone(ringtone, seconds);
96 | } else {
97 | _InCallManager.startRingtone(ringtone, ios_category);
98 | }
99 |
100 | // --- should not use repeat, it may cause infinite loop in some cases.
101 | if (this.vibrate) {
102 | Vibration.vibrate(vibrate_pattern, false); // --- ios needs RN 0.34 to support vibration pattern
103 | }
104 | }
105 |
106 | stopRingtone() {
107 | if (this.vibrate) {
108 | Vibration.cancel();
109 | }
110 | _InCallManager.stopRingtone();
111 | }
112 |
113 | startRingback(ringback) {
114 | ringback = (typeof ringback === 'string') ? ringback : "_DTMF_";
115 |
116 | _InCallManager.startRingback(ringback);
117 | }
118 |
119 | stopRingback() {
120 | _InCallManager.stopRingback();
121 | }
122 |
123 | async checkRecordPermission() {
124 | // --- on android which api < 23, it will always be "granted"
125 | let result = await _InCallManager.checkRecordPermission();
126 | this.recordPermission = result;
127 | return result;
128 | }
129 |
130 | async requestRecordPermission() {
131 | // --- on android which api < 23, it will always be "granted"
132 | let result = await _InCallManager.requestRecordPermission();
133 | this.recordPermission = result;
134 | return result;
135 | }
136 |
137 | async checkCameraPermission() {
138 | // --- on android which api < 23, it will always be "granted"
139 | let result = await _InCallManager.checkCameraPermission();
140 | this.cameraPermission = result;
141 | return result;
142 | }
143 |
144 | async requestCameraPermission() {
145 | // --- on android which api < 23, it will always be "granted"
146 | let result = await _InCallManager.requestCameraPermission();
147 | this.cameraPermission = result;
148 | return result;
149 | }
150 |
151 | pokeScreen(_timeout) {
152 | if (Platform.OS === 'android') {
153 | let timeout = (typeof _timeout === "number" && _timeout > 0) ? _timeout : 3000; // --- default 3000 ms
154 | _InCallManager.pokeScreen(timeout);
155 | } else {
156 | console.log("ios doesn't support pokeScreen()");
157 | }
158 | }
159 |
160 | async getAudioUri(audioType, fileType) {
161 | if (typeof this.audioUriMap[audioType] === "undefined") {
162 | return null;
163 | }
164 | if (this.audioUriMap[audioType][fileType]) {
165 | return this.audioUriMap[audioType][fileType];
166 | } else {
167 | try {
168 | let result = await _InCallManager.getAudioUriJS(audioType, fileType);
169 | if (typeof result === 'string' && result.length > 0) {
170 | this.audioUriMap[audioType][fileType] = result;
171 | return result
172 | } else {
173 | return null;
174 | }
175 | } catch (err) {
176 | return null;
177 | }
178 | }
179 | }
180 |
181 | async chooseAudioRoute(route) {
182 | let result = await _InCallManager.chooseAudioRoute(route);
183 | return result;
184 | }
185 | }
186 |
187 | export default new InCallManager();
188 |
--------------------------------------------------------------------------------
/ios/RNInCallManager.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 48;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | 231CD25B1FD68A17004DD25D /* RNInCallManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 231CD25A1FD68A17004DD25D /* RNInCallManager.m */; };
11 | 231CD25C1FD68A17004DD25D /* RNInCallManager.h in CopyFiles */ = {isa = PBXBuildFile; fileRef = 231CD2591FD68A17004DD25D /* RNInCallManager.h */; };
12 | /* End PBXBuildFile section */
13 |
14 | /* Begin PBXCopyFilesBuildPhase section */
15 | 231CD2541FD68A17004DD25D /* CopyFiles */ = {
16 | isa = PBXCopyFilesBuildPhase;
17 | buildActionMask = 2147483647;
18 | dstPath = "include/$(PRODUCT_NAME)";
19 | dstSubfolderSpec = 16;
20 | files = (
21 | 231CD25C1FD68A17004DD25D /* RNInCallManager.h in CopyFiles */,
22 | );
23 | runOnlyForDeploymentPostprocessing = 0;
24 | };
25 | /* End PBXCopyFilesBuildPhase section */
26 |
27 | /* Begin PBXFileReference section */
28 | 231CD2561FD68A17004DD25D /* libRNInCallManager.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libRNInCallManager.a; sourceTree = BUILT_PRODUCTS_DIR; };
29 | 		231CD2591FD68A17004DD25D /* RNInCallManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNInCallManager.h; sourceTree = "<group>"; };
30 | 		231CD25A1FD68A17004DD25D /* RNInCallManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNInCallManager.m; sourceTree = "<group>"; };
31 | /* End PBXFileReference section */
32 |
33 | /* Begin PBXFrameworksBuildPhase section */
34 | 231CD2531FD68A17004DD25D /* Frameworks */ = {
35 | isa = PBXFrameworksBuildPhase;
36 | buildActionMask = 2147483647;
37 | files = (
38 | );
39 | runOnlyForDeploymentPostprocessing = 0;
40 | };
41 | /* End PBXFrameworksBuildPhase section */
42 |
43 | /* Begin PBXGroup section */
44 | 231CD24D1FD68A17004DD25D = {
45 | isa = PBXGroup;
46 | children = (
47 | 231CD2581FD68A17004DD25D /* RNInCallManager */,
48 | 231CD2571FD68A17004DD25D /* Products */,
49 | );
50 | 			sourceTree = "<group>";
51 | };
52 | 231CD2571FD68A17004DD25D /* Products */ = {
53 | isa = PBXGroup;
54 | children = (
55 | 231CD2561FD68A17004DD25D /* libRNInCallManager.a */,
56 | );
57 | name = Products;
58 | 			sourceTree = "<group>";
59 | };
60 | 231CD2581FD68A17004DD25D /* RNInCallManager */ = {
61 | isa = PBXGroup;
62 | children = (
63 | 231CD2591FD68A17004DD25D /* RNInCallManager.h */,
64 | 231CD25A1FD68A17004DD25D /* RNInCallManager.m */,
65 | );
66 | path = RNInCallManager;
67 | 			sourceTree = "<group>";
68 | };
69 | /* End PBXGroup section */
70 |
71 | /* Begin PBXNativeTarget section */
72 | 231CD2551FD68A17004DD25D /* RNInCallManager */ = {
73 | isa = PBXNativeTarget;
74 | buildConfigurationList = 231CD25F1FD68A17004DD25D /* Build configuration list for PBXNativeTarget "RNInCallManager" */;
75 | buildPhases = (
76 | 231CD2521FD68A17004DD25D /* Sources */,
77 | 231CD2531FD68A17004DD25D /* Frameworks */,
78 | 231CD2541FD68A17004DD25D /* CopyFiles */,
79 | );
80 | buildRules = (
81 | );
82 | dependencies = (
83 | );
84 | name = RNInCallManager;
85 | productName = RNInCallManager;
86 | productReference = 231CD2561FD68A17004DD25D /* libRNInCallManager.a */;
87 | productType = "com.apple.product-type.library.static";
88 | };
89 | /* End PBXNativeTarget section */
90 |
91 | /* Begin PBXProject section */
92 | 231CD24E1FD68A17004DD25D /* Project object */ = {
93 | isa = PBXProject;
94 | attributes = {
95 | LastUpgradeCheck = 0910;
96 | ORGANIZATIONNAME = zxcpoiu;
97 | TargetAttributes = {
98 | 231CD2551FD68A17004DD25D = {
99 | CreatedOnToolsVersion = 9.1;
100 | ProvisioningStyle = Automatic;
101 | };
102 | };
103 | };
104 | buildConfigurationList = 231CD2511FD68A17004DD25D /* Build configuration list for PBXProject "RNInCallManager" */;
105 | compatibilityVersion = "Xcode 8.0";
106 | developmentRegion = en;
107 | hasScannedForEncodings = 0;
108 | knownRegions = (
109 | en,
110 | );
111 | mainGroup = 231CD24D1FD68A17004DD25D;
112 | productRefGroup = 231CD2571FD68A17004DD25D /* Products */;
113 | projectDirPath = "";
114 | projectRoot = "";
115 | targets = (
116 | 231CD2551FD68A17004DD25D /* RNInCallManager */,
117 | );
118 | };
119 | /* End PBXProject section */
120 |
121 | /* Begin PBXSourcesBuildPhase section */
122 | 231CD2521FD68A17004DD25D /* Sources */ = {
123 | isa = PBXSourcesBuildPhase;
124 | buildActionMask = 2147483647;
125 | files = (
126 | 231CD25B1FD68A17004DD25D /* RNInCallManager.m in Sources */,
127 | );
128 | runOnlyForDeploymentPostprocessing = 0;
129 | };
130 | /* End PBXSourcesBuildPhase section */
131 |
132 | /* Begin XCBuildConfiguration section */
133 | 231CD25D1FD68A17004DD25D /* Debug */ = {
134 | isa = XCBuildConfiguration;
135 | buildSettings = {
136 | ALWAYS_SEARCH_USER_PATHS = NO;
137 | CLANG_ANALYZER_NONNULL = YES;
138 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
139 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
140 | CLANG_CXX_LIBRARY = "libc++";
141 | CLANG_ENABLE_MODULES = YES;
142 | CLANG_ENABLE_OBJC_ARC = YES;
143 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
144 | CLANG_WARN_BOOL_CONVERSION = YES;
145 | CLANG_WARN_COMMA = YES;
146 | CLANG_WARN_CONSTANT_CONVERSION = YES;
147 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
148 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
149 | CLANG_WARN_EMPTY_BODY = YES;
150 | CLANG_WARN_ENUM_CONVERSION = YES;
151 | CLANG_WARN_INFINITE_RECURSION = YES;
152 | CLANG_WARN_INT_CONVERSION = YES;
153 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
154 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
155 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
156 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
157 | CLANG_WARN_STRICT_PROTOTYPES = YES;
158 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
159 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
160 | CLANG_WARN_UNREACHABLE_CODE = YES;
161 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
162 | CODE_SIGN_IDENTITY = "iPhone Developer";
163 | COPY_PHASE_STRIP = NO;
164 | DEBUG_INFORMATION_FORMAT = dwarf;
165 | ENABLE_STRICT_OBJC_MSGSEND = YES;
166 | ENABLE_TESTABILITY = YES;
167 | GCC_C_LANGUAGE_STANDARD = gnu11;
168 | GCC_DYNAMIC_NO_PIC = NO;
169 | GCC_NO_COMMON_BLOCKS = YES;
170 | GCC_OPTIMIZATION_LEVEL = 0;
171 | GCC_PREPROCESSOR_DEFINITIONS = (
172 | "DEBUG=1",
173 | "$(inherited)",
174 | );
175 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
176 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
177 | GCC_WARN_UNDECLARED_SELECTOR = YES;
178 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
179 | GCC_WARN_UNUSED_FUNCTION = YES;
180 | GCC_WARN_UNUSED_VARIABLE = YES;
181 | IPHONEOS_DEPLOYMENT_TARGET = 8.0;
182 | MTL_ENABLE_DEBUG_INFO = YES;
183 | ONLY_ACTIVE_ARCH = YES;
184 | SDKROOT = iphoneos;
185 | };
186 | name = Debug;
187 | };
188 | 231CD25E1FD68A17004DD25D /* Release */ = {
189 | isa = XCBuildConfiguration;
190 | buildSettings = {
191 | ALWAYS_SEARCH_USER_PATHS = NO;
192 | CLANG_ANALYZER_NONNULL = YES;
193 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
194 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
195 | CLANG_CXX_LIBRARY = "libc++";
196 | CLANG_ENABLE_MODULES = YES;
197 | CLANG_ENABLE_OBJC_ARC = YES;
198 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
199 | CLANG_WARN_BOOL_CONVERSION = YES;
200 | CLANG_WARN_COMMA = YES;
201 | CLANG_WARN_CONSTANT_CONVERSION = YES;
202 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
203 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
204 | CLANG_WARN_EMPTY_BODY = YES;
205 | CLANG_WARN_ENUM_CONVERSION = YES;
206 | CLANG_WARN_INFINITE_RECURSION = YES;
207 | CLANG_WARN_INT_CONVERSION = YES;
208 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
209 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
210 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
211 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
212 | CLANG_WARN_STRICT_PROTOTYPES = YES;
213 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
214 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
215 | CLANG_WARN_UNREACHABLE_CODE = YES;
216 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
217 | CODE_SIGN_IDENTITY = "iPhone Developer";
218 | COPY_PHASE_STRIP = NO;
219 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
220 | ENABLE_NS_ASSERTIONS = NO;
221 | ENABLE_STRICT_OBJC_MSGSEND = YES;
222 | GCC_C_LANGUAGE_STANDARD = gnu11;
223 | GCC_NO_COMMON_BLOCKS = YES;
224 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
225 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
226 | GCC_WARN_UNDECLARED_SELECTOR = YES;
227 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
228 | GCC_WARN_UNUSED_FUNCTION = YES;
229 | GCC_WARN_UNUSED_VARIABLE = YES;
230 | IPHONEOS_DEPLOYMENT_TARGET = 8.0;
231 | MTL_ENABLE_DEBUG_INFO = NO;
232 | SDKROOT = iphoneos;
233 | VALIDATE_PRODUCT = YES;
234 | };
235 | name = Release;
236 | };
237 | 231CD2601FD68A17004DD25D /* Debug */ = {
238 | isa = XCBuildConfiguration;
239 | buildSettings = {
240 | CODE_SIGN_STYLE = Automatic;
241 | OTHER_LDFLAGS = "-ObjC";
242 | PRODUCT_NAME = "$(TARGET_NAME)";
243 | SKIP_INSTALL = YES;
244 | TARGETED_DEVICE_FAMILY = "1,2";
245 | };
246 | name = Debug;
247 | };
248 | 231CD2611FD68A17004DD25D /* Release */ = {
249 | isa = XCBuildConfiguration;
250 | buildSettings = {
251 | CODE_SIGN_STYLE = Automatic;
252 | OTHER_LDFLAGS = "-ObjC";
253 | PRODUCT_NAME = "$(TARGET_NAME)";
254 | SKIP_INSTALL = YES;
255 | TARGETED_DEVICE_FAMILY = "1,2";
256 | };
257 | name = Release;
258 | };
259 | /* End XCBuildConfiguration section */
260 |
261 | /* Begin XCConfigurationList section */
262 | 231CD2511FD68A17004DD25D /* Build configuration list for PBXProject "RNInCallManager" */ = {
263 | isa = XCConfigurationList;
264 | buildConfigurations = (
265 | 231CD25D1FD68A17004DD25D /* Debug */,
266 | 231CD25E1FD68A17004DD25D /* Release */,
267 | );
268 | defaultConfigurationIsVisible = 0;
269 | defaultConfigurationName = Release;
270 | };
271 | 231CD25F1FD68A17004DD25D /* Build configuration list for PBXNativeTarget "RNInCallManager" */ = {
272 | isa = XCConfigurationList;
273 | buildConfigurations = (
274 | 231CD2601FD68A17004DD25D /* Debug */,
275 | 231CD2611FD68A17004DD25D /* Release */,
276 | );
277 | defaultConfigurationIsVisible = 0;
278 | defaultConfigurationName = Release;
279 | };
280 | /* End XCConfigurationList section */
281 | };
282 | rootObject = 231CD24E1FD68A17004DD25D /* Project object */;
283 | }
284 |
--------------------------------------------------------------------------------
/android/src/main/java/com/zxcpoiu/incallmanager/AppRTC/AppRTCBluetoothManager.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 The WebRTC Project Authors. All rights reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | package com.zxcpoiu.incallmanager.AppRTC;
12 |
13 | import android.annotation.SuppressLint;
14 | import android.bluetooth.BluetoothAdapter;
15 | import android.bluetooth.BluetoothDevice;
16 | import android.bluetooth.BluetoothHeadset;
17 | import android.bluetooth.BluetoothProfile;
18 | import android.content.BroadcastReceiver;
19 | import android.content.Context;
20 | import android.content.Intent;
21 | import android.content.IntentFilter;
22 | import android.content.pm.PackageManager;
23 | import android.media.AudioManager;
24 | import android.os.Handler;
25 | import android.os.Looper;
26 | import android.os.Process;
27 | import android.util.Log;
28 | import java.util.List;
29 | import java.util.Set;
30 |
31 | import com.zxcpoiu.incallmanager.InCallManagerModule;
32 |
33 | /**
34 | * AppRTCProximitySensor manages functions related to Bluetoth devices in the
35 | * AppRTC demo.
36 | */
37 | public class AppRTCBluetoothManager {
38 | private static final String TAG = "AppRTCBluetoothManager";
39 |
40 | // Timeout interval for starting or stopping audio to a Bluetooth SCO device.
41 | private static final int BLUETOOTH_SCO_TIMEOUT_MS = 4000;
42 | // Maximum number of SCO connection attempts.
43 | private static final int MAX_SCO_CONNECTION_ATTEMPTS = 2;
44 |
45 | // Bluetooth connection state.
46 | public enum State {
47 | // Bluetooth is not available; no adapter or Bluetooth is off.
48 | UNINITIALIZED,
49 | // Bluetooth error happened when trying to start Bluetooth.
50 | ERROR,
51 | // Bluetooth proxy object for the Headset profile exists, but no connected headset devices,
52 | // SCO is not started or disconnected.
53 | HEADSET_UNAVAILABLE,
54 | // Bluetooth proxy object for the Headset profile connected, connected Bluetooth headset
55 | // present, but SCO is not started or disconnected.
56 | HEADSET_AVAILABLE,
57 | // Bluetooth audio SCO connection with remote device is closing.
58 | SCO_DISCONNECTING,
59 | // Bluetooth audio SCO connection with remote device is initiated.
60 | SCO_CONNECTING,
61 | // Bluetooth audio SCO connection with remote device is established.
62 | SCO_CONNECTED
63 | }
64 |
65 | private final Context apprtcContext;
66 | private final InCallManagerModule apprtcAudioManager;
67 | private final AudioManager audioManager;
68 | private final Handler handler;
69 |
70 | int scoConnectionAttempts;
71 | private State bluetoothState;
72 | private final BluetoothProfile.ServiceListener bluetoothServiceListener;
73 | private BluetoothAdapter bluetoothAdapter;
74 | private BluetoothHeadset bluetoothHeadset;
75 | private BluetoothDevice bluetoothDevice;
76 | private final BroadcastReceiver bluetoothHeadsetReceiver;
77 |
78 | // Runs when the Bluetooth timeout expires. We use that timeout after calling
79 | // startScoAudio() or stopScoAudio() because we're not guaranteed to get a
80 | // callback after those calls.
81 | private final Runnable bluetoothTimeoutRunnable = new Runnable() {
82 | @Override
83 | public void run() {
84 | bluetoothTimeout();
85 | }
86 | };
87 |
88 | /**
89 | * Implementation of an interface that notifies BluetoothProfile IPC clients when they have been
90 | * connected to or disconnected from the service.
91 | */
92 | private class BluetoothServiceListener implements BluetoothProfile.ServiceListener {
93 | @Override
94 | // Called to notify the client when the proxy object has been connected to the service.
95 | // Once we have the profile proxy object, we can use it to monitor the state of the
96 | // connection and perform other operations that are relevant to the headset profile.
97 | public void onServiceConnected(int profile, BluetoothProfile proxy) {
98 | if (profile != BluetoothProfile.HEADSET || bluetoothState == State.UNINITIALIZED) {
99 | return;
100 | }
101 | Log.d(TAG, "BluetoothServiceListener.onServiceConnected: BT state=" + bluetoothState);
102 | // Android only supports one connected Bluetooth Headset at a time.
103 | bluetoothHeadset = (BluetoothHeadset) proxy;
104 | updateAudioDeviceState();
105 | Log.d(TAG, "onServiceConnected done: BT state=" + bluetoothState);
106 | }
107 |
108 | @Override
109 | /** Notifies the client when the proxy object has been disconnected from the service. */
110 | public void onServiceDisconnected(int profile) {
111 | if (profile != BluetoothProfile.HEADSET || bluetoothState == State.UNINITIALIZED) {
112 | return;
113 | }
114 | Log.d(TAG, "BluetoothServiceListener.onServiceDisconnected: BT state=" + bluetoothState);
115 | stopScoAudio();
116 | bluetoothHeadset = null;
117 | bluetoothDevice = null;
118 | bluetoothState = State.HEADSET_UNAVAILABLE;
119 | updateAudioDeviceState();
120 | Log.d(TAG, "onServiceDisconnected done: BT state=" + bluetoothState);
121 | }
122 | }
123 |
124 | // Intent broadcast receiver which handles changes in Bluetooth device availability.
125 | // Detects headset changes and Bluetooth SCO state changes.
126 | private class BluetoothHeadsetBroadcastReceiver extends BroadcastReceiver {
127 | @Override
128 | public void onReceive(Context context, Intent intent) {
129 | if (bluetoothState == State.UNINITIALIZED) {
130 | return;
131 | }
132 | final String action = intent.getAction();
133 | // Change in connection state of the Headset profile. Note that the
134 | // change does not tell us anything about whether we're streaming
135 | // audio to BT over SCO. Typically received when user turns on a BT
136 | // headset while audio is active using another audio device.
137 | if (action.equals(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED)) {
138 | final int state =
139 | intent.getIntExtra(BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_DISCONNECTED);
140 | Log.d(TAG, "BluetoothHeadsetBroadcastReceiver.onReceive: "
141 | + "a=ACTION_CONNECTION_STATE_CHANGED, "
142 | + "s=" + stateToString(state) + ", "
143 | + "sb=" + isInitialStickyBroadcast() + ", "
144 | + "BT state: " + bluetoothState);
145 | if (state == BluetoothHeadset.STATE_CONNECTED) {
146 | scoConnectionAttempts = 0;
147 | updateAudioDeviceState();
148 | } else if (state == BluetoothHeadset.STATE_CONNECTING) {
149 | // No action needed.
150 | } else if (state == BluetoothHeadset.STATE_DISCONNECTING) {
151 | // No action needed.
152 | } else if (state == BluetoothHeadset.STATE_DISCONNECTED) {
153 | // Bluetooth is probably powered off during the call.
154 | stopScoAudio();
155 | updateAudioDeviceState();
156 | }
157 | // Change in the audio (SCO) connection state of the Headset profile.
158 | // Typically received after call to startScoAudio() has finalized.
159 | } else if (action.equals(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED)) {
160 | final int state = intent.getIntExtra(
161 | BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_AUDIO_DISCONNECTED);
162 | Log.d(TAG, "BluetoothHeadsetBroadcastReceiver.onReceive: "
163 | + "a=ACTION_AUDIO_STATE_CHANGED, "
164 | + "s=" + stateToString(state) + ", "
165 | + "sb=" + isInitialStickyBroadcast() + ", "
166 | + "BT state: " + bluetoothState);
167 | if (state == BluetoothHeadset.STATE_AUDIO_CONNECTED) {
168 | cancelTimer();
169 | if (bluetoothState == State.SCO_CONNECTING) {
170 | Log.d(TAG, "+++ Bluetooth audio SCO is now connected");
171 | bluetoothState = State.SCO_CONNECTED;
172 | scoConnectionAttempts = 0;
173 | updateAudioDeviceState();
174 | } else {
175 | Log.w(TAG, "Unexpected state BluetoothHeadset.STATE_AUDIO_CONNECTED");
176 | }
177 | } else if (state == BluetoothHeadset.STATE_AUDIO_CONNECTING) {
178 | Log.d(TAG, "+++ Bluetooth audio SCO is now connecting...");
179 | } else if (state == BluetoothHeadset.STATE_AUDIO_DISCONNECTED) {
180 | Log.d(TAG, "+++ Bluetooth audio SCO is now disconnected");
181 | if (isInitialStickyBroadcast()) {
182 | Log.d(TAG, "Ignore STATE_AUDIO_DISCONNECTED initial sticky broadcast.");
183 | return;
184 | }
185 | updateAudioDeviceState();
186 | }
187 | }
188 | Log.d(TAG, "onReceive done: BT state=" + bluetoothState);
189 | }
190 | }
191 |
192 | /** Construction. */
193 | public static AppRTCBluetoothManager create(Context context, InCallManagerModule audioManager) {
194 | Log.d(TAG, "create");
195 | return new AppRTCBluetoothManager(context, audioManager);
196 | }
197 |
198 | protected AppRTCBluetoothManager(Context context, InCallManagerModule audioManager) {
199 | Log.d(TAG, "ctor");
200 | apprtcContext = context;
201 | apprtcAudioManager = audioManager;
202 | this.audioManager = getAudioManager(context);
203 | bluetoothState = State.UNINITIALIZED;
204 | bluetoothServiceListener = new BluetoothServiceListener();
205 | bluetoothHeadsetReceiver = new BluetoothHeadsetBroadcastReceiver();
206 | handler = new Handler(Looper.getMainLooper());
207 | }
208 |
209 | /** Returns the internal state. */
210 | public State getState() {
211 | return bluetoothState;
212 | }
213 |
214 | /**
215 | * Activates components required to detect Bluetooth devices and to enable
216 | * BT SCO (audio is routed via BT SCO) for the headset profile. The end
217 | * state will be HEADSET_UNAVAILABLE but a state machine has started which
218 | * will start a state change sequence where the final outcome depends on
219 | * if/when the BT headset is enabled.
220 | * Example of state change sequence when start() is called while BT device
221 | * is connected and enabled:
222 | * UNINITIALIZED --> HEADSET_UNAVAILABLE --> HEADSET_AVAILABLE -->
223 | * SCO_CONNECTING --> SCO_CONNECTED <==> audio is now routed via BT SCO.
224 | * Note that the AppRTCAudioManager is also involved in driving this state
225 | * change.
226 | */
227 | public void start() {
228 | Log.d(TAG, "start");
229 | if (!hasPermission(apprtcContext, android.Manifest.permission.BLUETOOTH)) {
230 | Log.w(TAG, "Process (pid=" + Process.myPid() + ") lacks BLUETOOTH permission");
231 | return;
232 | }
233 | if (bluetoothState != State.UNINITIALIZED) {
234 | Log.w(TAG, "Invalid BT state");
235 | return;
236 | }
237 | bluetoothHeadset = null;
238 | bluetoothDevice = null;
239 | scoConnectionAttempts = 0;
240 | // Get a handle to the default local Bluetooth adapter.
241 | bluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
242 | if (bluetoothAdapter == null) {
243 | Log.w(TAG, "Device does not support Bluetooth");
244 | return;
245 | }
246 | // Ensure that the device supports use of BT SCO audio for off call use cases.
247 | if (!audioManager.isBluetoothScoAvailableOffCall()) {
248 | Log.e(TAG, "Bluetooth SCO audio is not available off call");
249 | return;
250 | }
251 | logBluetoothAdapterInfo(bluetoothAdapter);
252 | // Establish a connection to the HEADSET profile (includes both Bluetooth Headset and
253 | // Hands-Free) proxy object and install a listener.
254 | if (!getBluetoothProfileProxy(
255 | apprtcContext, bluetoothServiceListener, BluetoothProfile.HEADSET)) {
256 | Log.e(TAG, "BluetoothAdapter.getProfileProxy(HEADSET) failed");
257 | return;
258 | }
259 | // Register receivers for BluetoothHeadset change notifications.
260 | IntentFilter bluetoothHeadsetFilter = new IntentFilter();
261 | // Register receiver for change in connection state of the Headset profile.
262 | bluetoothHeadsetFilter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED);
263 | // Register receiver for change in audio connection state of the Headset profile.
264 | bluetoothHeadsetFilter.addAction(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED);
265 | registerReceiver(bluetoothHeadsetReceiver, bluetoothHeadsetFilter);
266 | Log.d(TAG, "HEADSET profile state: "
267 | + stateToString(bluetoothAdapter.getProfileConnectionState(BluetoothProfile.HEADSET)));
268 | Log.d(TAG, "Bluetooth proxy for headset profile has started");
269 | bluetoothState = State.HEADSET_UNAVAILABLE;
270 | Log.d(TAG, "start done: BT state=" + bluetoothState);
271 | }
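// A minimal usage sketch (hypothetical host wiring; in this package the host is
// InCallManagerModule, which must provide updateAudioDeviceState()):
//
//   AppRTCBluetoothManager bt = AppRTCBluetoothManager.create(context, inCallManagerModule);
//   bt.start();          // UNINITIALIZED -> HEADSET_UNAVAILABLE; broadcasts drive further changes
//   bt.updateDevice();   // -> HEADSET_AVAILABLE when a headset is connected
//   bt.startScoAudio();  // HEADSET_AVAILABLE -> SCO_CONNECTING -> SCO_CONNECTED on success
//   bt.stop();           // back to UNINITIALIZED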
272 |
273 | /** Stops and closes all components related to Bluetooth audio. */
274 | public void stop() {
275 | Log.d(TAG, "stop: BT state=" + bluetoothState);
276 | if (bluetoothAdapter == null) {
277 | return;
278 | }
279 | // Stop BT SCO connection with remote device if needed.
280 | stopScoAudio();
281 | // Close down remaining BT resources.
282 | if (bluetoothState == State.UNINITIALIZED) {
283 | return;
284 | }
285 | unregisterReceiver(bluetoothHeadsetReceiver);
286 | cancelTimer();
287 | if (bluetoothHeadset != null) {
288 | bluetoothAdapter.closeProfileProxy(BluetoothProfile.HEADSET, bluetoothHeadset);
289 | bluetoothHeadset = null;
290 | }
291 | bluetoothAdapter = null;
292 | bluetoothDevice = null;
293 | bluetoothState = State.UNINITIALIZED;
294 | Log.d(TAG, "stop done: BT state=" + bluetoothState);
295 | }
296 |
297 | /**
298 | * Starts Bluetooth SCO connection with remote device.
299 | * Note that the phone application always has the priority on the usage of the SCO connection
300 | * for telephony. If this method is called while the phone is in call it will be ignored.
301 | * Similarly, if a call is received or sent while an application is using the SCO connection,
302 | * the connection will be lost for the application and NOT returned automatically when the call
303 | * ends. Also note that: up to and including API version JELLY_BEAN_MR1, this method initiates a
304 | * virtual voice call to the Bluetooth headset. After API version JELLY_BEAN_MR2 only a raw SCO
305 | * audio connection is established.
306 |  * TODO(henrika): should we add support for a virtual voice call to the BT headset also for JBMR2 and
307 |  * higher? It might be required to initiate a virtual voice call since many devices do not
308 | * accept SCO audio without a "call".
309 | */
310 | public boolean startScoAudio() {
311 | Log.d(TAG, "startSco: BT state=" + bluetoothState + ", "
312 | + "attempts: " + scoConnectionAttempts + ", "
313 | + "SCO is on: " + isScoOn());
314 | if (scoConnectionAttempts >= MAX_SCO_CONNECTION_ATTEMPTS) {
315 | Log.e(TAG, "BT SCO connection fails - no more attempts");
316 | return false;
317 | }
318 | if (bluetoothState != State.HEADSET_AVAILABLE) {
319 | Log.e(TAG, "BT SCO connection fails - no headset available");
320 | return false;
321 | }
322 | // Start BT SCO channel and wait for ACTION_AUDIO_STATE_CHANGED.
323 | Log.d(TAG, "Starting Bluetooth SCO and waits for ACTION_AUDIO_STATE_CHANGED...");
324 | // The SCO connection establishment can take several seconds, hence we cannot rely on the
325 | // connection to be available when the method returns but instead register to receive the
326 | // intent ACTION_SCO_AUDIO_STATE_UPDATED and wait for the state to be SCO_AUDIO_STATE_CONNECTED.
327 | bluetoothState = State.SCO_CONNECTING;
328 | audioManager.startBluetoothSco();
329 | audioManager.setBluetoothScoOn(true);
330 | scoConnectionAttempts++;
331 | startTimer();
332 | Log.d(TAG, "startScoAudio done: BT state=" + bluetoothState + ", "
333 | + "SCO is on: " + isScoOn());
334 | return true;
335 | }
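// A sketch of how a host might call this from its device-selection logic (hypothetical
// host code; retries are bounded by MAX_SCO_CONNECTION_ATTEMPTS above):
//
//   if (bt.getState() == AppRTCBluetoothManager.State.HEADSET_AVAILABLE && !bt.startScoAudio()) {
//     // fall back to another audio device (speaker / earpiece / wired headset)
//   }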
336 |
337 | /** Stops Bluetooth SCO connection with remote device. */
338 | public void stopScoAudio() {
339 | Log.d(TAG, "stopScoAudio: BT state=" + bluetoothState + ", "
340 | + "SCO is on: " + isScoOn());
341 | if (bluetoothState != State.SCO_CONNECTING && bluetoothState != State.SCO_CONNECTED) {
342 | return;
343 | }
344 | cancelTimer();
345 | audioManager.stopBluetoothSco();
346 | audioManager.setBluetoothScoOn(false);
347 | bluetoothState = State.SCO_DISCONNECTING;
348 | Log.d(TAG, "stopScoAudio done: BT state=" + bluetoothState + ", "
349 | + "SCO is on: " + isScoOn());
350 | }
351 |
352 | /**
353 | * Use the BluetoothHeadset proxy object (controls the Bluetooth Headset
354 | * Service via IPC) to update the list of connected devices for the HEADSET
355 | * profile. The internal state will change to HEADSET_UNAVAILABLE or to
356 | * HEADSET_AVAILABLE and |bluetoothDevice| will be mapped to the connected
357 | * device if available.
358 | */
359 | public void updateDevice() {
360 | if (bluetoothState == State.UNINITIALIZED || bluetoothHeadset == null) {
361 | return;
362 | }
363 | Log.d(TAG, "updateDevice");
364 | // Get connected devices for the headset profile. Returns the set of
365 | // devices which are in state STATE_CONNECTED. The BluetoothDevice class
366 | // is just a thin wrapper for a Bluetooth hardware address.
367 | List<BluetoothDevice> devices = bluetoothHeadset.getConnectedDevices();
368 | if (devices.isEmpty()) {
369 | bluetoothDevice = null;
370 | bluetoothState = State.HEADSET_UNAVAILABLE;
371 | Log.d(TAG, "No connected bluetooth headset");
372 | } else {
373 | // Always use first device in list. Android only supports one device.
374 | bluetoothDevice = devices.get(0);
375 | bluetoothState = State.HEADSET_AVAILABLE;
376 | Log.d(TAG, "Connected bluetooth headset: "
377 | + "name=" + bluetoothDevice.getName() + ", "
378 | + "state=" + stateToString(bluetoothHeadset.getConnectionState(bluetoothDevice))
379 | + ", SCO audio=" + bluetoothHeadset.isAudioConnected(bluetoothDevice));
380 | }
381 | Log.d(TAG, "updateDevice done: BT state=" + bluetoothState);
382 | }
383 |
384 | /**
385 | * Stubs for test mocks.
386 | */
387 | protected AudioManager getAudioManager(Context context) {
388 | return (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
389 | }
390 |
391 | protected void registerReceiver(BroadcastReceiver receiver, IntentFilter filter) {
392 | apprtcContext.registerReceiver(receiver, filter);
393 | }
394 |
395 | protected void unregisterReceiver(BroadcastReceiver receiver) {
396 | apprtcContext.unregisterReceiver(receiver);
397 | }
398 |
399 | protected boolean getBluetoothProfileProxy(
400 | Context context, BluetoothProfile.ServiceListener listener, int profile) {
401 | return bluetoothAdapter.getProfileProxy(context, listener, profile);
402 | }
403 |
404 | protected boolean hasPermission(Context context, String permission) {
405 | return apprtcContext.checkPermission(permission, Process.myPid(), Process.myUid())
406 | == PackageManager.PERMISSION_GRANTED;
407 | }
408 |
409 | /** Logs the state of the local Bluetooth adapter. */
410 | @SuppressLint("HardwareIds")
411 | protected void logBluetoothAdapterInfo(BluetoothAdapter localAdapter) {
412 | Log.d(TAG, "BluetoothAdapter: "
413 | + "enabled=" + localAdapter.isEnabled() + ", "
414 | + "state=" + stateToString(localAdapter.getState()) + ", "
415 | + "name=" + localAdapter.getName() + ", "
416 | + "address=" + localAdapter.getAddress());
417 | // Log the set of BluetoothDevice objects that are bonded (paired) to the local adapter.
418 | Set<BluetoothDevice> pairedDevices = localAdapter.getBondedDevices();
419 | if (!pairedDevices.isEmpty()) {
420 | Log.d(TAG, "paired devices:");
421 | for (BluetoothDevice device : pairedDevices) {
422 | Log.d(TAG, " name=" + device.getName() + ", address=" + device.getAddress());
423 | }
424 | }
425 | }
426 |
427 | /** Ensures that the audio manager updates its list of available audio devices. */
428 | private void updateAudioDeviceState() {
429 | Log.d(TAG, "updateAudioDeviceState");
430 | apprtcAudioManager.updateAudioDeviceState();
431 | }
432 |
433 | /** Starts timer which times out after BLUETOOTH_SCO_TIMEOUT_MS milliseconds. */
434 | private void startTimer() {
435 | Log.d(TAG, "startTimer");
436 | handler.postDelayed(bluetoothTimeoutRunnable, BLUETOOTH_SCO_TIMEOUT_MS);
437 | }
438 |
439 | /** Cancels any outstanding timer tasks. */
440 | private void cancelTimer() {
441 | Log.d(TAG, "cancelTimer");
442 | handler.removeCallbacks(bluetoothTimeoutRunnable);
443 | }
444 |
445 | /**
446 |  * Called when starting the BT SCO channel takes too long. Usually
447 | * happens when the BT device has been turned on during an ongoing call.
448 | */
449 | private void bluetoothTimeout() {
450 | if (bluetoothState == State.UNINITIALIZED || bluetoothHeadset == null) {
451 | return;
452 | }
453 | Log.d(TAG, "bluetoothTimeout: BT state=" + bluetoothState + ", "
454 | + "attempts: " + scoConnectionAttempts + ", "
455 | + "SCO is on: " + isScoOn());
456 | if (bluetoothState != State.SCO_CONNECTING) {
457 | return;
458 | }
459 | // Bluetooth SCO should be connecting; check the latest result.
460 | boolean scoConnected = false;
461 | List<BluetoothDevice> devices = bluetoothHeadset.getConnectedDevices();
462 | if (devices.size() > 0) {
463 | bluetoothDevice = devices.get(0);
464 | if (bluetoothHeadset.isAudioConnected(bluetoothDevice)) {
465 | Log.d(TAG, "SCO connected with " + bluetoothDevice.getName());
466 | scoConnected = true;
467 | } else {
468 | Log.d(TAG, "SCO is not connected with " + bluetoothDevice.getName());
469 | }
470 | }
471 | if (scoConnected) {
472 | // We thought BT had timed out, but it's actually on; updating state.
473 | bluetoothState = State.SCO_CONNECTED;
474 | scoConnectionAttempts = 0;
475 | } else {
476 | // Give up and "cancel" our request by calling stopBluetoothSco().
477 | Log.w(TAG, "BT failed to connect after timeout");
478 | stopScoAudio();
479 | }
480 | updateAudioDeviceState();
481 | Log.d(TAG, "bluetoothTimeout done: BT state=" + bluetoothState);
482 | }
483 |
484 | /** Checks whether audio uses Bluetooth SCO. */
485 | private boolean isScoOn() {
486 | return audioManager.isBluetoothScoOn();
487 | }
488 |
489 | /** Converts BluetoothAdapter states into local string representations. */
490 | private String stateToString(int state) {
491 | switch (state) {
492 | case BluetoothAdapter.STATE_DISCONNECTED:
493 | return "DISCONNECTED";
494 | case BluetoothAdapter.STATE_CONNECTED:
495 | return "CONNECTED";
496 | case BluetoothAdapter.STATE_CONNECTING:
497 | return "CONNECTING";
498 | case BluetoothAdapter.STATE_DISCONNECTING:
499 | return "DISCONNECTING";
500 | case BluetoothAdapter.STATE_OFF:
501 | return "OFF";
502 | case BluetoothAdapter.STATE_ON:
503 | return "ON";
504 | case BluetoothAdapter.STATE_TURNING_OFF:
505 | // Indicates the local Bluetooth adapter is turning off. Local clients should immediately
506 | // attempt graceful disconnection of any remote links.
507 | return "TURNING_OFF";
508 | case BluetoothAdapter.STATE_TURNING_ON:
509 | // Indicates the local Bluetooth adapter is turning on. However local clients should wait
510 | // for STATE_ON before attempting to use the adapter.
511 | return "TURNING_ON";
512 | default:
513 | return "INVALID";
514 | }
515 | }
516 | }
517 |
--------------------------------------------------------------------------------
/ios/RNInCallManager/RNInCallManager.m:
--------------------------------------------------------------------------------
1 | //
2 | // RNInCallManager.m
3 | // RNInCallManager
4 | //
5 | // Created by Ian Yu-Hsun Lin (@ianlin) on 05/12/2017.
6 | // Copyright © 2017 zxcpoiu. All rights reserved.
7 | //
8 |
9 | #import "RNInCallManager.h"
10 |
11 | #import <AVFoundation/AVFoundation.h>
12 | #import <UIKit/UIKit.h>
13 | #import <React/RCTBridgeModule.h>
14 | #import <React/RCTUtils.h>
15 |
16 | //static BOOL const automatic = YES;
17 |
18 | @implementation RNInCallManager
19 | {
20 | UIDevice *_currentDevice;
21 |
22 | AVAudioSession *_audioSession;
23 | AVAudioPlayer *_ringtone;
24 | AVAudioPlayer *_ringback;
25 | AVAudioPlayer *_busytone;
26 |
27 | NSURL *_defaultRingtoneUri;
28 | NSURL *_defaultRingbackUri;
29 | NSURL *_defaultBusytoneUri;
30 | NSURL *_bundleRingtoneUri;
31 | NSURL *_bundleRingbackUri;
32 | NSURL *_bundleBusytoneUri;
33 |
34 | //BOOL isProximitySupported;
35 | BOOL _proximityIsNear;
36 |
37 | // --- flags indicating which observers have been added
38 | BOOL _isProximityRegistered;
39 | BOOL _isAudioSessionInterruptionRegistered;
40 | BOOL _isAudioSessionRouteChangeRegistered;
41 | BOOL _isAudioSessionMediaServicesWereLostRegistered;
42 | BOOL _isAudioSessionMediaServicesWereResetRegistered;
43 | BOOL _isAudioSessionSilenceSecondaryAudioHintRegistered;
44 |
45 | // -- notification observers
46 | id _proximityObserver;
47 | id _audioSessionInterruptionObserver;
48 | id _audioSessionRouteChangeObserver;
49 | id _audioSessionMediaServicesWereLostObserver;
50 | id _audioSessionMediaServicesWereResetObserver;
51 | id _audioSessionSilenceSecondaryAudioHintObserver;
52 |
53 | NSString *_incallAudioMode;
54 | NSString *_incallAudioCategory;
55 | NSString *_origAudioCategory;
56 | NSString *_origAudioMode;
57 | BOOL _audioSessionInitialized;
58 | int _forceSpeakerOn;
59 | NSString *_recordPermission;
60 | NSString *_cameraPermission;
61 | NSString *_media;
62 | }
63 |
64 | + (BOOL)requiresMainQueueSetup
65 | {
66 | return NO;
67 | }
68 |
69 | RCT_EXPORT_MODULE(InCallManager)
70 |
71 | - (instancetype)init
72 | {
73 | if (self = [super init]) {
74 | _currentDevice = [UIDevice currentDevice];
75 | _audioSession = [AVAudioSession sharedInstance];
76 | _ringtone = nil;
77 | _ringback = nil;
78 | _busytone = nil;
79 |
80 | _defaultRingtoneUri = nil;
81 | _defaultRingbackUri = nil;
82 | _defaultBusytoneUri = nil;
83 | _bundleRingtoneUri = nil;
84 | _bundleRingbackUri = nil;
85 | _bundleBusytoneUri = nil;
86 |
87 | _proximityIsNear = NO;
88 |
89 | _isProximityRegistered = NO;
90 | _isAudioSessionInterruptionRegistered = NO;
91 | _isAudioSessionRouteChangeRegistered = NO;
92 | _isAudioSessionMediaServicesWereLostRegistered = NO;
93 | _isAudioSessionMediaServicesWereResetRegistered = NO;
94 | _isAudioSessionSilenceSecondaryAudioHintRegistered = NO;
95 |
96 | _proximityObserver = nil;
97 | _audioSessionInterruptionObserver = nil;
98 | _audioSessionRouteChangeObserver = nil;
99 | _audioSessionMediaServicesWereLostObserver = nil;
100 | _audioSessionMediaServicesWereResetObserver = nil;
101 | _audioSessionSilenceSecondaryAudioHintObserver = nil;
102 |
103 | _incallAudioMode = AVAudioSessionModeVoiceChat;
104 | _incallAudioCategory = AVAudioSessionCategoryPlayAndRecord;
105 | _origAudioCategory = nil;
106 | _origAudioMode = nil;
107 | _audioSessionInitialized = NO;
108 | _forceSpeakerOn = 0;
109 | _recordPermission = nil;
110 | _cameraPermission = nil;
111 | _media = @"audio";
112 |
113 | NSLog(@"RNInCallManager.init(): initialized");
114 | }
115 | return self;
116 | }
117 |
118 | - (void)dealloc
119 | {
120 | [[NSNotificationCenter defaultCenter] removeObserver:self];
121 | [self stop:@""];
122 | }
123 |
124 | - (NSArray *)supportedEvents
125 | {
126 | return @[@"Proximity",
127 | @"WiredHeadset"];
128 | }
129 |
130 | RCT_EXPORT_METHOD(start:(NSString *)mediaType
131 | auto:(BOOL)_auto
132 | ringbackUriType:(NSString *)ringbackUriType)
133 | {
134 | if (_audioSessionInitialized) {
135 | return;
136 | }
137 | if (![_recordPermission isEqualToString:@"granted"]) {
138 | NSLog(@"RNInCallManager.start(): recordPermission should be granted. state: %@", _recordPermission);
139 | return;
140 | }
141 | _media = mediaType;
142 |
143 | // --- auto is always true on ios
144 | if ([_media isEqualToString:@"video"]) {
145 | _incallAudioMode = AVAudioSessionModeVideoChat;
146 | } else {
147 | _incallAudioMode = AVAudioSessionModeVoiceChat;
148 | }
149 | NSLog(@"RNInCallManager.start() start InCallManager. media=%@, type=%@, mode=%@", _media, _media, _incallAudioMode);
150 | [self storeOriginalAudioSetup];
151 | _forceSpeakerOn = 0;
152 | [self startAudioSessionNotification];
153 | [self audioSessionSetCategory:_incallAudioCategory
154 | options:0
155 | callerMemo:NSStringFromSelector(_cmd)];
156 | [self audioSessionSetMode:_incallAudioMode
157 | callerMemo:NSStringFromSelector(_cmd)];
158 | [self audioSessionSetActive:YES
159 | options:0
160 | callerMemo:NSStringFromSelector(_cmd)];
161 |
162 | if (ringbackUriType.length > 0) {
163 | NSLog(@"RNInCallManager.start() play ringback first. type=%@", ringbackUriType);
164 | [self startRingback:ringbackUriType];
165 | }
166 |
167 | if ([_media isEqualToString:@"audio"]) {
168 | [self startProximitySensor];
169 | }
170 | [self setKeepScreenOn:YES];
171 | _audioSessionInitialized = YES;
172 | //self.debugAudioSession()
173 | }
174 |
175 | RCT_EXPORT_METHOD(stop:(NSString *)busytoneUriType)
176 | {
177 | if (!_audioSessionInitialized) {
178 | return;
179 | }
180 |
181 | [self stopRingback];
182 |
183 | if (busytoneUriType.length > 0 && [self startBusytone:busytoneUriType]) {
184 | // play busytone first, and call this func again when finish
185 | NSLog(@"RNInCallManager.stop(): play busytone before stop");
186 | return;
187 | } else {
188 | NSLog(@"RNInCallManager.stop(): stop InCallManager");
189 | [self restoreOriginalAudioSetup];
190 | [self stopBusytone];
191 | [self stopProximitySensor];
192 | [self audioSessionSetActive:NO
193 | options:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation
194 | callerMemo:NSStringFromSelector(_cmd)];
195 | [self setKeepScreenOn:NO];
196 | [self stopAudioSessionNotification];
197 | [[NSNotificationCenter defaultCenter] removeObserver:self];
198 | _forceSpeakerOn = 0;
199 | _audioSessionInitialized = NO;
200 | }
201 | }
202 |
203 | RCT_EXPORT_METHOD(turnScreenOn)
204 | {
205 | NSLog(@"RNInCallManager.turnScreenOn(): ios doesn't support turnScreenOn()");
206 | }
207 |
208 | RCT_EXPORT_METHOD(turnScreenOff)
209 | {
210 | NSLog(@"RNInCallManager.turnScreenOff(): ios doesn't support turnScreenOff()");
211 | }
212 |
213 | RCT_EXPORT_METHOD(setFlashOn:(BOOL)enable
214 | brightness:(nonnull NSNumber *)brightness)
215 | {
216 | if ([AVCaptureDevice class]) {
217 | AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
218 | if (device.hasTorch && device.position == AVCaptureDevicePositionBack) {
219 | @try {
220 | [device lockForConfiguration:nil];
221 |
222 | if (enable) {
223 | [device setTorchMode:AVCaptureTorchModeOn];
224 | } else {
225 | [device setTorchMode:AVCaptureTorchModeOff];
226 | }
227 |
228 | [device unlockForConfiguration];
229 | } @catch (NSException *e) {}
230 | }
231 | }
232 | }
233 |
234 | RCT_EXPORT_METHOD(setKeepScreenOn:(BOOL)enable)
235 | {
236 | NSLog(@"RNInCallManager.setKeepScreenOn(): enable: %@", enable ? @"YES" : @"NO");
237 | dispatch_async(dispatch_get_main_queue(), ^{
238 | [[UIApplication sharedApplication] setIdleTimerDisabled:enable];
239 | });
240 | }
241 |
242 | RCT_EXPORT_METHOD(setSpeakerphoneOn:(BOOL)enable)
243 | {
244 | BOOL success;
245 | NSError *error = nil;
246 | NSArray* routes = [_audioSession availableInputs];
247 |
248 | if(!enable){
249 | NSLog(@"Routing audio via Earpiece");
250 | @try {
251 | success = [_audioSession setCategory:AVAudioSessionCategoryPlayAndRecord error:&error];
252 | if (!success) NSLog(@"Cannot set category due to error: %@", error);
253 | success = [_audioSession setMode:AVAudioSessionModeVoiceChat error:&error];
254 | if (!success) NSLog(@"Cannot set mode due to error: %@", error);
255 | [_audioSession setPreferredOutputNumberOfChannels:0 error:nil];
256 | success = [_audioSession overrideOutputAudioPort:AVAudioSessionPortOverrideNone error:&error];
257 | if (!success) NSLog(@"Port override failed due to: %@", error);
258 | success = [_audioSession setActive:YES error:&error];
259 | if (!success) NSLog(@"Audio session override failed: %@", error);
260 | else NSLog(@"AudioSession override is successful ");
261 |
262 | } @catch (NSException *e) {
263 | NSLog(@"Error occurred while routing audio via Earpiece: %@", e.reason);
264 | }
265 | } else {
266 | NSLog(@"Routing audio via Loudspeaker");
267 | @try {
268 | NSLog(@"Available routes: %@", routes[0]);
269 | success = [_audioSession setCategory:AVAudioSessionCategoryPlayAndRecord
270 | withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker
271 | error:&error];
272 | if (!success) NSLog(@"Cannot set category due to error: %@", error);
273 | success = [_audioSession setMode:AVAudioSessionModeVoiceChat error: &error];
274 | if (!success) NSLog(@"Cannot set mode due to error: %@", error);
275 | [_audioSession setPreferredOutputNumberOfChannels:0 error:nil];
276 | success = [_audioSession overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker error:&error];
277 | if (!success) NSLog(@"Port override failed due to: %@", error);
278 | success = [_audioSession setActive:YES error:&error];
279 | if (!success) NSLog(@"Audio session override failed: %@", error);
280 | else NSLog(@"AudioSession override is successful ");
281 | } @catch (NSException *e) {
282 | NSLog(@"Error occurred while routing audio via Loudspeaker: %@", e.reason);
283 | }
284 | }
285 | }
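// --- For reference, a minimal sketch of the AVAudioSession pattern the branches above aim for,
// --- where session is [AVAudioSession sharedInstance] (error handling elided):
//
//   [session setCategory:AVAudioSessionCategoryPlayAndRecord error:&error];
//   [session setMode:AVAudioSessionModeVoiceChat error:&error];
//   [session overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker error:&error]; // loudspeaker
//   [session overrideOutputAudioPort:AVAudioSessionPortOverrideNone error:&error];    // receiver (default)
//   [session setActive:YES error:&error];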
286 |
287 | RCT_EXPORT_METHOD(setForceSpeakerphoneOn:(int)flag)
288 | {
289 | _forceSpeakerOn = flag;
290 | NSLog(@"RNInCallManager.setForceSpeakerphoneOn(): flag: %d", flag);
291 | [self updateAudioRoute];
292 | }
293 |
294 | RCT_EXPORT_METHOD(setMicrophoneMute:(BOOL)enable)
295 | {
296 | NSLog(@"RNInCallManager.setMicrophoneMute(): ios doesn't support setMicrophoneMute()");
297 | }
298 |
299 | RCT_EXPORT_METHOD(startRingback:(NSString *)_ringbackUriType)
300 | {
301 | // you may be rejected by Apple when publishing the app if you use a system sound instead of a bundled sound.
302 | NSLog(@"RNInCallManager.startRingback(): type=%@", _ringbackUriType);
303 |
304 | @try {
305 | if (_ringback != nil) {
306 | if ([_ringback isPlaying]) {
307 | NSLog(@"RNInCallManager.startRingback(): is already playing");
308 | return;
309 | } else {
310 | [self stopRingback];
311 | }
312 | }
313 | // iOS doesn't have an embedded DTMF tone generator, so use system DTMF sound files.
314 | NSString *ringbackUriType = [_ringbackUriType isEqualToString:@"_DTMF_"]
315 | ? @"_DEFAULT_"
316 | : _ringbackUriType;
317 | NSURL *ringbackUri = [self getRingbackUri:ringbackUriType];
318 | if (ringbackUri == nil) {
319 | NSLog(@"RNInCallManager.startRingback(): no available media");
320 | return;
321 | }
322 | //self.storeOriginalAudioSetup()
323 | _ringback = [[AVAudioPlayer alloc] initWithContentsOfURL:ringbackUri error:nil];
324 | _ringback.delegate = self;
325 | _ringback.numberOfLoops = -1; // you need to stop it explicitly
326 | [_ringback prepareToPlay];
327 |
328 | //self.audioSessionSetCategory(self.incallAudioCategory, [.DefaultToSpeaker, .AllowBluetooth], #function)
329 | [self audioSessionSetCategory:_incallAudioCategory
330 | options:0
331 | callerMemo:NSStringFromSelector(_cmd)];
332 | [self audioSessionSetMode:_incallAudioMode
333 | callerMemo:NSStringFromSelector(_cmd)];
334 | [_ringback play];
335 | } @catch (NSException *e) {
336 | NSLog(@"RNInCallManager.startRingback(): caught error=%@", e.reason);
337 | }
338 | }
339 |
340 | RCT_EXPORT_METHOD(stopRingback)
341 | {
342 | if (_ringback != nil) {
343 | NSLog(@"RNInCallManager.stopRingback()");
344 | [_ringback stop];
345 | _ringback = nil;
346 | // --- need to reset the route based on config, because WebRTC seems to switch the audio mode automatically when the call is established.
347 | //[self updateAudioRoute];
348 | }
349 | }
350 |
351 | RCT_EXPORT_METHOD(startRingtone:(NSString *)ringtoneUriType
352 | ringtoneCategory:(NSString *)ringtoneCategory)
353 | {
354 | // you may be rejected by Apple when publishing the app if you use a system sound instead of a bundled sound.
355 | NSLog(@"RNInCallManager.startRingtone(): type: %@", ringtoneUriType);
356 | @try {
357 | if (_ringtone != nil) {
358 | if ([_ringtone isPlaying]) {
359 | NSLog(@"RNInCallManager.startRingtone(): is already playing.");
360 | return;
361 | } else {
362 | [self stopRingtone];
363 | }
364 | }
365 | NSURL *ringtoneUri = [self getRingtoneUri:ringtoneUriType];
366 | if (ringtoneUri == nil) {
367 | NSLog(@"RNInCallManager.startRingtone(): no available media");
368 | return;
369 | }
370 |
371 | // --- iOS has a Ringer/Silent switch, so just play without checking the ringer volume.
372 | [self storeOriginalAudioSetup];
373 | _ringtone = [[AVAudioPlayer alloc] initWithContentsOfURL:ringtoneUri error:nil];
374 | _ringtone.delegate = self;
375 | _ringtone.numberOfLoops = -1; // you need to stop it explicitly
376 | [_ringtone prepareToPlay];
377 |
378 | // --- 1. if we use Playback, it supports background playing (when started from the foreground), but it does not obey the Ring/Silent switch.
379 | // ---    make sure you have enabled the 'audio' tag (or 'voip' tag) at XCode -> Capabilities -> BackgroundMode
380 | // --- 2. if we use SoloAmbient, it obeys the Ring/Silent switch in the foreground, but does not support background playing,
381 | // ---    so you should play the ringtone again via a local notification after the app goes to the background during a ring session.
382 |
383 | // we prefer 2. by default, since most users don't want to be interrupted by a ringtone when Silent mode is on.
384 |
385 | //self.audioSessionSetCategory(AVAudioSessionCategoryPlayback, [.DuckOthers], #function)
386 | if ([ringtoneCategory isEqualToString:@"playback"]) {
387 | [self audioSessionSetCategory:AVAudioSessionCategoryPlayback
388 | options:0
389 | callerMemo:NSStringFromSelector(_cmd)];
390 | } else {
391 | [self audioSessionSetCategory:AVAudioSessionCategorySoloAmbient
392 | options:0
393 | callerMemo:NSStringFromSelector(_cmd)];
394 | }
395 | [self audioSessionSetMode:AVAudioSessionModeDefault
396 | callerMemo:NSStringFromSelector(_cmd)];
397 | //[self audioSessionSetActive:YES
398 | // options:nil
399 | // callerMemo:NSStringFromSelector(_cmd)];
400 | [_ringtone play];
401 | } @catch (NSException *e) {
402 | NSLog(@"RNInCallManager.startRingtone(): caught error = %@", e.reason);
403 | }
404 | }
405 |
406 | RCT_EXPORT_METHOD(stopRingtone)
407 | {
408 | if (_ringtone != nil) {
409 | NSLog(@"RNInCallManager.stopRingtone()");
410 | [_ringtone stop];
411 | _ringtone = nil;
412 | [self restoreOriginalAudioSetup];
413 | [self audioSessionSetActive:NO
414 | options:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation
415 | callerMemo:NSStringFromSelector(_cmd)];
416 | }
417 | }
418 |
419 | - (void)_checkRecordPermission
420 | {
421 | NSString *recordPermission = @"unsupported";
422 | switch ([_audioSession recordPermission]) {
423 | case AVAudioSessionRecordPermissionGranted:
424 | recordPermission = @"granted";
425 | break;
426 | case AVAudioSessionRecordPermissionDenied:
427 | recordPermission = @"denied";
428 | break;
429 | case AVAudioSessionRecordPermissionUndetermined:
430 | recordPermission = @"undetermined";
431 | break;
432 | default:
433 | recordPermission = @"unknow";
434 | break;
435 | }
436 | _recordPermission = recordPermission;
437 | NSLog(@"RNInCallManager._checkRecordPermission(): recordPermission=%@", _recordPermission);
438 | }
439 |
440 | RCT_EXPORT_METHOD(checkRecordPermission:(RCTPromiseResolveBlock)resolve
441 | reject:(RCTPromiseRejectBlock)reject)
442 | {
443 | [self _checkRecordPermission];
444 | if (_recordPermission != nil) {
445 | resolve(_recordPermission);
446 | } else {
447 | reject(@"error_code", @"error message", RCTErrorWithMessage(@"checkRecordPermission is nil"));
448 | }
449 | }
450 |
451 | RCT_EXPORT_METHOD(requestRecordPermission:(RCTPromiseResolveBlock)resolve
452 | reject:(RCTPromiseRejectBlock)reject)
453 | {
454 | NSLog(@"RNInCallManager.requestRecordPermission(): waiting for user confirmation...");
455 | [_audioSession requestRecordPermission:^(BOOL granted) {
456 | if (granted) {
457 | self->_recordPermission = @"granted";
458 | } else {
459 | self->_recordPermission = @"denied";
460 | }
461 | NSLog(@"RNInCallManager.requestRecordPermission(): %@", self->_recordPermission);
462 | resolve(self->_recordPermission);
463 | }];
464 | }
465 |
466 | - (NSString *)_checkMediaPermission:(NSString *)targetMediaType
467 | {
468 | switch ([AVCaptureDevice authorizationStatusForMediaType:targetMediaType]) {
469 | case AVAuthorizationStatusAuthorized:
470 | return @"granted";
471 | case AVAuthorizationStatusDenied:
472 | return @"denied";
473 | case AVAuthorizationStatusNotDetermined:
474 | return @"undetermined";
475 | case AVAuthorizationStatusRestricted:
476 | return @"restricted";
477 | default:
478 | return @"unknow";
479 | }
480 | }
481 |
482 | - (void)_checkCameraPermission
483 | {
484 | _cameraPermission = [self _checkMediaPermission:AVMediaTypeVideo];
485 | NSLog(@"RNInCallManager._checkCameraPermission(): using iOS7 api. cameraPermission=%@", _cameraPermission);
486 | }
487 |
488 | RCT_EXPORT_METHOD(checkCameraPermission:(RCTPromiseResolveBlock)resolve
489 | reject:(RCTPromiseRejectBlock)reject)
490 | {
491 | [self _checkCameraPermission];
492 | if (_cameraPermission != nil) {
493 | resolve(_cameraPermission);
494 | } else {
495 | reject(@"error_code", @"error message", RCTErrorWithMessage(@"checkCameraPermission is nil"));
496 | }
497 | }
498 |
499 | RCT_EXPORT_METHOD(requestCameraPermission:(RCTPromiseResolveBlock)resolve
500 | reject:(RCTPromiseRejectBlock)reject)
501 | {
502 | NSLog(@"RNInCallManager.requestCameraPermission(): waiting for user confirmation...");
503 | [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
504 | completionHandler:^(BOOL granted) {
505 | if (granted) {
506 | self->_cameraPermission = @"granted";
507 | } else {
508 | self->_cameraPermission = @"denied";
509 | }
510 | NSLog(@"RNInCallManager.requestCameraPermission(): %@", self->_cameraPermission);
511 | resolve(self->_cameraPermission);
512 | }];
513 | }
514 |
515 | RCT_EXPORT_METHOD(getAudioUriJS:(NSString *)audioType
516 | fileType:(NSString *)fileType
517 | resolve:(RCTPromiseResolveBlock)resolve
518 | reject:(RCTPromiseRejectBlock)reject)
519 | {
520 | NSURL *result = nil;
521 | if ([audioType isEqualToString:@"ringback"]) {
522 | result = [self getRingbackUri:fileType];
523 | } else if ([audioType isEqualToString:@"busytone"]) {
524 | result = [self getBusytoneUri:fileType];
525 | } else if ([audioType isEqualToString:@"ringtone"]) {
526 | result = [self getRingtoneUri:fileType];
527 | }
528 | if (result != nil) {
529 | if (result.absoluteString.length > 0) {
530 | resolve(result.absoluteString);
531 | return;
532 | }
533 | }
534 | reject(@"error_code", @"getAudioUriJS() failed", RCTErrorWithMessage(@"getAudioUriJS() failed"));
535 | }
536 |
537 | RCT_EXPORT_METHOD(getIsWiredHeadsetPluggedIn:(RCTPromiseResolveBlock)resolve
538 | reject:(RCTPromiseRejectBlock)reject)
539 | {
540 | BOOL wiredHeadsetPluggedIn = [self isWiredHeadsetPluggedIn];
541 | resolve(@{
542 | @"isWiredHeadsetPluggedIn": wiredHeadsetPluggedIn ? @YES : @NO,
543 | });
544 | }
545 |
546 | - (void)updateAudioRoute
547 | {
548 | NSLog(@"RNInCallManager.updateAudioRoute(): [Enter] forceSpeakerOn flag=%d media=%@ category=%@ mode=%@", _forceSpeakerOn, _media, _audioSession.category, _audioSession.mode);
549 | //self.debugAudioSession()
550 |
551 | //AVAudioSessionPortOverride overrideAudioPort;
552 | int overrideAudioPort;
553 | NSString *overrideAudioPortString = @"";
554 | NSString *audioMode = @"";
555 |
556 | // --- WebRTC native code will change the audio mode automatically when the call is established.
557 | // --- There would be a race condition if we changed the audio mode at the same time as WebRTC.
558 | // --- So we should avoid changing the audio mode whenever possible, and only do it for the default video call case that wants to force the speaker off.
559 | // --- audio: only override speaker on/off; video: change the category/mode if needed and handle the proximity sensor (because proximity is off by default for a video call).
560 | if (_forceSpeakerOn == 1) {
561 | // --- force ON: override the speaker only, keep the audio mode unchanged.
562 | overrideAudioPort = AVAudioSessionPortOverrideSpeaker;
563 | overrideAudioPortString = @".Speaker";
564 | if ([_media isEqualToString:@"video"]) {
565 | audioMode = AVAudioSessionModeVideoChat;
566 | [self stopProximitySensor];
567 | }
568 | } else if (_forceSpeakerOn == -1) {
569 | // --- force off
570 | overrideAudioPort = AVAudioSessionPortOverrideNone;
571 | overrideAudioPortString = @".None";
572 | if ([_media isEqualToString:@"video"]) {
573 | audioMode = AVAudioSessionModeVoiceChat;
574 | [self startProximitySensor];
575 | }
576 | } else { // use default behavior
577 | overrideAudioPort = AVAudioSessionPortOverrideNone;
578 | overrideAudioPortString = @".None";
579 | if ([_media isEqualToString:@"video"]) {
580 | audioMode = AVAudioSessionModeVideoChat;
581 | [self stopProximitySensor];
582 | }
583 | }
584 |
585 | BOOL isCurrentRouteToSpeaker;
586 | isCurrentRouteToSpeaker = [self checkAudioRoute:@[AVAudioSessionPortBuiltInSpeaker]
587 | routeType:@"output"];
588 | if ((overrideAudioPort == AVAudioSessionPortOverrideSpeaker && !isCurrentRouteToSpeaker)
589 | || (overrideAudioPort == AVAudioSessionPortOverrideNone && isCurrentRouteToSpeaker)) {
590 | @try {
591 | [_audioSession overrideOutputAudioPort:overrideAudioPort error:nil];
592 | NSLog(@"RNInCallManager.updateAudioRoute(): audioSession.overrideOutputAudioPort(%@) success", overrideAudioPortString);
593 | } @catch (NSException *e) {
594 | NSLog(@"RNInCallManager.updateAudioRoute(): audioSession.overrideOutputAudioPort(%@) fail: %@", overrideAudioPortString, e.reason);
595 | }
596 | } else {
597 | NSLog(@"RNInCallManager.updateAudioRoute(): did NOT overrideOutputAudioPort()");
598 | }
599 |
600 | if (audioMode.length > 0 && ![_audioSession.mode isEqualToString:audioMode]) {
601 | [self audioSessionSetMode:audioMode
602 | callerMemo:NSStringFromSelector(_cmd)];
603 | NSLog(@"RNInCallManager.updateAudioRoute() audio mode has changed to %@", audioMode);
604 | } else {
605 | NSLog(@"RNInCallManager.updateAudioRoute() did NOT change audio mode");
606 | }
607 | //self.debugAudioSession()
608 | }
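// --- A minimal sketch of the _forceSpeakerOn contract, as driven by setForceSpeakerphoneOn: above:
//
//   [self setForceSpeakerphoneOn:1];    // force speaker on  -> AVAudioSessionPortOverrideSpeaker
//   [self setForceSpeakerphoneOn:-1];   // force speaker off -> AVAudioSessionPortOverrideNone (+ proximity sensor for video)
//   [self setForceSpeakerphoneOn:0];    // default behavior  -> AVAudioSessionPortOverrideNone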
609 |
610 | - (BOOL)checkAudioRoute:(NSArray *)targetPortTypeArray
611 | routeType:(NSString *)routeType
612 | {
613 | AVAudioSessionRouteDescription *currentRoute = _audioSession.currentRoute;
614 |
615 | if (currentRoute != nil) {
616 | NSArray *routes = [routeType isEqualToString:@"input"]
617 | ? currentRoute.inputs
618 | : currentRoute.outputs;
619 | for (AVAudioSessionPortDescription *portDescription in routes) {
620 | if ([targetPortTypeArray containsObject:portDescription.portType]) {
621 | return YES;
622 | }
623 | }
624 | }
625 | return NO;
626 | }
627 |
628 | - (BOOL)startBusytone:(NSString *)_busytoneUriType
629 | {
631 | // you may be rejected by Apple when publishing the app if you use a system sound instead of a bundled sound.
631 | NSLog(@"RNInCallManager.startBusytone(): type: %@", _busytoneUriType);
632 | @try {
633 | if (_busytone != nil) {
634 | if ([_busytone isPlaying]) {
635 | NSLog(@"RNInCallManager.startBusytone(): is already playing");
636 | return NO;
637 | } else {
638 | [self stopBusytone];
639 | }
640 | }
641 |
642 | // iOS doesn't have an embedded DTMF tone generator, so use system DTMF sound files.
643 | NSString *busytoneUriType = [_busytoneUriType isEqualToString:@"_DTMF_"]
644 | ? @"_DEFAULT_"
645 | : _busytoneUriType;
646 | NSURL *busytoneUri = [self getBusytoneUri:busytoneUriType];
647 | if (busytoneUri == nil) {
648 | NSLog(@"RNInCallManager.startBusytone(): no available media");
649 | return NO;
650 | }
651 | //[self storeOriginalAudioSetup];
652 | _busytone = [[AVAudioPlayer alloc] initWithContentsOfURL:busytoneUri error:nil];
653 | _busytone.delegate = self;
654 | _busytone.numberOfLoops = 0; // it's part of start(), will stop at stop()
655 | [_busytone prepareToPlay];
656 |
657 | //self.audioSessionSetCategory(self.incallAudioCategory, [.DefaultToSpeaker, .AllowBluetooth], #function)
658 | [self audioSessionSetCategory:_incallAudioCategory
659 | options:0
660 | callerMemo:NSStringFromSelector(_cmd)];
661 | [self audioSessionSetMode:_incallAudioMode
662 | callerMemo:NSStringFromSelector(_cmd)];
663 | [_busytone play];
664 | } @catch (NSException *e) {
665 | NSLog(@"RNInCallManager.startBusytone(): caught error = %@", e.reason);
666 | return NO;
667 | }
668 | return YES;
669 | }
670 |
671 | - (void)stopBusytone
672 | {
673 | if (_busytone != nil) {
674 | NSLog(@"RNInCallManager.stopBusytone()");
675 | [_busytone stop];
676 | _busytone = nil;
677 | }
678 | }
679 |
680 | - (BOOL)isWiredHeadsetPluggedIn
681 | {
682 | // --- only check for an audio device plugged into the headset port, not bluetooth/usb/hdmi
683 | return [self checkAudioRoute:@[AVAudioSessionPortHeadphones]
684 | routeType:@"output"]
685 | || [self checkAudioRoute:@[AVAudioSessionPortHeadsetMic]
686 | routeType:@"input"];
687 | }
688 |
689 | - (void)audioSessionSetCategory:(NSString *)audioCategory
690 | options:(AVAudioSessionCategoryOptions)options
691 | callerMemo:(NSString *)callerMemo
692 | {
693 | @try {
694 | if (options != 0) {
695 | [_audioSession setCategory:audioCategory
696 | withOptions:options
697 | error:nil];
698 | } else {
699 | [_audioSession setCategory:audioCategory
700 | error:nil];
701 | }
702 | NSLog(@"RNInCallManager.%@: audioSession.setCategory: %@, withOptions: %lu success", callerMemo, audioCategory, (unsigned long)options);
703 | } @catch (NSException *e) {
704 | NSLog(@"RNInCallManager.%@: audioSession.setCategory: %@, withOptions: %lu fail: %@", callerMemo, audioCategory, (unsigned long)options, e.reason);
705 | }
706 | }
707 |
708 | - (void)audioSessionSetMode:(NSString *)audioMode
709 | callerMemo:(NSString *)callerMemo
710 | {
711 | @try {
712 | [_audioSession setMode:audioMode error:nil];
713 | NSLog(@"RNInCallManager.%@: audioSession.setMode(%@) success", callerMemo, audioMode);
714 | } @catch (NSException *e) {
715 | NSLog(@"RNInCallManager.%@: audioSession.setMode(%@) fail: %@", callerMemo, audioMode, e.reason);
716 | }
717 | }
718 |
719 | - (void)audioSessionSetActive:(BOOL)audioActive
720 | options:(AVAudioSessionSetActiveOptions)options
721 | callerMemo:(NSString *)callerMemo
722 | {
723 | @try {
724 | if (options != 0) {
725 | [_audioSession setActive:audioActive
726 | withOptions:options
727 | error:nil];
728 | } else {
729 | [_audioSession setActive:audioActive
730 | error:nil];
731 | }
732 | NSLog(@"RNInCallManager.%@: audioSession.setActive(%@), withOptions: %lu success", callerMemo, audioActive ? @"YES" : @"NO", (unsigned long)options);
733 | } @catch (NSException *e) {
734 | NSLog(@"RNInCallManager.%@: audioSession.setActive(%@), withOptions: %lu fail: %@", callerMemo, audioActive ? @"YES" : @"NO", (unsigned long)options, e.reason);
735 | }
736 | }
737 |
738 | - (void)storeOriginalAudioSetup
739 | {
740 | NSLog(@"RNInCallManager.storeOriginalAudioSetup(): origAudioCategory=%@, origAudioMode=%@", _audioSession.category, _audioSession.mode);
741 | _origAudioCategory = _audioSession.category;
742 | _origAudioMode = _audioSession.mode;
743 | }
744 |
745 | - (void)restoreOriginalAudioSetup
746 | {
747 | NSLog(@"RNInCallManager.restoreOriginalAudioSetup(): origAudioCategory=%@, origAudioMode=%@", _audioSession.category, _audioSession.mode);
748 | [self audioSessionSetCategory:_origAudioCategory
749 | options:0
750 | callerMemo:NSStringFromSelector(_cmd)];
751 | [self audioSessionSetMode:_origAudioMode
752 | callerMemo:NSStringFromSelector(_cmd)];
753 | }
754 |
755 | - (void)startProximitySensor
756 | {
757 | if (_isProximityRegistered) {
758 | return;
759 | }
760 |
761 | NSLog(@"RNInCallManager.startProximitySensor()");
762 | dispatch_async(dispatch_get_main_queue(), ^{
763 | self->_currentDevice.proximityMonitoringEnabled = YES;
764 | });
765 |
766 | // --- in case it didn't deallocate when ViewDidUnload
767 | [self stopObserve:_proximityObserver
768 | name:UIDeviceProximityStateDidChangeNotification
769 | object:nil];
770 |
771 | _proximityObserver = [self startObserve:UIDeviceProximityStateDidChangeNotification
772 | object:_currentDevice
773 | queue: nil
774 | block:^(NSNotification *notification) {
775 | BOOL state = self->_currentDevice.proximityState;
776 | if (state != self->_proximityIsNear) {
777 | NSLog(@"RNInCallManager.UIDeviceProximityStateDidChangeNotification(): isNear: %@", state ? @"YES" : @"NO");
778 | self->_proximityIsNear = state;
779 | [self sendEventWithName:@"Proximity" body:@{@"isNear": state ? @YES : @NO}];
780 | }
781 | }];
782 |
783 | _isProximityRegistered = YES;
784 | }
785 |
786 | - (void)stopProximitySensor
787 | {
788 | if (!_isProximityRegistered) {
789 | return;
790 | }
791 |
792 | NSLog(@"RNInCallManager.stopProximitySensor()");
793 | dispatch_async(dispatch_get_main_queue(), ^{
794 | self->_currentDevice.proximityMonitoringEnabled = NO;
795 | });
796 |
797 | // --- remove all no matter what object
798 | [self stopObserve:_proximityObserver
799 | name:UIDeviceProximityStateDidChangeNotification
800 | object:nil];
801 |
802 | _isProximityRegistered = NO;
803 | }
804 |
805 | - (void)startAudioSessionNotification
806 | {
807 | NSLog(@"RNInCallManager.startAudioSessionNotification() starting...");
808 | [self startAudioSessionInterruptionNotification];
809 | [self startAudioSessionRouteChangeNotification];
810 | [self startAudioSessionMediaServicesWereLostNotification];
811 | [self startAudioSessionMediaServicesWereResetNotification];
812 | [self startAudioSessionSilenceSecondaryAudioHintNotification];
813 | }
814 |
815 | - (void)stopAudioSessionNotification
816 | {
817 | NSLog(@"RNInCallManager.startAudioSessionNotification() stopping...");
818 | [self stopAudioSessionInterruptionNotification];
819 | [self stopAudioSessionRouteChangeNotification];
820 | [self stopAudioSessionMediaServicesWereLostNotification];
821 | [self stopAudioSessionMediaServicesWereResetNotification];
822 | [self stopAudioSessionSilenceSecondaryAudioHintNotification];
823 | }
824 |
825 | - (void)startAudioSessionInterruptionNotification
826 | {
827 | if (_isAudioSessionInterruptionRegistered) {
828 | return;
829 | }
830 | NSLog(@"RNInCallManager.startAudioSessionInterruptionNotification()");
831 |
832 | // --- in case it didn't deallocate when ViewDidUnload
833 | [self stopObserve:_audioSessionInterruptionObserver
834 | name:AVAudioSessionInterruptionNotification
835 | object:nil];
836 |
837 | _audioSessionInterruptionObserver = [self startObserve:AVAudioSessionInterruptionNotification
838 | object:nil
839 | queue:nil
840 | block:^(NSNotification *notification) {
841 | if (notification.userInfo == nil
842 | || ![notification.name isEqualToString:AVAudioSessionInterruptionNotification]) {
843 | return;
844 | }
845 |
846 | //NSUInteger rawValue = notification.userInfo[AVAudioSessionInterruptionTypeKey].unsignedIntegerValue;
847 | NSNumber *interruptType = [notification.userInfo objectForKey:@"AVAudioSessionInterruptionTypeKey"];
848 | if ([interruptType unsignedIntegerValue] == AVAudioSessionInterruptionTypeBegan) {
849 | NSLog(@"RNInCallManager.AudioSessionInterruptionNotification: Began");
850 | } else if ([interruptType unsignedIntegerValue] == AVAudioSessionInterruptionTypeEnded) {
851 | NSLog(@"RNInCallManager.AudioSessionInterruptionNotification: Ended");
852 | } else {
853 | NSLog(@"RNInCallManager.AudioSessionInterruptionNotification: Unknown Value");
854 | }
855 | //NSLog(@"RNInCallManager.AudioSessionInterruptionNotification: could not resolve notification");
856 | }];
857 |
858 | _isAudioSessionInterruptionRegistered = YES;
859 | }
860 |
861 | - (void)stopAudioSessionInterruptionNotification
862 | {
863 | if (!_isAudioSessionInterruptionRegistered) {
864 | return;
865 | }
866 | NSLog(@"RNInCallManager.stopAudioSessionInterruptionNotification()");
867 | // --- remove all no matter what object
868 | [self stopObserve:_audioSessionInterruptionObserver
869 | name:AVAudioSessionInterruptionNotification
870 | object: nil];
871 | _isAudioSessionInterruptionRegistered = NO;
872 | }
873 |
874 | - (void)startAudioSessionRouteChangeNotification
875 | {
876 | if (_isAudioSessionRouteChangeRegistered) {
877 | return;
878 | }
879 |
880 | NSLog(@"RNInCallManager.startAudioSessionRouteChangeNotification()");
881 |
882 | // --- in case it didn't deallocate when ViewDidUnload
883 | [self stopObserve:_audioSessionRouteChangeObserver
884 | name: AVAudioSessionRouteChangeNotification
885 | object: nil];
886 |
887 | _audioSessionRouteChangeObserver = [self startObserve:AVAudioSessionRouteChangeNotification
888 | object: nil
889 | queue: nil
890 | block:^(NSNotification *notification) {
891 | if (notification.userInfo == nil
892 | || ![notification.name isEqualToString:AVAudioSessionRouteChangeNotification]) {
893 | return;
894 | }
895 |
896 | NSNumber *routeChangeType = [notification.userInfo objectForKey:@"AVAudioSessionRouteChangeReasonKey"];
897 | NSUInteger routeChangeTypeValue = [routeChangeType unsignedIntegerValue];
898 |
899 | switch (routeChangeTypeValue) {
900 | case AVAudioSessionRouteChangeReasonUnknown:
901 | NSLog(@"RNInCallManager.AudioRouteChange.Reason: Unknown");
902 | break;
903 | case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
904 | NSLog(@"RNInCallManager.AudioRouteChange.Reason: NewDeviceAvailable");
905 | if ([self checkAudioRoute:@[AVAudioSessionPortHeadsetMic]
906 | routeType:@"input"]) {
907 | [self sendEventWithName:@"WiredHeadset"
908 | body:@{
909 | @"isPlugged": @YES,
910 | @"hasMic": @YES,
911 | @"deviceName": AVAudioSessionPortHeadsetMic,
912 | }];
913 | } else if ([self checkAudioRoute:@[AVAudioSessionPortHeadphones]
914 | routeType:@"output"]) {
915 | [self sendEventWithName:@"WiredHeadset"
916 | body:@{
917 | @"isPlugged": @YES,
918 | @"hasMic": @NO,
919 | @"deviceName": AVAudioSessionPortHeadphones,
920 | }];
921 | }
922 | break;
923 | case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
924 | NSLog(@"RNInCallManager.AudioRouteChange.Reason: OldDeviceUnavailable");
925 | if (![self isWiredHeadsetPluggedIn]) {
926 | [self sendEventWithName:@"WiredHeadset"
927 | body:@{
928 | @"isPlugged": @NO,
929 | @"hasMic": @NO,
930 | @"deviceName": @"",
931 | }];
932 | }
933 | break;
934 | case AVAudioSessionRouteChangeReasonCategoryChange:
935 | NSLog(@"RNInCallManager.AudioRouteChange.Reason: CategoryChange. category=%@ mode=%@", self->_audioSession.category, self->_audioSession.mode);
936 | [self updateAudioRoute];
937 | break;
938 | case AVAudioSessionRouteChangeReasonOverride:
939 | NSLog(@"RNInCallManager.AudioRouteChange.Reason: Override");
940 | break;
941 | case AVAudioSessionRouteChangeReasonWakeFromSleep:
942 | NSLog(@"RNInCallManager.AudioRouteChange.Reason: WakeFromSleep");
943 | break;
944 | case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
945 | NSLog(@"RNInCallManager.AudioRouteChange.Reason: NoSuitableRouteForCategory");
946 | break;
947 | case AVAudioSessionRouteChangeReasonRouteConfigurationChange:
948 | NSLog(@"RNInCallManager.AudioRouteChange.Reason: RouteConfigurationChange. category=%@ mode=%@", self->_audioSession.category, self->_audioSession.mode);
949 | break;
950 | default:
951 | NSLog(@"RNInCallManager.AudioRouteChange.Reason: Unknown Value");
952 | break;
953 | }
954 |
955 | NSNumber *silenceSecondaryAudioHintType = [notification.userInfo objectForKey:@"AVAudioSessionSilenceSecondaryAudioHintTypeKey"];
956 | NSUInteger silenceSecondaryAudioHintTypeValue = [silenceSecondaryAudioHintType unsignedIntegerValue];
957 | switch (silenceSecondaryAudioHintTypeValue) {
958 | case AVAudioSessionSilenceSecondaryAudioHintTypeBegin:
959 | NSLog(@"RNInCallManager.AudioRouteChange.SilenceSecondaryAudioHint: Begin"); break;
960 | case AVAudioSessionSilenceSecondaryAudioHintTypeEnd:
961 | NSLog(@"RNInCallManager.AudioRouteChange.SilenceSecondaryAudioHint: End"); break;
962 | default:
963 | NSLog(@"RNInCallManager.AudioRouteChange.SilenceSecondaryAudioHint: Unknown Value"); break;
964 | }
965 | }];
966 |
967 | _isAudioSessionRouteChangeRegistered = YES;
968 | }
969 |
970 | - (void)stopAudioSessionRouteChangeNotification
971 | {
972 | if (!_isAudioSessionRouteChangeRegistered) {
973 | return;
974 | }
975 |
976 | NSLog(@"RNInCallManager.stopAudioSessionRouteChangeNotification()");
977 | // --- remove all no matter what object
978 | [self stopObserve:_audioSessionRouteChangeObserver
979 | name:AVAudioSessionRouteChangeNotification
980 | object:nil];
981 | _isAudioSessionRouteChangeRegistered = NO;
982 | }
983 |
984 | - (void)startAudioSessionMediaServicesWereLostNotification
985 | {
986 | if (_isAudioSessionMediaServicesWereLostRegistered) {
987 | return;
988 | }
989 |
990 | NSLog(@"RNInCallManager.startAudioSessionMediaServicesWereLostNotification()");
991 |
992 | // --- in case it didn't deallocate when ViewDidUnload
993 | [self stopObserve:_audioSessionMediaServicesWereLostObserver
994 | name:AVAudioSessionMediaServicesWereLostNotification
995 | object:nil];
996 |
997 | _audioSessionMediaServicesWereLostObserver = [self startObserve:AVAudioSessionMediaServicesWereLostNotification
998 | object:nil
999 | queue:nil
1000 | block:^(NSNotification *notification) {
1001 | // --- This notification has no userInfo dictionary.
1002 | NSLog(@"RNInCallManager.AudioSessionMediaServicesWereLostNotification: Media Services Were Lost");
1003 | }];
1004 |
1005 | _isAudioSessionMediaServicesWereLostRegistered = YES;
1006 | }
1007 |
1008 | - (void)stopAudioSessionMediaServicesWereLostNotification
1009 | {
1010 | if (!_isAudioSessionMediaServicesWereLostRegistered) {
1011 | return;
1012 | }
1013 |
1014 | NSLog(@"RNInCallManager.stopAudioSessionMediaServicesWereLostNotification()");
1015 |
1016 | // --- remove all no matter what object
1017 | [self stopObserve:_audioSessionMediaServicesWereLostObserver
1018 | name:AVAudioSessionMediaServicesWereLostNotification
1019 | object:nil];
1020 |
1021 | _isAudioSessionMediaServicesWereLostRegistered = NO;
1022 | }
1023 |
1024 | - (void)startAudioSessionMediaServicesWereResetNotification
1025 | {
1026 | if (_isAudioSessionMediaServicesWereResetRegistered) {
1027 | return;
1028 | }
1029 |
1030 | NSLog(@"RNInCallManager.startAudioSessionMediaServicesWereResetNotification()");
1031 |
1032 | // --- in case it didn't deallocate when ViewDidUnload
1033 | [self stopObserve:_audioSessionMediaServicesWereResetObserver
1034 | name:AVAudioSessionMediaServicesWereResetNotification
1035 | object:nil];
1036 |
1037 | _audioSessionMediaServicesWereResetObserver = [self startObserve:AVAudioSessionMediaServicesWereResetNotification
1038 | object:nil
1039 | queue:nil
1040 | block:^(NSNotification *notification) {
1041 | // --- This notification has no userInfo dictionary.
1042 | NSLog(@"RNInCallManager.AudioSessionMediaServicesWereResetNotification: Media Services Were Reset");
1043 | }];
1044 |
1045 | _isAudioSessionMediaServicesWereResetRegistered = YES;
1046 | }
1047 |
1048 | - (void)stopAudioSessionMediaServicesWereResetNotification
1049 | {
1050 | if (!_isAudioSessionMediaServicesWereResetRegistered) {
1051 | return;
1052 | }
1053 |
1054 | NSLog(@"RNInCallManager.stopAudioSessionMediaServicesWereResetNotification()");
1055 |
1056 | // --- remove all no matter what object
1057 | [self stopObserve:_audioSessionMediaServicesWereResetObserver
1058 | name:AVAudioSessionMediaServicesWereResetNotification
1059 | object:nil];
1060 |
1061 | _isAudioSessionMediaServicesWereResetRegistered = NO;
1062 | }
1063 |
1064 | - (void)startAudioSessionSilenceSecondaryAudioHintNotification
1065 | {
1066 | if (_isAudioSessionSilenceSecondaryAudioHintRegistered) {
1067 | return;
1068 | }
1069 |
1070 | NSLog(@"RNInCallManager.startAudioSessionSilenceSecondaryAudioHintNotification()");
1071 |
1072 | // --- in case it didn't deallocate when ViewDidUnload
1073 | [self stopObserve:_audioSessionSilenceSecondaryAudioHintObserver
1074 | name:AVAudioSessionSilenceSecondaryAudioHintNotification
1075 | object:nil];
1076 |
1077 | _audioSessionSilenceSecondaryAudioHintObserver = [self startObserve:AVAudioSessionSilenceSecondaryAudioHintNotification
1078 | object:nil
1079 | queue:nil
1080 | block:^(NSNotification *notification) {
1081 | if (notification.userInfo == nil
1082 | || ![notification.name isEqualToString:AVAudioSessionSilenceSecondaryAudioHintNotification]) {
1083 | return;
1084 | }
1085 |
1086 | NSNumber *silenceSecondaryAudioHintType = [notification.userInfo objectForKey:@"AVAudioSessionSilenceSecondaryAudioHintTypeKey"];
1087 | NSUInteger silenceSecondaryAudioHintTypeValue = [silenceSecondaryAudioHintType unsignedIntegerValue];
1088 | switch (silenceSecondaryAudioHintTypeValue) {
1089 | case AVAudioSessionSilenceSecondaryAudioHintTypeBegin:
1090 | NSLog(@"RNInCallManager.AVAudioSessionSilenceSecondaryAudioHintNotification: Begin");
1091 | break;
1092 | case AVAudioSessionSilenceSecondaryAudioHintTypeEnd:
1093 | NSLog(@"RNInCallManager.AVAudioSessionSilenceSecondaryAudioHintNotification: End");
1094 | break;
1095 | default:
1096 | NSLog(@"RNInCallManager.AVAudioSessionSilenceSecondaryAudioHintNotification: Unknow Value");
1097 | break;
1098 | }
1099 | }];
1100 | _isAudioSessionSilenceSecondaryAudioHintRegistered = YES;
1101 | }
1102 |
1103 | - (void)stopAudioSessionSilenceSecondaryAudioHintNotification
1104 | {
1105 | if (!_isAudioSessionSilenceSecondaryAudioHintRegistered) {
1106 | return;
1107 | }
1108 |
1109 | NSLog(@"RNInCallManager.stopAudioSessionSilenceSecondaryAudioHintNotification()");
1110 | // --- remove all no matter what object
1111 | [self stopObserve:_audioSessionSilenceSecondaryAudioHintObserver
1112 | name:AVAudioSessionSilenceSecondaryAudioHintNotification
1113 | object:nil];
1114 |
1115 | _isAudioSessionSilenceSecondaryAudioHintRegistered = NO;
1116 | }
1117 |
1118 | - (id)startObserve:(NSString *)name
1119 | object:(id)object
1120 | queue:(NSOperationQueue *)queue
1121 | block:(void (^)(NSNotification *))block
1122 | {
1123 | return [[NSNotificationCenter defaultCenter] addObserverForName:name
1124 | object:object
1125 | queue:queue
1126 | usingBlock:block];
1127 | }
1128 |
1129 | - (void)stopObserve:(id)observer
1130 | name:(NSString *)name
1131 | object:(id)object
1132 | {
1133 | if (observer == nil) return;
1134 | [[NSNotificationCenter defaultCenter] removeObserver:observer
1135 | name:name
1136 | object:object];
1137 | }
1138 |
1139 | - (NSURL *)getRingbackUri:(NSString *)_type
1140 | {
1141 | NSString *fileBundle = @"incallmanager_ringback";
1142 | NSString *fileBundleExt = @"mp3";
1143 | //NSString *fileSysWithExt = @"vc~ringing.caf"; // --- ringtone of facetime, but can't play it.
1144 | //NSString *fileSysPath = @"/System/Library/Audio/UISounds";
1145 | NSString *fileSysWithExt = @"Marimba.m4r";
1146 | NSString *fileSysPath = @"/Library/Ringtones";
1147 |
1148 |     // --- you can't get the user's default preference sound on iOS
1149 | NSString *type = [_type isEqualToString:@""] || [_type isEqualToString:@"_DEFAULT_"]
1150 | ? fileSysWithExt
1151 | : _type;
1152 |
1153 | NSURL *bundleUri = _bundleRingbackUri;
1154 | NSURL *defaultUri = _defaultRingbackUri;
1155 |
1156 | NSURL *uri = [self getAudioUri:type
1157 | fileBundle:fileBundle
1158 | fileBundleExt:fileBundleExt
1159 | fileSysWithExt:fileSysWithExt
1160 | fileSysPath:fileSysPath
1161 | uriBundle:&bundleUri
1162 | uriDefault:&defaultUri];
1163 |
1164 | _bundleRingbackUri = bundleUri;
1165 | _defaultRingbackUri = defaultUri;
1166 |
1167 | return uri;
1168 | }
1169 |
1170 | - (NSURL *)getBusytoneUri:(NSString *)_type
1171 | {
1172 | NSString *fileBundle = @"incallmanager_busytone";
1173 | NSString *fileBundleExt = @"mp3";
1174 | NSString *fileSysWithExt = @"ct-busy.caf"; //ct-congestion.caf
1175 | NSString *fileSysPath = @"/System/Library/Audio/UISounds";
1176 |     // --- you can't get the user's default preference sound on iOS
1177 | NSString *type = [_type isEqualToString:@""] || [_type isEqualToString:@"_DEFAULT_"]
1178 | ? fileSysWithExt
1179 | : _type;
1180 |
1181 | NSURL *bundleUri = _bundleBusytoneUri;
1182 | NSURL *defaultUri = _defaultBusytoneUri;
1183 |
1184 | NSURL *uri = [self getAudioUri:type
1185 | fileBundle:fileBundle
1186 | fileBundleExt:fileBundleExt
1187 | fileSysWithExt:fileSysWithExt
1188 | fileSysPath:fileSysPath
1189 | uriBundle:&bundleUri
1190 | uriDefault:&defaultUri];
1191 |
1192 | _bundleBusytoneUri = bundleUri;
1193 | _defaultBusytoneUri = defaultUri;
1194 |
1195 | return uri;
1196 | }
1197 |
1198 | - (NSURL *)getRingtoneUri:(NSString *)_type
1199 | {
1200 | NSString *fileBundle = @"incallmanager_ringtone";
1201 | NSString *fileBundleExt = @"mp3";
1202 | NSString *fileSysWithExt = @"Opening.m4r"; //Marimba.m4r
1203 | NSString *fileSysPath = @"/Library/Ringtones";
1204 |     // --- you can't get the user's default preference sound on iOS
1205 | NSString *type = [_type isEqualToString:@""] || [_type isEqualToString:@"_DEFAULT_"]
1206 | ? fileSysWithExt
1207 | : _type;
1208 |
1209 | NSURL *bundleUri = _bundleRingtoneUri;
1210 | NSURL *defaultUri = _defaultRingtoneUri;
1211 |
1212 | NSURL *uri = [self getAudioUri:type
1213 | fileBundle:fileBundle
1214 | fileBundleExt:fileBundleExt
1215 | fileSysWithExt:fileSysWithExt
1216 | fileSysPath:fileSysPath
1217 | uriBundle:&bundleUri
1218 | uriDefault:&defaultUri];
1219 |
1220 | _bundleRingtoneUri = bundleUri;
1221 | _defaultRingtoneUri = defaultUri;
1222 |
1223 | return uri;
1224 | }
1225 |
1226 | - (NSURL *)getAudioUri:(NSString *)_type
1227 | fileBundle:(NSString *)fileBundle
1228 | fileBundleExt:(NSString *)fileBundleExt
1229 | fileSysWithExt:(NSString *)fileSysWithExt
1230 | fileSysPath:(NSString *)fileSysPath
1231 | uriBundle:(NSURL **)uriBundle
1232 | uriDefault:(NSURL **)uriDefault
1233 | {
1234 | NSString *type = _type;
1235 | if ([type isEqualToString:@"_BUNDLE_"]) {
1236 | if (*uriBundle == nil) {
1237 | *uriBundle = [[NSBundle mainBundle] URLForResource:fileBundle withExtension:fileBundleExt];
1238 | if (*uriBundle == nil) {
1239 | NSLog(@"RNInCallManager.getAudioUri(): %@.%@ not found in bundle.", fileBundle, fileBundleExt);
1240 | type = fileSysWithExt;
1241 | } else {
1242 | return *uriBundle;
1243 | }
1244 | } else {
1245 | return *uriBundle;
1246 | }
1247 | }
1248 |
1249 | if (*uriDefault == nil) {
1250 | NSString *target = [NSString stringWithFormat:@"%@/%@", fileSysPath, type];
1251 | *uriDefault = [self getSysFileUri:target];
1252 | }
1253 | return *uriDefault;
1254 | }
1255 |
1256 | - (NSURL *)getSysFileUri:(NSString *)target
1257 | {
1258 | NSURL *url = [[NSURL alloc] initFileURLWithPath:target isDirectory:NO];
1259 |
1260 | if (url != nil) {
1261 | NSString *path = url.path;
1262 | if (path != nil) {
1263 | NSFileManager *fileManager = [[NSFileManager alloc] init];
1264 | BOOL isTargetDirectory;
1265 | if ([fileManager fileExistsAtPath:path isDirectory:&isTargetDirectory]) {
1266 | if (!isTargetDirectory) {
1267 | return url;
1268 | }
1269 | }
1270 | }
1271 | }
1272 | NSLog(@"RNInCallManager.getSysFileUri(): can not get url for %@", target);
1273 | return nil;
1274 | }
1275 |
1276 | #pragma mark - AVAudioPlayerDelegate
1277 |
1278 | // --- this is only called when all loops have finished playing; an infinite loop (numberOfLoops = -1) will never reach here.
1279 | - (void)audioPlayerDidFinishPlaying:(AVAudioPlayer *)player
1280 | successfully:(BOOL)flag
1281 | {
1282 | NSString *filename = player.url.URLByDeletingPathExtension.lastPathComponent;
1283 | NSLog(@"RNInCallManager.audioPlayerDidFinishPlaying(): finished playing: %@", filename);
1284 | if ([filename isEqualToString:_bundleBusytoneUri.URLByDeletingPathExtension.lastPathComponent]
1285 | || [filename isEqualToString:_defaultBusytoneUri.URLByDeletingPathExtension.lastPathComponent]) {
1286 | //[self stopBusytone];
1287 | NSLog(@"RNInCallManager.audioPlayerDidFinishPlaying(): busytone finished, invoke stop()");
1288 | [self stop:@""];
1289 | }
1290 | }
1291 |
1292 | - (void)audioPlayerDecodeErrorDidOccur:(AVAudioPlayer *)player
1293 | error:(NSError *)error
1294 | {
1295 | NSString *filename = player.url.URLByDeletingPathExtension.lastPathComponent;
1296 | NSLog(@"RNInCallManager.audioPlayerDecodeErrorDidOccur(): player=%@, error=%@", filename, error.localizedDescription);
1297 | }
1298 |
1299 | // --- Deprecated in iOS 8.0.
1300 | //- (void)audioPlayerBeginInterruption:(AVAudioPlayer *)player
1301 | //{
1302 | //}
1303 |
1304 | // --- Deprecated in iOS 8.0.
1305 | //- (void)audioPlayerEndInterruption:(AVAudioPlayer *)player
1306 | //{
1307 | //}
1308 |
1309 | //- (void)debugAudioSession
1310 | //{
1311 | // let currentRoute: Dictionary = ["input": self.audioSession.currentRoute.inputs[0].uid, "output": self.audioSession.currentRoute.outputs[0].uid]
1312 | // var categoryOptions = ""
1313 | // switch self.audioSession.categoryOptions {
1314 | // case AVAudioSessionCategoryOptions.mixWithOthers:
1315 | // categoryOptions = "MixWithOthers"
1316 | // case AVAudioSessionCategoryOptions.duckOthers:
1317 | // categoryOptions = "DuckOthers"
1318 | // case AVAudioSessionCategoryOptions.allowBluetooth:
1319 | // categoryOptions = "AllowBluetooth"
1320 | // case AVAudioSessionCategoryOptions.defaultToSpeaker:
1321 | // categoryOptions = "DefaultToSpeaker"
1322 | // default:
1323 | // categoryOptions = "unknow"
1324 | // }
1325 | // if #available(iOS 9, *) {
1326 | // if categoryOptions == "unknow" && self.audioSession.categoryOptions == AVAudioSessionCategoryOptions.interruptSpokenAudioAndMixWithOthers {
1327 | // categoryOptions = "InterruptSpokenAudioAndMixWithOthers"
1328 | // }
1329 | // }
1330 | // self._checkRecordPermission()
1331 | // let audioSessionProperties: Dictionary = [
1332 | // "category": self.audioSession.category,
1333 | // "categoryOptions": categoryOptions,
1334 | // "mode": self.audioSession.mode,
1335 | // //"inputAvailable": self.audioSession.inputAvailable,
1336 | // "otherAudioPlaying": self.audioSession.isOtherAudioPlaying,
1337 | // "recordPermission" : self.recordPermission,
1338 | // //"availableInputs": self.audioSession.availableInputs,
1339 | // //"preferredInput": self.audioSession.preferredInput,
1340 | // //"inputDataSources": self.audioSession.inputDataSources,
1341 | // //"inputDataSource": self.audioSession.inputDataSource,
1342 | // //"outputDataSources": self.audioSession.outputDataSources,
1343 | // //"outputDataSource": self.audioSession.outputDataSource,
1344 | // "currentRoute": currentRoute,
1345 | // "outputVolume": self.audioSession.outputVolume,
1346 | // "inputGain": self.audioSession.inputGain,
1347 | // "inputGainSettable": self.audioSession.isInputGainSettable,
1348 | // "inputLatency": self.audioSession.inputLatency,
1349 | // "outputLatency": self.audioSession.outputLatency,
1350 | // "sampleRate": self.audioSession.sampleRate,
1351 | // "preferredSampleRate": self.audioSession.preferredSampleRate,
1352 | // "IOBufferDuration": self.audioSession.ioBufferDuration,
1353 | // "preferredIOBufferDuration": self.audioSession.preferredIOBufferDuration,
1354 | // "inputNumberOfChannels": self.audioSession.inputNumberOfChannels,
1355 | // "maximumInputNumberOfChannels": self.audioSession.maximumInputNumberOfChannels,
1356 | // "preferredInputNumberOfChannels": self.audioSession.preferredInputNumberOfChannels,
1357 | // "outputNumberOfChannels": self.audioSession.outputNumberOfChannels,
1358 | // "maximumOutputNumberOfChannels": self.audioSession.maximumOutputNumberOfChannels,
1359 | // "preferredOutputNumberOfChannels": self.audioSession.preferredOutputNumberOfChannels
1360 | // ]
1361 | // /*
1362 | // // --- Too noisy
1363 | // if #available(iOS 8, *) {
1364 | // //audioSessionProperties["secondaryAudioShouldBeSilencedHint"] = self.audioSession.secondaryAudioShouldBeSilencedHint
1365 | // } else {
1366 | // //audioSessionProperties["secondaryAudioShouldBeSilencedHint"] = "unknow"
1367 | // }
1368 | // if #available(iOS 9, *) {
1369 | // //audioSessionProperties["availableCategories"] = self.audioSession.availableCategories
1370 | // //audioSessionProperties["availableModes"] = self.audioSession.availableModes
1371 | // }
1372 | // */
1373 | // NSLog("RNInCallManager.debugAudioSession(): ==========BEGIN==========")
1374 | // // iterate over all keys
1375 | // for (key, value) in audioSessionProperties {
1376 | // NSLog("\(key) = \(value)")
1377 | // }
1378 | // NSLog("RNInCallManager.debugAudioSession(): ==========END==========")
1379 | //}
1380 |
1381 | @end
1382 |
--------------------------------------------------------------------------------
/android/src/main/java/com/zxcpoiu/incallmanager/InCallManagerModule.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017 Henry Lin @zxcpoiu
3 | *
4 | * Permission to use, copy, modify, and distribute this software for any
5 | * purpose with or without fee is hereby granted, provided that the above
6 | * copyright notice and this permission notice appear in all copies.
7 | *
8 | * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 | * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 | * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 | * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 | * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 | * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 | * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
15 | */
16 |
17 | package com.zxcpoiu.incallmanager;
18 |
19 | import android.app.Activity;
20 | import android.content.Context;
21 | import android.content.Intent;
22 | import android.content.IntentFilter;
23 | import android.content.BroadcastReceiver;
24 | import android.content.pm.PackageManager;
25 | import android.Manifest.permission;
26 | //import android.media.AudioAttributes; // --- for API 21+
27 | import android.media.AudioManager;
28 | import android.media.AudioDeviceInfo;
29 | import android.media.MediaPlayer;
30 | import android.media.ToneGenerator;
31 | import android.net.Uri;
32 | import android.os.PowerManager;
33 | import android.os.Build;
34 | import android.os.Handler;
35 | import android.provider.Settings;
36 | import androidx.annotation.Nullable;
37 | import androidx.core.app.ActivityCompat;
38 | import androidx.core.content.ContextCompat;
39 | import android.util.Log;
40 | import android.util.SparseArray;
41 | import android.view.Display;
42 | import android.view.KeyEvent;
43 | import android.view.Window;
44 | import android.view.WindowManager;
45 |
46 | import com.facebook.react.bridge.Arguments;
47 | import com.facebook.react.bridge.LifecycleEventListener;
48 | import com.facebook.react.bridge.NativeModule;
49 | import com.facebook.react.bridge.Promise;
50 | import com.facebook.react.bridge.ReactApplicationContext;
51 | import com.facebook.react.bridge.ReactContext;
52 | import com.facebook.react.bridge.ReactContextBaseJavaModule;
53 | import com.facebook.react.bridge.ReactMethod;
54 | import com.facebook.react.bridge.UiThreadUtil;
55 | import com.facebook.react.bridge.WritableMap;
56 | import com.facebook.react.modules.core.DeviceEventManagerModule;
57 |
58 | import java.lang.Runnable;
59 | import java.io.File;
60 | import java.util.Collections;
61 | import java.util.Map;
62 | import java.util.HashMap;
63 | import java.util.HashSet;
64 | import java.util.Random;
65 | import java.util.Set;
66 |
67 | import com.zxcpoiu.incallmanager.AppRTC.AppRTCBluetoothManager;
68 |
69 | public class InCallManagerModule extends ReactContextBaseJavaModule implements LifecycleEventListener {
70 | private static final String REACT_NATIVE_MODULE_NAME = "InCallManager";
71 | private static final String TAG = REACT_NATIVE_MODULE_NAME;
72 |     private static SparseArray<Promise> mRequestPermissionCodePromises;
73 |     private static SparseArray<String> mRequestPermissionCodeTargetPermission;
74 | private String mPackageName = "com.zxcpoiu.incallmanager";
75 |
76 | // --- Screen Manager
77 | private PowerManager mPowerManager;
78 | private WindowManager.LayoutParams lastLayoutParams;
79 | private WindowManager mWindowManager;
80 |
81 | // --- AudioRouteManager
82 | private AudioManager audioManager;
83 | private boolean audioManagerActivated = false;
84 | private boolean isAudioFocused = false;
85 | private boolean isOrigAudioSetupStored = false;
86 | private boolean origIsSpeakerPhoneOn = false;
87 | private boolean origIsMicrophoneMute = false;
88 | private int origAudioMode = AudioManager.MODE_INVALID;
89 | private boolean defaultSpeakerOn = false;
90 | private int defaultAudioMode = AudioManager.MODE_IN_COMMUNICATION;
91 | private int forceSpeakerOn = 0;
92 | private boolean automatic = true;
93 | private boolean isProximityRegistered = false;
94 | private boolean proximityIsNear = false;
95 | private static final String ACTION_HEADSET_PLUG = (android.os.Build.VERSION.SDK_INT >= 21) ? AudioManager.ACTION_HEADSET_PLUG : Intent.ACTION_HEADSET_PLUG;
96 | private BroadcastReceiver wiredHeadsetReceiver;
97 | private BroadcastReceiver noisyAudioReceiver;
98 | private BroadcastReceiver mediaButtonReceiver;
99 | private OnFocusChangeListener mOnFocusChangeListener;
100 |
101 | // --- same as: RingtoneManager.getActualDefaultRingtoneUri(reactContext, RingtoneManager.TYPE_RINGTONE);
102 | private Uri defaultRingtoneUri = Settings.System.DEFAULT_RINGTONE_URI;
103 | private Uri defaultRingbackUri = Settings.System.DEFAULT_RINGTONE_URI;
104 | private Uri defaultBusytoneUri = Settings.System.DEFAULT_NOTIFICATION_URI;
105 | //private Uri defaultAlarmAlertUri = Settings.System.DEFAULT_ALARM_ALERT_URI; // --- too annoying
106 | private Uri bundleRingtoneUri;
107 | private Uri bundleRingbackUri;
108 | private Uri bundleBusytoneUri;
109 |     private Map<String, Uri> audioUriMap;
110 | private MyPlayerInterface mRingtone;
111 | private MyPlayerInterface mRingback;
112 | private MyPlayerInterface mBusytone;
113 | private Handler mRingtoneCountDownHandler;
114 | private String media = "audio";
115 | private static String recordPermission = "unknow";
116 | private static String cameraPermission = "unknow";
117 |
118 | private static final String SPEAKERPHONE_AUTO = "auto";
119 | private static final String SPEAKERPHONE_TRUE = "true";
120 | private static final String SPEAKERPHONE_FALSE = "false";
121 |
122 | /**
123 |      * AudioDevice enumerates the possible audio devices that we currently
124 | * support.
125 | */
126 | public enum AudioDevice { SPEAKER_PHONE, WIRED_HEADSET, EARPIECE, BLUETOOTH, NONE }
127 |
128 | /** AudioManager state. */
129 | public enum AudioManagerState {
130 | UNINITIALIZED,
131 | PREINITIALIZED,
132 | RUNNING,
133 | }
134 |
135 | private int savedAudioMode = AudioManager.MODE_INVALID;
136 | private boolean savedIsSpeakerPhoneOn = false;
137 | private boolean savedIsMicrophoneMute = false;
138 | private boolean hasWiredHeadset = false;
139 |
140 | // Default audio device; speaker phone for video calls or earpiece for audio
141 | // only calls.
142 | private AudioDevice defaultAudioDevice = AudioDevice.NONE;
143 |
144 | // Contains the currently selected audio device.
145 | // This device is changed automatically using a certain scheme where e.g.
146 | // a wired headset "wins" over speaker phone. It is also possible for a
147 |     // user to explicitly select a device (and override any predefined scheme).
148 | // See |userSelectedAudioDevice| for details.
149 | private AudioDevice selectedAudioDevice;
150 |
151 | // Contains the user-selected audio device which overrides the predefined
152 | // selection scheme.
153 | // TODO(henrika): always set to AudioDevice.NONE today. Add support for
154 | // explicit selection based on choice by userSelectedAudioDevice.
155 | private AudioDevice userSelectedAudioDevice;
156 |
157 | // Contains speakerphone setting: auto, true or false
158 | private final String useSpeakerphone = SPEAKERPHONE_AUTO;
159 |
160 | // Handles all tasks related to Bluetooth headset devices.
161 | private final AppRTCBluetoothManager bluetoothManager;
162 |
163 | private final InCallProximityManager proximityManager;
164 |
165 | private final InCallWakeLockUtils wakeLockUtils;
166 |
167 | // Contains a list of available audio devices. A Set collection is used to
168 | // avoid duplicate elements.
169 | private Set audioDevices = new HashSet<>();
170 |
171 | // Callback method for changes in audio focus.
172 | private AudioManager.OnAudioFocusChangeListener audioFocusChangeListener;
173 |
174 | interface MyPlayerInterface {
175 | public boolean isPlaying();
176 |         public void startPlay(Map<String, Object> data);
177 | public void stopPlay();
178 | }
179 |
180 | @Override
181 | public String getName() {
182 | return REACT_NATIVE_MODULE_NAME;
183 | }
184 |
185 | public InCallManagerModule(ReactApplicationContext reactContext) {
186 | super(reactContext);
187 | mPackageName = reactContext.getPackageName();
188 | reactContext.addLifecycleEventListener(this);
189 | mWindowManager = (WindowManager) reactContext.getSystemService(Context.WINDOW_SERVICE);
190 | mPowerManager = (PowerManager) reactContext.getSystemService(Context.POWER_SERVICE);
191 | audioManager = ((AudioManager) reactContext.getSystemService(Context.AUDIO_SERVICE));
192 |         audioUriMap = new HashMap<String, Uri>();
193 | audioUriMap.put("defaultRingtoneUri", defaultRingtoneUri);
194 | audioUriMap.put("defaultRingbackUri", defaultRingbackUri);
195 | audioUriMap.put("defaultBusytoneUri", defaultBusytoneUri);
196 | audioUriMap.put("bundleRingtoneUri", bundleRingtoneUri);
197 | audioUriMap.put("bundleRingbackUri", bundleRingbackUri);
198 | audioUriMap.put("bundleBusytoneUri", bundleBusytoneUri);
199 |         mRequestPermissionCodePromises = new SparseArray<Promise>();
200 |         mRequestPermissionCodeTargetPermission = new SparseArray<String>();
201 | mOnFocusChangeListener = new OnFocusChangeListener();
202 | bluetoothManager = AppRTCBluetoothManager.create(reactContext, this);
203 | proximityManager = InCallProximityManager.create(reactContext, this);
204 | wakeLockUtils = new InCallWakeLockUtils(reactContext);
205 |
206 | Log.d(TAG, "InCallManager initialized");
207 | }
208 |
209 | private void manualTurnScreenOff() {
210 | Log.d(TAG, "manualTurnScreenOff()");
211 | UiThreadUtil.runOnUiThread(new Runnable() {
212 | public void run() {
213 | Activity mCurrentActivity = getCurrentActivity();
214 | if (mCurrentActivity == null) {
215 | Log.d(TAG, "ReactContext doesn't hava any Activity attached.");
216 | return;
217 | }
218 | Window window = mCurrentActivity.getWindow();
219 | WindowManager.LayoutParams params = window.getAttributes();
220 | lastLayoutParams = params; // --- store last param
221 | params.screenBrightness = WindowManager.LayoutParams.BRIGHTNESS_OVERRIDE_OFF; // --- Dim as dark as possible. see BRIGHTNESS_OVERRIDE_OFF
222 | window.setAttributes(params);
223 | window.clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
224 | }
225 | });
226 | }
227 |
228 | private void manualTurnScreenOn() {
229 | Log.d(TAG, "manualTurnScreenOn()");
230 | UiThreadUtil.runOnUiThread(new Runnable() {
231 | public void run() {
232 | Activity mCurrentActivity = getCurrentActivity();
233 | if (mCurrentActivity == null) {
234 | Log.d(TAG, "ReactContext doesn't hava any Activity attached.");
235 | return;
236 | }
237 | Window window = mCurrentActivity.getWindow();
238 | if (lastLayoutParams != null) {
239 | window.setAttributes(lastLayoutParams);
240 | } else {
241 | WindowManager.LayoutParams params = window.getAttributes();
242 |                     params.screenBrightness = -1; // --- restore to the preferred brightness
243 | window.setAttributes(params);
244 | }
245 | window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
246 | }
247 | });
248 | }
249 |
250 | private void storeOriginalAudioSetup() {
251 | Log.d(TAG, "storeOriginalAudioSetup()");
252 | if (!isOrigAudioSetupStored) {
253 | origAudioMode = audioManager.getMode();
254 | origIsSpeakerPhoneOn = audioManager.isSpeakerphoneOn();
255 | origIsMicrophoneMute = audioManager.isMicrophoneMute();
256 | isOrigAudioSetupStored = true;
257 | }
258 | }
259 |
260 | private void restoreOriginalAudioSetup() {
261 | Log.d(TAG, "restoreOriginalAudioSetup()");
262 | if (isOrigAudioSetupStored) {
263 | setSpeakerphoneOn(origIsSpeakerPhoneOn);
264 | setMicrophoneMute(origIsMicrophoneMute);
265 | audioManager.setMode(origAudioMode);
266 | if (getCurrentActivity() != null) {
267 | getCurrentActivity().setVolumeControlStream(AudioManager.USE_DEFAULT_STREAM_TYPE);
268 | }
269 | isOrigAudioSetupStored = false;
270 | }
271 | }
272 |
273 | private void startWiredHeadsetEvent() {
274 | if (wiredHeadsetReceiver == null) {
275 | Log.d(TAG, "startWiredHeadsetEvent()");
276 | IntentFilter filter = new IntentFilter(ACTION_HEADSET_PLUG);
277 | wiredHeadsetReceiver = new BroadcastReceiver() {
278 | @Override
279 | public void onReceive(Context context, Intent intent) {
280 | if (ACTION_HEADSET_PLUG.equals(intent.getAction())) {
281 | hasWiredHeadset = intent.getIntExtra("state", 0) == 1;
282 | updateAudioRoute();
283 | String deviceName = intent.getStringExtra("name");
284 | if (deviceName == null) {
285 | deviceName = "";
286 | }
287 | WritableMap data = Arguments.createMap();
288 | data.putBoolean("isPlugged", (intent.getIntExtra("state", 0) == 1) ? true : false);
289 | data.putBoolean("hasMic", (intent.getIntExtra("microphone", 0) == 1) ? true : false);
290 | data.putString("deviceName", deviceName);
291 | sendEvent("WiredHeadset", data);
292 | } else {
293 | hasWiredHeadset = false;
294 | }
295 | }
296 | };
297 | ReactContext reactContext = getReactApplicationContext();
298 | if (reactContext != null) {
299 | reactContext.registerReceiver(wiredHeadsetReceiver, filter);
300 | } else {
301 | Log.d(TAG, "startWiredHeadsetEvent() reactContext is null");
302 | }
303 | }
304 | }
305 |
306 | private void stopWiredHeadsetEvent() {
307 | if (wiredHeadsetReceiver != null) {
308 | Log.d(TAG, "stopWiredHeadsetEvent()");
309 | this.unregisterReceiver(this.wiredHeadsetReceiver);
310 | wiredHeadsetReceiver = null;
311 | }
312 | }
313 |
314 | private void startNoisyAudioEvent() {
315 | if (noisyAudioReceiver == null) {
316 | Log.d(TAG, "startNoisyAudioEvent()");
317 | IntentFilter filter = new IntentFilter(AudioManager.ACTION_AUDIO_BECOMING_NOISY);
318 | noisyAudioReceiver = new BroadcastReceiver() {
319 | @Override
320 | public void onReceive(Context context, Intent intent) {
321 | if (AudioManager.ACTION_AUDIO_BECOMING_NOISY.equals(intent.getAction())) {
322 | updateAudioRoute();
323 | sendEvent("NoisyAudio", null);
324 | }
325 | }
326 | };
327 | ReactContext reactContext = getReactApplicationContext();
328 | if (reactContext != null) {
329 | reactContext.registerReceiver(noisyAudioReceiver, filter);
330 | } else {
331 | Log.d(TAG, "startNoisyAudioEvent() reactContext is null");
332 | }
333 | }
334 | }
335 |
336 | private void stopNoisyAudioEvent() {
337 | if (noisyAudioReceiver != null) {
338 | Log.d(TAG, "stopNoisyAudioEvent()");
339 | this.unregisterReceiver(this.noisyAudioReceiver);
340 | noisyAudioReceiver = null;
341 | }
342 | }
343 |
344 | private void startMediaButtonEvent() {
345 | if (mediaButtonReceiver == null) {
346 | Log.d(TAG, "startMediaButtonEvent()");
347 | IntentFilter filter = new IntentFilter(Intent.ACTION_MEDIA_BUTTON);
348 | mediaButtonReceiver = new BroadcastReceiver() {
349 | @Override
350 | public void onReceive(Context context, Intent intent) {
351 | if (Intent.ACTION_MEDIA_BUTTON.equals(intent.getAction())) {
352 | KeyEvent event = (KeyEvent)intent.getParcelableExtra(Intent.EXTRA_KEY_EVENT);
353 | int keyCode = event.getKeyCode();
354 | String keyText = "";
355 | switch (keyCode) {
356 | case KeyEvent.KEYCODE_MEDIA_PLAY:
357 | keyText = "KEYCODE_MEDIA_PLAY";
358 | break;
359 | case KeyEvent.KEYCODE_MEDIA_PAUSE:
360 | keyText = "KEYCODE_MEDIA_PAUSE";
361 | break;
362 | case KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE:
363 | keyText = "KEYCODE_MEDIA_PLAY_PAUSE";
364 | break;
365 | case KeyEvent.KEYCODE_MEDIA_NEXT:
366 | keyText = "KEYCODE_MEDIA_NEXT";
367 | break;
368 | case KeyEvent.KEYCODE_MEDIA_PREVIOUS:
369 | keyText = "KEYCODE_MEDIA_PREVIOUS";
370 | break;
371 | case KeyEvent.KEYCODE_MEDIA_CLOSE:
372 | keyText = "KEYCODE_MEDIA_CLOSE";
373 | break;
374 | case KeyEvent.KEYCODE_MEDIA_EJECT:
375 | keyText = "KEYCODE_MEDIA_EJECT";
376 | break;
377 | case KeyEvent.KEYCODE_MEDIA_RECORD:
378 | keyText = "KEYCODE_MEDIA_RECORD";
379 | break;
380 | case KeyEvent.KEYCODE_MEDIA_STOP:
381 | keyText = "KEYCODE_MEDIA_STOP";
382 | break;
383 | default:
384 | keyText = "KEYCODE_UNKNOW";
385 | break;
386 | }
387 | WritableMap data = Arguments.createMap();
388 | data.putString("eventText", keyText);
389 | data.putInt("eventCode", keyCode);
390 | sendEvent("MediaButton", data);
391 | }
392 | }
393 | };
394 | ReactContext reactContext = getReactApplicationContext();
395 | if (reactContext != null) {
396 | reactContext.registerReceiver(mediaButtonReceiver, filter);
397 | } else {
398 | Log.d(TAG, "startMediaButtonEvent() reactContext is null");
399 | }
400 | }
401 | }
402 |
403 | private void stopMediaButtonEvent() {
404 | if (mediaButtonReceiver != null) {
405 | Log.d(TAG, "stopMediaButtonEvent()");
406 | this.unregisterReceiver(this.mediaButtonReceiver);
407 | mediaButtonReceiver = null;
408 | }
409 | }
410 |
411 | public void onProximitySensorChangedState(boolean isNear) {
412 | if (automatic && getSelectedAudioDevice() == AudioDevice.EARPIECE) {
413 | if (isNear) {
414 | turnScreenOff();
415 | } else {
416 | turnScreenOn();
417 | }
418 | updateAudioRoute();
419 | }
420 | WritableMap data = Arguments.createMap();
421 | data.putBoolean("isNear", isNear);
422 | sendEvent("Proximity", data);
423 | }
424 |
425 |
426 | private void startProximitySensor() {
427 | if (!proximityManager.isProximitySupported()) {
428 | Log.d(TAG, "Proximity Sensor is not supported.");
429 | return;
430 | }
431 | if (isProximityRegistered) {
432 | Log.d(TAG, "Proximity Sensor is already registered.");
433 | return;
434 | }
435 |         // --- SENSOR_DELAY_FASTEST (0 ms), SENSOR_DELAY_GAME (20 ms), SENSOR_DELAY_UI (60 ms), SENSOR_DELAY_NORMAL (200 ms)
436 | if (!proximityManager.start()) {
437 | Log.d(TAG, "proximityManager.start() failed. return false");
438 | return;
439 | }
440 | Log.d(TAG, "startProximitySensor()");
441 | isProximityRegistered = true;
442 | }
443 |
444 | private void stopProximitySensor() {
445 | if (!proximityManager.isProximitySupported()) {
446 | Log.d(TAG, "Proximity Sensor is not supported.");
447 | return;
448 | }
449 | if (!isProximityRegistered) {
450 | Log.d(TAG, "Proximity Sensor is not registered.");
451 | return;
452 | }
453 | Log.d(TAG, "stopProximitySensor()");
454 | proximityManager.stop();
455 | isProximityRegistered = false;
456 | }
457 |
458 | private class OnFocusChangeListener implements AudioManager.OnAudioFocusChangeListener {
459 |
460 | @Override
461 | public void onAudioFocusChange(final int focusChange) {
462 | String focusChangeStr;
463 | switch (focusChange) {
464 | case AudioManager.AUDIOFOCUS_GAIN:
465 | focusChangeStr = "AUDIOFOCUS_GAIN";
466 | break;
467 | case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT:
468 | focusChangeStr = "AUDIOFOCUS_GAIN_TRANSIENT";
469 | break;
470 | case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE:
471 | focusChangeStr = "AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE";
472 | break;
473 | case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK:
474 | focusChangeStr = "AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK";
475 | break;
476 | case AudioManager.AUDIOFOCUS_LOSS:
477 | focusChangeStr = "AUDIOFOCUS_LOSS";
478 | break;
479 | case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
480 | focusChangeStr = "AUDIOFOCUS_LOSS_TRANSIENT";
481 | break;
482 | case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
483 | focusChangeStr = "AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK";
484 | break;
485 | default:
486 | focusChangeStr = "AUDIOFOCUS_UNKNOW";
487 | break;
488 | }
489 |
490 | Log.d(TAG, "onAudioFocusChange: " + focusChange + " - " + focusChangeStr);
491 |
492 | WritableMap data = Arguments.createMap();
493 | data.putString("eventText", focusChangeStr);
494 | data.putInt("eventCode", focusChange);
495 | sendEvent("onAudioFocusChange", data);
496 | }
497 | }
498 |
499 | /*
500 | // --- TODO: AudioDeviceCallBack android sdk 23+
501 | if (android.os.Build.VERSION.SDK_INT >= 23) {
502 | private class MyAudioDeviceCallback extends AudioDeviceCallback {
503 | public void onAudioDevicesAdded(AudioDeviceInfo[] addedDevices) {
504 | mAddCallbackCalled = true;
505 | }
506 | public void onAudioDevicesRemoved(AudioDeviceInfo[] removedDevices) {
507 | mRemoveCallbackCalled = true;
508 | }
509 | }
510 |
511 | // --- Specifies the Handler object for the thread on which to execute the callback. If null, the Handler associated with the main Looper will be used.
512 | public void test_deviceCallback() {
513 | AudioDeviceCallback callback = new EmptyDeviceCallback();
514 | mAudioManager.registerAudioDeviceCallback(callback, null);
515 | }
516 |
517 | // --- get all audio devices by flags
518 | //public AudioDeviceInfo[] getDevices (int flags)
519 | //Returns an array of AudioDeviceInfo objects corresponding to the audio devices currently connected to the system and meeting the criteria specified in the flags parameter.
520 | //flags int: A set of bitflags specifying the criteria to test.
521 | }
522 |
523 |     // --- TODO: adjust volume if needed.
524 | if (android.os.Build.VERSION.SDK_INT >= 21) {
525 | isVolumeFixed ()
526 |
527 | // The following APIs have no effect when volume is fixed:
528 | adjustVolume(int, int)
529 | adjustSuggestedStreamVolume(int, int, int)
530 | adjustStreamVolume(int, int, int)
531 | setStreamVolume(int, int, int)
532 | setRingerMode(int)
533 | setStreamSolo(int, boolean)
534 | setStreamMute(int, boolean)
535 | }
536 |
537 | // -- TODO: bluetooth support
538 | */
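
    // --- Editor's sketch (hedged, not part of the module): one possible shape of the
    // --- AudioDeviceCallback idea in the TODO block above, assuming API 23+. The callback
    // --- field name, reacting via updateAudioRoute(), and registering in start() / stop()
    // --- are illustrative assumptions only; it would also need `import android.media.AudioDeviceCallback;`.
    /*
    private final AudioDeviceCallback myAudioDeviceCallback = new AudioDeviceCallback() {
        @Override
        public void onAudioDevicesAdded(AudioDeviceInfo[] addedDevices) {
            updateAudioRoute(); // --- re-evaluate routing when e.g. a headset appears
        }
        @Override
        public void onAudioDevicesRemoved(AudioDeviceInfo[] removedDevices) {
            updateAudioRoute();
        }
    };

    // --- registration, e.g. in start(); a null Handler delivers callbacks on the main Looper.
    if (android.os.Build.VERSION.SDK_INT >= 23) {
        audioManager.registerAudioDeviceCallback(myAudioDeviceCallback, null);
    }
    // --- and symmetrically in stop():
    // audioManager.unregisterAudioDeviceCallback(myAudioDeviceCallback);
    */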
539 |
540 | private void sendEvent(final String eventName, @Nullable WritableMap params) {
541 | try {
542 | ReactContext reactContext = getReactApplicationContext();
543 | if (reactContext != null && reactContext.hasActiveCatalystInstance()) {
544 | reactContext
545 | .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class)
546 | .emit(eventName, params);
547 | } else {
548 | Log.e(TAG, "sendEvent(): reactContext is null or not having CatalystInstance yet.");
549 | }
550 | } catch (RuntimeException e) {
551 | Log.e(TAG, "sendEvent(): java.lang.RuntimeException: Trying to invoke JS before CatalystInstance has been set!");
552 | }
553 | }
554 |
555 | @ReactMethod
556 | public void start(final String _media, final boolean auto, final String ringbackUriType) {
557 | media = _media;
558 | if (media.equals("video")) {
559 | defaultSpeakerOn = true;
560 | } else {
561 | defaultSpeakerOn = false;
562 | }
563 | automatic = auto;
564 | if (!audioManagerActivated) {
565 | audioManagerActivated = true;
566 |
567 | Log.d(TAG, "start audioRouteManager");
568 | wakeLockUtils.acquirePartialWakeLock();
569 | if (mRingtone != null && mRingtone.isPlaying()) {
570 | Log.d(TAG, "stop ringtone");
571 |                 stopRingtone(); // --- use a brand-new instance
572 | }
573 | storeOriginalAudioSetup();
574 | requestAudioFocus();
575 | startEvents();
576 | bluetoothManager.start();
577 |             // TODO: even if audio focus was not acquired, we can still play sounds; need to figure out which behavior is better.
578 | //getCurrentActivity().setVolumeControlStream(AudioManager.STREAM_VOICE_CALL);
579 | audioManager.setMode(defaultAudioMode);
580 | setSpeakerphoneOn(defaultSpeakerOn);
581 | setMicrophoneMute(false);
582 | forceSpeakerOn = 0;
583 | hasWiredHeadset = hasWiredHeadset();
584 | defaultAudioDevice = (defaultSpeakerOn) ? AudioDevice.SPEAKER_PHONE : (hasEarpiece()) ? AudioDevice.EARPIECE : AudioDevice.SPEAKER_PHONE;
585 | userSelectedAudioDevice = AudioDevice.NONE;
586 | selectedAudioDevice = AudioDevice.NONE;
587 | audioDevices.clear();
588 | updateAudioRoute();
589 |
590 | if (!ringbackUriType.isEmpty()) {
591 | startRingback(ringbackUriType);
592 | }
593 | }
594 | }
595 |
596 | public void stop() {
597 | stop("");
598 | }
599 |
600 | @ReactMethod
601 | public void stop(final String busytoneUriType) {
602 | if (audioManagerActivated) {
603 | stopRingback();
604 | if (!busytoneUriType.isEmpty() && startBusytone(busytoneUriType)) {
605 | // play busytone first, and call this func again when finish
606 | Log.d(TAG, "play busytone before stop InCallManager");
607 | return;
608 | } else {
609 | Log.d(TAG, "stop() InCallManager");
610 | stopBusytone();
611 | stopEvents();
612 | setSpeakerphoneOn(false);
613 | setMicrophoneMute(false);
614 | forceSpeakerOn = 0;
615 | bluetoothManager.stop();
616 | restoreOriginalAudioSetup();
617 | releaseAudioFocus();
618 | audioManagerActivated = false;
619 | }
620 | wakeLockUtils.releasePartialWakeLock();
621 | }
622 | }
623 |
624 | private void startEvents() {
625 | startWiredHeadsetEvent();
626 | startNoisyAudioEvent();
627 | startMediaButtonEvent();
628 | startProximitySensor(); // --- proximity event always enable, but only turn screen off when audio is routing to earpiece.
629 | setKeepScreenOn(true);
630 | }
631 |
632 | private void stopEvents() {
633 | stopWiredHeadsetEvent();
634 | stopNoisyAudioEvent();
635 | stopMediaButtonEvent();
636 | stopProximitySensor();
637 | setKeepScreenOn(false);
638 | turnScreenOn();
639 | }
640 |
641 | private void requestAudioFocus() {
642 | if (!isAudioFocused) {
643 | int result = audioManager.requestAudioFocus(mOnFocusChangeListener, AudioManager.STREAM_VOICE_CALL, AudioManager.AUDIOFOCUS_GAIN);
644 | if (result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
645 | Log.d(TAG, "AudioFocus granted");
646 | isAudioFocused = true;
647 | } else if (result == AudioManager.AUDIOFOCUS_REQUEST_FAILED) {
648 | Log.d(TAG, "AudioFocus failed");
649 | isAudioFocused = false;
650 | }
651 | }
652 | }
653 |
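    // --- Editor's sketch (hedged, not part of the module): the stream-based
    // --- requestAudioFocus() overload used above is deprecated on API 26+; an equivalent
    // --- request could look like the following. The local name `focusRequest` and the
    // --- chosen attributes are illustrative assumptions only, and it would need
    // --- `import android.media.AudioAttributes;` and `import android.media.AudioFocusRequest;`.
    /*
    if (android.os.Build.VERSION.SDK_INT >= 26) {
        AudioAttributes attrs = new AudioAttributes.Builder()
                .setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION)
                .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
                .build();
        AudioFocusRequest focusRequest = new AudioFocusRequest.Builder(AudioManager.AUDIOFOCUS_GAIN)
                .setAudioAttributes(attrs)
                .setOnAudioFocusChangeListener(mOnFocusChangeListener)
                .build();
        isAudioFocused = audioManager.requestAudioFocus(focusRequest) == AudioManager.AUDIOFOCUS_REQUEST_GRANTED;
        // --- keep focusRequest around: abandonAudioFocusRequest(focusRequest) releases it later.
    }
    */
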
654 | private void releaseAudioFocus() {
655 | if (isAudioFocused) {
656 | audioManager.abandonAudioFocus(null);
657 | isAudioFocused = false;
658 | }
659 | }
660 |
661 | @ReactMethod
662 | public void pokeScreen(int timeout) {
663 | Log.d(TAG, "pokeScreen()");
664 | wakeLockUtils.acquirePokeFullWakeLockReleaseAfter(timeout); // --- default 3000 ms
665 | }
666 |
667 | private void debugScreenPowerState() {
668 | String isDeviceIdleMode = "unknow"; // --- API 23
669 | String isIgnoringBatteryOptimizations = "unknow"; // --- API 23
670 | String isPowerSaveMode = "unknow"; // --- API 21
671 | String isInteractive = "unknow"; // --- API 20 ( before since API 7 is: isScreenOn())
672 | String screenState = "unknow"; // --- API 20
673 |
674 | if (android.os.Build.VERSION.SDK_INT >= 23) {
675 | isDeviceIdleMode = String.format("%s", mPowerManager.isDeviceIdleMode());
676 | isIgnoringBatteryOptimizations = String.format("%s", mPowerManager.isIgnoringBatteryOptimizations(mPackageName));
677 | }
678 | if (android.os.Build.VERSION.SDK_INT >= 21) {
679 | isPowerSaveMode = String.format("%s", mPowerManager.isPowerSaveMode());
680 | }
681 | if (android.os.Build.VERSION.SDK_INT >= 20) {
682 | isInteractive = String.format("%s", mPowerManager.isInteractive());
683 | Display display = mWindowManager.getDefaultDisplay();
684 | switch (display.getState()) {
685 | case Display.STATE_OFF:
686 | screenState = "STATE_OFF";
687 | break;
688 | case Display.STATE_ON:
689 | screenState = "STATE_ON";
690 | break;
691 | case Display.STATE_DOZE:
692 | screenState = "STATE_DOZE";
693 | break;
694 | case Display.STATE_DOZE_SUSPEND:
695 | screenState = "STATE_DOZE_SUSPEND";
696 | break;
697 | default:
698 | break;
699 | }
700 | } else {
701 | isInteractive = String.format("%s", mPowerManager.isScreenOn());
702 | }
703 | Log.d(TAG, String.format("debugScreenPowerState(): screenState='%s', isInteractive='%s', isPowerSaveMode='%s', isDeviceIdleMode='%s', isIgnoringBatteryOptimizations='%s'", screenState, isInteractive, isPowerSaveMode, isDeviceIdleMode, isIgnoringBatteryOptimizations));
704 | }
705 |
706 | @ReactMethod
707 | public void turnScreenOn() {
708 | if (proximityManager.isProximityWakeLockSupported()) {
709 | Log.d(TAG, "turnScreenOn(): use proximity lock.");
710 | proximityManager.releaseProximityWakeLock(true);
711 | } else {
712 | Log.d(TAG, "turnScreenOn(): proximity lock is not supported. try manually.");
713 | manualTurnScreenOn();
714 | }
715 | }
716 |
717 | @ReactMethod
718 | public void turnScreenOff() {
719 | if (proximityManager.isProximityWakeLockSupported()) {
720 | Log.d(TAG, "turnScreenOff(): use proximity lock.");
721 | proximityManager.acquireProximityWakeLock();
722 | } else {
723 | Log.d(TAG, "turnScreenOff(): proximity lock is not supported. try manually.");
724 | manualTurnScreenOff();
725 | }
726 | }
727 |
728 | @ReactMethod
729 | public void setKeepScreenOn(final boolean enable) {
730 | Log.d(TAG, "setKeepScreenOn() " + enable);
731 | UiThreadUtil.runOnUiThread(new Runnable() {
732 | public void run() {
733 | Activity mCurrentActivity = getCurrentActivity();
734 | if (mCurrentActivity == null) {
735 | Log.d(TAG, "ReactContext doesn't hava any Activity attached.");
736 | return;
737 | }
738 | Window window = mCurrentActivity.getWindow();
739 | if (enable) {
740 | window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
741 | } else {
742 | window.clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
743 | }
744 | }
745 | });
746 | }
747 |
748 | @ReactMethod
749 | public void setSpeakerphoneOn(final boolean enable) {
750 | if (enable != audioManager.isSpeakerphoneOn()) {
751 | Log.d(TAG, "setSpeakerphoneOn(): " + enable);
752 | audioManager.setSpeakerphoneOn(enable);
753 | }
754 | }
755 |
756 |     // --- TODO (zxcpoiu): These two API names are really confusing and should be changed.
757 | /**
758 | * flag: Int
759 | * 0: use default action
760 | * 1: force speaker on
761 | * -1: force speaker off
762 | */
763 | @ReactMethod
764 | public void setForceSpeakerphoneOn(final int flag) {
765 | if (flag < -1 || flag > 1) {
766 | return;
767 | }
768 | Log.d(TAG, "setForceSpeakerphoneOn() flag: " + flag);
769 | forceSpeakerOn = flag;
770 |
771 | // --- will call updateAudioDeviceState()
772 |         // --- Note: some devices may not have the specified route available and thus will not be affected.
773 | if (flag == 1) {
774 | selectAudioDevice(AudioDevice.SPEAKER_PHONE);
775 | } else if (flag == -1) {
776 | selectAudioDevice(AudioDevice.EARPIECE); // --- use the most common earpiece to force `speaker off`
777 | } else {
778 | selectAudioDevice(AudioDevice.NONE); // --- NONE will follow default route, the default route of `video` call is speaker.
779 | }
780 | }
781 |
782 | // --- TODO (zxcpoiu): Implement api to let user choose audio devices
783 |
784 | @ReactMethod
785 | public void setMicrophoneMute(final boolean enable) {
786 | if (enable != audioManager.isMicrophoneMute()) {
787 | Log.d(TAG, "setMicrophoneMute(): " + enable);
788 | audioManager.setMicrophoneMute(enable);
789 | }
790 | }
791 |
792 | /**
793 | * This is part of start() process.
794 |      * ringbackUriType must not be empty; an empty value means do not play.
795 | */
796 | @ReactMethod
797 | public void startRingback(final String ringbackUriType) {
798 | if (ringbackUriType.isEmpty()) {
799 | return;
800 | }
801 | try {
802 | Log.d(TAG, "startRingback(): UriType=" + ringbackUriType);
803 | if (mRingback != null) {
804 | if (mRingback.isPlaying()) {
805 | Log.d(TAG, "startRingback(): is already playing");
806 | return;
807 | } else {
808 |                     stopRingback(); // --- use a brand-new instance
809 | }
810 | }
811 |
812 | Uri ringbackUri;
813 |             Map<String, Object> data = new HashMap<String, Object>();
814 | data.put("name", "mRingback");
815 | if (ringbackUriType.equals("_DTMF_")) {
816 | mRingback = new myToneGenerator(myToneGenerator.RINGBACK);
817 | mRingback.startPlay(data);
818 | return;
819 | } else {
820 | ringbackUri = getRingbackUri(ringbackUriType);
821 | if (ringbackUri == null) {
822 | Log.d(TAG, "startRingback(): no available media");
823 | return;
824 | }
825 | }
826 |
827 | mRingback = new myMediaPlayer();
828 | data.put("sourceUri", ringbackUri);
829 | data.put("setLooping", true);
830 | data.put("audioStream", AudioManager.STREAM_VOICE_CALL);
831 | /*
832 | TODO: for API 21
833 | data.put("audioFlag", AudioAttributes.FLAG_AUDIBILITY_ENFORCED);
834 | data.put("audioUsage", AudioAttributes.USAGE_VOICE_COMMUNICATION); // USAGE_VOICE_COMMUNICATION_SIGNALLING ?
835 | data.put("audioContentType", AudioAttributes.CONTENT_TYPE_SPEECH); // CONTENT_TYPE_MUSIC ?
836 | */
837 | setMediaPlayerEvents((MediaPlayer)mRingback, "mRingback");
838 | mRingback.startPlay(data);
839 | } catch(Exception e) {
840 | Log.d(TAG, "startRingback() failed");
841 | }
842 | }
843 |
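    // --- Editor's sketch (hedged, not part of the module): the "TODO: for API 21"
    // --- notes inside startRingback()/startBusytone() suggest replacing the stream type
    // --- with AudioAttributes. A minimal version might look like this; `player` and the
    // --- chosen usage/content type are illustrative assumptions only (needs
    // --- `import android.media.AudioAttributes;`).
    /*
    if (android.os.Build.VERSION.SDK_INT >= 21) {
        AudioAttributes attrs = new AudioAttributes.Builder()
                .setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION)
                .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
                .build();
        player.setAudioAttributes(attrs); // --- instead of setAudioStreamType(AudioManager.STREAM_VOICE_CALL)
    } else {
        player.setAudioStreamType(AudioManager.STREAM_VOICE_CALL);
    }
    */
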
844 | @ReactMethod
845 | public void stopRingback() {
846 | try {
847 | if (mRingback != null) {
848 | mRingback.stopPlay();
849 | mRingback = null;
850 | }
851 | } catch(Exception e) {
852 | Log.d(TAG, "stopRingback() failed");
853 | }
854 | }
855 |
856 | /**
857 | * This is part of start() process.
858 |      * busytoneUriType must not be empty; an empty value means do not play.
859 |      * Returns false to indicate that playing the tone failed and stop() should be invoked immediately;
860 |      * otherwise, stop() will be invoked after the tone completes.
861 | */
862 | public boolean startBusytone(final String busytoneUriType) {
863 | if (busytoneUriType.isEmpty()) {
864 | return false;
865 | }
866 | try {
867 | Log.d(TAG, "startBusytone(): UriType=" + busytoneUriType);
868 | if (mBusytone != null) {
869 | if (mBusytone.isPlaying()) {
870 | Log.d(TAG, "startBusytone(): is already playing");
871 | return false;
872 | } else {
873 |                     stopBusytone(); // --- use a brand-new instance
874 | }
875 | }
876 |
877 | Uri busytoneUri;
878 |             Map<String, Object> data = new HashMap<String, Object>();
879 | data.put("name", "mBusytone");
880 | if (busytoneUriType.equals("_DTMF_")) {
881 | mBusytone = new myToneGenerator(myToneGenerator.BUSY);
882 | mBusytone.startPlay(data);
883 | return true;
884 | } else {
885 | busytoneUri = getBusytoneUri(busytoneUriType);
886 | if (busytoneUri == null) {
887 | Log.d(TAG, "startBusytone(): no available media");
888 | return false;
889 | }
890 | }
891 |
892 | mBusytone = new myMediaPlayer();
893 | data.put("sourceUri", busytoneUri);
894 | data.put("setLooping", false);
895 | data.put("audioStream", AudioManager.STREAM_VOICE_CALL);
896 | /*
897 | TODO: for API 21
898 | data.put("audioFlag", AudioAttributes.FLAG_AUDIBILITY_ENFORCED);
899 | data.put("audioUsage", AudioAttributes.USAGE_VOICE_COMMUNICATION_SIGNALLING); // USAGE_VOICE_COMMUNICATION ?
900 | data.put("audioContentType", AudioAttributes.CONTENT_TYPE_SPEECH);
901 | */
902 | setMediaPlayerEvents((MediaPlayer)mBusytone, "mBusytone");
903 | mBusytone.startPlay(data);
904 | return true;
905 | } catch(Exception e) {
906 | Log.d(TAG, "startBusytone() failed");
907 | Log.d(TAG, e.getMessage());
908 | return false;
909 | }
910 | }
911 |
912 | public void stopBusytone() {
913 | try {
914 | if (mBusytone != null) {
915 | mBusytone.stopPlay();
916 | mBusytone = null;
917 | }
918 | } catch(Exception e) {
919 | Log.d(TAG, "stopBusytone() failed");
920 | }
921 | }
922 |
923 | @ReactMethod
924 | public void startRingtone(final String ringtoneUriType, final int seconds) {
925 | Thread thread = new Thread() {
926 | @Override
927 | public void run() {
928 | try {
929 | Log.d(TAG, "startRingtone(): UriType=" + ringtoneUriType);
930 | if (mRingtone != null) {
931 | if (mRingtone.isPlaying()) {
932 | Log.d(TAG, "startRingtone(): is already playing");
933 | return;
934 | } else {
935 |                             stopRingtone(); // --- use a brand-new instance
936 | }
937 | }
938 |
939 | //if (!audioManager.isStreamMute(AudioManager.STREAM_RING)) {
940 | //if (origRingerMode == AudioManager.RINGER_MODE_NORMAL) {
941 | if (audioManager.getStreamVolume(AudioManager.STREAM_RING) == 0) {
942 | Log.d(TAG, "startRingtone(): ringer is silent. leave without play.");
943 | return;
944 | }
945 |
946 | // --- there is no _DTMF_ option in startRingtone()
947 | Uri ringtoneUri = getRingtoneUri(ringtoneUriType);
948 | if (ringtoneUri == null) {
949 | Log.d(TAG, "startRingtone(): no available media");
950 | return;
951 | }
952 |
953 | if (audioManagerActivated) {
954 | InCallManagerModule.this.stop();
955 | }
956 |
957 | wakeLockUtils.acquirePartialWakeLock();
958 |
959 | storeOriginalAudioSetup();
960 |                     Map<String, Object> data = new HashMap<String, Object>();
961 | mRingtone = new myMediaPlayer();
962 | data.put("name", "mRingtone");
963 | data.put("sourceUri", ringtoneUri);
964 | data.put("setLooping", true);
965 | data.put("audioStream", AudioManager.STREAM_RING);
966 | /*
967 | TODO: for API 21
968 | data.put("audioFlag", 0);
969 | data.put("audioUsage", AudioAttributes.USAGE_NOTIFICATION_RINGTONE); // USAGE_NOTIFICATION_COMMUNICATION_REQUEST ?
970 | data.put("audioContentType", AudioAttributes.CONTENT_TYPE_MUSIC);
971 | */
972 | setMediaPlayerEvents((MediaPlayer) mRingtone, "mRingtone");
973 | mRingtone.startPlay(data);
974 |
975 | if (seconds > 0) {
976 | mRingtoneCountDownHandler = new Handler();
977 | mRingtoneCountDownHandler.postDelayed(new Runnable() {
978 | public void run() {
979 | try {
980 | Log.d(TAG, String.format("mRingtoneCountDownHandler.stopRingtone() timeout after %d seconds", seconds));
981 | stopRingtone();
982 | } catch(Exception e) {
983 | Log.d(TAG, "mRingtoneCountDownHandler.stopRingtone() failed.");
984 | }
985 | }
986 | }, seconds * 1000);
987 | }
988 | } catch(Exception e) {
989 | wakeLockUtils.releasePartialWakeLock();
990 | Log.d(TAG, "startRingtone() failed");
991 | }
992 | }
993 | };
994 |
995 | thread.start();
996 | }
997 |
998 | @ReactMethod
999 | public void stopRingtone() {
1000 | Thread thread = new Thread() {
1001 | @Override
1002 | public void run() {
1003 | try {
1004 | if (mRingtone != null) {
1005 | mRingtone.stopPlay();
1006 | mRingtone = null;
1007 | restoreOriginalAudioSetup();
1008 | }
1009 | if (mRingtoneCountDownHandler != null) {
1010 | mRingtoneCountDownHandler.removeCallbacksAndMessages(null);
1011 | mRingtoneCountDownHandler = null;
1012 | }
1013 | } catch (Exception e) {
1014 | Log.d(TAG, "stopRingtone() failed");
1015 | }
1016 | wakeLockUtils.releasePartialWakeLock();
1017 | }
1018 | };
1019 |
1020 | thread.start();
1021 | }
1022 |
1023 | private void setMediaPlayerEvents(MediaPlayer mp, final String name) {
1024 |
1025 | mp.setOnErrorListener(new MediaPlayer.OnErrorListener() {
1026 | //http://developer.android.com/reference/android/media/MediaPlayer.OnErrorListener.html
1027 | @Override
1028 | public boolean onError(MediaPlayer mp, int what, int extra) {
1029 | Log.d(TAG, String.format("MediaPlayer %s onError(). what: %d, extra: %d", name, what, extra));
1030 | //return True if the method handled the error
1031 |                 //return False, or not having an OnErrorListener at all, will cause the OnCompletionListener to be called.
1032 | return true;
1033 | }
1034 | });
1035 |
1036 | mp.setOnInfoListener(new MediaPlayer.OnInfoListener() {
1037 | //http://developer.android.com/reference/android/media/MediaPlayer.OnInfoListener.html
1038 | @Override
1039 | public boolean onInfo(MediaPlayer mp, int what, int extra) {
1040 | Log.d(TAG, String.format("MediaPlayer %s onInfo(). what: %d, extra: %d", name, what, extra));
1041 | //return True if the method handled the info
1042 | //return False, or not having an OnInfoListener at all, will cause the info to be discarded.
1043 | return true;
1044 | }
1045 | });
1046 |
1047 | mp.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
1048 | @Override
1049 | public void onPrepared(MediaPlayer mp) {
1050 | Log.d(TAG, String.format("MediaPlayer %s onPrepared(), start play, isSpeakerPhoneOn %b", name, audioManager.isSpeakerphoneOn()));
1051 | if (name.equals("mBusytone")) {
1052 | audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
1053 | } else if (name.equals("mRingback")) {
1054 | audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
1055 | } else if (name.equals("mRingtone")) {
1056 | audioManager.setMode(AudioManager.MODE_RINGTONE);
1057 | }
1058 | updateAudioRoute();
1059 | mp.start();
1060 | }
1061 | });
1062 |
1063 | mp.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
1064 | @Override
1065 | public void onCompletion(MediaPlayer mp) {
1066 | Log.d(TAG, String.format("MediaPlayer %s onCompletion()", name));
1067 | if (name.equals("mBusytone")) {
1068 | Log.d(TAG, "MyMediaPlayer(): invoke stop()");
1069 | stop();
1070 | }
1071 | }
1072 | });
1073 |
1074 | }
1075 |
1076 |
1077 | // ===== File Uri Start =====
1078 | @ReactMethod
1079 | public void getAudioUriJS(String audioType, String fileType, Promise promise) {
1080 | Uri result = null;
1081 | if (audioType.equals("ringback")) {
1082 | result = getRingbackUri(fileType);
1083 | } else if (audioType.equals("busytone")) {
1084 | result = getBusytoneUri(fileType);
1085 | } else if (audioType.equals("ringtone")) {
1086 | result = getRingtoneUri(fileType);
1087 | }
1088 | try {
1089 | if (result != null) {
1090 | promise.resolve(result.toString());
1091 | } else {
1092 | promise.reject("failed");
1093 | }
1094 | } catch (Exception e) {
1095 | promise.reject("failed");
1096 | }
1097 | }
1098 |
1099 | private Uri getRingtoneUri(final String _type) {
1100 | final String fileBundle = "incallmanager_ringtone";
1101 | final String fileBundleExt = "mp3";
1102 | final String fileSysWithExt = "media_volume.ogg";
1103 | final String fileSysPath = "/system/media/audio/ui"; // --- every devices all ships with different in ringtone. maybe ui sounds are more "stock"
1104 | String type;
1105 | // --- _type MAY be empty
1106 | if (_type.equals("_DEFAULT_") || _type.isEmpty()) {
1107 | //type = fileSysWithExt;
1108 | return getDefaultUserUri("defaultRingtoneUri");
1109 | } else {
1110 | type = _type;
1111 | }
1112 | return getAudioUri(type, fileBundle, fileBundleExt, fileSysWithExt, fileSysPath, "bundleRingtoneUri", "defaultRingtoneUri");
1113 | }
1114 |
1115 | private Uri getRingbackUri(final String _type) {
1116 | final String fileBundle = "incallmanager_ringback";
1117 | final String fileBundleExt = "mp3";
1118 | final String fileSysWithExt = "media_volume.ogg";
1119 | final String fileSysPath = "/system/media/audio/ui"; // --- every devices all ships with different in ringtone. maybe ui sounds are more "stock"
1120 | String type;
1121 | // --- _type would never be empty here. just in case.
1122 | if (_type.equals("_DEFAULT_") || _type.isEmpty()) {
1123 | //type = fileSysWithExt;
1124 | return getDefaultUserUri("defaultRingbackUri");
1125 | } else {
1126 | type = _type;
1127 | }
1128 | return getAudioUri(type, fileBundle, fileBundleExt, fileSysWithExt, fileSysPath, "bundleRingbackUri", "defaultRingbackUri");
1129 | }
1130 |
1131 | private Uri getBusytoneUri(final String _type) {
1132 | final String fileBundle = "incallmanager_busytone";
1133 | final String fileBundleExt = "mp3";
1134 | final String fileSysWithExt = "LowBattery.ogg";
1135 | final String fileSysPath = "/system/media/audio/ui"; // --- every device ships with different ringtones; the ui sounds are probably more "stock"
1136 | String type;
1137 | // --- _type should never be empty here; check just in case.
1138 | if (_type.equals("_DEFAULT_") || _type.isEmpty()) {
1139 | //type = fileSysWithExt; // ---
1140 | return getDefaultUserUri("defaultBusytoneUri");
1141 | } else {
1142 | type = _type;
1143 | }
1144 | return getAudioUri(type, fileBundle, fileBundleExt, fileSysWithExt, fileSysPath, "bundleBusytoneUri", "defaultBusytoneUri");
1145 | }
1146 |
1147 | private Uri getAudioUri(final String _type, final String fileBundle, final String fileBundleExt, final String fileSysWithExt, final String fileSysPath, final String uriBundle, final String uriDefault) {
1148 | String type = _type;
1149 | if (type.equals("_BUNDLE_")) {
1150 | if (audioUriMap.get(uriBundle) == null) {
1151 | int res = 0;
1152 | ReactContext reactContext = getReactApplicationContext();
1153 | if (reactContext != null) {
1154 | res = reactContext.getResources().getIdentifier(fileBundle, "raw", mPackageName);
1155 | } else {
1156 | Log.d(TAG, "getAudioUri() reactContext is null");
1157 | }
1158 | if (res <= 0) {
1159 | Log.d(TAG, String.format("getAudioUri() %s.%s not found in bundle.", fileBundle, fileBundleExt));
1160 | audioUriMap.put(uriBundle, null);
1161 | //type = fileSysWithExt;
1162 | return getDefaultUserUri(uriDefault); // --- if bundle was specified but not found, use the default directly
1163 | } else {
1164 | audioUriMap.put(uriBundle, Uri.parse("android.resource://" + mPackageName + "/" + Integer.toString(res)));
1165 | //bundleRingtoneUri = Uri.parse("android.resource://" + reactContext.getPackageName() + "/" + R.raw.incallmanager_ringtone);
1166 | //bundleRingtoneUri = Uri.parse("android.resource://" + reactContext.getPackageName() + "/raw/incallmanager_ringtone");
1167 | Log.d(TAG, "getAudioUri() using: " + type);
1168 | return audioUriMap.get(uriBundle);
1169 | }
1170 | } else {
1171 | Log.d(TAG, "getAudioUri() using: " + type);
1172 | return audioUriMap.get(uriBundle);
1173 | }
1174 | }
1175 |
1176 | // --- Check file every time in case user deleted.
1177 | final String target = fileSysPath + "/" + type;
1178 | Uri _uri = getSysFileUri(target);
1179 | if (_uri == null) {
1180 | Log.d(TAG, "getAudioUri() using user default");
1181 | return getDefaultUserUri(uriDefault);
1182 | } else {
1183 | Log.d(TAG, "getAudioUri() using internal: " + target);
1184 | audioUriMap.put(uriDefault, _uri);
1185 | return _uri;
1186 | }
1187 | }
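// --- Resolution order illustrated for the "_BUNDLE_" case (a sketch of what the
// --- code above does, using the ringtone names as an example):
// ---   1. look up res/raw/incallmanager_ringtone in the app bundle and cache
// ---      android.resource://<package>/<resId> under "bundleRingtoneUri"
// ---   2. if the resource is missing, fall back to getDefaultUserUri(), which
// ---      returns Settings.System.DEFAULT_RINGTONE_URI for ringtone/ringback
// --- Any other type is treated as a file name under /system/media/audio/ui,
// --- with the same fallback when the file does not exist.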
1188 |
1189 | private Uri getSysFileUri(final String target) {
1190 | File file = new File(target);
1191 | if (file.isFile()) {
1192 | return Uri.fromFile(file);
1193 | }
1194 | return null;
1195 | }
1196 |
1197 | private Uri getDefaultUserUri(final String type) {
1198 | // --- except for ringtone, execution is not supposed to reach here; unlike iOS, every Android device ships with different sound files
1199 | if (type.equals("defaultRingtoneUri")) {
1200 | return Settings.System.DEFAULT_RINGTONE_URI;
1201 | } else if (type.equals("defaultRingbackUri")) {
1202 | return Settings.System.DEFAULT_RINGTONE_URI;
1203 | } else if (type.equals("defaultBusytoneUri")) {
1204 | return Settings.System.DEFAULT_NOTIFICATION_URI; // --- DEFAULT_ALARM_ALERT_URI
1205 | } else {
1206 | return Settings.System.DEFAULT_NOTIFICATION_URI;
1207 | }
1208 | }
1209 | // ===== File Uri End =====
1210 |
1211 |
1212 | // ===== Internal Classes Start =====
1213 | private class myToneGenerator extends Thread implements MyPlayerInterface {
1214 | private int toneType;
1215 | private int toneCategory;
1216 | private boolean playing = false;
1217 | private static final int maxWaitTimeMs = 3600000; // 1 hour should be more than enough
1218 | private static final int loadBufferWaitTimeMs = 20;
1219 | private static final int toneVolume = 100; // The volume of the tone, given in percentage of maximum volume (from 0-100).
1220 | // --- ToneGenerator's built-in tone constants are all below 100, so these custom categories start at 101
1221 | public static final int BEEP = 101;
1222 | public static final int BUSY = 102;
1223 | public static final int CALLEND = 103;
1224 | public static final int CALLWAITING = 104;
1225 | public static final int RINGBACK = 105;
1226 | public static final int SILENT = 106;
1227 | public int customWaitTimeMs = maxWaitTimeMs;
1228 | public String caller;
1229 |
1230 | myToneGenerator(final int t) {
1231 | super();
1232 | toneCategory = t;
1233 | }
1234 |
1235 | public void setCustomWaitTime(final int ms) {
1236 | customWaitTimeMs = ms;
1237 | }
1238 |
1239 | @Override
1240 | public void startPlay(final Map<String, Object> data) {
1241 | String name = (String) data.get("name");
1242 | caller = name;
1243 | start();
1244 | }
1245 |
1246 | @Override
1247 | public boolean isPlaying() {
1248 | return playing;
1249 | }
1250 |
1251 | @Override
1252 | public void stopPlay() {
1253 | synchronized (this) {
1254 | if (playing) {
1255 | notify();
1256 | }
1257 | playing = false;
1258 | }
1259 | }
1260 |
1261 | @Override
1262 | public void run() {
1263 | int toneWaitTimeMs;
1264 | switch (toneCategory) {
1265 | case SILENT:
1266 | //toneType = ToneGenerator.TONE_CDMA_SIGNAL_OFF;
1267 | toneType = ToneGenerator.TONE_CDMA_ANSWER;
1268 | toneWaitTimeMs = 1000;
1269 | break;
1270 | case BUSY:
1271 | //toneType = ToneGenerator.TONE_SUP_BUSY;
1272 | //toneType = ToneGenerator.TONE_SUP_CONGESTION;
1273 | //toneType = ToneGenerator.TONE_SUP_CONGESTION_ABBREV;
1274 | //toneType = ToneGenerator.TONE_CDMA_NETWORK_BUSY;
1275 | //toneType = ToneGenerator.TONE_CDMA_NETWORK_BUSY_ONE_SHOT;
1276 | toneType = ToneGenerator.TONE_SUP_RADIO_NOTAVAIL;
1277 | toneWaitTimeMs = 4000;
1278 | break;
1279 | case RINGBACK:
1280 | //toneType = ToneGenerator.TONE_SUP_RINGTONE;
1281 | toneType = ToneGenerator.TONE_CDMA_NETWORK_USA_RINGBACK;
1282 | toneWaitTimeMs = maxWaitTimeMs; // [STOP MANUALLY]
1283 | break;
1284 | case CALLEND:
1285 | toneType = ToneGenerator.TONE_PROP_PROMPT;
1286 | toneWaitTimeMs = 200; // plays when call ended
1287 | break;
1288 | case CALLWAITING:
1289 | //toneType = ToneGenerator.TONE_CDMA_NETWORK_CALLWAITING;
1290 | toneType = ToneGenerator.TONE_SUP_CALL_WAITING;
1291 | toneWaitTimeMs = maxWaitTimeMs; // [STOP MANUALLY]
1292 | break;
1293 | case BEEP:
1294 | //toneType = ToneGenerator.TONE_SUP_PIP;
1295 | //toneType = ToneGenerator.TONE_CDMA_PIP;
1296 | //toneType = ToneGenerator.TONE_SUP_RADIO_ACK;
1297 | //toneType = ToneGenerator.TONE_PROP_BEEP;
1298 | toneType = ToneGenerator.TONE_PROP_BEEP2;
1299 | toneWaitTimeMs = 1000; // a single short beep
1300 | break;
1301 | default:
1302 | // --- use ToneGenerator internal type.
1303 | Log.d(TAG, "myToneGenerator: use internal tone type: " + toneCategory);
1304 | toneType = toneCategory;
1305 | toneWaitTimeMs = customWaitTimeMs;
1306 | }
1307 | Log.d(TAG, String.format("myToneGenerator: toneCategory: %d ,toneType: %d, toneWaitTimeMs: %d", toneCategory, toneType, toneWaitTimeMs));
1308 |
1309 | ToneGenerator tg;
1310 | try {
1311 | tg = new ToneGenerator(AudioManager.STREAM_VOICE_CALL, toneVolume);
1312 | } catch (RuntimeException e) {
1313 | Log.d(TAG, "myToneGenerator: Exception caught while creating ToneGenerator: " + e);
1314 | tg = null;
1315 | }
1316 |
1317 | if (tg != null) {
1318 | synchronized (this) {
1319 | if (!playing) {
1320 | playing = true;
1321 |
1322 | // --- make sure audio routing is set first, or it may sound weird when switching suddenly
1323 | if (caller.equals("mBusytone")) {
1324 | audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
1325 | } else if (caller.equals("mRingback")) {
1326 | audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
1327 | } else if (caller.equals("mRingtone")) {
1328 | audioManager.setMode(AudioManager.MODE_RINGTONE);
1329 | }
1330 | InCallManagerModule.this.updateAudioRoute();
1331 |
1332 | tg.startTone(toneType);
1333 | try {
1334 | wait(toneWaitTimeMs + loadBufferWaitTimeMs);
1335 | } catch (InterruptedException e) {
1336 | Log.d(TAG, "myToneGenerator stopped. toneType: " + toneType);
1337 | }
1338 | tg.stopTone();
1339 | }
1340 | playing = false;
1341 | tg.release();
1342 | }
1343 | }
1344 | Log.d(TAG, "MyToneGenerator(): play finished. caller=" + caller);
1345 | if (caller.equals("mBusytone")) {
1346 | Log.d(TAG, "MyToneGenerator(): invoke stop()");
1347 | InCallManagerModule.this.stop();
1348 | }
1349 | }
1350 | }
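// --- Hypothetical usage sketch for myToneGenerator (illustrative only; the real
// --- wiring lives earlier in this file):
// ---   MyPlayerInterface tone = new myToneGenerator(myToneGenerator.RINGBACK);
// ---   Map<String, Object> data = new HashMap<String, Object>();
// ---   data.put("name", "mRingback");
// ---   tone.startPlay(data);   // starts the thread; RINGBACK waits until stopped
// ---   // ... later ...
// ---   tone.stopPlay();        // notify() wakes the wait(), stops the tone and releases the ToneGenerator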
1351 |
1352 | private class myMediaPlayer extends MediaPlayer implements MyPlayerInterface {
1353 |
1354 | //myMediaPlayer() {
1355 | // super();
1356 | //}
1357 |
1358 | @Override
1359 | public void stopPlay() {
1360 | stop();
1361 | reset();
1362 | release();
1363 | }
1364 |
1365 | @Override
1366 | public void startPlay(final Map<String, Object> data) {
1367 | try {
1368 | Uri sourceUri = (Uri) data.get("sourceUri");
1369 | boolean setLooping = (Boolean) data.get("setLooping");
1370 | int stream = (Integer) data.get("audioStream");
1371 | String name = (String) data.get("name");
1372 |
1373 | ReactContext reactContext = getReactApplicationContext();
1374 | setDataSource(reactContext, sourceUri);
1375 | setLooping(setLooping);
1376 | setAudioStreamType(stream); // --- would STREAM_DTMF be a better stream type, as used with ToneGenerator?
1377 |
1378 | /*
1379 | // TODO: use modern and more explicit audio stream api
1380 | if (android.os.Build.VERSION.SDK_INT >= 21) {
1381 | int audioFlag = (Integer) data.get("audioFlag");
1382 | int audioUsage = (Integer) data.get("audioUsage");
1383 | int audioContentType = (Integer) data.get("audioContentType");
1384 |
1385 | setAudioAttributes(
1386 | new AudioAttributes.Builder()
1387 | .setFlags(audioFlag)
1388 | .setLegacyStreamType(stream)
1389 | .setUsage(audioUsage)
1390 | .setContentType(audioContentType)
1391 | .build()
1392 | );
1393 | }
1394 | */
1395 |
1396 | // -- will start at onPrepared() event
1397 | prepareAsync();
1398 | } catch (Exception e) {
1399 | Log.d(TAG, "startPlay() failed");
1400 | }
1401 | }
1402 |
1403 | @Override
1404 | public boolean isPlaying() {
1405 | return super.isPlaying();
1406 | }
1407 | }
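// --- Hypothetical usage sketch for myMediaPlayer (illustrative only; it assumes
// --- the OnPrepared/OnCompletion listeners shown earlier have been attached to
// --- the player first, since playback only starts in onPrepared()):
// ---   myMediaPlayer player = new myMediaPlayer();
// ---   Map<String, Object> data = new HashMap<String, Object>();
// ---   data.put("sourceUri", getRingbackUri("_BUNDLE_"));
// ---   data.put("setLooping", true);
// ---   data.put("audioStream", AudioManager.STREAM_VOICE_CALL);
// ---   data.put("name", "mRingback");
// ---   player.startPlay(data);  // calls prepareAsync(); playback begins in onPrepared()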
1408 | // ===== Internal Classes End =====
1409 |
1410 | // ===== Permission Start =====
1411 | @ReactMethod
1412 | public void checkRecordPermission(Promise promise) {
1413 | Log.d(TAG, "RNInCallManager.checkRecordPermission(): enter");
1414 | _checkRecordPermission();
1415 | if (recordPermission.equals("unknow")) {
1416 | Log.d(TAG, "RNInCallManager.checkRecordPermission(): failed");
1417 | promise.reject(new Exception("checkRecordPermission failed"));
1418 | } else {
1419 | promise.resolve(recordPermission);
1420 | }
1421 | }
1422 |
1423 | @ReactMethod
1424 | public void checkCameraPermission(Promise promise) {
1425 | Log.d(TAG, "RNInCallManager.checkCameraPermission(): enter");
1426 | _checkCameraPermission();
1427 | if (cameraPermission.equals("unknow")) {
1428 | Log.d(TAG, "RNInCallManager.checkCameraPermission(): failed");
1429 | promise.reject(new Exception("checkCameraPermission failed"));
1430 | } else {
1431 | promise.resolve(cameraPermission);
1432 | }
1433 | }
1434 |
1435 | private void _checkRecordPermission() {
1436 | recordPermission = _checkPermission(permission.RECORD_AUDIO);
1437 | Log.d(TAG, String.format("RNInCallManager.checkRecordPermission(): recordPermission=%s", recordPermission));
1438 | }
1439 |
1440 | private void _checkCameraPermission() {
1441 | cameraPermission = _checkPermission(permission.CAMERA);
1442 | Log.d(TAG, String.format("RNInCallManager.checkCameraPermission(): cameraPermission=%s", cameraPermission));
1443 | }
1444 |
1445 | private String _checkPermission(String targetPermission) {
1446 | try {
1447 | ReactContext reactContext = getReactApplicationContext();
1448 | if (ContextCompat.checkSelfPermission(reactContext, targetPermission) == PackageManager.PERMISSION_GRANTED) {
1449 | return "granted";
1450 | } else {
1451 | return "denied";
1452 | }
1453 | } catch (Exception e) {
1454 | Log.d(TAG, "_checkPermission() catch");
1455 | return "denied";
1456 | }
1457 | }
1458 |
1459 | @ReactMethod
1460 | public void requestRecordPermission(Promise promise) {
1461 | Log.d(TAG, "RNInCallManager.requestRecordPermission(): enter");
1462 | _checkRecordPermission();
1463 | if (!recordPermission.equals("granted")) {
1464 | _requestPermission(permission.RECORD_AUDIO, promise);
1465 | } else {
1466 | // --- already granted
1467 | promise.resolve(recordPermission);
1468 | }
1469 | }
1470 |
1471 | @ReactMethod
1472 | public void requestCameraPermission(Promise promise) {
1473 | Log.d(TAG, "RNInCallManager.requestCameraPermission(): enter");
1474 | _checkCameraPermission();
1475 | if (!cameraPermission.equals("granted")) {
1476 | _requestPermission(permission.CAMERA, promise);
1477 | } else {
1478 | // --- already granted
1479 | promise.resolve(cameraPermission);
1480 | }
1481 | }
1482 |
1483 | @ReactMethod
1484 | public void chooseAudioRoute(String audioRoute, Promise promise) {
1485 | Log.d(TAG, "RNInCallManager.chooseAudioRoute(): user choose audioDevice = " + audioRoute);
1486 |
1487 | if (audioRoute.equals(AudioDevice.EARPIECE.name())) {
1488 | selectAudioDevice(AudioDevice.EARPIECE);
1489 | } else if (audioRoute.equals(AudioDevice.SPEAKER_PHONE.name())) {
1490 | selectAudioDevice(AudioDevice.SPEAKER_PHONE);
1491 | } else if (audioRoute.equals(AudioDevice.WIRED_HEADSET.name())) {
1492 | selectAudioDevice(AudioDevice.WIRED_HEADSET);
1493 | } else if (audioRoute.equals(AudioDevice.BLUETOOTH.name())) {
1494 | selectAudioDevice(AudioDevice.BLUETOOTH);
1495 | }
1496 | promise.resolve(getAudioDeviceStatusMap());
1497 | }
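// --- Accepted values for audioRoute above are the AudioDevice enum names:
// --- "EARPIECE", "SPEAKER_PHONE", "WIRED_HEADSET", "BLUETOOTH"; any other value
// --- is silently ignored and the promise still resolves with the current device
// --- status map (see getAudioDeviceStatusMap() below).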
1498 |
1499 | private void _requestPermission(String targetPermission, Promise promise) {
1500 | Activity currentActivity = getCurrentActivity();
1501 | if (currentActivity == null) {
1502 | Log.d(TAG, String.format("RNInCallManager._requestPermission(): ReactContext doesn't hava any Activity attached when requesting %s", targetPermission));
1503 | promise.reject(new Exception("_requestPermission(): currentActivity is not attached"));
1504 | return;
1505 | }
1506 | int requestPermissionCode = getRandomInteger(1, 65535);
1507 | while (mRequestPermissionCodePromises.get(requestPermissionCode, null) != null) {
1508 | requestPermissionCode = getRandomInteger(1, 65535);
1509 | }
1510 | mRequestPermissionCodePromises.put(requestPermissionCode, promise);
1511 | mRequestPermissionCodeTargetPermission.put(requestPermissionCode, targetPermission);
1512 | /*
1513 | if (ActivityCompat.shouldShowRequestPermissionRationale(currentActivity, permission.RECORD_AUDIO)) {
1514 | showMessageOKCancel("You need to allow access to microphone for making call", new DialogInterface.OnClickListener() {
1515 | @Override
1516 | public void onClick(DialogInterface dialog, int which) {
1517 | ActivityCompat.requestPermissions(currentActivity, new String[] {permission.RECORD_AUDIO}, requestPermissionCode);
1518 | }
1519 | });
1520 | return;
1521 | }
1522 | */
1523 | ActivityCompat.requestPermissions(currentActivity, new String[] {targetPermission}, requestPermissionCode);
1524 | }
1525 |
1526 | private static int getRandomInteger(int min, int max) {
1527 | if (min >= max) {
1528 | throw new IllegalArgumentException("max must be greater than min");
1529 | }
1530 | Random random = new Random();
1531 | return random.nextInt((max - min) + 1) + min;
1532 | }
1533 |
1534 | protected static void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
1535 | Log.d(TAG, "RNInCallManager.onRequestPermissionsResult(): enter");
1536 | Promise promise = mRequestPermissionCodePromises.get(requestCode, null);
1537 | String targetPermission = mRequestPermissionCodeTargetPermission.get(requestCode, null);
1538 | mRequestPermissionCodePromises.delete(requestCode);
1539 | mRequestPermissionCodeTargetPermission.delete(requestCode);
1540 | if (promise != null && targetPermission != null) {
1541 |
1542 | Map<String, Integer> permissionResultMap = new HashMap<String, Integer>();
1543 |
1544 | for (int i = 0; i < permissions.length; i++) {
1545 | permissionResultMap.put(permissions[i], grantResults[i]);
1546 | }
1547 |
1548 | if (!permissionResultMap.containsKey(targetPermission)) {
1549 | Log.wtf(TAG, String.format("RNInCallManager.onRequestPermissionsResult(): requested permission %s but did not appear", targetPermission));
1550 | promise.reject(String.format("%s_PERMISSION_NOT_FOUND", targetPermission), String.format("requested permission %s but did not appear", targetPermission));
1551 | return;
1552 | }
1553 |
1554 | String _requestPermissionResult = "unknow";
1555 | if (permissionResultMap.get(targetPermission) == PackageManager.PERMISSION_GRANTED) {
1556 | _requestPermissionResult = "granted";
1557 | } else {
1558 | _requestPermissionResult = "denied";
1559 | }
1560 |
1561 | if (targetPermission.equals(permission.RECORD_AUDIO)) {
1562 | recordPermission = _requestPermissionResult;
1563 | } else if (targetPermission.equals(permission.CAMERA)) {
1564 | cameraPermission = _requestPermissionResult;
1565 | }
1566 | promise.resolve(_requestPermissionResult);
1567 | } else {
1568 | //super.onRequestPermissionsResult(requestCode, permissions, grantResults);
1569 | Log.wtf(TAG, "RNInCallManager.onRequestPermissionsResult(): request code not found");
1570 | promise.reject("PERMISSION_REQUEST_CODE_NOT_FOUND", "request code not found");
1571 | }
1572 | }
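// --- Permission flow summary (derived from the methods above): check*Permission()
// --- resolves with "granted" or "denied" via ContextCompat.checkSelfPermission();
// --- request*Permission() stores the Promise under a random request code, calls
// --- ActivityCompat.requestPermissions(), and onRequestPermissionsResult() later
// --- resolves that Promise with "granted" or "denied".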
1573 | // ===== Permission End =====
1574 |
1575 | private void pause() {
1576 | if (audioManagerActivated) {
1577 | Log.d(TAG, "pause audioRouteManager");
1578 | stopEvents();
1579 | }
1580 | }
1581 |
1582 | private void resume() {
1583 | if (audioManagerActivated) {
1584 | Log.d(TAG, "resume audioRouteManager");
1585 | startEvents();
1586 | }
1587 | }
1588 |
1589 | @Override
1590 | public void onHostResume() {
1591 | Log.d(TAG, "onResume()");
1592 | //resume();
1593 | }
1594 |
1595 | @Override
1596 | public void onHostPause() {
1597 | Log.d(TAG, "onPause()");
1598 | //pause();
1599 | }
1600 |
1601 | @Override
1602 | public void onHostDestroy() {
1603 | Log.d(TAG, "onDestroy()");
1604 | stopRingtone();
1605 | stopRingback();
1606 | stopBusytone();
1607 | stop();
1608 | }
1609 |
1610 | private void updateAudioRoute() {
1611 | if (!automatic) {
1612 | return;
1613 | }
1614 | updateAudioDeviceState();
1615 | }
1616 |
1617 | // ===== NOTE: the functions below are based on the AppRTC demo (M64) ===== //
1618 | /** Changes selection of the currently active audio device. */
1619 | private void setAudioDeviceInternal(AudioDevice device) {
1620 | Log.d(TAG, "setAudioDeviceInternal(device=" + device + ")");
1621 | if (!audioDevices.contains(device)) {
1622 | Log.e(TAG, "specified audio device does not exist");
1623 | return;
1624 | }
1625 |
1626 | switch (device) {
1627 | case SPEAKER_PHONE:
1628 | setSpeakerphoneOn(true);
1629 | break;
1630 | case EARPIECE:
1631 | setSpeakerphoneOn(false);
1632 | break;
1633 | case WIRED_HEADSET:
1634 | setSpeakerphoneOn(false);
1635 | break;
1636 | case BLUETOOTH:
1637 | setSpeakerphoneOn(false);
1638 | break;
1639 | default:
1640 | Log.e(TAG, "Invalid audio device selection");
1641 | break;
1642 | }
1643 | selectedAudioDevice = device;
1644 | }
1645 |
1646 |
1647 |
1648 | /**
1649 | * Changes default audio device.
1650 | * TODO(henrika): add usage of this method in the AppRTCMobile client.
1651 | */
1652 | public void setDefaultAudioDevice(AudioDevice defaultDevice) {
1653 | switch (defaultDevice) {
1654 | case SPEAKER_PHONE:
1655 | defaultAudioDevice = defaultDevice;
1656 | break;
1657 | case EARPIECE:
1658 | if (hasEarpiece()) {
1659 | defaultAudioDevice = defaultDevice;
1660 | } else {
1661 | defaultAudioDevice = AudioDevice.SPEAKER_PHONE;
1662 | }
1663 | break;
1664 | default:
1665 | Log.e(TAG, "Invalid default audio device selection");
1666 | break;
1667 | }
1668 | Log.d(TAG, "setDefaultAudioDevice(device=" + defaultAudioDevice + ")");
1669 | updateAudioDeviceState();
1670 | }
1671 |
1672 | /** Changes selection of the currently active audio device. */
1673 | public void selectAudioDevice(AudioDevice device) {
1674 | if (device != AudioDevice.NONE && !audioDevices.contains(device)) {
1675 | Log.e(TAG, "selectAudioDevice() Can not select " + device + " from available " + audioDevices);
1676 | return;
1677 | }
1678 | userSelectedAudioDevice = device;
1679 | updateAudioDeviceState();
1680 | }
1681 |
1682 | /** Returns current set of available/selectable audio devices. */
1683 | public Set<AudioDevice> getAudioDevices() {
1684 | return Collections.unmodifiableSet(new HashSet<>(audioDevices));
1685 | }
1686 |
1687 | /** Returns the currently selected audio device. */
1688 | public AudioDevice getSelectedAudioDevice() {
1689 | return selectedAudioDevice;
1690 | }
1691 |
1692 | /** Helper method for receiver registration. */
1693 | private void registerReceiver(BroadcastReceiver receiver, IntentFilter filter) {
1694 | getReactApplicationContext().registerReceiver(receiver, filter);
1695 | }
1696 |
1697 | /** Helper method for unregistration of an existing receiver. */
1698 | private void unregisterReceiver(final BroadcastReceiver receiver) {
1699 | final ReactContext reactContext = this.getReactApplicationContext();
1700 | if (reactContext != null) {
1701 | try {
1702 | reactContext.unregisterReceiver(receiver);
1703 | } catch (final Exception e) {
1704 | Log.d(TAG, "unregisterReceiver() failed");
1705 | }
1706 | } else {
1707 | Log.d(TAG, "unregisterReceiver() reactContext is null");
1708 | }
1709 | }
1710 |
1711 | /** Sets the speaker phone mode. */
1712 | /*
1713 | private void setSpeakerphoneOn(boolean on) {
1714 | boolean wasOn = audioManager.isSpeakerphoneOn();
1715 | if (wasOn == on) {
1716 | return;
1717 | }
1718 | audioManager.setSpeakerphoneOn(on);
1719 | }
1720 | */
1721 |
1722 | /** Sets the microphone mute state. */
1723 | /*
1724 | private void setMicrophoneMute(boolean on) {
1725 | boolean wasMuted = audioManager.isMicrophoneMute();
1726 | if (wasMuted == on) {
1727 | return;
1728 | }
1729 | audioManager.setMicrophoneMute(on);
1730 | }
1731 | */
1732 |
1733 | /** Checks whether the device has an earpiece (telephony feature). */
1734 | private boolean hasEarpiece() {
1735 | return getReactApplicationContext().getPackageManager().hasSystemFeature(PackageManager.FEATURE_TELEPHONY);
1736 | }
1737 |
1738 | /**
1739 | * Checks whether a wired headset is connected or not.
1740 | * This is not a valid indication that audio playback is actually over
1741 | * the wired headset as audio routing depends on other conditions. We
1742 | * only use it as an early indicator (during initialization) of an attached
1743 | * wired headset.
1744 | */
1745 | @Deprecated
1746 | private boolean hasWiredHeadset() {
1747 | if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
1748 | return audioManager.isWiredHeadsetOn();
1749 | } else {
1750 | final AudioDeviceInfo[] devices = audioManager.getDevices(AudioManager.GET_DEVICES_ALL);
1751 | for (AudioDeviceInfo device : devices) {
1752 | final int type = device.getType();
1753 | if (type == AudioDeviceInfo.TYPE_WIRED_HEADSET) {
1754 | Log.d(TAG, "hasWiredHeadset: found wired headset");
1755 | return true;
1756 | } else if (type == AudioDeviceInfo.TYPE_USB_DEVICE) {
1757 | Log.d(TAG, "hasWiredHeadset: found USB audio device");
1758 | return true;
1759 | }
1760 | }
1761 | return false;
1762 | }
1763 | }
1764 |
1765 |
1766 | /**
1767 | * Updates the list of possible audio devices and makes a new device selection.
1768 | */
1769 | public void updateAudioDeviceState() {
1770 | Log.d(TAG, "--- updateAudioDeviceState: "
1771 | + "wired headset=" + hasWiredHeadset + ", "
1772 | + "BT state=" + bluetoothManager.getState());
1773 | Log.d(TAG, "Device status: "
1774 | + "available=" + audioDevices + ", "
1775 | + "selected=" + selectedAudioDevice + ", "
1776 | + "user selected=" + userSelectedAudioDevice);
1777 |
1778 | // Check if any Bluetooth headset is connected. The internal BT state will
1779 | // change accordingly.
1780 | // TODO(henrika): perhaps wrap required state into BT manager.
1781 | if (bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_AVAILABLE
1782 | || bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_UNAVAILABLE
1783 | || bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_DISCONNECTING) {
1784 | bluetoothManager.updateDevice();
1785 | }
1786 |
1787 | // Update the set of available audio devices.
1788 | Set<AudioDevice> newAudioDevices = new HashSet<>();
1789 |
1790 | // always assume device has speaker phone
1791 | newAudioDevices.add(AudioDevice.SPEAKER_PHONE);
1792 |
1793 | if (bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTED
1794 | || bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTING
1795 | || bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_AVAILABLE) {
1796 | newAudioDevices.add(AudioDevice.BLUETOOTH);
1797 | }
1798 |
1799 | if (hasWiredHeadset) {
1800 | newAudioDevices.add(AudioDevice.WIRED_HEADSET);
1801 | }
1802 |
1803 | if (hasEarpiece()) {
1804 | newAudioDevices.add(AudioDevice.EARPIECE);
1805 | }
1806 |
1807 | // --- check whether the user-selected audio device is still available
1808 | if (userSelectedAudioDevice != null
1809 | && userSelectedAudioDevice != AudioDevice.NONE
1810 | && !newAudioDevices.contains(userSelectedAudioDevice)) {
1811 | userSelectedAudioDevice = AudioDevice.NONE;
1812 | }
1813 |
1814 | // Store state which is set to true if the device list has changed.
1815 | boolean audioDeviceSetUpdated = !audioDevices.equals(newAudioDevices);
1816 | // Update the existing audio device set.
1817 | audioDevices = newAudioDevices;
1818 |
1819 | AudioDevice newAudioDevice = getPreferredAudioDevice();
1820 |
1821 | // --- stop bluetooth if needed
1822 | if (selectedAudioDevice == AudioDevice.BLUETOOTH
1823 | && newAudioDevice != AudioDevice.BLUETOOTH
1824 | && (bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTED
1825 | || bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTING)
1826 | ) {
1827 | bluetoothManager.stopScoAudio();
1828 | bluetoothManager.updateDevice();
1829 | }
1830 |
1831 | // --- start bluetooth if needed
1832 | if (selectedAudioDevice != AudioDevice.BLUETOOTH
1833 | && newAudioDevice == AudioDevice.BLUETOOTH
1834 | && bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_AVAILABLE) {
1835 | // Attempt to start Bluetooth SCO audio (takes a few seconds to start).
1836 | if (!bluetoothManager.startScoAudio()) {
1837 | // Remove BLUETOOTH from list of available devices since SCO failed.
1838 | audioDevices.remove(AudioDevice.BLUETOOTH);
1839 | audioDeviceSetUpdated = true;
1840 | if (userSelectedAudioDevice == AudioDevice.BLUETOOTH) {
1841 | userSelectedAudioDevice = AudioDevice.NONE;
1842 | }
1843 | newAudioDevice = getPreferredAudioDevice();
1844 | }
1845 | }
1846 |
1847 | if (newAudioDevice == AudioDevice.BLUETOOTH
1848 | && bluetoothManager.getState() != AppRTCBluetoothManager.State.SCO_CONNECTED) {
1849 | newAudioDevice = getPreferredAudioDevice(true); // --- skip bluetooth
1850 | }
1851 |
1852 | // Switch to the new device, but only if something has changed.
1853 | if (newAudioDevice != selectedAudioDevice || audioDeviceSetUpdated) {
1854 |
1855 | // Do the required device switch.
1856 | setAudioDeviceInternal(newAudioDevice);
1857 | Log.d(TAG, "New device status: "
1858 | + "available=" + audioDevices + ", "
1859 | + "selected=" + newAudioDevice);
1860 | /*
1861 | if (audioManagerEvents != null) {
1862 | // Notify a listening client that audio device has been changed.
1863 | audioManagerEvents.onAudioDeviceChanged(selectedAudioDevice, audioDevices);
1864 | }
1865 | */
1866 | sendEvent("onAudioDeviceChanged", getAudioDeviceStatusMap());
1867 | }
1868 | Log.d(TAG, "--- updateAudioDeviceState done");
1869 | }
1870 |
1871 | private WritableMap getAudioDeviceStatusMap() {
1872 | WritableMap data = Arguments.createMap();
1873 | String audioDevicesJson = "[";
1874 | for (AudioDevice s: audioDevices) {
1875 | audioDevicesJson += "\"" + s.name() + "\",";
1876 | }
1877 |
1878 | // --- strip the last `,`
1879 | if (audioDevicesJson.length() > 1) {
1880 | audioDevicesJson = audioDevicesJson.substring(0, audioDevicesJson.length() - 1);
1881 | }
1882 | audioDevicesJson += "]";
1883 |
1884 | data.putString("availableAudioDeviceList", audioDevicesJson);
1885 | data.putString("selectedAudioDevice", (selectedAudioDevice == null) ? "" : selectedAudioDevice.name());
1886 |
1887 | return data;
1888 | }
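// --- Illustrative shape of the map built above (also sent with the
// --- "onAudioDeviceChanged" event); note availableAudioDeviceList is a JSON
// --- string, not an array:
// ---   availableAudioDeviceList = "[\"SPEAKER_PHONE\",\"EARPIECE\",\"BLUETOOTH\"]"
// ---   selectedAudioDevice      = "EARPIECE"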
1889 |
1890 | private AudioDevice getPreferredAudioDevice() {
1891 | return getPreferredAudioDevice(false);
1892 | }
1893 |
1894 | private AudioDevice getPreferredAudioDevice(boolean skipBluetooth) {
1895 | final AudioDevice newAudioDevice;
1896 |
1897 | if (userSelectedAudioDevice != null && userSelectedAudioDevice != AudioDevice.NONE) {
1898 | newAudioDevice = userSelectedAudioDevice;
1899 | } else if (!skipBluetooth && audioDevices.contains(AudioDevice.BLUETOOTH)) {
1900 | // If a Bluetooth is connected, then it should be used as output audio
1901 | // device. Note that it is not sufficient that a headset is available;
1902 | // an active SCO channel must also be up and running.
1903 | newAudioDevice = AudioDevice.BLUETOOTH;
1904 | } else if (audioDevices.contains(AudioDevice.WIRED_HEADSET)) {
1905 | // If a wired headset is connected, but Bluetooth is not, then wired headset is used as
1906 | // audio device.
1907 | newAudioDevice = AudioDevice.WIRED_HEADSET;
1908 | } else if (audioDevices.contains(defaultAudioDevice)) {
1909 | newAudioDevice = defaultAudioDevice;
1910 | } else {
1911 | newAudioDevice = AudioDevice.SPEAKER_PHONE;
1912 | }
1913 |
1914 | return newAudioDevice;
1915 | }
1916 | }
1917 |
1918 |
--------------------------------------------------------------------------------