├── .gitignore
├── AUTHORS
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── app
│   ├── .gitignore
│   ├── build.gradle
│   ├── libs
│   │   └── autobanh.jar
│   └── src
│       └── main
│           ├── AndroidManifest.xml
│           ├── java
│           │   ├── com
│           │   │   └── example
│           │   │       └── androidthings
│           │   │           └── videortc
│           │   │               ├── CallActivity.java
│           │   │               └── MainActivity.java
│           │   └── org
│           │       └── appspot
│           │           └── apprtc
│           │               ├── AppRTCClient.java
│           │               ├── PeerConnectionClient.java
│           │               ├── RoomParametersFetcher.java
│           │               ├── WebSocketChannelClient.java
│           │               ├── WebSocketRTCClient.java
│           │               └── util
│           │                   ├── AppRTCUtils.java
│           │                   └── AsyncHttpURLConnection.java
│           └── res
│               ├── drawable-hdpi
│               │   ├── disconnect.png
│               │   ├── ic_action_full_screen.png
│               │   └── ic_action_return_from_full_screen.png
│               ├── drawable-ldpi
│               │   ├── disconnect.png
│               │   ├── ic_action_full_screen.png
│               │   └── ic_action_return_from_full_screen.png
│               ├── drawable-mdpi
│               │   ├── disconnect.png
│               │   ├── ic_action_full_screen.png
│               │   └── ic_action_return_from_full_screen.png
│               ├── drawable-xhdpi
│               │   ├── disconnect.png
│               │   ├── ic_action_full_screen.png
│               │   └── ic_action_return_from_full_screen.png
│               ├── layout
│               │   ├── activity_call.xml
│               │   └── main_activity.xml
│               └── values
│                   ├── arrays.xml
│                   ├── strings.xml
│                   └── styles.xml
├── build.gradle
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
└── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | local.properties
4 | .idea
5 | .DS_Store
6 | build
7 | *.json
8 |
--------------------------------------------------------------------------------
/AUTHORS:
--------------------------------------------------------------------------------
1 | Google Inc.
2 | The WebRTC Project Authors
3 | The Chromium Authors
4 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # How to become a contributor and submit your own code
2 |
3 | ## Contributor License Agreements
4 |
5 | We'd love to accept your sample apps and patches! Before we can take them, we
6 | have to jump a couple of legal hurdles.
7 |
8 | Please fill out either the individual or corporate Contributor License Agreement (CLA).
9 |
10 | * If you are an individual writing original source code and you're sure you
 11 |   own the intellectual property, then you'll need to sign an
 12 |   [individual CLA](https://developers.google.com/open-source/cla/individual).
13 | * If you work for a company that wants to allow you to contribute your work,
 14 |   then you'll need to sign a
 15 |   [corporate CLA](https://developers.google.com/open-source/cla/corporate).
16 |
17 | Follow either of the two links above to access the appropriate CLA and
18 | instructions for how to sign and return it. Once we receive it, we'll be able to
19 | accept your pull requests.
20 |
21 | ## Contributing A Patch
22 |
23 | 1. Submit an issue describing your proposed change to the repo in question.
24 | 1. The repo owner will respond to your issue promptly.
25 | 1. If your proposed change is accepted, and you haven't already done so, sign a
26 | Contributor License Agreement (see details above).
27 | 1. Fork the desired repo, develop and test your code changes.
28 | 1. Ensure that your code adheres to the existing style in the sample to which
29 | you are contributing. Refer to the
 30 |   [Android Code Style Guide](https://source.android.com/source/code-style.html)
 31 |   for the
32 | recommended coding standards for this organization.
33 | 1. Ensure that your code has an appropriate set of unit tests which all pass.
34 | 1. Submit a pull request.
35 |
36 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright 2014 The Android Open Source Project
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Android Things video call sample app using WebRTC
2 |
 3 | This Android Things sample app demonstrates how to establish a WebRTC video call. It is based on the
 4 | original [WebRTC Android sample](https://webrtc.org/native-code/android/), slightly modified
 5 | to run easily on Android Things devices. For simplicity, it creates a WebRTC room with a random ID,
 6 | shows that ID on the display and in logcat, and then auto-joins the room on boot. Another client can
 7 | then join the same room, either from the web client at https://appr.tc or from an Android client
 8 | running the original WebRTC sample, by entering the same room ID to establish the video call.
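
For reference, here is a condensed sketch of how `CallActivity` (included in this project) creates
and joins the room; `randomRoomID` stands for the generated ID shown on the display:

```java
// Sketch condensed from CallActivity.connectVideoCall(): join a room on appr.tc.
AppRTCClient appRtcClient = new WebSocketRTCClient(this /* AppRTCClient.SignalingEvents */);
AppRTCClient.RoomConnectionParameters roomConnectionParameters =
    new AppRTCClient.RoomConnectionParameters(
        "https://appr.tc",   // room server URL
        randomRoomID,        // 7 random uppercase letters/digits, e.g. "ACEFGH1"
        false /* loopback */);
appRtcClient.connectToRoom(roomConnectionParameters);
```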
9 |
 10 | When establishing a video call connection, two WebRTC clients need to exchange session descriptions
 11 | via a signaling server. This sample uses a hosted instance of the [WebRTC signaling server](https://appr.tc).
 12 | Please refer to https://www.html5rocks.com/en/tutorials/webrtc/infrastructure/ for more information
 13 | on WebRTC signaling.
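
As an illustration of that exchange, the following is a simplified sketch of the signaling callbacks
implemented by `CallActivity`; the full version also routes each callback to the UI thread and
forwards ICE candidates:

```java
// Simplified from CallActivity's AppRTCClient.SignalingEvents implementation.
@Override
public void onConnectedToRoom(final AppRTCClient.SignalingParameters params) {
    // The first client in the room is the initiator and creates the offer; the offer SDP is
    // sent back through the signaling server from PeerConnectionEvents.onLocalDescription().
    if (params.initiator) {
        peerConnectionClient.createOffer();
    }
}

@Override
public void onRemoteDescription(final SessionDescription sdp) {
    peerConnectionClient.setRemoteDescription(sdp);
    if (!signalingParameters.initiator) {
        peerConnectionClient.createAnswer();  // answering side replies with an answer SDP
    }
}
```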
14 |
15 | > **Note:** The Android Things Console will be turned down for non-commercial
16 | > use on January 5, 2022. For more details, see the
17 | > [FAQ page](https://developer.android.com/things/faq).
18 |
19 | ## Pre-requisites
20 |
 21 | 1. An Android Things compatible board, e.g. Raspberry Pi 3 or an NXP board
22 | 1. Android Things compatible camera (for example, the Raspberry Pi 3 camera module)
23 | 1. Android Studio 3+
24 |
25 | ## WebRTC-specific dependencies
26 |
 27 | This sample has the following dependencies, but they are either already included or directly
 28 | referenced, so no additional setup is necessary.
 29 |
 30 | * [WebRTC signaling server](https://appr.tc): The sample is set up to use the instance hosted at https://appr.tc
 31 | ([source code](https://github.com/webrtc/apprtc#apprtc-demo-code)).
 32 | * [The Autobahn libraries for WebSocket and WAMP](https://crossbar.io/autobahn/), with autobanh.jar
 33 | included at app/libs/autobanh.jar inside this project.
 34 | * [WebRTC library on Bintray/JCenter](https://bintray.com/google/webrtc/google-webrtc)
35 |
36 | ## Build and Run
37 |
38 | Build this app in Android Studio and run it on an Android Things board:
39 |
 40 | * Deploy and run the `app` module, which creates and joins a room with a random ID
 41 | * The room ID will be shown on the display and in logcat. Take note of it so that you can join the room from another device
42 |
43 | Join the room from another WebRTC client:
44 |
45 | * From a web browser, go to https://appr.tc and enter the same room ID shown on the Android Things display, or
 46 | * Use one of the native WebRTC sample apps, such as [Android](https://webrtc.org/native-code/android/) or [iOS](https://webrtc.org/native-code/ios/), and enter the same room ID to join
47 |
48 | ## Categories
49 |
50 | - Android Things
51 |
52 | ## Solutions
53 |
54 | - IoT
55 |
56 | ## Languages
57 |
58 | - Java
59 |
60 |
61 | ## License
62 |
63 | See LICENSE
64 |
--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018, The Android Open Source Project
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | apply plugin: 'com.android.application'
18 |
19 | android {
20 | compileSdkVersion 27
21 | buildToolsVersion '27.0.3'
22 |
23 | lintOptions {
24 | abortOnError false
25 | }
26 |
27 | defaultConfig {
28 | applicationId "com.example.androidthings.videortc"
29 | minSdkVersion 25
30 | targetSdkVersion 27
31 | versionCode 1
32 | versionName "1.0"
33 | }
34 |
35 | compileOptions {
36 | sourceCompatibility JavaVersion.VERSION_1_8
37 | targetCompatibility JavaVersion.VERSION_1_8
38 | }
39 | }
40 |
41 | dependencies {
42 | implementation fileTree(include: ['*.jar'], dir: 'libs')
43 | implementation 'com.android.support:support-annotations:27.1.0'
44 | implementation 'org.webrtc:google-webrtc:1.0.22672'
45 | }
--------------------------------------------------------------------------------
/app/libs/autobanh.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/androidthings/sample-videoRTC/95e6aa4bf6721463f8e5c5cc818a876dd88ed0fb/app/libs/autobanh.jar
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
 1 | <?xml version="1.0" encoding="utf-8"?>
 2 | <!-- Sketch of this sample's manifest: the exact attributes, permissions and intent
 3 |      filters below are reasonable assumptions for an Android Things WebRTC video call
 4 |      app, not a verbatim copy of the original file. -->
 5 | <manifest xmlns:android="http://schemas.android.com/apk/res/android"
 6 |     package="com.example.androidthings.videortc">
 7 |
 8 |     <uses-permission android:name="android.permission.CAMERA" />
 9 |     <uses-permission android:name="android.permission.RECORD_AUDIO" />
 10 |     <uses-permission android:name="android.permission.INTERNET" />
 11 |     <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
 12 |     <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
 13 |
 14 |     <application android:label="@string/app_name">
 15 |         <uses-library android:name="com.google.android.things" />
 16 |
 17 |         <activity android:name=".MainActivity">
 18 |             <intent-filter>
 19 |                 <action android:name="android.intent.action.MAIN" />
 20 |                 <category android:name="android.intent.category.LAUNCHER" />
 21 |             </intent-filter>
 22 |             <!-- Launch automatically on boot on Android Things -->
 23 |             <intent-filter>
 24 |                 <action android:name="android.intent.action.MAIN" />
 25 |                 <category android:name="android.intent.category.IOT_LAUNCHER" />
 26 |                 <category android:name="android.intent.category.DEFAULT" />
 27 |             </intent-filter>
 28 |         </activity>
 29 |
 30 |         <activity android:name=".CallActivity" />
 31 |     </application>
 32 | </manifest>
--------------------------------------------------------------------------------
/app/src/main/java/com/example/androidthings/videortc/CallActivity.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018, The Android Open Source Project
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.example.androidthings.videortc;
18 |
19 | import android.app.Activity;
20 | import android.app.AlertDialog;
21 | import android.content.DialogInterface;
22 | import android.net.Uri;
23 | import android.os.Bundle;
24 | import android.support.annotation.UiThread;
25 | import android.util.Log;
26 | import android.view.View;
27 | import android.view.View.OnClickListener;
28 | import android.widget.ImageButton;
29 | import android.widget.TextView;
30 | import android.widget.Toast;
31 |
32 | import org.appspot.apprtc.AppRTCClient;
33 | import org.appspot.apprtc.PeerConnectionClient;
34 | import org.appspot.apprtc.WebSocketRTCClient;
35 | import org.webrtc.Camera2Enumerator;
36 | import org.webrtc.CameraEnumerator;
37 | import org.webrtc.IceCandidate;
38 | import org.webrtc.Logging;
39 | import org.webrtc.RendererCommon.ScalingType;
40 | import org.webrtc.SessionDescription;
41 | import org.webrtc.StatsReport;
42 | import org.webrtc.SurfaceViewRenderer;
43 | import org.webrtc.VideoCapturer;
44 | import org.webrtc.VideoFrame;
45 | import org.webrtc.VideoRenderer;
46 | import org.webrtc.VideoSink;
47 |
48 | import java.security.SecureRandom;
49 | import java.util.ArrayList;
50 | import java.util.List;
51 |
52 | /**
53 | * Activity for peer connection call setup, call waiting
54 | * and call view.
55 | */
56 | public class CallActivity extends Activity implements AppRTCClient.SignalingEvents,
57 | PeerConnectionClient.PeerConnectionEvents {
58 | private static final String TAG = "CallActivity";
59 | private static final String APPRTC_URL = "https://appr.tc";
60 | private static final String UPPER_ALPHA_DIGITS = "ACEFGHJKLMNPQRUVWXY123456789";
61 |
62 | // Peer connection statistics callback period in ms.
63 | private static final int STAT_CALLBACK_PERIOD = 1000;
64 | private final ProxyRenderer remoteProxyRenderer = new ProxyRenderer();
65 | private final ProxyVideoSink localProxyVideoSink = new ProxyVideoSink();
 66 |   private final List<VideoRenderer.Callbacks> remoteRenderers = new ArrayList<>();
67 | private PeerConnectionClient peerConnectionClient = null;
68 | private AppRTCClient appRtcClient;
69 | private AppRTCClient.SignalingParameters signalingParameters;
70 | private SurfaceViewRenderer pipRenderer;
71 | private SurfaceViewRenderer fullscreenRenderer;
72 | private Toast logToast;
73 | private boolean activityRunning;
74 | private AppRTCClient.RoomConnectionParameters roomConnectionParameters;
75 | private PeerConnectionClient.PeerConnectionParameters peerConnectionParameters;
76 | private boolean iceConnected;
77 | private boolean isError;
78 | private long callStartedTimeMs = 0;
79 | private boolean micEnabled = true;
80 | private boolean isSwappedFeeds;
81 | // Control buttons for limited UI
82 | private ImageButton disconnectButton;
83 | private ImageButton cameraSwitchButton;
84 | private ImageButton toggleMuteButton;
85 |
86 | @Override
87 | public void onCreate(Bundle savedInstanceState) {
88 | super.onCreate(savedInstanceState);
89 |
90 | setContentView(R.layout.activity_call);
91 |
92 | iceConnected = false;
93 | signalingParameters = null;
94 |
95 | // Create UI controls.
96 | pipRenderer = findViewById(R.id.pip_video_view);
97 | fullscreenRenderer = findViewById(R.id.fullscreen_video_view);
98 |
99 | disconnectButton = findViewById(R.id.button_call_disconnect);
100 | cameraSwitchButton = findViewById(R.id.button_call_switch_camera);
101 | toggleMuteButton = findViewById(R.id.button_call_toggle_mic);
102 |
103 | // Add buttons click events.
104 | disconnectButton.setOnClickListener(new OnClickListener() {
105 | public void onClick(View v) {
106 | onCallHangUp();
107 | }
108 | });
109 |
110 | cameraSwitchButton.setOnClickListener(new View.OnClickListener() {
111 | public void onClick(View view) {
112 | onCameraSwitch();
113 | }
114 | });
115 |
116 | toggleMuteButton.setOnClickListener(new View.OnClickListener() {
117 | public void onClick(View view) {
118 | boolean enabled = onToggleMic();
119 | toggleMuteButton.setAlpha(enabled ? 1.0f : 0.3f);
120 | }
121 | });
122 |
123 | // Swap feeds on pip view click.
124 | pipRenderer.setOnClickListener(new View.OnClickListener() {
125 | @Override
126 | public void onClick(View view) {
127 | setSwappedFeeds(!isSwappedFeeds);
128 | }
129 | });
130 |
131 | remoteRenderers.add(remoteProxyRenderer);
132 |
133 | // Create peer connection client.
134 | peerConnectionClient = new PeerConnectionClient();
135 |
136 | // Create video renderers.
137 | pipRenderer.init(peerConnectionClient.getRenderContext(), null);
138 | pipRenderer.setScalingType(ScalingType.SCALE_ASPECT_FIT);
139 |
140 | fullscreenRenderer.init(peerConnectionClient.getRenderContext(), null);
141 | fullscreenRenderer.setScalingType(ScalingType.SCALE_ASPECT_FILL);
142 |
143 | pipRenderer.setZOrderMediaOverlay(true);
144 | pipRenderer.setEnableHardwareScaler(true /* enabled */);
145 | fullscreenRenderer.setEnableHardwareScaler(true /* enabled */);
146 | // Start with local feed in fullscreen and swap it to the pip when the call is connected.
147 | setSwappedFeeds(true /* isSwappedFeeds */);
148 |
149 | // Generate a random room ID with 7 uppercase letters and digits
150 | String randomRoomID = randomString(7, UPPER_ALPHA_DIGITS);
151 | // Show the random room ID so that another client can join from https://appr.tc
152 | TextView roomIdTextView = findViewById(R.id.roomID);
153 | roomIdTextView.setText(getString(R.string.room_id_caption) + randomRoomID);
154 | Log.d(TAG, getString(R.string.room_id_caption) + randomRoomID);
155 |
156 | // Connect video call to the random room
157 | connectVideoCall(randomRoomID);
158 | }
159 |
 160 |   // Create a random string from the given character set
 161 |   private String randomString(int length, String characterSet) {
 162 |     StringBuilder sb = new StringBuilder(length);
 163 |     SecureRandom random = new SecureRandom();
 164 |     for (int i = 0; i < length; i++) {
 165 |       sb.append(characterSet.charAt(random.nextInt(characterSet.length())));
 166 |     }
 167 |     return sb.toString();
 168 |   }
169 |
170 | // Join video call with randomly generated roomId
171 | private void connectVideoCall(String roomId) {
172 | Uri roomUri = Uri.parse(APPRTC_URL);
173 |
174 | int videoWidth = 0;
175 | int videoHeight = 0;
176 |
177 | peerConnectionParameters =
 178 |         new PeerConnectionClient.PeerConnectionParameters(true,   // videoCallEnabled
 179 |             false,            // loopback
 180 |             false,            // tracing
 181 |             videoWidth,
 182 |             videoHeight,
 183 |             0,                // videoFps (0 = default)
 184 |             Integer.parseInt(getString(R.string.pref_maxvideobitratevalue_default)),   // videoMaxBitrate
 185 |             getString(R.string.pref_videocodec_default),                               // videoCodec
 186 |             true,             // videoCodecHwAcceleration
 187 |             false,            // videoFlexfecEnabled
 188 |             Integer.parseInt(getString(R.string.pref_startaudiobitratevalue_default)), // audioStartBitrate
 189 |             getString(R.string.pref_audiocodec_default),                               // audioCodec
 190 |             false,            // noAudioProcessing
 191 |             false,            // aecDump
 192 |             false,            // useOpenSLES
 193 |             false,            // disableBuiltInAEC
 194 |             false,            // disableBuiltInAGC
 195 |             false,            // disableBuiltInNS
 196 |             false,            // enableLevelControl
 197 |             false,            // disableWebRtcAGCAndHPF
 198 |             null);            // dataChannelParameters
199 |
200 | // Create connection client. Use the standard WebSocketRTCClient.
201 | // DirectRTCClient could be used for point-to-point connection
202 | appRtcClient = new WebSocketRTCClient(this);
203 | // Create connection parameters.
204 | roomConnectionParameters =
205 | new AppRTCClient.RoomConnectionParameters(
206 | roomUri.toString(),
207 | roomId,
208 | false,
209 | null);
210 |
211 | peerConnectionClient.createPeerConnectionFactory(
212 | getApplicationContext(), peerConnectionParameters, CallActivity.this);
213 |
214 | startCall();
215 | }
216 |
217 | public void onCallHangUp() {
218 | disconnect();
219 | }
220 |
221 | public void onCameraSwitch() {
222 | if (peerConnectionClient != null) {
223 | peerConnectionClient.switchCamera();
224 | }
225 | }
226 |
227 | public boolean onToggleMic() {
228 | if (peerConnectionClient != null) {
229 | micEnabled = !micEnabled;
230 | peerConnectionClient.setAudioEnabled(micEnabled);
231 | }
232 | return micEnabled;
233 | }
234 |
235 | private void startCall() {
236 | if (appRtcClient == null) {
237 | Log.e(TAG, "AppRTC client is not allocated for a call.");
238 | return;
239 | }
240 | callStartedTimeMs = System.currentTimeMillis();
241 |
242 | // Start room connection.
243 | logAndToast(getString(R.string.connecting_to, roomConnectionParameters.roomUrl));
244 | appRtcClient.connectToRoom(roomConnectionParameters);
245 | }
246 |
247 | @UiThread
248 | private void callConnected() {
249 | final long delta = System.currentTimeMillis() - callStartedTimeMs;
250 | Log.i(TAG, "Call connected: delay=" + delta + "ms");
251 | if (peerConnectionClient == null || isError) {
252 | Log.w(TAG, "Call is connected in closed or error state");
253 | return;
254 | }
255 | // Enable statistics callback.
256 | peerConnectionClient.enableStatsEvents(true, STAT_CALLBACK_PERIOD);
257 | setSwappedFeeds(false /* isSwappedFeeds */);
258 | }
259 |
260 | // Disconnect from remote resources, dispose of local resources, and exit.
261 | private void disconnect() {
262 | activityRunning = false;
263 | remoteProxyRenderer.setTarget(null);
264 | localProxyVideoSink.setTarget(null);
265 | if (appRtcClient != null) {
266 | appRtcClient.disconnectFromRoom();
267 | appRtcClient = null;
268 | }
269 | if (pipRenderer != null) {
270 | pipRenderer.release();
271 | pipRenderer = null;
272 | }
273 | if (fullscreenRenderer != null) {
274 | fullscreenRenderer.release();
275 | fullscreenRenderer = null;
276 | }
277 | if (peerConnectionClient != null) {
278 | peerConnectionClient.close();
279 | peerConnectionClient = null;
280 | }
281 | if (iceConnected && !isError) {
282 | setResult(RESULT_OK);
283 | } else {
284 | setResult(RESULT_CANCELED);
285 | }
286 | finish();
287 | }
288 |
289 | private void disconnectWithErrorMessage(final String errorMessage) {
290 | if (!activityRunning) {
291 | Log.e(TAG, "Critical error: " + errorMessage);
292 | disconnect();
293 | } else {
294 | new AlertDialog.Builder(this)
295 | .setTitle(getText(R.string.channel_error_title))
296 | .setMessage(errorMessage)
297 | .setCancelable(false)
298 | .setNeutralButton(R.string.ok,
299 | new DialogInterface.OnClickListener() {
300 | @Override
301 | public void onClick(DialogInterface dialog, int id) {
302 | dialog.cancel();
303 | disconnect();
304 | }
305 | })
306 | .create()
307 | .show();
308 | }
309 | }
310 |
311 | // Log |msg| and Toast about it.
312 | private void logAndToast(String msg) {
313 | Log.d(TAG, msg);
314 | if (logToast != null) {
315 | logToast.cancel();
316 | }
317 | logToast = Toast.makeText(this, msg, Toast.LENGTH_SHORT);
318 | logToast.show();
319 | }
320 |
321 | private void reportError(final String description) {
322 | runOnUiThread(new Runnable() {
323 | @Override
324 | public void run() {
325 | if (!isError) {
326 | isError = true;
327 | disconnectWithErrorMessage(description);
328 | }
329 | }
330 | });
331 | }
332 |
333 | // Create VideoCapturer
334 | private VideoCapturer createVideoCapturer() {
335 | final VideoCapturer videoCapturer;
336 | Logging.d(TAG, "Creating capturer using camera2 API.");
337 | videoCapturer = createCameraCapturer(new Camera2Enumerator(this));
338 | if (videoCapturer == null) {
339 | reportError("Failed to open camera");
340 | return null;
341 | }
342 | return videoCapturer;
343 | }
344 |
345 | // Create VideoCapturer from camera
346 | private VideoCapturer createCameraCapturer(CameraEnumerator enumerator) {
347 | final String[] deviceNames = enumerator.getDeviceNames();
348 |
349 | // First, try to find front facing camera
350 | Logging.d(TAG, "Looking for front facing cameras.");
351 | for (String deviceName : deviceNames) {
352 | if (enumerator.isFrontFacing(deviceName)) {
353 | Logging.d(TAG, "Creating front facing camera capturer.");
354 | VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
355 |
356 | if (videoCapturer != null) {
357 | return videoCapturer;
358 | }
359 | }
360 | }
361 |
362 | // Front facing camera not found, try something else
363 | Logging.d(TAG, "Looking for other cameras.");
364 | for (String deviceName : deviceNames) {
365 | if (!enumerator.isFrontFacing(deviceName)) {
366 | Logging.d(TAG, "Creating other camera capturer.");
367 | VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
368 |
369 | if (videoCapturer != null) {
370 | return videoCapturer;
371 | }
372 | }
373 | }
374 |
375 | return null;
376 | }
377 |
378 | private void setSwappedFeeds(boolean isSwappedFeeds) {
379 | Logging.d(TAG, "setSwappedFeeds: " + isSwappedFeeds);
380 | this.isSwappedFeeds = isSwappedFeeds;
381 | localProxyVideoSink.setTarget(isSwappedFeeds ? fullscreenRenderer : pipRenderer);
382 | remoteProxyRenderer.setTarget(isSwappedFeeds ? pipRenderer : fullscreenRenderer);
383 | fullscreenRenderer.setMirror(isSwappedFeeds);
384 | pipRenderer.setMirror(!isSwappedFeeds);
385 | }
386 |
387 | // -----Implementation of AppRTCClient.AppRTCSignalingEvents ---------------
388 | // All callbacks are invoked from websocket signaling looper thread and
389 | // are routed to UI thread.
390 | private void onConnectedToRoomInternal(final AppRTCClient.SignalingParameters params) {
391 | final long delta = System.currentTimeMillis() - callStartedTimeMs;
392 |
393 | signalingParameters = params;
394 | logAndToast("Creating peer connection, delay=" + delta + "ms");
395 | VideoCapturer videoCapturer = null;
396 | if (peerConnectionParameters.videoCallEnabled) {
397 | videoCapturer = createVideoCapturer();
398 | }
399 | peerConnectionClient.createPeerConnection(
400 | localProxyVideoSink, remoteRenderers, videoCapturer, signalingParameters);
401 |
402 | if (signalingParameters.initiator) {
403 | logAndToast("Creating OFFER...");
404 | // Create offer. Offer SDP will be sent to answering client in
405 | // PeerConnectionEvents.onLocalDescription event.
406 | peerConnectionClient.createOffer();
407 | } else {
408 | if (params.offerSdp != null) {
409 | peerConnectionClient.setRemoteDescription(params.offerSdp);
410 | logAndToast("Creating ANSWER...");
411 | // Create answer. Answer SDP will be sent to offering client in
412 | // PeerConnectionEvents.onLocalDescription event.
413 | peerConnectionClient.createAnswer();
414 | }
415 | if (params.iceCandidates != null) {
416 | // Add remote ICE candidates from room.
417 | for (IceCandidate iceCandidate : params.iceCandidates) {
418 | peerConnectionClient.addRemoteIceCandidate(iceCandidate);
419 | }
420 | }
421 | }
422 | }
423 |
424 | @Override
425 | public void onConnectedToRoom(final AppRTCClient.SignalingParameters params) {
426 | runOnUiThread(new Runnable() {
427 | @Override
428 | public void run() {
429 | onConnectedToRoomInternal(params);
430 | }
431 | });
432 | }
433 |
434 | @Override
435 | public void onRemoteDescription(final SessionDescription sdp) {
436 | final long delta = System.currentTimeMillis() - callStartedTimeMs;
437 | runOnUiThread(new Runnable() {
438 | @Override
439 | public void run() {
440 | if (peerConnectionClient == null) {
 441 |           Log.e(TAG, "Received remote SDP for non-initialized peer connection.");
442 | return;
443 | }
444 | logAndToast("Received remote " + sdp.type + ", delay=" + delta + "ms");
445 | peerConnectionClient.setRemoteDescription(sdp);
446 | if (!signalingParameters.initiator) {
447 | logAndToast("Creating ANSWER...");
448 | // Create answer. Answer SDP will be sent to offering client in
449 | // PeerConnectionEvents.onLocalDescription event.
450 | peerConnectionClient.createAnswer();
451 | }
452 | }
453 | });
454 | }
455 |
456 | @Override
457 | public void onRemoteIceCandidate(final IceCandidate candidate) {
458 | runOnUiThread(new Runnable() {
459 | @Override
460 | public void run() {
461 | if (peerConnectionClient == null) {
462 | Log.e(TAG, "Received ICE candidate for a non-initialized peer connection.");
463 | return;
464 | }
465 | peerConnectionClient.addRemoteIceCandidate(candidate);
466 | }
467 | });
468 | }
469 |
470 | @Override
471 | public void onRemoteIceCandidatesRemoved(final IceCandidate[] candidates) {
472 | runOnUiThread(new Runnable() {
473 | @Override
474 | public void run() {
475 | if (peerConnectionClient == null) {
476 | Log.e(TAG, "Received ICE candidate removals for a non-initialized peer connection.");
477 | return;
478 | }
479 | peerConnectionClient.removeRemoteIceCandidates(candidates);
480 | }
481 | });
482 | }
483 |
484 | @Override
485 | public void onChannelClose() {
486 | runOnUiThread(new Runnable() {
487 | @Override
488 | public void run() {
489 | logAndToast("Remote end hung up; dropping PeerConnection");
490 | disconnect();
491 | }
492 | });
493 | }
494 |
495 | @Override
496 | public void onChannelError(final String description) {
497 | reportError(description);
498 | }
499 |
500 | // -----Implementation of PeerConnectionClient.PeerConnectionEvents.---------
501 | // Send local peer connection SDP and ICE candidates to remote party.
502 | // All callbacks are invoked from peer connection client looper thread and
503 | // are routed to UI thread.
504 | @Override
505 | public void onLocalDescription(final SessionDescription sdp) {
506 | final long delta = System.currentTimeMillis() - callStartedTimeMs;
507 | runOnUiThread(new Runnable() {
508 | @Override
509 | public void run() {
510 | if (appRtcClient != null) {
511 | logAndToast("Sending " + sdp.type + ", delay=" + delta + "ms");
512 | if (signalingParameters.initiator) {
513 | appRtcClient.sendOfferSdp(sdp);
514 | } else {
515 | appRtcClient.sendAnswerSdp(sdp);
516 | }
517 | }
518 | if (peerConnectionParameters.videoMaxBitrate > 0) {
519 | Log.d(TAG, "Set video maximum bitrate: " + peerConnectionParameters.videoMaxBitrate);
520 | peerConnectionClient.setVideoMaxBitrate(peerConnectionParameters.videoMaxBitrate);
521 | }
522 | }
523 | });
524 | }
525 |
526 | @Override
527 | public void onIceCandidate(final IceCandidate candidate) {
528 | runOnUiThread(new Runnable() {
529 | @Override
530 | public void run() {
531 | if (appRtcClient != null) {
532 | appRtcClient.sendLocalIceCandidate(candidate);
533 | }
534 | }
535 | });
536 | }
537 |
538 | @Override
539 | public void onIceCandidatesRemoved(final IceCandidate[] candidates) {
540 | runOnUiThread(new Runnable() {
541 | @Override
542 | public void run() {
543 | if (appRtcClient != null) {
544 | appRtcClient.sendLocalIceCandidateRemovals(candidates);
545 | }
546 | }
547 | });
548 | }
549 |
550 | @Override
551 | public void onIceConnected() {
552 | final long delta = System.currentTimeMillis() - callStartedTimeMs;
553 | runOnUiThread(new Runnable() {
554 | @Override
555 | public void run() {
556 | logAndToast("ICE connected, delay=" + delta + "ms");
557 | iceConnected = true;
558 | callConnected();
559 | }
560 | });
561 | }
562 |
563 | @Override
564 | public void onIceDisconnected() {
565 | runOnUiThread(new Runnable() {
566 | @Override
567 | public void run() {
568 | logAndToast("ICE disconnected");
569 | iceConnected = false;
570 | disconnect();
571 | }
572 | });
573 | }
574 |
575 | @Override
576 | public void onPeerConnectionClosed() {
577 | }
578 |
579 | @Override
580 | public void onPeerConnectionStatsReady(final StatsReport[] reports) {
581 | }
582 |
583 | @Override
584 | public void onPeerConnectionError(final String description) {
585 | reportError(description);
586 | }
587 |
588 | // Activity interfaces
589 | @Override
590 | public void onStop() {
591 | super.onStop();
592 | activityRunning = false;
593 | if (peerConnectionClient != null) {
594 | peerConnectionClient.stopVideoSource();
595 | }
596 | }
597 |
598 | @Override
599 | public void onStart() {
600 | super.onStart();
601 | activityRunning = true;
602 | // Video is not paused for screencapture. See onPause.
603 | if (peerConnectionClient != null) {
604 | peerConnectionClient.startVideoSource();
605 | }
606 | }
607 |
608 | @Override
609 | protected void onDestroy() {
610 | Thread.setDefaultUncaughtExceptionHandler(null);
611 | disconnect();
612 | if (logToast != null) {
613 | logToast.cancel();
614 | }
615 | activityRunning = false;
616 | super.onDestroy();
617 | }
618 |
619 | private static class ProxyRenderer implements VideoRenderer.Callbacks {
620 | private VideoRenderer.Callbacks target;
621 |
622 | @Override
623 | synchronized public void renderFrame(VideoRenderer.I420Frame frame) {
624 | if (target == null) {
625 | Logging.d(TAG, "Dropping frame in proxy because target is null.");
626 | VideoRenderer.renderFrameDone(frame);
627 | return;
628 | }
629 |
630 | target.renderFrame(frame);
631 | }
632 |
633 | synchronized public void setTarget(VideoRenderer.Callbacks target) {
634 | this.target = target;
635 | }
636 | }
637 |
638 | private static class ProxyVideoSink implements VideoSink {
639 | private VideoSink target;
640 |
641 | @Override
642 | synchronized public void onFrame(VideoFrame frame) {
643 | if (target == null) {
644 | Logging.d(TAG, "Dropping frame in proxy because target is null.");
645 | return;
646 | }
647 |
648 | target.onFrame(frame);
649 | }
650 |
651 | synchronized public void setTarget(VideoSink target) {
652 | this.target = target;
653 | }
654 | }
655 |
656 |
657 | }
658 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/androidthings/videortc/MainActivity.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018, The Android Open Source Project
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.example.androidthings.videortc;
18 |
19 | import android.app.Activity;
20 | import android.os.Bundle;
21 | import android.content.Intent;
22 | import android.util.Log;
23 | import android.widget.Button;
24 | import android.view.View;
25 | import android.view.View.OnClickListener;
26 |
27 | /**
 28 |  * Entry-point activity with a single connect button (R.id.connectBtn) that launches {@link CallActivity} to start the video call.
29 | */
30 | public class MainActivity extends Activity {
31 | private static final String TAG = MainActivity.class.getSimpleName();
32 |
33 | private Button button;
34 |
35 | @Override
36 | protected void onCreate(Bundle savedInstanceState) {
37 | super.onCreate(savedInstanceState);
38 | Log.d(TAG, "onCreate");
39 | setContentView(R.layout.main_activity);
40 |
41 | addListenerOnButton();
42 | }
43 |
44 | public void addListenerOnButton() {
45 |
46 | button = findViewById(R.id.connectBtn);
47 |
48 | button.setOnClickListener(new OnClickListener() {
49 |
50 | @Override
51 | public void onClick(View arg0) {
52 | Intent myIntent = new Intent(MainActivity.this, CallActivity.class);
53 | startActivity(myIntent);
54 | }
55 |
56 | });
57 |
58 | }
59 | }
--------------------------------------------------------------------------------
/app/src/main/java/org/appspot/apprtc/AppRTCClient.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2013 The WebRTC Project Authors. All rights reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | package org.appspot.apprtc;
12 |
13 | import org.webrtc.IceCandidate;
14 | import org.webrtc.PeerConnection;
15 | import org.webrtc.SessionDescription;
16 |
17 | import java.util.List;
18 |
19 | /**
20 | * AppRTCClient is the interface representing an AppRTC client.
21 | */
22 | public interface AppRTCClient {
23 | /**
24 | * Struct holding the connection parameters of an AppRTC room.
25 | */
26 | class RoomConnectionParameters {
27 | public final String roomUrl;
28 | public final String roomId;
29 | public final boolean loopback;
30 | public final String urlParameters;
31 | public RoomConnectionParameters(
32 | String roomUrl, String roomId, boolean loopback, String urlParameters) {
33 | this.roomUrl = roomUrl;
34 | this.roomId = roomId;
35 | this.loopback = loopback;
36 | this.urlParameters = urlParameters;
37 | }
38 | public RoomConnectionParameters(String roomUrl, String roomId, boolean loopback) {
39 | this(roomUrl, roomId, loopback, null /* urlParameters */);
40 | }
41 | }
42 |
43 | /**
44 | * Asynchronously connect to an AppRTC room URL using supplied connection
45 | * parameters. Once connection is established onConnectedToRoom()
46 | * callback with room parameters is invoked.
47 | */
48 | void connectToRoom(RoomConnectionParameters connectionParameters);
49 |
50 | /**
51 | * Send offer SDP to the other participant.
52 | */
53 | void sendOfferSdp(final SessionDescription sdp);
54 |
55 | /**
56 | * Send answer SDP to the other participant.
57 | */
58 | void sendAnswerSdp(final SessionDescription sdp);
59 |
60 | /**
61 | * Send Ice candidate to the other participant.
62 | */
63 | void sendLocalIceCandidate(final IceCandidate candidate);
64 |
65 | /**
66 | * Send removed ICE candidates to the other participant.
67 | */
68 | void sendLocalIceCandidateRemovals(final IceCandidate[] candidates);
69 |
70 | /**
71 | * Disconnect from room.
72 | */
73 | void disconnectFromRoom();
74 |
75 | /**
76 | * Struct holding the signaling parameters of an AppRTC room.
77 | */
78 | class SignalingParameters {
 79 |     public final List<PeerConnection.IceServer> iceServers;
80 | public final boolean initiator;
81 | public final String clientId;
82 | public final String wssUrl;
83 | public final String wssPostUrl;
84 | public final SessionDescription offerSdp;
 85 |     public final List<IceCandidate> iceCandidates;
86 |
 87 |     public SignalingParameters(List<PeerConnection.IceServer> iceServers, boolean initiator,
 88 |         String clientId, String wssUrl, String wssPostUrl, SessionDescription offerSdp,
 89 |         List<IceCandidate> iceCandidates) {
90 | this.iceServers = iceServers;
91 | this.initiator = initiator;
92 | this.clientId = clientId;
93 | this.wssUrl = wssUrl;
94 | this.wssPostUrl = wssPostUrl;
95 | this.offerSdp = offerSdp;
96 | this.iceCandidates = iceCandidates;
97 | }
98 | }
99 |
100 | /**
101 | * Callback interface for messages delivered on signaling channel.
102 | *
 103 |    * <p>Methods are guaranteed to be invoked on the UI thread of |activity|.
104 | */
105 | interface SignalingEvents {
106 | /**
107 | * Callback fired once the room's signaling parameters
108 | * SignalingParameters are extracted.
109 | */
110 | void onConnectedToRoom(final SignalingParameters params);
111 |
112 | /**
113 | * Callback fired once remote SDP is received.
114 | */
115 | void onRemoteDescription(final SessionDescription sdp);
116 |
117 | /**
118 | * Callback fired once remote Ice candidate is received.
119 | */
120 | void onRemoteIceCandidate(final IceCandidate candidate);
121 |
122 | /**
123 | * Callback fired once remote Ice candidate removals are received.
124 | */
125 | void onRemoteIceCandidatesRemoved(final IceCandidate[] candidates);
126 |
127 | /**
128 | * Callback fired once channel is closed.
129 | */
130 | void onChannelClose();
131 |
132 | /**
133 | * Callback fired once channel error happened.
134 | */
135 | void onChannelError(final String description);
136 | }
137 | }
138 |
--------------------------------------------------------------------------------
/app/src/main/java/org/appspot/apprtc/PeerConnectionClient.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014 The WebRTC Project Authors. All rights reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | package org.appspot.apprtc;
12 |
13 | import android.content.Context;
14 | import android.os.Environment;
15 | import android.os.ParcelFileDescriptor;
16 | import android.util.Log;
17 | import java.io.File;
18 | import java.io.IOException;
19 | import java.nio.ByteBuffer;
20 | import java.nio.charset.Charset;
21 | import java.util.ArrayList;
22 | import java.util.Arrays;
23 | import java.util.Collections;
24 | import java.util.Iterator;
25 | import java.util.List;
26 | import java.util.Timer;
27 | import java.util.TimerTask;
28 | import java.util.concurrent.ExecutorService;
29 | import java.util.concurrent.Executors;
30 | import java.util.regex.Matcher;
31 | import java.util.regex.Pattern;
32 | import org.appspot.apprtc.AppRTCClient.SignalingParameters;
33 | import org.webrtc.AudioSource;
34 | import org.webrtc.AudioTrack;
35 | import org.webrtc.CameraVideoCapturer;
36 | import org.webrtc.DataChannel;
37 | import org.webrtc.DefaultVideoDecoderFactory;
38 | import org.webrtc.DefaultVideoEncoderFactory;
39 | import org.webrtc.EglBase;
40 | import org.webrtc.IceCandidate;
41 | import org.webrtc.Logging;
42 | import org.webrtc.MediaConstraints;
43 | import org.webrtc.MediaStream;
44 | import org.webrtc.PeerConnection;
45 | import org.webrtc.PeerConnection.IceConnectionState;
46 | import org.webrtc.PeerConnectionFactory;
47 | import org.webrtc.RtpParameters;
48 | import org.webrtc.RtpReceiver;
49 | import org.webrtc.RtpSender;
50 | import org.webrtc.SdpObserver;
51 | import org.webrtc.SessionDescription;
52 | import org.webrtc.SoftwareVideoDecoderFactory;
53 | import org.webrtc.SoftwareVideoEncoderFactory;
54 | import org.webrtc.StatsObserver;
55 | import org.webrtc.StatsReport;
56 | import org.webrtc.VideoCapturer;
57 | import org.webrtc.VideoDecoderFactory;
58 | import org.webrtc.VideoEncoderFactory;
59 | import org.webrtc.VideoRenderer;
60 | import org.webrtc.VideoSink;
61 | import org.webrtc.VideoSource;
62 | import org.webrtc.VideoTrack;
63 | import org.webrtc.voiceengine.WebRtcAudioManager;
64 | import org.webrtc.voiceengine.WebRtcAudioRecord;
65 | import org.webrtc.voiceengine.WebRtcAudioRecord.AudioRecordStartErrorCode;
66 | import org.webrtc.voiceengine.WebRtcAudioRecord.WebRtcAudioRecordErrorCallback;
67 | import org.webrtc.voiceengine.WebRtcAudioTrack;
68 | import org.webrtc.voiceengine.WebRtcAudioTrack.AudioTrackStartErrorCode;
69 | import org.webrtc.voiceengine.WebRtcAudioUtils;
70 |
71 | /**
72 | * Peer connection client implementation.
73 | *
 74 |  * <p>All public methods are routed to local looper thread.
75 | * All PeerConnectionEvents callbacks are invoked from the same looper thread.
76 | * This class is a singleton.
77 | */
78 | public class PeerConnectionClient {
79 | public static final String VIDEO_TRACK_ID = "ARDAMSv0";
80 | public static final String AUDIO_TRACK_ID = "ARDAMSa0";
81 | public static final String VIDEO_TRACK_TYPE = "video";
82 | private static final String TAG = "PCRTCClient";
83 | private static final String VIDEO_CODEC_VP8 = "VP8";
84 | private static final String VIDEO_CODEC_VP9 = "VP9";
85 | private static final String VIDEO_CODEC_H264 = "H264";
86 | private static final String VIDEO_CODEC_H264_BASELINE = "H264 Baseline";
87 | private static final String VIDEO_CODEC_H264_HIGH = "H264 High";
88 | private static final String AUDIO_CODEC_OPUS = "opus";
89 | private static final String AUDIO_CODEC_ISAC = "ISAC";
90 | private static final String VIDEO_CODEC_PARAM_START_BITRATE = "x-google-start-bitrate";
91 | private static final String VIDEO_FLEXFEC_FIELDTRIAL =
92 | "WebRTC-FlexFEC-03-Advertised/Enabled/WebRTC-FlexFEC-03/Enabled/";
93 | private static final String VIDEO_VP8_INTEL_HW_ENCODER_FIELDTRIAL = "WebRTC-IntelVP8/Enabled/";
94 | private static final String VIDEO_H264_HIGH_PROFILE_FIELDTRIAL =
95 | "WebRTC-H264HighProfile/Enabled/";
96 | private static final String DISABLE_WEBRTC_AGC_FIELDTRIAL =
97 | "WebRTC-Audio-MinimizeResamplingOnMobile/Enabled/";
98 | private static final String VIDEO_FRAME_EMIT_FIELDTRIAL =
99 | PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL + "/" + PeerConnectionFactory.TRIAL_ENABLED
100 | + "/";
101 | private static final String AUDIO_CODEC_PARAM_BITRATE = "maxaveragebitrate";
102 | private static final String AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation";
103 | private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT = "googAutoGainControl";
104 | private static final String AUDIO_HIGH_PASS_FILTER_CONSTRAINT = "googHighpassFilter";
105 | private static final String AUDIO_NOISE_SUPPRESSION_CONSTRAINT = "googNoiseSuppression";
106 | private static final String AUDIO_LEVEL_CONTROL_CONSTRAINT = "levelControl";
107 | private static final String DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT = "DtlsSrtpKeyAgreement";
108 | private static final int HD_VIDEO_WIDTH = 1280;
109 | private static final int HD_VIDEO_HEIGHT = 720;
110 | private static final int BPS_IN_KBPS = 1000;
111 |
112 | // Executor thread is started once in private ctor and is used for all
113 | // peer connection API calls to ensure new peer connection factory is
114 | // created on the same thread as previously destroyed factory.
115 | private static final ExecutorService executor = Executors.newSingleThreadExecutor();
116 |
117 | private final PCObserver pcObserver = new PCObserver();
118 | private final SDPObserver sdpObserver = new SDPObserver();
119 |
120 | private final EglBase rootEglBase;
121 | private PeerConnectionFactory factory;
122 | private PeerConnection peerConnection;
123 | PeerConnectionFactory.Options options = null;
124 | private AudioSource audioSource;
125 | private VideoSource videoSource;
126 | private boolean videoCallEnabled;
127 | private boolean preferIsac;
128 | private String preferredVideoCodec;
129 | private boolean videoCapturerStopped;
130 | private boolean isError;
131 | private Timer statsTimer;
132 | private VideoSink localRender;
 133 |   private List<VideoRenderer.Callbacks> remoteRenders;
134 | private SignalingParameters signalingParameters;
135 | private int videoWidth;
136 | private int videoHeight;
137 | private int videoFps;
138 | private MediaConstraints audioConstraints;
139 | private MediaConstraints sdpMediaConstraints;
140 | private PeerConnectionParameters peerConnectionParameters;
141 | // Queued remote ICE candidates are consumed only after both local and
142 | // remote descriptions are set. Similarly local ICE candidates are sent to
143 | // remote peer after both local and remote description are set.
 144 |   private List<IceCandidate> queuedRemoteCandidates;
145 | private PeerConnectionEvents events;
146 | private boolean isInitiator;
147 | private SessionDescription localSdp; // either offer or answer SDP
148 | private MediaStream mediaStream;
149 | private VideoCapturer videoCapturer;
150 | // enableVideo is set to true if video should be rendered and sent.
151 | private boolean renderVideo;
152 | private VideoTrack localVideoTrack;
153 | private VideoTrack remoteVideoTrack;
154 | private RtpSender localVideoSender;
155 | // enableAudio is set to true if audio should be sent.
156 | private boolean enableAudio;
157 | private AudioTrack localAudioTrack;
158 | private DataChannel dataChannel;
159 | private boolean dataChannelEnabled;
160 |
161 | /**
162 |    * Data channel parameters.
163 | */
164 | public static class DataChannelParameters {
165 | public final boolean ordered;
166 | public final int maxRetransmitTimeMs;
167 | public final int maxRetransmits;
168 | public final String protocol;
169 | public final boolean negotiated;
170 | public final int id;
171 |
172 | public DataChannelParameters(boolean ordered, int maxRetransmitTimeMs, int maxRetransmits,
173 | String protocol, boolean negotiated, int id) {
174 | this.ordered = ordered;
175 | this.maxRetransmitTimeMs = maxRetransmitTimeMs;
176 | this.maxRetransmits = maxRetransmits;
177 | this.protocol = protocol;
178 | this.negotiated = negotiated;
179 | this.id = id;
180 | }
181 | }
182 |
183 | /**
184 | * Peer connection parameters.
185 | */
186 | public static class PeerConnectionParameters {
187 | public final boolean videoCallEnabled;
188 | public final boolean loopback;
189 | public final boolean tracing;
190 | public final int videoWidth;
191 | public final int videoHeight;
192 | public final int videoFps;
193 | public final int videoMaxBitrate;
194 | public final String videoCodec;
195 | public final boolean videoCodecHwAcceleration;
196 | public final boolean videoFlexfecEnabled;
197 | public final int audioStartBitrate;
198 | public final String audioCodec;
199 | public final boolean noAudioProcessing;
200 | public final boolean aecDump;
201 | public final boolean useOpenSLES;
202 | public final boolean disableBuiltInAEC;
203 | public final boolean disableBuiltInAGC;
204 | public final boolean disableBuiltInNS;
205 | public final boolean enableLevelControl;
206 | public final boolean disableWebRtcAGCAndHPF;
207 | private final DataChannelParameters dataChannelParameters;
208 |
209 | public PeerConnectionParameters(boolean videoCallEnabled, boolean loopback, boolean tracing,
210 | int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate, String videoCodec,
211 | boolean videoCodecHwAcceleration, boolean videoFlexfecEnabled, int audioStartBitrate,
212 | String audioCodec, boolean noAudioProcessing, boolean aecDump, boolean useOpenSLES,
213 | boolean disableBuiltInAEC, boolean disableBuiltInAGC, boolean disableBuiltInNS,
214 | boolean enableLevelControl, boolean disableWebRtcAGCAndHPF) {
215 | this(videoCallEnabled, loopback, tracing, videoWidth, videoHeight, videoFps, videoMaxBitrate,
216 | videoCodec, videoCodecHwAcceleration, videoFlexfecEnabled, audioStartBitrate, audioCodec,
217 | noAudioProcessing, aecDump, useOpenSLES, disableBuiltInAEC, disableBuiltInAGC,
218 | disableBuiltInNS, enableLevelControl, disableWebRtcAGCAndHPF, null);
219 | }
220 |
221 | public PeerConnectionParameters(boolean videoCallEnabled, boolean loopback, boolean tracing,
222 | int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate, String videoCodec,
223 | boolean videoCodecHwAcceleration, boolean videoFlexfecEnabled, int audioStartBitrate,
224 | String audioCodec, boolean noAudioProcessing, boolean aecDump, boolean useOpenSLES,
225 | boolean disableBuiltInAEC, boolean disableBuiltInAGC, boolean disableBuiltInNS,
226 | boolean enableLevelControl, boolean disableWebRtcAGCAndHPF,
227 | DataChannelParameters dataChannelParameters) {
228 | this.videoCallEnabled = videoCallEnabled;
229 | this.loopback = loopback;
230 | this.tracing = tracing;
231 | this.videoWidth = videoWidth;
232 | this.videoHeight = videoHeight;
233 | this.videoFps = videoFps;
234 | this.videoMaxBitrate = videoMaxBitrate;
235 | this.videoCodec = videoCodec;
236 | this.videoFlexfecEnabled = videoFlexfecEnabled;
237 | this.videoCodecHwAcceleration = videoCodecHwAcceleration;
238 | this.audioStartBitrate = audioStartBitrate;
239 | this.audioCodec = audioCodec;
240 | this.noAudioProcessing = noAudioProcessing;
241 | this.aecDump = aecDump;
242 | this.useOpenSLES = useOpenSLES;
243 | this.disableBuiltInAEC = disableBuiltInAEC;
244 | this.disableBuiltInAGC = disableBuiltInAGC;
245 | this.disableBuiltInNS = disableBuiltInNS;
246 | this.enableLevelControl = enableLevelControl;
247 | this.disableWebRtcAGCAndHPF = disableWebRtcAGCAndHPF;
248 | this.dataChannelParameters = dataChannelParameters;
249 | }
250 | }
251 |
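// Illustrative only (not part of the original sample): one way a caller might assemble
// these parameter objects for a 720p VP8 call with an ordered, reliable data channel.
// All literal values and codec names below are assumptions, not defaults of these classes:
//   DataChannelParameters dcParams = new DataChannelParameters(true /* ordered */,
//       -1 /* maxRetransmitTimeMs */, -1 /* maxRetransmits */, "" /* protocol */,
//       false /* negotiated */, -1 /* id */);
//   PeerConnectionParameters pcParams = new PeerConnectionParameters(
//       true /* videoCallEnabled */, false /* loopback */, false /* tracing */,
//       1280, 720, 30, 1700 /* videoMaxBitrate */, "VP8", true /* hwAcceleration */,
//       false /* flexfec */, 32 /* audioStartBitrate */, "OPUS", false, false, false,
//       false, false, false, false, false, dcParams);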
252 | /**
253 | * Peer connection events.
254 | */
255 | public interface PeerConnectionEvents {
256 | /**
257 | * Callback fired once local SDP is created and set.
258 | */
259 | void onLocalDescription(final SessionDescription sdp);
260 |
261 | /**
262 | * Callback fired once local Ice candidate is generated.
263 | */
264 | void onIceCandidate(final IceCandidate candidate);
265 |
266 | /**
267 | * Callback fired once local ICE candidates are removed.
268 | */
269 | void onIceCandidatesRemoved(final IceCandidate[] candidates);
270 |
271 | /**
272 | * Callback fired once connection is established (IceConnectionState is
273 | * CONNECTED).
274 | */
275 | void onIceConnected();
276 |
277 | /**
278 | * Callback fired once connection is closed (IceConnectionState is
279 | * DISCONNECTED).
280 | */
281 | void onIceDisconnected();
282 |
283 | /**
284 | * Callback fired once peer connection is closed.
285 | */
286 | void onPeerConnectionClosed();
287 |
288 | /**
289 | * Callback fired once peer connection statistics is ready.
290 | */
291 | void onPeerConnectionStatsReady(final StatsReport[] reports);
292 |
293 | /**
294 | * Callback fired once peer connection error happened.
295 | */
296 | void onPeerConnectionError(final String description);
297 | }
298 |
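// A minimal lifecycle sketch (illustrative; "activity", "events", "localSink",
// "remoteRenderer", "capturer" and "signalingParams" are assumed to be supplied by the caller):
//   PeerConnectionClient client = new PeerConnectionClient();
//   client.createPeerConnectionFactory(activity, pcParams, events);
//   client.createPeerConnection(localSink, remoteRenderer, capturer, signalingParams);
//   client.createOffer();   // initiator side; the answering side calls createAnswer()
//   // ... exchange SDP and ICE candidates via the signaling channel, then eventually:
//   client.close();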
299 | public PeerConnectionClient() {
300 | rootEglBase = EglBase.create();
301 | }
302 |
303 | public void setPeerConnectionFactoryOptions(PeerConnectionFactory.Options options) {
304 | this.options = options;
305 | }
306 |
307 | public void createPeerConnectionFactory(final Context context,
308 | final PeerConnectionParameters peerConnectionParameters, final PeerConnectionEvents events) {
309 | this.peerConnectionParameters = peerConnectionParameters;
310 | this.events = events;
311 | videoCallEnabled = peerConnectionParameters.videoCallEnabled;
312 | dataChannelEnabled = peerConnectionParameters.dataChannelParameters != null;
313 | // Reset variables to initial states.
314 | factory = null;
315 | peerConnection = null;
316 | preferIsac = false;
317 | videoCapturerStopped = false;
318 | isError = false;
319 | queuedRemoteCandidates = null;
320 | localSdp = null; // either offer or answer SDP
321 | mediaStream = null;
322 | videoCapturer = null;
323 | renderVideo = true;
324 | localVideoTrack = null;
325 | remoteVideoTrack = null;
326 | localVideoSender = null;
327 | enableAudio = true;
328 | localAudioTrack = null;
329 | statsTimer = new Timer();
330 |
331 | executor.execute(new Runnable() {
332 | @Override
333 | public void run() {
334 | createPeerConnectionFactoryInternal(context);
335 | }
336 | });
337 | }
338 |
339 | public void createPeerConnection(final VideoSink localRender,
340 | final VideoRenderer.Callbacks remoteRender, final VideoCapturer videoCapturer,
341 | final SignalingParameters signalingParameters) {
342 | createPeerConnection(
343 | localRender, Collections.singletonList(remoteRender), videoCapturer, signalingParameters);
344 | }
345 |
346 | public void createPeerConnection(final VideoSink localRender,
347 |       final List<VideoRenderer.Callbacks> remoteRenders, final VideoCapturer videoCapturer,
348 | final SignalingParameters signalingParameters) {
349 | if (peerConnectionParameters == null) {
350 | Log.e(TAG, "Creating peer connection without initializing factory.");
351 | return;
352 | }
353 | this.localRender = localRender;
354 | this.remoteRenders = remoteRenders;
355 | this.videoCapturer = videoCapturer;
356 | this.signalingParameters = signalingParameters;
357 | executor.execute(new Runnable() {
358 | @Override
359 | public void run() {
360 | try {
361 | createMediaConstraintsInternal();
362 | createPeerConnectionInternal();
363 | } catch (Exception e) {
364 | reportError("Failed to create peer connection: " + e.getMessage());
365 | throw e;
366 | }
367 | }
368 | });
369 | }
370 |
371 | public void close() {
372 | executor.execute(new Runnable() {
373 | @Override
374 | public void run() {
375 | closeInternal();
376 | }
377 | });
378 | }
379 |
380 | public boolean isVideoCallEnabled() {
381 | return videoCallEnabled;
382 | }
383 |
384 | private void createPeerConnectionFactoryInternal(Context context) {
385 | isError = false;
386 |
387 | // Initialize field trials.
388 | String fieldTrials = "";
389 | if (peerConnectionParameters.videoFlexfecEnabled) {
390 | fieldTrials += VIDEO_FLEXFEC_FIELDTRIAL;
391 | Log.d(TAG, "Enable FlexFEC field trial.");
392 | }
393 | fieldTrials += VIDEO_VP8_INTEL_HW_ENCODER_FIELDTRIAL;
394 | if (peerConnectionParameters.disableWebRtcAGCAndHPF) {
395 | fieldTrials += DISABLE_WEBRTC_AGC_FIELDTRIAL;
396 | Log.d(TAG, "Disable WebRTC AGC field trial.");
397 | }
398 | fieldTrials += VIDEO_FRAME_EMIT_FIELDTRIAL;
399 |
400 | // Check preferred video codec.
401 | preferredVideoCodec = VIDEO_CODEC_VP8;
402 | if (videoCallEnabled && peerConnectionParameters.videoCodec != null) {
403 | switch (peerConnectionParameters.videoCodec) {
404 | case VIDEO_CODEC_VP8:
405 | preferredVideoCodec = VIDEO_CODEC_VP8;
406 | break;
407 | case VIDEO_CODEC_VP9:
408 | preferredVideoCodec = VIDEO_CODEC_VP9;
409 | break;
410 | case VIDEO_CODEC_H264_BASELINE:
411 | preferredVideoCodec = VIDEO_CODEC_H264;
412 | break;
413 | case VIDEO_CODEC_H264_HIGH:
414 | // TODO(magjed): Strip High from SDP when selecting Baseline instead of using field trial.
415 | fieldTrials += VIDEO_H264_HIGH_PROFILE_FIELDTRIAL;
416 | preferredVideoCodec = VIDEO_CODEC_H264;
417 | break;
418 | default:
419 | preferredVideoCodec = VIDEO_CODEC_VP8;
420 | }
421 | }
422 | Log.d(TAG, "Preferred video codec: " + preferredVideoCodec);
423 |
424 | // Initialize WebRTC
425 | Log.d(TAG,
426 | "Initialize WebRTC. Field trials: " + fieldTrials + " Enable video HW acceleration: "
427 | + peerConnectionParameters.videoCodecHwAcceleration);
428 | PeerConnectionFactory.initialize(
429 | PeerConnectionFactory.InitializationOptions.builder(context)
430 | .setFieldTrials(fieldTrials)
431 | .setEnableVideoHwAcceleration(peerConnectionParameters.videoCodecHwAcceleration)
432 | .setEnableInternalTracer(true)
433 | .createInitializationOptions());
434 | if (peerConnectionParameters.tracing) {
435 | PeerConnectionFactory.startInternalTracingCapture(
436 | Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator
437 | + "webrtc-trace.txt");
438 | }
439 |
440 | // Check if ISAC is used by default.
441 | preferIsac = peerConnectionParameters.audioCodec != null
442 | && peerConnectionParameters.audioCodec.equals(AUDIO_CODEC_ISAC);
443 |
444 | // Enable/disable OpenSL ES playback.
445 | if (!peerConnectionParameters.useOpenSLES) {
446 | Log.d(TAG, "Disable OpenSL ES audio even if device supports it");
447 | WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true /* enable */);
448 | } else {
449 | Log.d(TAG, "Allow OpenSL ES audio if device supports it");
450 | WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(false);
451 | }
452 |
453 | if (peerConnectionParameters.disableBuiltInAEC) {
454 | Log.d(TAG, "Disable built-in AEC even if device supports it");
455 | WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(true);
456 | } else {
457 | Log.d(TAG, "Enable built-in AEC if device supports it");
458 | WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(false);
459 | }
460 |
461 | if (peerConnectionParameters.disableBuiltInAGC) {
462 | Log.d(TAG, "Disable built-in AGC even if device supports it");
463 | WebRtcAudioUtils.setWebRtcBasedAutomaticGainControl(true);
464 | } else {
465 | Log.d(TAG, "Enable built-in AGC if device supports it");
466 | WebRtcAudioUtils.setWebRtcBasedAutomaticGainControl(false);
467 | }
468 |
469 | if (peerConnectionParameters.disableBuiltInNS) {
470 | Log.d(TAG, "Disable built-in NS even if device supports it");
471 | WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(true);
472 | } else {
473 | Log.d(TAG, "Enable built-in NS if device supports it");
474 | WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(false);
475 | }
476 |
477 | // Set audio record error callbacks.
478 | WebRtcAudioRecord.setErrorCallback(new WebRtcAudioRecordErrorCallback() {
479 | @Override
480 | public void onWebRtcAudioRecordInitError(String errorMessage) {
481 | Log.e(TAG, "onWebRtcAudioRecordInitError: " + errorMessage);
482 | reportError(errorMessage);
483 | }
484 |
485 | @Override
486 | public void onWebRtcAudioRecordStartError(
487 | AudioRecordStartErrorCode errorCode, String errorMessage) {
488 | Log.e(TAG, "onWebRtcAudioRecordStartError: " + errorCode + ". " + errorMessage);
489 | reportError(errorMessage);
490 | }
491 |
492 | @Override
493 | public void onWebRtcAudioRecordError(String errorMessage) {
494 | Log.e(TAG, "onWebRtcAudioRecordError: " + errorMessage);
495 | reportError(errorMessage);
496 | }
497 | });
498 |
499 | WebRtcAudioTrack.setErrorCallback(new WebRtcAudioTrack.ErrorCallback() {
500 | @Override
501 | public void onWebRtcAudioTrackInitError(String errorMessage) {
502 | Log.e(TAG, "onWebRtcAudioTrackInitError: " + errorMessage);
503 | reportError(errorMessage);
504 | }
505 |
506 | @Override
507 | public void onWebRtcAudioTrackStartError(
508 | AudioTrackStartErrorCode errorCode, String errorMessage) {
509 | Log.e(TAG, "onWebRtcAudioTrackStartError: " + errorCode + ". " + errorMessage);
510 | reportError(errorMessage);
511 | }
512 |
513 | @Override
514 | public void onWebRtcAudioTrackError(String errorMessage) {
515 | Log.e(TAG, "onWebRtcAudioTrackError: " + errorMessage);
516 | reportError(errorMessage);
517 | }
518 | });
519 |
520 | // Create peer connection factory.
521 | if (options != null) {
522 | Log.d(TAG, "Factory networkIgnoreMask option: " + options.networkIgnoreMask);
523 | }
524 | final boolean enableH264HighProfile =
525 | VIDEO_CODEC_H264_HIGH.equals(peerConnectionParameters.videoCodec);
526 | final VideoEncoderFactory encoderFactory;
527 | final VideoDecoderFactory decoderFactory;
528 |
529 | if (peerConnectionParameters.videoCodecHwAcceleration) {
530 | encoderFactory = new DefaultVideoEncoderFactory(
531 | rootEglBase.getEglBaseContext(), true /* enableIntelVp8Encoder */, enableH264HighProfile);
532 | decoderFactory = new DefaultVideoDecoderFactory(rootEglBase.getEglBaseContext());
533 | } else {
534 | encoderFactory = new SoftwareVideoEncoderFactory();
535 | decoderFactory = new SoftwareVideoDecoderFactory();
536 | }
537 |
538 | factory = new PeerConnectionFactory(options, encoderFactory, decoderFactory);
539 | Log.d(TAG, "Peer connection factory created.");
540 | }
541 |
542 | private void createMediaConstraintsInternal() {
543 | // Check if there is a camera on device and disable video call if not.
544 | if (videoCapturer == null) {
545 | Log.w(TAG, "No camera on device. Switch to audio only call.");
546 | videoCallEnabled = false;
547 | }
548 | // Create video constraints if video call is enabled.
549 | if (videoCallEnabled) {
550 | videoWidth = peerConnectionParameters.videoWidth;
551 | videoHeight = peerConnectionParameters.videoHeight;
552 | videoFps = peerConnectionParameters.videoFps;
553 |
554 | // If video resolution is not specified, default to HD.
555 | if (videoWidth == 0 || videoHeight == 0) {
556 | videoWidth = HD_VIDEO_WIDTH;
557 | videoHeight = HD_VIDEO_HEIGHT;
558 | }
559 |
560 | // If fps is not specified, default to 30.
561 | if (videoFps == 0) {
562 | videoFps = 30;
563 | }
564 | Logging.d(TAG, "Capturing format: " + videoWidth + "x" + videoHeight + "@" + videoFps);
565 | }
566 |
567 | // Create audio constraints.
568 | audioConstraints = new MediaConstraints();
569 | // added for audio performance measurements
570 | if (peerConnectionParameters.noAudioProcessing) {
571 | Log.d(TAG, "Disabling audio processing");
572 | audioConstraints.mandatory.add(
573 | new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
574 | audioConstraints.mandatory.add(
575 | new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
576 | audioConstraints.mandatory.add(
577 | new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
578 | audioConstraints.mandatory.add(
579 | new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false"));
580 | }
581 | if (peerConnectionParameters.enableLevelControl) {
582 | Log.d(TAG, "Enabling level control.");
583 | audioConstraints.mandatory.add(
584 | new MediaConstraints.KeyValuePair(AUDIO_LEVEL_CONTROL_CONSTRAINT, "true"));
585 | }
586 | // Create SDP constraints.
587 | sdpMediaConstraints = new MediaConstraints();
588 | sdpMediaConstraints.mandatory.add(
589 | new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
590 | if (videoCallEnabled || peerConnectionParameters.loopback) {
591 | sdpMediaConstraints.mandatory.add(
592 | new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
593 | } else {
594 | sdpMediaConstraints.mandatory.add(
595 | new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "false"));
596 | }
597 | }
598 |
599 | private void createPeerConnectionInternal() {
600 | if (factory == null || isError) {
601 | Log.e(TAG, "Peerconnection factory is not created");
602 | return;
603 | }
604 | Log.d(TAG, "Create peer connection.");
605 |
606 | queuedRemoteCandidates = new ArrayList<>();
607 |
608 | if (videoCallEnabled) {
609 | factory.setVideoHwAccelerationOptions(
610 | rootEglBase.getEglBaseContext(), rootEglBase.getEglBaseContext());
611 | }
612 |
613 | PeerConnection.RTCConfiguration rtcConfig =
614 | new PeerConnection.RTCConfiguration(signalingParameters.iceServers);
615 | // TCP candidates are only useful when connecting to a server that supports
616 | // ICE-TCP.
617 | rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED;
618 | rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
619 | rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;
620 | rtcConfig.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;
621 | // Use ECDSA encryption.
622 | rtcConfig.keyType = PeerConnection.KeyType.ECDSA;
623 | // Enable DTLS for normal calls and disable for loopback calls.
624 | rtcConfig.enableDtlsSrtp = !peerConnectionParameters.loopback;
625 |
626 | peerConnection = factory.createPeerConnection(rtcConfig, pcObserver);
627 |
628 | if (dataChannelEnabled) {
629 | DataChannel.Init init = new DataChannel.Init();
630 | init.ordered = peerConnectionParameters.dataChannelParameters.ordered;
631 | init.negotiated = peerConnectionParameters.dataChannelParameters.negotiated;
632 | init.maxRetransmits = peerConnectionParameters.dataChannelParameters.maxRetransmits;
633 | init.maxRetransmitTimeMs = peerConnectionParameters.dataChannelParameters.maxRetransmitTimeMs;
634 | init.id = peerConnectionParameters.dataChannelParameters.id;
635 | init.protocol = peerConnectionParameters.dataChannelParameters.protocol;
636 | dataChannel = peerConnection.createDataChannel("ApprtcDemo data", init);
637 | }
638 | isInitiator = false;
639 |
640 | // Set INFO libjingle logging.
641 | // NOTE: this _must_ happen while |factory| is alive!
642 | Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO);
643 |
644 | mediaStream = factory.createLocalMediaStream("ARDAMS");
645 | if (videoCallEnabled) {
646 | mediaStream.addTrack(createVideoTrack(videoCapturer));
647 | }
648 |
649 | mediaStream.addTrack(createAudioTrack());
650 | peerConnection.addStream(mediaStream);
651 | if (videoCallEnabled) {
652 | findVideoSender();
653 | }
654 |
655 | if (peerConnectionParameters.aecDump) {
656 | try {
657 | ParcelFileDescriptor aecDumpFileDescriptor =
658 | ParcelFileDescriptor.open(new File(Environment.getExternalStorageDirectory().getPath()
659 | + File.separator + "Download/audio.aecdump"),
660 | ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE
661 | | ParcelFileDescriptor.MODE_TRUNCATE);
662 | factory.startAecDump(aecDumpFileDescriptor.getFd(), -1);
663 | } catch (IOException e) {
664 | Log.e(TAG, "Can not open aecdump file", e);
665 | }
666 | }
667 |
668 | Log.d(TAG, "Peer connection created.");
669 | }
670 |
671 | private void closeInternal() {
672 | if (factory != null && peerConnectionParameters.aecDump) {
673 | factory.stopAecDump();
674 | }
675 | Log.d(TAG, "Closing peer connection.");
676 | statsTimer.cancel();
677 | if (dataChannel != null) {
678 | dataChannel.dispose();
679 | dataChannel = null;
680 | }
681 | if (peerConnection != null) {
682 | peerConnection.dispose();
683 | peerConnection = null;
684 | }
685 | Log.d(TAG, "Closing audio source.");
686 | if (audioSource != null) {
687 | audioSource.dispose();
688 | audioSource = null;
689 | }
690 | Log.d(TAG, "Stopping capture.");
691 | if (videoCapturer != null) {
692 | try {
693 | videoCapturer.stopCapture();
694 | } catch (InterruptedException e) {
695 | throw new RuntimeException(e);
696 | }
697 | videoCapturerStopped = true;
698 | videoCapturer.dispose();
699 | videoCapturer = null;
700 | }
701 | Log.d(TAG, "Closing video source.");
702 | if (videoSource != null) {
703 | videoSource.dispose();
704 | videoSource = null;
705 | }
706 | localRender = null;
707 | remoteRenders = null;
708 | Log.d(TAG, "Closing peer connection factory.");
709 | if (factory != null) {
710 | factory.dispose();
711 | factory = null;
712 | }
713 | options = null;
714 | rootEglBase.release();
715 | Log.d(TAG, "Closing peer connection done.");
716 | events.onPeerConnectionClosed();
717 | PeerConnectionFactory.stopInternalTracingCapture();
718 | PeerConnectionFactory.shutdownInternalTracer();
719 | events = null;
720 | }
721 |
722 | public boolean isHDVideo() {
723 | return videoCallEnabled && videoWidth * videoHeight >= 1280 * 720;
724 | }
725 |
726 | public EglBase.Context getRenderContext() {
727 | return rootEglBase.getEglBaseContext();
728 | }
729 |
730 | @SuppressWarnings("deprecation") // TODO(sakal): getStats is deprecated.
731 | private void getStats() {
732 | if (peerConnection == null || isError) {
733 | return;
734 | }
735 | boolean success = peerConnection.getStats(new StatsObserver() {
736 | @Override
737 | public void onComplete(final StatsReport[] reports) {
738 | events.onPeerConnectionStatsReady(reports);
739 | }
740 | }, null);
741 | if (!success) {
742 | Log.e(TAG, "getStats() returns false!");
743 | }
744 | }
745 |
746 | public void enableStatsEvents(boolean enable, int periodMs) {
747 | if (enable) {
748 | try {
749 | statsTimer.schedule(new TimerTask() {
750 | @Override
751 | public void run() {
752 | executor.execute(new Runnable() {
753 | @Override
754 | public void run() {
755 | getStats();
756 | }
757 | });
758 | }
759 | }, 0, periodMs);
760 | } catch (Exception e) {
761 | Log.e(TAG, "Can not schedule statistics timer", e);
762 | }
763 | } else {
764 | statsTimer.cancel();
765 | }
766 | }
767 |
768 | public void setAudioEnabled(final boolean enable) {
769 | executor.execute(new Runnable() {
770 | @Override
771 | public void run() {
772 | enableAudio = enable;
773 | if (localAudioTrack != null) {
774 | localAudioTrack.setEnabled(enableAudio);
775 | }
776 | }
777 | });
778 | }
779 |
780 | public void setVideoEnabled(final boolean enable) {
781 | executor.execute(new Runnable() {
782 | @Override
783 | public void run() {
784 | renderVideo = enable;
785 | if (localVideoTrack != null) {
786 | localVideoTrack.setEnabled(renderVideo);
787 | }
788 | if (remoteVideoTrack != null) {
789 | remoteVideoTrack.setEnabled(renderVideo);
790 | }
791 | }
792 | });
793 | }
794 |
795 | public void createOffer() {
796 | executor.execute(new Runnable() {
797 | @Override
798 | public void run() {
799 | if (peerConnection != null && !isError) {
800 | Log.d(TAG, "PC Create OFFER");
801 | isInitiator = true;
802 | peerConnection.createOffer(sdpObserver, sdpMediaConstraints);
803 | }
804 | }
805 | });
806 | }
807 |
808 | public void createAnswer() {
809 | executor.execute(new Runnable() {
810 | @Override
811 | public void run() {
812 | if (peerConnection != null && !isError) {
813 | Log.d(TAG, "PC create ANSWER");
814 | isInitiator = false;
815 | peerConnection.createAnswer(sdpObserver, sdpMediaConstraints);
816 | }
817 | }
818 | });
819 | }
820 |
821 | public void addRemoteIceCandidate(final IceCandidate candidate) {
822 | executor.execute(new Runnable() {
823 | @Override
824 | public void run() {
825 | if (peerConnection != null && !isError) {
826 | if (queuedRemoteCandidates != null) {
827 | queuedRemoteCandidates.add(candidate);
828 | } else {
829 | peerConnection.addIceCandidate(candidate);
830 | }
831 | }
832 | }
833 | });
834 | }
835 |
836 | public void removeRemoteIceCandidates(final IceCandidate[] candidates) {
837 | executor.execute(new Runnable() {
838 | @Override
839 | public void run() {
840 | if (peerConnection == null || isError) {
841 | return;
842 | }
843 |           // Drain the queued remote candidates if there are any so that
844 | // they are processed in the proper order.
845 | drainCandidates();
846 | peerConnection.removeIceCandidates(candidates);
847 | }
848 | });
849 | }
850 |
851 | public void setRemoteDescription(final SessionDescription sdp) {
852 | executor.execute(new Runnable() {
853 | @Override
854 | public void run() {
855 | if (peerConnection == null || isError) {
856 | return;
857 | }
858 | String sdpDescription = sdp.description;
859 | if (preferIsac) {
860 | sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true);
861 | }
862 | if (videoCallEnabled) {
863 | sdpDescription = preferCodec(sdpDescription, preferredVideoCodec, false);
864 | }
865 | if (peerConnectionParameters.audioStartBitrate > 0) {
866 | sdpDescription = setStartBitrate(
867 | AUDIO_CODEC_OPUS, false, sdpDescription, peerConnectionParameters.audioStartBitrate);
868 | }
869 | Log.d(TAG, "Set remote SDP.");
870 | SessionDescription sdpRemote = new SessionDescription(sdp.type, sdpDescription);
871 | peerConnection.setRemoteDescription(sdpObserver, sdpRemote);
872 | }
873 | });
874 | }
875 |
876 | public void stopVideoSource() {
877 | executor.execute(new Runnable() {
878 | @Override
879 | public void run() {
880 | if (videoCapturer != null && !videoCapturerStopped) {
881 | Log.d(TAG, "Stop video source.");
882 | try {
883 | videoCapturer.stopCapture();
884 | } catch (InterruptedException e) {
885 | }
886 | videoCapturerStopped = true;
887 | }
888 | }
889 | });
890 | }
891 |
892 | public void startVideoSource() {
893 | executor.execute(new Runnable() {
894 | @Override
895 | public void run() {
896 | if (videoCapturer != null && videoCapturerStopped) {
897 | Log.d(TAG, "Restart video source.");
898 | videoCapturer.startCapture(videoWidth, videoHeight, videoFps);
899 | videoCapturerStopped = false;
900 | }
901 | }
902 | });
903 | }
904 |
905 | public void setVideoMaxBitrate(final Integer maxBitrateKbps) {
906 | executor.execute(new Runnable() {
907 | @Override
908 | public void run() {
909 | if (peerConnection == null || localVideoSender == null || isError) {
910 | return;
911 | }
912 | Log.d(TAG, "Requested max video bitrate: " + maxBitrateKbps);
913 | if (localVideoSender == null) {
914 | Log.w(TAG, "Sender is not ready.");
915 | return;
916 | }
917 |
918 | RtpParameters parameters = localVideoSender.getParameters();
919 | if (parameters.encodings.size() == 0) {
920 | Log.w(TAG, "RtpParameters are not ready.");
921 | return;
922 | }
923 |
924 | for (RtpParameters.Encoding encoding : parameters.encodings) {
925 | // Null value means no limit.
926 | encoding.maxBitrateBps = maxBitrateKbps == null ? null : maxBitrateKbps * BPS_IN_KBPS;
927 | }
928 | if (!localVideoSender.setParameters(parameters)) {
929 | Log.e(TAG, "RtpSender.setParameters failed.");
930 | }
931 | Log.d(TAG, "Configured max video bitrate to: " + maxBitrateKbps);
932 | }
933 | });
934 | }
935 |
936 | private void reportError(final String errorMessage) {
937 | Log.e(TAG, "Peerconnection error: " + errorMessage);
938 | executor.execute(new Runnable() {
939 | @Override
940 | public void run() {
941 | if (!isError) {
942 | events.onPeerConnectionError(errorMessage);
943 | isError = true;
944 | }
945 | }
946 | });
947 | }
948 |
949 | private AudioTrack createAudioTrack() {
950 | audioSource = factory.createAudioSource(audioConstraints);
951 | localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
952 | localAudioTrack.setEnabled(enableAudio);
953 | return localAudioTrack;
954 | }
955 |
956 | private VideoTrack createVideoTrack(VideoCapturer capturer) {
957 | videoSource = factory.createVideoSource(capturer);
958 | capturer.startCapture(videoWidth, videoHeight, videoFps);
959 |
960 | localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
961 | localVideoTrack.setEnabled(renderVideo);
962 | localVideoTrack.addSink(localRender);
963 | return localVideoTrack;
964 | }
965 |
966 | private void findVideoSender() {
967 | for (RtpSender sender : peerConnection.getSenders()) {
968 | if (sender.track() != null) {
969 | String trackType = sender.track().kind();
970 | if (trackType.equals(VIDEO_TRACK_TYPE)) {
971 | Log.d(TAG, "Found video sender.");
972 | localVideoSender = sender;
973 | }
974 | }
975 | }
976 | }
977 |
978 | private static String setStartBitrate(
979 | String codec, boolean isVideoCodec, String sdpDescription, int bitrateKbps) {
980 | String[] lines = sdpDescription.split("\r\n");
981 | int rtpmapLineIndex = -1;
982 | boolean sdpFormatUpdated = false;
983 | String codecRtpMap = null;
984 | // Search for codec rtpmap in format
985 |     // a=rtpmap:<payload type> <codec name>/<clock rate> [/<encoding parameters>]
986 | String regex = "^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$";
987 | Pattern codecPattern = Pattern.compile(regex);
988 | for (int i = 0; i < lines.length; i++) {
989 | Matcher codecMatcher = codecPattern.matcher(lines[i]);
990 | if (codecMatcher.matches()) {
991 | codecRtpMap = codecMatcher.group(1);
992 | rtpmapLineIndex = i;
993 | break;
994 | }
995 | }
996 | if (codecRtpMap == null) {
997 | Log.w(TAG, "No rtpmap for " + codec + " codec");
998 | return sdpDescription;
999 | }
1000 | Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + " at " + lines[rtpmapLineIndex]);
1001 |
1002 | // Check if a=fmtp string already exist in remote SDP for this codec and
1003 | // update it with new bitrate parameter.
1004 | regex = "^a=fmtp:" + codecRtpMap + " \\w+=\\d+.*[\r]?$";
1005 | codecPattern = Pattern.compile(regex);
1006 | for (int i = 0; i < lines.length; i++) {
1007 | Matcher codecMatcher = codecPattern.matcher(lines[i]);
1008 | if (codecMatcher.matches()) {
1009 | Log.d(TAG, "Found " + codec + " " + lines[i]);
1010 | if (isVideoCodec) {
1011 | lines[i] += "; " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
1012 | } else {
1013 | lines[i] += "; " + AUDIO_CODEC_PARAM_BITRATE + "=" + (bitrateKbps * 1000);
1014 | }
1015 | Log.d(TAG, "Update remote SDP line: " + lines[i]);
1016 | sdpFormatUpdated = true;
1017 | break;
1018 | }
1019 | }
1020 |
1021 | StringBuilder newSdpDescription = new StringBuilder();
1022 | for (int i = 0; i < lines.length; i++) {
1023 | newSdpDescription.append(lines[i]).append("\r\n");
1024 |       // Append new a=fmtp line if no such line exists for a codec.
1025 | if (!sdpFormatUpdated && i == rtpmapLineIndex) {
1026 | String bitrateSet;
1027 | if (isVideoCodec) {
1028 | bitrateSet =
1029 | "a=fmtp:" + codecRtpMap + " " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
1030 | } else {
1031 | bitrateSet = "a=fmtp:" + codecRtpMap + " " + AUDIO_CODEC_PARAM_BITRATE + "="
1032 | + (bitrateKbps * 1000);
1033 | }
1034 | Log.d(TAG, "Add remote SDP line: " + bitrateSet);
1035 | newSdpDescription.append(bitrateSet).append("\r\n");
1036 | }
1037 | }
1038 | return newSdpDescription.toString();
1039 | }
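// Illustrative effect of setStartBitrate(AUDIO_CODEC_OPUS, false, sdp, 32) on an SDP
// that has no a=fmtp line yet (payload type 111 is an assumed example value):
//   before: a=rtpmap:111 opus/48000/2
//   after:  a=rtpmap:111 opus/48000/2
//           a=fmtp:111 maxaveragebitrate=32000
// If an a=fmtp:111 line already exists, it is instead extended with
// "; maxaveragebitrate=32000".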
1040 |
1041 | /** Returns the line number containing "m=audio|video", or -1 if no such line exists. */
1042 | private static int findMediaDescriptionLine(boolean isAudio, String[] sdpLines) {
1043 | final String mediaDescription = isAudio ? "m=audio " : "m=video ";
1044 | for (int i = 0; i < sdpLines.length; ++i) {
1045 | if (sdpLines[i].startsWith(mediaDescription)) {
1046 | return i;
1047 | }
1048 | }
1049 | return -1;
1050 | }
1051 |
1052 | private static String joinString(
1053 |       Iterable<? extends CharSequence> s, String delimiter, boolean delimiterAtEnd) {
1054 |     Iterator<? extends CharSequence> iter = s.iterator();
1055 | if (!iter.hasNext()) {
1056 | return "";
1057 | }
1058 | StringBuilder buffer = new StringBuilder(iter.next());
1059 | while (iter.hasNext()) {
1060 | buffer.append(delimiter).append(iter.next());
1061 | }
1062 | if (delimiterAtEnd) {
1063 | buffer.append(delimiter);
1064 | }
1065 | return buffer.toString();
1066 | }
1067 |
1068 |   private static String movePayloadTypesToFront(List<String> preferredPayloadTypes, String mLine) {
1069 |     // The format of the media description line should be: m=<media> <port> <proto> <fmt> ...
1070 |     final List<String> origLineParts = Arrays.asList(mLine.split(" "));
1071 | if (origLineParts.size() <= 3) {
1072 | Log.e(TAG, "Wrong SDP media description format: " + mLine);
1073 | return null;
1074 | }
1075 |     final List<String> header = origLineParts.subList(0, 3);
1076 |     final List<String> unpreferredPayloadTypes =
1077 | new ArrayList<>(origLineParts.subList(3, origLineParts.size()));
1078 | unpreferredPayloadTypes.removeAll(preferredPayloadTypes);
1079 | // Reconstruct the line with |preferredPayloadTypes| moved to the beginning of the payload
1080 | // types.
1081 |     final List<String> newLineParts = new ArrayList<>();
1082 | newLineParts.addAll(header);
1083 | newLineParts.addAll(preferredPayloadTypes);
1084 | newLineParts.addAll(unpreferredPayloadTypes);
1085 | return joinString(newLineParts, " ", false /* delimiterAtEnd */);
1086 | }
1087 |
1088 | private static String preferCodec(String sdpDescription, String codec, boolean isAudio) {
1089 | final String[] lines = sdpDescription.split("\r\n");
1090 | final int mLineIndex = findMediaDescriptionLine(isAudio, lines);
1091 | if (mLineIndex == -1) {
1092 | Log.w(TAG, "No mediaDescription line, so can't prefer " + codec);
1093 | return sdpDescription;
1094 | }
1095 | // A list with all the payload types with name |codec|. The payload types are integers in the
1096 | // range 96-127, but they are stored as strings here.
1097 |     final List<String> codecPayloadTypes = new ArrayList<>();
1098 |     // a=rtpmap:<payload type> <codec name>/<clock rate> [/<encoding parameters>]
1099 | final Pattern codecPattern = Pattern.compile("^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$");
1100 | for (String line : lines) {
1101 | Matcher codecMatcher = codecPattern.matcher(line);
1102 | if (codecMatcher.matches()) {
1103 | codecPayloadTypes.add(codecMatcher.group(1));
1104 | }
1105 | }
1106 | if (codecPayloadTypes.isEmpty()) {
1107 | Log.w(TAG, "No payload types with name " + codec);
1108 | return sdpDescription;
1109 | }
1110 |
1111 | final String newMLine = movePayloadTypesToFront(codecPayloadTypes, lines[mLineIndex]);
1112 | if (newMLine == null) {
1113 | return sdpDescription;
1114 | }
1115 | Log.d(TAG, "Change media description from: " + lines[mLineIndex] + " to " + newMLine);
1116 | lines[mLineIndex] = newMLine;
1117 | return joinString(Arrays.asList(lines), "\r\n", true /* delimiterAtEnd */);
1118 | }
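// Illustrative example of preferCodec(sdp, "H264", false) with assumed payload types:
//   m=video 9 UDP/TLS/RTP/SAVPF 96 98 100   ->   m=video 9 UDP/TLS/RTP/SAVPF 100 96 98
// given that the SDP contains a line "a=rtpmap:100 H264/90000".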
1119 |
1120 | private void drainCandidates() {
1121 | if (queuedRemoteCandidates != null) {
1122 | Log.d(TAG, "Add " + queuedRemoteCandidates.size() + " remote candidates");
1123 | for (IceCandidate candidate : queuedRemoteCandidates) {
1124 | peerConnection.addIceCandidate(candidate);
1125 | }
1126 | queuedRemoteCandidates = null;
1127 | }
1128 | }
1129 |
1130 | private void switchCameraInternal() {
1131 | if (videoCapturer instanceof CameraVideoCapturer) {
1132 | if (!videoCallEnabled || isError) {
1133 | Log.e(TAG, "Failed to switch camera. Video: " + videoCallEnabled + ". Error : " + isError);
1134 | return; // No video is sent or only one camera is available or error happened.
1135 | }
1136 | Log.d(TAG, "Switch camera");
1137 | CameraVideoCapturer cameraVideoCapturer = (CameraVideoCapturer) videoCapturer;
1138 | cameraVideoCapturer.switchCamera(null);
1139 | } else {
1140 | Log.d(TAG, "Will not switch camera, video caputurer is not a camera");
1141 | }
1142 | }
1143 |
1144 | public void switchCamera() {
1145 | executor.execute(new Runnable() {
1146 | @Override
1147 | public void run() {
1148 | switchCameraInternal();
1149 | }
1150 | });
1151 | }
1152 |
1153 | public void changeCaptureFormat(final int width, final int height, final int framerate) {
1154 | executor.execute(new Runnable() {
1155 | @Override
1156 | public void run() {
1157 | changeCaptureFormatInternal(width, height, framerate);
1158 | }
1159 | });
1160 | }
1161 |
1162 | private void changeCaptureFormatInternal(int width, int height, int framerate) {
1163 | if (!videoCallEnabled || isError || videoCapturer == null) {
1164 | Log.e(TAG,
1165 | "Failed to change capture format. Video: " + videoCallEnabled + ". Error : " + isError);
1166 | return;
1167 | }
1168 | Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
1169 | videoSource.adaptOutputFormat(width, height, framerate);
1170 | }
1171 |
1172 | // Implementation detail: observe ICE & stream changes and react accordingly.
1173 | private class PCObserver implements PeerConnection.Observer {
1174 | @Override
1175 | public void onIceCandidate(final IceCandidate candidate) {
1176 | executor.execute(new Runnable() {
1177 | @Override
1178 | public void run() {
1179 | events.onIceCandidate(candidate);
1180 | }
1181 | });
1182 | }
1183 |
1184 | @Override
1185 | public void onIceCandidatesRemoved(final IceCandidate[] candidates) {
1186 | executor.execute(new Runnable() {
1187 | @Override
1188 | public void run() {
1189 | events.onIceCandidatesRemoved(candidates);
1190 | }
1191 | });
1192 | }
1193 |
1194 | @Override
1195 | public void onSignalingChange(PeerConnection.SignalingState newState) {
1196 | Log.d(TAG, "SignalingState: " + newState);
1197 | }
1198 |
1199 | @Override
1200 | public void onIceConnectionChange(final PeerConnection.IceConnectionState newState) {
1201 | executor.execute(new Runnable() {
1202 | @Override
1203 | public void run() {
1204 | Log.d(TAG, "IceConnectionState: " + newState);
1205 | if (newState == IceConnectionState.CONNECTED) {
1206 | events.onIceConnected();
1207 | } else if (newState == IceConnectionState.DISCONNECTED) {
1208 | events.onIceDisconnected();
1209 | } else if (newState == IceConnectionState.FAILED) {
1210 | reportError("ICE connection failed.");
1211 | }
1212 | }
1213 | });
1214 | }
1215 |
1216 | @Override
1217 | public void onIceGatheringChange(PeerConnection.IceGatheringState newState) {
1218 | Log.d(TAG, "IceGatheringState: " + newState);
1219 | }
1220 |
1221 | @Override
1222 | public void onIceConnectionReceivingChange(boolean receiving) {
1223 | Log.d(TAG, "IceConnectionReceiving changed to " + receiving);
1224 | }
1225 |
1226 | @Override
1227 | public void onAddStream(final MediaStream stream) {
1228 | executor.execute(new Runnable() {
1229 | @Override
1230 | public void run() {
1231 | if (peerConnection == null || isError) {
1232 | return;
1233 | }
1234 | if (stream.audioTracks.size() > 1 || stream.videoTracks.size() > 1) {
1235 | reportError("Weird-looking stream: " + stream);
1236 | return;
1237 | }
1238 | if (stream.videoTracks.size() == 1) {
1239 | remoteVideoTrack = stream.videoTracks.get(0);
1240 | remoteVideoTrack.setEnabled(renderVideo);
1241 | for (VideoRenderer.Callbacks remoteRender : remoteRenders) {
1242 | remoteVideoTrack.addRenderer(new VideoRenderer(remoteRender));
1243 | }
1244 | }
1245 | }
1246 | });
1247 | }
1248 |
1249 | @Override
1250 | public void onRemoveStream(final MediaStream stream) {
1251 | executor.execute(new Runnable() {
1252 | @Override
1253 | public void run() {
1254 | remoteVideoTrack = null;
1255 | }
1256 | });
1257 | }
1258 |
1259 | @Override
1260 | public void onDataChannel(final DataChannel dc) {
1261 | Log.d(TAG, "New Data channel " + dc.label());
1262 |
1263 | if (!dataChannelEnabled)
1264 | return;
1265 |
1266 | dc.registerObserver(new DataChannel.Observer() {
1267 | @Override
1268 | public void onBufferedAmountChange(long previousAmount) {
1269 | Log.d(TAG, "Data channel buffered amount changed: " + dc.label() + ": " + dc.state());
1270 | }
1271 |
1272 | @Override
1273 | public void onStateChange() {
1274 | Log.d(TAG, "Data channel state changed: " + dc.label() + ": " + dc.state());
1275 | }
1276 |
1277 | @Override
1278 | public void onMessage(final DataChannel.Buffer buffer) {
1279 | if (buffer.binary) {
1280 | Log.d(TAG, "Received binary msg over " + dc);
1281 | return;
1282 | }
1283 | ByteBuffer data = buffer.data;
1284 | final byte[] bytes = new byte[data.capacity()];
1285 | data.get(bytes);
1286 | String strData = new String(bytes, Charset.forName("UTF-8"));
1287 | Log.d(TAG, "Got msg: " + strData + " over " + dc);
1288 | }
1289 | });
1290 | }
1291 |
1292 | @Override
1293 | public void onRenegotiationNeeded() {
1294 | // No need to do anything; AppRTC follows a pre-agreed-upon
1295 | // signaling/negotiation protocol.
1296 | }
1297 |
1298 | @Override
1299 | public void onAddTrack(final RtpReceiver receiver, final MediaStream[] mediaStreams) {}
1300 | }
1301 |
1302 | // Implementation detail: handle offer creation/signaling and answer setting,
1303 | // as well as adding remote ICE candidates once the answer SDP is set.
1304 | private class SDPObserver implements SdpObserver {
1305 | @Override
1306 | public void onCreateSuccess(final SessionDescription origSdp) {
1307 | if (localSdp != null) {
1308 | reportError("Multiple SDP create.");
1309 | return;
1310 | }
1311 | String sdpDescription = origSdp.description;
1312 | if (preferIsac) {
1313 | sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true);
1314 | }
1315 | if (videoCallEnabled) {
1316 | sdpDescription = preferCodec(sdpDescription, preferredVideoCodec, false);
1317 | }
1318 | final SessionDescription sdp = new SessionDescription(origSdp.type, sdpDescription);
1319 | localSdp = sdp;
1320 | executor.execute(new Runnable() {
1321 | @Override
1322 | public void run() {
1323 | if (peerConnection != null && !isError) {
1324 | Log.d(TAG, "Set local SDP from " + sdp.type);
1325 | peerConnection.setLocalDescription(sdpObserver, sdp);
1326 | }
1327 | }
1328 | });
1329 | }
1330 |
1331 | @Override
1332 | public void onSetSuccess() {
1333 | executor.execute(new Runnable() {
1334 | @Override
1335 | public void run() {
1336 | if (peerConnection == null || isError) {
1337 | return;
1338 | }
1339 | if (isInitiator) {
1340 | // For offering peer connection we first create offer and set
1341 | // local SDP, then after receiving answer set remote SDP.
1342 | if (peerConnection.getRemoteDescription() == null) {
1343 | // We've just set our local SDP so time to send it.
1344 | Log.d(TAG, "Local SDP set succesfully");
1345 | events.onLocalDescription(localSdp);
1346 | } else {
1347 | // We've just set remote description, so drain remote
1348 | // and send local ICE candidates.
1349 | Log.d(TAG, "Remote SDP set succesfully");
1350 | drainCandidates();
1351 | }
1352 | } else {
1353 | // For answering peer connection we set remote SDP and then
1354 | // create answer and set local SDP.
1355 | if (peerConnection.getLocalDescription() != null) {
1356 | // We've just set our local SDP so time to send it, drain
1357 | // remote and send local ICE candidates.
1358 | Log.d(TAG, "Local SDP set succesfully");
1359 | events.onLocalDescription(localSdp);
1360 | drainCandidates();
1361 | } else {
1362 | // We've just set remote SDP - do nothing for now -
1363 | // answer will be created soon.
1364 | Log.d(TAG, "Remote SDP set succesfully");
1365 | }
1366 | }
1367 | }
1368 | });
1369 | }
1370 |
1371 | @Override
1372 | public void onCreateFailure(final String error) {
1373 | reportError("createSDP error: " + error);
1374 | }
1375 |
1376 | @Override
1377 | public void onSetFailure(final String error) {
1378 | reportError("setSDP error: " + error);
1379 | }
1380 | }
1381 | }
1382 |
--------------------------------------------------------------------------------
/app/src/main/java/org/appspot/apprtc/RoomParametersFetcher.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014 The WebRTC Project Authors. All rights reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | package org.appspot.apprtc;
12 |
13 | import android.util.Log;
14 | import java.io.IOException;
15 | import java.io.InputStream;
16 | import java.net.HttpURLConnection;
17 | import java.net.URL;
18 | import java.util.ArrayList;
19 | import java.util.Scanner;
20 | import java.util.List;
21 | import org.appspot.apprtc.AppRTCClient.SignalingParameters;
22 | import org.appspot.apprtc.util.AsyncHttpURLConnection;
23 | import org.appspot.apprtc.util.AsyncHttpURLConnection.AsyncHttpEvents;
24 | import org.json.JSONArray;
25 | import org.json.JSONException;
26 | import org.json.JSONObject;
27 | import org.webrtc.IceCandidate;
28 | import org.webrtc.PeerConnection;
29 | import org.webrtc.SessionDescription;
30 |
31 | /**
32 |  * Fetcher that converts an AppRTC room URL into the set of signaling
33 | * parameters to use with that room.
34 | */
35 | public class RoomParametersFetcher {
36 | private static final String TAG = "RoomRTCClient";
37 | private static final int TURN_HTTP_TIMEOUT_MS = 5000;
38 | private final RoomParametersFetcherEvents events;
39 | private final String roomUrl;
40 | private final String roomMessage;
41 |
42 | /**
43 | * Room parameters fetcher callbacks.
44 | */
45 | public interface RoomParametersFetcherEvents {
46 | /**
47 | * Callback fired once the room's signaling parameters
48 | * SignalingParameters are extracted.
49 | */
50 | void onSignalingParametersReady(final SignalingParameters params);
51 |
52 | /**
53 | * Callback for room parameters extraction error.
54 | */
55 | void onSignalingParametersError(final String description);
56 | }
57 |
58 | public RoomParametersFetcher(
59 | String roomUrl, String roomMessage, final RoomParametersFetcherEvents events) {
60 | this.roomUrl = roomUrl;
61 | this.roomMessage = roomMessage;
62 | this.events = events;
63 | }
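// Minimal usage sketch (illustrative; "joinUrl" is assumed to be the AppRTC room-join URL
// and "events" a caller-supplied RoomParametersFetcherEvents implementation):
//   new RoomParametersFetcher(joinUrl, null /* roomMessage */, events).makeRequest();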
64 |
65 | public void makeRequest() {
66 | Log.d(TAG, "Connecting to room: " + roomUrl);
67 | AsyncHttpURLConnection httpConnection =
68 | new AsyncHttpURLConnection("POST", roomUrl, roomMessage, new AsyncHttpEvents() {
69 | @Override
70 | public void onHttpError(String errorMessage) {
71 | Log.e(TAG, "Room connection error: " + errorMessage);
72 | events.onSignalingParametersError(errorMessage);
73 | }
74 |
75 | @Override
76 | public void onHttpComplete(String response) {
77 | roomHttpResponseParse(response);
78 | }
79 | });
80 | httpConnection.send();
81 | }
82 |
83 | private void roomHttpResponseParse(String response) {
84 | Log.d(TAG, "Room response: " + response);
85 | try {
86 |       List<IceCandidate> iceCandidates = null;
87 | SessionDescription offerSdp = null;
88 | JSONObject roomJson = new JSONObject(response);
89 |
90 | String result = roomJson.getString("result");
91 | if (!result.equals("SUCCESS")) {
92 | events.onSignalingParametersError("Room response error: " + result);
93 | return;
94 | }
95 | response = roomJson.getString("params");
96 | roomJson = new JSONObject(response);
97 | String roomId = roomJson.getString("room_id");
98 | String clientId = roomJson.getString("client_id");
99 | String wssUrl = roomJson.getString("wss_url");
100 | String wssPostUrl = roomJson.getString("wss_post_url");
101 | boolean initiator = (roomJson.getBoolean("is_initiator"));
102 | if (!initiator) {
103 | iceCandidates = new ArrayList<>();
104 | String messagesString = roomJson.getString("messages");
105 | JSONArray messages = new JSONArray(messagesString);
106 | for (int i = 0; i < messages.length(); ++i) {
107 | String messageString = messages.getString(i);
108 | JSONObject message = new JSONObject(messageString);
109 | String messageType = message.getString("type");
110 | Log.d(TAG, "GAE->C #" + i + " : " + messageString);
111 | if (messageType.equals("offer")) {
112 | offerSdp = new SessionDescription(
113 | SessionDescription.Type.fromCanonicalForm(messageType), message.getString("sdp"));
114 | } else if (messageType.equals("candidate")) {
115 | IceCandidate candidate = new IceCandidate(
116 | message.getString("id"), message.getInt("label"), message.getString("candidate"));
117 | iceCandidates.add(candidate);
118 | } else {
119 | Log.e(TAG, "Unknown message: " + messageString);
120 | }
121 | }
122 | }
123 | Log.d(TAG, "RoomId: " + roomId + ". ClientId: " + clientId);
124 | Log.d(TAG, "Initiator: " + initiator);
125 | Log.d(TAG, "WSS url: " + wssUrl);
126 | Log.d(TAG, "WSS POST url: " + wssPostUrl);
127 |
128 |       List<PeerConnection.IceServer> iceServers =
129 | iceServersFromPCConfigJSON(roomJson.getString("pc_config"));
130 | boolean isTurnPresent = false;
131 | for (PeerConnection.IceServer server : iceServers) {
132 | Log.d(TAG, "IceServer: " + server);
133 | for (String uri : server.urls) {
134 | if (uri.startsWith("turn:")) {
135 | isTurnPresent = true;
136 | break;
137 | }
138 | }
139 | }
140 | // Request TURN servers.
141 | if (!isTurnPresent && !roomJson.optString("ice_server_url").isEmpty()) {
142 |         List<PeerConnection.IceServer> turnServers =
143 | requestTurnServers(roomJson.getString("ice_server_url"));
144 | for (PeerConnection.IceServer turnServer : turnServers) {
145 | Log.d(TAG, "TurnServer: " + turnServer);
146 | iceServers.add(turnServer);
147 | }
148 | }
149 |
150 | SignalingParameters params = new SignalingParameters(
151 | iceServers, initiator, clientId, wssUrl, wssPostUrl, offerSdp, iceCandidates);
152 | events.onSignalingParametersReady(params);
153 | } catch (JSONException e) {
154 | events.onSignalingParametersError("Room JSON parsing error: " + e.toString());
155 | } catch (IOException e) {
156 | events.onSignalingParametersError("Room IO error: " + e.toString());
157 | }
158 | }
159 |
160 | // Requests & returns a TURN ICE Server based on a request URL. Must be run
161 | // off the main thread!
162 |   private List<PeerConnection.IceServer> requestTurnServers(String url)
163 | throws IOException, JSONException {
164 |     List<PeerConnection.IceServer> turnServers = new ArrayList<>();
165 | Log.d(TAG, "Request TURN from: " + url);
166 | HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
167 | connection.setDoOutput(true);
168 | connection.setRequestProperty("REFERER", "https://appr.tc");
169 | connection.setConnectTimeout(TURN_HTTP_TIMEOUT_MS);
170 | connection.setReadTimeout(TURN_HTTP_TIMEOUT_MS);
171 | int responseCode = connection.getResponseCode();
172 | if (responseCode != 200) {
173 | throw new IOException("Non-200 response when requesting TURN server from " + url + " : "
174 | + connection.getHeaderField(null));
175 | }
176 | InputStream responseStream = connection.getInputStream();
177 | String response = drainStream(responseStream);
178 | connection.disconnect();
179 | Log.d(TAG, "TURN response: " + response);
180 | JSONObject responseJSON = new JSONObject(response);
181 | JSONArray iceServers = responseJSON.getJSONArray("iceServers");
182 | for (int i = 0; i < iceServers.length(); ++i) {
183 | JSONObject server = iceServers.getJSONObject(i);
184 | JSONArray turnUrls = server.getJSONArray("urls");
185 | String username = server.has("username") ? server.getString("username") : "";
186 | String credential = server.has("credential") ? server.getString("credential") : "";
187 | for (int j = 0; j < turnUrls.length(); j++) {
188 | String turnUrl = turnUrls.getString(j);
189 | PeerConnection.IceServer turnServer =
190 | PeerConnection.IceServer.builder(turnUrl)
191 | .setUsername(username)
192 | .setPassword(credential)
193 | .createIceServer();
194 | turnServers.add(turnServer);
195 | }
196 | }
197 | return turnServers;
198 | }
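// Assumed shape of the TURN response this parser expects (illustrative values only):
//   {"iceServers": [{"urls": ["turn:turn.example.org:3478?transport=udp"],
//                    "username": "user", "credential": "secret"}]}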
199 |
200 | // Return the list of ICE servers described by a WebRTCPeerConnection
201 | // configuration string.
202 |   private List<PeerConnection.IceServer> iceServersFromPCConfigJSON(String pcConfig)
203 | throws JSONException {
204 | JSONObject json = new JSONObject(pcConfig);
205 | JSONArray servers = json.getJSONArray("iceServers");
206 |     List<PeerConnection.IceServer> ret = new ArrayList<>();
207 | for (int i = 0; i < servers.length(); ++i) {
208 | JSONObject server = servers.getJSONObject(i);
209 | String url = server.getString("urls");
210 | String credential = server.has("credential") ? server.getString("credential") : "";
211 | PeerConnection.IceServer turnServer =
212 | PeerConnection.IceServer.builder(url)
213 | .setPassword(credential)
214 | .createIceServer();
215 | ret.add(turnServer);
216 | }
217 | return ret;
218 | }
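// Assumed pc_config shape handled here (note "urls" is read as a single string, not an array;
// the server URL is an illustrative value):
//   {"iceServers": [{"urls": "stun:stun.example.org:19302", "credential": ""}]}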
219 |
220 | // Return the contents of an InputStream as a String.
221 | private static String drainStream(InputStream in) {
222 | Scanner s = new Scanner(in, "UTF-8").useDelimiter("\\A");
223 | return s.hasNext() ? s.next() : "";
224 | }
225 | }
226 |
--------------------------------------------------------------------------------
/app/src/main/java/org/appspot/apprtc/WebSocketChannelClient.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014 The WebRTC Project Authors. All rights reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | package org.appspot.apprtc;
12 |
13 | import android.os.Handler;
14 | import android.util.Log;
15 | import de.tavendo.autobahn.WebSocket.WebSocketConnectionObserver;
16 | import de.tavendo.autobahn.WebSocketConnection;
17 | import de.tavendo.autobahn.WebSocketException;
18 | import java.net.URI;
19 | import java.net.URISyntaxException;
20 | import java.util.ArrayList;
21 | import java.util.List;
22 | import org.appspot.apprtc.util.AsyncHttpURLConnection;
23 | import org.appspot.apprtc.util.AsyncHttpURLConnection.AsyncHttpEvents;
24 | import org.json.JSONException;
25 | import org.json.JSONObject;
26 |
27 | /**
28 | * WebSocket client implementation.
29 | *
30 |  * <p>All public methods should be called from a looper executor thread
31 | * passed in a constructor, otherwise exception will be thrown.
32 | * All events are dispatched on the same thread.
33 | */
34 | public class WebSocketChannelClient {
35 | private static final String TAG = "WSChannelRTCClient";
36 | private static final int CLOSE_TIMEOUT = 1000;
37 | private final WebSocketChannelEvents events;
38 | private final Handler handler;
39 | private WebSocketConnection ws;
40 | private String wsServerUrl;
41 | private String postServerUrl;
42 | private String roomID;
43 | private String clientID;
44 | private WebSocketConnectionState state;
45 | // Do not remove this member variable. If this is removed, the observer gets garbage collected and
46 | // this causes test breakages.
47 | private WebSocketObserver wsObserver;
48 | private final Object closeEventLock = new Object();
49 | private boolean closeEvent;
50 | // WebSocket send queue. Messages are added to the queue when WebSocket
51 | // client is not registered and are consumed in register() call.
52 |   private final List<String> wsSendQueue = new ArrayList<>();
53 |
54 | /**
55 | * Possible WebSocket connection states.
56 | */
57 | public enum WebSocketConnectionState { NEW, CONNECTED, REGISTERED, CLOSED, ERROR }
58 |
59 | /**
60 | * Callback interface for messages delivered on WebSocket.
61 | * All events are dispatched from a looper executor thread.
62 | */
63 | public interface WebSocketChannelEvents {
64 | void onWebSocketMessage(final String message);
65 | void onWebSocketClose();
66 | void onWebSocketError(final String description);
67 | }
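// Typical call sequence, all on the handler's looper thread (illustrative; the message
// payload "offerJson" is whatever the signaling protocol defines):
//   client.connect(wssUrl, wssPostUrl);
//   client.register(roomId, clientId);   // once the socket has reached CONNECTED
//   client.send(offerJson);              // queued automatically until registered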
68 |
69 | public WebSocketChannelClient(Handler handler, WebSocketChannelEvents events) {
70 | this.handler = handler;
71 | this.events = events;
72 | roomID = null;
73 | clientID = null;
74 | state = WebSocketConnectionState.NEW;
75 | }
76 |
77 | public WebSocketConnectionState getState() {
78 | return state;
79 | }
80 |
81 | public void connect(final String wsUrl, final String postUrl) {
82 | checkIfCalledOnValidThread();
83 | if (state != WebSocketConnectionState.NEW) {
84 | Log.e(TAG, "WebSocket is already connected.");
85 | return;
86 | }
87 | wsServerUrl = wsUrl;
88 | postServerUrl = postUrl;
89 | closeEvent = false;
90 |
91 | Log.d(TAG, "Connecting WebSocket to: " + wsUrl + ". Post URL: " + postUrl);
92 | ws = new WebSocketConnection();
93 | wsObserver = new WebSocketObserver();
94 | try {
95 | ws.connect(new URI(wsServerUrl), wsObserver);
96 | } catch (URISyntaxException e) {
97 | reportError("URI error: " + e.getMessage());
98 | } catch (WebSocketException e) {
99 | reportError("WebSocket connection error: " + e.getMessage());
100 | }
101 | }
102 |
103 | public void register(final String roomID, final String clientID) {
104 | checkIfCalledOnValidThread();
105 | this.roomID = roomID;
106 | this.clientID = clientID;
107 | if (state != WebSocketConnectionState.CONNECTED) {
108 | Log.w(TAG, "WebSocket register() in state " + state);
109 | return;
110 | }
111 | Log.d(TAG, "Registering WebSocket for room " + roomID + ". ClientID: " + clientID);
112 | JSONObject json = new JSONObject();
113 | try {
114 | json.put("cmd", "register");
115 | json.put("roomid", roomID);
116 | json.put("clientid", clientID);
117 | Log.d(TAG, "C->WSS: " + json.toString());
118 | ws.sendTextMessage(json.toString());
119 | state = WebSocketConnectionState.REGISTERED;
120 | // Send any previously accumulated messages.
121 | for (String sendMessage : wsSendQueue) {
122 | send(sendMessage);
123 | }
124 | wsSendQueue.clear();
125 | } catch (JSONException e) {
126 | reportError("WebSocket register JSON error: " + e.getMessage());
127 | }
128 | }
129 |
130 | public void send(String message) {
131 | checkIfCalledOnValidThread();
132 | switch (state) {
133 | case NEW:
134 | case CONNECTED:
135 | // Store outgoing messages and send them after websocket client
136 | // is registered.
137 | Log.d(TAG, "WS ACC: " + message);
138 | wsSendQueue.add(message);
139 | return;
140 | case ERROR:
141 | case CLOSED:
142 | Log.e(TAG, "WebSocket send() in error or closed state : " + message);
143 | return;
144 | case REGISTERED:
145 | JSONObject json = new JSONObject();
146 | try {
147 | json.put("cmd", "send");
148 | json.put("msg", message);
149 | message = json.toString();
150 | Log.d(TAG, "C->WSS: " + message);
151 | ws.sendTextMessage(message);
152 | } catch (JSONException e) {
153 | reportError("WebSocket send JSON error: " + e.getMessage());
154 | }
155 | break;
156 | }
157 | }
158 |
159 | // This call can be used to send WebSocket messages before WebSocket
160 | // connection is opened.
161 | public void post(String message) {
162 | checkIfCalledOnValidThread();
163 | sendWSSMessage("POST", message);
164 | }
165 |
166 | public void disconnect(boolean waitForComplete) {
167 | checkIfCalledOnValidThread();
168 | Log.d(TAG, "Disconnect WebSocket. State: " + state);
169 | if (state == WebSocketConnectionState.REGISTERED) {
170 | // Send "bye" to WebSocket server.
171 | send("{\"type\": \"bye\"}");
172 | state = WebSocketConnectionState.CONNECTED;
173 | // Send HTTP DELETE to the WebSocket HTTP server.
174 | sendWSSMessage("DELETE", "");
175 | }
176 | // Close WebSocket in CONNECTED or ERROR states only.
177 | if (state == WebSocketConnectionState.CONNECTED || state == WebSocketConnectionState.ERROR) {
178 | ws.disconnect();
179 | state = WebSocketConnectionState.CLOSED;
180 |
181 | // Wait for websocket close event to prevent websocket library from
182 | // sending any pending messages to deleted looper thread.
183 | if (waitForComplete) {
184 | synchronized (closeEventLock) {
185 | while (!closeEvent) {
186 | try {
187 | closeEventLock.wait(CLOSE_TIMEOUT);
188 | break;
189 | } catch (InterruptedException e) {
190 | Log.e(TAG, "Wait error: " + e.toString());
191 | }
192 | }
193 | }
194 | }
195 | }
196 | Log.d(TAG, "Disconnecting WebSocket done.");
197 | }
198 |
199 | private void reportError(final String errorMessage) {
200 | Log.e(TAG, errorMessage);
201 | handler.post(new Runnable() {
202 | @Override
203 | public void run() {
204 | if (state != WebSocketConnectionState.ERROR) {
205 | state = WebSocketConnectionState.ERROR;
206 | events.onWebSocketError(errorMessage);
207 | }
208 | }
209 | });
210 | }
211 |
212 | // Asynchronously send POST/DELETE to WebSocket server.
213 | private void sendWSSMessage(final String method, final String message) {
214 | String postUrl = postServerUrl + "/" + roomID + "/" + clientID;
215 | Log.d(TAG, "WS " + method + " : " + postUrl + " : " + message);
216 | AsyncHttpURLConnection httpConnection =
217 | new AsyncHttpURLConnection(method, postUrl, message, new AsyncHttpEvents() {
218 | @Override
219 | public void onHttpError(String errorMessage) {
220 | reportError("WS " + method + " error: " + errorMessage);
221 | }
222 |
223 | @Override
224 | public void onHttpComplete(String response) {}
225 | });
226 | httpConnection.send();
227 | }
228 |
229 | // Helper method for debugging purposes. Ensures that WebSocket method is
230 | // called on a looper thread.
231 | private void checkIfCalledOnValidThread() {
232 | if (Thread.currentThread() != handler.getLooper().getThread()) {
233 | throw new IllegalStateException("WebSocket method is not called on valid thread");
234 | }
235 | }
236 |
237 | private class WebSocketObserver implements WebSocketConnectionObserver {
238 | @Override
239 | public void onOpen() {
240 | Log.d(TAG, "WebSocket connection opened to: " + wsServerUrl);
241 | handler.post(new Runnable() {
242 | @Override
243 | public void run() {
244 | state = WebSocketConnectionState.CONNECTED;
245 | // Check if we have pending register request.
246 | if (roomID != null && clientID != null) {
247 | register(roomID, clientID);
248 | }
249 | }
250 | });
251 | }
252 |
253 | @Override
254 | public void onClose(WebSocketCloseNotification code, String reason) {
255 | Log.d(TAG, "WebSocket connection closed. Code: " + code + ". Reason: " + reason + ". State: "
256 | + state);
257 | synchronized (closeEventLock) {
258 | closeEvent = true;
259 | closeEventLock.notify();
260 | }
261 | handler.post(new Runnable() {
262 | @Override
263 | public void run() {
264 | if (state != WebSocketConnectionState.CLOSED) {
265 | state = WebSocketConnectionState.CLOSED;
266 | events.onWebSocketClose();
267 | }
268 | }
269 | });
270 | }
271 |
272 | @Override
273 | public void onTextMessage(String payload) {
274 | Log.d(TAG, "WSS->C: " + payload);
275 | final String message = payload;
276 | handler.post(new Runnable() {
277 | @Override
278 | public void run() {
279 | if (state == WebSocketConnectionState.CONNECTED
280 | || state == WebSocketConnectionState.REGISTERED) {
281 | events.onWebSocketMessage(message);
282 | }
283 | }
284 | });
285 | }
286 |
287 | @Override
288 | public void onRawTextMessage(byte[] payload) {}
289 |
290 | @Override
291 | public void onBinaryMessage(byte[] payload) {}
292 | }
293 | }
294 |
--------------------------------------------------------------------------------
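
The class comment above requires every public WebSocketChannelClient method to be invoked on the looper thread whose Handler was passed to the constructor, and events are dispatched back on that same thread. Below is a minimal usage sketch, not part of the sample: the thread name, URLs, room/client IDs, and payload are placeholder assumptions, and the events implementation only logs.

import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import org.appspot.apprtc.WebSocketChannelClient;
import org.appspot.apprtc.WebSocketChannelClient.WebSocketChannelEvents;

final class WebSocketChannelClientSketch implements WebSocketChannelEvents {
  private static final String TAG = "WSChannelSketch";
  private final HandlerThread signalingThread = new HandlerThread("ws-signaling");
  private final Handler handler;
  private final WebSocketChannelClient wsClient;

  WebSocketChannelClientSketch() {
    signalingThread.start();
    handler = new Handler(signalingThread.getLooper());
    // The client keeps this handler and dispatches all events on its looper thread.
    wsClient = new WebSocketChannelClient(handler, this);
  }

  void start() {
    // Every call has to be posted to the looper thread passed above, otherwise
    // checkIfCalledOnValidThread() throws an IllegalStateException.
    handler.post(new Runnable() {
      @Override
      public void run() {
        wsClient.connect("wss://example.org/ws", "https://example.org/ws");
        // connect() is asynchronous; register() before onOpen() only stores the IDs
        // and is retried from onOpen() once the socket reports CONNECTED.
        wsClient.register("room-id", "client-id");
        // Until the state is REGISTERED, send() just queues the payload; register()
        // flushes the queue, wrapping each payload as {"cmd": "send", "msg": ...}.
        wsClient.send("{\"placeholder\": \"payload\"}");
      }
    });
  }

  void stop() {
    handler.post(new Runnable() {
      @Override
      public void run() {
        // disconnect(true) waits (up to CLOSE_TIMEOUT) for the close event before
        // the looper is quit, so the library cannot post to a dead thread.
        wsClient.disconnect(true);
        signalingThread.quit();
      }
    });
  }

  @Override
  public void onWebSocketMessage(String message) {
    Log.d(TAG, "WS message: " + message);
  }

  @Override
  public void onWebSocketClose() {
    Log.d(TAG, "WS closed");
  }

  @Override
  public void onWebSocketError(String description) {
    Log.e(TAG, "WS error: " + description);
  }
}
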
/app/src/main/java/org/appspot/apprtc/WebSocketRTCClient.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014 The WebRTC Project Authors. All rights reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | package org.appspot.apprtc;
12 |
13 | import org.appspot.apprtc.RoomParametersFetcher.RoomParametersFetcherEvents;
14 | import org.appspot.apprtc.WebSocketChannelClient.WebSocketChannelEvents;
15 | import org.appspot.apprtc.WebSocketChannelClient.WebSocketConnectionState;
16 | import org.appspot.apprtc.util.AsyncHttpURLConnection;
17 | import org.appspot.apprtc.util.AsyncHttpURLConnection.AsyncHttpEvents;
18 |
19 | import android.os.Handler;
20 | import android.os.HandlerThread;
21 | import android.util.Log;
22 |
23 | import org.json.JSONArray;
24 | import org.json.JSONException;
25 | import org.json.JSONObject;
26 | import org.webrtc.IceCandidate;
27 | import org.webrtc.SessionDescription;
28 |
29 | /**
30 | * Negotiates signaling for chatting with https://appr.tc "rooms".
31 | * Uses the client<->server specifics of the apprtc AppEngine webapp.
32 | *
33 |  * To use: create an instance of this object (registering a message handler) and
34 |  * call connectToRoom(). Once the room connection is established, the
35 |  * onConnectedToRoom() callback is invoked with the room parameters.
36 |  * Messages to the other party (with local ICE candidates and answer SDP) can
37 |  * be sent after the WebSocket connection is established.
38 | */
39 | public class WebSocketRTCClient implements AppRTCClient, WebSocketChannelEvents {
40 | private static final String TAG = "WSRTCClient";
41 | private static final String ROOM_JOIN = "join";
42 | private static final String ROOM_MESSAGE = "message";
43 | private static final String ROOM_LEAVE = "leave";
44 |
45 | private enum ConnectionState { NEW, CONNECTED, CLOSED, ERROR }
46 |
47 | private enum MessageType { MESSAGE, LEAVE }
48 |
49 | private final Handler handler;
50 | private boolean initiator;
51 | private SignalingEvents events;
52 | private WebSocketChannelClient wsClient;
53 | private ConnectionState roomState;
54 | private RoomConnectionParameters connectionParameters;
55 | private String messageUrl;
56 | private String leaveUrl;
57 |
58 | public WebSocketRTCClient(SignalingEvents events) {
59 | this.events = events;
60 | roomState = ConnectionState.NEW;
61 | final HandlerThread handlerThread = new HandlerThread(TAG);
62 | handlerThread.start();
63 | handler = new Handler(handlerThread.getLooper());
64 | }
65 |
66 | // --------------------------------------------------------------------
67 | // AppRTCClient interface implementation.
68 | // Asynchronously connects to an AppRTC room URL using the supplied connection
69 | // parameters, retrieves the room parameters, and connects to the WebSocket server.
70 | @Override
71 | public void connectToRoom(RoomConnectionParameters connectionParameters) {
72 | this.connectionParameters = connectionParameters;
73 | handler.post(new Runnable() {
74 | @Override
75 | public void run() {
76 | connectToRoomInternal();
77 | }
78 | });
79 | }
80 |
81 | @Override
82 | public void disconnectFromRoom() {
83 | handler.post(new Runnable() {
84 | @Override
85 | public void run() {
86 | disconnectFromRoomInternal();
87 | handler.getLooper().quit();
88 | }
89 | });
90 | }
91 |
92 | // Connects to room - function runs on a local looper thread.
93 | private void connectToRoomInternal() {
94 | String connectionUrl = getConnectionUrl(connectionParameters);
95 | Log.d(TAG, "Connect to room: " + connectionUrl);
96 | roomState = ConnectionState.NEW;
97 | wsClient = new WebSocketChannelClient(handler, this);
98 |
99 | RoomParametersFetcherEvents callbacks = new RoomParametersFetcherEvents() {
100 | @Override
101 | public void onSignalingParametersReady(final SignalingParameters params) {
102 | WebSocketRTCClient.this.handler.post(new Runnable() {
103 | @Override
104 | public void run() {
105 | WebSocketRTCClient.this.signalingParametersReady(params);
106 | }
107 | });
108 | }
109 |
110 | @Override
111 | public void onSignalingParametersError(String description) {
112 | WebSocketRTCClient.this.reportError(description);
113 | }
114 | };
115 |
116 | new RoomParametersFetcher(connectionUrl, null, callbacks).makeRequest();
117 | }
118 |
119 | // Disconnect from room and send bye messages - runs on a local looper thread.
120 | private void disconnectFromRoomInternal() {
121 | Log.d(TAG, "Disconnect. Room state: " + roomState);
122 | if (roomState == ConnectionState.CONNECTED) {
123 | Log.d(TAG, "Closing room.");
124 | sendPostMessage(MessageType.LEAVE, leaveUrl, null);
125 | }
126 | roomState = ConnectionState.CLOSED;
127 | if (wsClient != null) {
128 | wsClient.disconnect(true);
129 | }
130 | }
131 |
132 | // Helper functions to get connection, post message and leave message URLs
133 | private String getConnectionUrl(RoomConnectionParameters connectionParameters) {
134 | return connectionParameters.roomUrl + "/" + ROOM_JOIN + "/" + connectionParameters.roomId
135 | + getQueryString(connectionParameters);
136 | }
137 |
138 | private String getMessageUrl(
139 | RoomConnectionParameters connectionParameters, SignalingParameters signalingParameters) {
140 | return connectionParameters.roomUrl + "/" + ROOM_MESSAGE + "/" + connectionParameters.roomId
141 | + "/" + signalingParameters.clientId + getQueryString(connectionParameters);
142 | }
143 |
144 | private String getLeaveUrl(
145 | RoomConnectionParameters connectionParameters, SignalingParameters signalingParameters) {
146 | return connectionParameters.roomUrl + "/" + ROOM_LEAVE + "/" + connectionParameters.roomId + "/"
147 | + signalingParameters.clientId + getQueryString(connectionParameters);
148 | }
149 |
150 | private String getQueryString(RoomConnectionParameters connectionParameters) {
151 | if (connectionParameters.urlParameters != null) {
152 | return "?" + connectionParameters.urlParameters;
153 | } else {
154 | return "";
155 | }
156 | }
157 |
158 | // Callback issued when room parameters are extracted. Runs on local
159 | // looper thread.
160 | private void signalingParametersReady(final SignalingParameters signalingParameters) {
161 | Log.d(TAG, "Room connection completed.");
162 | if (connectionParameters.loopback
163 | && (!signalingParameters.initiator || signalingParameters.offerSdp != null)) {
164 | reportError("Loopback room is busy.");
165 | return;
166 | }
167 | if (!connectionParameters.loopback && !signalingParameters.initiator
168 | && signalingParameters.offerSdp == null) {
169 | Log.w(TAG, "No offer SDP in room response.");
170 | }
171 | initiator = signalingParameters.initiator;
172 | messageUrl = getMessageUrl(connectionParameters, signalingParameters);
173 | leaveUrl = getLeaveUrl(connectionParameters, signalingParameters);
174 | Log.d(TAG, "Message URL: " + messageUrl);
175 | Log.d(TAG, "Leave URL: " + leaveUrl);
176 | roomState = ConnectionState.CONNECTED;
177 |
178 | // Fire connection and signaling parameters events.
179 | events.onConnectedToRoom(signalingParameters);
180 |
181 | // Connect and register WebSocket client.
182 | wsClient.connect(signalingParameters.wssUrl, signalingParameters.wssPostUrl);
183 | wsClient.register(connectionParameters.roomId, signalingParameters.clientId);
184 | }
185 |
186 | // Send local offer SDP to the other participant.
187 | @Override
188 | public void sendOfferSdp(final SessionDescription sdp) {
189 | handler.post(new Runnable() {
190 | @Override
191 | public void run() {
192 | if (roomState != ConnectionState.CONNECTED) {
193 | reportError("Sending offer SDP in non connected state.");
194 | return;
195 | }
196 | JSONObject json = new JSONObject();
197 | jsonPut(json, "sdp", sdp.description);
198 | jsonPut(json, "type", "offer");
199 | sendPostMessage(MessageType.MESSAGE, messageUrl, json.toString());
200 | if (connectionParameters.loopback) {
201 | // In loopback mode rename this offer to answer and route it back.
202 | SessionDescription sdpAnswer = new SessionDescription(
203 | SessionDescription.Type.fromCanonicalForm("answer"), sdp.description);
204 | events.onRemoteDescription(sdpAnswer);
205 | }
206 | }
207 | });
208 | }
209 |
210 | // Send local answer SDP to the other participant.
211 | @Override
212 | public void sendAnswerSdp(final SessionDescription sdp) {
213 | handler.post(new Runnable() {
214 | @Override
215 | public void run() {
216 | if (connectionParameters.loopback) {
217 | Log.e(TAG, "Sending answer in loopback mode.");
218 | return;
219 | }
220 | JSONObject json = new JSONObject();
221 | jsonPut(json, "sdp", sdp.description);
222 | jsonPut(json, "type", "answer");
223 | wsClient.send(json.toString());
224 | }
225 | });
226 | }
227 |
228 | // Send Ice candidate to the other participant.
229 | @Override
230 | public void sendLocalIceCandidate(final IceCandidate candidate) {
231 | handler.post(new Runnable() {
232 | @Override
233 | public void run() {
234 | JSONObject json = new JSONObject();
235 | jsonPut(json, "type", "candidate");
236 | jsonPut(json, "label", candidate.sdpMLineIndex);
237 | jsonPut(json, "id", candidate.sdpMid);
238 | jsonPut(json, "candidate", candidate.sdp);
239 | if (initiator) {
240 | // Call initiator sends ice candidates to GAE server.
241 | if (roomState != ConnectionState.CONNECTED) {
242 | reportError("Sending ICE candidate in non connected state.");
243 | return;
244 | }
245 | sendPostMessage(MessageType.MESSAGE, messageUrl, json.toString());
246 | if (connectionParameters.loopback) {
247 | events.onRemoteIceCandidate(candidate);
248 | }
249 | } else {
250 | // Call receiver sends ice candidates to websocket server.
251 | wsClient.send(json.toString());
252 | }
253 | }
254 | });
255 | }
256 |
257 | // Send removed Ice candidates to the other participant.
258 | @Override
259 | public void sendLocalIceCandidateRemovals(final IceCandidate[] candidates) {
260 | handler.post(new Runnable() {
261 | @Override
262 | public void run() {
263 | JSONObject json = new JSONObject();
264 | jsonPut(json, "type", "remove-candidates");
265 | JSONArray jsonArray = new JSONArray();
266 | for (final IceCandidate candidate : candidates) {
267 | jsonArray.put(toJsonCandidate(candidate));
268 | }
269 | jsonPut(json, "candidates", jsonArray);
270 | if (initiator) {
271 | // Call initiator sends ice candidates to GAE server.
272 | if (roomState != ConnectionState.CONNECTED) {
273 | reportError("Sending ICE candidate removals in non connected state.");
274 | return;
275 | }
276 | sendPostMessage(MessageType.MESSAGE, messageUrl, json.toString());
277 | if (connectionParameters.loopback) {
278 | events.onRemoteIceCandidatesRemoved(candidates);
279 | }
280 | } else {
281 | // Call receiver sends ice candidates to websocket server.
282 | wsClient.send(json.toString());
283 | }
284 | }
285 | });
286 | }
287 |
288 | // --------------------------------------------------------------------
289 | // WebSocketChannelEvents interface implementation.
290 | // All events are called by WebSocketChannelClient on a local looper thread
291 | // (passed to WebSocket client constructor).
292 | @Override
293 | public void onWebSocketMessage(final String msg) {
294 | if (wsClient.getState() != WebSocketConnectionState.REGISTERED) {
295 | Log.e(TAG, "Got WebSocket message in non registered state.");
296 | return;
297 | }
298 | try {
299 | JSONObject json = new JSONObject(msg);
300 | String msgText = json.getString("msg");
301 | String errorText = json.optString("error");
302 | if (msgText.length() > 0) {
303 | json = new JSONObject(msgText);
304 | String type = json.optString("type");
305 | if (type.equals("candidate")) {
306 | events.onRemoteIceCandidate(toJavaCandidate(json));
307 | } else if (type.equals("remove-candidates")) {
308 | JSONArray candidateArray = json.getJSONArray("candidates");
309 | IceCandidate[] candidates = new IceCandidate[candidateArray.length()];
310 | for (int i = 0; i < candidateArray.length(); ++i) {
311 | candidates[i] = toJavaCandidate(candidateArray.getJSONObject(i));
312 | }
313 | events.onRemoteIceCandidatesRemoved(candidates);
314 | } else if (type.equals("answer")) {
315 | if (initiator) {
316 | SessionDescription sdp = new SessionDescription(
317 | SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
318 | events.onRemoteDescription(sdp);
319 | } else {
320 | reportError("Received answer for call initiator: " + msg);
321 | }
322 | } else if (type.equals("offer")) {
323 | if (!initiator) {
324 | SessionDescription sdp = new SessionDescription(
325 | SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
326 | events.onRemoteDescription(sdp);
327 | } else {
328 | reportError("Received offer for call receiver: " + msg);
329 | }
330 | } else if (type.equals("bye")) {
331 | events.onChannelClose();
332 | } else {
333 | reportError("Unexpected WebSocket message: " + msg);
334 | }
335 | } else {
336 | if (errorText != null && errorText.length() > 0) {
337 | reportError("WebSocket error message: " + errorText);
338 | } else {
339 | reportError("Unexpected WebSocket message: " + msg);
340 | }
341 | }
342 | } catch (JSONException e) {
343 | reportError("WebSocket message JSON parsing error: " + e.toString());
344 | }
345 | }
346 |
347 | @Override
348 | public void onWebSocketClose() {
349 | events.onChannelClose();
350 | }
351 |
352 | @Override
353 | public void onWebSocketError(String description) {
354 | reportError("WebSocket error: " + description);
355 | }
356 |
357 | // --------------------------------------------------------------------
358 | // Helper functions.
359 | private void reportError(final String errorMessage) {
360 | Log.e(TAG, errorMessage);
361 | handler.post(new Runnable() {
362 | @Override
363 | public void run() {
364 | if (roomState != ConnectionState.ERROR) {
365 | roomState = ConnectionState.ERROR;
366 | events.onChannelError(errorMessage);
367 | }
368 | }
369 | });
370 | }
371 |
372 | // Put a |key|->|value| mapping in |json|.
373 | private static void jsonPut(JSONObject json, String key, Object value) {
374 | try {
375 | json.put(key, value);
376 | } catch (JSONException e) {
377 | throw new RuntimeException(e);
378 | }
379 | }
380 |
381 | // Send SDP or ICE candidate to a room server.
382 | private void sendPostMessage(
383 | final MessageType messageType, final String url, final String message) {
384 | String logInfo = url;
385 | if (message != null) {
386 | logInfo += ". Message: " + message;
387 | }
388 | Log.d(TAG, "C->GAE: " + logInfo);
389 | AsyncHttpURLConnection httpConnection =
390 | new AsyncHttpURLConnection("POST", url, message, new AsyncHttpEvents() {
391 | @Override
392 | public void onHttpError(String errorMessage) {
393 | reportError("GAE POST error: " + errorMessage);
394 | }
395 |
396 | @Override
397 | public void onHttpComplete(String response) {
398 | if (messageType == MessageType.MESSAGE) {
399 | try {
400 | JSONObject roomJson = new JSONObject(response);
401 | String result = roomJson.getString("result");
402 | if (!result.equals("SUCCESS")) {
403 | reportError("GAE POST error: " + result);
404 | }
405 | } catch (JSONException e) {
406 | reportError("GAE POST JSON error: " + e.toString());
407 | }
408 | }
409 | }
410 | });
411 | httpConnection.send();
412 | }
413 |
414 | // Converts a Java candidate to a JSONObject.
415 | private JSONObject toJsonCandidate(final IceCandidate candidate) {
416 | JSONObject json = new JSONObject();
417 | jsonPut(json, "label", candidate.sdpMLineIndex);
418 | jsonPut(json, "id", candidate.sdpMid);
419 | jsonPut(json, "candidate", candidate.sdp);
420 | return json;
421 | }
422 |
423 | // Converts a JSON candidate to a Java object.
424 | IceCandidate toJavaCandidate(JSONObject json) throws JSONException {
425 | return new IceCandidate(
426 | json.getString("id"), json.getInt("label"), json.getString("candidate"));
427 | }
428 | }
429 |
--------------------------------------------------------------------------------
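
The class comment at the top of WebSocketRTCClient above describes the intended call order. A rough wiring sketch follows; it is not part of the sample. The SignalingEvents callbacks are the ones invoked throughout this file, while the RoomConnectionParameters constructor arguments are assumptions inferred from the fields the URL helpers read (roomUrl, roomId, loopback, urlParameters); the real signature lives in AppRTCClient.java.

import org.appspot.apprtc.AppRTCClient;
import org.appspot.apprtc.WebSocketRTCClient;

final class RoomJoinSketch {
  static AppRTCClient joinRoom(String roomUrl, String roomId,
      AppRTCClient.SignalingEvents callbacks) {
    AppRTCClient client = new WebSocketRTCClient(callbacks);
    // Assumed constructor shape. With roomUrl "https://appr.tc" the join request
    // goes to "<roomUrl>/join/<roomId>", as built by getConnectionUrl() above.
    AppRTCClient.RoomConnectionParameters parameters =
        new AppRTCClient.RoomConnectionParameters(
            roomUrl, roomId, /* loopback= */ false, /* urlParameters= */ null);
    client.connectToRoom(parameters);
    // Room parameters arrive via callbacks.onConnectedToRoom(); only after that do
    // sendOfferSdp()/sendAnswerSdp()/sendLocalIceCandidate() reach the other party.
    return client;
  }
}
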
/app/src/main/java/org/appspot/apprtc/util/AppRTCUtils.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014 The WebRTC Project Authors. All rights reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | package org.appspot.apprtc.util;
12 |
13 | import android.os.Build;
14 | import android.util.Log;
15 |
16 | /**
17 |  * AppRTCUtils provides static helpers for assertions and thread/device info logging.
18 | */
19 | public final class AppRTCUtils {
20 | private AppRTCUtils() {}
21 |
22 | /** Helper method which throws an exception when an assertion has failed. */
23 | public static void assertIsTrue(boolean condition) {
24 | if (!condition) {
25 | throw new AssertionError("Expected condition to be true");
26 | }
27 | }
28 |
29 | /** Helper method for building a string of thread information.*/
30 | public static String getThreadInfo() {
31 | return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
32 | + "]";
33 | }
34 |
35 | /** Information about the current build, taken from system properties. */
36 | public static void logDeviceInfo(String tag) {
37 | Log.d(tag, "Android SDK: " + Build.VERSION.SDK_INT + ", "
38 | + "Release: " + Build.VERSION.RELEASE + ", "
39 | + "Brand: " + Build.BRAND + ", "
40 | + "Device: " + Build.DEVICE + ", "
41 | + "Id: " + Build.ID + ", "
42 | + "Hardware: " + Build.HARDWARE + ", "
43 | + "Manufacturer: " + Build.MANUFACTURER + ", "
44 | + "Model: " + Build.MODEL + ", "
45 | + "Product: " + Build.PRODUCT);
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
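
For completeness, a tiny illustration (not from the sample) of how the AppRTCUtils helpers above are typically called:

import android.util.Log;
import org.appspot.apprtc.util.AppRTCUtils;

final class AppRTCUtilsSketch {
  private static final String TAG = "AppRTCUtilsSketch";

  static void example(boolean initialized) {
    // Log the current thread and the device the build is running on.
    Log.d(TAG, "Running " + AppRTCUtils.getThreadInfo());
    AppRTCUtils.logDeviceInfo(TAG);
    // Throws AssertionError when the condition does not hold.
    AppRTCUtils.assertIsTrue(initialized);
  }
}
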
/app/src/main/java/org/appspot/apprtc/util/AsyncHttpURLConnection.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2015 The WebRTC Project Authors. All rights reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | package org.appspot.apprtc.util;
12 |
13 | import java.io.IOException;
14 | import java.io.InputStream;
15 | import java.io.OutputStream;
16 | import java.net.HttpURLConnection;
17 | import java.net.SocketTimeoutException;
18 | import java.net.URL;
19 | import java.util.Scanner;
20 |
21 | /**
22 |  * Asynchronous HTTP request implementation.
23 | */
24 | public class AsyncHttpURLConnection {
25 | private static final int HTTP_TIMEOUT_MS = 8000;
26 | private static final String HTTP_ORIGIN = "https://appr.tc";
27 | private final String method;
28 | private final String url;
29 | private final String message;
30 | private final AsyncHttpEvents events;
31 | private String contentType;
32 |
33 | /**
34 |  * HTTP request callbacks.
35 | */
36 | public interface AsyncHttpEvents {
37 | void onHttpError(String errorMessage);
38 | void onHttpComplete(String response);
39 | }
40 |
41 | public AsyncHttpURLConnection(String method, String url, String message, AsyncHttpEvents events) {
42 | this.method = method;
43 | this.url = url;
44 | this.message = message;
45 | this.events = events;
46 | }
47 |
48 | public void setContentType(String contentType) {
49 | this.contentType = contentType;
50 | }
51 |
52 | public void send() {
53 | new Thread(this::sendHttpMessage).start();
54 | }
55 |
56 | private void sendHttpMessage() {
57 | try {
58 | HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
59 | byte[] postData = new byte[0];
60 | if (message != null) {
61 | postData = message.getBytes("UTF-8");
62 | }
63 | connection.setRequestMethod(method);
64 | connection.setUseCaches(false);
65 | connection.setDoInput(true);
66 | connection.setConnectTimeout(HTTP_TIMEOUT_MS);
67 | connection.setReadTimeout(HTTP_TIMEOUT_MS);
68 | // TODO(glaznev) - query request origin from pref_room_server_url_key preferences.
69 | connection.addRequestProperty("origin", HTTP_ORIGIN);
70 | boolean doOutput = false;
71 | if (method.equals("POST")) {
72 | doOutput = true;
73 | connection.setDoOutput(true);
74 | connection.setFixedLengthStreamingMode(postData.length);
75 | }
76 | if (contentType == null) {
77 | connection.setRequestProperty("Content-Type", "text/plain; charset=utf-8");
78 | } else {
79 | connection.setRequestProperty("Content-Type", contentType);
80 | }
81 |
82 | // Send POST request.
83 | if (doOutput && postData.length > 0) {
84 | OutputStream outStream = connection.getOutputStream();
85 | outStream.write(postData);
86 | outStream.close();
87 | }
88 |
89 | // Get response.
90 | int responseCode = connection.getResponseCode();
91 | if (responseCode != 200) {
92 | events.onHttpError("Non-200 response to " + method + " to URL: " + url + " : "
93 | + connection.getHeaderField(null));
94 | connection.disconnect();
95 | return;
96 | }
97 | InputStream responseStream = connection.getInputStream();
98 | String response = drainStream(responseStream);
99 | responseStream.close();
100 | connection.disconnect();
101 | events.onHttpComplete(response);
102 | } catch (SocketTimeoutException e) {
103 | events.onHttpError("HTTP " + method + " to " + url + " timeout");
104 | } catch (IOException e) {
105 | events.onHttpError("HTTP " + method + " to " + url + " error: " + e.getMessage());
106 | }
107 | }
108 |
109 | // Return the contents of an InputStream as a String.
110 | private static String drainStream(InputStream in) {
111 | Scanner s = new Scanner(in, "UTF-8").useDelimiter("\\A");
112 | return s.hasNext() ? s.next() : "";
113 | }
114 | }
115 |
--------------------------------------------------------------------------------
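
A self-contained sketch (not part of the sample) showing how AsyncHttpURLConnection is driven; the URL and JSON payload are placeholders:

import android.util.Log;
import org.appspot.apprtc.util.AsyncHttpURLConnection;
import org.appspot.apprtc.util.AsyncHttpURLConnection.AsyncHttpEvents;

final class AsyncHttpSketch {
  private static final String TAG = "AsyncHttpSketch";

  static void postExample() {
    AsyncHttpURLConnection connection = new AsyncHttpURLConnection(
        "POST", "https://example.org/endpoint", "{\"hello\": \"world\"}",
        new AsyncHttpEvents() {
          @Override
          public void onHttpError(String errorMessage) {
            // Fired for non-200 responses, timeouts, and IO errors.
            Log.e(TAG, "HTTP error: " + errorMessage);
          }

          @Override
          public void onHttpComplete(String response) {
            Log.d(TAG, "HTTP response: " + response);
          }
        });
    // Optional; without it the body is sent as text/plain; charset=utf-8.
    connection.setContentType("application/json");
    // send() spawns a background thread; both callbacks run on that thread.
    connection.send();
  }
}
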
/app/src/main/res/drawable-hdpi/disconnect.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/androidthings/sample-videoRTC/95e6aa4bf6721463f8e5c5cc818a876dd88ed0fb/app/src/main/res/drawable-hdpi/disconnect.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-hdpi/ic_action_full_screen.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/androidthings/sample-videoRTC/95e6aa4bf6721463f8e5c5cc818a876dd88ed0fb/app/src/main/res/drawable-hdpi/ic_action_full_screen.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-hdpi/ic_action_return_from_full_screen.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/androidthings/sample-videoRTC/95e6aa4bf6721463f8e5c5cc818a876dd88ed0fb/app/src/main/res/drawable-hdpi/ic_action_return_from_full_screen.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-ldpi/disconnect.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/androidthings/sample-videoRTC/95e6aa4bf6721463f8e5c5cc818a876dd88ed0fb/app/src/main/res/drawable-ldpi/disconnect.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-ldpi/ic_action_full_screen.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/androidthings/sample-videoRTC/95e6aa4bf6721463f8e5c5cc818a876dd88ed0fb/app/src/main/res/drawable-ldpi/ic_action_full_screen.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-ldpi/ic_action_return_from_full_screen.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/androidthings/sample-videoRTC/95e6aa4bf6721463f8e5c5cc818a876dd88ed0fb/app/src/main/res/drawable-ldpi/ic_action_return_from_full_screen.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-mdpi/disconnect.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/androidthings/sample-videoRTC/95e6aa4bf6721463f8e5c5cc818a876dd88ed0fb/app/src/main/res/drawable-mdpi/disconnect.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-mdpi/ic_action_full_screen.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/androidthings/sample-videoRTC/95e6aa4bf6721463f8e5c5cc818a876dd88ed0fb/app/src/main/res/drawable-mdpi/ic_action_full_screen.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-mdpi/ic_action_return_from_full_screen.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/androidthings/sample-videoRTC/95e6aa4bf6721463f8e5c5cc818a876dd88ed0fb/app/src/main/res/drawable-mdpi/ic_action_return_from_full_screen.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-xhdpi/disconnect.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/androidthings/sample-videoRTC/95e6aa4bf6721463f8e5c5cc818a876dd88ed0fb/app/src/main/res/drawable-xhdpi/disconnect.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-xhdpi/ic_action_full_screen.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/androidthings/sample-videoRTC/95e6aa4bf6721463f8e5c5cc818a876dd88ed0fb/app/src/main/res/drawable-xhdpi/ic_action_full_screen.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-xhdpi/ic_action_return_from_full_screen.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/androidthings/sample-videoRTC/95e6aa4bf6721463f8e5c5cc818a876dd88ed0fb/app/src/main/res/drawable-xhdpi/ic_action_return_from_full_screen.png
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_call.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/layout/main_activity.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/values/arrays.xml:
--------------------------------------------------------------------------------
1 |
2 |
17 |
18 |
19 | Default
20 | 4K (3840 x 2160)
21 | Full HD (1920 x 1080)
22 | HD (1280 x 720)
23 | VGA (640 x 480)
24 | QVGA (320 x 240)
25 |
26 |
27 |
28 | Default
29 | 3840 x 2160
30 | 1920 x 1080
31 | 1280 x 720
32 | 640 x 480
33 | 320 x 240
34 |
35 |
36 |
37 | Default
38 | 30 fps
39 | 15 fps
40 |
41 |
42 |
43 | Default
44 | Manual
45 |
46 |
47 |
48 | VP8
49 | VP9
50 | H264 Baseline
51 | H264 High
52 |
53 |
54 |
55 | OPUS
56 | ISAC
57 |
58 |
59 |
60 | Auto (proximity sensor)
61 | Enabled
62 | Disabled
63 |
64 |
65 |
66 | auto
67 | true
68 | false
69 |
70 |
71 |
72 | Remove favorite
73 |
74 |
75 |
76 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 |
17 |
18 | Video Call Using WebRTC
19 | Settings
20 | Click the button below to initiate a WebRTC video call.
21 | Go to https://appr.tc/ to join the room just created during the step above, using its random room ID displayed in this app.
22 | Disconnect Call
23 | Favorites
24 | No favorites
25 | Invalid URL
26 | The URL or room name you entered resulted in an invalid URL: %1$s
27 |
28 | Connection error
29 | Connecting to: %1$s
30 | FATAL ERROR: Missing URL to connect to.
31 | Camera2 only supports capturing to texture. Either disable Camera2 or enable capturing to texture in the options.
32 | OK
33 | Switch front/back camera
34 | Slide to change capture format
35 | Muted
36 | Toggle debug view
37 | Toggle microphone on/off
38 | Settings
39 | Loopback connection
40 | Connect to the room
41 | Add favorite
42 | %1$dx%2$d @ %3$d fps
43 |
44 |
45 | room_preference
46 | room_list_preference
47 |
48 | video_settings_key
49 | WebRTC video settings.
50 |
51 | videocall_preference
52 | Video call.
53 | Enable video in a call.
54 | true
55 |
56 | screencapture_preference
57 | Use screencapture.
58 | false
59 |
60 | camera2_preference
61 | Use Camera2.
62 | true
63 | Not supported on this device.
64 |
65 | resolution_preference
66 | Video resolution.
67 | Enter AppRTC local video resolution.
68 | Default
69 |
70 | fps_preference
71 | Camera fps.
72 | Enter local camera fps.
73 | Default
74 |
75 | capturequalityslider_preference
76 | Capture quality slider.
77 | Enable slider for changing capture quality.
78 | false
79 |
80 | maxvideobitrate_preference
81 | Maximum video bitrate setting.
82 | Maximum video bitrate setting.
83 | Default
84 |
85 | maxvideobitratevalue_preference
86 | Video encoder maximum bitrate.
87 | Enter video encoder maximum bitrate in kbps.
88 | 1700
89 |
90 | videocodec_preference
91 | Default video codec.
92 | Select default video codec.
93 | VP8
94 |
95 | hwcodec_preference
96 | Video codec hardware acceleration.
97 | Use hardware accelerated video codec (if available).
98 | true
99 |
100 | capturetotexture_preference
101 | Video capture to surface texture.
102 | Capture video to textures (if available).
103 | true
104 |
105 | flexfec_preference
106 | Codec-agnostic Flexible FEC.
107 | Enable FlexFEC.
108 | false
109 |
110 | Enabled
111 | Disabled
112 |
113 | audio_settings_key
114 | WebRTC audio settings.
115 |
116 | startaudiobitrate_preference
117 | Audio bitrate setting.
118 | Audio bitrate setting.
119 | Default
120 |
121 | startaudiobitratevalue_preference
122 | Audio codec bitrate.
123 | Enter audio codec bitrate in kbps.
124 | 32
125 |
126 | audiocodec_preference
127 | Default audio codec.
128 | Select default audio codec.
129 | OPUS
130 |
131 | audioprocessing_preference
132 | Disable audio processing.
133 | Disable audio processing pipeline.
134 | false
135 |
136 | aecdump_preference
137 | Create aecdump.
138 | Enable diagnostic audio recordings.
139 | false
140 |
141 | opensles_preference
142 | Use OpenSL ES for audio playback.
143 | Use OpenSL ES for audio playback.
144 | false
145 |
146 | disable_built_in_aec_preference
147 | Disable hardware AEC.
148 | Disable hardware AEC.
149 | false
150 | Hardware AEC is not available
151 |
152 | disable_built_in_agc_preference
153 | Disable hardware AGC.
154 | Disable hardware AGC.
155 | false
156 | Hardware AGC is not available
157 |
158 | disable_built_in_ns_preference
159 | Disable hardware NS.
160 | Disable hardware NS.
161 | false
162 | Hardware NS is not available
163 |
164 | enable_level_control_preference
165 | Enable level control.
166 | false
167 |
168 | disable_webrtc_agc_and_hpf_preference
169 | Disable WebRTC AGC and HPF.
170 | false
171 |
172 | speakerphone_preference
173 | Speakerphone.
174 | Speakerphone.
175 | auto
176 |
177 | data_settings_key
178 | WebRTC data channel settings.
179 |
180 | enable_datachannel_preference
181 | Enable datachannel.
182 | true
183 |
184 | ordered_preference
185 | Order messages.
186 | true
187 |
188 | Subprotocol
189 | Subprotocol.
190 | Enter subprotocol.
191 |
192 |
193 | negotiated_preference
194 | Negotiated.
195 | false
196 |
197 | max_retransmit_time_ms_preference
198 | Max delay to retransmit.
199 | Enter max delay to retransmit (in ms).
200 | -1
201 |
202 | max_retransmits_preference
203 | Max attempts to retransmit.
204 | Enter max attempts to retransmit.
205 | -1
206 |
207 | data_id_preference
208 | Data id.
209 | Enter data channel id.
210 | -1
211 |
212 | misc_settings_key
213 | Miscellaneous settings.
214 |
215 | room_server_url_preference
216 | Room server URL.
217 | Enter a room server URL.
218 | https://appr.tc
219 |
220 | displayhud_preference
221 | Display call statistics.
222 | Display call statistics.
223 | false
224 |
225 | tracing_preference
226 | Debug performance tracing.
227 | Debug performance tracing.
228 |
229 | ROOM ID:
230 |
231 |
232 |
--------------------------------------------------------------------------------
/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018, The Android Open Source Project
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | buildscript {
18 | repositories {
19 | google()
20 | jcenter()
21 | }
22 |
23 | dependencies {
24 | classpath 'com.android.tools.build:gradle:3.1.4'
25 | }
26 | }
27 |
28 | allprojects {
29 | repositories {
30 | google()
31 | jcenter()
32 | }
33 | }
34 |
35 | task clean(type: Delete) {
36 | delete rootProject.buildDir
37 | }
38 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/androidthings/sample-videoRTC/95e6aa4bf6721463f8e5c5cc818a876dd88ed0fb/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Wed Sep 12 20:52:28 MDT 2018
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-4.4-all.zip
7 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
10 | DEFAULT_JVM_OPTS=""
11 |
12 | APP_NAME="Gradle"
13 | APP_BASE_NAME=`basename "$0"`
14 |
15 | # Use the maximum available, or set MAX_FD != -1 to use that value.
16 | MAX_FD="maximum"
17 |
18 | warn ( ) {
19 | echo "$*"
20 | }
21 |
22 | die ( ) {
23 | echo
24 | echo "$*"
25 | echo
26 | exit 1
27 | }
28 |
29 | # OS specific support (must be 'true' or 'false').
30 | cygwin=false
31 | msys=false
32 | darwin=false
33 | case "`uname`" in
34 | CYGWIN* )
35 | cygwin=true
36 | ;;
37 | Darwin* )
38 | darwin=true
39 | ;;
40 | MINGW* )
41 | msys=true
42 | ;;
43 | esac
44 |
45 | # Attempt to set APP_HOME
46 | # Resolve links: $0 may be a link
47 | PRG="$0"
48 | # Need this for relative symlinks.
49 | while [ -h "$PRG" ] ; do
50 | ls=`ls -ld "$PRG"`
51 | link=`expr "$ls" : '.*-> \(.*\)$'`
52 | if expr "$link" : '/.*' > /dev/null; then
53 | PRG="$link"
54 | else
55 | PRG=`dirname "$PRG"`"/$link"
56 | fi
57 | done
58 | SAVED="`pwd`"
59 | cd "`dirname \"$PRG\"`/" >/dev/null
60 | APP_HOME="`pwd -P`"
61 | cd "$SAVED" >/dev/null
62 |
63 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
64 |
65 | # Determine the Java command to use to start the JVM.
66 | if [ -n "$JAVA_HOME" ] ; then
67 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
68 | # IBM's JDK on AIX uses strange locations for the executables
69 | JAVACMD="$JAVA_HOME/jre/sh/java"
70 | else
71 | JAVACMD="$JAVA_HOME/bin/java"
72 | fi
73 | if [ ! -x "$JAVACMD" ] ; then
74 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
75 |
76 | Please set the JAVA_HOME variable in your environment to match the
77 | location of your Java installation."
78 | fi
79 | else
80 | JAVACMD="java"
81 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
82 |
83 | Please set the JAVA_HOME variable in your environment to match the
84 | location of your Java installation."
85 | fi
86 |
87 | # Increase the maximum file descriptors if we can.
88 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
89 | MAX_FD_LIMIT=`ulimit -H -n`
90 | if [ $? -eq 0 ] ; then
91 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
92 | MAX_FD="$MAX_FD_LIMIT"
93 | fi
94 | ulimit -n $MAX_FD
95 | if [ $? -ne 0 ] ; then
96 | warn "Could not set maximum file descriptor limit: $MAX_FD"
97 | fi
98 | else
99 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
100 | fi
101 | fi
102 |
103 | # For Darwin, add options to specify how the application appears in the dock
104 | if $darwin; then
105 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
106 | fi
107 |
108 | # For Cygwin, switch paths to Windows format before running java
109 | if $cygwin ; then
110 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
111 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
112 | JAVACMD=`cygpath --unix "$JAVACMD"`
113 |
114 | # We build the pattern for arguments to be converted via cygpath
115 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
116 | SEP=""
117 | for dir in $ROOTDIRSRAW ; do
118 | ROOTDIRS="$ROOTDIRS$SEP$dir"
119 | SEP="|"
120 | done
121 | OURCYGPATTERN="(^($ROOTDIRS))"
122 | # Add a user-defined pattern to the cygpath arguments
123 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
124 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
125 | fi
126 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
127 | i=0
128 | for arg in "$@" ; do
129 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
130 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
131 |
132 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
133 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
134 | else
135 | eval `echo args$i`="\"$arg\""
136 | fi
137 | i=$((i+1))
138 | done
139 | case $i in
140 | (0) set -- ;;
141 | (1) set -- "$args0" ;;
142 | (2) set -- "$args0" "$args1" ;;
143 | (3) set -- "$args0" "$args1" "$args2" ;;
144 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
145 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
146 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
147 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
148 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
149 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
150 | esac
151 | fi
152 |
153 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
154 | function splitJvmOpts() {
155 | JVM_OPTS=("$@")
156 | }
157 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
158 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
159 |
160 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
161 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
12 | set DEFAULT_JVM_OPTS=
13 |
14 | set DIRNAME=%~dp0
15 | if "%DIRNAME%" == "" set DIRNAME=.
16 | set APP_BASE_NAME=%~n0
17 | set APP_HOME=%DIRNAME%
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 | if "%@eval[2+2]" == "4" goto 4NT_args
53 |
54 | :win9xME_args
55 | @rem Slurp the command line arguments.
56 | set CMD_LINE_ARGS=
57 | set _SKIP=2
58 |
59 | :win9xME_args_slurp
60 | if "x%~1" == "x" goto execute
61 |
62 | set CMD_LINE_ARGS=%*
63 | goto execute
64 |
65 | :4NT_args
66 | @rem Get arguments from the 4NT Shell from JP Software
67 | set CMD_LINE_ARGS=%$
68 |
69 | :execute
70 | @rem Setup the command line
71 |
72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
73 |
74 | @rem Execute Gradle
75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
76 |
77 | :end
78 | @rem End local scope for the variables with windows NT shell
79 | if "%ERRORLEVEL%"=="0" goto mainEnd
80 |
81 | :fail
82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
83 | rem the _cmd.exe /c_ return code!
84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
85 | exit /b 1
86 |
87 | :mainEnd
88 | if "%OS%"=="Windows_NT" endlocal
89 |
90 | :omega
91 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':app'
2 |
--------------------------------------------------------------------------------