├── WebRtc.Mono.Droid
├── lib
│ ├── ViEAndroidJavaAPI.jar
│ ├── audio_device_module_java.jar
│ ├── video_capture_module_java.jar
│ ├── video_render_module_java.jar
│ └── armeabi-v7a
│ │ └── libwebrtc-video-demo-jni.so
├── Resources
│ ├── Drawable
│ │ └── logo.png
│ ├── Layout
│ │ ├── send.xml
│ │ ├── row.xml
│ │ ├── tabhost.xml
│ │ ├── both.xml
│ │ ├── vconfig.xml
│ │ ├── aconfig.xml
│ │ └── main.xml
│ ├── AboutResources.txt
│ ├── Values
│ │ └── strings.xml
│ └── Resource.Designer.cs
├── src
│ ├── video_engine
│ │ └── test
│ │ │ └── android
│ │ │ ├── res
│ │ │ ├── drawable
│ │ │ │ └── logo.png
│ │ │ ├── layout
│ │ │ │ ├── send.xml
│ │ │ │ ├── row.xml
│ │ │ │ ├── tabhost.xml
│ │ │ │ ├── both.xml
│ │ │ │ ├── vconfig.xml
│ │ │ │ ├── aconfig.xml
│ │ │ │ └── main.xml
│ │ │ └── values
│ │ │ │ └── strings.xml
│ │ │ ├── libs
│ │ │ ├── audio_device_module_java.jar
│ │ │ ├── video_render_module_java.jar
│ │ │ ├── video_capture_module_java.jar
│ │ │ └── armeabi-v7a
│ │ │ │ └── libwebrtc-video-demo-jni.so
│ │ │ ├── gen
│ │ │ ├── org
│ │ │ │ └── webrtc
│ │ │ │ │ └── videoengineapp
│ │ │ │ │ ├── BuildConfig.java
│ │ │ │ │ └── R.java
│ │ │ └── R.java.d
│ │ │ ├── project.properties
│ │ │ ├── src
│ │ │ └── org
│ │ │ │ └── webrtc
│ │ │ │ ├── videoengineapp
│ │ │ │ ├── IViEAndroidCallback.java
│ │ │ │ └── ViEAndroidJavaAPI.java
│ │ │ │ └── videoengine
│ │ │ │ └── ViEMediaCodecDecoder.java
│ │ │ ├── AndroidManifest.xml
│ │ │ ├── build.xml
│ │ │ └── jni
│ │ │ └── org_webrtc_videoengineapp_vie_android_java_api.h
│ └── modules
│ │ ├── video_render
│ │ └── android
│ │ │ └── java
│ │ │ └── src
│ │ │ └── org
│ │ │ └── webrtc
│ │ │ └── videoengine
│ │ │ ├── ViERenderer.java
│ │ │ ├── ViESurfaceRenderer.java
│ │ │ └── ViEAndroidGLES20.java
│ │ ├── audio_device
│ │ └── android
│ │ │ └── java
│ │ │ └── src
│ │ │ └── org
│ │ │ └── webrtc
│ │ │ └── voiceengine
│ │ │ ├── AudioManagerAndroid.java
│ │ │ └── WebRTCAudioDevice.java
│ │ └── video_capture
│ │ └── android
│ │ └── java
│ │ └── src
│ │ └── org
│ │ └── webrtc
│ │ └── videoengine
│ │ ├── VideoCaptureDeviceInfoAndroid.java
│ │ └── VideoCaptureAndroid.java
├── Properties
│ ├── AndroidManifest.xml
│ └── AssemblyInfo.cs
├── Assets
│ └── AboutAssets.txt
├── DateTimeHelperClass.cs
└── WebRtc.Mono.Droid.csproj
├── Org.Webrtc.Videoengineapp
├── Jars
│ ├── ViEAndroidJavaAPI.jar
│ └── AboutJars.txt
├── Transforms
│ ├── Metadata.xml
│ ├── EnumMethods.xml
│ └── EnumFields.xml
├── Properties
│ └── AssemblyInfo.cs
├── Additions
│ └── AboutAdditions.txt
└── Org.Webrtc.Videoengineapp.csproj
├── Org.Webrtc.Videoengine
├── Jars
│ ├── video_render_module_java.jar
│ ├── video_capture_module_java.jar
│ └── AboutJars.txt
├── Transforms
│ ├── Metadata.xml
│ ├── EnumMethods.xml
│ └── EnumFields.xml
├── Properties
│ └── AssemblyInfo.cs
├── Additions
│ └── AboutAdditions.txt
└── Org.Webrtc.Videoengine.csproj
├── Org.Webrtc.Voiceengine
├── Jars
│ ├── audio_device_module_java.jar
│ └── AboutJars.txt
├── Transforms
│ ├── Metadata.xml
│ ├── EnumMethods.xml
│ └── EnumFields.xml
├── Properties
│ └── AssemblyInfo.cs
├── Additions
│ └── AboutAdditions.txt
└── Org.Webrtc.Voiceengine.csproj
├── README.md
├── LICENSE
├── .gitignore
└── WebRtc.Mono.sln
/WebRtc.Mono.Droid/lib/ViEAndroidJavaAPI.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenneththorman/webrtc-app-mono/HEAD/WebRtc.Mono.Droid/lib/ViEAndroidJavaAPI.jar
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/Resources/Drawable/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenneththorman/webrtc-app-mono/HEAD/WebRtc.Mono.Droid/Resources/Drawable/logo.png
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/lib/audio_device_module_java.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenneththorman/webrtc-app-mono/HEAD/WebRtc.Mono.Droid/lib/audio_device_module_java.jar
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/lib/video_capture_module_java.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenneththorman/webrtc-app-mono/HEAD/WebRtc.Mono.Droid/lib/video_capture_module_java.jar
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/lib/video_render_module_java.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenneththorman/webrtc-app-mono/HEAD/WebRtc.Mono.Droid/lib/video_render_module_java.jar
--------------------------------------------------------------------------------
/Org.Webrtc.Videoengineapp/Jars/ViEAndroidJavaAPI.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenneththorman/webrtc-app-mono/HEAD/Org.Webrtc.Videoengineapp/Jars/ViEAndroidJavaAPI.jar
--------------------------------------------------------------------------------
/Org.Webrtc.Videoengine/Jars/video_render_module_java.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenneththorman/webrtc-app-mono/HEAD/Org.Webrtc.Videoengine/Jars/video_render_module_java.jar
--------------------------------------------------------------------------------
/Org.Webrtc.Voiceengine/Jars/audio_device_module_java.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenneththorman/webrtc-app-mono/HEAD/Org.Webrtc.Voiceengine/Jars/audio_device_module_java.jar
--------------------------------------------------------------------------------
/Org.Webrtc.Videoengine/Jars/video_capture_module_java.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenneththorman/webrtc-app-mono/HEAD/Org.Webrtc.Videoengine/Jars/video_capture_module_java.jar
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/lib/armeabi-v7a/libwebrtc-video-demo-jni.so:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenneththorman/webrtc-app-mono/HEAD/WebRtc.Mono.Droid/lib/armeabi-v7a/libwebrtc-video-demo-jni.so
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | webrtc-app-mono
2 | =========================
3 |
4 | Goal:
5 | - Porting WebRTC demo app (https://code.google.com/p/webrtc/source/browse/trunk/webrtc) to C# - Mono.Android (Xamarin)
6 |
7 |
8 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/res/drawable/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenneththorman/webrtc-app-mono/HEAD/WebRtc.Mono.Droid/src/video_engine/test/android/res/drawable/logo.png
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/libs/audio_device_module_java.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenneththorman/webrtc-app-mono/HEAD/WebRtc.Mono.Droid/src/video_engine/test/android/libs/audio_device_module_java.jar
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/libs/video_render_module_java.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenneththorman/webrtc-app-mono/HEAD/WebRtc.Mono.Droid/src/video_engine/test/android/libs/video_render_module_java.jar
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/libs/video_capture_module_java.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenneththorman/webrtc-app-mono/HEAD/WebRtc.Mono.Droid/src/video_engine/test/android/libs/video_capture_module_java.jar
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/libs/armeabi-v7a/libwebrtc-video-demo-jni.so:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kenneththorman/webrtc-app-mono/HEAD/WebRtc.Mono.Droid/src/video_engine/test/android/libs/armeabi-v7a/libwebrtc-video-demo-jni.so
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/gen/org/webrtc/videoengineapp/BuildConfig.java:
--------------------------------------------------------------------------------
1 | /** Automatically generated file. DO NOT MODIFY */
2 | package org.webrtc.videoengineapp;
3 |
4 | public final class BuildConfig {
5 | public final static boolean DEBUG = true;
6 | }
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/Properties/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/Org.Webrtc.Videoengine/Transforms/Metadata.xml:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
--------------------------------------------------------------------------------
/Org.Webrtc.Voiceengine/Transforms/Metadata.xml:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
--------------------------------------------------------------------------------
/Org.Webrtc.Videoengineapp/Transforms/Metadata.xml:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
--------------------------------------------------------------------------------
/Org.Webrtc.Videoengine/Transforms/EnumMethods.xml:
--------------------------------------------------------------------------------
1 |
2 |
13 |
--------------------------------------------------------------------------------
/Org.Webrtc.Voiceengine/Transforms/EnumMethods.xml:
--------------------------------------------------------------------------------
1 |
2 |
13 |
--------------------------------------------------------------------------------
/Org.Webrtc.Videoengineapp/Transforms/EnumMethods.xml:
--------------------------------------------------------------------------------
1 |
2 |
13 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/Resources/Layout/send.xml:
--------------------------------------------------------------------------------
1 |
2 |
6 |
7 |
8 |
11 |
12 |
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/res/layout/send.xml:
--------------------------------------------------------------------------------
1 |
2 |
6 |
7 |
8 |
11 |
12 |
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/project.properties:
--------------------------------------------------------------------------------
1 | # This file is automatically generated by Android Tools.
2 | # Do not modify this file -- YOUR CHANGES WILL BE ERASED!
3 | #
4 | # This file must be checked in Version Control Systems.
5 | #
6 | # To customize properties used by the Ant build system use,
7 | # "ant.properties", and override values to adapt the script to your
8 | # project structure.
9 |
10 | # To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
11 | #proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
12 |
13 | # Project target.
14 | target=android-19
15 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/Assets/AboutAssets.txt:
--------------------------------------------------------------------------------
1 | Any raw assets you want to be deployed with your application can be placed in
2 | this directory (and child directories) and given a Build Action of "AndroidAsset".
3 |
4 | These files will be deployed with your package and will be accessible using Android's
5 | AssetManager, like this:
6 |
7 | public class ReadAsset : Activity
8 | {
9 | protected override void OnCreate (Bundle bundle)
10 | {
11 | base.OnCreate (bundle);
12 |
13 | InputStream input = Assets.Open ("my_asset.txt");
14 | }
15 | }
16 |
17 | Additionally, some Android functions will automatically load asset files:
18 |
19 | Typeface tf = Typeface.CreateFromAsset (Context.Assets, "fonts/samplefont.ttf");
--------------------------------------------------------------------------------
/Org.Webrtc.Videoengine/Transforms/EnumFields.xml:
--------------------------------------------------------------------------------
1 |
2 |
14 |
--------------------------------------------------------------------------------
/Org.Webrtc.Voiceengine/Transforms/EnumFields.xml:
--------------------------------------------------------------------------------
1 |
2 |
14 |
--------------------------------------------------------------------------------
/Org.Webrtc.Videoengineapp/Transforms/EnumFields.xml:
--------------------------------------------------------------------------------
1 |
2 |
14 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/Resources/Layout/row.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
18 |
19 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/res/layout/row.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
18 |
19 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/src/org/webrtc/videoengineapp/IViEAndroidCallback.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | package org.webrtc.videoengineapp;
12 |
13 | public interface IViEAndroidCallback {
14 | public int updateStats(int frameRateI, int bitRateI,
15 | int packetLoss, int frameRateO,
16 | int bitRateO);
17 |
18 | public int newIncomingResolution(int width, int height);
19 | }
20 |
--------------------------------------------------------------------------------
/Org.Webrtc.Videoengine/Jars/AboutJars.txt:
--------------------------------------------------------------------------------
1 | This directory is for Android .jars.
2 |
3 | There are 2 types of jars that are supported:
4 |
5 | == Input Jar ==
6 |
7 | This is the jar that bindings should be generated for.
8 |
9 | For example, if you were binding the Google Maps library, this would
10 | be Google's "maps.jar".
11 |
12 | Set the build action for these jars in the properties page to "InputJar".
13 |
14 |
15 | == Reference Jars ==
16 |
17 | These are jars that are referenced by the input jar. C# bindings will
18 | not be created for these jars. These jars will be used to resolve
19 | types used by the input jar.
20 |
21 | NOTE: Do not add "android.jar" as a reference jar. It will be added automatically
22 | based on the Target Framework selected.
23 |
24 | Set the build action for these jars in the properties page to "ReferenceJar".
--------------------------------------------------------------------------------
/Org.Webrtc.Voiceengine/Jars/AboutJars.txt:
--------------------------------------------------------------------------------
1 | This directory is for Android .jars.
2 |
3 | There are 2 types of jars that are supported:
4 |
5 | == Input Jar ==
6 |
7 | This is the jar that bindings should be generated for.
8 |
9 | For example, if you were binding the Google Maps library, this would
10 | be Google's "maps.jar".
11 |
12 | Set the build action for these jars in the properties page to "InputJar".
13 |
14 |
15 | == Reference Jars ==
16 |
17 | These are jars that are referenced by the input jar. C# bindings will
18 | not be created for these jars. These jars will be used to resolve
19 | types used by the input jar.
20 |
21 | NOTE: Do not add "android.jar" as a reference jar. It will be added automatically
22 | based on the Target Framework selected.
23 |
24 | Set the build action for these jars in the properties page to "ReferenceJar".
--------------------------------------------------------------------------------
/Org.Webrtc.Videoengineapp/Jars/AboutJars.txt:
--------------------------------------------------------------------------------
1 | This directory is for Android .jars.
2 |
3 | There are 2 types of jars that are supported:
4 |
5 | == Input Jar ==
6 |
7 | This is the jar that bindings should be generated for.
8 |
9 | For example, if you were binding the Google Maps library, this would
10 | be Google's "maps.jar".
11 |
12 | Set the build action for these jars in the properties page to "InputJar".
13 |
14 |
15 | == Reference Jars ==
16 |
17 | These are jars that are referenced by the input jar. C# bindings will
18 | not be created for these jars. These jars will be used to resolve
19 | types used by the input jar.
20 |
21 | NOTE: Do not add "android.jar" as a reference jar. It will be added automatically
22 | based on the Target Framework selected.
23 |
24 | Set the build action for these jars in the properties page to "ReferenceJar".
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/DateTimeHelperClass.cs:
--------------------------------------------------------------------------------
1 | //---------------------------------------------------------------------------------------------------------
2 | // Copyright © 2007 - 2013 Tangible Software Solutions Inc.
3 | // This class can be used by anyone provided that the copyright notice remains intact.
4 | //
5 | // This class is used to replace calls to Java's System.currentTimeMillis with the C# equivalent.
6 | // Unix time is defined as the number of seconds that have elapsed since midnight UTC, 1 January 1970.
7 | //---------------------------------------------------------------------------------------------------------
8 | namespace WebRtc.Mono.Droid
9 | {
10 | internal static class DateTimeHelperClass
11 | {
12 | private static readonly System.DateTime Jan1st1970 = new System.DateTime(1970, 1, 1, 0, 0, 0, System.DateTimeKind.Utc);
13 | internal static long CurrentUnixTimeMillis()
14 | {
15 | return (long)(System.DateTime.UtcNow - Jan1st1970).TotalMilliseconds;
16 | }
17 | }
18 | }
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/gen/R.java.d:
--------------------------------------------------------------------------------
1 | /home/user1/WebRTCDemo/trunk/webrtc/video_engine/test/android/gen/org/webrtc/videoengineapp/R.java \
2 | : /home/user1/WebRTCDemo/trunk/webrtc/video_engine/test/android/res/drawable/logo.png \
3 | /home/user1/WebRTCDemo/trunk/webrtc/video_engine/test/android/res/values/strings.xml \
4 | /home/user1/WebRTCDemo/trunk/webrtc/video_engine/test/android/res/layout/aconfig.xml \
5 | /home/user1/WebRTCDemo/trunk/webrtc/video_engine/test/android/res/layout/main.xml \
6 | /home/user1/WebRTCDemo/trunk/webrtc/video_engine/test/android/res/layout/tabhost.xml \
7 | /home/user1/WebRTCDemo/trunk/webrtc/video_engine/test/android/res/layout/vconfig.xml \
8 | /home/user1/WebRTCDemo/trunk/webrtc/video_engine/test/android/res/layout/row.xml \
9 | /home/user1/WebRTCDemo/trunk/webrtc/video_engine/test/android/res/layout/send.xml \
10 | /home/user1/WebRTCDemo/trunk/webrtc/video_engine/test/android/res/layout/both.xml \
11 | /home/user1/WebRTCDemo/trunk/webrtc/video_engine/test/android/bin/AndroidManifest.xml \
12 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/Resources/Layout/tabhost.xml:
--------------------------------------------------------------------------------
1 |
2 |
6 |
10 |
14 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/res/layout/tabhost.xml:
--------------------------------------------------------------------------------
1 |
2 |
6 |
10 |
14 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/Properties/AssemblyInfo.cs:
--------------------------------------------------------------------------------
1 | using System.Reflection;
2 | using System.Runtime.CompilerServices;
3 | using System.Runtime.InteropServices;
4 | using Android.App;
5 |
6 | // General Information about an assembly is controlled through the following
7 | // set of attributes. Change these attribute values to modify the information
8 | // associated with an assembly.
9 | [assembly: AssemblyTitle("WebRtc.Mono.Droid")]
10 | [assembly: AssemblyDescription("")]
11 | [assembly: AssemblyConfiguration("")]
12 | [assembly: AssemblyCompany("")]
13 | [assembly: AssemblyProduct("WebRtc.Mono.Droid")]
14 | [assembly: AssemblyCopyright("Copyright © 2013")]
15 | [assembly: AssemblyTrademark("")]
16 | [assembly: AssemblyCulture("")]
17 | [assembly: ComVisible(false)]
18 |
19 | // Version information for an assembly consists of the following four values:
20 | //
21 | // Major Version
22 | // Minor Version
23 | // Build Number
24 | // Revision
25 | //
26 | // You can specify all the values or you can default the Build and Revision Numbers
27 | // by using the '*' as shown below:
28 | // [assembly: AssemblyVersion("1.0.*")]
29 | [assembly: AssemblyVersion("1.0.0.0")]
30 | [assembly: AssemblyFileVersion("1.0.0.0")]
31 |
32 | // Add some common permissions, these can be removed if not needed
33 | [assembly: UsesPermission(Android.Manifest.Permission.Internet)]
34 | [assembly: UsesPermission(Android.Manifest.Permission.WriteExternalStorage)]
35 |
--------------------------------------------------------------------------------
/Org.Webrtc.Videoengine/Properties/AssemblyInfo.cs:
--------------------------------------------------------------------------------
1 | using System.Reflection;
2 | using System.Runtime.CompilerServices;
3 | using System.Runtime.InteropServices;
4 | using Android.App;
5 |
6 | // General Information about an assembly is controlled through the following
7 | // set of attributes. Change these attribute values to modify the information
8 | // associated with an assembly.
9 | [assembly: AssemblyTitle("Org.Webrtc.Videoengine")]
10 | [assembly: AssemblyDescription("")]
11 | [assembly: AssemblyConfiguration("")]
12 | [assembly: AssemblyCompany("")]
13 | [assembly: AssemblyProduct("Org.Webrtc.Videoengine")]
14 | [assembly: AssemblyCopyright("Copyright © 2013")]
15 | [assembly: AssemblyTrademark("")]
16 | [assembly: AssemblyCulture("")]
17 | [assembly: ComVisible(false)]
18 |
19 | // Version information for an assembly consists of the following four values:
20 | //
21 | // Major Version
22 | // Minor Version
23 | // Build Number
24 | // Revision
25 | //
26 | // You can specify all the values or you can default the Build and Revision Numbers
27 | // by using the '*' as shown below:
28 | // [assembly: AssemblyVersion("1.0.*")]
29 | [assembly: AssemblyVersion("1.0.0.0")]
30 | [assembly: AssemblyFileVersion("1.0.0.0")]
31 |
32 | // Add some common permissions, these can be removed if not needed
33 | [assembly: UsesPermission(Android.Manifest.Permission.Internet)]
34 | [assembly: UsesPermission(Android.Manifest.Permission.WriteExternalStorage)]
35 |
--------------------------------------------------------------------------------
/Org.Webrtc.Voiceengine/Properties/AssemblyInfo.cs:
--------------------------------------------------------------------------------
1 | using System.Reflection;
2 | using System.Runtime.CompilerServices;
3 | using System.Runtime.InteropServices;
4 | using Android.App;
5 |
6 | // General Information about an assembly is controlled through the following
7 | // set of attributes. Change these attribute values to modify the information
8 | // associated with an assembly.
9 | [assembly: AssemblyTitle("Org.Webrtc.Voiceengine")]
10 | [assembly: AssemblyDescription("")]
11 | [assembly: AssemblyConfiguration("")]
12 | [assembly: AssemblyCompany("")]
13 | [assembly: AssemblyProduct("Org.Webrtc.Voiceengine")]
14 | [assembly: AssemblyCopyright("Copyright © 2013")]
15 | [assembly: AssemblyTrademark("")]
16 | [assembly: AssemblyCulture("")]
17 | [assembly: ComVisible(false)]
18 |
19 | // Version information for an assembly consists of the following four values:
20 | //
21 | // Major Version
22 | // Minor Version
23 | // Build Number
24 | // Revision
25 | //
26 | // You can specify all the values or you can default the Build and Revision Numbers
27 | // by using the '*' as shown below:
28 | // [assembly: AssemblyVersion("1.0.*")]
29 | [assembly: AssemblyVersion("1.0.0.0")]
30 | [assembly: AssemblyFileVersion("1.0.0.0")]
31 |
32 | // Add some common permissions, these can be removed if not needed
33 | [assembly: UsesPermission(Android.Manifest.Permission.Internet)]
34 | [assembly: UsesPermission(Android.Manifest.Permission.WriteExternalStorage)]
35 |
--------------------------------------------------------------------------------
/Org.Webrtc.Videoengineapp/Properties/AssemblyInfo.cs:
--------------------------------------------------------------------------------
1 | using System.Reflection;
2 | using System.Runtime.CompilerServices;
3 | using System.Runtime.InteropServices;
4 | using Android.App;
5 |
6 | // General Information about an assembly is controlled through the following
7 | // set of attributes. Change these attribute values to modify the information
8 | // associated with an assembly.
9 | [assembly: AssemblyTitle("Org.Webrtc.Videoengineapp")]
10 | [assembly: AssemblyDescription("")]
11 | [assembly: AssemblyConfiguration("")]
12 | [assembly: AssemblyCompany("")]
13 | [assembly: AssemblyProduct("Org.Webrtc.Videoengineapp")]
14 | [assembly: AssemblyCopyright("Copyright © 2013")]
15 | [assembly: AssemblyTrademark("")]
16 | [assembly: AssemblyCulture("")]
17 | [assembly: ComVisible(false)]
18 |
19 | // Version information for an assembly consists of the following four values:
20 | //
21 | // Major Version
22 | // Minor Version
23 | // Build Number
24 | // Revision
25 | //
26 | // You can specify all the values or you can default the Build and Revision Numbers
27 | // by using the '*' as shown below:
28 | // [assembly: AssemblyVersion("1.0.*")]
29 | [assembly: AssemblyVersion("1.0.0.0")]
30 | [assembly: AssemblyFileVersion("1.0.0.0")]
31 |
32 | // Add some common permissions, these can be removed if not needed
33 | [assembly: UsesPermission(Android.Manifest.Permission.Internet)]
34 | [assembly: UsesPermission(Android.Manifest.Permission.WriteExternalStorage)]
35 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
7 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2011, The WebRTC project authors. All rights reserved.
2 |
3 | Redistribution and use in source and binary forms, with or without
4 | modification, are permitted provided that the following conditions are
5 | met:
6 |
7 | * Redistributions of source code must retain the above copyright
8 | notice, this list of conditions and the following disclaimer.
9 |
10 | * Redistributions in binary form must reproduce the above copyright
11 | notice, this list of conditions and the following disclaimer in
12 | the documentation and/or other materials provided with the
13 | distribution.
14 |
15 | * Neither the name of Google nor the names of its contributors may
16 | be used to endorse or promote products derived from this software
17 | without specific prior written permission.
18 |
19 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
20 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
21 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
22 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
23 | HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
24 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
25 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
26 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
27 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/Org.Webrtc.Videoengine/Additions/AboutAdditions.txt:
--------------------------------------------------------------------------------
1 | Additions allow you to add arbitrary C# to the generated classes
2 | before they are compiled. This can be helpful for providing convenience
3 | methods or adding pure C# classes.
4 |
5 | == Adding Methods to Generated Classes ==
6 |
7 | Let's say the library being bound has a Rectangle class with a constructor
8 | that takes an x and y position, and a width and length size. It will look like
9 | this:
10 |
11 | public partial class Rectangle
12 | {
13 | public Rectangle (int x, int y, int width, int height)
14 | {
15 | // JNI bindings
16 | }
17 | }
18 |
19 | Imagine we want to add a constructor to this class that takes a Point and
20 | Size structure instead of 4 ints. We can add a new file called Rectangle.cs
21 | with a partial class containing our new method:
22 |
23 | public partial class Rectangle
24 | {
25 | public Rectangle (Point location, Size size) :
26 | this (location.X, location.Y, size.Width, size.Height)
27 | {
28 | }
29 | }
30 |
31 | At compile time, the additions class will be added to the generated class
32 | and the final assembly will include a Rectangle class with both constructors.
33 |
34 |
35 | == Adding C# Classes ==
36 |
37 | Another thing that can be done is adding fully C# managed classes to the
38 | generated library. In the above example, let's assume that there isn't a
39 | Point class available in Java or our library. The one we create doesn't need
40 | to interact with Java, so we'll create it like a normal class in C#.
41 |
42 | By adding a Point.cs file with this class, it will end up in the binding library:
43 |
44 | public class Point
45 | {
46 | public int X { get; set; }
47 | public int Y { get; set; }
48 | }
--------------------------------------------------------------------------------
/Org.Webrtc.Voiceengine/Additions/AboutAdditions.txt:
--------------------------------------------------------------------------------
1 | Additions allow you to add arbitrary C# to the generated classes
2 | before they are compiled. This can be helpful for providing convenience
3 | methods or adding pure C# classes.
4 |
5 | == Adding Methods to Generated Classes ==
6 |
7 | Let's say the library being bound has a Rectangle class with a constructor
that takes an x and y position, and a width and height size. It will look like
9 | this:
10 |
11 | public partial class Rectangle
12 | {
13 | public Rectangle (int x, int y, int width, int height)
14 | {
15 | // JNI bindings
16 | }
17 | }
18 |
19 | Imagine we want to add a constructor to this class that takes a Point and
20 | Size structure instead of 4 ints. We can add a new file called Rectangle.cs
21 | with a partial class containing our new method:
22 |
23 | public partial class Rectangle
24 | {
25 | public Rectangle (Point location, Size size) :
26 | this (location.X, location.Y, size.Width, size.Height)
27 | {
28 | }
29 | }
30 |
31 | At compile time, the additions class will be added to the generated class
and the final assembly will include a Rectangle class with both constructors.
33 |
34 |
35 | == Adding C# Classes ==
36 |
37 | Another thing that can be done is adding fully C# managed classes to the
38 | generated library. In the above example, let's assume that there isn't a
39 | Point class available in Java or our library. The one we create doesn't need
40 | to interact with Java, so we'll create it like a normal class in C#.
41 |
42 | By adding a Point.cs file with this class, it will end up in the binding library:
43 |
44 | public class Point
45 | {
46 | public int X { get; set; }
47 | public int Y { get; set; }
48 | }
--------------------------------------------------------------------------------
/Org.Webrtc.Videoengineapp/Additions/AboutAdditions.txt:
--------------------------------------------------------------------------------
1 | Additions allow you to add arbitrary C# to the generated classes
2 | before they are compiled. This can be helpful for providing convenience
3 | methods or adding pure C# classes.
4 |
5 | == Adding Methods to Generated Classes ==
6 |
7 | Let's say the library being bound has a Rectangle class with a constructor
that takes an x and y position, and a width and height size. It will look like
9 | this:
10 |
11 | public partial class Rectangle
12 | {
13 | public Rectangle (int x, int y, int width, int height)
14 | {
15 | // JNI bindings
16 | }
17 | }
18 |
19 | Imagine we want to add a constructor to this class that takes a Point and
20 | Size structure instead of 4 ints. We can add a new file called Rectangle.cs
21 | with a partial class containing our new method:
22 |
23 | public partial class Rectangle
24 | {
25 | public Rectangle (Point location, Size size) :
26 | this (location.X, location.Y, size.Width, size.Height)
27 | {
28 | }
29 | }
30 |
31 | At compile time, the additions class will be added to the generated class
and the final assembly will include a Rectangle class with both constructors.
33 |
34 |
35 | == Adding C# Classes ==
36 |
37 | Another thing that can be done is adding fully C# managed classes to the
38 | generated library. In the above example, let's assume that there isn't a
39 | Point class available in Java or our library. The one we create doesn't need
40 | to interact with Java, so we'll create it like a normal class in C#.
41 |
42 | By adding a Point.cs file with this class, it will end up in the binding library:
43 |
44 | public class Point
45 | {
46 | public int X { get; set; }
47 | public int Y { get; set; }
48 | }
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/Resources/AboutResources.txt:
--------------------------------------------------------------------------------
1 | Images, layout descriptions, binary blobs and string dictionaries can be included
2 | in your application as resource files. Various Android APIs are designed to
3 | operate on the resource IDs instead of dealing with images, strings or binary blobs
4 | directly.
5 |
6 | For example, a sample Android app that contains a user interface layout (main.xml),
7 | an internationalization string table (strings.xml) and some icons (drawable-XXX/icon.png)
8 | would keep its resources in the "Resources" directory of the application:
9 |
10 | Resources/
11 | drawable-hdpi/
12 | icon.png
13 |
14 | drawable-ldpi/
15 | icon.png
16 |
17 | drawable-mdpi/
18 | icon.png
19 |
20 | layout/
21 | main.xml
22 |
23 | values/
24 | strings.xml
25 |
26 | In order to get the build system to recognize Android resources, set the build action to
27 | "AndroidResource". The native Android APIs do not operate directly with filenames, but
28 | instead operate on resource IDs. When you compile an Android application that uses resources,
29 | the build system will package the resources for distribution and generate a class called
30 | "Resource" that contains the tokens for each one of the resources included. For example,
31 | for the above Resources layout, this is what the Resource class would expose:
32 |
33 | public class Resource {
34 | public class drawable {
35 | public const int icon = 0x123;
36 | }
37 |
38 | public class layout {
39 | public const int main = 0x456;
40 | }
41 |
42 | public class strings {
43 | public const int first_string = 0xabc;
44 | public const int second_string = 0xbcd;
45 | }
46 | }
47 |
You would then use Resource.drawable.icon to reference the drawable/icon.png file, or Resource.layout.main
49 | to reference the layout/main.xml file, or Resource.strings.first_string to reference the first
50 | string in the dictionary file values/strings.xml.
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/Resources/Values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | WebRTC
4 | Error
5 | Camera Error
6 | Choose a codec type
7 | Video Engine Android Demo
8 | Codec Type
9 | Codec Size
10 | Remote IP address
11 | Loopback
12 | Stats
13 | Start Listen
14 | Start Send
15 | Start Both
16 | Voice
17 | Video Receive
18 | Video Send
19 | Global Settings
20 | Video Settings
21 | Video Tx Port
22 | Video Rx Port
23 | Audio Tx Port
24 | Audio Rx Port
25 | AGC
26 | VAD
27 | AECM
28 | NS
29 | NACK
30 | SwitchToFront
31 | SwitchToBack
32 | StartCall
33 | StopCall
34 | Exit
35 | Speaker
36 | APMRecord
37 | rtpdump
38 | SurfaceView
39 | MediaCodec Decoder/Renderer
40 | OpenGL
41 |
42 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | WebRTC
4 | Error
5 | Camera Error
6 | Choose a codec type
7 | Video Engine Android Demo
8 | Codec Type
9 | Codec Size
10 | Remote IP address
11 | Loopback
12 | Stats
13 | Start Listen
14 | Start Send
15 | Start Both
16 | Voice
17 | Video Receive
18 | Video Send
19 | Global Settings
20 | Video Settings
21 | Video Tx Port
22 | Video Rx Port
23 | Audio Tx Port
24 | Audio Rx Port
25 | AGC
26 | VAD
27 | AECM
28 | NS
29 | NACK
30 | SwitchToFront
31 | SwitchToBack
32 | StartCall
33 | StopCall
34 | Exit
35 | Speaker
36 | APMRecord
37 | rtpdump
38 | SurfaceView
39 | MediaCodec Decoder/Renderer
40 | OpenGL
41 |
42 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/Resources/Layout/both.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
11 |
14 |
15 |
20 |
25 |
30 |
35 |
36 |
41 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/res/layout/both.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
11 |
14 |
15 |
20 |
25 |
30 |
35 |
36 |
41 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Build Folders (you can keep bin if you'd like, to store dlls and pdbs)
2 | [Bb]in/
3 | [Oo]bj/
4 |
5 | # mstest test results
6 | TestResults
7 |
8 | ## Ignore Visual Studio temporary files, build results, and
9 | ## files generated by popular Visual Studio add-ons.
10 |
11 | # User-specific files
12 | *.suo
13 | *.user
14 | *.sln.docstates
15 |
16 | # Build results
17 | [Dd]ebug/
18 | [Rr]elease/
19 | x64/
20 | *_i.c
21 | *_p.c
22 | *.ilk
23 | *.meta
24 | *.obj
25 | *.pch
26 | *.pdb
27 | *.pgc
28 | *.pgd
29 | *.rsp
30 | *.sbr
31 | *.tlb
32 | *.tli
33 | *.tlh
34 | *.tmp
35 | *.log
36 | *.vspscc
37 | *.vssscc
38 | .builds
39 |
40 | # Visual C++ cache files
41 | ipch/
42 | *.aps
43 | *.ncb
44 | *.opensdf
45 | *.sdf
46 |
47 | # Visual Studio profiler
48 | *.psess
49 | *.vsp
50 | *.vspx
51 |
52 | # Guidance Automation Toolkit
53 | *.gpState
54 |
55 | # ReSharper is a .NET coding add-in
56 | _ReSharper*
57 |
58 | # NCrunch
59 | *.ncrunch*
60 | .*crunch*.local.xml
61 |
62 | # Installshield output folder
63 | [Ee]xpress
64 |
65 | # DocProject is a documentation generator add-in
66 | DocProject/buildhelp/
67 | DocProject/Help/*.HxT
68 | DocProject/Help/*.HxC
69 | DocProject/Help/*.hhc
70 | DocProject/Help/*.hhk
71 | DocProject/Help/*.hhp
72 | DocProject/Help/Html2
73 | DocProject/Help/html
74 |
75 | # Click-Once directory
76 | publish
77 |
78 | # Publish Web Output
79 | *.Publish.xml
80 |
81 | # NuGet Packages Directory
82 | packages
83 |
84 | # Windows Azure Build Output
85 | csx
86 | *.build.csdef
87 |
88 | # Windows Store app package directory
89 | AppPackages/
90 |
91 | # Others
92 | [Bb]in
93 | [Oo]bj
94 | sql
95 | TestResults
96 | [Tt]est[Rr]esult*
97 | *.Cache
98 | ClientBin
99 | [Ss]tyle[Cc]op.*
100 | ~$*
101 | *.dbmdl
# Added for RIA/Silverlight projects (a trailing '#' is not a comment in gitignore patterns)
Generated_Code
103 |
104 | # Backup & report files from converting an old project file to a newer
105 | # Visual Studio version. Backup files are not needed, because we have git ;-)
106 | _UpgradeReport_Files/
107 | Backup*/
108 | UpgradeLog*.XML
109 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/modules/video_render/android/java/src/org/webrtc/videoengine/ViERenderer.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | package org.webrtc.videoengine;
12 |
13 | import android.content.Context;
14 | import android.view.SurfaceHolder;
15 | import android.view.SurfaceView;
16 |
17 | public class ViERenderer {
18 |
19 | // View used for local rendering that Cameras can use for Video Overlay.
20 | private static SurfaceHolder g_localRenderer;
21 |
22 | public static SurfaceView CreateRenderer(Context context) {
23 | return CreateRenderer(context, false);
24 | }
25 |
26 | public static SurfaceView CreateRenderer(Context context,
27 | boolean useOpenGLES2) {
28 | if(useOpenGLES2 == true && ViEAndroidGLES20.IsSupported(context))
29 | return new ViEAndroidGLES20(context);
30 | else
31 | return new SurfaceView(context);
32 | }
33 |
34 | // Creates a SurfaceView to be used by Android Camera
35 | // service to display a local preview.
36 | // This needs to be used on Android prior to version 2.1
37 | // in order to run the camera.
38 | // Call this function before ViECapture::StartCapture.
39 | // The created view needs to be added to a visible layout
40 | // after a camera has been allocated
41 | // (with the call ViECapture::AllocateCaptureDevice).
42 | // IE.
43 | // CreateLocalRenderer
44 | // ViECapture::AllocateCaptureDevice
45 | // LinearLayout.addview
46 | // ViECapture::StartCapture
47 | public static SurfaceView CreateLocalRenderer(Context context) {
48 | SurfaceView localRender = new SurfaceView(context);
49 | g_localRenderer = localRender.getHolder();
50 | return localRender;
51 | }
52 |
53 | public static SurfaceHolder GetLocalRenderer() {
54 | return g_localRenderer;
55 | }
56 |
57 | }
58 |
--------------------------------------------------------------------------------
/WebRtc.Mono.sln:
--------------------------------------------------------------------------------
1 |
2 | Microsoft Visual Studio Solution File, Format Version 12.00
3 | # Visual Studio 2013
4 | VisualStudioVersion = 12.0.21005.1
5 | MinimumVisualStudioVersion = 10.0.40219.1
6 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "WebRtc.Mono.Droid", "WebRtc.Mono.Droid\WebRtc.Mono.Droid.csproj", "{A15474CE-91C0-4A31-AE49-201872C530A4}"
7 | EndProject
8 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Org.Webrtc.Videoengineapp", "Org.Webrtc.Videoengineapp\Org.Webrtc.Videoengineapp.csproj", "{45A11329-F170-4B2D-9CCC-C09EF500C114}"
9 | EndProject
10 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Org.Webrtc.Videoengine", "Org.Webrtc.Videoengine\Org.Webrtc.Videoengine.csproj", "{074DD245-D7F1-448D-8F54-6F9C2301120A}"
11 | EndProject
12 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Org.Webrtc.Voiceengine", "Org.Webrtc.Voiceengine\Org.Webrtc.Voiceengine.csproj", "{9B5DAA9B-7087-47A3-8B7A-8FB8D794696A}"
13 | EndProject
14 | Global
15 | GlobalSection(SolutionConfigurationPlatforms) = preSolution
16 | Debug|Any CPU = Debug|Any CPU
17 | Release|Any CPU = Release|Any CPU
18 | EndGlobalSection
19 | GlobalSection(ProjectConfigurationPlatforms) = postSolution
20 | {A15474CE-91C0-4A31-AE49-201872C530A4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
21 | {A15474CE-91C0-4A31-AE49-201872C530A4}.Debug|Any CPU.Build.0 = Debug|Any CPU
22 | {A15474CE-91C0-4A31-AE49-201872C530A4}.Release|Any CPU.ActiveCfg = Release|Any CPU
23 | {A15474CE-91C0-4A31-AE49-201872C530A4}.Release|Any CPU.Build.0 = Release|Any CPU
24 | {45A11329-F170-4B2D-9CCC-C09EF500C114}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
25 | {45A11329-F170-4B2D-9CCC-C09EF500C114}.Debug|Any CPU.Build.0 = Debug|Any CPU
26 | {45A11329-F170-4B2D-9CCC-C09EF500C114}.Release|Any CPU.ActiveCfg = Release|Any CPU
27 | {45A11329-F170-4B2D-9CCC-C09EF500C114}.Release|Any CPU.Build.0 = Release|Any CPU
28 | {074DD245-D7F1-448D-8F54-6F9C2301120A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
29 | {074DD245-D7F1-448D-8F54-6F9C2301120A}.Debug|Any CPU.Build.0 = Debug|Any CPU
30 | {074DD245-D7F1-448D-8F54-6F9C2301120A}.Release|Any CPU.ActiveCfg = Release|Any CPU
31 | {074DD245-D7F1-448D-8F54-6F9C2301120A}.Release|Any CPU.Build.0 = Release|Any CPU
32 | {9B5DAA9B-7087-47A3-8B7A-8FB8D794696A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
33 | {9B5DAA9B-7087-47A3-8B7A-8FB8D794696A}.Debug|Any CPU.Build.0 = Debug|Any CPU
34 | {9B5DAA9B-7087-47A3-8B7A-8FB8D794696A}.Release|Any CPU.ActiveCfg = Release|Any CPU
35 | {9B5DAA9B-7087-47A3-8B7A-8FB8D794696A}.Release|Any CPU.Build.0 = Release|Any CPU
36 | EndGlobalSection
37 | GlobalSection(SolutionProperties) = preSolution
38 | HideSolutionNode = FALSE
39 | EndGlobalSection
40 | EndGlobal
41 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/Resources/Layout/vconfig.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
10 |
11 |
12 |
15 |
16 |
17 |
21 |
22 |
25 |
26 |
27 |
31 |
32 |
35 |
36 |
37 |
40 |
43 |
44 |
47 |
48 |
51 |
52 |
55 |
56 |
57 |
58 |
61 |
65 |
66 |
70 |
71 |
72 |
73 |
74 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/res/layout/vconfig.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
10 |
11 |
12 |
15 |
16 |
17 |
21 |
22 |
25 |
26 |
27 |
31 |
32 |
35 |
36 |
37 |
40 |
43 |
44 |
47 |
48 |
51 |
52 |
55 |
56 |
57 |
58 |
61 |
65 |
66 |
70 |
71 |
72 |
73 |
74 |
--------------------------------------------------------------------------------
/Org.Webrtc.Voiceengine/Org.Webrtc.Voiceengine.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Debug
5 | AnyCPU
6 | 8.0.30703
7 | 2.0
8 | {9B5DAA9B-7087-47A3-8B7A-8FB8D794696A}
9 | {10368E6C-D01B-4462-8E8B-01FC667A7035};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}
10 | Library
11 | Properties
12 | Org.Webrtc.Voiceengine
13 | Org.Webrtc.Voiceengine
14 | 512
15 | v4.1
16 |
17 |
18 |
19 | true
20 | full
21 | false
22 | bin\Debug\
23 | DEBUG;TRACE
24 | prompt
25 | 4
26 |
27 |
28 | pdbonly
29 | true
30 | bin\Release\
31 | TRACE
32 | prompt
33 | 4
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
64 |
--------------------------------------------------------------------------------
/Org.Webrtc.Videoengineapp/Org.Webrtc.Videoengineapp.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Debug
5 | AnyCPU
6 | 8.0.30703
7 | 2.0
8 | {45A11329-F170-4B2D-9CCC-C09EF500C114}
9 | {10368E6C-D01B-4462-8E8B-01FC667A7035};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}
10 | Library
11 | Properties
12 | org.webrtc.videoengineapp
13 | Org.Webrtc.Videoengineapp
14 | 512
15 | v4.1
16 |
17 |
18 |
19 | true
20 | full
21 | false
22 | bin\Debug\
23 | DEBUG;TRACE
24 | prompt
25 | 4
26 |
27 |
28 | pdbonly
29 | true
30 | bin\Release\
31 | TRACE
32 | prompt
33 | 4
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
64 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/modules/audio_device/android/java/src/org/webrtc/voiceengine/AudioManagerAndroid.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | // The functions in this file are called from native code. They can still be
12 | // accessed even though they are declared private.
13 |
14 | package org.webrtc.voiceengine;
15 |
16 | import android.content.Context;
17 | import android.content.pm.PackageManager;
18 | import android.media.AudioManager;
19 |
20 | class AudioManagerAndroid {
21 | // Most of Google lead devices use 44.1K as the default sampling rate, 44.1K
22 | // is also widely used on other android devices.
23 | private static final int DEFAULT_SAMPLING_RATE = 44100;
24 | // Randomly picked up frame size which is close to return value on N4.
25 | // Return this default value when
26 | // getProperty(PROPERTY_OUTPUT_FRAMES_PER_BUFFER) fails.
27 | private static final int DEFAULT_FRAMES_PER_BUFFER = 256;
28 |
29 | private int mNativeOutputSampleRate;
30 | private boolean mAudioLowLatencySupported;
31 | private int mAudioLowLatencyOutputFrameSize;
32 |
33 |
34 | @SuppressWarnings("unused")
35 | private AudioManagerAndroid(Context context) {
36 | AudioManager audioManager = (AudioManager)
37 | context.getSystemService(Context.AUDIO_SERVICE);
38 |
39 | mNativeOutputSampleRate = DEFAULT_SAMPLING_RATE;
40 | mAudioLowLatencyOutputFrameSize = DEFAULT_FRAMES_PER_BUFFER;
41 | if (android.os.Build.VERSION.SDK_INT >=
42 | android.os.Build.VERSION_CODES.JELLY_BEAN_MR1) {
43 | String sampleRateString = audioManager.getProperty(
44 | AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
45 | if (sampleRateString != null) {
46 | mNativeOutputSampleRate = Integer.parseInt(sampleRateString);
47 | }
48 | String framesPerBuffer = audioManager.getProperty(
49 | AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
50 | if (framesPerBuffer != null) {
51 | mAudioLowLatencyOutputFrameSize = Integer.parseInt(framesPerBuffer);
52 | }
53 | }
54 | mAudioLowLatencySupported = context.getPackageManager().hasSystemFeature(
55 | PackageManager.FEATURE_AUDIO_LOW_LATENCY);
56 | }
57 |
58 | @SuppressWarnings("unused")
59 | private int getNativeOutputSampleRate() {
60 | return mNativeOutputSampleRate;
61 | }
62 |
63 | @SuppressWarnings("unused")
64 | private boolean isAudioLowLatencySupported() {
65 | return mAudioLowLatencySupported;
66 | }
67 |
68 | @SuppressWarnings("unused")
69 | private int getAudioLowLatencyOutputFrameSize() {
70 | return mAudioLowLatencyOutputFrameSize;
71 | }
72 | }
--------------------------------------------------------------------------------
/Org.Webrtc.Videoengine/Org.Webrtc.Videoengine.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Debug
5 | AnyCPU
6 | 8.0.30703
7 | 2.0
8 | {074DD245-D7F1-448D-8F54-6F9C2301120A}
9 | {10368E6C-D01B-4462-8E8B-01FC667A7035};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}
10 | Library
11 | Properties
12 | Org.Webrtc.Videoengine
13 | Org.Webrtc.Videoengine
14 | 512
15 | v4.1
16 |
17 |
18 |
19 | true
20 | full
21 | false
22 | bin\Debug\
23 | DEBUG;TRACE
24 | prompt
25 | 4
26 |
27 |
28 | pdbonly
29 | true
30 | bin\Release\
31 | TRACE
32 | prompt
33 | 4
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
67 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/Resources/Layout/aconfig.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
10 |
13 |
14 |
19 |
20 |
23 |
24 |
25 |
28 |
31 |
32 |
35 |
36 |
39 |
40 |
43 |
44 |
45 |
46 |
49 |
53 |
54 |
58 |
59 |
63 |
64 |
65 |
66 |
69 |
73 |
74 |
78 |
79 |
83 |
84 |
85 |
86 |
87 |
88 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/res/layout/aconfig.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
10 |
13 |
14 |
19 |
20 |
23 |
24 |
25 |
28 |
31 |
32 |
35 |
36 |
39 |
40 |
43 |
44 |
45 |
46 |
49 |
53 |
54 |
58 |
59 |
63 |
64 |
65 |
66 |
69 |
73 |
74 |
78 |
79 |
83 |
84 |
85 |
86 |
87 |
88 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/Resources/Layout/main.xml:
--------------------------------------------------------------------------------
1 |
2 |
6 |
7 |
13 |
14 |
17 |
18 |
19 |
23 |
27 |
28 |
32 |
33 |
37 |
38 |
42 |
43 |
47 |
48 |
49 |
50 |
54 |
55 |
58 |
59 |
60 |
64 |
69 |
77 |
84 |
91 |
92 |
93 |
94 |
95 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/res/layout/main.xml:
--------------------------------------------------------------------------------
1 |
2 |
6 |
7 |
13 |
14 |
17 |
18 |
19 |
23 |
27 |
28 |
32 |
33 |
37 |
38 |
42 |
43 |
47 |
48 |
49 |
50 |
54 |
55 |
58 |
59 |
60 |
64 |
69 |
77 |
84 |
91 |
92 |
93 |
94 |
95 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | package org.webrtc.videoengine;
12 |
13 | import java.io.File;
14 | import java.lang.reflect.InvocationTargetException;
15 | import java.lang.reflect.Method;
16 | import java.util.ArrayList;
17 | import java.util.List;
18 | import java.util.Locale;
19 |
20 | import android.content.Context;
21 | import android.hardware.Camera.CameraInfo;
22 | import android.hardware.Camera.Parameters;
23 | import android.hardware.Camera.Size;
24 | import android.hardware.Camera;
25 | import android.util.Log;
26 |
27 | import org.json.JSONArray;
28 | import org.json.JSONException;
29 | import org.json.JSONObject;
30 |
31 | public class VideoCaptureDeviceInfoAndroid {
32 | private final static String TAG = "WEBRTC-JC";
33 |
34 | private static boolean isFrontFacing(CameraInfo info) {
35 | return info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT;
36 | }
37 |
38 | private static String deviceUniqueName(int index, CameraInfo info) {
39 | return "Camera " + index +", Facing " +
40 | (isFrontFacing(info) ? "front" : "back") +
41 | ", Orientation "+ info.orientation;
42 | }
43 |
44 | // Returns information about all cameras on the device as a serialized JSON
45 | // array of dictionaries encoding information about a single device. Since
46 | // this reflects static information about the hardware present, there is no
47 | // need to call this function more than once in a single process. It is
48 | // marked "private" as it is only called by native code.
49 | private static String getDeviceInfo() {
50 | try {
51 | JSONArray devices = new JSONArray();
52 | for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
53 | CameraInfo info = new CameraInfo();
54 | Camera.getCameraInfo(i, info);
55 | String uniqueName = deviceUniqueName(i, info);
56 | JSONObject cameraDict = new JSONObject();
57 | devices.put(cameraDict);
58 | List supportedSizes;
59 | List supportedFpsRanges;
60 | try {
61 | Camera camera = Camera.open(i);
62 | Parameters parameters = camera.getParameters();
63 | supportedSizes = parameters.getSupportedPreviewSizes();
64 | supportedFpsRanges = parameters.getSupportedPreviewFpsRange();
65 | camera.release();
66 | Log.d(TAG, uniqueName);
67 | } catch (RuntimeException e) {
68 | Log.e(TAG, "Failed to open " + uniqueName + ", skipping");
69 | continue;
70 | }
71 | JSONArray sizes = new JSONArray();
72 | for (Size supportedSize : supportedSizes) {
73 | JSONObject size = new JSONObject();
74 | size.put("width", supportedSize.width);
75 | size.put("height", supportedSize.height);
76 | sizes.put(size);
77 | }
78 | // Android SDK deals in integral "milliframes per second"
79 | // (i.e. fps*1000, instead of floating-point frames-per-second) so we
80 | // preserve that through the Java->C++->Java round-trip.
81 | int[] mfps = supportedFpsRanges.get(supportedFpsRanges.size() - 1);
82 | cameraDict.put("name", uniqueName);
83 | cameraDict.put("front_facing", isFrontFacing(info))
84 | .put("orientation", info.orientation)
85 | .put("sizes", sizes)
86 | .put("min_mfps", mfps[Parameters.PREVIEW_FPS_MIN_INDEX])
87 | .put("max_mfps", mfps[Parameters.PREVIEW_FPS_MAX_INDEX]);
88 | }
89 | String ret = devices.toString(2);
90 | return ret;
91 | } catch (JSONException e) {
92 | throw new RuntimeException(e);
93 | }
94 | }
95 | }
96 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/build.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
7 |
8 |
9 |
29 |
30 |
31 |
35 |
36 |
37 |
38 |
39 |
40 |
49 |
50 |
51 |
52 |
56 |
57 |
69 |
70 |
71 |
89 |
90 |
91 |
92 |
93 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/gen/org/webrtc/videoengineapp/R.java:
--------------------------------------------------------------------------------
1 | /* AUTO-GENERATED FILE. DO NOT MODIFY.
2 | *
3 | * This class was automatically generated by the
4 | * aapt tool from the resource data it found. It
5 | * should not be modified by hand.
6 | */
7 |
8 | package org.webrtc.videoengineapp;
9 |
public final class R {
    // Styleable/attr resources: none declared by this app.
    public static final class attr {
    }
    // IDs for res/drawable assets.
    public static final class drawable {
        public static final int logo=0x7f020000;
    }
    // View IDs referenced from the layout files (aconfig/both/main/row/send/
    // tabhost/vconfig).
    public static final class id {
        public static final int LinearLayout02=0x7f050003;
        public static final int LinearLayout03=0x7f050019;
        public static final int TextView01=0x7f050001;
        public static final int TextView02=0x7f050017;
        public static final int TextView03=0x7f050000;
        public static final int btExit=0x7f05000f;
        public static final int btStartStopCall=0x7f05000e;
        public static final int btSwitchCamera=0x7f05000d;
        public static final int cbAECM=0x7f050006;
        public static final int cbAutoGainControl=0x7f050008;
        public static final int cbDebugRecording=0x7f05000a;
        public static final int cbLoopback=0x7f050015;
        public static final int cbNack=0x7f05002b;
        public static final int cbNoiseSuppression=0x7f050007;
        public static final int cbSpeaker=0x7f050009;
        public static final int cbStats=0x7f050016;
        public static final int cbVideoRTPDump=0x7f05002c;
        public static final int cbVideoReceive=0x7f050012;
        public static final int cbVideoSend=0x7f050013;
        public static final int cbVoice=0x7f050014;
        public static final int cbVoiceRTPDump=0x7f05000b;
        public static final int etARxPort=0x7f050005;
        public static final int etATxPort=0x7f050004;
        public static final int etRemoteIp=0x7f050018;
        public static final int etVRxPort=0x7f05002a;
        public static final int etVTxPort=0x7f050029;
        public static final int image=0x7f05001e;
        public static final int ivPreview=0x7f050021;
        public static final int llLocalView=0x7f05000c;
        public static final int llRemoteView=0x7f050010;
        public static final int radio_group1=0x7f05001a;
        public static final int radio_mediacodec=0x7f05001d;
        public static final int radio_opengl=0x7f05001b;
        public static final int radio_surface=0x7f05001c;
        public static final int spCodecSize=0x7f050028;
        public static final int spCodecType=0x7f050026;
        public static final int spVoiceCodecType=0x7f050002;
        public static final int spinner_row=0x7f05001f;
        public static final int svLocal=0x7f050020;
        public static final int tab_aconfig=0x7f050025;
        public static final int tab_config=0x7f050023;
        public static final int tab_vconfig=0x7f050024;
        public static final int tab_video=0x7f050022;
        public static final int tvCodecSize=0x7f050027;
        public static final int tvTitle=0x7f050011;
    }
    // IDs for res/layout XML files.
    public static final class layout {
        public static final int aconfig=0x7f030000;
        public static final int both=0x7f030001;
        public static final int main=0x7f030002;
        public static final int row=0x7f030003;
        public static final int send=0x7f030004;
        public static final int tabhost=0x7f030005;
        public static final int vconfig=0x7f030006;
    }
    // IDs for strings declared in res/values/strings.xml.
    public static final class string {
        public static final int AECM=0x7f040018;
        public static final int AutoGainControl=0x7f040016;
        public static final int NoiseSuppression=0x7f040019;
        public static final int VoiceActivityDetection=0x7f040017;
        public static final int aRxPort=0x7f040015;
        public static final int aTxPort=0x7f040014;
        public static final int app_name=0x7f040000;
        public static final int backCamera=0x7f04001c;
        public static final int codecSize=0x7f040006;
        public static final int codecType=0x7f040005;
        public static final int codectype_prompt=0x7f040003;
        public static final int debugrecording=0x7f040021;
        public static final int demoTitle=0x7f040004;
        public static final int enableVideoReceive=0x7f04000e;
        public static final int enableVideoSend=0x7f04000f;
        public static final int enableVoice=0x7f04000d;
        public static final int error=0x7f040001;
        public static final int errorCamera=0x7f040002;
        public static final int exit=0x7f04001f;
        public static final int frontCamera=0x7f04001b;
        public static final int gSettings=0x7f040010;
        public static final int loopback=0x7f040008;
        public static final int mediacodec=0x7f040024;
        public static final int nack=0x7f04001a;
        public static final int opengl=0x7f040025;
        public static final int remoteIp=0x7f040007;
        public static final int rtpdump=0x7f040022;
        public static final int speaker=0x7f040020;
        public static final int startBoth=0x7f04000c;
        public static final int startCall=0x7f04001d;
        public static final int startListen=0x7f04000a;
        public static final int startSend=0x7f04000b;
        public static final int stats=0x7f040009;
        public static final int stopCall=0x7f04001e;
        public static final int surfaceview=0x7f040023;
        public static final int vRxPort=0x7f040013;
        public static final int vSettings=0x7f040011;
        public static final int vTxPort=0x7f040012;
    }
}
113 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/src/org/webrtc/videoengineapp/ViEAndroidJavaAPI.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | package org.webrtc.videoengineapp;
12 |
13 | import android.app.Activity;
14 | import android.content.Context;
15 | import android.util.Log;
16 | import android.view.SurfaceHolder;
17 | import android.view.SurfaceView;
18 |
19 | public class ViEAndroidJavaAPI {
20 |
21 | public ViEAndroidJavaAPI(Context context) {
22 | Log.d("*WEBRTCJ*", "Loading ViEAndroidJavaAPI...");
23 | System.loadLibrary("webrtc-video-demo-jni");
24 |
25 | Log.d("*WEBRTCJ*", "Calling native init...");
26 | if (!NativeInit(context)) {
27 | Log.e("*WEBRTCJ*", "Native init failed");
28 | throw new RuntimeException("Native init failed");
29 | }
30 | else {
31 | Log.d("*WEBRTCJ*", "Native init successful");
32 | }
33 | String a = "";
34 | a.getBytes();
35 | }
36 |
37 | // API Native
38 | private native boolean NativeInit(Context context);
39 |
40 | // Video Engine API
41 | // Initialization and Termination functions
42 | public native int GetVideoEngine();
43 | public native int Init(boolean enableTrace);
44 | public native int Terminate();
45 |
46 | public native int StartSend(int channel);
47 | public native int StopRender(int channel);
48 | public native int StopSend(int channel);
49 | public native int StartReceive(int channel);
50 | public native int StopReceive(int channel);
51 | // Channel functions
52 | public native int CreateChannel(int voiceChannel);
53 | // Receiver & Destination functions
54 | public native int SetLocalReceiver(int channel, int port);
55 | public native int SetSendDestination(int channel, int port, String ipaddr);
56 | // Codec
57 | public native String[] GetCodecs();
58 | public native int SetReceiveCodec(int channel, int codecNum,
59 | int intbitRate, int width,
60 | int height, int frameRate);
61 | public native int SetSendCodec(int channel, int codecNum,
62 | int intbitRate, int width,
63 | int height, int frameRate);
64 | // Rendering
65 | public native int AddRemoteRenderer(int channel, Object glSurface);
66 | public native int RemoveRemoteRenderer(int channel);
67 | public native int StartRender(int channel);
68 |
69 | // Capture
70 | public native int StartCamera(int channel, int cameraNum);
71 | public native int StopCamera(int cameraId);
72 | public native int GetCameraOrientation(int cameraNum);
73 | public native int SetRotation(int cameraId,int degrees);
74 |
75 | // External Codec
76 | public native int SetExternalMediaCodecDecoderRenderer(
77 | int channel, Object glSurface);
78 |
79 | // NACK
80 | public native int EnableNACK(int channel, boolean enable);
81 |
82 | // PLI
83 | public native int EnablePLI(int channel, boolean enable);
84 |
85 | // Enable stats callback
86 | public native int SetCallback(int channel, IViEAndroidCallback callback);
87 |
88 | public native int StartIncomingRTPDump(int channel, String file);
89 | public native int StopIncomingRTPDump(int channel);
90 |
91 | // Voice Engine API
92 | // Create and Delete functions
93 | public native boolean VoE_Create(Context context);
94 | public native boolean VoE_Delete();
95 |
96 | // Initialization and Termination functions
97 | public native int VoE_Init(boolean enableTrace);
98 | public native int VoE_Terminate();
99 |
100 | // Channel functions
101 | public native int VoE_CreateChannel();
102 | public native int VoE_DeleteChannel(int channel);
103 | public native int ViE_DeleteChannel(int channel);
104 |
105 | // Receiver & Destination functions
106 | public native int VoE_SetLocalReceiver(int channel, int port);
107 | public native int VoE_SetSendDestination(int channel, int port,
108 | String ipaddr);
109 |
110 | // Media functions
111 | public native int VoE_StartListen(int channel);
112 | public native int VoE_StartPlayout(int channel);
113 | public native int VoE_StartSend(int channel);
114 | public native int VoE_StopListen(int channel);
115 | public native int VoE_StopPlayout(int channel);
116 | public native int VoE_StopSend(int channel);
117 |
118 | // Volume
119 | public native int VoE_SetSpeakerVolume(int volume);
120 |
121 | // Hardware
122 | public native int VoE_SetLoudspeakerStatus(boolean enable);
123 |
124 | // Playout file locally
125 | public native int VoE_StartPlayingFileLocally(
126 | int channel,
127 | String fileName,
128 | boolean loop);
129 | public native int VoE_StopPlayingFileLocally(int channel);
130 |
131 | // Play file as microphone
132 | public native int VoE_StartPlayingFileAsMicrophone(
133 | int channel,
134 | String fileName,
135 | boolean loop);
136 | public native int VoE_StopPlayingFileAsMicrophone(int channel);
137 |
138 | // Codec-setting functions
139 | public native int VoE_NumOfCodecs();
140 | public native String[] VoE_GetCodecs();
141 | public native int VoE_SetSendCodec(int channel, int index);
142 |
143 | //VoiceEngine funtions
144 | public native int VoE_SetECStatus(boolean enable);
145 | public native int VoE_SetAGCStatus(boolean enable);
146 | public native int VoE_SetNSStatus(boolean enable);
147 | public native int VoE_StartDebugRecording(String file);
148 | public native int VoE_StopDebugRecording();
149 | public native int VoE_StartIncomingRTPDump(int channel, String file);
150 | public native int VoE_StopIncomingRTPDump(int channel);
151 | }
152 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/modules/video_render/android/java/src/org/webrtc/videoengine/ViESurfaceRenderer.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | package org.webrtc.videoengine;
12 |
13 | // The following four imports are needed saveBitmapToJPEG which
14 | // is for debug only
15 | import java.io.ByteArrayOutputStream;
16 | import java.io.FileNotFoundException;
17 | import java.io.FileOutputStream;
18 | import java.io.IOException;
19 | import java.nio.ByteBuffer;
20 |
21 | import android.graphics.Bitmap;
22 | import android.graphics.Canvas;
23 | import android.graphics.Rect;
24 | import android.util.Log;
25 | import android.view.SurfaceHolder;
26 | import android.view.SurfaceView;
27 | import android.view.SurfaceHolder.Callback;
28 |
29 | public class ViESurfaceRenderer implements Callback {
30 |
31 | private final static String TAG = "WEBRTC";
32 |
33 | // the bitmap used for drawing.
34 | private Bitmap bitmap = null;
35 | private ByteBuffer byteBuffer = null;
36 | private SurfaceHolder surfaceHolder;
37 | // Rect of the source bitmap to draw
38 | private Rect srcRect = new Rect();
39 | // Rect of the destination canvas to draw to
40 | private Rect dstRect = new Rect();
41 | private float dstTopScale = 0;
42 | private float dstBottomScale = 1;
43 | private float dstLeftScale = 0;
44 | private float dstRightScale = 1;
45 |
46 | public ViESurfaceRenderer(SurfaceView view) {
47 | surfaceHolder = view.getHolder();
48 | if(surfaceHolder == null)
49 | return;
50 | surfaceHolder.addCallback(this);
51 | }
52 |
53 | // surfaceChanged and surfaceCreated share this function
54 | private void changeDestRect(int dstWidth, int dstHeight) {
55 | dstRect.right = (int)(dstRect.left + dstRightScale * dstWidth);
56 | dstRect.bottom = (int)(dstRect.top + dstBottomScale * dstHeight);
57 | }
58 |
59 | public void surfaceChanged(SurfaceHolder holder, int format,
60 | int in_width, int in_height) {
61 | Log.d(TAG, "ViESurfaceRender::surfaceChanged");
62 |
63 | changeDestRect(in_width, in_height);
64 |
65 | Log.d(TAG, "ViESurfaceRender::surfaceChanged" +
66 | " in_width:" + in_width + " in_height:" + in_height +
67 | " srcRect.left:" + srcRect.left +
68 | " srcRect.top:" + srcRect.top +
69 | " srcRect.right:" + srcRect.right +
70 | " srcRect.bottom:" + srcRect.bottom +
71 | " dstRect.left:" + dstRect.left +
72 | " dstRect.top:" + dstRect.top +
73 | " dstRect.right:" + dstRect.right +
74 | " dstRect.bottom:" + dstRect.bottom);
75 | }
76 |
77 | public void surfaceCreated(SurfaceHolder holder) {
78 | Canvas canvas = surfaceHolder.lockCanvas();
79 | if(canvas != null) {
80 | Rect dst = surfaceHolder.getSurfaceFrame();
81 | if(dst != null) {
82 | changeDestRect(dst.right - dst.left, dst.bottom - dst.top);
83 | Log.d(TAG, "ViESurfaceRender::surfaceCreated" +
84 | " dst.left:" + dst.left +
85 | " dst.top:" + dst.top +
86 | " dst.right:" + dst.right +
87 | " dst.bottom:" + dst.bottom +
88 | " srcRect.left:" + srcRect.left +
89 | " srcRect.top:" + srcRect.top +
90 | " srcRect.right:" + srcRect.right +
91 | " srcRect.bottom:" + srcRect.bottom +
92 | " dstRect.left:" + dstRect.left +
93 | " dstRect.top:" + dstRect.top +
94 | " dstRect.right:" + dstRect.right +
95 | " dstRect.bottom:" + dstRect.bottom);
96 | }
97 | surfaceHolder.unlockCanvasAndPost(canvas);
98 | }
99 | }
100 |
101 | public void surfaceDestroyed(SurfaceHolder holder) {
102 | Log.d(TAG, "ViESurfaceRenderer::surfaceDestroyed");
103 | bitmap = null;
104 | byteBuffer = null;
105 | }
106 |
107 | public Bitmap CreateBitmap(int width, int height) {
108 | Log.d(TAG, "CreateByteBitmap " + width + ":" + height);
109 | if (bitmap == null) {
110 | try {
111 | android.os.Process.setThreadPriority(
112 | android.os.Process.THREAD_PRIORITY_DISPLAY);
113 | }
114 | catch (Exception e) {
115 | }
116 | }
117 | bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
118 | srcRect.left = 0;
119 | srcRect.top = 0;
120 | srcRect.bottom = height;
121 | srcRect.right = width;
122 | return bitmap;
123 | }
124 |
125 | public ByteBuffer CreateByteBuffer(int width, int height) {
126 | Log.d(TAG, "CreateByteBuffer " + width + ":" + height);
127 | if (bitmap == null) {
128 | bitmap = CreateBitmap(width, height);
129 | byteBuffer = ByteBuffer.allocateDirect(width * height * 2);
130 | }
131 | return byteBuffer;
132 | }
133 |
134 | public void SetCoordinates(float left, float top,
135 | float right, float bottom) {
136 | Log.d(TAG, "SetCoordinates " + left + "," + top + ":" +
137 | right + "," + bottom);
138 | dstLeftScale = left;
139 | dstTopScale = top;
140 | dstRightScale = right;
141 | dstBottomScale = bottom;
142 | }
143 |
144 | // It saves bitmap data to a JPEG picture, this function is for debug only.
145 | private void saveBitmapToJPEG(int width, int height) {
146 | ByteArrayOutputStream byteOutStream = new ByteArrayOutputStream();
147 | bitmap.compress(Bitmap.CompressFormat.JPEG, 100, byteOutStream);
148 |
149 | try{
150 | FileOutputStream output = new FileOutputStream(String.format(
151 | "/sdcard/render_%d.jpg", System.currentTimeMillis()));
152 | output.write(byteOutStream.toByteArray());
153 | output.flush();
154 | output.close();
155 | }
156 | catch (FileNotFoundException e) {
157 | }
158 | catch (IOException e) {
159 | }
160 | }
161 |
162 | public void DrawByteBuffer() {
163 | if(byteBuffer == null)
164 | return;
165 | byteBuffer.rewind();
166 | bitmap.copyPixelsFromBuffer(byteBuffer);
167 | DrawBitmap();
168 | }
169 |
170 | public void DrawBitmap() {
171 | if(bitmap == null)
172 | return;
173 |
174 | Canvas canvas = surfaceHolder.lockCanvas();
175 | if(canvas != null) {
176 | // The follow line is for debug only
177 | // saveBitmapToJPEG(srcRect.right - srcRect.left,
178 | // srcRect.bottom - srcRect.top);
179 | canvas.drawBitmap(bitmap, srcRect, dstRect, null);
180 | surfaceHolder.unlockCanvasAndPost(canvas);
181 | }
182 | }
183 |
184 | }
185 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | package org.webrtc.videoengine;
12 |
13 | import java.io.IOException;
14 | import java.util.Locale;
15 | import java.util.concurrent.locks.ReentrantLock;
16 |
17 | import android.graphics.ImageFormat;
18 | import android.graphics.PixelFormat;
19 | import android.graphics.Rect;
20 | import android.graphics.SurfaceTexture;
21 | import android.graphics.YuvImage;
22 | import android.hardware.Camera;
23 | import android.hardware.Camera.PreviewCallback;
24 | import android.util.Log;
25 | import android.view.SurfaceHolder;
26 | import android.view.SurfaceHolder.Callback;
27 |
28 | // Wrapper for android Camera, with support for direct local preview rendering.
29 | // Threading notes: this class is called from ViE C++ code, and from Camera &
30 | // SurfaceHolder Java callbacks. Since these calls happen on different threads,
31 | // the entry points to this class are all synchronized. This shouldn't present
32 | // a performance bottleneck because only onPreviewFrame() is called more than
33 | // once (and is called serially on a single thread), so the lock should be
34 | // uncontended.
public class VideoCaptureAndroid implements PreviewCallback, Callback {
    private final static String TAG = "WEBRTC-JC";

    private Camera camera;  // Only non-null while capturing.
    // Camera index, as passed to Camera.open().
    private final int id;
    // Facing/orientation info for camera |id|, fetched once at construction.
    private final Camera.CameraInfo info;
    private final long native_capturer;  // |VideoCaptureAndroid*| in C++.
    // Holder of the local-preview surface, or null when previewing to a
    // dummy texture instead.
    private SurfaceHolder localPreview;
    // Placeholder texture handed to Camera when there is no local preview;
    // its frame-ready callbacks are never listened to (see startCapture).
    private SurfaceTexture dummySurfaceTexture;
    // Arbitrary queue depth.  Higher number means more memory allocated & held,
    // lower number means more sensitivity to processing time in the client (and
    // potentially stalling the capturer if it runs out of buffers to write to).
    private final int numCaptureBuffers = 3;

    // Constructed from native code with the C++ peer's address; does not
    // touch the camera hardware yet.
    public VideoCaptureAndroid(int id, long native_capturer) {
        this.id = id;
        this.native_capturer = native_capturer;
        this.info = new Camera.CameraInfo();
        Camera.getCameraInfo(id, info);
    }

    // Called by native code.  Returns true if capturer is started.
    //
    // Note that this actually opens the camera, which can be a slow operation and
    // thus might be done on a background thread, but ViE API needs a
    // synchronous success return value so we can't do that.
    //
    // min_mfps/max_mfps are passed straight to setPreviewFpsRange(), whose
    // units are frames-per-second * 1000.
    private synchronized boolean startCapture(
            int width, int height, int min_mfps, int max_mfps) {
        Log.d(TAG, "startCapture: " + width + "x" + height + "@" +
                min_mfps + ":" + max_mfps);
        Throwable error = null;
        try {
            camera = Camera.open(id);

            localPreview = ViERenderer.GetLocalRenderer();
            if (localPreview != null) {
                localPreview.addCallback(this);
                // Only attach the display now if the surface already exists;
                // otherwise surfaceCreated() will attach it later.
                if (localPreview.getSurface() != null &&
                        localPreview.getSurface().isValid()) {
                    camera.setPreviewDisplay(localPreview);
                }
            } else {
                // No local renderer (we only care about onPreviewFrame() buffers, not a
                // directly-displayed UI element).  Camera won't capture without
                // setPreview{Texture,Display}, so we create a dummy SurfaceTexture and
                // hand it over to Camera, but never listen for frame-ready callbacks,
                // and never call updateTexImage on it.
                try {
                    // "42" because http://goo.gl/KaEn8
                    dummySurfaceTexture = new SurfaceTexture(42);
                    camera.setPreviewTexture(dummySurfaceTexture);
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }

            // Parameters must be fully configured before startPreview().
            Camera.Parameters parameters = camera.getParameters();
            Log.d(TAG, "isVideoStabilizationSupported: " +
                    parameters.isVideoStabilizationSupported());
            if (parameters.isVideoStabilizationSupported()) {
                parameters.setVideoStabilization(true);
            }
            parameters.setPreviewSize(width, height);
            parameters.setPreviewFpsRange(min_mfps, max_mfps);
            int format = ImageFormat.NV21;
            parameters.setPreviewFormat(format);
            camera.setParameters(parameters);
            // Pre-register capture buffers so onPreviewFrame() reuses memory
            // instead of allocating per frame.
            int bufSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
            for (int i = 0; i < numCaptureBuffers; i++) {
                camera.addCallbackBuffer(new byte[bufSize]);
            }
            camera.setPreviewCallbackWithBuffer(this);
            camera.startPreview();
            return true;
        } catch (IOException e) {
            error = e;
        } catch (RuntimeException e) {
            error = e;
        }
        Log.e(TAG, "startCapture failed", error);
        // Best-effort cleanup so a later startCapture() can retry.
        if (camera != null) {
            stopCapture();
        }
        return false;
    }

    // Called by native code.  Returns true when camera is known to be stopped.
    private synchronized boolean stopCapture() {
        Log.d(TAG, "stopCapture");
        if (camera == null) {
            throw new RuntimeException("Camera is already stopped!");
        }
        Throwable error = null;
        try {
            // Detach whichever preview sink startCapture() attached.
            if (localPreview != null) {
                localPreview.removeCallback(this);
                camera.setPreviewDisplay(null);
            } else {
                camera.setPreviewTexture(null);
            }
            camera.setPreviewCallbackWithBuffer(null);
            camera.stopPreview();
            camera.release();
            camera = null;
            return true;
        } catch (IOException e) {
            error = e;
        } catch (RuntimeException e) {
            error = e;
        }
        Log.e(TAG, "Failed to stop camera", error);
        return false;
    }

    // Hands one NV21 frame to the C++ peer identified by |captureObject|.
    private native void ProvideCameraFrame(
            byte[] data, int length, long captureObject);

    public synchronized void onPreviewFrame(byte[] data, Camera camera) {
        ProvideCameraFrame(data, data.length, native_capturer);
        // Return the buffer to the camera's queue for reuse.
        camera.addCallbackBuffer(data);
    }

    // Sets the rotation of the preview render window.
    // Does not affect the captured video image.
    // Called by native code.
    private synchronized void setPreviewRotation(int rotation) {
        Log.v(TAG, "setPreviewRotation:" + rotation);

        if (camera == null) {
            return;
        }

        int resultRotation = 0;
        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            // This is a front facing camera.  SetDisplayOrientation will flip
            // the image horizontally before doing the rotation.
            resultRotation = ( 360 - rotation ) % 360;  // Compensate for the mirror.
        } else {
            // Back-facing camera.
            resultRotation = rotation;
        }
        camera.setDisplayOrientation(resultRotation);
    }

    // Preview size/format changes are driven by startCapture(), so holder
    // geometry changes are intentionally ignored here.
    public synchronized void surfaceChanged(
            SurfaceHolder holder, int format, int width, int height) {
        Log.d(TAG, "VideoCaptureAndroid::surfaceChanged ignored: " +
                format + ": " + width + "x" + height);
    }

    // Attaches the (now valid) local-preview surface to a running camera.
    public synchronized void surfaceCreated(SurfaceHolder holder) {
        Log.d(TAG, "VideoCaptureAndroid::surfaceCreated");
        try {
            if (camera != null) {
                camera.setPreviewDisplay(holder);
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    // Detaches the preview surface before it disappears.
    public synchronized void surfaceDestroyed(SurfaceHolder holder) {
        Log.d(TAG, "VideoCaptureAndroid::surfaceDestroyed");
        try {
            if (camera != null) {
                camera.setPreviewDisplay(null);
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}
207 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/WebRtc.Mono.Droid.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Debug
5 | AnyCPU
6 | 8.0.30703
7 | 2.0
8 | {A15474CE-91C0-4A31-AE49-201872C530A4}
9 | {EFBA0AD7-5A72-4C68-AF49-83D382785DCF};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}
10 | Library
11 | Properties
12 | WebRtc.Mono.Droid
13 | WebRtc.Mono.Droid
14 | 512
15 | true
16 | Resources\Resource.Designer.cs
17 | Off
18 | Properties\AndroidManifest.xml
19 |
20 | v4.2
21 | armeabi-v7a
22 |
23 |
24 |
25 |
26 |
27 |
28 | true
29 | full
30 | false
31 | bin\Debug\
32 | DEBUG;TRACE
33 | prompt
34 | 4
35 | True
36 | None
37 |
38 |
39 | pdbonly
40 | true
41 | bin\Release\
42 | TRACE
43 | prompt
44 | 4
45 | False
46 | SdkOnly
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 | Designer
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 | {45a11329-f170-4b2d-9ccc-c09ef500c114}
122 | Org.Webrtc.Videoengineapp
123 |
124 |
125 | {074dd245-d7f1-448d-8f54-6f9c2301120a}
126 | Org.Webrtc.Videoengine
127 |
128 |
129 | {9b5daa9b-7087-47a3-8b7a-8fb8d794696a}
130 | Org.Webrtc.Voiceengine
131 |
132 |
133 |
134 |
135 |
142 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/Resources/Resource.Designer.cs:
--------------------------------------------------------------------------------
1 | #pragma warning disable 1591
2 | //------------------------------------------------------------------------------
3 | //
4 | // This code was generated by a tool.
5 | // Runtime Version:4.0.30319.18408
6 | //
7 | // Changes to this file may cause incorrect behavior and will be lost if
8 | // the code is regenerated.
9 | //
10 | //------------------------------------------------------------------------------
11 |
12 | [assembly: global::Android.Runtime.ResourceDesignerAttribute("WebRtc.Mono.Droid.Resource", IsApplication=true)]
13 |
14 | namespace WebRtc.Mono.Droid
15 | {
16 |
17 |
18 | [System.CodeDom.Compiler.GeneratedCodeAttribute("Xamarin.Android.Build.Tasks", "1.0.0.0")]
// Xamarin.Android mirror of the aapt-generated R class; the constant values
// below must match the hex IDs in the generated R.java (decimal == 0x7f...).
// Generated file — edits (including these comments) are lost on regeneration.
public partial class Resource
{

    static Resource()
    {
        global::Android.Runtime.ResourceIdManager.UpdateIdValues();
    }

    public static void UpdateIdValues()
    {
    }

    // Mirrors R.attr (no attribute resources are defined in this project).
    public partial class Attribute
    {

        static Attribute()
        {
            global::Android.Runtime.ResourceIdManager.UpdateIdValues();
        }

        private Attribute()
        {
        }
    }

    // Mirrors R.drawable (res/drawable/).
    public partial class Drawable
    {

        // aapt resource value: 0x7f020000
        public const int logo = 2130837504;

        static Drawable()
        {
            global::Android.Runtime.ResourceIdManager.UpdateIdValues();
        }

        private Drawable()
        {
        }
    }

    // Mirrors R.id (view IDs declared in the layout files).
    public partial class Id
    {

        // aapt resource value: 0x7f050003
        public const int LinearLayout02 = 2131034115;

        // aapt resource value: 0x7f050019
        public const int LinearLayout03 = 2131034137;

        // aapt resource value: 0x7f050001
        public const int TextView01 = 2131034113;

        // aapt resource value: 0x7f050017
        public const int TextView02 = 2131034135;

        // aapt resource value: 0x7f050000
        public const int TextView03 = 2131034112;

        // aapt resource value: 0x7f05000f
        public const int btExit = 2131034127;

        // aapt resource value: 0x7f05000e
        public const int btStartStopCall = 2131034126;

        // aapt resource value: 0x7f05000d
        public const int btSwitchCamera = 2131034125;

        // aapt resource value: 0x7f050006
        public const int cbAECM = 2131034118;

        // aapt resource value: 0x7f050008
        public const int cbAutoGainControl = 2131034120;

        // aapt resource value: 0x7f05000a
        public const int cbDebugRecording = 2131034122;

        // aapt resource value: 0x7f050015
        public const int cbLoopback = 2131034133;

        // aapt resource value: 0x7f05002b
        public const int cbNack = 2131034155;

        // aapt resource value: 0x7f050007
        public const int cbNoiseSuppression = 2131034119;

        // aapt resource value: 0x7f050009
        public const int cbSpeaker = 2131034121;

        // aapt resource value: 0x7f050016
        public const int cbStats = 2131034134;

        // aapt resource value: 0x7f05002c
        public const int cbVideoRTPDump = 2131034156;

        // aapt resource value: 0x7f050012
        public const int cbVideoReceive = 2131034130;

        // aapt resource value: 0x7f050013
        public const int cbVideoSend = 2131034131;

        // aapt resource value: 0x7f050014
        public const int cbVoice = 2131034132;

        // aapt resource value: 0x7f05000b
        public const int cbVoiceRTPDump = 2131034123;

        // aapt resource value: 0x7f050005
        public const int etARxPort = 2131034117;

        // aapt resource value: 0x7f050004
        public const int etATxPort = 2131034116;

        // aapt resource value: 0x7f050018
        public const int etRemoteIp = 2131034136;

        // aapt resource value: 0x7f05002a
        public const int etVRxPort = 2131034154;

        // aapt resource value: 0x7f050029
        public const int etVTxPort = 2131034153;

        // aapt resource value: 0x7f05001e
        public const int image = 2131034142;

        // aapt resource value: 0x7f050021
        public const int ivPreview = 2131034145;

        // aapt resource value: 0x7f05000c
        public const int llLocalView = 2131034124;

        // aapt resource value: 0x7f050010
        public const int llRemoteView = 2131034128;

        // aapt resource value: 0x7f05001a
        public const int radio_group1 = 2131034138;

        // aapt resource value: 0x7f05001d
        public const int radio_mediacodec = 2131034141;

        // aapt resource value: 0x7f05001b
        public const int radio_opengl = 2131034139;

        // aapt resource value: 0x7f05001c
        public const int radio_surface = 2131034140;

        // aapt resource value: 0x7f050028
        public const int spCodecSize = 2131034152;

        // aapt resource value: 0x7f050026
        public const int spCodecType = 2131034150;

        // aapt resource value: 0x7f050002
        public const int spVoiceCodecType = 2131034114;

        // aapt resource value: 0x7f05001f
        public const int spinner_row = 2131034143;

        // aapt resource value: 0x7f050020
        public const int svLocal = 2131034144;

        // aapt resource value: 0x7f050025
        public const int tab_aconfig = 2131034149;

        // aapt resource value: 0x7f050023
        public const int tab_config = 2131034147;

        // aapt resource value: 0x7f050024
        public const int tab_vconfig = 2131034148;

        // aapt resource value: 0x7f050022
        public const int tab_video = 2131034146;

        // aapt resource value: 0x7f050027
        public const int tvCodecSize = 2131034151;

        // aapt resource value: 0x7f050011
        public const int tvTitle = 2131034129;

        static Id()
        {
            global::Android.Runtime.ResourceIdManager.UpdateIdValues();
        }

        private Id()
        {
        }
    }

    // Mirrors R.layout (res/layout/).
    public partial class Layout
    {

        // aapt resource value: 0x7f030000
        public const int aconfig = 2130903040;

        // aapt resource value: 0x7f030001
        public const int both = 2130903041;

        // aapt resource value: 0x7f030002
        public const int main = 2130903042;

        // aapt resource value: 0x7f030003
        public const int row = 2130903043;

        // aapt resource value: 0x7f030004
        public const int send = 2130903044;

        // aapt resource value: 0x7f030005
        public const int tabhost = 2130903045;

        // aapt resource value: 0x7f030006
        public const int vconfig = 2130903046;

        static Layout()
        {
            global::Android.Runtime.ResourceIdManager.UpdateIdValues();
        }

        private Layout()
        {
        }
    }

    // Mirrors R.string (res/values/strings.xml).
    public partial class String
    {

        // aapt resource value: 0x7f040018
        public const int AECM = 2130968600;

        // aapt resource value: 0x7f040016
        public const int AutoGainControl = 2130968598;

        // aapt resource value: 0x7f040019
        public const int NoiseSuppression = 2130968601;

        // aapt resource value: 0x7f040017
        public const int VoiceActivityDetection = 2130968599;

        // aapt resource value: 0x7f040015
        public const int aRxPort = 2130968597;

        // aapt resource value: 0x7f040014
        public const int aTxPort = 2130968596;

        // aapt resource value: 0x7f040000
        public const int app_name = 2130968576;

        // aapt resource value: 0x7f04001c
        public const int backCamera = 2130968604;

        // aapt resource value: 0x7f040006
        public const int codecSize = 2130968582;

        // aapt resource value: 0x7f040005
        public const int codecType = 2130968581;

        // aapt resource value: 0x7f040003
        public const int codectype_prompt = 2130968579;

        // aapt resource value: 0x7f040021
        public const int debugrecording = 2130968609;

        // aapt resource value: 0x7f040004
        public const int demoTitle = 2130968580;

        // aapt resource value: 0x7f04000e
        public const int enableVideoReceive = 2130968590;

        // aapt resource value: 0x7f04000f
        public const int enableVideoSend = 2130968591;

        // aapt resource value: 0x7f04000d
        public const int enableVoice = 2130968589;

        // aapt resource value: 0x7f040001
        public const int error = 2130968577;

        // aapt resource value: 0x7f040002
        public const int errorCamera = 2130968578;

        // aapt resource value: 0x7f04001f
        public const int exit = 2130968607;

        // aapt resource value: 0x7f04001b
        public const int frontCamera = 2130968603;

        // aapt resource value: 0x7f040010
        public const int gSettings = 2130968592;

        // aapt resource value: 0x7f040008
        public const int loopback = 2130968584;

        // aapt resource value: 0x7f040024
        public const int mediacodec = 2130968612;

        // aapt resource value: 0x7f04001a
        public const int nack = 2130968602;

        // aapt resource value: 0x7f040025
        public const int opengl = 2130968613;

        // aapt resource value: 0x7f040007
        public const int remoteIp = 2130968583;

        // aapt resource value: 0x7f040022
        public const int rtpdump = 2130968610;

        // aapt resource value: 0x7f040020
        public const int speaker = 2130968608;

        // aapt resource value: 0x7f04000c
        public const int startBoth = 2130968588;

        // aapt resource value: 0x7f04001d
        public const int startCall = 2130968605;

        // aapt resource value: 0x7f04000a
        public const int startListen = 2130968586;

        // aapt resource value: 0x7f04000b
        public const int startSend = 2130968587;

        // aapt resource value: 0x7f040009
        public const int stats = 2130968585;

        // aapt resource value: 0x7f04001e
        public const int stopCall = 2130968606;

        // aapt resource value: 0x7f040023
        public const int surfaceview = 2130968611;

        // aapt resource value: 0x7f040013
        public const int vRxPort = 2130968595;

        // aapt resource value: 0x7f040011
        public const int vSettings = 2130968593;

        // aapt resource value: 0x7f040012
        public const int vTxPort = 2130968594;

        static String()
        {
            global::Android.Runtime.ResourceIdManager.UpdateIdValues();
        }

        private String()
        {
        }
    }
}
369 | }
370 | #pragma warning restore 1591
371 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/src/org/webrtc/videoengine/ViEMediaCodecDecoder.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | package org.webrtc.videoengine;
12 |
13 | import android.media.MediaCodec;
14 | import android.media.MediaExtractor;
15 | import android.media.MediaFormat;
16 | import android.os.Handler;
17 | import android.os.Looper;
18 | import android.os.Message;
19 | import android.util.Log;
20 | import android.view.Surface;
21 | import android.view.SurfaceView;
22 |
23 | import java.io.IOException;
24 | import java.nio.ByteBuffer;
25 | import java.util.LinkedList;
26 |
27 | class CodecState {
28 | private static final String TAG = "CodecState";
29 |
30 | private ViEMediaCodecDecoder mView;
31 | private MediaFormat mFormat;
32 | private boolean mSawInputEOS, mSawOutputEOS;
33 |
34 | private MediaCodec mCodec;
35 | private MediaFormat mOutputFormat;
36 | private ByteBuffer[] mCodecInputBuffers;
37 | private ByteBuffer[] mCodecOutputBuffers;
38 |
39 | private LinkedList mAvailableInputBufferIndices;
40 | private LinkedList mAvailableOutputBufferIndices;
41 | private LinkedList mAvailableOutputBufferInfos;
42 |
43 | private long mLastMediaTimeUs;
44 |
45 | public CodecState(
46 | ViEMediaCodecDecoder view,
47 | MediaFormat format,
48 | MediaCodec codec) {
49 | mView = view;
50 | mFormat = format;
51 | mSawInputEOS = mSawOutputEOS = false;
52 |
53 | mCodec = codec;
54 |
55 | mCodec.start();
56 | mCodecInputBuffers = mCodec.getInputBuffers();
57 | mCodecOutputBuffers = mCodec.getOutputBuffers();
58 |
59 | mAvailableInputBufferIndices = new LinkedList();
60 | mAvailableOutputBufferIndices = new LinkedList();
61 | mAvailableOutputBufferInfos = new LinkedList();
62 |
63 | mLastMediaTimeUs = 0;
64 | }
65 |
66 | public void release() {
67 | mCodec.stop();
68 | mCodecInputBuffers = null;
69 | mCodecOutputBuffers = null;
70 | mOutputFormat = null;
71 |
72 | mAvailableOutputBufferInfos = null;
73 | mAvailableOutputBufferIndices = null;
74 | mAvailableInputBufferIndices = null;
75 |
76 | mCodec.release();
77 | mCodec = null;
78 | }
79 |
80 | public void start() {
81 | }
82 |
83 | public void pause() {
84 | }
85 |
86 | public long getCurrentPositionUs() {
87 | return mLastMediaTimeUs;
88 | }
89 |
90 | public void flush() {
91 | mAvailableInputBufferIndices.clear();
92 | mAvailableOutputBufferIndices.clear();
93 | mAvailableOutputBufferInfos.clear();
94 |
95 | mSawInputEOS = false;
96 | mSawOutputEOS = false;
97 |
98 | mCodec.flush();
99 | }
100 |
101 | public void doSomeWork() {
102 | int index = mCodec.dequeueInputBuffer(0 /* timeoutUs */);
103 |
104 | if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
105 | mAvailableInputBufferIndices.add(new Integer(index));
106 | }
107 |
108 | while (feedInputBuffer()) {}
109 |
110 | MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
111 | index = mCodec.dequeueOutputBuffer(info, 0 /* timeoutUs */);
112 |
113 | if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
114 | mOutputFormat = mCodec.getOutputFormat();
115 | } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
116 | mCodecOutputBuffers = mCodec.getOutputBuffers();
117 | } else if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
118 | mAvailableOutputBufferIndices.add(new Integer(index));
119 | mAvailableOutputBufferInfos.add(info);
120 | }
121 |
122 | while (drainOutputBuffer()) {}
123 | }
124 |
125 | /** returns true if more input data could be fed */
126 | private boolean feedInputBuffer() {
127 | if (mSawInputEOS || mAvailableInputBufferIndices.isEmpty()) {
128 | return false;
129 | }
130 |
131 | int index = mAvailableInputBufferIndices.peekFirst().intValue();
132 |
133 | ByteBuffer codecData = mCodecInputBuffers[index];
134 |
135 | if (mView.hasFrame()) {
136 | Frame frame = mView.dequeueFrame();
137 | ByteBuffer buffer = frame.mBuffer;
138 | if (buffer == null) {
139 | return false;
140 | }
141 | if (codecData.capacity() < buffer.capacity()) {
142 | Log.e(TAG, "Buffer is too small to copy a frame.");
143 | // TODO(dwkang): split the frame into the multiple buffer.
144 | }
145 | buffer.rewind();
146 | codecData.rewind();
147 | codecData.put(buffer);
148 | codecData.rewind();
149 |
150 | try {
151 | mCodec.queueInputBuffer(
152 | index, 0 /* offset */, buffer.capacity(), frame.mTimeStampUs,
153 | 0 /* flags */);
154 |
155 | mAvailableInputBufferIndices.removeFirst();
156 | } catch (MediaCodec.CryptoException e) {
157 | Log.d(TAG, "CryptoException w/ errorCode "
158 | + e.getErrorCode() + ", '" + e.getMessage() + "'");
159 | }
160 |
161 | return true;
162 | }
163 | return false;
164 | }
165 |
166 |
167 | /** returns true if more output data could be drained */
168 | private boolean drainOutputBuffer() {
169 | if (mSawOutputEOS || mAvailableOutputBufferIndices.isEmpty()) {
170 | return false;
171 | }
172 |
173 | int index = mAvailableOutputBufferIndices.peekFirst().intValue();
174 | MediaCodec.BufferInfo info = mAvailableOutputBufferInfos.peekFirst();
175 |
176 | if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
177 | Log.d(TAG, "saw output EOS.");
178 |
179 | mSawOutputEOS = true;
180 | return false;
181 | }
182 |
183 | long realTimeUs =
184 | mView.getRealTimeUsForMediaTime(info.presentationTimeUs);
185 | long nowUs = System.currentTimeMillis() * 1000;
186 | long lateUs = nowUs - realTimeUs;
187 |
188 | // video
189 | boolean render;
190 |
191 | // TODO(dwkang): For some extreme cases, just not doing rendering is not enough.
192 | // Need to seek to the next key frame.
193 | if (lateUs < -10000) {
194 | // too early;
195 | return false;
196 | } else if (lateUs > 30000) {
197 | Log.d(TAG, "video late by " + lateUs + " us. Skipping...");
198 | render = false;
199 | } else {
200 | render = true;
201 | mLastMediaTimeUs = info.presentationTimeUs;
202 | }
203 |
204 | MediaFormat format= mCodec.getOutputFormat();
205 | Log.d(TAG, "Video output format :" + format.getInteger(MediaFormat.KEY_COLOR_FORMAT));
206 | mCodec.releaseOutputBuffer(index, render);
207 |
208 | mAvailableOutputBufferIndices.removeFirst();
209 | mAvailableOutputBufferInfos.removeFirst();
210 | return true;
211 | }
212 | }
213 |
/**
 * Immutable pairing of an encoded video frame with its intended render
 * timestamp in microseconds. Fields are final: instances are handed across
 * threads via ViEMediaCodecDecoder's frame queue and must not change.
 */
class Frame {
    public final ByteBuffer mBuffer;
    public final long mTimeStampUs;

    Frame(ByteBuffer buffer, long timeStampUs) {
        mBuffer = buffer;
        mTimeStampUs = timeStampUs;
    }
}
223 |
224 | class ViEMediaCodecDecoder {
225 | private static final String TAG = "ViEMediaCodecDecoder";
226 |
227 | private MediaExtractor mExtractor;
228 |
229 | private CodecState mCodecState;
230 |
231 | private int mState;
232 | private static final int STATE_IDLE = 1;
233 | private static final int STATE_PREPARING = 2;
234 | private static final int STATE_PLAYING = 3;
235 | private static final int STATE_PAUSED = 4;
236 |
237 | private Handler mHandler;
238 | private static final int EVENT_PREPARE = 1;
239 | private static final int EVENT_DO_SOME_WORK = 2;
240 |
241 | private long mDeltaTimeUs;
242 | private long mDurationUs;
243 |
244 | private SurfaceView mSurfaceView;
245 | private LinkedList mFrameQueue = new LinkedList();
246 |
247 | private Thread mLooperThread;
248 |
249 | public boolean configure(SurfaceView surfaceView, int width, int height) {
250 | mSurfaceView = surfaceView;
251 | Log.d(TAG, "configure " + "width" + width + "height" + height + mSurfaceView.toString());
252 |
253 | MediaFormat format = new MediaFormat();
254 | format.setString(MediaFormat.KEY_MIME, "video/x-vnd.on2.vp8");
255 | format.setInteger(MediaFormat.KEY_WIDTH, width);
256 | format.setInteger(MediaFormat.KEY_HEIGHT, height);
257 |
258 | Surface surface = mSurfaceView.getHolder().getSurface();
259 | Log.d(TAG, "Surface " + surface.isValid());
260 | MediaCodec codec;
261 | try {
262 | codec = MediaCodec.createDecoderByType("video/x-vnd.on2.vp8");
263 | // SW VP8 decoder
264 | // codec = MediaCodec.createByCodecName("OMX.google.vpx.decoder");
265 | // Nexus10 HW VP8 decoder
266 | // codec = MediaCodec.createByCodecName("OMX.Exynos.VP8.Decoder");
267 | } catch (Exception e) {
268 | // TODO(dwkang): replace this instanceof/throw with a narrower catch clause
269 | // once the SDK advances.
270 | if (e instanceof IOException) {
271 | Log.e(TAG, "Failed to create MediaCodec for VP8.", e);
272 | return false;
273 | }
274 | throw new RuntimeException(e);
275 | }
276 |
277 | codec.configure(format, surface, null, 0);
278 | mCodecState = new CodecState(this, format, codec);
279 | initMediaCodecView();
280 | return true;
281 | }
282 |
283 | public void setEncodedImage(ByteBuffer buffer, long renderTimeMs) {
284 | // TODO(dwkang): figure out why exceptions just make this thread finish.
285 | try {
286 | final long renderTimeUs = renderTimeMs * 1000;
287 | ByteBuffer buf = ByteBuffer.allocate(buffer.capacity());
288 | buf.put(buffer);
289 | buf.rewind();
290 | synchronized(mFrameQueue) {
291 | mFrameQueue.add(new Frame(buf, renderTimeUs));
292 | }
293 | } catch (Exception e) {
294 | e.printStackTrace();
295 | }
296 | }
297 |
298 | public boolean hasFrame() {
299 | synchronized(mFrameQueue) {
300 | return !mFrameQueue.isEmpty();
301 | }
302 | }
303 |
304 | public Frame dequeueFrame() {
305 | synchronized(mFrameQueue) {
306 | return mFrameQueue.removeFirst();
307 | }
308 | }
309 |
310 | private void initMediaCodecView() {
311 | Log.d(TAG, "initMediaCodecView");
312 | mState = STATE_IDLE;
313 |
314 | mLooperThread = new Thread()
315 | {
316 | @Override
317 | public void run() {
318 | Log.d(TAG, "Looper prepare");
319 | Looper.prepare();
320 | mHandler = new Handler() {
321 | @Override
322 | public void handleMessage(Message msg) {
323 | // TODO(dwkang): figure out exceptions just make this thread finish.
324 | try {
325 | switch (msg.what) {
326 | case EVENT_PREPARE:
327 | {
328 | mState = STATE_PAUSED;
329 | ViEMediaCodecDecoder.this.start();
330 | break;
331 | }
332 |
333 | case EVENT_DO_SOME_WORK:
334 | {
335 | ViEMediaCodecDecoder.this.doSomeWork();
336 |
337 | mHandler.sendMessageDelayed(
338 | mHandler.obtainMessage(EVENT_DO_SOME_WORK), 5);
339 | break;
340 | }
341 |
342 | default:
343 | break;
344 | }
345 | } catch (Exception e) {
346 | e.printStackTrace();
347 | }
348 | }
349 | };
350 | Log.d(TAG, "Looper loop");
351 | synchronized(ViEMediaCodecDecoder.this) {
352 | ViEMediaCodecDecoder.this.notify();
353 | }
354 | Looper.loop();
355 | }
356 | };
357 | mLooperThread.start();
358 |
359 | // Wait until handler is set up.
360 | synchronized(ViEMediaCodecDecoder.this) {
361 | try {
362 | ViEMediaCodecDecoder.this.wait(1000);
363 | } catch (InterruptedException e) {
364 | e.printStackTrace();
365 | }
366 | }
367 | Log.d(TAG, "initMediaCodecView end");
368 | }
369 |
370 | public void start() {
371 | Log.d(TAG, "start");
372 |
373 | if (mState == STATE_PLAYING || mState == STATE_PREPARING) {
374 | return;
375 | } else if (mState == STATE_IDLE) {
376 | mState = STATE_PREPARING;
377 | Log.d(TAG, "Sending EVENT_PREPARE");
378 | mHandler.sendMessage(mHandler.obtainMessage(EVENT_PREPARE));
379 | return;
380 | } else if (mState != STATE_PAUSED) {
381 | throw new IllegalStateException();
382 | }
383 |
384 | mCodecState.start();
385 |
386 | mHandler.sendMessage(mHandler.obtainMessage(EVENT_DO_SOME_WORK));
387 |
388 | mDeltaTimeUs = -1;
389 | mState = STATE_PLAYING;
390 |
391 | Log.d(TAG, "start end");
392 | }
393 |
394 | public void reset() {
395 | if (mState == STATE_PLAYING) {
396 | mCodecState.pause();
397 | }
398 |
399 | mCodecState.release();
400 |
401 | mDurationUs = -1;
402 | mState = STATE_IDLE;
403 | }
404 |
405 | private void doSomeWork() {
406 | mCodecState.doSomeWork();
407 | }
408 |
409 | public long getRealTimeUsForMediaTime(long mediaTimeUs) {
410 | if (mDeltaTimeUs == -1) {
411 | long nowUs = System.currentTimeMillis() * 1000;
412 | mDeltaTimeUs = nowUs - mediaTimeUs;
413 | }
414 |
415 | return mDeltaTimeUs + mediaTimeUs;
416 | }
417 | }
418 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/modules/video_render/android/java/src/org/webrtc/videoengine/ViEAndroidGLES20.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | package org.webrtc.videoengine;
12 |
13 | import java.util.concurrent.locks.ReentrantLock;
14 |
15 | import javax.microedition.khronos.egl.EGL10;
16 | import javax.microedition.khronos.egl.EGLConfig;
17 | import javax.microedition.khronos.egl.EGLContext;
18 | import javax.microedition.khronos.egl.EGLDisplay;
19 | import javax.microedition.khronos.opengles.GL10;
20 |
21 | import android.app.ActivityManager;
22 | import android.content.Context;
23 | import android.content.pm.ConfigurationInfo;
24 | import android.graphics.PixelFormat;
25 | import android.opengl.GLSurfaceView;
26 | import android.util.Log;
27 |
28 | public class ViEAndroidGLES20 extends GLSurfaceView
29 | implements GLSurfaceView.Renderer {
30 | private static String TAG = "WEBRTC-JR";
31 | private static final boolean DEBUG = false;
32 | // True if onSurfaceCreated has been called.
33 | private boolean surfaceCreated = false;
34 | private boolean openGLCreated = false;
35 | // True if NativeFunctionsRegistered has been called.
36 | private boolean nativeFunctionsRegisted = false;
37 | private ReentrantLock nativeFunctionLock = new ReentrantLock();
38 | // Address of Native object that will do the drawing.
39 | private long nativeObject = 0;
40 | private int viewWidth = 0;
41 | private int viewHeight = 0;
42 |
43 | public static boolean UseOpenGL2(Object renderWindow) {
44 | return ViEAndroidGLES20.class.isInstance(renderWindow);
45 | }
46 |
47 | public ViEAndroidGLES20(Context context) {
48 | super(context);
49 | init(false, 0, 0);
50 | }
51 |
52 | public ViEAndroidGLES20(Context context, boolean translucent,
53 | int depth, int stencil) {
54 | super(context);
55 | init(translucent, depth, stencil);
56 | }
57 |
58 | private void init(boolean translucent, int depth, int stencil) {
59 |
60 | // By default, GLSurfaceView() creates a RGB_565 opaque surface.
61 | // If we want a translucent one, we should change the surface's
62 | // format here, using PixelFormat.TRANSLUCENT for GL Surfaces
63 | // is interpreted as any 32-bit surface with alpha by SurfaceFlinger.
64 | if (translucent) {
65 | this.getHolder().setFormat(PixelFormat.TRANSLUCENT);
66 | }
67 |
68 | // Setup the context factory for 2.0 rendering.
69 | // See ContextFactory class definition below
70 | setEGLContextFactory(new ContextFactory());
71 |
72 | // We need to choose an EGLConfig that matches the format of
73 | // our surface exactly. This is going to be done in our
74 | // custom config chooser. See ConfigChooser class definition
75 | // below.
76 | setEGLConfigChooser( translucent ?
77 | new ConfigChooser(8, 8, 8, 8, depth, stencil) :
78 | new ConfigChooser(5, 6, 5, 0, depth, stencil) );
79 |
80 | // Set the renderer responsible for frame rendering
81 | this.setRenderer(this);
82 | this.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
83 | }
84 |
85 | private static class ContextFactory implements GLSurfaceView.EGLContextFactory {
86 | private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
87 | public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
88 | Log.w(TAG, "creating OpenGL ES 2.0 context");
89 | checkEglError("Before eglCreateContext", egl);
90 | int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
91 | EGLContext context = egl.eglCreateContext(display, eglConfig,
92 | EGL10.EGL_NO_CONTEXT, attrib_list);
93 | checkEglError("After eglCreateContext", egl);
94 | return context;
95 | }
96 |
97 | public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
98 | egl.eglDestroyContext(display, context);
99 | }
100 | }
101 |
102 | private static void checkEglError(String prompt, EGL10 egl) {
103 | int error;
104 | while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
105 | Log.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error));
106 | }
107 | }
108 |
109 | private static class ConfigChooser implements GLSurfaceView.EGLConfigChooser {
110 |
111 | public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
112 | mRedSize = r;
113 | mGreenSize = g;
114 | mBlueSize = b;
115 | mAlphaSize = a;
116 | mDepthSize = depth;
117 | mStencilSize = stencil;
118 | }
119 |
120 | // This EGL config specification is used to specify 2.0 rendering.
121 | // We use a minimum size of 4 bits for red/green/blue, but will
122 | // perform actual matching in chooseConfig() below.
123 | private static int EGL_OPENGL_ES2_BIT = 4;
124 | private static int[] s_configAttribs2 =
125 | {
126 | EGL10.EGL_RED_SIZE, 4,
127 | EGL10.EGL_GREEN_SIZE, 4,
128 | EGL10.EGL_BLUE_SIZE, 4,
129 | EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
130 | EGL10.EGL_NONE
131 | };
132 |
133 | public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
134 |
135 | // Get the number of minimally matching EGL configurations
136 | int[] num_config = new int[1];
137 | egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);
138 |
139 | int numConfigs = num_config[0];
140 |
141 | if (numConfigs <= 0) {
142 | throw new IllegalArgumentException("No configs match configSpec");
143 | }
144 |
145 | // Allocate then read the array of minimally matching EGL configs
146 | EGLConfig[] configs = new EGLConfig[numConfigs];
147 | egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs, num_config);
148 |
149 | if (DEBUG) {
150 | printConfigs(egl, display, configs);
151 | }
152 | // Now return the "best" one
153 | return chooseConfig(egl, display, configs);
154 | }
155 |
156 | public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
157 | EGLConfig[] configs) {
158 | for(EGLConfig config : configs) {
159 | int d = findConfigAttrib(egl, display, config,
160 | EGL10.EGL_DEPTH_SIZE, 0);
161 | int s = findConfigAttrib(egl, display, config,
162 | EGL10.EGL_STENCIL_SIZE, 0);
163 |
164 | // We need at least mDepthSize and mStencilSize bits
165 | if (d < mDepthSize || s < mStencilSize)
166 | continue;
167 |
168 | // We want an *exact* match for red/green/blue/alpha
169 | int r = findConfigAttrib(egl, display, config,
170 | EGL10.EGL_RED_SIZE, 0);
171 | int g = findConfigAttrib(egl, display, config,
172 | EGL10.EGL_GREEN_SIZE, 0);
173 | int b = findConfigAttrib(egl, display, config,
174 | EGL10.EGL_BLUE_SIZE, 0);
175 | int a = findConfigAttrib(egl, display, config,
176 | EGL10.EGL_ALPHA_SIZE, 0);
177 |
178 | if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize)
179 | return config;
180 | }
181 | return null;
182 | }
183 |
184 | private int findConfigAttrib(EGL10 egl, EGLDisplay display,
185 | EGLConfig config, int attribute, int defaultValue) {
186 |
187 | if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
188 | return mValue[0];
189 | }
190 | return defaultValue;
191 | }
192 |
193 | private void printConfigs(EGL10 egl, EGLDisplay display,
194 | EGLConfig[] configs) {
195 | int numConfigs = configs.length;
196 | Log.w(TAG, String.format("%d configurations", numConfigs));
197 | for (int i = 0; i < numConfigs; i++) {
198 | Log.w(TAG, String.format("Configuration %d:\n", i));
199 | printConfig(egl, display, configs[i]);
200 | }
201 | }
202 |
203 | private void printConfig(EGL10 egl, EGLDisplay display,
204 | EGLConfig config) {
205 | int[] attributes = {
206 | EGL10.EGL_BUFFER_SIZE,
207 | EGL10.EGL_ALPHA_SIZE,
208 | EGL10.EGL_BLUE_SIZE,
209 | EGL10.EGL_GREEN_SIZE,
210 | EGL10.EGL_RED_SIZE,
211 | EGL10.EGL_DEPTH_SIZE,
212 | EGL10.EGL_STENCIL_SIZE,
213 | EGL10.EGL_CONFIG_CAVEAT,
214 | EGL10.EGL_CONFIG_ID,
215 | EGL10.EGL_LEVEL,
216 | EGL10.EGL_MAX_PBUFFER_HEIGHT,
217 | EGL10.EGL_MAX_PBUFFER_PIXELS,
218 | EGL10.EGL_MAX_PBUFFER_WIDTH,
219 | EGL10.EGL_NATIVE_RENDERABLE,
220 | EGL10.EGL_NATIVE_VISUAL_ID,
221 | EGL10.EGL_NATIVE_VISUAL_TYPE,
222 | 0x3030, // EGL10.EGL_PRESERVED_RESOURCES,
223 | EGL10.EGL_SAMPLES,
224 | EGL10.EGL_SAMPLE_BUFFERS,
225 | EGL10.EGL_SURFACE_TYPE,
226 | EGL10.EGL_TRANSPARENT_TYPE,
227 | EGL10.EGL_TRANSPARENT_RED_VALUE,
228 | EGL10.EGL_TRANSPARENT_GREEN_VALUE,
229 | EGL10.EGL_TRANSPARENT_BLUE_VALUE,
230 | 0x3039, // EGL10.EGL_BIND_TO_TEXTURE_RGB,
231 | 0x303A, // EGL10.EGL_BIND_TO_TEXTURE_RGBA,
232 | 0x303B, // EGL10.EGL_MIN_SWAP_INTERVAL,
233 | 0x303C, // EGL10.EGL_MAX_SWAP_INTERVAL,
234 | EGL10.EGL_LUMINANCE_SIZE,
235 | EGL10.EGL_ALPHA_MASK_SIZE,
236 | EGL10.EGL_COLOR_BUFFER_TYPE,
237 | EGL10.EGL_RENDERABLE_TYPE,
238 | 0x3042 // EGL10.EGL_CONFORMANT
239 | };
240 | String[] names = {
241 | "EGL_BUFFER_SIZE",
242 | "EGL_ALPHA_SIZE",
243 | "EGL_BLUE_SIZE",
244 | "EGL_GREEN_SIZE",
245 | "EGL_RED_SIZE",
246 | "EGL_DEPTH_SIZE",
247 | "EGL_STENCIL_SIZE",
248 | "EGL_CONFIG_CAVEAT",
249 | "EGL_CONFIG_ID",
250 | "EGL_LEVEL",
251 | "EGL_MAX_PBUFFER_HEIGHT",
252 | "EGL_MAX_PBUFFER_PIXELS",
253 | "EGL_MAX_PBUFFER_WIDTH",
254 | "EGL_NATIVE_RENDERABLE",
255 | "EGL_NATIVE_VISUAL_ID",
256 | "EGL_NATIVE_VISUAL_TYPE",
257 | "EGL_PRESERVED_RESOURCES",
258 | "EGL_SAMPLES",
259 | "EGL_SAMPLE_BUFFERS",
260 | "EGL_SURFACE_TYPE",
261 | "EGL_TRANSPARENT_TYPE",
262 | "EGL_TRANSPARENT_RED_VALUE",
263 | "EGL_TRANSPARENT_GREEN_VALUE",
264 | "EGL_TRANSPARENT_BLUE_VALUE",
265 | "EGL_BIND_TO_TEXTURE_RGB",
266 | "EGL_BIND_TO_TEXTURE_RGBA",
267 | "EGL_MIN_SWAP_INTERVAL",
268 | "EGL_MAX_SWAP_INTERVAL",
269 | "EGL_LUMINANCE_SIZE",
270 | "EGL_ALPHA_MASK_SIZE",
271 | "EGL_COLOR_BUFFER_TYPE",
272 | "EGL_RENDERABLE_TYPE",
273 | "EGL_CONFORMANT"
274 | };
275 | int[] value = new int[1];
276 | for (int i = 0; i < attributes.length; i++) {
277 | int attribute = attributes[i];
278 | String name = names[i];
279 | if (egl.eglGetConfigAttrib(display, config, attribute, value)) {
280 | Log.w(TAG, String.format(" %s: %d\n", name, value[0]));
281 | } else {
282 | // Log.w(TAG, String.format(" %s: failed\n", name));
283 | while (egl.eglGetError() != EGL10.EGL_SUCCESS);
284 | }
285 | }
286 | }
287 |
288 | // Subclasses can adjust these values:
289 | protected int mRedSize;
290 | protected int mGreenSize;
291 | protected int mBlueSize;
292 | protected int mAlphaSize;
293 | protected int mDepthSize;
294 | protected int mStencilSize;
295 | private int[] mValue = new int[1];
296 | }
297 |
298 | // IsSupported
299 | // Return true if this device support Open GL ES 2.0 rendering.
300 | public static boolean IsSupported(Context context) {
301 | ActivityManager am =
302 | (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
303 | ConfigurationInfo info = am.getDeviceConfigurationInfo();
304 | if(info.reqGlEsVersion >= 0x20000) {
305 | // Open GL ES 2.0 is supported.
306 | return true;
307 | }
308 | return false;
309 | }
310 |
311 | public void onDrawFrame(GL10 gl) {
312 | nativeFunctionLock.lock();
313 | if(!nativeFunctionsRegisted || !surfaceCreated) {
314 | nativeFunctionLock.unlock();
315 | return;
316 | }
317 |
318 | if(!openGLCreated) {
319 | if(0 != CreateOpenGLNative(nativeObject, viewWidth, viewHeight)) {
320 | return; // Failed to create OpenGL
321 | }
322 | openGLCreated = true; // Created OpenGL successfully
323 | }
324 | DrawNative(nativeObject); // Draw the new frame
325 | nativeFunctionLock.unlock();
326 | }
327 |
328 | public void onSurfaceChanged(GL10 gl, int width, int height) {
329 | surfaceCreated = true;
330 | viewWidth = width;
331 | viewHeight = height;
332 |
333 | nativeFunctionLock.lock();
334 | if(nativeFunctionsRegisted) {
335 | if(CreateOpenGLNative(nativeObject,width,height) == 0)
336 | openGLCreated = true;
337 | }
338 | nativeFunctionLock.unlock();
339 | }
340 |
341 | public void onSurfaceCreated(GL10 gl, EGLConfig config) {
342 | }
343 |
344 | public void RegisterNativeObject(long nativeObject) {
345 | nativeFunctionLock.lock();
346 | this.nativeObject = nativeObject;
347 | nativeFunctionsRegisted = true;
348 | nativeFunctionLock.unlock();
349 | }
350 |
351 | public void DeRegisterNativeObject() {
352 | nativeFunctionLock.lock();
353 | nativeFunctionsRegisted = false;
354 | openGLCreated = false;
355 | this.nativeObject = 0;
356 | nativeFunctionLock.unlock();
357 | }
358 |
359 | public void ReDraw() {
360 | if(surfaceCreated) {
361 | // Request the renderer to redraw using the render thread context.
362 | this.requestRender();
363 | }
364 | }
365 |
366 | private native int CreateOpenGLNative(long nativeObject,
367 | int width, int height);
368 | private native void DrawNative(long nativeObject);
369 |
370 | }
371 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/video_engine/test/android/jni/org_webrtc_videoengineapp_vie_android_java_api.h:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | /* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
13 | /* Header for class org_webrtc_videoengineapp_ViEAndroidJavaAPI */
14 |
15 | #ifndef _Included_org_webrtc_videoengineapp_ViEAndroidJavaAPI
16 | #define _Included_org_webrtc_videoengineapp_ViEAndroidJavaAPI
17 | #ifdef __cplusplus
18 | extern "C" {
19 | #endif
20 | /*
21 | * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
22 | * Method: NativeInit
23 | * Signature: (Landroid/content/Context;)Z
24 | */
25 | JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_NativeInit
26 | (JNIEnv *, jobject, jobject);
27 |
28 | /*
29 | * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
30 | * Method: GetVideoEngine
31 | * Signature: ()I
32 | */
33 | JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetVideoEngine
34 | (JNIEnv *, jobject);
35 |
36 | /*
37 | * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
38 | * Method: Init
39 | * Signature: (Z)I
40 | */
41 | JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Init
42 | (JNIEnv *, jobject, jboolean);
43 |
44 | /*
45 | * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
46 | * Method: Terminate
47 | * Signature: ()I
48 | */
49 | JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Terminate
50 | (JNIEnv *, jobject);
51 |
52 | /*
53 | * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
54 | * Method: StartSend
55 | * Signature: (I)I
56 | */
57 | JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartSend
58 | (JNIEnv *, jobject, jint);
59 |
60 | /*
61 | * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
62 | * Method: StopRender
63 | * Signature: (I)I
64 | */
65 | JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopRender
66 | (JNIEnv *, jobject, jint);
67 |
68 | /*
69 | * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
70 | * Method: StopSend
71 | * Signature: (I)I
72 | */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopSend
  (JNIEnv *, jobject, jint);

/*
 * NOTE(review): the declarations below appear to be machine-generated by
 * javah from org.webrtc.videoengineapp.ViEAndroidJavaAPI (the Class/Method/
 * Signature stanzas match javah output) -- TODO confirm; if so, regenerate
 * this header from the Java class instead of hand-editing it.
 *
 * Each "Signature" comment is the JNI type descriptor of the corresponding
 * Java native method.  The exported symbol encodes the fully qualified class
 * and method name; "_1" is the JNI escape for a literal '_' in a Java method
 * name (e.g. Java_..._VoE_1Create  <->  Java method VoE_Create).
 */

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    StartReceive
 * Signature: (I)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartReceive
  (JNIEnv *, jobject, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    StopReceive
 * Signature: (I)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopReceive
  (JNIEnv *, jobject, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    CreateChannel
 * Signature: (I)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_CreateChannel
  (JNIEnv *, jobject, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    SetLocalReceiver
 * Signature: (II)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetLocalReceiver
  (JNIEnv *, jobject, jint, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    SetSendDestination
 * Signature: (IILjava/lang/String;)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetSendDestination
  (JNIEnv *, jobject, jint, jint, jstring);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    GetCodecs
 * Signature: ()[Ljava/lang/String;
 */
JNIEXPORT jobjectArray JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetCodecs
  (JNIEnv *, jobject);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    SetReceiveCodec
 * Signature: (IIIIII)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetReceiveCodec
  (JNIEnv *, jobject, jint, jint, jint, jint, jint, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    SetSendCodec
 * Signature: (IIIIII)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetSendCodec
  (JNIEnv *, jobject, jint, jint, jint, jint, jint, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    AddRemoteRenderer
 * Signature: (ILjava/lang/Object;)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_AddRemoteRenderer
  (JNIEnv *, jobject, jint, jobject);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    RemoveRemoteRenderer
 * Signature: (I)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_RemoveRemoteRenderer
  (JNIEnv *, jobject, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    StartRender
 * Signature: (I)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartRender
  (JNIEnv *, jobject, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    StartCamera
 * Signature: (II)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartCamera
  (JNIEnv *, jobject, jint, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    StopCamera
 * Signature: (I)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopCamera
  (JNIEnv *, jobject, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    GetCameraOrientation
 * Signature: (I)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetCameraOrientation
  (JNIEnv *, jobject, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    SetRotation
 * Signature: (II)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetRotation
  (JNIEnv *, jobject, jint, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    SetExternalMediaCodecDecoderRenderer
 * Signature: (ILjava/lang/Object;)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetExternalMediaCodecDecoderRenderer
  (JNIEnv *, jobject, jint, jobject);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    EnableNACK
 * Signature: (IZ)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_EnableNACK
  (JNIEnv *, jobject, jint, jboolean);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    EnablePLI
 * Signature: (IZ)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_EnablePLI
  (JNIEnv *, jobject, jint, jboolean);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    SetCallback
 * Signature: (ILorg/webrtc/videoengineapp/IViEAndroidCallback;)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetCallback
  (JNIEnv *, jobject, jint, jobject);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    StartIncomingRTPDump
 * Signature: (ILjava/lang/String;)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartIncomingRTPDump
  (JNIEnv *, jobject, jint, jstring);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    StopIncomingRTPDump
 * Signature: (I)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopIncomingRTPDump
  (JNIEnv *, jobject, jint);

/* ---- Voice-engine (VoE_*) entry points below; the ViE_* entries above are
 * the video engine.  Note ViE_DeleteChannel is declared in this group,
 * mirroring the Java class's method order. ---- */

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_Create
 * Signature: (Landroid/content/Context;)Z
 */
JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Create
  (JNIEnv *, jobject, jobject);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_Delete
 * Signature: ()Z
 */
JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Delete
  (JNIEnv *, jobject);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_Init
 * Signature: (Z)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Init
  (JNIEnv *, jobject, jboolean);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_Terminate
 * Signature: ()I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Terminate
  (JNIEnv *, jobject);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_CreateChannel
 * Signature: ()I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1CreateChannel
  (JNIEnv *, jobject);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_DeleteChannel
 * Signature: (I)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1DeleteChannel
  (JNIEnv *, jobject, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    ViE_DeleteChannel
 * Signature: (I)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_ViE_1DeleteChannel
  (JNIEnv *, jobject, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_SetLocalReceiver
 * Signature: (II)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetLocalReceiver
  (JNIEnv *, jobject, jint, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_SetSendDestination
 * Signature: (IILjava/lang/String;)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSendDestination
  (JNIEnv *, jobject, jint, jint, jstring);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_StartListen
 * Signature: (I)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartListen
  (JNIEnv *, jobject, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_StartPlayout
 * Signature: (I)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayout
  (JNIEnv *, jobject, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_StartSend
 * Signature: (I)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartSend
  (JNIEnv *, jobject, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_StopListen
 * Signature: (I)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopListen
  (JNIEnv *, jobject, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_StopPlayout
 * Signature: (I)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayout
  (JNIEnv *, jobject, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_StopSend
 * Signature: (I)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopSend
  (JNIEnv *, jobject, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_SetSpeakerVolume
 * Signature: (I)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSpeakerVolume
  (JNIEnv *, jobject, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_SetLoudspeakerStatus
 * Signature: (Z)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetLoudspeakerStatus
  (JNIEnv *, jobject, jboolean);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_StartPlayingFileLocally
 * Signature: (ILjava/lang/String;Z)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayingFileLocally
  (JNIEnv *, jobject, jint, jstring, jboolean);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_StopPlayingFileLocally
 * Signature: (I)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayingFileLocally
  (JNIEnv *, jobject, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_StartPlayingFileAsMicrophone
 * Signature: (ILjava/lang/String;Z)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayingFileAsMicrophone
  (JNIEnv *, jobject, jint, jstring, jboolean);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_StopPlayingFileAsMicrophone
 * Signature: (I)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayingFileAsMicrophone
  (JNIEnv *, jobject, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_NumOfCodecs
 * Signature: ()I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1NumOfCodecs
  (JNIEnv *, jobject);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_GetCodecs
 * Signature: ()[Ljava/lang/String;
 */
JNIEXPORT jobjectArray JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1GetCodecs
  (JNIEnv *, jobject);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_SetSendCodec
 * Signature: (II)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSendCodec
  (JNIEnv *, jobject, jint, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_SetECStatus
 * Signature: (Z)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetECStatus
  (JNIEnv *, jobject, jboolean);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_SetAGCStatus
 * Signature: (Z)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetAGCStatus
  (JNIEnv *, jobject, jboolean);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_SetNSStatus
 * Signature: (Z)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetNSStatus
  (JNIEnv *, jobject, jboolean);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_StartDebugRecording
 * Signature: (Ljava/lang/String;)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartDebugRecording
  (JNIEnv *, jobject, jstring);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_StopDebugRecording
 * Signature: ()I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopDebugRecording
  (JNIEnv *, jobject);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_StartIncomingRTPDump
 * Signature: (ILjava/lang/String;)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartIncomingRTPDump
  (JNIEnv *, jobject, jint, jstring);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_StopIncomingRTPDump
 * Signature: (I)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopIncomingRTPDump
  (JNIEnv *, jobject, jint);

#ifdef __cplusplus
}
#endif
#endif
496 |
--------------------------------------------------------------------------------
/WebRtc.Mono.Droid/src/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRTCAudioDevice.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree. An additional intellectual property rights grant can be found
7 | * in the file PATENTS. All contributing project authors may
8 | * be found in the AUTHORS file in the root of the source tree.
9 | */
10 |
11 | package org.webrtc.voiceengine;
12 |
13 | import java.nio.ByteBuffer;
14 | import java.util.concurrent.locks.ReentrantLock;
15 |
16 | import android.content.Context;
17 | import android.media.AudioFormat;
18 | import android.media.AudioManager;
19 | import android.media.AudioRecord;
20 | import android.media.AudioTrack;
21 | import android.util.Log;
22 |
23 | class WebRTCAudioDevice {
24 | private AudioTrack _audioTrack = null;
25 | private AudioRecord _audioRecord = null;
26 |
27 | private Context _context;
28 | private AudioManager _audioManager;
29 |
30 | private ByteBuffer _playBuffer;
31 | private ByteBuffer _recBuffer;
32 | private byte[] _tempBufPlay;
33 | private byte[] _tempBufRec;
34 |
35 | private final ReentrantLock _playLock = new ReentrantLock();
36 | private final ReentrantLock _recLock = new ReentrantLock();
37 |
38 | private boolean _doPlayInit = true;
39 | private boolean _doRecInit = true;
40 | private boolean _isRecording = false;
41 | private boolean _isPlaying = false;
42 |
43 | private int _bufferedRecSamples = 0;
44 | private int _bufferedPlaySamples = 0;
45 | private int _playPosition = 0;
46 |
47 | WebRTCAudioDevice() {
48 | try {
49 | _playBuffer = ByteBuffer.allocateDirect(2 * 480); // Max 10 ms @ 48
50 | // kHz
51 | _recBuffer = ByteBuffer.allocateDirect(2 * 480); // Max 10 ms @ 48
52 | // kHz
53 | } catch (Exception e) {
54 | DoLog(e.getMessage());
55 | }
56 |
57 | _tempBufPlay = new byte[2 * 480];
58 | _tempBufRec = new byte[2 * 480];
59 | }
60 |
61 | @SuppressWarnings("unused")
62 | private int InitRecording(int audioSource, int sampleRate) {
63 | // get the minimum buffer size that can be used
64 | int minRecBufSize = AudioRecord.getMinBufferSize(
65 | sampleRate,
66 | AudioFormat.CHANNEL_IN_MONO,
67 | AudioFormat.ENCODING_PCM_16BIT);
68 |
69 | // DoLog("min rec buf size is " + minRecBufSize);
70 |
71 | // double size to be more safe
72 | int recBufSize = minRecBufSize * 2;
73 | _bufferedRecSamples = (5 * sampleRate) / 200;
74 | // DoLog("rough rec delay set to " + _bufferedRecSamples);
75 |
76 | // release the object
77 | if (_audioRecord != null) {
78 | _audioRecord.release();
79 | _audioRecord = null;
80 | }
81 |
82 | try {
83 | _audioRecord = new AudioRecord(
84 | audioSource,
85 | sampleRate,
86 | AudioFormat.CHANNEL_IN_MONO,
87 | AudioFormat.ENCODING_PCM_16BIT,
88 | recBufSize);
89 |
90 | } catch (Exception e) {
91 | DoLog(e.getMessage());
92 | return -1;
93 | }
94 |
95 | // check that the audioRecord is ready to be used
96 | if (_audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
97 | // DoLog("rec not initialized " + sampleRate);
98 | return -1;
99 | }
100 |
101 | // DoLog("rec sample rate set to " + sampleRate);
102 |
103 | return _bufferedRecSamples;
104 | }
105 |
106 | @SuppressWarnings("unused")
107 | private int StartRecording() {
108 | if (_isPlaying == false) {
109 | SetAudioMode(true);
110 | }
111 |
112 | // start recording
113 | try {
114 | _audioRecord.startRecording();
115 |
116 | } catch (IllegalStateException e) {
117 | e.printStackTrace();
118 | return -1;
119 | }
120 |
121 | _isRecording = true;
122 | return 0;
123 | }
124 |
125 | @SuppressWarnings("unused")
126 | private int InitPlayback(int sampleRate) {
127 | // get the minimum buffer size that can be used
128 | int minPlayBufSize = AudioTrack.getMinBufferSize(
129 | sampleRate,
130 | AudioFormat.CHANNEL_OUT_MONO,
131 | AudioFormat.ENCODING_PCM_16BIT);
132 |
133 | // DoLog("min play buf size is " + minPlayBufSize);
134 |
135 | int playBufSize = minPlayBufSize;
136 | if (playBufSize < 6000) {
137 | playBufSize *= 2;
138 | }
139 | _bufferedPlaySamples = 0;
140 | // DoLog("play buf size is " + playBufSize);
141 |
142 | // release the object
143 | if (_audioTrack != null) {
144 | _audioTrack.release();
145 | _audioTrack = null;
146 | }
147 |
148 | try {
149 | _audioTrack = new AudioTrack(
150 | AudioManager.STREAM_VOICE_CALL,
151 | sampleRate,
152 | AudioFormat.CHANNEL_OUT_MONO,
153 | AudioFormat.ENCODING_PCM_16BIT,
154 | playBufSize, AudioTrack.MODE_STREAM);
155 | } catch (Exception e) {
156 | DoLog(e.getMessage());
157 | return -1;
158 | }
159 |
160 | // check that the audioRecord is ready to be used
161 | if (_audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
162 | // DoLog("play not initialized " + sampleRate);
163 | return -1;
164 | }
165 |
166 | // DoLog("play sample rate set to " + sampleRate);
167 |
168 | if (_audioManager == null && _context != null) {
169 | _audioManager = (AudioManager)
170 | _context.getSystemService(Context.AUDIO_SERVICE);
171 | }
172 |
173 | // Return max playout volume
174 | if (_audioManager == null) {
175 | // Don't know the max volume but still init is OK for playout,
176 | // so we should not return error.
177 | return 0;
178 | }
179 | return _audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL);
180 | }
181 |
182 | @SuppressWarnings("unused")
183 | private int StartPlayback() {
184 | if (_isRecording == false) {
185 | SetAudioMode(true);
186 | }
187 |
188 | // start playout
189 | try {
190 | _audioTrack.play();
191 |
192 | } catch (IllegalStateException e) {
193 | e.printStackTrace();
194 | return -1;
195 | }
196 |
197 | _isPlaying = true;
198 | return 0;
199 | }
200 |
201 | @SuppressWarnings("unused")
202 | private int StopRecording() {
203 | _recLock.lock();
204 | try {
205 | // only stop if we are recording
206 | if (_audioRecord.getRecordingState() ==
207 | AudioRecord.RECORDSTATE_RECORDING) {
208 | // stop recording
209 | try {
210 | _audioRecord.stop();
211 | } catch (IllegalStateException e) {
212 | e.printStackTrace();
213 | return -1;
214 | }
215 | }
216 |
217 | // release the object
218 | _audioRecord.release();
219 | _audioRecord = null;
220 |
221 | } finally {
222 | // Ensure we always unlock, both for success, exception or error
223 | // return.
224 | _doRecInit = true;
225 | _recLock.unlock();
226 | }
227 |
228 | if (_isPlaying == false) {
229 | SetAudioMode(false);
230 | }
231 |
232 | _isRecording = false;
233 | return 0;
234 | }
235 |
236 | @SuppressWarnings("unused")
237 | private int StopPlayback() {
238 | _playLock.lock();
239 | try {
240 | // only stop if we are playing
241 | if (_audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
242 | // stop playout
243 | try {
244 | _audioTrack.stop();
245 | } catch (IllegalStateException e) {
246 | e.printStackTrace();
247 | return -1;
248 | }
249 |
250 | // flush the buffers
251 | _audioTrack.flush();
252 | }
253 |
254 | // release the object
255 | _audioTrack.release();
256 | _audioTrack = null;
257 |
258 | } finally {
259 | // Ensure we always unlock, both for success, exception or error
260 | // return.
261 | _doPlayInit = true;
262 | _playLock.unlock();
263 | }
264 |
265 | if (_isRecording == false) {
266 | SetAudioMode(false);
267 | }
268 |
269 | _isPlaying = false;
270 | return 0;
271 | }
272 |
273 | @SuppressWarnings("unused")
274 | private int PlayAudio(int lengthInBytes) {
275 |
276 | int bufferedSamples = 0;
277 |
278 | _playLock.lock();
279 | try {
280 | if (_audioTrack == null) {
281 | return -2; // We have probably closed down while waiting for
282 | // play lock
283 | }
284 |
285 | // Set priority, only do once
286 | if (_doPlayInit == true) {
287 | try {
288 | android.os.Process.setThreadPriority(
289 | android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
290 | } catch (Exception e) {
291 | DoLog("Set play thread priority failed: " + e.getMessage());
292 | }
293 | _doPlayInit = false;
294 | }
295 |
296 | int written = 0;
297 | _playBuffer.get(_tempBufPlay);
298 | written = _audioTrack.write(_tempBufPlay, 0, lengthInBytes);
299 | _playBuffer.rewind(); // Reset the position to start of buffer
300 |
301 | // DoLog("Wrote data to sndCard");
302 |
303 | // increase by number of written samples
304 | _bufferedPlaySamples += (written >> 1);
305 |
306 | // decrease by number of played samples
307 | int pos = _audioTrack.getPlaybackHeadPosition();
308 | if (pos < _playPosition) { // wrap or reset by driver
309 | _playPosition = 0; // reset
310 | }
311 | _bufferedPlaySamples -= (pos - _playPosition);
312 | _playPosition = pos;
313 |
314 | if (!_isRecording) {
315 | bufferedSamples = _bufferedPlaySamples;
316 | }
317 |
318 | if (written != lengthInBytes) {
319 | // DoLog("Could not write all data to sc (written = " + written
320 | // + ", length = " + lengthInBytes + ")");
321 | return -1;
322 | }
323 |
324 | } finally {
325 | // Ensure we always unlock, both for success, exception or error
326 | // return.
327 | _playLock.unlock();
328 | }
329 |
330 | return bufferedSamples;
331 | }
332 |
333 | @SuppressWarnings("unused")
334 | private int RecordAudio(int lengthInBytes) {
335 | _recLock.lock();
336 |
337 | try {
338 | if (_audioRecord == null) {
339 | return -2; // We have probably closed down while waiting for rec
340 | // lock
341 | }
342 |
343 | // Set priority, only do once
344 | if (_doRecInit == true) {
345 | try {
346 | android.os.Process.setThreadPriority(
347 | android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
348 | } catch (Exception e) {
349 | DoLog("Set rec thread priority failed: " + e.getMessage());
350 | }
351 | _doRecInit = false;
352 | }
353 |
354 | int readBytes = 0;
355 | _recBuffer.rewind(); // Reset the position to start of buffer
356 | readBytes = _audioRecord.read(_tempBufRec, 0, lengthInBytes);
357 | // DoLog("read " + readBytes + "from SC");
358 | _recBuffer.put(_tempBufRec);
359 |
360 | if (readBytes != lengthInBytes) {
361 | // DoLog("Could not read all data from sc (read = " + readBytes
362 | // + ", length = " + lengthInBytes + ")");
363 | return -1;
364 | }
365 |
366 | } catch (Exception e) {
367 | DoLogErr("RecordAudio try failed: " + e.getMessage());
368 |
369 | } finally {
370 | // Ensure we always unlock, both for success, exception or error
371 | // return.
372 | _recLock.unlock();
373 | }
374 |
375 | return (_bufferedPlaySamples);
376 | }
377 |
378 | @SuppressWarnings("unused")
379 | private int SetPlayoutSpeaker(boolean loudspeakerOn) {
380 | // create audio manager if needed
381 | if (_audioManager == null && _context != null) {
382 | _audioManager = (AudioManager)
383 | _context.getSystemService(Context.AUDIO_SERVICE);
384 | }
385 |
386 | if (_audioManager == null) {
387 | DoLogErr("Could not change audio routing - no audio manager");
388 | return -1;
389 | }
390 |
391 | int apiLevel = android.os.Build.VERSION.SDK_INT;
392 |
393 | if ((3 == apiLevel) || (4 == apiLevel)) {
394 | // 1.5 and 1.6 devices
395 | if (loudspeakerOn) {
396 | // route audio to back speaker
397 | _audioManager.setMode(AudioManager.MODE_NORMAL);
398 | } else {
399 | // route audio to earpiece
400 | _audioManager.setMode(AudioManager.MODE_IN_CALL);
401 | }
402 | } else {
403 | // 2.x devices
404 | if ((android.os.Build.BRAND.equals("Samsung") ||
405 | android.os.Build.BRAND.equals("samsung")) &&
406 | ((5 == apiLevel) || (6 == apiLevel) ||
407 | (7 == apiLevel))) {
408 | // Samsung 2.0, 2.0.1 and 2.1 devices
409 | if (loudspeakerOn) {
410 | // route audio to back speaker
411 | _audioManager.setMode(AudioManager.MODE_IN_CALL);
412 | _audioManager.setSpeakerphoneOn(loudspeakerOn);
413 | } else {
414 | // route audio to earpiece
415 | _audioManager.setSpeakerphoneOn(loudspeakerOn);
416 | _audioManager.setMode(AudioManager.MODE_NORMAL);
417 | }
418 | } else {
419 | // Non-Samsung and Samsung 2.2 and up devices
420 | _audioManager.setSpeakerphoneOn(loudspeakerOn);
421 | }
422 | }
423 |
424 | return 0;
425 | }
426 |
427 | @SuppressWarnings("unused")
428 | private int SetPlayoutVolume(int level) {
429 |
430 | // create audio manager if needed
431 | if (_audioManager == null && _context != null) {
432 | _audioManager = (AudioManager)
433 | _context.getSystemService(Context.AUDIO_SERVICE);
434 | }
435 |
436 | int retVal = -1;
437 |
438 | if (_audioManager != null) {
439 | _audioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL,
440 | level, 0);
441 | retVal = 0;
442 | }
443 |
444 | return retVal;
445 | }
446 |
447 | @SuppressWarnings("unused")
448 | private int GetPlayoutVolume() {
449 |
450 | // create audio manager if needed
451 | if (_audioManager == null && _context != null) {
452 | _audioManager = (AudioManager)
453 | _context.getSystemService(Context.AUDIO_SERVICE);
454 | }
455 |
456 | int level = -1;
457 |
458 | if (_audioManager != null) {
459 | level = _audioManager.getStreamVolume(
460 | AudioManager.STREAM_VOICE_CALL);
461 | }
462 |
463 | return level;
464 | }
465 |
466 | private void SetAudioMode(boolean startCall) {
467 | int apiLevel = android.os.Build.VERSION.SDK_INT;
468 |
469 | if (_audioManager == null && _context != null) {
470 | _audioManager = (AudioManager)
471 | _context.getSystemService(Context.AUDIO_SERVICE);
472 | }
473 |
474 | if (_audioManager == null) {
475 | DoLogErr("Could not set audio mode - no audio manager");
476 | return;
477 | }
478 |
479 | // ***IMPORTANT*** When the API level for honeycomb (H) has been
480 | // decided,
481 | // the condition should be changed to include API level 8 to H-1.
482 | if ((android.os.Build.BRAND.equals("Samsung") ||
483 | android.os.Build.BRAND.equals("samsung")) &&
484 | (8 == apiLevel)) {
485 | // Set Samsung specific VoIP mode for 2.2 devices
486 | // 4 is VoIP mode
487 | int mode = (startCall ? 4 : AudioManager.MODE_NORMAL);
488 | _audioManager.setMode(mode);
489 | if (_audioManager.getMode() != mode) {
490 | DoLogErr("Could not set audio mode for Samsung device");
491 | }
492 | }
493 | }
494 |
495 | final String logTag = "WebRTC AD java";
496 |
497 | private void DoLog(String msg) {
498 | Log.d(logTag, msg);
499 | }
500 |
501 | private void DoLogErr(String msg) {
502 | Log.e(logTag, msg);
503 | }
504 | }
505 |
--------------------------------------------------------------------------------