├── .gitattributes ├── .gitignore ├── README.md └── audiopublish ├── .classpath ├── .project ├── AndroidManifest.xml ├── _classpath ├── _project ├── bin ├── AndroidManifest.xml ├── MainActivity.apk ├── classes.dex ├── classes │ └── org │ │ └── mconf │ │ └── android │ │ └── core │ │ └── video │ │ ├── AudioPublish.class │ │ ├── BbbVideoPublisher.class │ │ ├── BbbVoiceConnection.class │ │ ├── BigBlueButton.class │ │ ├── BigBlueButtonClient.class │ │ ├── BuildConfig.class │ │ ├── CaptureConstants.class │ │ ├── CaptureDialog.class │ │ ├── Codec.class │ │ ├── CodecBase.class │ │ ├── MainActivity.class │ │ ├── MainRtmpConnection$1.class │ │ ├── MainRtmpConnection.class │ │ ├── NativeLibsLoader.class │ │ ├── R$attr.class │ │ ├── R$drawable.class │ │ ├── R$id.class │ │ ├── R$layout.class │ │ ├── R$menu.class │ │ ├── R$string.class │ │ ├── R$style.class │ │ ├── R.class │ │ ├── RtmpAudioPlayer.class │ │ ├── RtmpConnection.class │ │ ├── Speex.class │ │ ├── VideoCapture.class │ │ ├── VideoCaptureLayout.class │ │ ├── VideoCentering.class │ │ ├── VideoDialog.class │ │ ├── VideoPublish.class │ │ ├── VideoPublisherConnection$1.class │ │ ├── VideoPublisherConnection.class │ │ ├── VoiceConnection$1.class │ │ ├── VoiceConnection.class │ │ ├── VoiceInterface.class │ │ ├── VoiceOverRtmp$1.class │ │ └── VoiceOverRtmp.class ├── dexedLibs │ ├── android-support-v4-f2fd738ff1ccc9d1341f4dad0fd5b4e2.jar │ ├── flazr-79a83adedea3ea36db9b347bd0ad5e99.jar │ ├── jcl104-over-slf4j-1.4.2-0fe157f2a3643d7a091c93c7bcae7ff7.jar │ ├── log4j-1.2.14-65b3d5bd0b25e206151620761e5f5f4f.jar │ ├── netty-3.7.0.Final-2a4b5622d25ad1e61a3daa36b4a60355.jar │ ├── slf4j-api-1.4.2-22f5530b8e89f2a190e761bdd65d5245.jar │ └── slf4j-log4j12-1.4.2-5fffc80ab642dab2c634939e4b8544d4.jar ├── jarlist.cache ├── res │ ├── drawable-hdpi │ │ └── ic_launcher.png │ ├── drawable-ldpi │ │ └── ic_launcher.png │ ├── drawable-mdpi │ │ └── ic_launcher.png │ └── drawable-xhdpi │ │ └── ic_launcher.png └── resources.ap_ ├── gen └── org │ └── 
mconf │ └── android │ └── core │ └── video │ ├── BuildConfig.java │ └── R.java ├── ic_launcher-web.png ├── libs ├── android-support-v4.jar ├── armeabi │ ├── libavcodec.so │ ├── libavformat.so │ ├── libavutil.so │ ├── libcommon.so │ ├── libdecode.so │ ├── libencode.so │ ├── libmconfnativeencodevideo.so │ ├── libmconfnativeshowvideo.so │ ├── libqueue.so │ ├── libspeex_jni.so │ ├── libswscale.so │ └── libthread.so ├── flazr.jar ├── jcl104-over-slf4j-1.4.2.jar ├── log4j-1.2.14.jar ├── netty-3.7.0.Final.jar ├── slf4j-api-1.4.2.jar └── slf4j-log4j12-1.4.2.jar ├── proguard-project.txt ├── project.properties ├── res ├── drawable-hdpi │ └── ic_launcher.png ├── drawable-ldpi │ └── ic_launcher.png ├── drawable-mdpi │ └── ic_launcher.png ├── drawable-xhdpi │ └── ic_launcher.png ├── layout │ ├── activity_main.xml │ └── video_capture.xml ├── menu │ └── activity_main.xml ├── values-v11 │ └── styles.xml ├── values-v14 │ └── styles.xml └── values │ ├── strings.xml │ └── styles.xml └── src └── org └── mconf └── android └── core └── video ├── AudioPublish.java ├── BbbVideoPublisher.java ├── BbbVoiceConnection.java ├── BigBlueButton.java ├── BigBlueButtonClient.java ├── CaptureConstants.java ├── CaptureDialog.java ├── Codec.java ├── CodecBase.java ├── MainActivity.java ├── MainRtmpConnection.java ├── NativeLibsLoader.java ├── RtmpAudioPlayer.java ├── RtmpConnection.java ├── Speex.java ├── VideoCapture.java ├── VideoCaptureLayout.java ├── VideoCentering.java ├── VideoDialog.java ├── VideoPublish.java ├── VideoPublisherConnection.java ├── VoiceConnection.java ├── VoiceInterface.java └── VoiceOverRtmp.java /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | 4 | # Custom for Visual Studio 5 | *.cs diff=csharp 6 | 7 | # Standard to msysgit 8 | *.doc diff=astextplain 9 | *.DOC diff=astextplain 10 | *.docx diff=astextplain 11 | *.DOCX diff=astextplain 12 | *.dot 
diff=astextplain 13 | *.DOT diff=astextplain 14 | *.pdf diff=astextplain 15 | *.PDF diff=astextplain 16 | *.rtf diff=astextplain 17 | *.RTF diff=astextplain 18 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Windows image file caches 2 | Thumbs.db 3 | ehthumbs.db 4 | 5 | # Folder config file 6 | Desktop.ini 7 | 8 | # Recycle Bin used on file shares 9 | $RECYCLE.BIN/ 10 | 11 | # Windows Installer files 12 | *.cab 13 | *.msi 14 | *.msm 15 | *.msp 16 | 17 | # Windows shortcuts 18 | *.lnk 19 | 20 | # ========================= 21 | # Operating System Files 22 | # ========================= 23 | 24 | # OSX 25 | # ========================= 26 | 27 | .DS_Store 28 | .AppleDouble 29 | .LSOverride 30 | 31 | # Thumbnails 32 | ._* 33 | 34 | # Files that might appear on external disk 35 | .Spotlight-V100 36 | .Trashes 37 | 38 | # Directories potentially created on remote AFP share 39 | .AppleDB 40 | .AppleDesktop 41 | Network Trash Folder 42 | Temporary Items 43 | .apdisk 44 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # RTMP-Audio-Publishing-Android 2 | 3 | This module is a part of RTMP Video Conferencing Android Application over Red5 Server for Aakash Tablets, developed at Indian Institute of Technology, Bombay while I was working there as a Project Trainee from July, 2014 - September, 2014. 
4 | 5 | The above project have the following capabilities: 6 | 7 | Audio: Fetching and Publishing 8 | 9 | Video: Fetching and Publishing 10 | -------------------------------------------------------------------------------- /audiopublish/.classpath: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /audiopublish/.project: -------------------------------------------------------------------------------- 1 | 2 | 3 | MainActivity 4 | 5 | 6 | 7 | 8 | 9 | com.android.ide.eclipse.adt.ResourceManagerBuilder 10 | 11 | 12 | 13 | 14 | com.android.ide.eclipse.adt.PreCompilerBuilder 15 | 16 | 17 | 18 | 19 | org.eclipse.jdt.core.javabuilder 20 | 21 | 22 | 23 | 24 | com.android.ide.eclipse.adt.ApkBuilder 25 | 26 | 27 | 28 | 29 | 30 | com.android.ide.eclipse.adt.AndroidNature 31 | org.eclipse.jdt.core.javanature 32 | 33 | 34 | -------------------------------------------------------------------------------- /audiopublish/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 2 | 6 | 7 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 30 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /audiopublish/_classpath: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /audiopublish/_project: -------------------------------------------------------------------------------- 1 | 2 | 3 | videopublish 4 | 5 | 6 | 7 | 8 | 9 | com.android.ide.eclipse.adt.ResourceManagerBuilder 10 | 11 | 12 | 13 | 14 | com.android.ide.eclipse.adt.PreCompilerBuilder 15 | 16 | 17 | 18 | 19 | org.eclipse.jdt.core.javabuilder 20 | 21 | 22 | 23 | 24 | 
com.android.ide.eclipse.adt.ApkBuilder 25 | 26 | 27 | 28 | 29 | 30 | com.android.ide.eclipse.adt.AndroidNature 31 | org.eclipse.jdt.core.javanature 32 | 33 | 34 | -------------------------------------------------------------------------------- /audiopublish/bin/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 2 | 6 | 7 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 30 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /audiopublish/bin/MainActivity.apk: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/MainActivity.apk -------------------------------------------------------------------------------- /audiopublish/bin/classes.dex: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes.dex -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/AudioPublish.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/AudioPublish.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/BbbVideoPublisher.class: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/BbbVideoPublisher.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/BbbVoiceConnection.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/BbbVoiceConnection.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/BigBlueButton.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/BigBlueButton.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/BigBlueButtonClient.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/BigBlueButtonClient.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/BuildConfig.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/BuildConfig.class -------------------------------------------------------------------------------- 
/audiopublish/bin/classes/org/mconf/android/core/video/CaptureConstants.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/CaptureConstants.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/CaptureDialog.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/CaptureDialog.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/Codec.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/Codec.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/CodecBase.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/CodecBase.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/MainActivity.class: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/MainActivity.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/MainRtmpConnection$1.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/MainRtmpConnection$1.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/MainRtmpConnection.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/MainRtmpConnection.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/NativeLibsLoader.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/NativeLibsLoader.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/R$attr.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/R$attr.class -------------------------------------------------------------------------------- 
/audiopublish/bin/classes/org/mconf/android/core/video/R$drawable.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/R$drawable.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/R$id.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/R$id.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/R$layout.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/R$layout.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/R$menu.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/R$menu.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/R$string.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/R$string.class 
-------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/R$style.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/R$style.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/R.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/R.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/RtmpAudioPlayer.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/RtmpAudioPlayer.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/RtmpConnection.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/RtmpConnection.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/Speex.class: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/Speex.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/VideoCapture.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/VideoCapture.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/VideoCaptureLayout.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/VideoCaptureLayout.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/VideoCentering.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/VideoCentering.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/VideoDialog.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/VideoDialog.class -------------------------------------------------------------------------------- 
/audiopublish/bin/classes/org/mconf/android/core/video/VideoPublish.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/VideoPublish.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/VideoPublisherConnection$1.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/VideoPublisherConnection$1.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/VideoPublisherConnection.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/VideoPublisherConnection.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/VoiceConnection$1.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/VoiceConnection$1.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/VoiceConnection.class: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/VoiceConnection.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/VoiceInterface.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/VoiceInterface.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/VoiceOverRtmp$1.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/VoiceOverRtmp$1.class -------------------------------------------------------------------------------- /audiopublish/bin/classes/org/mconf/android/core/video/VoiceOverRtmp.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/classes/org/mconf/android/core/video/VoiceOverRtmp.class -------------------------------------------------------------------------------- /audiopublish/bin/dexedLibs/android-support-v4-f2fd738ff1ccc9d1341f4dad0fd5b4e2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/dexedLibs/android-support-v4-f2fd738ff1ccc9d1341f4dad0fd5b4e2.jar -------------------------------------------------------------------------------- 
/audiopublish/bin/dexedLibs/flazr-79a83adedea3ea36db9b347bd0ad5e99.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/dexedLibs/flazr-79a83adedea3ea36db9b347bd0ad5e99.jar -------------------------------------------------------------------------------- /audiopublish/bin/dexedLibs/jcl104-over-slf4j-1.4.2-0fe157f2a3643d7a091c93c7bcae7ff7.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/dexedLibs/jcl104-over-slf4j-1.4.2-0fe157f2a3643d7a091c93c7bcae7ff7.jar -------------------------------------------------------------------------------- /audiopublish/bin/dexedLibs/log4j-1.2.14-65b3d5bd0b25e206151620761e5f5f4f.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/dexedLibs/log4j-1.2.14-65b3d5bd0b25e206151620761e5f5f4f.jar -------------------------------------------------------------------------------- /audiopublish/bin/dexedLibs/netty-3.7.0.Final-2a4b5622d25ad1e61a3daa36b4a60355.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/dexedLibs/netty-3.7.0.Final-2a4b5622d25ad1e61a3daa36b4a60355.jar -------------------------------------------------------------------------------- /audiopublish/bin/dexedLibs/slf4j-api-1.4.2-22f5530b8e89f2a190e761bdd65d5245.jar: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/dexedLibs/slf4j-api-1.4.2-22f5530b8e89f2a190e761bdd65d5245.jar -------------------------------------------------------------------------------- /audiopublish/bin/dexedLibs/slf4j-log4j12-1.4.2-5fffc80ab642dab2c634939e4b8544d4.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/dexedLibs/slf4j-log4j12-1.4.2-5fffc80ab642dab2c634939e4b8544d4.jar -------------------------------------------------------------------------------- /audiopublish/bin/jarlist.cache: -------------------------------------------------------------------------------- 1 | # cache for current jar dependecy. DO NOT EDIT. 2 | # format is 3 | # Encoding is UTF-8 4 | -------------------------------------------------------------------------------- /audiopublish/bin/res/drawable-hdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/res/drawable-hdpi/ic_launcher.png -------------------------------------------------------------------------------- /audiopublish/bin/res/drawable-ldpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/res/drawable-ldpi/ic_launcher.png -------------------------------------------------------------------------------- /audiopublish/bin/res/drawable-mdpi/ic_launcher.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/res/drawable-mdpi/ic_launcher.png -------------------------------------------------------------------------------- /audiopublish/bin/res/drawable-xhdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/res/drawable-xhdpi/ic_launcher.png -------------------------------------------------------------------------------- /audiopublish/bin/resources.ap_: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/bin/resources.ap_ -------------------------------------------------------------------------------- /audiopublish/gen/org/mconf/android/core/video/BuildConfig.java: -------------------------------------------------------------------------------- 1 | /** Automatically generated file. DO NOT MODIFY */ 2 | package org.mconf.android.core.video; 3 | 4 | public final class BuildConfig { 5 | public final static boolean DEBUG = true; 6 | } -------------------------------------------------------------------------------- /audiopublish/gen/org/mconf/android/core/video/R.java: -------------------------------------------------------------------------------- 1 | /* AUTO-GENERATED FILE. DO NOT MODIFY. 2 | * 3 | * This class was automatically generated by the 4 | * aapt tool from the resource data it found. It 5 | * should not be modified by hand. 
6 | */ 7 | 8 | package org.mconf.android.core.video; 9 | 10 | public final class R { 11 | public static final class attr { 12 | } 13 | public static final class drawable { 14 | public static final int ic_launcher=0x7f020000; 15 | } 16 | public static final class id { 17 | public static final int menu_settings=0x7f070002; 18 | public static final int video_capture=0x7f070001; 19 | public static final int video_capture_layout=0x7f070000; 20 | } 21 | public static final class layout { 22 | public static final int activity_main=0x7f030000; 23 | public static final int video_capture=0x7f030001; 24 | } 25 | public static final class menu { 26 | public static final int activity_main=0x7f060000; 27 | } 28 | public static final class string { 29 | public static final int app_name=0x7f040000; 30 | public static final int hello_world=0x7f040001; 31 | public static final int menu_settings=0x7f040002; 32 | } 33 | public static final class style { 34 | /** 35 | Base application theme, dependent on API level. This theme is replaced 36 | by AppBaseTheme from res/values-vXX/styles.xml on newer devices. 37 | 38 | 39 | Theme customizations available in newer API levels can go in 40 | res/values-vXX/styles.xml, while customizations related to 41 | backward-compatibility can go here. 42 | 43 | 44 | Base application theme for API 11+. This theme completely replaces 45 | AppBaseTheme from res/values/styles.xml on API 11+ devices. 46 | 47 | API 11 theme customizations can go here. 48 | 49 | Base application theme for API 14+. This theme completely replaces 50 | AppBaseTheme from BOTH res/values/styles.xml and 51 | res/values-v11/styles.xml on API 14+ devices. 52 | 53 | API 14 theme customizations can go here. 54 | */ 55 | public static final int AppBaseTheme=0x7f050000; 56 | /** Application theme. 57 | All customizations that are NOT specific to a particular API-level can go here. 
58 | */ 59 | public static final int AppTheme=0x7f050001; 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /audiopublish/ic_launcher-web.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/ic_launcher-web.png -------------------------------------------------------------------------------- /audiopublish/libs/android-support-v4.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/libs/android-support-v4.jar -------------------------------------------------------------------------------- /audiopublish/libs/armeabi/libavcodec.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/libs/armeabi/libavcodec.so -------------------------------------------------------------------------------- /audiopublish/libs/armeabi/libavformat.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/libs/armeabi/libavformat.so -------------------------------------------------------------------------------- /audiopublish/libs/armeabi/libavutil.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/libs/armeabi/libavutil.so -------------------------------------------------------------------------------- /audiopublish/libs/armeabi/libcommon.so: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/libs/armeabi/libcommon.so -------------------------------------------------------------------------------- /audiopublish/libs/armeabi/libdecode.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/libs/armeabi/libdecode.so -------------------------------------------------------------------------------- /audiopublish/libs/armeabi/libencode.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/libs/armeabi/libencode.so -------------------------------------------------------------------------------- /audiopublish/libs/armeabi/libmconfnativeencodevideo.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/libs/armeabi/libmconfnativeencodevideo.so -------------------------------------------------------------------------------- /audiopublish/libs/armeabi/libmconfnativeshowvideo.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/libs/armeabi/libmconfnativeshowvideo.so -------------------------------------------------------------------------------- /audiopublish/libs/armeabi/libqueue.so: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/libs/armeabi/libqueue.so -------------------------------------------------------------------------------- /audiopublish/libs/armeabi/libspeex_jni.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/libs/armeabi/libspeex_jni.so -------------------------------------------------------------------------------- /audiopublish/libs/armeabi/libswscale.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/libs/armeabi/libswscale.so -------------------------------------------------------------------------------- /audiopublish/libs/armeabi/libthread.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/libs/armeabi/libthread.so -------------------------------------------------------------------------------- /audiopublish/libs/flazr.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/libs/flazr.jar -------------------------------------------------------------------------------- /audiopublish/libs/jcl104-over-slf4j-1.4.2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/libs/jcl104-over-slf4j-1.4.2.jar 
-------------------------------------------------------------------------------- /audiopublish/libs/log4j-1.2.14.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/libs/log4j-1.2.14.jar -------------------------------------------------------------------------------- /audiopublish/libs/netty-3.7.0.Final.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/libs/netty-3.7.0.Final.jar -------------------------------------------------------------------------------- /audiopublish/libs/slf4j-api-1.4.2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/libs/slf4j-api-1.4.2.jar -------------------------------------------------------------------------------- /audiopublish/libs/slf4j-log4j12-1.4.2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/libs/slf4j-log4j12-1.4.2.jar -------------------------------------------------------------------------------- /audiopublish/proguard-project.txt: -------------------------------------------------------------------------------- 1 | # To enable ProGuard in your project, edit project.properties 2 | # to define the proguard.config property as described in that file. 3 | # 4 | # Add project specific ProGuard rules here. 
5 | # By default, the flags in this file are appended to flags specified 6 | # in ${sdk.dir}/tools/proguard/proguard-android.txt 7 | # You can edit the include path and order by changing the ProGuard 8 | # include property in project.properties. 9 | # 10 | # For more details, see 11 | # http://developer.android.com/guide/developing/tools/proguard.html 12 | 13 | # Add any project specific keep options here: 14 | 15 | # If your project uses WebView with JS, uncomment the following 16 | # and specify the fully qualified class name to the JavaScript interface 17 | # class: 18 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview { 19 | # public *; 20 | #} 21 | -------------------------------------------------------------------------------- /audiopublish/project.properties: -------------------------------------------------------------------------------- 1 | # This file is automatically generated by Android Tools. 2 | # Do not modify this file -- YOUR CHANGES WILL BE ERASED! 3 | # 4 | # This file must be checked in Version Control Systems. 5 | # 6 | # To customize properties used by the Ant build system edit 7 | # "ant.properties", and override values to adapt the script to your 8 | # project structure. 9 | # 10 | # To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home): 11 | #proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt 12 | 13 | # Project target. 
14 | target=android-17 15 | -------------------------------------------------------------------------------- /audiopublish/res/drawable-hdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/res/drawable-hdpi/ic_launcher.png -------------------------------------------------------------------------------- /audiopublish/res/drawable-ldpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/res/drawable-ldpi/ic_launcher.png -------------------------------------------------------------------------------- /audiopublish/res/drawable-mdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/res/drawable-mdpi/ic_launcher.png -------------------------------------------------------------------------------- /audiopublish/res/drawable-xhdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshing/RTMP-Audio-Publishing-Android/d32864db57d2074f5697bba5a98502ed07c99a01/audiopublish/res/drawable-xhdpi/ic_launcher.png -------------------------------------------------------------------------------- /audiopublish/res/layout/activity_main.xml: -------------------------------------------------------------------------------- 1 | 6 | 7 | 13 | 14 | -------------------------------------------------------------------------------- /audiopublish/res/layout/video_capture.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 10 | 12 | 13 | 
-------------------------------------------------------------------------------- /audiopublish/res/menu/activity_main.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 9 | -------------------------------------------------------------------------------- /audiopublish/res/values-v11/styles.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 7 | 10 | 11 | -------------------------------------------------------------------------------- /audiopublish/res/values-v14/styles.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 11 | 12 | -------------------------------------------------------------------------------- /audiopublish/res/values/strings.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | videopublish 5 | Hello world! 6 | Settings 7 | 8 | -------------------------------------------------------------------------------- /audiopublish/res/values/styles.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 7 | 14 | 15 | 16 | 19 | 20 | -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/AudioPublish.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | import java.util.ArrayList; 4 | import java.util.List; 5 | 6 | import org.slf4j.Logger; 7 | import org.slf4j.LoggerFactory; 8 | 9 | import android.media.AudioFormat; 10 | import android.media.AudioRecord; 11 | import android.media.MediaRecorder.AudioSource; 12 | 13 | import com.flazr.rtmp.RtmpMessage; 14 | import com.flazr.rtmp.RtmpReader; 15 | import com.flazr.rtmp.message.Audio; 16 | import com.flazr.rtmp.message.Metadata; 17 | 18 | public class AudioPublish extends Thread implements RtmpReader { 19 | 20 | private static 
final Logger log = LoggerFactory.getLogger(AudioPublish.class); 21 | 22 | private final int SHORT_SIZE_IN_BYTES = 2; 23 | 24 | private List audioBuffer; 25 | 26 | private Codec codec; 27 | 28 | private AudioRecord recorder; 29 | 30 | private short[] recordBuffer; 31 | private byte[] encodedBuffer; 32 | 33 | private int sampSize; // bytes 34 | private int sampRate; // Hertz 35 | 36 | private int frameSize; // samples 37 | private int frameRate; // FPS 38 | 39 | private int frameDuration; // ms 40 | 41 | private int frameSizeInShorts; 42 | 43 | private boolean running; 44 | 45 | private boolean muted = false; 46 | 47 | private int currentTimestamp; 48 | private int lastTimestamp; 49 | 50 | public AudioPublish() { 51 | 52 | running = false; 53 | 54 | codec = new Speex(); 55 | codec.init(); 56 | 57 | sampRate = codec.samp_rate(); // samples per second 8000 58 | sampSize = 2; //2 bytes per sample => ENCODING PCM 16BIT 59 | 60 | frameSize = codec.frame_size(); //number of SAMPLES which a FRAME has 160 61 | frameRate = sampRate / frameSize; //50 62 | frameDuration = 1000/frameRate; 63 | 64 | frameSizeInShorts = frameSize*(sampSize/SHORT_SIZE_IN_BYTES);//160 65 | 66 | int minBufferSize = AudioRecord.getMinBufferSize(sampRate, //768 67 | AudioFormat.CHANNEL_CONFIGURATION_MONO, 68 | AudioFormat.ENCODING_PCM_16BIT); 69 | 70 | minBufferSize *= 2; 71 | 72 | recorder = new AudioRecord(AudioSource.MIC, sampRate, 73 | AudioFormat.CHANNEL_CONFIGURATION_MONO, 74 | AudioFormat.ENCODING_PCM_16BIT, minBufferSize); 75 | 76 | 77 | recordBuffer = new short[frameSizeInShorts]; 78 | encodedBuffer = new byte[12 + frameSize*sampSize]; 79 | 80 | audioBuffer = new ArrayList(); 81 | setFirstAudioPacket(); 82 | 83 | currentTimestamp = 0; 84 | lastTimestamp = 0; 85 | } 86 | 87 | @Override 88 | public void run() { 89 | 90 | android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO); 91 | 92 | int readShorts = 0; 93 | int encodedSize = 0; 94 | 95 | long startTime = 0; 96 | 
long delayToUse = 0; 97 | 98 | running = true; 99 | recorder.startRecording(); 100 | 101 | while(running) 102 | { 103 | while(muted && running) 104 | { 105 | try { 106 | sleep(50); 107 | } catch (InterruptedException e) { 108 | e.printStackTrace(); 109 | } 110 | } 111 | 112 | 113 | //necessary to end this thread properly: 114 | //user may have closed the audio service while the loop above was active 115 | if(!running) 116 | break; 117 | 118 | startTime = System.currentTimeMillis(); 119 | 120 | //reading 160 samples (1 frame) 121 | readShorts = recorder.read(recordBuffer, 0, frameSizeInShorts); 122 | 123 | if(readShorts != AudioRecord.ERROR_BAD_VALUE && 124 | readShorts != AudioRecord.ERROR_INVALID_OPERATION && 125 | readShorts > 0) 126 | { 127 | encodedSize = codec.encode(recordBuffer, 0, encodedBuffer, readShorts); 128 | 129 | int offset = 1; 130 | 131 | final byte[] dataToSend = new byte[encodedSize+offset]; 132 | System.arraycopy(encodedBuffer, 12, dataToSend, offset, encodedSize); 133 | 134 | Audio audioPacket = new Audio(dataToSend); 135 | 136 | audioPacket.getHeader().setTime(currentTimestamp); 137 | 138 | int interval = currentTimestamp - lastTimestamp; 139 | audioPacket.getHeader().setDeltaTime(interval); 140 | 141 | lastTimestamp = currentTimestamp; 142 | currentTimestamp += frameDuration; 143 | 144 | audioBuffer.add(audioPacket); 145 | 146 | synchronized(this) { 147 | this.notify();} 148 | 149 | delayToUse = frameDuration-(System.currentTimeMillis()-startTime); 150 | if(delayToUse > 0) { 151 | try { 152 | Thread.sleep(delayToUse); 153 | } catch (InterruptedException e) { 154 | // TODO Auto-generated catch block 155 | e.printStackTrace(); 156 | } 157 | } 158 | 159 | } 160 | } 161 | 162 | //ending thread 163 | releaseResources(); 164 | log.debug("\n\n\nAll resources of the audio capture released!\n\n\n"); 165 | log.debug(""); 166 | 167 | } 168 | 169 | private void setFirstAudioPacket() 170 | { 171 | Audio firstAudio = Audio.empty(); 172 | 
audioBuffer.add(firstAudio); 173 | } 174 | 175 | public void mute() 176 | { 177 | muted = true; 178 | } 179 | 180 | public void unmute() 181 | { 182 | muted = false; 183 | } 184 | 185 | public boolean isMuted() 186 | { 187 | return muted; 188 | } 189 | 190 | private void releaseResources() 191 | { 192 | if(audioBuffer != null) { 193 | audioBuffer.clear(); 194 | audioBuffer = null; 195 | } 196 | 197 | if(recorder != null) { 198 | recorder.stop(); 199 | recorder.release(); 200 | recorder = null; 201 | } 202 | 203 | if(codec != null) { 204 | codec.close(); 205 | codec = null; 206 | } 207 | 208 | } 209 | 210 | 211 | @Override 212 | public Metadata getMetadata() { 213 | 214 | return null; 215 | } 216 | 217 | @Override 218 | public RtmpMessage[] getStartMessages() { 219 | 220 | return new Audio[0]; 221 | } 222 | 223 | @Override 224 | public void setAggregateDuration(int targetDuration) { 225 | 226 | } 227 | 228 | @Override 229 | public long getTimePosition() { 230 | 231 | return 0; 232 | } 233 | 234 | @Override 235 | public long seek(long timePosition) { 236 | 237 | return 0; 238 | } 239 | 240 | @Override 241 | public void close() { 242 | 243 | log.debug("\n\nCalling close on audio capture...\n\n"); 244 | running = false; 245 | } 246 | 247 | @Override 248 | public boolean hasNext() { 249 | 250 | if(audioBuffer != null && audioBuffer.isEmpty()) 251 | { 252 | try { 253 | 254 | this.wait(); 255 | 256 | } catch (InterruptedException e) { 257 | log.debug("\n\n\n\nException on AudioPublish , hasNext method, threw by this.wait() line\n\n\n\n"); 258 | return false; 259 | } 260 | } 261 | 262 | return audioBuffer != null; 263 | } 264 | 265 | @Override 266 | public RtmpMessage next() { 267 | 268 | if(audioBuffer != null) 269 | { 270 | if(audioBuffer.isEmpty()) 271 | { 272 | Audio emptyAudio = Audio.empty(); 273 | return emptyAudio; 274 | } 275 | 276 | return audioBuffer.remove(0); 277 | } 278 | 279 | return null; 280 | } 281 | 282 | @Override 283 | public int getWidth() { 284 | // 
TODO Auto-generated method stub 285 | return 0; 286 | } 287 | 288 | @Override 289 | public int getHeight() { 290 | // TODO Auto-generated method stub 291 | return 0; 292 | } 293 | } -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/BbbVideoPublisher.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | import org.slf4j.Logger; 4 | import org.slf4j.LoggerFactory; 5 | 6 | import com.flazr.rtmp.RtmpReader; 7 | import com.flazr.rtmp.client.ClientOptions; 8 | import com.flazr.util.Utils; 9 | 10 | 11 | public class BbbVideoPublisher { 12 | private static final Logger log = LoggerFactory.getLogger(BbbVideoPublisher.class); 13 | 14 | private VideoPublisherConnection videoConnection = null; 15 | private String streamName; 16 | private BigBlueButtonClient context; 17 | private ClientOptions opt; 18 | //private Object[] args={"Test1","18.9750/72.8258","AB","1","Female","info",""}; 19 | //private Object[] args={"Test1"}; 20 | 21 | public BbbVideoPublisher(BigBlueButtonClient context, RtmpReader reader, String streamName, ClientOptions opt) { 22 | this.streamName = streamName; 23 | this.context = context; 24 | this.opt=opt; 25 | /*opt = new ClientOptions(); 26 | opt.setHost("10.129.200.81"); 27 | opt.setAppName("HariPanTest3"); 28 | //opt.setAppName("PanTest"); 29 | opt.publishLive(); 30 | opt.setArgs(args); 31 | opt.setStreamName(streamName);*/ 32 | opt.setReaderToPublish(reader); 33 | } 34 | 35 | public void setLoop(boolean loop) { 36 | opt.setLoop(loop? 
Integer.MAX_VALUE: 0); 37 | } 38 | 39 | public void start() { 40 | //context.getUsersModule().addStream(streamName); 41 | if (videoConnection == null) { 42 | videoConnection = new VideoPublisherConnection(opt, context); 43 | videoConnection.connect(); 44 | } 45 | } 46 | 47 | public void stop() { 48 | //context.getUsersModule().removeStream(streamName); 49 | // when the stream is removed from the users module, the client automatically 50 | // receives a NetStream.Unpublish.Success, then the channel is closed 51 | // \TODO it's may create a memory leak, check it 52 | //videoConnection.disconnect(); 53 | videoConnection = null; 54 | } 55 | 56 | public void fireFirstFrame() { 57 | if (videoConnection != null) { 58 | videoConnection.publisher.fireNext(videoConnection.publisher.channel, 0); 59 | } 60 | } 61 | } -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/BbbVoiceConnection.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | import org.slf4j.Logger; 4 | import org.slf4j.LoggerFactory; 5 | 6 | import com.flazr.rtmp.client.ClientOptions; 7 | import com.flazr.rtmp.message.Audio; 8 | 9 | public class BbbVoiceConnection extends VoiceConnection { 10 | private static final Logger log = LoggerFactory.getLogger(BbbVoiceConnection.class); 11 | 12 | public BbbVoiceConnection(BigBlueButtonClient context, ClientOptions options) { 13 | super(options, context); 14 | } 15 | 16 | public void setLoop(boolean loop) { 17 | options.setLoop(loop ? 
Integer.MAX_VALUE : 0); 18 | } 19 | 20 | public boolean start() { 21 | return connect(); 22 | } 23 | 24 | public void stop() { 25 | disconnect(); 26 | } 27 | 28 | @Override 29 | protected void onAudio(Audio audio) { 30 | log.debug("received audio package: {}", audio.getHeader().getTime()); 31 | } 32 | } -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/BigBlueButton.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | import android.app.Application; 4 | import android.content.SharedPreferences; 5 | import android.preference.PreferenceManager; 6 | 7 | 8 | public class BigBlueButton extends Application { 9 | private BigBlueButtonClient handler = null; 10 | //private VoiceModule voice = null; 11 | private VideoPublish mVideoPublish = null; 12 | private boolean restartCaptureWhenAppResumes = false; 13 | 14 | private int framerate = CaptureConstants.DEFAULT_FRAME_RATE; 15 | private int width = CaptureConstants.DEFAULT_WIDTH; 16 | private int height = CaptureConstants.DEFAULT_HEIGHT; 17 | private int bitrate = CaptureConstants.DEFAULT_BIT_RATE; 18 | private int gop = CaptureConstants.DEFAULT_GOP; 19 | 20 | private int launchedBy = LAUNCHED_BY_NON_SPECIFIED; 21 | public static final int LAUNCHED_BY_NON_SPECIFIED = 0; 22 | public static final int LAUNCHED_USING_DEMO = 1; 23 | public static final int LAUNCHED_USING_URL = 2; 24 | 25 | @Override 26 | public void onCreate() { 27 | // The following line triggers the initialization of ACRA 28 | 29 | // http://stackoverflow.com/questions/2879455/android-2-2-and-bad-address-family-on-socket-connect 30 | System.setProperty("java.net.preferIPv6Addresses", "false"); 31 | super.onCreate(); 32 | } 33 | 34 | 35 | public BigBlueButtonClient getHandler() { 36 | if (handler == null) 37 | handler = new BigBlueButtonClient(); 38 | return handler; 39 | } 40 | 41 | 42 | public 
VideoPublish getVideoPublish() { 43 | if(mVideoPublish == null) { 44 | SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this); 45 | mVideoPublish = new VideoPublish(getHandler(), restartCaptureWhenAppResumes, 46 | framerate, width, height, bitrate, gop, 47 | Integer.parseInt(prefs.getString("video_rotation", "0"))); 48 | } 49 | return mVideoPublish; 50 | } 51 | 52 | public VideoPublish deleteVideoPublish() { 53 | if(mVideoPublish != null){ 54 | restartCaptureWhenAppResumes = mVideoPublish.restartWhenResume; 55 | 56 | framerate = mVideoPublish.getFramerate(); 57 | width = mVideoPublish.getWidth(); 58 | height = mVideoPublish.getHeight(); 59 | bitrate = mVideoPublish.getBitrate(); 60 | gop = mVideoPublish.getGop(); 61 | 62 | mVideoPublish = null; 63 | } 64 | return mVideoPublish; 65 | } 66 | 67 | /*public void invalidateVoiceModule() { 68 | if (voice != null) 69 | voice.hang(); 70 | voice = null; 71 | }*/ 72 | 73 | /* 74 | * GETTERS AND SETTERS 75 | */ 76 | } 77 | -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/BigBlueButtonClient.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | import java.util.Collection; 4 | import java.util.List; 5 | 6 | import org.jboss.netty.channel.Channel; 7 | import org.slf4j.Logger; 8 | import org.slf4j.LoggerFactory; 9 | 10 | import com.flazr.rtmp.client.ClientOptions; 11 | import com.flazr.rtmp.message.Command; 12 | import com.flazr.util.Utils; 13 | 14 | public class BigBlueButtonClient { 15 | 16 | private static final Logger log = LoggerFactory.getLogger(BigBlueButtonClient.class); 17 | 18 | private MainRtmpConnection mainConnection = null; 19 | //private Object[] args={"Test1","18.9750/72.8258","AB","1","Female","info",""}; 20 | private Object[] args={"Test1"}; 21 | 22 | 23 | public MainRtmpConnection getConnection() { 24 | return mainConnection; 25 
| } 26 | 27 | public boolean connectBigBlueButton() { 28 | ClientOptions opt = new ClientOptions(); 29 | opt.setHost("10.129.200.81"); 30 | //opt.setAppName("HariPanTest3"); 31 | opt.setAppName("PanTest"); 32 | opt.setArgs(args); 33 | mainConnection = new MainRtmpConnection(opt, this); 34 | return mainConnection.connect(); 35 | } 36 | 37 | public void disconnect() { 38 | if (mainConnection != null) 39 | mainConnection.disconnect(); 40 | } 41 | 42 | public static void main(String[] args) { 43 | BigBlueButtonClient client = new BigBlueButtonClient(); 44 | client.connectBigBlueButton(); 45 | } 46 | 47 | public boolean onCommand(String resultFor, Command command) { 48 | return true; 49 | } 50 | 51 | public boolean isConnected() { 52 | if (mainConnection == null) 53 | return false; 54 | else 55 | return mainConnection.isConnected(); 56 | } 57 | } -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/CaptureConstants.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | public class CaptureConstants { 4 | //errors: 5 | public static final int E_OK = 0; 6 | public static final int E_COULD_NOT_OPEN_CAMERA = -1; 7 | public static final int E_COULD_NOT_SET_PREVIEW_DISPLAY_R1 = -2; 8 | public static final int E_COULD_NOT_SET_PREVIEW_DISPLAY_R2 = -3; 9 | public static final int E_COULD_NOT_REQUEST_RESUME = -4; 10 | public static final int E_COULD_NOT_SET_PARAMETERS = -5; 11 | public static final int E_COULD_NOT_GET_BUFSIZE = -6; 12 | public static final int E_COULD_NOT_PREPARE_CALLBACK_R1 = -7; 13 | public static final int E_COULD_NOT_PREPARE_CALLBACK_R2 = -8; 14 | public static final int E_COULD_NOT_INIT_NATIVE_SIDE = -9; 15 | public static final int E_COULD_NOT_BEGIN_PREVIEW = -10; 16 | public static final int E_COULD_NOT_START_PUBLISHER_THREAD_R1 = -11; 17 | public static final int E_COULD_NOT_START_PUBLISHER_THREAD_R2 
= -12; 18 | public static final int E_COULD_NOT_START_PUBLISHER_R1 = -13; 19 | public static final int E_COULD_NOT_START_PUBLISHER_R2 = -14; 20 | public static final int E_COULD_NOT_RESUME_CAPTURE = -15; 21 | public static final int E_COULD_NOT_INIT_HIDDEN = -16; 22 | public static final int E_COULD_NOT_SET_HIDDEN_R1 = -17; 23 | public static final int E_COULD_NOT_SET_HIDDEN_R2 = -18; 24 | public static final int E_COULD_NOT_ADD_HIDDEN = -19; 25 | public static final int E_COULD_NOT_SET_FR = -20; 26 | public static final int E_COULD_NOT_SET_W = -21; 27 | public static final int E_COULD_NOT_SET_H = -22; 28 | public static final int E_COULD_NOT_SET_BR = -23; 29 | public static final int E_COULD_NOT_SET_GOP = -24; 30 | public static final int E_COULD_NOT_CENTER = -25; 31 | public static final int E_COULD_NOT_GET_PUBLISHER = -26; 32 | public static final int E_COULD_NOT_GET_FR = -27; 33 | public static final int E_COULD_NOT_GET_W = -28; 34 | public static final int E_COULD_NOT_GET_H = -29; 35 | public static final int E_COULD_NOT_GET_BR = -30; 36 | public static final int E_COULD_NOT_GET_GOP = -31; 37 | 38 | //video parameters constants: 39 | public static final int DEFAULT_FRAME_RATE = 10; 40 | //I wouldn't set the width and height to values too low (below 200x200), 41 | //because there is a known bug on some Android devices where setting these values too low 42 | //causes the camera to crash the second time we open it, leading to the need of restarting the device. 
43 | public static final int DEFAULT_WIDTH = 320; 44 | public static final int DEFAULT_HEIGHT = 240; 45 | public static final int DEFAULT_BIT_RATE = 128000; 46 | public static final int DEFAULT_GOP = 5; 47 | 48 | //constants to tell the state of the capture/encoding/publish: 49 | public static final int STOPPED = 0; // no video is being captured, the native threads are closed and the video publisher does not exist 50 | public static final int RESUMED = 1; // video is being captured, encoded and published 51 | public static final int PAUSED = 2; // no video is being captured, the native encoding thread is running and waiting for frames, and the video publisher exists and is waiting for frames 52 | public static final int ERROR = 3; // an error occured, and it is not possible to determine the state of the capture/encoding/publish 53 | 54 | //notification ID: 55 | public static final int VIDEO_PUBLISH_NOTIFICATION_ID = 57634; 56 | } -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/CaptureDialog.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | import org.slf4j.Logger; 4 | import org.slf4j.LoggerFactory; 5 | 6 | 7 | import android.app.Dialog; 8 | import android.content.Context; 9 | import android.view.KeyEvent; 10 | import android.view.Window; 11 | 12 | public class CaptureDialog extends Dialog { 13 | private static final Logger log = LoggerFactory.getLogger(CaptureDialog.class); 14 | 15 | private VideoCapture videoWindow; 16 | private boolean startsHidden = true; //true=capture starts without preview. false=capture starts with preview. 
17 | public boolean isPreviewHidden; 18 | private boolean wasPreviewHidden = false; 19 | 20 | public CaptureDialog(Context context) { 21 | super(context); 22 | 23 | requestWindowFeature(Window.FEATURE_NO_TITLE); //Removes the title from the Dialog 24 | setContentView(R.layout.video_capture); 25 | 26 | videoWindow = (VideoCapture) findViewById(R.id.video_capture); 27 | 28 | if(startsHidden){ 29 | isPreviewHidden = false; 30 | hidePreview(); 31 | } else { 32 | isPreviewHidden = true; 33 | showPreview(true); 34 | } 35 | 36 | setTitle("Camera preview"); 37 | setCancelable(false); 38 | } 39 | 40 | public void hidePreview() { //hides the preview but keeps capturing 41 | if(!isPreviewHidden){ 42 | android.view.WindowManager.LayoutParams windowAttributes = getWindow().getAttributes(); 43 | windowAttributes.flags = android.view.WindowManager.LayoutParams.FLAG_SCALED 44 | | android.view.WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE 45 | | android.view.WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE 46 | ; 47 | getWindow().setAttributes(windowAttributes); 48 | 49 | if(videoWindow != null){ 50 | videoWindow.hidePreview(); 51 | } 52 | 53 | isPreviewHidden = true; 54 | } 55 | } 56 | 57 | public void showPreview(boolean center) { //if the preview is hidden, show it 58 | if(isPreviewHidden){ 59 | android.view.WindowManager.LayoutParams windowAttributes = getWindow().getAttributes(); 60 | windowAttributes.flags = android.view.WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON; //Makes the video brigth 61 | getWindow().setAttributes(windowAttributes); 62 | 63 | if(videoWindow != null && center){ 64 | centerPreview(); 65 | } 66 | 67 | isPreviewHidden = false; 68 | } 69 | } 70 | 71 | public void centerPreview(){ 72 | if(videoWindow != null){ 73 | videoWindow.centerPreview(); 74 | } 75 | } 76 | 77 | @Override 78 | protected void onStart() { 79 | super.onStart(); 80 | resume(); 81 | } 82 | 83 | @Override 84 | protected void onStop() { 85 | pause(); 86 | super.onStop(); 87 | } 88 | 89 | public 
void pause() { 90 | if(isPreviewHidden){ 91 | showPreview(false); //this is needed to avoid a crash when closing the dialog 92 | wasPreviewHidden = true; 93 | } else { 94 | wasPreviewHidden = false; 95 | } 96 | } 97 | 98 | public void resume() { 99 | if(wasPreviewHidden){ //if the preview was hidden before the onStop, 100 | //then lets hide it again after the Dialog is resumed 101 | hidePreview(); 102 | } 103 | } 104 | 105 | @Override 106 | public boolean onKeyDown (int keyCode, KeyEvent event){ 107 | super.onKeyDown(keyCode, event); 108 | 109 | if(keyCode == KeyEvent.KEYCODE_BACK){ 110 | hidePreview(); 111 | } 112 | 113 | return true; 114 | } 115 | } -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/Codec.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | 4 | import android.preference.ListPreference; 5 | 6 | /** 7 | * Represents the basic interface to the Codec classes All codecs need 8 | * to implement basic encode and decode capability Codecs which 9 | * inherit from {@link CodecBase} only need to implement encode, 10 | * decode and init 11 | */ 12 | public interface Codec { 13 | /** 14 | * Decode a linear pcm audio stream 15 | * 16 | * @param encoded The encoded audio stream 17 | * 18 | * @param lin The linear pcm audio frame buffer in which to place the decoded stream 19 | * 20 | * @param size The size of the encoded frame 21 | * 22 | * @returns The size of the decoded frame 23 | */ 24 | int decode(byte encoded[], short lin[], int size); 25 | 26 | /** 27 | * Encode a linear pcm audio stream 28 | * 29 | * @param lin The linear stream to encode 30 | * 31 | * @param offset The offset into the linear stream to begin 32 | * 33 | * @param encoded The buffer to place the encoded stream 34 | * 35 | * @param size the size of the linear pcm stream (in words) 36 | * 37 | * @returns the length (in bytes) of the 
encoded stream 38 | */ 39 | int encode(short lin[], int offset, byte alaw[], int frames); 40 | 41 | /** 42 | * The sampling rate for this particular codec 43 | */ 44 | int samp_rate(); 45 | 46 | /** 47 | * The audio frame size for this particular codec 48 | */ 49 | int frame_size(); 50 | 51 | /** 52 | * Optionally used to initiallize the codec before any 53 | * encoding or decoding 54 | */ 55 | void init(); 56 | void update(); 57 | 58 | /** 59 | * Optionally used to free any resources allocated in init 60 | * after encoding or decoding is complete 61 | */ 62 | void close(); 63 | 64 | /** 65 | * (implemented by {@link CodecBase} 66 | * 67 | * checks to see if the user has enabled the codec. 68 | * 69 | * @returns true if the codec can be used 70 | */ 71 | boolean isEnabled(); 72 | 73 | /** 74 | * (implemented by {@link CodecBase} 75 | * 76 | * Checks to see if the binary library associated with the 77 | * codec (if any) loaded OK. 78 | * 79 | * @returns true if the codec loaded properly 80 | */ 81 | boolean isLoaded(); 82 | boolean isFailed(); 83 | void fail(); 84 | boolean isValid(); 85 | 86 | /** 87 | * (implemented by {@link CodecBase} 88 | * 89 | * @returns The user friendly string for the codec (should 90 | * include both the name and the bandwidth 91 | */ 92 | String getTitle(); 93 | 94 | 95 | /** 96 | * (implemented by {@link CodecBase} 97 | * 98 | * @returns The RTP assigned name string for the codec 99 | */ 100 | String name(); 101 | String key(); 102 | String getValue(); 103 | 104 | /** 105 | * (implemented by {@link CodecBase} 106 | * 107 | * @returns The commonly used name for the codec. 
108 | */ 109 | String userName(); 110 | 111 | /** 112 | * (implemented by {@link CodecBase} 113 | * 114 | * @returns The RTP assigned number for the codec 115 | */ 116 | int number(); 117 | 118 | /** 119 | * (implemented by {@link CodecBase} 120 | * 121 | * @param l The list preference controlling this Codec 122 | * 123 | * Used to add listeners for preference changes and update 124 | * the codec parameters accordingly. 125 | */ 126 | void setListPreference(ListPreference l); 127 | } -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/CodecBase.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | import android.preference.ListPreference; 4 | import android.preference.Preference; 5 | import android.telephony.TelephonyManager; 6 | 7 | class CodecBase implements Preference.OnPreferenceChangeListener { 8 | protected String CODEC_NAME; 9 | protected String CODEC_USER_NAME; 10 | protected int CODEC_NUMBER; 11 | protected int CODEC_SAMPLE_RATE=16000; // default for most narrow band codecs 12 | protected int CODEC_FRAME_SIZE=320; // default for most narrow band codecs 13 | protected String CODEC_DESCRIPTION; 14 | protected String CODEC_DEFAULT_SETTING = "never"; 15 | 16 | private boolean loaded = false,failed = false; 17 | private boolean enabled = false; 18 | private boolean wlanOnly = false,wlanOr3GOnly = false; 19 | private String value; 20 | 21 | public void update() { 22 | 23 | } 24 | 25 | public String getValue() { 26 | return value; 27 | } 28 | 29 | void load() { 30 | update(); 31 | loaded = true; 32 | } 33 | 34 | public int samp_rate() { 35 | return CODEC_SAMPLE_RATE; 36 | } 37 | 38 | public int frame_size() { 39 | return CODEC_FRAME_SIZE; 40 | } 41 | 42 | public boolean isLoaded() { 43 | return loaded; 44 | } 45 | 46 | public boolean isFailed() { 47 | return failed; 48 | } 49 | 50 | public void fail() { 51 
| update(); 52 | failed = true; 53 | } 54 | 55 | public void enable(boolean e) { 56 | enabled = e; 57 | } 58 | 59 | public boolean isEnabled() { 60 | return enabled; 61 | } 62 | 63 | TelephonyManager tm; 64 | int nt; 65 | 66 | public boolean isValid() { 67 | if (!isEnabled()) 68 | return false; 69 | 70 | if (wlanOr3GOnly() && nt < TelephonyManager.NETWORK_TYPE_UMTS) 71 | return false; 72 | // \TODO this test is True on Android 3.1 (specifically on Galaxy Tab 73 | // 10.1 3G), which means that the codecs will be classified as invalid, 74 | // which means that the sound won't work at all 75 | // if (nt < TelephonyManager.NETWORK_TYPE_EDGE) 76 | // return false; 77 | return true; 78 | } 79 | 80 | private boolean wlanOnly() { 81 | return enabled && wlanOnly; 82 | } 83 | 84 | private boolean wlanOr3GOnly() { 85 | return enabled && wlanOr3GOnly; 86 | } 87 | 88 | public String name() { 89 | return CODEC_NAME; 90 | } 91 | 92 | public String key() { 93 | return CODEC_NAME+"_new"; 94 | } 95 | 96 | public String userName() { 97 | return CODEC_USER_NAME; 98 | } 99 | 100 | public String getTitle() { 101 | return CODEC_NAME + " (" + CODEC_DESCRIPTION + ")"; 102 | } 103 | 104 | public int number() { 105 | return CODEC_NUMBER; 106 | } 107 | 108 | public void setListPreference(ListPreference l) { 109 | l.setOnPreferenceChangeListener(this); 110 | l.setValue(value); 111 | } 112 | 113 | public boolean onPreferenceChange(Preference p, Object newValue) { 114 | ListPreference l = (ListPreference)p; 115 | value = (String)newValue; 116 | 117 | updateFlags(value); 118 | 119 | l.setValue(value); 120 | l.setSummary(l.getEntry()); 121 | 122 | return true; 123 | } 124 | 125 | private void updateFlags(String v) { 126 | 127 | if (v.equals("never")) { 128 | enabled = false; 129 | } else { 130 | enabled = true; 131 | if (v.equals("wlan")) 132 | wlanOnly = true; 133 | else 134 | wlanOnly = false; 135 | if (v.equals("wlanor3g")) 136 | wlanOr3GOnly = true; 137 | else 138 | wlanOr3GOnly = false; 139 | 
} 140 | } 141 | 142 | public String toString() { 143 | return "CODEC{ " + CODEC_NUMBER + ": " + getTitle() + "}"; 144 | } 145 | } -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/MainActivity.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | 4 | import org.apache.log4j.BasicConfigurator; 5 | 6 | import com.flazr.rtmp.client.ClientOptions; 7 | 8 | import android.os.Bundle; 9 | import android.os.StrictMode; 10 | import android.app.Activity; 11 | import android.view.Menu; 12 | 13 | public class MainActivity extends Activity { 14 | 15 | private Object[] args={"Test1","18.9750/72.8258","AB","1","Female","info",""}; 16 | @Override 17 | protected void onCreate(Bundle savedInstanceState) { 18 | super.onCreate(savedInstanceState); 19 | setContentView(R.layout.video_capture); 20 | BasicConfigurator.configure(); 21 | StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build(); 22 | StrictMode.setThreadPolicy(policy); 23 | ClientOptions options=new ClientOptions(); 24 | options.setHost("10.129.200.81"); 25 | options.setAppName("HariPanTest3"); 26 | options.setStreamName("Test1"); 27 | options.setArgs(args); 28 | options.publishLive(); 29 | 30 | VideoDialog mVideoDialog = new VideoDialog(this, "1", "1", "Test1", 1, options); 31 | mVideoDialog.show(); 32 | /* 33 | VideoCapture mVideoCapture = (VideoCapture) findViewById(R.id.video_capture); 34 | mVideoCapture.startCapture();*/ 35 | 36 | 37 | } 38 | 39 | @Override 40 | public boolean onCreateOptionsMenu(Menu menu) { 41 | // Inflate the menu; this adds items to the action bar if it is present. 
42 | getMenuInflater().inflate(R.menu.activity_main, menu); 43 | return true; 44 | } 45 | 46 | } 47 | -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/MainRtmpConnection.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | import org.jboss.netty.channel.Channel; 4 | import org.jboss.netty.channel.ChannelHandlerContext; 5 | import org.jboss.netty.channel.ChannelPipeline; 6 | import org.jboss.netty.channel.ChannelPipelineFactory; 7 | import org.jboss.netty.channel.ChannelStateEvent; 8 | import org.jboss.netty.channel.Channels; 9 | import org.jboss.netty.channel.MessageEvent; 10 | import org.slf4j.Logger; 11 | import org.slf4j.LoggerFactory; 12 | 13 | import android.util.Log; 14 | 15 | import com.flazr.rtmp.RtmpDecoder; 16 | import com.flazr.rtmp.RtmpEncoder; 17 | import com.flazr.rtmp.RtmpMessage; 18 | import com.flazr.rtmp.client.ClientHandshakeHandler; 19 | import com.flazr.rtmp.client.ClientOptions; 20 | import com.flazr.rtmp.message.Command; 21 | import com.flazr.rtmp.message.Control; 22 | 23 | public class MainRtmpConnection extends RtmpConnection { 24 | 25 | private static final Logger log = LoggerFactory.getLogger(MainRtmpConnection.class); 26 | private boolean connected = false; 27 | public MainRtmpConnection(ClientOptions options, BigBlueButtonClient context) { 28 | super(options, context); 29 | // TODO Auto-generated constructor stub 30 | } 31 | 32 | @Override 33 | protected ChannelPipelineFactory pipelineFactory() { 34 | // TODO Auto-generated method stub 35 | return new ChannelPipelineFactory() { 36 | @Override 37 | public ChannelPipeline getPipeline() throws Exception { 38 | final ChannelPipeline pipeline = Channels.pipeline(); 39 | pipeline.addLast("handshaker", new ClientHandshakeHandler(options)); 40 | pipeline.addLast("decoder", new RtmpDecoder()); 41 | pipeline.addLast("encoder", new 
RtmpEncoder()); 42 | pipeline.addLast("handler", MainRtmpConnection.this); 43 | return pipeline; 44 | } 45 | }; 46 | } 47 | 48 | @Override 49 | public void channelConnected(ChannelHandlerContext ctx, ChannelStateEvent e) { 50 | 51 | // * https://github.com/bigbluebutton/bigbluebutton/blob/master/bigbluebutton-client/src/org/bigbluebutton/main/model/users/NetConnectionDelegate.as#L102 52 | // * _netConnection.connect(?); 53 | 54 | Log.e("channel connection","success"); 55 | writeCommandExpectingResult(e.getChannel(), Command.connect(options)); 56 | } 57 | 58 | @Override 59 | public void channelDisconnected(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception { 60 | super.channelDisconnected(ctx, e); 61 | log.debug("Rtmp Channel Disconnected"); 62 | 63 | connected = false; 64 | } 65 | 66 | @Override 67 | public void messageReceived(ChannelHandlerContext ctx, MessageEvent me) { 68 | final Channel channel = me.getChannel(); 69 | final RtmpMessage message = (RtmpMessage) me.getMessage(); 70 | Log.e("","<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<,"); 71 | switch(message.getHeader().getMessageType()) { 72 | case CONTROL: 73 | Control control = (Control) message; 74 | switch(control.getType()) { 75 | case PING_REQUEST: 76 | final int time = control.getTime(); 77 | Control pong = Control.pingResponse(time); 78 | channel.write(pong); 79 | break; 80 | } 81 | break; 82 | 83 | case COMMAND_AMF0: 84 | case COMMAND_AMF3: 85 | Command command = (Command) message; 86 | String name = command.getName(); 87 | log.debug("server command: {}", name); 88 | break; 89 | 90 | case SHARED_OBJECT_AMF0: 91 | case SHARED_OBJECT_AMF3: 92 | //onSharedObject(channel, (SharedObjectMessage) message); 93 | Log.d("object", "shared"); 94 | break; 95 | default: 96 | log.info("ignoring rtmp message: {}", message); 97 | break; 98 | } 99 | } 100 | 101 | public boolean isConnected() { 102 | return connected; 103 | } 104 | } 
-------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/NativeLibsLoader.java: --------------------------------------------------------------------------------
package org.mconf.android.core.video;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Loads the native (JNI) libraries required for video capture and playback.
 *
 * <p>Libraries are loaded with absolute paths from the application's private
 * lib directory, in dependency order (ffmpeg core libraries first, then the
 * project-specific wrappers). Each set is loaded at most once per process;
 * the guard flags turn repeated calls into cheap no-ops. The public methods
 * are synchronized so concurrent first calls cannot race on the guards.
 */
public class NativeLibsLoader {
	private static final Logger log = LoggerFactory.getLogger(NativeLibsLoader.class);

	// Guard flags: each library set is loaded at most once per process.
	private static boolean captureLibsLoaded = false;
	private static boolean capturePlaybackLoaded = false;

	/**
	 * Loads the given native libraries, in order, from the application's
	 * private lib directory.
	 *
	 * <p>The order of {@code libs} matters: each library must appear after
	 * the libraries it links against.
	 *
	 * @param packageName the application package name, used to build the path
	 * @param libs library file names, in dependency order
	 * @throws SecurityException if a security manager denies loading
	 * @throws UnsatisfiedLinkError if a library is missing or incompatible
	 */
	private static void loadAll(String packageName, String... libs) {
		// NOTE(review): hard-coded "/data/data/<pkg>/lib/" works on the API
		// levels this app targets, but ApplicationInfo.nativeLibraryDir
		// would be the robust way to locate the directory — needs a Context.
		String path = "/data/data/" + packageName + "/lib/";
		for (String lib : libs) {
			System.load(path + lib);
		}
	}

	/**
	 * Loads the native libraries needed for the video capture (encode) path.
	 * Subsequent calls are no-ops.
	 *
	 * @param packageName the application package name
	 * @throws SecurityException if a security manager denies loading
	 */
	public static synchronized void loadCaptureLibs(String packageName) throws SecurityException {
		if (captureLibsLoaded)
			return;

		loadAll(packageName,
				"libavutil.so", "libswscale.so", "libavcodec.so", "libavformat.so",
				"libthread.so", "libcommon.so", "libqueue.so",
				"libencode.so", "libmconfnativeencodevideo.so");

		log.debug("Native capture libraries loaded");
		captureLibsLoaded = true;
	}

	/**
	 * Loads the native libraries needed for the video playback (decode) path,
	 * including the Speex JNI codec. Subsequent calls are no-ops.
	 *
	 * @param packageName the application package name
	 * @throws SecurityException if a security manager denies loading
	 */
	public static synchronized void loadPlaybackLibs(String packageName) throws SecurityException {
		if (capturePlaybackLoaded)
			return;

		// Same core set as capture minus libavformat, plus the decode and
		// speex_jni libraries.
		loadAll(packageName,
				"libavutil.so", "libswscale.so", "libavcodec.so",
				"libthread.so", "libcommon.so", "libqueue.so",
				"libdecode.so", "libspeex_jni.so", "libmconfnativeshowvideo.so");

		log.debug("Native playback libraries loaded");
		capturePlaybackLoaded = true;
	}
}
-------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/RtmpAudioPlayer.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | import org.slf4j.Logger; 4 | import org.slf4j.LoggerFactory; 5 | 6 | import android.media.AudioFormat; 7 | import android.media.AudioManager; 8 | import android.media.AudioTrack; 9 | 10 | import com.flazr.rtmp.message.Audio; 11 | 12 | public class RtmpAudioPlayer { 13 | private static final Logger log = LoggerFactory.getLogger(RtmpAudioPlayer.class); 14 | /** Size of the read buffer */ 15 | public static final int BUFFER_SIZE = 1024; 16 | 17 | private Codec codec = new Speex(); 18 | private AudioTrack audioTrack; 19 | private int mu, maxjitter; 20 | private boolean running = false; 21 | private short[] decodedBuffer = new short[BUFFER_SIZE]; 22 | private byte[] pktBuffer = new byte[BUFFER_SIZE + 12]; 23 | // private RtpPacket pkt = new RtpPacket(pktBuffer, 0); 24 | 25 | public void start() { 26 | codec.init(); 27 | mu = codec.samp_rate()/8000; 28 | maxjitter = AudioTrack.getMinBufferSize(codec.samp_rate(), 29 | AudioFormat.CHANNEL_CONFIGURATION_MONO, 30 | AudioFormat.ENCODING_PCM_16BIT); 31 | if (maxjitter < 2*2*BUFFER_SIZE*3*mu) 32 | maxjitter = 2*2*BUFFER_SIZE*3*mu; 33 | audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, codec.samp_rate(), AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, 34 | maxjitter, AudioTrack.MODE_STREAM); 35 | // AudioManager am = (AudioManager) Receiver.mContext.getSystemService(Context.AUDIO_SERVICE); 36 | // if (Integer.parseInt(Build.VERSION.SDK) >= 5) 37 | // am.setSpeakerphoneOn(true); 38 | // else 39 | // am.setMode(AudioManager.MODE_NORMAL); 40 | //audioTrack.play(); 41 | 42 | running = true; 43 | } 44 | 45 | private void write(short a[],int b,int c) { 46 | synchronized (this) { 47 | audioTrack.write(a,b,c); 48 | } 49 | } 50 | 51 
| public void stop() { 52 | log.debug("rtmp player stop."); 53 | 54 | running = false; 55 | codec.close(); 56 | 57 | if(audioTrack != null) { 58 | log.debug("Releasing audio track resources"); 59 | 60 | if(audioTrack.getState() == AudioTrack.STATE_INITIALIZED) 61 | audioTrack.stop(); 62 | 63 | audioTrack.release(); 64 | audioTrack = null; 65 | } 66 | } 67 | 68 | public void onAudio(Audio audio) { 69 | if (running) { 70 | byte[] audioData = audio.getByteArray(); 71 | 72 | int offset = 1; 73 | 74 | // byte[] tmpBuffer = new byte[audioData.length - offset]; 75 | // System.arraycopy(audioData, offset, tmpBuffer, 0, tmpBuffer.length); 76 | // pkt.setPayload(tmpBuffer, tmpBuffer.length); 77 | // int decodedSize = codec.decode(pktBuffer, decodedBuffer, pkt.getPayloadLength()); 78 | 79 | System.arraycopy(audioData, offset, pktBuffer, 12, audioData.length - offset); 80 | int decodedSize = codec.decode(pktBuffer, decodedBuffer, audioData.length - offset); 81 | 82 | write(decodedBuffer, 0, decodedSize); 83 | 84 | audioTrack.play(); 85 | } 86 | } 87 | } -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/RtmpConnection.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | import java.net.InetSocketAddress; 4 | import java.util.Map; 5 | import java.util.concurrent.CountDownLatch; 6 | import java.util.concurrent.Executors; 7 | 8 | import org.jboss.netty.bootstrap.ClientBootstrap; 9 | import org.jboss.netty.channel.Channel; 10 | import org.jboss.netty.channel.ChannelFactory; 11 | import org.jboss.netty.channel.ChannelFuture; 12 | import org.jboss.netty.channel.ChannelFutureListener; 13 | import org.jboss.netty.channel.ChannelHandlerContext; 14 | import org.jboss.netty.channel.ChannelPipelineFactory; 15 | import org.jboss.netty.channel.ExceptionEvent; 16 | import 
org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory; 17 | import org.slf4j.Logger; 18 | import org.slf4j.LoggerFactory; 19 | 20 | import android.util.Log; 21 | 22 | import com.flazr.rtmp.client.ClientHandler; 23 | import com.flazr.rtmp.client.ClientOptions; 24 | import com.flazr.rtmp.message.ChunkSize; 25 | import com.flazr.rtmp.message.Command; 26 | import com.flazr.rtmp.message.CommandAmf0; 27 | import com.flazr.rtmp.message.Control; 28 | 29 | public abstract class RtmpConnection extends ClientHandler implements ChannelFutureListener { 30 | 31 | private static final Logger log = LoggerFactory.getLogger(RtmpConnection.class); 32 | 33 | final protected BigBlueButtonClient context; 34 | 35 | public RtmpConnection(ClientOptions options, BigBlueButtonClient context) { 36 | super(options); 37 | Log.e("",options.toString()); 38 | this.context = context; 39 | // TODO Auto-generated constructor stub 40 | } 41 | 42 | private ClientBootstrap bootstrap = null; 43 | private ChannelFuture future = null; 44 | private ChannelFactory factory = null; 45 | 46 | @Override 47 | public void operationComplete(ChannelFuture arg0) throws Exception { 48 | // TODO Auto-generated method stub 49 | 50 | if (future.isSuccess()){ 51 | Log.e("", "jjjjjjjjjjjjjjjjjjjj"); 52 | 53 | onConnectedSuccessfully(); 54 | } 55 | else{ 56 | Log.e("", "wwwweeeeeeeeeeeeeee"); 57 | onConnectedUnsuccessfully(); 58 | } 59 | } 60 | 61 | private void onConnectedUnsuccessfully() { 62 | // TODO Auto-generated method stub 63 | 64 | } 65 | 66 | protected void onConnectedSuccessfully() { 67 | // TODO Auto-generated method stub 68 | 69 | } 70 | 71 | public boolean connect() { 72 | if(factory == null) 73 | factory = new NioClientSocketChannelFactory(Executors.newCachedThreadPool(), Executors.newCachedThreadPool()); 74 | bootstrap = new ClientBootstrap(factory); 75 | bootstrap.setPipelineFactory(pipelineFactory()); 76 | future = bootstrap.connect(new InetSocketAddress(options.getHost(),options.getPort())); 77 
| future.addListener(this); 78 | 79 | //Log.e("a>>>>>>>>>>>>>>>",future.getChannel().getRemoteAddress().toString()); 80 | return true; 81 | } 82 | 83 | abstract protected ChannelPipelineFactory pipelineFactory(); 84 | 85 | public void disconnect() { 86 | if (future != null) { 87 | if (future.getChannel().isConnected()) { 88 | log.debug("Channel is connected, disconnecting"); 89 | //future.getChannel().close(); //ClosedChannelException 90 | future.getChannel().disconnect(); 91 | future.getChannel().getCloseFuture().awaitUninterruptibly(); 92 | } 93 | future.removeListener(this); 94 | factory.releaseExternalResources(); 95 | future = null; factory = null; bootstrap = null; 96 | } 97 | } 98 | @Override 99 | public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) { 100 | String exceptionMessage = e.getCause().getMessage(); 101 | if (exceptionMessage != null && exceptionMessage.contains("ArrayIndexOutOfBoundsException") && exceptionMessage.contains("bad value / byte: 101 (hex: 65)")) { 102 | Log.e("","wwwwwwwwwwwwwww"); 103 | log.debug("Ignoring malformed metadata"); 104 | return; 105 | } else { 106 | Log.e("",""+exceptionMessage); 107 | super.exceptionCaught(ctx, e); 108 | } 109 | } 110 | public void doGetMyUserId(Channel channel) { 111 | Command command = new CommandAmf0("updateStreamn", null, "Test1","AV"); 112 | Log.e("","updateStreamn"); 113 | writeCommandExpectingResult(channel, command); 114 | } 115 | 116 | @Override 117 | protected void onCommandStatus(Channel channel, Command command, 118 | Map args) { 119 | final String code = (String) args.get("code"); 120 | final String level = (String) args.get("level"); 121 | final String description = (String) args.get("description"); 122 | final String application = (String) args.get("application"); 123 | final String messageStr = level + " onStatus message, code: " + code + ", description: " + description + ", application: " + application; 124 | 125 | // 
http://help.adobe.com/en_US/FlashPlatform/reference/actionscript/3/flash/events/NetStatusEvent.html 126 | if (level.equals("status")) { 127 | logger.info(messageStr); 128 | if (code.equals("NetStream.Publish.Start") 129 | && publisher != null && !publisher.isStarted()) { 130 | logger.debug("starting the publisher after NetStream.Publish.Start"); 131 | publisher.start(channel, options.getStart(), options.getLength(), new ChunkSize(4096)); 132 | if(publisher.isStarted()) 133 | {doGetMyUserId(channel);} 134 | } else if (code.equals("NetStream.Unpublish.Success") 135 | && publisher != null) { 136 | logger.info("unpublish success, closing channel"); 137 | ChannelFuture future = channel.write(Command.closeStream(streamId)); 138 | future.addListener(ChannelFutureListener.CLOSE); 139 | } else if (code.equals("NetStream.Play.Stop")) { 140 | channel.close(); 141 | } 142 | } else if (level.equals("warning")) { 143 | logger.warn(messageStr); 144 | if (code.equals("NetStream.Play.InsufficientBW")) { 145 | ChannelFuture future = channel.write(Command.closeStream(streamId)); 146 | future.addListener(ChannelFutureListener.CLOSE); 147 | // \TODO create a listener for insufficient bandwidth 148 | } 149 | } else if (level.equals("error")) { 150 | logger.error(messageStr); 151 | channel.close(); 152 | } 153 | } 154 | 155 | @Override 156 | protected void onControl(Channel channel, Control control) { 157 | if (control.getType() != Control.Type.PING_REQUEST) 158 | logger.debug("control: {}", control); 159 | switch(control.getType()) { 160 | case PING_REQUEST: 161 | final int time = control.getTime(); 162 | Control pong = Control.pingResponse(time); 163 | // we don't want to print two boring messages every second 164 | // logger.debug("server ping: {}", time); 165 | // logger.debug("sending ping response: {}", pong); 166 | if (channel.isWritable()) 167 | channel.write(pong); 168 | break; 169 | case SWFV_REQUEST: 170 | if(swfvBytes == null) { 171 | logger.warn("swf verification not 
initialized!" 172 | + " not sending response, server likely to stop responding / disconnect"); 173 | } else { 174 | Control swfv = Control.swfvResponse(swfvBytes); 175 | logger.info("sending swf verification response: {}", swfv); 176 | channel.write(swfv); 177 | } 178 | break; 179 | case STREAM_BEGIN: 180 | if(publisher != null && !publisher.isStarted()) { 181 | publisher.start(channel, options.getStart(), 182 | options.getLength(), new ChunkSize(4096)); 183 | return; 184 | } 185 | //if(streamId !=0) { 186 | //channel.write(Control.setBuffer(streamId, options.getBuffer())); 187 | channel.write(Control.setBuffer(1, options.getBuffer())); 188 | 189 | //} 190 | break; 191 | default: 192 | logger.debug("ignoring control message: {}", control); 193 | } 194 | } 195 | } -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/Speex.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | import android.util.Log; 4 | 5 | 6 | public class Speex extends CodecBase implements Codec { 7 | 8 | /* quality 9 | * 1 : 4kbps (very noticeable artifacts, usually intelligible) 10 | * 2 : 6kbps (very noticeable artifacts, good intelligibility) 11 | * 4 : 8kbps (noticeable artifacts sometimes) 12 | * 6 : 11kpbs (artifacts usually only noticeable with headphones) 13 | * 8 : 15kbps (artifacts not usually noticeable) 14 | */ 15 | private static final int DEFAULT_COMPRESSION = 6; 16 | 17 | public Speex() { 18 | CODEC_NAME = "speex"; 19 | CODEC_USER_NAME = "speex"; 20 | CODEC_DESCRIPTION = "20.6kbit"; 21 | CODEC_NUMBER = 97; 22 | CODEC_DEFAULT_SETTING = "always"; 23 | super.update(); 24 | } 25 | 26 | void load() { 27 | try { 28 | System.loadLibrary("speex_jni"); 29 | super.load(); 30 | } catch (Throwable e) { 31 | Log.e("","nolib"); 32 | } 33 | 34 | } 35 | 36 | public native int open(int compression); 37 | public native int decode(byte 
encoded[], short lin[], int size); 38 | public native int encode(short lin[], int offset, byte encoded[], int size); 39 | public native void close(); 40 | 41 | public void init() { 42 | load(); 43 | if (isLoaded()) 44 | open(DEFAULT_COMPRESSION); 45 | } 46 | } -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/VideoCapture.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | import java.io.IOException; 4 | import java.lang.reflect.Method; 5 | import java.util.List; 6 | 7 | import org.slf4j.Logger; 8 | import org.slf4j.LoggerFactory; 9 | 10 | import com.flazr.rtmp.client.ClientOptions; 11 | 12 | import android.content.Context; 13 | import android.content.SharedPreferences; 14 | import android.graphics.ImageFormat; 15 | import android.graphics.PixelFormat; 16 | import android.hardware.Camera; 17 | import android.hardware.Camera.CameraInfo; 18 | import android.os.Build; 19 | import android.preference.PreferenceManager; 20 | import android.util.AttributeSet; 21 | import android.util.Log; 22 | import android.view.SurfaceHolder; 23 | import android.view.SurfaceView; 24 | import android.view.ViewGroup.LayoutParams; 25 | 26 | public class VideoCapture extends SurfaceView implements SurfaceHolder.Callback,Camera.PreviewCallback { 27 | 28 | private static final Logger log = LoggerFactory.getLogger(VideoCapture.class); 29 | 30 | private Context context; 31 | 32 | private SurfaceHolder mHolder; 33 | 34 | private ClientOptions options; 35 | 36 | private VideoPublish mVideoPublish = null; 37 | 38 | private Method mAcb; // method for adding a pre-allocated buffer 39 | private Object[] mArglist; // list of arguments 40 | 41 | private boolean isSurfaceCreated = false; // true when: surface is created AND mVideoPublish is correctly set 42 | // false when: surface is destroyed 43 | 44 | private boolean fakeDestroyed = false; 
// When there are 2 active preview surfaces on the same activity 45 | // (for example: the normal surface and a dialog surface) 46 | // we need to use this boolean to manage the conflict 47 | // true when: the preview is being shown on a Dialog 48 | // false otherwise 49 | 50 | private boolean usingFaster, usingHidden; 51 | public VideoCapture(Context context, AttributeSet attrs) { 52 | super(context, attrs); 53 | 54 | this.context = context; 55 | 56 | // Install a SurfaceHolder.Callback so we get notified when the 57 | // underlying surface is created and destroyed. 58 | mHolder = getHolder(); 59 | mHolder.addCallback(this); 60 | mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); 61 | } 62 | 63 | private int getPublisher(){ 64 | mVideoPublish = ((BigBlueButton) getContext().getApplicationContext()).getVideoPublish(); 65 | if(mVideoPublish == null){ 66 | log.debug("Error: could not get or instantiate a VideoPublisher"); 67 | return CaptureConstants.E_COULD_NOT_GET_PUBLISHER; 68 | } 69 | return CaptureConstants.E_OK; 70 | } 71 | 72 | public boolean isCapturing() { // returns true if the capture is running or is paused 73 | // returns false if the capture is stopped or is in an error state 74 | if(mVideoPublish != null && 75 | (mVideoPublish.state == CaptureConstants.RESUMED || 76 | mVideoPublish.state == CaptureConstants.PAUSED)){ 77 | return true; 78 | } 79 | return false; 80 | } 81 | 82 | // Centers the preview on the screen keeping the capture aspect ratio. 83 | // Remember to call this function after you change the width or height if 84 | // you want to keep the aspect and the video centered 85 | // This function is useful for displaying the preview centered on fullscreen on an Activity 86 | // or centered on a Dialog, for example. 
If that is not the case, then it may be better to use 87 | // the VideoCaptureLayout class to handle the video preview position instead 88 | public int centerPreview() { 89 | if(mVideoPublish != null){ 90 | VideoCentering mVideoCentering = new VideoCentering(); 91 | mVideoCentering.setAspectRatio(mVideoPublish.getWidth()/(float)mVideoPublish.getHeight()); 92 | LayoutParams layoutParams = mVideoCentering.getVideoLayoutParams(mVideoCentering.getDisplayMetrics(this.getContext(),40), this.getLayoutParams()); 93 | setLayoutParams(layoutParams); 94 | return CaptureConstants.E_OK; 95 | } else { 96 | log.debug("Error: could not center screen"); 97 | return CaptureConstants.E_COULD_NOT_CENTER; 98 | } 99 | } 100 | 101 | // This function is useful for hidden the preview that was being shown on fullscreen on an Activity 102 | // or centered on a Dialog, for example. If that is not the case, then it may be better to use 103 | // the VideoCaptureLayout class to handle the video preview hidding instead 104 | public void hidePreview() { 105 | VideoCentering mVideoCentering = new VideoCentering(); 106 | LayoutParams layoutParams = mVideoCentering.hidePreview(this.getLayoutParams()); 107 | setLayoutParams(layoutParams); 108 | } 109 | 110 | private boolean isAvailableSprintFFC() 111 | { 112 | try { 113 | Class.forName("android.hardware.HtcFrontFacingCamera"); 114 | return true; 115 | } catch (Exception ex) { 116 | return false; 117 | } 118 | } 119 | 120 | private int openCameraNormalWay(){ 121 | if(mVideoPublish.mCamera != null){ 122 | mVideoPublish.mCamera.release(); 123 | mVideoPublish.mCamera = null; 124 | } 125 | 126 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) { 127 | int firstFrontCamera = -1; 128 | int firstBackCamera = -1; 129 | for (int i = 0; i < Camera.getNumberOfCameras(); ++i) { 130 | CameraInfo cameraInfo = new CameraInfo(); 131 | Camera.getCameraInfo(i, cameraInfo); 132 | switch (cameraInfo.facing) { 133 | case CameraInfo.CAMERA_FACING_FRONT: 134 | 
if (firstFrontCamera == -1) firstFrontCamera = i; break; 135 | case CameraInfo.CAMERA_FACING_BACK: 136 | if (firstBackCamera == -1) firstBackCamera = i; break; 137 | } 138 | } 139 | 140 | if (firstFrontCamera != -1) { 141 | mVideoPublish.cameraId = firstFrontCamera; 142 | } else if (firstBackCamera != -1) { 143 | mVideoPublish.cameraId = firstBackCamera; 144 | } else { 145 | return CaptureConstants.E_COULD_NOT_OPEN_CAMERA; 146 | } 147 | mVideoPublish.mCamera = Camera.open(mVideoPublish.cameraId); 148 | if (mVideoPublish.mCamera == null) 149 | return CaptureConstants.E_COULD_NOT_OPEN_CAMERA; 150 | } else { 151 | mVideoPublish.mCamera = Camera.open(); 152 | if (mVideoPublish.mCamera == null) 153 | return CaptureConstants.E_COULD_NOT_OPEN_CAMERA; 154 | 155 | Camera.Parameters parameters = mVideoPublish.mCamera.getParameters(); 156 | parameters.set("camera-id", 2); // this command sets the front facing camera to be used 157 | // (if the device has one). Else, it sets the default camera. 158 | mVideoPublish.mCamera.setParameters(parameters); 159 | } 160 | return CaptureConstants.E_OK; 161 | } 162 | 163 | private int openCamera(){ 164 | int err = CaptureConstants.E_OK; 165 | 166 | if (isAvailableSprintFFC()) { // this device has the specific HTC camera 167 | try { // try opening the specific HTC camera 168 | Method method = Class.forName("android.hardware.HtcFrontFacingCamera").getDeclaredMethod("getCamera", (Class[])null); 169 | mVideoPublish.mCamera = (Camera) method.invoke((Object)null, (Object)null); 170 | } catch (Exception ex) { // it was not possible to open the specifica HTC camera, 171 | // so, lets open the camera using the normal way 172 | log.debug(ex.toString()); 173 | 174 | err = openCameraNormalWay(); 175 | } 176 | } else { // this device does not have the specific HTC camera, 177 | // so, lets open the camera using the normal way 178 | err = openCameraNormalWay(); 179 | } 180 | 181 | return err; 182 | } 183 | 184 | private int setDisplay(){ 185 | 
if(mVideoPublish.mCamera != null){ 186 | try { 187 | mVideoPublish.mCamera.setPreviewDisplay(mHolder); 188 | } catch (IOException exception) { 189 | log.debug("Error: could not set preview display"); 190 | log.debug(exception.toString()); 191 | 192 | mVideoPublish.mCamera.release(); 193 | mVideoPublish.mCamera = null; 194 | 195 | return CaptureConstants.E_COULD_NOT_SET_PREVIEW_DISPLAY_R1; 196 | } 197 | } else { 198 | log.debug("Error: setDisplay() called without an opened camera"); 199 | return CaptureConstants.E_COULD_NOT_SET_PREVIEW_DISPLAY_R2; 200 | } 201 | 202 | return CaptureConstants.E_OK; 203 | } 204 | 205 | private int setParameters(){ 206 | if(mVideoPublish.mCamera != null){ 207 | Camera.Parameters parameters = mVideoPublish.mCamera.getParameters(); 208 | 209 | if (!parameters.getSupportedPreviewSizes().isEmpty()) { 210 | parameters.setPreviewSize( 211 | parameters.getSupportedPreviewSizes().get(0).width, 212 | parameters.getSupportedPreviewSizes().get(0).height); 213 | } 214 | if (Build.VERSION.SDK_INT > Build.VERSION_CODES.GINGERBREAD) { 215 | List fpsRange = parameters.getSupportedPreviewFpsRange(); 216 | if (fpsRange != null && !fpsRange.isEmpty()) 217 | parameters.setPreviewFpsRange( 218 | fpsRange.get(0)[Camera.Parameters.PREVIEW_FPS_MIN_INDEX], 219 | fpsRange.get(0)[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]); 220 | // parameters.set("orientation", "portrait"); 221 | } else { 222 | List fps = parameters.getSupportedPreviewFrameRates(); 223 | if (fps != null && !fps.isEmpty()) 224 | parameters.setPreviewFrameRate(fps.get(0)); 225 | } 226 | parameters.setPreviewFormat(ImageFormat.NV21); 227 | 228 | mVideoPublish.mCamera.setParameters(parameters); 229 | 230 | SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(getContext()); 231 | int rotation = Integer.parseInt(prefs.getString("preview_rotation", "0")); 232 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO) 233 | mVideoPublish.mCamera.setDisplayOrientation(rotation); 234 | 
else { 235 | // doesn't apply any rotation 236 | // \TODO apply the preferred rotation 237 | // parameters.setRotation(rotation); 238 | // parameters.set("rotation", rotation); 239 | } 240 | 241 | // setCameraDisplayOrientation((Activity) context, mVideoPublish.cameraId, mVideoPublish.mCamera); 242 | 243 | parameters = mVideoPublish.mCamera.getParameters(); 244 | 245 | mVideoPublish.setFramerate(parameters.getPreviewFrameRate()); 246 | mVideoPublish.setHeight(parameters.getPreviewSize().height); 247 | mVideoPublish.setWidth(parameters.getPreviewSize().width); 248 | 249 | log.debug("Using capture parameters: " + mVideoPublish.getWidth() + "x" + mVideoPublish.getHeight() + ", {} fps", mVideoPublish.getFramerate()); 250 | 251 | return CaptureConstants.E_OK; 252 | } else { 253 | log.debug("Error: setParameters() called without an opened camera"); 254 | return CaptureConstants.E_COULD_NOT_SET_PARAMETERS; 255 | } 256 | } 257 | 258 | private int getBufferSize(){ 259 | if(mVideoPublish.mCamera != null){ 260 | PixelFormat pixelFormat = new PixelFormat(); 261 | Camera.Parameters param = mVideoPublish.mCamera.getParameters(); 262 | PixelFormat.getPixelFormatInfo(param.getPreviewFormat(), pixelFormat); 263 | int buffersize = (param.getPreviewSize().width * param.getPreviewSize().height * pixelFormat.bitsPerPixel) / 8; 264 | return buffersize; 265 | } else { 266 | log.debug("Error: getBufferSize() called without an opened camera"); 267 | return CaptureConstants.E_COULD_NOT_GET_BUFSIZE; 268 | } 269 | } 270 | 271 | private void setCallbackBest(){ 272 | usingFaster = true; 273 | usingHidden = false; 274 | 275 | //we call addCallbackBuffer twice to reduce the "Out of buffers, clearing callback!" 
problem 276 | byte[] buffer = new byte[mVideoPublish.bufSize]; 277 | mVideoPublish.mCamera.addCallbackBuffer(buffer); 278 | buffer = new byte[mVideoPublish.bufSize]; 279 | mVideoPublish.mCamera.addCallbackBuffer(buffer); 280 | 281 | mVideoPublish.mCamera.setPreviewCallbackWithBuffer(this); 282 | 283 | log.debug("Using fast preview callback"); 284 | } 285 | 286 | private int setCallbackHidden(){ 287 | int err; 288 | 289 | usingFaster = true; 290 | usingHidden = true; 291 | 292 | //Must call this before calling addCallbackBuffer to get all the 293 | // reflection variables setup 294 | err = initForACB(); 295 | if(err != CaptureConstants.E_OK){ 296 | return err; 297 | } 298 | 299 | //we call addCallbackBuffer twice to reduce the "Out of buffers, clearing callback!" problem 300 | byte[] buffer = new byte[mVideoPublish.bufSize]; 301 | err = addCallbackBuffer_Android2p2(buffer); 302 | if(err != CaptureConstants.E_OK){ 303 | return err; 304 | } 305 | buffer = new byte[mVideoPublish.bufSize]; 306 | err = addCallbackBuffer_Android2p2(buffer); 307 | if(err != CaptureConstants.E_OK){ 308 | return err; 309 | } 310 | 311 | err = setPreviewCallbackWithBuffer_Android2p2(false); 312 | if(err != CaptureConstants.E_OK){ 313 | return err; 314 | } 315 | 316 | log.debug("Using fast but hidden preview callback"); 317 | return CaptureConstants.E_OK; 318 | } 319 | 320 | private void setCallbackSlow(){ 321 | usingFaster = false; 322 | usingHidden = false; 323 | 324 | mVideoPublish.mCamera.setPreviewCallback(this); 325 | 326 | log.debug("Using slow preview callback"); 327 | } 328 | 329 | private int prepareCallback(){ 330 | if(mVideoPublish.mCamera == null){ 331 | log.debug("Error: prepareCallback() called without an opened camera"); 332 | return CaptureConstants.E_COULD_NOT_PREPARE_CALLBACK_R1; 333 | } 334 | if(mVideoPublish.bufSize < CaptureConstants.E_OK || mVideoPublish.bufSize <= 0){ 335 | log.debug("Error: prepareCallback() called without a valid mVideoPublish.bufSize"); 336 | return 
CaptureConstants.E_COULD_NOT_PREPARE_CALLBACK_R2; 337 | } 338 | //java reflection (idea from http://code.google.com/p/android/issues/detail?id=2794): 339 | //This kind of java reflection is safe to be used as explained in the official android documentation 340 | //on (http://developer.android.com/resources/articles/backward-compatibility.html). 341 | //Explanation: The method setPreviewCallback exists since Android's API level 1. 342 | //An alternative method is the setPreviewCallbackWithBuffer, which exists since API level 8 (Android 2.2). 343 | //The setPreviewCallbackWithBuffer method is much better than the setPreviewCallback method 344 | //in terms of performance, because the onPreviewFrame method returns a copy of the frame 345 | //in a newly allocated memory when using the setPreviewCallback method, causing the 346 | //Garbage Collector to perform, which takes about 80-100ms. 347 | //Instead, when the setPreviewCallbackWithBuffer is used, the byte array is overwritten, 348 | //avoiding the GC to perform. 349 | //In mconf we want compatibility with API levels lower than 8. 350 | //The setPreviewCallbackWithBuffer method is implemented on a Debug class on API levels lower than 8. 351 | //In order to use it on API levels lower than 8, we need to use Java Reflection. 
352 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO) { //if(2.2 or higher){ 353 | setCallbackBest(); 354 | } else if(HiddenCallbackWithBuffer()) { //} else if(has the methods hidden){ 355 | if(setCallbackHidden() != CaptureConstants.E_OK){ 356 | setCallbackSlow(); 357 | } 358 | } else { 359 | setCallbackSlow(); 360 | } 361 | 362 | return CaptureConstants.E_OK; 363 | } 364 | 365 | private int beginPreview(){ 366 | if(mVideoPublish.mCamera != null){ 367 | log.debug("Preview starting"); 368 | mVideoPublish.mCamera.startPreview(); 369 | log.debug("Preview started"); 370 | return CaptureConstants.E_OK; 371 | } else { 372 | log.debug("Error: beginPreview() called without an opened camera"); 373 | return CaptureConstants.E_COULD_NOT_BEGIN_PREVIEW; 374 | } 375 | } 376 | 377 | private int initNativeSide(){ 378 | if(mVideoPublish.bufSize < CaptureConstants.E_OK || mVideoPublish.bufSize <= 0){ 379 | log.debug("Error: initNativeSide() called without a valid mVideoPublish.bufSize"); 380 | return CaptureConstants.E_COULD_NOT_INIT_NATIVE_SIDE; 381 | } 382 | mVideoPublish.initNativeEncoder(); 383 | return CaptureConstants.E_OK; 384 | } 385 | 386 | private int startPublisherThread(){ 387 | if(!mVideoPublish.nativeEncoderInitialized){ 388 | log.debug("Error: startPublisherThread() called but native capture side not initialized"); 389 | return CaptureConstants.E_COULD_NOT_START_PUBLISHER_THREAD_R1; 390 | } 391 | if(mVideoPublish.isAlive()){ 392 | log.debug("Error: startPublisherThread() called but publisher thread already running"); 393 | return CaptureConstants.E_COULD_NOT_START_PUBLISHER_THREAD_R2; 394 | } 395 | mVideoPublish.start(); 396 | return CaptureConstants.E_OK; 397 | } 398 | 399 | private int startPublisher(ClientOptions opt){ 400 | this.options=opt; 401 | if(!mVideoPublish.nativeEncoderInitialized){ 402 | log.debug("Error: startPublisher() called but native capture side not initialized"); 403 | return CaptureConstants.E_COULD_NOT_START_PUBLISHER_R1; 404 | } 405 | 
if(!mVideoPublish.isAlive()){ 406 | log.debug("Error: startPublisher() called but publisher thread not running"); 407 | return CaptureConstants.E_COULD_NOT_START_PUBLISHER_R2; 408 | } 409 | mVideoPublish.startPublisher(opt); 410 | return CaptureConstants.E_OK; 411 | } 412 | 413 | private void clearCallbackBest(){ 414 | mVideoPublish.mCamera.setPreviewCallbackWithBuffer(null); 415 | } 416 | 417 | private int clearCallbackHidden(){ 418 | int err; 419 | 420 | err = setPreviewCallbackWithBuffer_Android2p2(true); 421 | if(err != CaptureConstants.E_OK){ 422 | return err; 423 | } 424 | 425 | return CaptureConstants.E_OK; 426 | } 427 | 428 | private void clearCallbackSlow(){ 429 | mVideoPublish.mCamera.setPreviewCallback(null); 430 | } 431 | 432 | private void resetBuffersAndCallbacks(){ 433 | if(usingHidden){ 434 | clearCallbackHidden(); 435 | } else if(usingFaster){ 436 | clearCallbackBest(); 437 | } else { 438 | clearCallbackSlow(); 439 | } 440 | } 441 | 442 | public int startCapture(ClientOptions opt) { 443 | NativeLibsLoader.loadCaptureLibs(context.getPackageName()); 444 | 445 | int err = CaptureConstants.E_OK; 446 | if(mVideoPublish == null){ 447 | err = getPublisher(); 448 | if(err != CaptureConstants.E_OK){ 449 | mVideoPublish.state = CaptureConstants.ERROR; 450 | return err; 451 | } 452 | } 453 | 454 | mVideoPublish.state = CaptureConstants.RESUMED; 455 | 456 | mVideoPublish.restartWhenResume = false; 457 | 458 | // acquires the camera 459 | err = openCamera(); 460 | if(err != CaptureConstants.E_OK){ 461 | mVideoPublish.state = CaptureConstants.ERROR; 462 | return err; 463 | }; 464 | 465 | // sets up the camera parameters 466 | err = setParameters(); 467 | if(err != CaptureConstants.E_OK){ 468 | mVideoPublish.state = CaptureConstants.ERROR; 469 | return err; 470 | } 471 | 472 | // gets the size of a non encoded frame 473 | mVideoPublish.bufSize = getBufferSize(); 474 | if(mVideoPublish.bufSize < 0){ 475 | mVideoPublish.state = CaptureConstants.ERROR; 476 | return 
mVideoPublish.bufSize; 477 | } 478 | 479 | // creates the shared buffer, inits the native side and sets the streamId 480 | err = initNativeSide(); 481 | if(err != CaptureConstants.E_OK){ 482 | mVideoPublish.state = CaptureConstants.ERROR; 483 | return err; 484 | } 485 | 486 | // start the publisher native thread 487 | err = startPublisherThread(); 488 | if(err != CaptureConstants.E_OK){ 489 | mVideoPublish.state = CaptureConstants.ERROR; 490 | return err; 491 | } 492 | 493 | // start the publisher handler 494 | err = startPublisher(opt); 495 | if(err != CaptureConstants.E_OK){ 496 | mVideoPublish.state = CaptureConstants.ERROR; 497 | return err; 498 | } 499 | 500 | err = resumeCapture(); 501 | if(err != CaptureConstants.E_OK){ 502 | mVideoPublish.state = CaptureConstants.ERROR; 503 | return err; 504 | } 505 | 506 | 507 | return err; 508 | } 509 | 510 | public int resumeCapture(){ 511 | int err = CaptureConstants.E_OK; 512 | Log.e("","resumeCapture"); 513 | if(!isSurfaceCreated || mVideoPublish == null){ 514 | err = CaptureConstants.E_COULD_NOT_RESUME_CAPTURE; 515 | mVideoPublish.state = CaptureConstants.ERROR; 516 | return err; 517 | } 518 | 519 | mVideoPublish.state = CaptureConstants.RESUMED; 520 | 521 | mVideoPublish.lastSurfaceDestroyed = false; // set to false because the 2 surfaces conflict has ended 522 | mVideoPublish.nextSurfaceCreated = false; // set to false because the 2 surfaces conflict has ended 523 | 524 | // tells it where to draw (sets display for preview) 525 | err = setDisplay(); 526 | if(err != CaptureConstants.E_OK){ 527 | mVideoPublish.state = CaptureConstants.ERROR; 528 | return err; 529 | } 530 | 531 | // prepares the callback 532 | err = prepareCallback(); 533 | if(err != CaptureConstants.E_OK){ 534 | mVideoPublish.state = CaptureConstants.ERROR; 535 | return err; 536 | } 537 | 538 | // begins the preview. 
539 | err = beginPreview(); 540 | if(err != CaptureConstants.E_OK){ 541 | mVideoPublish.state = CaptureConstants.ERROR; 542 | return err; 543 | } 544 | 545 | return err; 546 | } 547 | 548 | public void stopCapture(){ 549 | if(isCapturing()){ 550 | NativeLibsLoader.loadCaptureLibs(context.getPackageName()); 551 | 552 | pauseCapture(); 553 | mVideoPublish.state = CaptureConstants.STOPPED; 554 | 555 | // Because the CameraDevice object is not a shared resource, it's very 556 | // important to release it when it may not be used anymore 557 | if(mVideoPublish.mCamera != null){ 558 | mVideoPublish.mCamera.release(); 559 | mVideoPublish.mCamera = null; 560 | } 561 | 562 | mVideoPublish.endNativeEncoder(); 563 | mVideoPublish.stopPublisher(); 564 | 565 | mVideoPublish = ((BigBlueButton) getContext().getApplicationContext()).deleteVideoPublish(); 566 | 567 | } 568 | } 569 | 570 | private void pauseCapture(){ 571 | if(mVideoPublish != null && mVideoPublish.mCamera != null && 572 | !(mVideoPublish.state == CaptureConstants.PAUSED)){ 573 | mVideoPublish.state = CaptureConstants.PAUSED; 574 | 575 | mVideoPublish.mCamera.stopPreview(); 576 | 577 | resetBuffersAndCallbacks(); 578 | 579 | try { 580 | mVideoPublish.mCamera.setPreviewDisplay(null); 581 | } catch (IOException e) { 582 | log.debug("Warning: error when trying to remove the preview display"); 583 | e.printStackTrace(); 584 | } 585 | } 586 | } 587 | 588 | // Checks if addCallbackBuffer and setPreviewCallbackWithBuffer are written but hidden. 589 | // This method will look for all methods of the android.hardware.Camera class, 590 | // even the hidden ones. 
591 | private boolean HiddenCallbackWithBuffer(){ 592 | int exist = 0; 593 | try { 594 | Class> c = Class.forName("android.hardware.Camera"); 595 | Method[] m = c.getMethods(); 596 | for(int i=0; i mC = Class.forName("android.hardware.Camera"); 617 | 618 | // variable that will indicate of what class is each parameter of the method 619 | Class>[] mPartypes = new Class[1]; 620 | mPartypes[0] = (new byte[1]).getClass(); //There is probably a better way to do this. 621 | mAcb = mC.getMethod("addCallbackBuffer", mPartypes); 622 | 623 | mArglist = new Object[1]; 624 | } catch (Exception e) { 625 | log.debug("Problem setting up for addCallbackBuffer: " + e.toString()); 626 | return CaptureConstants.E_COULD_NOT_INIT_HIDDEN; 627 | } 628 | return CaptureConstants.E_OK; 629 | } 630 | 631 | // This method uses reflection to call the addCallbackBuffer method 632 | // It allows you to add a byte buffer to the queue of buffers to be used by preview. 633 | // Real addCallbackBuffer implementation: http://android.git.kernel.org/?p=platform/frameworks/base.git;a=blob;f=core/java/android/hardware/Camera.java;hb=9db3d07b9620b4269ab33f78604a36327e536ce1 634 | // @param b The buffer to register. Size should be width * height * bitsPerPixel / 8. 
635 | private int addCallbackBuffer_Android2p2(byte[] b){ // this function is native since Android 2.2 636 | //Check to be sure initForACB has been called to setup 637 | // mVideoPublish.mAcb and mVideoPublish.mArglist 638 | // if(mVideoPublish.mArglist == null){ 639 | // initForACB(); 640 | // } 641 | 642 | mArglist[0] = b; 643 | try { 644 | mAcb.invoke(mVideoPublish.mCamera, mArglist); 645 | } catch (Exception e) { 646 | log.debug("invoking addCallbackBuffer failed: " + e.toString()); 647 | return CaptureConstants.E_COULD_NOT_ADD_HIDDEN; 648 | } 649 | return CaptureConstants.E_OK; 650 | } 651 | 652 | // This method uses reflection to call the setPreviewCallbackWithBuffer method 653 | // Use this method instead of setPreviewCallback if you want to use manually allocated 654 | // buffers. Assumes that "this" implements Camera.PreviewCallback 655 | private int setPreviewCallbackWithBuffer_Android2p2(boolean clear){ // this function is native since Android 2.2 656 | try { 657 | Class> c = Class.forName("android.hardware.Camera"); 658 | Method spcwb = null; // sets a preview with buffers 659 | //This way of finding our method is a bit inefficient 660 | // However, since this method is only called when the preview starts or resumes 661 | // this should not cause performance issues 662 | Method[] m = c.getMethods(); // get all methods of camera 663 | for(int i=0; i[] mPartypes = new Class[2]; // variable that will indicate of what class 673 | // // is each parameter of the method 674 | // mPartypes[0] = (mVideoPublish.mCamera).getClass(); 675 | // mPartypes[1] = (this).getClass(); //There is probably a better way to do this. 
676 | // spcwb = c.getMethod("setPreviewCallbackWithBuffer", mPartypes); 677 | 678 | //If we were able to find the setPreviewCallbackWithBuffer method of Camera, 679 | // we can now invoke it on our Camera instance, setting 'this' to be the 680 | // callback handler 681 | if(spcwb != null){ 682 | Object[] arglist = new Object[1]; 683 | if(clear){ 684 | arglist[0] = null; 685 | } else { 686 | arglist[0] = this; // receives a copy of a preview frame 687 | } 688 | spcwb.invoke(mVideoPublish.mCamera, arglist); 689 | //Log.i("AR","setPreviewCallbackWithBuffer: Called method"); 690 | } else { 691 | log.debug("setPreviewCallbackWithBuffer: Did not find method"); 692 | return CaptureConstants.E_COULD_NOT_SET_HIDDEN_R1; 693 | } 694 | } catch (Exception e) { 695 | log.debug("{}",e.toString()); 696 | return CaptureConstants.E_COULD_NOT_SET_HIDDEN_R2; 697 | } 698 | return CaptureConstants.E_OK; 699 | } 700 | 701 | @Override 702 | public void surfaceDestroyed(SurfaceHolder holder) { 703 | log.debug("preview surface destroyed"); 704 | 705 | isSurfaceCreated = false; 706 | 707 | if(mVideoPublish != null && 708 | (mVideoPublish.state == CaptureConstants.RESUMED 709 | || mVideoPublish.state == CaptureConstants.PAUSED)){ // means that the activity or the orientation 710 | // changed and the camera was being captured and published (except if 711 | // we are faking a destruction - see the VideoCapture.fakeDestroyed variable for more info) 712 | // (because, in the strategy we are using, this surface will only be destroyed 713 | // when the activity or the orientation changes) 714 | //if(BackgroundManager.isApplicationBroughtToBackground(context)){ // means that the next 715 | // activity doesn't belong to this application. 
So, we need to: 716 | // 1) stop the capture, because we won't have a surface 717 | // 2) consequently, we have to stop the publish 718 | // 3) release the camera, because the user may want to use 719 | // the camera on another application 720 | //mVideoPublish.restartWhenResume = true; 721 | 722 | // stops the preview, the publish and releases the camera 723 | //stopCapture(); 724 | //} else { // means that the next activity belongs to this application 725 | // pauses the preview and publish 726 | pauseCapture(); 727 | 728 | // signalizes that the activity has changed and the 729 | // camera was being captured 730 | if(mVideoPublish.nextSurfaceCreated){ // means that the surface of the next activity or 731 | // of the next orientation has already been created 732 | mVideoPublish.RequestResume(); 733 | } else { // means that the surface of the next activity has not been created yet 734 | mVideoPublish.lastSurfaceDestroyed = true; // set to true because the current surface has been destroyed 735 | } 736 | //} 737 | } 738 | } 739 | 740 | @Override 741 | public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { 742 | log.debug("preview surface changed"); 743 | 744 | if(!fakeDestroyed && w == 0 && h == 0){ // means that we want to show the preview on a Dialog. 745 | // So, we need to simulate a surfaceDestroyed 746 | fakeDestroyed = true; 747 | log.debug("simulating a preview surface destruction"); 748 | surfaceDestroyed(holder); // this call does not destroy the surface, 749 | // it just sets the variables to simulate a destruction 750 | } else if(fakeDestroyed && w == 1 && h == 1){ // means that we closed the preview Dialog. 
751 | // So, we need to simulate a surfaceCreated 752 | fakeDestroyed = false; 753 | log.debug("simulating a preview surface creation"); 754 | surfaceCreated(holder); // this call does not create the surface, 755 | // it just sets the variables to simulate a creation 756 | } 757 | } 758 | 759 | @Override 760 | public void surfaceCreated(SurfaceHolder holder) { 761 | log.debug("preview surface created"); 762 | 763 | if(getPublisher() == CaptureConstants.E_OK){ 764 | Log.e("","surfaceCreated"); 765 | isSurfaceCreated = true; 766 | 767 | if(mVideoPublish.state == CaptureConstants.RESUMED || 768 | mVideoPublish.state == CaptureConstants.PAUSED){ 769 | if(!mVideoPublish.lastSurfaceDestroyed){ // means that the last preview surface used to 770 | // capture the video is still active (not destroyed) 771 | // and the capture is not paused yet. 772 | // So, we can't resume the capture right now 773 | mVideoPublish.nextSurfaceCreated = true; 774 | 775 | mVideoPublish.readyToResume(this); 776 | } else { // means that the last preview surface used to capture the video has already been 777 | // destroyed and the capture is paused 778 | resumeCapture(); 779 | } 780 | } else if(mVideoPublish.state == CaptureConstants.STOPPED){ 781 | if(mVideoPublish.restartWhenResume){ // means that the following happened: 782 | // a publish was running, then the application went to 783 | // background, then it is now back to foreground. 
784 | // So, if we want to keep the previous state, 785 | // lets start the capture 786 | startCapture(options); 787 | } 788 | } 789 | } 790 | } 791 | 792 | @Override 793 | public void onPreviewFrame (byte[] _data, Camera camera) 794 | { 795 | if(mVideoPublish != null && mVideoPublish.state == CaptureConstants.RESUMED){ 796 | if(usingHidden){ 797 | addCallbackBuffer_Android2p2(_data); 798 | } else if(usingFaster && mVideoPublish.mCamera != null){ 799 | mVideoPublish.mCamera.addCallbackBuffer(_data); 800 | } 801 | //Participant myself = ((BigBlueButton) getContext().getApplicationContext()).getHandler().getMyself(); 802 | //if(myself!=null&&myself.getStatus().doesHaveStream()){ 803 | enqueueFrame(_data, _data.length, mVideoPublish.getWidth(), mVideoPublish.getHeight(), mVideoPublish.getRotation()); 804 | // } 805 | } 806 | } 807 | 808 | private native int enqueueFrame(byte[] data, int length, int width, int height, int rotation); 809 | 810 | /*public static void setCameraDisplayOrientation(Activity activity, 811 | int cameraId, android.hardware.Camera camera) { 812 | android.hardware.Camera.CameraInfo info = 813 | new android.hardware.Camera.CameraInfo(); 814 | android.hardware.Camera.getCameraInfo(cameraId, info); 815 | int rotation = activity.getWindowManager().getDefaultDisplay() 816 | .getRotation(); 817 | int degrees = 0; 818 | switch (rotation) { 819 | case Surface.ROTATION_0: degrees = 0; break; 820 | case Surface.ROTATION_90: degrees = 90; break; 821 | case Surface.ROTATION_180: degrees = 180; break; 822 | case Surface.ROTATION_270: degrees = 270; break; 823 | } 824 | 825 | int result; 826 | if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { 827 | result = (info.orientation + degrees) % 360; 828 | result = (360 - result) % 360; // compensate the mirror 829 | } else { // back-facing 830 | result = (info.orientation - degrees + 360) % 360; 831 | } 832 | 833 | camera.setDisplayOrientation(result); 834 | }*/ 835 | 836 | 837 | } 
// ===== /audiopublish/src/org/mconf/android/core/video/VideoCaptureLayout.java =====
package org.mconf.android.core.video;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.util.AttributeSet;
import android.view.ViewGroup;
import android.widget.LinearLayout;

// Layout container that positions or hides the camera-capture preview while
// keeping the publisher's aspect ratio (sizing delegated to VideoCentering).
public class VideoCaptureLayout extends LinearLayout {

    private static final Logger log = LoggerFactory.getLogger(VideoCaptureLayout.class);

    public VideoCaptureLayout(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    // Shrinks the preview to 1x1 px: visually hidden, but the surface stays alive.
    public void hide() {
        VideoCentering mVideoCentering = new VideoCentering();
        ViewGroup.LayoutParams layoutParams = mVideoCentering.hidePreview(getLayoutParams());
        setLayoutParams(layoutParams);

        // ViewGroup.LayoutParams params = getLayoutParams();
        // params.width = 1;
        // params.height = 1;
        // setLayoutParams(params);
    }

    // Sizes the preview to fit the display minus 'margin' pixels, swapping the
    // aspect ratio when the preferred preview rotation is 90 or 270 degrees.
    public void show(int margin) {
        VideoPublish mVideoPublish = ((BigBlueButton) getContext().getApplicationContext()).getVideoPublish();
        if(mVideoPublish == null){
            log.debug("Error: could not show capture preview. Reason: could not get or instantiate a VideoPublisher");
            return;
        }

        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(getContext());
        int rotation = Integer.parseInt(prefs.getString("preview_rotation", "0"));
        log.debug("PREVIEW ROTATION ={}",rotation);

        VideoCentering mVideoCentering = new VideoCentering();
        if (rotation % 180 == 0)
            mVideoCentering.setAspectRatio(mVideoPublish.getWidth()/(float)mVideoPublish.getHeight());
        else
            // rotated 90/270: width and height are swapped on screen
            mVideoCentering.setAspectRatio(mVideoPublish.getHeight()/(float)mVideoPublish.getWidth());

        ViewGroup.LayoutParams layoutParams = mVideoCentering.getVideoLayoutParams(mVideoCentering.getDisplayMetrics(getContext(),margin), getLayoutParams());
        setLayoutParams(layoutParams);

        //this would be the ideal =(
        // if(mVideoPublish.mCamera != null)
        // {
        //     mVideoPublish.mCamera.stopPreview();
        //     //mVideoPublish.mCamera.setDisplayOrientation(rotation);
        //     mVideoPublish.mCamera.startPreview();
        // }

        //ViewGroup.LayoutParams params = getLayoutParams();
        //params.width = 320;
        //params.height = 240;
        //setLayoutParams(params);
    }

    // Collapses the preview to 0x0 px, removing its visible area entirely.
    public void destroy() {
        VideoCentering mVideoCentering = new VideoCentering();
        ViewGroup.LayoutParams layoutParams = mVideoCentering.destroyPreview(getLayoutParams());
        setLayoutParams(layoutParams);

        // ViewGroup.LayoutParams params = getLayoutParams();
        // params.width = 0;
        // params.height = 0;
        // setLayoutParams(params);
    }
}
// ===== /audiopublish/src/org/mconf/android/core/video/VideoCentering.java =====
package org.mconf.android.core.video;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import android.content.Context;
import android.util.DisplayMetrics;
import android.view.Display;
import android.view.WindowManager;
import android.view.ViewGroup.LayoutParams;

// Helper that computes LayoutParams to letterbox a video inside the display
// while preserving a configurable width/height aspect ratio.
public class VideoCentering {
    private static final Logger log = LoggerFactory.getLogger(VideoCentering.class);
    // width/height ratio used when fitting the video; defaults to 4:3
    private float aspectRatio = 4 / (float) 3;

    // Fits a rectangle of the configured aspect ratio inside 'metrics' and
    // writes the resulting size into 'layoutParams' (returned for chaining).
    public LayoutParams getVideoLayoutParams(DisplayMetrics metrics, LayoutParams layoutParams) {
        int h = 0, w = 0;
        float displayAspectRatio = metrics.widthPixels / (float) metrics.heightPixels;
        if (displayAspectRatio < aspectRatio) {
            // display is narrower than the video: width is the limiting dimension
            w = metrics.widthPixels;
            h = (int) (w / aspectRatio);
        } else {
            // display is wider than the video: height is the limiting dimension
            h = metrics.heightPixels;
            w = (int) (h * aspectRatio);
        }
        layoutParams.width = w;
        layoutParams.height = h;
        return layoutParams;
    }

    // Returns the default display's metrics shrunk by 'margin' pixels on both axes.
    public DisplayMetrics getDisplayMetrics(Context context, int margin){
        DisplayMetrics metrics = new DisplayMetrics();
        Display display = ((WindowManager) context.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
        display.getMetrics(metrics);
        log.debug("Maximum display resolution: {} X {}\n", metrics.widthPixels, metrics.heightPixels);
        metrics.widthPixels -= margin;
        metrics.heightPixels -= margin;
        return metrics;
    }

    public void setAspectRatio(float aspectRatio) {
        this.aspectRatio = aspectRatio;
    }

    public float getAspectRatio() {
        return aspectRatio;
    }

    // 1x1 px: hides the preview without destroying the underlying surface.
    public LayoutParams hidePreview(LayoutParams layoutParams){
        layoutParams.width = 1;
        layoutParams.height = 1;
        return layoutParams;
    }

    // 0x0 px: removes the preview's visible area entirely.
    public LayoutParams destroyPreview(LayoutParams layoutParams) {
        layoutParams.width = 0;
        layoutParams.height = 0;
        return layoutParams;
    }
}
// ===== /audiopublish/src/org/mconf/android/core/video/VideoDialog.java =====
--------------------------------------------------------------------------------
package org.mconf.android.core.video;


import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import android.app.Dialog;
import android.content.Context;
import android.view.Window;

import com.flazr.rtmp.client.ClientOptions;

/**
 * Dialog that hosts either the local capture preview (when the video belongs
 * to the current user) or a remote video window (currently disabled).
 */
public class VideoDialog extends Dialog {
	private static final Logger log = LoggerFactory.getLogger(VideoDialog.class);

	//private VideoSurface videoWindow;
	private String userId;
	private String name;
	public boolean isPreview;
	private int streamToShow;
	private VoiceInterface mVoiceItf;
	private ClientOptions opt;

	/**
	 * @param userId       owner of the video stream
	 * @param myId         id of the local user; preview mode when equal to userId
	 * @param name         title shown on the dialog
	 * @param streamToShow stream selector for remote playback (unused while
	 *                     remote video is disabled)
	 * @param options      RTMP client options forwarded to the capture pipeline
	 */
	public VideoDialog(Context context, String userId, String myId, String name, int streamToShow, ClientOptions options) {
		super(context);
		this.opt = options;
		this.userId = userId;
		this.name = name;

		// Preview mode iff this dialog shows the local user's own camera.
		isPreview = userId.equals(myId);

		requestWindowFeature(Window.FEATURE_NO_TITLE); //Removes the title from the Dialog

		if (isPreview) {
			setContentView(R.layout.video_capture);
		} else {
			/*setContentView(R.layout.video_window);

			videoWindow = (VideoSurface) findViewById(R.id.video_window);*/
		}

		android.view.WindowManager.LayoutParams winAttrs = getWindow().getAttributes();
		winAttrs.flags = android.view.WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON; //Makes the video bright
		// windowAttributes.flags = android.view.WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE; //Makes it possible to interact with the window behind, but the video should be closed properly when the screen changes
		// windowAttributes.flags = android.view.WindowManager.LayoutParams.FLAG_SCALED; //Removes the title from the dialog and removes the border also
		getWindow().setAttributes(winAttrs);

		setTitle(name);
		setCancelable(true);

		this.streamToShow = streamToShow;
	}

	private void sendBroadcastRecreateCaptureSurface() {
		log.debug("sendBroadcastRecreateCaptureSurface()");

		//Intent intent= new Intent(Client.CLOSE_DIALOG_PREVIEW);
		//getContext().sendBroadcast(intent);
	}

	private void setVideoId(String userIdLocal) {
		userId = userIdLocal;
	}

	private void setVideoName(String userName) {
		name = userName;
	}

	public String getVideoId() {
		return userId;
	}

	public String getVideoName() {
		return name;
	}

	@Override
	protected void onStart() {
		super.onStart();
		resume();
	}

	@Override
	protected void onStop() {
		pause();
		super.onStop();
	}

	/** Pauses playback, or notifies the capture side in preview mode. */
	public void pause() {
		if (isPreview) {
			sendBroadcastRecreateCaptureSurface();
		} else {
			//videoWindow.stop();
		}
	}

	/** Shows the capture preview (with a 40px margin) or resumes playback. */
	public void resume() {
		if (isPreview) {
			VideoCaptureLayout videocaplayout = (VideoCaptureLayout) findViewById(R.id.video_capture_layout);
			videocaplayout.show(40);
		} else {
			//videoWindow.start(userId, true, streamToShow);
		}
	}

	/* (non-Javadoc)
	 * @see android.app.Dialog#onBackPressed()
	 */
	@Override
	public void onBackPressed() {
		// NOTE(review): super.onBackPressed() is intentionally not called, so the
		// back key does not dismiss the dialog — it starts capturing instead.

		VideoCapture mVideoCapture = (VideoCapture) findViewById(R.id.video_capture);
		mVideoCapture.startCapture(opt);

	}
}
-------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/VideoPublish.java: --------------------------------------------------------------------------------
package org.mconf.android.core.video;

import java.util.ArrayList;
import java.util.List;

import org.slf4j.Logger;
import
org.slf4j.LoggerFactory;

import android.hardware.Camera;

import com.flazr.rtmp.RtmpReader;
import com.flazr.rtmp.client.ClientOptions;
import com.flazr.rtmp.message.Metadata;
import com.flazr.rtmp.message.Video;

/**
 * Bridge between the native video encoder and the RTMP publisher.
 *
 * The native encoder pushes encoded frames into this object via
 * {@link #onReadyFrame(int, int)} (through {@link #assignJavaBuffer()}'s shared
 * buffer), and the RTMP publisher pulls them out through the {@link RtmpReader}
 * interface ({@link #hasNext()} / {@link #next()}). The frame queue is
 * coordinated with wait/notify on {@code this}.
 */
public class VideoPublish extends Thread implements RtmpReader {

	private static final Logger log = LoggerFactory.getLogger(VideoPublish.class);

	private int framerate = CaptureConstants.DEFAULT_FRAME_RATE;
	private int width = CaptureConstants.DEFAULT_WIDTH;
	private int height = CaptureConstants.DEFAULT_HEIGHT;
	private int bitrate = CaptureConstants.DEFAULT_BIT_RATE;
	private int gop = CaptureConstants.DEFAULT_GOP;
	private int rotation = 0;

	// Queue of encoded frames waiting to be published.
	private List<Video> framesList = new ArrayList<Video>();

	private BbbVideoPublisher videoPublishHandler;

	private BigBlueButtonClient context;

	private VideoCapture mVideoCapture;

	private VoiceInterface mVoiceItf;

	// Buffer shared with the native encoder; it writes each encoded frame here.
	private byte[] sharedBuffer;

	public int bufSize;

	public Camera mCamera;

	private int firstTimestamp = 0;
	private int lastTimestamp = 0;

	public int state = CaptureConstants.STOPPED;

	public boolean nextSurfaceCreated = false; // used when:
				// the activity or the orientation changes and
				// the video was being captured (except if
				// we are faking a destruction - see the VideoCapture.fakeDestroyed variable for more info).
				// In this moment,
				// there are 2 surfaces conflicting, and we need to know
				// if/when they are destroyed and created.
				// true when: the next surface has already been created
				// false when: the next surface has not been created yet OR
				// there isn't a 2 surfaces conflict
	public boolean lastSurfaceDestroyed = false; // used when:
				// same situation as the "nextSurfaceCreated" variable
				// true when: the last preview surface has already been destroyed
				// false when: the last preview surface is still active

	public boolean nativeEncoderInitialized = false; // used to prevent errors.
				// true when the native class VideoEncoder is not NULL
				// false when the native class VideoEncoder is NULL

	public boolean restartWhenResume; // used in the following situation:
				// the user put the application in background.
				// now the user put the application in foreground again.
				// in this situation, this boolean is true if the camera was being
				// captured when the application went to background, and false if the
				// camera was not being captured.
				// So, this boolean allows to keep the previous state (capturing or not)
				// when the application resumes.

	private boolean framesListAvailable = false; // set to true when the RtmpPublisher starts seeking
				// for video messages. When true, this boolean allows the addition
				// of video frames to the list.
				// Set to false right when the RtmpPublisher decides to
				// close the reader. When false, this boolean prevents the
				// addition of new frames to the list.

	// NOTE(review): despite the name, true means "first frame NOT yet written";
	// it is flipped to false once the first frame is queued.
	private boolean firstFrameWrote = true;

	public int cameraId = -1;

	private ClientOptions options;

	public VideoPublish(BigBlueButtonClient context, boolean restartWhenResume, int framerate, int width, int height, int bitrate, int gop, int rotation) {
		this.context = context;
		this.restartWhenResume = restartWhenResume;

		this.framerate = framerate;
		this.width = width;
		this.height = height;
		this.bitrate = bitrate;
		this.gop = gop;
		this.rotation = rotation;
	}

	/**
	 * Starts the publishing pipeline. The video publisher itself is currently
	 * commented out; only the voice-over-RTMP connection is started.
	 */
	public void startPublisher(ClientOptions opt) {
		//streamName="Test1";
		/*videoPublishHandler = new BbbVideoPublisher(context, this, opt.getStreamName(), opt);
		videoPublishHandler.start();*/
		mVoiceItf = new VoiceOverRtmp(context, opt);
		mVoiceItf.start();
	}

	/** Wakes up any thread blocked in hasNext() and stops the publisher. */
	public void stopPublisher() {
		synchronized (this) {
			this.notifyAll();
		}
		if (videoPublishHandler != null) {
			videoPublishHandler.stop();
		}
	}

	public void readyToResume(VideoCapture videoCapture) {
		mVideoCapture = videoCapture;
	}

	/**
	 * Asks the registered VideoCapture to resume capturing.
	 *
	 * @return CaptureConstants.E_OK on success, or
	 *         E_COULD_NOT_REQUEST_RESUME when no VideoCapture was registered
	 */
	public int RequestResume() {
		if (mVideoCapture == null) {
			log.debug("Error: resume requested but there is not a VideoCapture class available");
			return CaptureConstants.E_COULD_NOT_REQUEST_RESUME;
		}
		mVideoCapture.resumeCapture();
		mVideoCapture = null;
		return CaptureConstants.E_OK;
	}

	public void initNativeEncoder() {
		sharedBuffer = new byte[bufSize]; // the encoded frame will never be bigger than the not encoded
		//\TODO Usually the encoded frame is much smaller than the not encoded.
		//So it would be good if we find out the biggest encoded
		//frame size possible (given the encoding parameters)
		//in order to initialize the sharedBuffer array as a byte[]
		//of the smaller size as possible, to allocate less memory.

		// Width and height swap roles for 90/270 degree rotations.
		if (rotation % 180 == 0)
			initEncoder(width, height, framerate, bitrate, gop);
		else
			initEncoder(height, width, framerate, bitrate, gop);

		nativeEncoderInitialized = true;
	}

	public void endNativeEncoder() {
		nativeEncoderInitialized = false;

		endEncoder();
	}

	@Override
	public void run() {
		initSenderLoop();
	}

	/** Called from native code to obtain the shared frame buffer. */
	public byte[] assignJavaBuffer() {
		return sharedBuffer;
	}

	/**
	 * Called from native code when an encoded frame is ready in the shared
	 * buffer. Normalizes the timestamp, copies the frame out of the shared
	 * buffer, queues it and wakes up the publisher thread.
	 */
	public int onReadyFrame(int bufferSize, int timestamp) {
		if (firstTimestamp == 0) {
			firstTimestamp = timestamp;
		}
		timestamp = timestamp - firstTimestamp;
		int interval = timestamp - lastTimestamp;
		lastTimestamp = timestamp;

		byte[] aux = new byte[bufferSize];
		System.arraycopy(sharedBuffer, 0, aux, 0, bufferSize); //\TODO see if we can avoid this copy

		Video video = new Video(timestamp, aux, bufferSize);
		video.getHeader().setDeltaTime(interval);

		if (framesListAvailable) {
			framesList.add(video);
			if (firstFrameWrote) {
				firstFrameWrote = false;
				videoPublishHandler.fireFirstFrame();
			}
			synchronized (this) {
				this.notifyAll();
			}
		}
		return 0;
	}

	@Override
	public void close() {
		framesListAvailable = false;
		if (framesList != null) {
			framesList.clear();
		}
		framesList = null;
	}

	@Override
	public Metadata getMetadata() {
		return null;
	}

	@Override
	public Video[] getStartMessages() {
		framesListAvailable = true;
		Video[] startMessages = new Video[0];
		return startMessages;
	}

	@Override
	public long getTimePosition() {
		return 0;
	}

	/**
	 * Blocks until a frame is available (or the reader is stopped/closed).
	 *
	 * @return true when a frame can be consumed via next()
	 */
	@Override
	public boolean hasNext() {
		if ((state == CaptureConstants.RESUMED || state == CaptureConstants.PAUSED)
				&& framesListAvailable && framesList != null && framesList.isEmpty()) {
			// Fix: wait() must be called while holding the monitor, otherwise it
			// throws IllegalMonitorStateException (the notifyAll() call sites
			// already synchronize on "this").
			synchronized (this) {
				try {
					this.wait();
				} catch (InterruptedException e) {
					// Preserve the interrupt status for the caller.
					Thread.currentThread().interrupt();
				}
			}
		}
		if ((state == CaptureConstants.RESUMED || state == CaptureConstants.PAUSED)
				&& framesListAvailable && framesList != null) { // means that the framesList is not empty
			return true;
		} else { // means that the framesList is empty or we should not get next frames
			return false;
		}
	}

	@Override
	public Video next() {
		if (framesListAvailable && framesList != null && !framesList.isEmpty()) {
			return framesList.remove(0);
		} else {
			// Defensive: return an empty frame rather than null/throwing.
			Video emptyVideo = new Video();
			return emptyVideo;
		}
	}

	@Override
	public long seek(long timePosition) {
		return 0;
	}

	@Override
	public void setAggregateDuration(int targetDuration) {
	}

	private native int initEncoder(int width, int height, int frameRate, int bitRate, int GOP);
	private native int endEncoder();
	private native int initSenderLoop();

	@Override
	public int getWidth() {
		return width;
	}

	@Override
	public int getHeight() {
		return height;
	}

	public void setFramerate(int framerate) {
		this.framerate = framerate;
	}

	public void setHeight(int height) {
		this.height = height;
	}

	public void setWidth(int width) {
		this.width = width;
	}

	public int getFramerate() {
		return framerate;
	}

	public int getBitrate() {
		return bitrate;
	}

	public int getGop() {
		return gop;
	}

	public int getRotation() {
		return rotation;
	}
}
-------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/VideoPublisherConnection.java:
-------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | 4 | import org.jboss.netty.channel.ChannelPipeline; 5 | import org.jboss.netty.channel.ChannelPipelineFactory; 6 | import org.jboss.netty.channel.Channels; 7 | import org.slf4j.Logger; 8 | import org.slf4j.LoggerFactory; 9 | 10 | import com.flazr.rtmp.RtmpDecoder; 11 | import com.flazr.rtmp.RtmpEncoder; 12 | import com.flazr.rtmp.client.ClientHandshakeHandler; 13 | import com.flazr.rtmp.client.ClientOptions; 14 | 15 | public class VideoPublisherConnection extends RtmpConnection { 16 | 17 | @SuppressWarnings("unused") 18 | private static final Logger log = LoggerFactory.getLogger(VideoPublisherConnection.class); 19 | 20 | public VideoPublisherConnection(ClientOptions options, BigBlueButtonClient context) { 21 | super(options, context); 22 | } 23 | 24 | @Override 25 | protected ChannelPipelineFactory pipelineFactory() { 26 | return new ChannelPipelineFactory() { 27 | @Override 28 | public ChannelPipeline getPipeline() throws Exception { 29 | final ChannelPipeline pipeline = Channels.pipeline(); 30 | pipeline.addLast("handshaker", new ClientHandshakeHandler(options)); 31 | pipeline.addLast("decoder", new RtmpDecoder()); 32 | pipeline.addLast("encoder", new RtmpEncoder()); 33 | pipeline.addLast("handler", VideoPublisherConnection.this); 34 | return pipeline; 35 | } 36 | }; 37 | } 38 | } -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/VoiceConnection.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | import org.jboss.netty.channel.ChannelPipeline; 4 | import org.jboss.netty.channel.ChannelPipelineFactory; 5 | import org.jboss.netty.channel.Channels; 6 | import org.slf4j.Logger; 7 | import org.slf4j.LoggerFactory; 8 | 9 | import com.flazr.rtmp.RtmpDecoder; 10 | import 
com.flazr.rtmp.RtmpEncoder; 11 | import com.flazr.rtmp.client.ClientHandshakeHandler; 12 | import com.flazr.rtmp.client.ClientOptions; 13 | import com.flazr.rtmp.message.Audio; 14 | 15 | public abstract class VoiceConnection extends RtmpConnection { 16 | 17 | private static final Logger log = LoggerFactory.getLogger(VoiceConnection.class); 18 | private String publishName; 19 | private String playName; 20 | @SuppressWarnings("unused") 21 | private String codec; 22 | private int playStreamId = -1; 23 | private int publishStreamId = -1; 24 | 25 | public VoiceConnection(ClientOptions options, BigBlueButtonClient context) { 26 | super(options, context); 27 | } 28 | 29 | @Override 30 | protected ChannelPipelineFactory pipelineFactory() { 31 | return new ChannelPipelineFactory() { 32 | @Override 33 | public ChannelPipeline getPipeline() throws Exception { 34 | final ChannelPipeline pipeline = Channels.pipeline(); 35 | pipeline.addLast("handshaker", new ClientHandshakeHandler(options)); 36 | pipeline.addLast("decoder", new RtmpDecoder()); 37 | pipeline.addLast("encoder", new RtmpEncoder()); 38 | pipeline.addLast("handler", VoiceConnection.this); 39 | return pipeline; 40 | } 41 | }; 42 | } 43 | 44 | abstract protected void onAudio(Audio audio); 45 | 46 | } -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/VoiceInterface.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | import com.flazr.rtmp.client.ClientOptions; 4 | 5 | 6 | public interface VoiceInterface { 7 | 8 | public static final int E_OK = 0; 9 | public static final int E_INVALID_NUMBER = 1; 10 | public static final int E_TIMEOUT = 2; 11 | 12 | public int start(); 13 | public void stop(); 14 | public boolean isOnCall(); 15 | public boolean isMuted(); 16 | public void muteCall(boolean mute); 17 | public int getSpeaker(); 18 | public void setSpeaker(int 
mode);
}
-------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/VoiceOverRtmp.java: --------------------------------------------------------------------------------
package org.mconf.android.core.video;

import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelStateEvent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import android.os.SystemClock;
import android.util.Log;

import com.flazr.rtmp.client.ClientOptions;
import com.flazr.rtmp.message.Audio;

/**
 * Voice call over RTMP: publishes microphone audio ({@link AudioPublish})
 * and plays back received audio ({@link RtmpAudioPlayer}) through a
 * {@link BbbVoiceConnection}.
 */
public class VoiceOverRtmp implements VoiceInterface {

	private static final Logger log = LoggerFactory.getLogger(VoiceOverRtmp.class);

	private BbbVoiceConnection connection;
	private RtmpAudioPlayer audioPlayer = new RtmpAudioPlayer();
	private AudioPublish micBufferReader = new AudioPublish();
	protected boolean onCall = false;
	private Object args[] = {"Test1", "18.9750/72.8258", "AB", "1", "Female", "info", ""};
	//private Object args[]={"Test1"};

	public VoiceOverRtmp(BigBlueButtonClient bbb, ClientOptions options) {

		/*ClientOptions options=new ClientOptions();
		options.setHost("10.129.200.81");
		options.setAppName("HariPanTest3");
		//options.setAppName("PanTest");
		options.setStreamName("Test1");
		options.setArgs(args);
		options.publishLive();*/
		options.setReaderToPublish(micBufferReader);
		connection = new BbbVoiceConnection(bbb, options) {
			@Override
			protected void onAudio(Audio audio) {
				audioPlayer.onAudio(audio);
			}

			@Override
			protected void onConnectedSuccessfully() {
				onCall = true;
			}

			@Override
			public void channelDisconnected(ChannelHandlerContext ctx,
					ChannelStateEvent e) throws Exception {
				super.channelDisconnected(ctx, e);
				log.debug("\n\nvoice disconnected, stopping VoiceOverRtmp\n\n");
				onCall = false;
				audioPlayer.stop();

			}
		};

	}

	/**
	 * Starts the voice connection, waiting up to ~5s for the call to come up
	 * and for the publisher to be ready.
	 *
	 * @return E_OK on success, E_TIMEOUT otherwise
	 */
	@Override
	public int start() {
		connection.start();

		// Poll (up to 10 x 500ms) until the connection reports the call is up.
		int cont = 10;
		while (!onCall && cont > 0) {
			SystemClock.sleep(500);
			cont--;
		}

		// Fix: test the actual condition, not the loop counter — if the call
		// came up on the last iteration, cont is 0 but we are NOT timed out.
		if (!onCall) {
			stop();
			return E_TIMEOUT;
		} else {
			audioPlayer.start();
			int res;
			if (sendFirstAudioPacket()) {
				micBufferReader.start();
				//Log.e("",""+Thread.currentThread());
				res = E_OK;
			} else {
				stop();
				//Log.e("",""+Thread.currentThread());
				res = E_TIMEOUT;
			}
			return res;
		}
	}

	private boolean sendFirstAudioPacket() {
		//for some reason - and we don't know why yet - after the reception of the first audio packet
		//the connection needs to wait 101 ms to then normally starts the audio dispatching
		//so..we are firing the first audio packet with a 101ms delay...
		//this first audio packet is in the audio buffer of the micBufferReader
		// ( you can check in the constructor of the AudioPublish class )

		// The voice connection waits for a 'createStream' server command to initialize the publisher
		// It means that by the time fireNext is called 'connection.publisher' may be null
		// So we have to wait until the publisher is initialized

		int attemptsLeft = 10;
		while (connection.publisher == null && attemptsLeft > 0) {
			SystemClock.sleep(500);
			attemptsLeft--;
		}

		// Fix: same off-by-one as in start() — check the predicate itself, so a
		// publisher that appeared on the final attempt is not treated as failure.
		if (connection.publisher == null) {
			/* Failed to initialize the publisher */
			return false;
		} else {
			connection.publisher.fireNext(connection.publisher.channel, 101);
			return true;
		}
	}

	@Override
	public void stop() {
		connection.stop();
	}

	@Override
	public boolean isOnCall() {
		return onCall;
	}

	@Override
	public boolean isMuted() {
		return micBufferReader.isMuted();
	}

	@Override
	public void muteCall(boolean mute) {
		if (mute)
			micBufferReader.mute();
		else
			micBufferReader.unmute();

	}

	@Override
	public int getSpeaker() {
		return 0;
	}

	@Override
	public void setSpeaker(int mode) {
		// TODO Auto-generated method stub

	}

}
--------------------------------------------------------------------------------
67 | * checks to see if the user has enabled the codec. 68 | * 69 | * @returns true if the codec can be used 70 | */ 71 | boolean isEnabled(); 72 | 73 | /** 74 | * (implemented by {@link CodecBase} 75 | *
76 | * Checks to see if the binary library associated with the 77 | * codec (if any) loaded OK. 78 | * 79 | * @returns true if the codec loaded properly 80 | */ 81 | boolean isLoaded(); 82 | boolean isFailed(); 83 | void fail(); 84 | boolean isValid(); 85 | 86 | /** 87 | * (implemented by {@link CodecBase} 88 | * 89 | * @returns The user friendly string for the codec (should 90 | * include both the name and the bandwidth 91 | */ 92 | String getTitle(); 93 | 94 | 95 | /** 96 | * (implemented by {@link CodecBase} 97 | * 98 | * @returns The RTP assigned name string for the codec 99 | */ 100 | String name(); 101 | String key(); 102 | String getValue(); 103 | 104 | /** 105 | * (implemented by {@link CodecBase} 106 | * 107 | * @returns The commonly used name for the codec. 108 | */ 109 | String userName(); 110 | 111 | /** 112 | * (implemented by {@link CodecBase} 113 | * 114 | * @returns The RTP assigned number for the codec 115 | */ 116 | int number(); 117 | 118 | /** 119 | * (implemented by {@link CodecBase} 120 | * 121 | * @param l The list preference controlling this Codec 122 | * 123 | * Used to add listeners for preference changes and update 124 | * the codec parameters accordingly. 
125 | */ 126 | void setListPreference(ListPreference l); 127 | } -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/CodecBase.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | import android.preference.ListPreference; 4 | import android.preference.Preference; 5 | import android.telephony.TelephonyManager; 6 | 7 | class CodecBase implements Preference.OnPreferenceChangeListener { 8 | protected String CODEC_NAME; 9 | protected String CODEC_USER_NAME; 10 | protected int CODEC_NUMBER; 11 | protected int CODEC_SAMPLE_RATE=16000; // default for most narrow band codecs 12 | protected int CODEC_FRAME_SIZE=320; // default for most narrow band codecs 13 | protected String CODEC_DESCRIPTION; 14 | protected String CODEC_DEFAULT_SETTING = "never"; 15 | 16 | private boolean loaded = false,failed = false; 17 | private boolean enabled = false; 18 | private boolean wlanOnly = false,wlanOr3GOnly = false; 19 | private String value; 20 | 21 | public void update() { 22 | 23 | } 24 | 25 | public String getValue() { 26 | return value; 27 | } 28 | 29 | void load() { 30 | update(); 31 | loaded = true; 32 | } 33 | 34 | public int samp_rate() { 35 | return CODEC_SAMPLE_RATE; 36 | } 37 | 38 | public int frame_size() { 39 | return CODEC_FRAME_SIZE; 40 | } 41 | 42 | public boolean isLoaded() { 43 | return loaded; 44 | } 45 | 46 | public boolean isFailed() { 47 | return failed; 48 | } 49 | 50 | public void fail() { 51 | update(); 52 | failed = true; 53 | } 54 | 55 | public void enable(boolean e) { 56 | enabled = e; 57 | } 58 | 59 | public boolean isEnabled() { 60 | return enabled; 61 | } 62 | 63 | TelephonyManager tm; 64 | int nt; 65 | 66 | public boolean isValid() { 67 | if (!isEnabled()) 68 | return false; 69 | 70 | if (wlanOr3GOnly() && nt < TelephonyManager.NETWORK_TYPE_UMTS) 71 | return false; 72 | // \TODO this test is True on 
Android 3.1 (specifically on Galaxy Tab 73 | // 10.1 3G), which means that the codecs will be classified as invalid, 74 | // which means that the sound won't work at all 75 | // if (nt < TelephonyManager.NETWORK_TYPE_EDGE) 76 | // return false; 77 | return true; 78 | } 79 | 80 | private boolean wlanOnly() { 81 | return enabled && wlanOnly; 82 | } 83 | 84 | private boolean wlanOr3GOnly() { 85 | return enabled && wlanOr3GOnly; 86 | } 87 | 88 | public String name() { 89 | return CODEC_NAME; 90 | } 91 | 92 | public String key() { 93 | return CODEC_NAME+"_new"; 94 | } 95 | 96 | public String userName() { 97 | return CODEC_USER_NAME; 98 | } 99 | 100 | public String getTitle() { 101 | return CODEC_NAME + " (" + CODEC_DESCRIPTION + ")"; 102 | } 103 | 104 | public int number() { 105 | return CODEC_NUMBER; 106 | } 107 | 108 | public void setListPreference(ListPreference l) { 109 | l.setOnPreferenceChangeListener(this); 110 | l.setValue(value); 111 | } 112 | 113 | public boolean onPreferenceChange(Preference p, Object newValue) { 114 | ListPreference l = (ListPreference)p; 115 | value = (String)newValue; 116 | 117 | updateFlags(value); 118 | 119 | l.setValue(value); 120 | l.setSummary(l.getEntry()); 121 | 122 | return true; 123 | } 124 | 125 | private void updateFlags(String v) { 126 | 127 | if (v.equals("never")) { 128 | enabled = false; 129 | } else { 130 | enabled = true; 131 | if (v.equals("wlan")) 132 | wlanOnly = true; 133 | else 134 | wlanOnly = false; 135 | if (v.equals("wlanor3g")) 136 | wlanOr3GOnly = true; 137 | else 138 | wlanOr3GOnly = false; 139 | } 140 | } 141 | 142 | public String toString() { 143 | return "CODEC{ " + CODEC_NUMBER + ": " + getTitle() + "}"; 144 | } 145 | } -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/MainActivity.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | 4 | import 
org.apache.log4j.BasicConfigurator; 5 | 6 | import com.flazr.rtmp.client.ClientOptions; 7 | 8 | import android.os.Bundle; 9 | import android.os.StrictMode; 10 | import android.app.Activity; 11 | import android.view.Menu; 12 | 13 | public class MainActivity extends Activity { 14 | 15 | private Object[] args={"Test1","18.9750/72.8258","AB","1","Female","info",""}; 16 | @Override 17 | protected void onCreate(Bundle savedInstanceState) { 18 | super.onCreate(savedInstanceState); 19 | setContentView(R.layout.video_capture); 20 | BasicConfigurator.configure(); 21 | StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build(); 22 | StrictMode.setThreadPolicy(policy); 23 | ClientOptions options=new ClientOptions(); 24 | options.setHost("10.129.200.81"); 25 | options.setAppName("HariPanTest3"); 26 | options.setStreamName("Test1"); 27 | options.setArgs(args); 28 | options.publishLive(); 29 | 30 | VideoDialog mVideoDialog = new VideoDialog(this, "1", "1", "Test1", 1, options); 31 | mVideoDialog.show(); 32 | /* 33 | VideoCapture mVideoCapture = (VideoCapture) findViewById(R.id.video_capture); 34 | mVideoCapture.startCapture();*/ 35 | 36 | 37 | } 38 | 39 | @Override 40 | public boolean onCreateOptionsMenu(Menu menu) { 41 | // Inflate the menu; this adds items to the action bar if it is present. 
42 | getMenuInflater().inflate(R.menu.activity_main, menu); 43 | return true; 44 | } 45 | 46 | } 47 | -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/MainRtmpConnection.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | import org.jboss.netty.channel.Channel; 4 | import org.jboss.netty.channel.ChannelHandlerContext; 5 | import org.jboss.netty.channel.ChannelPipeline; 6 | import org.jboss.netty.channel.ChannelPipelineFactory; 7 | import org.jboss.netty.channel.ChannelStateEvent; 8 | import org.jboss.netty.channel.Channels; 9 | import org.jboss.netty.channel.MessageEvent; 10 | import org.slf4j.Logger; 11 | import org.slf4j.LoggerFactory; 12 | 13 | import android.util.Log; 14 | 15 | import com.flazr.rtmp.RtmpDecoder; 16 | import com.flazr.rtmp.RtmpEncoder; 17 | import com.flazr.rtmp.RtmpMessage; 18 | import com.flazr.rtmp.client.ClientHandshakeHandler; 19 | import com.flazr.rtmp.client.ClientOptions; 20 | import com.flazr.rtmp.message.Command; 21 | import com.flazr.rtmp.message.Control; 22 | 23 | public class MainRtmpConnection extends RtmpConnection { 24 | 25 | private static final Logger log = LoggerFactory.getLogger(MainRtmpConnection.class); 26 | private boolean connected = false; 27 | public MainRtmpConnection(ClientOptions options, BigBlueButtonClient context) { 28 | super(options, context); 29 | // TODO Auto-generated constructor stub 30 | } 31 | 32 | @Override 33 | protected ChannelPipelineFactory pipelineFactory() { 34 | // TODO Auto-generated method stub 35 | return new ChannelPipelineFactory() { 36 | @Override 37 | public ChannelPipeline getPipeline() throws Exception { 38 | final ChannelPipeline pipeline = Channels.pipeline(); 39 | pipeline.addLast("handshaker", new ClientHandshakeHandler(options)); 40 | pipeline.addLast("decoder", new RtmpDecoder()); 41 | pipeline.addLast("encoder", new 
RtmpEncoder()); 42 | pipeline.addLast("handler", MainRtmpConnection.this); 43 | return pipeline; 44 | } 45 | }; 46 | } 47 | 48 | @Override 49 | public void channelConnected(ChannelHandlerContext ctx, ChannelStateEvent e) { 50 | 51 | // * https://github.com/bigbluebutton/bigbluebutton/blob/master/bigbluebutton-client/src/org/bigbluebutton/main/model/users/NetConnectionDelegate.as#L102 52 | // * _netConnection.connect(?); 53 | 54 | Log.e("channel connection","success"); 55 | writeCommandExpectingResult(e.getChannel(), Command.connect(options)); 56 | } 57 | 58 | @Override 59 | public void channelDisconnected(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception { 60 | super.channelDisconnected(ctx, e); 61 | log.debug("Rtmp Channel Disconnected"); 62 | 63 | connected = false; 64 | } 65 | 66 | @Override 67 | public void messageReceived(ChannelHandlerContext ctx, MessageEvent me) { 68 | final Channel channel = me.getChannel(); 69 | final RtmpMessage message = (RtmpMessage) me.getMessage(); 70 | Log.e("","<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<,"); 71 | switch(message.getHeader().getMessageType()) { 72 | case CONTROL: 73 | Control control = (Control) message; 74 | switch(control.getType()) { 75 | case PING_REQUEST: 76 | final int time = control.getTime(); 77 | Control pong = Control.pingResponse(time); 78 | channel.write(pong); 79 | break; 80 | } 81 | break; 82 | 83 | case COMMAND_AMF0: 84 | case COMMAND_AMF3: 85 | Command command = (Command) message; 86 | String name = command.getName(); 87 | log.debug("server command: {}", name); 88 | break; 89 | 90 | case SHARED_OBJECT_AMF0: 91 | case SHARED_OBJECT_AMF3: 92 | //onSharedObject(channel, (SharedObjectMessage) message); 93 | Log.d("object", "shared"); 94 | break; 95 | default: 96 | log.info("ignoring rtmp message: {}", message); 97 | break; 98 | } 99 | } 100 | 101 | public boolean isConnected() { 102 | return connected; 103 | } 104 | } 
-------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/NativeLibsLoader.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | import org.slf4j.Logger; 4 | import org.slf4j.LoggerFactory; 5 | 6 | public class NativeLibsLoader { 7 | private static final Logger log = LoggerFactory.getLogger(NativeLibsLoader.class); 8 | private static boolean captureLibsLoaded = false; 9 | private static boolean capturePlaybackLoaded = false; 10 | 11 | public static void loadCaptureLibs(String packageName) throws SecurityException { 12 | if (captureLibsLoaded) 13 | return; 14 | 15 | String path = "/data/data/" + packageName + "/lib/"; 16 | System.load(path + "libavutil.so"); 17 | System.load(path + "libswscale.so"); 18 | System.load(path + "libavcodec.so"); 19 | System.load(path + "libavformat.so"); 20 | System.load(path + "libthread.so"); 21 | System.load(path + "libcommon.so"); 22 | System.load(path + "libqueue.so"); 23 | System.load(path + "libencode.so"); 24 | System.load(path + "libmconfnativeencodevideo.so"); 25 | 26 | log.debug("Native capture libraries loaded"); 27 | captureLibsLoaded = true; 28 | } 29 | 30 | public static void loadPlaybackLibs(String packageName) throws SecurityException { 31 | if (capturePlaybackLoaded) 32 | return; 33 | 34 | String path = "/data/data/" + packageName + "/lib/"; 35 | System.load(path + "libavutil.so"); 36 | System.load(path + "libswscale.so"); 37 | System.load(path + "libavcodec.so"); 38 | System.load(path + "libthread.so"); 39 | System.load(path + "libcommon.so"); 40 | System.load(path + "libqueue.so"); 41 | System.load(path + "libdecode.so"); 42 | System.load(path + "libspeex_jni.so"); 43 | System.load(path + "libmconfnativeshowvideo.so"); 44 | 45 | log.debug("Native playback libraries loaded"); 46 | capturePlaybackLoaded = true; 47 | } 48 | } 
-------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/RtmpAudioPlayer.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | import org.slf4j.Logger; 4 | import org.slf4j.LoggerFactory; 5 | 6 | import android.media.AudioFormat; 7 | import android.media.AudioManager; 8 | import android.media.AudioTrack; 9 | 10 | import com.flazr.rtmp.message.Audio; 11 | 12 | public class RtmpAudioPlayer { 13 | private static final Logger log = LoggerFactory.getLogger(RtmpAudioPlayer.class); 14 | /** Size of the read buffer */ 15 | public static final int BUFFER_SIZE = 1024; 16 | 17 | private Codec codec = new Speex(); 18 | private AudioTrack audioTrack; 19 | private int mu, maxjitter; 20 | private boolean running = false; 21 | private short[] decodedBuffer = new short[BUFFER_SIZE]; 22 | private byte[] pktBuffer = new byte[BUFFER_SIZE + 12]; 23 | // private RtpPacket pkt = new RtpPacket(pktBuffer, 0); 24 | 25 | public void start() { 26 | codec.init(); 27 | mu = codec.samp_rate()/8000; 28 | maxjitter = AudioTrack.getMinBufferSize(codec.samp_rate(), 29 | AudioFormat.CHANNEL_CONFIGURATION_MONO, 30 | AudioFormat.ENCODING_PCM_16BIT); 31 | if (maxjitter < 2*2*BUFFER_SIZE*3*mu) 32 | maxjitter = 2*2*BUFFER_SIZE*3*mu; 33 | audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, codec.samp_rate(), AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, 34 | maxjitter, AudioTrack.MODE_STREAM); 35 | // AudioManager am = (AudioManager) Receiver.mContext.getSystemService(Context.AUDIO_SERVICE); 36 | // if (Integer.parseInt(Build.VERSION.SDK) >= 5) 37 | // am.setSpeakerphoneOn(true); 38 | // else 39 | // am.setMode(AudioManager.MODE_NORMAL); 40 | //audioTrack.play(); 41 | 42 | running = true; 43 | } 44 | 45 | private void write(short a[],int b,int c) { 46 | synchronized (this) { 47 | audioTrack.write(a,b,c); 48 | } 49 | } 50 | 51 
| public void stop() { 52 | log.debug("rtmp player stop."); 53 | 54 | running = false; 55 | codec.close(); 56 | 57 | if(audioTrack != null) { 58 | log.debug("Releasing audio track resources"); 59 | 60 | if(audioTrack.getState() == AudioTrack.STATE_INITIALIZED) 61 | audioTrack.stop(); 62 | 63 | audioTrack.release(); 64 | audioTrack = null; 65 | } 66 | } 67 | 68 | public void onAudio(Audio audio) { 69 | if (running) { 70 | byte[] audioData = audio.getByteArray(); 71 | 72 | int offset = 1; 73 | 74 | // byte[] tmpBuffer = new byte[audioData.length - offset]; 75 | // System.arraycopy(audioData, offset, tmpBuffer, 0, tmpBuffer.length); 76 | // pkt.setPayload(tmpBuffer, tmpBuffer.length); 77 | // int decodedSize = codec.decode(pktBuffer, decodedBuffer, pkt.getPayloadLength()); 78 | 79 | System.arraycopy(audioData, offset, pktBuffer, 12, audioData.length - offset); 80 | int decodedSize = codec.decode(pktBuffer, decodedBuffer, audioData.length - offset); 81 | 82 | write(decodedBuffer, 0, decodedSize); 83 | 84 | audioTrack.play(); 85 | } 86 | } 87 | } -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/RtmpConnection.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | import java.net.InetSocketAddress; 4 | import java.util.Map; 5 | import java.util.concurrent.CountDownLatch; 6 | import java.util.concurrent.Executors; 7 | 8 | import org.jboss.netty.bootstrap.ClientBootstrap; 9 | import org.jboss.netty.channel.Channel; 10 | import org.jboss.netty.channel.ChannelFactory; 11 | import org.jboss.netty.channel.ChannelFuture; 12 | import org.jboss.netty.channel.ChannelFutureListener; 13 | import org.jboss.netty.channel.ChannelHandlerContext; 14 | import org.jboss.netty.channel.ChannelPipelineFactory; 15 | import org.jboss.netty.channel.ExceptionEvent; 16 | import 
org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import android.util.Log;

import com.flazr.rtmp.client.ClientHandler;
import com.flazr.rtmp.client.ClientOptions;
import com.flazr.rtmp.message.ChunkSize;
import com.flazr.rtmp.message.Command;
import com.flazr.rtmp.message.CommandAmf0;
import com.flazr.rtmp.message.Control;

/**
 * Base class for RTMP client connections built on Netty 3.
 * Owns the bootstrap/channel lifecycle (connect/disconnect) and handles the
 * common RTMP status and control messages; subclasses supply the pipeline.
 * NOTE(review): {@code logger}, {@code publisher}, {@code streamId} and
 * {@code swfvBytes} used below are inherited from flazr's ClientHandler —
 * confirm against the flazr sources.
 */
public abstract class RtmpConnection extends ClientHandler implements ChannelFutureListener {

    private static final Logger log = LoggerFactory.getLogger(RtmpConnection.class);

    // Shared client context; set once in the constructor, never reassigned.
    final protected BigBlueButtonClient context;

    public RtmpConnection(ClientOptions options, BigBlueButtonClient context) {
        super(options);
        Log.e("",options.toString());
        this.context = context;
    }

    // Netty connection state; all three are reset to null in disconnect().
    private ClientBootstrap bootstrap = null;
    private ChannelFuture future = null;
    private ChannelFactory factory = null;

    /**
     * Completion callback for the connect attempt started in {@link #connect()}.
     * Dispatches to the success/failure hooks below.
     */
    @Override
    public void operationComplete(ChannelFuture arg0) throws Exception {
        if (future.isSuccess()){
            Log.e("", "jjjjjjjjjjjjjjjjjjjj");

            onConnectedSuccessfully();
        }
        else{
            Log.e("", "wwwweeeeeeeeeeeeeee");
            onConnectedUnsuccessfully();
        }
    }

    // Called when the TCP connect attempt fails; currently a no-op.
    private void onConnectedUnsuccessfully() {

    }

    // Hook for subclasses: called when the TCP connect attempt succeeds.
    protected void onConnectedSuccessfully() {

    }

    /**
     * Starts an asynchronous connect to options host/port; the result is
     * delivered later via {@link #operationComplete(ChannelFuture)}.
     *
     * @return always true — the connect itself is asynchronous
     */
    public boolean connect() {
        if(factory == null)
            factory = new NioClientSocketChannelFactory(Executors.newCachedThreadPool(), Executors.newCachedThreadPool());
        bootstrap = new ClientBootstrap(factory);
        bootstrap.setPipelineFactory(pipelineFactory());
        future = bootstrap.connect(new InetSocketAddress(options.getHost(),options.getPort()));
        future.addListener(this);

        return true;
    }

    /** Subclasses build the channel pipeline (handshake, codecs, handler). */
    abstract protected ChannelPipelineFactory pipelineFactory();

    /**
     * Disconnects the channel (if connected), waits for the close to finish,
     * releases the channel factory resources and clears all connection state.
     */
    public void disconnect() {
        if (future != null) {
            if (future.getChannel().isConnected()) {
                log.debug("Channel is connected, disconnecting");
                // disconnect() instead of close() — close() raised ClosedChannelException
                future.getChannel().disconnect();
                future.getChannel().getCloseFuture().awaitUninterruptibly();
            }
            future.removeListener(this);
            factory.releaseExternalResources();
            future = null; factory = null; bootstrap = null;
        }
    }

    /**
     * Swallows a specific malformed-metadata decode error (AMF byte 0x65)
     * that some servers emit; everything else is delegated to the superclass.
     */
    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) {
        String exceptionMessage = e.getCause().getMessage();
        if (exceptionMessage != null && exceptionMessage.contains("ArrayIndexOutOfBoundsException") && exceptionMessage.contains("bad value / byte: 101 (hex: 65)")) {
            Log.e("","wwwwwwwwwwwwwww");
            log.debug("Ignoring malformed metadata");
            return;
        } else {
            Log.e("",""+exceptionMessage);
            super.exceptionCaught(ctx, e);
        }
    }

    /**
     * Sends the custom "updateStreamn" AMF0 command for stream "Test1".
     * NOTE(review): despite the method name this does not fetch a user id —
     * confirm the intended server-side contract.
     */
    public void doGetMyUserId(Channel channel) {
        Command command = new CommandAmf0("updateStreamn", null, "Test1","AV");
        Log.e("","updateStreamn");
        writeCommandExpectingResult(channel, command);
    }

    /**
     * Handles NetStream onStatus events by level ("status"/"warning"/"error").
     * See http://help.adobe.com/en_US/FlashPlatform/reference/actionscript/3/flash/events/NetStatusEvent.html
     */
    @Override
    protected void onCommandStatus(Channel channel, Command command,
            Map args) {
        final String code = (String) args.get("code");
        final String level = (String) args.get("level");
        final String description = (String) args.get("description");
        final String application = (String) args.get("application");
        final String messageStr = level + " onStatus message, code: " + code + ", description: " + description + ", application: " + application;

        if (level.equals("status")) {
            logger.info(messageStr);
            if (code.equals("NetStream.Publish.Start")
                    && publisher != null && !publisher.isStarted()) {
                // Start publishing only after the server acknowledges the publish.
                logger.debug("starting the publisher after NetStream.Publish.Start");
                publisher.start(channel, options.getStart(), options.getLength(), new ChunkSize(4096));
                if(publisher.isStarted())
                {doGetMyUserId(channel);}
            } else if (code.equals("NetStream.Unpublish.Success")
                    && publisher != null) {
                logger.info("unpublish success, closing channel");
                ChannelFuture future = channel.write(Command.closeStream(streamId));
                future.addListener(ChannelFutureListener.CLOSE);
            } else if (code.equals("NetStream.Play.Stop")) {
                channel.close();
            }
        } else if (level.equals("warning")) {
            logger.warn(messageStr);
            if (code.equals("NetStream.Play.InsufficientBW")) {
                // Not enough bandwidth: close the stream and the channel.
                ChannelFuture future = channel.write(Command.closeStream(streamId));
                future.addListener(ChannelFutureListener.CLOSE);
                // \TODO create a listener for insufficient bandwidth
            }
        } else if (level.equals("error")) {
            logger.error(messageStr);
            channel.close();
        }
    }

    /**
     * Handles RTMP control messages: ping/pong keep-alive, SWF verification,
     * and stream-begin (which kicks off the publisher or sets the buffer).
     */
    @Override
    protected void onControl(Channel channel, Control control) {
        // Pings arrive every second; don't log them to avoid noise.
        if (control.getType() != Control.Type.PING_REQUEST)
            logger.debug("control: {}", control);
        switch(control.getType()) {
        case PING_REQUEST:
            final int time = control.getTime();
            Control pong = Control.pingResponse(time);
            if (channel.isWritable())
                channel.write(pong);
            break;
        case SWFV_REQUEST:
            if(swfvBytes == null) {
                logger.warn("swf verification not initialized!"
                        + " not sending response, server likely to stop responding / disconnect");
            } else {
                Control swfv = Control.swfvResponse(swfvBytes);
                logger.info("sending swf verification response: {}", swfv);
                channel.write(swfv);
            }
            break;
        case STREAM_BEGIN:
            if(publisher != null && !publisher.isStarted()) {
                publisher.start(channel, options.getStart(),
                        options.getLength(), new ChunkSize(4096));
                return;
            }
            // NOTE(review): sets the buffer for hard-coded stream id 1 rather
            // than the inherited streamId — confirm this is intentional.
            channel.write(Control.setBuffer(1, options.getBuffer()));

            break;
        default:
            logger.debug("ignoring control message: {}", control);
        }
    }
}
/**
 * Thin JNI wrapper around the native Speex codec (libspeex_jni).
 * The native method signatures below must match the JNI library exactly.
 */
public class Speex extends CodecBase implements Codec {

    /* quality
     * 1 : 4kbps (very noticeable artifacts, usually intelligible)
     * 2 : 6kbps (very noticeable artifacts, good intelligibility)
     * 4 : 8kbps (noticeable artifacts sometimes)
     * 6 : 11kpbs (artifacts usually only noticeable with headphones)
     * 8 : 15kbps (artifacts not usually noticeable)
     */
    private static final int DEFAULT_COMPRESSION = 6;

    public Speex() {
        // Codec identity used by CodecBase; 97 is the dynamic RTP payload number.
        CODEC_NAME = "speex";
        CODEC_USER_NAME = "speex";
        CODEC_DESCRIPTION = "20.6kbit";
        CODEC_NUMBER = 97;
        CODEC_DEFAULT_SETTING = "always";
        super.update();
    }

    // Loads the JNI library; a failure is swallowed deliberately — callers
    // are expected to check isLoaded() (see init()).
    void load() {
        try {
            System.loadLibrary("speex_jni");
            super.load();
        } catch (Throwable e) {
            Log.e("","nolib");
        }

    }

    /** Opens the native encoder/decoder at the given compression level. */
    public native int open(int compression);
    /** Decodes {@code size} bytes of Speex into 16-bit PCM; returns sample count. */
    public native int decode(byte encoded[], short lin[], int size);
    /** Encodes {@code size} PCM samples from {@code offset}; returns byte count. */
    public native int encode(short lin[], int offset, byte encoded[], int size);
    /** Releases the native codec state. */
    public native void close();

    // Loads the library and, only if that succeeded, opens the codec.
    public void init() {
        load();
        if (isLoaded())
            open(DEFAULT_COMPRESSION);
    }
}
// When there are 2 active preview surfaces on the same activity 45 | // (for example: the normal surface and a dialog surface) 46 | // we need to use this boolean to manage the conflict 47 | // true when: the preview is being shown on a Dialog 48 | // false otherwise 49 | 50 | private boolean usingFaster, usingHidden; 51 | public VideoCapture(Context context, AttributeSet attrs) { 52 | super(context, attrs); 53 | 54 | this.context = context; 55 | 56 | // Install a SurfaceHolder.Callback so we get notified when the 57 | // underlying surface is created and destroyed. 58 | mHolder = getHolder(); 59 | mHolder.addCallback(this); 60 | mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); 61 | } 62 | 63 | private int getPublisher(){ 64 | mVideoPublish = ((BigBlueButton) getContext().getApplicationContext()).getVideoPublish(); 65 | if(mVideoPublish == null){ 66 | log.debug("Error: could not get or instantiate a VideoPublisher"); 67 | return CaptureConstants.E_COULD_NOT_GET_PUBLISHER; 68 | } 69 | return CaptureConstants.E_OK; 70 | } 71 | 72 | public boolean isCapturing() { // returns true if the capture is running or is paused 73 | // returns false if the capture is stopped or is in an error state 74 | if(mVideoPublish != null && 75 | (mVideoPublish.state == CaptureConstants.RESUMED || 76 | mVideoPublish.state == CaptureConstants.PAUSED)){ 77 | return true; 78 | } 79 | return false; 80 | } 81 | 82 | // Centers the preview on the screen keeping the capture aspect ratio. 83 | // Remember to call this function after you change the width or height if 84 | // you want to keep the aspect and the video centered 85 | // This function is useful for displaying the preview centered on fullscreen on an Activity 86 | // or centered on a Dialog, for example. 
If that is not the case, then it may be better to use 87 | // the VideoCaptureLayout class to handle the video preview position instead 88 | public int centerPreview() { 89 | if(mVideoPublish != null){ 90 | VideoCentering mVideoCentering = new VideoCentering(); 91 | mVideoCentering.setAspectRatio(mVideoPublish.getWidth()/(float)mVideoPublish.getHeight()); 92 | LayoutParams layoutParams = mVideoCentering.getVideoLayoutParams(mVideoCentering.getDisplayMetrics(this.getContext(),40), this.getLayoutParams()); 93 | setLayoutParams(layoutParams); 94 | return CaptureConstants.E_OK; 95 | } else { 96 | log.debug("Error: could not center screen"); 97 | return CaptureConstants.E_COULD_NOT_CENTER; 98 | } 99 | } 100 | 101 | // This function is useful for hidden the preview that was being shown on fullscreen on an Activity 102 | // or centered on a Dialog, for example. If that is not the case, then it may be better to use 103 | // the VideoCaptureLayout class to handle the video preview hidding instead 104 | public void hidePreview() { 105 | VideoCentering mVideoCentering = new VideoCentering(); 106 | LayoutParams layoutParams = mVideoCentering.hidePreview(this.getLayoutParams()); 107 | setLayoutParams(layoutParams); 108 | } 109 | 110 | private boolean isAvailableSprintFFC() 111 | { 112 | try { 113 | Class.forName("android.hardware.HtcFrontFacingCamera"); 114 | return true; 115 | } catch (Exception ex) { 116 | return false; 117 | } 118 | } 119 | 120 | private int openCameraNormalWay(){ 121 | if(mVideoPublish.mCamera != null){ 122 | mVideoPublish.mCamera.release(); 123 | mVideoPublish.mCamera = null; 124 | } 125 | 126 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) { 127 | int firstFrontCamera = -1; 128 | int firstBackCamera = -1; 129 | for (int i = 0; i < Camera.getNumberOfCameras(); ++i) { 130 | CameraInfo cameraInfo = new CameraInfo(); 131 | Camera.getCameraInfo(i, cameraInfo); 132 | switch (cameraInfo.facing) { 133 | case CameraInfo.CAMERA_FACING_FRONT: 134 | 
if (firstFrontCamera == -1) firstFrontCamera = i; break; 135 | case CameraInfo.CAMERA_FACING_BACK: 136 | if (firstBackCamera == -1) firstBackCamera = i; break; 137 | } 138 | } 139 | 140 | if (firstFrontCamera != -1) { 141 | mVideoPublish.cameraId = firstFrontCamera; 142 | } else if (firstBackCamera != -1) { 143 | mVideoPublish.cameraId = firstBackCamera; 144 | } else { 145 | return CaptureConstants.E_COULD_NOT_OPEN_CAMERA; 146 | } 147 | mVideoPublish.mCamera = Camera.open(mVideoPublish.cameraId); 148 | if (mVideoPublish.mCamera == null) 149 | return CaptureConstants.E_COULD_NOT_OPEN_CAMERA; 150 | } else { 151 | mVideoPublish.mCamera = Camera.open(); 152 | if (mVideoPublish.mCamera == null) 153 | return CaptureConstants.E_COULD_NOT_OPEN_CAMERA; 154 | 155 | Camera.Parameters parameters = mVideoPublish.mCamera.getParameters(); 156 | parameters.set("camera-id", 2); // this command sets the front facing camera to be used 157 | // (if the device has one). Else, it sets the default camera. 158 | mVideoPublish.mCamera.setParameters(parameters); 159 | } 160 | return CaptureConstants.E_OK; 161 | } 162 | 163 | private int openCamera(){ 164 | int err = CaptureConstants.E_OK; 165 | 166 | if (isAvailableSprintFFC()) { // this device has the specific HTC camera 167 | try { // try opening the specific HTC camera 168 | Method method = Class.forName("android.hardware.HtcFrontFacingCamera").getDeclaredMethod("getCamera", (Class[])null); 169 | mVideoPublish.mCamera = (Camera) method.invoke((Object)null, (Object)null); 170 | } catch (Exception ex) { // it was not possible to open the specifica HTC camera, 171 | // so, lets open the camera using the normal way 172 | log.debug(ex.toString()); 173 | 174 | err = openCameraNormalWay(); 175 | } 176 | } else { // this device does not have the specific HTC camera, 177 | // so, lets open the camera using the normal way 178 | err = openCameraNormalWay(); 179 | } 180 | 181 | return err; 182 | } 183 | 184 | private int setDisplay(){ 185 | 
if(mVideoPublish.mCamera != null){ 186 | try { 187 | mVideoPublish.mCamera.setPreviewDisplay(mHolder); 188 | } catch (IOException exception) { 189 | log.debug("Error: could not set preview display"); 190 | log.debug(exception.toString()); 191 | 192 | mVideoPublish.mCamera.release(); 193 | mVideoPublish.mCamera = null; 194 | 195 | return CaptureConstants.E_COULD_NOT_SET_PREVIEW_DISPLAY_R1; 196 | } 197 | } else { 198 | log.debug("Error: setDisplay() called without an opened camera"); 199 | return CaptureConstants.E_COULD_NOT_SET_PREVIEW_DISPLAY_R2; 200 | } 201 | 202 | return CaptureConstants.E_OK; 203 | } 204 | 205 | private int setParameters(){ 206 | if(mVideoPublish.mCamera != null){ 207 | Camera.Parameters parameters = mVideoPublish.mCamera.getParameters(); 208 | 209 | if (!parameters.getSupportedPreviewSizes().isEmpty()) { 210 | parameters.setPreviewSize( 211 | parameters.getSupportedPreviewSizes().get(0).width, 212 | parameters.getSupportedPreviewSizes().get(0).height); 213 | } 214 | if (Build.VERSION.SDK_INT > Build.VERSION_CODES.GINGERBREAD) { 215 | List fpsRange = parameters.getSupportedPreviewFpsRange(); 216 | if (fpsRange != null && !fpsRange.isEmpty()) 217 | parameters.setPreviewFpsRange( 218 | fpsRange.get(0)[Camera.Parameters.PREVIEW_FPS_MIN_INDEX], 219 | fpsRange.get(0)[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]); 220 | // parameters.set("orientation", "portrait"); 221 | } else { 222 | List fps = parameters.getSupportedPreviewFrameRates(); 223 | if (fps != null && !fps.isEmpty()) 224 | parameters.setPreviewFrameRate(fps.get(0)); 225 | } 226 | parameters.setPreviewFormat(ImageFormat.NV21); 227 | 228 | mVideoPublish.mCamera.setParameters(parameters); 229 | 230 | SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(getContext()); 231 | int rotation = Integer.parseInt(prefs.getString("preview_rotation", "0")); 232 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO) 233 | mVideoPublish.mCamera.setDisplayOrientation(rotation); 234 | 
else { 235 | // doesn't apply any rotation 236 | // \TODO apply the preferred rotation 237 | // parameters.setRotation(rotation); 238 | // parameters.set("rotation", rotation); 239 | } 240 | 241 | // setCameraDisplayOrientation((Activity) context, mVideoPublish.cameraId, mVideoPublish.mCamera); 242 | 243 | parameters = mVideoPublish.mCamera.getParameters(); 244 | 245 | mVideoPublish.setFramerate(parameters.getPreviewFrameRate()); 246 | mVideoPublish.setHeight(parameters.getPreviewSize().height); 247 | mVideoPublish.setWidth(parameters.getPreviewSize().width); 248 | 249 | log.debug("Using capture parameters: " + mVideoPublish.getWidth() + "x" + mVideoPublish.getHeight() + ", {} fps", mVideoPublish.getFramerate()); 250 | 251 | return CaptureConstants.E_OK; 252 | } else { 253 | log.debug("Error: setParameters() called without an opened camera"); 254 | return CaptureConstants.E_COULD_NOT_SET_PARAMETERS; 255 | } 256 | } 257 | 258 | private int getBufferSize(){ 259 | if(mVideoPublish.mCamera != null){ 260 | PixelFormat pixelFormat = new PixelFormat(); 261 | Camera.Parameters param = mVideoPublish.mCamera.getParameters(); 262 | PixelFormat.getPixelFormatInfo(param.getPreviewFormat(), pixelFormat); 263 | int buffersize = (param.getPreviewSize().width * param.getPreviewSize().height * pixelFormat.bitsPerPixel) / 8; 264 | return buffersize; 265 | } else { 266 | log.debug("Error: getBufferSize() called without an opened camera"); 267 | return CaptureConstants.E_COULD_NOT_GET_BUFSIZE; 268 | } 269 | } 270 | 271 | private void setCallbackBest(){ 272 | usingFaster = true; 273 | usingHidden = false; 274 | 275 | //we call addCallbackBuffer twice to reduce the "Out of buffers, clearing callback!" 
problem 276 | byte[] buffer = new byte[mVideoPublish.bufSize]; 277 | mVideoPublish.mCamera.addCallbackBuffer(buffer); 278 | buffer = new byte[mVideoPublish.bufSize]; 279 | mVideoPublish.mCamera.addCallbackBuffer(buffer); 280 | 281 | mVideoPublish.mCamera.setPreviewCallbackWithBuffer(this); 282 | 283 | log.debug("Using fast preview callback"); 284 | } 285 | 286 | private int setCallbackHidden(){ 287 | int err; 288 | 289 | usingFaster = true; 290 | usingHidden = true; 291 | 292 | //Must call this before calling addCallbackBuffer to get all the 293 | // reflection variables setup 294 | err = initForACB(); 295 | if(err != CaptureConstants.E_OK){ 296 | return err; 297 | } 298 | 299 | //we call addCallbackBuffer twice to reduce the "Out of buffers, clearing callback!" problem 300 | byte[] buffer = new byte[mVideoPublish.bufSize]; 301 | err = addCallbackBuffer_Android2p2(buffer); 302 | if(err != CaptureConstants.E_OK){ 303 | return err; 304 | } 305 | buffer = new byte[mVideoPublish.bufSize]; 306 | err = addCallbackBuffer_Android2p2(buffer); 307 | if(err != CaptureConstants.E_OK){ 308 | return err; 309 | } 310 | 311 | err = setPreviewCallbackWithBuffer_Android2p2(false); 312 | if(err != CaptureConstants.E_OK){ 313 | return err; 314 | } 315 | 316 | log.debug("Using fast but hidden preview callback"); 317 | return CaptureConstants.E_OK; 318 | } 319 | 320 | private void setCallbackSlow(){ 321 | usingFaster = false; 322 | usingHidden = false; 323 | 324 | mVideoPublish.mCamera.setPreviewCallback(this); 325 | 326 | log.debug("Using slow preview callback"); 327 | } 328 | 329 | private int prepareCallback(){ 330 | if(mVideoPublish.mCamera == null){ 331 | log.debug("Error: prepareCallback() called without an opened camera"); 332 | return CaptureConstants.E_COULD_NOT_PREPARE_CALLBACK_R1; 333 | } 334 | if(mVideoPublish.bufSize < CaptureConstants.E_OK || mVideoPublish.bufSize <= 0){ 335 | log.debug("Error: prepareCallback() called without a valid mVideoPublish.bufSize"); 336 | return 
CaptureConstants.E_COULD_NOT_PREPARE_CALLBACK_R2; 337 | } 338 | //java reflection (idea from http://code.google.com/p/android/issues/detail?id=2794): 339 | //This kind of java reflection is safe to be used as explained in the official android documentation 340 | //on (http://developer.android.com/resources/articles/backward-compatibility.html). 341 | //Explanation: The method setPreviewCallback exists since Android's API level 1. 342 | //An alternative method is the setPreviewCallbackWithBuffer, which exists since API level 8 (Android 2.2). 343 | //The setPreviewCallbackWithBuffer method is much better than the setPreviewCallback method 344 | //in terms of performance, because the onPreviewFrame method returns a copy of the frame 345 | //in a newly allocated memory when using the setPreviewCallback method, causing the 346 | //Garbage Collector to perform, which takes about 80-100ms. 347 | //Instead, when the setPreviewCallbackWithBuffer is used, the byte array is overwritten, 348 | //avoiding the GC to perform. 349 | //In mconf we want compatibility with API levels lower than 8. 350 | //The setPreviewCallbackWithBuffer method is implemented on a Debug class on API levels lower than 8. 351 | //In order to use it on API levels lower than 8, we need to use Java Reflection. 
352 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO) { //if(2.2 or higher){ 353 | setCallbackBest(); 354 | } else if(HiddenCallbackWithBuffer()) { //} else if(has the methods hidden){ 355 | if(setCallbackHidden() != CaptureConstants.E_OK){ 356 | setCallbackSlow(); 357 | } 358 | } else { 359 | setCallbackSlow(); 360 | } 361 | 362 | return CaptureConstants.E_OK; 363 | } 364 | 365 | private int beginPreview(){ 366 | if(mVideoPublish.mCamera != null){ 367 | log.debug("Preview starting"); 368 | mVideoPublish.mCamera.startPreview(); 369 | log.debug("Preview started"); 370 | return CaptureConstants.E_OK; 371 | } else { 372 | log.debug("Error: beginPreview() called without an opened camera"); 373 | return CaptureConstants.E_COULD_NOT_BEGIN_PREVIEW; 374 | } 375 | } 376 | 377 | private int initNativeSide(){ 378 | if(mVideoPublish.bufSize < CaptureConstants.E_OK || mVideoPublish.bufSize <= 0){ 379 | log.debug("Error: initNativeSide() called without a valid mVideoPublish.bufSize"); 380 | return CaptureConstants.E_COULD_NOT_INIT_NATIVE_SIDE; 381 | } 382 | mVideoPublish.initNativeEncoder(); 383 | return CaptureConstants.E_OK; 384 | } 385 | 386 | private int startPublisherThread(){ 387 | if(!mVideoPublish.nativeEncoderInitialized){ 388 | log.debug("Error: startPublisherThread() called but native capture side not initialized"); 389 | return CaptureConstants.E_COULD_NOT_START_PUBLISHER_THREAD_R1; 390 | } 391 | if(mVideoPublish.isAlive()){ 392 | log.debug("Error: startPublisherThread() called but publisher thread already running"); 393 | return CaptureConstants.E_COULD_NOT_START_PUBLISHER_THREAD_R2; 394 | } 395 | mVideoPublish.start(); 396 | return CaptureConstants.E_OK; 397 | } 398 | 399 | private int startPublisher(ClientOptions opt){ 400 | this.options=opt; 401 | if(!mVideoPublish.nativeEncoderInitialized){ 402 | log.debug("Error: startPublisher() called but native capture side not initialized"); 403 | return CaptureConstants.E_COULD_NOT_START_PUBLISHER_R1; 404 | } 405 | 
if(!mVideoPublish.isAlive()){ 406 | log.debug("Error: startPublisher() called but publisher thread not running"); 407 | return CaptureConstants.E_COULD_NOT_START_PUBLISHER_R2; 408 | } 409 | mVideoPublish.startPublisher(opt); 410 | return CaptureConstants.E_OK; 411 | } 412 | 413 | private void clearCallbackBest(){ 414 | mVideoPublish.mCamera.setPreviewCallbackWithBuffer(null); 415 | } 416 | 417 | private int clearCallbackHidden(){ 418 | int err; 419 | 420 | err = setPreviewCallbackWithBuffer_Android2p2(true); 421 | if(err != CaptureConstants.E_OK){ 422 | return err; 423 | } 424 | 425 | return CaptureConstants.E_OK; 426 | } 427 | 428 | private void clearCallbackSlow(){ 429 | mVideoPublish.mCamera.setPreviewCallback(null); 430 | } 431 | 432 | private void resetBuffersAndCallbacks(){ 433 | if(usingHidden){ 434 | clearCallbackHidden(); 435 | } else if(usingFaster){ 436 | clearCallbackBest(); 437 | } else { 438 | clearCallbackSlow(); 439 | } 440 | } 441 | 442 | public int startCapture(ClientOptions opt) { 443 | NativeLibsLoader.loadCaptureLibs(context.getPackageName()); 444 | 445 | int err = CaptureConstants.E_OK; 446 | if(mVideoPublish == null){ 447 | err = getPublisher(); 448 | if(err != CaptureConstants.E_OK){ 449 | mVideoPublish.state = CaptureConstants.ERROR; 450 | return err; 451 | } 452 | } 453 | 454 | mVideoPublish.state = CaptureConstants.RESUMED; 455 | 456 | mVideoPublish.restartWhenResume = false; 457 | 458 | // acquires the camera 459 | err = openCamera(); 460 | if(err != CaptureConstants.E_OK){ 461 | mVideoPublish.state = CaptureConstants.ERROR; 462 | return err; 463 | }; 464 | 465 | // sets up the camera parameters 466 | err = setParameters(); 467 | if(err != CaptureConstants.E_OK){ 468 | mVideoPublish.state = CaptureConstants.ERROR; 469 | return err; 470 | } 471 | 472 | // gets the size of a non encoded frame 473 | mVideoPublish.bufSize = getBufferSize(); 474 | if(mVideoPublish.bufSize < 0){ 475 | mVideoPublish.state = CaptureConstants.ERROR; 476 | return 
mVideoPublish.bufSize; 477 | } 478 | 479 | // creates the shared buffer, inits the native side and sets the streamId 480 | err = initNativeSide(); 481 | if(err != CaptureConstants.E_OK){ 482 | mVideoPublish.state = CaptureConstants.ERROR; 483 | return err; 484 | } 485 | 486 | // start the publisher native thread 487 | err = startPublisherThread(); 488 | if(err != CaptureConstants.E_OK){ 489 | mVideoPublish.state = CaptureConstants.ERROR; 490 | return err; 491 | } 492 | 493 | // start the publisher handler 494 | err = startPublisher(opt); 495 | if(err != CaptureConstants.E_OK){ 496 | mVideoPublish.state = CaptureConstants.ERROR; 497 | return err; 498 | } 499 | 500 | err = resumeCapture(); 501 | if(err != CaptureConstants.E_OK){ 502 | mVideoPublish.state = CaptureConstants.ERROR; 503 | return err; 504 | } 505 | 506 | 507 | return err; 508 | } 509 | 510 | public int resumeCapture(){ 511 | int err = CaptureConstants.E_OK; 512 | Log.e("","resumeCapture"); 513 | if(!isSurfaceCreated || mVideoPublish == null){ 514 | err = CaptureConstants.E_COULD_NOT_RESUME_CAPTURE; 515 | mVideoPublish.state = CaptureConstants.ERROR; 516 | return err; 517 | } 518 | 519 | mVideoPublish.state = CaptureConstants.RESUMED; 520 | 521 | mVideoPublish.lastSurfaceDestroyed = false; // set to false because the 2 surfaces conflict has ended 522 | mVideoPublish.nextSurfaceCreated = false; // set to false because the 2 surfaces conflict has ended 523 | 524 | // tells it where to draw (sets display for preview) 525 | err = setDisplay(); 526 | if(err != CaptureConstants.E_OK){ 527 | mVideoPublish.state = CaptureConstants.ERROR; 528 | return err; 529 | } 530 | 531 | // prepares the callback 532 | err = prepareCallback(); 533 | if(err != CaptureConstants.E_OK){ 534 | mVideoPublish.state = CaptureConstants.ERROR; 535 | return err; 536 | } 537 | 538 | // begins the preview. 
539 | err = beginPreview(); 540 | if(err != CaptureConstants.E_OK){ 541 | mVideoPublish.state = CaptureConstants.ERROR; 542 | return err; 543 | } 544 | 545 | return err; 546 | } 547 | 548 | public void stopCapture(){ 549 | if(isCapturing()){ 550 | NativeLibsLoader.loadCaptureLibs(context.getPackageName()); 551 | 552 | pauseCapture(); 553 | mVideoPublish.state = CaptureConstants.STOPPED; 554 | 555 | // Because the CameraDevice object is not a shared resource, it's very 556 | // important to release it when it may not be used anymore 557 | if(mVideoPublish.mCamera != null){ 558 | mVideoPublish.mCamera.release(); 559 | mVideoPublish.mCamera = null; 560 | } 561 | 562 | mVideoPublish.endNativeEncoder(); 563 | mVideoPublish.stopPublisher(); 564 | 565 | mVideoPublish = ((BigBlueButton) getContext().getApplicationContext()).deleteVideoPublish(); 566 | 567 | } 568 | } 569 | 570 | private void pauseCapture(){ 571 | if(mVideoPublish != null && mVideoPublish.mCamera != null && 572 | !(mVideoPublish.state == CaptureConstants.PAUSED)){ 573 | mVideoPublish.state = CaptureConstants.PAUSED; 574 | 575 | mVideoPublish.mCamera.stopPreview(); 576 | 577 | resetBuffersAndCallbacks(); 578 | 579 | try { 580 | mVideoPublish.mCamera.setPreviewDisplay(null); 581 | } catch (IOException e) { 582 | log.debug("Warning: error when trying to remove the preview display"); 583 | e.printStackTrace(); 584 | } 585 | } 586 | } 587 | 588 | // Checks if addCallbackBuffer and setPreviewCallbackWithBuffer are written but hidden. 589 | // This method will look for all methods of the android.hardware.Camera class, 590 | // even the hidden ones. 
591 | private boolean HiddenCallbackWithBuffer(){ 592 | int exist = 0; 593 | try { 594 | Class> c = Class.forName("android.hardware.Camera"); 595 | Method[] m = c.getMethods(); 596 | for(int i=0; i mC = Class.forName("android.hardware.Camera"); 617 | 618 | // variable that will indicate of what class is each parameter of the method 619 | Class>[] mPartypes = new Class[1]; 620 | mPartypes[0] = (new byte[1]).getClass(); //There is probably a better way to do this. 621 | mAcb = mC.getMethod("addCallbackBuffer", mPartypes); 622 | 623 | mArglist = new Object[1]; 624 | } catch (Exception e) { 625 | log.debug("Problem setting up for addCallbackBuffer: " + e.toString()); 626 | return CaptureConstants.E_COULD_NOT_INIT_HIDDEN; 627 | } 628 | return CaptureConstants.E_OK; 629 | } 630 | 631 | // This method uses reflection to call the addCallbackBuffer method 632 | // It allows you to add a byte buffer to the queue of buffers to be used by preview. 633 | // Real addCallbackBuffer implementation: http://android.git.kernel.org/?p=platform/frameworks/base.git;a=blob;f=core/java/android/hardware/Camera.java;hb=9db3d07b9620b4269ab33f78604a36327e536ce1 634 | // @param b The buffer to register. Size should be width * height * bitsPerPixel / 8. 
635 | private int addCallbackBuffer_Android2p2(byte[] b){ // this function is native since Android 2.2 636 | //Check to be sure initForACB has been called to setup 637 | // mVideoPublish.mAcb and mVideoPublish.mArglist 638 | // if(mVideoPublish.mArglist == null){ 639 | // initForACB(); 640 | // } 641 | 642 | mArglist[0] = b; 643 | try { 644 | mAcb.invoke(mVideoPublish.mCamera, mArglist); 645 | } catch (Exception e) { 646 | log.debug("invoking addCallbackBuffer failed: " + e.toString()); 647 | return CaptureConstants.E_COULD_NOT_ADD_HIDDEN; 648 | } 649 | return CaptureConstants.E_OK; 650 | } 651 | 652 | // This method uses reflection to call the setPreviewCallbackWithBuffer method 653 | // Use this method instead of setPreviewCallback if you want to use manually allocated 654 | // buffers. Assumes that "this" implements Camera.PreviewCallback 655 | private int setPreviewCallbackWithBuffer_Android2p2(boolean clear){ // this function is native since Android 2.2 656 | try { 657 | Class> c = Class.forName("android.hardware.Camera"); 658 | Method spcwb = null; // sets a preview with buffers 659 | //This way of finding our method is a bit inefficient 660 | // However, since this method is only called when the preview starts or resumes 661 | // this should not cause performance issues 662 | Method[] m = c.getMethods(); // get all methods of camera 663 | for(int i=0; i[] mPartypes = new Class[2]; // variable that will indicate of what class 673 | // // is each parameter of the method 674 | // mPartypes[0] = (mVideoPublish.mCamera).getClass(); 675 | // mPartypes[1] = (this).getClass(); //There is probably a better way to do this. 
676 | // spcwb = c.getMethod("setPreviewCallbackWithBuffer", mPartypes); 677 | 678 | //If we were able to find the setPreviewCallbackWithBuffer method of Camera, 679 | // we can now invoke it on our Camera instance, setting 'this' to be the 680 | // callback handler 681 | if(spcwb != null){ 682 | Object[] arglist = new Object[1]; 683 | if(clear){ 684 | arglist[0] = null; 685 | } else { 686 | arglist[0] = this; // receives a copy of a preview frame 687 | } 688 | spcwb.invoke(mVideoPublish.mCamera, arglist); 689 | //Log.i("AR","setPreviewCallbackWithBuffer: Called method"); 690 | } else { 691 | log.debug("setPreviewCallbackWithBuffer: Did not find method"); 692 | return CaptureConstants.E_COULD_NOT_SET_HIDDEN_R1; 693 | } 694 | } catch (Exception e) { 695 | log.debug("{}",e.toString()); 696 | return CaptureConstants.E_COULD_NOT_SET_HIDDEN_R2; 697 | } 698 | return CaptureConstants.E_OK; 699 | } 700 | 701 | @Override 702 | public void surfaceDestroyed(SurfaceHolder holder) { 703 | log.debug("preview surface destroyed"); 704 | 705 | isSurfaceCreated = false; 706 | 707 | if(mVideoPublish != null && 708 | (mVideoPublish.state == CaptureConstants.RESUMED 709 | || mVideoPublish.state == CaptureConstants.PAUSED)){ // means that the activity or the orientation 710 | // changed and the camera was being captured and published (except if 711 | // we are faking a destruction - see the VideoCapture.fakeDestroyed variable for more info) 712 | // (because, in the strategy we are using, this surface will only be destroyed 713 | // when the activity or the orientation changes) 714 | //if(BackgroundManager.isApplicationBroughtToBackground(context)){ // means that the next 715 | // activity doesn't belong to this application. 
So, we need to: 716 | // 1) stop the capture, because we won't have a surface 717 | // 2) consequently, we have to stop the publish 718 | // 3) release the camera, because the user may want to use 719 | // the camera on another application 720 | //mVideoPublish.restartWhenResume = true; 721 | 722 | // stops the preview, the publish and releases the camera 723 | //stopCapture(); 724 | //} else { // means that the next activity belongs to this application 725 | // pauses the preview and publish 726 | pauseCapture(); 727 | 728 | // signalizes that the activity has changed and the 729 | // camera was being captured 730 | if(mVideoPublish.nextSurfaceCreated){ // means that the surface of the next activity or 731 | // of the next orientation has already been created 732 | mVideoPublish.RequestResume(); 733 | } else { // means that the surface of the next activity has not been created yet 734 | mVideoPublish.lastSurfaceDestroyed = true; // set to true because the current surface has been destroyed 735 | } 736 | //} 737 | } 738 | } 739 | 740 | @Override 741 | public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { 742 | log.debug("preview surface changed"); 743 | 744 | if(!fakeDestroyed && w == 0 && h == 0){ // means that we want to show the preview on a Dialog. 745 | // So, we need to simulate a surfaceDestroyed 746 | fakeDestroyed = true; 747 | log.debug("simulating a preview surface destruction"); 748 | surfaceDestroyed(holder); // this call does not destroy the surface, 749 | // it just sets the variables to simulate a destruction 750 | } else if(fakeDestroyed && w == 1 && h == 1){ // means that we closed the preview Dialog. 
751 | // So, we need to simulate a surfaceCreated 752 | fakeDestroyed = false; 753 | log.debug("simulating a preview surface creation"); 754 | surfaceCreated(holder); // this call does not create the surface, 755 | // it just sets the variables to simulate a creation 756 | } 757 | } 758 | 759 | @Override 760 | public void surfaceCreated(SurfaceHolder holder) { 761 | log.debug("preview surface created"); 762 | 763 | if(getPublisher() == CaptureConstants.E_OK){ 764 | Log.e("","surfaceCreated"); 765 | isSurfaceCreated = true; 766 | 767 | if(mVideoPublish.state == CaptureConstants.RESUMED || 768 | mVideoPublish.state == CaptureConstants.PAUSED){ 769 | if(!mVideoPublish.lastSurfaceDestroyed){ // means that the last preview surface used to 770 | // capture the video is still active (not destroyed) 771 | // and the capture is not paused yet. 772 | // So, we can't resume the capture right now 773 | mVideoPublish.nextSurfaceCreated = true; 774 | 775 | mVideoPublish.readyToResume(this); 776 | } else { // means that the last preview surface used to capture the video has already been 777 | // destroyed and the capture is paused 778 | resumeCapture(); 779 | } 780 | } else if(mVideoPublish.state == CaptureConstants.STOPPED){ 781 | if(mVideoPublish.restartWhenResume){ // means that the following happened: 782 | // a publish was running, then the application went to 783 | // background, then it is now back to foreground. 
784 | // So, if we want to keep the previous state, 785 | // lets start the capture 786 | startCapture(options); 787 | } 788 | } 789 | } 790 | } 791 | 792 | @Override 793 | public void onPreviewFrame (byte[] _data, Camera camera) 794 | { 795 | if(mVideoPublish != null && mVideoPublish.state == CaptureConstants.RESUMED){ 796 | if(usingHidden){ 797 | addCallbackBuffer_Android2p2(_data); 798 | } else if(usingFaster && mVideoPublish.mCamera != null){ 799 | mVideoPublish.mCamera.addCallbackBuffer(_data); 800 | } 801 | //Participant myself = ((BigBlueButton) getContext().getApplicationContext()).getHandler().getMyself(); 802 | //if(myself!=null&&myself.getStatus().doesHaveStream()){ 803 | enqueueFrame(_data, _data.length, mVideoPublish.getWidth(), mVideoPublish.getHeight(), mVideoPublish.getRotation()); 804 | // } 805 | } 806 | } 807 | 808 | private native int enqueueFrame(byte[] data, int length, int width, int height, int rotation); 809 | 810 | /*public static void setCameraDisplayOrientation(Activity activity, 811 | int cameraId, android.hardware.Camera camera) { 812 | android.hardware.Camera.CameraInfo info = 813 | new android.hardware.Camera.CameraInfo(); 814 | android.hardware.Camera.getCameraInfo(cameraId, info); 815 | int rotation = activity.getWindowManager().getDefaultDisplay() 816 | .getRotation(); 817 | int degrees = 0; 818 | switch (rotation) { 819 | case Surface.ROTATION_0: degrees = 0; break; 820 | case Surface.ROTATION_90: degrees = 90; break; 821 | case Surface.ROTATION_180: degrees = 180; break; 822 | case Surface.ROTATION_270: degrees = 270; break; 823 | } 824 | 825 | int result; 826 | if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { 827 | result = (info.orientation + degrees) % 360; 828 | result = (360 - result) % 360; // compensate the mirror 829 | } else { // back-facing 830 | result = (info.orientation - degrees + 360) % 360; 831 | } 832 | 833 | camera.setDisplayOrientation(result); 834 | }*/ 835 | 836 | 837 | } 
--------------------------------------------------------------------------------
/audiopublish/src/org/mconf/android/core/video/VideoCaptureLayout.java:
--------------------------------------------------------------------------------
package org.mconf.android.core.video;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.util.AttributeSet;
import android.view.ViewGroup;
import android.widget.LinearLayout;

/**
 * Layout hosting the local camera preview; resizes it according to the
 * publisher's frame dimensions and the preferred preview rotation.
 */
public class VideoCaptureLayout extends LinearLayout {

    private static final Logger log = LoggerFactory.getLogger(VideoCaptureLayout.class);

    public VideoCaptureLayout(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    /** Shrinks the preview to 1x1 px — kept alive but effectively invisible. */
    public void hide() {
        VideoCentering mVideoCentering = new VideoCentering();
        ViewGroup.LayoutParams layoutParams = mVideoCentering.hidePreview(getLayoutParams());
        setLayoutParams(layoutParams);
    }

    /**
     * Sizes the preview to fit the display (minus {@code margin} pixels on
     * each axis) while preserving the publisher's aspect ratio.
     *
     * @param margin pixels subtracted from both display dimensions.
     */
    public void show(int margin) {
        VideoPublish mVideoPublish =
                ((BigBlueButton) getContext().getApplicationContext()).getVideoPublish();
        if (mVideoPublish == null) {
            log.debug("Error: could not show capture preview. Reason: could not get or instantiate a VideoPublisher");
            return;
        }

        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(getContext());
        int rotation = 0;
        try {
            rotation = Integer.parseInt(prefs.getString("preview_rotation", "0"));
        } catch (NumberFormatException e) {
            // FIX: a malformed preference used to crash the preview; fall back to 0.
            log.debug("Warning: invalid preview_rotation preference, using 0");
        }
        log.debug("PREVIEW ROTATION ={}", rotation);

        VideoCentering mVideoCentering = new VideoCentering();
        if (rotation % 180 == 0) {
            mVideoCentering.setAspectRatio(mVideoPublish.getWidth() / (float) mVideoPublish.getHeight());
        } else {
            // rotated by 90/270 degrees: width and height swap roles
            mVideoCentering.setAspectRatio(mVideoPublish.getHeight() / (float) mVideoPublish.getWidth());
        }

        ViewGroup.LayoutParams layoutParams = mVideoCentering.getVideoLayoutParams(
                mVideoCentering.getDisplayMetrics(getContext(), margin), getLayoutParams());
        setLayoutParams(layoutParams);
    }

    /** Collapses the preview to 0x0 px. */
    public void destroy() {
        VideoCentering mVideoCentering = new VideoCentering();
        ViewGroup.LayoutParams layoutParams = mVideoCentering.destroyPreview(getLayoutParams());
        setLayoutParams(layoutParams);
    }
}
--------------------------------------------------------------------------------
/audiopublish/src/org/mconf/android/core/video/VideoCentering.java:
--------------------------------------------------------------------------------
package org.mconf.android.core.video;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import android.content.Context;
import android.util.DisplayMetrics;
import android.view.Display;
import android.view.WindowManager;
import android.view.ViewGroup.LayoutParams;

/** Computes layout sizes that fit/center a video inside the display. */
public class VideoCentering {
    private static final Logger log = LoggerFactory.getLogger(VideoCentering.class);

    // width / height; defaults to 4:3 until the publisher dimensions are known
    private float aspectRatio = 4 / (float) 3;

    /**
     * Fits a rectangle of {@code aspectRatio} inside {@code metrics},
     * letterboxing on whichever axis is too large.
     */
    public LayoutParams getVideoLayoutParams(DisplayMetrics metrics, LayoutParams layoutParams) {
        int h, w;
        float displayAspectRatio = metrics.widthPixels / (float) metrics.heightPixels;
        if (displayAspectRatio < aspectRatio) { // display narrower than the video
            w = metrics.widthPixels;
            h = (int) (w / aspectRatio);
        } else {                                // display wider than the video
            h = metrics.heightPixels;
            w = (int) (h * aspectRatio);
        }
        layoutParams.width = w;
        layoutParams.height = h;
        return layoutParams;
    }

    /** Returns the display metrics shrunk by {@code margin} pixels on each axis. */
    public DisplayMetrics getDisplayMetrics(Context context, int margin) {
        DisplayMetrics metrics = new DisplayMetrics();
        Display display = ((WindowManager) context.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
        display.getMetrics(metrics);
        log.debug("Maximum display resolution: {} X {}\n", metrics.widthPixels, metrics.heightPixels);
        metrics.widthPixels -= margin;
        metrics.heightPixels -= margin;
        return metrics;
    }

    public void setAspectRatio(float aspectRatio) {
        this.aspectRatio = aspectRatio;
    }

    public float getAspectRatio() {
        return aspectRatio;
    }

    /** 1x1 px: hidden but still attached. */
    public LayoutParams hidePreview(LayoutParams layoutParams) {
        layoutParams.width = 1;
        layoutParams.height = 1;
        return layoutParams;
    }

    /** 0x0 px: fully collapsed. */
    public LayoutParams destroyPreview(LayoutParams layoutParams) {
        layoutParams.width = 0;
        layoutParams.height = 0;
        return layoutParams;
    }
}
--------------------------------------------------------------------------------
/audiopublish/src/org/mconf/android/core/video/VideoDialog.java:
--------------------------------------------------------------------------------
package org.mconf.android.core.video;


import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import android.app.Dialog;
import android.content.Context;
import android.view.Window;

import com.flazr.rtmp.client.ClientOptions;

/**
 * Dialog that shows either the local camera preview (when the video belongs
 * to the current user) or a remote participant's stream.
 */
public class VideoDialog extends Dialog {
    private static final Logger log = LoggerFactory.getLogger(VideoDialog.class);

    //private VideoSurface videoWindow;
    private String userId;
    private String name;
    public boolean isPreview;      // true when showing our own camera preview
    private int streamToShow;
    private VoiceInterface mVoiceItf;
    private ClientOptions opt;

    public VideoDialog(Context context, String userId, String myId, String name,
            int streamToShow, ClientOptions options) {
        super(context);
        this.opt = options;
        this.userId = userId;
        this.name = name;

        // showing our own stream means preview mode
        isPreview = userId.equals(myId);

        requestWindowFeature(Window.FEATURE_NO_TITLE); // removes the title from the Dialog

        if (isPreview) {
            setContentView(R.layout.video_capture);
        } else {
            /*setContentView(R.layout.video_window);

            videoWindow = (VideoSurface) findViewById(R.id.video_window);*/
        }

        // keep the screen bright while the video is showing
        android.view.WindowManager.LayoutParams windowAttributes = getWindow().getAttributes();
        windowAttributes.flags = android.view.WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON;
        getWindow().setAttributes(windowAttributes);

        setTitle(name);
        setCancelable(true);

        this.streamToShow = streamToShow;
    }

    /** Placeholder: would broadcast a request to recreate the capture surface. */
    private void sendBroadcastRecreateCaptureSurface() {
        log.debug("sendBroadcastRecreateCaptureSurface()");

        //Intent intent= new Intent(Client.CLOSE_DIALOG_PREVIEW);
        //getContext().sendBroadcast(intent);
    }

    private void setVideoId(String userIdLocal) {
        userId = userIdLocal;
    }

    private void setVideoName(String userName) {
        name = userName;
    }

    public String getVideoId() {
        return userId;
    }

    public String getVideoName() {
        return name;
    }

    @Override
    protected void onStart() {
        super.onStart();
        resume();
    }

    @Override
    protected void onStop() {
        pause();
        super.onStop();
    }

    /** Pauses the video: for previews, asks for the capture surface to be recreated. */
    public void pause() {
        if (isPreview) {
            sendBroadcastRecreateCaptureSurface();
        } else {
            //videoWindow.stop();
        }
    }

    /** Resumes the video: for previews, resizes the capture layout on screen. */
    public void resume() {
        if (isPreview) {
            VideoCaptureLayout videocaplayout =
                    (VideoCaptureLayout) findViewById(R.id.video_capture_layout);
            videocaplayout.show(40);
        } else {
            //videoWindow.start(userId, true, streamToShow);
        }
    }

    /* (non-Javadoc)
     * @see android.app.Dialog#onBackPressed()
     */
    @Override
    public void onBackPressed() {
        // Intentionally does NOT dismiss the dialog; instead (re)starts the
        // capture with the stored connection options.
        //super.onBackPressed();
        VideoCapture mVideoCapture = (VideoCapture) findViewById(R.id.video_capture);
        mVideoCapture.startCapture(opt);
    }
}
--------------------------------------------------------------------------------
/audiopublish/src/org/mconf/android/core/video/VideoPublish.java:
--------------------------------------------------------------------------------
package org.mconf.android.core.video;

import java.util.ArrayList;
import java.util.List;

import org.slf4j.Logger;
import
org.slf4j.LoggerFactory;

import android.hardware.Camera;

import com.flazr.rtmp.RtmpReader;
import com.flazr.rtmp.client.ClientOptions;
import com.flazr.rtmp.message.Metadata;
import com.flazr.rtmp.message.Video;

/**
 * Publisher thread and RTMP frame source: receives encoded frames from the
 * native encoder (via {@link #onReadyFrame}) and serves them to the RTMP
 * publisher through the {@link RtmpReader} interface.
 */
public class VideoPublish extends Thread implements RtmpReader {

    private static final Logger log = LoggerFactory.getLogger(VideoPublish.class);

    private int framerate = CaptureConstants.DEFAULT_FRAME_RATE;
    private int width = CaptureConstants.DEFAULT_WIDTH;
    private int height = CaptureConstants.DEFAULT_HEIGHT;
    private int bitrate = CaptureConstants.DEFAULT_BIT_RATE;
    private int gop = CaptureConstants.DEFAULT_GOP;
    private int rotation = 0;

    // queue of encoded frames awaiting delivery to the RTMP publisher
    // (generics restored — the raw List in the dump looked like extraction garbling)
    private List<Video> framesList = new ArrayList<Video>();

    private BbbVideoPublisher videoPublishHandler;

    private BigBlueButtonClient context;

    private VideoCapture mVideoCapture;

    private VoiceInterface mVoiceItf;

    // buffer shared with the native encoder; holds one encoded frame
    private byte[] sharedBuffer;

    public int bufSize;

    public Camera mCamera;

    private int firstTimestamp = 0;
    private int lastTimestamp = 0;

    public int state = CaptureConstants.STOPPED;

    // Used when the activity or the orientation changes while the video is
    // being captured (except when faking a destruction — see
    // VideoCapture.fakeDestroyed). Two surfaces then conflict and we must
    // track when each is created/destroyed.
    // true: the next surface has already been created.
    // false: it has not been created yet, or there is no conflict.
    public boolean nextSurfaceCreated = false;

    // Same situation as nextSurfaceCreated.
    // true: the last preview surface has already been destroyed.
    // false: the last preview surface is still active.
    public boolean lastSurfaceDestroyed = false;

    // true when the native VideoEncoder exists; guards native calls
    public boolean nativeEncoderInitialized = false;

    // true if the camera was being captured when the app went to background,
    // so that the previous state can be restored on resume
    public boolean restartWhenResume;

    // Set to true when the RtmpPublisher starts seeking video messages
    // (allows adding frames to the list); set to false right when the
    // publisher closes the reader (blocks further additions).
    private boolean framesListAvailable = false;

    private boolean firstFrameWrote = true;

    public int cameraId = -1;

    private ClientOptions options;

    /**
     * @param context           BBB client used by the publish handlers.
     * @param restartWhenResume whether to restart capture on app resume.
     * @param framerate/width/height/bitrate/gop/rotation encoder parameters.
     */
    public VideoPublish(BigBlueButtonClient context, boolean restartWhenResume,
            int framerate, int width, int height, int bitrate, int gop, int rotation) {
        this.context = context;
        this.restartWhenResume = restartWhenResume;

        this.framerate = framerate;
        this.width = width;
        this.height = height;
        this.bitrate = bitrate;
        this.gop = gop;
        this.rotation = rotation;
    }

    /**
     * Starts the publish handler. Currently only the voice path is active;
     * the video handler creation is disabled (commented out).
     */
    public void startPublisher(ClientOptions opt) {
        // FIX: removed an unused local (streamName) left over from the
        // disabled video-publisher path below.
        /*videoPublishHandler = new BbbVideoPublisher(context, this, opt.getStreamName(), opt);
        videoPublishHandler.start();*/
        mVoiceItf = new VoiceOverRtmp(context, opt);
        mVoiceItf.start();
    }

    /** Wakes a reader blocked in {@link #hasNext()} and stops the handler. */
    public void stopPublisher() {
        synchronized (this) {
            this.notifyAll();
        }
        if (videoPublishHandler != null) {
            videoPublishHandler.stop();
        }
    }

    /** Registers the capture to resume once the old surface is destroyed. */
    public void readyToResume(VideoCapture videoCapture) {
        mVideoCapture = videoCapture;
    }

    /** Resumes the capture registered via {@link #readyToResume}. */
    public int RequestResume() {
        if (mVideoCapture == null) {
            log.debug("Error: resume requested but there is not a VideoCapture class available");
            return CaptureConstants.E_COULD_NOT_REQUEST_RESUME;
        }
        mVideoCapture.resumeCapture();
        mVideoCapture = null;
        return CaptureConstants.E_OK;
    }

    /** Allocates the shared buffer and initializes the native encoder. */
    public void initNativeEncoder() {
        // The encoded frame is never bigger than the raw frame.
        //\TODO the encoded frame is usually much smaller; computing the worst
        // case encoded size (for the given parameters) would save memory.
        sharedBuffer = new byte[bufSize];

        if (rotation % 180 == 0) {
            initEncoder(width, height, framerate, bitrate, gop);
        } else {
            // rotated by 90/270 degrees: encode with swapped dimensions
            initEncoder(height, width, framerate, bitrate, gop);
        }

        nativeEncoderInitialized = true;
    }

    /** Tears down the native encoder. */
    public void endNativeEncoder() {
        nativeEncoderInitialized = false;

        endEncoder();
    }

    @Override
    public void run() {
        // thread body lives on the native side
        initSenderLoop();
    }

    /** Called from native code to obtain the shared frame buffer. */
    public byte[] assignJavaBuffer() {
        return sharedBuffer;
    }

    /**
     * Called from native code when one encoded frame is ready in the shared
     * buffer; copies it into a Video message and queues it for the publisher.
     */
    public int onReadyFrame(int bufferSize, int timestamp) {
        if (firstTimestamp == 0) {
            firstTimestamp = timestamp;
        }
        timestamp = timestamp - firstTimestamp;
        int interval = timestamp - lastTimestamp;
        lastTimestamp = timestamp;

        byte[] aux = new byte[bufferSize];
        System.arraycopy(sharedBuffer, 0, aux, 0, bufferSize); //\TODO see if we can avoid this copy

        Video video = new Video(timestamp, aux, bufferSize);
        video.getHeader().setDeltaTime(interval);

        if (framesListAvailable) {
            framesList.add(video);
            if (firstFrameWrote) {
                firstFrameWrote = false;
                // FIX: videoPublishHandler is never created while the video
                // path is disabled — guard against the NPE.
                if (videoPublishHandler != null) {
                    videoPublishHandler.fireFirstFrame();
                }
            }
            synchronized (this) {
                this.notifyAll(); // wake a reader blocked in hasNext()
            }
        }
        return 0;
    }

    @Override
    public void close() {
        framesListAvailable = false;
        if (framesList != null) {
            framesList.clear();
        }
        framesList = null;
    }

    @Override
    public Metadata getMetadata() {
        return null;
    }

    @Override
    public Video[] getStartMessages() {
        framesListAvailable = true;
        Video[] startMessages = new Video[0];
        return startMessages;
    }

    @Override
    public long getTimePosition() {
        return 0;
    }

    /** True while the capture is active — frames may still arrive. */
    private boolean isActive() {
        return state == CaptureConstants.RESUMED || state == CaptureConstants.PAUSED;
    }

    /**
     * Blocks until a frame is queued (or the reader is shut down), then
     * reports whether more frames are expected.
     */
    @Override
    public boolean hasNext() {
        if (isActive() && framesListAvailable && framesList != null && framesList.isEmpty()) {
            // FIX: the original called this.wait() without holding the
            // monitor, which always throws IllegalMonitorStateException.
            synchronized (this) {
                try {
                    if (framesList != null && framesList.isEmpty()) {
                        this.wait();
                    }
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt(); // restore interrupt status
                    e.printStackTrace();
                }
            }
        }
        if (isActive() && framesListAvailable && framesList != null) {
            // the framesList is (normally) no longer empty
            return true;
        } else {
            // empty list, reader closed, or capture no longer active
            return false;
        }
    }

    @Override
    public Video next() {
        if (framesListAvailable && framesList != null && !framesList.isEmpty()) {
            return framesList.remove(0);
        } else {
            Video emptyVideo = new Video();
            return emptyVideo;
        }
    }

    @Override
    public long seek(long timePosition) {
        return 0;
    }

    @Override
    public void setAggregateDuration(int targetDuration) {
    }

    private native int initEncoder(int width, int height, int frameRate, int bitRate, int GOP);
    private native int endEncoder();
    private native int initSenderLoop();

    @Override
    public int getWidth() {
        return width;
    }

    @Override
    public int getHeight() {
        return height;
    }

    public void setFramerate(int framerate) {
        this.framerate = framerate;
    }

    public void setHeight(int height) {
        this.height = height;
    }

    public void setWidth(int width) {
        this.width = width;
    }

    public int getFramerate() {
        return framerate;
    }

    public int getBitrate() {
        return bitrate;
    }

    public int getGop() {
        return gop;
    }

    public int getRotation() {
        return rotation;
    }
}
--------------------------------------------------------------------------------
/audiopublish/src/org/mconf/android/core/video/VideoPublisherConnection.java:
--------------------------------------------------------------------------------
package org.mconf.android.core.video;


import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.Channels;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.flazr.rtmp.RtmpDecoder;
import com.flazr.rtmp.RtmpEncoder;
import com.flazr.rtmp.client.ClientHandshakeHandler;
import com.flazr.rtmp.client.ClientOptions;

/**
 * RTMP connection used to publish video to the server. Wires up the standard
 * client pipeline (handshake, RTMP codec) with this connection as the handler.
 */
public class VideoPublisherConnection extends RtmpConnection {

	@SuppressWarnings("unused")
	private static final Logger log = LoggerFactory.getLogger(VideoPublisherConnection.class);

	public VideoPublisherConnection(ClientOptions options, BigBlueButtonClient context) {
		super(options, context);
	}

	/** Builds the Netty pipeline: handshake first, then RTMP decode/encode. */
	@Override
	protected ChannelPipelineFactory pipelineFactory() {
		return new ChannelPipelineFactory() {
			@Override
			public ChannelPipeline getPipeline() throws Exception {
				final ChannelPipeline p = Channels.pipeline();
				p.addLast("handshaker", new ClientHandshakeHandler(options));
				p.addLast("decoder", new RtmpDecoder());
				p.addLast("encoder", new RtmpEncoder());
				p.addLast("handler", VideoPublisherConnection.this);
				return p;
			}
		};
	}
}
-------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/VoiceConnection.java:
--------------------------------------------------------------------------------
package org.mconf.android.core.video;

import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.Channels;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.flazr.rtmp.RtmpDecoder;
import
com.flazr.rtmp.RtmpEncoder; 11 | import com.flazr.rtmp.client.ClientHandshakeHandler; 12 | import com.flazr.rtmp.client.ClientOptions; 13 | import com.flazr.rtmp.message.Audio; 14 | 15 | public abstract class VoiceConnection extends RtmpConnection { 16 | 17 | private static final Logger log = LoggerFactory.getLogger(VoiceConnection.class); 18 | private String publishName; 19 | private String playName; 20 | @SuppressWarnings("unused") 21 | private String codec; 22 | private int playStreamId = -1; 23 | private int publishStreamId = -1; 24 | 25 | public VoiceConnection(ClientOptions options, BigBlueButtonClient context) { 26 | super(options, context); 27 | } 28 | 29 | @Override 30 | protected ChannelPipelineFactory pipelineFactory() { 31 | return new ChannelPipelineFactory() { 32 | @Override 33 | public ChannelPipeline getPipeline() throws Exception { 34 | final ChannelPipeline pipeline = Channels.pipeline(); 35 | pipeline.addLast("handshaker", new ClientHandshakeHandler(options)); 36 | pipeline.addLast("decoder", new RtmpDecoder()); 37 | pipeline.addLast("encoder", new RtmpEncoder()); 38 | pipeline.addLast("handler", VoiceConnection.this); 39 | return pipeline; 40 | } 41 | }; 42 | } 43 | 44 | abstract protected void onAudio(Audio audio); 45 | 46 | } -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/VoiceInterface.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | import com.flazr.rtmp.client.ClientOptions; 4 | 5 | 6 | public interface VoiceInterface { 7 | 8 | public static final int E_OK = 0; 9 | public static final int E_INVALID_NUMBER = 1; 10 | public static final int E_TIMEOUT = 2; 11 | 12 | public int start(); 13 | public void stop(); 14 | public boolean isOnCall(); 15 | public boolean isMuted(); 16 | public void muteCall(boolean mute); 17 | public int getSpeaker(); 18 | public void setSpeaker(int 
mode); 19 | } -------------------------------------------------------------------------------- /audiopublish/src/org/mconf/android/core/video/VoiceOverRtmp.java: -------------------------------------------------------------------------------- 1 | package org.mconf.android.core.video; 2 | 3 | import org.jboss.netty.channel.ChannelHandlerContext; 4 | import org.jboss.netty.channel.ChannelStateEvent; 5 | import org.slf4j.Logger; 6 | import org.slf4j.LoggerFactory; 7 | 8 | import android.os.SystemClock; 9 | import android.util.Log; 10 | 11 | import com.flazr.rtmp.client.ClientOptions; 12 | import com.flazr.rtmp.message.Audio; 13 | 14 | public class VoiceOverRtmp implements VoiceInterface { 15 | 16 | private static final Logger log = LoggerFactory.getLogger(VoiceOverRtmp.class); 17 | 18 | private BbbVoiceConnection connection; 19 | private RtmpAudioPlayer audioPlayer = new RtmpAudioPlayer(); 20 | private AudioPublish micBufferReader = new AudioPublish(); 21 | protected boolean onCall = false; 22 | private Object args []={"Test1","18.9750/72.8258","AB","1","Female","info",""}; 23 | //private Object args[]={"Test1"}; 24 | public VoiceOverRtmp(BigBlueButtonClient bbb, ClientOptions options) { 25 | 26 | /*ClientOptions options=new ClientOptions(); 27 | options.setHost("10.129.200.81"); 28 | options.setAppName("HariPanTest3"); 29 | //options.setAppName("PanTest"); 30 | options.setStreamName("Test1"); 31 | options.setArgs(args); 32 | options.publishLive();*/ 33 | options.setReaderToPublish(micBufferReader); 34 | connection = new BbbVoiceConnection(bbb, options) { 35 | @Override 36 | protected void onAudio(Audio audio) { 37 | audioPlayer.onAudio(audio); 38 | } 39 | 40 | @Override 41 | protected void onConnectedSuccessfully() { 42 | onCall = true; 43 | } 44 | 45 | @Override 46 | public void channelDisconnected(ChannelHandlerContext ctx, 47 | ChannelStateEvent e) throws Exception { 48 | super.channelDisconnected(ctx, e); 49 | log.debug("\n\nvoice disconnected, stopping 
VoiceOverRtmp\n\n"); 50 | onCall = false; 51 | audioPlayer.stop(); 52 | 53 | } 54 | }; 55 | 56 | } 57 | 58 | @Override 59 | public int start() { 60 | connection.start(); 61 | 62 | int cont = 10; 63 | while (!onCall && cont > 0) { 64 | SystemClock.sleep(500); 65 | cont--; 66 | } 67 | 68 | if (cont == 0) { 69 | stop(); 70 | return E_TIMEOUT; 71 | } 72 | else 73 | { 74 | audioPlayer.start(); 75 | int res; 76 | if(sendFirstAudioPacket()) { 77 | micBufferReader.start(); 78 | //Log.e("",""+Thread.currentThread()); 79 | res=E_OK; 80 | } 81 | else { 82 | stop(); 83 | //Log.e("",""+Thread.currentThread()); 84 | res=E_TIMEOUT; 85 | } 86 | return res; 87 | } 88 | } 89 | 90 | private boolean sendFirstAudioPacket() 91 | { 92 | //for some reason - and we dont know why yet - after the reception of the first audio packet 93 | //the connection needs to wait 101 ms to then normally starts the audio dispatching 94 | //so..we are firing the first audio packet with a 101ms delay... 95 | //this first audio packet is in the audio buffer of the micBufferReader 96 | // ( you can check in the constructor of the AudioPublish class ) 97 | 98 | // The voice connection waits for a 'createStream' server command to initialize the publisher 99 | // It means that by the time fireNext is called 'connection.publisher' may be null 100 | // So we have to wait until the publisher is initialized 101 | 102 | int attemptsLeft = 10; 103 | while(connection.publisher == null && attemptsLeft > 0) { 104 | SystemClock.sleep(500); 105 | attemptsLeft--; 106 | } 107 | 108 | if(attemptsLeft == 0) { 109 | /* Failed to initialize the publisher */ 110 | return false; 111 | } 112 | else { 113 | connection.publisher.fireNext(connection.publisher.channel, 101); 114 | return true; 115 | } 116 | } 117 | 118 | @Override 119 | public void stop() { 120 | connection.stop(); 121 | } 122 | 123 | @Override 124 | public boolean isOnCall() { 125 | return onCall; 126 | } 127 | 128 | @Override 129 | public boolean isMuted() { 130 | 
return micBufferReader.isMuted(); 131 | } 132 | 133 | @Override 134 | public void muteCall(boolean mute) { 135 | if(mute) 136 | micBufferReader.mute(); 137 | else 138 | micBufferReader.unmute(); 139 | 140 | } 141 | 142 | @Override 143 | public int getSpeaker() { 144 | return 0; 145 | } 146 | 147 | @Override 148 | public void setSpeaker(int mode) { 149 | // TODO Auto-generated method stub 150 | 151 | } 152 | 153 | } --------------------------------------------------------------------------------