├── .gitignore
├── .idea
├── compiler.xml
├── copyright
│ └── profiles_settings.xml
├── gradle.xml
├── misc.xml
├── modules.xml
└── runConfigurations.xml
├── app
├── .gitignore
├── CMakeLists.txt
├── build.gradle
├── proguard-rules.pro
└── src
│ ├── androidTest
│ └── java
│ │ └── com
│ │ └── jiangdg
│ │ └── yuvosd
│ │ └── ExampleInstrumentedTest.java
│ ├── main
│ ├── AndroidManifest.xml
│ ├── cpp
│ │ ├── Yuv.cpp
│ │ ├── Yuv.h
│ │ ├── YuvOsd.cpp
│ │ └── osd
│ │ │ ├── Osdchar.c
│ │ │ └── Osdchar.h
│ ├── java
│ │ └── com
│ │ │ └── jiangdg
│ │ │ ├── demo
│ │ │ ├── MainActivity.java
│ │ │ ├── runnable
│ │ │ │ ├── EncoderAudioRunnable.java
│ │ │ │ └── EncoderVideoRunnable.java
│ │ │ └── utils
│ │ │ │ ├── CameraUtils.java
│ │ │ │ ├── MediaMuxerUtils.java
│ │ │ │ └── SensorAccelerometer.java
│ │ │ └── natives
│ │ │ └── YuvUtils.java
│ └── res
│ │ ├── layout
│ │ └── activity_main.xml
│ │ ├── mipmap-hdpi
│ │ └── ic_launcher.png
│ │ ├── mipmap-mdpi
│ │ └── ic_launcher.png
│ │ ├── mipmap-xhdpi
│ │ └── ic_launcher.png
│ │ ├── mipmap-xxhdpi
│ │ └── ic_launcher.png
│ │ ├── mipmap-xxxhdpi
│ │ └── ic_launcher.png
│ │ ├── values-w820dp
│ │ └── dimens.xml
│ │ └── values
│ │ ├── colors.xml
│ │ ├── dimens.xml
│ │ ├── strings.xml
│ │ └── styles.xml
│ └── test
│ └── java
│ └── com
│ └── jiangdg
│ └── yuvosd
│ └── ExampleUnitTest.java
├── build.gradle
├── gradle.properties
├── gradle
└── wrapper
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
└── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | /local.properties
4 | /.idea/workspace.xml
5 | /.idea/libraries
6 | .DS_Store
7 | /build
8 | /captures
9 | .externalNativeBuild
10 |
--------------------------------------------------------------------------------
/.idea/compiler.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
--------------------------------------------------------------------------------
/.idea/copyright/profiles_settings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/.idea/gradle.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
18 |
19 |
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 | 1.8
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/.idea/runConfigurations.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/app/CMakeLists.txt:
--------------------------------------------------------------------------------
# Minimum CMake version required to build the native library.
cmake_minimum_required(VERSION 3.4.1)

# Optional: redirect the built .so files into jniLibs so they are packaged
# straight from the source tree (intentionally left disabled).
#set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI})

# Compile the listed sources into the shared library libYuvOsd.so.
add_library(YuvOsd
    SHARED
    src/main/cpp/YuvOsd.cpp
    src/main/cpp/Yuv.cpp)

# Locate the NDK logging library (liblog).
find_library(log-lib log)

# Link liblog PRIVATE: it is an implementation detail of YuvOsd (used only
# by the __android_log_print calls), not part of its public interface.
target_link_libraries(YuvOsd PRIVATE ${log-lib})
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
apply plugin: 'com.android.application'

android {
    compileSdkVersion 25
    buildToolsVersion "26.0.1"
    defaultConfig {
        applicationId "com.teligen.yuvosdteligen"
        minSdkVersion 18
        targetSdkVersion 22
        versionCode 1
        versionName "1.0"
        testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
        externalNativeBuild {
            cmake {
                cppFlags ""
            }
        }
        // BUGFIX: ndk.abiFilters belongs directly under defaultConfig.
        // The original nested ndk{} inside externalNativeBuild{}, which is
        // not a valid element there, so the ABI filters were not applied.
        ndk {
            abiFilters 'armeabi', 'armeabi-v7a', 'arm64-v8a'
        }
    }
    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }
    externalNativeBuild {
        cmake {
            // Native build script lives next to this module's build.gradle.
            path "CMakeLists.txt"
        }
    }
}

dependencies {
    compile fileTree(dir: 'libs', include: ['*.jar'])
    androidTestCompile('com.android.support.test.espresso:espresso-core:2.2.2', {
        exclude group: 'com.android.support', module: 'support-annotations'
    })
    compile 'com.android.support:appcompat-v7:25.3.1'
    testCompile 'junit:junit:4.12'
}
44 |
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # By default, the flags in this file are appended to flags specified
3 | # in E:\Environment\android-sdk-windows/tools/proguard/proguard-android.txt
4 | # You can edit the include path and order by changing the proguardFiles
5 | # directive in build.gradle.
6 | #
7 | # For more details, see
8 | # http://developer.android.com/guide/developing/tools/proguard.html
9 |
10 | # Add any project specific keep options here:
11 |
12 | # If your project uses WebView with JS, uncomment the following
13 | # and specify the fully qualified class name to the JavaScript interface
14 | # class:
15 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
16 | # public *;
17 | #}
18 |
--------------------------------------------------------------------------------
/app/src/androidTest/java/com/jiangdg/yuvosd/ExampleInstrumentedTest.java:
--------------------------------------------------------------------------------
1 | package com.jiangdg.yuvosd;
2 |
3 | import android.content.Context;
4 | import android.support.test.InstrumentationRegistry;
5 | import android.support.test.runner.AndroidJUnit4;
6 |
7 | import org.junit.Test;
8 | import org.junit.runner.RunWith;
9 |
10 | import static org.junit.Assert.*;
11 |
12 | /**
13 | * Instrumentation test, which will execute on an Android device.
14 | *
15 | * @see Testing documentation
16 | */
17 | @RunWith(AndroidJUnit4.class)
18 | public class ExampleInstrumentedTest {
19 | @Test
20 | public void useAppContext() throws Exception {
21 | // Context of the app under test.
22 | Context appContext = InstrumentationRegistry.getTargetContext();
23 |
24 | assertEquals("com.teligen.yuvosdteligen", appContext.getPackageName());
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/app/src/main/cpp/Yuv.cpp:
--------------------------------------------------------------------------------
1 | //
2 | // Created by jiangdongguo on 2018/10/19.
3 | //
4 |
5 | #include "Yuv.h"
6 | #include
7 |
8 | #define LOG_TAG_YUV "yuv"
9 |
10 | //#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG_YUV,__VA_ARGS__)
11 |
// 16x16 dot-matrix glyphs for the characters "陈", "桂", "芳"
// (KaiTi_GB2312; each 16x16 glyph takes 16*16/8 = 32 bytes).
// Currently unused by draw_Font_Func, which supports only digits, ':' and '-'.
const unsigned char table[] = {

        /*-- Glyph: 陈 --*/
        0x00, 0x40, 0x78, 0x40, 0x48, 0x40, 0x57, 0xFE, 0x50, 0x80, 0x61, 0x20, 0x51,
        0x20, 0x4A, 0x20, 0x4B, 0xFC, 0x48, 0x20, 0x69, 0x28, 0x51, 0x24, 0x42,
        0x22, 0x44, 0x22, 0x40, 0xA0, 0x40, 0x40,

        /*-- Glyph: 桂 --*/
        0x10, 0x20, 0x10, 0x20, 0x11, 0xFC, 0x10, 0x20, 0xFC, 0x20, 0x10, 0x20,
        0x33, 0xFE, 0x38, 0x00, 0x54, 0x20, 0x54, 0x20, 0x91, 0xFC, 0x10, 0x20,
        0x10, 0x20, 0x10, 0x20, 0x13, 0xFE, 0x10, 0x00,

        /*-- Glyph: 芳 --*/
        0x08, 0x20, 0x08, 0x20, 0xFF, 0xFE, 0x08, 0x20, 0x0A, 0x20, 0x01, 0x00,
        0xFF, 0xFE, 0x04, 0x00, 0x04, 0x00, 0x07, 0xF0, 0x04, 0x10, 0x08, 0x10,
        0x08, 0x10, 0x10, 0x10, 0x20, 0xA0, 0x40, 0x40,

};

// 16x16 dot-matrix glyphs ("趣", "看"); 32 bytes per glyph. Also unused by
// draw_Font_Func — kept for binary compatibility with the original file.
const unsigned char byteTable[] = {
        /*-- Glyph: 趣 --*/
        0x10, 0x00, 0x13, 0xE0, 0x11, 0x5E, 0x7D, 0x42, 0x11, 0xD2, 0x11, 0x4A, 0xFD,
        0x4A, 0x11, 0xC4, 0x11, 0x44, 0x51, 0x64, 0x5D, 0xCA, 0x53, 0x4A, 0x70,
        0x52, 0x50, 0x40, 0x4F, 0xFE, 0x80, 0x00,
        /*-- Glyph: 看 --*/
        0x00, 0xF8, 0x7F, 0x00, 0x01, 0x00, 0x3F, 0xF8, 0x02, 0x00, 0xFF, 0xFE,
        0x04, 0x00, 0x08, 0x00, 0x1F, 0xF0, 0x28, 0x10, 0x4F, 0xF0, 0x88, 0x10,
        0x0F, 0xF0, 0x08, 0x10, 0x0F, 0xF0, 0x08, 0x10};

// 16x8 dot-matrix glyphs (16 rows, 8 columns); each digit/symbol takes
// 16*8/8 = 16 bytes. Index order: '0'..'9' at 0..9, ':' at 10, '-' at 11.
const unsigned char charTable[] = {

        /*-- Glyph: 0 --*/
        0x00, 0x00, 0x00, 0x18, 0x24, 0x42, 0x42, 0x42, 0x42, 0x42, 0x42, 0x42, 0x24,
        0x18, 0x00, 0x00,

        /*-- Glyph: 1 --*/
        0x00, 0x00, 0x00, 0x10, 0x70, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10,
        0x10, 0x7C, 0x00, 0x00,

        /*-- Glyph: 2 --*/
        0x00, 0x00, 0x00, 0x3C, 0x42, 0x42, 0x42, 0x04, 0x04, 0x08, 0x10, 0x20,
        0x42, 0x7E, 0x00, 0x00,

        /*-- Glyph: 3 --*/
        0x00, 0x00, 0x00, 0x3C, 0x42, 0x42, 0x04, 0x18, 0x04, 0x02, 0x02, 0x42,
        0x44, 0x38, 0x00, 0x00,

        /*-- Glyph: 4 --*/
        0x00, 0x00, 0x00, 0x04, 0x0C, 0x14, 0x24, 0x24, 0x44, 0x44, 0x7E, 0x04,
        0x04, 0x1E, 0x00, 0x00,

        /*-- Glyph: 5 --*/
        0x00, 0x00, 0x00, 0x7E, 0x40, 0x40, 0x40, 0x58, 0x64, 0x02, 0x02, 0x42,
        0x44, 0x38, 0x00, 0x00,

        /*-- Glyph: 6 --*/
        0x00, 0x00, 0x00, 0x1C, 0x24, 0x40, 0x40, 0x58, 0x64, 0x42, 0x42, 0x42,
        0x24, 0x18, 0x00, 0x00,

        /*-- Glyph: 7 --*/
        0x00, 0x00, 0x00, 0x7E, 0x44, 0x44, 0x08, 0x08, 0x10, 0x10, 0x10, 0x10,
        0x10, 0x10, 0x00, 0x00,

        /*-- Glyph: 8 --*/
        0x00, 0x00, 0x00, 0x3C, 0x42, 0x42, 0x42, 0x24, 0x18, 0x24, 0x42, 0x42,
        0x42, 0x3C, 0x00, 0x00,

        /*-- Glyph: 9 --*/
        0x00, 0x00, 0x00, 0x18, 0x24, 0x42, 0x42, 0x42, 0x26, 0x1A, 0x02, 0x02,
        0x24, 0x38, 0x00, 0x00,

        /*-- Glyph: ":" --*/
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x18, 0x18, 0x00, 0x00, 0x00, 0x00,
        0x18, 0x18, 0x00, 0x00,

        /*-- Glyph: "-" --*/
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7F, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00};

/*
 * draw_Font_Func
 * Description: draws an ASCII timestamp-style string (digits, ':' and '-')
 *              onto the luma plane of a YUV420 frame by setting every glyph
 *              pixel to full brightness (Y = 255). Chinese characters are
 *              not supported; unknown characters below '0' are skipped and
 *              characters above '9' are rendered with the ':' glyph
 *              (preserved from the original mapping).
 * Input:       ptr_frame    - first byte of one YUV420 frame (Y plane first)
 *              FRAME_WIDTH  - frame width in pixels
 *              FRAME_HEIGHT - frame height in pixels
 *              startx/starty- top-left corner of the text, in pixels
 *              str          - NUL-terminated string to draw
 * Return:      ptr_frame (returned for convenience; may be ignored).
 */
char *draw_Font_Func(char *ptr_frame, int FRAME_WIDTH, int FRAME_HEIGHT,
                     int startx, int starty, char *str) {

    assert(ptr_frame != NULL);
    // BUGFIX: the original called strlen(NULL) and crashed on a NULL string.
    if (str == NULL) {
        return ptr_frame;
    }

    // Only the Y (luma) plane is touched; U/V stay untouched, so the text
    // appears white. (The original computed U/V plane offsets but never used
    // them — removed.)
    char *offsetY = ptr_frame;
    size_t nStrlen = strlen(str);

    for (size_t i = 0; i < nStrlen; i++) {
        // Map the character to its glyph index in charTable:
        // '0'..'9' -> 0..9, '-' -> 11, anything above '9' -> 10 (':'),
        // anything else below '0' -> skipped.
        int glyph = str[i] - 48;
        if (glyph == -3) {          // '-' is ASCII 45
            glyph = 11;
        } else if (glyph > 9) {     // ':' (and any higher char)
            glyph = 10;
        } else if (glyph < 0) {
            continue;
        }

        // j walks the 16 glyph rows, y the frame rows; both clip at the
        // bottom edge exactly as the original did (y < FRAME_HEIGHT - 1).
        for (int j = 0, y = starty; j < 16 && y < FRAME_HEIGHT - 1; j++, y++) {
            unsigned char rowBits = charTable[glyph * 16 + j]; // one glyph row
            unsigned char mask = 0x80;                         // leftmost pixel first
            // k walks the 8 glyph columns, x the frame columns, clipping at
            // the right edge (x < FRAME_WIDTH - 1). Each glyph is 8 px wide.
            for (int k = 0, x = startx + (int) i * 8;
                 k < 8 && x < FRAME_WIDTH - 1; k++, x++) {
                if (rowBits & mask) {
                    // Set luma to maximum brightness for glyph pixels.
                    *(offsetY + y * FRAME_WIDTH + x) = 255;
                }
                mask = mask >> 1;   // next pixel of this row
                if (mask == 0)
                    mask = 0x80;
            }
        }
    }

    return (char *) ptr_frame;
}
172 |
--------------------------------------------------------------------------------
/app/src/main/cpp/Yuv.h:
--------------------------------------------------------------------------------
//
// Created by jiangdongguo on 2018/10/19.
//

#ifndef ANDROIDYUVOSD_YUV_H
#define ANDROIDYUVOSD_YUV_H
// NOTE(review): the original angle-bracket include names were lost when this
// file was exported. Yuv.cpp relies on this header for strlen, assert, NULL
// and the allocator — the four standard headers below cover those; confirm
// against the original source.
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>

// Draws str (digits, ':' and '-') onto the luma plane of a YUV420 frame of
// FRAME_WIDTH x FRAME_HEIGHT, starting at (startx, starty). Returns ptr_frame.
char *draw_Font_Func(char *ptr_frame, int FRAME_WIDTH, int FRAME_HEIGHT,
                     int startx, int starty, char *str);

#endif //ANDROIDYUVOSD_YUV_H
--------------------------------------------------------------------------------
/app/src/main/cpp/YuvOsd.cpp:
--------------------------------------------------------------------------------
1 | //
2 | // Created by jianddongguo on 2018/8/23.
3 | //
4 | #include <jni.h>
5 | #include <cstring>
6 | #include <cstdlib>
7 | #include "Yuv.h"
8 | #include <android/log.h>
8 |
9 | #define TAG "YuvOsd"
10 | #define LOGI(...) __android_log_print(ANDROID_LOG_INFO,TAG,__VA_ARGS__)
11 |
12 | extern "C"
13 | JNIEXPORT jint JNICALL
14 | Java_com_jiangdg_natives_YuvUtils_nativeYV12ToNV21(JNIEnv *env, jclass type, jbyteArray jarray_,
15 | jint width, jint height) {
16 | jbyte *srcData = env->GetByteArrayElements(jarray_, NULL);
17 | jsize srcLen = env->GetArrayLength(jarray_);
18 | int yLength = width * height;
19 | int vLength = yLength / 4;
20 | // 开辟一段临时内存空间
21 | char *c_tmp = (char *) malloc(srcLen);
22 | if(c_tmp == NULL) {
23 | return -1;
24 | }
25 | // YYYYYYYY VV UU --> YYYYYYYY VUVU
26 | // 拷贝Y分量
27 | memcpy(c_tmp, srcData, yLength);
28 | int i = 0;
29 | for (i = 0; i < yLength / 4; i++) {
30 | // U分量
31 | c_tmp[yLength + 2 * i + 1] = srcData[yLength + vLength + i];
32 | // V分量
33 | c_tmp[yLength + 2 * i] = srcData[yLength + i];
34 | }
35 | // 将c_tmp的数据覆盖到jarray_
36 | env->SetByteArrayRegion(jarray_, 0, srcLen, (jbyte *) c_tmp);
37 | env->ReleaseByteArrayElements(jarray_, srcData, 0);
38 | // 释放临时内存
39 | free(c_tmp);
40 | return 0;
41 | }
42 |
43 | extern "C"
44 | JNIEXPORT jint JNICALL
45 | Java_com_jiangdg_natives_YuvUtils_nativeNV21ToYUV420sp(JNIEnv *env, jclass type, jbyteArray jarray_,
46 | jint width, jint height) {
47 | jbyte *srcData = env->GetByteArrayElements(jarray_, NULL);
48 | jsize srcLen = env->GetArrayLength(jarray_);
49 | int yLength = width * height;
50 | int uLength = yLength / 4;
51 | // 开辟一段临时内存空间
52 | char *c_tmp = (char *) malloc(srcLen);
53 | if(c_tmp == NULL) {
54 | return -1;
55 | }
56 | // 拷贝Y分量
57 | memcpy(c_tmp, srcData, yLength);
58 | int i = 0;
59 | for (i = 0; i < yLength / 4; i++) {
60 | // U分量
61 | c_tmp[yLength + 2 * i] = srcData[yLength + 2 * i + 1];
62 | // V分量
63 | c_tmp[yLength + 2 * i + 1] = srcData[yLength + 2 * i];
64 | }
65 | // 将c_tmp的数据覆盖到jarray_
66 | env->SetByteArrayRegion(jarray_, 0, srcLen, (jbyte *) c_tmp);
67 | env->ReleaseByteArrayElements(jarray_, srcData, 0);
68 | // 释放临时内存
69 | free(c_tmp);
70 | return 0;
71 | }
72 |
73 | extern "C"
74 | JNIEXPORT jint JNICALL
75 | Java_com_jiangdg_natives_YuvUtils_nativeNV21ToYUV420p(JNIEnv *env, jclass type, jbyteArray jarray_,
76 | jint width, jint height) {
77 | jbyte *srcData = env->GetByteArrayElements(jarray_, NULL);
78 | jsize srcLen = env->GetArrayLength(jarray_);
79 | int yLength = width * height;
80 | int uLength = yLength / 4;
81 | // 开辟一段临时内存空间
82 | char *c_tmp = (char *) malloc(srcLen);
83 | if(c_tmp == NULL) {
84 | return -1;
85 | }
86 | // 拷贝Y分量
87 | memcpy(c_tmp, srcData, yLength);
88 | int i = 0;
89 | for (i = 0; i < yLength / 4; i++) {
90 | // U分量
91 | c_tmp[yLength + i] = srcData[yLength + 2 * i + 1];
92 | // V分量
93 | c_tmp[yLength + uLength + i] = srcData[yLength + 2 * i];
94 | }
95 | // 将c_tmp的数据覆盖到jarray_
96 | env->SetByteArrayRegion(jarray_, 0, srcLen, (jbyte *) c_tmp);
97 | env->ReleaseByteArrayElements(jarray_, srcData, 0);
98 | // 释放临时内存
99 | free(c_tmp);
100 | return 0;
101 | }
102 |
103 | JNIEXPORT void JNICALL Java_com_jiangdg_natives_YuvUtils_nativeRotateNV21Flip
104 | (JNIEnv *env, jclass jcls, jbyteArray j_srcArr, jbyteArray j_destArr, jint srcWidth,
105 | jint srcHeight, jint rotateDegree) {
106 | if (j_srcArr == NULL || j_destArr == NULL) {
107 | return;
108 | }
109 | jint wh = 0;
110 | jint mWidth = 0;
111 | jint mHeight = 0;
112 | jint uvHeight = 0;
113 | if (srcWidth != mWidth || srcHeight != mHeight) {
114 | mWidth = srcWidth;
115 | mHeight = srcHeight;
116 | wh = srcWidth * srcHeight;
117 | uvHeight = srcHeight >> 1; // uvHeight=height/2
118 | }
119 | // j_srcArr转jbyte *
120 | jbyte *c_srcArr = env->GetByteArrayElements(j_srcArr, JNI_FALSE);
121 | jbyte *c_destArr = env->GetByteArrayElements(j_destArr, JNI_FALSE);
122 | int k = 0, i = 0, j = 0;
123 | if (rotateDegree == 270) {
124 | // 旋转Y
125 | for (i = 0; i < srcWidth; i++) {
126 | int nPos = srcWidth - 1;
127 | for (j = 0; j < srcHeight; j++) {
128 | c_destArr[k] = c_srcArr[nPos - i];
129 | k++;
130 | nPos += srcWidth;
131 | }
132 | }
133 |
134 | // 旋转UV
135 | for (i = 0; i < srcWidth; i += 2) {
136 | int nPos = wh + srcWidth - 2;
137 | for (j = 0; j < uvHeight; j++) {
138 | c_destArr[k] = c_srcArr[nPos - i];
139 | c_destArr[k + 1] = c_srcArr[nPos - i + 1];
140 | k += 2;
141 | nPos += srcWidth;
142 | }
143 | }
144 | } else if (rotateDegree == 180) {
145 | // 旋转Y分量
146 | for (i = wh - 1; i >= 0; i--) {
147 | c_destArr[k] = c_srcArr[i];
148 | k++;
149 | }
150 | // 旋转U、V分量
151 | for (j = wh * 3 / 2 - 1; j >= wh; j -= 2) {
152 | c_destArr[k] = c_srcArr[j - 1];
153 | c_destArr[k + 1] = c_srcArr[j];
154 | k += 2;
155 | }
156 | }
157 | // 释放内存,是否同步到Java层
158 | env->ReleaseByteArrayElements(j_srcArr, c_srcArr, JNI_FALSE);
159 | env->ReleaseByteArrayElements(j_destArr, c_destArr, JNI_FALSE);
160 | }
161 |
162 | extern "C"
163 | JNIEXPORT void JNICALL Java_com_jiangdg_natives_YuvUtils_nativeRotateNV21
164 | (JNIEnv *env, jclass jcls, jbyteArray j_srcArr, jbyteArray j_destArr, jint width,
165 | jint height, jint rotateDegree) {
166 | jbyte *c_srcArr = (jbyte *) env->GetByteArrayElements(j_srcArr, JNI_FALSE);
167 | jbyte *c_destArr = (jbyte *) env->GetByteArrayElements(j_destArr, JNI_FALSE);
168 | jint wh = width * height;
169 | jint frameSize = wh * 3 / 2;
170 | int k = 0, i = 0, j = 0;
171 | if (rotateDegree == 90) {
172 | // 旋转Y
173 | for (i = 0; i < width; i++) {
174 | for (j = height - 1; j >= 0; j--) {
175 | c_destArr[k] = c_srcArr[width * j + i];
176 | k++;
177 | }
178 | }
179 | // 旋转U、V分量
180 | for (i = 0; i < width; i += 2) {
181 | for (j = height / 2 - 1; j >= 0; j--) {
182 | c_destArr[k] = c_srcArr[wh + width * j + i];
183 | c_destArr[k + 1] = c_srcArr[wh + width * j + i + 1];
184 | k += 2;
185 | }
186 | }
187 | } else if (rotateDegree == 180) {
188 | // 旋转Y分量
189 | for (i = wh - 1; i >= 0; i--) {
190 | c_destArr[k] = c_srcArr[i];
191 | k++;
192 | }
193 | // 旋转U、V分量
194 | for (j = wh * 3 / 2 - 1; j >= wh; j -= 2) {
195 | c_destArr[k] = c_srcArr[j - 1];
196 | c_destArr[k + 1] = c_srcArr[j];
197 | k += 2;
198 | }
199 | } else if (rotateDegree == 270) {
200 | // 旋转Y分量
201 | for (i = width - 1; i >= 0; i--) {
202 | for (j = height - 1; j >= 0; j--) {
203 | c_destArr[k] = c_srcArr[width * j + i];
204 | k++;
205 | }
206 | }
207 | // 旋转U、V分量
208 | for (i = width - 1; i >= 0; i -= 2) {
209 | for (j = height / 2 - 1; j >= 0; j--) {
210 | c_destArr[k] = c_srcArr[wh + width * j + i - 1];
211 | c_destArr[k + 1] = c_srcArr[wh + width * j + i];
212 | k += 2;
213 | }
214 | }
215 | }
216 | // 释放数组资源
217 | env->ReleaseByteArrayElements(j_srcArr, c_srcArr, JNI_FALSE);
218 | env->ReleaseByteArrayElements(j_destArr, c_destArr, JNI_FALSE);
219 | }
220 |
221 | extern "C"
222 | char *jstringTostring(JNIEnv *env, jstring jstr) {
223 | char *rtn = NULL;
224 | jclass clsstring = env->FindClass("java/lang/String");
225 | jstring strencode = env->NewStringUTF("utf-8");
226 | jmethodID mid = env->GetMethodID(clsstring, "getBytes", "(Ljava/lang/String;)[B");
227 | jbyteArray barr = (jbyteArray) env->CallObjectMethod(jstr, mid, strencode);
228 | jsize alen = env->GetArrayLength(barr);
229 | jbyte *ba = env->GetByteArrayElements(barr, JNI_FALSE);
230 | if (alen > 0) {
231 | rtn = (char *) malloc(alen + 1);
232 | memcpy(rtn, ba, alen);
233 | rtn[alen] = 0;
234 | }
235 | env->ReleaseByteArrayElements(barr, ba, 0);
236 | return rtn;
237 | }
238 |
239 | //extern "C"
240 | //JNIEXPORT void JNICALL
241 | //Java_com_jiangdg_natives_YuvUtils_addYuvOsd(JNIEnv *env, jclass type, jbyteArray src_, jint width,
242 | // jint height, jstring osdStr_, jboolean isHorizontalTake) {
243 |
244 | //}
245 | extern "C"
246 | JNIEXPORT void JNICALL
247 | Java_com_jiangdg_natives_YuvUtils_addYuvOsd(JNIEnv *env, jclass type, jbyteArray src_, jint width,
248 | jint height, jboolean isHorizontalTake, jstring osdStr_,
249 | jint startX, jint startY) {
250 | // 一副图像有width * height个像素
251 | // 每个像素占3/2个字节
252 | int frameSize = width * height * 3 / 2;
253 | if (src_ == NULL || osdStr_ == NULL) {
254 | LOGI("addYuvOsd传入参数有误...");
255 | return;
256 | }
257 | jbyte *src = env->GetByteArrayElements(src_, NULL);
258 | const char *osdStr = env->GetStringUTFChars(osdStr_, 0);
259 | if (!src || !osdStr) {
260 | LOGI("addYuvOsd分配内存失败");
261 | return;
262 | }
263 | // 根据图像方向,添加水印
264 | if (isHorizontalTake) {
265 | draw_Font_Func((char *) src, width, height, startX, startY, jstringTostring(env, osdStr_));
266 | } else {
267 | draw_Font_Func((char *) src, height,width , startX, startY,jstringTostring(env, osdStr_));
268 | }
269 | env->ReleaseByteArrayElements(src_, src, 0);
270 | env->ReleaseStringUTFChars(osdStr_, osdStr);
271 | }
--------------------------------------------------------------------------------
/app/src/main/cpp/osd/Osdchar.c:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * File name : Osdchar.c
3 | * Description : 实现yuv上的字幕叠加,同时支持滚动,颜色变换,逐渐消隐,大小可调等功能.
4 | 该文件为纯C文件,不依赖第三方库及其他系统调用。可实现跨平台功能。
5 | * Wrote by/Date : gaoc@devison.com/2010.02.27
6 | * Modify/Date :
7 | * Project : V30E
8 | *******************************************************************************/
9 | #include "Osdchar.h"
10 | #include
11 |
12 | #define LOG_TAG_OSD "osd"
13 |
14 | //#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG_OSD,__VA_ARGS__)
15 | // Static working buffers, sized by IMAGEWIDTH/IMAGEHEIGHT from Osdchar.h.
16 | unsigned char srcRGBBuf[IMAGEWIDTH * IMAGEHEIGHT * 3];
17 | unsigned char dstRGBBuf[IMAGEWIDTH * IMAGEHEIGHT * 3];
18 | unsigned char srcYUVBuf[IMAGEWIDTH * IMAGEHEIGHT * 3 / 2];
19 | unsigned char dstYUVBuf[IMAGEWIDTH * IMAGEHEIGHT * 3 / 2];
20 | unsigned char subYUVBuf[IMAGEWIDTH * IMAGEHEIGHT * 3 / 2];
21 |
22 | // Lookup-table storage for YUV<->RGB conversion (filled by _InitDitherTab).
23 | long int crv_tab[256];
24 | long int cbu_tab[256];
25 | long int cgu_tab[256];
26 | long int cgv_tab[256];
27 | long int tab_76309[256];
28 | unsigned char clp[1024];
29 |
30 | // Globals: default caption text, the 64-slot caption-object table, and
31 | // the dot-matrix font library file handles opened by OSD_Init.
32 | char g_szDefault[128] = "请在此处设置缺省叠加文字汇智";
33 | O_OBJCHAR *g_pAllObjCharObj[64];
34 | char *g_szAllCharObj[64];
35 | FILE *g_fpHZKLIB = NULL;
36 | FILE *g_fpASCII = NULL;
37 | unsigned int g_frameCounter = 0;
38 |
39 | // RGB -> YUV conversion coefficients.
40 | #define MY(a,b,c) (( a* 0.2989 + b* 0.5866 + c* 0.1145))
41 | #define MU(a,b,c) (( a*(-0.1688) + b*(-0.3312) + c* 0.5000 + 128))
42 | #define MV(a,b,c) (( a* 0.5000 + b*(-0.4184) + c*(-0.0816) + 128))
43 | // Range clamping: saturate each converted component to [0, 255].
44 | #define DY(a,b,c) (MY(a,b,c) > 255 ? 255 : (MY(a,b,c) < 0 ? 0 : MY(a,b,c)))
45 | #define DU(a,b,c) (MU(a,b,c) > 255 ? 255 : (MU(a,b,c) < 0 ? 0 : MU(a,b,c)))
46 | #define DV(a,b,c) (MV(a,b,c) > 255 ? 255 : (MV(a,b,c) < 0 ? 0 : MV(a,b,c)))
47 | // YUV -> RGB conversion matrix.
48 | double YuvToRgb[3][3] = { 1, 0, 1.4022, 1, -0.3456, -0.7145, 1, 1.771, 0 };
48 | /*******************************************************************************
49 | 说明:
50 | 1.所有函数前向申明
51 | 2.以OSD_XXX(XX,XX,...)形式出现的函数均为接口函数
52 | 3.以_XXX(XX,XX,...)形式出现的函数均为私有函数
53 | 4.以p_XXX形式的均为指针量,以g_XXX形式出现的均为全局量
54 |
55 | WroteBy/Date:
56 | gaoc@Dvision.com/2010.03.05
57 |
58 | Modify:
59 | *******************************************************************************/
60 | //接口函数
61 | char OSD_CreateObjCharObj(int strID, char *szStr, O_STRINGATTR OAttrCharObj);
62 | char OSD_DeleteObjCharObj(int strID);
63 | char OSD_SetContentCharObj(int strID, char *szStr);
64 | char OSD_SetPositionCharObj(int strID, int x, int y);
65 | char OSD_SetAttrCharObj(int strID, O_STRINGATTR OAttrCharObj);
66 | void OSD_FeedFrameYUV420(char* pYUV420Frame, int iSrcWidth, int iSrcHeight);
67 | char OSD_Init(char *szPathHZK, char *szPathASCII);
68 | void OSD_Release();
69 | //内部函数
70 | void _InitDitherTab();
71 | void _RGB24ToYUV420(unsigned char *RGB, int nWidth, int nHeight, //源
72 | unsigned char *YUV, unsigned long nLen); //目标
73 | void _YUV420ToRGB24(unsigned char *src_yuv, //源
74 | unsigned char *dst_rgb, int width, int height); //目标
75 | void _YUV420ToYUV422(char* pYUV420Buf, int iSrcWidth, int iSrcHeight, //源
76 | char* pYUV422Buf); //目标
77 | void _YUV422ToYUV420(char* pYUV422Buf, int iSrcWidth, int iSrcHeight, //源
78 | char* pYUV420Buf); //目标
79 | void _GetSubReginFromYUV420(unsigned char *src_yuv, int srcW, int srcH, //yuv源图
80 | unsigned char *sub_yuv, int x, int y, int subW, int subH); //yuv子区域
81 | void _SetSubReginToYUV420(unsigned char *src_yuv, int srcW, int srcH, //yuv源图
82 | unsigned char *sub_yuv, int x, int y, int subW, int subH); //yuv子区域
83 | void _OverlapCaptionOnRGB(unsigned char* srcRgbBuf, int nWidth, int nHeight, //rgb源图
84 | char* pCaption, O_STRINGATTR *pOAttrCharObj); //要叠加的文字,叠加属性及输出
85 | char _OverLapCaptionOnYUV420(unsigned char *src_yuv, int srcW, int srcH, //源图及宽高
86 | int xStart, int yStart, int dstW, int dstH, //要叠加的文字及区域
87 | char* pCaption, O_STRINGATTR *pOAttrCharObj); //要叠加的文字及叠加属性
88 | void _OverLapCaptionOnYUV422Raw(char* pCharcode, int column, int row,
89 | int imageWidth, int imageHeight, char *pYUVbuffer, char OsdY, char OsdU,
90 | char OsdV);
91 |
92 | /*******************************************************************************
93 | 说明:
94 | 以下以OSD_XXX(XX,XX,...)形式出现的函数均为接口函数
95 |
96 | WroteBy/Date:
97 | gaoc@Dvision.com/2010.03.05
98 |
99 | Modify:
100 | *******************************************************************************/
101 | //创建叠加的字符串对象,用于字符串叠加
102 | char OSD_CreateObjCharObj(int strID, char *szStr, O_STRINGATTR OAttrCharObj) {
103 | szStr = strlen(szStr) == 0 ? g_szDefault : szStr;
104 |
105 | if (g_pAllObjCharObj[strID] == NULL) {
106 | g_pAllObjCharObj[strID] = (O_OBJCHAR *) malloc(sizeof(O_OBJCHAR));
107 | assert(g_pAllObjCharObj[strID]);
108 | strcpy(g_pAllObjCharObj[strID]->szStr, szStr);
109 | g_pAllObjCharObj[strID]->oAttrChar = OAttrCharObj;
110 | g_pAllObjCharObj[strID]->w = g_pAllObjCharObj[strID]->oAttrChar.sizeW;
111 | g_pAllObjCharObj[strID]->h = g_pAllObjCharObj[strID]->oAttrChar.sizeH;
112 | } else {
113 | return ERR_OBJEXIST; //返回错误码,对象已经存在
114 | }
115 |
116 | return ERR_NONE;
117 | }
118 | //删除叠加的字符串对象
119 | char OSD_DeleteObjCharObj(int strID) {
120 | if (g_pAllObjCharObj[strID] == NULL)
121 | return ERR_OBJNOTEXIST; //返回错误码,对象不存在
122 | else {
123 | free(g_pAllObjCharObj[strID]);
124 | g_pAllObjCharObj[strID] = NULL;
125 | }
126 |
127 | return ERR_NONE;
128 | }
129 | //改变该字符串的属性
130 | char OSD_SetAttrCharObj(int strID, O_STRINGATTR OAttrCharObj) {
131 | if (g_pAllObjCharObj[strID] == NULL)
132 | return ERR_OBJNOTEXIST; //返回错误码,对象不存在
133 | else {
134 | g_pAllObjCharObj[strID]->oAttrChar = OAttrCharObj;
135 | }
136 |
137 | return ERR_NONE;
138 | }
139 | //改变字符串的内容
140 | char OSD_SetContentCharObj(int strID, char *szStr) {
141 | if (g_pAllObjCharObj[strID] == NULL)
142 | return ERR_OBJNOTEXIST; //返回错误码,对象不存在
143 | else {
144 | strcpy(g_pAllObjCharObj[strID]->szStr, szStr);
145 | }
146 |
147 | return ERR_NONE;
148 | }
149 | //改变字符串叠加位置
150 | char OSD_SetPositionCharObj(int strID, int x, int y) {
151 | if (g_pAllObjCharObj[strID] == NULL)
152 | return ERR_OBJNOTEXIST; //返回错误码,对象不存在
153 | else {
154 | g_pAllObjCharObj[strID]->x = x;
155 | g_pAllObjCharObj[strID]->y = y;
156 | }
157 |
158 | return ERR_NONE;
159 | }
160 | //当设置完要叠加的字符后通过该函数装填图像的连续帧
161 | void OSD_FeedFrameYUV420(char* pYUV420Frame, int iSrcWidth, int iSrcHeight) {
162 | int i = 0;
163 | g_frameCounter++;
164 |
165 | //遍历列表叠加所有字符串对象
166 | for (i = 0; i < 64; i++) {
167 | //如果空项,则检查下一个
168 | if (g_pAllObjCharObj[i] == NULL)
169 | continue;
170 |
171 | switch (g_pAllObjCharObj[i]->oAttrChar.eActionType) {
172 | case e_SCROLL_LEFT: //如果是滚动字幕,则需要修改属性中叠加位置参数
173 | g_pAllObjCharObj[i]->x =
174 | g_frameCounter
175 | % (g_pAllObjCharObj[i]->oAttrChar.actionValue1)
176 | == 0 ?
177 | g_pAllObjCharObj[i]->x
178 | - g_pAllObjCharObj[i]->oAttrChar.actionValue2 :
179 | g_pAllObjCharObj[i]->x;
180 | break;
181 | case e_SCROLL_RIGHT: //如果是滚动字幕,则需要修改属性中叠加位置参数
182 | g_pAllObjCharObj[i]->x =
183 | g_frameCounter
184 | % (g_pAllObjCharObj[i]->oAttrChar.actionValue1)
185 | == 0 ?
186 | g_pAllObjCharObj[i]->x
187 | + g_pAllObjCharObj[i]->oAttrChar.actionValue2 :
188 | g_pAllObjCharObj[i]->x;
189 | break;
190 | case e_SCROLL_UP: //如果是滚动字幕,则需要修改属性中叠加位置参数
191 | g_pAllObjCharObj[i]->y =
192 | g_frameCounter
193 | % (g_pAllObjCharObj[i]->oAttrChar.actionValue1)
194 | == 0 ?
195 | g_pAllObjCharObj[i]->y
196 | - g_pAllObjCharObj[i]->oAttrChar.actionValue2 :
197 | g_pAllObjCharObj[i]->y;
198 | break;
199 | case e_SCROLL_DOWN: //如果是滚动字幕,则需要修改属性中叠加位置参数
200 | g_pAllObjCharObj[i]->y =
201 | g_frameCounter
202 | % (g_pAllObjCharObj[i]->oAttrChar.actionValue1)
203 | == 0 ?
204 | g_pAllObjCharObj[i]->y
205 | + g_pAllObjCharObj[i]->oAttrChar.actionValue2 :
206 | g_pAllObjCharObj[i]->y;
207 | break;
208 | case e_STATIC: //如果是静态字幕
209 | break;
210 | case e_SLOWHIDE: //如果是逐渐消隐的字幕
211 | break;
212 | case e_SPARK: //如果是闪烁字幕
213 | break;
214 | default:
215 | break;
216 | }
217 | _OverLapCaptionOnYUV420(pYUV420Frame, iSrcWidth,
218 | iSrcHeight, //源图及宽高
219 | g_pAllObjCharObj[i]->x, g_pAllObjCharObj[i]->y,
220 | g_pAllObjCharObj[i]->w, g_pAllObjCharObj[i]->h, //要叠加的位置
221 | g_pAllObjCharObj[i]->szStr, &(g_pAllObjCharObj[i]->oAttrChar)); //要叠加的文字及叠加属性
222 | }
223 | }
224 | //初始化
225 | char OSD_Init(char *szPathHZK, char *szPathASCII) {
226 | //加载汉字点阵字库
227 | if ((g_fpHZKLIB = fopen(szPathHZK, "rb")) == NULL) {
228 | return ERR_FILEOPENFAIL;
229 | }
230 |
231 | // LOGE("OSD_Init 1");
232 | //加载ascii点阵字库
233 | if ((g_fpASCII = fopen(szPathASCII, "rb")) == NULL) {
234 | return ERR_FILEOPENFAIL;
235 | }
236 | // LOGE("OSD_Init 2");
237 | //初始化转换表
238 | _InitDitherTab();
239 |
240 | return ERR_NONE;
241 | }
242 | //析构
243 | void OSD_Release() {
244 | //关闭字库文件
245 | if (g_fpHZKLIB)
246 | fclose(g_fpHZKLIB);
247 | if (g_fpASCII)
248 | fclose(g_fpASCII);
249 | }
250 |
251 | /*******************************************************************************
252 | 说明:
253 | 以下以_XXX(XX,XX,...)形式出现的函数均为私有函数
254 |
255 | WroteBy/Date:
256 | gaoc@Dvision.com/2010.03.05
257 |
258 | Modify:
259 | *******************************************************************************/
260 | //初始化转换表
261 | void _InitDitherTab() {
262 | long int crv, cbu, cgu, cgv;
263 | int i, ind;
264 |
265 | crv = 104597;
266 | cbu = 132201;
267 | cgu = 25675;
268 | cgv = 53279;
269 |
270 | for (i = 0; i < 256; i++) {
271 | crv_tab[i] = (i - 128) * crv;
272 | cbu_tab[i] = (i - 128) * cbu;
273 | cgu_tab[i] = (i - 128) * cgu;
274 | cgv_tab[i] = (i - 128) * cgv;
275 | tab_76309[i] = 76309 * (i - 16);
276 | }
277 |
278 | for (i = 0; i < 384; i++) {
279 | clp[i] = 0;
280 | }
281 |
282 | ind = 384;
283 | for (i = 0; i < 256; i++) {
284 | clp[ind++] = i;
285 | }
286 |
287 | ind = 640;
288 | for (i = 0; i < 384; i++) {
289 | clp[ind++] = 255;
290 | }
291 | }
// Utility: convert packed 24-bit pixels into planar YUV420.
// The byte order appears to be B,G,R (RGB[i+2] is passed as the first
// DY/DU/DV argument) — confirm against the DY/DU/DV macro definitions,
// which live elsewhere in this file.
//   RGB     - source buffer, 3 bytes per pixel
//   nWidth  - width in pixels (assumed even for chroma sampling — TODO confirm)
//   nHeight - height in pixels (assumed even — TODO confirm)
//   YUV     - destination planar buffer: Y plane, then U, then V
//   nLen    - NOTE(review): passed by value, so the size stored into it
//             at the end of this function never reaches the caller; the
//             parameter is effectively write-only/dead. Confirm intent.
// Each U/V sample averages the DU/DV values of a 2x2 pixel block,
// computed when the bottom-right pixel of the block is visited.
void _RGB24ToYUV420(unsigned char *RGB, int nWidth, int nHeight,
        unsigned char *YUV, unsigned long nLen) {
    // Plane pointers into the destination buffer.
    int i, x, y, j;
    unsigned char *Y = NULL;
    unsigned char *U = NULL;
    unsigned char *V = NULL;

    Y = YUV;
    U = YUV + nWidth * nHeight;
    V = U + ((nWidth * nHeight) >> 2);

    for (y = 0; y < nHeight; y++) {
        for (x = 0; x < nWidth; x++) {
            j = y * nWidth + x;  // pixel index
            i = j * 3;           // byte index into the packed source

            // Luma for every pixel.
            Y[j] = (unsigned char) (DY(RGB[i+2], RGB[i+1], RGB[i]));

            // Chroma once per 2x2 block, at its bottom-right pixel.
            if (x % 2 == 1 && y % 2 == 1) {
                j = (nWidth >> 1) * (y >> 1) + (x >> 1);
                // i above is still valid (indexes the current pixel);
                // the four samples are: current, left, above, above-left.
                U[j] =
                        (unsigned char) ((DU(RGB[i +2 ], RGB[i+1], RGB[i])
                                + DU(RGB[i-1], RGB[i-2], RGB[i-3])
                                + DU(RGB[i+2 -nWidth*3], RGB[i+1-nWidth*3], RGB[i-nWidth*3])
                                + DU(RGB[i-1-nWidth*3], RGB[i-2-nWidth*3], RGB[i-3-nWidth*3]))/4);

                V[j] =
                        (unsigned char) ((DV(RGB[i+2 ], RGB[i+1], RGB[i])
                                + DV(RGB[i-1], RGB[i-2], RGB[i-3])
                                + DV(RGB[i+2 -nWidth*3], RGB[i+1-nWidth*3], RGB[i-nWidth*3])
                                + DV(RGB[i-1-nWidth*3], RGB[i-2-nWidth*3], RGB[i-3-nWidth*3]))/4);
            }
        }
    }

    // Dead store — nLen is a by-value parameter (see NOTE above).
    nLen = nWidth * nHeight + (nWidth * nHeight) / 2;
}
// Utility: convert planar YUV420 into packed 24-bit pixels using the
// lookup tables built by _InitDitherTab() (crv_tab, cbu_tab, cgu_tab,
// cgv_tab, tab_76309, clp — declared elsewhere in this file).
// Per pixel, the first output byte combines luma with the cbu (U) table
// and the third with the crv (V) table, i.e. B,G,R byte order.
// Two image rows are produced per outer iteration since each chroma
// sample covers a 2x2 luma block; width and height are therefore
// assumed even — TODO confirm with callers.
void _YUV420ToRGB24(unsigned char *src_yuv, unsigned char *dst_rgb, int width,
        int height) {
    int y1, y2, u, v;
    unsigned char *py1, *py2;   // luma pointers for the upper/lower row
    int i, j, c1, c2, c3, c4;   // per-sample chroma contributions
    unsigned char *d1, *d2;     // output pointers for the upper/lower row

    unsigned char *srcY = src_yuv;
    unsigned char *srcU = src_yuv + width * height;
    unsigned char *srcV = src_yuv + width * height + (width / 2) * (height / 2);

    py1 = srcY;
    py2 = py1 + width;
    d1 = dst_rgb;
    d2 = d1 + 3 * width;
    for (j = 0; j < height; j += 2) {
        for (i = 0; i < width; i += 2) {
            // One chroma pair per 2x2 block.
            u = *srcU++;
            v = *srcV++;

            c1 = crv_tab[v];
            c2 = cgu_tab[u];
            c3 = cgv_tab[v];
            c4 = cbu_tab[u];

            //up-left
            y1 = tab_76309[*py1++];
            *d1++ = clp[384 + ((y1 + c4) >> 16)];
            *d1++ = clp[384 + ((y1 - c2 - c3) >> 16)];
            *d1++ = clp[384 + ((y1 + c1) >> 16)];

            //down-left
            y2 = tab_76309[*py2++];
            *d2++ = clp[384 + ((y2 + c4) >> 16)];
            *d2++ = clp[384 + ((y2 - c2 - c3) >> 16)];
            *d2++ = clp[384 + ((y2 + c1) >> 16)];

            //up-right
            y1 = tab_76309[*py1++];
            *d1++ = clp[384 + ((y1 + c4) >> 16)];
            *d1++ = clp[384 + ((y1 - c2 - c3) >> 16)];
            *d1++ = clp[384 + ((y1 + c1) >> 16)];

            //down-right
            y2 = tab_76309[*py2++];
            *d2++ = clp[384 + ((y2 + c4) >> 16)];
            *d2++ = clp[384 + ((y2 - c2 - c3) >> 16)];
            *d2++ = clp[384 + ((y2 + c1) >> 16)];
        }
        // Skip the row the other pointer already covered.
        d1 += 3 * width;
        d2 += 3 * width;
        py1 += width;
        py2 += width;
    }
}
// Utility: convert planar YUV420 (I420) into planar YUV422 by
// duplicating every chroma row. The luma plane is copied unchanged;
// each U/V row of the source is written twice into the destination.
void _YUV420ToYUV422(char* pYUV420Buf, int iSrcWidth, int iSrcHeight,
        char* pYUV422Buf) {
    unsigned int nIamgeSize = iSrcWidth * iSrcHeight;
    int halfW = iSrcWidth / 2;
    int row;

    if ((pYUV420Buf == NULL) || (pYUV422Buf == NULL)) {
        return;
    }

    // Luma plane is laid out identically in both formats: one copy.
    memcpy(pYUV422Buf, pYUV420Buf, nIamgeSize);

    // Chroma planes: each source row feeds two destination rows.
    for (row = 0; row < iSrcHeight / 2; row++) {
        char *srcU = pYUV420Buf + nIamgeSize + row * halfW;
        char *srcV = pYUV420Buf + nIamgeSize * 5 / 4 + row * halfW;
        char *dstU = pYUV422Buf + nIamgeSize + (2 * row) * halfW;
        char *dstV = pYUV422Buf + nIamgeSize * 3 / 2 + (2 * row) * halfW;

        memcpy(dstU, srcU, halfW);
        memcpy(dstU + halfW, srcU, halfW);
        memcpy(dstV, srcV, halfW);
        memcpy(dstV + halfW, srcV, halfW);
    }
}
// Utility: convert planar YUV422 into planar YUV420 (I420) by keeping
// only the even-numbered row of each pair of chroma rows. The luma
// plane is copied unchanged.
void _YUV422ToYUV420(char* pYUV422Buf, int iSrcWidth, int iSrcHeight,
        char* pYUV420Buf) {
    unsigned int nIamgeSize = iSrcWidth * iSrcHeight;
    int halfW = iSrcWidth / 2;
    int row;

    if ((pYUV422Buf == NULL) || (pYUV420Buf == NULL)) {
        return;
    }

    // Luma plane is laid out identically in both formats: one copy.
    memcpy(pYUV420Buf, pYUV422Buf, nIamgeSize);

    // Chroma planes: drop every odd source row.
    for (row = 0; row < iSrcHeight / 2; row++) {
        memcpy(pYUV420Buf + nIamgeSize + row * halfW,
               pYUV422Buf + nIamgeSize + 2 * row * halfW, halfW);
        memcpy(pYUV420Buf + nIamgeSize * 5 / 4 + row * halfW,
               pYUV422Buf + nIamgeSize * 3 / 2 + 2 * row * halfW, halfW);
    }
}
// Copy a (subW x subH) window whose top-left corner is (x, y) out of a
// planar YUV420 image into sub_yuv (also planar YUV420). Chroma
// coordinates are derived by halving, so x, y, subW and subH are
// expected to be even for exact chroma alignment. Out-of-range windows
// and NULL buffers are rejected silently.
void _GetSubReginFromYUV420(unsigned char *src_yuv, int srcW, int srcH, //yuv source image
        unsigned char *sub_yuv, int x, int y, int subW, int subH) //sub-region position and size
{
    int row;
    int srcArea = srcW * srcH;
    int subArea = subW * subH;
    unsigned char *srcU, *srcV, *subU, *subV;

    if (src_yuv == NULL || sub_yuv == NULL)
        return;

    // Reject windows that do not fit inside the source image.
    if (subW > srcW || (x + subW) > srcW || subH > srcH || (y + subH) > srcH) {
        return;
    }

    srcU = src_yuv + srcArea;
    srcV = srcU + srcArea / 4;
    subU = sub_yuv + subArea;
    subV = subU + subArea / 4;

    // Luma rows.
    for (row = 0; row < subH; row++) {
        memcpy(sub_yuv + subW * row, src_yuv + srcW * (y + row) + x, subW);
    }

    // Chroma rows (quarter-resolution planes).
    for (row = 0; row < (subH / 2); row++) {
        memcpy(subU + (subW / 2) * row,
               srcU + (srcW / 2) * (y / 2 + row) + x / 2, subW / 2);
        memcpy(subV + (subW / 2) * row,
               srcV + (srcW / 2) * (y / 2 + row) + x / 2, subW / 2);
    }
}
// Paste a planar YUV420 patch sub_yuv of size (subW x subH) into the
// source image src_yuv at position (x, y) — the write-direction mirror
// of _GetSubReginFromYUV420. x, y, subW, subH are expected even for
// chroma alignment; out-of-range patches and NULL buffers are rejected.
void _SetSubReginToYUV420(unsigned char *src_yuv, int srcW, int srcH, //yuv destination image
        unsigned char *sub_yuv, int x, int y, int subW, int subH) //yuv patch
{
    int row;
    int srcArea = srcW * srcH;
    int subArea = subW * subH;
    unsigned char *srcU, *srcV, *subU, *subV;

    if (src_yuv == NULL || sub_yuv == NULL)
        return;

    // Reject patches that do not fit inside the destination image.
    if (subW > srcW || (x + subW) > srcW || subH > srcH || (y + subH) > srcH) {
        return;
    }

    srcU = src_yuv + srcArea;
    srcV = srcU + srcArea / 4;
    subU = sub_yuv + subArea;
    subV = subU + subArea / 4;

    // Luma rows.
    for (row = 0; row < subH; row++) {
        memcpy(src_yuv + srcW * (y + row) + x, sub_yuv + subW * row, subW);
    }

    // Chroma rows (quarter-resolution planes).
    for (row = 0; row < (subH / 2); row++) {
        memcpy(srcU + (srcW / 2) * (y / 2 + row) + x / 2,
               subU + (subW / 2) * row, subW / 2);
        memcpy(srcV + (srcW / 2) * (y / 2 + row) + x / 2,
               subV + (subW / 2) * row, subW / 2);
    }
}
// Overlay a text string onto a packed 24-bit pixel buffer. Glyphs are
// 16x16 bitmaps (32 bytes each) read from the global font files opened
// by OSD_Init(): g_fpHZKLIB for GB2312 Chinese characters (high bit
// set, two bytes per char) and g_fpASCII for single-byte characters.
// Drawing starts at the buffer's left edge and advances 16 pixels per
// glyph; rows are always taken from the top of the buffer (j counts
// from 0), so the caller positions the text by passing a cropped patch.
// NOTE(review): iStartYpos is declared but never used; both width
// branches below add 16, making the if/else redundant — confirm whether
// ASCII glyphs were meant to advance by 8.
void _OverlapCaptionOnRGB(unsigned char* srcRgbBuf, int nWidth, int nHeight,
        char* pCharcode, O_STRINGATTR *pOAttrCharObj) {
    int i, j, k, m;
    unsigned char qh, wh;          // GB2312 zone/position codes
    unsigned long offset;          // byte offset of the glyph in the font file
    unsigned int pixelCount;
    // Default glyph bitmap (overwritten before use by memset/fread).
    // NOTE(review): initializers like 0xF0 overflow plain char where
    // char is signed — implementation-defined conversion; confirm.
    char frontBuffer[32] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7F, 0xF0,
            0x18, 0x3C, 0x18, 0x0E, 0x18, 0x0E, 0x18, 0x0E, 0x18, 0x0F, 0x18,
            0x0E, 0x18, 0x0E, 0x18, 0x0C, 0x18, 0x38, 0x7F, 0xE0, 0x00, 0x00,
            0x00, 0x00 };
    int iStartXpos = 0;
    int iStartYpos = 0;
    unsigned int nStrlen = strlen(pCharcode);
    unsigned char bIsChar = 0;     // 1 = single-byte char, 0 = Chinese char

    // Guard against NULL inputs.
    if ((srcRgbBuf == NULL) || (pCharcode == NULL)) {
        return;
    }

    // Read and overlay the characters one at a time.
    for (m = 0; m < nStrlen;) {

        memset(frontBuffer, 0, sizeof(frontBuffer));

        // Fetch the glyph bitmap from the appropriate font file.
        if (pCharcode[m] & 0x80) { // Chinese character (two bytes)
            qh = pCharcode[m] - 0xa0;
            wh = pCharcode[m + 1] - 0xa0;
            offset = (94 * (qh - 1) + (wh - 1)) * 32;
            fseek(g_fpHZKLIB, offset, SEEK_SET);
            fread(frontBuffer, 32, 1, g_fpHZKLIB);
            m += 2;
            bIsChar = 0;
        } else { // single-byte character
            offset = pCharcode[m] * 32;
            fseek(g_fpASCII, offset, SEEK_SET);
            fread(frontBuffer, 32, 1, g_fpASCII);
            m++;
            bIsChar = 1;
        }

        // Blit: for each set bit of the 16x16 glyph, write the overlay
        // color (B, G, R byte order) into the pixel buffer.
        for (j = 0; j < 16; j++) {
            pixelCount = 0;
            for (i = 0; i < 2; i++) {
                for (k = 0; k < 8; k++) {
                    if (((frontBuffer[j * 2 + i] >> (7 - k)) & 0x1) != 0) {
                        srcRgbBuf[nWidth * 3 * j + (i * 8 + k + iStartXpos) * 3] =
                                pOAttrCharObj->osdB;
                        srcRgbBuf[nWidth * 3 * j + (i * 8 + k + iStartXpos) * 3
                                + 1] = pOAttrCharObj->osdG;
                        srcRgbBuf[nWidth * 3 * j + (i * 8 + k + iStartXpos) * 3
                                + 2] = pOAttrCharObj->osdR;
                    }
                    //if (k%2==0){
                    pixelCount++;
                    //}
                }
            }
        }

        // Advance by the glyph width (same for both kinds — see NOTE).
        if (bIsChar == 0) {
            iStartXpos += 16;
        } else {
            iStartXpos += 16;
        }

        // Stop once the text runs off the right edge of the buffer.
        if (iStartXpos > nWidth)
            return;

    }
}
// Overlay a caption onto a YUV420 frame indirectly: crop the region the
// text will cover, convert it to packed 24-bit pixels, draw the text
// there, convert back to YUV420, and paste the patch into the frame.
// Uses the module-global scratch buffers subYUVBuf, srcRGBBuf and
// dstYUVBuf (declared elsewhere in this file); not reentrant.
// NOTE(review): dstW/dstH are never read — the patch size is derived
// from the string length and pOAttrCharObj->sizeW/sizeH. Confirm intent.
// Returns ERR_NONE unconditionally.
char _OverLapCaptionOnYUV420(unsigned char *src_yuv, int srcW, int srcH, //source frame and size
        int xStart, int yStart, int dstW, int dstH, //caption position and declared size
        char* pCaption, O_STRINGATTR *pOAttrCharObj) //caption text and attributes
{
    /*
     FILE *pSubYuv = fopen("AsubYUV.yuv","wb");
     FILE *pSubRGB = fopen("AsubRGB.rgb","wb");
     FILE *pDstYuv = fopen("AdstYUV.yuv","wb");
     FILE *pDstRGB = fopen("AdstRGB.rgb","wb");
     */

    int sub_W = 0;
    int sub_H = pOAttrCharObj->sizeH;
    int m = 0;
    int nStrlen = strlen(pCaption);
    // Compute the width of the region to crop: one glyph cell per
    // character (two bytes consumed per Chinese character).
    for (m = 0; m < nStrlen;) {
        if (pCaption[m] & 0x80)
            m += 2;
        else
            // single-byte character
            m += 1;

        sub_W += pOAttrCharObj->sizeW;
    }

    // Clip the patch to the frame boundary.
    sub_W = (sub_W + xStart) > srcW ? (srcW - xStart) : sub_W;
    sub_H = (sub_H + yStart) > srcH ? (srcH - yStart) : sub_H;

    assert(src_yuv && pCaption && pOAttrCharObj);

    // Crop the covered region out of the frame.
    _GetSubReginFromYUV420(src_yuv, srcW, srcH, subYUVBuf, xStart, yStart,
            sub_W, sub_H);

    // LOGE("_GetSubReginFromYUV420 success");
    //fwrite(subYUVBuf,1,sub_W*sub_H*3/2,pSubYuv);
    //fflush(pSubYuv);
    //fclose(pSubYuv);

    // Convert the YUV patch to packed pixels.
    _YUV420ToRGB24(subYUVBuf, srcRGBBuf, sub_W, sub_H);
    // LOGE("_YUV420ToRGB24 success");
    //fwrite(srcRGBBuf,1,sub_W*sub_H*3,pSubRGB);
    //fflush(pSubRGB);
    //fclose(pSubRGB);

    // Draw the text on the packed-pixel patch.
    _OverlapCaptionOnRGB(srcRGBBuf, sub_W, sub_H, pCaption, pOAttrCharObj);
    // LOGE("_OverlapCaptionOnRGB success");
    //fwrite(dstRGBBuf,1,sub_W*sub_H*3,pDstRGB);
    //fflush(pDstRGB);
    //fclose(pDstRGB);

    // Convert the patch back to YUV420.
    _RGB24ToYUV420(srcRGBBuf, sub_W, sub_H, dstYUVBuf, sub_W * sub_H * 3 / 2);
    // LOGE("_RGB24ToYUV420 success");
    //fwrite(dstYUVBuf,1,sub_W*sub_H*3/2,pDstYuv);
    //fflush(pDstYuv);
    //fclose(pDstYuv);

    // Paste the modified patch back into the frame.
    _SetSubReginToYUV420(src_yuv, srcW, srcH, dstYUVBuf, xStart, yStart, sub_W,
            sub_H);
    // LOGE("_SetSubReginToYUV420 success");
    return ERR_NONE;
}
// Overlay a string directly onto a raw YUV422 buffer, writing OsdU and
// OsdY over lit glyph pixels starting at the hard-coded position
// (iStartXpos=100, iStartYpos=100). Glyph bitmaps come from the global
// font files opened by OSD_Init().
// NOTE(review): the column/row parameters, OsdV, temp1 and temp2 are
// only referenced from commented-out experiments below — the active
// code ignores them. Confirm whether this function is still in use.
void _OverLapCaptionOnYUV422Raw(char* pCharcode, int column, int row,
        int imageWidth, int imageHeight, char *pYUVbuffer, char OsdY, char OsdU,
        char OsdV) {
    int i, j, k, m;
    unsigned char qh, wh;      // GB2312 zone/position codes
    unsigned long offset;      // glyph offset inside the font file
    unsigned int pixelCount;
    // Default glyph bitmap (overwritten before use by memset/fread).
    char frontBuffer[32] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7F, 0xF0,
            0x18, 0x3C, 0x18, 0x0E, 0x18, 0x0E, 0x18, 0x0E, 0x18, 0x0F, 0x18,
            0x0E, 0x18, 0x0E, 0x18, 0x0C, 0x18, 0x38, 0x7F, 0xE0, 0x00, 0x00,
            0x00, 0x00 };
    int iStartXpos = 100;
    int iStartYpos = 100;
    unsigned int nStrlen = strlen(pCharcode);
    unsigned char bIsChar = 0; // 1 = single-byte char, 0 = Chinese char
    int temp1 = 0;
    int temp2 = 0;

    // Guard against NULL inputs.
    if ((pCharcode == NULL) || (pYUVbuffer == NULL)) {
        return;
    }

    // Read and overlay the characters one at a time.
    for (m = 0; m < nStrlen;) {

        memset(frontBuffer, 0, sizeof(frontBuffer));

        // Fetch the glyph bitmap from the appropriate font file.
        if (pCharcode[m] & 0x80) { // Chinese character (two bytes)
            qh = pCharcode[m] - 0xa0;
            wh = pCharcode[m + 1] - 0xa0;
            offset = (94 * (qh - 1) + (wh - 1)) * 32;
            fseek(g_fpHZKLIB, offset, SEEK_SET);
            fread(frontBuffer, 32, 1, g_fpHZKLIB);
            m += 2;
            bIsChar = 0;
        } else { // single-byte character
            offset = pCharcode[m] * 32;
            fseek(g_fpASCII, offset, SEEK_SET);
            fread(frontBuffer, 32, 1, g_fpASCII);
            m++;
            bIsChar = 1;
        }

        // Blit: write OsdU/OsdY at each set glyph bit.
        for (j = 0; j < 16; j++) {
            pixelCount = 0;
            for (i = 0; i < 2; i++) {
                for (k = 0; k < 8; k++) {
                    if (((frontBuffer[j * 2 + i] >> (7 - k)) & 0x1) != 0) {
                        pYUVbuffer[((j + iStartYpos) * imageWidth + iStartXpos
                                + pixelCount)] = OsdU;
                        pYUVbuffer[((j + iStartYpos) * imageWidth + iStartXpos
                                + pixelCount) + 1] = OsdY;
                        //pYUVbuffer[((j+iStartYpos)*imageWidth+iStartXpos+pixelCount)+2] = OsdY;
                        //pYUVbuffer[((j+iStartYpos)*imageWidth+iStartXpos+pixelCount)+3] = OsdU;

                        //temp1=imageWidth*imageHeight*5/4+((j+iStartYpos)*imageWidth+iStartXpos+pixelCount)/2;//chroma experiment
                        //temp2=((j+iStartYpos)*imageWidth+iStartXpos+pixelCount)*2;
                        //temp1=imageWidth*imageHeight*5/4+(j+iStartYpos)/2*352+(iStartXpos+pixelCount)/2;
                        //pYUVbuffer[(int)(((j+iStartYpos)*imageWidth+iStartXpos+pixelCount)/4)+imageWidth*imageHeight] = 100;
                        //pYUVbuffer[temp1] = 250;
                        //pYUVbuffer[temp2] = 200;
                        //pYUVbuffer[((j+iStartYpos)*imageWidth+iStartXpos+pixelCount)+2] = OsdV;
                        //buffer[((index+row)*imageWidth+column+pixelCount)*2] = OsdU;
                        //buffer[((index+row)*imageWidth+column+pixelCount)*2+1] = OsdY;
                    }
                    //if (k%2==0){
                    pixelCount++;
                    //}
                }
            }
        }

        // Advance by the glyph width: 32 for Chinese, 16 for ASCII.
        if (bIsChar == 0) {
            iStartXpos += 32;
        } else {
            iStartXpos += 16;
        }

    }
}
750 |
--------------------------------------------------------------------------------
/app/src/main/cpp/osd/Osdchar.h:
--------------------------------------------------------------------------------
#ifndef _OSDCHAR_H_
#define _OSDCHAR_H_

// NOTE(review): the original include directives lost their header names
// (the <...> part was apparently stripped by an export tool). Restored
// to the headers the implementation demonstrably uses: stdio
// (fopen/fseek/fread), string (strlen/memcpy/memset), stdlib, and
// assert — confirm against the original file.
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>

// Error codes
#define ERR_NONE 0x00
#define ERR_OBJEXIST 0x01
#define ERR_OBJNOTEXIST 0x02
#define ERR_FILEOPENFAIL 0x03
#define ERR_PARA 0x04

// Caption action (animation) types
typedef enum {
    e_SCROLL_LEFT,
    e_SCROLL_RIGHT,
    e_SCROLL_UP,
    e_SCROLL_DOWN,
    e_MOVE_RAND,
    e_STATIC,
    e_SLOWHIDE,
    e_SPARK
} E_ACTIONTYPE;
// Action value semantics:
/*
 e_SCROLL_*: actionValue1 = frame interval between moves,
             actionValue2 = step in pixels per move
 e_STATIC:   no action values
 e_SLOWHIDE: dwell time before fading
 e_SPARK:    blink interval
 */
// Attributes of an overlaid string
typedef struct _O_STRINGATTR {
    char osdR, osdG, osdB; // glyph color
    char font; // font face
    char sizeW, sizeH; // glyph cell size; 16x16 or 32x32 only
    E_ACTIONTYPE eActionType; // action type
    int actionValue1; // action parameter 1
    int actionValue2; // action parameter 2

} O_STRINGATTR, *PO_STRINGATTR, *LPO_STRINGATTR;

// An overlaid caption object
typedef struct _O_OBJCHAR {
    int x, y, w, h; // overlay position and size
    char szStr[128]; // text to overlay
    O_STRINGATTR oAttrChar; // attributes

} O_OBJCHAR, *PO_OBJCHAR, *LPO_OBJCHAR;

// Resolution of the image source to overlay onto
#define IMAGEWIDTH 1280
#define IMAGEHEIGHT 720
// Public API
extern char OSD_CreateObjCharObj(int strID, char *szStr,
        O_STRINGATTR OAttrCharObj);
extern char OSD_DeleteObjCharObj(int strID);
extern char OSD_SetContentCharObj(int strID, char *szStr);
extern char OSD_SetPositionCharObj(int strID, int x, int y);
extern char OSD_SetAttrCharObj(int strID, O_STRINGATTR OAttrCharObj);
extern void OSD_FeedFrameYUV420(char* pYUV420Frame, int iSrcWidth,
        int iSrcHeight);
extern char OSD_Init(char *szPathHZK, char *szPathASCII);
extern void OSD_Release();
#endif
--------------------------------------------------------------------------------
/app/src/main/java/com/jiangdg/demo/MainActivity.java:
--------------------------------------------------------------------------------
1 | package com.jiangdg.demo;
2 |
3 | import android.app.Activity;
4 | import android.hardware.Camera;
5 | import android.os.Bundle;
6 | import android.view.SurfaceHolder;
7 | import android.view.SurfaceView;
8 | import android.view.View;
9 | import android.view.View.OnClickListener;
10 | import android.widget.Button;
11 | import android.widget.Toast;
12 |
13 | import com.jiangdg.yuvosd.R;
14 | import com.jiangdg.demo.utils.CameraUtils;
15 | import com.jiangdg.demo.utils.MediaMuxerUtils;
16 | import com.jiangdg.demo.utils.SensorAccelerometer;
17 | import com.jiangdg.demo.utils.CameraUtils.OnCameraFocusResult;
18 | import com.jiangdg.demo.utils.SensorAccelerometer.OnSensorChangedResult;
19 |
/**
 * Demo entry activity: previews the camera into a SurfaceView, forwards
 * each preview frame to the muxer for recording, and offers buttons to
 * start/stop recording and switch cameras. Tap-to-focus is wired to the
 * SurfaceView; an accelerometer listener re-focuses when the phone
 * stops moving.
 */
public class MainActivity extends Activity implements SurfaceHolder.Callback {
    private Button mBtnRecord;
    private Button mBtnSwitchCam;
    private SurfaceView mSurfaceView;
    private CameraUtils mCamManager;
    private boolean isRecording;
    // Accelerometer sensor helper (shared singleton).
    private static SensorAccelerometer mSensorAccelerometer;
    // NOTE(review): appears unused within this class — confirm before removing.
    byte[] nv21 = new byte[CameraUtils.PREVIEW_WIDTH * CameraUtils.PREVIEW_HEIGHT * 3 / 2];

    // Preview callback: recycle the buffer back to the camera and hand
    // the frame to the muxer's video queue.
    private CameraUtils.OnPreviewFrameResult mPreviewListener = new CameraUtils.OnPreviewFrameResult() {
        @Override
        public void onPreviewResult(byte[] data, Camera camera) {
            mCamManager.getCameraIntance().addCallbackBuffer(data);
            MediaMuxerUtils.getMuxerRunnableInstance().addVideoFrameData(data);

        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mCamManager = CameraUtils.getCamManagerInstance(MainActivity.this);
        // Obtain the accelerometer singleton.
        mSensorAccelerometer = SensorAccelerometer.getSensorInstance();

        mSurfaceView = (SurfaceView) findViewById(R.id.main_record_surface);
        mSurfaceView.getHolder().addCallback(this);
        // Tap on the preview triggers a focus attempt.
        mSurfaceView.setOnClickListener(new OnClickListener() {

            @Override
            public void onClick(View v) {
                mCamManager.cameraFocus(new OnCameraFocusResult() {
                    @Override
                    public void onFocusResult(boolean result) {
                        if (result) {
                            Toast.makeText(MainActivity.this, "对焦成功", Toast.LENGTH_SHORT).show();
                        }
                    }
                });
            }
        });

        // Toggle recording; button label tracks the state.
        mBtnRecord = (Button) findViewById(R.id.main_record_btn);
        mBtnRecord.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View view) {
                MediaMuxerUtils mMuxerUtils = MediaMuxerUtils.getMuxerRunnableInstance();
                if (!isRecording) {
                    mMuxerUtils.startMuxerThread(mCamManager.getCameraDirection());
                    mBtnRecord.setText("停止录像");
                } else {
                    mMuxerUtils.stopMuxerThread();
                    mBtnRecord.setText("开始录像");
                }
                isRecording = !isRecording;
            }
        });

        // Switch between front/back camera; refused while recording.
        mBtnSwitchCam = (Button) findViewById(R.id.main_switch_camera_btn);
        mBtnSwitchCam.setOnClickListener(new OnClickListener() {

            @Override
            public void onClick(View v) {
                if (isRecording) {
                    Toast.makeText(MainActivity.this, "正在录像,无法切换",
                            Toast.LENGTH_SHORT).show();
                    return;
                }
                if (mCamManager != null) {
                    mCamManager.switchCamera();
                }
            }
        });
    }

    @Override
    public void surfaceCreated(SurfaceHolder surfaceHolder) {
        // Surface is ready: bind it, start the preview and the sensor.
        mCamManager.setSurfaceHolder(surfaceHolder);
        mCamManager.setOnPreviewResult(mPreviewListener);
        mCamManager.createCamera();
        mCamManager.startPreview();
        startSensorAccelerometer();
    }

    @Override
    public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) {

    }

    @Override
    public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
        // Surface gone: tear down the preview, camera and sensor.
        mCamManager.stopPreivew();
        mCamManager.destoryCamera();
        stopSensorAccelerometer();
    }

    private void startSensorAccelerometer() {
        // Start the accelerometer and register the movement listener:
        // when the device stops moving, trigger an auto-focus.
        if (mSensorAccelerometer != null) {
            mSensorAccelerometer.startSensorAccelerometer(MainActivity.this,
                    new OnSensorChangedResult() {
                        @Override
                        public void onStopped() {
                            // Device is steady — request a focus pass.
                            mCamManager.cameraFocus(new OnCameraFocusResult() {
                                @Override
                                public void onFocusResult(boolean reslut) {

                                }
                            });
                        }

                        @Override
                        public void onMoving(int x, int y, int z) {
                            // Log.i(TAG, "手机移动中:x=" + x + ";y=" + y + ";z=" + z);
                        }
                    });
        }
    }

    private void stopSensorAccelerometer() {
        // Release the accelerometer resources.
        if (mSensorAccelerometer == null) {
            return;
        }
        mSensorAccelerometer.stopSensorAccelerometer();
    }
}
150 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jiangdg/demo/runnable/EncoderAudioRunnable.java:
--------------------------------------------------------------------------------
1 | package com.jiangdg.demo.runnable;
2 |
3 | import java.io.IOException;
4 | import java.lang.ref.WeakReference;
5 | import java.nio.ByteBuffer;
6 |
7 | import com.jiangdg.demo.utils.MediaMuxerUtils;
8 |
9 | import android.annotation.SuppressLint;
10 | import android.media.AudioFormat;
11 | import android.media.AudioRecord;
12 | import android.media.MediaCodec;
13 | import android.media.MediaCodecInfo;
14 | import android.media.MediaCodecList;
15 | import android.media.MediaFormat;
16 | import android.media.MediaRecorder;
17 | import android.os.Build;
18 | import android.os.Process;
19 | import android.util.Log;
20 |
/** Captures PCM audio from the microphone with AudioRecord and encodes
 * it to AAC (audio/mp4a-latm) with MediaCodec, handing encoded buffers
 * to the shared MediaMuxerUtils for muxing into the output file.
 * Runs until {@link #exit()} is called.
 * Created by jiangdongguo on 2017/5/6.
 */
public class EncoderAudioRunnable implements Runnable {
    private static final String TAG = "EncoderAudioRunnable";
    private static final String MIME_TYPE = "audio/mp4a-latm";
    // Timeout (us) for dequeueInput/OutputBuffer.
    private static final int TIMES_OUT = 1000;
    private static final int BIT_RATE = 16000;
    private static final int CHANNEL_COUNT = 1;
    private static final int SMAPLE_RATE = 8000;
    private static final int ACC_PROFILE = MediaCodecInfo.CodecProfileLevel.AACObjectLC;
    private static final int BUFFER_SIZE = 1600;
    private static final int AUDIO_BUFFER_SIZE = 1024;
    // Audio capture configuration.
    private static final int channelConfig = AudioFormat.CHANNEL_IN_MONO;
    private static final int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
    private static final int audioSouce = MediaRecorder.AudioSource.MIC;
    private AudioRecord mAudioRecord;
    // Encoder state.
    private boolean isExit = false;
    private boolean isEncoderStarted = false;
    // NOTE(review): raw WeakReference — presumably WeakReference<MediaMuxerUtils>; confirm.
    private WeakReference muxerRunnableRf;
    private MediaCodec mAudioEncoder;
    private long prevPresentationTimes;
    private MediaFormat mediaFormat;

    public EncoderAudioRunnable(WeakReference muxerRunnableRf){
        this.muxerRunnableRf = muxerRunnableRf;
        initMediaCodec();
    }

    private void initMediaCodec() {
        MediaCodecInfo mCodecInfo = selectSupportCodec(MIME_TYPE);
        if(mCodecInfo == null){
            Log.e(TAG,"编码器不支持"+MIME_TYPE+"类型");
            return;
        }
        try{
            mAudioEncoder = MediaCodec.createByCodecName(mCodecInfo.getName());
        }catch(IOException e){
            Log.e(TAG,"创建编码器失败"+e.getMessage());
            e.printStackTrace();
        }
        // Describe the desired output: MIME type, bit rate, sample rate,
        // AAC profile, channel count and max input buffer size.
        mediaFormat = new MediaFormat();
        mediaFormat.setString(MediaFormat.KEY_MIME,MIME_TYPE);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE,BIT_RATE);
        mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE,SMAPLE_RATE);
        mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE,ACC_PROFILE);
        mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT,CHANNEL_COUNT);
        mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE,BUFFER_SIZE);
    }

    @Override
    public void run() {
        // Start capture + encoder once, then pump PCM into the encoder
        // until exit() flips isExit.
        if (!isEncoderStarted) {
            startAudioRecord();
            startCodec();
        }
        while (!isExit) {
            if (mAudioRecord != null) {
                byte[] audioBuf = new byte[AUDIO_BUFFER_SIZE];
                int readBytes = mAudioRecord.read(audioBuf, 0,AUDIO_BUFFER_SIZE);
                if (readBytes > 0) {
                    try {
                        encoderBytes(audioBuf, readBytes);
                    } catch (IllegalStateException e) {
                        // Thrown by dequeueOutputBuffer when the thread is
                        // interrupted and muxing stops mid-call.
                        e.printStackTrace();
                    } catch (NullPointerException e) {
                        // MediaCodec may already be null when the thread is
                        // interrupted and muxing stops mid-call.
                        e.printStackTrace();
                    }
                }
            }
        }
        stopCodec();
        stopAudioRecord();
    }

    /**
     * Feeds one PCM buffer into the encoder and drains all available
     * encoded output, forwarding it to the muxer.
     *
     * @param audioBuf  raw PCM bytes (may be null to signal end-of-stream)
     * @param readBytes number of valid bytes in audioBuf
     */
    @SuppressLint("NewApi")
    private void encoderBytes(byte[] audioBuf,int readBytes){
        ByteBuffer[] inputBuffers = mAudioEncoder.getInputBuffers();
        ByteBuffer[] outputBuffers = mAudioEncoder.getOutputBuffers();
        // Dequeue an input buffer handle; -1 means none available yet.
        int inputBufferIndex = mAudioEncoder.dequeueInputBuffer(TIMES_OUT);
        if(inputBufferIndex >= 0){
            // Bind an empty, writable input buffer to this client.
            ByteBuffer inputBuffer = null;
            if(!isLollipop()){
                inputBuffer = inputBuffers[inputBufferIndex];
            }else{
                inputBuffer = mAudioEncoder.getInputBuffer(inputBufferIndex);
            }
            // Write the raw samples and submit them for encoding.
            if(audioBuf==null || readBytes<=0){
                mAudioEncoder.queueInputBuffer(inputBufferIndex,0,0,getPTSUs(),MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            }else{
                inputBuffer.clear();
                // NOTE(review): puts the whole array even when
                // readBytes < audioBuf.length — confirm this matches the
                // readBytes length queued below.
                inputBuffer.put(audioBuf);
                mAudioEncoder.queueInputBuffer(inputBufferIndex,0,readBytes,getPTSUs(),0);
            }
        }

        // Dequeue encoded output; -1 means none available.
        // mBufferInfo describes the encoded chunk; TIMES_OUT is the wait.
        MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
        int outputBufferIndex = -1;
        do{
            outputBufferIndex = mAudioEncoder.dequeueOutputBuffer(mBufferInfo,TIMES_OUT);
            if(outputBufferIndex == MediaCodec. INFO_TRY_AGAIN_LATER){
                Log.i(TAG,"获得编码器输出缓存区超时");
            }else if(outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED){
                // On API < 21 the output buffer array must be re-fetched;
                // on API >= 21 INFO_OUTPUT_BUFFERS_CHANGED needs no handling.
                if(!isLollipop()){
                    outputBuffers = mAudioEncoder.getOutputBuffers();
                }
            }else if(outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
                // Output format is known now (happens once, before data):
                // register the audio track with the muxer so it can start
                // once both tracks are present (keeps A/V in sync).
                MediaFormat newFormat = mAudioEncoder.getOutputFormat();
                MediaMuxerUtils mMuxerUtils = muxerRunnableRf.get();
                if(mMuxerUtils != null){
                    mMuxerUtils.setMediaFormat(MediaMuxerUtils.TRACK_AUDIO,newFormat);
                }
                Log.i(TAG,"编码器输出缓存区格式改变,添加视频轨道到混合器");
            }else{
                // BUFFER_FLAG_CODEC_CONFIG buffers carry codec config,
                // not media data — drop them by zeroing the size.
                if((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0){
                    Log.i(TAG,"编码数据被消费,BufferInfo的size属性置0");
                    mBufferInfo.size = 0;
                }
                // End-of-stream flag: leave the drain loop.
                if((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0){
                    Log.i(TAG,"数据流结束,退出循环");
                    break;
                }
                // Fetch the read-only output buffer with the encoded data.
                ByteBuffer outputBuffer = null;
                if(!isLollipop()){
                    outputBuffer = outputBuffers[outputBufferIndex];
                }else{
                    outputBuffer = mAudioEncoder.getOutputBuffer(outputBufferIndex);
                }
                if(mBufferInfo.size != 0){
                    // A null buffer here is a programming error.
                    if(outputBuffer == null){
                        throw new RuntimeException("encodecOutputBuffer"+outputBufferIndex+"was null");
                    }
                    // On API <= 19 the ByteBuffer position/limit must be
                    // set from BufferInfo, otherwise the data is garbled.
                    if(isKITKAT()){
                        outputBuffer.position(mBufferInfo.offset);
                        outputBuffer.limit(mBufferInfo.offset+mBufferInfo.size);
                    }
                    // Hand the encoded chunk to the muxer.
                    MediaMuxerUtils mMuxerUtils = muxerRunnableRf.get();
                    mBufferInfo.presentationTimeUs = getPTSUs();
                    if(mMuxerUtils != null && mMuxerUtils.isMuxerStarted()){
                        Log.d(TAG,"------混合音频数据-------");
                        mMuxerUtils.addMuxerData(new MediaMuxerUtils.MuxerData(MediaMuxerUtils.TRACK_AUDIO,outputBuffer,mBufferInfo));
                        prevPresentationTimes = mBufferInfo.presentationTimeUs;
                    }
                }
                // Done with this output buffer — release it to the codec.
                mAudioEncoder.releaseOutputBuffer(outputBufferIndex,false);
            }
        }while (outputBufferIndex >= 0);
    }

    // Configure and start the encoder.
    private void startCodec(){
        isExit = false;
        if(mAudioEncoder != null){
            mAudioEncoder.configure(mediaFormat,null,null,MediaCodec.CONFIGURE_FLAG_ENCODE);
            mAudioEncoder.start();
            isEncoderStarted = true;
        }
    }

    // Stop and release the encoder.
    private void stopCodec(){
        if(mAudioEncoder != null){
            mAudioEncoder.stop();
            mAudioEncoder.release();
            mAudioEncoder = null;
        }
        isEncoderStarted = false;
    }

    private void startAudioRecord(){
        // Compute the minimum capture buffer size for the configuration,
        // with a floor of 1600 bytes.
        int bufferSizeInBytes = AudioRecord.getMinBufferSize(SMAPLE_RATE,channelConfig,audioFormat);
        if(bufferSizeInBytes < 1600){
            bufferSizeInBytes = 1600;
        }
        Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO);
        mAudioRecord = new AudioRecord(audioSouce,SMAPLE_RATE,channelConfig,audioFormat,bufferSizeInBytes);
        // Start capturing.
        mAudioRecord.startRecording();
    }

    public void stopAudioRecord(){
        if(mAudioRecord != null){
            mAudioRecord.stop();
            mAudioRecord.release();
            mAudioRecord = null;
        }
    }

    // Request the run() loop to finish; teardown happens on the worker thread.
    public void exit(){
        isExit = true;
    }

    /**
     * Iterates over all codecs and returns the first encoder that
     * supports the given MIME type, or null if none does.
     */
    private MediaCodecInfo selectSupportCodec(String mimeType){
        int numCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < numCodecs; i++) {
            MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
            // Skip decoders.
            if (!codecInfo.isEncoder()) {
                continue;
            }
            // Encoder found — check whether it supports the MIME type.
            String[] types = codecInfo.getSupportedTypes();
            for (int j = 0; j < types.length; j++) {
                if (types[j].equalsIgnoreCase(mimeType)) {
                    return codecInfo;
                }
            }
        }
        return null;
    }

    private boolean isLollipop(){
        // API >= 21
        return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP;
    }

    private boolean isKITKAT(){
        // API <= 19
        return Build.VERSION.SDK_INT <= Build.VERSION_CODES.KITKAT;
    }

    // Monotonic-ish presentation timestamp in microseconds.
    // NOTE(review): (prev - result) + result simplifies to
    // prevPresentationTimes, so a lagging clock just repeats the last
    // timestamp — confirm whether prev + 1 (strictly increasing) was meant.
    private long getPTSUs(){
        long result = System.nanoTime()/1000;
        if(result < prevPresentationTimes){
            result = (prevPresentationTimes - result ) + result;
        }
        return result;
    }
}
275 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jiangdg/demo/runnable/EncoderVideoRunnable.java:
--------------------------------------------------------------------------------
1 | package com.jiangdg.demo.runnable;
2 |
3 | import java.io.BufferedOutputStream;
4 | import java.io.File;
5 | import java.io.FileOutputStream;
6 | import java.io.IOException;
7 | import java.lang.ref.WeakReference;
8 | import java.nio.ByteBuffer;
9 | import java.text.SimpleDateFormat;
10 | import java.util.Date;
11 | import java.util.concurrent.ArrayBlockingQueue;
12 | import java.util.concurrent.BlockingQueue;
13 |
14 | import android.annotation.SuppressLint;
15 | import android.media.MediaCodec;
16 | import android.media.MediaCodecInfo;
17 | import android.media.MediaCodecList;
18 | import android.media.MediaFormat;
19 | import android.os.Build;
20 | import android.os.Environment;
21 | import android.util.Log;
22 |
23 | import com.jiangdg.demo.utils.CameraUtils;
24 | import com.jiangdg.demo.utils.MediaMuxerUtils;
25 | import com.jiangdg.natives.YuvUtils;
26 |
27 | /**
28 | * 对YUV视频流进行编码
29 | * Created by jiangdongguo on 2017/5/6.
30 | */
31 |
32 | public class EncoderVideoRunnable implements Runnable {
33 | private static final String TAG = "EncoderVideoRunnable";
34 | private static final String MIME_TYPE = "video/avc";
35 | // 帧率
36 | private static final int FRAME_RATE = 20;
37 | // 间隔1s插入一帧关键帧
38 | private static final int FRAME_INTERVAL = 1;
39 | // 绑定编码器缓存区超时时间为10s
40 | private static final int TIMES_OUT = 10000;
41 | // 码率
42 | private static final int BIT_RATE = CameraUtils.PREVIEW_WIDTH * CameraUtils.PREVIEW_HEIGHT * 3 * 8 * FRAME_RATE / 256;
43 | // 默认水平
44 | private boolean isPhoneHorizontal = true;
45 |
46 | // MP4混合器
47 | private WeakReference muxerRunnableRf;
48 | // 硬编码器
49 | private MediaCodec mVideoEncodec;
50 | private int mColorFormat;
51 | private boolean isExit = false;
52 | private boolean isEncoderStart = false;
53 | private boolean isAddTimeOsd = true;
54 |
55 | private BlockingQueue frameBytes;
56 | // private byte[] mFrameData;
57 | private boolean isFrontCamera;
58 | private long prevPresentationTimes;
59 | private MediaFormat mFormat;
60 | private static String path = Environment.getExternalStorageDirectory().getAbsolutePath() + "/test1.h264";
61 | private BufferedOutputStream outputStream;
62 | private boolean isAddKeyFrame = false;
63 |
// Builds the encoder pipeline: a bounded frame queue (drops frames when full,
// see addData) and the H.264 MediaFormat/MediaCodec created by initMediaFormat().
64 | public EncoderVideoRunnable(WeakReference muxerRunnableRf) {
65 | this.muxerRunnableRf = muxerRunnableRf;
66 | frameBytes = new ArrayBlockingQueue(5);
67 | // mFrameData = new byte[CameraUtils.PREVIEW_WIDTH * CameraUtils.PREVIEW_HEIGHT *3 /2];
68 | initMediaFormat();
69 | }
70 |
// Selects an AVC encoder + color format and prepares the MediaFormat.
// NOTE(review): if selectSupportCodec returns null this method returns early and
// mFormat stays null, so a later configure() would throw — confirm callers cope.
71 | private void initMediaFormat() {
72 | try {
73 | MediaCodecInfo mCodecInfo = selectSupportCodec(MIME_TYPE);
74 | if (mCodecInfo == null) {
75 | Log.d(TAG, "匹配编码器失败" + MIME_TYPE);
76 | return;
77 | }
78 | mColorFormat = selectSupportColorFormat(mCodecInfo, MIME_TYPE);
79 | mVideoEncodec = MediaCodec.createByCodecName(mCodecInfo.getName());
80 | } catch (IOException e) {
81 | Log.e(TAG, "创建编码器失败" + e.getMessage());
82 | e.printStackTrace();
83 | }
// Portrait capture swaps width/height; landscape uses them as-is.
84 | if (!isPhoneHorizontal) {
85 | mFormat = MediaFormat.createVideoFormat(MIME_TYPE, CameraUtils.PREVIEW_HEIGHT, CameraUtils.PREVIEW_WIDTH);
86 | } else {
87 | mFormat = MediaFormat.createVideoFormat(MIME_TYPE, CameraUtils.PREVIEW_WIDTH, CameraUtils.PREVIEW_HEIGHT);
88 | }
89 | mFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
90 | mFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
91 | mFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, mColorFormat); // color format chosen above
92 | mFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, FRAME_INTERVAL);
93 | }
94 |
// Configures and starts the video encoder, clears any stale queued frames,
// and opens the raw .h264 dump file.
95 | private void startCodec() {
96 | isExit = false;
97 | frameBytes.clear();
98 | if (mVideoEncodec != null) {
99 | mVideoEncodec.configure(mFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
100 | mVideoEncodec.start();
101 | isEncoderStart = true;
102 | Log.d(TAG, "配置、启动视频编码器");
103 | }
// Create the file that stores the encoded stream
104 | createfile();
105 | }
107 |
// Stops and releases the video encoder, drops queued frames, and closes the
// raw .h264 dump stream. Idempotent with respect to the encoder.
108 | private void stopCodec() {
109 | if (mVideoEncodec != null) {
110 | mVideoEncodec.stop();
111 | mVideoEncodec.release();
112 | mVideoEncodec = null;
113 | isAddKeyFrame = false;
114 | isEncoderStart = false;
115 | Log.d(TAG, "关闭视频编码器");
116 | }
117 | frameBytes.clear();
// FIX: outputStream may be null when createfile() failed (or startCodec never
// ran) — the original dereferenced it unconditionally and threw an NPE here.
118 | try {
if (outputStream != null) {
119 | outputStream.flush();
120 | outputStream.close();
outputStream = null;
}
121 | } catch (IOException e) {
122 | e.printStackTrace();
123 | }
124 | }
125 |
// Enqueues one camera frame for encoding. Uses offer() so a full queue drops
// the frame instead of blocking the camera callback.
126 | public void addData(byte[] yuvData) {
127 | if (frameBytes != null) {
128 | // put() must not be used here: it would block the producer forever and
129 | // keep writing into a stopping pipeline, preventing thread shutdown
130 | frameBytes.offer(yuvData);
131 | }
132 | }
133 |
// Encoder thread body: lazily starts the codec, then drains the frame queue
// until exit() is called, finally tearing the codec down.
// NOTE(review): frameBytes.take() blocks when the queue is empty, so after
// exit() this loop only terminates once one more frame arrives (or the thread
// is interrupted) — confirm the shutdown path relies on that.
134 | @Override
135 | public void run() {
136 | if (!isEncoderStart) {
137 | try {
138 | Thread.sleep(200);
139 | } catch (InterruptedException e) {
140 | e.printStackTrace();
141 | }
142 | startCodec();
143 | }
144 | // Block waiting while the encoder has not started or no frame is available
145 | while (!isExit) {
146 | try {
147 | byte[] bytes = frameBytes.take();
148 | encoderBytes(bytes);
149 | } catch (IllegalStateException e) {
150 | // Thrown by dequeueOutputBuffer when the codec is stopped mid-flight
151 | e.printStackTrace();
152 | } catch (NullPointerException e) {
153 | // Thrown when MediaCodec was nulled out during interruption/stop
154 | e.printStackTrace();
155 | } catch (InterruptedException e1) {
156 | e1.printStackTrace();
157 | }
158 | }
159 | stopCodec();
160 | }
161 |
// Encodes one NV21 camera frame: converts it in place to the encoder's color
// format, stamps a time watermark, feeds it to MediaCodec, then drains every
// available output buffer into the MP4 muxer (keyframes first; non-I frames
// only after the first keyframe has been written).
162 | @SuppressLint({"NewApi", "WrongConstant"})
163 | private void encoderBytes(byte[] rawFrame) {
164 | ByteBuffer[] inputBuffers = mVideoEncodec.getInputBuffers();
165 | ByteBuffer[] outputBuffers = mVideoEncodec.getOutputBuffers();
166 | // Front camera would need 270° rotation, back camera 90° (kept for reference)
167 | int mWidth = CameraUtils.PREVIEW_WIDTH;
168 | int mHeight = CameraUtils.PREVIEW_HEIGHT;
169 | byte[] rotateNv21 = new byte[mWidth * mHeight * 3 / 2];
170 | // if(isFrontCamera()){
171 | // 前置旋转270度(即竖屏采集,此时isPhoneHorizontal=false)
172 | // YuvUtils.Yuv420spRotateOfFront(rawFrame, rotateNv21, mWidth, mHeight, 270);
173 | // }else{
174 | // 后置旋转90度(即竖直采集,此时isPhoneHorizontal=false)
175 | // YuvUtils.YUV420spRotateOfBack(rawFrame, rotateNv21, mWidth, mHeight, 90);
176 | // 后置旋转270度(即倒立采集,此时isPhoneHorizontal=false)
177 | // YuvUtils.YUV420spRotateOfBack(rawFrame, rotateNv21, mWidth, mHeight, 270);
178 | // 后置旋转180度(即反向横屏采集,此时isPhoneHorizontal=true)
179 | // YuvUtils.YUV420spRotateOfBack(rawFrame, rotateNv21, mWidth, mHeight, 180);
180 | // 如果是正向横屏,则无需旋转YUV,此时isPhoneHorizontal=true
181 | // }
182 | // 将NV21转换为编码器支持的颜色格式I420,添加时间水印
183 | // if(isAddTimeOsd){
184 | // YuvUtils.AddYuvOsd(rotateNv21, mWidth, mHeight, mFrameData,
185 | // new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()),
186 | // mColorFormat,isPhoneHorizontal);
187 | // }else{
188 | // YuvUtils.transferColorFormat(rotateNv21, mWidth, mHeight, mFrameData, mColorFormat);
189 | // }
190 | // Convert NV21 in place to the color layout the encoder expects
191 | if(mColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar) {
192 | YuvUtils.nativeNV21ToYUV420p(rawFrame, mWidth, mHeight);
193 | } else {
194 | YuvUtils.nativeNV21ToYUV420sp(rawFrame,mWidth,mHeight);
195 | }
196 |
197 | // Overlay the current time as an on-screen watermark (native OSD)
198 | YuvUtils.addYuvOsd(rawFrame, mWidth, mHeight, new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()), 100, 100);
199 |
200 | // Get an input buffer handle; -1 means none is available right now
201 | int inputBufferIndex = mVideoEncodec.dequeueInputBuffer(TIMES_OUT);
202 | if (inputBufferIndex >= 0) {
203 | // Bind an empty, writable input buffer
204 | ByteBuffer inputBuffer = null;
205 | if (!isLollipop()) {
206 | inputBuffer = inputBuffers[inputBufferIndex];
207 | } else {
208 | inputBuffer = mVideoEncodec.getInputBuffer(inputBufferIndex);
209 | }
210 | // Copy the raw frame in and submit it to the encoder
211 | inputBuffer.clear();
212 | inputBuffer.put(rawFrame);
213 | mVideoEncodec.queueInputBuffer(inputBufferIndex, 0, rawFrame.length, getPTSUs(), 0);
214 | }
215 |
216 | // Drain output buffers; -1 means no output is currently available.
217 | // mBufferInfo receives the metadata of each encoded chunk.
218 | MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
219 | int outputBufferIndex = -1;
220 | do {
221 | outputBufferIndex = mVideoEncodec.dequeueOutputBuffer(mBufferInfo, TIMES_OUT);
222 | if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
223 | Log.i(TAG, "获得编码器输出缓存区超时");
224 | } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
225 | // Below API 21 the cached output buffer array must be refreshed;
226 | // on API 21+ this event needs no handling
227 | if (!isLollipop()) {
228 | outputBuffers = mVideoEncodec.getOutputBuffers();
229 | }
230 | } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
231 | // Output format is known now (happens once, before any data);
232 | // register the video track with the muxer so A/V can start together
233 | MediaFormat newFormat = mVideoEncodec.getOutputFormat();
234 | MediaMuxerUtils mMuxerUtils = muxerRunnableRf.get();
235 | if (mMuxerUtils != null) {
236 | mMuxerUtils.setMediaFormat(MediaMuxerUtils.TRACK_VIDEO, newFormat);
237 | }
238 | Log.i(TAG, "编码器输出缓存区格式改变,添加视频轨道到混合器");
239 | } else {
240 | // A read-only buffer containing one encoded chunk
241 | ByteBuffer outputBuffer = null;
242 | if (!isLollipop()) {
243 | outputBuffer = outputBuffers[outputBufferIndex];
244 | } else {
245 | outputBuffer = mVideoEncodec.getOutputBuffer(outputBufferIndex);
246 | }
247 | // On API<=19 the buffer position/limit must be set from BufferInfo,
248 | // otherwise the bytes read are misaligned
249 | if (isKITKAT()) {
250 | outputBuffer.position(mBufferInfo.offset);
251 | outputBuffer.limit(mBufferInfo.offset + mBufferInfo.size);
252 | }
253 | // Classify the frame by its H.264 NALU type (low 5 bits of byte 4)
254 | MediaMuxerUtils mMuxerUtils = muxerRunnableRf.get();
255 | int type = outputBuffer.get(4) & 0x1F;
256 | Log.d(TAG, "------还有数据---->" + type);
257 | if (type == 7 || type == 8) {
258 | Log.e(TAG, "------PPS、SPS帧(非图像数据),忽略-------");
259 | mBufferInfo.size = 0;
260 | } else if (type == 5) {
261 | // The first second can appear frozen: the muxer only starts once
262 | // both tracks are added
263 | Log.e(TAG, "------I帧(关键帧)-------");
264 | if (mMuxerUtils != null && mMuxerUtils.isMuxerStarted()) {
265 | mMuxerUtils.addMuxerData(new MediaMuxerUtils.MuxerData(
266 | MediaMuxerUtils.TRACK_VIDEO, outputBuffer,
267 | mBufferInfo));
268 | prevPresentationTimes = mBufferInfo.presentationTimeUs;
269 | isAddKeyFrame = true;
270 | Log.e(TAG, "----------->添加关键帧到混合器");
271 | }
272 | } else {
// Non-I frames are only muxed after at least one keyframe was written,
// so playback always starts from a decodable frame.
273 | if (isAddKeyFrame) {
274 | Log.d(TAG, "------非I帧(type=1),添加到混合器-------");
275 | if (mMuxerUtils != null && mMuxerUtils.isMuxerStarted()) {
276 | mMuxerUtils.addMuxerData(new MediaMuxerUtils.MuxerData(
277 | MediaMuxerUtils.TRACK_VIDEO, outputBuffer,
278 | mBufferInfo));
279 | prevPresentationTimes = mBufferInfo.presentationTimeUs;
280 | Log.d(TAG, "------添加到混合器");
281 | }
282 | }
283 | }
284 | // Done with this chunk; hand the buffer back to the codec
285 | mVideoEncodec.releaseOutputBuffer(outputBufferIndex, false);
286 | }
287 | } while (outputBufferIndex >= 0);
288 | }
289 |
// Signals the encoding loop (run()) to terminate.
290 | public void exit() {
291 | isExit = true;
292 | }
293 |
294 | /**
295 | * Iterates over all installed codecs and returns the first ENCODER that
296 | * supports the given MIME type, or null if none matches.
297 | */
298 | private MediaCodecInfo selectSupportCodec(String mimeType) {
299 | int numCodecs = MediaCodecList.getCodecCount();
300 | for (int i = 0; i < numCodecs; i++) {
301 | MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
302 | // Skip decoders; only encoders are of interest here
303 | if (!codecInfo.isEncoder()) {
304 | continue;
305 | }
306 | // For encoders, check whether the MIME type is supported
307 | String[] types = codecInfo.getSupportedTypes();
308 | for (int j = 0; j < types.length; j++) {
309 | if (types[j].equalsIgnoreCase(mimeType)) {
310 | return codecInfo;
311 | }
312 | }
313 | }
314 | return null;
315 | }
316 |
// Whether frames come from the front-facing camera (set by the muxer).
317 | public boolean isFrontCamera() {
318 | return isFrontCamera;
319 | }
320 |
321 | public void setFrontCamera(boolean isFrontCamera) {
322 | this.isFrontCamera = isFrontCamera;
323 | }
324 |
325 | /**
326 | * Picks the first color format this encoder advertises for the MIME type that
327 | * isCodecRecognizedFormat accepts. Returns 0 (not a valid COLOR_Format*) when
328 | * nothing matches — callers then configure the codec with an invalid format.
329 | */
328 | private int selectSupportColorFormat(MediaCodecInfo mCodecInfo, String mimeType) {
329 | MediaCodecInfo.CodecCapabilities capabilities = mCodecInfo.getCapabilitiesForType(mimeType);
330 | for (int i = 0; i < capabilities.colorFormats.length; i++) {
331 | int colorFormat = capabilities.colorFormats[i];
332 | if (isCodecRecognizedFormat(colorFormat)) {
333 | return colorFormat;
334 | }
335 | }
336 | return 0;
337 | }
338 |
// Only NV12-style semi-planar YUV is accepted; the planar case is
// intentionally disabled (see the commented-out case label).
339 | private boolean isCodecRecognizedFormat(int colorFormat) {
340 | switch (colorFormat) {
341 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
342 | // case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
343 | return true;
344 | default:
345 | return false;
346 | }
347 | }
348 |
// True on API level 21 (Lollipop) and above.
349 | private boolean isLollipop() {
350 | // API>=21
351 | return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP;
352 | }
353 |
// True on API level 19 (KitKat) and below.
354 | private boolean isKITKAT() {
355 | // API<=19
356 | return Build.VERSION.SDK_INT <= Build.VERSION_CODES.KITKAT;
357 | }
358 |
// Returns a presentation timestamp in microseconds that never goes backwards
// relative to the last sample handed to the muxer (prevPresentationTimes).
359 | private long getPTSUs() {
360 | long result = System.nanoTime() / 1000;
361 | if (result < prevPresentationTimes) {
// FIX: the original "(prevPresentationTimes - result) + result" is algebraically
// a no-op that just yields prevPresentationTimes, i.e. a DUPLICATE timestamp,
// which MediaMuxer.writeSampleData may reject. Advance past the last PTS instead.
362 | result = prevPresentationTimes + 1;
363 | }
364 | return result;
365 | }
366 |
// (Re)creates the raw .h264 dump file at `path`, deleting any previous run's
// output, and opens a buffered stream to it.
// NOTE(review): failures are only printed — outputStream stays null on error
// and file.delete()'s return value is ignored; confirm that is acceptable.
367 | private void createfile() {
368 | File file = new File(path);
369 | if (file.exists()) {
370 | file.delete();
371 | }
372 | try {
373 | outputStream = new BufferedOutputStream(new FileOutputStream(file));
374 | } catch (Exception e) {
375 | e.printStackTrace();
376 | }
377 | }
378 |
379 | }
380 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jiangdg/demo/utils/CameraUtils.java:
--------------------------------------------------------------------------------
1 | package com.jiangdg.demo.utils;
2 |
3 | import java.io.IOException;
4 | import java.lang.ref.WeakReference;
5 | import java.util.Iterator;
6 | import java.util.List;
7 |
8 | import android.app.Activity;
9 | import android.content.Context;
10 | import android.graphics.ImageFormat;
11 | import android.hardware.Camera;
12 | import android.hardware.Camera.AutoFocusCallback;
13 | import android.hardware.Camera.CameraInfo;
14 | import android.hardware.Camera.PreviewCallback;
15 | import android.hardware.Camera.Size;
16 | import android.util.Log;
17 | import android.view.Surface;
18 | import android.view.SurfaceHolder;
19 |
20 | /** Camera操作封装类
21 | * Created by jiangdongguo on 2017/5/6.
22 | */
23 | public class CameraUtils {
24 | private static final String TAG = "CameraManager";
25 | public static int PREVIEW_WIDTH = 640;
26 | public static int PREVIEW_HEIGHT = 480;
27 | public static boolean isUsingYv12 = false;
28 |
29 | private Camera mCamera;
30 | private static Context mContext;
31 | private boolean isFrontCamera = false;
32 | private OnPreviewFrameResult mPreviewListener;
33 | private WeakReference mHolderRef;
34 | private static CameraUtils mCameraManager;
35 | private CameraUtils() {}
36 |
37 | public interface OnPreviewFrameResult{
38 | void onPreviewResult(byte[] data, Camera camera);
39 | }
40 |
41 | public interface OnCameraFocusResult{
42 | void onFocusResult(boolean result);
43 | }
44 |
// Lazily creates the singleton and records the (Activity) context used for
// display-rotation lookups. NOTE(review): not thread-safe and the static
// context reference can leak an Activity — confirm callers only use it from
// the UI thread and clear it appropriately.
45 | public static CameraUtils getCamManagerInstance(Context mContext){
46 | CameraUtils.mContext = mContext;
47 | if(mCameraManager == null){
48 | mCameraManager = new CameraUtils();
49 | }
50 | return mCameraManager;
51 | }
52 |
53 | // Forwards each preview frame to the registered OnPreviewFrameResult listener
54 | private PreviewCallback previewCallback = new PreviewCallback() {
55 | private boolean rotate = false;
56 |
57 | @Override
58 | public void onPreviewFrame(byte[] data, Camera camera) {
59 | mPreviewListener.onPreviewResult(data, camera);
60 | }
61 | };
62 |
// Registers the listener that receives raw preview frames.
63 | public void setOnPreviewResult(OnPreviewFrameResult mPreviewListener){
64 | this.mPreviewListener = mPreviewListener;
65 | }
66 |
// Holds the preview surface weakly so the view can be collected when gone.
67 | public void setSurfaceHolder(SurfaceHolder mSurfaceHolder){
68 | if(mHolderRef != null){
69 | mHolderRef.clear();
70 | mHolderRef = null;
71 | }
72 | mHolderRef = new WeakReference(mSurfaceHolder);
73 | }
74 |
// Attaches the preview surface, starts previewing (retrying once via a full
// camera restart on failure), kicks off autofocus and installs the buffered
// preview callback sized from the actual preview format/size.
// NOTE(review): the failure path calls startPreview() recursively — if the
// camera keeps failing this recurses without bound; confirm intended.
75 | public void startPreview() {
76 | if (mCamera == null) {
77 | return;
78 | }
79 | // Attach the preview surface
80 | try {
81 | Log.i(TAG, "CameraManager-->开始相机预览");
82 | mCamera.setPreviewDisplay(mHolderRef.get());
83 | } catch (IOException e) {
84 | e.printStackTrace();
85 | }
86 | // Start the camera preview
87 | try {
88 | mCamera.startPreview();
89 | } catch (RuntimeException e){
90 | Log.i(TAG, "相机预览失败,重新启动Camera.");
91 | stopPreivew();
92 | destoryCamera();
93 | createCamera();
94 | startPreview();
95 | }
96 | // Trigger a one-shot autofocus
97 | mCamera.autoFocus(null);
98 | // Install a preview callback with a buffer sized for one frame
99 | int previewFormat = mCamera.getParameters().getPreviewFormat();
100 | Size previewSize = mCamera.getParameters().getPreviewSize();
101 | int size = previewSize.width * previewSize.height * ImageFormat.getBitsPerPixel(previewFormat) / 8;
102 | mCamera.addCallbackBuffer(new byte[size]);
103 | mCamera.setPreviewCallbackWithBuffer(previewCallback);
104 | }
105 |
// Detaches the surface and callback and stops the preview; no-op if the
// camera is not open. (Name keeps the original's "Preivew" spelling since
// external callers depend on it.)
106 | public void stopPreivew(){
107 | if(mCamera==null){
108 | return;
109 | }
110 | try {
111 | mCamera.setPreviewDisplay(null);
112 | mCamera.setPreviewCallbackWithBuffer(null);
113 | mCamera.stopPreview();
114 | Log.i(TAG, "CameraManager-->停止相机预览");
115 | } catch (IOException e) {
116 | e.printStackTrace();
117 | }
118 | }
119 |
// Opens the camera (front or back per isFrontCamera) and applies parameters.
120 | public void createCamera(){
121 | // Create the Camera
122 | openCamera();
123 | setCamParameters();
124 | }
125 |
// Releases any previous camera, then opens the requested facing. Failures are
// logged and leave mCamera null.
126 | private void openCamera() {
127 | if(mCamera != null){
128 | stopPreivew();
129 | destoryCamera();
130 | }
131 | // Open the front-facing camera
132 | if(isFrontCamera ){
133 | CameraInfo cameraInfo = new CameraInfo();
134 | int camNums = Camera.getNumberOfCameras();
135 | for (int i = 0; i < camNums; i++) {
136 | Camera.getCameraInfo(i, cameraInfo);
137 | if(cameraInfo.facing == CameraInfo.CAMERA_FACING_FRONT){
138 | try {
139 | mCamera = Camera.open(i);
140 | Log.i(TAG, "CameraManager-->创建Camera对象,开启前置摄像头");
141 | break;
142 | } catch (Exception e) {
143 | Log.d(TAG, "打开前置摄像头失败:"+e.getMessage());
144 | }
145 | }
146 | }
147 | }else{
148 | try {
149 | mCamera = Camera.open();
150 | Log.i(TAG, "CameraManager-->创建Camera对象,开启后置摄像头");
151 | } catch (Exception e) {
152 | Log.d(TAG, "打开后置摄像头失败:"+e.getMessage());
153 | }
154 | }
155 | }
156 |
// Releases the camera hardware; idempotent.
157 | public void destoryCamera() {
158 | if(mCamera==null){
159 | return;
160 | }
161 | mCamera.release();
162 | mCamera = null;
163 | Log.i(TAG, "CameraManager-->释放相机资源");
164 | }
165 |
// Applies preview format (NV21 or YV12), autofocus mode, preview size
// (falling back to the first supported size when the requested one isn't),
// FPS range and display rotation to the open camera.
166 | private void setCamParameters() {
167 | if(mCamera == null)
168 | return;
169 | Camera.Parameters params = mCamera.getParameters();
170 | if(isUsingYv12){
171 | params.setPreviewFormat(ImageFormat.YV12);
172 | }else{
173 | params.setPreviewFormat(ImageFormat.NV21);
174 | }
175 | // Enable autofocus when supported
176 | List focusModes = params.getSupportedFocusModes();
177 | if(isSupportFocusAuto(focusModes)){
178 | params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
179 | }
180 | // Choose the preview resolution; fall back when unsupported
181 | List previewSizes = params.getSupportedPreviewSizes();
182 | if(!isSupportPreviewSize(previewSizes)){
183 | PREVIEW_WIDTH = previewSizes.get(0).width;
184 | PREVIEW_HEIGHT = previewSizes.get(0).height;
185 | }
186 | params.setPreviewSize(PREVIEW_WIDTH,PREVIEW_HEIGHT);
187 | // Use the maximum supported preview FPS range
188 | int[] max = determineMaximumSupportedFramerate(params);
189 | params.setPreviewFpsRange(max[0], max[1]);
190 | // Commit the parameters
191 | mCamera.setParameters(params);
192 | // Rotate the preview to match the display orientation
193 | int rotateDegree = getPreviewRotateDegree();
194 | mCamera.setDisplayOrientation(rotateDegree);
195 | }
196 |
// Runs a one-shot autofocus and reports success/failure to the listener.
197 | public void cameraFocus(final OnCameraFocusResult listener){
198 | if(mCamera != null){
199 | mCamera.autoFocus(new AutoFocusCallback() {
200 | @Override
201 | public void onAutoFocus(boolean success, Camera camera) {
202 | if(listener != null){
203 | listener.onFocusResult(success);
204 | }
205 | }
206 | });
207 | }
208 | }
209 |
// Computes the clockwise rotation to apply to the preview so it appears
// upright, combining display rotation with the sensor orientation (mirrored
// math for the front camera, per the Android setDisplayOrientation recipe).
210 | private int getPreviewRotateDegree(){
211 | int phoneDegree = 0;
212 | int result = 0;
213 | // Current display rotation
214 | int phoneRotate =((Activity)mContext).getWindowManager().getDefaultDisplay().getOrientation();
215 | // Map the rotation constant to degrees
216 | switch (phoneRotate) {
217 | case Surface.ROTATION_0: phoneDegree = 0; break;
218 | case Surface.ROTATION_90: phoneDegree = 90; break;
219 | case Surface.ROTATION_180: phoneDegree = 180; break;
220 | case Surface.ROTATION_270: phoneDegree = 270; break;
221 | }
222 | // Front and back cameras need different compensation
223 | CameraInfo cameraInfo = new CameraInfo();
224 | if(isFrontCamera){
225 | Camera.getCameraInfo(CameraInfo.CAMERA_FACING_FRONT, cameraInfo);
226 | result = (cameraInfo.orientation + phoneDegree) % 360;
227 | result = (360 - result) % 360;
228 | }else{
229 | Camera.getCameraInfo(CameraInfo.CAMERA_FACING_BACK, cameraInfo);
230 | result = (cameraInfo.orientation - phoneDegree +360) % 360;
231 | }
232 | return result;
233 | }
234 |
// Returns true when the camera advertises the "auto" FOCUS mode.
235 | private boolean isSupportFocusAuto(List focusModes){
236 | boolean isSupport = false;
237 | for (String mode:focusModes) {
// FIX: the original compared against Camera.Parameters.FLASH_MODE_AUTO — a
// flash constant, not a focus constant. Both happen to equal "auto", so the
// runtime behavior is unchanged, but the focus constant is the correct one
// and keeps this code safe against future constant-value divergence.
238 | if(mode.equals(Camera.Parameters.FOCUS_MODE_AUTO)){
239 | isSupport = true;
240 | break;
241 | }
242 | }
243 | return isSupport;
244 | }
245 |
// True when the supported-size list contains the requested preview size in
// either orientation (WxH or HxW).
246 | private boolean isSupportPreviewSize(List previewSizes) {
247 | boolean isSupport = false;
248 | for (Size size : previewSizes) {
249 | if ((size.width == PREVIEW_WIDTH && size.height == PREVIEW_HEIGHT)
250 | || (size.width == PREVIEW_HEIGHT && size.height == PREVIEW_WIDTH)) {
251 | isSupport = true;
252 | break;
253 | }
254 | }
255 | return isSupport;
256 | }
257 |
// Toggles between front and back cameras and restarts the preview.
258 | public void switchCamera(){
259 | isFrontCamera = !isFrontCamera;
260 | createCamera();
261 | startPreview();
262 | }
263 |
// Overrides the requested preview size (takes effect on next createCamera()).
264 | public void setPreviewSize(int width, int height) {
265 | PREVIEW_WIDTH = width;
266 | PREVIEW_HEIGHT = height;
267 | }
268 |
// Current preview pixel format, or -1 when the camera is not open.
269 | public int getPreviewFormat(){
270 | if(mCamera == null){
271 | return -1;
272 | }
273 | return mCamera.getParameters().getPreviewFormat();
274 | }
275 |
276 | public Camera getCameraIntance() {
277 | return mCamera;
278 | }
279 |
280 | public SurfaceHolder getSurfaceHolder() {
281 | if(mHolderRef == null){
282 | return null;
283 | }
284 | return mHolderRef.get();
285 | }
286 |
// True when the front camera is selected.
287 | public boolean getCameraDirection() {
288 | return isFrontCamera;
289 | }
290 |
// Picks the supported preview FPS range with the highest maximum (ties broken
// by the higher minimum). Returns {0,0} only if the list is empty.
291 | public static int[] determineMaximumSupportedFramerate(Camera.Parameters parameters) {
292 | int[] maxFps = new int[] { 0, 0 };
293 | List supportedFpsRanges = parameters.getSupportedPreviewFpsRange();
294 | for (Iterator it = supportedFpsRanges.iterator(); it.hasNext();) {
295 | int[] interval = it.next();
296 | if (interval[1] > maxFps[1]|| (interval[0] > maxFps[0] && interval[1] == maxFps[1])) {
297 | maxFps = interval;
298 | }
299 | }
300 | return maxFps;
301 | }
302 | }
303 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jiangdg/demo/utils/MediaMuxerUtils.java:
--------------------------------------------------------------------------------
1 | package com.jiangdg.demo.utils;
2 |
3 | import java.io.File;
4 | import java.lang.ref.WeakReference;
5 | import java.nio.ByteBuffer;
6 | import java.util.Vector;
7 |
8 | import com.jiangdg.demo.runnable.EncoderAudioRunnable;
9 | import com.jiangdg.demo.runnable.EncoderVideoRunnable;
10 |
11 | import android.media.MediaCodec;
12 | import android.media.MediaFormat;
13 | import android.media.MediaMuxer;
14 | import android.os.Environment;
15 | import android.util.Log;
16 | /**MP4混合器
17 | * Created by jiangdongguo on 2017/5/6.
18 | */
19 |
20 | public class MediaMuxerUtils{
21 | private static final String ROOT_PATH = Environment.getExternalStorageDirectory().getAbsolutePath();
22 | private static final String TAG = "MediaMuxerUtils";
23 | public static final int TRACK_VIDEO = 0;
24 | public static final int TRACK_AUDIO = 1;
25 | private boolean isVideoAdded;
26 | private boolean isAudioAdded;
27 | private boolean isMuxerStarted;
28 | private boolean isExit = false;
29 | private int videoTrack = -1;
30 | private int audioTrack = -1;
31 |
32 | private Object lock = new Object();
33 | private Vector mMuxerDatas;
34 | private MediaMuxer mMuxer;
35 | private MediaFormat videoMediaFormat;
36 | private MediaFormat audioMediaFormat;
37 | private EncoderVideoRunnable videoRunnable;
38 | private EncoderAudioRunnable audioRunnable;
39 | private Thread mMuxerThread;
40 | private Thread mVideoThread;
41 | private Thread mAudioThread;
42 | private boolean isFrontCamera;
43 | private static MediaMuxerUtils muxerUtils;
44 |
45 | private MediaMuxerUtils(){}
46 |
// Lazily creates the singleton. NOTE(review): not thread-safe; confirm it is
// only called from the UI thread.
47 | public static MediaMuxerUtils getMuxerRunnableInstance(){
48 | if(muxerUtils == null){
49 | muxerUtils = new MediaMuxerUtils();
50 | }
51 | return muxerUtils;
52 | }
53 |
// Creates the MP4 muxer (timestamped file in external storage), the shared
// data queue, and starts the audio and video encoder threads.
54 | private void initMuxer(){
55 | try {
56 | mMuxer = new MediaMuxer(ROOT_PATH + File.separator
57 | + System.currentTimeMillis() + ".mp4",
58 | MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
59 | } catch (Exception e) {
60 | e.printStackTrace();
61 | }
62 | mMuxerDatas = new Vector<>();
63 | videoRunnable = new EncoderVideoRunnable(new WeakReference<>(this));
64 | audioRunnable = new EncoderAudioRunnable(new WeakReference<>(this));
65 | mVideoThread = new Thread(videoRunnable);
66 | mAudioThread = new Thread(audioRunnable);
67 | videoRunnable.setFrontCamera(isFrontCamera);
68 | mAudioThread.start();
69 | mVideoThread.start();
70 | isExit = false;
71 | }
72 |
// Muxer thread: sets up the pipeline, then drains mMuxerDatas into the
// MediaMuxer until exit() flips isExit; waits on `lock` whenever the muxer
// has not started or the queue is empty.
// NOTE(review): isExit and mMuxerDatas are checked outside the lock, so a
// notify can race a wait — confirm the wake-on-exit path is reliable.
73 | class MediaMuxerRunnable implements Runnable{
74 | @Override
75 | public void run() {
76 | initMuxer();
77 | while (!isExit){
78 | // Block until the muxer has started and data is queued
79 | if(isMuxerStarted){
80 | // Drain one queued sample into the muxer
81 | if(mMuxerDatas.isEmpty()){
82 | Log.w(TAG, "run--->混合器没有数据,阻塞线程等待");
83 | synchronized (lock){
84 | try{
85 | lock.wait();
86 | }catch(Exception e){
87 | e.printStackTrace();
88 | }
89 | }
90 | }else{
91 | MuxerData data = mMuxerDatas.remove(0);
92 | if(data != null){
93 | int track = 0;
94 | try{
95 | if(data.trackIndex == TRACK_VIDEO){
96 | track = videoTrack;
97 | Log.d(TAG,"---写入视频数据---");
98 | }else if(data.trackIndex == TRACK_AUDIO){
99 | Log.d(TAG,"---写入音频数据---");
100 | track = audioTrack;
101 | }
102 | mMuxer.writeSampleData(track,data.byteBuf,data.bufferInfo);
103 | }catch(Exception e){
104 | Log.e(TAG,"写入数据到混合器失败,track="+track);
105 | e.printStackTrace();
106 | }
107 | }
108 | }
109 | }else{
110 | Log.w(TAG, "run--->混合器没有启动,阻塞线程等待");
111 | synchronized (lock){
112 | try{
113 | lock.wait();
114 | }catch(Exception e){
115 | e.printStackTrace();
116 | }
117 | }
118 | }
119 | }
120 | stopMuxer();
121 | }
122 | }
123 |
// Starts the MediaMuxer once BOTH tracks have been added (isCanStartMuxer)
// and wakes the muxer thread. Called from setMediaFormat on each track add.
124 | private void startMuxer(){
125 | if(mMuxer == null){
126 | Log.e(TAG,"启动混合器失败,mMuxer=null");
127 | return;
128 | }
129 | if(isCanStartMuxer() && !isMuxerStarted){
130 | mMuxer.start();
131 | isMuxerStarted = true;
132 | synchronized (lock) {
133 | lock.notify();
134 | }
135 | Log.d(TAG,"---启动混合器---");
136 | }
137 | }
138 |
// Stops and releases the muxer and resets all track/format state. Only acts
// when the muxer actually started.
// NOTE(review): MediaMuxer.stop() throws if no samples were ever written —
// that exception is not caught here; confirm the callers tolerate it.
139 | private void stopMuxer(){
140 | if(mMuxer == null){
141 | Log.e(TAG,"停止混合器失败,mMuxer=null");
142 | return;
143 | }
144 | Log.d(TAG,"---停止混合器---");
145 | if(isMuxerStarted){
146 | mMuxer.stop();
147 | mMuxer.release();
148 | videoMediaFormat = null;
149 | audioMediaFormat = null;
150 | isVideoAdded = false;
151 | isAudioAdded = false;
152 | isMuxerStarted = false;
153 | mMuxer = null;
154 | }
155 | }
156 |
157 | // Registers the audio or video track format with the muxer (once per track)
// and starts the muxer when both tracks are present. `index` is TRACK_VIDEO
// or TRACK_AUDIO; repeated calls for an already-added track are ignored.
158 | public void setMediaFormat(int index,MediaFormat meidaFormat){
// FIX: the original guard used `&&`, which only rejects the impossible
// combination (null muxer AND started). With `||` we correctly bail out when
// the muxer is missing (addTrack below would NPE) OR already started
// (addTrack after start() is an illegal state).
159 | if(mMuxer == null || isMuxerStarted){
160 | Log.e(TAG,"添加轨道失败或混合器已经启动,index="+index);
161 | return;
162 | }
163 | if(index == TRACK_VIDEO){
164 | if(videoMediaFormat == null){
165 | videoMediaFormat = meidaFormat;
166 | videoTrack = mMuxer.addTrack(videoMediaFormat);
167 | isVideoAdded = true;
168 | Log.d(TAG,"---添加视频轨道到混合器---");
169 | }
170 | }else if(index == TRACK_AUDIO){
171 | if(audioMediaFormat == null){
172 | audioMediaFormat = meidaFormat;
173 | audioTrack = mMuxer.addTrack(audioMediaFormat);
174 | isAudioAdded = true;
175 | Log.d(TAG,"---添加音频轨道到混合器---");
176 | }
177 | }
178 | startMuxer();
179 | }
180 |
181 | // Queues one encoded sample for the muxer thread and wakes it.
182 | public void addMuxerData(MuxerData data){
183 | if(mMuxerDatas == null){
184 | Log.e(TAG,"添加数据失败");
185 | return;
186 | }
187 | mMuxerDatas.add(data);
188 | // Wake the muxer thread waiting on `lock`
189 | synchronized (lock){
190 | lock.notify();
191 | }
192 | }
193 |
194 | // Hands one raw camera frame to the video encoder thread.
195 | public void addVideoFrameData(byte[] frameData){
196 | if(videoRunnable != null){
197 | videoRunnable.addData(frameData);
198 | }
199 | }
200 |
// Starts the muxer thread (which in turn spawns the encoder threads).
// Only one thread is created until stopMuxerThread() nulls it out.
201 | public void startMuxerThread(boolean isFrontCamera){
202 | Log.d(TAG,"---启动混合器线程---");
203 | this.isFrontCamera = isFrontCamera;
204 | if(mMuxerThread == null){
205 | synchronized (MediaMuxerUtils.this) {
206 | mMuxerThread = new Thread(new MediaMuxerRunnable());
207 | mMuxerThread.start();
208 | }
209 | }
210 | }
211 |
// Stops the encoder threads, signals the muxer thread, and joins it.
212 | public void stopMuxerThread(){
213 | exit();
214 | if(mMuxerThread != null){
215 | try {
216 | mMuxerThread.join();
217 | } catch (InterruptedException e) {
218 | e.printStackTrace();
219 | }
220 | }
221 | mMuxerThread = null;
222 | }
223 |
// Shuts down the recording pipeline: stops and joins the video thread, then
// the audio thread, then flags the muxer loop to exit and wakes it.
// NOTE(review): isExit is only set AFTER both joins; if an encoder thread is
// blocked (e.g. on frameBytes.take()) this method blocks too — verify the
// shutdown ordering against the encoder runnables.
224 | private void exit(){
225 | Log.d(TAG,"---停止混合器(录音、录像)线程---");
226 | // Tear down the video encoder thread
227 | if(videoRunnable != null){
228 | videoRunnable.exit();
229 | }
230 | if(mVideoThread != null){
231 | try {
232 | mVideoThread.join();
233 | } catch (InterruptedException e) {
234 | e.printStackTrace();
235 | }
236 | mVideoThread = null;
237 | }
238 | // Tear down the audio encoder thread
239 | if(audioRunnable != null){
240 | audioRunnable.exit();
241 | }
242 | if(mAudioThread != null){
243 | try {
244 | mAudioThread.join();
245 | } catch (InterruptedException e) {
246 | e.printStackTrace();
247 | }
248 | mAudioThread = null;
249 | }
250 | isExit = true;
251 | synchronized (lock) {
252 | lock.notify();
253 | }
254 | }
255 |
// True once MediaMuxer.start() has been called (and not yet stopped).
256 | public boolean isMuxerStarted(){
257 | return isMuxerStarted;
258 | }
259 |
// True once the video track was added to the muxer.
260 | public boolean isVideoAdded() {
261 | return isVideoAdded;
262 | }
263 |
// True once the audio track was added to the muxer.
264 | public boolean isAudioAdded() {
265 | return isAudioAdded;
266 | }
267 |
// The muxer may start only after BOTH tracks have been added.
// (Was the non-short-circuit bitwise `&`; `&&` is the idiomatic boolean AND —
// identical result here since both operands are side-effect-free booleans.)
268 | private boolean isCanStartMuxer(){
269 | return isVideoAdded && isAudioAdded;
270 | }
271 |
272 | /**
273 | * One encoded sample queued for the muxer: the target track (TRACK_VIDEO or
274 | * TRACK_AUDIO), the encoded bytes, and the codec's BufferInfo metadata.
275 | */
275 | public static class MuxerData {
276 | int trackIndex;
277 | ByteBuffer byteBuf;
278 | MediaCodec.BufferInfo bufferInfo;
279 |
280 | public MuxerData(int trackIndex, ByteBuffer byteBuf, MediaCodec.BufferInfo bufferInfo) {
281 | this.trackIndex = trackIndex;
282 | this.byteBuf = byteBuf;
283 | this.bufferInfo = bufferInfo;
284 | }
285 | }
286 | }
287 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jiangdg/demo/utils/SensorAccelerometer.java:
--------------------------------------------------------------------------------
1 | package com.jiangdg.demo.utils;
2 |
3 | import java.util.Calendar;
4 |
5 | import android.annotation.SuppressLint;
6 | import android.content.Context;
7 | import android.hardware.Sensor;
8 | import android.hardware.SensorEvent;
9 | import android.hardware.SensorEventListener;
10 | import android.hardware.SensorManager;
11 | import android.util.Log;
12 |
13 | /**加速器传感器,监听手机运动状态,比如静止、移动,用于自动对焦
14 | *
15 | * @author Created by jiangdongguo on 2017-3-1下午2:17:40
16 | */
17 | public class SensorAccelerometer implements SensorEventListener {
18 | private int status = -1;
19 | private static final int STATUS_MOVING = 1;
20 | private static final int STATUS_STOP = 2;
21 | private static final String TAG = "SensorAccelerometer";
22 | private SensorManager mSensorManager;
23 | private Sensor mAccelerometer;
24 | private OnSensorChangedResult reslistener;
25 | private static SensorAccelerometer sensorMeter;
26 | private static long STATIC_DELAY_TIME = 1000;
27 | private long laststamp;
28 | private int lastX;
29 | private int lastY;
30 | private int lastZ;
31 | //自动对焦标志,防止连续对焦
32 | private boolean isFocused = false;
33 |
34 | //对外回调结果接口
35 | public interface OnSensorChangedResult{
36 | void onMoving(int x, int y, int z);
37 | void onStopped();
38 | }
39 |
// Private constructor: instances are obtained via getSensorInstance().
40 | private SensorAccelerometer(){}
41 |
// Lazily creates the singleton. NOTE(review): not thread-safe; confirm
// callers use it from a single thread.
42 | public static SensorAccelerometer getSensorInstance(){
43 | if(sensorMeter == null){
44 | sensorMeter = new SensorAccelerometer();
45 | }
46 | return sensorMeter;
47 | }
48 |
// Registers this object as an accelerometer listener and resets the last
// known coordinates; movement/stop events are reported via `reslistener`.
49 | public void startSensorAccelerometer(Context mContext,OnSensorChangedResult reslistener){
50 | // Remember the result listener
51 | this.reslistener = reslistener;
52 | // Obtain the sensor service
53 | mSensorManager = (SensorManager)mContext.getSystemService(Context.SENSOR_SERVICE);
54 | // Register for accelerometer updates
55 | mAccelerometer = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
56 | mSensorManager.registerListener(this, mAccelerometer, SensorManager.SENSOR_DELAY_NORMAL);
57 | lastX = 0;
58 | lastY = 0;
59 | lastZ = 0;
60 | Log.i(TAG, "启动加速传感器");
61 | }
62 |
// Unregisters the accelerometer listener; no-op if never started.
63 | public void stopSensorAccelerometer(){
64 | if(mSensorManager == null){
65 | return;
66 | }
67 | // Stop accelerometer updates
68 | mSensorManager.unregisterListener(this, mAccelerometer);
69 | Log.i(TAG, "停止加速传感器");
70 | }
71 |
72 | @SuppressLint("NewApi")
73 | @Override
74 | public void onSensorChanged(SensorEvent event) {
75 | if(reslistener == null || event.sensor == null){
76 | return;
77 | }
78 | //event.sensor.getStringType().equals(Sensor.STRING_TYPE_ACCELEROMETER)
79 | //部分机型报NoSuchMethod异常
80 | if(event.sensor.getType() == Sensor.TYPE_ACCELEROMETER){
81 | //获得当前运动坐标值,时间戳
82 | int x = (int)event.values[0];
83 | int y = (int)event.values[1];
84 | int z = (int)event.values[2];
85 | long stamp = Calendar.getInstance().getTimeInMillis();
86 | //根据坐标变化值,计算加速度大小
87 | int px = Math.abs(lastX-x);
88 | int py = Math.abs(lastY-y);
89 | int pz = Math.abs(lastZ-z);
90 | double accelerometer = Math.sqrt(px*px+py*py+pz*pz);
91 | // Log.i(TAG, "px="+px+";py="+py+";pz="+pz+";accelerometer="+accelerometer);
92 | //当手机倾斜20度左右或移动4cm时accelerometer值约为1.4
93 | if(accelerometer > 1.4){
94 | isFocused = false;
95 | reslistener.onMoving(x,y,z);
96 | status = STATUS_MOVING;
97 | }else{
98 | //记录静止起止时间,如果静止时间超过800ms,则回调onStopped实现对焦
99 | if(status == STATUS_MOVING){
100 | laststamp = stamp;
101 | }
102 | if((stamp - laststamp> STATIC_DELAY_TIME) && !isFocused){
103 | isFocused = true;
104 | reslistener.onStopped();
105 | }
106 | status = STATUS_STOP;
107 | }
108 | //缓存当前坐标,用于下次计算
109 | lastX = x;
110 | lastY = y;
111 | lastZ = z;
112 | }
113 | }
114 |
115 | @Override
116 | public void onAccuracyChanged(Sensor sensor, int accuracy) {
117 |
118 | }
119 | }
120 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jiangdg/natives/YuvUtils.java:
--------------------------------------------------------------------------------
1 | package com.jiangdg.natives;
2 |
3 | /**
4 | * JNI bindings for YUV frame conversion, rotation and watermarking (libYuvOsd).
5 | * Created by jiangdongguo on 2018/8/18.
6 | */
7 | public class YuvUtils {
8 | public static native int nativeNV21ToYUV420sp(byte[] data,int width, int height);
9 | public static native int nativeNV21ToYUV420p(byte[] data,int width, int height);
10 | public static native int nativeYV12ToNV21(byte[] data,int width, int height);
11 |
12 | // Rear-camera rotation: 90, 180 or 270 degrees
13 | public native static void nativeRotateNV21(byte[] src,byte[] dest,int width, int height,int rotateDegree);
14 |
15 | // Front-camera rotation (mirrored): 270 or 180 degrees
16 | public static native void nativeRotateNV21Flip(byte[] src,byte[] dest,int width, int height,int rotateDegree);
17 |
18 | /** Draw a text watermark (OSD) onto a YUV frame.
19 | * src: image data in any YUV420 sampling format (only the Y plane is used)
20 | * width: image width in pixels
21 | * height: image height in pixels
22 | * osdStr: watermark text
23 | * isHorizontalTake: frame orientation, landscape or portrait
24 | * startX/startY: position where the watermark is drawn */
25 | public static native void addYuvOsd(byte[] src,int width,int height,boolean isHorizontalTake,String osdStr,int startX,int startY);
26 |
27 | static{
28 | System.loadLibrary("YuvOsd");
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
12 |
13 |
25 |
26 |
36 |
37 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jiangdongguo/AndroidYuvOsd/b49a609733e5d98381f63b878f74bc747d6b88e2/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jiangdongguo/AndroidYuvOsd/b49a609733e5d98381f63b878f74bc747d6b88e2/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jiangdongguo/AndroidYuvOsd/b49a609733e5d98381f63b878f74bc747d6b88e2/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jiangdongguo/AndroidYuvOsd/b49a609733e5d98381f63b878f74bc747d6b88e2/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jiangdongguo/AndroidYuvOsd/b49a609733e5d98381f63b878f74bc747d6b88e2/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/values-w820dp/dimens.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 |     <!-- Example customization of dimensions originally defined in res/values/dimens.xml
3 |          (such as screen margins) for screens with more than 820dp of available width. This
4 |          would include 7" and 10" devices in landscape (~960dp and ~1280dp respectively). -->
5 |     <dimen name="activity_horizontal_margin">64dp</dimen>
6 | </resources>
7 |
--------------------------------------------------------------------------------
/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <resources>
3 |     <color name="colorPrimary">#3F51B5</color>
4 |     <color name="colorPrimaryDark">#303F9F</color>
5 |     <color name="colorAccent">#FF4081</color>
6 | </resources>
7 |
--------------------------------------------------------------------------------
/app/src/main/res/values/dimens.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 |     <!-- Default screen margins, per the Android Design guidelines. -->
3 |     <dimen name="activity_horizontal_margin">16dp</dimen>
4 |     <dimen name="activity_vertical_margin">16dp</dimen>
5 | </resources>
6 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 |     <string name="app_name">YuvOsdJiang</string>
3 | </resources>
4 |
--------------------------------------------------------------------------------
/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 |
3 |     <!-- Base application theme. -->
4 |     <style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
5 |         <!-- Customize your theme here. -->
6 |         <item name="colorPrimary">@color/colorPrimary</item>
7 |         <item name="colorPrimaryDark">@color/colorPrimaryDark</item>
8 |         <item name="colorAccent">@color/colorAccent</item>
9 |     </style>
10 |
11 | </resources>
12 |
--------------------------------------------------------------------------------
/app/src/test/java/com/jiangdg/yuvosd/ExampleUnitTest.java:
--------------------------------------------------------------------------------
1 | package com.jiangdg.yuvosd;
2 |
3 | import org.junit.Test;
4 |
5 | import static org.junit.Assert.*;
6 |
7 | /**
8 | * Example local unit test, which will execute on the development machine (host).
9 | *
10 | * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
11 | */
12 | public class ExampleUnitTest {
13 | @Test
14 | public void addition_isCorrect() throws Exception {
15 | assertEquals(4, 2 + 2);
16 | }
17 | }
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 |
3 | buildscript {
4 | repositories {
5 | jcenter() // NOTE(review): JCenter has been read-only since 2021; consider mavenCentral()
6 | }
7 | dependencies {
8 | classpath 'com.android.tools.build:gradle:2.2.0'
9 |
10 | // NOTE: Do not place your application dependencies here; they belong
11 | // in the individual module build.gradle files
12 | }
13 | }
14 |
15 | allprojects {
16 | repositories {
17 | jcenter() // NOTE(review): JCenter has been read-only since 2021; consider mavenCentral()
18 | }
19 | }
20 |
21 | // Deletes the root build directory on `gradle clean`.
22 | task clean(type: Delete) {
23 | delete rootProject.buildDir
24 | }
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 |
3 | # IDE (e.g. Android Studio) users:
4 | # Gradle settings configured through the IDE *will override*
5 | # any settings specified in this file.
6 |
7 | # For more details on how to configure your build environment visit
8 | # http://www.gradle.org/docs/current/userguide/build_environment.html
9 |
10 | # Specifies the JVM arguments used for the daemon process.
11 | # The setting is particularly useful for tweaking memory settings.
12 | org.gradle.jvmargs=-Xmx1536m
13 |
14 | # When configured, Gradle will run in incubating parallel mode.
15 | # This option should only be used with decoupled projects. More details, visit
16 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
17 | # org.gradle.parallel=true
18 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jiangdongguo/AndroidYuvOsd/b49a609733e5d98381f63b878f74bc747d6b88e2/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Mon Dec 28 10:00:20 PST 2015
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-2.14.1-all.zip
7 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
10 | DEFAULT_JVM_OPTS=""
11 |
12 | APP_NAME="Gradle"
13 | APP_BASE_NAME=`basename "$0"`
14 |
15 | # Use the maximum available, or set MAX_FD != -1 to use that value.
16 | MAX_FD="maximum"
17 |
18 | # Print a warning message to stdout.
19 | warn ( ) {
20 | echo "$*"
21 | }
22 |
23 | # Print an error message and exit with status 1.
24 | die ( ) {
25 | echo
26 | echo "$*"
27 | echo
28 | exit 1
29 | }
30 |
31 | # OS specific support (must be 'true' or 'false').
32 | cygwin=false
33 | msys=false
34 | darwin=false
35 | case "`uname`" in
36 | CYGWIN* )
37 | cygwin=true
38 | ;;
39 | Darwin* )
40 | darwin=true
41 | ;;
42 | MINGW* )
43 | msys=true
44 | ;;
45 | esac
46 |
47 | # Attempt to set APP_HOME
48 | # Resolve links: $0 may be a link
49 | PRG="$0"
50 | # Need this for relative symlinks.
51 | while [ -h "$PRG" ] ; do
52 | ls=`ls -ld "$PRG"`
53 | link=`expr "$ls" : '.*-> \(.*\)$'`
54 | if expr "$link" : '/.*' > /dev/null; then
55 | PRG="$link"
56 | else
57 | PRG=`dirname "$PRG"`"/$link"
58 | fi
59 | done
60 | SAVED="`pwd`"
61 | cd "`dirname \"$PRG\"`/" >/dev/null
62 | APP_HOME="`pwd -P`"
63 | cd "$SAVED" >/dev/null
64 |
65 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
66 |
67 | # Determine the Java command to use to start the JVM.
68 | if [ -n "$JAVA_HOME" ] ; then
69 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
70 | # IBM's JDK on AIX uses strange locations for the executables
71 | JAVACMD="$JAVA_HOME/jre/sh/java"
72 | else
73 | JAVACMD="$JAVA_HOME/bin/java"
74 | fi
75 | if [ ! -x "$JAVACMD" ] ; then
76 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
77 |
78 | Please set the JAVA_HOME variable in your environment to match the
79 | location of your Java installation."
80 | fi
81 | else
82 | JAVACMD="java"
83 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
84 |
85 | Please set the JAVA_HOME variable in your environment to match the
86 | location of your Java installation."
87 | fi
88 |
89 | # Increase the maximum file descriptors if we can.
90 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
91 | MAX_FD_LIMIT=`ulimit -H -n`
92 | if [ $? -eq 0 ] ; then
93 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
94 | MAX_FD="$MAX_FD_LIMIT"
95 | fi
96 | ulimit -n $MAX_FD
97 | if [ $? -ne 0 ] ; then
98 | warn "Could not set maximum file descriptor limit: $MAX_FD"
99 | fi
100 | else
101 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
102 | fi
103 | fi
104 |
105 | # For Darwin, add options to specify how the application appears in the dock
106 | if $darwin; then
107 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
108 | fi
109 |
110 | # For Cygwin, switch paths to Windows format before running java
111 | if $cygwin ; then
112 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
113 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
114 | JAVACMD=`cygpath --unix "$JAVACMD"`
115 |
116 | # We build the pattern for arguments to be converted via cygpath
117 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
118 | SEP=""
119 | for dir in $ROOTDIRSRAW ; do
120 | ROOTDIRS="$ROOTDIRS$SEP$dir"
121 | SEP="|"
122 | done
123 | OURCYGPATTERN="(^($ROOTDIRS))"
124 | # Add a user-defined pattern to the cygpath arguments
125 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
126 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
127 | fi
128 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
129 | i=0
130 | for arg in "$@" ; do
131 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
132 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
133 |
134 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
135 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
136 | else
137 | eval `echo args$i`="\"$arg\""
138 | fi
139 | i=$((i+1))
140 | done
141 | case $i in
142 | (0) set -- ;;
143 | (1) set -- "$args0" ;;
144 | (2) set -- "$args0" "$args1" ;;
145 | (3) set -- "$args0" "$args1" "$args2" ;;
146 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
147 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
148 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
149 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
150 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
151 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
152 | esac
153 | fi
154 |
155 | # Split up the JVM_OPTS and GRADLE_OPTS values into an array, following the shell quoting and substitution rules
156 | function splitJvmOpts() {
157 | JVM_OPTS=("$@")
158 | }
159 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
160 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
161 |
162 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
163 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
12 | set DEFAULT_JVM_OPTS=
13 |
14 | set DIRNAME=%~dp0
15 | if "%DIRNAME%" == "" set DIRNAME=.
16 | set APP_BASE_NAME=%~n0
17 | set APP_HOME=%DIRNAME%
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 | if "%@eval[2+2]" == "4" goto 4NT_args
53 |
54 | :win9xME_args
55 | @rem Slurp the command line arguments.
56 | set CMD_LINE_ARGS=
57 | set _SKIP=2
58 |
59 | :win9xME_args_slurp
60 | if "x%~1" == "x" goto execute
61 |
62 | set CMD_LINE_ARGS=%*
63 | goto execute
64 |
65 | :4NT_args
66 | @rem Get arguments from the 4NT Shell from JP Software
67 | set CMD_LINE_ARGS=%$
68 |
69 | :execute
70 | @rem Setup the command line
71 |
72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
73 |
74 | @rem Execute Gradle
75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
76 |
77 | :end
78 | @rem End local scope for the variables with windows NT shell
79 | if "%ERRORLEVEL%"=="0" goto mainEnd
80 |
81 | :fail
82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
83 | rem the _cmd.exe /c_ return code!
84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
85 | exit /b 1
86 |
87 | :mainEnd
88 | if "%OS%"=="Windows_NT" endlocal
89 |
90 | :omega
91 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':app'  // single-module project: the Android application lives in /app
2 |
--------------------------------------------------------------------------------