├── .gitignore ├── .google └── packaging.yaml ├── .idea ├── encodings.xml ├── misc.xml ├── modules.xml ├── runConfigurations.xml └── vcs.xml ├── Application ├── build.gradle ├── src │ └── main │ │ ├── AndroidManifest.xml │ │ ├── java │ │ ├── cc │ │ │ └── rome753 │ │ │ │ └── encodemp4 │ │ │ │ ├── AACEncodeConsumer.java │ │ │ │ ├── EncoderParams.java │ │ │ │ ├── H264EncodeConsumer.java │ │ │ │ ├── MediaMuxerUtil.java │ │ │ │ ├── VideoRecorder.java │ │ │ │ └── YUVTools.java │ │ └── com │ │ │ └── example │ │ │ └── android │ │ │ └── camera2basic │ │ │ ├── AutoFitTextureView.java │ │ │ ├── Camera2BasicFragment.java │ │ │ └── CameraActivity.java │ │ └── res │ │ ├── drawable-hdpi │ │ ├── ic_action_info.png │ │ ├── ic_launcher.png │ │ └── tile.9.png │ │ ├── drawable-mdpi │ │ ├── ic_action_info.png │ │ └── ic_launcher.png │ │ ├── drawable-xhdpi │ │ ├── ic_action_info.png │ │ └── ic_launcher.png │ │ ├── drawable-xxhdpi │ │ ├── ic_action_info.png │ │ └── ic_launcher.png │ │ ├── layout │ │ ├── activity_camera.xml │ │ └── fragment_camera2_basic.xml │ │ ├── values-sw600dp │ │ ├── template-dimens.xml │ │ └── template-styles.xml │ │ ├── values-v11 │ │ └── template-styles.xml │ │ ├── values-v21 │ │ ├── base-colors.xml │ │ └── base-template-styles.xml │ │ └── values │ │ ├── base-strings.xml │ │ ├── colors.xml │ │ ├── strings.xml │ │ ├── styles.xml │ │ ├── template-dimens.xml │ │ └── template-styles.xml └── tests │ ├── AndroidManifest.xml │ └── src │ └── com │ └── example │ └── android │ └── camera2basic │ └── tests │ └── SampleTests.java ├── CONTRIB.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── build.gradle ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── packaging.yaml └── settings.gradle /.gitignore: -------------------------------------------------------------------------------- 1 | # Built application files 2 | *.apk 3 | *.ap_ 4 | 5 | # Files for the ART/Dalvik VM 6 | *.dex 7 | 8 | # Java class files 9 | 
*.class 10 | 11 | # Generated files 12 | bin/ 13 | gen/ 14 | out/ 15 | 16 | # Gradle files 17 | .gradle/ 18 | build/ 19 | 20 | # Local configuration file (sdk path, etc) 21 | local.properties 22 | 23 | # Proguard folder generated by Eclipse 24 | proguard/ 25 | 26 | # Log Files 27 | *.log 28 | 29 | # Android Studio Navigation editor temp files 30 | .navigation/ 31 | 32 | # Android Studio captures folder 33 | captures/ 34 | 35 | # IntelliJ 36 | *.iml 37 | .idea/workspace.xml 38 | .idea/tasks.xml 39 | .idea/gradle.xml 40 | .idea/assetWizardSettings.xml 41 | .idea/dictionaries 42 | .idea/libraries 43 | .idea/caches 44 | 45 | # Keystore files 46 | # Uncomment the following line if you do not want to check your keystore files in. 47 | #*.jks 48 | 49 | # External native build folder generated in Android Studio 2.2 and later 50 | .externalNativeBuild 51 | 52 | # Google Services (e.g. APIs or Firebase) 53 | google-services.json 54 | 55 | # Freeline 56 | freeline.py 57 | freeline/ 58 | freeline_project_description.json 59 | 60 | # fastlane 61 | fastlane/report.xml 62 | fastlane/Preview.html 63 | fastlane/screenshots 64 | fastlane/test_output 65 | fastlane/readme.md 66 | -------------------------------------------------------------------------------- /.google/packaging.yaml: -------------------------------------------------------------------------------- 1 | 2 | # GOOGLE SAMPLE PACKAGING DATA 3 | # 4 | # This file is used by Google as part of our samples packaging process. 5 | # End users may safely ignore this file. It has no relevance to other systems. 
6 | --- 7 | status: PUBLISHED 8 | technologies: [Android] 9 | categories: [Media, Camera, Camera2] 10 | languages: [Java] 11 | solutions: [Mobile] 12 | github: android-Camera2Basic 13 | level: INTERMEDIATE 14 | icon: screenshots/icon-web.png 15 | apiRefs: 16 | - android:android.hardware.camera2.CameraManager 17 | - android:android.hardware.camera2.CameraDevice 18 | - android:android.hardware.camera2.CameraCharacteristics 19 | - android:android.hardware.camera2.CameraCaptureSession 20 | - android:android.hardware.camera2.CaptureRequest 21 | - android:android.hardware.camera2.CaptureResult 22 | - android:android.view.TextureView 23 | license: apache2 24 | -------------------------------------------------------------------------------- /.idea/encodings.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /.idea/misc.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 9 | -------------------------------------------------------------------------------- /.idea/modules.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /.idea/runConfigurations.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 11 | 12 | -------------------------------------------------------------------------------- /.idea/vcs.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /Application/build.gradle: -------------------------------------------------------------------------------- 1 | 2 | buildscript { 3 | repositories { 4 | jcenter() 5 | google() 6 | } 7 | 8 | dependencies { 9 
| classpath 'com.android.tools.build:gradle:3.4.0' 10 | } 11 | } 12 | 13 | apply plugin: 'com.android.application' 14 | 15 | repositories { 16 | jcenter() 17 | google() 18 | } 19 | 20 | dependencies { 21 | compile "com.android.support:support-v4:27.0.2" 22 | compile "com.android.support:support-v13:27.0.2" 23 | compile "com.android.support:cardview-v7:27.0.2" 24 | compile "com.android.support:appcompat-v7:27.0.2" 25 | } 26 | 27 | // The sample build uses multiple directories to 28 | // keep boilerplate and common code separate from 29 | // the main sample code. 30 | List dirs = [ 31 | 'main', // main sample code; look here for the interesting stuff. 32 | 'common', // components that are reused by multiple samples 33 | 'template'] // boilerplate code that is generated by the sample template process 34 | 35 | android { 36 | compileSdkVersion 27 37 | 38 | buildToolsVersion "27.0.2" 39 | 40 | defaultConfig { 41 | applicationId "cc.rome753.encodemp4" 42 | minSdkVersion 21 43 | targetSdkVersion 27 44 | } 45 | 46 | compileOptions { 47 | sourceCompatibility JavaVersion.VERSION_1_7 48 | targetCompatibility JavaVersion.VERSION_1_7 49 | } 50 | 51 | sourceSets { 52 | main { 53 | dirs.each { dir -> 54 | java.srcDirs "src/${dir}/java" 55 | res.srcDirs "src/${dir}/res" 56 | } 57 | } 58 | androidTest.setRoot('tests') 59 | androidTest.java.srcDirs = ['tests/src'] 60 | 61 | } 62 | 63 | } 64 | -------------------------------------------------------------------------------- /Application/src/main/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 2 | 17 | 18 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 32 | 33 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | -------------------------------------------------------------------------------- /Application/src/main/java/cc/rome753/encodemp4/AACEncodeConsumer.java: -------------------------------------------------------------------------------- 1 | package cc.rome753.encodemp4; 2 | 3 | 
import android.annotation.SuppressLint; 4 | import android.media.AudioFormat; 5 | import android.media.MediaCodec; 6 | import android.media.MediaCodecInfo; 7 | import android.media.MediaCodecList; 8 | import android.media.MediaFormat; 9 | import android.media.MediaRecorder; 10 | import android.util.Log; 11 | 12 | import java.lang.ref.WeakReference; 13 | import java.nio.ByteBuffer; 14 | import java.util.concurrent.LinkedBlockingQueue; 15 | 16 | /** 17 | * 对ACC音频进行编码 18 | * Created by chao on 2017/5/6. 19 | */ 20 | 21 | public class AACEncodeConsumer extends Thread { 22 | private static final String TAG = "EncodeAudio"; 23 | private static final String MIME_TYPE = "audio/mp4a-latm"; 24 | private static final int TIMES_OUT = 10000; 25 | private static final int ACC_PROFILE = MediaCodecInfo.CodecProfileLevel.AACObjectLC; 26 | private static final int BUFFER_SIZE = 3584;//1600; 27 | private static final int AUDIO_BUFFER_SIZE = 1024; 28 | /** 29 | * 默认采样率 30 | */ 31 | public static final int DEFAULT_SAMPLE_RATE = 44100; 32 | 33 | /** 34 | * 通道数为1 35 | */ 36 | public static final int CHANNEL_COUNT_MONO = 1; 37 | /** 38 | * 通道数为2 39 | */ 40 | public static final int CHANNEL_COUNT_STEREO = 2; 41 | /** 42 | * 单声道 43 | */ 44 | public static final int CHANNEL_IN_MONO = AudioFormat.CHANNEL_IN_MONO; 45 | /** 46 | * 立体声 47 | */ 48 | public static final int CHANNEL_IN_STEREO = AudioFormat.CHANNEL_IN_STEREO; 49 | /** 50 | * 16位采样精度 51 | */ 52 | public static final int ENCODING_PCM_16BIT = AudioFormat.ENCODING_PCM_16BIT; 53 | /** 54 | * 8位采样精度 55 | */ 56 | public static final int ENCODING_PCM_8BIT = AudioFormat.ENCODING_PCM_8BIT; 57 | /** 58 | * 音频源为MIC 59 | */ 60 | public static final int SOURCE_MIC = MediaRecorder.AudioSource.MIC; 61 | 62 | // 编码器 63 | private boolean isExit = false; 64 | private boolean isEncoderStarted = false; 65 | private WeakReference mMuxerRef; 66 | private EncoderParams mParams; 67 | private MediaCodec mAudioEncoder; 68 | private MediaFormat newFormat; 69 | 
private long prevPresentationTimes = 0; 70 | private long nanoTime = 0;//System.nanoTime(); 71 | 72 | synchronized void setTmpuMuxer(MediaMuxerUtil mMuxer, EncoderParams mParams) { 73 | this.mMuxerRef = new WeakReference<>(mMuxer); 74 | this.mParams = mParams; 75 | 76 | MediaMuxerUtil muxer = mMuxerRef.get(); 77 | if (muxer != null && newFormat != null) { 78 | muxer.addTrack(newFormat, false); 79 | } 80 | } 81 | 82 | 83 | static class RawData { 84 | byte[] buf; 85 | int readBytes; 86 | long timeStamp; 87 | 88 | RawData() { 89 | buf = new byte[BUFFER_SIZE]; 90 | } 91 | 92 | void merge(ByteBuffer byteBuffer, int length) { 93 | System.arraycopy(byteBuffer.array(), byteBuffer.arrayOffset(), buf, readBytes, length); 94 | readBytes += length; 95 | timeStamp = System.nanoTime(); 96 | } 97 | 98 | boolean canMerge(int length) { 99 | return readBytes + length < buf.length; 100 | } 101 | 102 | } 103 | 104 | private LinkedBlockingQueue queue = new LinkedBlockingQueue<>(); 105 | 106 | private RawData bigShip; 107 | 108 | // queue数据没处理完时,先放到bigShip里,确保编码器消费速度 109 | public void addData(ByteBuffer byteBuffer, int length) { 110 | if(bigShip == null) { 111 | bigShip = new RawData(); 112 | bigShip.merge(byteBuffer, length); 113 | if(queue.isEmpty()) { 114 | queue.offer(bigShip); 115 | bigShip = null; 116 | } 117 | } else { 118 | if(bigShip.canMerge(length)) { 119 | bigShip.merge(byteBuffer, length); 120 | } else { 121 | queue.offer(bigShip); 122 | bigShip = null; 123 | } 124 | } 125 | } 126 | 127 | private RawData removeData() { 128 | return queue.poll(); 129 | } 130 | 131 | @Override 132 | public void run() { 133 | startCodec(); 134 | while (!isExit) { 135 | try { 136 | RawData data = removeData(); 137 | if(data != null) { 138 | Log.d("encode", "onWebRtcAudioRecording take data"); 139 | encoderBytes(data.buf, data.readBytes, data.timeStamp); 140 | } 141 | 142 | MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo(); 143 | int outputBufferIndex; 144 | do { 145 | 
outputBufferIndex = mAudioEncoder.dequeueOutputBuffer(mBufferInfo, TIMES_OUT); 146 | if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) { 147 | // Log.i(TAG, "INFO_TRY_AGAIN_LATER"); 148 | } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { 149 | Log.i(TAG, "INFO_OUTPUT_FORMAT_CHANGED"); 150 | synchronized (AACEncodeConsumer.this) { 151 | newFormat = mAudioEncoder.getOutputFormat(); 152 | if (mMuxerRef != null) { 153 | MediaMuxerUtil muxer = mMuxerRef.get(); 154 | if (muxer != null) { 155 | muxer.addTrack(newFormat, false); 156 | } 157 | } 158 | } 159 | } else { 160 | if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { 161 | mBufferInfo.size = 0; 162 | } 163 | if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 164 | 165 | Log.i(TAG, "数据流结束,退出循环"); 166 | break; 167 | } 168 | ByteBuffer outputBuffer = mAudioEncoder.getOutputBuffer(outputBufferIndex); 169 | if (mBufferInfo.size != 0) { 170 | if (outputBuffer == null) { 171 | throw new RuntimeException("encodecOutputBuffer" + outputBufferIndex + "was null"); 172 | } 173 | if (mMuxerRef != null) { 174 | MediaMuxerUtil muxer = mMuxerRef.get(); 175 | if (muxer != null) { 176 | Log.i(TAG, "------编码混合音频数据------------" + mBufferInfo.presentationTimeUs / 1000); 177 | muxer.pumpStream(outputBuffer, mBufferInfo, false); 178 | } 179 | } 180 | } 181 | mAudioEncoder.releaseOutputBuffer(outputBufferIndex, false); 182 | } 183 | } while (outputBufferIndex >= 0); 184 | } catch (Exception e) { 185 | e.printStackTrace(); 186 | } 187 | 188 | } 189 | stopCodec(); 190 | } 191 | 192 | @SuppressLint("NewApi") 193 | public void encoderBytes(byte[] audioBuf, int readBytes, long timeStamp) { 194 | int inputBufferIndex = mAudioEncoder.dequeueInputBuffer(TIMES_OUT); 195 | if (inputBufferIndex >= 0) { 196 | ByteBuffer inputBuffer = mAudioEncoder.getInputBuffer(inputBufferIndex); 197 | 198 | if (audioBuf == null || readBytes <= 0) { 199 | 
mAudioEncoder.queueInputBuffer(inputBufferIndex, 0, 0, System.nanoTime() / 1000, MediaCodec.BUFFER_FLAG_END_OF_STREAM); 200 | } else { 201 | if(inputBuffer != null) { 202 | inputBuffer.clear(); 203 | inputBuffer.put(audioBuf); 204 | } 205 | Log.e("chao", "audio set pts-------" + timeStamp / 1000 / 1000); 206 | mAudioEncoder.queueInputBuffer(inputBufferIndex, 0, readBytes, System.nanoTime() / 1000, 0); 207 | 208 | } 209 | } 210 | 211 | 212 | } 213 | 214 | private void startCodec() { 215 | MediaCodecInfo mCodecInfo = selectSupportCodec(MIME_TYPE); 216 | if (mCodecInfo == null) { 217 | return; 218 | } 219 | try { 220 | mAudioEncoder = MediaCodec.createByCodecName(mCodecInfo.getName()); 221 | MediaFormat mediaFormat = new MediaFormat(); 222 | mediaFormat.setString(MediaFormat.KEY_MIME, MIME_TYPE); 223 | mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mParams.getAudioBitrate()); 224 | mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, mParams.getAudioSampleRate()); 225 | mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, ACC_PROFILE); 226 | mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, mParams.getAudioChannelCount()); 227 | mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, BUFFER_SIZE); 228 | if (mAudioEncoder != null) { 229 | mAudioEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); 230 | mAudioEncoder.start(); 231 | isEncoderStarted = true; 232 | } 233 | } catch (Exception e) { 234 | Log.e(TAG, "startCodec" + e.getMessage()); 235 | e.printStackTrace(); 236 | } 237 | } 238 | 239 | private void stopCodec() { 240 | try { 241 | if (mAudioEncoder != null) { 242 | mAudioEncoder.stop(); 243 | mAudioEncoder.release(); 244 | mAudioEncoder = null; 245 | } 246 | } catch (Exception e) { 247 | // 捕获release()方法抛出异常 248 | } 249 | isEncoderStarted = false; 250 | } 251 | 252 | public void exit() { 253 | isExit = true; 254 | } 255 | 256 | /** 257 | * 遍历所有编解码器,返回第一个与指定MIME类型匹配的编码器 258 | * 判断是否有支持指定mime类型的编码器 259 | */ 260 | private MediaCodecInfo 
selectSupportCodec(String mimeType) { 261 | int numCodecs = MediaCodecList.getCodecCount(); 262 | for (int i = 0; i < numCodecs; i++) { 263 | MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i); 264 | if (!codecInfo.isEncoder()) { 265 | continue; 266 | } 267 | String[] types = codecInfo.getSupportedTypes(); 268 | for (int j = 0; j < types.length; j++) { 269 | if (types[j].equalsIgnoreCase(mimeType)) { 270 | return codecInfo; 271 | } 272 | } 273 | } 274 | return null; 275 | } 276 | 277 | } 278 | -------------------------------------------------------------------------------- /Application/src/main/java/cc/rome753/encodemp4/EncoderParams.java: -------------------------------------------------------------------------------- 1 | package cc.rome753.encodemp4; 2 | 3 | /** 音、视频编码参数 4 | * 5 | * Created by chao on 2017/9/19. 6 | */ 7 | 8 | public class EncoderParams { 9 | private String videoPath; 10 | private int frameWidth; // 图像宽度 11 | private int frameHeight; // 图像高度 12 | private int bitRate; 13 | private int frameRate; 14 | private boolean isVertical; 15 | 16 | private String picPath; // 图片抓拍路径 17 | private int audioBitrate; // 音频编码比特率 18 | private int audioChannelCount; // 通道数据 19 | private int audioSampleRate; // 采样率 20 | 21 | private int audioChannelConfig; // 单声道或立体声 22 | private int audioFormat; // 采样精度 23 | private int audioSouce; // 音频来源 24 | 25 | public EncoderParams(){} 26 | 27 | public String getVideoPath() { 28 | return videoPath; 29 | } 30 | 31 | public void setVideoPath(String videoPath) { 32 | this.videoPath = videoPath; 33 | } 34 | 35 | public int getAudioChannelConfig() { 36 | return audioChannelConfig; 37 | } 38 | 39 | public void setAudioChannelConfig(int audioChannelConfig) { 40 | this.audioChannelConfig = audioChannelConfig; 41 | } 42 | 43 | public boolean isVertical() { 44 | return isVertical; 45 | } 46 | 47 | public void setVertical(boolean vertical) { 48 | isVertical = vertical; 49 | } 50 | 51 | public int getAudioFormat() { 52 | return 
audioFormat; 53 | } 54 | 55 | public void setAudioFormat(int audioFormat) { 56 | this.audioFormat = audioFormat; 57 | } 58 | 59 | public int getAudioSouce() { 60 | return audioSouce; 61 | } 62 | 63 | public void setAudioSouce(int audioSouce) { 64 | this.audioSouce = audioSouce; 65 | } 66 | 67 | public String getPicPath() { 68 | return picPath; 69 | } 70 | 71 | public void setPicPath(String picPath) { 72 | this.picPath = picPath; 73 | } 74 | 75 | public int getAudioChannelCount() { 76 | return audioChannelCount; 77 | } 78 | 79 | public void setAudioChannelCount(int audioChannelCount) { 80 | this.audioChannelCount = audioChannelCount; 81 | } 82 | 83 | public int getAudioBitrate() { 84 | return audioBitrate; 85 | } 86 | 87 | public void setAudioBitrate(int audioBitrate) { 88 | this.audioBitrate = audioBitrate; 89 | } 90 | 91 | public int getAudioSampleRate() { 92 | return audioSampleRate; 93 | } 94 | 95 | public void setAudioSampleRate(int audioSampleRate) { 96 | this.audioSampleRate = audioSampleRate; 97 | } 98 | 99 | public int getFrameWidth() { 100 | return frameWidth; 101 | } 102 | 103 | public void setFrameWidth(int frameWidth) { 104 | this.frameWidth = frameWidth; 105 | } 106 | 107 | public int getFrameHeight() { 108 | return frameHeight; 109 | } 110 | 111 | public void setFrameHeight(int frameHeight) { 112 | this.frameHeight = frameHeight; 113 | } 114 | 115 | public int getBitRate() { 116 | return bitRate; 117 | } 118 | 119 | public void setBitRate(int bitRate) { 120 | this.bitRate = bitRate; 121 | } 122 | 123 | public int getFrameRate() { 124 | return frameRate; 125 | } 126 | 127 | public void setFrameRate(int frameRate) { 128 | this.frameRate = frameRate; 129 | } 130 | } 131 | -------------------------------------------------------------------------------- /Application/src/main/java/cc/rome753/encodemp4/H264EncodeConsumer.java: -------------------------------------------------------------------------------- 1 | package cc.rome753.encodemp4; 2 | 3 | import 
android.annotation.TargetApi; 4 | import android.media.MediaCodec; 5 | import android.media.MediaCodecInfo; 6 | import android.media.MediaCodecList; 7 | import android.media.MediaFormat; 8 | import android.os.Build; 9 | import android.util.Log; 10 | 11 | import java.lang.ref.WeakReference; 12 | import java.nio.ByteBuffer; 13 | import java.util.HashSet; 14 | import java.util.concurrent.LinkedBlockingQueue; 15 | 16 | /** 17 | * 对YUV视频流进行编码 18 | * Created by chao on 2017/5/6. 19 | */ 20 | 21 | public class H264EncodeConsumer extends Thread { 22 | private static final String TAG = "EncodeVideo"; 23 | private static final String MIME_TYPE = "video/avc"; 24 | // 间隔1s插入一帧关键帧 25 | private static final int FRAME_INTERVAL = 1; 26 | // 绑定编码器缓存区超时时间为10s 27 | private static final int TIMES_OUT = 10000; 28 | 29 | // 硬编码器 30 | private MediaCodec mVideoEncodec; 31 | private boolean isExit = false; 32 | private boolean isEncoderStart = false; 33 | 34 | private boolean isAddKeyFrame = false; 35 | private EncoderParams mParams; 36 | private MediaFormat newFormat; 37 | private WeakReference mMuxerRef; 38 | private int mColorFormat; 39 | private long nanoTime = 0;//System.nanoTime(); 40 | 41 | synchronized void setTmpuMuxer(MediaMuxerUtil mMuxer, EncoderParams mParams) { 42 | this.mMuxerRef = new WeakReference<>(mMuxer); 43 | this.mParams = mParams; 44 | MediaMuxerUtil muxer = mMuxerRef.get(); 45 | 46 | if (muxer != null && newFormat != null) { 47 | muxer.addTrack(newFormat, true); 48 | } 49 | } 50 | 51 | private void startCodec() { 52 | try { 53 | MediaCodecInfo mCodecInfo = selectSupportCodec(MIME_TYPE); 54 | if (mCodecInfo == null) { 55 | Log.d(TAG, "startCodec fail" + MIME_TYPE); 56 | return; 57 | } 58 | mColorFormat = selectSupportColorFormat(mCodecInfo, MIME_TYPE); 59 | mVideoEncodec = MediaCodec.createByCodecName(mCodecInfo.getName()); 60 | MediaFormat mFormat = MediaFormat.createVideoFormat(MIME_TYPE, mParams.getFrameHeight(), mParams.getFrameWidth()); 61 | 
mFormat.setInteger(MediaFormat.KEY_BIT_RATE, mParams.getBitRate()); 62 | mFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mParams.getFrameRate()); 63 | mFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, mColorFormat); // 颜色格式 64 | mFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, FRAME_INTERVAL); 65 | if (mVideoEncodec != null) { 66 | mVideoEncodec.configure(mFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); 67 | mVideoEncodec.start(); 68 | isEncoderStart = true; 69 | } 70 | } catch (Exception e) { 71 | Log.e(TAG, "startCodec" + e.getMessage()); 72 | e.printStackTrace(); 73 | } 74 | } 75 | 76 | private void stopCodec() { 77 | if (mVideoEncodec != null) { 78 | mVideoEncodec.stop(); 79 | mVideoEncodec.release(); 80 | mVideoEncodec = null; 81 | isAddKeyFrame = false; 82 | isEncoderStart = false; 83 | 84 | Log.d(TAG, "stopCodec"); 85 | } 86 | } 87 | 88 | // private long lastPush = 0; 89 | 90 | private LinkedBlockingQueue queue = new LinkedBlockingQueue<>(); 91 | 92 | static class RawData { 93 | byte[] buf; 94 | long timeStamp; 95 | 96 | RawData(byte[] buf, long timeStamp) { 97 | this.buf = buf; 98 | this.timeStamp = timeStamp; 99 | } 100 | } 101 | 102 | public void addData(byte[] yuvData) { 103 | Log.e("chao", "**********add video" + System.nanoTime() / 1000 / 1000); 104 | queue.offer(new RawData(yuvData, System.nanoTime())); 105 | } 106 | 107 | private RawData removeData() { 108 | return queue.poll(); 109 | } 110 | 111 | private void handleData(byte[] yuvData, long timeStamp) { 112 | if (!isEncoderStart || mParams == null) 113 | return; 114 | try { 115 | int mWidth = mParams.getFrameWidth(); 116 | int mHeight = mParams.getFrameHeight(); 117 | 118 | byte[] resultBytes = new byte[yuvData.length]; 119 | if(mColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar) { // I420 120 | YUVTools.rotateP90(yuvData, resultBytes, mWidth, mHeight); 121 | } else /*if(mColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar)*/{ //NV12 122 | 
YUVTools.rotateSP90(yuvData, resultBytes, mWidth, mHeight); 123 | } 124 | 125 | feedMediaCodecData(resultBytes, timeStamp); 126 | } catch (Exception e) { 127 | e.printStackTrace(); 128 | } 129 | } 130 | 131 | @TargetApi(21) 132 | private void feedMediaCodecData(byte[] data, long timeStamp) { 133 | int inputBufferIndex = mVideoEncodec.dequeueInputBuffer(TIMES_OUT); 134 | if (inputBufferIndex >= 0) { 135 | ByteBuffer inputBuffer = mVideoEncodec.getInputBuffer(inputBufferIndex); 136 | if (inputBuffer != null) { 137 | inputBuffer.clear(); 138 | inputBuffer.put(data); 139 | } 140 | Log.e("chao", "video set pts......." + (timeStamp) / 1000 / 1000); 141 | mVideoEncodec.queueInputBuffer(inputBufferIndex, 0, data.length, System.nanoTime() / 1000 142 | , MediaCodec.BUFFER_FLAG_KEY_FRAME); 143 | } 144 | } 145 | 146 | @TargetApi(Build.VERSION_CODES.LOLLIPOP) 147 | @Override 148 | public void run() { 149 | try { 150 | if (!isEncoderStart) { 151 | Thread.sleep(200); 152 | 153 | startCodec(); 154 | } 155 | while (!isExit && isEncoderStart) { 156 | RawData rawData = removeData(); 157 | if(rawData != null) { 158 | handleData(rawData.buf, rawData.timeStamp); 159 | } 160 | 161 | MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo(); 162 | int outputBufferIndex; 163 | do { 164 | outputBufferIndex = mVideoEncodec.dequeueOutputBuffer(mBufferInfo, TIMES_OUT); 165 | if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) { 166 | // Log.i(TAG, "INFO_TRY_AGAIN_LATER"); 167 | } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { 168 | synchronized (H264EncodeConsumer.this) { 169 | newFormat = mVideoEncodec.getOutputFormat(); 170 | if (mMuxerRef != null) { 171 | MediaMuxerUtil muxer = mMuxerRef.get(); 172 | if (muxer != null) { 173 | muxer.addTrack(newFormat, true); 174 | } 175 | } 176 | } 177 | 178 | Log.i(TAG, "编码器输出缓存区格式改变,添加视频轨道到混合器"); 179 | } else { 180 | ByteBuffer outputBuffer = mVideoEncodec.getOutputBuffer(outputBufferIndex); 181 | int type = 
outputBuffer.get(4) & 0x1F; 182 | 183 | Log.d(TAG, "------还有数据---->" + type); 184 | if (type == 7 || type == 8) { 185 | 186 | Log.e(TAG, "------PPS、SPS帧(非图像数据),忽略-------"); 187 | mBufferInfo.size = 0; 188 | } else if (type == 5) { 189 | if (mMuxerRef != null) { 190 | MediaMuxerUtil muxer = mMuxerRef.get(); 191 | if (muxer != null) { 192 | Log.i(TAG, "------编码混合 视频关键帧数据-----" + mBufferInfo.presentationTimeUs / 1000); 193 | muxer.pumpStream(outputBuffer, mBufferInfo, true); 194 | } 195 | isAddKeyFrame = true; 196 | } 197 | } else { 198 | if (isAddKeyFrame) { 199 | if (isAddKeyFrame && mMuxerRef != null) { 200 | MediaMuxerUtil muxer = mMuxerRef.get(); 201 | if (muxer != null) { 202 | Log.i(TAG, "------编码混合 视频普通帧数据-----" + mBufferInfo.presentationTimeUs / 1000); 203 | muxer.pumpStream(outputBuffer, mBufferInfo, true); 204 | } 205 | } 206 | } 207 | } 208 | mVideoEncodec.releaseOutputBuffer(outputBufferIndex, false); 209 | } 210 | } while (outputBufferIndex >= 0); 211 | } 212 | stopCodec(); 213 | } catch (Exception e) { 214 | e.printStackTrace(); 215 | } 216 | } 217 | 218 | public void exit() { 219 | isExit = true; 220 | } 221 | 222 | /** 223 | * 遍历所有编解码器,返回第一个与指定MIME类型匹配的编码器 224 | * 判断是否有支持指定mime类型的编码器 225 | */ 226 | private MediaCodecInfo selectSupportCodec(String mimeType) { 227 | int numCodecs = MediaCodecList.getCodecCount(); 228 | for (int i = 0; i < numCodecs; i++) { 229 | MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i); 230 | if (!codecInfo.isEncoder()) { 231 | continue; 232 | } 233 | String[] types = codecInfo.getSupportedTypes(); 234 | for (int j = 0; j < types.length; j++) { 235 | if (types[j].equalsIgnoreCase(mimeType)) { 236 | return codecInfo; 237 | } 238 | } 239 | } 240 | return null; 241 | } 242 | 243 | /** 244 | * 根据mime类型匹配编码器支持的颜色格式 245 | */ 246 | private int selectSupportColorFormat(MediaCodecInfo mCodecInfo, String mimeType) { 247 | MediaCodecInfo.CodecCapabilities capabilities = mCodecInfo.getCapabilitiesForType(mimeType); 248 | HashSet 
colorFormats = new HashSet<>(); 249 | for(int i : capabilities.colorFormats) colorFormats.add(i); 250 | if(colorFormats.contains(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar)) return MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar; 251 | if(colorFormats.contains(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar)) return MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar; 252 | return 0; 253 | } 254 | 255 | } 256 | -------------------------------------------------------------------------------- /Application/src/main/java/cc/rome753/encodemp4/MediaMuxerUtil.java: -------------------------------------------------------------------------------- 1 | package cc.rome753.encodemp4; 2 | 3 | import android.media.MediaCodec; 4 | import android.media.MediaFormat; 5 | import android.media.MediaMuxer; 6 | import android.util.Log; 7 | 8 | import java.io.IOException; 9 | import java.nio.ByteBuffer; 10 | import java.util.LinkedList; 11 | import java.util.Queue; 12 | 13 | /** 14 | * Mp4封装混合器 15 | *

16 | * Created by chao on 2017/7/28. 17 | */ 18 | 19 | public class MediaMuxerUtil { 20 | private static final String TAG = MediaMuxerUtil.class.getSimpleName(); 21 | private MediaMuxer mMuxer; 22 | private final long durationMillis; 23 | private int mVideoTrackIndex = -1; 24 | private int mAudioTrackIndex = -1; 25 | private long mBeginMillis; 26 | 27 | // 文件路径;文件时长 28 | public MediaMuxerUtil(String path, long durationMillis) { 29 | this.durationMillis = durationMillis; 30 | try { 31 | mMuxer = new MediaMuxer(path, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); 32 | } catch (IOException e) { 33 | e.printStackTrace(); 34 | } 35 | } 36 | 37 | public synchronized void addTrack(MediaFormat format, boolean isVideo) { 38 | if(mMuxer == null) { 39 | return; 40 | } 41 | if (mAudioTrackIndex != -1 && mVideoTrackIndex != -1) { 42 | return; 43 | } 44 | 45 | int track = mMuxer.addTrack(format); 46 | Log.i(TAG, String.format("addTrack %s result %d", isVideo ? "video" : "audio", track)); 47 | if (isVideo) { 48 | mVideoTrackIndex = track; 49 | if (mAudioTrackIndex != -1) { 50 | Log.i(TAG, "both audio and video added,and muxer is started"); 51 | mMuxer.start(); 52 | mBeginMillis = System.currentTimeMillis(); 53 | } 54 | } else { 55 | mAudioTrackIndex = track; 56 | if (mVideoTrackIndex != -1) { 57 | mMuxer.start(); 58 | mBeginMillis = System.currentTimeMillis(); 59 | } 60 | } 61 | } 62 | 63 | public synchronized void pumpStream(ByteBuffer outputBuffer, MediaCodec.BufferInfo bufferInfo, boolean isVideo) { 64 | if (mBeginMillis > 0) { 65 | try { 66 | pump(outputBuffer, bufferInfo, isVideo); 67 | }catch (Exception e) { 68 | e.printStackTrace(); 69 | } 70 | } 71 | } 72 | 73 | private void pump(ByteBuffer outputBuffer, MediaCodec.BufferInfo bufferInfo, boolean isVideo) { 74 | if(mMuxer == null) { 75 | return; 76 | } 77 | if (mAudioTrackIndex == -1 || mVideoTrackIndex == -1) { 78 | return; 79 | } 80 | if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { 81 | // The codec 
config data was pulled out and fed to the muxer when we got 82 | // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it. 83 | } else if (bufferInfo.size != 0) { 84 | 85 | outputBuffer.position(bufferInfo.offset); 86 | outputBuffer.limit(bufferInfo.offset + bufferInfo.size); 87 | 88 | mMuxer.writeSampleData(isVideo ? mVideoTrackIndex : mAudioTrackIndex, outputBuffer, bufferInfo); 89 | Log.d(TAG, String.format("sent %s [" + bufferInfo.size + "] with timestamp:[%d] to muxer", isVideo ? "video" : "audio", bufferInfo.presentationTimeUs / 1000)); 90 | } 91 | 92 | if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 93 | Log.i(TAG, "BUFFER_FLAG_END_OF_STREAM received"); 94 | } 95 | 96 | if (System.currentTimeMillis() - mBeginMillis >= durationMillis) { 97 | mMuxer.stop(); 98 | mMuxer.release(); 99 | mMuxer = null; 100 | mVideoTrackIndex = mAudioTrackIndex = -1; 101 | } 102 | } 103 | 104 | public synchronized void release() { 105 | if (mMuxer != null) { 106 | if (mAudioTrackIndex != -1 && mVideoTrackIndex != -1) { 107 | Log.i(TAG, String.format("muxer is started. now it will be stoped.")); 108 | try { 109 | mMuxer.stop(); 110 | mMuxer.release(); 111 | } catch (IllegalStateException ex) { 112 | ex.printStackTrace(); 113 | } 114 | 115 | mAudioTrackIndex = mVideoTrackIndex = -1; 116 | } else { 117 | Log.i(TAG, String.format("muxer is failed to be stoped.")); 118 | } 119 | } 120 | } 121 | } 122 | -------------------------------------------------------------------------------- /Application/src/main/java/cc/rome753/encodemp4/VideoRecorder.java: -------------------------------------------------------------------------------- 1 | package cc.rome753.encodemp4; 2 | 3 | import java.nio.ByteBuffer; 4 | 5 | /** 6 | * Created by chao on 19-4-10. 
7 | */ 8 | 9 | public class VideoRecorder { 10 | 11 | private AACEncodeConsumer mAacConsumer; 12 | private H264EncodeConsumer mH264Consumer; 13 | private MediaMuxerUtil mMuxer; 14 | 15 | private String filePath; 16 | 17 | 18 | public VideoRecorder(String filePath) { 19 | this.filePath = filePath; 20 | } 21 | 22 | 23 | private EncoderParams setEncodeParams() { 24 | EncoderParams params = new EncoderParams(); 25 | params.setVideoPath(filePath); // 视频文件路径 26 | params.setFrameWidth(640); // 分辨率 27 | params.setFrameHeight(480); 28 | params.setBitRate(600000); // 视频编码码率 29 | params.setFrameRate(30);// 视频编码帧率 30 | params.setAudioBitrate(44100); // 音频比特率 31 | params.setAudioSampleRate(AACEncodeConsumer.DEFAULT_SAMPLE_RATE); // 音频采样率 32 | params.setAudioChannelConfig(AACEncodeConsumer.CHANNEL_IN_MONO);// 单声道 33 | params.setAudioChannelCount(AACEncodeConsumer.CHANNEL_COUNT_MONO); // 单声道通道数量 34 | params.setAudioFormat(AACEncodeConsumer.ENCODING_PCM_16BIT); // 采样精度为16位 35 | return params; 36 | } 37 | 38 | 39 | public void addAudioData(byte[] buffer) { 40 | if (mAacConsumer != null) { 41 | try { 42 | mAacConsumer.addData(ByteBuffer.wrap(buffer), buffer.length); 43 | } catch (Exception e) { 44 | e.printStackTrace(); 45 | } 46 | } 47 | } 48 | 49 | public void addVideoData(byte[] frame) { 50 | if(mH264Consumer != null) { 51 | try { 52 | mH264Consumer.addData(frame); 53 | } catch (Exception e) { 54 | e.printStackTrace(); 55 | } 56 | } 57 | } 58 | 59 | public void start() { 60 | EncoderParams params = setEncodeParams(); 61 | // 创建音视频编码线程 62 | mH264Consumer = new H264EncodeConsumer(); 63 | mAacConsumer = new AACEncodeConsumer(); 64 | mMuxer = new MediaMuxerUtil(params.getVideoPath(), 1000000); 65 | if (mH264Consumer != null) { 66 | mH264Consumer.setTmpuMuxer(mMuxer,params); 67 | } 68 | if (mAacConsumer != null) { 69 | mAacConsumer.setTmpuMuxer(mMuxer,params); 70 | } 71 | // 配置好混合器后启动线程 72 | mH264Consumer.start(); 73 | mAacConsumer.start(); 74 | 75 | } 76 | 77 | public void stop() { 
78 | // 停止混合器 79 | if (mMuxer != null) { 80 | mMuxer.release(); 81 | mMuxer = null; 82 | } 83 | if (mH264Consumer != null) { 84 | mH264Consumer.setTmpuMuxer(null,null); 85 | } 86 | if (mAacConsumer != null) { 87 | mAacConsumer.setTmpuMuxer(null,null); 88 | } 89 | // 停止视频编码线程 90 | if (mH264Consumer != null) { 91 | mH264Consumer.exit(); 92 | try { 93 | Thread t2 = mH264Consumer; 94 | mH264Consumer = null; 95 | if (t2 != null) { 96 | t2.interrupt(); 97 | t2.join(); 98 | } 99 | } catch (InterruptedException e) { 100 | e.printStackTrace(); 101 | } 102 | } 103 | // 停止音频编码线程 104 | if (mAacConsumer != null) { 105 | mAacConsumer.exit(); 106 | try { 107 | Thread t1 = mAacConsumer; 108 | mAacConsumer = null; 109 | if (t1 != null) { 110 | t1.interrupt(); 111 | t1.join(); 112 | } 113 | } catch (InterruptedException e) { 114 | e.printStackTrace(); 115 | } 116 | } 117 | 118 | } 119 | 120 | } 121 | -------------------------------------------------------------------------------- /Application/src/main/java/cc/rome753/encodemp4/YUVTools.java: -------------------------------------------------------------------------------- 1 | package cc.rome753.encodemp4; 2 | 3 | import android.graphics.Bitmap; 4 | import android.media.Image; 5 | import android.media.ImageReader; 6 | 7 | import java.nio.ByteBuffer; 8 | 9 | public class YUVTools { 10 | 11 | /******************************* YUV420旋转算法 *******************************/ 12 | 13 | // I420或YV12顺时针旋转 14 | public static void rotateP(byte[] src, byte[] dest, int w, int h, int rotation) { 15 | switch (rotation) { 16 | case 0: 17 | System.arraycopy(src, 0, dest, 0, src.length); 18 | break; 19 | case 90: 20 | rotateP90(src, dest, w, h); 21 | break; 22 | case 180: 23 | rotateP180(src, dest, w, h); 24 | break; 25 | case 270: 26 | rotateP270(src, dest, w, h); 27 | break; 28 | } 29 | } 30 | 31 | // NV21或NV12顺时针旋转 32 | public static void rotateSP(byte[] src, byte[] dest, int w, int h, int rotation) { 33 | switch (rotation) { 34 | case 0: 35 | 
System.arraycopy(src, 0, dest, 0, src.length); 36 | break; 37 | case 90: 38 | rotateSP90(src, dest, w, h); 39 | break; 40 | case 180: 41 | rotateSP180(src, dest, w, h); 42 | break; 43 | case 270: 44 | rotateSP270(src, dest, w, h); 45 | break; 46 | } 47 | } 48 | 49 | // NV21或NV12顺时针旋转90度 50 | public static void rotateSP90(byte[] src, byte[] dest, int w, int h) { 51 | int pos = 0; 52 | int k = 0; 53 | for (int i = 0; i <= w - 1; i++) { 54 | for (int j = h - 1; j >= 0; j--) { 55 | dest[k++] = src[j * w + i]; 56 | } 57 | } 58 | 59 | pos = w * h; 60 | for (int i = 0; i <= w - 2; i += 2) { 61 | for (int j = h / 2 - 1; j >= 0; j--) { 62 | dest[k++] = src[pos + j * w + i]; 63 | dest[k++] = src[pos + j * w + i + 1]; 64 | } 65 | } 66 | } 67 | 68 | // NV21或NV12顺时针旋转270度 69 | public static void rotateSP270(byte[] src, byte[] dest, int w, int h) { 70 | int pos = 0; 71 | int k = 0; 72 | for (int i = w - 1; i >= 0; i--) { 73 | for (int j = 0; j <= h - 1; j++) { 74 | dest[k++] = src[j * w + i]; 75 | } 76 | } 77 | 78 | pos = w * h; 79 | for (int i = w - 2; i >= 0; i -= 2) { 80 | for (int j = 0; j <= h / 2 - 1; j++) { 81 | dest[k++] = src[pos + j * w + i]; 82 | dest[k++] = src[pos + j * w + i + 1]; 83 | } 84 | } 85 | } 86 | 87 | // NV21或NV12顺时针旋转180度 88 | public static void rotateSP180(byte[] src, byte[] dest, int w, int h) { 89 | int pos = 0; 90 | int k = w * h - 1; 91 | while (k >= 0) { 92 | dest[pos++] = src[k--]; 93 | } 94 | 95 | k = src.length - 2; 96 | while (pos < dest.length) { 97 | dest[pos++] = src[k]; 98 | dest[pos++] = src[k + 1]; 99 | k -= 2; 100 | } 101 | } 102 | 103 | // I420或YV12顺时针旋转90度 104 | public static void rotateP90(byte[] src, byte[] dest, int w, int h) { 105 | int pos = 0; 106 | //旋转Y 107 | int k = 0; 108 | for (int i = 0; i < w; i++) { 109 | for (int j = h - 1; j >= 0; j--) { 110 | dest[k++] = src[j * w + i]; 111 | } 112 | } 113 | //旋转U 114 | pos = w * h; 115 | for (int i = 0; i < w / 2; i++) { 116 | for (int j = h / 2 - 1; j >= 0; j--) { 117 | dest[k++] = 
src[pos + j * w / 2 + i]; 118 | } 119 | } 120 | 121 | //旋转V 122 | pos = w * h * 5 / 4; 123 | for (int i = 0; i < w / 2; i++) { 124 | for (int j = h / 2 - 1; j >= 0; j--) { 125 | dest[k++] = src[pos + j * w / 2 + i]; 126 | } 127 | } 128 | } 129 | 130 | // I420或YV12顺时针旋转270度 131 | public static void rotateP270(byte[] src, byte[] dest, int w, int h) { 132 | int pos = 0; 133 | //旋转Y 134 | int k = 0; 135 | for (int i = w - 1; i >= 0; i--) { 136 | for (int j = 0; j < h; j++) { 137 | dest[k++] = src[j * w + i]; 138 | } 139 | } 140 | //旋转U 141 | pos = w * h; 142 | for (int i = w / 2 - 1; i >= 0; i--) { 143 | for (int j = 0; j < h / 2; j++) { 144 | dest[k++] = src[pos + j * w / 2 + i]; 145 | } 146 | } 147 | 148 | //旋转V 149 | pos = w * h * 5 / 4; 150 | for (int i = w / 2 - 1; i >= 0; i--) { 151 | for (int j = 0; j < h / 2; j++) { 152 | dest[k++] = src[pos + j * w / 2 + i]; 153 | } 154 | } 155 | } 156 | 157 | // I420或YV12顺时针旋转180度 158 | public static void rotateP180(byte[] src, byte[] dest, int w, int h) { 159 | int pos = 0; 160 | int k = w * h - 1; 161 | while (k >= 0) { 162 | dest[pos++] = src[k--]; 163 | } 164 | 165 | k = w * h * 5 / 4; 166 | while (k >= w * h) { 167 | dest[pos++] = src[k--]; 168 | } 169 | 170 | k = src.length - 1; 171 | while (pos < dest.length) { 172 | dest[pos++] = src[k--]; 173 | } 174 | } 175 | 176 | /******************************* YUV420格式相互转换算法 *******************************/ 177 | 178 | // i420 -> nv12, yv12 -> nv21 179 | public static void pToSP(byte[] src, byte[] dest, int w, int h) { 180 | int pos = w * h; 181 | int u = pos; 182 | int v = pos + (pos >> 2); 183 | System.arraycopy(src, 0, dest, 0, pos); 184 | while (pos < src.length) { 185 | dest[pos++] = src[u++]; 186 | dest[pos++] = src[v++]; 187 | } 188 | } 189 | 190 | // i420 -> nv21, yv12 -> nv12 191 | public static void pToSPx(byte[] src, byte[] dest, int w, int h) { 192 | int pos = w * h; 193 | int u = pos; 194 | int v = pos + (pos >> 2); 195 | System.arraycopy(src, 0, dest, 0, pos); 196 
| while (pos < src.length) { 197 | dest[pos++] = src[v++]; 198 | dest[pos++] = src[u++]; 199 | } 200 | } 201 | 202 | // nv12 -> i420, nv21 -> yv12 203 | public static void spToP(byte[] src, byte[] dest, int w, int h) { 204 | int pos = w * h; 205 | int u = pos; 206 | int v = pos + (pos >> 2); 207 | System.arraycopy(src, 0, dest, 0, pos); 208 | while (pos < src.length) { 209 | dest[u++] = src[pos++]; 210 | dest[v++] = src[pos++]; 211 | } 212 | } 213 | 214 | // nv12 -> yv12, nv21 -> i420 215 | public static void spToPx(byte[] src, byte[] dest, int w, int h) { 216 | int pos = w * h; 217 | int u = pos; 218 | int v = pos + (pos >> 2); 219 | System.arraycopy(src, 0, dest, 0, pos); 220 | while (pos < src.length) { 221 | dest[v++] = src[pos++]; 222 | dest[u++] = src[pos++]; 223 | } 224 | } 225 | 226 | // i420 <-> yv12 227 | public static void pToP(byte[] src, byte[] dest, int w, int h) { 228 | int pos = w * h; 229 | int off = pos >> 2; 230 | System.arraycopy(src, 0, dest, 0, pos); 231 | System.arraycopy(src, pos, dest, pos + off, off); 232 | System.arraycopy(src, pos + off, dest, pos, off); 233 | } 234 | 235 | // nv12 <-> nv21 236 | public static void spToSP(byte[] src, byte[] dest, int w, int h) { 237 | int pos = w * h; 238 | System.arraycopy(src, 0, dest, 0, pos); 239 | for (; pos < src.length; pos += 2) { 240 | dest[pos] = src[pos + 1]; 241 | dest[pos + 1] = src[pos]; 242 | } 243 | } 244 | 245 | 246 | /******************************* YUV420转换Bitmap算法 *******************************/ 247 | 248 | // 此方法虽然是官方的,但是耗时是下面方法的两倍 249 | // public static Bitmap nv21ToBitmap(byte[] data, int w, int h) { 250 | // final YuvImage image = new YuvImage(data, ImageFormat.NV21, w, h, null); 251 | // ByteArrayOutputStream os = new ByteArrayOutputStream(data.length); 252 | // if (image.compressToJpeg(new Rect(0, 0, w, h), 100, os)) { 253 | // byte[] tmp = os.toByteArray(); 254 | // return BitmapFactory.decodeByteArray(tmp, 0, tmp.length); 255 | // } 256 | // return null; 257 | // } 258 | 259 
| public static Bitmap nv12ToBitmap(byte[] data, int w, int h) { 260 | return spToBitmap(data, w, h, 0, 1); 261 | } 262 | 263 | public static Bitmap nv21ToBitmap(byte[] data, int w, int h) { 264 | return spToBitmap(data, w, h, 1, 0); 265 | } 266 | 267 | private static Bitmap spToBitmap(byte[] data, int w, int h, int uOff, int vOff) { 268 | int plane = w * h; 269 | int[] colors = new int[plane]; 270 | int yPos = 0, uvPos = plane; 271 | for(int j = 0; j < h; j++) { 272 | for(int i = 0; i < w; i++) { 273 | // YUV byte to RGB int 274 | final int y1 = data[yPos] & 0xff; 275 | final int u = (data[uvPos + uOff] & 0xff) - 128; 276 | final int v = (data[uvPos + vOff] & 0xff) - 128; 277 | final int y1192 = 1192 * y1; 278 | int r = (y1192 + 1634 * v); 279 | int g = (y1192 - 833 * v - 400 * u); 280 | int b = (y1192 + 2066 * u); 281 | 282 | r = (r < 0) ? 0 : ((r > 262143) ? 262143 : r); 283 | g = (g < 0) ? 0 : ((g > 262143) ? 262143 : g); 284 | b = (b < 0) ? 0 : ((b > 262143) ? 262143 : b); 285 | colors[yPos] = ((r << 6) & 0xff0000) | 286 | ((g >> 2) & 0xff00) | 287 | ((b >> 10) & 0xff); 288 | 289 | if((yPos++ & 1) == 1) uvPos += 2; 290 | } 291 | if((j & 1) == 0) uvPos -= w; 292 | } 293 | return Bitmap.createBitmap(colors, w, h, Bitmap.Config.RGB_565); 294 | } 295 | 296 | public static Bitmap i420ToBitmap(byte[] data, int w, int h) { 297 | return pToBitmap(data, w, h, true); 298 | } 299 | 300 | public static Bitmap yv12ToBitmap(byte[] data, int w, int h) { 301 | return pToBitmap(data, w, h, false); 302 | } 303 | 304 | private static Bitmap pToBitmap(byte[] data, int w, int h, boolean uv) { 305 | int plane = w * h; 306 | int[] colors = new int[plane]; 307 | int off = plane >> 2; 308 | int yPos = 0, uPos = plane + (uv ? 0 : off), vPos = plane + (uv ? 
off : 0); 309 | for(int j = 0; j < h; j++) { 310 | for(int i = 0; i < w; i++) { 311 | // YUV byte to RGB int 312 | final int y1 = data[yPos] & 0xff; 313 | final int u = (data[uPos] & 0xff) - 128; 314 | final int v = (data[vPos] & 0xff) - 128; 315 | final int y1192 = 1192 * y1; 316 | int r = (y1192 + 1634 * v); 317 | int g = (y1192 - 833 * v - 400 * u); 318 | int b = (y1192 + 2066 * u); 319 | 320 | r = (r < 0) ? 0 : ((r > 262143) ? 262143 : r); 321 | g = (g < 0) ? 0 : ((g > 262143) ? 262143 : g); 322 | b = (b < 0) ? 0 : ((b > 262143) ? 262143 : b); 323 | colors[yPos] = ((r << 6) & 0xff0000) | 324 | ((g >> 2) & 0xff00) | 325 | ((b >> 10) & 0xff); 326 | 327 | if((yPos++ & 1) == 1) { 328 | uPos++; 329 | vPos++; 330 | } 331 | } 332 | if((j & 1) == 0) { 333 | uPos -= (w >> 1); 334 | vPos -= (w >> 1); 335 | } 336 | } 337 | return Bitmap.createBitmap(colors, w, h, Bitmap.Config.RGB_565); 338 | } 339 | 340 | public static int[] planesToColors(Image.Plane[] planes, int height) { 341 | ByteBuffer yPlane = planes[0].getBuffer(); 342 | ByteBuffer uPlane = planes[1].getBuffer(); 343 | ByteBuffer vPlane = planes[2].getBuffer(); 344 | 345 | int bufferIndex = 0; 346 | final int total = yPlane.capacity(); 347 | final int uvCapacity = uPlane.capacity(); 348 | final int width = planes[0].getRowStride(); 349 | 350 | int[] rgbBuffer = new int[width * height]; 351 | 352 | int yPos = 0; 353 | for (int i = 0; i < height; i++) { 354 | int uvPos = (i >> 1) * width; 355 | 356 | for (int j = 0; j < width; j++) { 357 | if (uvPos >= uvCapacity - 1) 358 | break; 359 | if (yPos >= total) 360 | break; 361 | 362 | final int y1 = yPlane.get(yPos++) & 0xff; 363 | 364 | /* 365 | The ordering of the u (Cb) and v (Cr) bytes inside the planes is a 366 | bit strange. The _first_ byte of the u-plane and the _second_ byte 367 | of the v-plane build the u/v pair and belong to the first two pixels 368 | (y-bytes), thus usual YUV 420 behavior. 
What the Android devs did 369 | here (IMHO): just copy the interleaved NV21 U/V data to two planes 370 | but keep the offset of the interleaving. 371 | */ 372 | final int u = (uPlane.get(uvPos) & 0xff) - 128; 373 | final int v = (vPlane.get(uvPos) & 0xff) - 128; 374 | if ((j & 1) == 1) { 375 | uvPos += 2; 376 | } 377 | 378 | // This is the integer variant to convert YCbCr to RGB, NTSC values. 379 | // formulae found at 380 | // https://software.intel.com/en-us/android/articles/trusted-tools-in-the-new-android-world-optimization-techniques-from-intel-sse-intrinsics-to 381 | // and on StackOverflow etc. 382 | final int y1192 = 1192 * y1; 383 | int r = (y1192 + 1634 * v); 384 | int g = (y1192 - 833 * v - 400 * u); 385 | int b = (y1192 + 2066 * u); 386 | 387 | r = (r < 0) ? 0 : ((r > 262143) ? 262143 : r); 388 | g = (g < 0) ? 0 : ((g > 262143) ? 262143 : g); 389 | b = (b < 0) ? 0 : ((b > 262143) ? 262143 : b); 390 | 391 | rgbBuffer[bufferIndex++] = ((r << 6) & 0xff0000) | 392 | ((g >> 2) & 0xff00) | 393 | ((b >> 10) & 0xff); 394 | } 395 | } 396 | return rgbBuffer; 397 | } 398 | 399 | /** 400 | * 从ImageReader中获取byte[]数据 401 | */ 402 | public static byte[] getBytesFromImageReader(ImageReader imageReader) { 403 | try (Image image = imageReader.acquireNextImage()) { 404 | final Image.Plane[] planes = image.getPlanes(); 405 | ByteBuffer b0 = planes[0].getBuffer(); 406 | ByteBuffer b1 = planes[1].getBuffer(); 407 | ByteBuffer b2 = planes[2].getBuffer(); 408 | int y = b0.remaining(), u = y >> 2, v = u; 409 | byte[] bytes = new byte[y + u + v]; 410 | if(b1.remaining() > u) { // y420sp 411 | b0.get(bytes, 0, b0.remaining()); 412 | b1.get(bytes, y, b1.remaining()); // uv 413 | } else { // y420p 414 | b0.get(bytes, 0, b0.remaining()); 415 | b1.get(bytes, y, b1.remaining()); // u 416 | b2.get(bytes, y + u, b2.remaining()); // v 417 | } 418 | return bytes; 419 | } catch (Exception e) { 420 | e.printStackTrace(); 421 | } 422 | return null; 423 | } 424 | } 425 | 
-------------------------------------------------------------------------------- /Application/src/main/java/com/example/android/camera2basic/AutoFitTextureView.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2014 The Android Open Source Project 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package com.example.android.camera2basic; 18 | 19 | import android.content.Context; 20 | import android.util.AttributeSet; 21 | import android.view.TextureView; 22 | 23 | /** 24 | * A {@link TextureView} that can be adjusted to a specified aspect ratio. 25 | */ 26 | public class AutoFitTextureView extends TextureView { 27 | 28 | private int mRatioWidth = 0; 29 | private int mRatioHeight = 0; 30 | 31 | public AutoFitTextureView(Context context) { 32 | this(context, null); 33 | } 34 | 35 | public AutoFitTextureView(Context context, AttributeSet attrs) { 36 | this(context, attrs, 0); 37 | } 38 | 39 | public AutoFitTextureView(Context context, AttributeSet attrs, int defStyle) { 40 | super(context, attrs, defStyle); 41 | } 42 | 43 | /** 44 | * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio 45 | * calculated from the parameters. Note that the actual sizes of parameters don't matter, that 46 | * is, calling setAspectRatio(2, 3) and setAspectRatio(4, 6) make the same result. 
47 | * 48 | * @param width Relative horizontal size 49 | * @param height Relative vertical size 50 | */ 51 | public void setAspectRatio(int width, int height) { 52 | if (width < 0 || height < 0) { 53 | throw new IllegalArgumentException("Size cannot be negative."); 54 | } 55 | mRatioWidth = width; 56 | mRatioHeight = height; 57 | requestLayout(); 58 | } 59 | 60 | @Override 61 | protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { 62 | super.onMeasure(widthMeasureSpec, heightMeasureSpec); 63 | int width = MeasureSpec.getSize(widthMeasureSpec); 64 | int height = MeasureSpec.getSize(heightMeasureSpec); 65 | if (0 == mRatioWidth || 0 == mRatioHeight) { 66 | setMeasuredDimension(width, height); 67 | } else { 68 | if (width < height * mRatioWidth / mRatioHeight) { 69 | setMeasuredDimension(width, width * mRatioHeight / mRatioWidth); 70 | } else { 71 | setMeasuredDimension(height * mRatioWidth / mRatioHeight, height); 72 | } 73 | } 74 | } 75 | 76 | } 77 | -------------------------------------------------------------------------------- /Application/src/main/java/com/example/android/camera2basic/Camera2BasicFragment.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 The Android Open Source Project 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package com.example.android.camera2basic; 18 | 19 | import android.Manifest; 20 | import android.app.Activity; 21 | import android.app.AlertDialog; 22 | import android.app.Dialog; 23 | import android.content.Context; 24 | import android.content.DialogInterface; 25 | import android.content.pm.PackageManager; 26 | import android.content.res.Configuration; 27 | import android.graphics.ImageFormat; 28 | import android.graphics.Matrix; 29 | import android.graphics.Point; 30 | import android.graphics.RectF; 31 | import android.graphics.SurfaceTexture; 32 | import android.hardware.camera2.CameraAccessException; 33 | import android.hardware.camera2.CameraCaptureSession; 34 | import android.hardware.camera2.CameraCharacteristics; 35 | import android.hardware.camera2.CameraDevice; 36 | import android.hardware.camera2.CameraManager; 37 | import android.hardware.camera2.CameraMetadata; 38 | import android.hardware.camera2.CaptureRequest; 39 | import android.hardware.camera2.CaptureResult; 40 | import android.hardware.camera2.TotalCaptureResult; 41 | import android.hardware.camera2.params.StreamConfigurationMap; 42 | import android.media.Image; 43 | import android.media.ImageReader; 44 | import android.os.Bundle; 45 | import android.os.Handler; 46 | import android.os.HandlerThread; 47 | import android.support.annotation.NonNull; 48 | import android.support.v4.app.ActivityCompat; 49 | import android.support.v4.app.DialogFragment; 50 | import android.support.v4.app.Fragment; 51 | import android.support.v4.content.ContextCompat; 52 | import android.util.Log; 53 | import android.util.Size; 54 | import android.util.SparseIntArray; 55 | import android.view.LayoutInflater; 56 | import android.view.Surface; 57 | import android.view.TextureView; 58 | import android.view.View; 59 | import android.view.ViewGroup; 60 | import android.widget.Toast; 61 | 62 | import java.io.File; 63 | import java.io.FileOutputStream; 64 | import java.io.IOException; 65 | import 
java.nio.ByteBuffer; 66 | import java.util.ArrayList; 67 | import java.util.Arrays; 68 | import java.util.Collections; 69 | import java.util.Comparator; 70 | import java.util.List; 71 | import java.util.concurrent.Semaphore; 72 | import java.util.concurrent.TimeUnit; 73 | 74 | import cc.rome753.encodemp4.R; 75 | import cc.rome753.encodemp4.YUVTools; 76 | 77 | public class Camera2BasicFragment extends Fragment 78 | implements View.OnClickListener, ActivityCompat.OnRequestPermissionsResultCallback { 79 | 80 | /** 81 | * Conversion from screen rotation to JPEG orientation. 82 | */ 83 | private static final SparseIntArray ORIENTATIONS = new SparseIntArray(); 84 | private static final int REQUEST_CAMERA_PERMISSION = 1; 85 | private static final String FRAGMENT_DIALOG = "dialog"; 86 | 87 | static { 88 | ORIENTATIONS.append(Surface.ROTATION_0, 90); 89 | ORIENTATIONS.append(Surface.ROTATION_90, 0); 90 | ORIENTATIONS.append(Surface.ROTATION_180, 270); 91 | ORIENTATIONS.append(Surface.ROTATION_270, 180); 92 | } 93 | 94 | /** 95 | * Tag for the {@link Log}. 96 | */ 97 | private static final String TAG = "Camera2BasicFragment"; 98 | 99 | /** 100 | * Camera state: Showing camera preview. 101 | */ 102 | private static final int STATE_PREVIEW = 0; 103 | 104 | /** 105 | * Camera state: Waiting for the focus to be locked. 106 | */ 107 | private static final int STATE_WAITING_LOCK = 1; 108 | 109 | /** 110 | * Camera state: Waiting for the exposure to be precapture state. 111 | */ 112 | private static final int STATE_WAITING_PRECAPTURE = 2; 113 | 114 | /** 115 | * Camera state: Waiting for the exposure state to be something other than precapture. 116 | */ 117 | private static final int STATE_WAITING_NON_PRECAPTURE = 3; 118 | 119 | /** 120 | * Camera state: Picture was taken. 
121 | */ 122 | private static final int STATE_PICTURE_TAKEN = 4; 123 | 124 | /** 125 | * Max preview width that is guaranteed by Camera2 API 126 | */ 127 | private static final int MAX_PREVIEW_WIDTH = 1920; 128 | 129 | /** 130 | * Max preview height that is guaranteed by Camera2 API 131 | */ 132 | private static final int MAX_PREVIEW_HEIGHT = 1080; 133 | 134 | /** 135 | * {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a 136 | * {@link TextureView}. 137 | */ 138 | private final TextureView.SurfaceTextureListener mSurfaceTextureListener 139 | = new TextureView.SurfaceTextureListener() { 140 | 141 | @Override 142 | public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) { 143 | openCamera(width, height); 144 | } 145 | 146 | @Override 147 | public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) { 148 | configureTransform(width, height); 149 | } 150 | 151 | @Override 152 | public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) { 153 | return true; 154 | } 155 | 156 | @Override 157 | public void onSurfaceTextureUpdated(SurfaceTexture texture) { 158 | } 159 | 160 | }; 161 | 162 | /** 163 | * ID of the current {@link CameraDevice}. 164 | */ 165 | private String mCameraId; 166 | 167 | /** 168 | * An {@link AutoFitTextureView} for camera preview. 169 | */ 170 | private AutoFitTextureView mTextureView; 171 | 172 | /** 173 | * A {@link CameraCaptureSession } for camera preview. 174 | */ 175 | private CameraCaptureSession mCaptureSession; 176 | 177 | /** 178 | * A reference to the opened {@link CameraDevice}. 179 | */ 180 | private CameraDevice mCameraDevice; 181 | 182 | /** 183 | * The {@link android.util.Size} of camera preview. 184 | */ 185 | private Size mPreviewSize; 186 | 187 | /** 188 | * {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its state. 
189 | */ 190 | private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() { 191 | 192 | @Override 193 | public void onOpened(@NonNull CameraDevice cameraDevice) { 194 | // This method is called when the camera is opened. We start camera preview here. 195 | mCameraOpenCloseLock.release(); 196 | mCameraDevice = cameraDevice; 197 | createCameraPreviewSession(); 198 | } 199 | 200 | @Override 201 | public void onDisconnected(@NonNull CameraDevice cameraDevice) { 202 | mCameraOpenCloseLock.release(); 203 | cameraDevice.close(); 204 | mCameraDevice = null; 205 | } 206 | 207 | @Override 208 | public void onError(@NonNull CameraDevice cameraDevice, int error) { 209 | mCameraOpenCloseLock.release(); 210 | cameraDevice.close(); 211 | mCameraDevice = null; 212 | Activity activity = getActivity(); 213 | if (null != activity) { 214 | activity.finish(); 215 | } 216 | } 217 | 218 | }; 219 | 220 | /** 221 | * An additional thread for running tasks that shouldn't block the UI. 222 | */ 223 | private HandlerThread mBackgroundThread; 224 | 225 | /** 226 | * A {@link Handler} for running tasks in the background. 227 | */ 228 | private Handler mBackgroundHandler; 229 | 230 | /** 231 | * An {@link ImageReader} that handles still image capture. 232 | */ 233 | private ImageReader mImageReader; 234 | 235 | /** 236 | * This is the output file for our picture. 237 | */ 238 | private File mFile; 239 | 240 | /** 241 | * This a callback object for the {@link ImageReader}. "onImageAvailable" will be called when a 242 | * still image is ready to be saved. 
243 | */ 244 | private final ImageReader.OnImageAvailableListener mOnImageAvailableListener 245 | = new ImageReader.OnImageAvailableListener() { 246 | 247 | @Override 248 | public void onImageAvailable(ImageReader reader) { 249 | // mBackgroundHandler.post(new ImageSaver(reader.acquireNextImage(), mFile)); 250 | byte[] data = YUVTools.getBytesFromImageReader(reader); 251 | ((CameraActivity)getActivity()).addVideoData(data); 252 | 253 | } 254 | 255 | }; 256 | 257 | /** 258 | * {@link CaptureRequest.Builder} for the camera preview 259 | */ 260 | private CaptureRequest.Builder mPreviewRequestBuilder; 261 | 262 | /** 263 | * {@link CaptureRequest} generated by {@link #mPreviewRequestBuilder} 264 | */ 265 | private CaptureRequest mPreviewRequest; 266 | 267 | /** 268 | * The current state of camera state for taking pictures. 269 | * 270 | * @see #mCaptureCallback 271 | */ 272 | private int mState = STATE_PREVIEW; 273 | 274 | /** 275 | * A {@link Semaphore} to prevent the app from exiting before closing the camera. 276 | */ 277 | private Semaphore mCameraOpenCloseLock = new Semaphore(1); 278 | 279 | /** 280 | * Whether the current camera device supports Flash or not. 281 | */ 282 | private boolean mFlashSupported; 283 | 284 | /** 285 | * Orientation of the camera sensor 286 | */ 287 | private int mSensorOrientation; 288 | 289 | /** 290 | * A {@link CameraCaptureSession.CaptureCallback} that handles events related to JPEG capture. 291 | */ 292 | private CameraCaptureSession.CaptureCallback mCaptureCallback 293 | = new CameraCaptureSession.CaptureCallback() { 294 | 295 | private void process(CaptureResult result) { 296 | switch (mState) { 297 | case STATE_PREVIEW: { 298 | // We have nothing to do when the camera preview is working normally. 
299 | break; 300 | } 301 | case STATE_WAITING_LOCK: { 302 | Integer afState = result.get(CaptureResult.CONTROL_AF_STATE); 303 | if (afState == null) { 304 | captureStillPicture(); 305 | } else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState || 306 | CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) { 307 | // CONTROL_AE_STATE can be null on some devices 308 | Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); 309 | if (aeState == null || 310 | aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) { 311 | mState = STATE_PICTURE_TAKEN; 312 | captureStillPicture(); 313 | } else { 314 | runPrecaptureSequence(); 315 | } 316 | } 317 | break; 318 | } 319 | case STATE_WAITING_PRECAPTURE: { 320 | // CONTROL_AE_STATE can be null on some devices 321 | Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); 322 | if (aeState == null || 323 | aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE || 324 | aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) { 325 | mState = STATE_WAITING_NON_PRECAPTURE; 326 | } 327 | break; 328 | } 329 | case STATE_WAITING_NON_PRECAPTURE: { 330 | // CONTROL_AE_STATE can be null on some devices 331 | Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); 332 | if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) { 333 | mState = STATE_PICTURE_TAKEN; 334 | captureStillPicture(); 335 | } 336 | break; 337 | } 338 | } 339 | } 340 | 341 | @Override 342 | public void onCaptureProgressed(@NonNull CameraCaptureSession session, 343 | @NonNull CaptureRequest request, 344 | @NonNull CaptureResult partialResult) { 345 | process(partialResult); 346 | } 347 | 348 | @Override 349 | public void onCaptureCompleted(@NonNull CameraCaptureSession session, 350 | @NonNull CaptureRequest request, 351 | @NonNull TotalCaptureResult result) { 352 | process(result); 353 | } 354 | 355 | }; 356 | 357 | /** 358 | * Shows a {@link Toast} on the UI thread. 
359 | * 360 | * @param text The message to show 361 | */ 362 | private void showToast(final String text) { 363 | final Activity activity = getActivity(); 364 | if (activity != null) { 365 | activity.runOnUiThread(new Runnable() { 366 | @Override 367 | public void run() { 368 | Toast.makeText(activity, text, Toast.LENGTH_SHORT).show(); 369 | } 370 | }); 371 | } 372 | } 373 | 374 | /** 375 | * Given {@code choices} of {@code Size}s supported by a camera, choose the smallest one that 376 | * is at least as large as the respective texture view size, and that is at most as large as the 377 | * respective max size, and whose aspect ratio matches with the specified value. If such size 378 | * doesn't exist, choose the largest one that is at most as large as the respective max size, 379 | * and whose aspect ratio matches with the specified value. 380 | * 381 | * @param choices The list of sizes that the camera supports for the intended output 382 | * class 383 | * @param textureViewWidth The width of the texture view relative to sensor coordinate 384 | * @param textureViewHeight The height of the texture view relative to sensor coordinate 385 | * @param maxWidth The maximum width that can be chosen 386 | * @param maxHeight The maximum height that can be chosen 387 | * @param aspectRatio The aspect ratio 388 | * @return The optimal {@code Size}, or an arbitrary one if none were big enough 389 | */ 390 | private static Size chooseOptimalSize(Size[] choices, int textureViewWidth, 391 | int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) { 392 | 393 | // Collect the supported resolutions that are at least as big as the preview Surface 394 | List bigEnough = new ArrayList<>(); 395 | // Collect the supported resolutions that are smaller than the preview Surface 396 | List notBigEnough = new ArrayList<>(); 397 | int w = aspectRatio.getWidth(); 398 | int h = aspectRatio.getHeight(); 399 | for (Size option : choices) { 400 | if (option.getWidth() <= maxWidth && 
option.getHeight() <= maxHeight && 401 | option.getHeight() == option.getWidth() * h / w) { 402 | if (option.getWidth() >= textureViewWidth && 403 | option.getHeight() >= textureViewHeight) { 404 | bigEnough.add(option); 405 | } else { 406 | notBigEnough.add(option); 407 | } 408 | } 409 | } 410 | 411 | // Pick the smallest of those big enough. If there is no one big enough, pick the 412 | // largest of those not big enough. 413 | if (bigEnough.size() > 0) { 414 | return Collections.min(bigEnough, new CompareSizesByArea()); 415 | } else if (notBigEnough.size() > 0) { 416 | return Collections.max(notBigEnough, new CompareSizesByArea()); 417 | } else { 418 | Log.e(TAG, "Couldn't find any suitable preview size"); 419 | return choices[0]; 420 | } 421 | } 422 | 423 | public static Camera2BasicFragment newInstance() { 424 | return new Camera2BasicFragment(); 425 | } 426 | 427 | @Override 428 | public View onCreateView(LayoutInflater inflater, ViewGroup container, 429 | Bundle savedInstanceState) { 430 | return inflater.inflate(R.layout.fragment_camera2_basic, container, false); 431 | } 432 | 433 | @Override 434 | public void onViewCreated(final View view, Bundle savedInstanceState) { 435 | view.findViewById(R.id.picture).setOnClickListener(this); 436 | view.findViewById(R.id.info).setOnClickListener(this); 437 | mTextureView = (AutoFitTextureView) view.findViewById(R.id.texture); 438 | } 439 | 440 | @Override 441 | public void onActivityCreated(Bundle savedInstanceState) { 442 | super.onActivityCreated(savedInstanceState); 443 | mFile = new File(getActivity().getExternalFilesDir(null), "pic.jpg"); 444 | } 445 | 446 | @Override 447 | public void onResume() { 448 | super.onResume(); 449 | startBackgroundThread(); 450 | 451 | // When the screen is turned off and turned back on, the SurfaceTexture is already 452 | // available, and "onSurfaceTextureAvailable" will not be called. 
In that case, we can open 453 | // a camera and start preview from here (otherwise, we wait until the surface is ready in 454 | // the SurfaceTextureListener). 455 | if (mTextureView.isAvailable()) { 456 | openCamera(mTextureView.getWidth(), mTextureView.getHeight()); 457 | } else { 458 | mTextureView.setSurfaceTextureListener(mSurfaceTextureListener); 459 | } 460 | } 461 | 462 | @Override 463 | public void onPause() { 464 | closeCamera(); 465 | stopBackgroundThread(); 466 | super.onPause(); 467 | } 468 | 469 | private void requestCameraPermission() { 470 | if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)) { 471 | new ConfirmationDialog().show(getChildFragmentManager(), FRAGMENT_DIALOG); 472 | } else { 473 | requestPermissions(new String[]{Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO, Manifest.permission.WRITE_EXTERNAL_STORAGE}, REQUEST_CAMERA_PERMISSION); 474 | } 475 | } 476 | 477 | @Override 478 | public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, 479 | @NonNull int[] grantResults) { 480 | if (requestCode == REQUEST_CAMERA_PERMISSION) { 481 | if (grantResults[0] != PackageManager.PERMISSION_GRANTED) { 482 | ErrorDialog.newInstance(getString(R.string.request_permission)) 483 | .show(getChildFragmentManager(), FRAGMENT_DIALOG); 484 | } 485 | } else { 486 | super.onRequestPermissionsResult(requestCode, permissions, grantResults); 487 | } 488 | } 489 | 490 | /** 491 | * Sets up member variables related to camera. 
492 | * 493 | * @param width The width of available size for camera preview 494 | * @param height The height of available size for camera preview 495 | */ 496 | @SuppressWarnings("SuspiciousNameCombination") 497 | private void setUpCameraOutputs(int width, int height) { 498 | Activity activity = getActivity(); 499 | CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE); 500 | try { 501 | for (String cameraId : manager.getCameraIdList()) { 502 | CameraCharacteristics characteristics 503 | = manager.getCameraCharacteristics(cameraId); 504 | 505 | // We don't use a front facing camera in this sample. 506 | Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING); 507 | if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) { 508 | continue; 509 | } 510 | 511 | StreamConfigurationMap map = characteristics.get( 512 | CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); 513 | if (map == null) { 514 | continue; 515 | } 516 | 517 | // For still image captures, we use the largest available size. 518 | Size largest = Collections.max( 519 | Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), 520 | new CompareSizesByArea()); 521 | mImageReader = ImageReader.newInstance(640, 480, 522 | ImageFormat.YUV_420_888, /*maxImages*/2); 523 | mImageReader.setOnImageAvailableListener( 524 | mOnImageAvailableListener, mBackgroundHandler); 525 | 526 | // Find out if we need to swap dimension to get the preview size relative to sensor 527 | // coordinate. 
528 | int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation(); 529 | //noinspection ConstantConditions 530 | mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); 531 | boolean swappedDimensions = false; 532 | switch (displayRotation) { 533 | case Surface.ROTATION_0: 534 | case Surface.ROTATION_180: 535 | if (mSensorOrientation == 90 || mSensorOrientation == 270) { 536 | swappedDimensions = true; 537 | } 538 | break; 539 | case Surface.ROTATION_90: 540 | case Surface.ROTATION_270: 541 | if (mSensorOrientation == 0 || mSensorOrientation == 180) { 542 | swappedDimensions = true; 543 | } 544 | break; 545 | default: 546 | Log.e(TAG, "Display rotation is invalid: " + displayRotation); 547 | } 548 | 549 | Point displaySize = new Point(); 550 | activity.getWindowManager().getDefaultDisplay().getSize(displaySize); 551 | int rotatedPreviewWidth = width; 552 | int rotatedPreviewHeight = height; 553 | int maxPreviewWidth = displaySize.x; 554 | int maxPreviewHeight = displaySize.y; 555 | 556 | if (swappedDimensions) { 557 | rotatedPreviewWidth = height; 558 | rotatedPreviewHeight = width; 559 | maxPreviewWidth = displaySize.y; 560 | maxPreviewHeight = displaySize.x; 561 | } 562 | 563 | if (maxPreviewWidth > MAX_PREVIEW_WIDTH) { 564 | maxPreviewWidth = MAX_PREVIEW_WIDTH; 565 | } 566 | 567 | if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) { 568 | maxPreviewHeight = MAX_PREVIEW_HEIGHT; 569 | } 570 | 571 | // Danger, W.R.! Attempting to use too large a preview size could exceed the camera 572 | // bus' bandwidth limitation, resulting in gorgeous previews but the storage of 573 | // garbage capture data. 574 | mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), 575 | rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth, 576 | maxPreviewHeight, largest); 577 | 578 | // We fit the aspect ratio of TextureView to the size of preview we picked. 
579 | int orientation = getResources().getConfiguration().orientation; 580 | if (orientation == Configuration.ORIENTATION_LANDSCAPE) { 581 | mTextureView.setAspectRatio( 582 | mPreviewSize.getWidth(), mPreviewSize.getHeight()); 583 | } else { 584 | mTextureView.setAspectRatio( 585 | mPreviewSize.getHeight(), mPreviewSize.getWidth()); 586 | } 587 | 588 | // Check if the flash is supported. 589 | Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE); 590 | mFlashSupported = available == null ? false : available; 591 | 592 | mCameraId = cameraId; 593 | return; 594 | } 595 | } catch (CameraAccessException e) { 596 | e.printStackTrace(); 597 | } catch (NullPointerException e) { 598 | // Currently an NPE is thrown when the Camera2API is used but not supported on the 599 | // device this code runs. 600 | ErrorDialog.newInstance(getString(R.string.camera_error)) 601 | .show(getChildFragmentManager(), FRAGMENT_DIALOG); 602 | } 603 | } 604 | 605 | /** 606 | * Opens the camera specified by {@link Camera2BasicFragment#mCameraId}. 
607 | */ 608 | private void openCamera(int width, int height) { 609 | if (ContextCompat.checkSelfPermission(getActivity(), Manifest.permission.CAMERA) 610 | != PackageManager.PERMISSION_GRANTED) { 611 | requestCameraPermission(); 612 | return; 613 | } 614 | setUpCameraOutputs(width, height); 615 | configureTransform(width, height); 616 | Activity activity = getActivity(); 617 | CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE); 618 | try { 619 | if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) { 620 | throw new RuntimeException("Time out waiting to lock camera opening."); 621 | } 622 | manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler); 623 | } catch (CameraAccessException e) { 624 | e.printStackTrace(); 625 | } catch (InterruptedException e) { 626 | throw new RuntimeException("Interrupted while trying to lock camera opening.", e); 627 | } 628 | } 629 | 630 | /** 631 | * Closes the current {@link CameraDevice}. 632 | */ 633 | private void closeCamera() { 634 | try { 635 | mCameraOpenCloseLock.acquire(); 636 | if (null != mCaptureSession) { 637 | mCaptureSession.close(); 638 | mCaptureSession = null; 639 | } 640 | if (null != mCameraDevice) { 641 | mCameraDevice.close(); 642 | mCameraDevice = null; 643 | } 644 | if (null != mImageReader) { 645 | mImageReader.close(); 646 | mImageReader = null; 647 | } 648 | } catch (InterruptedException e) { 649 | throw new RuntimeException("Interrupted while trying to lock camera closing.", e); 650 | } finally { 651 | mCameraOpenCloseLock.release(); 652 | } 653 | } 654 | 655 | /** 656 | * Starts a background thread and its {@link Handler}. 657 | */ 658 | private void startBackgroundThread() { 659 | mBackgroundThread = new HandlerThread("CameraBackground"); 660 | mBackgroundThread.start(); 661 | mBackgroundHandler = new Handler(mBackgroundThread.getLooper()); 662 | } 663 | 664 | /** 665 | * Stops the background thread and its {@link Handler}. 
666 | */ 667 | private void stopBackgroundThread() { 668 | mBackgroundThread.quitSafely(); 669 | try { 670 | mBackgroundThread.join(); 671 | mBackgroundThread = null; 672 | mBackgroundHandler = null; 673 | } catch (InterruptedException e) { 674 | e.printStackTrace(); 675 | } 676 | } 677 | 678 | /** 679 | * Creates a new {@link CameraCaptureSession} for camera preview. 680 | */ 681 | private void createCameraPreviewSession() { 682 | try { 683 | SurfaceTexture texture = mTextureView.getSurfaceTexture(); 684 | assert texture != null; 685 | 686 | // We configure the size of default buffer to be the size of camera preview we want. 687 | texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight()); 688 | 689 | // This is the output Surface we need to start preview. 690 | Surface surface = new Surface(texture); 691 | 692 | // We set up a CaptureRequest.Builder with the output Surface. 693 | mPreviewRequestBuilder 694 | = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 695 | mPreviewRequestBuilder.addTarget(surface); 696 | mPreviewRequestBuilder.addTarget(mImageReader.getSurface()); 697 | 698 | // Here, we create a CameraCaptureSession for camera preview. 699 | mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()), 700 | new CameraCaptureSession.StateCallback() { 701 | 702 | @Override 703 | public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) { 704 | // The camera is already closed 705 | if (null == mCameraDevice) { 706 | return; 707 | } 708 | 709 | // When the session is ready, we start displaying the preview. 710 | mCaptureSession = cameraCaptureSession; 711 | try { 712 | // Auto focus should be continuous for camera preview. 713 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, 714 | CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); 715 | // Flash is automatically enabled when necessary. 
716 | setAutoFlash(mPreviewRequestBuilder); 717 | 718 | // Finally, we start displaying the camera preview. 719 | mPreviewRequest = mPreviewRequestBuilder.build(); 720 | mCaptureSession.setRepeatingRequest(mPreviewRequest, 721 | mCaptureCallback, mBackgroundHandler); 722 | } catch (CameraAccessException e) { 723 | e.printStackTrace(); 724 | } 725 | } 726 | 727 | @Override 728 | public void onConfigureFailed( 729 | @NonNull CameraCaptureSession cameraCaptureSession) { 730 | showToast("Failed"); 731 | } 732 | }, null 733 | ); 734 | } catch (CameraAccessException e) { 735 | e.printStackTrace(); 736 | } 737 | } 738 | 739 | /** 740 | * Configures the necessary {@link android.graphics.Matrix} transformation to `mTextureView`. 741 | * This method should be called after the camera preview size is determined in 742 | * setUpCameraOutputs and also the size of `mTextureView` is fixed. 743 | * 744 | * @param viewWidth The width of `mTextureView` 745 | * @param viewHeight The height of `mTextureView` 746 | */ 747 | private void configureTransform(int viewWidth, int viewHeight) { 748 | Activity activity = getActivity(); 749 | if (null == mTextureView || null == mPreviewSize || null == activity) { 750 | return; 751 | } 752 | int rotation = activity.getWindowManager().getDefaultDisplay().getRotation(); 753 | Matrix matrix = new Matrix(); 754 | RectF viewRect = new RectF(0, 0, viewWidth, viewHeight); 755 | RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth()); 756 | float centerX = viewRect.centerX(); 757 | float centerY = viewRect.centerY(); 758 | if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) { 759 | bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY()); 760 | matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL); 761 | float scale = Math.max( 762 | (float) viewHeight / mPreviewSize.getHeight(), 763 | (float) viewWidth / mPreviewSize.getWidth()); 764 | matrix.postScale(scale, scale, 
centerX, centerY); 765 | matrix.postRotate(90 * (rotation - 2), centerX, centerY); 766 | } else if (Surface.ROTATION_180 == rotation) { 767 | matrix.postRotate(180, centerX, centerY); 768 | } 769 | mTextureView.setTransform(matrix); 770 | } 771 | 772 | /** 773 | * Initiate a still image capture. 774 | */ 775 | private void takePicture() { 776 | lockFocus(); 777 | } 778 | 779 | /** 780 | * Lock the focus as the first step for a still image capture. 781 | */ 782 | private void lockFocus() { 783 | try { 784 | // This is how to tell the camera to lock focus. 785 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, 786 | CameraMetadata.CONTROL_AF_TRIGGER_START); 787 | // Tell #mCaptureCallback to wait for the lock. 788 | mState = STATE_WAITING_LOCK; 789 | mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, 790 | mBackgroundHandler); 791 | } catch (CameraAccessException e) { 792 | e.printStackTrace(); 793 | } 794 | } 795 | 796 | /** 797 | * Run the precapture sequence for capturing a still image. This method should be called when 798 | * we get a response in {@link #mCaptureCallback} from {@link #lockFocus()}. 799 | */ 800 | private void runPrecaptureSequence() { 801 | try { 802 | // This is how to tell the camera to trigger. 803 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, 804 | CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START); 805 | // Tell #mCaptureCallback to wait for the precapture sequence to be set. 806 | mState = STATE_WAITING_PRECAPTURE; 807 | mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, 808 | mBackgroundHandler); 809 | } catch (CameraAccessException e) { 810 | e.printStackTrace(); 811 | } 812 | } 813 | 814 | /** 815 | * Capture a still picture. This method should be called when we get a response in 816 | * {@link #mCaptureCallback} from both {@link #lockFocus()}. 
817 | */ 818 | private void captureStillPicture() { 819 | try { 820 | final Activity activity = getActivity(); 821 | if (null == activity || null == mCameraDevice) { 822 | return; 823 | } 824 | // This is the CaptureRequest.Builder that we use to take a picture. 825 | final CaptureRequest.Builder captureBuilder = 826 | mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); 827 | captureBuilder.addTarget(mImageReader.getSurface()); 828 | 829 | // Use the same AE and AF modes as the preview. 830 | captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, 831 | CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); 832 | setAutoFlash(captureBuilder); 833 | 834 | // Orientation 835 | int rotation = activity.getWindowManager().getDefaultDisplay().getRotation(); 836 | captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation)); 837 | 838 | CameraCaptureSession.CaptureCallback CaptureCallback 839 | = new CameraCaptureSession.CaptureCallback() { 840 | 841 | @Override 842 | public void onCaptureCompleted(@NonNull CameraCaptureSession session, 843 | @NonNull CaptureRequest request, 844 | @NonNull TotalCaptureResult result) { 845 | showToast("Saved: " + mFile); 846 | Log.d(TAG, mFile.toString()); 847 | unlockFocus(); 848 | } 849 | }; 850 | 851 | mCaptureSession.stopRepeating(); 852 | mCaptureSession.abortCaptures(); 853 | mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null); 854 | } catch (CameraAccessException e) { 855 | e.printStackTrace(); 856 | } 857 | } 858 | 859 | /** 860 | * Retrieves the JPEG orientation from the specified screen rotation. 861 | * 862 | * @param rotation The screen rotation. 863 | * @return The JPEG orientation (one of 0, 90, 270, and 360) 864 | */ 865 | private int getOrientation(int rotation) { 866 | // Sensor orientation is 90 for most devices, or 270 for some devices (eg. Nexus 5X) 867 | // We have to take that into account and rotate JPEG properly. 
868 | // For devices with orientation of 90, we simply return our mapping from ORIENTATIONS. 869 | // For devices with orientation of 270, we need to rotate the JPEG 180 degrees. 870 | return (ORIENTATIONS.get(rotation) + mSensorOrientation + 270) % 360; 871 | } 872 | 873 | /** 874 | * Unlock the focus. This method should be called when still image capture sequence is 875 | * finished. 876 | */ 877 | private void unlockFocus() { 878 | try { 879 | // Reset the auto-focus trigger 880 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, 881 | CameraMetadata.CONTROL_AF_TRIGGER_CANCEL); 882 | setAutoFlash(mPreviewRequestBuilder); 883 | mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, 884 | mBackgroundHandler); 885 | // After this, the camera will go back to the normal state of preview. 886 | mState = STATE_PREVIEW; 887 | mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, 888 | mBackgroundHandler); 889 | } catch (CameraAccessException e) { 890 | e.printStackTrace(); 891 | } 892 | } 893 | 894 | @Override 895 | public void onClick(View view) { 896 | switch (view.getId()) { 897 | case R.id.picture: { 898 | // takePicture(); 899 | 900 | ((CameraActivity)getActivity()).click(); 901 | 902 | break; 903 | } 904 | case R.id.info: { 905 | Activity activity = getActivity(); 906 | if (null != activity) { 907 | new AlertDialog.Builder(activity) 908 | .setMessage(R.string.intro_message) 909 | .setPositiveButton(android.R.string.ok, null) 910 | .show(); 911 | } 912 | break; 913 | } 914 | } 915 | } 916 | 917 | private void setAutoFlash(CaptureRequest.Builder requestBuilder) { 918 | if (mFlashSupported) { 919 | requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, 920 | CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); 921 | } 922 | } 923 | 924 | /** 925 | * Saves a JPEG {@link Image} into the specified {@link File}. 
926 | */ 927 | private static class ImageSaver implements Runnable { 928 | 929 | /** 930 | * The JPEG image 931 | */ 932 | private final Image mImage; 933 | /** 934 | * The file we save the image into. 935 | */ 936 | private final File mFile; 937 | 938 | ImageSaver(Image image, File file) { 939 | mImage = image; 940 | mFile = file; 941 | } 942 | 943 | @Override 944 | public void run() { 945 | ByteBuffer buffer = mImage.getPlanes()[0].getBuffer(); 946 | byte[] bytes = new byte[buffer.remaining()]; 947 | buffer.get(bytes); 948 | FileOutputStream output = null; 949 | try { 950 | output = new FileOutputStream(mFile); 951 | output.write(bytes); 952 | } catch (IOException e) { 953 | e.printStackTrace(); 954 | } finally { 955 | mImage.close(); 956 | if (null != output) { 957 | try { 958 | output.close(); 959 | } catch (IOException e) { 960 | e.printStackTrace(); 961 | } 962 | } 963 | } 964 | } 965 | 966 | } 967 | 968 | /** 969 | * Compares two {@code Size}s based on their areas. 970 | */ 971 | static class CompareSizesByArea implements Comparator { 972 | 973 | @Override 974 | public int compare(Size lhs, Size rhs) { 975 | // We cast here to ensure the multiplications won't overflow 976 | return Long.signum((long) lhs.getWidth() * lhs.getHeight() - 977 | (long) rhs.getWidth() * rhs.getHeight()); 978 | } 979 | 980 | } 981 | 982 | /** 983 | * Shows an error message dialog. 
984 | */ 985 | public static class ErrorDialog extends DialogFragment { 986 | 987 | private static final String ARG_MESSAGE = "message"; 988 | 989 | public static ErrorDialog newInstance(String message) { 990 | ErrorDialog dialog = new ErrorDialog(); 991 | Bundle args = new Bundle(); 992 | args.putString(ARG_MESSAGE, message); 993 | dialog.setArguments(args); 994 | return dialog; 995 | } 996 | 997 | @NonNull 998 | @Override 999 | public Dialog onCreateDialog(Bundle savedInstanceState) { 1000 | final Activity activity = getActivity(); 1001 | return new AlertDialog.Builder(activity) 1002 | .setMessage(getArguments().getString(ARG_MESSAGE)) 1003 | .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { 1004 | @Override 1005 | public void onClick(DialogInterface dialogInterface, int i) { 1006 | activity.finish(); 1007 | } 1008 | }) 1009 | .create(); 1010 | } 1011 | 1012 | } 1013 | 1014 | /** 1015 | * Shows OK/Cancel confirmation dialog about camera permission. 1016 | */ 1017 | public static class ConfirmationDialog extends DialogFragment { 1018 | 1019 | @NonNull 1020 | @Override 1021 | public Dialog onCreateDialog(Bundle savedInstanceState) { 1022 | final Fragment parent = getParentFragment(); 1023 | return new AlertDialog.Builder(getActivity()) 1024 | .setMessage(R.string.request_permission) 1025 | .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { 1026 | @Override 1027 | public void onClick(DialogInterface dialog, int which) { 1028 | parent.requestPermissions(new String[]{Manifest.permission.CAMERA}, 1029 | REQUEST_CAMERA_PERMISSION); 1030 | } 1031 | }) 1032 | .setNegativeButton(android.R.string.cancel, 1033 | new DialogInterface.OnClickListener() { 1034 | @Override 1035 | public void onClick(DialogInterface dialog, int which) { 1036 | Activity activity = parent.getActivity(); 1037 | if (activity != null) { 1038 | activity.finish(); 1039 | } 1040 | } 1041 | }) 1042 | .create(); 1043 | } 1044 | } 1045 | 1046 | } 

--------------------------------------------------------------------------------
/Application/src/main/java/com/example/android/camera2basic/CameraActivity.java:
--------------------------------------------------------------------------------
/*
 * Copyright 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.example.android.camera2basic;

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.widget.Toast;

import java.util.Arrays;

import cc.rome753.encodemp4.R;
import cc.rome753.encodemp4.VideoRecorder;

/**
 * Hosts the camera preview fragment and owns the MP4 recording session: an AudioRecord-fed
 * PCM loop plus a {@link VideoRecorder} that muxes audio and camera frames into a file.
 */
public class CameraActivity extends AppCompatActivity {

    private static final int SAMPLE_RATE = 44100;

    private VideoRecorder mRecorder;
    private String path;

    private Thread recordThread;
    // volatile: written from the UI thread, read from the recording thread.
    private volatile boolean isStart = false;
    private final int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    // Created per recording session in startRecord() and released in stopRecord(). The
    // previous revision created it once at field initialization and released it on stop,
    // so every recording after the first used an already-released AudioRecord.
    private AudioRecord mAudioRecord;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_camera);
        if (null == savedInstanceState) {
            getSupportFragmentManager().beginTransaction()
                    .replace(R.id.container, Camera2BasicFragment.newInstance())
                    .commit();
        }
    }

    /**
     * Toggles recording: the first call starts audio capture and the muxer, the second call
     * stops them and reports the saved file path.
     */
    public void click() {
        if (mRecorder == null) {
            Toast.makeText(this, "start record...", Toast.LENGTH_SHORT).show();
            startRecord();
            path = getExternalCacheDir() + "/record-" + System.currentTimeMillis() + ".mp4";
            mRecorder = new VideoRecorder(path);
            mRecorder.start();
        } else {
            Toast.makeText(this, "saved: " + path, Toast.LENGTH_LONG).show();
            stopRecord();
            mRecorder.stop();
            mRecorder = null;
        }
    }

    /**
     * Forwards one camera frame to the encoder; no-op while not recording.
     *
     * @param data raw frame bytes from the preview's ImageReader
     */
    public void addVideoData(byte[] data) {
        if (mRecorder != null) {
            mRecorder.addVideoData(data);
        }
    }

    /**
     * Stops the recording thread, waiting briefly for it to exit. Safe to call repeatedly.
     */
    private void destroyThread() {
        isStart = false;
        Thread thread = recordThread;
        recordThread = null;
        // Guard against self-join: stopRecord() can be reached from the record thread itself.
        if (thread != null && thread != Thread.currentThread()) {
            try {
                thread.interrupt();
                thread.join(1000);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
    }

    /**
     * Starts the recording thread, stopping any previous one first.
     */
    private void startThread() {
        destroyThread();
        isStart = true;
        recordThread = new Thread(recordRunnable);
        recordThread.start();
    }

    /**
     * Recording loop: pulls PCM buffers from the AudioRecord and feeds them to the encoder.
     */
    Runnable recordRunnable = new Runnable() {
        @Override
        public void run() {
            try {
                android.os.Process.setThreadPriority(
                        android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
                byte[] tempBuffer = new byte[bufferSize];
                AudioRecord audioRecord = mAudioRecord;
                if (audioRecord == null
                        || audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
                    stopRecord();
                    return;
                }
                audioRecord.startRecording();
                while (isStart) {
                    int bytesRecord = audioRecord.read(tempBuffer, 0, bufferSize);
                    if (bytesRecord == AudioRecord.ERROR_INVALID_OPERATION
                            || bytesRecord == AudioRecord.ERROR_BAD_VALUE) {
                        continue;
                    }
                    if (bytesRecord <= 0) {
                        break;
                    }
                    if (mRecorder != null) {
                        // Hand over exactly the bytes that were read. The previous revision
                        // always passed the whole tempBuffer, so a short read shipped stale
                        // bytes from the previous iteration into the encoder.
                        mRecorder.addAudioData(Arrays.copyOf(tempBuffer, bytesRecord));
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }

    };

    /**
     * Starts audio capture, creating a fresh AudioRecord for this session if needed.
     */
    public void startRecord() {
        try {
            if (mAudioRecord == null) {
                mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
                        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
                        bufferSize * 2);
            }
            startThread();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Stops audio capture and releases the AudioRecord so the mic is freed.
     */
    public void stopRecord() {
        try {
            destroyThread();
            AudioRecord audioRecord = mAudioRecord;
            mAudioRecord = null;
            if (audioRecord != null) {
                if (audioRecord.getState() == AudioRecord.STATE_INITIALIZED) {
                    audioRecord.stop();
                }
                audioRecord.release();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

}
--------------------------------------------------------------------------------
/Application/src/main/res/drawable-hdpi/ic_action_info.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rome753/android-encode-mp4/94cf4f9a6aa2b77c763768b9cfea4cf2af39961f/Application/src/main/res/drawable-hdpi/ic_action_info.png
--------------------------------------------------------------------------------
/Application/src/main/res/drawable-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rome753/android-encode-mp4/94cf4f9a6aa2b77c763768b9cfea4cf2af39961f/Application/src/main/res/drawable-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/Application/src/main/res/drawable-hdpi/tile.9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rome753/android-encode-mp4/94cf4f9a6aa2b77c763768b9cfea4cf2af39961f/Application/src/main/res/drawable-hdpi/tile.9.png -------------------------------------------------------------------------------- /Application/src/main/res/drawable-mdpi/ic_action_info.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rome753/android-encode-mp4/94cf4f9a6aa2b77c763768b9cfea4cf2af39961f/Application/src/main/res/drawable-mdpi/ic_action_info.png -------------------------------------------------------------------------------- /Application/src/main/res/drawable-mdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rome753/android-encode-mp4/94cf4f9a6aa2b77c763768b9cfea4cf2af39961f/Application/src/main/res/drawable-mdpi/ic_launcher.png -------------------------------------------------------------------------------- /Application/src/main/res/drawable-xhdpi/ic_action_info.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rome753/android-encode-mp4/94cf4f9a6aa2b77c763768b9cfea4cf2af39961f/Application/src/main/res/drawable-xhdpi/ic_action_info.png -------------------------------------------------------------------------------- /Application/src/main/res/drawable-xhdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rome753/android-encode-mp4/94cf4f9a6aa2b77c763768b9cfea4cf2af39961f/Application/src/main/res/drawable-xhdpi/ic_launcher.png -------------------------------------------------------------------------------- /Application/src/main/res/drawable-xxhdpi/ic_action_info.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/rome753/android-encode-mp4/94cf4f9a6aa2b77c763768b9cfea4cf2af39961f/Application/src/main/res/drawable-xxhdpi/ic_action_info.png -------------------------------------------------------------------------------- /Application/src/main/res/drawable-xxhdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rome753/android-encode-mp4/94cf4f9a6aa2b77c763768b9cfea4cf2af39961f/Application/src/main/res/drawable-xxhdpi/ic_launcher.png -------------------------------------------------------------------------------- /Application/src/main/res/layout/activity_camera.xml: -------------------------------------------------------------------------------- 1 | 16 | 23 | -------------------------------------------------------------------------------- /Application/src/main/res/layout/fragment_camera2_basic.xml: -------------------------------------------------------------------------------- 1 | 16 | 19 | 20 | 26 | 27 | 34 | 35 |