├── .gitignore ├── .idea ├── codeStyles │ └── Project.xml ├── gradle.xml ├── misc.xml ├── modules.xml ├── runConfigurations.xml └── vcs.xml ├── README.md ├── app ├── .gitignore ├── build.gradle ├── proguard-rules.pro └── src │ ├── androidTest │ └── java │ │ └── top │ │ └── omooo │ │ └── audiovideotask │ │ └── ExampleInstrumentedTest.java │ ├── main │ ├── AndroidManifest.xml │ ├── java │ │ └── top │ │ │ └── omooo │ │ │ └── audiovideotask │ │ │ ├── MainActivity.java │ │ │ ├── task_1 │ │ │ ├── CustomImage.java │ │ │ ├── CustomSurfaceView.java │ │ │ ├── DrawBoardView.java │ │ │ └── DrawPictureActivity.java │ │ │ ├── task_2 │ │ │ ├── AudioActivity.java │ │ │ ├── AudioRecordActivity.java │ │ │ ├── AudioTrackManager.java │ │ │ └── MyAudioManager.java │ │ │ ├── task_3 │ │ │ ├── CameraPreview.java │ │ │ ├── PreviewActivity.java │ │ │ ├── SurfacePreviewActivity.java │ │ │ ├── TextureCameraPreview.java │ │ │ └── TexturePreviewActivity.java │ │ │ ├── task_4 │ │ │ ├── ExtractMuxerActivity.java │ │ │ └── MP4Manager.java │ │ │ └── task_5 │ │ │ ├── GLActivity.java │ │ │ ├── MyGlRenderer.java │ │ │ ├── MyGlSurfaceView.java │ │ │ └── MyTriangle.java │ └── res │ │ ├── drawable-v24 │ │ ├── ic_launcher_foreground.xml │ │ └── picture.gif │ │ ├── drawable │ │ └── ic_launcher_background.xml │ │ ├── layout │ │ ├── activity_audio_record.xml │ │ ├── activity_draw_picture.xml │ │ ├── activity_extract_muxer.xml │ │ ├── activity_gl.xml │ │ ├── activity_main.xml │ │ ├── activity_preview.xml │ │ └── activity_preview_layout.xml │ │ ├── mipmap-anydpi-v26 │ │ ├── ic_launcher.xml │ │ └── ic_launcher_round.xml │ │ ├── mipmap-hdpi │ │ ├── ic_launcher.png │ │ └── ic_launcher_round.png │ │ ├── mipmap-mdpi │ │ ├── ic_launcher.png │ │ └── ic_launcher_round.png │ │ ├── mipmap-xhdpi │ │ ├── ic_launcher.png │ │ └── ic_launcher_round.png │ │ ├── mipmap-xxhdpi │ │ ├── ic_launcher.png │ │ └── ic_launcher_round.png │ │ ├── mipmap-xxxhdpi │ │ ├── ic_launcher.png │ │ └── ic_launcher_round.png │ │ └── values │ │ ├── colors.xml │ │ ├── strings.xml │ │ └── styles.xml │ └── test │ └── java │ └── top │ └── omooo │ └── audiovideotask │ └── ExampleUnitTest.java ├── build.gradle ├── gradle.properties ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat └── settings.gradle /.gitignore: -------------------------------------------------------------------------------- 1 | *.iml 2 | .gradle 3 | /local.properties 4 | /.idea/workspace.xml 5 | /.idea/libraries 6 | .DS_Store 7 | /build 8 | /captures 9 | .externalNativeBuild 10 | -------------------------------------------------------------------------------- /.idea/codeStyles/Project.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 15 | 16 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | -------------------------------------------------------------------------------- /.idea/gradle.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 17 | 18 | -------------------------------------------------------------------------------- /.idea/misc.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 16 | 26 | 27 | 28 | 29 | 30 | 31 | 33 | 34 | 35 | 36 | 37 | 1.8 38 | 39 | 44 | 45 | 46 | 47 | 48 | 49 | -------------------------------------------------------------------------------- /.idea/modules.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 
--------------------------------------------------------------------------------
/.idea/runConfigurations.xml:
--------------------------------------------------------------------------------
1 | 2 | 3 | 4 | 11 | 12 |
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 | 2 | 3 | 4 | 5 | 6 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------

# AudioVideoTask
---
Task list
---

### 1. Draw an image on Android using at least three different APIs: ImageView, SurfaceView, and a custom View

Reference: [https://www.jianshu.com/p/4f2c1d00f32f](https://www.jianshu.com/p/4f2c1d00f32f)

**Difference between SurfaceView and View**

A SurfaceView can redraw its content from a separate, dedicated thread, whereas a View must update its content on the UI (main) thread.

**How SurfaceView works**

* What is a Surface?

  Simply put, a Surface corresponds to a block of screen buffer memory. Each window corresponds to one Surface, and every View is drawn onto a Surface. Traditional Views share a single screen buffer, so all of their drawing must happen on the UI thread.

* How are Surface and SurfaceView related?

  A SurfaceView wraps a Surface object rather than a Canvas, because a Surface can be drawn to from a background thread.

  This is particularly useful for resource-intensive operations or for content that needs fast updates or a high frame rate, such as 3D graphics or a real-time camera preview.

* What is SurfaceHolder.Callback?

  SurfaceHolder.Callback provides callbacks when the underlying Surface is created, destroyed, or changed.

  Because drawing can only happen after the Surface has been created, surfaceCreated and surfaceDestroyed in SurfaceHolder.Callback mark the boundaries of the drawing code.

  The SurfaceHolder can be thought of as the Surface's container and controller: it is used to manipulate the Surface, manage what is drawn and animated on its Canvas, and control the surface's size, pixel format, and so on.

* Cost of using it

  The price of drawing independently of the GUI thread is extra memory consumption. So although SurfaceView is an effective, and sometimes necessary, way to build a custom View, it should still be used with care.

* Things to note

  All SurfaceView and SurfaceHolder.Callback methods should be called from the UI thread, which is normally the application's main thread. Any variables that the rendering thread also accesses should be properly synchronized.

Reference: [https://www.jianshu.com/p/70912c55a03b](https://www.jianshu.com/p/70912c55a03b)

**Pros and cons of SurfaceView**

Pros:

* It draws itself on a worker thread, so it does not block the UI thread
* Efficient for complex UI effects
* Has its own independent Surface and independent Window
* Uses double buffering, so video playback is smoother

Cons:

* Each draw pass first paints a black background; if updates lag behind, black edges appear
* The Surface is not part of the View hierarchy, so its display is not controlled by View properties such as translation and scaling

**SurfaceView's double buffer**

Double buffering, in practice, can be understood like this: SurfaceView uses two Canvases when updating its content, a frontCanvas and a backCanvas. What is actually shown is always the frontCanvas, while the backCanvas holds the frame from before the last change. While the current frame is being displayed, the next frame has already been prepared, so playback is smoother.

When you call lockCanvas(), the Canvas you get is actually the backCanvas, not the frontCanvas currently on screen. You draw the new frame onto that backCanvas, and when you call unlockCanvasAndPost() with it, the posted Canvas replaces the old frontCanvas and becomes the new frontCanvas, while the old frontCanvas moves to the back as the new backCanvas. It works much like multiple threads alternately decoding and rendering each video frame.

SurfaceHolder.lockCanvas ----> SurfaceHolder.unlockCanvasAndPost(canvas)

![](https://upload-images.jianshu.io/upload_images/1592280-bea74430778c80b1.png?imageMogr2/auto-orient/strip%7CimageView2/2/w/413)

Double buffering is an important technique in game development. If a frame is still being displayed while the program is already changing it, and a redraw is requested before the previous draw has finished, the screen keeps **flickering**. With double buffering, the image is fully prepared in memory first and then shown on screen in one go. Its main purpose is to eliminate the flicker caused by repeated partial redraws: draw everything into an off-screen buffer, then display it all at once. Note that the Canvas must be cleared before every draw pass, otherwise frames overlap on screen.

The benefits of this approach are:

1. Better rendering efficiency
2. It avoids the flicker that appears when the refresh rate is too high

(A minimal drawing-loop sketch is included at the end of this section.)

**How does SurfaceView handle rotation and transparency?**

* When an ordinary View is rotated, its content rotates with it
* When a SurfaceView is rotated, the content it displays does not rotate

It is like opening a window (the Surface) in a wall: through the window you can see the world outside, but no matter how the window itself changes, the scene outside does not change with it.

**Most video players can switch between portrait and landscape. How is that implemented?**

Override onConfigurationChanged in the Activity and, based on the orientation change, adjust the width and height parameters of the SurfaceView accordingly.
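The snippet below is a stripped-down, illustrative sketch of that lockCanvas()/unlockCanvasAndPost() cycle, similar in spirit to the DrawBoardView in task_1 of this project. The class name and the moving-circle drawing are placeholders, not part of the project.

```java
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

/** Minimal double-buffered drawing loop on a SurfaceView (illustrative sketch). */
public class LoopSurfaceView extends SurfaceView implements SurfaceHolder.Callback, Runnable {

    private final Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
    private volatile boolean running;
    private float x;

    public LoopSurfaceView(Context context) {
        super(context);
        getHolder().addCallback(this);   // get notified when the Surface is ready
    }

    @Override public void surfaceCreated(SurfaceHolder holder) {
        running = true;
        new Thread(this).start();        // drawing happens off the UI thread
    }

    @Override public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { }

    @Override public void surfaceDestroyed(SurfaceHolder holder) {
        running = false;                 // stop the loop; the Surface is gone
    }

    @Override public void run() {
        while (running) {
            Canvas canvas = getHolder().lockCanvas();    // returns the back buffer
            if (canvas == null) continue;
            try {
                canvas.drawColor(Color.WHITE);           // clear, or frames overlap
                paint.setColor(Color.BLUE);
                canvas.drawCircle(x += 4, 200, 40, paint);
            } finally {
                getHolder().unlockCanvasAndPost(canvas); // post it as the new front buffer
            }
        }
    }
}
```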
### 2. Use the AudioRecord and AudioTrack APIs on Android to capture and play back PCM audio data, and implement reading and writing WAV audio files

[Audio capture: AudioRecord](https://www.jianshu.com/p/125b94af7c08)

[Audio playback: AudioTrack](https://www.jianshu.com/p/632dce664c3d)

[PCM to WAV](https://www.jianshu.com/p/f7863638acbe)

##### Audio capture on Android

There are generally two ways to capture audio on the Android platform:

1. Capturing audio with MediaRecorder

   MediaRecorder is an API built on top of AudioRecord (it ultimately creates an AudioRecord to talk to AudioFlinger). It converts the captured audio directly into an encoded format and saves it. Compared with launching the system's built-in recorder app, this makes it easy to lay out your own UI, and the system encapsulates everything nicely, so it is easy to use. Its one drawback is that the recorded audio is already encoded, so there is no way to get the raw audio. MediaRecorder can capture both audio and video, which makes it quite powerful; unless there are special requirements, it is used a lot in practice.

2. Capturing audio with AudioRecord

   AudioRecord is a relatively low-level API. It delivers PCM data frame by frame, which you can then process yourself. This is the most flexible way to capture audio and gives developers the most control over the captured data; the audio it captures is raw PCM. Anything that has to process the signal, such as voice-changing effects, must use it to collect audio.

   It is also the most common way to capture audio in real projects; live-streaming, for example, uses AudioRecord to capture audio data.

The audio file AudioRecord saves is in .pcm format and cannot be played directly: it is raw data with no container format, so players cannot recognize it.

There are two ways to play a PCM file:

1. Play the PCM data with AudioTrack
2. Convert the PCM data to WAV so that players can recognize it

**AudioRecord constructor parameters**

```java
public AudioRecord(int audioSource, int sampleRateInHz, int channelConfig, int audioFormat,
            int bufferSizeInBytes)
```

* audioSource: the audio source to capture from
* sampleRateInHz: the audio sample rate
* channelConfig: the channel configuration
* audioFormat: the sample encoding, i.e. the data format and size of each sample
* bufferSizeInBytes: the size of the buffer that holds the audio data captured by AudioRecord

**AudioRecord states**

* Checking the AudioRecord state

  Call getState() after the AudioRecord object has been instantiated to verify that it obtained the required hardware resources:

  * STATE_INITIALIZED: initialization finished
  * STATE_UNINITIALIZED: not initialized

* Call mAudioRecord.startRecording() to start capturing

  Once capture starts, the recording state automatically becomes RECORDSTATE_RECORDING

* Call mAudioRecord.stop() to stop capturing

  After capture stops, the recording state automatically becomes RECORDSTATE_STOPPED

* read() copies the captured data into the buffer; its return value also serves as a status code

  Error cases: ERROR_INVALID_OPERATION, ERROR_BAD_VALUE

##### Audio playback with AudioTrack

**Audio playback**

For playing sound there are two options, MediaPlayer and AudioTrack. MediaPlayer can play sound files in many formats, such as MP3, WAV and AAC, whereas AudioTrack can only play PCM data streams. The two are closely related, though: when MediaPlayer plays audio, the framework layer still creates an AudioTrack, passes the decoded PCM stream to it, and AudioFlinger then mixes the audio and hands it to the hardware for playback. Playing with AudioTrack directly simply skips MediaPlayer's decoding step.

**AudioTrack constructor parameters**

```java
public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat,
            int bufferSizeInBytes, int mode)
```

* streamType: the audio stream type

  The most important stream types are:

  * AudioManager.STREAM_MUSIC: stream used for music playback
  * AudioManager.STREAM_SYSTEM: stream used for system sounds
  * AudioManager.STREAM_RING: stream used for phone ringtones
  * AudioManager.STREAM_VOICE_CALL: stream used for phone calls
  * AudioManager.STREAM_ALARM: stream used for alarms
  * AudioManager.STREAM_NOTIFICATION: stream used for notifications
  * ...

* mode

  There are only two:

  - AudioTrack.MODE_STREAM

    STREAM means the application writes data into the AudioTrack piece by piece via write(): the app obtains data from somewhere, for example PCM produced by a decoder, and writes it to the AudioTrack. The downside is the constant crossing between the Java layer and the native layer, which costs some efficiency.

  - AudioTrack.MODE_STATIC

    STATIC means the data is handed over to the receiver in one go. It is simple and efficient, since only one operation is needed to transfer the data, but it clearly cannot handle playback of large amounts of audio, so it is normally used only for short, low-memory sounds such as ringtones and system prompts.

**What AudioTrack does**

AudioTrack is the class that manages and plays a single audio resource. It can only play already-decoded PCM streams and is used for PCM audio playback.

The flow for playing PCM audio with AudioTrack (see the sketch after this list):

1. Configure the basic parameters
2. Get the minimum buffer size
3. Create the AudioTrack object
4. Open the PCM file and wrap it in a DataInputStream
5. Start/stop playback

The system manages these stream types separately. The benefit is that, for example, if a call comes in while you are listening to music and you turn the in-call volume up, you do not have to readjust the music volume when the call ends and playback resumes.
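The following is a minimal sketch of that five-step flow, assuming a mono, 16-bit PCM file recorded at 44100 Hz (the same parameters used elsewhere in this project). The class name and file path are placeholders, and the deprecated stream-type constructor is used because it is the one described above.

```java
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;

/** Plays a raw PCM file with AudioTrack in MODE_STREAM (illustrative sketch). */
public class PcmPlayer {

    public void play(String pcmPath) throws IOException {
        // 1. Basic parameters (must match how the PCM was recorded)
        int sampleRate = 44100;
        int channelConfig = AudioFormat.CHANNEL_OUT_MONO;
        int audioFormat = AudioFormat.ENCODING_PCM_16BIT;

        // 2. Minimum buffer size for these parameters
        int minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat);

        // 3. Create the AudioTrack in streaming mode
        AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
                channelConfig, audioFormat, minBufferSize, AudioTrack.MODE_STREAM);

        // 4. Open the PCM file as a DataInputStream
        DataInputStream in = new DataInputStream(new FileInputStream(pcmPath));
        byte[] buffer = new byte[minBufferSize];

        // 5. Start playback and keep feeding the track until the file is exhausted
        track.play();
        try {
            int read;
            while ((read = in.read(buffer)) > 0) {
                track.write(buffer, 0, read);
            }
        } finally {
            track.stop();
            track.release();
            in.close();
        }
    }
}
```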
##### PCM to WAV

PCM:

PCM (pulse-code modulation) is a common encoding format used when converting an analog signal into a digital one: the analog signal is divided into segments at a fixed spacing, and the strength of each segment is quantized in binary.

PCM represents the amplitude of a stretch of audio as time passes; Android supports PCM audio data inside WAV files.

WAV:

WAV, MP3 and so on are the more common audio formats. Different encoding formats correspond to different representations of the raw audio, which is usually compressed to make it easier to transmit. So that the audio format can be identified, each format has its own specific header.

WAV is based on the RIFF standard (Resource Interchange File Format), which stores the file contents in tagged chunks.

The basic building block is the chunk; each chunk consists of three parts: an identifier, the data size, and the data itself.

Packing PCM into WAV:

PCM is the raw audio data, and WAV is a common audio format on Windows; a WAV file simply adds a file header in front of the PCM data.

### 3. Use the Camera API on Android to capture video, preview the camera data with SurfaceView and TextureView respectively, and obtain the NV21 data callback

[Android: previewing the Camera with SurfaceView and TextureView and getting NV21 data](https://rustfisher.github.io/2018/02/26/Android_note/Android-camera_nv21_surfaceview_textureview/)

[Android OpenGL development explained (2): showing the camera preview with SurfaceView, TextureView and GLSurfaceView (with demo)](https://www.jianshu.com/p/db8ecba6037a)

The Camera preview callback uses the NV21 format by default.

**Previewing the Camera with SurfaceView**

```java
@Override
public void surfaceCreated(SurfaceHolder holder) {
    // start the preview
    try {
        mCamera.setPreviewDisplay(holder);
        mCamera.startPreview();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
```

**Previewing the Camera with TextureView**

TextureView can be used to display a content stream; the stream can be video or an OpenGL scene, and it can come from the app's own process or from another process.

TextureView must be used in a hardware-accelerated window; if rendering falls back to software, the TextureView displays nothing.

Unlike SurfaceView, TextureView does not create a separate window. It behaves like a regular View, which means it can be moved, transformed, and animated.

|                          | TextureView                  | SurfaceView                      |
| ------------------------ | ---------------------------- | -------------------------------- |
| Drawing                  | Slightly delayed             | Immediate                        |
| Memory                   | Higher                       | Lower                            |
| Power consumption        | Higher                       | Lower                            |
| Animation                | Supported                    | Not supported                    |
| Typical use (suggested)  | Video playback, camera apps  | Heavy canvas updates (games)     |

```java
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
    if (this.getResources().getConfiguration().orientation != Configuration.ORIENTATION_LANDSCAPE) {
        mCamera.setDisplayOrientation(90);
    } else {
        mCamera.setDisplayOrientation(0);
    }
    try {
        mCamera.setPreviewCallback(mCameraPreviewCallback);
        mCamera.setPreviewTexture(surface); // use the SurfaceTexture
        mCamera.startPreview();
    } catch (IOException ioe) {

    }
}
```
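The NV21 frames themselves arrive in Camera.PreviewCallback.onPreviewFrame(), which the CameraPreview class in task_3 of this project registers as well. Below is a minimal sketch of wiring up that callback with the legacy Camera API; the class name and log tag are placeholders.

```java
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.util.Log;

/** Receives NV21 preview frames from the legacy Camera API (illustrative sketch). */
public class Nv21PreviewCallback implements Camera.PreviewCallback {

    private static final String TAG = "Nv21Preview";

    /** Call once after the camera is opened, before startPreview(). */
    public static void attach(Camera camera) {
        Camera.Parameters params = camera.getParameters();
        params.setPreviewFormat(ImageFormat.NV21);   // NV21 is also the default
        camera.setParameters(params);
        camera.setPreviewCallback(new Nv21PreviewCallback());
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // For NV21 (YUV 4:2:0), data.length == width * height * 3 / 2
        Camera.Size size = camera.getParameters().getPreviewSize();
        Log.d(TAG, "frame: " + size.width + "x" + size.height + ", " + data.length + " bytes");
        // Hand the buffer to an encoder/converter here; keep this callback fast.
    }
}
```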
### 4. Use Android's MediaExtractor and MediaMuxer APIs to parse and package mp4 files

[Android: parsing and muxing mp4 files with MediaExtractor and MediaMuxer](https://rustfisher.github.io/2018/03/01/Android_note/Android-media_MediaMuxer_MediaExtractor_mp4/)

[https://www.jianshu.com/p/1ceef353ede0](https://www.jianshu.com/p/1ceef353ede0)

**MP4**

MP4, also known as MPEG-4 Part 14, is a standard digital multimedia container format; the audio inside an MP4 is usually AAC.

**MediaExtractor**

MediaExtractor can be used to separate the video track and the audio track inside a multimedia container.

```
setDataSource()                               // set the data source, which can be a local file path or a network URL
getTrackFormat(int index)                     // get each track's MediaFormat, which carries details such as MIME type, resolution, sample rate, frame rate, etc.
selectTrack(int index)                        // select a specific track by index
readSampleData(ByteBuffer buffer, int offset) // read the current encoded sample into the buffer at the given offset
```

**MediaMuxer**

MediaMuxer can be used to mux elementary streams, combining the information of all selected tracks into one file; the supported output formats currently include MP4 and WebM.

**Extracting and writing out the video part of an MP4 file**

Extract the video from an MP4 file to obtain an MP4 file without audio.

The flow is: first extract with MediaExtractor, then write out the MP4 file with MediaMuxer.

* MediaExtractor: set the data source, then find and select the video track's format and index
* MediaMuxer: set the output format to MUXER_OUTPUT_MPEG_4, add the track format selected above, and call start()
* MediaExtractor: read frame data and keep feeding the frames and their metadata into MediaMuxer
* Finally stop and release both the MediaMuxer and the MediaExtractor; the whole operation should run on a worker thread

**Extracting the audio part of an MP4 file to obtain an audio file**

### 5. Learn the OpenGL ES API on Android, understand the basic OpenGL development workflow, and draw a triangle with OpenGL

[How to use OpenGL ES in Android apps](https://code.tutsplus.com/zh-hans/tutorials/how-to-use-opengl-es-in-android-apps--cms-28464)

Nearly every Android phone today has a graphics processing unit, or GPU for short. As the name suggests, it is a hardware unit dedicated to the computations needed for 3D graphics. There are currently two different APIs for talking to the GPU on an Android device: Vulkan and OpenGL ES.

Vulkan is only available on devices running Android 7.0 or higher, whereas OpenGL ES is supported on all Android versions.

**OpenGL ES**

OpenGL is short for Open Graphics Library, a platform-independent API for creating hardware-accelerated 3D graphics. OpenGL ES is short for OpenGL for Embedded Systems and is a subset of the OpenGL API.

OpenGL ES is a very low-level API. In other words, it offers no shortcuts for quickly creating or manipulating 3D objects. Instead, you have to manage those tasks yourself: creating the individual vertices and faces of your 3D objects, computing the various 3D transformations, and writing the different kinds of shaders.

It is also worth mentioning that the Android SDK and NDK let you write OpenGL ES-related code in both Java and C.

**Project setup**

OpenGL ES is part of the Android framework, so no extra dependencies are needed to use it.

Of course, some devices do not support the required OpenGL ES version and cannot install the app, so declare the requirement (OpenGL ES 2.0, which this project uses) in the manifest:

```xml
<uses-feature android:glEsVersion="0x00020000" android:required="true" />
```

**Creating a canvas**

The Android framework offers two widgets that can serve as a canvas for 3D graphics: GLSurfaceView and TextureView. Most developers prefer GLSurfaceView and only choose TextureView when they intend to overlay their 3D graphics on top of other View widgets.

The GLSurfaceView's width and height are usually set equal. This matters because the OpenGL ES coordinate system is a square; if you must use a rectangular canvas, remember to use its aspect ratio when computing the projection matrix.

**Creating a 3D object**

You could write out the X, Y and Z coordinates of every vertex by hand, but that is very tedious; using a 3D modelling tool is easier.

**Creating buffer objects**

You cannot pass the lists of vertices and faces directly to the OpenGL ES API methods; they must first be converted into buffer objects. A FloatBuffer is needed to hold the vertex coordinate data, and for the face data, which consists only of vertex indices, a ShortBuffer is enough.

**Creating shaders**

To render our 3D object we must create a vertex shader and a fragment shader for it. For now, think of a shader as a very simple program written in a C-like language called the OpenGL Shading Language, GLSL for short.

The vertex shader is responsible for processing the 3D object's vertices; the fragment shader (also called the pixel shader) is responsible for coloring the 3D object's pixels.

* Creating the vertex shader

  Create a new file named vertex_shader.txt in the project's res/raw folder.

  The vertex shader must have an attribute global variable to receive the vertex position data from the Java code. Also add a uniform global variable so it can receive the view-projection matrix from the Java code.

  Inside the vertex shader's main() method you must set the value of gl_Position, a GLSL built-in variable that determines the vertex's final position.

```java
attribute vec4 position;
uniform mat4 matrix;

void main() {
    gl_Position = matrix * position;
}
```

* Creating the fragment shader

  Create a new file named fragment_shader.txt in the project's res/raw folder.

```java
precision mediump float;

void main() {
    gl_FragColor = vec4(1, 0.5, 0, 1.0);
}
```

  This is a simple fragment shader that assigns orange to every pixel. To assign a color to a pixel, use the gl_FragColor built-in variable inside the fragment shader's main() method.

* Compiling the shaders
  The shader code above lives in files, so we first have to read it out and convert it into strings.

  The shader code must then be added to OpenGL ES shader objects. To create a new shader object, use the glCreateShader() method of the GLES20 class, passing either GL_VERTEX_SHADER or GL_FRAGMENT_SHADER depending on the type of shader object you want to create. The method returns an integer that serves as a reference to the shader object. A newly created shader object contains no code; to add the shader code to it, use the glShaderSource() method.

```java
int vertexShader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
GLES20.glShaderSource(vertexShader, vertexShaderCode);

int fragmentShader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
GLES20.glShaderSource(fragmentShader, fragmentShaderCode);
```

  The shader objects can now be passed to the glCompileShader() method to compile the code they contain.

```java
GLES20.glCompileShader(vertexShader);
GLES20.glCompileShader(fragmentShader);
```

--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 |
3 | android {
4 |     compileSdkVersion 26
5 |     defaultConfig {
6 |         applicationId "top.omooo.audiovideotask"
7 |         minSdkVersion 23
8 |         targetSdkVersion 26
9 |         versionCode 1
10 |         versionName "1.0"
11 |         testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
12 |     }
13 |     buildTypes {
14 |         release {
15 |             minifyEnabled false
16 |             proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
17 |         }
18 |     }
19 | }
20 |
21 | dependencies {
22 |     implementation fileTree(dir: 'libs', include: ['*.jar'])
23 |     implementation 'com.android.support:appcompat-v7:26.1.0'
24 |     implementation 'com.android.support.constraint:constraint-layout:1.1.2'
25 |     testImplementation 'junit:junit:4.12'
26 |     androidTestImplementation 'com.android.support.test:runner:1.0.1'
27 |     androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.1'
28 |
29 |     implementation 'com.jakewharton:butterknife:8.8.1'
30 |     annotationProcessor 'com.jakewharton:butterknife-compiler:8.8.1'
31 |
32 |     compile 'com.github.jokermonn:permissions4m:2.1.2-lib'
33 |     annotationProcessor 'com.github.jokermonn:permissions4m:2.1.2-processor'
34 | }
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | #   public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile 22 | -------------------------------------------------------------------------------- /app/src/androidTest/java/top/omooo/audiovideotask/ExampleInstrumentedTest.java: -------------------------------------------------------------------------------- 1 | package top.omooo.audiovideotask; 2 | 3 | import android.content.Context; 4 | import android.support.test.InstrumentationRegistry; 5 | import android.support.test.runner.AndroidJUnit4; 6 | 7 | import org.junit.Test; 8 | import org.junit.runner.RunWith; 9 | 10 | import static org.junit.Assert.*; 11 | 12 | /** 13 | * Instrumented test, which will execute on an Android device. 14 | * 15 | * @see Testing documentation 16 | */ 17 | @RunWith(AndroidJUnit4.class) 18 | public class ExampleInstrumentedTest { 19 | @Test 20 | public void useAppContext() throws Exception { 21 | // Context of the app under test. 22 | Context appContext = InstrumentationRegistry.getTargetContext(); 23 | 24 | assertEquals("top.omooo.audiovideotask", appContext.getPackageName()); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /app/src/main/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 26 | 27 | 28 | 29 | 30 | 32 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /app/src/main/java/top/omooo/audiovideotask/MainActivity.java: -------------------------------------------------------------------------------- 1 | package top.omooo.audiovideotask; 2 | 3 | import android.content.Intent; 4 | import android.os.Bundle; 5 | import android.support.v7.app.AppCompatActivity; 6 | import android.view.View; 7 | import android.widget.Button; 8 | 9 | import butterknife.BindView; 10 | import butterknife.ButterKnife; 11 | import butterknife.OnClick; 12 | import top.omooo.audiovideotask.task_1.DrawPictureActivity; 13 | import top.omooo.audiovideotask.task_2.AudioRecordActivity; 14 | import top.omooo.audiovideotask.task_3.PreviewActivity; 15 | import top.omooo.audiovideotask.task_4.ExtractMuxerActivity; 16 | import top.omooo.audiovideotask.task_5.GLActivity; 17 | 18 | public class MainActivity extends AppCompatActivity { 19 | 20 | @BindView(R.id.btn_task_1) 21 | Button mBtnTask1; 22 | @BindView(R.id.btn_task_2) 23 | Button mBtnTask2; 24 | @BindView(R.id.btn_task_3) 25 | Button mBtnTask3; 26 | @BindView(R.id.btn_task_4) 27 | Button mBtnTask4; 28 | @BindView(R.id.btn_task_5) 29 | Button mBtnTask5; 30 | 31 | @Override 32 | protected void onCreate(Bundle savedInstanceState) { 33 | super.onCreate(savedInstanceState); 34 | setContentView(R.layout.activity_main); 35 | ButterKnife.bind(this); 36 | } 37 | 38 | @OnClick({R.id.btn_task_1, R.id.btn_task_2, R.id.btn_task_3, R.id.btn_task_4, R.id.btn_task_5}) 39 | public void onViewClicked(View view) { 40 | switch (view.getId()) { 41 | case R.id.btn_task_1: 42 | startActivity(new Intent(this, DrawPictureActivity.class)); 43 | break; 44 | case R.id.btn_task_2: 45 | startActivity(new Intent(this, AudioRecordActivity.class)); 46 | break; 47 | case R.id.btn_task_3: 48 | startActivity(new Intent(this, PreviewActivity.class)); 49 | break; 50 | case R.id.btn_task_4: 51 | startActivity(new Intent(this, ExtractMuxerActivity.class)); 52 | break; 53 | case R.id.btn_task_5: 54 | startActivity(new Intent(this, GLActivity.class)); 55 | break; 56 | } 57 | } 58 | } 59 | 
-------------------------------------------------------------------------------- /app/src/main/java/top/omooo/audiovideotask/task_1/CustomImage.java: -------------------------------------------------------------------------------- 1 | package top.omooo.audiovideotask.task_1; 2 | 3 | import android.content.Context; 4 | import android.graphics.Bitmap; 5 | import android.graphics.BitmapFactory; 6 | import android.graphics.Canvas; 7 | import android.graphics.Paint; 8 | import android.support.annotation.Nullable; 9 | import android.util.AttributeSet; 10 | 11 | import top.omooo.audiovideotask.R; 12 | 13 | /** 14 | * Created by SSC on 2018/6/29. 15 | */ 16 | 17 | public class CustomImage extends android.support.v7.widget.AppCompatImageView { 18 | 19 | private Bitmap mBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.picture); 20 | private Paint mPaint = new Paint(); 21 | 22 | public CustomImage(Context context) { 23 | super(context); 24 | } 25 | 26 | public CustomImage(Context context, @Nullable AttributeSet attrs) { 27 | super(context, attrs); 28 | } 29 | 30 | public CustomImage(Context context, @Nullable AttributeSet attrs, int defStyleAttr) { 31 | super(context, attrs, defStyleAttr); 32 | } 33 | 34 | @Override 35 | protected void onDraw(Canvas canvas) { 36 | super.onDraw(canvas); 37 | canvas.drawBitmap(mBitmap, 0, 0, mPaint); 38 | } 39 | 40 | } 41 | -------------------------------------------------------------------------------- /app/src/main/java/top/omooo/audiovideotask/task_1/CustomSurfaceView.java: -------------------------------------------------------------------------------- 1 | package top.omooo.audiovideotask.task_1; 2 | 3 | import android.content.Context; 4 | import android.graphics.Bitmap; 5 | import android.graphics.BitmapFactory; 6 | import android.graphics.Canvas; 7 | import android.graphics.Matrix; 8 | import android.graphics.Paint; 9 | import android.graphics.Rect; 10 | import android.util.AttributeSet; 11 | import android.view.SurfaceHolder; 12 | import android.view.SurfaceView; 13 | 14 | import top.omooo.audiovideotask.R; 15 | 16 | /** 17 | * Created by SSC on 2018/6/29. 
18 | */ 19 | 20 | public class CustomSurfaceView extends SurfaceView implements SurfaceHolder.Callback { 21 | 22 | private SurfaceHolder mSurfaceHolder; 23 | private Paint mPaint; 24 | private Bitmap mBitmap; 25 | 26 | public CustomSurfaceView(Context context) { 27 | super(context); 28 | init(); 29 | } 30 | 31 | private void init() { 32 | //得到控制器 33 | mSurfaceHolder = getHolder(); 34 | //对SurfaceView进行操作 35 | mSurfaceHolder.addCallback(this); 36 | mPaint = new Paint(); 37 | mBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.picture); 38 | } 39 | 40 | public CustomSurfaceView(Context context, AttributeSet attrs) { 41 | super(context, attrs); 42 | init(); 43 | } 44 | 45 | public CustomSurfaceView(Context context, AttributeSet attrs, int defStyleAttr) { 46 | super(context, attrs, defStyleAttr); 47 | init(); 48 | } 49 | 50 | 51 | 52 | @Override 53 | public void surfaceCreated(SurfaceHolder holder) { 54 | Canvas canvas = mSurfaceHolder.lockCanvas(); 55 | canvas.drawBitmap(mBitmap, new Matrix(), mPaint); 56 | mSurfaceHolder.unlockCanvasAndPost(canvas); 57 | } 58 | 59 | @Override 60 | public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { 61 | //SurfaceView发生变化时候回调 62 | } 63 | 64 | @Override 65 | public void surfaceDestroyed(SurfaceHolder holder) { 66 | //SurfaceView销毁时回调 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /app/src/main/java/top/omooo/audiovideotask/task_1/DrawBoardView.java: -------------------------------------------------------------------------------- 1 | package top.omooo.audiovideotask.task_1; 2 | 3 | import android.content.Context; 4 | import android.graphics.Canvas; 5 | import android.graphics.Color; 6 | import android.graphics.Paint; 7 | import android.graphics.Path; 8 | import android.util.AttributeSet; 9 | import android.view.MotionEvent; 10 | import android.view.SurfaceHolder; 11 | import android.view.SurfaceView; 12 | 13 | /** 14 | * Created by SSC on 2018/6/29. 
15 | */ 16 | 17 | /** 18 | * 用Surface实现画板 19 | */ 20 | public class DrawBoardView extends SurfaceView implements SurfaceHolder.Callback, Runnable { 21 | 22 | private SurfaceHolder mSurfaceHolder; 23 | private Canvas mCanvas; 24 | private volatile boolean mIsDrawing; 25 | private Paint mPaint; 26 | private Path mPath; 27 | 28 | public DrawBoardView(Context context) { 29 | super(context); 30 | init(); 31 | } 32 | 33 | public DrawBoardView(Context context, AttributeSet attrs) { 34 | super(context, attrs); 35 | init(); 36 | } 37 | 38 | public DrawBoardView(Context context, AttributeSet attrs, int defStyleAttr) { 39 | super(context, attrs, defStyleAttr); 40 | init(); 41 | } 42 | 43 | private void init() { 44 | mSurfaceHolder = getHolder(); 45 | mSurfaceHolder.addCallback(this); 46 | setFocusable(true); 47 | setFocusableInTouchMode(true); 48 | this.setKeepScreenOn(true); 49 | 50 | mPaint = new Paint(); 51 | mPaint.setColor(Color.BLUE); 52 | mPaint.setStyle(Paint.Style.STROKE); 53 | mPaint.setStrokeWidth(10); 54 | mPaint.setAntiAlias(true); 55 | mPath = new Path(); 56 | } 57 | @Override 58 | public void surfaceCreated(SurfaceHolder holder) { 59 | mIsDrawing = true; 60 | new Thread(this).start(); 61 | } 62 | 63 | @Override 64 | public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { 65 | 66 | } 67 | 68 | @Override 69 | public void surfaceDestroyed(SurfaceHolder holder) { 70 | mIsDrawing = false; 71 | } 72 | 73 | @Override 74 | public void run() { 75 | long start = System.currentTimeMillis(); 76 | while (mIsDrawing) { 77 | draw(); 78 | } 79 | long end = System.currentTimeMillis(); 80 | // 50 - 100 81 | if (end - start < 100) {//保证线程运行时间不少于100ms 82 | try { 83 | Thread.sleep(100 - (end - start)); 84 | } catch (InterruptedException e) { 85 | e.printStackTrace(); 86 | } 87 | } 88 | } 89 | private synchronized void draw() { 90 | try { 91 | mCanvas = mSurfaceHolder.lockCanvas(); 92 | mCanvas.drawColor(Color.WHITE); 93 | mCanvas.drawPath(mPath,mPaint); 94 | } catch (Exception e) { 95 | } finally { 96 | if (mCanvas != null){ 97 | mSurfaceHolder.unlockCanvasAndPost(mCanvas); 98 | } 99 | } 100 | } 101 | 102 | @Override 103 | public boolean onTouchEvent(MotionEvent event) { 104 | switch (event.getAction()){ 105 | case MotionEvent.ACTION_DOWN: 106 | mPath.moveTo(event.getX(),event.getY()); 107 | break; 108 | case MotionEvent.ACTION_MOVE: 109 | mPath.lineTo(event.getX(),event.getY()); 110 | break; 111 | case MotionEvent.ACTION_UP: 112 | 113 | break; 114 | } 115 | return true; 116 | } 117 | 118 | } 119 | -------------------------------------------------------------------------------- /app/src/main/java/top/omooo/audiovideotask/task_1/DrawPictureActivity.java: -------------------------------------------------------------------------------- 1 | package top.omooo.audiovideotask.task_1; 2 | 3 | import android.os.Bundle; 4 | import android.support.annotation.Nullable; 5 | import android.support.v7.app.AppCompatActivity; 6 | 7 | import top.omooo.audiovideotask.R; 8 | 9 | /** 10 | * Created by SSC on 2018/6/29. 
11 | */ 12 | 13 | public class DrawPictureActivity extends AppCompatActivity { 14 | @Override 15 | protected void onCreate(@Nullable Bundle savedInstanceState) { 16 | super.onCreate(savedInstanceState); 17 | //自定义View、SurfaceView 18 | setContentView(R.layout.activity_draw_picture); 19 | //自定义SurfaceView实现画板 20 | // setContentView(new DrawBoardView(this)); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /app/src/main/java/top/omooo/audiovideotask/task_2/AudioActivity.java: -------------------------------------------------------------------------------- 1 | package top.omooo.audiovideotask.task_2; 2 | 3 | import android.Manifest; 4 | import android.os.Bundle; 5 | import android.os.Environment; 6 | import android.support.annotation.NonNull; 7 | import android.support.annotation.Nullable; 8 | import android.support.v7.app.AppCompatActivity; 9 | import android.view.View; 10 | import android.widget.Button; 11 | import android.widget.Toast; 12 | 13 | import com.joker.api.Permissions4M; 14 | import com.joker.api.wrapper.ListenerWrapper; 15 | 16 | import butterknife.BindView; 17 | import butterknife.ButterKnife; 18 | import butterknife.OnClick; 19 | import top.omooo.audiovideotask.R; 20 | 21 | /** 22 | * Created by SSC on 2018/7/1. 23 | */ 24 | 25 | public class AudioActivity extends AppCompatActivity { 26 | 27 | @BindView(R.id.btn_start) 28 | Button mBtnStart; 29 | @BindView(R.id.btn_stop) 30 | Button mBtnStop; 31 | @BindView(R.id.btn_play) 32 | Button mBtnPlay; 33 | @BindView(R.id.btn_pause) 34 | Button mBtnPause; 35 | @BindView(R.id.btn_convert) 36 | Button mBtnConvert; 37 | @BindView(R.id.btn_play_wav) 38 | Button mBtnPlayWav; 39 | 40 | private MyAudioManager mMyAudioManager; 41 | 42 | private String fileRoot = Environment.getExternalStorageDirectory().getAbsolutePath() + "/AudioRecordFile"; 43 | 44 | private AudioTrackManager mAudioTrackManager; 45 | 46 | @Override 47 | protected void onCreate(@Nullable Bundle savedInstanceState) { 48 | super.onCreate(savedInstanceState); 49 | setContentView(R.layout.activity_audio_record); 50 | ButterKnife.bind(this); 51 | 52 | requestPer(); 53 | init(); 54 | } 55 | 56 | private void init() { 57 | mMyAudioManager = new MyAudioManager(this); 58 | mAudioTrackManager = AudioTrackManager.getInstance(); 59 | } 60 | 61 | @OnClick({R.id.btn_start, R.id.btn_stop, R.id.btn_play, R.id.btn_pause, R.id.btn_convert,R.id.btn_play_wav}) 62 | public void onViewClicked(View view) { 63 | switch (view.getId()) { 64 | case R.id.btn_start: 65 | mMyAudioManager.startRecord(); 66 | break; 67 | case R.id.btn_stop: 68 | mMyAudioManager.stopRecord(); 69 | break; 70 | case R.id.btn_play: 71 | // mMyAudioManager.playFile(fileRoot + "/audio.pcm"); 72 | mAudioTrackManager.startPlay(fileRoot + "/audio.pcm"); 73 | break; 74 | case R.id.btn_pause: 75 | // mMyAudioManager.stopPlay(); 76 | mAudioTrackManager.stopPlay(); 77 | break; 78 | case R.id.btn_convert: 79 | 80 | break; 81 | case R.id.btn_play_wav: 82 | break; 83 | } 84 | } 85 | 86 | private void requestPer() { 87 | Permissions4M.get(this) 88 | .requestPermissions(Manifest.permission.RECORD_AUDIO, Manifest.permission.WRITE_EXTERNAL_STORAGE) 89 | .requestCodes(0x01, 0x02) 90 | .requestListener(new ListenerWrapper.PermissionRequestListener() { 91 | @Override 92 | public void permissionGranted(int i) { 93 | 94 | } 95 | 96 | @Override 97 | public void permissionDenied(int i) { 98 | switch (i) { 99 | case 0x01: 100 | Toast.makeText(AudioActivity.this, "录音权限申请失败!", Toast.LENGTH_SHORT).show(); 
101 | break; 102 | case 0x02: 103 | Toast.makeText(AudioActivity.this, "读写SD卡权限申请失败!", Toast.LENGTH_SHORT).show(); 104 | break; 105 | } 106 | } 107 | 108 | @Override 109 | public void permissionRationale(int i) { 110 | switch (i) { 111 | case 0x01: 112 | Toast.makeText(AudioActivity.this, "申请必要权限用于录音!", Toast.LENGTH_SHORT).show(); 113 | break; 114 | case 0x02: 115 | Toast.makeText(AudioActivity.this, "申请必要权限用于读写SD卡!", Toast.LENGTH_SHORT).show(); 116 | break; 117 | } 118 | } 119 | }) 120 | .request(); 121 | } 122 | 123 | 124 | @Override 125 | public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] 126 | grantResults) { 127 | Permissions4M.onRequestPermissionsResult(this, requestCode, grantResults); 128 | super.onRequestPermissionsResult(requestCode, permissions, grantResults); 129 | } 130 | 131 | @Override 132 | protected void onDestroy() { 133 | super.onDestroy(); 134 | mMyAudioManager.stopRecord(); 135 | mMyAudioManager.destroyThread(); 136 | } 137 | } 138 | -------------------------------------------------------------------------------- /app/src/main/java/top/omooo/audiovideotask/task_2/AudioRecordActivity.java: -------------------------------------------------------------------------------- 1 | package top.omooo.audiovideotask.task_2; 2 | 3 | import android.Manifest; 4 | import android.media.AudioFormat; 5 | import android.media.AudioRecord; 6 | import android.media.MediaRecorder; 7 | import android.os.Bundle; 8 | import android.os.Environment; 9 | import android.support.annotation.NonNull; 10 | import android.support.v7.app.AppCompatActivity; 11 | import android.view.View; 12 | import android.widget.Button; 13 | import android.widget.Toast; 14 | 15 | import com.joker.api.Permissions4M; 16 | import com.joker.api.wrapper.ListenerWrapper; 17 | 18 | import java.io.BufferedOutputStream; 19 | import java.io.DataOutputStream; 20 | import java.io.File; 21 | import java.io.FileOutputStream; 22 | import java.io.IOException; 23 | import java.io.RandomAccessFile; 24 | 25 | import butterknife.BindView; 26 | import butterknife.ButterKnife; 27 | import butterknife.OnClick; 28 | import top.omooo.audiovideotask.R; 29 | 30 | public class AudioRecordActivity extends AppCompatActivity implements Runnable { 31 | 32 | @BindView(R.id.btn_start) 33 | Button mBtnStart; 34 | @BindView(R.id.btn_stop) 35 | Button mBtnStop; 36 | @BindView(R.id.btn_play) 37 | Button mBtnPlay; 38 | @BindView(R.id.btn_pause) 39 | Button mBtnPause; 40 | @BindView(R.id.btn_convert) 41 | Button mBtnConvert; 42 | @BindView(R.id.btn_play_wav) 43 | Button mBtnPlayWav; 44 | 45 | //指定音频源 这个和MediaRecorder是相同的 MediaRecorder.AudioSource.MIC指的是麦克风 46 | private static final int mAudioSource = MediaRecorder.AudioSource.MIC; 47 | //指定采样率 (MediaRecorder 的采样率通常是8000Hz AAC的通常是44100Hz。 设置采样率为44100,目前为常用的采样率,官方文档表示这个值可以兼容所有的设置) 48 | private static final int mSampleRateInHz = 44100; 49 | //指定捕获音频的声道数目。在AudioFormat类中指定用于此的常量 50 | private static final int mChannelConfig = AudioFormat.CHANNEL_CONFIGURATION_MONO; //单声道 51 | //指定音频量化位数 ,在AudioFormaat类中指定了以下各种可能的常量。通常我们选择ENCODING_PCM_16BIT和ENCODING_PCM_8BIT PCM代表的是脉冲编码调制,它实际上是原始音频样本。 52 | //因此可以设置每个样本的分辨率为16位或者8位,16位将占用更多的空间和处理能力,表示的音频也更加接近真实。 53 | private static final int mAudioFormat = AudioFormat.ENCODING_PCM_16BIT; 54 | //指定缓冲区大小。调用AudioRecord类的getMinBufferSize方法可以获得。 55 | private int mBufferSizeInBytes; 56 | 57 | private File mRecordingFile;//储存AudioRecord录下来的文件 58 | private File mWavFile; 59 | private boolean isRecording = false; //true表示正在录音 60 | private 
AudioRecord mAudioRecord = null; 61 | private File mFileRoot = null;//文件目录 62 | //存放的目录路径名称 63 | private static final String mPathName = Environment.getExternalStorageDirectory().getAbsolutePath() + "/AudioRecordFiles"; 64 | //保存的音频文件名 65 | private static final String mFileName = "record.pcm"; 66 | private static final String mWavFileName = "record.wav"; 67 | //缓冲区中数据写入到数据,因为需要使用IO操作,因此读取数据的过程应该在子线程中执行。 68 | private Thread mThread; 69 | private DataOutputStream mDataOutputStream; 70 | private DataOutputStream mDataOutputStreamWav; 71 | private AudioTrackManager mAudioTrackManager; 72 | 73 | @Override 74 | protected void onCreate(Bundle savedInstanceState) { 75 | super.onCreate(savedInstanceState); 76 | setContentView(R.layout.activity_audio_record); 77 | ButterKnife.bind(this); 78 | 79 | initDatas(); 80 | requestPer(); 81 | mAudioTrackManager = new AudioTrackManager(); 82 | } 83 | 84 | //初始化数据 85 | private void initDatas() { 86 | mBufferSizeInBytes = AudioRecord.getMinBufferSize(mSampleRateInHz, mChannelConfig, mAudioFormat);//计算最小缓冲区 87 | mAudioRecord = new AudioRecord(mAudioSource, mSampleRateInHz, mChannelConfig, 88 | mAudioFormat, mBufferSizeInBytes);//创建AudioRecorder对象 89 | 90 | mFileRoot = new File(mPathName); 91 | if (!mFileRoot.exists()) 92 | mFileRoot.mkdirs();//创建文件夹 93 | 94 | } 95 | 96 | 97 | @OnClick({R.id.btn_start, R.id.btn_stop, R.id.btn_play, R.id.btn_pause, R.id.btn_convert, R.id.btn_play_wav}) 98 | public void onViewClicked(View view) { 99 | switch (view.getId()) { 100 | case R.id.btn_start: 101 | startRecord(); 102 | break; 103 | case R.id.btn_stop: 104 | stopRecord(); 105 | break; 106 | case R.id.btn_play: 107 | mAudioTrackManager.startPlay(mPathName + "/" + mFileName); 108 | break; 109 | case R.id.btn_pause: 110 | mAudioTrackManager.stopPlay(); 111 | break; 112 | case R.id.btn_convert: 113 | convertToWav(); 114 | break; 115 | case R.id.btn_play_wav: 116 | mAudioTrackManager.startPlay(mPathName + "/" + mWavFileName); 117 | break; 118 | } 119 | } 120 | 121 | //pcm转wav 122 | private void convertToWav() { 123 | mWavFile = new File(mFileRoot, mWavFileName); 124 | mRecordingFile = new File(mFileRoot, mFileName); 125 | RandomAccessFile wavRaf = null; 126 | try { 127 | wavRaf = new RandomAccessFile(mWavFile, "rw"); 128 | byte[] header = generateWavFileHeader(mRecordingFile.length(), mSampleRateInHz, mAudioRecord.getChannelCount()); 129 | wavRaf.seek(0); 130 | wavRaf.write(header); 131 | wavRaf.close(); 132 | } catch (IOException e) { 133 | e.printStackTrace(); 134 | } 135 | 136 | } 137 | 138 | //写入头文件 其实wav比pcm多的就是头文件信息 139 | private void writeWavFileHeader(FileOutputStream out, long totalAudioLen, long longSampleRate, 140 | int channels) throws IOException { 141 | byte[] header = generateWavFileHeader(totalAudioLen, longSampleRate, channels); 142 | out.write(header, 0, header.length); 143 | } 144 | 145 | private byte[] generateWavFileHeader(long pcmAudioByteCount, long longSampleRate, int channels) { 146 | long totalDataLen = pcmAudioByteCount + 36; // 不包含前8个字节的WAV文件总长度 147 | long byteRate = longSampleRate * 2 * channels; 148 | byte[] header = new byte[44]; 149 | header[0] = 'R'; // RIFF 150 | header[1] = 'I'; 151 | header[2] = 'F'; 152 | header[3] = 'F'; 153 | 154 | header[4] = (byte) (totalDataLen & 0xff);//数据大小 155 | header[5] = (byte) ((totalDataLen >> 8) & 0xff); 156 | header[6] = (byte) ((totalDataLen >> 16) & 0xff); 157 | header[7] = (byte) ((totalDataLen >> 24) & 0xff); 158 | 159 | header[8] = 'W';//WAVE 160 | header[9] = 'A'; 161 | header[10] = 'V'; 162 | header[11] 
= 'E'; 163 | //FMT Chunk 164 | header[12] = 'f'; // 'fmt ' 165 | header[13] = 'm'; 166 | header[14] = 't'; 167 | header[15] = ' ';//过渡字节 168 | //数据大小 169 | header[16] = 16; // 4 bytes: size of 'fmt ' chunk 170 | header[17] = 0; 171 | header[18] = 0; 172 | header[19] = 0; 173 | //编码方式 10H为PCM编码格式 174 | header[20] = 1; // format = 1 175 | header[21] = 0; 176 | //通道数 177 | header[22] = (byte) channels; 178 | header[23] = 0; 179 | //采样率,每个通道的播放速度 180 | header[24] = (byte) (longSampleRate & 0xff); 181 | header[25] = (byte) ((longSampleRate >> 8) & 0xff); 182 | header[26] = (byte) ((longSampleRate >> 16) & 0xff); 183 | header[27] = (byte) ((longSampleRate >> 24) & 0xff); 184 | //音频数据传送速率,采样率*通道数*采样深度/8 185 | header[28] = (byte) (byteRate & 0xff); 186 | header[29] = (byte) ((byteRate >> 8) & 0xff); 187 | header[30] = (byte) ((byteRate >> 16) & 0xff); 188 | header[31] = (byte) ((byteRate >> 24) & 0xff); 189 | // 确定系统一次要处理多少个这样字节的数据,确定缓冲区,通道数*采样位数 190 | header[32] = (byte) (2 * channels); 191 | header[33] = 0; 192 | //每个样本的数据位数 193 | header[34] = 16; 194 | header[35] = 0; 195 | //Data chunk 196 | header[36] = 'd';//data 197 | header[37] = 'a'; 198 | header[38] = 't'; 199 | header[39] = 'a'; 200 | header[40] = (byte) (pcmAudioByteCount & 0xff); 201 | header[41] = (byte) ((pcmAudioByteCount >> 8) & 0xff); 202 | header[42] = (byte) ((pcmAudioByteCount >> 16) & 0xff); 203 | header[43] = (byte) ((pcmAudioByteCount >> 24) & 0xff); 204 | return header; 205 | } 206 | //开始录音 207 | 208 | public void startRecord() { 209 | 210 | //AudioRecord.getMinBufferSize的参数是否支持当前的硬件设备 211 | if (AudioRecord.ERROR_BAD_VALUE == mBufferSizeInBytes || AudioRecord.ERROR == mBufferSizeInBytes) { 212 | throw new RuntimeException("Unable to getMinBufferSize"); 213 | } else { 214 | destroyThread(); 215 | isRecording = true; 216 | if (mThread == null) { 217 | mThread = new Thread(this); 218 | mThread.start();//开启线程 219 | } 220 | } 221 | } 222 | 223 | /** 224 | * 销毁线程方法 225 | */ 226 | private void destroyThread() { 227 | try { 228 | isRecording = false; 229 | if (null != mThread && Thread.State.RUNNABLE == mThread.getState()) { 230 | try { 231 | Thread.sleep(500); 232 | mThread.interrupt(); 233 | } catch (Exception e) { 234 | mThread = null; 235 | } 236 | } 237 | mThread = null; 238 | } catch (Exception e) { 239 | e.printStackTrace(); 240 | } finally { 241 | mThread = null; 242 | } 243 | } 244 | //停止录音 245 | 246 | public void stopRecord() { 247 | isRecording = false; 248 | //停止录音,回收AudioRecord对象,释放内存 249 | if (mAudioRecord != null) { 250 | if (mAudioRecord.getState() == AudioRecord.STATE_INITIALIZED) {//初始化成功 251 | mAudioRecord.stop(); 252 | } 253 | if (mAudioRecord != null) { 254 | mAudioRecord.release(); 255 | } 256 | } 257 | } 258 | 259 | @Override 260 | public void run() { 261 | 262 | //标记为开始采集状态 263 | isRecording = true; 264 | //创建一个流,存放从AudioRecord读取的数据 265 | mRecordingFile = new File(mFileRoot, mFileName); 266 | if (mRecordingFile.exists()) {//音频文件保存过了删除 267 | mRecordingFile.delete(); 268 | } 269 | mWavFile = new File(mFileRoot, mWavFileName); 270 | if (mWavFile.exists()) { 271 | mWavFile.delete(); 272 | } 273 | try { 274 | mRecordingFile.createNewFile();//创建新文件 275 | mWavFile.createNewFile(); 276 | } catch (IOException e) { 277 | e.printStackTrace(); 278 | } 279 | 280 | try { 281 | //获取到文件的数据流 282 | mDataOutputStream = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(mRecordingFile))); 283 | mDataOutputStreamWav = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(mWavFile))); 284 | byte[] 
buffer = new byte[mBufferSizeInBytes]; 285 | 286 | 287 | //判断AudioRecord未初始化,停止录音的时候释放了,状态就为STATE_UNINITIALIZED 288 | if (mAudioRecord.getState() == mAudioRecord.STATE_UNINITIALIZED) { 289 | initDatas(); 290 | } 291 | 292 | writeWavFileHeader(new FileOutputStream(mWavFile), mBufferSizeInBytes, mSampleRateInHz, mAudioRecord.getChannelCount()); 293 | 294 | mAudioRecord.startRecording();//开始录音 295 | //getRecordingState获取当前AudioRecording是否正在采集数据的状态 296 | while (isRecording && mAudioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) { 297 | int bufferReadResult = mAudioRecord.read(buffer, 0, mBufferSizeInBytes); 298 | for (int i = 0; i < bufferReadResult; i++) { 299 | mDataOutputStream.write(buffer[i]); 300 | mDataOutputStreamWav.write(buffer[i]); 301 | } 302 | 303 | } 304 | mDataOutputStream.close(); 305 | mDataOutputStreamWav.close(); 306 | } catch (Throwable t) { 307 | stopRecord(); 308 | } 309 | } 310 | 311 | @Override 312 | protected void onDestroy() { 313 | super.onDestroy(); 314 | destroyThread(); 315 | stopRecord(); 316 | } 317 | 318 | private void requestPer() { 319 | Permissions4M.get(this) 320 | .requestPermissions(Manifest.permission.RECORD_AUDIO, Manifest.permission.WRITE_EXTERNAL_STORAGE) 321 | .requestCodes(0x01, 0x02) 322 | .requestListener(new ListenerWrapper.PermissionRequestListener() { 323 | @Override 324 | public void permissionGranted(int i) { 325 | 326 | } 327 | 328 | @Override 329 | public void permissionDenied(int i) { 330 | switch (i) { 331 | case 0x01: 332 | Toast.makeText(AudioRecordActivity.this, "录音权限申请失败!", Toast.LENGTH_SHORT).show(); 333 | break; 334 | case 0x02: 335 | Toast.makeText(AudioRecordActivity.this, "读写SD卡权限申请失败!", Toast.LENGTH_SHORT).show(); 336 | break; 337 | } 338 | } 339 | 340 | @Override 341 | public void permissionRationale(int i) { 342 | switch (i) { 343 | case 0x01: 344 | Toast.makeText(AudioRecordActivity.this, "申请必要权限用于录音!", Toast.LENGTH_SHORT).show(); 345 | break; 346 | case 0x02: 347 | Toast.makeText(AudioRecordActivity.this, "申请必要权限用于读写SD卡!", Toast.LENGTH_SHORT).show(); 348 | break; 349 | } 350 | } 351 | }) 352 | .request(); 353 | } 354 | 355 | @Override 356 | public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] 357 | grantResults) { 358 | Permissions4M.onRequestPermissionsResult(this, requestCode, grantResults); 359 | super.onRequestPermissionsResult(requestCode, permissions, grantResults); 360 | } 361 | } 362 | 363 | -------------------------------------------------------------------------------- /app/src/main/java/top/omooo/audiovideotask/task_2/AudioTrackManager.java: -------------------------------------------------------------------------------- 1 | package top.omooo.audiovideotask.task_2; 2 | 3 | import android.media.AudioFormat; 4 | import android.media.AudioManager; 5 | import android.media.AudioRecord; 6 | import android.media.AudioTrack; 7 | 8 | import java.io.DataInputStream; 9 | import java.io.File; 10 | import java.io.FileInputStream; 11 | 12 | /** 13 | * 用于播放 PCM 或 WAV 音频文件 14 | */ 15 | public class AudioTrackManager { 16 | private AudioTrack mAudioTrack; 17 | private DataInputStream mDis;//播放文件的数据流 18 | private Thread mRecordThread; 19 | private boolean isStart = false; 20 | private volatile static AudioTrackManager mInstance; 21 | 22 | //音频流类型 23 | private static final int mStreamType = AudioManager.STREAM_MUSIC; 24 | //指定采样率 (MediaRecoder 的采样率通常是8000Hz AAC的通常是44100Hz。 设置采样率为44100,目前为常用的采样率,官方文档表示这个值可以兼容所有的设置) 25 | private static final int mSampleRateInHz=44100 
; 26 | //指定捕获音频的声道数目。在AudioFormat类中指定用于此的常量 27 | private static final int mChannelConfig= AudioFormat.CHANNEL_CONFIGURATION_MONO; //单声道 28 | //指定音频量化位数 ,在AudioFormaat类中指定了以下各种可能的常量。通常我们选择ENCODING_PCM_16BIT和ENCODING_PCM_8BIT PCM代表的是脉冲编码调制,它实际上是原始音频样本。 29 | //因此可以设置每个样本的分辨率为16位或者8位,16位将占用更多的空间和处理能力,表示的音频也更加接近真实。 30 | private static final int mAudioFormat=AudioFormat.ENCODING_PCM_16BIT; 31 | //指定缓冲区大小。调用AudioRecord类的getMinBufferSize方法可以获得。 32 | private int mMinBufferSize; 33 | //STREAM的意思是由用户在应用程序通过write方式把数据一次一次得写到audiotrack中。这个和我们在socket中发送数据一样, 34 | // 应用层从某个地方获取数据,例如通过编解码得到PCM数据,然后write到audiotrack。 35 | private static int mMode = AudioTrack.MODE_STREAM; 36 | 37 | 38 | public AudioTrackManager() { 39 | initData(); 40 | } 41 | 42 | private void initData(){ 43 | //根据采样率,采样精度,单双声道来得到frame的大小。 44 | mMinBufferSize = AudioTrack.getMinBufferSize(mSampleRateInHz,mChannelConfig, mAudioFormat);//计算最小缓冲区 45 | //注意,按照数字音频的知识,这个算出来的是一秒钟buffer的大小。 46 | //创建AudioTrack 47 | mAudioTrack = new AudioTrack(mStreamType, mSampleRateInHz,mChannelConfig, 48 | mAudioFormat,mMinBufferSize,mMode); 49 | } 50 | 51 | 52 | /** 53 | * 获取单例引用 54 | * 55 | * @return 56 | */ 57 | public static AudioTrackManager getInstance() { 58 | if (mInstance == null) { 59 | synchronized (AudioTrackManager.class) { 60 | if (mInstance == null) { 61 | mInstance = new AudioTrackManager(); 62 | } 63 | } 64 | } 65 | return mInstance; 66 | } 67 | 68 | /** 69 | * 销毁线程方法 70 | */ 71 | private void destroyThread() { 72 | try { 73 | isStart = false; 74 | if (null != mRecordThread && Thread.State.RUNNABLE == mRecordThread.getState()) { 75 | try { 76 | Thread.sleep(500); 77 | mRecordThread.interrupt(); 78 | } catch (Exception e) { 79 | mRecordThread = null; 80 | } 81 | } 82 | mRecordThread = null; 83 | } catch (Exception e) { 84 | e.printStackTrace(); 85 | } finally { 86 | mRecordThread = null; 87 | } 88 | } 89 | 90 | /** 91 | * 启动播放线程 92 | */ 93 | private void startThread() { 94 | destroyThread(); 95 | isStart = true; 96 | if (mRecordThread == null) { 97 | mRecordThread = new Thread(recordRunnable); 98 | mRecordThread.start(); 99 | } 100 | } 101 | 102 | /** 103 | * 播放线程 104 | */ 105 | Runnable recordRunnable = new Runnable() { 106 | @Override 107 | public void run() { 108 | try { 109 | //设置线程的优先级 110 | android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO); 111 | byte[] tempBuffer = new byte[mMinBufferSize]; 112 | int readCount = 0; 113 | while (mDis.available() > 0) { 114 | readCount= mDis.read(tempBuffer); 115 | if (readCount == AudioTrack.ERROR_INVALID_OPERATION || readCount == AudioTrack.ERROR_BAD_VALUE) { 116 | continue; 117 | } 118 | if (readCount != 0 && readCount != -1) {//一边播放一边写入语音数据 119 | //判断AudioTrack未初始化,停止播放的时候释放了,状态就为STATE_UNINITIALIZED 120 | if(mAudioTrack.getState() == mAudioTrack.STATE_UNINITIALIZED){ 121 | initData(); 122 | } 123 | mAudioTrack.play(); 124 | mAudioTrack.write(tempBuffer, 0, readCount); 125 | } 126 | } 127 | stopPlay();//播放完就停止播放 128 | } catch (Exception e) { 129 | e.printStackTrace(); 130 | } 131 | } 132 | 133 | }; 134 | 135 | /** 136 | * 播放文件 137 | * @param path 138 | * @throws Exception 139 | */ 140 | private void setPath(String path) throws Exception { 141 | File file = new File(path); 142 | mDis = new DataInputStream(new FileInputStream(file)); 143 | } 144 | 145 | /** 146 | * 启动播放 147 | * 148 | * @param path 149 | */ 150 | public void startPlay(String path) { 151 | try { 152 | // //AudioTrack未初始化 153 | // if(mAudioTrack.getState() == AudioTrack.STATE_UNINITIALIZED){ 154 | // throw 
new RuntimeException("The AudioTrack is not uninitialized"); 155 | // }//AudioRecord.getMinBufferSize的参数是否支持当前的硬件设备 156 | // else if (AudioTrack.ERROR_BAD_VALUE == mMinBufferSize || AudioTrack.ERROR == mMinBufferSize) { 157 | // throw new RuntimeException("AudioTrack Unable to getMinBufferSize"); 158 | // }else{ 159 | setPath(path); 160 | startThread(); 161 | // } 162 | 163 | } catch (Exception e) { 164 | e.printStackTrace(); 165 | } 166 | } 167 | 168 | /** 169 | * 停止播放 170 | */ 171 | public void stopPlay() { 172 | try { 173 | destroyThread();//销毁线程 174 | if (mAudioTrack != null) { 175 | if (mAudioTrack.getState() == AudioRecord.STATE_INITIALIZED) {//初始化成功 176 | mAudioTrack.stop();//停止播放 177 | } 178 | if (mAudioTrack != null) { 179 | mAudioTrack.release();//释放audioTrack资源 180 | } 181 | } 182 | if (mDis != null) { 183 | mDis.close();//关闭数据输入流 184 | } 185 | } catch (Exception e) { 186 | e.printStackTrace(); 187 | } 188 | } 189 | 190 | } -------------------------------------------------------------------------------- /app/src/main/java/top/omooo/audiovideotask/task_2/MyAudioManager.java: -------------------------------------------------------------------------------- 1 | package top.omooo.audiovideotask.task_2; 2 | 3 | import android.content.Context; 4 | import android.media.AudioFormat; 5 | import android.media.AudioManager; 6 | import android.media.AudioRecord; 7 | import android.media.AudioTrack; 8 | import android.media.MediaRecorder; 9 | import android.os.Environment; 10 | import android.widget.Toast; 11 | 12 | import java.io.BufferedOutputStream; 13 | import java.io.DataInputStream; 14 | import java.io.DataOutputStream; 15 | import java.io.File; 16 | import java.io.FileInputStream; 17 | import java.io.FileNotFoundException; 18 | import java.io.FileOutputStream; 19 | import java.io.IOException; 20 | 21 | /** 22 | * Created by SSC on 2018/7/2. 
23 | */ 24 | 25 | public class MyAudioManager implements Runnable{ 26 | //指定音频源,这个和MediaRecorder是一样的,MIC指定为麦克风 27 | private static final int mAudioSource = MediaRecorder.AudioSource.MIC; 28 | //指定采样率(MediaRecorder的采样率通常是8000Hz AAC通常是44100,44100能兼容所有设置) 29 | private static final int mSampleRateInHz = 44100; 30 | //指定音频声道数,在AudioFormat类中指定用于此的常量,单声道 31 | private static final int mChannelConfig = AudioFormat.CHANNEL_CONFIGURATION_MONO; 32 | //指定音频量化位数 33 | private static final int mAudioFormat = AudioFormat.ENCODING_PCM_16BIT; 34 | //指定缓冲区大小,调用AudioRecord.getMinBufferSize()方法获得 35 | private int mBufferSizeInBytes; 36 | 37 | private File mRecordingFile; 38 | private boolean isRecording = false; 39 | private boolean isPlaying; 40 | private AudioRecord mAudioRecord = null; 41 | private AudioTrack mAudioTrack = null; 42 | private File mFileRoot = null; 43 | 44 | //存放的目录路径 45 | private static final String mPathName = Environment.getExternalStorageDirectory().getAbsolutePath() + "/AudioRecordFile"; 46 | //保存的PCM文件名 47 | private static final String mFileNamePcm = "audio.pcm"; 48 | 49 | private Thread mThread; 50 | private Thread mTrackThread; 51 | private DataOutputStream mDataOutputStream; 52 | private DataInputStream mDataInputStream; 53 | 54 | //音频流类型 55 | private static final int mStreamType = AudioManager.STREAM_MUSIC; 56 | private static int mMode = AudioTrack.MODE_STREAM; 57 | private int mMinBufferSizeTrack; 58 | 59 | private Context mContext; 60 | 61 | public MyAudioManager(Context context) { 62 | mContext = context; 63 | initDatas(); 64 | } 65 | 66 | private void initDatas() { 67 | //计算最小缓冲区 68 | mBufferSizeInBytes = AudioRecord.getMinBufferSize(mSampleRateInHz, mChannelConfig, mAudioFormat); 69 | mAudioRecord = new AudioRecord(mAudioSource, mSampleRateInHz, mChannelConfig, mAudioFormat, mBufferSizeInBytes); 70 | mFileRoot = new File(mPathName); 71 | if (!mFileRoot.exists()) { 72 | mFileRoot.mkdirs(); 73 | Toast.makeText(mContext, "创建文件夹", Toast.LENGTH_SHORT).show(); 74 | } 75 | 76 | //AudioTrack 77 | mMinBufferSizeTrack = AudioTrack.getMinBufferSize(mSampleRateInHz, mChannelConfig, mAudioFormat); 78 | mAudioTrack = new AudioTrack(mStreamType, mSampleRateInHz, mChannelConfig, mAudioFormat, mMinBufferSizeTrack, mMode); 79 | 80 | 81 | } 82 | 83 | //开始录音 84 | public void startRecord() { 85 | if (AudioRecord.ERROR_BAD_VALUE == mBufferSizeInBytes || AudioRecord.ERROR == mBufferSizeInBytes) { 86 | throw new RuntimeException("Unable to getMinBufferSize"); 87 | } else { 88 | destroyThread(); 89 | isRecording = true; 90 | if (mThread == null) { 91 | mThread = new Thread(this); 92 | mThread.start(); 93 | } 94 | } 95 | Toast.makeText(mContext, "开始录音", Toast.LENGTH_SHORT).show(); 96 | } 97 | 98 | 99 | public void destroyThread() { 100 | isRecording = false; 101 | if (null != mThread && mThread.getState() == Thread.State.RUNNABLE) { 102 | try { 103 | isRecording = false; 104 | if (null != mThread && Thread.State.RUNNABLE == mThread.getState()) { 105 | try { 106 | Thread.sleep(500); 107 | mThread.interrupt(); 108 | } catch (Exception e) { 109 | mThread = null; 110 | } 111 | } 112 | mThread = null; 113 | } catch (Exception e) { 114 | e.printStackTrace(); 115 | } finally { 116 | mThread = null; 117 | } 118 | } 119 | } 120 | 121 | //停止录音 122 | public void stopRecord() { 123 | isRecording = false; 124 | //停止录音,释放内存 125 | if (mAudioRecord != null) { 126 | if (mAudioRecord.getState() == AudioRecord.STATE_INITIALIZED) { 127 | mAudioRecord.stop(); 128 | } 129 | if (mAudioRecord != null) { 130 | 
mAudioRecord.release(); 131 | } 132 | } 133 | destroyThread(); 134 | Toast.makeText(mContext, "停止录音", Toast.LENGTH_SHORT).show(); 135 | } 136 | 137 | public void playFile(String path) { 138 | File file = new File(path); 139 | try { 140 | mDataInputStream = new DataInputStream(new FileInputStream(file)); 141 | isPlaying = true; 142 | if (mTrackThread == null) { 143 | mTrackThread = new Thread(this); 144 | mTrackThread.start(); 145 | } 146 | } catch (FileNotFoundException e) { 147 | e.printStackTrace(); 148 | } 149 | } 150 | 151 | public void stopPlay() { 152 | if (mAudioTrack != null) { 153 | if (mAudioTrack.getState() == AudioRecord.STATE_INITIALIZED) {//初始化成功 154 | mAudioTrack.stop();//停止播放 155 | } 156 | if (mAudioTrack != null) { 157 | mAudioTrack.release();//释放audioTrack资源 158 | } 159 | } 160 | if (mDataInputStream != null) { 161 | try { 162 | mDataInputStream.close();//关闭数据输入流 163 | } catch (IOException e) { 164 | e.printStackTrace(); 165 | } 166 | } 167 | } 168 | 169 | @Override 170 | public void run() { 171 | isRecording = true; 172 | mRecordingFile = new File(mFileRoot, mFileNamePcm); 173 | if (mRecordingFile.exists()) { 174 | mRecordingFile.delete(); 175 | } 176 | try { 177 | mRecordingFile.createNewFile(); 178 | } catch (IOException e) { 179 | e.printStackTrace(); 180 | Toast.makeText(mContext, "创建文件夹出错", Toast.LENGTH_SHORT).show(); 181 | } 182 | try { 183 | mDataOutputStream = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(mRecordingFile))); 184 | byte[] buffer = new byte[mBufferSizeInBytes]; 185 | if (mAudioRecord.getState() == AudioRecord.STATE_UNINITIALIZED) { 186 | initDatas(); 187 | } 188 | mAudioRecord.startRecording(); 189 | while (isRecording && mAudioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) { 190 | int bufferReadResult = mAudioRecord.read(buffer, 0, mBufferSizeInBytes); 191 | for (int i = 0; i < bufferReadResult; i++) { 192 | mDataOutputStream.write(buffer[i]); 193 | } 194 | } 195 | mDataOutputStream.close(); 196 | } catch (IOException e) { 197 | stopRecord(); 198 | Toast.makeText(mContext, "录音失败!", Toast.LENGTH_SHORT).show(); 199 | } 200 | } 201 | } 202 | -------------------------------------------------------------------------------- /app/src/main/java/top/omooo/audiovideotask/task_3/CameraPreview.java: -------------------------------------------------------------------------------- 1 | package top.omooo.audiovideotask.task_3; 2 | 3 | import android.content.Context; 4 | import android.content.res.Configuration; 5 | import android.hardware.Camera; 6 | import android.view.SurfaceHolder; 7 | import android.view.SurfaceView; 8 | 9 | import java.io.IOException; 10 | 11 | public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback { 12 | 13 | private static final String TAG = "CameraPreview"; 14 | private SurfaceHolder mSurfaceHolder; 15 | private Camera mCamera; 16 | private int mFrameCount = 0; 17 | 18 | public CameraPreview(Context context) { 19 | super(context); 20 | } 21 | 22 | public CameraPreview(Context context, Camera camera) { 23 | super(context); 24 | mCamera = camera; 25 | mSurfaceHolder = getHolder(); 26 | mSurfaceHolder.addCallback(this); 27 | mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); 28 | } 29 | 30 | public void setCamera(Camera camera) { 31 | mCamera = camera; 32 | } 33 | 34 | @Override 35 | public void surfaceCreated(SurfaceHolder holder) { 36 | //开启预览 37 | try { 38 | mCamera.setPreviewDisplay(holder); 39 | mCamera.startPreview(); 40 | } catch (IOException e) { 41 | 
e.printStackTrace(); 42 | } 43 | } 44 | 45 | @Override 46 | public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { 47 | //若需要旋转、更改大小或者重新设置,停止预览 48 | if (mSurfaceHolder.getSurface() == null) { 49 | return; 50 | } 51 | try { 52 | mCamera.stopPreview(); 53 | } catch (Exception e) { 54 | e.printStackTrace(); 55 | } 56 | Camera.Parameters parameters = mCamera.getParameters(); 57 | if (this.getResources().getConfiguration().orientation != Configuration.ORIENTATION_LANDSCAPE) { 58 | mCamera.setDisplayOrientation(90); 59 | } else { 60 | mCamera.setDisplayOrientation(0); 61 | } 62 | try { 63 | mCamera.setPreviewDisplay(mSurfaceHolder); 64 | mCamera.setPreviewCallback(mPreviewCallback); 65 | mCamera.startPreview(); 66 | } catch (IOException e) { 67 | e.printStackTrace(); 68 | } 69 | } 70 | 71 | @Override 72 | public void surfaceDestroyed(SurfaceHolder holder) { 73 | 74 | } 75 | 76 | private Camera.PreviewCallback mPreviewCallback = new Camera.PreviewCallback() { 77 | @Override 78 | public void onPreviewFrame(byte[] data, Camera camera) { 79 | mFrameCount++; 80 | } 81 | }; 82 | 83 | 84 | } 85 | -------------------------------------------------------------------------------- /app/src/main/java/top/omooo/audiovideotask/task_3/PreviewActivity.java: -------------------------------------------------------------------------------- 1 | package top.omooo.audiovideotask.task_3; 2 | 3 | import android.content.Intent; 4 | import android.os.Bundle; 5 | import android.support.annotation.Nullable; 6 | import android.support.v7.app.AppCompatActivity; 7 | import android.view.View; 8 | import android.widget.Button; 9 | 10 | import butterknife.BindView; 11 | import butterknife.ButterKnife; 12 | import butterknife.OnClick; 13 | import top.omooo.audiovideotask.R; 14 | 15 | public class PreviewActivity extends AppCompatActivity { 16 | @BindView(R.id.btn_surface) 17 | Button mBtnSurface; 18 | @BindView(R.id.btn_texture) 19 | Button mBtnTexture; 20 | 21 | @Override 22 | protected void onCreate(@Nullable Bundle savedInstanceState) { 23 | super.onCreate(savedInstanceState); 24 | setContentView(R.layout.activity_preview_layout); 25 | ButterKnife.bind(this); 26 | } 27 | 28 | @OnClick({R.id.btn_surface, R.id.btn_texture}) 29 | public void onViewClicked(View view) { 30 | switch (view.getId()) { 31 | case R.id.btn_surface: 32 | startActivity(new Intent(this, SurfacePreviewActivity.class)); 33 | break; 34 | case R.id.btn_texture: 35 | startActivity(new Intent(this, TexturePreviewActivity.class)); 36 | break; 37 | } 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /app/src/main/java/top/omooo/audiovideotask/task_3/SurfacePreviewActivity.java: -------------------------------------------------------------------------------- 1 | package top.omooo.audiovideotask.task_3; 2 | 3 | import android.Manifest; 4 | import android.hardware.Camera; 5 | import android.os.Bundle; 6 | import android.support.annotation.NonNull; 7 | import android.support.annotation.Nullable; 8 | import android.support.v7.app.AppCompatActivity; 9 | import android.widget.FrameLayout; 10 | import android.widget.Toast; 11 | 12 | import com.joker.api.Permissions4M; 13 | import com.joker.api.wrapper.ListenerWrapper; 14 | 15 | import butterknife.BindView; 16 | import butterknife.ButterKnife; 17 | import top.omooo.audiovideotask.R; 18 | 19 | public class SurfacePreviewActivity extends AppCompatActivity { 20 | 21 | private static final String TAG = "SurfacePreviewActivity"; 22 | 
@BindView(R.id.frame_layout) 23 | FrameLayout mFrameLayout; 24 | 25 | private Camera mCamera; 26 | private CameraPreview mCameraPreview; 27 | 28 | @Override 29 | protected void onCreate(@Nullable Bundle savedInstanceState) { 30 | super.onCreate(savedInstanceState); 31 | setContentView(R.layout.activity_preview); 32 | ButterKnife.bind(this); 33 | 34 | requestPer(); 35 | mCameraPreview = new CameraPreview(SurfacePreviewActivity.this, mCamera); 36 | new InitCameraThread().start(); 37 | } 38 | 39 | @Override 40 | protected void onResume() { 41 | super.onResume(); 42 | if (null == mCamera) { 43 | if (safeCameraOpen()) { 44 | mCameraPreview.setCamera(mCamera); 45 | } 46 | } 47 | } 48 | 49 | private boolean safeCameraOpen() { 50 | boolean open = false; 51 | try { 52 | releaseCamera(); 53 | mCamera = Camera.open(); 54 | open = (mCamera != null); 55 | } catch (Exception e) { 56 | e.printStackTrace(); 57 | } 58 | return open; 59 | } 60 | 61 | private void releaseCamera() { 62 | if (mCamera != null) { 63 | mCamera.setPreviewCallback(null); 64 | mCamera.release(); 65 | mCamera = null; 66 | } 67 | } 68 | 69 | 70 | @Override 71 | protected void onDestroy() { 72 | super.onDestroy(); 73 | releaseCamera(); 74 | } 75 | 76 | private class InitCameraThread extends Thread { 77 | @Override 78 | public void run() { 79 | super.run(); 80 | if (safeCameraOpen()) { 81 | runOnUiThread(new Runnable() { 82 | @Override 83 | public void run() { 84 | mFrameLayout.removeAllViews(); 85 | mFrameLayout.addView(mCameraPreview); 86 | } 87 | }); 88 | } 89 | } 90 | } 91 | 92 | private void requestPer() { 93 | Permissions4M.get(this) 94 | .requestPermissions(Manifest.permission.CAMERA) 95 | .requestCodes(0x01) 96 | .requestListener(new ListenerWrapper.PermissionRequestListener() { 97 | @Override 98 | public void permissionGranted(int i) { 99 | 100 | } 101 | 102 | @Override 103 | public void permissionDenied(int i) { 104 | switch (i) { 105 | case 0x01: 106 | Toast.makeText(SurfacePreviewActivity.this, "照相机权限申请失败!", Toast.LENGTH_SHORT).show(); 107 | break; 108 | } 109 | } 110 | 111 | @Override 112 | public void permissionRationale(int i) { 113 | switch (i) { 114 | case 0x01: 115 | Toast.makeText(SurfacePreviewActivity.this, "申请必要权限用于预览相机!", Toast.LENGTH_SHORT).show(); 116 | break; 117 | } 118 | } 119 | }) 120 | .request(); 121 | } 122 | 123 | @Override 124 | public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] 125 | grantResults) { 126 | Permissions4M.onRequestPermissionsResult(this, requestCode, grantResults); 127 | super.onRequestPermissionsResult(requestCode, permissions, grantResults); 128 | } 129 | 130 | } 131 | -------------------------------------------------------------------------------- /app/src/main/java/top/omooo/audiovideotask/task_3/TextureCameraPreview.java: -------------------------------------------------------------------------------- 1 | package top.omooo.audiovideotask.task_3; 2 | 3 | import android.content.Context; 4 | import android.content.res.Configuration; 5 | import android.graphics.SurfaceTexture; 6 | import android.hardware.Camera; 7 | import android.view.TextureView; 8 | 9 | import java.io.IOException; 10 | 11 | public class TextureCameraPreview extends TextureView implements TextureView.SurfaceTextureListener { 12 | 13 | private Camera mCamera; 14 | public TextureCameraPreview(Context context) { 15 | super(context); 16 | } 17 | 18 | public TextureCameraPreview(Context context, Camera camera) { 19 | super(context); 20 | mCamera = camera; 21 | } 22 | 23 | 
public void setCamera(Camera camera) { 24 | mCamera = camera; 25 | } 26 | @Override 27 | public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) { 28 | if (this.getResources().getConfiguration().orientation != Configuration.ORIENTATION_LANDSCAPE) { 29 | mCamera.setDisplayOrientation(90); 30 | } else { 31 | mCamera.setDisplayOrientation(0); 32 | } 33 | try { 34 | mCamera.setPreviewCallback(mCameraPreviewCallback); 35 | mCamera.setPreviewTexture(surface); //使用SurfaceTexture 36 | mCamera.startPreview(); 37 | } catch (IOException ioe) { 38 | 39 | } 40 | } 41 | 42 | @Override 43 | public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) { 44 | 45 | } 46 | 47 | @Override 48 | public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) { 49 | mCamera.stopPreview(); 50 | mCamera.release(); 51 | return true; 52 | } 53 | 54 | @Override 55 | public void onSurfaceTextureUpdated(SurfaceTexture surface) { 56 | 57 | } 58 | 59 | private Camera.PreviewCallback mCameraPreviewCallback = new Camera.PreviewCallback() { 60 | @Override 61 | public void onPreviewFrame(byte[] data, Camera camera) { 62 | } 63 | }; 64 | } 65 | -------------------------------------------------------------------------------- /app/src/main/java/top/omooo/audiovideotask/task_3/TexturePreviewActivity.java: -------------------------------------------------------------------------------- 1 | package top.omooo.audiovideotask.task_3; 2 | 3 | import android.Manifest; 4 | import android.hardware.Camera; 5 | import android.os.Bundle; 6 | import android.support.annotation.NonNull; 7 | import android.support.annotation.Nullable; 8 | import android.support.v7.app.AppCompatActivity; 9 | import android.widget.FrameLayout; 10 | import android.widget.Toast; 11 | 12 | import com.joker.api.Permissions4M; 13 | import com.joker.api.wrapper.ListenerWrapper; 14 | 15 | import butterknife.BindView; 16 | import butterknife.ButterKnife; 17 | import top.omooo.audiovideotask.R; 18 | 19 | public class TexturePreviewActivity extends AppCompatActivity { 20 | 21 | @BindView(R.id.frame_layout) 22 | FrameLayout mFrameLayout; 23 | 24 | private Camera mCamera; 25 | private TextureCameraPreview mTextureCameraPreview; 26 | @Override 27 | protected void onCreate(@Nullable Bundle savedInstanceState) { 28 | super.onCreate(savedInstanceState); 29 | setContentView(R.layout.activity_preview); 30 | ButterKnife.bind(this); 31 | 32 | requestPer(); 33 | mTextureCameraPreview = new TextureCameraPreview(TexturePreviewActivity.this, mCamera); 34 | mTextureCameraPreview.setSurfaceTextureListener(mTextureCameraPreview); 35 | new InitCameraThread().start(); 36 | } 37 | 38 | @Override 39 | protected void onResume() { 40 | super.onResume(); 41 | if (null == mCamera) { 42 | if (safeCameraOpen()) { 43 | mTextureCameraPreview.setCamera(mCamera); 44 | } 45 | } 46 | } 47 | 48 | private boolean safeCameraOpen() { 49 | boolean open = false; 50 | try { 51 | releaseCamera(); 52 | mCamera = Camera.open(); 53 | open = (mCamera != null); 54 | } catch (Exception e) { 55 | e.printStackTrace(); 56 | } 57 | return open; 58 | } 59 | 60 | private void releaseCamera() { 61 | if (mCamera != null) { 62 | mCamera.setPreviewCallback(null); 63 | // mCamera.release(); 64 | mCamera = null; 65 | } 66 | } 67 | 68 | 69 | @Override 70 | protected void onDestroy() { 71 | super.onDestroy(); 72 | releaseCamera(); 73 | } 74 | 75 | private class InitCameraThread extends Thread { 76 | @Override 77 | public void run() { 78 | super.run(); 79 | if (safeCameraOpen()) 
{ 80 | runOnUiThread(new Runnable() { 81 | @Override 82 | public void run() { 83 | mFrameLayout.removeAllViews(); 84 | mFrameLayout.addView(mTextureCameraPreview); 85 | } 86 | }); 87 | } 88 | } 89 | } 90 | 91 | private void requestPer() { 92 | Permissions4M.get(this) 93 | .requestPermissions(Manifest.permission.CAMERA) 94 | .requestCodes(0x01) 95 | .requestListener(new ListenerWrapper.PermissionRequestListener() { 96 | @Override 97 | public void permissionGranted(int i) { 98 | 99 | } 100 | 101 | @Override 102 | public void permissionDenied(int i) { 103 | switch (i) { 104 | case 0x01: 105 | Toast.makeText(TexturePreviewActivity.this, "照相机权限申请失败!", Toast.LENGTH_SHORT).show(); 106 | break; 107 | } 108 | } 109 | 110 | @Override 111 | public void permissionRationale(int i) { 112 | switch (i) { 113 | case 0x01: 114 | Toast.makeText(TexturePreviewActivity.this, "申请必要权限用于预览相机!", Toast.LENGTH_SHORT).show(); 115 | break; 116 | } 117 | } 118 | }) 119 | .request(); 120 | } 121 | 122 | @Override 123 | public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] 124 | grantResults) { 125 | Permissions4M.onRequestPermissionsResult(this, requestCode, grantResults); 126 | super.onRequestPermissionsResult(requestCode, permissions, grantResults); 127 | } 128 | 129 | } 130 | -------------------------------------------------------------------------------- /app/src/main/java/top/omooo/audiovideotask/task_4/ExtractMuxerActivity.java: -------------------------------------------------------------------------------- 1 | package top.omooo.audiovideotask.task_4; 2 | 3 | import android.os.Bundle; 4 | import android.os.Environment; 5 | import android.support.annotation.Nullable; 6 | import android.support.v7.app.AppCompatActivity; 7 | import android.view.View; 8 | import android.widget.Button; 9 | 10 | import butterknife.BindView; 11 | import butterknife.ButterKnife; 12 | import butterknife.OnClick; 13 | import top.omooo.audiovideotask.R; 14 | 15 | public class ExtractMuxerActivity extends AppCompatActivity { 16 | @BindView(R.id.btn_extract_video) 17 | Button mBtnExtractVideo; 18 | @BindView(R.id.btn_extract_audio) 19 | Button mBtnExtractAudio; 20 | @BindView(R.id.btn_combine) 21 | Button mBtnCombine; 22 | 23 | private static final String inputFilePath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/AudioVideoTask/input.mp4"; 24 | private static final String outputFilePath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/AudioVideoTask/outVideo.mp4"; 25 | private static final String outputFileAudioPath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/AudioVideoTask/outAudio.mp3"; 26 | private static final String outputFileVideoPath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/AudioVideoTask/outCombineFile.mp4"; 27 | 28 | @Override 29 | protected void onCreate(@Nullable Bundle savedInstanceState) { 30 | super.onCreate(savedInstanceState); 31 | setContentView(R.layout.activity_extract_muxer); 32 | ButterKnife.bind(this); 33 | 34 | } 35 | 36 | @OnClick({R.id.btn_extract_video, R.id.btn_extract_audio, R.id.btn_combine}) 37 | public void onViewClicked(View view) { 38 | switch (view.getId()) { 39 | case R.id.btn_extract_video: 40 | MP4Manager.extractVideo(inputFilePath, outputFilePath); 41 | break; 42 | case R.id.btn_extract_audio: 43 | MP4Manager.extractAudio(inputFilePath, outputFileAudioPath); 44 | break; 45 | case R.id.btn_combine: 46 | MP4Manager.combine(outputFilePath, outputFileAudioPath, 
outputFileVideoPath); 47 | break; 48 | } 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /app/src/main/java/top/omooo/audiovideotask/task_4/MP4Manager.java: -------------------------------------------------------------------------------- 1 | package top.omooo.audiovideotask.task_4; 2 | 3 | import android.media.MediaCodec; 4 | import android.media.MediaExtractor; 5 | import android.media.MediaFormat; 6 | import android.media.MediaMuxer; 7 | 8 | import java.io.IOException; 9 | import java.nio.ByteBuffer; 10 | 11 | public class MP4Manager { 12 | 13 | private static final String TAG = "MP4Manager"; 14 | 15 | public static void extractVideo(String inputFilePath,String outputFilePath) { 16 | int videoIndex = -1; 17 | MediaExtractor mediaExtractor = new MediaExtractor(); 18 | MediaMuxer mediaMuxer = null; 19 | try { 20 | mediaExtractor.setDataSource(inputFilePath); 21 | int trackCount = mediaExtractor.getTrackCount(); 22 | for (int i = 0; i < trackCount; i++) { 23 | MediaFormat format = mediaExtractor.getTrackFormat(i); 24 | String mime = format.getString(MediaFormat.KEY_MIME); 25 | if (mime.startsWith("video/")) { 26 | videoIndex = i; //得到具体轨道 27 | break; 28 | } 29 | } 30 | mediaExtractor.selectTrack(videoIndex); 31 | MediaFormat trackFormat = mediaExtractor.getTrackFormat(videoIndex); 32 | mediaMuxer = new MediaMuxer(outputFilePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); 33 | int i = mediaMuxer.addTrack(trackFormat); 34 | 35 | ByteBuffer byteBuffer = ByteBuffer.allocate(1024 * 1024); 36 | MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo(); 37 | mediaMuxer.start(); 38 | 39 | long videoSampleTime; 40 | //获取每一帧的时间 41 | { 42 | mediaExtractor.readSampleData(byteBuffer, 0); 43 | if (mediaExtractor.getSampleFlags() == MediaExtractor.SAMPLE_FLAG_SYNC) { 44 | mediaExtractor.advance(); 45 | } 46 | mediaExtractor.readSampleData(byteBuffer, 0); 47 | long sampleTime = mediaExtractor.getSampleTime(); 48 | mediaExtractor.advance(); 49 | 50 | mediaExtractor.readSampleData(byteBuffer, 0); 51 | long sampleTime1 = mediaExtractor.getSampleTime(); 52 | videoSampleTime = Math.abs(sampleTime - sampleTime1); 53 | } 54 | mediaExtractor.unselectTrack(videoIndex); 55 | mediaExtractor.selectTrack(videoIndex); 56 | 57 | while (true) { 58 | int data = mediaExtractor.readSampleData(byteBuffer, 0); 59 | if (data < 0) { 60 | break; 61 | } 62 | 63 | bufferInfo.size = data; 64 | bufferInfo.offset = 0; 65 | bufferInfo.flags = mediaExtractor.getSampleFlags(); 66 | bufferInfo.presentationTimeUs += videoSampleTime; 67 | 68 | mediaMuxer.writeSampleData(i, byteBuffer, bufferInfo); 69 | 70 | mediaExtractor.advance(); 71 | } 72 | 73 | } catch (IOException e) { 74 | e.printStackTrace(); 75 | }finally { 76 | mediaExtractor.release(); 77 | if (mediaMuxer != null) { 78 | mediaMuxer.stop(); 79 | mediaMuxer.release(); 80 | } 81 | } 82 | } 83 | 84 | public static void extractAudio(String inputFilePath,String outputFilePath) { 85 | MediaExtractor mediaExtractor = new MediaExtractor(); 86 | MediaMuxer mediaMuxer = null; 87 | int audioIndex = -1; 88 | try { 89 | mediaExtractor.setDataSource(inputFilePath); 90 | int trackCount = mediaExtractor.getTrackCount(); 91 | for (int i = 0; i < trackCount; i++) { 92 | MediaFormat format = mediaExtractor.getTrackFormat(i); 93 | String mime = format.getString(MediaFormat.KEY_MIME); 94 | if (mime.startsWith("audio/")) { 95 | audioIndex = i; 96 | } 97 | } 98 | mediaExtractor.selectTrack(audioIndex); 99 | MediaFormat format = 
mediaExtractor.getTrackFormat(audioIndex); 100 | mediaMuxer = new MediaMuxer(outputFilePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); 101 | int i = mediaMuxer.addTrack(format); 102 | mediaMuxer.start(); 103 | 104 | ByteBuffer byteBuffer = ByteBuffer.allocate(1024 * 1024); 105 | MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo(); 106 | 107 | long time; 108 | 109 | { 110 | mediaExtractor.readSampleData(byteBuffer, 0); 111 | if (mediaExtractor.getSampleFlags() == MediaExtractor.SAMPLE_FLAG_SYNC) { 112 | mediaExtractor.advance(); 113 | } 114 | mediaExtractor.readSampleData(byteBuffer, 0); 115 | long sampleTime = mediaExtractor.getSampleTime(); 116 | mediaExtractor.advance(); 117 | 118 | mediaExtractor.readSampleData(byteBuffer, 0); 119 | long sampleTime1 = mediaExtractor.getSampleTime(); 120 | mediaExtractor.advance(); 121 | 122 | time = Math.abs(sampleTime - sampleTime1); 123 | } 124 | 125 | mediaExtractor.unselectTrack(audioIndex); 126 | mediaExtractor.selectTrack(audioIndex); 127 | while (true) { 128 | int data = mediaExtractor.readSampleData(byteBuffer, 0); 129 | if (data < 0) { 130 | break; 131 | } 132 | bufferInfo.size = data; 133 | bufferInfo.flags = mediaExtractor.getSampleFlags(); 134 | bufferInfo.offset = 0; 135 | bufferInfo.presentationTimeUs += time; 136 | 137 | mediaMuxer.writeSampleData(i, byteBuffer, bufferInfo); 138 | mediaExtractor.advance(); 139 | } 140 | } catch (IOException e) { 141 | e.printStackTrace(); 142 | }finally { 143 | mediaExtractor.release(); 144 | if (mediaMuxer != null) { 145 | mediaMuxer.stop(); 146 | mediaMuxer.release(); 147 | } 148 | } 149 | } 150 | 151 | public static void combine(String inputVideoFilePath, String inputAudioFilePath, String outputVideoFilePath) { 152 | MediaExtractor videoExtractor; 153 | MediaExtractor audioExtractor = null; 154 | MediaMuxer mediaMuxer = null; 155 | videoExtractor = new MediaExtractor(); 156 | try { 157 | videoExtractor.setDataSource(inputVideoFilePath); 158 | int videoIndex = -1; 159 | MediaFormat videoTrackFormat = null; 160 | int trackCount = videoExtractor.getTrackCount(); 161 | for (int i = 0; i < trackCount; i++) { 162 | videoTrackFormat = videoExtractor.getTrackFormat(i); 163 | if (videoTrackFormat.getString(MediaFormat.KEY_MIME).startsWith("video/")) { 164 | videoIndex = i; 165 | } 166 | } 167 | audioExtractor = new MediaExtractor(); 168 | audioExtractor.setDataSource(inputAudioFilePath); 169 | int audioIndex = -1; 170 | MediaFormat audioTrackFormat = null; 171 | trackCount = audioExtractor.getTrackCount(); 172 | for (int i = 0; i < trackCount; i++) { 173 | audioTrackFormat = audioExtractor.getTrackFormat(i); 174 | if (audioTrackFormat.getString(MediaFormat.KEY_MIME).startsWith("audio/")) { 175 | audioIndex = i; 176 | } 177 | } 178 | 179 | videoExtractor.selectTrack(videoIndex); 180 | audioExtractor.selectTrack(audioIndex); 181 | 182 | MediaCodec.BufferInfo videoBufferInfo = new MediaCodec.BufferInfo(); 183 | MediaCodec.BufferInfo audioBufferInfo = new MediaCodec.BufferInfo(); 184 | mediaMuxer = new MediaMuxer(outputVideoFilePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); 185 | assert videoTrackFormat != null; 186 | int videoTrackIndex = mediaMuxer.addTrack(videoTrackFormat); 187 | assert audioTrackFormat != null; 188 | int audioTrackIndex = mediaMuxer.addTrack(audioTrackFormat); 189 | mediaMuxer.start(); 190 | ByteBuffer byteBuffer = ByteBuffer.allocate(1024 * 1024); 191 | long videoTime; 192 | long audioTime; 193 | { 194 | videoExtractor.readSampleData(byteBuffer, 0); 195 | if 
(videoExtractor.getSampleFlags() == MediaExtractor.SAMPLE_FLAG_SYNC) { 196 | videoExtractor.advance(); 197 | } 198 | videoExtractor.readSampleData(byteBuffer, 0); 199 | long sampleTime = videoExtractor.getSampleTime(); 200 | videoExtractor.advance(); 201 | videoExtractor.readSampleData(byteBuffer, 0); 202 | long sampleTime1 = videoExtractor.getSampleTime(); 203 | videoExtractor.advance(); 204 | 205 | videoTime = Math.abs(sampleTime - sampleTime1); 206 | } 207 | 208 | { 209 | audioExtractor.readSampleData(byteBuffer, 0); 210 | if (audioExtractor.getSampleFlags() == MediaExtractor.SAMPLE_FLAG_SYNC) { 211 | audioExtractor.advance(); 212 | } 213 | audioExtractor.readSampleData(byteBuffer, 0); 214 | long sampleTime = audioExtractor.getSampleTime(); 215 | audioExtractor.advance(); 216 | audioExtractor.readSampleData(byteBuffer, 0); 217 | long sampleTime1 = audioExtractor.getSampleTime(); 218 | audioExtractor.advance(); 219 | 220 | audioTime = Math.abs(sampleTime - sampleTime1); 221 | } 222 | 223 | videoExtractor.unselectTrack(videoIndex); 224 | videoExtractor.selectTrack(videoIndex); 225 | 226 | while (true) { 227 | int data = videoExtractor.readSampleData(byteBuffer, 0); 228 | if (data < 0) { 229 | break; 230 | } 231 | videoBufferInfo.size = data; 232 | videoBufferInfo.presentationTimeUs += videoTime; 233 | videoBufferInfo.offset = 0; 234 | videoBufferInfo.flags = videoExtractor.getSampleFlags(); 235 | 236 | mediaMuxer.writeSampleData(videoTrackIndex, byteBuffer, videoBufferInfo); 237 | videoExtractor.advance(); 238 | } 239 | 240 | while (true) { 241 | int data = audioExtractor.readSampleData(byteBuffer, 0); 242 | if (data < 0) { 243 | break; 244 | } 245 | audioBufferInfo.size = data; 246 | audioBufferInfo.presentationTimeUs += audioTime; 247 | audioBufferInfo.offset = 0; 248 | audioBufferInfo.flags = audioExtractor.getSampleFlags(); 249 | 250 | mediaMuxer.writeSampleData(audioTrackIndex, byteBuffer, audioBufferInfo); 251 | audioExtractor.advance(); 252 | } 253 | } catch (IOException e) { 254 | e.printStackTrace(); 255 | } finally { 256 | if (mediaMuxer != null) { 257 | mediaMuxer.stop(); 258 | mediaMuxer.release(); 259 | } 260 | videoExtractor.release(); 261 | if (audioExtractor != null) { 262 | audioExtractor.release(); 263 | } 264 | } 265 | } 266 | } 267 | -------------------------------------------------------------------------------- /app/src/main/java/top/omooo/audiovideotask/task_5/GLActivity.java: -------------------------------------------------------------------------------- 1 | package top.omooo.audiovideotask.task_5; 2 | 3 | import android.opengl.GLSurfaceView; 4 | import android.os.Bundle; 5 | import android.support.annotation.Nullable; 6 | import android.support.v7.app.AppCompatActivity; 7 | import android.widget.FrameLayout; 8 | 9 | import butterknife.BindView; 10 | import butterknife.ButterKnife; 11 | import top.omooo.audiovideotask.R; 12 | 13 | public class GLActivity extends AppCompatActivity { 14 | @BindView(R.id.frame_layout) 15 | FrameLayout mFrameLayout; 16 | 17 | 18 | private GLSurfaceView mGLSurfaceView; 19 | @Override 20 | protected void onCreate(@Nullable Bundle savedInstanceState) { 21 | super.onCreate(savedInstanceState); 22 | setContentView(R.layout.activity_gl); 23 | ButterKnife.bind(this); 24 | 25 | mGLSurfaceView = new MyGlSurfaceView(this); 26 | FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(500, 500); 27 | addContentView(mGLSurfaceView,layoutParams); 28 | } 29 | } 30 | 
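
The GLSurfaceView created in GLActivity above (via MyGlSurfaceView) uses the default continuous render mode, so the static triangle from task_5 is redrawn on every vsync. A minimal sketch of an on-demand variant is shown below; it reuses MyGlRenderer (defined in the next file), while the class name OnDemandGlSurfaceView is only an illustrative assumption and not a file in this project.

package top.omooo.audiovideotask.task_5;

import android.content.Context;
import android.opengl.GLSurfaceView;

public class OnDemandGlSurfaceView extends GLSurfaceView {

    public OnDemandGlSurfaceView(Context context) {
        super(context);
        // Request an OpenGL ES 2.0 context, matching MyGlSurfaceView below
        setEGLContextClientVersion(2);
        setRenderer(new MyGlRenderer());
        // Must be called after setRenderer(): the surface now redraws only when
        // requestRender() is invoked, instead of on every vsync, which is enough
        // for a static scene such as the single triangle in task_5.
        setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    }
}

With this setup the caller triggers a redraw explicitly by calling requestRender() whenever the scene changes; for animated content the default RENDERMODE_CONTINUOUSLY used by MyGlSurfaceView remains the simpler choice.
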
-------------------------------------------------------------------------------- /app/src/main/java/top/omooo/audiovideotask/task_5/MyGlRenderer.java: -------------------------------------------------------------------------------- 1 | package top.omooo.audiovideotask.task_5; 2 | 3 | import android.opengl.GLES20; 4 | import android.opengl.GLSurfaceView; 5 | 6 | import javax.microedition.khronos.egl.EGLConfig; 7 | import javax.microedition.khronos.opengles.GL10; 8 | 9 | /** 10 | * 渲染器,类似画笔 11 | */ 12 | public class MyGlRenderer implements GLSurfaceView.Renderer { 13 | 14 | private MyTriangle mMyTriangle; 15 | 16 | public static int loadShader(int type, String shaderCode) { 17 | int shader = GLES20.glCreateShader(type); 18 | 19 | GLES20.glShaderSource(shader, shaderCode); 20 | GLES20.glCompileShader(shader); 21 | 22 | return shader; 23 | } 24 | 25 | @Override 26 | public void onSurfaceCreated(GL10 gl, EGLConfig config) { 27 | mMyTriangle = new MyTriangle(); 28 | GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); 29 | } 30 | 31 | @Override 32 | public void onSurfaceChanged(GL10 gl, int width, int height) { 33 | GLES20.glViewport(0, 0, width, height); 34 | } 35 | 36 | @Override 37 | public void onDrawFrame(GL10 gl) { 38 | //绘制背景 39 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); 40 | mMyTriangle.draw(); 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /app/src/main/java/top/omooo/audiovideotask/task_5/MyGlSurfaceView.java: -------------------------------------------------------------------------------- 1 | package top.omooo.audiovideotask.task_5; 2 | 3 | import android.content.Context; 4 | import android.opengl.GLSurfaceView; 5 | import android.util.AttributeSet; 6 | 7 | public class MyGlSurfaceView extends GLSurfaceView { 8 | 9 | private MyGlRenderer mMyGlRenderer; 10 | public MyGlSurfaceView(Context context) { 11 | super(context); 12 | init(); 13 | } 14 | 15 | public MyGlSurfaceView(Context context, AttributeSet attrs) { 16 | super(context, attrs); 17 | init(); 18 | } 19 | 20 | private void init() { 21 | setEGLContextClientVersion(2); 22 | mMyGlRenderer = new MyGlRenderer(); 23 | setRenderer(mMyGlRenderer); 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /app/src/main/java/top/omooo/audiovideotask/task_5/MyTriangle.java: -------------------------------------------------------------------------------- 1 | package top.omooo.audiovideotask.task_5; 2 | 3 | import android.opengl.GLES20; 4 | 5 | import java.nio.ByteBuffer; 6 | import java.nio.ByteOrder; 7 | import java.nio.FloatBuffer; 8 | 9 | /** 10 | * OpenGl 三角形类 11 | */ 12 | public class MyTriangle { 13 | private FloatBuffer vertexBuffer; 14 | private static final String vertexShaderCode = 15 | "attribute vec4 vPosition;" + "void main(){ " 16 | + " gl_Position = vPosition;" 17 | + "}"; 18 | //所有的浮点值都是中等精度(precision mediump float;) 19 | //可以选择把这个值设为“低”( precision lowp float; )或者“高”( precision highp float; ) 20 | private static final String fragmentShaderCode = 21 | "precision mediump float;" 22 | + "uniform vec4 vColor;" 23 | + "void main(){" 24 | + " gl_FragColor = vColor;" 25 | + "}"; 26 | //每个点由三个数值定义 27 | private static final int COORDS_PER_VERTEX = 3; 28 | private static float triangleCoords[] = { 29 | 0.0f, 0.62f, 0.0f, 30 | -0.5f, -0.3f, 0.0f, 31 | 0.5f, -0.3f, 0.0f 32 | }; 33 | //设置red,green,blue 和 alpha颜色值 34 | private float color[] = {0.6367f, 0.7695f, 0.2227f, 1.0f}; 35 | private final int mProgram; 36 | 37 | public MyTriangle() { 38 | 
//初始化顶点ByteBuffer 39 | ByteBuffer byteBuffer = ByteBuffer.allocateDirect(triangleCoords.length * 4); 40 | //使用硬件指定的字节顺序 一般而言是ByteOrder.LITTLE_ENDIAN 41 | byteBuffer.order(ByteOrder.nativeOrder()); 42 | //从ByteBuffer中创建FloatBuffer 43 | vertexBuffer = byteBuffer.asFloatBuffer(); 44 | //把预置的坐标值填入FloatBuffer 45 | vertexBuffer.put(triangleCoords); 46 | //设置从第一个坐标开始 47 | vertexBuffer.position(0); 48 | 49 | int vertexShader = MyGlRenderer.loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode); 50 | int fragmentShader = MyGlRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode); 51 | 52 | //创建空的OpenGL ES Program 53 | mProgram = GLES20.glCreateProgram(); 54 | //ES Program加入顶点着色器 55 | GLES20.glAttachShader(mProgram, vertexShader); 56 | //ES Program加入片段着色器 57 | GLES20.glAttachShader(mProgram, fragmentShader); 58 | //创建可执行的OpenGL ES程序 59 | GLES20.glLinkProgram(mProgram); 60 | 61 | } 62 | 63 | private int mPositionHandle; 64 | private int mColorHandle; 65 | 66 | private static final int vertexCount = triangleCoords.length / COORDS_PER_VERTEX; 67 | private static final int vertexStride = COORDS_PER_VERTEX * 4; //每个顶点四个字节 68 | 69 | public void draw() { 70 | //将程序添加到OpenGL ES环境中 71 | GLES20.glUseProgram(mProgram); 72 | 73 | //获取顶点着色器的vPosition成员位置 74 | 75 | mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition"); 76 | //激活这个三角形的handle 77 | GLES20.glEnableVertexAttribArray(mPositionHandle); 78 | 79 | //准备这个三角形的坐标数据 80 | GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false 81 | , vertexStride, vertexBuffer); 82 | //获取片段着色器的颜色成员信息 83 | mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor"); 84 | //设置三角形的颜色 85 | GLES20.glUniform4fv(mColorHandle, 1, color, 0); 86 | //绘制三角形 87 | GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, vertexCount); 88 | GLES20.glDisableVertexAttribArray(mPositionHandle); 89 | 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /app/src/main/res/drawable-v24/ic_launcher_foreground.xml: -------------------------------------------------------------------------------- 1 | 7 | 12 | 13 | 19 | 22 | 25 | 26 | 27 | 28 | 34 | 35 | -------------------------------------------------------------------------------- /app/src/main/res/drawable-v24/picture.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Omooo/AudioVideoTask/9f84a6a0d755a4168bdac3d3b654b637f69f3af7/app/src/main/res/drawable-v24/picture.gif -------------------------------------------------------------------------------- /app/src/main/res/drawable/ic_launcher_background.xml: -------------------------------------------------------------------------------- 1 | 2 | 8 | 11 | 16 | 21 | 26 | 31 | 36 | 41 | 46 | 51 | 56 | 61 | 66 | 71 | 76 | 81 | 86 | 91 | 96 | 101 | 106 | 111 | 116 | 121 | 126 | 131 | 136 | 141 | 146 | 151 | 156 | 161 | 166 | 171 | 172 | -------------------------------------------------------------------------------- /app/src/main/res/layout/activity_audio_record.xml: -------------------------------------------------------------------------------- 1 | 2 | 6 |