├── .gitignore ├── .idea ├── .name ├── compiler.xml ├── copyright │ └── profiles_settings.xml ├── dictionaries │ └── relex.xml ├── gradle.xml ├── misc.xml ├── modules.xml ├── runConfigurations.xml └── vcs.xml ├── CameraFilter.iml ├── README.md ├── app ├── .gitignore ├── app.iml ├── build.gradle ├── proguard-rules.pro └── src │ └── main │ ├── AndroidManifest.xml │ ├── java │ └── me │ │ └── relex │ │ └── camerafilter │ │ ├── App.java │ │ ├── FileUtil.java │ │ ├── ImageFilterActivity.java │ │ ├── MainActivity.java │ │ ├── VideoRecordActivity.java │ │ ├── camera │ │ ├── CameraController.java │ │ ├── CameraHelper.java │ │ ├── CameraPictureSizeComparator.java │ │ ├── CameraPreviewSizeComparator.java │ │ ├── CameraRecordRenderer.java │ │ ├── CameraSurfaceRenderer.java │ │ └── CommonHandlerListener.java │ │ ├── filter │ │ ├── AbstractFilter.java │ │ ├── BlurFilter │ │ │ ├── CameraFilterGaussianBlur.java │ │ │ ├── CameraFilterGaussianSingleBlur.java │ │ │ ├── ImageFilterGaussianBlur.java │ │ │ └── ImageFilterGaussianSingleBlur.java │ │ ├── CameraFilter.java │ │ ├── CameraFilterBlend.java │ │ ├── CameraFilterBlendSoftLight.java │ │ ├── CameraFilterToneCurve.java │ │ ├── FilterGroup.java │ │ ├── FilterManager.java │ │ ├── IFilter.java │ │ ├── ImageFilter.java │ │ ├── ImageFilterBlend.java │ │ ├── ImageFilterBlendSoftLight.java │ │ ├── ImageFilterBlur.java │ │ └── ImageFilterToneCurve.java │ │ ├── gles │ │ ├── Drawable2d.java │ │ ├── DrawableFlipVertical2d.java │ │ ├── FullFrameRect.java │ │ └── GlUtil.java │ │ ├── image │ │ ├── ImageEglSurface.java │ │ └── ImageRenderer.java │ │ ├── video │ │ ├── EglCore.java │ │ ├── EglSurfaceBase.java │ │ ├── EncoderConfig.java │ │ ├── TextureMovieEncoder.java │ │ ├── VideoEncoderCore.java │ │ └── WindowSurface.java │ │ └── widget │ │ ├── AutoFitGLSurfaceView.java │ │ └── CameraSurfaceView.java │ └── res │ ├── drawable-hdpi │ └── ic_launcher.png │ ├── drawable-mdpi │ └── ic_launcher.png │ ├── drawable-nodpi │ ├── mask.png │ └── raw_image.jpg │ ├── drawable-xhdpi │ └── ic_launcher.png │ ├── drawable-xxhdpi │ └── ic_launcher.png │ ├── layout │ ├── activity_image_filter.xml │ ├── activity_main.xml │ └── activity_video_record.xml │ ├── raw │ ├── cross_1.acv │ ├── cross_10.acv │ ├── cross_11.acv │ ├── cross_2.acv │ ├── cross_3.acv │ ├── cross_4.acv │ ├── cross_5.acv │ ├── cross_6.acv │ ├── cross_7.acv │ ├── cross_8.acv │ ├── cross_9.acv │ ├── fragment_shader_2d.glsl │ ├── fragment_shader_2d_blend.glsl │ ├── fragment_shader_2d_blend_soft_light.glsl │ ├── fragment_shader_2d_blur.glsl │ ├── fragment_shader_2d_kernel.glsl │ ├── fragment_shader_2d_tone_curve.glsl │ ├── fragment_shader_ext.glsl │ ├── fragment_shader_ext_blend.glsl │ ├── fragment_shader_ext_blend_soft_light.glsl │ ├── fragment_shader_ext_blur.glsl │ ├── fragment_shader_ext_bw.glsl │ ├── fragment_shader_ext_kernel.glsl │ ├── fragment_shader_ext_night.glsl │ ├── fragment_shader_ext_tone_curve.glsl │ ├── vertex_shader.glsl │ ├── vertex_shader_2d_two_input.glsl │ ├── vertex_shader_blur.glsl │ └── vertex_shader_two_input.glsl │ └── values │ ├── dimens.xml │ ├── strings.xml │ └── styles.xml ├── build.gradle ├── gradle.properties ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat └── settings.gradle /.gitignore: -------------------------------------------------------------------------------- 1 | .gradle 2 | /local.properties 3 | /.idea/workspace.xml 4 | /.idea/libraries 5 | .DS_Store 6 | /build 7 | 
-------------------------------------------------------------------------------- /.idea/.name: --------------------------------------------------------------------------------
1 | CameraFilter
-------------------------------------------------------------------------------- /.idea/compiler.xml: --------------------------------------------------------------------------------
1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 |
-------------------------------------------------------------------------------- /.idea/copyright/profiles_settings.xml: --------------------------------------------------------------------------------
1 | 2 | 3 |
-------------------------------------------------------------------------------- /.idea/dictionaries/relex.xml: --------------------------------------------------------------------------------
1 | 2 | 3 | 4 | coord 5 | coords 6 | muxer 7 | 8 | 9 |
-------------------------------------------------------------------------------- /.idea/gradle.xml: --------------------------------------------------------------------------------
1 | 2 | 3 | 4 | 17 | 18 |
-------------------------------------------------------------------------------- /.idea/misc.xml: --------------------------------------------------------------------------------
1 | 2 | 3 | 4 | 5 | 6 | 7 | 19 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | Android 39 | 40 | 41 | Android Lint 42 | 43 | 44 | Java language level migration aids 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 66 | 67 | 68 | 69 | 70 | 1.7 71 | 72 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 88 | 89 | 90 | 91 | 92 | 93 |
-------------------------------------------------------------------------------- /.idea/modules.xml: --------------------------------------------------------------------------------
1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
-------------------------------------------------------------------------------- /.idea/runConfigurations.xml: --------------------------------------------------------------------------------
1 | 2 | 3 | 4 | 11 | 12 |
-------------------------------------------------------------------------------- /.idea/vcs.xml: --------------------------------------------------------------------------------
1 | 2 | 3 | 4 | 5 | 6 |
-------------------------------------------------------------------------------- /CameraFilter.iml: --------------------------------------------------------------------------------
1 | 2 | 3 | 4 | 5 | 6 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 |
-------------------------------------------------------------------------------- /README.md: --------------------------------------------------------------------------------
1 | # DEPRECATED
2 | 
3 | 
4 | ### CameraFilter
5 | 
6 | An exploration of using OpenGL ES 2.0 on Android to process the camera preview and apply filters.
7 | 
8 | 
9 | ##### Real-time filters
10 | The camera preview is displayed in a GLSurfaceView, and the different filter effects are implemented in OpenGL.
11 | 
12 | 
13 | [android-gpuimage](https://github.com/CyberAgent/android-gpuimage) uses ```GL_TEXTURE_2D``` as its texture target. When processing the camera preview, every frame first has to be converted from YUV to RGBA, so the preview is not smooth.
14 | 
15 | Here ```GL_TEXTURE_EXTERNAL_OES``` is used as the texture target instead, which bypasses the YUV-to-RGBA conversion. The preview aspect ratio is also handled in a simple way.
16 | 
17 | 
18 | ##### Video recording
19 | Recording follows the [grafika](https://github.com/google/grafika) approach (requires Android 4.3): MediaCodec and MediaMuxer encode the frames into an MP4 file, and MediaCodec's [createInputSurface](http://developer.android.com/reference/android/media/MediaCodec.html#createInputSurface()) receives the frames rendered by OpenGL.
20 | 
21 | 
22 | ##### Image filters
23 | As in [android-gpuimage](https://github.com/CyberAgent/android-gpuimage), ```GL_TEXTURE_2D``` is used as the texture target: the bitmap to be processed is uploaded as a texture and then rendered through a filter. Because the texture targets differ, the camera filters cannot be reused directly, so every filter has to be written twice...
24 | 
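To make the two preceding sections concrete, here are two minimal sketches; they are not code from this repository (the real work is done by CameraController, CameraRecordRenderer and the classes in the video package), and the helper names are illustrative.

First, the external-texture path used for the live preview: create an OES texture, wrap it in a SurfaceTexture and hand it to the camera, so frames stay on the GPU and no per-frame YUV-to-RGBA copy is needed.

```java
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import java.io.IOException;

// Sketch only: attach the camera preview to an external OES texture.
// Must run on the GL thread (e.g. inside onSurfaceCreated).
public final class OesPreviewSketch {
    public static SurfaceTexture attachPreview(Camera camera) throws IOException {
        int[] tex = new int[1];
        GLES20.glGenTextures(1, tex, 0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

        SurfaceTexture surfaceTexture = new SurfaceTexture(tex[0]);
        camera.setPreviewTexture(surfaceTexture); // the camera writes frames into the texture
        // Per frame: surfaceTexture.updateTexImage(), then draw with a fragment shader
        // that declares "uniform samplerExternalOES uTexture"
        // (and "#extension GL_OES_EGL_image_external : require").
        return surfaceTexture;
    }
}
```

Second, the recording path: configuring a Surface-input H.264 encoder in the style of grafika (API 18+). The 480x640 size and 1 Mb/s bitrate match the values used in VideoRecordActivity; the frame rate and I-frame interval here are assumptions.

```java
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.view.Surface;
import java.io.IOException;

// Sketch only: the Surface returned by createInputSurface() becomes the EGL
// window surface that the OpenGL filter output is drawn into while recording.
public final class EncoderSurfaceSketch {
    public static Surface createEncoderInputSurface(MediaCodec[] encoderOut) throws IOException {
        MediaFormat format = MediaFormat.createVideoFormat("video/avc", 480, 640);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 1024 * 1024);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);

        MediaCodec encoder = MediaCodec.createEncoderByType("video/avc");
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        Surface inputSurface = encoder.createInputSurface(); // call between configure() and start()
        encoder.start();
        encoderOut[0] = encoder;
        return inputSurface;
    }
}
```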
-------------------------------------------------------------------------------- /app/.gitignore: --------------------------------------------------------------------------------
1 | /build
2 | 
-------------------------------------------------------------------------------- /app/app.iml: --------------------------------------------------------------------------------
1 | 2 | 3 | 4 | 5 | 6 | 8 | 9 | 10 | 11 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 |
-------------------------------------------------------------------------------- /app/build.gradle: --------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 | 
3 | android {
4 |     compileSdkVersion 22
5 |     buildToolsVersion "22.0.1"
6 | 
7 |     defaultConfig {
8 |         applicationId "me.relex.camerafilter"
9 |         minSdkVersion 18
10 |         targetSdkVersion 22
11 |         versionCode 1
12 |         versionName "1.0"
13 |     }
14 |     buildTypes {
15 |         release {
16 |             minifyEnabled false
17 |             proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
18 |         }
19 |     }
20 | }
21 | 
22 | dependencies {
23 |     compile 'com.android.support:appcompat-v7:22.2.0'
24 | }
25 | 
-------------------------------------------------------------------------------- /app/proguard-rules.pro: --------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # By default, the flags in this file are appended to flags specified
3 | # in /Applications/Android Studio.app/sdk/tools/proguard/proguard-android.txt
4 | # You can edit the include path and order by changing the proguardFiles
5 | # directive in build.gradle. 
6 | # 7 | # For more details, see 8 | # http://developer.android.com/guide/developing/tools/proguard.html 9 | 10 | # Add any project specific keep options here: 11 | 12 | # If your project uses WebView with JS, uncomment the following 13 | # and specify the fully qualified class name to the JavaScript interface 14 | # class: 15 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview { 16 | # public *; 17 | #} 18 | -------------------------------------------------------------------------------- /app/src/main/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 6 | 7 | 10 | 13 | 16 | 19 | 20 | 29 | 30 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 43 | 46 | 47 | 48 | 49 | 50 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/App.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter; 2 | 3 | import android.app.Application; 4 | import me.relex.camerafilter.video.TextureMovieEncoder; 5 | 6 | public class App extends Application { 7 | 8 | @Override public void onCreate() { 9 | super.onCreate(); 10 | TextureMovieEncoder.initialize(getApplicationContext()); 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/FileUtil.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter; 2 | 3 | import android.content.Context; 4 | import android.os.Environment; 5 | import android.util.Log; 6 | import java.io.File; 7 | 8 | public class FileUtil { 9 | 10 | private static File getExternalDirectory(Context context) { 11 | 12 | File cacheDir = context.getExternalCacheDir(); 13 | if (cacheDir != null && !cacheDir.exists()) { 14 | if (!cacheDir.mkdirs()) { 15 | Log.d(FileUtil.class.getName(), "无法创建SDCard cache"); 16 | return null; 17 | } 18 | 19 | //try { 20 | // new File(cacheDir, ".nomedia").createNewFile(); 21 | //} catch (IOException e) { 22 | // Log.d(FileUtil.class.getName(), "无法创建 .nomedia 文件"); 23 | //} 24 | } 25 | 26 | return cacheDir; 27 | } 28 | 29 | public static File getCacheDirectory(Context context, boolean preferExternal) { 30 | File appCacheDir = null; 31 | 32 | if (preferExternal && Environment.MEDIA_MOUNTED.equals( 33 | Environment.getExternalStorageState())) { 34 | appCacheDir = getExternalDirectory(context); 35 | } 36 | 37 | if (appCacheDir == null) { 38 | appCacheDir = context.getCacheDir(); 39 | } 40 | 41 | if (appCacheDir == null) { 42 | String cacheDirPath = "/data/data/" + context.getPackageName() + "/cache/"; 43 | Log.d(FileUtil.class.getName(), 44 | "Can't define system cache directory! 
use " + cacheDirPath); 45 | appCacheDir = new File(cacheDirPath); 46 | } 47 | 48 | return appCacheDir; 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/ImageFilterActivity.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter; 2 | 3 | import android.content.Context; 4 | import android.graphics.Bitmap; 5 | import android.graphics.BitmapFactory; 6 | import android.os.AsyncTask; 7 | import android.os.Bundle; 8 | import android.support.v7.app.AppCompatActivity; 9 | import android.view.View; 10 | import android.widget.ImageView; 11 | import me.relex.camerafilter.filter.FilterManager; 12 | import me.relex.camerafilter.image.ImageEglSurface; 13 | import me.relex.camerafilter.image.ImageRenderer; 14 | 15 | public class ImageFilterActivity extends AppCompatActivity implements View.OnClickListener { 16 | 17 | private ImageView mImageView; 18 | private ImageRenderer mImageRenderer; 19 | private FilterTask mFilterTask; 20 | 21 | @Override protected void onCreate(Bundle savedInstanceState) { 22 | super.onCreate(savedInstanceState); 23 | setContentView(R.layout.activity_image_filter); 24 | 25 | mImageView = (ImageView) findViewById(R.id.image_view); 26 | 27 | findViewById(R.id.filter_normal).setOnClickListener(this); 28 | findViewById(R.id.filter_tone_curve).setOnClickListener(this); 29 | findViewById(R.id.filter_soft_light).setOnClickListener(this); 30 | 31 | mImageRenderer = 32 | new ImageRenderer(getApplicationContext(), FilterManager.FilterType.Normal); 33 | } 34 | 35 | @Override public void onClick(View v) { 36 | switch (v.getId()) { 37 | case R.id.filter_normal: 38 | startFilterTask(FilterManager.FilterType.Normal); 39 | break; 40 | case R.id.filter_tone_curve: 41 | startFilterTask(FilterManager.FilterType.ToneCurve); 42 | break; 43 | case R.id.filter_soft_light: 44 | startFilterTask(FilterManager.FilterType.SoftLight); 45 | break; 46 | } 47 | } 48 | 49 | private void startFilterTask(FilterManager.FilterType filterType) { 50 | if (mFilterTask == null || mFilterTask.getStatus() != AsyncTask.Status.RUNNING) { 51 | mFilterTask = new FilterTask(getApplicationContext(), mImageRenderer, filterType); 52 | mFilterTask.execute(); 53 | } 54 | } 55 | 56 | public class FilterTask extends AsyncTask { 57 | 58 | private Context mContext; 59 | private FilterManager.FilterType mFilterType; 60 | private ImageRenderer mRenderer; 61 | 62 | public FilterTask(Context context, ImageRenderer renderer, 63 | FilterManager.FilterType filterType) { 64 | mFilterType = filterType; 65 | mContext = context; 66 | mRenderer = renderer; 67 | } 68 | 69 | @Override protected Bitmap doInBackground(Void... 
params) { 70 | final BitmapFactory.Options options = new BitmapFactory.Options(); 71 | options.inScaled = false; 72 | final Bitmap bitmap = 73 | BitmapFactory.decodeResource(mContext.getResources(), R.drawable.raw_image, 74 | options); 75 | ImageEglSurface imageEglSurface = 76 | new ImageEglSurface(bitmap.getWidth(), bitmap.getHeight()); //设置输出宽高, 77 | imageEglSurface.setRenderer(mRenderer); 78 | mRenderer.changeFilter(mFilterType); 79 | mRenderer.setImageBitmap(bitmap); 80 | imageEglSurface.drawFrame(); 81 | Bitmap filterBitmap = imageEglSurface.getBitmap(); 82 | imageEglSurface.release(); 83 | mRenderer.destroy(); 84 | 85 | return filterBitmap; 86 | } 87 | 88 | @Override protected void onPostExecute(Bitmap bitmap) { 89 | super.onPostExecute(bitmap); 90 | 91 | if (bitmap == null) { 92 | return; 93 | } 94 | 95 | mImageView.setImageBitmap(bitmap); 96 | } 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/MainActivity.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter; 2 | 3 | import android.content.Intent; 4 | import android.os.Bundle; 5 | import android.support.v7.app.AppCompatActivity; 6 | import android.view.View; 7 | 8 | public class MainActivity extends AppCompatActivity implements View.OnClickListener { 9 | 10 | @Override protected void onCreate(Bundle savedInstanceState) { 11 | super.onCreate(savedInstanceState); 12 | setContentView(R.layout.activity_main); 13 | 14 | findViewById(R.id.image_filter).setOnClickListener(this); 15 | findViewById(R.id.video_record).setOnClickListener(this); 16 | } 17 | 18 | @Override public void onClick(View v) { 19 | switch (v.getId()) { 20 | case R.id.image_filter: 21 | startActivity(new Intent(this, ImageFilterActivity.class)); 22 | 23 | break; 24 | case R.id.video_record: 25 | startActivity(new Intent(this, VideoRecordActivity.class)); 26 | break; 27 | } 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/VideoRecordActivity.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter; 2 | 3 | import android.os.Bundle; 4 | import android.support.v7.app.AppCompatActivity; 5 | import android.view.View; 6 | import android.widget.Button; 7 | import java.io.File; 8 | import me.relex.camerafilter.camera.CameraRecordRenderer; 9 | import me.relex.camerafilter.filter.FilterManager.FilterType; 10 | import me.relex.camerafilter.video.EncoderConfig; 11 | import me.relex.camerafilter.video.TextureMovieEncoder; 12 | import me.relex.camerafilter.widget.CameraSurfaceView; 13 | 14 | public class VideoRecordActivity extends AppCompatActivity implements View.OnClickListener { 15 | 16 | private CameraSurfaceView mCameraSurfaceView; 17 | private Button mRecordButton; 18 | private boolean mIsRecordEnabled; 19 | 20 | @Override protected void onCreate(Bundle savedInstanceState) { 21 | super.onCreate(savedInstanceState); 22 | setContentView(R.layout.activity_video_record); 23 | mCameraSurfaceView = (CameraSurfaceView) findViewById(R.id.camera); 24 | mCameraSurfaceView.setAspectRatio(3, 4); 25 | 26 | findViewById(R.id.filter_normal).setOnClickListener(this); 27 | findViewById(R.id.filter_tone_curve).setOnClickListener(this); 28 | findViewById(R.id.filter_soft_light).setOnClickListener(this); 29 | 30 | mRecordButton = (Button) findViewById(R.id.record); 31 | 
mRecordButton.setOnClickListener(this); 32 | 33 | mIsRecordEnabled = TextureMovieEncoder.getInstance().isRecording(); 34 | updateRecordButton(); 35 | } 36 | 37 | @Override protected void onResume() { 38 | super.onResume(); 39 | mCameraSurfaceView.onResume(); 40 | updateRecordButton(); 41 | } 42 | 43 | @Override protected void onPause() { 44 | mCameraSurfaceView.onPause(); 45 | super.onPause(); 46 | } 47 | 48 | @Override protected void onDestroy() { 49 | mCameraSurfaceView.onDestroy(); 50 | super.onDestroy(); 51 | } 52 | 53 | @Override public void onClick(View v) { 54 | switch (v.getId()) { 55 | case R.id.filter_normal: 56 | mCameraSurfaceView.changeFilter(FilterType.Normal); 57 | break; 58 | case R.id.filter_tone_curve: 59 | mCameraSurfaceView.changeFilter(FilterType.ToneCurve); 60 | break; 61 | case R.id.filter_soft_light: 62 | mCameraSurfaceView.changeFilter(FilterType.SoftLight); 63 | break; 64 | case R.id.record: 65 | if (!mIsRecordEnabled) { 66 | mCameraSurfaceView.queueEvent(new Runnable() { 67 | @Override public void run() { 68 | CameraRecordRenderer renderer = mCameraSurfaceView.getRenderer(); 69 | renderer.setEncoderConfig(new EncoderConfig(new File( 70 | FileUtil.getCacheDirectory(VideoRecordActivity.this, true), 71 | "video-" + System.currentTimeMillis() + ".mp4"), 480, 640, 72 | 1024 * 1024 /* 1 Mb/s */)); 73 | } 74 | }); 75 | } 76 | mIsRecordEnabled = !mIsRecordEnabled; 77 | mCameraSurfaceView.queueEvent(new Runnable() { 78 | @Override public void run() { 79 | mCameraSurfaceView.getRenderer().setRecordingEnabled(mIsRecordEnabled); 80 | } 81 | }); 82 | updateRecordButton(); 83 | break; 84 | } 85 | } 86 | 87 | public void updateRecordButton() { 88 | mRecordButton.setText( 89 | getString(mIsRecordEnabled ? R.string.record_stop : R.string.record_start)); 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/camera/CameraController.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.camera; 2 | 3 | import android.content.Context; 4 | import android.content.Intent; 5 | import android.graphics.Rect; 6 | import android.graphics.SurfaceTexture; 7 | import android.hardware.Camera; 8 | import android.os.Handler; 9 | import android.os.Looper; 10 | import android.os.Message; 11 | import android.support.v4.content.LocalBroadcastManager; 12 | import android.text.TextUtils; 13 | import android.view.MotionEvent; 14 | import android.view.View; 15 | import java.util.ArrayList; 16 | import java.util.Collections; 17 | import java.util.List; 18 | 19 | public class CameraController 20 | implements Camera.AutoFocusCallback, Camera.ErrorCallback, CommonHandlerListener { 21 | 22 | public static final String BROADCAST_ACTION_OPEN_CAMERA_ERROR = 23 | "CameraController.BROADCAST_ACTION_OPEN_CAMERA_ERROR"; 24 | 25 | public static final String TYPE_OPEN_CAMERA_ERROR_TYPE = 26 | "CameraController.TYPE_OPEN_CAMERA_ERROR_TYPE"; 27 | 28 | public static final int TYPE_OPEN_CAMERA_ERROR_UNKNOWN = 0; 29 | public static final int TYPE_OPEN_CAMERA_ERROR_PERMISSION_DISABLE = 1; 30 | 31 | private static volatile CameraController sInstance; 32 | 33 | public final static float sCameraRatio = 4f / 3f; 34 | private final CameraControllerHandler mHandler; 35 | 36 | private Camera mCamera = null; 37 | public int mCameraIndex = Camera.CameraInfo.CAMERA_FACING_BACK; 38 | public boolean mIsSupportFontFacingCamera = false; 39 | public boolean mCameraMirrored = false; 40 | 
public Camera.Size mCameraPictureSize; 41 | 42 | private final Object mLock = new Object(); 43 | 44 | ////////// 45 | private boolean mAutoFocusLocked = false; 46 | private boolean mIsSupportAutoFocus = false; 47 | private boolean mIsSupportAutoFocusContinuousPicture = false; 48 | 49 | private CameraPictureSizeComparator mCameraPictureSizeComparator = 50 | new CameraPictureSizeComparator(); 51 | 52 | ////////// 53 | public static CameraController getInstance() { 54 | if (sInstance == null) { 55 | synchronized (CameraController.class) { 56 | if (sInstance == null) { 57 | sInstance = new CameraController(); 58 | } 59 | } 60 | } 61 | return sInstance; 62 | } 63 | 64 | private CameraController() { 65 | mHandler = new CameraControllerHandler(this); 66 | } 67 | 68 | public boolean checkSupportFontFacingCamera(boolean frontPriority) { 69 | try { 70 | Camera.CameraInfo cameraInfo = new Camera.CameraInfo(); 71 | int cameraCount = Camera.getNumberOfCameras(); 72 | for (int i = 0; i < cameraCount; i++) { 73 | Camera.getCameraInfo(i, cameraInfo); 74 | if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { 75 | if (frontPriority) { // 显示前置摄像头 76 | mCameraIndex = Camera.CameraInfo.CAMERA_FACING_FRONT; 77 | } 78 | mIsSupportFontFacingCamera = true; 79 | return true; 80 | } 81 | } 82 | } catch (Exception e) { 83 | e.printStackTrace(); 84 | } 85 | return false; 86 | } 87 | 88 | public void setupCamera(SurfaceTexture surfaceTexture, Context context, 89 | int desiredPictureWidth) { 90 | if (mCamera != null) { 91 | release(); 92 | } 93 | 94 | synchronized (mLock) { 95 | try { 96 | if (Camera.getNumberOfCameras() > 0) { 97 | mCamera = Camera.open(mCameraIndex); 98 | } else { 99 | mCamera = Camera.open(); 100 | } 101 | 102 | Camera.CameraInfo cameraInfo = new Camera.CameraInfo(); 103 | Camera.getCameraInfo(mCameraIndex, cameraInfo); 104 | 105 | mCameraMirrored = (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT); 106 | mCamera.setDisplayOrientation(90); 107 | mCamera.setPreviewTexture(surfaceTexture); 108 | } catch (Exception e) { 109 | e.printStackTrace(); 110 | mCamera = null; 111 | e.printStackTrace(); 112 | Intent intent = new Intent(BROADCAST_ACTION_OPEN_CAMERA_ERROR); 113 | String message = e.getMessage(); 114 | intent.putExtra(TYPE_OPEN_CAMERA_ERROR_TYPE, 115 | (!TextUtils.isEmpty(message) && message.contains("permission")) 116 | ? 
TYPE_OPEN_CAMERA_ERROR_PERMISSION_DISABLE 117 | : TYPE_OPEN_CAMERA_ERROR_UNKNOWN); 118 | LocalBroadcastManager.getInstance(context).sendBroadcast(intent); 119 | } 120 | 121 | if (mCamera == null) { 122 | //Toast.makeText(mContext, "Unable to start camera", Toast.LENGTH_SHORT).showFromSession(); 123 | return; 124 | } 125 | 126 | try { 127 | findCameraSupportValue(desiredPictureWidth); 128 | } catch (Exception e) { 129 | e.printStackTrace(); 130 | } 131 | } 132 | } 133 | 134 | public void configureCameraParameters(Camera.Size previewSize) { 135 | 136 | try { 137 | Camera.Parameters cp = getCameraParameters(); 138 | if (cp == null || mCamera == null) { 139 | return; 140 | } 141 | // 对焦模式 142 | synchronized (mLock) { 143 | List focusModes = cp.getSupportedFocusModes(); 144 | if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) { 145 | mIsSupportAutoFocusContinuousPicture = true; 146 | cp.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);// 自动连续对焦 147 | } else if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) { 148 | mIsSupportAutoFocus = true; 149 | cp.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);// 自动对焦 150 | } else { 151 | mIsSupportAutoFocusContinuousPicture = false; 152 | mIsSupportAutoFocus = false; 153 | } 154 | // 预览尺寸 155 | if (previewSize != null) { 156 | cp.setPreviewSize(previewSize.width, previewSize.height); 157 | } 158 | //拍照尺寸 159 | cp.setPictureSize(mCameraPictureSize.width, mCameraPictureSize.height); 160 | 161 | mCamera.setParameters(cp); 162 | mCamera.setErrorCallback(this); 163 | } 164 | } catch (Exception e) { 165 | e.printStackTrace(); 166 | } 167 | 168 | mAutoFocusLocked = false; 169 | } 170 | 171 | public boolean startCameraPreview() { 172 | //Log.d(TAG, "打开预览了"); 173 | if (mCamera != null) { 174 | synchronized (mLock) { 175 | try { 176 | mCamera.startPreview(); 177 | 178 | if (mIsSupportAutoFocusContinuousPicture) { 179 | mCamera.cancelAutoFocus(); 180 | } 181 | return true; 182 | } catch (Exception e) { 183 | e.printStackTrace(); 184 | } 185 | } 186 | } 187 | 188 | return false; 189 | } 190 | 191 | public boolean stopCameraPreview() { 192 | 193 | //Log.d(TAG, "关闭预览了"); 194 | if (mCamera != null) { 195 | synchronized (mLock) { 196 | try { 197 | mCamera.stopPreview(); 198 | return true; 199 | } catch (Exception e) { 200 | e.printStackTrace(); 201 | } 202 | } 203 | } 204 | 205 | return false; 206 | } 207 | 208 | public void release() { 209 | if (mCamera != null) { 210 | synchronized (mLock) { 211 | try { 212 | mCamera.setPreviewCallback(null); 213 | mCamera.stopPreview(); 214 | mCamera.release(); 215 | } catch (Exception e) { 216 | e.printStackTrace(); 217 | } finally { 218 | mCamera = null; 219 | } 220 | } 221 | } 222 | } 223 | 224 | public boolean startAutoFocus(Camera.AutoFocusCallback autoFocusCallback) { 225 | if ((mIsSupportAutoFocus || mIsSupportAutoFocusContinuousPicture) && mCamera != null) { 226 | try { 227 | 228 | String focusMode = getCameraParameters().getFocusMode(); 229 | 230 | if (!TextUtils.isEmpty(focusMode) && focusMode. 
231 | equals(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) { // 如果是连续自动对焦, 来一次对焦处理 232 | mCamera.autoFocus(autoFocusCallback); 233 | } else { 234 | return false; 235 | } 236 | } catch (Exception e) { 237 | e.printStackTrace(); 238 | return false; 239 | } 240 | return true; 241 | } 242 | 243 | return false; 244 | } 245 | 246 | public void startTouchAutoFocus(View v, MotionEvent event) { 247 | if ((mIsSupportAutoFocus || mIsSupportAutoFocusContinuousPicture) 248 | && mCamera != null 249 | && !mAutoFocusLocked) { 250 | try { 251 | mAutoFocusLocked = true; 252 | 253 | Camera.Parameters parameters = getCameraParameters(); 254 | parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO); 255 | if (parameters.getMaxNumFocusAreas() > 0) { 256 | Rect focusRect = 257 | CameraHelper.calculateTapArea(v, event.getX(), event.getY(), 1f); 258 | List focusAreas = new ArrayList(); 259 | focusAreas.add(new Camera.Area(focusRect, 1000)); 260 | parameters.setFocusAreas(focusAreas); 261 | } 262 | 263 | if (parameters.getMaxNumMeteringAreas() > 0) { 264 | Rect meteringRect = 265 | CameraHelper.calculateTapArea(v, event.getX(), event.getY(), 1.5f); 266 | List meteringAreas = new ArrayList(); 267 | meteringAreas.add(new Camera.Area(meteringRect, 1000)); 268 | parameters.setMeteringAreas(meteringAreas); 269 | } 270 | 271 | mCamera.setParameters(parameters); 272 | mCamera.autoFocus(this); 273 | } catch (Exception e) { 274 | e.printStackTrace(); 275 | mAutoFocusLocked = false; 276 | } 277 | } 278 | } 279 | 280 | public Camera.Parameters getCameraParameters() { 281 | if (mCamera != null) { 282 | synchronized (mLock) { 283 | try { 284 | return mCamera.getParameters(); 285 | } catch (Exception e) { 286 | e.printStackTrace(); 287 | } 288 | } 289 | } 290 | 291 | return null; 292 | } 293 | 294 | private void findCameraSupportValue(int desiredWidth) { 295 | Camera.Parameters cp = getCameraParameters(); 296 | List cs = cp.getSupportedPictureSizes(); 297 | if (cs != null && !cs.isEmpty()) { 298 | Collections.sort(cs, mCameraPictureSizeComparator); 299 | for (Camera.Size size : cs) { 300 | if (size.width < desiredWidth && size.height < desiredWidth) { 301 | break; 302 | } 303 | float ratio = (float) size.width / size.height; 304 | if (ratio == sCameraRatio) { 305 | mCameraPictureSize = size; 306 | } 307 | } 308 | } 309 | } 310 | 311 | public void takePicture(Camera.ShutterCallback shutter, Camera.PictureCallback raw, 312 | Camera.PictureCallback jpeg) { 313 | if (mCamera != null) { 314 | mCamera.takePicture(shutter, raw, jpeg); 315 | } 316 | } 317 | 318 | //////////////////// implements //////////////////// 319 | 320 | //AutoFocusCallback 321 | @Override public void onAutoFocus(boolean success, Camera camera) { 322 | 323 | mHandler.sendEmptyMessageDelayed(RESET_TOUCH_FOCUS, RESET_TOUCH_FOCUS_DELAY); 324 | 325 | mAutoFocusLocked = false; 326 | } 327 | 328 | //ErrorCallback 329 | @Override public void onError(int error, Camera camera) { 330 | 331 | } 332 | 333 | //PictureCallback 334 | //@Override public void onPictureTaken(byte[] data, Camera camera) { 335 | // mIsTakingPicture = false; 336 | // //try { 337 | // // Camera.Parameters ps = camera.getParameters(); 338 | // // if (ps.getPictureFormat() == ImageFormat.JPEG) { 339 | // // //CommonUtil.executeAsyncTask(new SquareBitmapTask(data, mCameraMirrored) { 340 | // // // @Override protected void onPostExecute(PublishBean newPost) { 341 | // // // super.onPostExecute(newPost); 342 | // // // mIsTakingPicture = false; 343 | // // // if (mPictureCallback != null) { 344 | // 
// // mPictureCallback.onPictureTaken(newPost); 345 | // // // } 346 | // // // } 347 | // // //}); 348 | // // } 349 | // //} catch (Exception e) { 350 | // // e.printStackTrace(); 351 | // //} 352 | //} 353 | 354 | //public boolean onClickEvent(View v, MotionEvent event) { 355 | // if (mClickGestureDetector.onTouchEvent(event)) { 356 | // Log.e("onClickEvent", "onClickEvent 进入了 onSingleTapUp"); 357 | // startTouchAutoFocus(v, event); 358 | // return true; 359 | // } 360 | // 361 | // return false; 362 | //} 363 | 364 | //////////////////// Getter & Setter //////////////////// 365 | 366 | public Camera getCamera() { 367 | return mCamera; 368 | } 369 | 370 | public int getCameraIndex() { 371 | return mCameraIndex; 372 | } 373 | 374 | public void setCameraIndex(int cameraIndex) { 375 | this.mCameraIndex = cameraIndex; 376 | } 377 | 378 | public boolean isSupportFontFacingCamera() { 379 | return mIsSupportFontFacingCamera; 380 | } 381 | 382 | private static final int RESET_TOUCH_FOCUS = 301; 383 | private static final int RESET_TOUCH_FOCUS_DELAY = 3000; 384 | 385 | private static class CameraControllerHandler extends Handler { 386 | 387 | private CommonHandlerListener listener; 388 | 389 | public CameraControllerHandler(CommonHandlerListener listener) { 390 | super(Looper.getMainLooper()); 391 | this.listener = listener; 392 | } 393 | 394 | @Override public void handleMessage(Message msg) { 395 | listener.handleMessage(msg); 396 | } 397 | } 398 | 399 | @Override public void handleMessage(Message msg) { 400 | switch (msg.what) { 401 | case RESET_TOUCH_FOCUS: { 402 | if (mCamera == null || mAutoFocusLocked) { 403 | return; 404 | } 405 | mHandler.removeMessages(RESET_TOUCH_FOCUS); 406 | try { 407 | if (mIsSupportAutoFocusContinuousPicture) { 408 | Camera.Parameters cp = getCameraParameters(); 409 | if (cp != null) { 410 | cp.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE); 411 | mCamera.setParameters(cp); 412 | } 413 | } 414 | mCamera.cancelAutoFocus(); 415 | } catch (Exception e) { 416 | e.printStackTrace(); 417 | } 418 | 419 | break; 420 | } 421 | } 422 | } 423 | } 424 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/camera/CameraHelper.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.camera; 2 | 3 | import android.graphics.Rect; 4 | import android.hardware.Camera; 5 | import android.view.View; 6 | import java.util.Collections; 7 | import java.util.List; 8 | 9 | public class CameraHelper { 10 | 11 | // 12 | public static Camera.Size getOptimalPreviewSize(Camera.Parameters parameters, 13 | Camera.Size pictureSize, int viewHeight) { 14 | 15 | if (parameters == null || pictureSize == null) { 16 | return null; 17 | } 18 | 19 | List sizes = parameters.getSupportedPreviewSizes(); 20 | 21 | Collections.sort(sizes, new CameraPreviewSizeComparator()); 22 | 23 | final double ASPECT_TOLERANCE = 0.05; 24 | double targetRatio = (double) pictureSize.width / pictureSize.height; 25 | if (sizes == null) return null; 26 | 27 | Camera.Size optimalSize = null; 28 | double minDiff = Double.MAX_VALUE; 29 | int targetHeight = pictureSize.height; 30 | // Try to find an size match aspect ratio and size 31 | for (Camera.Size size : sizes) { 32 | double ratio = (double) size.width / size.height; 33 | if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue; 34 | 35 | if (optimalSize != null && size.height > viewHeight) { 36 | break; 37 | } 38 | 39 | if 
(Math.abs(size.height - targetHeight) < minDiff) { 40 | optimalSize = size; 41 | minDiff = Math.abs(size.height - targetHeight); 42 | } 43 | } 44 | 45 | // Cannot find the one match the aspect ratio, ignore the requirement 46 | if (optimalSize == null) { 47 | minDiff = Double.MAX_VALUE; 48 | for (Camera.Size size : sizes) { 49 | if (Math.abs(size.height - targetHeight) < minDiff) { 50 | optimalSize = size; 51 | minDiff = Math.abs(size.height - targetHeight); 52 | } 53 | } 54 | } 55 | 56 | //Log.e("CameraHelper", 57 | // "optimalSize : width=" + optimalSize.width + " height=" + optimalSize.height); 58 | return optimalSize; 59 | } 60 | 61 | // 这里只使用于旋转了90度 62 | public static Rect calculateTapArea(View v, float oldx, float oldy, float coefficient) { 63 | 64 | float x = oldy; 65 | float y = v.getHeight() - oldx; 66 | 67 | float focusAreaSize = 300; 68 | 69 | int areaSize = Float.valueOf(focusAreaSize * coefficient).intValue(); 70 | int centerX = (int) (x / v.getWidth() * 2000 - 1000); 71 | int centerY = (int) (y / v.getHeight() * 2000 - 1000); 72 | 73 | int left = clamp(centerX - areaSize / 2, -1000, 1000); 74 | int right = clamp(left + areaSize, -1000, 1000); 75 | int top = clamp(centerY - areaSize / 2, -1000, 1000); 76 | int bottom = clamp(top + areaSize, -1000, 1000); 77 | 78 | return new Rect(left, top, right, bottom); 79 | } 80 | 81 | private static int clamp(int x, int min, int max) { 82 | if (x > max) { 83 | return max; 84 | } 85 | if (x < min) { 86 | return min; 87 | } 88 | return x; 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/camera/CameraPictureSizeComparator.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.camera; 2 | 3 | import android.hardware.Camera; 4 | import java.util.Comparator; 5 | 6 | public class CameraPictureSizeComparator implements Comparator { 7 | 8 | // 拍照尺寸建议从大到小,优先获取较大尺寸 9 | public int compare(Camera.Size size1, Camera.Size size2) { 10 | return size2.width - size1.width; 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/camera/CameraPreviewSizeComparator.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.camera; 2 | 3 | import android.hardware.Camera; 4 | import java.util.Comparator; 5 | 6 | public class CameraPreviewSizeComparator implements Comparator { 7 | 8 | // 预览尺寸建议从小到大,优先获取较小的尺寸 9 | public int compare(Camera.Size size1, Camera.Size size2) { 10 | return size1.width - size2.width; 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/camera/CameraRecordRenderer.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.camera; 2 | 3 | import android.content.Context; 4 | import android.graphics.SurfaceTexture; 5 | import android.opengl.EGL14; 6 | import android.opengl.GLSurfaceView; 7 | import android.opengl.Matrix; 8 | import javax.microedition.khronos.egl.EGLConfig; 9 | import javax.microedition.khronos.opengles.GL10; 10 | import me.relex.camerafilter.filter.FilterManager; 11 | import me.relex.camerafilter.filter.FilterManager.FilterType; 12 | import me.relex.camerafilter.gles.FullFrameRect; 13 | import me.relex.camerafilter.gles.GlUtil; 14 | import 
me.relex.camerafilter.video.EncoderConfig; 15 | import me.relex.camerafilter.video.TextureMovieEncoder; 16 | import me.relex.camerafilter.widget.CameraSurfaceView; 17 | 18 | public class CameraRecordRenderer implements GLSurfaceView.Renderer { 19 | 20 | private static final int RECORDING_OFF = 0; 21 | private static final int RECORDING_ON = 1; 22 | private static final int RECORDING_RESUMED = 2; 23 | 24 | private final Context mApplicationContext; 25 | private final CameraSurfaceView.CameraHandler mCameraHandler; 26 | private int mTextureId = GlUtil.NO_TEXTURE; 27 | private FullFrameRect mFullScreen; 28 | private SurfaceTexture mSurfaceTexture; 29 | private final float[] mSTMatrix = new float[16]; 30 | 31 | private FilterType mCurrentFilterType; 32 | private FilterType mNewFilterType; 33 | private TextureMovieEncoder mVideoEncoder; 34 | 35 | private boolean mRecordingEnabled; 36 | private int mRecordingStatus; 37 | private EncoderConfig mEncoderConfig; 38 | 39 | private float mMvpScaleX = 1f, mMvpScaleY = 1f; 40 | private int mSurfaceWidth, mSurfaceHeight; 41 | private int mIncomingWidth, mIncomingHeight; 42 | 43 | public CameraRecordRenderer(Context applicationContext, 44 | CameraSurfaceView.CameraHandler cameraHandler) { 45 | mApplicationContext = applicationContext; 46 | mCameraHandler = cameraHandler; 47 | mCurrentFilterType = mNewFilterType = FilterType.Normal; 48 | mVideoEncoder = TextureMovieEncoder.getInstance(); 49 | } 50 | 51 | public void setEncoderConfig(EncoderConfig encoderConfig) { 52 | mEncoderConfig = encoderConfig; 53 | } 54 | 55 | public void setRecordingEnabled(boolean recordingEnabled) { 56 | mRecordingEnabled = recordingEnabled; 57 | } 58 | 59 | public void setCameraPreviewSize(int width, int height) { 60 | mIncomingWidth = width; 61 | mIncomingHeight = height; 62 | 63 | float scaleHeight = mSurfaceWidth / (width * 1f / height * 1f); 64 | float surfaceHeight = mSurfaceHeight; 65 | 66 | if (mFullScreen != null) { 67 | mMvpScaleX = 1f; 68 | mMvpScaleY = scaleHeight / surfaceHeight; 69 | mFullScreen.scaleMVPMatrix(mMvpScaleX, mMvpScaleY); 70 | } 71 | } 72 | 73 | @Override public void onSurfaceCreated(GL10 gl, EGLConfig config) { 74 | Matrix.setIdentityM(mSTMatrix, 0); 75 | mRecordingEnabled = mVideoEncoder.isRecording(); 76 | if (mRecordingEnabled) { 77 | mRecordingStatus = RECORDING_RESUMED; 78 | } else { 79 | mRecordingStatus = RECORDING_OFF; 80 | mVideoEncoder.initFilter(mCurrentFilterType); 81 | } 82 | mFullScreen = new FullFrameRect( 83 | FilterManager.getCameraFilter(mCurrentFilterType, mApplicationContext)); 84 | mTextureId = mFullScreen.createTexture(); 85 | mSurfaceTexture = new SurfaceTexture(mTextureId); 86 | } 87 | 88 | @Override public void onSurfaceChanged(GL10 gl, int width, int height) { 89 | mSurfaceWidth = width; 90 | mSurfaceHeight = height; 91 | 92 | if (gl != null) { 93 | gl.glViewport(0, 0, width, height); 94 | } 95 | 96 | mCameraHandler.sendMessage( 97 | mCameraHandler.obtainMessage(CameraSurfaceView.CameraHandler.SETUP_CAMERA, width, 98 | height, mSurfaceTexture)); 99 | } 100 | 101 | @Override public void onDrawFrame(GL10 gl) { 102 | mSurfaceTexture.updateTexImage(); 103 | if (mNewFilterType != mCurrentFilterType) { 104 | mFullScreen.changeProgram( 105 | FilterManager.getCameraFilter(mNewFilterType, mApplicationContext)); 106 | mCurrentFilterType = mNewFilterType; 107 | } 108 | mFullScreen.getFilter().setTextureSize(mIncomingWidth, mIncomingHeight); 109 | mSurfaceTexture.getTransformMatrix(mSTMatrix); 110 | mFullScreen.drawFrame(mTextureId, 
mSTMatrix); 111 | 112 | videoOnDrawFrame(mTextureId, mSTMatrix, mSurfaceTexture.getTimestamp()); 113 | } 114 | 115 | private void videoOnDrawFrame(int textureId, float[] texMatrix, long timestamp) { 116 | if (mRecordingEnabled && mEncoderConfig != null) { 117 | switch (mRecordingStatus) { 118 | case RECORDING_OFF: 119 | mEncoderConfig.updateEglContext(EGL14.eglGetCurrentContext()); 120 | mVideoEncoder.startRecording(mEncoderConfig); 121 | mVideoEncoder.setTextureId(textureId); 122 | mVideoEncoder.scaleMVPMatrix(mMvpScaleX, mMvpScaleY); 123 | mRecordingStatus = RECORDING_ON; 124 | 125 | break; 126 | case RECORDING_RESUMED: 127 | mVideoEncoder.updateSharedContext(EGL14.eglGetCurrentContext()); 128 | mVideoEncoder.setTextureId(textureId); 129 | mVideoEncoder.scaleMVPMatrix(mMvpScaleX, mMvpScaleY); 130 | mRecordingStatus = RECORDING_ON; 131 | break; 132 | case RECORDING_ON: 133 | // yay 134 | break; 135 | default: 136 | throw new RuntimeException("unknown status " + mRecordingStatus); 137 | } 138 | } else { 139 | switch (mRecordingStatus) { 140 | case RECORDING_ON: 141 | case RECORDING_RESUMED: 142 | mVideoEncoder.stopRecording(); 143 | mRecordingStatus = RECORDING_OFF; 144 | break; 145 | case RECORDING_OFF: 146 | // yay 147 | break; 148 | default: 149 | throw new RuntimeException("unknown status " + mRecordingStatus); 150 | } 151 | } 152 | 153 | mVideoEncoder.updateFilter(mCurrentFilterType); 154 | mVideoEncoder.frameAvailable(texMatrix, timestamp); 155 | } 156 | 157 | public void notifyPausing() { 158 | 159 | if (mSurfaceTexture != null) { 160 | mSurfaceTexture.release(); 161 | mSurfaceTexture = null; 162 | } 163 | 164 | if (mFullScreen != null) { 165 | mFullScreen.release(false); // assume the GLSurfaceView EGL context is about 166 | mFullScreen = null; // to be destroyed 167 | } 168 | } 169 | 170 | public void changeFilter(FilterType filterType) { 171 | mNewFilterType = filterType; 172 | } 173 | } 174 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/camera/CameraSurfaceRenderer.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.camera; 2 | 3 | import android.content.Context; 4 | import android.graphics.SurfaceTexture; 5 | import android.opengl.GLSurfaceView; 6 | import android.opengl.Matrix; 7 | import javax.microedition.khronos.egl.EGLConfig; 8 | import javax.microedition.khronos.opengles.GL10; 9 | import me.relex.camerafilter.filter.FilterManager; 10 | import me.relex.camerafilter.filter.FilterManager.FilterType; 11 | import me.relex.camerafilter.gles.FullFrameRect; 12 | import me.relex.camerafilter.gles.GlUtil; 13 | import me.relex.camerafilter.widget.CameraSurfaceView; 14 | 15 | public class CameraSurfaceRenderer implements GLSurfaceView.Renderer { 16 | 17 | private final Context mContext; 18 | private final CameraSurfaceView.CameraHandler mCameraHandler; 19 | private int mTextureId = GlUtil.NO_TEXTURE; 20 | private FullFrameRect mFullScreen; 21 | private SurfaceTexture mSurfaceTexture; 22 | private final float[] mSTMatrix = new float[16]; 23 | 24 | private int mSurfaceWidth, mSurfaceHeight; 25 | private FilterType mCurrentFilterType; 26 | private FilterType mNewFilterType; 27 | 28 | public CameraSurfaceRenderer(Context context, CameraSurfaceView.CameraHandler cameraHandler) { 29 | mContext = context; 30 | mCameraHandler = cameraHandler; 31 | mCurrentFilterType = mNewFilterType = FilterType.Normal; 32 | } 33 | 34 | public void 
setCameraPreviewSize(int width, int height) { 35 | 36 | float scaleHeight = mSurfaceWidth / (width * 1f / height * 1f); 37 | float surfaceHeight = mSurfaceHeight; 38 | 39 | if (mFullScreen != null) { 40 | mFullScreen.scaleMVPMatrix(1f, scaleHeight / surfaceHeight); 41 | } 42 | } 43 | 44 | @Override public void onSurfaceCreated(GL10 gl, EGLConfig config) { 45 | Matrix.setIdentityM(mSTMatrix, 0); 46 | mFullScreen = 47 | new FullFrameRect(FilterManager.getCameraFilter(mCurrentFilterType, mContext)); 48 | mTextureId = mFullScreen.createTexture(); 49 | mSurfaceTexture = new SurfaceTexture(mTextureId); 50 | } 51 | 52 | @Override public void onSurfaceChanged(GL10 gl, int width, int height) { 53 | mSurfaceWidth = width; 54 | mSurfaceHeight = height; 55 | if (gl != null) { 56 | gl.glViewport(0, 0, width, height); 57 | } 58 | mCameraHandler.sendMessage( 59 | mCameraHandler.obtainMessage(CameraSurfaceView.CameraHandler.SETUP_CAMERA, width, 60 | height, mSurfaceTexture)); 61 | } 62 | 63 | @Override public void onDrawFrame(GL10 gl) { 64 | mSurfaceTexture.updateTexImage(); 65 | 66 | if (mNewFilterType != mCurrentFilterType) { 67 | mFullScreen.changeProgram(FilterManager.getCameraFilter(mNewFilterType, mContext)); 68 | mCurrentFilterType = mNewFilterType; 69 | } 70 | 71 | mSurfaceTexture.getTransformMatrix(mSTMatrix); 72 | mFullScreen.drawFrame(mTextureId, mSTMatrix); 73 | } 74 | 75 | public void notifyPausing() { 76 | 77 | if (mSurfaceTexture != null) { 78 | mSurfaceTexture.release(); 79 | mSurfaceTexture = null; 80 | } 81 | 82 | if (mFullScreen != null) { 83 | mFullScreen.release(false); // assume the GLSurfaceView EGL context is about 84 | mFullScreen = null; // to be destroyed 85 | } 86 | 87 | //mIncomingWidth = mIncomingHeight = -1; 88 | } 89 | 90 | public void changeFilter(FilterType filterType) { 91 | mNewFilterType = filterType; 92 | } 93 | } 94 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/camera/CommonHandlerListener.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.camera; 2 | 3 | import android.os.Message; 4 | 5 | public interface CommonHandlerListener { 6 | 7 | void handleMessage(Message msg); 8 | } 9 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/filter/AbstractFilter.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.filter; 2 | 3 | import android.content.Context; 4 | import java.nio.FloatBuffer; 5 | 6 | public abstract class AbstractFilter { 7 | protected abstract int createProgram(Context context); 8 | 9 | protected abstract void getGLSLValues(); 10 | 11 | protected abstract void useProgram(); 12 | 13 | protected abstract void bindTexture(int textureId); 14 | 15 | //protected abstract void runningOnDraw(); 16 | 17 | protected abstract void bindGLSLValues(float[] mvpMatrix, FloatBuffer vertexBuffer, 18 | int coordsPerVertex, int vertexStride, float[] texMatrix, FloatBuffer texBuffer, 19 | int texStride); 20 | 21 | protected abstract void drawArrays(int firstVertex, int vertexCount); 22 | 23 | protected abstract void unbindGLSLValues(); 24 | 25 | protected abstract void unbindTexture(); 26 | 27 | protected abstract void disuseProgram(); 28 | } 29 | -------------------------------------------------------------------------------- 
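AbstractFilter above fixes the per-frame draw order (use program, bind texture, bind GLSL values, draw, unbind, release), and CameraFilter / ImageFilter fill it in for the two texture targets; concrete filters mostly just swap the shader program and add their own uniforms. A rough sketch of that extension point follows — the shader resource `fragment_shader_ext_custom` and the `uStrength` uniform are hypothetical, everything else mirrors the classes in this package.

```java
package me.relex.camerafilter.filter;

import android.content.Context;
import android.opengl.GLES20;
import java.nio.FloatBuffer;
import me.relex.camerafilter.R;
import me.relex.camerafilter.gles.GlUtil;

// Sketch of a custom camera filter: supply a program and any extra uniforms;
// AbstractFilter/CameraFilter drive the rest of the draw cycle.
public class CameraFilterCustom extends CameraFilter {

    private int muStrengthLoc;        // location of a hypothetical extra uniform
    private float mStrength = 0.5f;

    public CameraFilterCustom(Context applicationContext) {
        super(applicationContext);
    }

    @Override protected int createProgram(Context applicationContext) {
        // R.raw.fragment_shader_ext_custom is hypothetical; vertex_shader exists in res/raw.
        return GlUtil.createProgram(applicationContext, R.raw.vertex_shader,
                R.raw.fragment_shader_ext_custom);
    }

    @Override protected void getGLSLValues() {
        super.getGLSLValues();
        muStrengthLoc = GLES20.glGetUniformLocation(mProgramHandle, "uStrength");
    }

    @Override
    protected void bindGLSLValues(float[] mvpMatrix, FloatBuffer vertexBuffer, int coordsPerVertex,
            int vertexStride, float[] texMatrix, FloatBuffer texBuffer, int texStride) {
        super.bindGLSLValues(mvpMatrix, vertexBuffer, coordsPerVertex, vertexStride, texMatrix,
                texBuffer, texStride);
        GLES20.glUniform1f(muStrengthLoc, mStrength); // push the extra uniform each frame
    }
}
```

Such a filter would presumably be handed to FullFrameRect through FilterManager in the same way as the built-in filter types.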
/app/src/main/java/me/relex/camerafilter/filter/BlurFilter/CameraFilterGaussianBlur.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.filter.BlurFilter; 2 | 3 | import android.content.Context; 4 | import me.relex.camerafilter.filter.CameraFilter; 5 | import me.relex.camerafilter.filter.FilterGroup; 6 | 7 | public class CameraFilterGaussianBlur extends FilterGroup { 8 | 9 | public CameraFilterGaussianBlur(Context context, float blur) { 10 | super(); 11 | addFilter(new CameraFilterGaussianSingleBlur(context, blur, false)); 12 | addFilter(new CameraFilterGaussianSingleBlur(context, blur, true)); 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/filter/BlurFilter/CameraFilterGaussianSingleBlur.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.filter.BlurFilter; 2 | 3 | import android.content.Context; 4 | import android.opengl.GLES20; 5 | import java.nio.FloatBuffer; 6 | import me.relex.camerafilter.R; 7 | import me.relex.camerafilter.filter.CameraFilter; 8 | import me.relex.camerafilter.gles.GlUtil; 9 | 10 | class CameraFilterGaussianSingleBlur extends CameraFilter { 11 | 12 | private int muTexelWidthOffset; 13 | private int muTexelHeightOffset; 14 | 15 | private float mBlurRatio; 16 | private boolean mWidthOrHeight; 17 | 18 | public CameraFilterGaussianSingleBlur(Context applicationContext, float blurRatio, 19 | boolean widthOrHeight) { 20 | super(applicationContext); 21 | mBlurRatio = blurRatio; 22 | mWidthOrHeight = widthOrHeight; 23 | } 24 | 25 | @Override protected int createProgram(Context applicationContext) { 26 | return GlUtil.createProgram(applicationContext, R.raw.vertex_shader_blur, 27 | R.raw.fragment_shader_ext_blur); 28 | } 29 | 30 | @Override protected void getGLSLValues() { 31 | super.getGLSLValues(); 32 | 33 | muTexelWidthOffset = GLES20.glGetUniformLocation(mProgramHandle, "uTexelWidthOffset"); 34 | muTexelHeightOffset = GLES20.glGetUniformLocation(mProgramHandle, "uTexelHeightOffset"); 35 | } 36 | 37 | @Override 38 | protected void bindGLSLValues(float[] mvpMatrix, FloatBuffer vertexBuffer, int coordsPerVertex, 39 | int vertexStride, float[] texMatrix, FloatBuffer texBuffer, int texStride) { 40 | super.bindGLSLValues(mvpMatrix, vertexBuffer, coordsPerVertex, vertexStride, texMatrix, 41 | texBuffer, texStride); 42 | 43 | if (mWidthOrHeight) { 44 | GLES20.glUniform1f(muTexelWidthOffset, 45 | mIncomingWidth == 0 ? 0f : mBlurRatio / mIncomingWidth); 46 | } else { 47 | GLES20.glUniform1f(muTexelHeightOffset, 48 | mIncomingHeight == 0 ? 
0f : mBlurRatio / mIncomingHeight); 49 | } 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/filter/BlurFilter/ImageFilterGaussianBlur.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.filter.BlurFilter; 2 | 3 | import android.content.Context; 4 | import me.relex.camerafilter.filter.CameraFilter; 5 | import me.relex.camerafilter.filter.FilterGroup; 6 | 7 | public class ImageFilterGaussianBlur extends FilterGroup { 8 | 9 | public ImageFilterGaussianBlur(Context context, float blur) { 10 | super(); 11 | addFilter(new ImageFilterGaussianSingleBlur(context, blur, false)); 12 | addFilter(new ImageFilterGaussianSingleBlur(context, blur, true)); 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/filter/BlurFilter/ImageFilterGaussianSingleBlur.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.filter.BlurFilter; 2 | 3 | import android.content.Context; 4 | import android.opengl.GLES20; 5 | import me.relex.camerafilter.R; 6 | import me.relex.camerafilter.gles.GlUtil; 7 | 8 | class ImageFilterGaussianSingleBlur extends CameraFilterGaussianSingleBlur { 9 | 10 | public ImageFilterGaussianSingleBlur(Context applicationContext, float blurRatio, 11 | boolean widthOrHeight) { 12 | super(applicationContext, blurRatio, widthOrHeight); 13 | } 14 | 15 | @Override public int getTextureTarget() { 16 | return GLES20.GL_TEXTURE_2D; 17 | } 18 | 19 | @Override protected int createProgram(Context applicationContext) { 20 | return GlUtil.createProgram(applicationContext, R.raw.vertex_shader_blur, 21 | R.raw.fragment_shader_2d_blur); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/filter/CameraFilter.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.filter; 2 | 3 | import android.content.Context; 4 | import android.opengl.GLES11Ext; 5 | import android.opengl.GLES20; 6 | import java.nio.FloatBuffer; 7 | import me.relex.camerafilter.R; 8 | import me.relex.camerafilter.gles.GlUtil; 9 | 10 | public class CameraFilter extends AbstractFilter implements IFilter { 11 | 12 | protected int mProgramHandle; 13 | private int maPositionLoc; 14 | private int muMVPMatrixLoc; 15 | private int muTexMatrixLoc; 16 | private int maTextureCoordLoc; 17 | private int mTextureLoc; 18 | 19 | protected int mIncomingWidth, mIncomingHeight; 20 | 21 | public CameraFilter(Context applicationContext) { 22 | mProgramHandle = createProgram(applicationContext); 23 | if (mProgramHandle == 0) { 24 | throw new RuntimeException("Unable to create program"); 25 | } 26 | getGLSLValues(); 27 | } 28 | 29 | @Override public int getTextureTarget() { 30 | return GLES11Ext.GL_TEXTURE_EXTERNAL_OES; 31 | } 32 | 33 | @Override public void setTextureSize(int width, int height) { 34 | if (width == 0 || height == 0) { 35 | return; 36 | } 37 | if (width == mIncomingWidth && height == mIncomingHeight) { 38 | return; 39 | } 40 | mIncomingWidth = width; 41 | mIncomingHeight = height; 42 | } 43 | 44 | @Override protected int createProgram(Context applicationContext) { 45 | return GlUtil.createProgram(applicationContext, R.raw.vertex_shader, 46 | R.raw.fragment_shader_ext); 47 | } 48 | 49 | @Override 
protected void getGLSLValues() { 50 | mTextureLoc = GLES20.glGetUniformLocation(mProgramHandle, "uTexture"); 51 | maPositionLoc = GLES20.glGetAttribLocation(mProgramHandle, "aPosition"); 52 | muMVPMatrixLoc = GLES20.glGetUniformLocation(mProgramHandle, "uMVPMatrix"); 53 | muTexMatrixLoc = GLES20.glGetUniformLocation(mProgramHandle, "uTexMatrix"); 54 | maTextureCoordLoc = GLES20.glGetAttribLocation(mProgramHandle, "aTextureCoord"); 55 | } 56 | 57 | @Override public void onDraw(float[] mvpMatrix, FloatBuffer vertexBuffer, int firstVertex, 58 | int vertexCount, int coordsPerVertex, int vertexStride, float[] texMatrix, 59 | FloatBuffer texBuffer, int textureId, int texStride) { 60 | 61 | GlUtil.checkGlError("draw start"); 62 | 63 | useProgram(); 64 | 65 | bindTexture(textureId); 66 | 67 | //runningOnDraw(); 68 | 69 | bindGLSLValues(mvpMatrix, vertexBuffer, coordsPerVertex, vertexStride, texMatrix, texBuffer, 70 | texStride); 71 | 72 | drawArrays(firstVertex, vertexCount); 73 | 74 | unbindGLSLValues(); 75 | 76 | unbindTexture(); 77 | 78 | disuseProgram(); 79 | } 80 | 81 | @Override protected void useProgram() { 82 | GLES20.glUseProgram(mProgramHandle); 83 | //GlUtil.checkGlError("glUseProgram"); 84 | } 85 | 86 | @Override protected void bindTexture(int textureId) { 87 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0); 88 | GLES20.glBindTexture(getTextureTarget(), textureId); 89 | GLES20.glUniform1i(mTextureLoc, 0); 90 | } 91 | 92 | @Override 93 | protected void bindGLSLValues(float[] mvpMatrix, FloatBuffer vertexBuffer, int coordsPerVertex, 94 | int vertexStride, float[] texMatrix, FloatBuffer texBuffer, int texStride) { 95 | 96 | GLES20.glUniformMatrix4fv(muMVPMatrixLoc, 1, false, mvpMatrix, 0); 97 | GLES20.glUniformMatrix4fv(muTexMatrixLoc, 1, false, texMatrix, 0); 98 | GLES20.glEnableVertexAttribArray(maPositionLoc); 99 | GLES20.glVertexAttribPointer(maPositionLoc, coordsPerVertex, GLES20.GL_FLOAT, false, 100 | vertexStride, vertexBuffer); 101 | GLES20.glEnableVertexAttribArray(maTextureCoordLoc); 102 | GLES20.glVertexAttribPointer(maTextureCoordLoc, 2, GLES20.GL_FLOAT, false, texStride, 103 | texBuffer); 104 | } 105 | 106 | @Override protected void drawArrays(int firstVertex, int vertexCount) { 107 | GLES20.glClearColor(0f, 0f, 0f, 1f); 108 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); 109 | GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, firstVertex, vertexCount); 110 | } 111 | 112 | @Override protected void unbindGLSLValues() { 113 | GLES20.glDisableVertexAttribArray(maPositionLoc); 114 | GLES20.glDisableVertexAttribArray(maTextureCoordLoc); 115 | } 116 | 117 | @Override protected void unbindTexture() { 118 | GLES20.glBindTexture(getTextureTarget(), 0); 119 | } 120 | 121 | @Override protected void disuseProgram() { 122 | GLES20.glUseProgram(0); 123 | } 124 | 125 | @Override public void releaseProgram() { 126 | GLES20.glDeleteProgram(mProgramHandle); 127 | mProgramHandle = -1; 128 | } 129 | 130 | /////////// Set Runnable //////////// 131 | //protected void addRunnableOnDraw(final Runnable runnable) { 132 | // synchronized (mRunnableOnDraw) { 133 | // mRunnableOnDraw.addLast(runnable); 134 | // } 135 | //} 136 | // 137 | //protected void setFloat(final int location, final float floatValue) { 138 | // addRunnableOnDraw(new Runnable() { 139 | // @Override public void run() { 140 | // GLES20.glUniform1f(location, floatValue); 141 | // } 142 | // }); 143 | //} 144 | // 145 | //@Override protected void runningOnDraw() { 146 | // while (!mRunnableOnDraw.isEmpty()) { 147 | // 
mRunnableOnDraw.removeFirst().run(); 148 | // } 149 | //} 150 | } 151 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/filter/CameraFilterBlend.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.filter; 2 | 3 | import android.content.Context; 4 | import android.graphics.Bitmap; 5 | import android.graphics.BitmapFactory; 6 | import android.opengl.GLES20; 7 | import android.support.annotation.DrawableRes; 8 | import java.nio.FloatBuffer; 9 | import me.relex.camerafilter.R; 10 | import me.relex.camerafilter.gles.GlUtil; 11 | 12 | public class CameraFilterBlend extends CameraFilter { 13 | 14 | protected int mExtraTextureId; 15 | protected int maExtraTextureCoordLoc; 16 | protected int muExtraTextureLoc; 17 | 18 | public CameraFilterBlend(Context context, @DrawableRes int drawableId) { 19 | super(context); 20 | final BitmapFactory.Options options = new BitmapFactory.Options(); 21 | options.inScaled = false; // No pre-scaling 22 | final Bitmap bitmap = 23 | BitmapFactory.decodeResource(context.getResources(), drawableId, options); 24 | mExtraTextureId = GlUtil.createTexture(GLES20.GL_TEXTURE_2D, bitmap); 25 | } 26 | 27 | @Override protected int createProgram(Context applicationContext) { 28 | return GlUtil.createProgram(applicationContext, R.raw.vertex_shader_two_input, 29 | R.raw.fragment_shader_ext_blend); 30 | } 31 | 32 | @Override protected void getGLSLValues() { 33 | super.getGLSLValues(); 34 | maExtraTextureCoordLoc = GLES20.glGetAttribLocation(mProgramHandle, "aExtraTextureCoord"); 35 | muExtraTextureLoc = GLES20.glGetUniformLocation(mProgramHandle, "uExtraTexture"); 36 | } 37 | 38 | @Override protected void bindTexture(int textureId) { 39 | super.bindTexture(textureId); 40 | GLES20.glActiveTexture(GLES20.GL_TEXTURE1); 41 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mExtraTextureId); 42 | GLES20.glUniform1i(muExtraTextureLoc, 1); 43 | } 44 | 45 | @Override 46 | protected void bindGLSLValues(float[] mvpMatrix, FloatBuffer vertexBuffer, int coordsPerVertex, 47 | int vertexStride, float[] texMatrix, FloatBuffer texBuffer, int texStride) { 48 | super.bindGLSLValues(mvpMatrix, vertexBuffer, coordsPerVertex, vertexStride, texMatrix, 49 | texBuffer, texStride); 50 | GLES20.glEnableVertexAttribArray(maExtraTextureCoordLoc); 51 | GLES20.glVertexAttribPointer(maExtraTextureCoordLoc, 2, GLES20.GL_FLOAT, false, texStride, 52 | texBuffer); 53 | } 54 | 55 | @Override protected void unbindGLSLValues() { 56 | super.unbindGLSLValues(); 57 | 58 | GLES20.glDisableVertexAttribArray(maExtraTextureCoordLoc); 59 | } 60 | 61 | @Override protected void unbindTexture() { 62 | super.unbindTexture(); 63 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); 64 | } 65 | } -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/filter/CameraFilterBlendSoftLight.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.filter; 2 | 3 | import android.content.Context; 4 | import android.support.annotation.DrawableRes; 5 | import me.relex.camerafilter.R; 6 | import me.relex.camerafilter.gles.GlUtil; 7 | 8 | public class CameraFilterBlendSoftLight extends CameraFilterBlend { 9 | 10 | public CameraFilterBlendSoftLight(Context context, @DrawableRes int drawableId) { 11 | super(context, drawableId); 12 | } 13 | 14 | @Override protected int createProgram(Context 
applicationContext) { 15 | 16 | return GlUtil.createProgram(applicationContext, R.raw.vertex_shader_two_input, 17 | R.raw.fragment_shader_ext_blend_soft_light); 18 | } 19 | } -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/filter/CameraFilterToneCurve.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.filter; 2 | 3 | import android.content.Context; 4 | import android.graphics.Point; 5 | import android.graphics.PointF; 6 | import android.opengl.GLES20; 7 | import java.io.IOException; 8 | import java.io.InputStream; 9 | import java.nio.ByteBuffer; 10 | import java.nio.FloatBuffer; 11 | import java.util.ArrayList; 12 | import java.util.Arrays; 13 | import java.util.Comparator; 14 | import me.relex.camerafilter.R; 15 | import me.relex.camerafilter.gles.GlUtil; 16 | 17 | public class CameraFilterToneCurve extends CameraFilter { 18 | 19 | private final int mToneCurveTextureId; 20 | protected int muToneCurveTextureLoc; 21 | 22 | private PointF[] mRgbCompositeControlPoints; 23 | private PointF[] mRedControlPoints; 24 | private PointF[] mGreenControlPoints; 25 | private PointF[] mBlueControlPoints; 26 | 27 | private ArrayList mRgbCompositeCurve; 28 | private ArrayList mRedCurve; 29 | private ArrayList mGreenCurve; 30 | private ArrayList mBlueCurve; 31 | 32 | public CameraFilterToneCurve(Context context, InputStream inputStream) { 33 | super(context); 34 | setFromCurveFileInputStream(inputStream); 35 | setRgbCompositeControlPoints(mRgbCompositeControlPoints); 36 | setRedControlPoints(mRedControlPoints); 37 | setGreenControlPoints(mGreenControlPoints); 38 | setBlueControlPoints(mBlueControlPoints); 39 | 40 | mToneCurveTextureId = GlUtil.createTexture(GLES20.GL_TEXTURE_2D); 41 | } 42 | 43 | @Override protected int createProgram(Context applicationContext) { 44 | return GlUtil.createProgram(applicationContext, R.raw.vertex_shader, 45 | R.raw.fragment_shader_ext_tone_curve); 46 | } 47 | 48 | @Override protected void getGLSLValues() { 49 | super.getGLSLValues(); 50 | 51 | muToneCurveTextureLoc = GLES20.glGetUniformLocation(mProgramHandle, "toneCurveTexture"); 52 | } 53 | 54 | @Override protected void bindTexture(int textureId) { 55 | 56 | super.bindTexture(textureId); 57 | 58 | GLES20.glActiveTexture(GLES20.GL_TEXTURE1); 59 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mToneCurveTextureId); 60 | GLES20.glUniform1i(muToneCurveTextureLoc, 1); 61 | 62 | if ((mRedCurve.size() >= 256) 63 | && (mGreenCurve.size() >= 256) 64 | && (mBlueCurve.size() >= 256) 65 | && (mRgbCompositeCurve.size() >= 256)) { 66 | byte[] toneCurveByteArray = new byte[256 * 4]; 67 | for (int currentCurveIndex = 0; currentCurveIndex < 256; currentCurveIndex++) { 68 | // BGRA for upload to texture 69 | toneCurveByteArray[currentCurveIndex * 4 + 2] = (byte) ((int) Math.min(Math.max( 70 | currentCurveIndex 71 | + mBlueCurve.get(currentCurveIndex) 72 | + mRgbCompositeCurve.get(currentCurveIndex), 0), 255) & 0xff); 73 | toneCurveByteArray[currentCurveIndex * 4 + 1] = (byte) ((int) Math.min(Math.max( 74 | currentCurveIndex 75 | + mGreenCurve.get(currentCurveIndex) 76 | + mRgbCompositeCurve.get(currentCurveIndex), 0), 255) & 0xff); 77 | toneCurveByteArray[currentCurveIndex * 4] = (byte) ((int) Math.min(Math.max( 78 | currentCurveIndex 79 | + mRedCurve.get(currentCurveIndex) 80 | + mRgbCompositeCurve.get(currentCurveIndex), 0), 255) & 0xff); 81 | toneCurveByteArray[currentCurveIndex * 4 + 3] = (byte) (255 
& 0xff); 82 | } 83 | 84 | GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, 256 /*width*/, 85 | 1 /*height*/, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, 86 | ByteBuffer.wrap(toneCurveByteArray)); 87 | } 88 | } 89 | 90 | @Override 91 | protected void bindGLSLValues(float[] mvpMatrix, FloatBuffer vertexBuffer, int coordsPerVertex, 92 | int vertexStride, float[] texMatrix, FloatBuffer texBuffer, int texStride) { 93 | super.bindGLSLValues(mvpMatrix, vertexBuffer, coordsPerVertex, vertexStride, texMatrix, 94 | texBuffer, texStride); 95 | //GLES20.glEnableVertexAttribArray(maExtraTextureCoordLoc); 96 | //GLES20.glVertexAttribPointer(maExtraTextureCoordLoc, 2, GLES20.GL_FLOAT, false, texStride, 97 | // texBuffer); 98 | 99 | 100 | } 101 | 102 | @Override protected void unbindGLSLValues() { 103 | super.unbindGLSLValues(); 104 | 105 | //GLES20.glDisableVertexAttribArray(maExtraTextureCoordLoc); 106 | } 107 | 108 | @Override protected void unbindTexture() { 109 | super.unbindTexture(); 110 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); 111 | } 112 | 113 | /////////////////////// 114 | 115 | private short readShort(InputStream input) throws IOException { 116 | return (short) (input.read() << 8 | input.read()); 117 | } 118 | 119 | private void setFromCurveFileInputStream(InputStream input) { 120 | try { 121 | int version = readShort(input); 122 | int totalCurves = readShort(input); 123 | 124 | ArrayList curves = new ArrayList(totalCurves); 125 | float pointRate = 1.0f / 255; 126 | 127 | for (int i = 0; i < totalCurves; i++) { 128 | // 2 bytes, Count of points in the curve (short integer from 2...19) 129 | short pointCount = readShort(input); 130 | PointF[] points = new PointF[pointCount]; 131 | // point count * 4 132 | // Curve points. Each curve point is a pair of short integers where 133 | // the first number is the output value (vertical coordinate on the 134 | // Curves dialog graph) and the second is the input value. All coordinates have range 0 to 255. 
135 | for (int j = 0; j < pointCount; j++) { 136 | short y = readShort(input); 137 | short x = readShort(input); 138 | 139 | points[j] = new PointF(x * pointRate, y * pointRate); 140 | } 141 | 142 | curves.add(points); 143 | } 144 | input.close(); 145 | 146 | mRgbCompositeControlPoints = curves.get(0); 147 | mRedControlPoints = curves.get(1); 148 | mGreenControlPoints = curves.get(2); 149 | mBlueControlPoints = curves.get(3); 150 | } catch (IOException e) { 151 | e.printStackTrace(); 152 | } 153 | } 154 | 155 | public void setRgbCompositeControlPoints(PointF[] points) { 156 | mRgbCompositeControlPoints = points; 157 | mRgbCompositeCurve = createSplineCurve(mRgbCompositeControlPoints); 158 | //updateToneCurveTexture(); 159 | } 160 | 161 | public void setRedControlPoints(PointF[] points) { 162 | mRedControlPoints = points; 163 | mRedCurve = createSplineCurve(mRedControlPoints); 164 | //updateToneCurveTexture(); 165 | } 166 | 167 | public void setGreenControlPoints(PointF[] points) { 168 | mGreenControlPoints = points; 169 | mGreenCurve = createSplineCurve(mGreenControlPoints); 170 | //updateToneCurveTexture(); 171 | } 172 | 173 | public void setBlueControlPoints(PointF[] points) { 174 | mBlueControlPoints = points; 175 | mBlueCurve = createSplineCurve(mBlueControlPoints); 176 | //updateToneCurveTexture(); 177 | } 178 | 179 | private ArrayList createSplineCurve(PointF[] points) { 180 | if (points == null || points.length <= 0) { 181 | return null; 182 | } 183 | 184 | // Sort the array 185 | PointF[] pointsSorted = points.clone(); 186 | Arrays.sort(pointsSorted, new Comparator() { 187 | @Override public int compare(PointF point1, PointF point2) { 188 | if (point1.x < point2.x) { 189 | return -1; 190 | } else if (point1.x > point2.x) { 191 | return 1; 192 | } else { 193 | return 0; 194 | } 195 | } 196 | }); 197 | 198 | // Convert from (0, 1) to (0, 255). 199 | Point[] convertedPoints = new Point[pointsSorted.length]; 200 | for (int i = 0; i < points.length; i++) { 201 | PointF point = pointsSorted[i]; 202 | convertedPoints[i] = new Point((int) (point.x * 255), (int) (point.y * 255)); 203 | } 204 | 205 | ArrayList splinePoints = createSplineCurve2(convertedPoints); 206 | 207 | // If we have a first point like (0.3, 0) we'll be missing some points at the beginning 208 | // that should be 0. 209 | Point firstSplinePoint = splinePoints.get(0); 210 | if (firstSplinePoint.x > 0) { 211 | for (int i = firstSplinePoint.x; i >= 0; i--) { 212 | splinePoints.add(0, new Point(i, 0)); 213 | } 214 | } 215 | 216 | // Insert points similarly at the end, if necessary. 217 | Point lastSplinePoint = splinePoints.get(splinePoints.size() - 1); 218 | if (lastSplinePoint.x < 255) { 219 | for (int i = lastSplinePoint.x + 1; i <= 255; i++) { 220 | splinePoints.add(new Point(i, 255)); 221 | } 222 | } 223 | 224 | // Prepare the spline points. 225 | ArrayList preparedSplinePoints = new ArrayList(splinePoints.size()); 226 | for (Point newPoint : splinePoints) { 227 | Point origPoint = new Point(newPoint.x, newPoint.x); 228 | 229 | float distance = (float) Math.sqrt( 230 | Math.pow((origPoint.x - newPoint.x), 2.0) + Math.pow((origPoint.y - newPoint.y), 231 | 2.0)); 232 | 233 | if (origPoint.y > newPoint.y) { 234 | distance = -distance; 235 | } 236 | 237 | preparedSplinePoints.add(distance); 238 | } 239 | 240 | return preparedSplinePoints; 241 | } 242 | 243 | private ArrayList createSplineCurve2(Point[] points) { 244 | ArrayList sdA = createSecondDerivative(points); 245 | 246 | // Is [points count] equal to [sdA count]? 
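// (They do match here: createSecondDerivative below returns exactly one value per
// control point, so sdA.size() equals points.length.)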
247 | // int n = [points count]; 248 | int n = sdA.size(); 249 | if (n < 1) { 250 | return null; 251 | } 252 | double sd[] = new double[n]; 253 | 254 | // From NSMutableArray to sd[n]; 255 | for (int i = 0; i < n; i++) { 256 | sd[i] = sdA.get(i); 257 | } 258 | 259 | ArrayList output = new ArrayList(n + 1); 260 | 261 | for (int i = 0; i < n - 1; i++) { 262 | Point cur = points[i]; 263 | Point next = points[i + 1]; 264 | 265 | for (int x = cur.x; x < next.x; x++) { 266 | double t = (double) (x - cur.x) / (next.x - cur.x); 267 | 268 | double a = 1 - t; 269 | double b = t; 270 | double h = next.x - cur.x; 271 | 272 | double y = a * cur.y + b * next.y + (h * h / 6) * ((a * a * a - a) * sd[i] 273 | + (b * b * b - b) * sd[i + 1]); 274 | 275 | if (y > 255.0) { 276 | y = 255.0; 277 | } else if (y < 0.0) { 278 | y = 0.0; 279 | } 280 | 281 | output.add(new Point(x, (int) Math.round(y))); 282 | } 283 | } 284 | 285 | // If the last point is (255, 255) it doesn't get added. 286 | if (output.size() == 255) { 287 | output.add(points[points.length - 1]); 288 | } 289 | return output; 290 | } 291 | 292 | private ArrayList createSecondDerivative(Point[] points) { 293 | int n = points.length; 294 | if (n <= 1) { 295 | return null; 296 | } 297 | 298 | double matrix[][] = new double[n][3]; 299 | double result[] = new double[n]; 300 | matrix[0][1] = 1; 301 | // What about matrix[0][1] and matrix[0][0]? Assuming 0 for now (Brad L.) 302 | matrix[0][0] = 0; 303 | matrix[0][2] = 0; 304 | 305 | for (int i = 1; i < n - 1; i++) { 306 | Point P1 = points[i - 1]; 307 | Point P2 = points[i]; 308 | Point P3 = points[i + 1]; 309 | 310 | matrix[i][0] = (double) (P2.x - P1.x) / 6; 311 | matrix[i][1] = (double) (P3.x - P1.x) / 3; 312 | matrix[i][2] = (double) (P3.x - P2.x) / 6; 313 | result[i] = 314 | (double) (P3.y - P2.y) / (P3.x - P2.x) - (double) (P2.y - P1.y) / (P2.x - P1.x); 315 | } 316 | 317 | // What about result[0] and result[n-1]? Assuming 0 for now (Brad L.) 318 | result[0] = 0; 319 | result[n - 1] = 0; 320 | 321 | matrix[n - 1][1] = 1; 322 | // What about matrix[n-1][0] and matrix[n-1][2]? For now, assuming they are 0 (Brad L.) 
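// Together with row 0 above, this boundary row pins the second derivative to zero at both
// endpoints, so the tridiagonal solve below produces a natural cubic spline.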
323 | matrix[n - 1][0] = 0; 324 | matrix[n - 1][2] = 0; 325 | 326 | // solving pass1 (up->down) 327 | for (int i = 1; i < n; i++) { 328 | double k = matrix[i][0] / matrix[i - 1][1]; 329 | matrix[i][1] -= k * matrix[i - 1][2]; 330 | matrix[i][0] = 0; 331 | result[i] -= k * result[i - 1]; 332 | } 333 | // solving pass2 (down->up) 334 | for (int i = n - 2; i >= 0; i--) { 335 | double k = matrix[i][2] / matrix[i + 1][1]; 336 | matrix[i][1] -= k * matrix[i + 1][0]; 337 | matrix[i][2] = 0; 338 | result[i] -= k * result[i + 1]; 339 | } 340 | 341 | ArrayList output = new ArrayList(n); 342 | for (int i = 0; i < n; i++) output.add(result[i] / matrix[i][1]); 343 | 344 | return output; 345 | } 346 | } -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/filter/FilterGroup.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.filter; 2 | 3 | import android.opengl.GLES20; 4 | import java.nio.FloatBuffer; 5 | import java.util.ArrayList; 6 | import java.util.List; 7 | import me.relex.camerafilter.gles.Drawable2d; 8 | import me.relex.camerafilter.gles.GlUtil; 9 | 10 | public class FilterGroup implements IFilter { 11 | 12 | protected List mFilters; 13 | protected int mIncomingWidth, mIncomingHeight; 14 | 15 | private Drawable2d mDrawableFlipVertical2d; 16 | private Drawable2d mDrawable2d; 17 | 18 | private int[] mFrameBuffers; 19 | private int[] mRenderBuffers; 20 | private int[] mFrameBufferTextures; 21 | 22 | public FilterGroup() { 23 | this(null); 24 | } 25 | 26 | public FilterGroup(List filters) { 27 | if (filters != null) { 28 | mFilters = filters; 29 | } else { 30 | mFilters = new ArrayList<>(); 31 | } 32 | mDrawableFlipVertical2d = new Drawable2d(); 33 | mDrawable2d = new Drawable2d(); 34 | } 35 | 36 | public void addFilter(T filter) { 37 | if (filter == null) { 38 | return; 39 | } 40 | mFilters.add(filter); 41 | } 42 | 43 | @Override public int getTextureTarget() { 44 | //return GLES11Ext.GL_TEXTURE_EXTERNAL_OES; 45 | return GLES20.GL_TEXTURE_2D; 46 | } 47 | 48 | @Override public void setTextureSize(int width, int height) { 49 | if (width == 0 || height == 0) { 50 | return; 51 | } 52 | if (width == mIncomingWidth && height == mIncomingHeight) { 53 | return; 54 | } 55 | mIncomingWidth = width; 56 | mIncomingHeight = height; 57 | 58 | if (mFrameBuffers != null) { 59 | destroyFrameBuffers(); 60 | } 61 | 62 | for (T filter : mFilters) { 63 | filter.setTextureSize(width, height); 64 | } 65 | 66 | int size = mFilters.size(); 67 | mFrameBuffers = new int[size - 1]; 68 | mRenderBuffers = new int[size - 1]; 69 | mFrameBufferTextures = new int[size - 1]; 70 | 71 | for (int i = 0; i < size - 1; i++) { 72 | 73 | ///////////////// create FrameBufferTextures 74 | GLES20.glGenTextures(1, mFrameBufferTextures, i); 75 | GlUtil.checkGlError("glGenTextures"); 76 | 77 | GLES20.glBindTexture(getTextureTarget(), mFrameBufferTextures[i]); 78 | GlUtil.checkGlError("glBindTexture " + mFrameBufferTextures[i]); 79 | 80 | GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, 81 | GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null); 82 | 83 | GLES20.glTexParameterf(getTextureTarget(), GLES20.GL_TEXTURE_MAG_FILTER, 84 | GLES20.GL_LINEAR); 85 | GLES20.glTexParameterf(getTextureTarget(), GLES20.GL_TEXTURE_MIN_FILTER, 86 | GLES20.GL_LINEAR); 87 | GLES20.glTexParameterf(getTextureTarget(), GLES20.GL_TEXTURE_WRAP_S, 88 | GLES20.GL_CLAMP_TO_EDGE); 89 | 
GLES20.glTexParameterf(getTextureTarget(), GLES20.GL_TEXTURE_WRAP_T, 90 | GLES20.GL_CLAMP_TO_EDGE); 91 | GlUtil.checkGlError("glTexParameter"); 92 | 93 | ////////////////////////// create FrameBuffer 94 | GLES20.glGenFramebuffers(1, mFrameBuffers, i); 95 | GlUtil.checkGlError("glGenFramebuffers"); 96 | 97 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameBuffers[i]); 98 | GlUtil.checkGlError("glBindFramebuffer " + mFrameBuffers[i]); 99 | 100 | ////////////////////////// create DepthBuffer 101 | GLES20.glGenRenderbuffers(1, mRenderBuffers, 0); 102 | GlUtil.checkGlError("glRenderbuffers"); 103 | 104 | GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, mRenderBuffers[i]); 105 | GlUtil.checkGlError("glBindRenderbuffer"); 106 | 107 | GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16, width, 108 | height); 109 | GlUtil.checkGlError("glRenderbufferStorage"); 110 | ///////////// 111 | 112 | GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT, 113 | GLES20.GL_RENDERBUFFER, mRenderBuffers[i]); 114 | GlUtil.checkGlError("glFramebufferRenderbuffer"); 115 | 116 | GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, 117 | GLES20.GL_TEXTURE_2D /*getTextureTarget()*/, mFrameBufferTextures[i], 0); 118 | 119 | GlUtil.checkGlError("glFramebufferTexture2D"); 120 | 121 | int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER); 122 | if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) { 123 | throw new RuntimeException("Framebuffer not complete, status=" + status); 124 | } 125 | 126 | // Switch back to the default framebuffer. 127 | GLES20.glBindTexture(getTextureTarget(), 0); 128 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); 129 | 130 | GlUtil.checkGlError("prepareFramebuffer done"); 131 | } 132 | } 133 | 134 | @Override public void onDraw(float[] mvpMatrix, FloatBuffer vertexBuffer, int firstVertex, 135 | int vertexCount, int coordsPerVertex, int vertexStride, float[] texMatrix, 136 | FloatBuffer texBuffer, int textureId, int texStride) { 137 | 138 | // TODO 139 | int size = mFilters.size(); 140 | int previousTextureId = textureId; 141 | for (int i = 0; i < size; i++) { 142 | T filter = mFilters.get(i); 143 | boolean isNotLast = i < size - 1; 144 | 145 | if (isNotLast) { 146 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameBuffers[i]); 147 | GLES20.glClearColor(0, 0, 0, 0); 148 | } 149 | 150 | if (i == 0) { 151 | filter.onDraw(mvpMatrix, vertexBuffer, firstVertex, vertexCount, coordsPerVertex, 152 | vertexStride, texMatrix, texBuffer, previousTextureId, texStride); 153 | } else if (i == size - 1) { 154 | filter.onDraw(mvpMatrix, mDrawable2d.getVertexArray(), firstVertex, vertexCount, 155 | coordsPerVertex, vertexStride, texMatrix, 156 | (size % 2 == 0) ? 
mDrawableFlipVertical2d.getTexCoordArray() 157 | : mDrawable2d.getTexCoordArray(), previousTextureId, texStride); 158 | } else { 159 | filter.onDraw(mvpMatrix, mDrawable2d.getVertexArray(), firstVertex, vertexCount, 160 | coordsPerVertex, vertexStride, texMatrix, mDrawable2d.getTexCoordArray(), 161 | previousTextureId, texStride); 162 | } 163 | 164 | if (isNotLast) { 165 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); 166 | previousTextureId = mFrameBufferTextures[i]; 167 | } 168 | } 169 | } 170 | 171 | @Override public void releaseProgram() { 172 | destroyFrameBuffers(); 173 | for (T filter : mFilters) { 174 | filter.releaseProgram(); 175 | } 176 | } 177 | 178 | private void destroyFrameBuffers() { 179 | if (mFrameBufferTextures != null) { 180 | GLES20.glDeleteTextures(mFrameBufferTextures.length, mFrameBufferTextures, 0); 181 | mFrameBufferTextures = null; 182 | } 183 | if (mFrameBuffers != null) { 184 | GLES20.glDeleteFramebuffers(mFrameBuffers.length, mFrameBuffers, 0); 185 | mFrameBuffers = null; 186 | } 187 | 188 | if (mRenderBuffers != null) { 189 | GLES20.glDeleteRenderbuffers(mRenderBuffers.length, mRenderBuffers, 0); 190 | mRenderBuffers = null; 191 | } 192 | } 193 | } 194 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/filter/FilterManager.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.filter; 2 | 3 | import android.content.Context; 4 | import me.relex.camerafilter.R; 5 | 6 | public class FilterManager { 7 | 8 | private static int mCurveIndex; 9 | private static int[] mCurveArrays = new int[] { 10 | R.raw.cross_1, R.raw.cross_2, R.raw.cross_3, R.raw.cross_4, R.raw.cross_5, 11 | R.raw.cross_6, R.raw.cross_7, R.raw.cross_8, R.raw.cross_9, R.raw.cross_10, 12 | R.raw.cross_11, 13 | }; 14 | 15 | private FilterManager() { 16 | } 17 | 18 | public static IFilter getCameraFilter(FilterType filterType, Context context) { 19 | switch (filterType) { 20 | case Normal: 21 | default: 22 | return new CameraFilter(context); 23 | case Blend: 24 | return new CameraFilterBlend(context, R.drawable.mask); 25 | case SoftLight: 26 | return new CameraFilterBlendSoftLight(context, R.drawable.mask); 27 | case ToneCurve: 28 | mCurveIndex++; 29 | if (mCurveIndex > 10) { 30 | mCurveIndex = 0; 31 | } 32 | return new CameraFilterToneCurve(context, 33 | context.getResources().openRawResource(mCurveArrays[mCurveIndex])); 34 | } 35 | } 36 | 37 | public static IFilter getImageFilter(FilterType filterType, Context context) { 38 | switch (filterType) { 39 | case Normal: 40 | default: 41 | return new ImageFilter(context); 42 | case Blend: 43 | return new ImageFilterBlend(context, R.drawable.mask); 44 | case SoftLight: 45 | return new ImageFilterBlendSoftLight(context, R.drawable.mask); 46 | case ToneCurve: 47 | mCurveIndex++; 48 | if (mCurveIndex > 10) { 49 | mCurveIndex = 0; 50 | } 51 | return new ImageFilterToneCurve(context, 52 | context.getResources().openRawResource(mCurveArrays[mCurveIndex])); 53 | } 54 | } 55 | 56 | public enum FilterType { 57 | Normal, Blend, SoftLight, ToneCurve 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/filter/IFilter.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.filter; 2 | 3 | import java.nio.FloatBuffer; 4 | 5 | public interface IFilter { 6 | int 
getTextureTarget(); 7 | 8 | void setTextureSize(int width, int height); 9 | 10 | void onDraw(float[] mvpMatrix, FloatBuffer vertexBuffer, int firstVertex, int vertexCount, 11 | int coordsPerVertex, int vertexStride, float[] texMatrix, FloatBuffer texBuffer, 12 | int textureId, int texStride); 13 | 14 | void releaseProgram(); 15 | } 16 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/filter/ImageFilter.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.filter; 2 | 3 | import android.content.Context; 4 | import android.opengl.GLES10; 5 | import me.relex.camerafilter.R; 6 | import me.relex.camerafilter.gles.GlUtil; 7 | 8 | public class ImageFilter extends CameraFilter { 9 | public ImageFilter(Context applicationContext) { 10 | super(applicationContext); 11 | } 12 | 13 | @Override public int getTextureTarget() { 14 | return GLES10.GL_TEXTURE_2D; 15 | } 16 | 17 | @Override protected int createProgram(Context applicationContext) { 18 | return GlUtil.createProgram(applicationContext, R.raw.vertex_shader, 19 | R.raw.fragment_shader_2d); 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/filter/ImageFilterBlend.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.filter; 2 | 3 | import android.content.Context; 4 | import android.opengl.GLES10; 5 | import android.support.annotation.DrawableRes; 6 | import me.relex.camerafilter.R; 7 | import me.relex.camerafilter.gles.GlUtil; 8 | 9 | public class ImageFilterBlend extends CameraFilterBlend { 10 | 11 | public ImageFilterBlend(Context context, @DrawableRes int drawableId) { 12 | super(context, drawableId); 13 | } 14 | 15 | @Override public int getTextureTarget() { 16 | return GLES10.GL_TEXTURE_2D; 17 | } 18 | 19 | @Override protected int createProgram(Context applicationContext) { 20 | return GlUtil.createProgram(applicationContext, R.raw.vertex_shader_2d_two_input, 21 | R.raw.fragment_shader_2d_blend); 22 | } 23 | } -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/filter/ImageFilterBlendSoftLight.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.filter; 2 | 3 | import android.content.Context; 4 | import android.opengl.GLES10; 5 | import android.support.annotation.DrawableRes; 6 | import me.relex.camerafilter.R; 7 | import me.relex.camerafilter.gles.GlUtil; 8 | 9 | public class ImageFilterBlendSoftLight extends CameraFilterBlendSoftLight { 10 | 11 | public ImageFilterBlendSoftLight(Context context, @DrawableRes int drawableId) { 12 | super(context, drawableId); 13 | } 14 | 15 | @Override public int getTextureTarget() { 16 | return GLES10.GL_TEXTURE_2D; 17 | } 18 | 19 | @Override protected int createProgram(Context applicationContext) { 20 | return GlUtil.createProgram(applicationContext, R.raw.vertex_shader_2d_two_input, 21 | R.raw.fragment_shader_2d_blend_soft_light); 22 | } 23 | } -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/filter/ImageFilterBlur.java: -------------------------------------------------------------------------------- 1 | //package me.relex.camerafilter.filter; 2 | // 3 | //import android.content.Context; 4 | //import 
android.opengl.GLES10; 5 | //import me.relex.camerafilter.R; 6 | //import me.relex.camerafilter.gles.GlUtil; 7 | // 8 | //public class ImageFilterBlur extends CameraFilterGroup { 9 | // public ImageFilterBlur(Context applicationContext) { 10 | // super(applicationContext); 11 | // } 12 | // 13 | // @Override public int getTextureTarget() { 14 | // return GLES10.GL_TEXTURE_2D; 15 | // } 16 | // 17 | // @Override protected int createProgram(Context applicationContext) { 18 | // return GlUtil.createProgram(applicationContext, R.raw.vertex_shader, 19 | // R.raw.fragment_shader_2d_kernel); 20 | // } 21 | //} 22 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/filter/ImageFilterToneCurve.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.filter; 2 | 3 | import android.content.Context; 4 | import android.opengl.GLES10; 5 | import java.io.InputStream; 6 | import me.relex.camerafilter.R; 7 | import me.relex.camerafilter.gles.GlUtil; 8 | 9 | public class ImageFilterToneCurve extends CameraFilterToneCurve { 10 | 11 | public ImageFilterToneCurve(Context context, InputStream inputStream) { 12 | super(context, inputStream); 13 | } 14 | 15 | @Override public int getTextureTarget() { 16 | return GLES10.GL_TEXTURE_2D; 17 | } 18 | 19 | @Override protected int createProgram(Context applicationContext) { 20 | return GlUtil.createProgram(applicationContext, R.raw.vertex_shader, 21 | R.raw.fragment_shader_2d_tone_curve); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/gles/Drawable2d.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.gles; 2 | /* 3 | * Copyright 2014 Google Inc. All rights reserved. 4 | * 5 | * Licensed under the Apache License, Version 2.0 (the "License"); 6 | * you may not use this file except in compliance with the License. 7 | * You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | 18 | import java.nio.FloatBuffer; 19 | 20 | public class Drawable2d { 21 | private static final int SIZEOF_FLOAT = 4; 22 | private static final float FULL_RECTANGLE_COORDS[] = { 23 | -1.0f, -1.0f, // 0 bottom left 24 | 1.0f, -1.0f, // 1 bottom right 25 | -1.0f, 1.0f, // 2 top left 26 | 1.0f, 1.0f, // 3 top right 27 | }; 28 | private static final float FULL_RECTANGLE_TEX_COORDS[] = { 29 | 0.0f, 0.0f, // 0 bottom left 30 | 1.0f, 0.0f, // 1 bottom right 31 | 0.0f, 1.0f, // 2 top left 32 | 1.0f, 1.0f // 3 top right 33 | }; 34 | private static final FloatBuffer FULL_RECTANGLE_BUF = 35 | GlUtil.createFloatBuffer(FULL_RECTANGLE_COORDS); 36 | private static final FloatBuffer FULL_RECTANGLE_TEX_BUF = 37 | GlUtil.createFloatBuffer(FULL_RECTANGLE_TEX_COORDS); 38 | 39 | private FloatBuffer mVertexArray; 40 | private FloatBuffer mTexCoordArray; 41 | private int mVertexCount; 42 | private int mCoordsPerVertex; 43 | private int mVertexStride; 44 | private int mTexCoordStride; 45 | 46 | public Drawable2d() { 47 | mVertexArray = FULL_RECTANGLE_BUF; 48 | mTexCoordArray = FULL_RECTANGLE_TEX_BUF; 49 | mCoordsPerVertex = 2; 50 | mVertexStride = mCoordsPerVertex * SIZEOF_FLOAT; 51 | mVertexCount = FULL_RECTANGLE_COORDS.length / mCoordsPerVertex; 52 | mTexCoordStride = 2 * SIZEOF_FLOAT; 53 | } 54 | 55 | /** 56 | * Returns the array of vertices. 57 | *

58 | * To avoid allocations, this returns internal state. The caller must not modify it. 59 | */ 60 | public FloatBuffer getVertexArray() { 61 | return mVertexArray; 62 | } 63 | 64 | /** 65 | * Returns the array of texture coordinates. 66 | *

67 | * To avoid allocations, this returns internal state. The caller must not modify it. 68 | */ 69 | public FloatBuffer getTexCoordArray() { 70 | return mTexCoordArray; 71 | } 72 | 73 | /** 74 | * Returns the number of vertices stored in the vertex array. 75 | */ 76 | public int getVertexCount() { 77 | return mVertexCount; 78 | } 79 | 80 | /** 81 | * Returns the width, in bytes, of the data for each vertex. 82 | */ 83 | public int getVertexStride() { 84 | return mVertexStride; 85 | } 86 | 87 | /** 88 | * Returns the width, in bytes, of the data for each texture coordinate. 89 | */ 90 | public int getTexCoordStride() { 91 | return mTexCoordStride; 92 | } 93 | 94 | /** 95 | * Returns the number of position coordinates per vertex. This will be 2 or 3. 96 | */ 97 | public int getCoordsPerVertex() { 98 | return mCoordsPerVertex; 99 | } 100 | 101 | 102 | } 103 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/gles/DrawableFlipVertical2d.java: -------------------------------------------------------------------------------- 1 | //package me.relex.camerafilter.gles; 2 | ///* 3 | // * Copyright 2014 Google Inc. All rights reserved. 4 | // * 5 | // * Licensed under the Apache License, Version 2.0 (the "License"); 6 | // * you may not use this file except in compliance with the License. 7 | // * You may obtain a copy of the License at 8 | // * 9 | // * http://www.apache.org/licenses/LICENSE-2.0 10 | // * 11 | // * Unless required by applicable law or agreed to in writing, software 12 | // * distributed under the License is distributed on an "AS IS" BASIS, 13 | // * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | // * See the License for the specific language governing permissions and 15 | // * limitations under the License. 16 | // */ 17 | // 18 | //import java.nio.FloatBuffer; 19 | // 20 | //public class DrawableFlipVertical2d { 21 | // private static final int SIZEOF_FLOAT = 4; 22 | // private static final float FULL_RECTANGLE_COORDS[] = { 23 | // -1.0f, -1.0f, // 0 bottom left 24 | // 1.0f, -1.0f, // 1 bottom right 25 | // -1.0f, 1.0f, // 2 top left 26 | // 1.0f, 1.0f, // 3 top right 27 | // }; 28 | // private static final float FULL_RECTANGLE_TEX_COORDS[] = { 29 | // 0.0f, 1.0f, // 30 | // 1.0f, 1.0f, // 31 | // 0.0f, 0.0f, // 32 | // 1.0f, 0.0f // 33 | // }; 34 | // private static final FloatBuffer FULL_RECTANGLE_BUF = 35 | // GlUtil.createFloatBuffer(FULL_RECTANGLE_COORDS); 36 | // private static final FloatBuffer FULL_RECTANGLE_TEX_BUF = 37 | // GlUtil.createFloatBuffer(FULL_RECTANGLE_TEX_COORDS); 38 | // 39 | // private FloatBuffer mVertexArray; 40 | // private FloatBuffer mTexCoordArray; 41 | // private int mVertexCount; 42 | // private int mCoordsPerVertex; 43 | // private int mVertexStride; 44 | // private int mTexCoordStride; 45 | // 46 | // public DrawableFlipVertical2d() { 47 | // mVertexArray = FULL_RECTANGLE_BUF; 48 | // mTexCoordArray = FULL_RECTANGLE_TEX_BUF; 49 | // mCoordsPerVertex = 2; 50 | // mVertexStride = mCoordsPerVertex * SIZEOF_FLOAT; 51 | // mVertexCount = FULL_RECTANGLE_COORDS.length / mCoordsPerVertex; 52 | // mTexCoordStride = 2 * SIZEOF_FLOAT; 53 | // } 54 | // 55 | // /** 56 | // * Returns the array of vertices. 57 | // *

58 | // * To avoid allocations, this returns internal state. The caller must not modify it. 59 | // */ 60 | // public FloatBuffer getVertexArray() { 61 | // return mVertexArray; 62 | // } 63 | // 64 | // /** 65 | // * Returns the array of texture coordinates. 66 | // *

67 | // * To avoid allocations, this returns internal state. The caller must not modify it. 68 | // */ 69 | // public FloatBuffer getTexCoordArray() { 70 | // return mTexCoordArray; 71 | // } 72 | // 73 | // /** 74 | // * Returns the number of vertices stored in the vertex array. 75 | // */ 76 | // public int getVertexCount() { 77 | // return mVertexCount; 78 | // } 79 | // 80 | // /** 81 | // * Returns the width, in bytes, of the data for each vertex. 82 | // */ 83 | // public int getVertexStride() { 84 | // return mVertexStride; 85 | // } 86 | // 87 | // /** 88 | // * Returns the width, in bytes, of the data for each texture coordinate. 89 | // */ 90 | // public int getTexCoordStride() { 91 | // return mTexCoordStride; 92 | // } 93 | // 94 | // /** 95 | // * Returns the number of position coordinates per vertex. This will be 2 or 3. 96 | // */ 97 | // public int getCoordsPerVertex() { 98 | // return mCoordsPerVertex; 99 | // } 100 | //} 101 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/gles/FullFrameRect.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2014 Google Inc. All rights reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package me.relex.camerafilter.gles; 18 | 19 | /* 20 | * Copyright 2014 Google Inc. All rights reserved. 21 | * 22 | * Licensed under the Apache License, Version 2.0 (the "License"); 23 | * you may not use this file except in compliance with the License. 24 | * You may obtain a copy of the License at 25 | * 26 | * http://www.apache.org/licenses/LICENSE-2.0 27 | * 28 | * Unless required by applicable law or agreed to in writing, software 29 | * distributed under the License is distributed on an "AS IS" BASIS, 30 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 31 | * See the License for the specific language governing permissions and 32 | * limitations under the License. 33 | */ 34 | 35 | import android.graphics.Bitmap; 36 | import android.opengl.Matrix; 37 | import me.relex.camerafilter.filter.IFilter; 38 | 39 | /** 40 | * This class essentially represents a viewport-sized sprite that will be rendered with 41 | * a texture, usually from an external source like the camera or video decoder. 42 | */ 43 | public class FullFrameRect { 44 | private final Drawable2d mRectDrawable = new Drawable2d(); 45 | private IFilter mFilter; 46 | public final float[] IDENTITY_MATRIX = new float[16]; 47 | 48 | /** 49 | * Prepares the object. 50 | * 51 | * @param program The program to use. FullFrameRect takes ownership, and will release 52 | * the program when no longer needed. 53 | */ 54 | public FullFrameRect(IFilter program) { 55 | mFilter = program; 56 | Matrix.setIdentityM(IDENTITY_MATRIX, 0); 57 | } 58 | 59 | /** 60 | * Releases resources. 61 | *

62 | * This must be called with the appropriate EGL context current (i.e. the one that was 63 | * current when the constructor was called). If we're about to destroy the EGL context, 64 | * there's no value in having the caller make it current just to do this cleanup, so you 65 | * can pass a flag that will tell this function to skip any EGL-context-specific cleanup. 66 | */ 67 | public void release(boolean doEglCleanup) { 68 | if (mFilter != null) { 69 | if (doEglCleanup) { 70 | mFilter.releaseProgram(); 71 | } 72 | mFilter = null; 73 | } 74 | } 75 | 76 | /** 77 | * Returns the program currently in use. 78 | */ 79 | public IFilter getFilter() { 80 | return mFilter; 81 | } 82 | 83 | /** 84 | * Changes the program. The previous program will be released. 85 | *

86 | * The appropriate EGL context must be current. 87 | */ 88 | public void changeProgram(IFilter newFilter) { 89 | mFilter.releaseProgram(); 90 | mFilter = newFilter; 91 | } 92 | 93 | /** 94 | * Creates a texture object suitable for use with drawFrame(). 95 | */ 96 | public int createTexture() { 97 | return GlUtil.createTexture(mFilter.getTextureTarget()); 98 | } 99 | 100 | public int createTexture(Bitmap bitmap) { 101 | return GlUtil.createTexture(mFilter.getTextureTarget(), bitmap); 102 | } 103 | 104 | public void scaleMVPMatrix(float x, float y) { 105 | Matrix.setIdentityM(IDENTITY_MATRIX, 0); 106 | Matrix.scaleM(IDENTITY_MATRIX, 0, x, y, 1f); 107 | } 108 | 109 | /** 110 | * Draws a viewport-filling rect, texturing it with the specified texture object. 111 | */ 112 | 113 | public void drawFrame(int textureId, float[] texMatrix) { 114 | 115 | // Use the identity matrix for MVP so our 2x2 FULL_RECTANGLE covers the viewport. 116 | mFilter.onDraw(IDENTITY_MATRIX, mRectDrawable.getVertexArray(), 0, 117 | mRectDrawable.getVertexCount(), mRectDrawable.getCoordsPerVertex(), 118 | mRectDrawable.getVertexStride(), texMatrix, mRectDrawable.getTexCoordArray(), 119 | textureId, mRectDrawable.getTexCoordStride()); 120 | } 121 | } 122 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/gles/GlUtil.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.gles; 2 | /* 3 | * Copyright 2014 Google Inc. All rights reserved. 4 | * 5 | * Licensed under the Apache License, Version 2.0 (the "License"); 6 | * you may not use this file except in compliance with the License. 7 | * You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | import android.content.Context; 19 | import android.graphics.Bitmap; 20 | import android.graphics.Canvas; 21 | import android.graphics.Paint; 22 | import android.opengl.GLES20; 23 | import android.opengl.GLUtils; 24 | import android.support.annotation.Nullable; 25 | import android.support.annotation.RawRes; 26 | import android.util.Log; 27 | import java.io.BufferedReader; 28 | import java.io.IOException; 29 | import java.io.InputStream; 30 | import java.io.InputStreamReader; 31 | import java.nio.ByteBuffer; 32 | import java.nio.ByteOrder; 33 | import java.nio.FloatBuffer; 34 | 35 | public class GlUtil { 36 | private static final String TAG = "GlUtil"; 37 | /** Identity matrix for general use. Don't modify or life will get weird. 
*/ 38 | 39 | public static final int NO_TEXTURE = -1; 40 | 41 | private static final int SIZEOF_FLOAT = 4; 42 | 43 | private GlUtil() { // do not instantiate 44 | } 45 | 46 | public static int createProgram(Context applicationContext, @RawRes int vertexSourceRawId, 47 | @RawRes int fragmentSourceRawId) { 48 | 49 | String vertexSource = readTextFromRawResource(applicationContext, vertexSourceRawId); 50 | String fragmentSource = readTextFromRawResource(applicationContext, fragmentSourceRawId); 51 | 52 | return createProgram(vertexSource, fragmentSource); 53 | } 54 | 55 | public static int createProgram(String vertexSource, String fragmentSource) { 56 | int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource); 57 | if (vertexShader == 0) { 58 | return 0; 59 | } 60 | int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource); 61 | if (pixelShader == 0) { 62 | return 0; 63 | } 64 | int program = GLES20.glCreateProgram(); 65 | checkGlError("glCreateProgram"); 66 | if (program == 0) { 67 | Log.e(TAG, "Could not create program"); 68 | } 69 | GLES20.glAttachShader(program, vertexShader); 70 | checkGlError("glAttachShader"); 71 | GLES20.glAttachShader(program, pixelShader); 72 | checkGlError("glAttachShader"); 73 | GLES20.glLinkProgram(program); 74 | int[] linkStatus = new int[1]; 75 | GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0); 76 | if (linkStatus[0] != GLES20.GL_TRUE) { 77 | Log.e(TAG, "Could not link program: "); 78 | Log.e(TAG, GLES20.glGetProgramInfoLog(program)); 79 | GLES20.glDeleteProgram(program); 80 | program = 0; 81 | } 82 | return program; 83 | } 84 | 85 | public static int loadShader(int shaderType, String source) { 86 | int shader = GLES20.glCreateShader(shaderType); 87 | checkGlError("glCreateShader type=" + shaderType); 88 | GLES20.glShaderSource(shader, source); 89 | GLES20.glCompileShader(shader); 90 | int[] compiled = new int[1]; 91 | GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0); 92 | if (compiled[0] == 0) { 93 | Log.e(TAG, "Could not compile shader " + shaderType + ":"); 94 | Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader)); 95 | GLES20.glDeleteShader(shader); 96 | shader = 0; 97 | } 98 | return shader; 99 | } 100 | 101 | /** 102 | * @param textureTarget Texture target type: 103 | * 1. use GLES11Ext.GL_TEXTURE_EXTERNAL_OES for camera frames; 104 | * 2. use GLES20.GL_TEXTURE_2D for still images.
105 | * @param minFilter Minification filter type (1. GL_NEAREST; 2. GL_LINEAR) 106 | * @param magFilter Magnification filter type 107 | * @param wrapS Edge wrap mode in the X direction 108 | * @param wrapT Edge wrap mode in the Y direction 109 | * @return the ID of the created texture 110 | */ 111 | public static int createTexture(int textureTarget, @Nullable Bitmap bitmap, int minFilter, 112 | int magFilter, int wrapS, int wrapT) { 113 | int[] textureHandle = new int[1]; 114 | 115 | GLES20.glGenTextures(1, textureHandle, 0); 116 | GlUtil.checkGlError("glGenTextures"); 117 | GLES20.glBindTexture(textureTarget, textureHandle[0]); 118 | GlUtil.checkGlError("glBindTexture " + textureHandle[0]); 119 | GLES20.glTexParameterf(textureTarget, GLES20.GL_TEXTURE_MIN_FILTER, minFilter); 120 | GLES20.glTexParameterf(textureTarget, GLES20.GL_TEXTURE_MAG_FILTER, magFilter); // linear interpolation 121 | GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_WRAP_S, wrapS); 122 | GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_WRAP_T, wrapT); 123 | 124 | if (bitmap != null) { 125 | GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0); 126 | } 127 | 128 | GlUtil.checkGlError("glTexParameter"); 129 | return textureHandle[0]; 130 | } 131 | 132 | public static int createTexture(int textureTarget) { 133 | return createTexture(textureTarget, null, GLES20.GL_LINEAR, GLES20.GL_LINEAR, 134 | GLES20.GL_CLAMP_TO_EDGE, GLES20.GL_CLAMP_TO_EDGE); 135 | } 136 | 137 | public static int createTexture(int textureTarget, Bitmap bitmap) { 138 | return createTexture(textureTarget, bitmap, GLES20.GL_LINEAR, GLES20.GL_LINEAR, 139 | GLES20.GL_CLAMP_TO_EDGE, GLES20.GL_CLAMP_TO_EDGE); 140 | } 141 | 142 | /** 143 | * Checks to see if a GLES error has been raised. 144 | */ 145 | public static void checkGlError(String op) { 146 | int error = GLES20.glGetError(); 147 | if (error != GLES20.GL_NO_ERROR) { 148 | String msg = op + ": glError 0x" + Integer.toHexString(error); 149 | Log.e(TAG, msg); 150 | throw new RuntimeException(msg); 151 | } 152 | } 153 | 154 | /** 155 | * Checks to see if the location we obtained is valid. GLES returns -1 if a label 156 | * could not be found, but does not set the GL error. 157 | *

158 | * Throws a RuntimeException if the location is invalid. 159 | */ 160 | public static void checkLocation(int location, String label) { 161 | if (location < 0) { 162 | throw new RuntimeException("Unable to locate '" + label + "' in program"); 163 | } 164 | } 165 | 166 | /** 167 | * Allocates a direct float buffer, and populates it with the float array data. 168 | */ 169 | public static FloatBuffer createFloatBuffer(float[] coords) { 170 | // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it. 171 | ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * SIZEOF_FLOAT); 172 | bb.order(ByteOrder.nativeOrder()); 173 | FloatBuffer fb = bb.asFloatBuffer(); 174 | fb.put(coords); 175 | fb.position(0); 176 | return fb; 177 | } 178 | 179 | public static String readTextFromRawResource(final Context applicationContext, 180 | @RawRes final int resourceId) { 181 | final InputStream inputStream = 182 | applicationContext.getResources().openRawResource(resourceId); 183 | final InputStreamReader inputStreamReader = new InputStreamReader(inputStream); 184 | final BufferedReader bufferedReader = new BufferedReader(inputStreamReader); 185 | String nextLine; 186 | final StringBuilder body = new StringBuilder(); 187 | try { 188 | while ((nextLine = bufferedReader.readLine()) != null) { 189 | body.append(nextLine); 190 | body.append('\n'); 191 | } 192 | } catch (IOException e) { 193 | return null; 194 | } 195 | 196 | return body.toString(); 197 | } 198 | 199 | public static int createTextureWithTextContent(String text) { 200 | // Create an empty, mutable bitmap 201 | Bitmap bitmap = Bitmap.createBitmap(256, 256, Bitmap.Config.ARGB_8888); 202 | // get a canvas to paint over the bitmap 203 | Canvas canvas = new Canvas(bitmap); 204 | canvas.drawARGB(0, 0, 255, 0); 205 | // get a background image from resources 206 | // note the image format must match the bitmap format 207 | // Drawable background = context.getResources().getDrawable(R.drawable.background); 208 | // background.setBounds(0, 0, 256, 256); 209 | // background.draw(canvas); // draw the background to our bitmap 210 | // Draw the text 211 | Paint textPaint = new Paint(); 212 | textPaint.setTextSize(32); 213 | textPaint.setAntiAlias(true); 214 | textPaint.setARGB(0xff, 0xff, 0xff, 0xff); 215 | // draw the text centered 216 | canvas.drawText(text, 16, 112, textPaint); 217 | 218 | int[] textures = new int[1]; 219 | 220 | //Generate one texture pointer... 221 | GLES20.glGenTextures(1, textures, 0); 222 | 223 | //...and bind it to our array 224 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]); 225 | 226 | //Create Nearest Filtered Texture 227 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, 228 | GLES20.GL_NEAREST); 229 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, 230 | GLES20.GL_LINEAR); 231 | 232 | //Different possible texture parameters, e.g. 
GLES20.GL_CLAMP_TO_EDGE 233 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT); 234 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT); 235 | 236 | //Alpha blending 237 | //GLES20.glEnable(GLES20.GL_BLEND); 238 | //GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA); 239 | 240 | //Use the Android GLUtils to specify a two-dimensional texture image from our bitmap 241 | GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0); 242 | //Clean up 243 | bitmap.recycle(); 244 | 245 | return textures[0]; 246 | } 247 | } -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/image/ImageEglSurface.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.image; 2 | 3 | import android.graphics.Bitmap; 4 | import android.opengl.EGL14; 5 | import android.opengl.GLES20; 6 | import android.opengl.GLSurfaceView; 7 | import android.util.Log; 8 | import java.nio.ByteBuffer; 9 | import java.nio.ByteOrder; 10 | import javax.microedition.khronos.egl.EGL10; 11 | import javax.microedition.khronos.egl.EGLConfig; 12 | import javax.microedition.khronos.egl.EGLContext; 13 | import javax.microedition.khronos.egl.EGLDisplay; 14 | import javax.microedition.khronos.egl.EGLSurface; 15 | import javax.microedition.khronos.opengles.GL10; 16 | 17 | import static javax.microedition.khronos.egl.EGL10.EGL_NO_CONTEXT; 18 | 19 | // 类似 EglCore 和 EglSurfaceBase,不过EglCore使用EGL14,而这里图片滤镜可以拆处理兼容法低版本,所以重新写了个,使用EGL10 20 | // 需要放到同一个线程中处理 21 | public class ImageEglSurface { 22 | 23 | private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098; 24 | 25 | private int mWidth, mHeight; 26 | 27 | private EGL10 mEGL; 28 | private EGLDisplay mEGLDisplay; 29 | private EGLConfig mEGLConfig; 30 | private EGLContext mEGLContext; 31 | private EGLSurface mEGLSurface; 32 | private GL10 mGL; 33 | 34 | private GLSurfaceView.Renderer mRenderer; 35 | 36 | public ImageEglSurface(final int width, final int height) { 37 | mWidth = width; 38 | mHeight = height; 39 | 40 | int[] version = new int[2]; 41 | int[] surfaceAttribList = { 42 | EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE 43 | }; 44 | 45 | mEGL = (EGL10) EGLContext.getEGL(); 46 | mEGLDisplay = mEGL.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY); 47 | if (mEGLDisplay == EGL10.EGL_NO_DISPLAY) { 48 | throw new RuntimeException("unable to get EGL10 display"); 49 | } 50 | 51 | if (!mEGL.eglInitialize(mEGLDisplay, version)) { 52 | mEGLDisplay = null; 53 | throw new RuntimeException("unable to initialize EGL14"); 54 | } 55 | 56 | mEGLConfig = getConfig(); 57 | if (mEGLConfig == null) { 58 | throw new RuntimeException("Unable to find a suitable EGLConfig"); 59 | } 60 | int[] attribList = { 61 | EGL_CONTEXT_CLIENT_VERSION /*EGL14.EGL_CONTEXT_CLIENT_VERSION*/, 2, EGL14.EGL_NONE 62 | }; 63 | mEGLContext = mEGL.eglCreateContext(mEGLDisplay, mEGLConfig, EGL_NO_CONTEXT, attribList); 64 | mEGLSurface = mEGL.eglCreatePbufferSurface(mEGLDisplay, mEGLConfig, surfaceAttribList); 65 | mEGL.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext); 66 | mGL = (GL10) mEGLContext.getGL(); 67 | } 68 | 69 | private EGLConfig getConfig() { 70 | int renderableType = 4; // EGL14.EGL_OPENGL_ES2_BIT; 71 | int[] attribList = { 72 | EGL10.EGL_DEPTH_SIZE, 0, EGL10.EGL_STENCIL_SIZE, 0, EGL10.EGL_RED_SIZE, 8, 73 | EGL10.EGL_GREEN_SIZE, 8, EGL10.EGL_BLUE_SIZE, 8, EGL10.EGL_ALPHA_SIZE, 8, 74 
| EGL10.EGL_RENDERABLE_TYPE, renderableType, EGL10.EGL_NONE 75 | }; 76 | 77 | EGLConfig[] configs = new EGLConfig[1]; 78 | int[] numConfigs = new int[1]; 79 | 80 | if (!mEGL.eglChooseConfig(mEGLDisplay, attribList, configs, configs.length, numConfigs)) { 81 | Log.w("ImageEglSurface", "unable to find RGB8888 EGLConfig"); 82 | return null; 83 | } 84 | return configs[0]; 85 | } 86 | 87 | public void setRenderer(GLSurfaceView.Renderer renderer) { 88 | mRenderer = renderer; 89 | mRenderer.onSurfaceCreated(mGL, mEGLConfig); 90 | mRenderer.onSurfaceChanged(mGL, mWidth, mHeight); 91 | } 92 | 93 | public void drawFrame() { 94 | if (mRenderer == null) { 95 | return; 96 | } 97 | mRenderer.onDrawFrame(mGL); 98 | } 99 | 100 | public Bitmap getBitmap() { 101 | ByteBuffer buf = ByteBuffer.allocateDirect(mWidth * mHeight * 4); 102 | buf.order(ByteOrder.LITTLE_ENDIAN); 103 | GLES20.glReadPixels(0, 0, mWidth, mHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf); 104 | Bitmap bmp = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888); 105 | bmp.copyPixelsFromBuffer(buf); 106 | return bmp; 107 | } 108 | 109 | public void release() { 110 | mEGL.eglDestroySurface(mEGLDisplay, mEGLSurface); 111 | if (mEGLDisplay != EGL10.EGL_NO_DISPLAY) { 112 | mEGL.eglMakeCurrent(mEGLDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, 113 | EGL10.EGL_NO_CONTEXT); 114 | mEGL.eglDestroyContext(mEGLDisplay, mEGLContext); 115 | //EGL14.eglReleaseThread(); 116 | mEGL.eglTerminate(mEGLDisplay); 117 | } 118 | 119 | mEGLDisplay = EGL10.EGL_NO_DISPLAY; 120 | mEGLConfig = null; 121 | mEGLContext = EGL10.EGL_NO_CONTEXT; 122 | mEGLSurface = EGL10.EGL_NO_SURFACE; 123 | 124 | mWidth = mHeight = -1; 125 | } 126 | } 127 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/image/ImageRenderer.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.image; 2 | 3 | import android.content.Context; 4 | import android.graphics.Bitmap; 5 | import android.opengl.GLSurfaceView; 6 | import android.opengl.Matrix; 7 | import android.util.Log; 8 | import javax.microedition.khronos.egl.EGLConfig; 9 | import javax.microedition.khronos.opengles.GL10; 10 | import me.relex.camerafilter.filter.FilterManager; 11 | import me.relex.camerafilter.gles.FullFrameRect; 12 | import me.relex.camerafilter.gles.GlUtil; 13 | 14 | public class ImageRenderer implements GLSurfaceView.Renderer { 15 | 16 | private final Context mContext; 17 | private FilterManager.FilterType mCurrentFilterType; 18 | private FilterManager.FilterType mNewFilterType; 19 | 20 | private int mTextureId = GlUtil.NO_TEXTURE; 21 | private final float[] mSTMatrix = new float[16]; 22 | 23 | private int mSurfaceWidth, mSurfaceHeight; 24 | private int mIncomingWidth, mIncomingHeight; 25 | 26 | private FullFrameRect mFullScreen; 27 | 28 | public ImageRenderer(Context context, FilterManager.FilterType filterType) { 29 | mContext = context; 30 | mCurrentFilterType = mNewFilterType = filterType; 31 | } 32 | 33 | @Override public void onSurfaceCreated(GL10 gl, EGLConfig config) { 34 | Matrix.setIdentityM(mSTMatrix, 0); 35 | mFullScreen = new FullFrameRect(FilterManager.getImageFilter(mCurrentFilterType, mContext)); 36 | } 37 | 38 | @Override public void onSurfaceChanged(GL10 gl, int width, int height) { 39 | mSurfaceWidth = width; 40 | mSurfaceHeight = height; 41 | if (gl != null) { 42 | gl.glViewport(0, 0, width, height); 43 | } 44 | } 45 | 46 | public void 
setImageBitmap(Bitmap bitmap) { 47 | if (bitmap == null) { 48 | return; 49 | } 50 | 51 | mIncomingWidth = bitmap.getWidth(); 52 | mIncomingHeight = bitmap.getHeight(); 53 | 54 | float scaleHeight = mSurfaceWidth / (mIncomingWidth * 1f / mIncomingHeight * 1f); 55 | float surfaceHeight = mSurfaceHeight; 56 | 57 | mTextureId = mFullScreen.createTexture(bitmap); 58 | if (mFullScreen != null) { 59 | mFullScreen.scaleMVPMatrix(1f, scaleHeight / surfaceHeight); 60 | } 61 | } 62 | 63 | public void changeFilter(FilterManager.FilterType filterType) { 64 | mNewFilterType = filterType; 65 | } 66 | 67 | @Override public void onDrawFrame(GL10 gl) { 68 | 69 | if (mTextureId == GlUtil.NO_TEXTURE) { 70 | Log.e("ImageRenderer", "need setImageBitmap"); 71 | return; 72 | } 73 | 74 | if (mNewFilterType != mCurrentFilterType) { 75 | mFullScreen.changeProgram(FilterManager.getImageFilter(mNewFilterType, mContext)); 76 | mCurrentFilterType = mNewFilterType; 77 | } 78 | 79 | mFullScreen.getFilter().setTextureSize(mIncomingWidth, mIncomingHeight); 80 | 81 | mFullScreen.drawFrame(mTextureId, mSTMatrix); 82 | } 83 | 84 | public void destroy() { 85 | if (mFullScreen != null) { 86 | mFullScreen.release(false); 87 | mFullScreen = null; 88 | } 89 | 90 | mTextureId = GlUtil.NO_TEXTURE; 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/video/EglCore.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2013 Google Inc. All rights reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package me.relex.camerafilter.video; 18 | 19 | import android.graphics.SurfaceTexture; 20 | import android.opengl.EGL14; 21 | import android.opengl.EGLConfig; 22 | import android.opengl.EGLContext; 23 | import android.opengl.EGLDisplay; 24 | import android.opengl.EGLExt; 25 | import android.opengl.EGLSurface; 26 | import android.util.Log; 27 | import android.view.Surface; 28 | 29 | /** 30 | * Core EGL state (display, context, config). 31 | *

32 | * The EGLContext must only be attached to one thread at a time. This class is not thread-safe. 33 | */ 34 | public final class EglCore { 35 | private static final String TAG = "EglCore"; 36 | 37 | /** 38 | * Constructor flag: surface must be recordable. This discourages EGL from using a 39 | * pixel format that cannot be converted efficiently to something usable by the video 40 | * encoder. 41 | */ 42 | public static final int FLAG_RECORDABLE = 0x01; 43 | 44 | /** 45 | * Constructor flag: ask for GLES3, fall back to GLES2 if not available. Without this 46 | * flag, GLES2 is used. 47 | */ 48 | public static final int FLAG_TRY_GLES3 = 0x02; 49 | 50 | // Android-specific extension. 51 | private static final int EGL_RECORDABLE_ANDROID = 0x3142; 52 | 53 | private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY; 54 | private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT; 55 | private EGLConfig mEGLConfig = null; 56 | private int mGlVersion = -1; 57 | 58 | /** 59 | * Prepares EGL display and context. 60 | *

61 | * Equivalent to EglCore(null, 0). 62 | */ 63 | public EglCore() { 64 | this(null, 0); 65 | } 66 | 67 | /** 68 | * Prepares EGL display and context. 69 | *

70 | * 71 | * @param sharedContext The context to share, or null if sharing is not desired. 72 | * @param flags Configuration bit flags, e.g. FLAG_RECORDABLE. 73 | */ 74 | public EglCore(EGLContext sharedContext, int flags) { 75 | if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) { 76 | throw new RuntimeException("EGL already set up"); 77 | } 78 | 79 | if (sharedContext == null) { 80 | sharedContext = EGL14.EGL_NO_CONTEXT; 81 | } 82 | 83 | mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY); 84 | if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) { 85 | throw new RuntimeException("unable to get EGL14 display"); 86 | } 87 | int[] version = new int[2]; 88 | if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) { 89 | mEGLDisplay = null; 90 | throw new RuntimeException("unable to initialize EGL14"); 91 | } 92 | 93 | // Try to get a GLES3 context, if requested. 94 | if ((flags & FLAG_TRY_GLES3) != 0) { 95 | //Log.d(TAG, "Trying GLES 3"); 96 | EGLConfig config = getConfig(flags, 3); 97 | if (config != null) { 98 | int[] attrib3_list = { 99 | EGL14.EGL_CONTEXT_CLIENT_VERSION, 3, EGL14.EGL_NONE 100 | }; 101 | EGLContext context = 102 | EGL14.eglCreateContext(mEGLDisplay, config, sharedContext, attrib3_list, 0); 103 | 104 | if (EGL14.eglGetError() == EGL14.EGL_SUCCESS) { 105 | //Log.d(TAG, "Got GLES 3 config"); 106 | mEGLConfig = config; 107 | mEGLContext = context; 108 | mGlVersion = 3; 109 | } 110 | } 111 | } 112 | if (mEGLContext == EGL14.EGL_NO_CONTEXT) { // GLES 2 only, or GLES 3 attempt failed 113 | //Log.d(TAG, "Trying GLES 2"); 114 | EGLConfig config = getConfig(flags, 2); 115 | if (config == null) { 116 | throw new RuntimeException("Unable to find a suitable EGLConfig"); 117 | } 118 | int[] attrib2_list = { 119 | EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE 120 | }; 121 | EGLContext context = 122 | EGL14.eglCreateContext(mEGLDisplay, config, sharedContext, attrib2_list, 0); 123 | checkEglError("eglCreateContext"); 124 | mEGLConfig = config; 125 | mEGLContext = context; 126 | mGlVersion = 2; 127 | } 128 | 129 | // Confirm with query. 130 | int[] values = new int[1]; 131 | EGL14.eglQueryContext(mEGLDisplay, mEGLContext, EGL14.EGL_CONTEXT_CLIENT_VERSION, values, 132 | 0); 133 | Log.d(TAG, "EGLContext created, client version " + values[0]); 134 | } 135 | 136 | /** 137 | * Finds a suitable EGLConfig. 138 | * 139 | * @param flags Bit flags from constructor. 140 | * @param version Must be 2 or 3. 141 | */ 142 | private EGLConfig getConfig(int flags, int version) { 143 | int renderableType = EGL14.EGL_OPENGL_ES2_BIT; 144 | if (version >= 3) { 145 | renderableType |= EGLExt.EGL_OPENGL_ES3_BIT_KHR; 146 | } 147 | 148 | // The actual surface is generally RGBA or RGBX, so situationally omitting alpha 149 | // doesn't really help. It can also lead to a huge performance hit on glReadPixels() 150 | // when reading into a GL_RGBA buffer. 
151 | int[] attribList = { 152 | EGL14.EGL_RED_SIZE, 8, EGL14.EGL_GREEN_SIZE, 8, EGL14.EGL_BLUE_SIZE, 8, 153 | EGL14.EGL_ALPHA_SIZE, 8, 154 | //EGL14.EGL_DEPTH_SIZE, 16, 155 | //EGL14.EGL_STENCIL_SIZE, 8, 156 | EGL14.EGL_RENDERABLE_TYPE, renderableType, EGL14.EGL_NONE, 0, 157 | // placeholder for recordable [@-3] 158 | EGL14.EGL_NONE 159 | }; 160 | if ((flags & FLAG_RECORDABLE) != 0) { 161 | attribList[attribList.length - 3] = EGL_RECORDABLE_ANDROID; 162 | attribList[attribList.length - 2] = 1; 163 | } 164 | EGLConfig[] configs = new EGLConfig[1]; 165 | int[] numConfigs = new int[1]; 166 | if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length, 167 | numConfigs, 0)) { 168 | Log.w(TAG, "unable to find RGB8888 / " + version + " EGLConfig"); 169 | return null; 170 | } 171 | return configs[0]; 172 | } 173 | 174 | /** 175 | * Discards all resources held by this class, notably the EGL context. This must be 176 | * called from the thread where the context was created. 177 | *

178 | * On completion, no context will be current. 179 | */ 180 | public void release() { 181 | if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) { 182 | // Android is unusual in that it uses a reference-counted EGLDisplay. So for 183 | // every eglInitialize() we need an eglTerminate(). 184 | EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, 185 | EGL14.EGL_NO_CONTEXT); 186 | EGL14.eglDestroyContext(mEGLDisplay, mEGLContext); 187 | EGL14.eglReleaseThread(); 188 | EGL14.eglTerminate(mEGLDisplay); 189 | } 190 | 191 | mEGLDisplay = EGL14.EGL_NO_DISPLAY; 192 | mEGLContext = EGL14.EGL_NO_CONTEXT; 193 | mEGLConfig = null; 194 | } 195 | 196 | @Override protected void finalize() throws Throwable { 197 | try { 198 | if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) { 199 | // We're limited here -- finalizers don't run on the thread that holds 200 | // the EGL state, so if a surface or context is still current on another 201 | // thread we can't fully release it here. Exceptions thrown from here 202 | // are quietly discarded. Complain in the log file. 203 | Log.w(TAG, "WARNING: EglCore was not explicitly released -- state may be leaked"); 204 | release(); 205 | } 206 | } finally { 207 | super.finalize(); 208 | } 209 | } 210 | 211 | /** 212 | * Destroys the specified surface. Note the EGLSurface won't actually be destroyed if it's 213 | * still current in a context. 214 | */ 215 | public void releaseSurface(EGLSurface eglSurface) { 216 | EGL14.eglDestroySurface(mEGLDisplay, eglSurface); 217 | } 218 | 219 | /** 220 | * Creates an EGL surface associated with a Surface. 221 | *

222 | * If this is destined for MediaCodec, the EGLConfig should have the "recordable" attribute. 223 | */ 224 | public EGLSurface createWindowSurface(Object surface) { 225 | if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) { 226 | throw new RuntimeException("invalid surface: " + surface); 227 | } 228 | 229 | // Create a window surface, and attach it to the Surface we received. 230 | int[] surfaceAttribs = { 231 | EGL14.EGL_NONE 232 | }; 233 | EGLSurface eglSurface = 234 | EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig, surface, surfaceAttribs, 0); 235 | checkEglError("eglCreateWindowSurface"); 236 | if (eglSurface == null) { 237 | throw new RuntimeException("surface was null"); 238 | } 239 | return eglSurface; 240 | } 241 | 242 | /** 243 | * Creates an EGL surface associated with an offscreen buffer. 244 | */ 245 | public EGLSurface createOffscreenSurface(int width, int height) { 246 | int[] surfaceAttribs = { 247 | EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE 248 | }; 249 | EGLSurface eglSurface = 250 | EGL14.eglCreatePbufferSurface(mEGLDisplay, mEGLConfig, surfaceAttribs, 0); 251 | checkEglError("eglCreatePbufferSurface"); 252 | if (eglSurface == null) { 253 | throw new RuntimeException("surface was null"); 254 | } 255 | return eglSurface; 256 | } 257 | 258 | /** 259 | * Makes our EGL context current, using the supplied surface for both "draw" and "read". 260 | */ 261 | public void makeCurrent(EGLSurface eglSurface) { 262 | if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) { 263 | // called makeCurrent() before create? 264 | Log.d(TAG, "NOTE: makeCurrent w/o display"); 265 | } 266 | if (!EGL14.eglMakeCurrent(mEGLDisplay, eglSurface, eglSurface, mEGLContext)) { 267 | throw new RuntimeException("eglMakeCurrent failed"); 268 | } 269 | } 270 | 271 | /** 272 | * Makes our EGL context current, using the supplied "draw" and "read" surfaces. 273 | */ 274 | public void makeCurrent(EGLSurface drawSurface, EGLSurface readSurface) { 275 | if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) { 276 | // called makeCurrent() before create? 277 | Log.d(TAG, "NOTE: makeCurrent w/o display"); 278 | } 279 | if (!EGL14.eglMakeCurrent(mEGLDisplay, drawSurface, readSurface, mEGLContext)) { 280 | throw new RuntimeException("eglMakeCurrent(draw,read) failed"); 281 | } 282 | } 283 | 284 | /** 285 | * Makes no context current. 286 | */ 287 | public void makeNothingCurrent() { 288 | if (!EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, 289 | EGL14.EGL_NO_CONTEXT)) { 290 | throw new RuntimeException("eglMakeCurrent failed"); 291 | } 292 | } 293 | 294 | /** 295 | * Calls eglSwapBuffers. Use this to "publish" the current frame. 296 | * 297 | * @return false on failure 298 | */ 299 | public boolean swapBuffers(EGLSurface eglSurface) { 300 | return EGL14.eglSwapBuffers(mEGLDisplay, eglSurface); 301 | } 302 | 303 | /** 304 | * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds. 305 | */ 306 | public void setPresentationTime(EGLSurface eglSurface, long nsecs) { 307 | EGLExt.eglPresentationTimeANDROID(mEGLDisplay, eglSurface, nsecs); 308 | } 309 | 310 | /** 311 | * Returns true if our context and the specified surface are current. 312 | */ 313 | public boolean isCurrent(EGLSurface eglSurface) { 314 | return mEGLContext.equals(EGL14.eglGetCurrentContext()) && eglSurface.equals( 315 | EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW)); 316 | } 317 | 318 | /** 319 | * Performs a simple surface query. 
320 | */ 321 | public int querySurface(EGLSurface eglSurface, int what) { 322 | int[] value = new int[1]; 323 | EGL14.eglQuerySurface(mEGLDisplay, eglSurface, what, value, 0); 324 | return value[0]; 325 | } 326 | 327 | /** 328 | * Queries a string value. 329 | */ 330 | public String queryString(int what) { 331 | return EGL14.eglQueryString(mEGLDisplay, what); 332 | } 333 | 334 | /** 335 | * Returns the GLES version this context is configured for (currently 2 or 3). 336 | */ 337 | public int getGlVersion() { 338 | return mGlVersion; 339 | } 340 | 341 | /** 342 | * Writes the current display, context, and surface to the log. 343 | */ 344 | public static void logCurrent(String msg) { 345 | EGLDisplay display; 346 | EGLContext context; 347 | EGLSurface surface; 348 | 349 | display = EGL14.eglGetCurrentDisplay(); 350 | context = EGL14.eglGetCurrentContext(); 351 | surface = EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW); 352 | Log.i(TAG, "Current EGL (" + msg + "): display=" + display + ", context=" + context + 353 | ", surface=" + surface); 354 | } 355 | 356 | /** 357 | * Checks for EGL errors. Throws an exception if an error has been raised. 358 | */ 359 | private void checkEglError(String msg) { 360 | int error; 361 | if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) { 362 | throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error)); 363 | } 364 | } 365 | } 366 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/video/EglSurfaceBase.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2013 Google Inc. All rights reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package me.relex.camerafilter.video; 18 | 19 | import android.graphics.Bitmap; 20 | import android.opengl.EGL14; 21 | import android.opengl.EGLSurface; 22 | import android.opengl.GLES20; 23 | import android.util.Log; 24 | import java.io.BufferedOutputStream; 25 | import java.io.File; 26 | import java.io.FileOutputStream; 27 | import java.io.IOException; 28 | import java.nio.ByteBuffer; 29 | import java.nio.ByteOrder; 30 | import me.relex.camerafilter.gles.GlUtil; 31 | 32 | /** 33 | * Common base class for EGL surfaces. 34 | *

35 | * There can be multiple surfaces associated with a single context. 36 | */ 37 | public class EglSurfaceBase { 38 | protected static final String TAG = "EglSurfaceBase"; 39 | 40 | // EglCore object we're associated with. It may be associated with multiple surfaces. 41 | protected EglCore mEglCore; 42 | 43 | private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE; 44 | private int mWidth = -1; 45 | private int mHeight = -1; 46 | 47 | protected EglSurfaceBase(EglCore eglCore) { 48 | mEglCore = eglCore; 49 | } 50 | 51 | /** 52 | * Creates a window surface. 53 | *

54 | * 55 | * @param surface May be a Surface or SurfaceTexture. 56 | */ 57 | public void createWindowSurface(Object surface) { 58 | if (mEGLSurface != EGL14.EGL_NO_SURFACE) { 59 | throw new IllegalStateException("surface already created"); 60 | } 61 | mEGLSurface = mEglCore.createWindowSurface(surface); 62 | 63 | // Don't cache width/height here, because the size of the underlying surface can change 64 | // out from under us (see e.g. HardwareScalerActivity). 65 | //mWidth = mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH); 66 | //mHeight = mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT); 67 | } 68 | 69 | /** 70 | * Creates an off-screen surface. 71 | */ 72 | public void createOffscreenSurface(int width, int height) { 73 | if (mEGLSurface != EGL14.EGL_NO_SURFACE) { 74 | throw new IllegalStateException("surface already created"); 75 | } 76 | mEGLSurface = mEglCore.createOffscreenSurface(width, height); 77 | mWidth = width; 78 | mHeight = height; 79 | } 80 | 81 | /** 82 | * Returns the surface's width, in pixels. 83 | *

84 | * If this is called on a window surface, and the underlying surface is in the process 85 | * of changing size, we may not see the new size right away (e.g. in the "surfaceChanged" 86 | * callback). The size should match after the next buffer swap. 87 | */ 88 | public int getWidth() { 89 | if (mWidth < 0) { 90 | return mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH); 91 | } else { 92 | return mWidth; 93 | } 94 | } 95 | 96 | /** 97 | * Returns the surface's height, in pixels. 98 | */ 99 | public int getHeight() { 100 | if (mHeight < 0) { 101 | return mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT); 102 | } else { 103 | return mHeight; 104 | } 105 | } 106 | 107 | /** 108 | * Release the EGL surface. 109 | */ 110 | public void releaseEglSurface() { 111 | mEglCore.releaseSurface(mEGLSurface); 112 | mEGLSurface = EGL14.EGL_NO_SURFACE; 113 | mWidth = mHeight = -1; 114 | } 115 | 116 | /** 117 | * Makes our EGL context and surface current. 118 | */ 119 | public void makeCurrent() { 120 | mEglCore.makeCurrent(mEGLSurface); 121 | } 122 | 123 | /** 124 | * Makes our EGL context and surface current for drawing, using the supplied surface 125 | * for reading. 126 | */ 127 | public void makeCurrentReadFrom(EglSurfaceBase readSurface) { 128 | mEglCore.makeCurrent(mEGLSurface, readSurface.mEGLSurface); 129 | } 130 | 131 | /** 132 | * Calls eglSwapBuffers. Use this to "publish" the current frame. 133 | * 134 | * @return false on failure 135 | */ 136 | public boolean swapBuffers() { 137 | boolean result = mEglCore.swapBuffers(mEGLSurface); 138 | if (!result) { 139 | Log.d(TAG, "WARNING: swapBuffers() failed"); 140 | } 141 | return result; 142 | } 143 | 144 | /** 145 | * Sends the presentation time stamp to EGL. 146 | * 147 | * @param nsecs Timestamp, in nanoseconds. 148 | */ 149 | public void setPresentationTime(long nsecs) { 150 | mEglCore.setPresentationTime(mEGLSurface, nsecs); 151 | } 152 | 153 | /** 154 | * Saves the EGL surface to a file. 155 | *

156 | * Expects that this object's EGL surface is current. 157 | */ 158 | public void saveFrame(File file) throws IOException { 159 | if (!mEglCore.isCurrent(mEGLSurface)) { 160 | throw new RuntimeException("Expected EGL context/surface is not current"); 161 | } 162 | 163 | // glReadPixels fills in a "direct" ByteBuffer with what is essentially big-endian RGBA 164 | // data (i.e. a byte of red, followed by a byte of green...). While the Bitmap 165 | // constructor that takes an int[] wants little-endian ARGB (blue/red swapped), the 166 | // Bitmap "copy pixels" method wants the same format GL provides. 167 | // 168 | // Ideally we'd have some way to re-use the ByteBuffer, especially if we're calling 169 | // here often. 170 | // 171 | // Making this even more interesting is the upside-down nature of GL, which means 172 | // our output will look upside down relative to what appears on screen if the 173 | // typical GL conventions are used. 174 | 175 | String filename = file.toString(); 176 | 177 | int width = getWidth(); 178 | int height = getHeight(); 179 | ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4); 180 | 181 | buf.order(ByteOrder.LITTLE_ENDIAN); 182 | GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf); 183 | GlUtil.checkGlError("glReadPixels"); 184 | buf.rewind(); 185 | 186 | BufferedOutputStream bos = null; 187 | try { 188 | bos = new BufferedOutputStream(new FileOutputStream(filename)); 189 | Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); 190 | bmp.copyPixelsFromBuffer(buf); 191 | 192 | bmp.compress(Bitmap.CompressFormat.PNG, 90, bos); 193 | bmp.recycle(); 194 | } finally { 195 | if (bos != null) bos.close(); 196 | } 197 | Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'"); 198 | } 199 | } 200 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/video/EncoderConfig.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.video; 2 | 3 | /** 4 | * Created by relex on 15/6/2. 5 | */ 6 | 7 | import android.opengl.EGLContext; 8 | import java.io.File; 9 | 10 | /** 11 | * Encoder configuration. 12 | *

13 | * Apart from the EGL context, which is filled in later via updateEglContext(), the 14 | * fields are final, so the object can safely be passed between threads without 15 | * explicit synchronization once the context has been set. 16 | *

17 | * TODO: make frame rate and iframe interval configurable? Maybe use builder pattern 18 | * with reasonable defaults for those and bit rate. 19 | */ 20 | public class EncoderConfig { 21 | final File mOutputFile; 22 | final int mWidth; 23 | final int mHeight; 24 | final int mBitRate; 25 | EGLContext mEglContext; 26 | 27 | public EncoderConfig(File outputFile, int width, int height, int bitRate) { 28 | mOutputFile = outputFile; 29 | mWidth = width; 30 | mHeight = height; 31 | mBitRate = bitRate; 32 | } 33 | 34 | public void updateEglContext(EGLContext eglContext) { 35 | mEglContext = eglContext; 36 | } 37 | //@Override public String toString() { 38 | // return "EncoderConfig: " + mWidth + "x" + mHeight + " @" + mBitRate + 39 | // " to '" + mOutputFile.toString() + "' ctxt=" + mEglContext; 40 | //} 41 | } 42 | 43 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/video/VideoEncoderCore.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2014 Google Inc. All rights reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package me.relex.camerafilter.video; 18 | 19 | import android.media.MediaCodec; 20 | import android.media.MediaCodecInfo; 21 | import android.media.MediaFormat; 22 | import android.media.MediaMuxer; 23 | import android.util.Log; 24 | import android.view.Surface; 25 | import java.io.File; 26 | import java.io.IOException; 27 | import java.nio.ByteBuffer; 28 | 29 | /** 30 | * This class wraps up the core components used for surface-input video encoding. 31 | *

32 | * Once created, frames are fed to the input surface. Remember to provide the presentation 33 | * time stamp, and always call drainEncoder() before swapBuffers() to ensure that the 34 | * producer side doesn't get backed up. 35 | *

36 | * This class is not thread-safe, with one exception: it is valid to use the input surface 37 | * on one thread, and drain the output on a different thread. 38 | */ 39 | public class VideoEncoderCore { 40 | private static final String TAG = "VideoEncoderCore"; 41 | private static final boolean VERBOSE = false; 42 | 43 | // TODO: these ought to be configurable as well 44 | private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding 45 | private static final int FRAME_RATE = 30; // 30fps 46 | private static final int IFRAME_INTERVAL = 5; // 5 seconds between I-frames 47 | 48 | private Surface mInputSurface; 49 | private MediaMuxer mMuxer; 50 | private MediaCodec mEncoder; 51 | private MediaCodec.BufferInfo mBufferInfo; 52 | private int mTrackIndex; 53 | private boolean mMuxerStarted; 54 | 55 | /** 56 | * Configures encoder and muxer state, and prepares the input Surface. 57 | */ 58 | public VideoEncoderCore(int width, int height, int bitRate, File outputFile) 59 | throws IOException { 60 | mBufferInfo = new MediaCodec.BufferInfo(); 61 | 62 | MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height); 63 | 64 | // Set some properties. Failing to specify some of these can cause the MediaCodec 65 | // configure() call to throw an unhelpful exception. 66 | format.setInteger(MediaFormat.KEY_COLOR_FORMAT, 67 | MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); 68 | format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate); 69 | format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE); 70 | format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL); 71 | if (VERBOSE) Log.d(TAG, "format: " + format); 72 | 73 | // Create a MediaCodec encoder, and configure it with our format. Get a Surface 74 | // we can use for input and wrap it with a class that handles the EGL work. 75 | mEncoder = MediaCodec.createEncoderByType(MIME_TYPE); 76 | mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); 77 | mInputSurface = mEncoder.createInputSurface(); 78 | mEncoder.start(); 79 | 80 | // Create a MediaMuxer. We can't add the video track and start() the muxer here, 81 | // because our MediaFormat doesn't have the Magic Goodies. These can only be 82 | // obtained from the encoder after it has started processing data. 83 | // 84 | // We're not actually interested in multiplexing audio. We just want to convert 85 | // the raw H.264 elementary stream we get from MediaCodec into a .mp4 file. 86 | mMuxer = new MediaMuxer(outputFile.toString(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); 87 | mTrackIndex = -1; 88 | mMuxerStarted = false; 89 | } 90 | 91 | /** 92 | * Returns the encoder's input surface. 93 | */ 94 | public Surface getInputSurface() { 95 | return mInputSurface; 96 | } 97 | 98 | /** 99 | * Releases encoder resources. 100 | */ 101 | public void release() { 102 | if (VERBOSE) Log.d(TAG, "releasing encoder objects"); 103 | if (mEncoder != null) { 104 | mEncoder.stop(); 105 | mEncoder.release(); 106 | mEncoder = null; 107 | } 108 | if (mMuxer != null) { 109 | // TODO: stop() throws an exception if you haven't fed it any data. Keep track 110 | // of frames submitted, and don't call stop() if we haven't written anything. 111 | mMuxer.stop(); 112 | mMuxer.release(); 113 | mMuxer = null; 114 | } 115 | } 116 | 117 | /** 118 | * Extracts all pending data from the encoder and forwards it to the muxer. 119 | *

120 | * If endOfStream is not set, this returns when there is no more data to drain. If it 121 | * is set, we send EOS to the encoder, and then iterate until we see EOS on the output. 122 | * Calling this with endOfStream set should be done once, right before stopping the muxer. 123 | *

124 | * We're just using the muxer to get a .mp4 file (instead of a raw H.264 stream). We're 125 | * not recording audio. 126 | */ 127 | public void drainEncoder(boolean endOfStream) { 128 | final int TIMEOUT_USEC = 10000; 129 | if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")"); 130 | 131 | if (endOfStream) { 132 | if (VERBOSE) Log.d(TAG, "sending EOS to encoder"); 133 | mEncoder.signalEndOfInputStream(); 134 | } 135 | 136 | ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers(); 137 | while (true) { 138 | int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC); 139 | if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { 140 | // no output available yet 141 | if (!endOfStream) { 142 | break; // out of while 143 | } else { 144 | if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS"); 145 | } 146 | } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { 147 | // not expected for an encoder 148 | encoderOutputBuffers = mEncoder.getOutputBuffers(); 149 | } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { 150 | // should happen before receiving buffers, and should only happen once 151 | if (mMuxerStarted) { 152 | throw new RuntimeException("format changed twice"); 153 | } 154 | MediaFormat newFormat = mEncoder.getOutputFormat(); 155 | Log.d(TAG, "encoder output format changed: " + newFormat); 156 | 157 | // now that we have the Magic Goodies, start the muxer 158 | mTrackIndex = mMuxer.addTrack(newFormat); 159 | mMuxer.start(); 160 | mMuxerStarted = true; 161 | } else if (encoderStatus < 0) { 162 | Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus); 163 | // let's ignore it 164 | } else { 165 | ByteBuffer encodedData = encoderOutputBuffers[encoderStatus]; 166 | if (encodedData == null) { 167 | throw new RuntimeException("encoderOutputBuffer " + encoderStatus + 168 | " was null"); 169 | } 170 | 171 | if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { 172 | // The codec config data was pulled out and fed to the muxer when we got 173 | // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it. 174 | if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG"); 175 | mBufferInfo.size = 0; 176 | } 177 | 178 | if (mBufferInfo.size != 0) { 179 | if (!mMuxerStarted) { 180 | throw new RuntimeException("muxer hasn't started"); 181 | } 182 | 183 | // adjust the ByteBuffer values to match BufferInfo (not needed?) 184 | encodedData.position(mBufferInfo.offset); 185 | encodedData.limit(mBufferInfo.offset + mBufferInfo.size); 186 | 187 | mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo); 188 | if (VERBOSE) { 189 | Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" + 190 | mBufferInfo.presentationTimeUs); 191 | } 192 | } 193 | 194 | mEncoder.releaseOutputBuffer(encoderStatus, false); 195 | 196 | if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 197 | if (!endOfStream) { 198 | Log.w(TAG, "reached end of stream unexpectedly"); 199 | } else { 200 | if (VERBOSE) Log.d(TAG, "end of stream reached"); 201 | } 202 | break; // out of while 203 | } 204 | } 205 | } 206 | } 207 | } 208 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/video/WindowSurface.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2013 Google Inc. All rights reserved. 
3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package me.relex.camerafilter.video; 17 | 18 | import android.graphics.SurfaceTexture; 19 | import android.view.Surface; 20 | 21 | /** 22 | * Recordable EGL window surface. 23 | *

24 | * It's good practice to explicitly release() the surface, preferably from a "finally" block. 25 | */ 26 | public class WindowSurface extends EglSurfaceBase { 27 | private Surface mSurface; 28 | private boolean mReleaseSurface; 29 | 30 | /** 31 | * Associates an EGL surface with the native window surface. 32 | *

33 | * Set releaseSurface to true if you want the Surface to be released when release() is 34 | * called. This is convenient, but can interfere with framework classes that expect to 35 | * manage the Surface themselves (e.g. if you release a SurfaceView's Surface, the 36 | * surfaceDestroyed() callback won't fire). 37 | */ 38 | public WindowSurface(EglCore eglCore, Surface surface, boolean releaseSurface) { 39 | super(eglCore); 40 | createWindowSurface(surface); 41 | mSurface = surface; 42 | mReleaseSurface = releaseSurface; 43 | } 44 | 45 | /** 46 | * Associates an EGL surface with the SurfaceTexture. 47 | */ 48 | public WindowSurface(EglCore eglCore, SurfaceTexture surfaceTexture) { 49 | super(eglCore); 50 | createWindowSurface(surfaceTexture); 51 | } 52 | 53 | /** 54 | * Releases any resources associated with the EGL surface (and, if configured to do so, 55 | * with the Surface as well). 56 | *

57 | * Does not require that the surface's EGL context be current. 58 | */ 59 | public void release() { 60 | releaseEglSurface(); 61 | if (mSurface != null) { 62 | if (mReleaseSurface) { 63 | mSurface.release(); 64 | } 65 | mSurface = null; 66 | } 67 | } 68 | 69 | /** 70 | * Recreate the EGLSurface, using the new EglBase. The caller should have already 71 | * freed the old EGLSurface with releaseEglSurface(). 72 | *

73 | * This is useful when we want to update the EGLSurface associated with a Surface. 74 | * For example, if we want to share with a different EGLContext, which can only 75 | * be done by tearing down and recreating the context. (That's handled by the caller; 76 | * this just creates a new EGLSurface for the Surface we were handed earlier.) 77 | *

78 | * If the previous EGLSurface isn't fully destroyed, e.g. it's still current on a 79 | * context somewhere, the create call will fail with complaints from the Surface 80 | * about already being connected. 81 | */ 82 | public void recreate(EglCore newEglCore) { 83 | if (mSurface == null) { 84 | throw new RuntimeException("not yet implemented for SurfaceTexture"); 85 | } 86 | mEglCore = newEglCore; // switch to new context 87 | createWindowSurface(mSurface); // create new surface 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/widget/AutoFitGLSurfaceView.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.widget; 2 | 3 | import android.content.Context; 4 | import android.opengl.GLSurfaceView; 5 | import android.util.AttributeSet; 6 | 7 | public class AutoFitGLSurfaceView extends GLSurfaceView { 8 | 9 | protected int mRatioWidth = 0; 10 | protected int mRatioHeight = 0; 11 | 12 | public AutoFitGLSurfaceView(Context context) { 13 | super(context); 14 | } 15 | 16 | public AutoFitGLSurfaceView(Context context, AttributeSet attrs) { 17 | super(context, attrs); 18 | } 19 | 20 | public void setAspectRatio(int width, int height) { 21 | if (width < 0 || height < 0) { 22 | throw new IllegalArgumentException("Size cannot be negative."); 23 | } 24 | mRatioWidth = width; 25 | mRatioHeight = height; 26 | requestLayout(); 27 | } 28 | 29 | @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { 30 | super.onMeasure(widthMeasureSpec, heightMeasureSpec); 31 | int width = MeasureSpec.getSize(widthMeasureSpec); 32 | int height = MeasureSpec.getSize(heightMeasureSpec); 33 | if (0 == mRatioWidth || 0 == mRatioHeight) { 34 | setMeasuredDimension(width, height); 35 | } else { 36 | if (width < height * mRatioWidth / mRatioHeight) { 37 | setMeasuredDimension(width, width * mRatioHeight / mRatioWidth); 38 | } else { 39 | setMeasuredDimension(height * mRatioWidth / mRatioHeight, height); 40 | } 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /app/src/main/java/me/relex/camerafilter/widget/CameraSurfaceView.java: -------------------------------------------------------------------------------- 1 | package me.relex.camerafilter.widget; 2 | 3 | import android.content.Context; 4 | import android.graphics.SurfaceTexture; 5 | import android.hardware.Camera; 6 | import android.os.Handler; 7 | import android.os.HandlerThread; 8 | import android.os.Looper; 9 | import android.os.Message; 10 | import android.util.AttributeSet; 11 | import me.relex.camerafilter.camera.CameraController; 12 | import me.relex.camerafilter.camera.CameraHelper; 13 | import me.relex.camerafilter.camera.CameraRecordRenderer; 14 | import me.relex.camerafilter.camera.CommonHandlerListener; 15 | import me.relex.camerafilter.filter.FilterManager.FilterType; 16 | 17 | public class CameraSurfaceView extends AutoFitGLSurfaceView 18 | implements CommonHandlerListener, SurfaceTexture.OnFrameAvailableListener { 19 | 20 | private CameraHandler mBackgroundHandler; 21 | private HandlerThread mHandlerThread; 22 | private CameraRecordRenderer mCameraRenderer; 23 | 24 | public CameraSurfaceView(Context context) { 25 | super(context); 26 | init(context); 27 | } 28 | 29 | public CameraSurfaceView(Context context, AttributeSet attrs) { 30 | super(context, attrs); 31 | init(context); 32 | } 33 | 34 | private void init(Context 
context) { 35 | 36 | setEGLContextClientVersion(2); 37 | 38 | mHandlerThread = new HandlerThread("CameraHandlerThread"); 39 | mHandlerThread.start(); 40 | 41 | mBackgroundHandler = new CameraHandler(mHandlerThread.getLooper(), this); 42 | mCameraRenderer = 43 | new CameraRecordRenderer(context.getApplicationContext(), mBackgroundHandler); 44 | 45 | setRenderer(mCameraRenderer); 46 | setRenderMode(RENDERMODE_WHEN_DIRTY); 47 | } 48 | 49 | public CameraRecordRenderer getRenderer() { 50 | return mCameraRenderer; 51 | } 52 | 53 | //public void setEncoderConfig(EncoderConfig encoderConfig) { 54 | // if (mCameraRenderer != null) { 55 | // mCameraRenderer.setEncoderConfig(encoderConfig); 56 | // } 57 | //} 58 | 59 | @Override public void onResume() { 60 | super.onResume(); 61 | } 62 | 63 | @Override public void onPause() { 64 | mBackgroundHandler.removeCallbacksAndMessages(null); 65 | CameraController.getInstance().release(); 66 | queueEvent(new Runnable() { 67 | @Override public void run() { 68 | // 跨进程 清空 Renderer数据 69 | mCameraRenderer.notifyPausing(); 70 | } 71 | }); 72 | 73 | super.onPause(); 74 | } 75 | 76 | public void onDestroy() { 77 | mBackgroundHandler.removeCallbacksAndMessages(null); 78 | if (!mHandlerThread.isInterrupted()) { 79 | try { 80 | mHandlerThread.quit(); 81 | mHandlerThread.interrupt(); 82 | } catch (Exception e) { 83 | e.printStackTrace(); 84 | } 85 | } 86 | } 87 | 88 | public void changeFilter(FilterType filterType) { 89 | mCameraRenderer.changeFilter(filterType); 90 | } 91 | 92 | @Override public void onFrameAvailable(SurfaceTexture surfaceTexture) { 93 | requestRender(); 94 | } 95 | 96 | public static class CameraHandler extends Handler { 97 | public static final int SETUP_CAMERA = 1001; 98 | public static final int CONFIGURE_CAMERA = 1002; 99 | public static final int START_CAMERA_PREVIEW = 1003; 100 | //public static final int STOP_CAMERA_PREVIEW = 1004; 101 | private CommonHandlerListener listener; 102 | 103 | public CameraHandler(Looper looper, CommonHandlerListener listener) { 104 | super(looper); 105 | this.listener = listener; 106 | } 107 | 108 | @Override public void handleMessage(Message msg) { 109 | listener.handleMessage(msg); 110 | } 111 | } 112 | 113 | @Override public void handleMessage(final Message msg) { 114 | switch (msg.what) { 115 | case CameraHandler.SETUP_CAMERA: { 116 | final int width = msg.arg1; 117 | final int height = msg.arg2; 118 | final SurfaceTexture surfaceTexture = (SurfaceTexture) msg.obj; 119 | surfaceTexture.setOnFrameAvailableListener(this); 120 | 121 | mBackgroundHandler.post(new Runnable() { 122 | @Override public void run() { 123 | CameraController.getInstance() 124 | .setupCamera(surfaceTexture, getContext().getApplicationContext(), 125 | width); 126 | mBackgroundHandler.sendMessage(mBackgroundHandler.obtainMessage( 127 | CameraSurfaceView.CameraHandler.CONFIGURE_CAMERA, width, height)); 128 | } 129 | }); 130 | } 131 | break; 132 | case CameraHandler.CONFIGURE_CAMERA: { 133 | final int width = msg.arg1; 134 | final int height = msg.arg2; 135 | Camera.Size previewSize = CameraHelper.getOptimalPreviewSize( 136 | CameraController.getInstance().getCameraParameters(), 137 | CameraController.getInstance().mCameraPictureSize, width); 138 | 139 | CameraController.getInstance().configureCameraParameters(previewSize); 140 | if (previewSize != null) { 141 | mCameraRenderer.setCameraPreviewSize(previewSize.height, previewSize.width); 142 | } 143 | mBackgroundHandler.sendEmptyMessage(CameraHandler.START_CAMERA_PREVIEW); 144 | } 145 | break; 
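            // Camera setup is chained across the background HandlerThread:
            // SETUP_CAMERA opens the camera with the renderer's SurfaceTexture,
            // CONFIGURE_CAMERA (above) picks an optimal preview size and pushes it to the renderer,
            // and START_CAMERA_PREVIEW (below) finally starts the preview.
            // GL drawing itself stays on the GLSurfaceView render thread.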
146 | 147 | case CameraHandler.START_CAMERA_PREVIEW: 148 | mBackgroundHandler.post(new Runnable() { 149 | @Override public void run() { 150 | CameraController.getInstance().startCameraPreview(); 151 | } 152 | }); 153 | 154 | break; 155 | //case CameraHandler.STOP_CAMERA_PREVIEW: 156 | // mBackgroundHandler.post(new Runnable() { 157 | // @Override public void run() { 158 | // CameraController.getInstance().stopCameraPreview(); 159 | // } 160 | // }); 161 | // break; 162 | 163 | default: 164 | break; 165 | } 166 | } 167 | } -------------------------------------------------------------------------------- /app/src/main/res/drawable-hdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ongakuer/CameraFilter/b8fde13626122e57d83c5eaf38b24192691b766e/app/src/main/res/drawable-hdpi/ic_launcher.png -------------------------------------------------------------------------------- /app/src/main/res/drawable-mdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ongakuer/CameraFilter/b8fde13626122e57d83c5eaf38b24192691b766e/app/src/main/res/drawable-mdpi/ic_launcher.png -------------------------------------------------------------------------------- /app/src/main/res/drawable-nodpi/mask.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ongakuer/CameraFilter/b8fde13626122e57d83c5eaf38b24192691b766e/app/src/main/res/drawable-nodpi/mask.png -------------------------------------------------------------------------------- /app/src/main/res/drawable-nodpi/raw_image.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ongakuer/CameraFilter/b8fde13626122e57d83c5eaf38b24192691b766e/app/src/main/res/drawable-nodpi/raw_image.jpg -------------------------------------------------------------------------------- /app/src/main/res/drawable-xhdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ongakuer/CameraFilter/b8fde13626122e57d83c5eaf38b24192691b766e/app/src/main/res/drawable-xhdpi/ic_launcher.png -------------------------------------------------------------------------------- /app/src/main/res/drawable-xxhdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ongakuer/CameraFilter/b8fde13626122e57d83c5eaf38b24192691b766e/app/src/main/res/drawable-xxhdpi/ic_launcher.png -------------------------------------------------------------------------------- /app/src/main/res/layout/activity_image_filter.xml: -------------------------------------------------------------------------------- 1 | 4 | 5 | 6 | 13 | 14 | 15 | 20 | 21 | 28 | 29 | 36 | 37 | 44 | 45 | 46 | 47 | 48 | 49 | -------------------------------------------------------------------------------- /app/src/main/res/layout/activity_main.xml: -------------------------------------------------------------------------------- 1 | 6 | 7 | 13 | 14 | 17 | 18 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /app/src/main/res/layout/activity_video_record.xml: -------------------------------------------------------------------------------- 1 | 4 | 5 | 6 | 10 | 11 | 12 | 17 | 18 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 41 | 42 | 49 | 50 | 51 | 58 | 59 | 60 | 
61 | 62 | 63 | -------------------------------------------------------------------------------- /app/src/main/res/raw/cross_1.acv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ongakuer/CameraFilter/b8fde13626122e57d83c5eaf38b24192691b766e/app/src/main/res/raw/cross_1.acv -------------------------------------------------------------------------------- /app/src/main/res/raw/cross_10.acv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ongakuer/CameraFilter/b8fde13626122e57d83c5eaf38b24192691b766e/app/src/main/res/raw/cross_10.acv -------------------------------------------------------------------------------- /app/src/main/res/raw/cross_11.acv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ongakuer/CameraFilter/b8fde13626122e57d83c5eaf38b24192691b766e/app/src/main/res/raw/cross_11.acv -------------------------------------------------------------------------------- /app/src/main/res/raw/cross_2.acv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ongakuer/CameraFilter/b8fde13626122e57d83c5eaf38b24192691b766e/app/src/main/res/raw/cross_2.acv -------------------------------------------------------------------------------- /app/src/main/res/raw/cross_3.acv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ongakuer/CameraFilter/b8fde13626122e57d83c5eaf38b24192691b766e/app/src/main/res/raw/cross_3.acv -------------------------------------------------------------------------------- /app/src/main/res/raw/cross_4.acv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ongakuer/CameraFilter/b8fde13626122e57d83c5eaf38b24192691b766e/app/src/main/res/raw/cross_4.acv -------------------------------------------------------------------------------- /app/src/main/res/raw/cross_5.acv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ongakuer/CameraFilter/b8fde13626122e57d83c5eaf38b24192691b766e/app/src/main/res/raw/cross_5.acv -------------------------------------------------------------------------------- /app/src/main/res/raw/cross_6.acv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ongakuer/CameraFilter/b8fde13626122e57d83c5eaf38b24192691b766e/app/src/main/res/raw/cross_6.acv -------------------------------------------------------------------------------- /app/src/main/res/raw/cross_7.acv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ongakuer/CameraFilter/b8fde13626122e57d83c5eaf38b24192691b766e/app/src/main/res/raw/cross_7.acv -------------------------------------------------------------------------------- /app/src/main/res/raw/cross_8.acv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ongakuer/CameraFilter/b8fde13626122e57d83c5eaf38b24192691b766e/app/src/main/res/raw/cross_8.acv -------------------------------------------------------------------------------- /app/src/main/res/raw/cross_9.acv: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ongakuer/CameraFilter/b8fde13626122e57d83c5eaf38b24192691b766e/app/src/main/res/raw/cross_9.acv -------------------------------------------------------------------------------- /app/src/main/res/raw/fragment_shader_2d.glsl: -------------------------------------------------------------------------------- 1 | precision mediump float;//fragment中没有默认的浮点数精度修饰符。因此,对于浮点数,浮点数向量和矩阵变量声明,必须声明包含一个精度修饰符。 2 | 3 | varying vec2 vTextureCoord; 4 | uniform sampler2D uTexture; 5 | 6 | void main() { 7 | gl_FragColor = texture2D(uTexture, vTextureCoord); 8 | } -------------------------------------------------------------------------------- /app/src/main/res/raw/fragment_shader_2d_blend.glsl: -------------------------------------------------------------------------------- 1 | precision mediump float; 2 | 3 | varying vec2 vTextureCoord; 4 | varying vec2 vExtraTextureCoord; 5 | 6 | uniform sampler2D uTexture; 7 | uniform sampler2D uExtraTexture; 8 | 9 | void main() { 10 | vec4 base = texture2D(uTexture, vTextureCoord); 11 | vec4 overlay = texture2D(uExtraTexture, vExtraTextureCoord); 12 | vec4 outputColor; 13 | outputColor.r = overlay.r + base.r * base.a * (1.0 - overlay.a); 14 | outputColor.g = overlay.g + base.g * base.a * (1.0 - overlay.a); 15 | outputColor.b = overlay.b + base.b * base.a * (1.0 - overlay.a); 16 | outputColor.a = overlay.a + base.a * (1.0 - overlay.a); 17 | gl_FragColor = outputColor; 18 | } -------------------------------------------------------------------------------- /app/src/main/res/raw/fragment_shader_2d_blend_soft_light.glsl: -------------------------------------------------------------------------------- 1 | precision mediump float; 2 | 3 | varying vec2 vTextureCoord; 4 | varying vec2 vExtraTextureCoord; 5 | 6 | uniform sampler2D uTexture; 7 | uniform sampler2D uExtraTexture; 8 | 9 | void main() { 10 | vec4 base = texture2D(uTexture, vTextureCoord); 11 | vec4 overlay = texture2D(uExtraTexture, vExtraTextureCoord); 12 | gl_FragColor = base * (overlay.a * (base / base.a) + (2.0 * overlay * (1.0 - (base / base.a)))) + overlay * (1.0 - base.a) + base * (1.0 - overlay.a); 13 | } -------------------------------------------------------------------------------- /app/src/main/res/raw/fragment_shader_2d_blur.glsl: -------------------------------------------------------------------------------- 1 | #define SAMPLES 9 2 | precision highp float; 3 | uniform sampler2D uTexture; 4 | 5 | varying vec2 vTextureCoord; 6 | varying vec2 vBlurTextureCoord[SAMPLES]; 7 | 8 | void main() 9 | { 10 | vec3 sum = vec3(0.0); 11 | vec4 fragColor = texture2D(uTexture,vTextureCoord); 12 | 13 | 14 | sum += texture2D(uTexture, vBlurTextureCoord[0]).rgb * 0.05; 15 | sum += texture2D(uTexture, vBlurTextureCoord[1]).rgb * 0.09; 16 | sum += texture2D(uTexture, vBlurTextureCoord[2]).rgb * 0.12; 17 | sum += texture2D(uTexture, vBlurTextureCoord[3]).rgb * 0.15; 18 | sum += texture2D(uTexture, vBlurTextureCoord[4]).rgb * 0.18; 19 | sum += texture2D(uTexture, vBlurTextureCoord[5]).rgb * 0.15; 20 | sum += texture2D(uTexture, vBlurTextureCoord[6]).rgb * 0.12; 21 | sum += texture2D(uTexture, vBlurTextureCoord[7]).rgb * 0.09; 22 | sum += texture2D(uTexture, vBlurTextureCoord[8]).rgb * 0.05; 23 | 24 | gl_FragColor = vec4(sum,fragColor.a); 25 | } -------------------------------------------------------------------------------- /app/src/main/res/raw/fragment_shader_2d_kernel.glsl: -------------------------------------------------------------------------------- 1 | #define KERNEL_SIZE 9 2 | 3 | precision 
highp float; //指定默认精度 4 | 5 | varying vec2 vTextureCoord; 6 | uniform sampler2D uTexture; 7 | 8 | uniform float uKernel[KERNEL_SIZE]; 9 | uniform vec2 uTexOffset[KERNEL_SIZE]; 10 | uniform float uColorAdjust; 11 | 12 | void main() { 13 | int i = 0; 14 | vec4 sum = vec4(0.0); 15 | for (i = 0; i < KERNEL_SIZE; i++) { 16 | vec4 texc = texture2D(uTexture, vTextureCoord + uTexOffset[i]); 17 | sum += texc * uKernel[i]; 18 | } 19 | sum += uColorAdjust; 20 | gl_FragColor = sum; 21 | } -------------------------------------------------------------------------------- /app/src/main/res/raw/fragment_shader_2d_tone_curve.glsl: -------------------------------------------------------------------------------- 1 | #extension GL_OES_EGL_image_external : require 2 | precision mediump float; //指定默认精度 3 | 4 | varying vec2 vTextureCoord; 5 | uniform sampler2D uTexture; 6 | uniform sampler2D toneCurveTexture; 7 | 8 | void main() { 9 | vec4 textureColor = texture2D(uTexture, vTextureCoord); 10 | 11 | float redCurveValue = texture2D(toneCurveTexture, vec2(textureColor.r, 0.0)).r; 12 | float greenCurveValue = texture2D(toneCurveTexture, vec2(textureColor.g, 0.0)).g; 13 | float blueCurveValue = texture2D(toneCurveTexture, vec2(textureColor.b, 0.0)).b; 14 | 15 | gl_FragColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, textureColor.a); 16 | } -------------------------------------------------------------------------------- /app/src/main/res/raw/fragment_shader_ext.glsl: -------------------------------------------------------------------------------- 1 | #extension GL_OES_EGL_image_external : require 2 | precision mediump float; //指定默认精度 3 | 4 | varying vec2 vTextureCoord; 5 | uniform samplerExternalOES uTexture; 6 | 7 | void main() { 8 | gl_FragColor = texture2D(uTexture, vTextureCoord); 9 | } -------------------------------------------------------------------------------- /app/src/main/res/raw/fragment_shader_ext_blend.glsl: -------------------------------------------------------------------------------- 1 | #extension GL_OES_EGL_image_external : require 2 | precision mediump float; 3 | 4 | varying vec2 vTextureCoord; 5 | varying vec2 vExtraTextureCoord; 6 | 7 | uniform samplerExternalOES uTexture; 8 | uniform sampler2D uExtraTexture; 9 | 10 | void main() { 11 | vec4 base = texture2D(uTexture, vTextureCoord); 12 | vec4 overlay = texture2D(uExtraTexture, vExtraTextureCoord); 13 | vec4 outputColor; 14 | outputColor.r = overlay.r + base.r * base.a * (1.0 - overlay.a); 15 | outputColor.g = overlay.g + base.g * base.a * (1.0 - overlay.a); 16 | outputColor.b = overlay.b + base.b * base.a * (1.0 - overlay.a); 17 | outputColor.a = overlay.a + base.a * (1.0 - overlay.a); 18 | gl_FragColor = outputColor; 19 | } -------------------------------------------------------------------------------- /app/src/main/res/raw/fragment_shader_ext_blend_soft_light.glsl: -------------------------------------------------------------------------------- 1 | #extension GL_OES_EGL_image_external : require 2 | precision mediump float; 3 | 4 | varying vec2 vTextureCoord; 5 | varying vec2 vExtraTextureCoord; 6 | 7 | uniform samplerExternalOES uTexture; 8 | uniform sampler2D uExtraTexture; 9 | 10 | void main() { 11 | vec4 base = texture2D(uTexture, vTextureCoord); 12 | vec4 overlay = texture2D(uExtraTexture, vExtraTextureCoord); 13 | gl_FragColor = base * (overlay.a * (base / base.a) + (2.0 * overlay * (1.0 - (base / base.a)))) + overlay * (1.0 - base.a) + base * (1.0 - overlay.a); 14 | } 
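The two-input blend shaders above sample a second texture through uExtraTexture in addition to the main camera/image texture, so on the Java side that overlay texture has to be bound to its own texture unit before drawing. Below is a minimal sketch of the usual GLES20 calls; "uExtraTexture" matches the sampler name in the shaders, but the class, method, and parameter names are placeholders, not this project's actual filter code (which sits outside this excerpt).

import android.opengl.GLES20;

// Sketch: binding the overlay texture for the two-input blend shaders.
public final class BlendTextureBindingSketch {
    public static void bindExtraTexture(int program, int extraTextureId) {
        int location = GLES20.glGetUniformLocation(program, "uExtraTexture");
        GLES20.glActiveTexture(GLES20.GL_TEXTURE1);   // unit 0 is already used by uTexture
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, extraTextureId);
        GLES20.glUniform1i(location, 1);              // tell the sampler to read from unit 1
    }
}

The same binding pattern applies whether the main texture on unit 0 is a plain GL_TEXTURE_2D or a GL_TEXTURE_EXTERNAL_OES camera texture; only the sampler type declared in the shader changes.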
-------------------------------------------------------------------------------- /app/src/main/res/raw/fragment_shader_ext_blur.glsl: -------------------------------------------------------------------------------- 1 | #extension GL_OES_EGL_image_external : require 2 | #define SAMPLES 9 3 | precision highp float; 4 | uniform samplerExternalOES uTexture; 5 | 6 | varying vec2 vTextureCoord; 7 | varying vec2 vBlurTextureCoord[SAMPLES]; 8 | 9 | void main() 10 | { 11 | vec3 sum = vec3(0.0); 12 | vec4 fragColor = texture2D(uTexture,vTextureCoord); 13 | 14 | 15 | sum += texture2D(uTexture, vBlurTextureCoord[0]).rgb * 0.05; 16 | sum += texture2D(uTexture, vBlurTextureCoord[1]).rgb * 0.09; 17 | sum += texture2D(uTexture, vBlurTextureCoord[2]).rgb * 0.12; 18 | sum += texture2D(uTexture, vBlurTextureCoord[3]).rgb * 0.15; 19 | sum += texture2D(uTexture, vBlurTextureCoord[4]).rgb * 0.18; 20 | sum += texture2D(uTexture, vBlurTextureCoord[5]).rgb * 0.15; 21 | sum += texture2D(uTexture, vBlurTextureCoord[6]).rgb * 0.12; 22 | sum += texture2D(uTexture, vBlurTextureCoord[7]).rgb * 0.09; 23 | sum += texture2D(uTexture, vBlurTextureCoord[8]).rgb * 0.05; 24 | 25 | gl_FragColor = vec4(sum,fragColor.a); 26 | } -------------------------------------------------------------------------------- /app/src/main/res/raw/fragment_shader_ext_bw.glsl: -------------------------------------------------------------------------------- 1 | #extension GL_OES_EGL_image_external : require 2 | precision mediump float; 3 | 4 | varying vec2 vTextureCoord; 5 | uniform samplerExternalOES uTexture; 6 | 7 | void main() { 8 | vec4 tc = texture2D(uTexture, vTextureCoord); 9 | float color = tc.r * 0.3 + tc.g * 0.59 + tc.b * 0.11; 10 | gl_FragColor = vec4(color, color, color, 1.0); 11 | } -------------------------------------------------------------------------------- /app/src/main/res/raw/fragment_shader_ext_kernel.glsl: -------------------------------------------------------------------------------- 1 | #extension GL_OES_EGL_image_external : require 2 | #define KERNEL_SIZE 9 3 | 4 | precision highp float; //指定默认精度 5 | 6 | varying vec2 vTextureCoord; 7 | uniform samplerExternalOES uTexture; 8 | 9 | uniform float uKernel[KERNEL_SIZE]; 10 | uniform vec2 uTexOffset[KERNEL_SIZE]; 11 | uniform float uColorAdjust; 12 | 13 | void main() { 14 | int i = 0; 15 | vec4 sum = vec4(0.0); 16 | 17 | for (i = 0; i < KERNEL_SIZE; i++) { 18 | vec4 texc = texture2D(uTexture, vTextureCoord + uTexOffset[i]); 19 | sum += texc * uKernel[i]; 20 | } 21 | sum += uColorAdjust; 22 | 23 | gl_FragColor = sum; 24 | } -------------------------------------------------------------------------------- /app/src/main/res/raw/fragment_shader_ext_night.glsl: -------------------------------------------------------------------------------- 1 | #extension GL_OES_EGL_image_external : require 2 | precision mediump float; 3 | 4 | varying vec2 vTextureCoord; 5 | uniform samplerExternalOES uTexture; 6 | 7 | void main() { 8 | vec4 tc = texture2D(uTexture, vTextureCoord); 9 | float color = ((tc.r * 0.3 + tc.g * 0.59 + tc.b * 0.11) - 0.5 * 1.5) + 0.8; 10 | gl_FragColor = vec4(color, color + 0.15, color, 1.0); 11 | } -------------------------------------------------------------------------------- /app/src/main/res/raw/fragment_shader_ext_tone_curve.glsl: -------------------------------------------------------------------------------- 1 | #extension GL_OES_EGL_image_external : require 2 | precision mediump float; //指定默认精度 3 | 4 | varying vec2 vTextureCoord; 5 | uniform samplerExternalOES 
uTexture; 6 | uniform sampler2D toneCurveTexture; 7 | 8 | void main() { 9 | vec4 textureColor = texture2D(uTexture, vTextureCoord); 10 | 11 | float redCurveValue = texture2D(toneCurveTexture, vec2(textureColor.r, 0.0)).r; 12 | float greenCurveValue = texture2D(toneCurveTexture, vec2(textureColor.g, 0.0)).g; 13 | float blueCurveValue = texture2D(toneCurveTexture, vec2(textureColor.b, 0.0)).b; 14 | 15 | gl_FragColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, textureColor.a); 16 | } -------------------------------------------------------------------------------- /app/src/main/res/raw/vertex_shader.glsl: -------------------------------------------------------------------------------- 1 | uniform mat4 uMVPMatrix; // MVP transform matrix (transforms the whole geometry) 2 | uniform mat4 uTexMatrix; // texture transform matrix (applies only to the texture coordinates) 3 | 4 | attribute vec4 aPosition; 5 | attribute vec4 aTextureCoord; 6 | 7 | varying vec2 vTextureCoord; 8 | 9 | void main() { 10 | gl_Position = uMVPMatrix * aPosition; 11 | vTextureCoord = (uTexMatrix * aTextureCoord).xy; 12 | } -------------------------------------------------------------------------------- /app/src/main/res/raw/vertex_shader_2d_two_input.glsl: -------------------------------------------------------------------------------- 1 | uniform mat4 uMVPMatrix; 2 | uniform mat4 uTexMatrix; 3 | 4 | attribute vec4 aPosition; 5 | 6 | attribute vec4 aTextureCoord; 7 | attribute vec4 aExtraTextureCoord; 8 | 9 | varying vec2 vTextureCoord; 10 | varying vec2 vExtraTextureCoord; 11 | 12 | void main() { 13 | gl_Position = uMVPMatrix * aPosition; 14 | vExtraTextureCoord = aExtraTextureCoord.xy; 15 | vTextureCoord = (uTexMatrix * aTextureCoord).xy; 16 | } -------------------------------------------------------------------------------- /app/src/main/res/raw/vertex_shader_blur.glsl: -------------------------------------------------------------------------------- 1 | #define SAMPLES 9 2 | 3 | uniform mat4 uMVPMatrix; // MVP transform matrix (transforms the whole geometry) 4 | uniform mat4 uTexMatrix; // texture transform matrix (applies only to the texture coordinates) 5 | 6 | uniform float uTexelWidthOffset; 7 | uniform float uTexelHeightOffset; 8 | 9 | attribute vec4 aPosition; 10 | attribute vec4 aTextureCoord; 11 | 12 | varying vec2 vTextureCoord; 13 | varying vec2 vBlurTextureCoord[SAMPLES]; 14 | 15 | 16 | void main() { 17 | gl_Position = uMVPMatrix * aPosition; 18 | vTextureCoord = (uTexMatrix * aTextureCoord).xy; 19 | 20 | int multiplier = 0; 21 | vec2 blurStep; 22 | vec2 offset = vec2(uTexelHeightOffset, uTexelWidthOffset); 23 | 24 | for (int i = 0; i < SAMPLES; i++) 25 | { 26 | multiplier = (i - ((SAMPLES-1) / 2)); 27 | // offset sample i along the blur direction 28 | blurStep = float(multiplier) * offset; 29 | vBlurTextureCoord[i] = vTextureCoord + blurStep; 30 | } 31 | } -------------------------------------------------------------------------------- /app/src/main/res/raw/vertex_shader_two_input.glsl: -------------------------------------------------------------------------------- 1 | uniform mat4 uMVPMatrix; 2 | uniform mat4 uTexMatrix; 3 | 4 | attribute vec4 aPosition; 5 | 6 | attribute vec4 aTextureCoord; 7 | attribute vec4 aExtraTextureCoord; 8 | 9 | varying vec2 vTextureCoord; 10 | varying vec2 vExtraTextureCoord; 11 | 12 | void main() { 13 | gl_Position = uMVPMatrix * aPosition; 14 | vExtraTextureCoord = vec2(aExtraTextureCoord.x, 1.0 - aExtraTextureCoord.y); // OpenGL texture coordinates and Android image coordinates have opposite Y axes; flip Y here 15 | vTextureCoord = (uTexMatrix * aTextureCoord).xy; 16 | } --------------------------------------------------------------------------------
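vertex_shader_blur.glsl above spreads SAMPLES = 9 texture coordinates along a single offset direction, so a full blur is normally rendered as two separable passes. The sketch below is hypothetical — drawQuad() and the FBO plumbing between passes are assumed, and the class is not this repository's blur filter code; only the two uniform names come from the shader:

```java
import android.opengl.GLES20;

final class BlurPassSketch {
    // Hypothetical two-pass driver for vertex_shader_blur.glsl; drawQuad() renders a
    // full-screen quad and the intermediate FBO handling is assumed to happen elsewhere.
    static void blur(int program, int texWidth, int texHeight, Runnable drawQuad) {
        int widthLoc = GLES20.glGetUniformLocation(program, "uTexelWidthOffset");
        int heightLoc = GLES20.glGetUniformLocation(program, "uTexelHeightOffset");

        // Pass 1: step by one texel via uTexelWidthOffset, into an intermediate texture (not shown).
        GLES20.glUniform1f(widthLoc, 1f / texWidth);
        GLES20.glUniform1f(heightLoc, 0f);
        drawQuad.run();

        // Pass 2: step by one texel via uTexelHeightOffset over the intermediate result.
        GLES20.glUniform1f(widthLoc, 0f);
        GLES20.glUniform1f(heightLoc, 1f / texHeight);
        drawQuad.run();
    }
}
```

Note that the shader composes its step as vec2(uTexelHeightOffset, uTexelWidthOffset), so in each pass the single non-zero offset determines which axis the nine samples spread along.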
/app/src/main/res/values/dimens.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 16dp 4 | 16dp 5 | 6 | -------------------------------------------------------------------------------- /app/src/main/res/values/strings.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | CameraFilter 4 | 开始录制 5 | 停止录制 6 | 7 | 图片处理 8 | 视频录制 9 | 10 | 普通 11 | 叠加 12 | 柔和叠加 13 | 模糊 14 | 色调曲线 15 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /app/src/main/res/values/styles.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /build.gradle: -------------------------------------------------------------------------------- 1 | // Top-level build file where you can add configuration options common to all sub-projects/modules. 2 | 3 | buildscript { 4 | repositories { 5 | jcenter() 6 | } 7 | dependencies { 8 | classpath 'com.android.tools.build:gradle:1.2.3' 9 | 10 | // NOTE: Do not place your application dependencies here; they belong 11 | // in the individual module build.gradle files 12 | } 13 | } 14 | 15 | allprojects { 16 | repositories { 17 | jcenter() 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /gradle.properties: -------------------------------------------------------------------------------- 1 | # Project-wide Gradle settings. 2 | 3 | # IDE (e.g. Android Studio) users: 4 | # Settings specified in this file will override any Gradle settings 5 | # configured through the IDE. 6 | 7 | # For more details on how to configure your build environment visit 8 | # http://www.gradle.org/docs/current/userguide/build_environment.html 9 | 10 | # Specifies the JVM arguments used for the daemon process. 11 | # The setting is particularly useful for tweaking memory settings. 12 | # Default value: -Xmx10248m -XX:MaxPermSize=256m 13 | # org.gradle.jvmargs=-Xmx2048m -XX:MaxPermSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8 14 | 15 | # When configured, Gradle will run in incubating parallel mode. 16 | # This option should only be used with decoupled projects. 
More details, visit 17 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects 18 | # org.gradle.parallel=true -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ongakuer/CameraFilter/b8fde13626122e57d83c5eaf38b24192691b766e/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | #Wed May 20 19:30:49 CST 2015 2 | distributionBase=GRADLE_USER_HOME 3 | distributionPath=wrapper/dists 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | distributionUrl=https\://services.gradle.org/distributions/gradle-2.2.1-all.zip 7 | -------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ############################################################################## 4 | ## 5 | ## Gradle start up script for UN*X 6 | ## 7 | ############################################################################## 8 | 9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 10 | DEFAULT_JVM_OPTS="" 11 | 12 | APP_NAME="Gradle" 13 | APP_BASE_NAME=`basename "$0"` 14 | 15 | # Use the maximum available, or set MAX_FD != -1 to use that value. 16 | MAX_FD="maximum" 17 | 18 | warn ( ) { 19 | echo "$*" 20 | } 21 | 22 | die ( ) { 23 | echo 24 | echo "$*" 25 | echo 26 | exit 1 27 | } 28 | 29 | # OS specific support (must be 'true' or 'false'). 30 | cygwin=false 31 | msys=false 32 | darwin=false 33 | case "`uname`" in 34 | CYGWIN* ) 35 | cygwin=true 36 | ;; 37 | Darwin* ) 38 | darwin=true 39 | ;; 40 | MINGW* ) 41 | msys=true 42 | ;; 43 | esac 44 | 45 | # For Cygwin, ensure paths are in UNIX format before anything is touched. 46 | if $cygwin ; then 47 | [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"` 48 | fi 49 | 50 | # Attempt to set APP_HOME 51 | # Resolve links: $0 may be a link 52 | PRG="$0" 53 | # Need this for relative symlinks. 54 | while [ -h "$PRG" ] ; do 55 | ls=`ls -ld "$PRG"` 56 | link=`expr "$ls" : '.*-> \(.*\)$'` 57 | if expr "$link" : '/.*' > /dev/null; then 58 | PRG="$link" 59 | else 60 | PRG=`dirname "$PRG"`"/$link" 61 | fi 62 | done 63 | SAVED="`pwd`" 64 | cd "`dirname \"$PRG\"`/" >&- 65 | APP_HOME="`pwd -P`" 66 | cd "$SAVED" >&- 67 | 68 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 69 | 70 | # Determine the Java command to use to start the JVM. 71 | if [ -n "$JAVA_HOME" ] ; then 72 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 73 | # IBM's JDK on AIX uses strange locations for the executables 74 | JAVACMD="$JAVA_HOME/jre/sh/java" 75 | else 76 | JAVACMD="$JAVA_HOME/bin/java" 77 | fi 78 | if [ ! -x "$JAVACMD" ] ; then 79 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 80 | 81 | Please set the JAVA_HOME variable in your environment to match the 82 | location of your Java installation." 83 | fi 84 | else 85 | JAVACMD="java" 86 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 87 | 88 | Please set the JAVA_HOME variable in your environment to match the 89 | location of your Java installation." 
90 | fi 91 | 92 | # Increase the maximum file descriptors if we can. 93 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then 94 | MAX_FD_LIMIT=`ulimit -H -n` 95 | if [ $? -eq 0 ] ; then 96 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 97 | MAX_FD="$MAX_FD_LIMIT" 98 | fi 99 | ulimit -n $MAX_FD 100 | if [ $? -ne 0 ] ; then 101 | warn "Could not set maximum file descriptor limit: $MAX_FD" 102 | fi 103 | else 104 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 105 | fi 106 | fi 107 | 108 | # For Darwin, add options to specify how the application appears in the dock 109 | if $darwin; then 110 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 111 | fi 112 | 113 | # For Cygwin, switch paths to Windows format before running java 114 | if $cygwin ; then 115 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 116 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 117 | 118 | # We build the pattern for arguments to be converted via cygpath 119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 120 | SEP="" 121 | for dir in $ROOTDIRSRAW ; do 122 | ROOTDIRS="$ROOTDIRS$SEP$dir" 123 | SEP="|" 124 | done 125 | OURCYGPATTERN="(^($ROOTDIRS))" 126 | # Add a user-defined pattern to the cygpath arguments 127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 129 | fi 130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 131 | i=0 132 | for arg in "$@" ; do 133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 135 | 136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 138 | else 139 | eval `echo args$i`="\"$arg\"" 140 | fi 141 | i=$((i+1)) 142 | done 143 | case $i in 144 | (0) set -- ;; 145 | (1) set -- "$args0" ;; 146 | (2) set -- "$args0" "$args1" ;; 147 | (3) set -- "$args0" "$args1" "$args2" ;; 148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;; 149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 154 | esac 155 | fi 156 | 157 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules 158 | function splitJvmOpts() { 159 | JVM_OPTS=("$@") 160 | } 161 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS 162 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME" 163 | 164 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@" 165 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @if "%DEBUG%" == "" @echo off 2 | @rem ########################################################################## 3 | @rem 4 | @rem Gradle startup script for Windows 5 | @rem 6 | @rem ########################################################################## 7 | 8 | @rem Set local scope for the variables with windows NT shell 9 | if "%OS%"=="Windows_NT" setlocal 10 | 11 | @rem Add default JVM options 
here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 12 | set DEFAULT_JVM_OPTS= 13 | 14 | set DIRNAME=%~dp0 15 | if "%DIRNAME%" == "" set DIRNAME=. 16 | set APP_BASE_NAME=%~n0 17 | set APP_HOME=%DIRNAME% 18 | 19 | @rem Find java.exe 20 | if defined JAVA_HOME goto findJavaFromJavaHome 21 | 22 | set JAVA_EXE=java.exe 23 | %JAVA_EXE% -version >NUL 2>&1 24 | if "%ERRORLEVEL%" == "0" goto init 25 | 26 | echo. 27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 28 | echo. 29 | echo Please set the JAVA_HOME variable in your environment to match the 30 | echo location of your Java installation. 31 | 32 | goto fail 33 | 34 | :findJavaFromJavaHome 35 | set JAVA_HOME=%JAVA_HOME:"=% 36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 37 | 38 | if exist "%JAVA_EXE%" goto init 39 | 40 | echo. 41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 42 | echo. 43 | echo Please set the JAVA_HOME variable in your environment to match the 44 | echo location of your Java installation. 45 | 46 | goto fail 47 | 48 | :init 49 | @rem Get command-line arguments, handling Windowz variants 50 | 51 | if not "%OS%" == "Windows_NT" goto win9xME_args 52 | if "%@eval[2+2]" == "4" goto 4NT_args 53 | 54 | :win9xME_args 55 | @rem Slurp the command line arguments. 56 | set CMD_LINE_ARGS= 57 | set _SKIP=2 58 | 59 | :win9xME_args_slurp 60 | if "x%~1" == "x" goto execute 61 | 62 | set CMD_LINE_ARGS=%* 63 | goto execute 64 | 65 | :4NT_args 66 | @rem Get arguments from the 4NT Shell from JP Software 67 | set CMD_LINE_ARGS=%$ 68 | 69 | :execute 70 | @rem Setup the command line 71 | 72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 73 | 74 | @rem Execute Gradle 75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% 76 | 77 | :end 78 | @rem End local scope for the variables with windows NT shell 79 | if "%ERRORLEVEL%"=="0" goto mainEnd 80 | 81 | :fail 82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 83 | rem the _cmd.exe /c_ return code! 84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 85 | exit /b 1 86 | 87 | :mainEnd 88 | if "%OS%"=="Windows_NT" endlocal 89 | 90 | :omega 91 | -------------------------------------------------------------------------------- /settings.gradle: -------------------------------------------------------------------------------- 1 | include ':app' 2 | --------------------------------------------------------------------------------