├── .gitignore
├── .idea
│   ├── compiler.xml
│   ├── copyright
│   │   └── profiles_settings.xml
│   ├── encodings.xml
│   ├── gradle.xml
│   ├── misc.xml
│   ├── modules.xml
│   └── vcs.xml
├── LICENSE.txt
├── README.md
├── app
│   ├── .gitignore
│   ├── build.gradle
│   ├── proguard-rules.pro
│   └── src
│       ├── androidTest
│       │   └── java
│       │       └── z0kai
│       │           └── filtercamera
│       │               └── ExampleInstrumentedTest.java
│       ├── main
│       │   ├── AndroidManifest.xml
│       │   ├── assets
│       │   │   └── 7_zidi.acv
│       │   ├── java
│       │   │   └── z0kai
│       │   │       └── filtercamera
│       │   │           ├── BitmapActivity.java
│       │   │           ├── CameraActivity.java
│       │   │           ├── CameraGLSurfaceView.java
│       │   │           ├── CameraInstance.java
│       │   │           └── MainActivity.java
│       │   └── res
│       │       ├── layout
│       │       │   ├── activity_bitmap.xml
│       │       │   ├── activity_camera.xml
│       │       │   ├── activity_main.xml
│       │       │   └── content_main.xml
│       │       ├── mipmap-hdpi
│       │       │   └── ic_launcher.png
│       │       ├── mipmap-mdpi
│       │       │   └── ic_launcher.png
│       │       ├── mipmap-xhdpi
│       │       │   └── ic_launcher.png
│       │       ├── mipmap-xxhdpi
│       │       │   └── ic_launcher.png
│       │       ├── mipmap-xxxhdpi
│       │       │   ├── ic_launcher.png
│       │       │   └── z_0kai_pet.png
│       │       ├── values-v21
│       │       │   └── styles.xml
│       │       ├── values-w820dp
│       │       │   └── dimens.xml
│       │       └── values
│       │           ├── colors.xml
│       │           ├── dimens.xml
│       │           ├── strings.xml
│       │           └── styles.xml
│       └── test
│           └── java
│               └── z0kai
│                   └── filtercamera
│                       └── ExampleUnitTest.java
├── build.gradle
├── filterlib
│   ├── .gitignore
│   ├── build.gradle
│   ├── proguard-rules.pro
│   └── src
│       ├── androidTest
│       │   └── java
│       │       └── z0kai
│       │           └── filterlib
│       │               └── ExampleInstrumentedTest.java
│       ├── main
│       │   ├── AndroidManifest.xml
│       │   ├── java
│       │   │   └── z0kai
│       │   │       └── filterlib
│       │   │           ├── FilterRender.java
│       │   │           ├── OpenGlUtils.java
│       │   │           ├── PixelBuffer.java
│       │   │           ├── RotationUtil.java
│       │   │           └── filters
│       │   │               ├── BaseFilter.java
│       │   │               ├── BeautyFilter.java
│       │   │               ├── ColorMatrixFilter.java
│       │   │               ├── IFImageFilter.java
│       │   │               ├── IFInkwellFilter.java
│       │   │               ├── SepiaFilter.java
│       │   │               ├── ToneCurveFilter.java
│       │   │               └── VignetteFilter.java
│       │   └── res
│       │       ├── drawable
│       │       │   └── inkwell_map.png
│       │       ├── raw
│       │       │   └── beautify_fragment.glsl
│       │       └── values
│       │           └── strings.xml
│       └── test
│           └── java
│               └── z0kai
│                   └── filterlib
│                       └── ExampleUnitTest.java
├── gradle.properties
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
└── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | /local.properties
4 | /.idea
5 | .DS_Store
6 | /build
7 | /captures
8 |
--------------------------------------------------------------------------------
/.idea/compiler.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
--------------------------------------------------------------------------------
/.idea/copyright/profiles_settings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/.idea/encodings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/.idea/gradle.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
18 |
19 |
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright [yyyy] [name of copyright owner]
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # FilterCamera
2 | An Android library for applying filters to images and to the camera preview.
3 |
4 | The idea comes from [android-gpuimage](https://github.com/CyberAgent/android-gpuimage).
5 |
6 | You can add a new filter by copying one from android-gpuimage and adapting it to this project's style:
7 |
8 | `remove the definition of inputImageTexture from the shader`
9 |
10 | # License
11 |
12 | Copyright 2016 0kai
13 |
14 | Licensed under the Apache License, Version 2.0 (the "License");
15 | you may not use this file except in compliance with the License.
16 | You may obtain a copy of the License at
17 |
18 | http://www.apache.org/licenses/LICENSE-2.0
19 |
20 | Unless required by applicable law or agreed to in writing, software
21 | distributed under the License is distributed on an "AS IS" BASIS,
22 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
23 | See the License for the specific language governing permissions and
24 | limitations under the License.
25 |
--------------------------------------------------------------------------------
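
Note on the README's porting tip above: the sketch below is a hypothetical example, not a file in this repository. It assumes BaseFilter exposes a constructor that accepts a fragment-shader source string, mirroring GPUImageFilter in android-gpuimage; the actual signature lives in filterlib/src/main/java/z0kai/filterlib/filters/BaseFilter.java and should be checked first. It illustrates the one change the README calls out: the `uniform sampler2D inputImageTexture;` declaration from the original android-gpuimage shader is removed.

```java
// Hypothetical example of a filter written in this project's style.
package z0kai.filterlib.filters;

public class GrayscaleFilter extends BaseFilter {

    // Fragment shader adapted from android-gpuimage's grayscale filter.
    // Per the README, the "uniform sampler2D inputImageTexture;" declaration
    // that the original shader contains has been removed.
    private static final String GRAYSCALE_FRAGMENT_SHADER =
            "varying highp vec2 textureCoordinate;\n" +
            "const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n" +
            "void main() {\n" +
            "    lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n" +
            "    gl_FragColor = vec4(vec3(dot(textureColor.rgb, W)), textureColor.a);\n" +
            "}";

    public GrayscaleFilter() {
        // Assumed constructor taking a fragment-shader source;
        // adjust to the signature actually declared in BaseFilter.java.
        super(GRAYSCALE_FRAGMENT_SHADER);
    }
}
```

Once defined, such a filter would be used like the built-in ones, e.g. `cameraView.setFilter(new GrayscaleFilter());` as CameraActivity does, or through `FilterRender.getFilterBitmap()` as BitmapActivity does.
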
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 | apply plugin: 'com.neenbedankt.android-apt'
3 |
4 | android {
5 | compileSdkVersion 24
6 | buildToolsVersion "24.0.0"
7 | defaultConfig {
8 | applicationId "z0kai.filtercamera"
9 | minSdkVersion 15
10 | targetSdkVersion 24
11 | versionCode 1
12 | versionName "1.0"
13 | testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
14 | }
15 | buildTypes {
16 | release {
17 | minifyEnabled false
18 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
19 | }
20 | }
21 | }
22 |
23 | dependencies {
24 | compile fileTree(include: ['*.jar'], dir: 'libs')
25 | androidTestCompile('com.android.support.test.espresso:espresso-core:2.2.2', {
26 | exclude group: 'com.android.support', module: 'support-annotations'
27 | })
28 | compile project(':filterlib')
29 |
30 |
31 | compile 'com.android.support:appcompat-v7:24.0.0'
32 | compile 'com.android.support.constraint:constraint-layout:1.0.0-alpha3'
33 | compile 'com.android.support:design:24.0.0'
34 | compile 'com.jakewharton:butterknife:8.1.0'
35 | testCompile 'junit:junit:4.12'
36 | apt 'com.jakewharton:butterknife-compiler:8.1.0'
37 | }
38 |
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # By default, the flags in this file are appended to flags specified
3 | # in E:\AndroidSDK/tools/proguard/proguard-android.txt
4 | # You can edit the include path and order by changing the proguardFiles
5 | # directive in build.gradle.
6 | #
7 | # For more details, see
8 | # http://developer.android.com/guide/developing/tools/proguard.html
9 |
10 | # Add any project specific keep options here:
11 |
12 | # If your project uses WebView with JS, uncomment the following
13 | # and specify the fully qualified class name to the JavaScript interface
14 | # class:
15 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
16 | # public *;
17 | #}
18 |
--------------------------------------------------------------------------------
/app/src/androidTest/java/z0kai/filtercamera/ExampleInstrumentedTest.java:
--------------------------------------------------------------------------------
1 | package z0kai.filtercamera;
2 |
3 | import android.content.Context;
4 | import android.support.test.InstrumentationRegistry;
5 | import android.support.test.runner.AndroidJUnit4;
6 |
7 | import org.junit.Test;
8 | import org.junit.runner.RunWith;
9 |
10 | import static org.junit.Assert.*;
11 |
12 | /**
13 | * Instrumentation test, which will execute on an Android device.
14 | *
15 |  * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
16 | */
17 | @RunWith(AndroidJUnit4.class)
18 | public class ExampleInstrumentedTest {
19 | @Test
20 | public void useAppContext() throws Exception {
21 | // Context of the app under test.
22 | Context appContext = InstrumentationRegistry.getTargetContext();
23 |
24 | assertEquals("z0kai.filtercamera", appContext.getPackageName());
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 |
7 |
13 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/app/src/main/assets/7_zidi.acv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0kai/FilterCamera/c378de81e5bda00835e24a9147b697bd7d30122a/app/src/main/assets/7_zidi.acv
--------------------------------------------------------------------------------
/app/src/main/java/z0kai/filtercamera/BitmapActivity.java:
--------------------------------------------------------------------------------
1 | package z0kai.filtercamera;
2 |
3 | import android.graphics.Bitmap;
4 | import android.graphics.BitmapFactory;
5 | import android.os.Bundle;
6 | import android.support.v7.app.AppCompatActivity;
7 | import android.widget.ImageView;
8 |
9 | import butterknife.BindView;
10 | import butterknife.ButterKnife;
11 | import z0kai.filterlib.FilterRender;
12 | import z0kai.filterlib.OpenGlUtils;
13 | import z0kai.filterlib.filters.BaseFilter;
14 | import z0kai.filterlib.filters.SepiaFilter;
15 |
16 | import static z0kai.filterlib.OpenGlUtils.NO_TEXTURE;
17 |
18 | public class BitmapActivity extends AppCompatActivity {
19 |
20 | @BindView(R.id.iv_bitmap)
21 | ImageView ivBitmap;
22 |
23 | @Override
24 | protected void onCreate(Bundle savedInstanceState) {
25 | super.onCreate(savedInstanceState);
26 | setContentView(R.layout.activity_bitmap);
27 |
28 | ButterKnife.bind(this);
29 | showFilterImage();
30 | }
31 |
32 | private void showFilterImage() {
33 | Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.mipmap.z_0kai_pet);
34 | BaseFilter filter = new SepiaFilter();
35 | filter.setAsStatic();
36 | FilterRender filterRender = new FilterRender(filter);
37 | bitmap = filterRender.getFilterBitmap(bitmap);
38 | ivBitmap.setImageBitmap(bitmap);
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/app/src/main/java/z0kai/filtercamera/CameraActivity.java:
--------------------------------------------------------------------------------
1 | package z0kai.filtercamera;
2 |
3 | import android.content.Context;
4 | import android.graphics.PointF;
5 | import android.os.Bundle;
6 | import android.support.v7.app.AppCompatActivity;
7 | import android.support.v7.widget.LinearLayoutManager;
8 | import android.support.v7.widget.RecyclerView;
9 | import android.view.View;
10 | import android.view.ViewGroup;
11 | import android.widget.Button;
12 | import android.widget.LinearLayout;
13 | import android.widget.RelativeLayout;
14 |
15 | import java.io.IOException;
16 | import java.io.InputStream;
17 | import java.util.ArrayList;
18 | import java.util.List;
19 |
20 | import butterknife.BindView;
21 | import butterknife.ButterKnife;
22 | import z0kai.filterlib.filters.BaseFilter;
23 | import z0kai.filterlib.filters.BeautyFilter;
24 | import z0kai.filterlib.filters.ColorMatrixFilter;
25 | import z0kai.filterlib.filters.IFInkwellFilter;
26 | import z0kai.filterlib.filters.SepiaFilter;
27 | import z0kai.filterlib.filters.ToneCurveFilter;
28 | import z0kai.filterlib.filters.VignetteFilter;
29 |
30 | public class CameraActivity extends AppCompatActivity {
31 |
32 | @BindView(R.id.rv_filter_buttons)
33 | RecyclerView rvFilterButtons;
34 | @BindView(R.id.camera_view)
35 | CameraGLSurfaceView cameraView;
36 |
37 | List<BaseFilter> filterList;
38 | List<String> filterName;
39 |
40 | Context mContext;
41 |
42 | @Override
43 | protected void onCreate(Bundle savedInstanceState) {
44 | super.onCreate(savedInstanceState);
45 | setContentView(R.layout.activity_camera);
46 | ButterKnife.bind(this);
47 | mContext = this;
48 | initFilters();
49 | }
50 |
51 | private void initFilters() {
52 | filterList = new ArrayList<>();
53 | filterName = new ArrayList<>();
54 |
55 | addFilter("None", new BaseFilter());
56 | addFilter("Sepia", new SepiaFilter());
57 | // addFilter("Beauty 1", new BeautyFilter(getResources(), 1));
58 | // addFilter("Beauty 3", new BeautyFilter(getResources(), 3));
59 | addFilter("Beauty", new BeautyFilter(getResources(), 5));
60 |
61 | try {
62 | InputStream is = mContext.getAssets().open("7_zidi.acv");
63 | ToneCurveFilter toneCurveFilter = new ToneCurveFilter();
64 | toneCurveFilter.setFromCurveFileInputStream(is);
65 | is.close();
66 | addFilter(".Acv", toneCurveFilter);
67 | } catch (IOException e) {
68 | e.printStackTrace();
69 | }
70 |
71 | addFilter("Vignette", new VignetteFilter());
72 |
73 | addFilter("Black-White", new IFInkwellFilter(mContext));
74 |
75 | RecyclerView.LayoutManager layoutManager = new LinearLayoutManager(mContext, LinearLayoutManager.HORIZONTAL, false);
76 | rvFilterButtons.setLayoutManager(layoutManager);
77 | rvFilterButtons.setAdapter(new ButtonAdapter());
78 | }
79 |
80 | private void addFilter(String name, final BaseFilter filter) {
81 | filterName.add(name);
82 | filterList.add(filter);
83 | }
84 |
85 | private class ButtonAdapter extends RecyclerView.Adapter<ButtonAdapter.ViewHolder> {
86 |
87 | @Override
88 | public ButtonAdapter.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
89 | Button button = new Button(mContext);
90 | return new ViewHolder(button);
91 | }
92 |
93 | @Override
94 | public void onBindViewHolder(final ButtonAdapter.ViewHolder holder, int position) {
95 | holder.itemView.setText(filterName.get(position));
96 | holder.itemView.setOnClickListener(new View.OnClickListener() {
97 | @Override
98 | public void onClick(View view) {
99 | cameraView.setFilter(filterList.get(holder.getLayoutPosition()));
100 | }
101 | });
102 | }
103 |
104 | @Override
105 | public int getItemCount() {
106 | return filterName.size();
107 | }
108 |
109 | public class ViewHolder extends RecyclerView.ViewHolder {
110 |
111 | private Button itemView;
112 |
113 | public ViewHolder(Button itemView) {
114 | super(itemView);
115 | this.itemView = itemView;
116 | }
117 | }
118 | }
119 | }
120 |
--------------------------------------------------------------------------------
/app/src/main/java/z0kai/filtercamera/CameraGLSurfaceView.java:
--------------------------------------------------------------------------------
1 | package z0kai.filtercamera;
2 |
3 | import android.content.Context;
4 | import android.graphics.SurfaceTexture;
5 | import android.hardware.Camera;
6 | import android.opengl.GLES11Ext;
7 | import android.opengl.GLES20;
8 | import android.opengl.GLSurfaceView;
9 | import android.support.annotation.NonNull;
10 | import android.util.AttributeSet;
11 | import android.util.Log;
12 | import android.view.SurfaceHolder;
13 |
14 | import javax.microedition.khronos.egl.EGLConfig;
15 | import javax.microedition.khronos.opengles.GL10;
16 |
17 | import z0kai.filterlib.FilterRender;
18 | import z0kai.filterlib.RotationUtil;
19 | import z0kai.filterlib.filters.BaseFilter;
20 | import z0kai.filterlib.filters.SepiaFilter;
21 |
22 | /**
23 | * Created by Z0Kai on 2016/7/4.
24 | */
25 |
26 | public class CameraGLSurfaceView extends GLSurfaceView implements SurfaceTexture.OnFrameAvailableListener {
27 | private static final String TAG = CameraGLSurfaceView.class.getSimpleName();
28 | Context mContext;
29 | FilterRender mFilterRender;
30 |
31 | public CameraGLSurfaceView(Context context, AttributeSet attrs) {
32 | super(context, attrs);
33 | mContext = context;
34 | mFilterRender = new FilterRender(new BaseFilter());
35 | CameraInstance.getInstance().tryOpenCamera(null, Camera.CameraInfo.CAMERA_FACING_FRONT);
36 | mFilterRender.setUpCamera(CameraInstance.getInstance().getCameraDevice(), true);
37 | mFilterRender.setGLSurfaceView(this);
38 | }
39 |
40 | @Override
41 | public void surfaceDestroyed(SurfaceHolder holder) {
42 | super.surfaceDestroyed(holder);
43 | CameraInstance.getInstance().stopCamera();
44 | }
45 |
46 | @Override
47 | public void onPause() {
48 | super.onPause();
49 | CameraInstance.getInstance().stopCamera();
50 | }
51 |
52 | @Override
53 | public void onFrameAvailable(SurfaceTexture surfaceTexture) {
54 | this.requestRender();
55 | }
56 |
57 | public void setFilter(@NonNull BaseFilter filter) {
58 | mFilterRender.setFilter(filter);
59 | }
60 |
61 | }
62 |
--------------------------------------------------------------------------------
/app/src/main/java/z0kai/filtercamera/CameraInstance.java:
--------------------------------------------------------------------------------
1 | package z0kai.filtercamera;
2 |
3 | import android.graphics.PixelFormat;
4 | import android.graphics.Rect;
5 | import android.graphics.SurfaceTexture;
6 | import android.hardware.Camera;
7 | import android.os.Build;
8 | import android.util.Log;
9 |
10 | import java.io.IOException;
11 | import java.util.ArrayList;
12 | import java.util.Collections;
13 | import java.util.Comparator;
14 | import java.util.List;
15 |
16 | // Camera is used only as a singleton
17 | public class CameraInstance {
18 | public static final String LOG_TAG = CameraInstance.class.getSimpleName();
19 |
20 | private static final String ASSERT_MSG = "CameraDevice is null! Please check.";
21 |
22 | private Camera mCameraDevice;
23 | private Camera.Parameters mParams;
24 |
25 | public static final int DEFAULT_PREVIEW_RATE = 30;
26 |
27 |
28 | private boolean mIsPreviewing = false;
29 |
30 | private int mDefaultCameraID = -1;
31 |
32 | private static CameraInstance mThisInstance;
33 | private int mPreviewWidth;
34 | private int mPreviewHeight;
35 |
36 | private int mPictureWidth = 1000;
37 | private int mPictureHeight = 1000;
38 |
39 | private int mPreferPreviewWidth = 640;
40 | private int mPreferPreviewHeight = 640;
41 |
42 | private int mFacing = 0;
43 |
44 | private CameraInstance() {}
45 |
46 | public static synchronized CameraInstance getInstance() {
47 | if(mThisInstance == null) {
48 | mThisInstance = new CameraInstance();
49 | }
50 | return mThisInstance;
51 | }
52 |
53 | public boolean isPreviewing() { return mIsPreviewing; }
54 |
55 | public int previewWidth() { return mPreviewWidth; }
56 | public int previewHeight() { return mPreviewHeight; }
57 | public int pictureWidth() { return mPictureWidth; }
58 | public int pictureHeight() { return mPictureHeight; }
59 |
60 | public void setPreferPreviewSize(int w, int h) {
61 | mPreferPreviewHeight = w;
62 | mPreferPreviewWidth = h;
63 | }
64 |
65 | public interface CameraOpenCallback {
66 | void cameraReady();
67 | }
68 |
69 | public boolean tryOpenCamera(CameraOpenCallback callback) {
70 | return tryOpenCamera(callback, Camera.CameraInfo.CAMERA_FACING_FRONT);
71 | }
72 |
73 | public int getFacing() {
74 | return mFacing;
75 | }
76 |
77 | public synchronized boolean tryOpenCamera(CameraOpenCallback callback, int facing) {
78 | Log.i(LOG_TAG, "try open camera...");
79 |
80 | try
81 | {
82 | if(Build.VERSION.SDK_INT > Build.VERSION_CODES.FROYO)
83 | {
84 | int numberOfCameras = Camera.getNumberOfCameras();
85 |
86 | Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
87 | for (int i = 0; i < numberOfCameras; i++) {
88 | Camera.getCameraInfo(i, cameraInfo);
89 | if (cameraInfo.facing == facing) {
90 | mDefaultCameraID = i;
91 | mFacing = facing;
92 | }
93 | }
94 | }
95 | stopPreview();
96 | if(mCameraDevice != null)
97 | mCameraDevice.release();
98 |
99 | if(mDefaultCameraID >= 0) {
100 | mCameraDevice = Camera.open(mDefaultCameraID);
101 | }
102 | else {
103 | mCameraDevice = Camera.open();
104 | mFacing = Camera.CameraInfo.CAMERA_FACING_BACK; //default: back facing
105 | }
106 | }
107 | catch(Exception e)
108 | {
109 | Log.e(LOG_TAG, "Open Camera Failed!");
110 | e.printStackTrace();
111 | mCameraDevice = null;
112 | return false;
113 | }
114 |
115 | if(mCameraDevice != null) {
116 | Log.i(LOG_TAG, "Camera opened!");
117 |
118 | try {
119 | initCamera(DEFAULT_PREVIEW_RATE);
120 | } catch (Exception e) {
121 | mCameraDevice.release();
122 | mCameraDevice = null;
123 | return false;
124 | }
125 |
126 | if (callback != null) {
127 | callback.cameraReady();
128 | }
129 |
130 | return true;
131 | }
132 |
133 | return false;
134 | }
135 |
136 | public synchronized void stopCamera() {
137 | if(mCameraDevice != null) {
138 | mIsPreviewing = false;
139 | mCameraDevice.stopPreview();
140 | mCameraDevice.setPreviewCallback(null);
141 | mCameraDevice.release();
142 | mCameraDevice = null;
143 | }
144 | }
145 |
146 | public boolean isCameraOpened() {
147 | return mCameraDevice != null;
148 | }
149 |
150 | public synchronized void startPreview(SurfaceTexture texture) {
151 | Log.i(LOG_TAG, "Camera startPreview...");
152 | if(mIsPreviewing) {
153 | Log.e(LOG_TAG, "Err: camera is previewing...");
154 | // stopPreview();
155 | return ;
156 | }
157 |
158 | if(mCameraDevice != null) {
159 | try {
160 | mCameraDevice.setPreviewTexture(texture);
161 | } catch (IOException e) {
162 | e.printStackTrace();
163 | }
164 |
165 | mCameraDevice.startPreview();
166 | mIsPreviewing = true;
167 | }
168 | }
169 |
170 | public synchronized void stopPreview() {
171 | if(mIsPreviewing && mCameraDevice != null) {
172 | Log.i(LOG_TAG, "Camera stopPreview...");
173 | mIsPreviewing = false;
174 | mCameraDevice.stopPreview();
175 | }
176 | }
177 |
178 | public synchronized Camera.Parameters getParams() {
179 | if(mCameraDevice != null)
180 | return mCameraDevice.getParameters();
181 | assert mCameraDevice != null : ASSERT_MSG;
182 | return null;
183 | }
184 |
185 | public synchronized void setParams(Camera.Parameters param) {
186 | if(mCameraDevice != null) {
187 | mParams = param;
188 | mCameraDevice.setParameters(mParams);
189 | }
190 | assert mCameraDevice != null : ASSERT_MSG;
191 | }
192 |
193 | public Camera getCameraDevice() {
194 | return mCameraDevice;
195 | }
196 |
197 | // comparator that sorts sizes from largest to smallest
198 | private Comparator<Camera.Size> comparatorBigger = new Comparator<Camera.Size>() {
199 | @Override
200 | public int compare(Camera.Size lhs, Camera.Size rhs) {
201 | int w = rhs.width - lhs.width;
202 | if(w == 0)
203 | return rhs.height - lhs.height;
204 | return w;
205 | }
206 | };
207 |
208 | // comparator that sorts sizes from smallest to largest
209 | private Comparator<Camera.Size> comparatorSmaller = new Comparator<Camera.Size>() {
210 | @Override
211 | public int compare(Camera.Size lhs, Camera.Size rhs) {
212 | int w = lhs.width - rhs.width;
213 | if(w == 0)
214 | return lhs.height - rhs.height;
215 | return w;
216 | }
217 | };
218 |
219 | public void initCamera(int previewRate) {
220 | if(mCameraDevice == null) {
221 | Log.e(LOG_TAG, "initCamera: Camera is not opened!");
222 | return;
223 | }
224 |
225 | mParams = mCameraDevice.getParameters();
226 | mParams.setRotation(mFacing == Camera.CameraInfo.CAMERA_FACING_BACK ? 90 : 270);
227 | mCameraDevice.setDisplayOrientation(90);
228 | List<Integer> supportedPictureFormats = mParams.getSupportedPictureFormats();
229 |
230 | for(int fmt : supportedPictureFormats) {
231 | Log.i(LOG_TAG, String.format("Picture Format: %x", fmt));
232 | }
233 |
234 | mParams.setPictureFormat(PixelFormat.JPEG);
235 |
236 | List<Camera.Size> picSizes = mParams.getSupportedPictureSizes();
237 | Camera.Size picSz = null;
238 |
239 | Collections.sort(picSizes, comparatorBigger);
240 |
241 | for(Camera.Size sz : picSizes) {
242 | Log.i(LOG_TAG, String.format("Supported picture size: %d x %d", sz.width, sz.height));
243 | if(picSz == null || (sz.width >= mPictureWidth && sz.height >= mPictureHeight)) {
244 | picSz = sz;
245 | }
246 | }
247 |
248 | List<Camera.Size> prevSizes = mParams.getSupportedPreviewSizes();
249 | Camera.Size prevSz = null;
250 |
251 | Collections.sort(prevSizes, comparatorBigger);
252 |
253 | for(Camera.Size sz : prevSizes) {
254 | if(prevSz == null || (sz.width >= mPreferPreviewWidth && sz.height >= mPreferPreviewHeight)) {
255 | prevSz = sz;
256 | Log.i(LOG_TAG, String.format("Supported preview size: %d x %d", sz.width, sz.height));
257 | }
258 | }
259 |
260 | List<Integer> frameRates = mParams.getSupportedPreviewFrameRates();
261 |
262 | int fpsMax = 0;
263 |
264 | for(Integer n : frameRates) {
265 | Log.i(LOG_TAG, "Supported frame rate: " + n);
266 | if(fpsMax < n) {
267 | fpsMax = n;
268 | }
269 | }
270 |
271 | mParams.setPreviewSize(prevSz.width, prevSz.height);
272 | mParams.setPictureSize(picSz.width, picSz.height);
273 |
274 | List<String> focusModes = mParams.getSupportedFocusModes();
275 | if(focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)){
276 | mParams.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
277 | }
278 |
279 | previewRate = fpsMax;
280 | mParams.setPreviewFrameRate(previewRate); // set the camera preview frame rate
281 | // mParams.setPreviewFpsRange(20, 60);
282 |
283 | try {
284 | mCameraDevice.setParameters(mParams);
285 | }catch (Exception e) {
286 | e.printStackTrace();
287 | }
288 |
289 |
290 | mParams = mCameraDevice.getParameters();
291 |
292 | Camera.Size szPic = mParams.getPictureSize();
293 | Camera.Size szPrev = mParams.getPreviewSize();
294 |
295 | mPreviewWidth = szPrev.width;
296 | mPreviewHeight = szPrev.height;
297 |
298 | mPictureWidth = szPic.width;
299 | mPictureHeight = szPic.height;
300 |
301 | Log.i(LOG_TAG, String.format("Camera Picture Size: %d x %d", szPic.width, szPic.height));
302 | Log.i(LOG_TAG, String.format("Camera Preview Size: %d x %d", szPrev.width, szPrev.height));
303 | }
304 |
305 | public synchronized void setFocusMode(String focusMode) {
306 |
307 | if(mCameraDevice == null)
308 | return;
309 |
310 | mParams = mCameraDevice.getParameters();
311 | List<String> focusModes = mParams.getSupportedFocusModes();
312 | if(focusModes.contains(focusMode)){
313 | mParams.setFocusMode(focusMode);
314 | }
315 | }
316 |
317 | public synchronized void setPictureSize(int width, int height, boolean isBigger) {
318 |
319 | if(mCameraDevice == null) {
320 | mPictureWidth = width;
321 | mPictureHeight = height;
322 | return;
323 | }
324 |
325 | mParams = mCameraDevice.getParameters();
326 |
327 |
328 | List<Camera.Size> picSizes = mParams.getSupportedPictureSizes();
329 | Camera.Size picSz = null;
330 |
331 | if(isBigger) {
332 | Collections.sort(picSizes, comparatorBigger);
333 | for(Camera.Size sz : picSizes) {
334 | if(picSz == null || (sz.width >= width && sz.height >= height)) {
335 | picSz = sz;
336 | }
337 | }
338 | } else {
339 | Collections.sort(picSizes, comparatorSmaller);
340 | for(Camera.Size sz : picSizes) {
341 | if(picSz == null || (sz.width <= width && sz.height <= height)) {
342 | picSz = sz;
343 | }
344 | }
345 | }
346 |
347 | mPictureWidth = picSz.width;
348 | mPictureHeight= picSz.height;
349 |
350 | try {
351 | mParams.setPictureSize(mPictureWidth, mPictureHeight);
352 | mCameraDevice.setParameters(mParams);
353 | } catch (Exception e) {
354 | e.printStackTrace();
355 | }
356 | }
357 |
358 | public void focusAtPoint(float x, float y, final Camera.AutoFocusCallback callback) {
359 | focusAtPoint(x, y, 0.2f, callback);
360 | }
361 |
362 | public synchronized void focusAtPoint(float x, float y, float radius, final Camera.AutoFocusCallback callback) {
363 | if(mCameraDevice == null) {
364 | Log.e(LOG_TAG, "Error: focus after release.");
365 | return;
366 | }
367 |
368 | mParams = mCameraDevice.getParameters();
369 |
370 | if(mParams.getMaxNumMeteringAreas() > 0) {
371 |
372 | int focusRadius = (int) (radius * 1000.0f);
373 | int left = (int) (x * 2000.0f - 1000.0f) - focusRadius;
374 | int top = (int) (y * 2000.0f - 1000.0f) - focusRadius;
375 |
376 | Rect focusArea = new Rect();
377 | focusArea.left = Math.max(left, -1000);
378 | focusArea.top = Math.max(top, -1000);
379 | focusArea.right = Math.min(left + focusRadius, 1000);
380 | focusArea.bottom = Math.min(top + focusRadius, 1000);
381 | List<Camera.Area> meteringAreas = new ArrayList<Camera.Area>();
382 | meteringAreas.add(new Camera.Area(focusArea, 800));
383 |
384 | try {
385 | mCameraDevice.cancelAutoFocus();
386 | mParams.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
387 | mParams.setFocusAreas(meteringAreas);
388 | mCameraDevice.setParameters(mParams);
389 | mCameraDevice.autoFocus(callback);
390 | } catch (Exception e) {
391 | Log.e(LOG_TAG, "Error: focusAtPoint failed: " + e.toString());
392 | }
393 | } else {
394 | Log.i(LOG_TAG, "The device does not support metering areas...");
395 | try {
396 | mCameraDevice.autoFocus(callback);
397 | } catch (Exception e) {
398 | Log.e(LOG_TAG, "Error: focusAtPoint failed: " + e.toString());
399 | }
400 | }
401 |
402 | }
403 | }
404 |
--------------------------------------------------------------------------------
/app/src/main/java/z0kai/filtercamera/MainActivity.java:
--------------------------------------------------------------------------------
1 | package z0kai.filtercamera;
2 |
3 | import android.Manifest;
4 | import android.content.Intent;
5 | import android.content.pm.PackageManager;
6 | import android.os.Bundle;
7 | import android.support.design.widget.FloatingActionButton;
8 | import android.support.design.widget.Snackbar;
9 | import android.support.v4.app.ActivityCompat;
10 | import android.support.v7.app.AppCompatActivity;
11 | import android.support.v7.widget.Toolbar;
12 | import android.view.View;
13 |
14 | import butterknife.ButterKnife;
15 | import butterknife.OnClick;
16 |
17 | public class MainActivity extends AppCompatActivity {
18 |
19 | @Override
20 | protected void onCreate(Bundle savedInstanceState) {
21 | super.onCreate(savedInstanceState);
22 | setContentView(R.layout.activity_main);
23 | Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
24 | setSupportActionBar(toolbar);
25 |
26 | FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fab);
27 | fab.setOnClickListener(new View.OnClickListener() {
28 | @Override
29 | public void onClick(View view) {
30 | Snackbar.make(view, "Replace with your own action", Snackbar.LENGTH_LONG)
31 | .setAction("Action", null).show();
32 | }
33 | });
34 |
35 | ButterKnife.bind(this);
36 |
37 | if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
38 | ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, 0);
39 | }
40 | }
41 |
42 | @OnClick(R.id.btn_camera)
43 | public void showCamera() {
44 | startActivity(new Intent(this, CameraActivity.class));
45 | }
46 |
47 | @OnClick(R.id.btn_bitmap)
48 | public void showBitmap() {
49 | startActivity(new Intent(this, BitmapActivity.class));
50 | }
51 |
52 | }
53 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_bitmap.xml:
--------------------------------------------------------------------------------
1 |
2 |
8 |
9 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_camera.xml:
--------------------------------------------------------------------------------
1 |
2 |
8 |
9 |
13 |
14 |
20 |
21 |
22 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
14 |
15 |
21 |
22 |
23 |
24 |
25 |
26 |
33 |
34 |
35 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/content_main.xml:
--------------------------------------------------------------------------------
1 |
2 |
8 |
9 |
16 |
17 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0kai/FilterCamera/c378de81e5bda00835e24a9147b697bd7d30122a/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0kai/FilterCamera/c378de81e5bda00835e24a9147b697bd7d30122a/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0kai/FilterCamera/c378de81e5bda00835e24a9147b697bd7d30122a/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0kai/FilterCamera/c378de81e5bda00835e24a9147b697bd7d30122a/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0kai/FilterCamera/c378de81e5bda00835e24a9147b697bd7d30122a/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/z_0kai_pet.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0kai/FilterCamera/c378de81e5bda00835e24a9147b697bd7d30122a/app/src/main/res/mipmap-xxxhdpi/z_0kai_pet.png
--------------------------------------------------------------------------------
/app/src/main/res/values-v21/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
9 |
10 |
--------------------------------------------------------------------------------
/app/src/main/res/values-w820dp/dimens.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 |     <!-- Example customization of dimensions originally defined in res/values/dimens.xml
3 |          (such as screen margins) for screens with more than 820dp of available width. This
4 |          would include 7" and 10" devices in landscape (~960dp and ~1280dp respectively). -->
5 |     <dimen name="activity_horizontal_margin">64dp</dimen>
6 | </resources>
7 |
--------------------------------------------------------------------------------
/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <resources>
3 |     <color name="colorPrimary">#3F51B5</color>
4 |     <color name="colorPrimaryDark">#303F9F</color>
5 |     <color name="colorAccent">#FF4081</color>
6 | </resources>
7 |
--------------------------------------------------------------------------------
/app/src/main/res/values/dimens.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 |     <!-- Default screen margins, per the Android Design guidelines. -->
3 |     <dimen name="activity_horizontal_margin">16dp</dimen>
4 |     <dimen name="activity_vertical_margin">16dp</dimen>
5 |     <dimen name="fab_margin">16dp</dimen>
6 | </resources>
7 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 |     <string name="app_name">FilterCamera</string>
3 |     <string name="action_settings">Settings</string>
4 |     <string name="title_activity_main">MainActivity</string>
5 | </resources>
6 |
--------------------------------------------------------------------------------
/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
10 |
11 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/app/src/test/java/z0kai/filtercamera/ExampleUnitTest.java:
--------------------------------------------------------------------------------
1 | package z0kai.filtercamera;
2 |
3 | import org.junit.Test;
4 |
5 | import static org.junit.Assert.*;
6 |
7 | /**
8 | * Example local unit test, which will execute on the development machine (host).
9 | *
10 |  * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
11 | */
12 | public class ExampleUnitTest {
13 | @Test
14 | public void addition_isCorrect() throws Exception {
15 | assertEquals(4, 2 + 2);
16 | }
17 | }
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 | buildscript {
3 | repositories {
4 | jcenter()
5 | }
6 | dependencies {
7 | classpath 'com.android.tools.build:gradle:2.2.0-alpha4'
8 | classpath 'com.neenbedankt.gradle.plugins:android-apt:1.8'
9 | // NOTE: Do not place your application dependencies here; they belong
10 | // in the individual module build.gradle files
11 | }
12 | }
13 | allprojects {
14 | repositories {
15 | jcenter()
16 | }
17 | }
18 | task clean(type: Delete) {
19 | delete rootProject.buildDir
20 | }
21 |
22 | dependencies {
23 | }
--------------------------------------------------------------------------------
/filterlib/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 | /externalNativeBuild
3 |
--------------------------------------------------------------------------------
/filterlib/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.library'
2 |
3 | android {
4 | compileSdkVersion 24
5 | buildToolsVersion "24.0.0"
6 |
7 | defaultConfig {
8 | minSdkVersion 15
9 | targetSdkVersion 24
10 | versionCode 1
11 | versionName "0.1.0"
12 | }
13 | buildTypes {
14 | release {
15 | minifyEnabled false
16 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
17 | }
18 | }
19 | }
20 |
21 | dependencies {
22 | compile fileTree(dir: 'libs', include: ['*.jar'])
23 | testCompile 'junit:junit:4.12'
24 | }
25 |
--------------------------------------------------------------------------------
/filterlib/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # By default, the flags in this file are appended to flags specified
3 | # in E:\AndroidSDK/tools/proguard/proguard-android.txt
4 | # You can edit the include path and order by changing the proguardFiles
5 | # directive in build.gradle.
6 | #
7 | # For more details, see
8 | # http://developer.android.com/guide/developing/tools/proguard.html
9 |
10 | # Add any project specific keep options here:
11 |
12 | # If your project uses WebView with JS, uncomment the following
13 | # and specify the fully qualified class name to the JavaScript interface
14 | # class:
15 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
16 | # public *;
17 | #}
18 |
--------------------------------------------------------------------------------
/filterlib/src/androidTest/java/z0kai/filterlib/ExampleInstrumentedTest.java:
--------------------------------------------------------------------------------
1 | package z0kai.filterlib;
2 |
3 | import android.content.Context;
4 | import android.support.test.InstrumentationRegistry;
5 | import android.support.test.runner.AndroidJUnit4;
6 |
7 | import org.junit.Test;
8 | import org.junit.runner.RunWith;
9 |
10 | import static org.junit.Assert.*;
11 |
12 | /**
13 | * Instrumentation test, which will execute on an Android device.
14 | *
15 |  * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
16 | */
17 | @RunWith(AndroidJUnit4.class)
18 | public class ExampleInstrumentedTest {
19 | @Test
20 | public void useAppContext() throws Exception {
21 | // Context of the app under test.
22 | Context appContext = InstrumentationRegistry.getTargetContext();
23 |
24 | assertEquals("z0kai.filterlib.test", appContext.getPackageName());
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/filterlib/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/filterlib/src/main/java/z0kai/filterlib/FilterRender.java:
--------------------------------------------------------------------------------
1 | package z0kai.filterlib;
2 |
3 | import android.graphics.Bitmap;
4 | import android.graphics.SurfaceTexture;
5 | import android.hardware.Camera;
6 | import android.opengl.GLES11Ext;
7 | import android.opengl.GLES20;
8 | import android.opengl.GLSurfaceView;
9 |
10 | import java.io.IOException;
11 | import java.nio.ByteBuffer;
12 | import java.nio.ByteOrder;
13 | import java.nio.FloatBuffer;
14 | import java.nio.ShortBuffer;
15 | import java.util.LinkedList;
16 | import java.util.Queue;
17 |
18 | import javax.microedition.khronos.egl.EGLConfig;
19 | import javax.microedition.khronos.opengles.GL10;
20 |
21 | import z0kai.filterlib.filters.BaseFilter;
22 |
23 | /**
24 | * Created by Z0Kai on 2016/7/4.
25 | */
26 |
27 | public class FilterRender implements GLSurfaceView.Renderer {
28 |
29 | private final LinkedList<Runnable> mRunOnDraw;
30 |
31 | private FloatBuffer vertexBuffer, textureVerticesBuffer;
32 | private ShortBuffer drawListBuffer;
33 | private int mPositionHandle;
34 | private int mTextureCoordHandle;
35 |
36 | private short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; // order to draw vertices
37 |
38 | // number of coordinates per vertex in this array
39 | private static final int COORDS_PER_VERTEX = 2;
40 |
41 | private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
42 |
43 | static float squareCoords[] = {
44 | -1.0f, 1.0f,
45 | -1.0f, -1.0f,
46 | 1.0f, -1.0f,
47 | 1.0f, 1.0f,
48 | };
49 |
50 | private int texture;
51 |
52 | private BaseFilter mFilter;
53 | private int mProgram;
54 | private int mOutputWidth, mOutputHeight;
55 |
56 | private GLSurfaceView mGLSurfaceView;
57 | private SurfaceTexture mSurface;
58 | int mTextureID = OpenGlUtils.NO_TEXTURE;
59 |
60 | private Camera mCamera;
61 |
62 | /**
63 |  * @param filter must not be null
64 | */
65 | public FilterRender(BaseFilter filter) {
66 | mRunOnDraw = new LinkedList<>();
67 | mFilter = filter;
68 | }
69 |
70 | public void init(int texture)
71 | {
72 | this.texture = texture;
73 | // initialize vertex byte buffer for shape coordinates
74 | ByteBuffer bb = ByteBuffer.allocateDirect(squareCoords.length * 4);
75 | bb.order(ByteOrder.nativeOrder());
76 | vertexBuffer = bb.asFloatBuffer();
77 | vertexBuffer.put(squareCoords);
78 | vertexBuffer.position(0);
79 |
80 | // initialize byte buffer for the draw list
81 | ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
82 | dlb.order(ByteOrder.nativeOrder());
83 | drawListBuffer = dlb.asShortBuffer();
84 | drawListBuffer.put(drawOrder);
85 | drawListBuffer.position(0);
86 |
87 | ByteBuffer bb2 = ByteBuffer.allocateDirect(8 * 4);
88 | bb2.order(ByteOrder.nativeOrder());
89 | textureVerticesBuffer = bb2.asFloatBuffer();
90 | // textureVerticesBuffer.put(textureVertices);
91 | // textureVerticesBuffer.position(0);
92 |
93 | if (mCamera == null) {
94 | setRotation(RotationUtil.TEXTURE_NO_ROTATION);
95 | }
96 |
97 | mFilter.init();
98 | mProgram = mFilter.getProgram();
99 | }
100 |
101 | public void setRotation(final float textureVertices[]) {
102 | runOnDraw(new Runnable() {
103 | @Override
104 | public void run() {
105 | textureVerticesBuffer.clear();
106 | textureVerticesBuffer.put(textureVertices);
107 | textureVerticesBuffer.position(0);
108 | }
109 | });
110 | }
111 |
112 | public void setUpCamera(Camera camera, boolean isFacingFront) {
113 | mCamera = camera;
114 |
115 | runOnDraw(new Runnable() {
116 | @Override
117 | public void run() {
118 | mTextureID = OpenGlUtils.createTextureID();
119 | mSurface = new SurfaceTexture(mTextureID);
120 | init(mTextureID);
121 | mSurface.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
122 | @Override
123 | public void onFrameAvailable(SurfaceTexture surfaceTexture) {
124 | if (mGLSurfaceView != null) {
125 | mGLSurfaceView.requestRender();
126 | }
127 | }
128 | });
129 | try {
130 | mCamera.setPreviewTexture(mSurface);
131 | } catch (IOException e) {
132 | e.printStackTrace();
133 | }
134 | mCamera.startPreview();
135 | }
136 | });
137 |
138 | if (!isFacingFront) {
139 | setRotation(RotationUtil.TEXTURE_ROTATED_90);
140 | } else {
141 | setRotation(RotationUtil.TEXTURE_ROTATED_270_H_FLIP);
142 | }
143 | if (mGLSurfaceView != null) {
144 | mGLSurfaceView.requestRender();
145 | }
146 | }
147 |
148 | public void setGLSurfaceView(GLSurfaceView surfaceView) {
149 | mGLSurfaceView = surfaceView;
150 | mGLSurfaceView.setEGLContextClientVersion(2);
151 | mGLSurfaceView.setRenderer(this);
152 | mGLSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
153 | mGLSurfaceView.requestRender();
154 | }
155 |
156 | public void setOutputSize(int width, int height) {
157 | mOutputWidth = width;
158 | mOutputHeight = height;
159 | if (mFilter != null) {
160 | mFilter.onOutputSizeChanged(width, height);
161 | }
162 | }
163 |
164 | public void setFilter(final BaseFilter filter) {
165 | if (filter == mFilter) {
166 | return;
167 | }
168 | runOnDraw(new Runnable() {
169 | @Override
170 | public void run() {
171 | BaseFilter oldFilter = mFilter;
172 | mFilter = filter;
173 | if (oldFilter != null) {
174 | oldFilter.destroy();
175 | }
176 | mFilter.init();
177 | mProgram = mFilter.getProgram();
178 | GLES20.glUseProgram(mProgram);
179 | mFilter.onOutputSizeChanged(mOutputWidth, mOutputHeight);
180 | }
181 | });
182 | }
183 |
184 | public BaseFilter getFilter() {
185 | return mFilter;
186 | }
187 |
188 | public void deleteImage() {
189 | runOnDraw(new Runnable() {
190 |
191 | @Override
192 | public void run() {
193 | GLES20.glDeleteTextures(1, new int[]{
194 | mTextureID
195 | }, 0);
196 | mTextureID = OpenGlUtils.NO_TEXTURE;
197 | }
198 | });
199 | if (mGLSurfaceView != null) mGLSurfaceView.requestRender();
200 | }
201 |
202 | public void draw()
203 | {
204 | GLES20.glUseProgram(mProgram);
205 | mFilter.runPendingOnDrawTasks();
206 |
207 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
208 | if (mFilter.isForCamera()) {
209 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture);
210 | } else {
211 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture);
212 | }
213 | mFilter.onDrawArraysPre();
214 |
215 | // get handle to vertex shader's vPosition member
216 | mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
217 |
218 | // Enable a handle to the triangle vertices
219 | GLES20.glEnableVertexAttribArray(mPositionHandle);
220 |
221 | // Prepare the coordinate data
222 | GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
223 |
224 | mTextureCoordHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
225 | GLES20.glEnableVertexAttribArray(mTextureCoordHandle);
226 |
227 | // textureVerticesBuffer.clear();
228 | // textureVerticesBuffer.put( transformTextureCoordinates( textureVertices, mtx ));
229 | // textureVerticesBuffer.position(0);
230 | GLES20.glVertexAttribPointer(mTextureCoordHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, textureVerticesBuffer);
231 |
232 | GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
233 |
234 | // Disable vertex array
235 | GLES20.glDisableVertexAttribArray(mPositionHandle);
236 | GLES20.glDisableVertexAttribArray(mTextureCoordHandle);
237 | }
238 |
239 | public Bitmap getFilterBitmap(final Bitmap bitmap) {
240 | PixelBuffer buffer = new PixelBuffer(bitmap.getWidth(), bitmap.getHeight());
241 | buffer.setRenderer(this);
242 | runOnDraw(new Runnable() {
243 | @Override
244 | public void run() {
245 | mTextureID = OpenGlUtils.loadTexture(bitmap, OpenGlUtils.NO_TEXTURE, true);
246 | mSurface = new SurfaceTexture(mTextureID);
247 | init(mTextureID);
248 | }
249 | });
250 | Bitmap result = buffer.getBitmap();
251 | // mFilter.destroy();
252 | buffer.destroy();
253 | return result;
254 | }
255 |
256 | @Override
257 | public void onSurfaceCreated(GL10 gl, EGLConfig config) {
258 | }
259 |
260 | @Override
261 | public void onSurfaceChanged(GL10 gl, int width, int height) {
262 | GLES20.glViewport(0, 0, width, height);
263 | setOutputSize(width, height);
264 | }
265 |
266 | @Override
267 | public void onDrawFrame(GL10 gl) {
268 | GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
269 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
270 | runAll(mRunOnDraw);
271 | if (mSurface == null) return;
272 | mSurface.updateTexImage();
273 | // float[] mtx = new float[16];
274 | // mSurface.getTransformMatrix(mtx);
275 | draw();
276 | }
277 |
278 | private void runAll(Queue<Runnable> queue) {
279 | synchronized (queue) {
280 | while (!queue.isEmpty()) {
281 | queue.poll().run();
282 | }
283 | }
284 | }
285 |
286 | protected void runOnDraw(final Runnable runnable) {
287 | synchronized (mRunOnDraw) {
288 | mRunOnDraw.add(runnable);
289 | }
290 | }
291 | }
292 |
293 |
--------------------------------------------------------------------------------
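
The renderer above is driven entirely through setGLSurfaceView, setUpCamera and setFilter. A minimal wiring sketch for the camera preview path follows; the GLSurfaceView, the already-opened android.hardware.Camera and the facing flag are assumed to come from the caller, and only the FilterRender calls are taken from the class above.

import android.hardware.Camera;
import android.opengl.GLSurfaceView;

import z0kai.filterlib.FilterRender;
import z0kai.filterlib.filters.BaseFilter;
import z0kai.filterlib.filters.SepiaFilter;

public class PreviewWiring {
    // Attaches a FilterRender to an existing GLSurfaceView and an already-opened camera.
    public static FilterRender attach(GLSurfaceView view, Camera camera, boolean facingFront) {
        BaseFilter initial = new SepiaFilter();          // any camera-capable filter; must not be null
        FilterRender render = new FilterRender(initial);
        render.setGLSurfaceView(view);                   // GLES 2 context, render-on-dirty
        render.setUpCamera(camera, facingFront);         // binds a SurfaceTexture and starts the preview
        return render;                                   // swap filters later via render.setFilter(...)
    }
}
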
/filterlib/src/main/java/z0kai/filterlib/OpenGlUtils.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2012 CyberAgent
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package z0kai.filterlib;
18 |
19 | import android.content.res.Resources;
20 | import android.graphics.Bitmap;
21 | import android.opengl.GLES11Ext;
22 | import android.opengl.GLES20;
23 | import android.opengl.GLUtils;
24 |
25 | import java.io.BufferedReader;
26 | import java.io.IOException;
27 | import java.io.InputStream;
28 | import java.io.InputStreamReader;
29 |
30 | import javax.microedition.khronos.opengles.GL10;
31 |
32 | public class OpenGlUtils {
33 | public static final int NO_TEXTURE = -1;
34 |
35 | public static int loadShader(int type, String shaderCode){
36 | // create a vertex shader type (GLES20.GL_VERTEX_SHADER)
37 | // or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
38 | int shader = GLES20.glCreateShader(type);
39 |
40 | // add the source code to the shader and compile it
41 | GLES20.glShaderSource(shader, shaderCode);
42 | GLES20.glCompileShader(shader);
43 |
44 | return shader;
45 | }
46 |
47 | public static String readShaderFromRawResource(Resources resources, final int resourceId){
48 | final InputStream inputStream = resources.openRawResource(resourceId);
49 | final InputStreamReader inputStreamReader = new InputStreamReader(
50 | inputStream);
51 | final BufferedReader bufferedReader = new BufferedReader(
52 | inputStreamReader);
53 |
54 | String nextLine;
55 | final StringBuilder body = new StringBuilder();
56 |
57 | try {
58 | while ((nextLine = bufferedReader.readLine()) != null) {
59 | body.append(nextLine);
60 | body.append('\n');
61 | }
62 | bufferedReader.close(); // close on success; failures below still return null
63 | } catch (IOException e) {
64 | return null;
65 | }
66 | return body.toString();
67 | }
68 |
69 | public static int loadTexture(Bitmap img, int usedTexId) {
70 | return loadTexture(img, usedTexId, true);
71 | }
72 |
73 | public static int loadTexture(Bitmap img, int usedTexId, boolean recycle) {
74 | int[] textures = new int[1];
75 | if (usedTexId == NO_TEXTURE) {
76 | GLES20.glGenTextures(1, textures, 0);
77 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
78 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
79 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
80 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
81 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
82 | GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, img, 0);
83 | } else {
84 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, usedTexId);
85 | GLUtils.texSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, img);
86 | textures[0] = usedTexId;
87 | }
88 |
89 | if(recycle) {
90 | img.recycle();
91 | }
92 |
93 | return textures[0];
94 | }
95 |
96 | public static int createTextureID()
97 | {
98 | int[] texture = new int[1];
99 |
100 | GLES20.glGenTextures(1, texture, 0);
101 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture[0]);
102 | GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
103 | GL10.GL_TEXTURE_MIN_FILTER,GL10.GL_LINEAR);
104 | GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
105 | GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
106 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
107 | GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
108 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
109 | GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
110 |
111 | return texture[0];
112 | }
113 | }
114 |
--------------------------------------------------------------------------------
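
loadShader above returns the shader handle without checking whether compilation succeeded, so a bad shader only shows up later as blank output. A hedged hardening sketch (the class name is invented; glGetShaderiv and glGetShaderInfoLog are standard GLES20 calls):

import android.opengl.GLES20;
import android.util.Log;

public final class ShaderCompile {
    private ShaderCompile() {}

    // Same as OpenGlUtils.loadShader, but logs the driver error and returns 0 on failure.
    public static int loadShaderChecked(int type, String shaderCode) {
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, shaderCode);
        GLES20.glCompileShader(shader);

        int[] compiled = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            Log.e("ShaderCompile", "Shader compile failed: " + GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
            return 0;
        }
        return shader;
    }
}
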
/filterlib/src/main/java/z0kai/filterlib/PixelBuffer.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2012 CyberAgent
3 | * Copyright (C) 2010 jsemler
4 | *
5 | * Original publication without License
6 | * http://www.anddev.org/android-2d-3d-graphics-opengl-tutorials-f2/possible-to-do-opengl-off-screen-rendering-in-android-t13232.html#p41662
7 | */
8 |
9 | package z0kai.filterlib;
10 |
11 | import static javax.microedition.khronos.egl.EGL10.EGL_ALPHA_SIZE;
12 | import static javax.microedition.khronos.egl.EGL10.EGL_BLUE_SIZE;
13 | import static javax.microedition.khronos.egl.EGL10.EGL_DEFAULT_DISPLAY;
14 | import static javax.microedition.khronos.egl.EGL10.EGL_DEPTH_SIZE;
15 | import static javax.microedition.khronos.egl.EGL10.EGL_GREEN_SIZE;
16 | import static javax.microedition.khronos.egl.EGL10.EGL_HEIGHT;
17 | import static javax.microedition.khronos.egl.EGL10.EGL_NONE;
18 | import static javax.microedition.khronos.egl.EGL10.EGL_NO_CONTEXT;
19 | import static javax.microedition.khronos.egl.EGL10.EGL_RED_SIZE;
20 | import static javax.microedition.khronos.egl.EGL10.EGL_STENCIL_SIZE;
21 | import static javax.microedition.khronos.egl.EGL10.EGL_WIDTH;
22 | import static javax.microedition.khronos.opengles.GL10.GL_RGBA;
23 | import static javax.microedition.khronos.opengles.GL10.GL_UNSIGNED_BYTE;
24 |
25 | import java.nio.IntBuffer;
26 |
27 | import javax.microedition.khronos.egl.EGL10;
28 | import javax.microedition.khronos.egl.EGLConfig;
29 | import javax.microedition.khronos.egl.EGLContext;
30 | import javax.microedition.khronos.egl.EGLDisplay;
31 | import javax.microedition.khronos.egl.EGLSurface;
32 | import javax.microedition.khronos.opengles.GL10;
33 |
34 | import android.graphics.Bitmap;
35 | import android.opengl.GLSurfaceView;
36 | import android.util.Log;
37 |
38 | public class PixelBuffer {
39 | final static String TAG = "PixelBuffer";
40 | final static boolean LIST_CONFIGS = false;
41 |
42 | GLSurfaceView.Renderer mRenderer; // borrow this interface
43 | int mWidth, mHeight;
44 | Bitmap mBitmap;
45 |
46 | EGL10 mEGL;
47 | EGLDisplay mEGLDisplay;
48 | EGLConfig[] mEGLConfigs;
49 | EGLConfig mEGLConfig;
50 | EGLContext mEGLContext;
51 | EGLSurface mEGLSurface;
52 | GL10 mGL;
53 |
54 | String mThreadOwner;
55 |
56 | public PixelBuffer(final int width, final int height) {
57 | mWidth = width;
58 | mHeight = height;
59 |
60 | int[] version = new int[2];
61 | int[] attribList = new int[] {
62 | EGL_WIDTH, mWidth,
63 | EGL_HEIGHT, mHeight,
64 | EGL_NONE
65 | };
66 |
67 | // No error checking performed, minimum required code to elucidate logic
68 | mEGL = (EGL10) EGLContext.getEGL();
69 | mEGLDisplay = mEGL.eglGetDisplay(EGL_DEFAULT_DISPLAY);
70 | mEGL.eglInitialize(mEGLDisplay, version);
71 | mEGLConfig = chooseConfig(); // Choosing a config is a little more
72 | // complicated
73 |
74 | // mEGLContext = mEGL.eglCreateContext(mEGLDisplay, mEGLConfig,
75 | // EGL_NO_CONTEXT, null);
76 | int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
77 | int[] attrib_list = {
78 | EGL_CONTEXT_CLIENT_VERSION, 2,
79 | EGL10.EGL_NONE
80 | };
81 | mEGLContext = mEGL.eglCreateContext(mEGLDisplay, mEGLConfig, EGL_NO_CONTEXT, attrib_list);
82 |
83 | mEGLSurface = mEGL.eglCreatePbufferSurface(mEGLDisplay, mEGLConfig, attribList);
84 | mEGL.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);
85 |
86 | mGL = (GL10) mEGLContext.getGL();
87 |
88 | // Record thread owner of OpenGL context
89 | mThreadOwner = Thread.currentThread().getName();
90 | }
91 |
92 | public void setRenderer(final GLSurfaceView.Renderer renderer) {
93 | mRenderer = renderer;
94 |
95 | // Does this thread own the OpenGL context?
96 | if (!Thread.currentThread().getName().equals(mThreadOwner)) {
97 | Log.e(TAG, "setRenderer: This thread does not own the OpenGL context.");
98 | return;
99 | }
100 |
101 | // Call the renderer initialization routines
102 | mRenderer.onSurfaceCreated(mGL, mEGLConfig);
103 | mRenderer.onSurfaceChanged(mGL, mWidth, mHeight);
104 | }
105 |
106 | public Bitmap getBitmap() {
107 | // Do we have a renderer?
108 | if (mRenderer == null) {
109 | Log.e(TAG, "getBitmap: Renderer was not set.");
110 | return null;
111 | }
112 |
113 | // Does this thread own the OpenGL context?
114 | if (!Thread.currentThread().getName().equals(mThreadOwner)) {
115 | Log.e(TAG, "getBitmap: This thread does not own the OpenGL context.");
116 | return null;
117 | }
118 |
119 | // Call the renderer draw routine (it seems that some filters do not
120 | // work if this is only called once)
121 | mRenderer.onDrawFrame(mGL);
122 | mRenderer.onDrawFrame(mGL);
123 | convertToBitmap();
124 | return mBitmap;
125 | }
126 |
127 | public void destroy() {
128 | mRenderer.onDrawFrame(mGL);
129 | mRenderer.onDrawFrame(mGL);
130 | mEGL.eglMakeCurrent(mEGLDisplay, EGL10.EGL_NO_SURFACE,
131 | EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
132 |
133 | mEGL.eglDestroySurface(mEGLDisplay, mEGLSurface);
134 | mEGL.eglDestroyContext(mEGLDisplay, mEGLContext);
135 | mEGL.eglTerminate(mEGLDisplay);
136 | }
137 |
138 | private EGLConfig chooseConfig() {
139 | int[] attribList = new int[] {
140 | EGL_DEPTH_SIZE, 0,
141 | EGL_STENCIL_SIZE, 0,
142 | EGL_RED_SIZE, 8,
143 | EGL_GREEN_SIZE, 8,
144 | EGL_BLUE_SIZE, 8,
145 | EGL_ALPHA_SIZE, 8,
146 | EGL10.EGL_RENDERABLE_TYPE, 4,
147 | EGL_NONE
148 | };
149 |
150 | // No error checking performed, minimum required code to elucidate logic
151 | // Expand on this logic to be more selective in choosing a configuration
152 | int[] numConfig = new int[1];
153 | mEGL.eglChooseConfig(mEGLDisplay, attribList, null, 0, numConfig);
154 | int configSize = numConfig[0];
155 | mEGLConfigs = new EGLConfig[configSize];
156 | mEGL.eglChooseConfig(mEGLDisplay, attribList, mEGLConfigs, configSize, numConfig);
157 |
158 | if (LIST_CONFIGS) {
159 | listConfig();
160 | }
161 |
162 | return mEGLConfigs[0]; // Best match is probably the first configuration
163 | }
164 |
165 | private void listConfig() {
166 | Log.i(TAG, "Config List {");
167 |
168 | for (EGLConfig config : mEGLConfigs) {
169 | int d, s, r, g, b, a;
170 |
171 | // Expand on this logic to dump other attributes
172 | d = getConfigAttrib(config, EGL_DEPTH_SIZE);
173 | s = getConfigAttrib(config, EGL_STENCIL_SIZE);
174 | r = getConfigAttrib(config, EGL_RED_SIZE);
175 | g = getConfigAttrib(config, EGL_GREEN_SIZE);
176 | b = getConfigAttrib(config, EGL_BLUE_SIZE);
177 | a = getConfigAttrib(config, EGL_ALPHA_SIZE);
178 | Log.i(TAG, " = <" + d + "," + s + "," +
179 | r + "," + g + "," + b + "," + a + ">");
180 | }
181 |
182 | Log.i(TAG, "}");
183 | }
184 |
185 | private int getConfigAttrib(final EGLConfig config, final int attribute) {
186 | int[] value = new int[1];
187 | return mEGL.eglGetConfigAttrib(mEGLDisplay, config,
188 | attribute, value) ? value[0] : 0;
189 | }
190 |
191 | private void convertToBitmap() {
192 | int[] iat = new int[mWidth * mHeight];
193 | IntBuffer ib = IntBuffer.allocate(mWidth * mHeight);
194 | mGL.glReadPixels(0, 0, mWidth, mHeight, GL_RGBA, GL_UNSIGNED_BYTE, ib);
195 | int[] ia = ib.array();
196 |
197 | // glReadPixels returns the image bottom-up, so flip the rows
198 | // vertically to turn the upside-down result into a
199 | // right-side-up image.
200 | for (int i = 0; i < mHeight; i++) {
201 | for (int j = 0; j < mWidth; j++) {
202 | iat[(mHeight - i - 1) * mWidth + j] = ia[i * mWidth + j];
203 | }
204 | }
205 |
206 |
207 | mBitmap = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888);
208 | mBitmap.copyPixelsFromBuffer(IntBuffer.wrap(iat));
209 | }
210 | }
211 |
--------------------------------------------------------------------------------
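
For reference, the off-screen path that FilterRender.getFilterBitmap builds on PixelBuffer reduces to the sketch below. It must run on the thread that created the EGL context, as setRenderer/getBitmap enforce above; the filter choice is illustrative.

import android.graphics.Bitmap;

import z0kai.filterlib.FilterRender;
import z0kai.filterlib.filters.SepiaFilter;

public final class OffscreenFilter {
    private OffscreenFilter() {}

    // Renders a bitmap through a filter off screen and returns the filtered copy.
    public static Bitmap applySepia(Bitmap input) {
        // setAsStatic() switches the filter to a 2D sampler instead of the camera's OES sampler.
        FilterRender render = new FilterRender(new SepiaFilter().setAsStatic());
        // getFilterBitmap creates the PixelBuffer, draws twice and reads the pixels back.
        // Note: it uploads via loadTexture(..., true), which recycles the input bitmap.
        return render.getFilterBitmap(input);
    }
}
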
/filterlib/src/main/java/z0kai/filterlib/RotationUtil.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2012 CyberAgent
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package z0kai.filterlib;
18 |
19 | public class RotationUtil {
20 |
21 | public static final float TEXTURE_NO_ROTATION[] = {
22 | 0.0f, 0.0f,
23 | 0.0f, 1.0f,
24 | 1.0f, 1.0f,
25 | 1.0f, 0.0f,
26 | };
27 |
28 | public static final float TEXTURE_ROTATED_90[] = {
29 | 0.0f, 1.0f,
30 | 1.0f, 1.0f,
31 | 1.0f, 0.0f,
32 | 0.0f, 0.0f,
33 | };
34 |
35 | public static final float TEXTURE_ROTATED_180[] = {
36 | 1.0f, 1.0f,
37 | 1.0f, 0.0f,
38 | 0.0f, 0.0f,
39 | 0.0f, 1.0f,
40 | };
41 |
42 | public static final float TEXTURE_ROTATED_270[] = {
43 | 1.0f, 0.0f,
44 | 0.0f, 0.0f,
45 | 0.0f, 1.0f,
46 | 1.0f, 1.0f,
47 | };
48 |
49 | public static final float TEXTURE_ROTATED_270_H_FLIP[] = {
50 | 1.0f, 1.0f,
51 | 0.0f, 1.0f,
52 | 0.0f, 0.0f,
53 | 1.0f, 0.0f,
54 | };
55 | }
56 |
--------------------------------------------------------------------------------
/filterlib/src/main/java/z0kai/filterlib/filters/BaseFilter.java:
--------------------------------------------------------------------------------
1 | package z0kai.filterlib.filters;
2 |
3 | import android.graphics.PointF;
4 | import android.opengl.GLES20;
5 |
6 | import java.nio.FloatBuffer;
7 | import java.util.LinkedList;
8 |
9 | import z0kai.filterlib.OpenGlUtils;
10 |
11 | /**
12 | * Created by Z0Kai on 2016/7/5.
13 | */
14 |
15 | public class BaseFilter {
16 |
17 | protected static final String NO_FILTER_VERTEX_SHADER =
18 | "attribute vec4 vPosition;" +
19 | "attribute vec2 inputTextureCoordinate;" +
20 | "varying vec2 textureCoordinate;" +
21 | "void main()" +
22 | "{"+
23 | "gl_Position = vPosition;"+
24 | "textureCoordinate = inputTextureCoordinate;" +
25 | "}";
26 |
27 | protected static final String CAMERA_FILTER_FRAGMENT_SHADER_HEAD =
28 | "#extension GL_OES_EGL_image_external : require\n"+
29 | "uniform samplerExternalOES inputImageTexture;\n";
30 |
31 | protected static final String BITMAP_FILTER_FRAGMENT_SHADER_HEAD =
32 | "uniform sampler2D inputImageTexture;\n";
33 |
34 | protected static final String NO_FILTER_FRAGMENT_SHADER =
35 | "precision mediump float;" +
36 | "varying vec2 textureCoordinate;\n" +
37 | "void main() {" +
38 | " gl_FragColor = texture2D( inputImageTexture, textureCoordinate );\n" +
39 | "}";
40 |
41 | private final LinkedList<Runnable> mRunOnDraw;
42 | private String mVertexShader;
43 | private String mFragmentShader;
44 | private int mProgram;
45 | private boolean mIsInitialized;
46 |
47 | protected int mOutputWidth;
48 | protected int mOutputHeight;
49 |
50 | /**
51 | * filters the camera preview (an external OES texture) by default
52 | */
53 | private boolean isForCamera = true;
54 |
55 | public BaseFilter() {
56 | this(NO_FILTER_VERTEX_SHADER, NO_FILTER_FRAGMENT_SHADER);
57 | }
58 |
59 | public BaseFilter(final String vertexShader, final String fragmentShader) {
60 | mRunOnDraw = new LinkedList<>();
61 | mVertexShader = vertexShader;
62 | mFragmentShader = fragmentShader;
63 | }
64 |
65 | /**
66 | * switches this filter to bitmap (2D texture) input; must be called before init()
67 | */
68 | public BaseFilter setAsStatic() {
69 | isForCamera = false;
70 | return this;
71 | }
72 |
73 | public final void init() {
74 | onInit();
75 | mIsInitialized = true;
76 | onInitialized();
77 | }
78 |
79 | public void onInit() {
80 | int vertexShader = OpenGlUtils.loadShader(GLES20.GL_VERTEX_SHADER, mVertexShader);
81 | int fragmentShader = OpenGlUtils.loadShader(GLES20.GL_FRAGMENT_SHADER,
82 | (isForCamera ? CAMERA_FILTER_FRAGMENT_SHADER_HEAD : BITMAP_FILTER_FRAGMENT_SHADER_HEAD) +
83 | mFragmentShader);
84 |
85 | mProgram = GLES20.glCreateProgram(); // create empty OpenGL ES Program
86 | GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
87 | GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
88 | GLES20.glLinkProgram(mProgram);
89 | }
90 |
91 | public void onInitialized() {
92 | }
93 |
94 | public void onDrawArraysPre() {
95 | }
96 |
97 | public final void destroy() {
98 | if (!isInitialized()) return;
99 | GLES20.glDeleteProgram(mProgram);
100 | mIsInitialized = false;
101 | onDestroy();
102 | }
103 |
104 | public void onDestroy() {
105 | }
106 |
107 | public void onOutputSizeChanged(final int width, final int height) {
108 | mOutputWidth = width;
109 | mOutputHeight = height;
110 | }
111 |
112 | public int getProgram() {
113 | return mProgram;
114 | }
115 |
116 | public boolean isForCamera() {
117 | return isForCamera;
118 | }
119 |
120 | public void runPendingOnDrawTasks() {
121 | while (!mRunOnDraw.isEmpty()) {
122 | mRunOnDraw.removeFirst().run();
123 | }
124 | }
125 |
126 | public boolean isInitialized() {
127 | return mIsInitialized;
128 | }
129 |
130 | protected void setInteger(final int location, final int intValue) {
131 | runOnDraw(new Runnable() {
132 | @Override
133 | public void run() {
134 | GLES20.glUniform1i(location, intValue);
135 | }
136 | });
137 | }
138 |
139 | protected void setFloat(final int location, final float floatValue) {
140 | runOnDraw(new Runnable() {
141 | @Override
142 | public void run() {
143 | GLES20.glUniform1f(location, floatValue);
144 | }
145 | });
146 | }
147 |
148 | protected void setFloatVec2(final int location, final float[] arrayValue) {
149 | runOnDraw(new Runnable() {
150 | @Override
151 | public void run() {
152 | GLES20.glUniform2fv(location, 1, FloatBuffer.wrap(arrayValue));
153 | }
154 | });
155 | }
156 |
157 | protected void setFloatVec3(final int location, final float[] arrayValue) {
158 | runOnDraw(new Runnable() {
159 | @Override
160 | public void run() {
161 | GLES20.glUniform3fv(location, 1, FloatBuffer.wrap(arrayValue));
162 | }
163 | });
164 | }
165 |
166 | protected void setFloatVec4(final int location, final float[] arrayValue) {
167 | runOnDraw(new Runnable() {
168 | @Override
169 | public void run() {
170 | GLES20.glUniform4fv(location, 1, FloatBuffer.wrap(arrayValue));
171 | }
172 | });
173 | }
174 |
175 | protected void setFloatArray(final int location, final float[] arrayValue) {
176 | runOnDraw(new Runnable() {
177 | @Override
178 | public void run() {
179 | GLES20.glUniform1fv(location, arrayValue.length, FloatBuffer.wrap(arrayValue));
180 | }
181 | });
182 | }
183 |
184 | protected void setPoint(final int location, final PointF point) {
185 | runOnDraw(new Runnable() {
186 |
187 | @Override
188 | public void run() {
189 | float[] vec2 = new float[2];
190 | vec2[0] = point.x;
191 | vec2[1] = point.y;
192 | GLES20.glUniform2fv(location, 1, vec2, 0);
193 | }
194 | });
195 | }
196 |
197 | protected void setUniformMatrix3f(final int location, final float[] matrix) {
198 | runOnDraw(new Runnable() {
199 |
200 | @Override
201 | public void run() {
202 | GLES20.glUniformMatrix3fv(location, 1, false, matrix, 0);
203 | }
204 | });
205 | }
206 |
207 | protected void setUniformMatrix4f(final int location, final float[] matrix) {
208 | runOnDraw(new Runnable() {
209 |
210 | @Override
211 | public void run() {
212 | GLES20.glUniformMatrix4fv(location, 1, false, matrix, 0);
213 | }
214 | });
215 | }
216 |
217 | protected void runOnDraw(final Runnable runnable) {
218 | synchronized (mRunOnDraw) {
219 | mRunOnDraw.addLast(runnable);
220 | }
221 | }
222 | }
223 |
--------------------------------------------------------------------------------
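
BaseFilter is designed to be subclassed: onInit resolves uniform locations, onInitialized pushes initial values, and the protected set* helpers queue uniform updates until the GL thread drains them. Below is a sketch of a hypothetical brightness filter built on those hooks; the shader body and the brightness uniform are illustrative, not part of the library (the inputImageTexture sampler declaration is prepended by onInit).

import android.opengl.GLES20;

import z0kai.filterlib.filters.BaseFilter;

// Hypothetical example filter; "brightness" is not a uniform used anywhere in the library.
public class BrightnessFilter extends BaseFilter {
    private static final String BRIGHTNESS_FRAGMENT_SHADER =
            "precision mediump float;" +
            "varying vec2 textureCoordinate;" +
            "uniform float brightness;" +
            "void main() {" +
            "  vec4 color = texture2D(inputImageTexture, textureCoordinate);" +
            "  gl_FragColor = vec4(color.rgb + vec3(brightness), color.a);" +
            "}";

    private int mBrightnessLocation;
    private float mBrightness;

    public BrightnessFilter(float brightness) {
        super(NO_FILTER_VERTEX_SHADER, BRIGHTNESS_FRAGMENT_SHADER);
        mBrightness = brightness;
    }

    @Override
    public void onInit() {
        super.onInit();
        mBrightnessLocation = GLES20.glGetUniformLocation(getProgram(), "brightness");
    }

    @Override
    public void onInitialized() {
        super.onInitialized();
        setFloat(mBrightnessLocation, mBrightness); // queued; runs on the GL thread before the next draw
    }
}
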
/filterlib/src/main/java/z0kai/filterlib/filters/BeautyFilter.java:
--------------------------------------------------------------------------------
1 | package z0kai.filterlib.filters;
2 |
3 | import android.content.res.Resources;
4 | import android.opengl.GLES20;
5 |
6 | import z0kai.filterlib.OpenGlUtils;
7 | import z0kai.filterlib.R;
8 |
9 | /**
10 | * Created by Z0Kai on 2016/7/5.
11 | */
12 |
13 | public class BeautyFilter extends BaseFilter {
14 | private int mSingleStepOffsetLocation;
15 | private int mParamsLocation;
16 | private int level;
17 |
18 | public BeautyFilter(Resources resources) {
19 | this(resources, 2);
20 | }
21 |
22 | public BeautyFilter(Resources resources, int level) {
23 | super(NO_FILTER_VERTEX_SHADER, OpenGlUtils.readShaderFromRawResource(resources, R.raw.beautify_fragment));
24 | this.level = level;
25 | }
26 |
27 | public void onInit() {
28 | super.onInit();
29 | mSingleStepOffsetLocation = GLES20.glGetUniformLocation(getProgram(), "singleStepOffset");
30 | mParamsLocation = GLES20.glGetUniformLocation(getProgram(), "params");
31 | setBeautyLevel(level);
32 | }
33 |
34 | @Override
35 | public void onOutputSizeChanged(final int width, final int height) {
36 | super.onOutputSizeChanged(width, height);
37 | setTexelSize(width, height);
38 | }
39 |
40 | private void setTexelSize(final float w, final float h) {
41 | setFloatVec2(mSingleStepOffsetLocation, new float[] {2.0f / w, 2.0f / h});
42 | }
43 |
44 | public void setBeautyLevel(int level){
45 | switch (level) {
46 | case 1:
47 | setFloatVec4(mParamsLocation, new float[] {1.0f, 1.0f, 0.15f, 0.15f});
48 | break;
49 | case 2:
50 | setFloatVec4(mParamsLocation, new float[] {0.8f, 0.9f, 0.2f, 0.2f});
51 | break;
52 | case 3:
53 | setFloatVec4(mParamsLocation, new float[] {0.6f, 0.8f, 0.25f, 0.25f});
54 | break;
55 | case 4:
56 | setFloatVec4(mParamsLocation, new float[] {0.4f, 0.7f, 0.38f, 0.3f});
57 | break;
58 | case 5:
59 | setFloatVec4(mParamsLocation, new float[] {0.33f, 0.63f, 0.4f, 0.35f});
60 | break;
61 | default:
62 | break;
63 | }
64 | }
65 | }
66 |
--------------------------------------------------------------------------------
/filterlib/src/main/java/z0kai/filterlib/filters/ColorMatrixFilter.java:
--------------------------------------------------------------------------------
1 | package z0kai.filterlib.filters;
2 |
3 | import android.opengl.GLES20;
4 |
5 | /**
6 | * Created by Z0Kai on 2016/7/5.
7 | */
8 |
9 | public class ColorMatrixFilter extends BaseFilter {
10 | public static final String COLOR_MATRIX_FRAGMENT_SHADER =
11 | "precision mediump float;" +
12 | "varying vec2 textureCoordinate;\n" +
13 | "uniform lowp mat4 colorMatrix;\n" +
14 | "uniform lowp float intensity;\n" +
15 |
16 | "void main() {" +
17 | " lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n" +
18 | " lowp vec4 outputColor = textureColor * colorMatrix;\n" +
19 | " gl_FragColor = (intensity * outputColor) + ((1.0 - intensity) * textureColor);\n" +
20 | "}";
21 |
22 | private float mIntensity;
23 | private float[] mColorMatrix;
24 | private int mColorMatrixLocation;
25 | private int mIntensityLocation;
26 |
27 | public ColorMatrixFilter() {
28 | this(1.0f, new float[] {
29 | 1.0f, 0.0f, 0.0f, 0.0f,
30 | 0.0f, 1.0f, 0.0f, 0.0f,
31 | 0.0f, 0.0f, 1.0f, 0.0f,
32 | 0.0f, 0.0f, 0.0f, 1.0f
33 | });
34 | }
35 |
36 | public ColorMatrixFilter(final float intensity, final float[] colorMatrix) {
37 | super(NO_FILTER_VERTEX_SHADER, COLOR_MATRIX_FRAGMENT_SHADER);
38 | mIntensity = intensity;
39 | mColorMatrix = colorMatrix;
40 | }
41 |
42 | @Override
43 | public void onInit() {
44 | super.onInit();
45 | mColorMatrixLocation = GLES20.glGetUniformLocation(getProgram(), "colorMatrix");
46 | mIntensityLocation = GLES20.glGetUniformLocation(getProgram(), "intensity");
47 | }
48 |
49 | @Override
50 | public void onInitialized() {
51 | super.onInitialized();
52 | setIntensity(mIntensity);
53 | setColorMatrix(mColorMatrix);
54 | }
55 |
56 | public void setIntensity(final float intensity) {
57 | mIntensity = intensity;
58 | setFloat(mIntensityLocation, intensity);
59 | }
60 |
61 | public void setColorMatrix(final float[] colorMatrix) {
62 | mColorMatrix = colorMatrix;
63 | setUniformMatrix4f(mColorMatrixLocation, colorMatrix);
64 | }
65 | }
66 |
--------------------------------------------------------------------------------
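
setColorMatrix uploads the 16 values column by column (glUniformMatrix4fv with transpose = false), and the shader computes textureColor * colorMatrix, so each group of four values in the array supplies the weights for one output channel. As an illustration only, a BT.601 grayscale filter could be built like this (a sketch, not part of the library):

import z0kai.filterlib.filters.ColorMatrixFilter;

public final class GrayscaleMatrix {
    private GrayscaleMatrix() {}

    // Every colour channel becomes the same weighted sum of R, G and B; alpha is preserved.
    public static ColorMatrixFilter create() {
        return new ColorMatrixFilter(1.0f, new float[] {
                0.299f, 0.587f, 0.114f, 0.0f,
                0.299f, 0.587f, 0.114f, 0.0f,
                0.299f, 0.587f, 0.114f, 0.0f,
                0.0f,   0.0f,   0.0f,   1.0f
        });
    }
}
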
/filterlib/src/main/java/z0kai/filterlib/filters/IFImageFilter.java:
--------------------------------------------------------------------------------
1 | package z0kai.filterlib.filters;
2 |
3 | import android.content.Context;
4 | import android.graphics.Bitmap;
5 | import android.graphics.BitmapFactory;
6 | import android.opengl.GLES20;
7 |
8 | import java.util.ArrayList;
9 | import java.util.List;
10 |
11 | import z0kai.filterlib.OpenGlUtils;
12 |
13 | /**
14 | * Created by Z0Kai on 2016/7/6.
15 | */
16 |
17 | public class IFImageFilter extends BaseFilter {
18 | private int filterInputTextureUniform2;
19 | private int filterInputTextureUniform3;
20 | private int filterInputTextureUniform4;
21 | private int filterInputTextureUniform5;
22 | private int filterInputTextureUniform6;
23 | public int filterSourceTexture2 = OpenGlUtils.NO_TEXTURE;
24 | public int filterSourceTexture3 = OpenGlUtils.NO_TEXTURE;
25 | public int filterSourceTexture4 = OpenGlUtils.NO_TEXTURE;
26 | public int filterSourceTexture5 = OpenGlUtils.NO_TEXTURE;
27 | public int filterSourceTexture6 = OpenGlUtils.NO_TEXTURE;
28 | private List<Integer> mResIds;
29 | private Context mContext;
30 |
31 |
32 | public IFImageFilter(Context context, String fragmentShaderString) {
33 | super(NO_FILTER_VERTEX_SHADER, fragmentShaderString);
34 | mContext = context;
35 | }
36 |
37 | public void onInit() {
38 | super.onInit();
39 | filterInputTextureUniform2 = GLES20.glGetUniformLocation(getProgram(), "inputImageTexture2");
40 | filterInputTextureUniform3 = GLES20.glGetUniformLocation(getProgram(), "inputImageTexture3");
41 | filterInputTextureUniform4 = GLES20.glGetUniformLocation(getProgram(), "inputImageTexture4");
42 | filterInputTextureUniform5 = GLES20.glGetUniformLocation(getProgram(), "inputImageTexture5");
43 | filterInputTextureUniform6 = GLES20.glGetUniformLocation(getProgram(), "inputImageTexture6");
44 |
45 | initInputTexture();
46 | }
47 |
48 | public void onDestroy() {
49 | super.onDestroy();
50 |
51 | if (filterSourceTexture2 != OpenGlUtils.NO_TEXTURE) {
52 | int[] arrayOfInt1 = new int[1];
53 | arrayOfInt1[0] = this.filterSourceTexture2;
54 | GLES20.glDeleteTextures(1, arrayOfInt1, 0);
55 | this.filterSourceTexture2 = OpenGlUtils.NO_TEXTURE;
56 | }
57 |
58 | if (filterSourceTexture3 != OpenGlUtils.NO_TEXTURE) {
59 | int[] arrayOfInt2 = new int[1];
60 | arrayOfInt2[0] = this.filterSourceTexture3;
61 | GLES20.glDeleteTextures(1, arrayOfInt2, 0);
62 | this.filterSourceTexture3 = OpenGlUtils.NO_TEXTURE;
63 | }
64 |
65 | if (filterSourceTexture4 != OpenGlUtils.NO_TEXTURE) {
66 | int[] arrayOfInt3 = new int[1];
67 | arrayOfInt3[0] = this.filterSourceTexture4;
68 | GLES20.glDeleteTextures(1, arrayOfInt3, 0);
69 | this.filterSourceTexture4 = OpenGlUtils.NO_TEXTURE;
70 | }
71 |
72 | if (filterSourceTexture5 != OpenGlUtils.NO_TEXTURE) {
73 | int[] arrayOfInt4 = new int[1];
74 | arrayOfInt4[0] = this.filterSourceTexture5;
75 | GLES20.glDeleteTextures(1, arrayOfInt4, 0);
76 | this.filterSourceTexture5 = OpenGlUtils.NO_TEXTURE;
77 | }
78 |
79 | if (filterSourceTexture6 != OpenGlUtils.NO_TEXTURE) {
80 | int[] arrayOfInt5 = new int[1];
81 | arrayOfInt5[0] = this.filterSourceTexture6;
82 | GLES20.glDeleteTextures(1, arrayOfInt5, 0);
83 | this.filterSourceTexture6 = OpenGlUtils.NO_TEXTURE;
84 | }
85 |
86 | }
87 |
88 | @Override
89 | public void onDrawArraysPre() {
90 | super.onDrawArraysPre();
91 |
92 | if (filterSourceTexture2 != OpenGlUtils.NO_TEXTURE) {
93 | GLES20.glActiveTexture(GLES20.GL_TEXTURE3);
94 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, filterSourceTexture2);
95 | GLES20.glUniform1i(filterInputTextureUniform2, 3);
96 | }
97 |
98 | if (filterSourceTexture3 != OpenGlUtils.NO_TEXTURE) {
99 | GLES20.glActiveTexture(GLES20.GL_TEXTURE4);
100 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, filterSourceTexture3);
101 | GLES20.glUniform1i(filterInputTextureUniform3, 4);
102 | }
103 |
104 | if (filterSourceTexture4 != OpenGlUtils.NO_TEXTURE) {
105 | GLES20.glActiveTexture(GLES20.GL_TEXTURE5);
106 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, filterSourceTexture4);
107 | GLES20.glUniform1i(filterInputTextureUniform4, 5);
108 | }
109 |
110 | if (filterSourceTexture5 != OpenGlUtils.NO_TEXTURE) {
111 | GLES20.glActiveTexture(GLES20.GL_TEXTURE6);
112 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, filterSourceTexture5);
113 | GLES20.glUniform1i(filterInputTextureUniform5, 6);
114 | }
115 |
116 | if (filterSourceTexture6 != OpenGlUtils.NO_TEXTURE) {
117 | GLES20.glActiveTexture(GLES20.GL_TEXTURE7);
118 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, filterSourceTexture6);
119 | GLES20.glUniform1i(filterInputTextureUniform6, 7);
120 | }
121 |
122 | }
123 |
124 | public void addInputTexture(int resId) {
125 | if (mResIds == null) {
126 | mResIds = new ArrayList<>();
127 | }
128 | mResIds.add(resId);
129 | }
130 |
131 | public void initInputTexture() {
132 | if (mResIds == null) {
133 | return;
134 | }
135 | if (mResIds.size() > 0) {
136 | runOnDraw(new Runnable() {
137 | @Override
138 | public void run() {
139 | Bitmap b = BitmapFactory.decodeResource(mContext.getResources(), mResIds.get(0));
140 | filterSourceTexture2 = OpenGlUtils.loadTexture(b, OpenGlUtils.NO_TEXTURE, true);
141 | }
142 | });
143 | }
144 |
145 | if (mResIds.size() > 1) {
146 | runOnDraw(new Runnable() {
147 | @Override
148 | public void run() {
149 | Bitmap b = BitmapFactory.decodeResource(mContext.getResources(), mResIds.get(1));
150 | filterSourceTexture3 = OpenGlUtils.loadTexture(b, OpenGlUtils.NO_TEXTURE, true);
151 | }
152 | });
153 | }
154 |
155 | if (mResIds.size() > 2) {
156 | runOnDraw(new Runnable() {
157 | @Override
158 | public void run() {
159 | Bitmap b = BitmapFactory.decodeResource(mContext.getResources(), mResIds.get(2));
160 | filterSourceTexture4 = OpenGlUtils.loadTexture(b, OpenGlUtils.NO_TEXTURE, true);
161 | }
162 | });
163 | }
164 |
165 | if (mResIds.size() > 3) {
166 | runOnDraw(new Runnable() {
167 | @Override
168 | public void run() {
169 | Bitmap b = BitmapFactory.decodeResource(mContext.getResources(), mResIds.get(3));
170 | filterSourceTexture5 = OpenGlUtils.loadTexture(b, OpenGlUtils.NO_TEXTURE, true);
171 | }
172 | });
173 | }
174 |
175 | if (mResIds.size() > 4) {
176 | runOnDraw(new Runnable() {
177 | @Override
178 | public void run() {
179 | Bitmap b = BitmapFactory.decodeResource(mContext.getResources(), mResIds.get(4));
180 | filterSourceTexture6 = OpenGlUtils.loadTexture(b, OpenGlUtils.NO_TEXTURE, true);
181 | }
182 | });
183 | }
184 | }
185 | }
186 |
--------------------------------------------------------------------------------
/filterlib/src/main/java/z0kai/filterlib/filters/IFInkwellFilter.java:
--------------------------------------------------------------------------------
1 | package z0kai.filterlib.filters;
2 |
3 | import android.content.Context;
4 |
5 | import z0kai.filterlib.R;
6 |
7 | /**
8 | * Created by Z0Kai on 2016/7/6.
9 | */
10 |
11 | public class IFInkwellFilter extends IFImageFilter {
12 | private static final String SHADER = "precision lowp float;\n" +
13 | " \n" +
14 | " varying highp vec2 textureCoordinate;\n" +
15 | " \n" +
16 | " uniform sampler2D inputImageTexture2;\n" +
17 | " \n" +
18 | " void main()\n" +
19 | " {\n" +
20 | " vec3 texel = texture2D(inputImageTexture, textureCoordinate).rgb;\n" +
21 | " texel = vec3(dot(vec3(0.3, 0.6, 0.1), texel));\n" +
22 | " texel = vec3(texture2D(inputImageTexture2, vec2(texel.r, .16666)).r);\n" +
23 | " gl_FragColor = vec4(texel, 1.0);\n" +
24 | " }\n";
25 |
26 | public IFInkwellFilter(Context paramContext) {
27 | super(paramContext, SHADER);
28 | setRes();
29 | }
30 |
31 | private void setRes() {
32 | addInputTexture(R.drawable.inkwell_map);
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/filterlib/src/main/java/z0kai/filterlib/filters/SepiaFilter.java:
--------------------------------------------------------------------------------
1 | package z0kai.filterlib.filters;
2 |
3 | /**
4 | * Created by Z0Kai on 2016/7/5.
5 | */
6 |
7 | public class SepiaFilter extends ColorMatrixFilter {
8 | public SepiaFilter() {
9 | this(1.0f);
10 | }
11 |
12 | public SepiaFilter(final float intensity) {
13 | super(intensity, new float[] {
14 | 0.3588f, 0.7044f, 0.1368f, 0.0f,
15 | 0.2990f, 0.5870f, 0.1140f, 0.0f,
16 | 0.2392f, 0.4696f, 0.0912f, 0.0f,
17 | 0f, 0f, 0f, 1.0f
18 | });
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/filterlib/src/main/java/z0kai/filterlib/filters/ToneCurveFilter.java:
--------------------------------------------------------------------------------
1 | package z0kai.filterlib.filters;
2 |
3 | import android.graphics.Point;
4 | import android.graphics.PointF;
5 | import android.opengl.GLES20;
6 |
7 | import java.io.IOException;
8 | import java.io.InputStream;
9 | import java.nio.ByteBuffer;
10 | import java.util.ArrayList;
11 | import java.util.Arrays;
12 | import java.util.Comparator;
13 |
14 | import z0kai.filterlib.OpenGlUtils;
15 |
16 | /**
17 | * Created by Z0Kai on 2016/7/6.
18 | */
19 |
20 | public class ToneCurveFilter extends BaseFilter {
21 | public static final String TONE_CURVE_FRAGMENT_SHADER = "" +
22 | " varying highp vec2 textureCoordinate;\n" +
23 | " uniform sampler2D toneCurveTexture;\n" +
24 | "\n" +
25 | " void main()\n" +
26 | " {\n" +
27 | " lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n" +
28 | " lowp float redCurveValue = texture2D(toneCurveTexture, vec2(textureColor.r, 0.0)).r;\n" +
29 | " lowp float greenCurveValue = texture2D(toneCurveTexture, vec2(textureColor.g, 0.0)).g;\n" +
30 | " lowp float blueCurveValue = texture2D(toneCurveTexture, vec2(textureColor.b, 0.0)).b;\n" +
31 | "\n" +
32 | " gl_FragColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, textureColor.a);\n" +
33 | " }";
34 |
35 | private int[] mToneCurveTexture = new int[]{OpenGlUtils.NO_TEXTURE};
36 | private int mToneCurveTextureUniformLocation;
37 |
38 | private PointF[] mRgbCompositeControlPoints;
39 | private PointF[] mRedControlPoints;
40 | private PointF[] mGreenControlPoints;
41 | private PointF[] mBlueControlPoints;
42 |
43 | private ArrayList<Float> mRgbCompositeCurve;
44 | private ArrayList<Float> mRedCurve;
45 | private ArrayList<Float> mGreenCurve;
46 | private ArrayList<Float> mBlueCurve;
47 |
48 |
49 | public ToneCurveFilter() {
50 | super(NO_FILTER_VERTEX_SHADER, TONE_CURVE_FRAGMENT_SHADER);
51 |
52 | PointF[] defaultCurvePoints = new PointF[]{new PointF(0.0f, 0.0f), new PointF(0.5f, 0.5f), new PointF(1.0f, 1.0f)};
53 | mRgbCompositeControlPoints = defaultCurvePoints;
54 | mRedControlPoints = defaultCurvePoints;
55 | mGreenControlPoints = defaultCurvePoints;
56 | mBlueControlPoints = defaultCurvePoints;
57 | }
58 |
59 | @Override
60 | public void onInit() {
61 | super.onInit();
62 | mToneCurveTextureUniformLocation = GLES20.glGetUniformLocation(getProgram(), "toneCurveTexture");
63 | GLES20.glActiveTexture(GLES20.GL_TEXTURE3);
64 | GLES20.glGenTextures(1, mToneCurveTexture, 0);
65 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mToneCurveTexture[0]);
66 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
67 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
68 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
69 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
70 | }
71 |
72 | @Override
73 | public void onInitialized() {
74 | super.onInitialized();
75 | setRgbCompositeControlPoints(mRgbCompositeControlPoints);
76 | setRedControlPoints(mRedControlPoints);
77 | setGreenControlPoints(mGreenControlPoints);
78 | setBlueControlPoints(mBlueControlPoints);
79 | }
80 |
81 | @Override
82 | public void onDrawArraysPre() {
83 | if (mToneCurveTexture[0] != OpenGlUtils.NO_TEXTURE) {
84 | GLES20.glActiveTexture(GLES20.GL_TEXTURE3);
85 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mToneCurveTexture[0]);
86 | GLES20.glUniform1i(mToneCurveTextureUniformLocation, 3);
87 | }
88 | }
89 |
90 | public void setFromCurveFileInputStream(InputStream input) {
91 | try {
92 | int version = readShort(input);
93 | int totalCurves = readShort(input);
94 |
95 | ArrayList<PointF[]> curves = new ArrayList<>(totalCurves);
96 | float pointRate = 1.0f / 255;
97 |
98 | for (int i = 0; i < totalCurves; i++) {
99 | // 2 bytes, Count of points in the curve (short integer from 2...19)
100 | short pointCount = readShort(input);
101 |
102 | PointF[] points = new PointF[pointCount];
103 |
104 | // point count * 4
105 | // Curve points. Each curve point is a pair of short integers where
106 | // the first number is the output value (vertical coordinate on the
107 | // Curves dialog graph) and the second is the input value. All coordinates have range 0 to 255.
108 | for (int j = 0; j < pointCount; j++) {
109 | short y = readShort(input);
110 | short x = readShort(input);
111 |
112 | points[j] = new PointF(x * pointRate, y * pointRate);
113 | }
114 |
115 | curves.add(points);
116 | }
117 | input.close();
118 |
119 | mRgbCompositeControlPoints = curves.get(0);
120 | mRedControlPoints = curves.get(1);
121 | mGreenControlPoints = curves.get(2);
122 | mBlueControlPoints = curves.get(3);
123 | } catch (IOException e) {
124 | e.printStackTrace();
125 | }
126 | }
127 |
128 | private short readShort(InputStream input) throws IOException {
129 | return (short) (input.read() << 8 | input.read());
130 | }
131 |
132 | public void setRgbCompositeControlPoints(PointF[] points) {
133 | mRgbCompositeControlPoints = points;
134 | mRgbCompositeCurve = createSplineCurve(mRgbCompositeControlPoints);
135 | updateToneCurveTexture();
136 | }
137 |
138 | public void setRedControlPoints(PointF[] points) {
139 | mRedControlPoints = points;
140 | mRedCurve = createSplineCurve(mRedControlPoints);
141 | updateToneCurveTexture();
142 | }
143 |
144 | public void setGreenControlPoints(PointF[] points) {
145 | mGreenControlPoints = points;
146 | mGreenCurve = createSplineCurve(mGreenControlPoints);
147 | updateToneCurveTexture();
148 | }
149 |
150 | public void setBlueControlPoints(PointF[] points) {
151 | mBlueControlPoints = points;
152 | mBlueCurve = createSplineCurve(mBlueControlPoints);
153 | updateToneCurveTexture();
154 | }
155 |
156 | private void updateToneCurveTexture() {
157 | runOnDraw(new Runnable() {
158 | @Override
159 | public void run() {
160 | GLES20.glActiveTexture(GLES20.GL_TEXTURE3);
161 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mToneCurveTexture[0]);
162 |
163 | if ((mRedCurve.size() >= 256) && (mGreenCurve.size() >= 256) && (mBlueCurve.size() >= 256) && (mRgbCompositeCurve.size() >= 256)) {
164 | byte[] toneCurveByteArray = new byte[256 * 4];
165 | for (int currentCurveIndex = 0; currentCurveIndex < 256; currentCurveIndex++) {
166 | // BGRA for upload to texture
167 | toneCurveByteArray[currentCurveIndex * 4 + 2] = (byte) ((int) Math.min(Math.max(currentCurveIndex + mBlueCurve.get(currentCurveIndex) + mRgbCompositeCurve.get(currentCurveIndex), 0), 255) & 0xff);
168 | toneCurveByteArray[currentCurveIndex * 4 + 1] = (byte) ((int) Math.min(Math.max(currentCurveIndex + mGreenCurve.get(currentCurveIndex) + mRgbCompositeCurve.get(currentCurveIndex), 0), 255) & 0xff);
169 | toneCurveByteArray[currentCurveIndex * 4] = (byte) ((int) Math.min(Math.max(currentCurveIndex + mRedCurve.get(currentCurveIndex) + mRgbCompositeCurve.get(currentCurveIndex), 0), 255) & 0xff);
170 | toneCurveByteArray[currentCurveIndex * 4 + 3] = (byte) (255 & 0xff);
171 | }
172 |
173 | GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, 256 /*width*/, 1 /*height*/, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, ByteBuffer.wrap(toneCurveByteArray));
174 | }
175 | // Buffer pixels!
176 | // GLES20.glTexImage2D(int target,
177 | // int level,
178 | // int internalformat,
179 | // int width,
180 | // int height,
181 | // int border,
182 | // int format,
183 | // int type,
184 | // java.nio.Buffer pixels);
185 | }
186 | });
187 | }
188 |
189 | private ArrayList<Float> createSplineCurve(PointF[] points) {
190 | if (points == null || points.length <= 0) {
191 | return null;
192 | }
193 |
194 | // Sort the array
195 | PointF[] pointsSorted = points.clone();
196 | Arrays.sort(pointsSorted, new Comparator<PointF>() {
197 | @Override
198 | public int compare(PointF point1, PointF point2) {
199 | if (point1.x < point2.x) {
200 | return -1;
201 | } else if (point1.x > point2.x) {
202 | return 1;
203 | } else {
204 | return 0;
205 | }
206 | }
207 | });
208 |
209 | // Convert from (0, 1) to (0, 255).
210 | Point[] convertedPoints = new Point[pointsSorted.length];
211 | for (int i = 0; i < points.length; i++) {
212 | PointF point = pointsSorted[i];
213 | convertedPoints[i] = new Point((int) (point.x * 255), (int) (point.y * 255));
214 | }
215 |
216 | ArrayList<Point> splinePoints = createSplineCurve2(convertedPoints);
217 |
218 | // If we have a first point like (0.3, 0) we'll be missing some points at the beginning
219 | // that should be 0.
220 | Point firstSplinePoint = splinePoints.get(0);
221 | if (firstSplinePoint.x > 0) {
222 | for (int i = firstSplinePoint.x; i >= 0; i--) {
223 | splinePoints.add(0, new Point(i, 0));
224 | }
225 | }
226 |
227 | // Insert points similarly at the end, if necessary.
228 | Point lastSplinePoint = splinePoints.get(splinePoints.size() - 1);
229 | if (lastSplinePoint.x < 255) {
230 | for (int i = lastSplinePoint.x + 1; i <= 255; i++) {
231 | splinePoints.add(new Point(i, 255));
232 | }
233 | }
234 |
235 | // Prepare the spline points.
236 | ArrayList<Float> preparedSplinePoints = new ArrayList<>(splinePoints.size());
237 | for (Point newPoint : splinePoints) {
238 | Point origPoint = new Point(newPoint.x, newPoint.x);
239 |
240 | float distance = (float) Math.sqrt(Math.pow((origPoint.x - newPoint.x), 2.0) + Math.pow((origPoint.y - newPoint.y), 2.0));
241 |
242 | if (origPoint.y > newPoint.y) {
243 | distance = -distance;
244 | }
245 |
246 | preparedSplinePoints.add(distance);
247 | }
248 |
249 | return preparedSplinePoints;
250 | }
251 |
252 | private ArrayList<Point> createSplineCurve2(Point[] points) {
253 | ArrayList<Double> sdA = createSecondDerivative(points);
254 |
255 | // Is [points count] equal to [sdA count]?
256 | // int n = [points count];
257 | int n = sdA.size();
258 | if (n < 1) {
259 | return null;
260 | }
261 | double sd[] = new double[n];
262 |
263 | // From NSMutableArray to sd[n];
264 | for (int i = 0; i < n; i++) {
265 | sd[i] = sdA.get(i);
266 | }
267 |
268 |
269 | ArrayList<Point> output = new ArrayList<>(n + 1);
270 |
271 | for (int i = 0; i < n - 1; i++) {
272 | Point cur = points[i];
273 | Point next = points[i + 1];
274 |
275 | for (int x = cur.x; x < next.x; x++) {
276 | double t = (double) (x - cur.x) / (next.x - cur.x);
277 |
278 | double a = 1 - t;
279 | double b = t;
280 | double h = next.x - cur.x;
281 |
282 | double y = a * cur.y + b * next.y + (h * h / 6) * ((a * a * a - a) * sd[i] + (b * b * b - b) * sd[i + 1]);
283 |
284 | if (y > 255.0) {
285 | y = 255.0;
286 | } else if (y < 0.0) {
287 | y = 0.0;
288 | }
289 |
290 | output.add(new Point(x, (int) Math.round(y)));
291 | }
292 | }
293 |
294 | // If the last point is (255, 255) it doesn't get added.
295 | if (output.size() == 255) {
296 | output.add(points[points.length - 1]);
297 | }
298 | return output;
299 | }
300 |
301 | private ArrayList<Double> createSecondDerivative(Point[] points) {
302 | int n = points.length;
303 | if (n <= 1) {
304 | return null;
305 | }
306 |
307 | double matrix[][] = new double[n][3];
308 | double result[] = new double[n];
309 | matrix[0][1] = 1;
310 | // What about matrix[0][1] and matrix[0][0]? Assuming 0 for now (Brad L.)
311 | matrix[0][0] = 0;
312 | matrix[0][2] = 0;
313 |
314 | for (int i = 1; i < n - 1; i++) {
315 | Point P1 = points[i - 1];
316 | Point P2 = points[i];
317 | Point P3 = points[i + 1];
318 |
319 | matrix[i][0] = (double) (P2.x - P1.x) / 6;
320 | matrix[i][1] = (double) (P3.x - P1.x) / 3;
321 | matrix[i][2] = (double) (P3.x - P2.x) / 6;
322 | result[i] = (double) (P3.y - P2.y) / (P3.x - P2.x) - (double) (P2.y - P1.y) / (P2.x - P1.x);
323 | }
324 |
325 | // What about result[0] and result[n-1]? Assuming 0 for now (Brad L.)
326 | result[0] = 0;
327 | result[n - 1] = 0;
328 |
329 | matrix[n - 1][1] = 1;
330 | // What about matrix[n-1][0] and matrix[n-1][2]? For now, assuming they are 0 (Brad L.)
331 | matrix[n - 1][0] = 0;
332 | matrix[n - 1][2] = 0;
333 |
334 | // solving pass1 (up->down)
335 | for (int i = 1; i < n; i++) {
336 | double k = matrix[i][0] / matrix[i - 1][1];
337 | matrix[i][1] -= k * matrix[i - 1][2];
338 | matrix[i][0] = 0;
339 | result[i] -= k * result[i - 1];
340 | }
341 | // solving pass2 (down->up)
342 | for (int i = n - 2; i >= 0; i--) {
343 | double k = matrix[i][2] / matrix[i + 1][1];
344 | matrix[i][1] -= k * matrix[i + 1][0];
345 | matrix[i][2] = 0;
346 | result[i] -= k * result[i + 1];
347 | }
348 |
349 | ArrayList<Double> output = new ArrayList<>(n);
350 | for (int i = 0; i < n; i++) output.add(result[i] / matrix[i][1]);
351 |
352 | return output;
353 | }
354 | }
355 |
--------------------------------------------------------------------------------
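
setFromCurveFileInputStream reads a Photoshop .acv curve file: big-endian shorts giving a version, a curve count, and for each curve a point count followed by output/input pairs, exactly as the parser above expects. A loading sketch; the asset name is a placeholder, and the stream should be parsed before the filter is initialized so that onInitialized can apply the parsed control points.

import java.io.IOException;

import android.content.Context;

import z0kai.filterlib.filters.ToneCurveFilter;

public final class AcvLoader {
    private AcvLoader() {}

    // Builds a ToneCurveFilter from an .acv asset; parse before init() runs, because
    // onInitialized() turns the parsed control points into the 256-entry curve texture.
    public static ToneCurveFilter fromAsset(Context context, String assetName) throws IOException {
        ToneCurveFilter filter = new ToneCurveFilter();
        filter.setFromCurveFileInputStream(context.getAssets().open(assetName));
        return filter;
    }
}
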
/filterlib/src/main/java/z0kai/filterlib/filters/VignetteFilter.java:
--------------------------------------------------------------------------------
1 | package z0kai.filterlib.filters;
2 |
3 | import android.graphics.PointF;
4 | import android.opengl.GLES20;
5 |
6 | /**
7 | * Created by Z0Kai on 2016/7/6.
8 | */
9 |
10 | public class VignetteFilter extends BaseFilter {
11 | public static final String VIGNETTING_FRAGMENT_SHADER = "" +
12 | " varying highp vec2 textureCoordinate;\n" +
13 | " \n" +
14 | " uniform lowp vec2 vignetteCenter;\n" +
15 | " uniform lowp vec3 vignetteColor;\n" +
16 | " uniform highp float vignetteStart;\n" +
17 | " uniform highp float vignetteEnd;\n" +
18 | " \n" +
19 | " void main()\n" +
20 | " {\n" +
21 | " /*\n" +
22 | " lowp vec3 rgb = texture2D(inputImageTexture, textureCoordinate).rgb;\n" +
23 | " lowp float d = distance(textureCoordinate, vec2(0.5,0.5));\n" +
24 | " rgb *= (1.0 - smoothstep(vignetteStart, vignetteEnd, d));\n" +
25 | " gl_FragColor = vec4(vec3(rgb),1.0);\n" +
26 | " */\n" +
27 | " \n" +
28 | " lowp vec3 rgb = texture2D(inputImageTexture, textureCoordinate).rgb;\n" +
29 | " lowp float d = distance(textureCoordinate, vec2(vignetteCenter.x, vignetteCenter.y));\n" +
30 | " lowp float percent = smoothstep(vignetteStart, vignetteEnd, d);\n" +
31 | " gl_FragColor = vec4(mix(rgb.x, vignetteColor.x, percent), mix(rgb.y, vignetteColor.y, percent), mix(rgb.z, vignetteColor.z, percent), 1.0);\n" +
32 | " }";
33 |
34 | private int mVignetteCenterLocation;
35 | private PointF mVignetteCenter;
36 | private int mVignetteColorLocation;
37 | private float[] mVignetteColor;
38 | private int mVignetteStartLocation;
39 | private float mVignetteStart;
40 | private int mVignetteEndLocation;
41 | private float mVignetteEnd;
42 |
43 | public VignetteFilter() {
44 | this(new PointF(0.5f, 0.5f), new float[] {0.0f, 0.0f, 0.0f}, 0.40f, 0.8f);
45 | }
46 |
47 | public VignetteFilter(final PointF vignetteCenter, final float[] vignetteColor, final float vignetteStart, final float vignetteEnd) {
48 | super(NO_FILTER_VERTEX_SHADER, VIGNETTING_FRAGMENT_SHADER);
49 | mVignetteCenter = vignetteCenter;
50 | mVignetteColor = vignetteColor;
51 | mVignetteStart = vignetteStart;
52 | mVignetteEnd = vignetteEnd;
53 |
54 | }
55 |
56 | @Override
57 | public void onInit() {
58 | super.onInit();
59 | mVignetteCenterLocation = GLES20.glGetUniformLocation(getProgram(), "vignetteCenter");
60 | mVignetteColorLocation = GLES20.glGetUniformLocation(getProgram(), "vignetteColor");
61 | mVignetteStartLocation = GLES20.glGetUniformLocation(getProgram(), "vignetteStart");
62 | mVignetteEndLocation = GLES20.glGetUniformLocation(getProgram(), "vignetteEnd");
63 |
64 | setVignetteCenter(mVignetteCenter);
65 | setVignetteColor(mVignetteColor);
66 | setVignetteStart(mVignetteStart);
67 | setVignetteEnd(mVignetteEnd);
68 | }
69 |
70 |
71 | public void setVignetteCenter(final PointF vignetteCenter) {
72 | mVignetteCenter = vignetteCenter;
73 | setPoint(mVignetteCenterLocation, mVignetteCenter);
74 | }
75 |
76 | public void setVignetteColor(final float[] vignetteColor) {
77 | mVignetteColor = vignetteColor;
78 | setFloatVec3(mVignetteColorLocation, mVignetteColor);
79 | }
80 |
81 | public void setVignetteStart(final float vignetteStart) {
82 | mVignetteStart = vignetteStart;
83 | setFloat(mVignetteStartLocation, mVignetteStart);
84 | }
85 |
86 | public void setVignetteEnd(final float vignetteEnd) {
87 | mVignetteEnd = vignetteEnd;
88 | setFloat(mVignetteEndLocation, mVignetteEnd);
89 | }
90 | }
91 |
--------------------------------------------------------------------------------
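
A construction sketch for a stronger vignette than the default; the numbers are illustrative. Distance is measured in texture coordinates from vignetteCenter, and the darkening ramps from vignetteStart to vignetteEnd through smoothstep, as in the shader above.

import android.graphics.PointF;

import z0kai.filterlib.filters.VignetteFilter;

public final class Vignettes {
    private Vignettes() {}

    // Black vignette centred in the frame: untouched up to 0.3, fully darkened past 0.75.
    public static VignetteFilter strongBlackVignette() {
        return new VignetteFilter(
                new PointF(0.5f, 0.5f),          // vignetteCenter, in texture coordinates
                new float[] {0.0f, 0.0f, 0.0f},  // vignetteColor (black)
                0.3f,                            // vignetteStart
                0.75f);                          // vignetteEnd
    }
}
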
/filterlib/src/main/res/drawable/inkwell_map.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0kai/FilterCamera/c378de81e5bda00835e24a9147b697bd7d30122a/filterlib/src/main/res/drawable/inkwell_map.png
--------------------------------------------------------------------------------
/filterlib/src/main/res/raw/beautify_fragment.glsl:
--------------------------------------------------------------------------------
1 | //#extension GL_OES_EGL_image_external : require
2 | //uniform samplerExternalOES inputImageTexture;
3 |
4 | precision highp float;
5 |
6 | uniform vec2 singleStepOffset;
7 | uniform highp vec4 params;
8 |
9 | varying highp vec2 textureCoordinate;
10 |
11 | const highp vec3 W = vec3(0.299,0.587,0.114);
12 |
13 | const mat3 saturateMatrix = mat3(
14 | 1.1102,-0.0598,-0.061,
15 | -0.0774,1.0826,-0.1186,
16 | -0.0228,-0.0228,1.1772);
17 |
18 | vec2 blurCoordinates[24];
19 |
20 | float hardLight(float color)
21 | {
22 | if(color <= 0.5)
23 | {
24 | color = color * color * 2.0;
25 | }
26 | else
27 | {
28 | color = 1.0 - ((1.0 - color)*(1.0 - color) * 2.0);
29 | }
30 | return color;
31 | }
32 |
33 | void main(){
34 | vec3 centralColor = texture2D(inputImageTexture, textureCoordinate).rgb;
35 |
36 | blurCoordinates[0] = textureCoordinate.xy + singleStepOffset * vec2(0.0, -10.0);
37 | blurCoordinates[1] = textureCoordinate.xy + singleStepOffset * vec2(0.0, 10.0);
38 | blurCoordinates[2] = textureCoordinate.xy + singleStepOffset * vec2(-10.0, 0.0);
39 | blurCoordinates[3] = textureCoordinate.xy + singleStepOffset * vec2(10.0, 0.0);
40 | blurCoordinates[4] = textureCoordinate.xy + singleStepOffset * vec2(5.0, -8.0);
41 | blurCoordinates[5] = textureCoordinate.xy + singleStepOffset * vec2(5.0, 8.0);
42 | blurCoordinates[6] = textureCoordinate.xy + singleStepOffset * vec2(-5.0, 8.0);
43 | blurCoordinates[7] = textureCoordinate.xy + singleStepOffset * vec2(-5.0, -8.0);
44 | blurCoordinates[8] = textureCoordinate.xy + singleStepOffset * vec2(8.0, -5.0);
45 | blurCoordinates[9] = textureCoordinate.xy + singleStepOffset * vec2(8.0, 5.0);
46 | blurCoordinates[10] = textureCoordinate.xy + singleStepOffset * vec2(-8.0, 5.0);
47 | blurCoordinates[11] = textureCoordinate.xy + singleStepOffset * vec2(-8.0, -5.0);
48 | blurCoordinates[12] = textureCoordinate.xy + singleStepOffset * vec2(0.0, -6.0);
49 | blurCoordinates[13] = textureCoordinate.xy + singleStepOffset * vec2(0.0, 6.0);
50 | blurCoordinates[14] = textureCoordinate.xy + singleStepOffset * vec2(6.0, 0.0);
51 | blurCoordinates[15] = textureCoordinate.xy + singleStepOffset * vec2(-6.0, 0.0);
52 | blurCoordinates[16] = textureCoordinate.xy + singleStepOffset * vec2(-4.0, -4.0);
53 | blurCoordinates[17] = textureCoordinate.xy + singleStepOffset * vec2(-4.0, 4.0);
54 | blurCoordinates[18] = textureCoordinate.xy + singleStepOffset * vec2(4.0, -4.0);
55 | blurCoordinates[19] = textureCoordinate.xy + singleStepOffset * vec2(4.0, 4.0);
56 | blurCoordinates[20] = textureCoordinate.xy + singleStepOffset * vec2(-2.0, -2.0);
57 | blurCoordinates[21] = textureCoordinate.xy + singleStepOffset * vec2(-2.0, 2.0);
58 | blurCoordinates[22] = textureCoordinate.xy + singleStepOffset * vec2(2.0, -2.0);
59 | blurCoordinates[23] = textureCoordinate.xy + singleStepOffset * vec2(2.0, 2.0);
60 |
61 | float sampleColor = centralColor.g * 22.0;
62 | sampleColor += texture2D(inputImageTexture, blurCoordinates[0]).g;
63 | sampleColor += texture2D(inputImageTexture, blurCoordinates[1]).g;
64 | sampleColor += texture2D(inputImageTexture, blurCoordinates[2]).g;
65 | sampleColor += texture2D(inputImageTexture, blurCoordinates[3]).g;
66 | sampleColor += texture2D(inputImageTexture, blurCoordinates[4]).g;
67 | sampleColor += texture2D(inputImageTexture, blurCoordinates[5]).g;
68 | sampleColor += texture2D(inputImageTexture, blurCoordinates[6]).g;
69 | sampleColor += texture2D(inputImageTexture, blurCoordinates[7]).g;
70 | sampleColor += texture2D(inputImageTexture, blurCoordinates[8]).g;
71 | sampleColor += texture2D(inputImageTexture, blurCoordinates[9]).g;
72 | sampleColor += texture2D(inputImageTexture, blurCoordinates[10]).g;
73 | sampleColor += texture2D(inputImageTexture, blurCoordinates[11]).g;
74 | sampleColor += texture2D(inputImageTexture, blurCoordinates[12]).g * 2.0;
75 | sampleColor += texture2D(inputImageTexture, blurCoordinates[13]).g * 2.0;
76 | sampleColor += texture2D(inputImageTexture, blurCoordinates[14]).g * 2.0;
77 | sampleColor += texture2D(inputImageTexture, blurCoordinates[15]).g * 2.0;
78 | sampleColor += texture2D(inputImageTexture, blurCoordinates[16]).g * 2.0;
79 | sampleColor += texture2D(inputImageTexture, blurCoordinates[17]).g * 2.0;
80 | sampleColor += texture2D(inputImageTexture, blurCoordinates[18]).g * 2.0;
81 | sampleColor += texture2D(inputImageTexture, blurCoordinates[19]).g * 2.0;
82 | sampleColor += texture2D(inputImageTexture, blurCoordinates[20]).g * 3.0;
83 | sampleColor += texture2D(inputImageTexture, blurCoordinates[21]).g * 3.0;
84 | sampleColor += texture2D(inputImageTexture, blurCoordinates[22]).g * 3.0;
85 | sampleColor += texture2D(inputImageTexture, blurCoordinates[23]).g * 3.0;
86 |     sampleColor = sampleColor / 62.0; // normalize by the total tap weight (22 + 12*1 + 8*2 + 4*3 = 62)
87 |
88 |     float highPass = centralColor.g - sampleColor + 0.5; // high-frequency detail of the green channel, re-centered at 0.5
89 |
90 |     for(int i = 0; i < 5;i++) // boost the contrast of the detail mask with five hard-light passes
91 | {
92 | highPass = hardLight(highPass);
93 | }
94 |     float luminance = dot(centralColor, W); // perceptual luminance of the source pixel
95 |     float alpha = pow(luminance, params.r); // luminance-dependent mask: luminance raised to params.r
96 |
97 |     vec3 smoothColor = centralColor + (centralColor-vec3(highPass))*alpha*0.1; // push the colour away from the boosted detail value, scaled by alpha*0.1 (smoothing step)
98 |
99 |     smoothColor.r = clamp(pow(smoothColor.r, params.g),0.0,1.0); // per-channel tone adjustment: raise to params.g and clamp
100 | smoothColor.g = clamp(pow(smoothColor.g, params.g),0.0,1.0);
101 | smoothColor.b = clamp(pow(smoothColor.b, params.g),0.0,1.0);
102 |
103 |     vec3 screen = vec3(1.0) - (vec3(1.0)-smoothColor) * (vec3(1.0)-centralColor); // screen blend of smoothed and original colour
104 |     vec3 lighten = max(smoothColor, centralColor); // lighten blend
105 |     vec3 softLight = 2.0 * centralColor*smoothColor + centralColor*centralColor // soft-light blend
106 |     - 2.0 * centralColor*centralColor * smoothColor;
107 |
108 |     gl_FragColor = vec4(mix(centralColor, screen, alpha), 1.0); // blend from the original towards the screen result by the mask
109 |     gl_FragColor.rgb = mix(gl_FragColor.rgb, lighten, alpha); // then towards the lighten result by the mask
110 |     gl_FragColor.rgb = mix(gl_FragColor.rgb, softLight, params.b); // then towards the soft-light result by params.b
111 |
112 |     vec3 satColor = gl_FragColor.rgb * saturateMatrix; // saturation-adjusted colour from the constant matrix
113 |     gl_FragColor.rgb = mix(gl_FragColor.rgb, satColor, params.a); // blended in by params.a
114 | }
--------------------------------------------------------------------------------
/filterlib/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 |     <string name="app_name">FilterLib</string>
3 | </resources>
4 |
--------------------------------------------------------------------------------
/filterlib/src/test/java/z0kai/filterlib/ExampleUnitTest.java:
--------------------------------------------------------------------------------
1 | package z0kai.filterlib;
2 |
3 | import org.junit.Test;
4 |
5 | import static org.junit.Assert.*;
6 |
7 | /**
8 | * Example local unit test, which will execute on the development machine (host).
9 | *
10 |  * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
11 | */
12 | public class ExampleUnitTest {
13 | @Test
14 | public void addition_isCorrect() throws Exception {
15 | assertEquals(4, 2 + 2);
16 | }
17 | }
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 |
3 | # IDE (e.g. Android Studio) users:
4 | # Gradle settings configured through the IDE *will override*
5 | # any settings specified in this file.
6 |
7 | # For more details on how to configure your build environment visit
8 | # http://www.gradle.org/docs/current/userguide/build_environment.html
9 |
10 | # Specifies the JVM arguments used for the daemon process.
11 | # The setting is particularly useful for tweaking memory settings.
12 | org.gradle.jvmargs=-Xmx1536m
13 |
14 | # When configured, Gradle will run in incubating parallel mode.
15 | # This option should only be used with decoupled projects. More details, visit
16 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
17 | # org.gradle.parallel=true
18 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0kai/FilterCamera/c378de81e5bda00835e24a9147b697bd7d30122a/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Mon Dec 28 10:00:20 PST 2015
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-2.10-all.zip
7 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
10 | DEFAULT_JVM_OPTS=""
11 |
12 | APP_NAME="Gradle"
13 | APP_BASE_NAME=`basename "$0"`
14 |
15 | # Use the maximum available, or set MAX_FD != -1 to use that value.
16 | MAX_FD="maximum"
17 |
18 | warn ( ) {
19 | echo "$*"
20 | }
21 |
22 | die ( ) {
23 | echo
24 | echo "$*"
25 | echo
26 | exit 1
27 | }
28 |
29 | # OS specific support (must be 'true' or 'false').
30 | cygwin=false
31 | msys=false
32 | darwin=false
33 | case "`uname`" in
34 | CYGWIN* )
35 | cygwin=true
36 | ;;
37 | Darwin* )
38 | darwin=true
39 | ;;
40 | MINGW* )
41 | msys=true
42 | ;;
43 | esac
44 |
45 | # Attempt to set APP_HOME
46 | # Resolve links: $0 may be a link
47 | PRG="$0"
48 | # Need this for relative symlinks.
49 | while [ -h "$PRG" ] ; do
50 | ls=`ls -ld "$PRG"`
51 | link=`expr "$ls" : '.*-> \(.*\)$'`
52 | if expr "$link" : '/.*' > /dev/null; then
53 | PRG="$link"
54 | else
55 | PRG=`dirname "$PRG"`"/$link"
56 | fi
57 | done
58 | SAVED="`pwd`"
59 | cd "`dirname \"$PRG\"`/" >/dev/null
60 | APP_HOME="`pwd -P`"
61 | cd "$SAVED" >/dev/null
62 |
63 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
64 |
65 | # Determine the Java command to use to start the JVM.
66 | if [ -n "$JAVA_HOME" ] ; then
67 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
68 | # IBM's JDK on AIX uses strange locations for the executables
69 | JAVACMD="$JAVA_HOME/jre/sh/java"
70 | else
71 | JAVACMD="$JAVA_HOME/bin/java"
72 | fi
73 | if [ ! -x "$JAVACMD" ] ; then
74 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
75 |
76 | Please set the JAVA_HOME variable in your environment to match the
77 | location of your Java installation."
78 | fi
79 | else
80 | JAVACMD="java"
81 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
82 |
83 | Please set the JAVA_HOME variable in your environment to match the
84 | location of your Java installation."
85 | fi
86 |
87 | # Increase the maximum file descriptors if we can.
88 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
89 | MAX_FD_LIMIT=`ulimit -H -n`
90 | if [ $? -eq 0 ] ; then
91 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
92 | MAX_FD="$MAX_FD_LIMIT"
93 | fi
94 | ulimit -n $MAX_FD
95 | if [ $? -ne 0 ] ; then
96 | warn "Could not set maximum file descriptor limit: $MAX_FD"
97 | fi
98 | else
99 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
100 | fi
101 | fi
102 |
103 | # For Darwin, add options to specify how the application appears in the dock
104 | if $darwin; then
105 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
106 | fi
107 |
108 | # For Cygwin, switch paths to Windows format before running java
109 | if $cygwin ; then
110 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
111 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
112 | JAVACMD=`cygpath --unix "$JAVACMD"`
113 |
114 | # We build the pattern for arguments to be converted via cygpath
115 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
116 | SEP=""
117 | for dir in $ROOTDIRSRAW ; do
118 | ROOTDIRS="$ROOTDIRS$SEP$dir"
119 | SEP="|"
120 | done
121 | OURCYGPATTERN="(^($ROOTDIRS))"
122 | # Add a user-defined pattern to the cygpath arguments
123 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
124 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
125 | fi
126 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
127 | i=0
128 | for arg in "$@" ; do
129 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
130 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
131 |
132 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
133 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
134 | else
135 | eval `echo args$i`="\"$arg\""
136 | fi
137 | i=$((i+1))
138 | done
139 | case $i in
140 | (0) set -- ;;
141 | (1) set -- "$args0" ;;
142 | (2) set -- "$args0" "$args1" ;;
143 | (3) set -- "$args0" "$args1" "$args2" ;;
144 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
145 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
146 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
147 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
148 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
149 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
150 | esac
151 | fi
152 |
153 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
154 | function splitJvmOpts() {
155 | JVM_OPTS=("$@")
156 | }
157 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
158 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
159 |
160 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
161 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
12 | set DEFAULT_JVM_OPTS=
13 |
14 | set DIRNAME=%~dp0
15 | if "%DIRNAME%" == "" set DIRNAME=.
16 | set APP_BASE_NAME=%~n0
17 | set APP_HOME=%DIRNAME%
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 | if "%@eval[2+2]" == "4" goto 4NT_args
53 |
54 | :win9xME_args
55 | @rem Slurp the command line arguments.
56 | set CMD_LINE_ARGS=
57 | set _SKIP=2
58 |
59 | :win9xME_args_slurp
60 | if "x%~1" == "x" goto execute
61 |
62 | set CMD_LINE_ARGS=%*
63 | goto execute
64 |
65 | :4NT_args
66 | @rem Get arguments from the 4NT Shell from JP Software
67 | set CMD_LINE_ARGS=%$
68 |
69 | :execute
70 | @rem Setup the command line
71 |
72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
73 |
74 | @rem Execute Gradle
75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
76 |
77 | :end
78 | @rem End local scope for the variables with windows NT shell
79 | if "%ERRORLEVEL%"=="0" goto mainEnd
80 |
81 | :fail
82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
83 | rem the _cmd.exe /c_ return code!
84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
85 | exit /b 1
86 |
87 | :mainEnd
88 | if "%OS%"=="Windows_NT" endlocal
89 |
90 | :omega
91 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':app', ':filterlib'
2 |
--------------------------------------------------------------------------------