8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # FaceRecognition
2 | This is an Android application for face recognition. The main purpose of this project is to serve as basis for others who want to deploy their deep learning models to an Android application.
3 |
4 | ## Face Detection
5 | The face detection package was taken from [this repository](https://github.com/vcvycy/MTCNN4Android). It uses the MTCNN model for face detection, and it is run using an old TensorFlow API for Android (TensorFlowInferenceInterface).
6 |
7 | ## Face Recognition
8 | The face recognition model used is FaceNet. It was obtained through the instructions in [this repository](https://github.com/jiangxiluning/facenet_mtcnn_to_mobile). The model is run using the TensorFlow Lite API.
9 |
10 | ## Usage
11 | If you want to use the code of this project in your own application, then follow the steps below:
12 |
13 | - Copy the model files (mtcnn_freezed_model.pb and facenet.tflite) to your "assets" folder.
14 | - Add the following code to "build.gradle":
15 | ```
16 | android {
17 | aaptOptions {
18 | noCompress "tflite" // Prevents compression of tflite files
19 | noCompress "lite"
20 | }
21 | }
22 |
23 | dependencies {
24 | implementation 'org.tensorflow:tensorflow-lite:0.0.0-nightly' // Official API
25 | implementation 'org.tensorflow:tensorflow-android:+' // Old API (TensorflowInferenceInterface)
26 | }
27 | ```
28 | - Copy the packages FaceRecognition and FaceDetection.
29 | - To calculate the similarity score between two faces do as follows:
30 | ```java
31 |
32 | FaceNet facenet = new FaceNet(getAssets());
33 |
34 | // cropFace returns null if no face could be detected in the provided image
35 | Bitmap face1 = cropFace(bitmap1);
36 | Bitmap face2 = cropFace(bitmap2);
37 |
38 | if (face1 != null && face2 != null) { // To make sure both faces were detected successfully
39 | double score = facenet.getSimilarityScore(face1, face2); // Euclidean distance between the face descriptor vectors
40 | }
41 |
42 | facenet.close();
43 | ```
44 |
--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/app/app.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 | generateDebugSources
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 |
3 | android {
4 | compileSdkVersion 29
5 | buildToolsVersion "29.0.0"
6 | defaultConfig {
7 | applicationId "com.example.facerecognition"
8 | minSdkVersion 15
9 | targetSdkVersion 29
10 | versionCode 1
11 | versionName "1.0"
12 | testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
13 | }
14 |
15 | aaptOptions {
16 | noCompress "tflite"
17 | noCompress "lite"
18 | }
19 |
20 | buildTypes {
21 | release {
22 | minifyEnabled false
23 | proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
24 | }
25 | }
26 | compileOptions {
27 | sourceCompatibility = '1.8'
28 | targetCompatibility = '1.8'
29 | }
30 | }
31 |
32 | dependencies {
33 | implementation fileTree(dir: 'libs', include: ['*.jar'])
34 | implementation 'androidx.appcompat:appcompat:1.0.2'
35 | implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
36 | testImplementation 'junit:junit:4.12'
37 | androidTestImplementation 'androidx.test:runner:1.2.0'
38 | androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
39 |
40 | implementation 'org.tensorflow:tensorflow-lite:0.0.0-nightly' // New API
41 | implementation 'org.tensorflow:tensorflow-android:+' // Old API
42 | }
43 |
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/app/src/androidTest/java/com/example/facerecognition/ExampleInstrumentedTest.java:
--------------------------------------------------------------------------------
1 | package com.example.facerecognition;
2 |
3 | import android.content.Context;
4 |
5 | import androidx.test.InstrumentationRegistry;
6 | import androidx.test.runner.AndroidJUnit4;
7 |
8 | import org.junit.Test;
9 | import org.junit.runner.RunWith;
10 |
11 | import static org.junit.Assert.*;
12 |
13 | /**
14 | * Instrumented test, which will execute on an Android device.
15 | *
16 | * @see Testing documentation
17 | */
18 | @RunWith(AndroidJUnit4.class)
19 | public class ExampleInstrumentedTest {
20 | @Test
21 | public void useAppContext() {
22 | // Context of the app under test.
23 | Context appContext = InstrumentationRegistry.getTargetContext();
24 |
25 | assertEquals("com.example.facerecognition", appContext.getPackageName());
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/app/src/main/assets/facenet.tflite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/felipegnunes/android-face-recognition/9b0187ea03685c0a77808e73432daf5ffe8cdec3/app/src/main/assets/facenet.tflite
--------------------------------------------------------------------------------
/app/src/main/assets/mtcnn_freezed_model.pb:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/felipegnunes/android-face-recognition/9b0187ea03685c0a77808e73432daf5ffe8cdec3/app/src/main/assets/mtcnn_freezed_model.pb
--------------------------------------------------------------------------------
/app/src/main/java/com/example/facerecognition/FaceDetection/Box.java:
--------------------------------------------------------------------------------
1 | package com.example.facerecognition.FaceDetection;
2 | /*
3 | MTCNN For Android
4 | by cjf@xmu 20180625
5 | */
6 | import android.graphics.Point;
7 | import android.graphics.Rect;
8 |
9 | import static java.lang.Math.max;
10 |
11 | public class Box {
12 |
13 | public int[] box; //left:box[0],top:box[1],right:box[2],bottom:box[3]
14 | public float score; //probability
15 | public float[] bbr; //bounding box regression
16 | public boolean deleted;
17 | public Point[] landmark; //facial landmark.只有ONet输出Landmark
18 |
19 | Box(){
20 | box=new int[4];
21 | bbr=new float[4];
22 | deleted=false;
23 | landmark=new Point[5];
24 | }
25 |
26 | public int left(){return box[0];}
27 | public int right(){return box[2];}
28 | public int top(){return box[1];}
29 | public int bottom(){return box[3];}
30 | public int width(){return box[2]-box[0]+1;}
31 | public int height(){return box[3]-box[1]+1;}
32 |
33 | //转为rect
34 | public Rect transform2Rect(){
35 | Rect rect=new Rect();
36 | rect.left=Math.round(box[0]);
37 | rect.top=Math.round(box[1]);
38 | rect.right=Math.round(box[2]);
39 | rect.bottom=Math.round(box[3]);
40 | return rect;
41 | }
42 |
43 | //面积
44 | public int area(){
45 | return width()*height();
46 | }
47 | //Bounding Box Regression
48 | public void calibrate(){
49 | int w=box[2]-box[0]+1;
50 | int h=box[3]-box[1]+1;
51 | box[0]=(int)(box[0]+w*bbr[0]);
52 | box[1]=(int)(box[1]+h*bbr[1]);
53 | box[2]=(int)(box[2]+w*bbr[2]);
54 | box[3]=(int)(box[3]+h*bbr[3]);
55 | for (int i=0;i<4;i++) bbr[i]=0.0f;
56 | }
57 |
58 | //当前box转为正方形
59 | public void toSquareShape(){
60 | int w=width();
61 | int h=height();
62 | if (w>h){
63 | box[1]-=(w-h)/2;
64 | box[3]+=(w-h+1)/2;
65 | }else{
66 | box[0]-=(h-w)/2;
67 | box[2]+=(h-w+1)/2;
68 | }
69 | }
70 |
71 | //防止边界溢出,并维持square大小
72 | public void limit_square(int w,int h){
73 | if (box[0]<0 || box[1]<0){
74 | int len=max(-box[0],-box[1]);
75 | box[0]+=len;
76 | box[1]+=len;
77 | }
78 | if (box[2]>=w || box[3]>=h){
79 | int len=max(box[2]-w+1,box[3]-h+1);
80 | box[2]-=len;
81 | box[3]-=len;
82 | }
83 | }
84 |
85 | public void limit_square2(int w,int h){
86 | if (width() > w) box[2]-=width()-w;
87 | if (height()> h) box[3]-=height()-h;
88 | if (box[0]<0){
89 | int sz=-box[0];
90 | box[0]+=sz;
91 | box[2]+=sz;
92 | }
93 | if (box[1]<0){
94 | int sz=-box[1];
95 | box[1]+=sz;
96 | box[3]+=sz;
97 | }
98 | if (box[2]>=w){
99 | int sz=box[2]-w+1;
100 | box[2]-=sz;
101 | box[0]-=sz;
102 | }
103 | if (box[3]>=h){
104 | int sz=box[3]-h+1;
105 | box[3]-=sz;
106 | box[1]-=sz;
107 | }
108 | }
109 | }
110 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/facerecognition/FaceDetection/MTCNN.java:
--------------------------------------------------------------------------------
1 | package com.example.facerecognition.FaceDetection;
2 | /*
3 | MTCNN For Android
4 | by cjf@xmu 20180625
5 | */
6 | import android.content.res.AssetManager;
7 | import android.graphics.Bitmap;
8 | import android.graphics.Matrix;
9 | import android.graphics.Point;
10 | import android.util.Log;
11 |
12 | import org.tensorflow.contrib.android.TensorFlowInferenceInterface;
13 |
14 | import java.util.Vector;
15 |
16 | import static java.lang.Math.max;
17 | import static java.lang.Math.min;
18 |
19 | public class MTCNN {
20 |
21 | //参数
22 | private float factor=0.709f;
23 | private float PNetThreshold=0.6f;
24 | private float RNetThreshold=0.7f;
25 | private float ONetThreshold=0.7f;
26 |
27 | //MODEL PATH
28 | private static final String MODEL_FILE = "file:///android_asset/mtcnn_freezed_model.pb";
29 |
30 | //tensor name
31 | private static final String PNetInName ="pnet/input:0";
32 | private static final String[] PNetOutName =new String[]{"pnet/prob1:0","pnet/conv4-2/BiasAdd:0"};
33 | private static final String RNetInName ="rnet/input:0";
34 | private static final String[] RNetOutName =new String[]{ "rnet/prob1:0","rnet/conv5-2/conv5-2:0",};
35 | private static final String ONetInName ="onet/input:0";
36 | private static final String[] ONetOutName =new String[]{ "onet/prob1:0","onet/conv6-2/conv6-2:0","onet/conv6-3/conv6-3:0"};
37 |
38 | //安卓相关
39 | public long lastProcessTime; //最后一张图片处理的时间ms
40 | private static final String TAG="MTCNN";
41 | private AssetManager assetManager;
42 | private TensorFlowInferenceInterface inferenceInterface;
43 |
44 | public MTCNN(AssetManager mgr){
45 | assetManager=mgr;
46 | loadModel();
47 | }
48 |
49 | public void close(){
50 | inferenceInterface.close();
51 | }
52 |
53 | private boolean loadModel() {
54 | //AssetManager
55 | try {
56 | inferenceInterface = new TensorFlowInferenceInterface(assetManager, MODEL_FILE);
57 | Log.d("Facenet","[*]load model success");
58 | }catch(Exception e){
59 | Log.e("Facenet","[*]load model failed" + e);
60 | return false;
61 | }
62 | return true;
63 | }
64 |
65 | //读取Bitmap像素值,预处理(-127.5 /128),转化为一维数组返回
66 | private float[] normalizeImage(Bitmap bitmap){
67 | int w=bitmap.getWidth();
68 | int h=bitmap.getHeight();
69 | float[] floatValues=new float[w*h*3];
70 | int[] intValues=new int[w*h];
71 | bitmap.getPixels(intValues,0,bitmap.getWidth(),0,0,bitmap.getWidth(),bitmap.getHeight());
72 | float imageMean=127.5f;
73 | float imageStd=128;
74 |
75 | for (int i=0;i> 16) & 0xFF) - imageMean) / imageStd;
78 | floatValues[i * 3 + 1] = (((val >> 8) & 0xFF) - imageMean) / imageStd;
79 | floatValues[i * 3 + 2] = ((val & 0xFF) - imageMean) / imageStd;
80 | }
81 | return floatValues;
82 | }
83 |
84 | /*
85 | 检测人脸,minSize是最小的人脸像素值
86 | */
87 | private Bitmap bitmapResize(Bitmap bm, float scale) {
88 | int width = bm.getWidth();
89 | int height = bm.getHeight();
90 | // CREATE A MATRIX FOR THE MANIPULATION。matrix指定图片仿射变换参数
91 | Matrix matrix = new Matrix();
92 | // RESIZE THE BIT MAP
93 | matrix.postScale(scale, scale);
94 | Bitmap resizedBitmap = Bitmap.createBitmap(
95 | bm, 0, 0, width, height, matrix, true);
96 | return resizedBitmap;
97 | }
98 |
99 | //输入前要翻转,输出也要翻转
100 | private int PNetForward(Bitmap bitmap,float [][]PNetOutProb,float[][][]PNetOutBias){
101 | int w=bitmap.getWidth();
102 | int h=bitmap.getHeight();
103 |
104 | float[] PNetIn=normalizeImage(bitmap);
105 | Utils.flip_diag(PNetIn,h,w,3); //沿着对角线翻转
106 | inferenceInterface.feed(PNetInName,PNetIn,1,w,h,3);
107 | inferenceInterface.run(PNetOutName,false);
108 | int PNetOutSizeW=(int)Math.ceil(w*0.5-5);
109 | int PNetOutSizeH=(int)Math.ceil(h*0.5-5);
110 | float[] PNetOutP=new float[PNetOutSizeW*PNetOutSizeH*2];
111 | float[] PNetOutB=new float[PNetOutSizeW*PNetOutSizeH*4];
112 | inferenceInterface.fetch(PNetOutName[0],PNetOutP);
113 | inferenceInterface.fetch(PNetOutName[1],PNetOutB);
114 | //【写法一】先翻转,后转为2/3维数组
115 | Utils.flip_diag(PNetOutP,PNetOutSizeW,PNetOutSizeH,2);
116 | Utils.flip_diag(PNetOutB,PNetOutSizeW,PNetOutSizeH,4);
117 | Utils.expand(PNetOutB,PNetOutBias);
118 | Utils.expandProb(PNetOutP,PNetOutProb);
119 | /*
120 | *【写法二】这个比较快,快了3ms。意义不大,用上面的方法比较直观
121 | for (int y=0;y boxes,float threshold,String method){
135 | //NMS.两两比对
136 | //int delete_cnt=0;
137 | int cnt=0;
138 | for(int i=0;i= threshold) { //删除prob小的那个框
159 | if (box.score>box2.score)
160 | box2.deleted=true;
161 | else
162 | box.deleted=true;
163 | //delete_cnt++;
164 | }
165 | }
166 | }
167 | }
168 | }
169 | //Log.i(TAG,"[*]sum:"+boxes.size()+" delete:"+delete_cnt);
170 | }
171 |
172 | private int generateBoxes(float[][] prob,float[][][]bias,float scale,float threshold,Vector boxes){
173 | int h=prob.length;
174 | int w=prob[0].length;
175 | //Log.i(TAG,"[*]height:"+prob.length+" width:"+prob[0].length);
176 | for (int y=0;ythreadshold(0.6 here)
180 | if (score>threshold){
181 | Box box=new Box();
182 | //score
183 | box.score=score;
184 | //box
185 | box.box[0]=Math.round(x*2/scale);
186 | box.box[1]=Math.round(y*2/scale);
187 | box.box[2]=Math.round((x*2+11)/scale);
188 | box.box[3]=Math.round((y*2+11)/scale);
189 | //bbr
190 | for(int i=0;i<4;i++)
191 | box.bbr[i]=bias[y][x][i];
192 | //add
193 | boxes.addElement(box);
194 | }
195 | }
196 | return 0;
197 | }
198 |
199 | private void BoundingBoxReggression(Vector boxes){
200 | for (int i=0;i PNet(Bitmap bitmap,int minSize){
212 | int whMin=min(bitmap.getWidth(),bitmap.getHeight());
213 | float currentFaceSize=minSize; //currentFaceSize=minSize/(factor^k) k=0,1,2... until excced whMin
214 | Vector totalBoxes=new Vector();
215 | //【1】Image Paramid and Feed to Pnet
216 | while (currentFaceSize<=whMin){
217 | float scale=12.0f/currentFaceSize;
218 | //(1)Image Resize
219 | Bitmap bm=bitmapResize(bitmap,scale);
220 | int w=bm.getWidth();
221 | int h=bm.getHeight();
222 | //(2)RUN CNN
223 | int PNetOutSizeW=(int)(Math.ceil(w*0.5-5)+0.5);
224 | int PNetOutSizeH=(int)(Math.ceil(h*0.5-5)+0.5);
225 | float[][] PNetOutProb=new float[PNetOutSizeH][PNetOutSizeW];;
226 | float[][][] PNetOutBias=new float[PNetOutSizeH][PNetOutSizeW][4];
227 | PNetForward(bm,PNetOutProb,PNetOutBias);
228 | //(3)数据解析
229 | Vector curBoxes=new Vector();
230 | generateBoxes(PNetOutProb,PNetOutBias,scale,PNetThreshold,curBoxes);
231 | //Log.i(TAG,"[*]CNN Output Box number:"+curBoxes.size()+" Scale:"+scale);
232 | //(4)nms 0.5
233 | nms(curBoxes,0.5f,"Union");
234 | //(5)add to totalBoxes
235 | for (int i=0;i> 16) & 0xFF) - imageMean) / imageStd;
264 | data[i * 3 + 1] = (((val >> 8) & 0xFF) - imageMean) / imageStd;
265 | data[i * 3 + 2] = ((val & 0xFF) - imageMean) / imageStd;
266 | }
267 | }
268 |
269 | /*
270 | * RNET跑神经网络,将score和bias写入boxes
271 | */
272 | private void RNetForward(float[] RNetIn,Vectorboxes){
273 | int num=RNetIn.length/24/24/3;
274 | //feed & run
275 | inferenceInterface.feed(RNetInName,RNetIn,num,24,24,3);
276 | inferenceInterface.run(RNetOutName,false);
277 | //fetch
278 | float[] RNetP=new float[num*2];
279 | float[] RNetB=new float[num*4];
280 | inferenceInterface.fetch(RNetOutName[0],RNetP);
281 | inferenceInterface.fetch(RNetOutName[1],RNetB);
282 | //转换
283 | for (int i=0;i RNet(Bitmap bitmap,Vector boxes){
292 | //RNet Input Init
293 | int num=boxes.size();
294 | float[] RNetIn=new float[num*24*24*3];
295 | float[] curCrop=new float[24*24*3];
296 | int RNetInIdx=0;
297 | for (int i=0;iboxes){
319 | int num=ONetIn.length/48/48/3;
320 | //feed & run
321 | inferenceInterface.feed(ONetInName,ONetIn,num,48,48,3);
322 | inferenceInterface.run(ONetOutName,false);
323 | //fetch
324 | float[] ONetP=new float[num*2]; //prob
325 | float[] ONetB=new float[num*4]; //bias
326 | float[] ONetL=new float[num*10]; //landmark
327 | inferenceInterface.fetch(ONetOutName[0],ONetP);
328 | inferenceInterface.fetch(ONetOutName[1],ONetB);
329 | inferenceInterface.fetch(ONetOutName[2],ONetL);
330 | //转换
331 | for (int i=0;i ONet(Bitmap bitmap,Vector boxes){
350 | //ONet Input Init
351 | int num=boxes.size();
352 | float[] ONetIn=new float[num*48*48*3];
353 | float[] curCrop=new float[48*48*3];
354 | int ONetInIdx=0;
355 | for (int i=0;iboxes,int w,int h){
373 | //square
374 | for (int i=0;i detectFaces(Bitmap bitmap,int minFaceSize) {
388 | long t_start = System.currentTimeMillis();
389 | //【1】PNet generate candidate boxes
390 | Vector boxes=PNet(bitmap,minFaceSize);
391 | square_limit(boxes,bitmap.getWidth(),bitmap.getHeight());
392 | //【2】RNet
393 | boxes=RNet(bitmap,boxes);
394 | square_limit(boxes,bitmap.getWidth(),bitmap.getHeight());
395 | //【3】ONet
396 | boxes=ONet(bitmap,boxes);
397 | //return
398 | Log.i(TAG,"[*]Mtcnn Detection Time:"+(System.currentTimeMillis()-t_start));
399 | lastProcessTime=(System.currentTimeMillis()-t_start);
400 | return boxes;
401 | }
402 | }
403 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/facerecognition/FaceDetection/Utils.java:
--------------------------------------------------------------------------------
1 | package com.example.facerecognition.FaceDetection;
2 | /*
3 | MTCNN For Android
4 | by cjf@xmu 20180625
5 | */
6 | import android.graphics.Bitmap;
7 | import android.graphics.Canvas;
8 | import android.graphics.Color;
9 | import android.graphics.Paint;
10 | import android.graphics.Point;
11 | import android.graphics.Rect;
12 | import android.util.Log;
13 | import android.widget.ImageView;
14 |
15 | import java.util.Vector;
16 |
17 | public class Utils {
18 |
19 | //复制图片,并设置isMutable=true
20 | public static Bitmap copyBitmap(Bitmap bitmap){
21 | return bitmap.copy(bitmap.getConfig(),true);
22 | }
23 |
24 | //在bitmap中画矩形
25 | public static void drawRect(Bitmap bitmap,Rect rect){
26 | try {
27 | Canvas canvas = new Canvas(bitmap);
28 | Paint paint = new Paint();
29 | int r=255;//(int)(Math.random()*255);
30 | int g=0;//(int)(Math.random()*255);
31 | int b=0;//(int)(Math.random()*255);
32 | paint.setColor(Color.rgb(r, g, b));
33 | paint.setStrokeWidth(1+bitmap.getWidth()/500 );
34 | paint.setStyle(Paint.Style.STROKE);
35 | canvas.drawRect(rect, paint);
36 | }catch (Exception e){
37 | Log.i("Utils","[*] error"+e);
38 | }
39 | }
40 |
41 | //在图中画点
42 | public static void drawPoints(Bitmap bitmap, Point[] landmark){
43 | for (int i=0;i boxes){
91 | int cnt=0;
92 | for (int i=0;i updateBoxes(Vector boxes){
103 | Vector b=new Vector();
104 | for (int i=0;i>16)&0xff)+"G:"+((v>>8)&0xff)+ " B:"+(v&0xff));
113 | }
114 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/example/facerecognition/FaceRecognition/FaceNet.java:
--------------------------------------------------------------------------------
1 | package com.example.facerecognition.FaceRecognition;
2 |
3 | import android.content.res.AssetFileDescriptor;
4 | import android.content.res.AssetManager;
5 | import android.graphics.Bitmap;
6 | import android.util.Log;
7 |
8 | import org.tensorflow.lite.Interpreter;
9 |
10 | import java.io.FileInputStream;
11 | import java.io.IOException;
12 | import java.nio.ByteBuffer;
13 | import java.nio.ByteOrder;
14 | import java.nio.MappedByteBuffer;
15 | import java.nio.channels.FileChannel;
16 | import java.util.Arrays;
17 |
18 | public class FaceNet {
19 | private static final String MODEL_PATH = "facenet.tflite";
20 |
21 | private static final float IMAGE_MEAN = 127.5f;
22 | private static final float IMAGE_STD = 127.5f;
23 |
24 | private static final int BATCH_SIZE = 1;
25 | private static final int IMAGE_HEIGHT = 160;
26 | private static final int IMAGE_WIDTH = 160;
27 | private static final int NUM_CHANNELS = 3;
28 | private static final int NUM_BYTES_PER_CHANNEL = 4;
29 | private static final int EMBEDDING_SIZE = 512;
30 |
31 | private final int[] intValues = new int[IMAGE_HEIGHT * IMAGE_WIDTH];
32 | private ByteBuffer imgData;
33 |
34 | private MappedByteBuffer tfliteModel;
35 | private Interpreter tflite;
36 | private final Interpreter.Options tfliteOptions = new Interpreter.Options();
37 |
38 | public FaceNet(AssetManager assetManager) throws IOException{
39 | tfliteModel = loadModelFile(assetManager);
40 | tflite = new Interpreter(tfliteModel, tfliteOptions);
41 | imgData = ByteBuffer.allocateDirect(
42 | BATCH_SIZE
43 | * IMAGE_HEIGHT
44 | * IMAGE_WIDTH
45 | * NUM_CHANNELS
46 | * NUM_BYTES_PER_CHANNEL);
47 | imgData.order(ByteOrder.nativeOrder());
48 | }
49 |
50 | private MappedByteBuffer loadModelFile(AssetManager assetManager) throws IOException{
51 | AssetFileDescriptor fileDescriptor = assetManager.openFd(MODEL_PATH);
52 | FileInputStream inputStream = new FileInputStream(fileDescriptor.getFileDescriptor());
53 | FileChannel fileChannel = inputStream.getChannel();
54 | long startOffset = fileDescriptor.getStartOffset();
55 | long declaredLength = fileDescriptor.getDeclaredLength();
56 | return fileChannel.map(FileChannel.MapMode.READ_ONLY, startOffset, declaredLength);
57 | }
58 |
59 | private void convertBitmapToByteBuffer(Bitmap bitmap) {
60 | if (imgData == null) {
61 | return;
62 | }
63 | imgData.rewind();
64 | bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
65 | // Convert the image to floating point.
66 | int pixel = 0;
67 | for (int i = 0; i < IMAGE_HEIGHT; ++i) {
68 | for (int j = 0; j < IMAGE_WIDTH; ++j) {
69 | final int val = intValues[pixel++];
70 | addPixelValue(val);
71 | }
72 | }
73 | }
74 |
75 | private void addPixelValue(int pixelValue){
76 | //imgData.putFloat((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
77 | //imgData.putFloat((((pixelValue >> 8) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
78 | //imgData.putFloat(((pixelValue & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
79 | imgData.putFloat(((pixelValue >> 16) & 0xFF) / 255.0f);
80 | imgData.putFloat(((pixelValue >> 8) & 0xFF) / 255.0f);
81 | imgData.putFloat((pixelValue & 0xFF) / 255.0f);
82 | }
83 |
84 | public void inspectModel(){
85 | String tag = "Model Inspection";
86 | Log.i(tag, "Number of input tensors: " + String.valueOf(tflite.getInputTensorCount()));
87 | Log.i(tag, "Number of output tensors: " + String.valueOf(tflite.getOutputTensorCount()));
88 |
89 | Log.i(tag, tflite.getInputTensor(0).toString());
90 | Log.i(tag, "Input tensor data type: " + tflite.getInputTensor(0).dataType());
91 | Log.i(tag, "Input tensor shape: " + Arrays.toString(tflite.getInputTensor(0).shape()));
92 | Log.i(tag, "Output tensor 0 shape: " + Arrays.toString(tflite.getOutputTensor(0).shape()));
93 | }
94 |
95 | private Bitmap resizedBitmap(Bitmap bitmap, int height, int width){
96 | return Bitmap.createScaledBitmap(bitmap, width, height, true);
97 | }
98 |
99 | private Bitmap croppedBitmap(Bitmap bitmap, int upperCornerX, int upperCornerY, int height, int width){
100 | return Bitmap.createBitmap(bitmap, upperCornerX, upperCornerY, width, height);
101 | }
102 |
103 | private float[][] run(Bitmap bitmap){
104 | bitmap = resizedBitmap(bitmap, IMAGE_HEIGHT, IMAGE_WIDTH);
105 | convertBitmapToByteBuffer(bitmap);
106 |
107 | float[][] embeddings = new float[1][512];
108 | tflite.run(imgData, embeddings);
109 |
110 | return embeddings;
111 | }
112 |
113 | public double getSimilarityScore(Bitmap face1, Bitmap face2){
114 | float[][] face1_embedding = run(face1);
115 | float[][] face2_embedding = run(face2);
116 |
117 | double distance = 0.0;
118 | for (int i = 0; i < EMBEDDING_SIZE; i++){
119 | distance += (face1_embedding[0][i] - face2_embedding[0][i]) * (face1_embedding[0][i] - face2_embedding[0][i]);
120 | }
121 | distance = Math.sqrt(distance);
122 |
123 | return distance;
124 | }
125 |
126 | public void close(){
127 | if (tflite != null) {
128 | tflite.close();
129 | tflite = null;
130 | }
131 | tfliteModel = null;
132 | }
133 |
134 | /*public float[][] runFloat(Bitmap bitmap){
135 | bitmap = resizedBitmap(bitmap, IMAGE_HEIGHT, IMAGE_WIDTH);
136 | float [][][][] floatTensor = convertBitmapToFloatTensor(bitmap);
137 |
138 | float[][] embeddings = new float[1][512];
139 | tflite.run(floatTensor, embeddings);
140 |
141 | return embeddings;
142 | }
143 |
144 | private float[][][][] convertBitmapToFloatTensor(Bitmap bitmap){
145 | float[][][][] floatTensor = new float[BATCH_SIZE][IMAGE_HEIGHT][IMAGE_WIDTH][NUM_CHANNELS];
146 |
147 | int[] intArray = new int[bitmap.getWidth() * bitmap.getHeight()];
148 | bitmap.getPixels(intArray, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
149 |
150 | int pixel = 0;
151 | for (int i = 0; i < IMAGE_HEIGHT; i++){
152 | for (int j = 0; j < IMAGE_WIDTH; j++){
153 | final int val = intArray[pixel++];
154 | floatTensor[0][i][j][0] = (((val >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD;
155 | floatTensor[0][i][j][1] = (((val >> 8) & 0xFF) - IMAGE_MEAN) / IMAGE_STD;
156 | floatTensor[0][i][j][2] = ((val & 0xFF) - IMAGE_MEAN) / IMAGE_STD;
157 | }
158 | }
159 |
160 | return floatTensor;
161 | }*/
162 | }
163 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/facerecognition/MainActivity.java:
--------------------------------------------------------------------------------
1 | package com.example.facerecognition;
2 |
3 | import androidx.annotation.Nullable;
4 | import androidx.appcompat.app.AppCompatActivity;
5 |
6 | import android.content.Intent;
7 | import android.graphics.Bitmap;
8 | import android.graphics.BitmapFactory;
9 | import android.net.Uri;
10 | import android.os.Bundle;
11 | import android.util.Log;
12 | import android.view.View;
13 | import android.widget.Button;
14 | import android.widget.ImageView;
15 | import android.widget.TextView;
16 | import android.widget.Toast;
17 |
18 | import com.example.facerecognition.FaceDetection.Box;
19 | import com.example.facerecognition.FaceDetection.MTCNN;
20 | import com.example.facerecognition.FaceRecognition.FaceNet;
21 |
22 | import java.io.FileNotFoundException;
23 | import java.io.IOException;
24 | import java.io.InputStream;
25 | import java.util.Vector;
26 |
27 | public class MainActivity extends AppCompatActivity {
28 |
29 | private static final int PICK_IMAGEVIEW_CONTENT = 1;
30 | private static final int PICK_IMAGEVIEW2_CONTENT = 2;
31 |
32 | private Bitmap image;
33 | private Bitmap image2;
34 | private ImageView imageView;
35 | private ImageView imageView2;
36 | private Button button;
37 | private MTCNN mtcnn;
38 | private TextView textView;
39 |
40 | private Bitmap cropFace(Bitmap bitmap, MTCNN mtcnn){
41 | Bitmap croppedBitmap = null;
42 | try {
43 | Vector boxes = mtcnn.detectFaces(bitmap, 10);
44 |
45 | Log.i("MTCNN", "No. of faces detected: " + boxes.size());
46 |
47 | int left = boxes.get(0).left();
48 | int top = boxes.get(0).top();
49 |
50 | int x = boxes.get(0).left();
51 | int y = boxes.get(0).top();
52 | int width = boxes.get(0).width();
53 | int height = boxes.get(0).height();
54 |
55 |
56 | if (y + height >= bitmap.getHeight())
57 | height -= (y + height) - (bitmap.getHeight() - 1);
58 | if (x + width >= bitmap.getWidth())
59 | width -= (x + width) - (bitmap.getWidth() - 1);
60 |
61 | Log.i("MTCNN", "Final x: " + String.valueOf(x + width));
62 | Log.i("MTCNN", "Width: " + bitmap.getWidth());
63 | Log.i("MTCNN", "Final y: " + String.valueOf(y + width));
64 | Log.i("MTCNN", "Height: " + bitmap.getWidth());
65 |
66 | croppedBitmap = Bitmap.createBitmap(bitmap, x, y, width, height);
67 | }catch (Exception e){
68 | e.printStackTrace();
69 | }
70 | return croppedBitmap;
71 | }
72 |
73 | @Override
74 | protected void onCreate(Bundle savedInstanceState) {
75 | super.onCreate(savedInstanceState);
76 | setContentView(R.layout.activity_main);
77 |
78 | imageView = findViewById(R.id.imageView);
79 | imageView2 = findViewById(R.id.imageView2);
80 | button = findViewById(R.id.button);
81 | textView = findViewById(R.id.textView);
82 |
83 | imageView.setOnClickListener(view -> {
84 | Intent photoPickerIntent = new Intent(Intent.ACTION_PICK);
85 | photoPickerIntent.setType("image/*");
86 | startActivityForResult(photoPickerIntent, PICK_IMAGEVIEW_CONTENT);
87 | });
88 |
89 | imageView2.setOnClickListener(view -> {
90 | Intent photoPickerIntent = new Intent(Intent.ACTION_PICK);
91 | photoPickerIntent.setType("image/*");
92 | startActivityForResult(photoPickerIntent, PICK_IMAGEVIEW2_CONTENT);
93 | });
94 |
95 | button.setOnClickListener(view -> {
96 | if (image == null || image2 == null){
97 | Toast.makeText(getApplicationContext(), "One of the images haven't been set yet.", Toast.LENGTH_SHORT).show();
98 | }else{
99 | MTCNN mtcnn = new MTCNN(getAssets());
100 |
101 | Bitmap face1 = cropFace(image, mtcnn);
102 | Bitmap face2 = cropFace(image2, mtcnn);
103 |
104 | mtcnn.close();
105 |
106 | FaceNet facenet = null;
107 | try {
108 | facenet = new FaceNet(getAssets());
109 | } catch (IOException e) {
110 | e.printStackTrace();
111 | }
112 |
113 | if (face1 != null)
114 | imageView.setImageBitmap(face1);
115 | else
116 | Log.i("detect", "Couldn't crop image 1.");
117 |
118 | if (face2 != null)
119 | imageView2.setImageBitmap(face2);
120 | else
121 | Log.i("detect", "Couldn't crop image 2.");
122 |
123 | if (face1 != null && face2 != null) {
124 | double score = facenet.getSimilarityScore(face1, face2);
125 | Log.i("score", String.valueOf(score));
126 | //Toast.makeText(MainActivity.this, "Similarity score: " + score, Toast.LENGTH_LONG).show();
127 | String text = String.format("Similarity score = %.2f", score);
128 | textView.setText(text);
129 | }
130 |
131 | facenet.close();
132 | }
133 | });
134 | }
135 |
136 | @Override
137 | protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
138 | super.onActivityResult(requestCode, resultCode, data);
139 |
140 | if (requestCode == PICK_IMAGEVIEW_CONTENT && resultCode == RESULT_OK) {
141 | try {
142 | Uri imageUri = data.getData();
143 | InputStream imageStream = getContentResolver().openInputStream(imageUri);
144 | image = BitmapFactory.decodeStream(imageStream);
145 | imageView.setImageBitmap(image);
146 | textView.setText("");
147 | } catch (FileNotFoundException e) {
148 | e.printStackTrace();
149 | Toast.makeText(MainActivity.this, "Error loading gallery image.", Toast.LENGTH_LONG).show();
150 | }
151 | }else if (requestCode == PICK_IMAGEVIEW2_CONTENT && resultCode == RESULT_OK) {
152 | try {
153 | Uri imageUri = data.getData();
154 | InputStream imageStream = getContentResolver().openInputStream(imageUri);
155 | image2 = BitmapFactory.decodeStream(imageStream);
156 | imageView2.setImageBitmap(image2);
157 | textView.setText("");
158 | } catch (FileNotFoundException e) {
159 | e.printStackTrace();
160 | Toast.makeText(MainActivity.this, "Error loading gallery image.", Toast.LENGTH_LONG).show();
161 | }
162 | }
163 | }
164 |
165 |
166 |
167 | }
168 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable-v24/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
1 |
7 |
12 |
13 |
19 |
22 |
25 |
26 |
27 |
28 |
34 |
35 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_launcher_background.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
10 |
15 |
20 |
25 |
30 |
35 |
40 |
45 |
50 |
55 |
60 |
65 |
70 |
75 |
80 |
85 |
90 |
95 |
100 |
105 |
110 |
115 |
120 |
125 |
130 |
135 |
140 |
145 |
150 |
155 |
160 |
165 |
170 |
171 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
2 |
8 |
9 |
23 |
24 |
35 |
36 |
50 |
51 |
64 |
65 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/felipegnunes/android-face-recognition/9b0187ea03685c0a77808e73432daf5ffe8cdec3/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/felipegnunes/android-face-recognition/9b0187ea03685c0a77808e73432daf5ffe8cdec3/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/felipegnunes/android-face-recognition/9b0187ea03685c0a77808e73432daf5ffe8cdec3/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/felipegnunes/android-face-recognition/9b0187ea03685c0a77808e73432daf5ffe8cdec3/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/felipegnunes/android-face-recognition/9b0187ea03685c0a77808e73432daf5ffe8cdec3/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/felipegnunes/android-face-recognition/9b0187ea03685c0a77808e73432daf5ffe8cdec3/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/felipegnunes/android-face-recognition/9b0187ea03685c0a77808e73432daf5ffe8cdec3/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/felipegnunes/android-face-recognition/9b0187ea03685c0a77808e73432daf5ffe8cdec3/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/felipegnunes/android-face-recognition/9b0187ea03685c0a77808e73432daf5ffe8cdec3/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/felipegnunes/android-face-recognition/9b0187ea03685c0a77808e73432daf5ffe8cdec3/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | #008577
4 | #00574B
5 | #D81B60
6 |
7 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | FaceRecognition
3 |
4 |
--------------------------------------------------------------------------------
/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/app/src/test/java/com/example/facerecognition/ExampleUnitTest.java:
--------------------------------------------------------------------------------
1 | package com.example.facerecognition;
2 |
3 | import org.junit.Test;
4 |
5 | import static org.junit.Assert.*;
6 |
7 | /**
8 | * Example local unit test, which will execute on the development machine (host).
9 | *
10 | * @see Testing documentation
11 | */
12 | public class ExampleUnitTest {
13 | @Test
14 | public void addition_isCorrect() {
15 | assertEquals(4, 2 + 2);
16 | }
17 | }
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
// Top-level build file where you can add configuration options common to all sub-projects/modules.

buildscript {
    repositories {
        google()
        // NOTE(review): JCenter was sunset in 2021 and is now read-only; new
        // artifact versions will not resolve from it. Consider migrating to
        // mavenCentral() — confirm all plugin/library artifacts resolve first.
        jcenter()

    }
    dependencies {
        // Android Gradle Plugin; pairs with the Gradle 5.4.1 wrapper in this repo.
        classpath 'com.android.tools.build:gradle:3.5.1'

        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}

allprojects {
    repositories {
        google()
        // NOTE(review): same JCenter sunset caveat as above.
        jcenter()

    }
}

// `gradlew clean` removes the root build directory.
task clean(type: Delete) {
    delete rootProject.buildDir
}
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx1536m
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. More details, visit
12 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 | # org.gradle.parallel=true
14 | # AndroidX package structure to make it clearer which packages are bundled with the
15 | # Android operating system, and which are packaged with your app's APK
16 | # https://developer.android.com/topic/libraries/support-library/androidx-rn
17 | android.useAndroidX=true
18 | # Automatically convert third-party libraries to use AndroidX
19 | android.enableJetifier=true
20 |
21 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/felipegnunes/android-face-recognition/9b0187ea03685c0a77808e73432daf5ffe8cdec3/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Mon Oct 21 22:16:34 BRT 2019
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-5.4.1-all.zip
7 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Attempt to set APP_HOME
10 | # Resolve links: $0 may be a link
11 | PRG="$0"
12 | # Need this for relative symlinks.
13 | while [ -h "$PRG" ] ; do
14 | ls=`ls -ld "$PRG"`
15 | link=`expr "$ls" : '.*-> \(.*\)$'`
16 | if expr "$link" : '/.*' > /dev/null; then
17 | PRG="$link"
18 | else
19 | PRG=`dirname "$PRG"`"/$link"
20 | fi
21 | done
22 | SAVED="`pwd`"
23 | cd "`dirname \"$PRG\"`/" >/dev/null
24 | APP_HOME="`pwd -P`"
25 | cd "$SAVED" >/dev/null
26 |
27 | APP_NAME="Gradle"
28 | APP_BASE_NAME=`basename "$0"`
29 |
30 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
31 | DEFAULT_JVM_OPTS=""
32 |
33 | # Use the maximum available, or set MAX_FD != -1 to use that value.
34 | MAX_FD="maximum"
35 |
36 | warn () {
37 | echo "$*"
38 | }
39 |
40 | die () {
41 | echo
42 | echo "$*"
43 | echo
44 | exit 1
45 | }
46 |
47 | # OS specific support (must be 'true' or 'false').
48 | cygwin=false
49 | msys=false
50 | darwin=false
51 | nonstop=false
52 | case "`uname`" in
53 | CYGWIN* )
54 | cygwin=true
55 | ;;
56 | Darwin* )
57 | darwin=true
58 | ;;
59 | MINGW* )
60 | msys=true
61 | ;;
62 | NONSTOP* )
63 | nonstop=true
64 | ;;
65 | esac
66 |
67 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
68 |
69 | # Determine the Java command to use to start the JVM.
70 | if [ -n "$JAVA_HOME" ] ; then
71 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
72 | # IBM's JDK on AIX uses strange locations for the executables
73 | JAVACMD="$JAVA_HOME/jre/sh/java"
74 | else
75 | JAVACMD="$JAVA_HOME/bin/java"
76 | fi
77 | if [ ! -x "$JAVACMD" ] ; then
78 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
79 |
80 | Please set the JAVA_HOME variable in your environment to match the
81 | location of your Java installation."
82 | fi
83 | else
84 | JAVACMD="java"
85 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
86 |
87 | Please set the JAVA_HOME variable in your environment to match the
88 | location of your Java installation."
89 | fi
90 |
91 | # Increase the maximum file descriptors if we can.
92 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
93 | MAX_FD_LIMIT=`ulimit -H -n`
94 | if [ $? -eq 0 ] ; then
95 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
96 | MAX_FD="$MAX_FD_LIMIT"
97 | fi
98 | ulimit -n $MAX_FD
99 | if [ $? -ne 0 ] ; then
100 | warn "Could not set maximum file descriptor limit: $MAX_FD"
101 | fi
102 | else
103 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
104 | fi
105 | fi
106 |
107 | # For Darwin, add options to specify how the application appears in the dock
108 | if $darwin; then
109 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
110 | fi
111 |
112 | # For Cygwin, switch paths to Windows format before running java
113 | if $cygwin ; then
114 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
115 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
116 | JAVACMD=`cygpath --unix "$JAVACMD"`
117 |
118 | # We build the pattern for arguments to be converted via cygpath
119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
120 | SEP=""
121 | for dir in $ROOTDIRSRAW ; do
122 | ROOTDIRS="$ROOTDIRS$SEP$dir"
123 | SEP="|"
124 | done
125 | OURCYGPATTERN="(^($ROOTDIRS))"
126 | # Add a user-defined pattern to the cygpath arguments
127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
129 | fi
130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
131 | i=0
132 | for arg in "$@" ; do
133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
135 |
136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
138 | else
139 | eval `echo args$i`="\"$arg\""
140 | fi
141 | i=$((i+1))
142 | done
143 | case $i in
144 | (0) set -- ;;
145 | (1) set -- "$args0" ;;
146 | (2) set -- "$args0" "$args1" ;;
147 | (3) set -- "$args0" "$args1" "$args2" ;;
148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
154 | esac
155 | fi
156 |
157 | # Escape application args
158 | save () {
159 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
160 | echo " "
161 | }
162 | APP_ARGS=$(save "$@")
163 |
164 | # Collect all arguments for the java command, following the shell quoting and substitution rules
165 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
166 |
167 | # by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
168 | if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
169 | cd "$(dirname "$0")"
170 | fi
171 |
172 | exec "$JAVACMD" "$@"
173 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | set DIRNAME=%~dp0
12 | if "%DIRNAME%" == "" set DIRNAME=.
13 | set APP_BASE_NAME=%~n0
14 | set APP_HOME=%DIRNAME%
15 |
16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
17 | set DEFAULT_JVM_OPTS=
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 |
53 | :win9xME_args
54 | @rem Slurp the command line arguments.
55 | set CMD_LINE_ARGS=
56 | set _SKIP=2
57 |
58 | :win9xME_args_slurp
59 | if "x%~1" == "x" goto execute
60 |
61 | set CMD_LINE_ARGS=%*
62 |
63 | :execute
64 | @rem Setup the command line
65 |
66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
67 |
68 | @rem Execute Gradle
69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
70 |
71 | :end
72 | @rem End local scope for the variables with windows NT shell
73 | if "%ERRORLEVEL%"=="0" goto mainEnd
74 |
75 | :fail
76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
77 | rem the _cmd.exe /c_ return code!
78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
79 | exit /b 1
80 |
81 | :mainEnd
82 | if "%OS%"=="Windows_NT" endlocal
83 |
84 | :omega
85 |
--------------------------------------------------------------------------------
/local.properties:
--------------------------------------------------------------------------------
1 | ## This file is automatically generated by Android Studio.
2 | # Do not modify this file -- YOUR CHANGES WILL BE ERASED!
3 | #
4 | # This file should *NOT* be checked into Version Control Systems,
5 | # as it contains information specific to your local configuration.
6 | #
7 | # Location of the SDK. This is only used by Gradle.
8 | # For customization when using a Version Control System, please read the
9 | # header note.
10 | sdk.dir=/home/felipe/Android/Sdk
11 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':app'
2 |
--------------------------------------------------------------------------------